mcp-voice-hooks 1.0.16 → 1.0.20
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +82 -42
- package/bin/cli.js +11 -0
- package/dist/unified-server.js +152 -48
- package/dist/unified-server.js.map +1 -1
- package/package.json +1 -1
- package/public/app.js +51 -2
package/README.md
CHANGED
@@ -1,36 +1,54 @@
-# Claude Code
+# Voice Mode for Claude Code
 
-
+Voice Mode for Claude Code allows you to have a continuous two-way conversation with Claude Code, hands-free.
+
+It uses the new [Claude Code hooks](https://docs.anthropic.com/en/docs/claude-code/hooks) to deliver voice input to Claude while it works.
+
+This lets you speak continuously to Claude - interrupt, redirect, or provide feedback without stopping.
 
 Optionally enable text-to-speech to have Claude speak back to you.
 
-
+Voice recognition and text-to-speech are handled by the browser, so there is nothing to download, and no API keys are needed.
+
+## Demo Video
+
+[](https://youtu.be/GbDatJtm8_k)
 
-
+## Installation
 
-
+Installation is easy.
+
+### 1. Install Claude Code
 
 ```bash
-#
-
+# IMPORTANT: there is a bug in the @latest version of Claude Code (1.0.44), so you must specify the version explicitly
+npm install -g @anthropic-ai/claude-code@">=1.0.45"
+```
 
-
+### 2. Install Voice Mode
+
+```bash
+npx mcp-voice-hooks install-hooks
 claude mcp add voice-hooks npx mcp-voice-hooks
+```
+
+## Usage
+
+### 1. Start Claude Code
 
-
+```bash
 claude
 ```
 
-
+### 2. Start Listening
+
+The browser interface will automatically open after 3 seconds (<http://localhost:5111>).
 
-
+Click "Start Listening"
 
-
+### 3. Speak
 
-
-- Queuing utterances for processing by Claude Code
-- Using hooks to ensure Claude checks for voice input before tool use and before stopping
-- Allowing natural interruptions like "No, stop that" or "Wait, try something else"
+Say something to Claude. You will need to send one message in the Claude Code CLI to start the conversation.
 
 ## Browser Compatibility
 
@@ -65,8 +83,6 @@ To use Siri voices with voice-hooks, you need to set your system voice and selec
 
 Other downloaded voices will show up in the voice dropdown in the voice-hooks browser interface so you can select them there directly, instead of using the "Mac System Voice" option.
 
-### Selecting and downloading high quality Browser Voices
-
 ## Manual Hook Installation
 
 The hooks are automatically installed/updated when the MCP server starts. However, if you need to manually install or reconfigure the hooks:
@@ -96,10 +112,6 @@ This will:
 - Clean up voice hooks from your project's `.claude/settings.json`
 - Preserve any custom hooks you've added
 
-## Known Limitations
-
-- **Intermittent Stop Hook Execution**: Claude Code's Stop hooks are not triggered if the agent stops immediately after a tool call. This results in the assistant occasionally stopping without checking for voice input. This will be fixed in Claude Code 1.0.45. [github issue](https://github.com/anthropics/claude-code/issues/3113#issuecomment-3047324928)
-
 ## Development Mode
 
 If you're developing mcp-voice-hooks itself:
@@ -129,51 +141,79 @@ claude
 - For changes to **browser files** (`public/*`), just restart Claude Code
 - Then restart Claude Code to use the updated code
 
-###
+### Configuration
+
+#### Port Configuration
 
-
+The default port is 5111. To use a different port, add to your project's `.claude/settings.json`:
 
-```
-
+```json
+{
+  "env": {
+    "MCP_VOICE_HOOKS_PORT": "8080"
+  }
+}
 ```
 
-
+#### Browser Auto-Open
+
+When running in MCP-managed mode, the browser will automatically open if no frontend connects within 3 seconds. To disable this behavior:
 
 ```json
 {
-"
-"
-    "type": "stdio",
-    "command": "npm",
-    "args": ["run", "mcp-proxy"],
-    "env": {}
-  }
+  "env": {
+    "MCP_VOICE_HOOKS_AUTO_OPEN_BROWSER": "false"
   }
 }
 ```
 
-
-
-#### Port Configuration
+#### Auto-Deliver Voice Input Before Tools
 
-
+By default, voice input is not automatically delivered before tool execution to allow for faster tool execution. To enable auto-delivery before tools:
 
 ```json
 {
   "env": {
-"
+    "MCP_VOICE_HOOKS_AUTO_DELIVER_VOICE_INPUT_BEFORE_TOOLS": "true"
   }
 }
 ```
 
-
+When auto-delivery before tools is enabled:
 
-
+- Voice input is automatically delivered before each tool execution
+- Tools may be delayed if there's pending voice input
+- This ensures voice commands are processed before tools run
+- **Note**: This setting only applies when `MCP_VOICE_HOOKS_AUTO_DELIVER_VOICE_INPUT` is enabled (default)
+
+When auto-delivery before tools is disabled (default):
+
+- Tools will execute immediately without checking for pending voice input
+- Voice input will only be processed at the stop hook or post-tool hook
+- **Important**: Delivered utterances that require voice responses will still be enforced
+- This provides better performance when voice interruption before tools is not needed
+
+#### Auto-Deliver Voice Input (Default)
+
+By default, mcp-voice-hooks automatically delivers voice input to Claude after tool use, before speaking, and before stopping:
+
+- The `dequeue_utterances` and `wait_for_utterance` MCP tools are hidden from Claude
+- Voice input is automatically delivered when Claude performs any action
+- Claude receives voice input naturally without needing to explicitly call mcp-voice-hooks tools
+
+To disable auto-delivery:
 
 ```json
 {
   "env": {
-"
+    "MCP_VOICE_HOOKS_AUTO_DELIVER_VOICE_INPUT": "false"
  }
 }
 ```
+
+When auto-delivery is disabled:
+
+- The `dequeue_utterances` and `wait_for_utterance` tools become visible
+- Hooks no longer automatically process voice input
+- Claude will be blocked from making tool calls until it manually dequeues voice input
+- This mode is useful for debugging or when you want manual control
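The new README sections above document four environment variables. As a quick cross-check against the server code later in this diff, the defaults resolve as sketched below (a minimal, hypothetical helper script, not part of the package; the file name is made up):

```js
// check-voice-hooks-config.js (hypothetical) - mirrors how unified-server.js
// derives its settings from the env block documented above.
const config = {
  // default port 5111 unless MCP_VOICE_HOOKS_PORT is set
  port: process.env.MCP_VOICE_HOOKS_PORT
    ? parseInt(process.env.MCP_VOICE_HOOKS_PORT, 10)
    : 5111,
  // browser auto-open defaults to true; only the literal string "false" disables it
  autoOpenBrowser: process.env.MCP_VOICE_HOOKS_AUTO_OPEN_BROWSER !== "false",
  // auto-delivery defaults to on
  autoDeliverVoiceInput:
    process.env.MCP_VOICE_HOOKS_AUTO_DELIVER_VOICE_INPUT !== "false",
  // auto-delivery before tools defaults to off; must be explicitly "true"
  autoDeliverBeforeTools:
    process.env.MCP_VOICE_HOOKS_AUTO_DELIVER_VOICE_INPUT_BEFORE_TOOLS === "true",
};

console.log(config);
```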
package/bin/cli.js
CHANGED
@@ -110,6 +110,17 @@ async function configureClaudeCodeSettings() {
           }
         ]
       }
+    ],
+    "PostToolUse": [
+      {
+        "matcher": "^(?!mcp__voice-hooks__).*",
+        "hooks": [
+          {
+            "type": "command",
+            "command": "curl -s -X POST \"http://localhost:${MCP_VOICE_HOOKS_PORT:-5111}/api/hooks/post-tool\" || echo '{}'"
+          }
+        ]
+      }
     ]
   };
 
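The PostToolUse entry above only registers a curl command; the `/api/hooks/post-tool` endpoint it targets is added in `unified-server.js` below. A sketch of exercising the same contract by hand (assumes Node 18+ with global `fetch`, run as an ES module, and a server already listening on the default port):

```js
// post-tool-hook-check.mjs (hypothetical) - hits the endpoint the new
// PostToolUse hook calls via curl and prints the hook decision.
const port = process.env.MCP_VOICE_HOOKS_PORT || "5111";

const res = await fetch(`http://localhost:${port}/api/hooks/post-tool`, {
  method: "POST",
});
const result = await res.json();

// handleHookRequest("post-tool") answers { decision: "approve" } or,
// when pending voice input must be delivered first, { decision: "block", reason }.
console.log(result.decision, result.reason ?? "");
```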
package/dist/unified-server.js
CHANGED
@@ -20,6 +20,9 @@ import {
 var __filename = fileURLToPath(import.meta.url);
 var __dirname = path.dirname(__filename);
 var WAIT_TIMEOUT_SECONDS = 60;
+var HTTP_PORT = process.env.MCP_VOICE_HOOKS_PORT ? parseInt(process.env.MCP_VOICE_HOOKS_PORT) : 5111;
+var AUTO_DELIVER_VOICE_INPUT = process.env.MCP_VOICE_HOOKS_AUTO_DELIVER_VOICE_INPUT !== "false";
+var AUTO_DELIVER_VOICE_INPUT_BEFORE_TOOLS = process.env.MCP_VOICE_HOOKS_AUTO_DELIVER_VOICE_INPUT_BEFORE_TOOLS === "true";
 var execAsync = promisify(exec);
 async function playNotificationSound() {
   try {
@@ -60,6 +63,8 @@ var UtteranceQueue = class {
 };
 var IS_MCP_MANAGED = process.argv.includes("--mcp-managed");
 var queue = new UtteranceQueue();
+var lastToolUseTimestamp = null;
+var lastSpeakTimestamp = null;
 var voicePreferences = {
   voiceResponsesEnabled: false,
   voiceInputActive: false
@@ -108,49 +113,56 @@ app.get("/api/utterances/status", (_req, res) => {
     delivered
   });
 });
-
+function dequeueUtterancesCore() {
   if (!voicePreferences.voiceInputActive) {
-
+    return {
       success: false,
       error: "Voice input is not active. Cannot dequeue utterances when voice input is disabled."
-    }
-    return;
+    };
   }
   const pendingUtterances = queue.utterances.filter((u) => u.status === "pending").sort((a, b) => b.timestamp.getTime() - a.timestamp.getTime());
   pendingUtterances.forEach((u) => {
     queue.markDelivered(u.id);
   });
-
+  return {
     success: true,
     utterances: pendingUtterances.map((u) => ({
       text: u.text,
       timestamp: u.timestamp
     }))
-  }
+  };
+}
+app.post("/api/dequeue-utterances", (_req, res) => {
+  const result = dequeueUtterancesCore();
+  if (!result.success && result.error) {
+    res.status(400).json(result);
+    return;
+  }
+  res.json(result);
 });
-
+async function waitForUtteranceCore() {
   if (!voicePreferences.voiceInputActive) {
-
+    return {
       success: false,
       error: "Voice input is not active. Cannot wait for utterances when voice input is disabled."
-    }
-    return;
+    };
   }
   const secondsToWait = WAIT_TIMEOUT_SECONDS;
   const maxWaitMs = secondsToWait * 1e3;
   const startTime = Date.now();
-  debugLog(`[
+  debugLog(`[WaitCore] Starting wait_for_utterance (${secondsToWait}s)`);
+  notifyWaitStatus(true);
   let firstTime = true;
   while (Date.now() - startTime < maxWaitMs) {
     if (!voicePreferences.voiceInputActive) {
-      debugLog("[
-
+      debugLog("[WaitCore] Voice input deactivated during wait_for_utterance");
+      notifyWaitStatus(false);
+      return {
        success: true,
        utterances: [],
        message: "Voice input was deactivated",
        waitTime: Date.now() - startTime
-      }
-      return;
+      };
     }
     const pendingUtterances = queue.utterances.filter(
       (u) => u.status === "pending"
@@ -160,7 +172,8 @@ app.post("/api/wait-for-utterances", async (req, res) => {
       sortedUtterances.forEach((u) => {
         queue.markDelivered(u.id);
       });
-
+      notifyWaitStatus(false);
+      return {
        success: true,
        utterances: sortedUtterances.map((u) => ({
          id: u.id,
@@ -171,8 +184,7 @@ app.post("/api/wait-for-utterances", async (req, res) => {
        })),
        count: pendingUtterances.length,
        waitTime: Date.now() - startTime
-      }
-      return;
+      };
     }
     if (firstTime) {
       firstTime = false;
@@ -180,12 +192,21 @@ app.post("/api/wait-for-utterances", async (req, res) => {
     }
     await new Promise((resolve) => setTimeout(resolve, 100));
   }
-
+  notifyWaitStatus(false);
+  return {
    success: true,
    utterances: [],
    message: `No utterances found after waiting ${secondsToWait} seconds.`,
    waitTime: maxWaitMs
-  }
+  };
+}
+app.post("/api/wait-for-utterances", async (_req, res) => {
+  const result = await waitForUtteranceCore();
+  if (!result.success && result.error) {
+    res.status(400).json(result);
+    return;
+  }
+  res.json(result);
 });
 app.get("/api/has-pending-utterances", (_req, res) => {
   const pendingCount = queue.utterances.filter((u) => u.status === "pending").length;
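The hunks above pull the old route bodies out into `dequeueUtterancesCore()` and `waitForUtteranceCore()` so the hook handler further down can reuse them; the HTTP routes now just translate the core result into a response (400 plus `{ success: false, error }` when voice input is off, otherwise `{ success: true, utterances }`). A client-side sketch of the refactored dequeue route, under the same Node 18+ / running-server assumptions as before:

```js
// dequeue-check.mjs (hypothetical) - calls the refactored route and handles
// both branches of dequeueUtterancesCore()'s result.
const port = process.env.MCP_VOICE_HOOKS_PORT || "5111";

const res = await fetch(`http://localhost:${port}/api/dequeue-utterances`, {
  method: "POST",
});
const body = await res.json();

if (!res.ok) {
  // voice input is not active, so the core function refused the dequeue
  console.error(`dequeue refused: ${body.error}`);
} else {
  for (const u of body.utterances) {
    console.log(`${u.timestamp}: ${u.text}`);
  }
}
```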
@@ -244,10 +265,24 @@ function handleHookRequest(attemptedAction) {
   if (voiceInputActive) {
     const pendingUtterances = queue.utterances.filter((u) => u.status === "pending");
     if (pendingUtterances.length > 0) {
-
-
-
-
+      if (AUTO_DELIVER_VOICE_INPUT) {
+        if (attemptedAction === "tool" && !AUTO_DELIVER_VOICE_INPUT_BEFORE_TOOLS) {
+        } else {
+          const dequeueResult = dequeueUtterancesCore();
+          if (dequeueResult.success && dequeueResult.utterances && dequeueResult.utterances.length > 0) {
+            const reversedUtterances = dequeueResult.utterances.reverse();
+            return {
+              decision: "block",
+              reason: formatVoiceUtterances(reversedUtterances)
+            };
+          }
+        }
+      } else {
+        return {
+          decision: "block",
+          reason: `${pendingUtterances.length} pending utterance(s) available. Use the dequeue_utterances tool to retrieve them.`
+        };
+      }
     }
   }
   if (voiceResponsesEnabled) {
@@ -262,22 +297,67 @@ function handleHookRequest(attemptedAction) {
       };
     }
   }
-  if (attemptedAction === "tool") {
+  if (attemptedAction === "tool" || attemptedAction === "post-tool") {
+    lastToolUseTimestamp = /* @__PURE__ */ new Date();
     return { decision: "approve" };
   }
   if (attemptedAction === "wait") {
+    if (voiceResponsesEnabled && lastToolUseTimestamp && (!lastSpeakTimestamp || lastSpeakTimestamp < lastToolUseTimestamp)) {
+      return {
+        decision: "block",
+        reason: "Assistant must speak after using tools. Please use the speak tool to respond before waiting for utterances."
+      };
+    }
     return { decision: "approve" };
   }
   if (attemptedAction === "speak") {
     return { decision: "approve" };
   }
   if (attemptedAction === "stop") {
-    if (
+    if (voiceResponsesEnabled && lastToolUseTimestamp && (!lastSpeakTimestamp || lastSpeakTimestamp < lastToolUseTimestamp)) {
       return {
         decision: "block",
-        reason: "Assistant
+        reason: "Assistant must speak after using tools. Please use the speak tool to respond before proceeding."
       };
     }
+    if (voiceInputActive) {
+      if (AUTO_DELIVER_VOICE_INPUT) {
+        return (async () => {
+          try {
+            debugLog(`[Stop Hook] Auto-calling wait_for_utterance...`);
+            const data = await waitForUtteranceCore();
+            debugLog(`[Stop Hook] wait_for_utterance response: ${JSON.stringify(data)}`);
+            if (!data.success && data.error) {
+              return {
+                decision: "approve",
+                reason: data.error
+              };
+            }
+            if (data.utterances && data.utterances.length > 0) {
+              return {
+                decision: "block",
+                reason: formatVoiceUtterances(data.utterances)
+              };
+            }
+            return {
+              decision: "approve",
+              reason: data.message || "No utterances found during wait"
+            };
+          } catch (error) {
+            debugLog(`[Stop Hook] Error calling wait_for_utterance: ${error}`);
+            return {
+              decision: "approve",
+              reason: "Auto-wait encountered an error, proceeding"
+            };
+          }
+        })();
+      } else {
+        return {
+          decision: "block",
+          reason: "Assistant tried to end its response, but voice input is active. Stopping is not allowed without first checking for voice input. Assistant should now use wait_for_utterance to check for voice input"
+        };
+      }
+    }
     return {
       decision: "approve",
       reason: "No utterances since last timeout"
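With auto-delivery on and voice input active, the stop hook now awaits `waitForUtteranceCore()` itself, so its `{ decision, reason }` answer may only arrive after the 60-second wait resolves. A sketch of the contract from the caller's side (hypothetical file name; assumes the server is running):

```js
// stop-hook-check.mjs (hypothetical) - shows the asynchronous stop-hook
// contract introduced above. The request can take up to WAIT_TIMEOUT_SECONDS.
const port = process.env.MCP_VOICE_HOOKS_PORT || "5111";

const res = await fetch(`http://localhost:${port}/api/hooks/stop`, {
  method: "POST",
});
const { decision, reason } = await res.json();

if (decision === "block") {
  // the reason carries the formatted utterances (or the instruction to call
  // wait_for_utterance when auto-delivery is disabled)
  console.log("stop blocked:", reason);
} else {
  console.log("stop approved:", reason ?? "");
}
```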
@@ -289,8 +369,8 @@ app.post("/api/hooks/pre-tool", (_req, res) => {
   const result = handleHookRequest("tool");
   res.json(result);
 });
-app.post("/api/hooks/stop", (_req, res) => {
-  const result = handleHookRequest("stop");
+app.post("/api/hooks/stop", async (_req, res) => {
+  const result = await handleHookRequest("stop");
   res.json(result);
 });
 app.post("/api/hooks/pre-speak", (_req, res) => {
@@ -301,6 +381,10 @@ app.post("/api/hooks/pre-wait", (_req, res) => {
   const result = handleHookRequest("wait");
   res.json(result);
 });
+app.post("/api/hooks/post-tool", (_req, res) => {
+  const result = handleHookRequest("post-tool");
+  res.json(result);
+});
 app.delete("/api/utterances", (_req, res) => {
   const clearedCount = queue.utterances.length;
   queue.clear();
@@ -331,6 +415,20 @@ function notifyTTSClients(text) {
 `);
   });
 }
+function notifyWaitStatus(isWaiting) {
+  const message = JSON.stringify({ type: "waitStatus", isWaiting });
+  ttsClients.forEach((client) => {
+    client.write(`data: ${message}
+
+`);
+  });
+}
+function formatVoiceUtterances(utterances) {
+  const utteranceTexts = utterances.map((u) => `"${u.text}"`).join("\n");
+  return `Assistant received voice input from the user (${utterances.length} utterance${utterances.length !== 1 ? "s" : ""}):
+
+${utteranceTexts}${getVoiceResponseReminder()}`;
+}
 app.post("/api/voice-preferences", (req, res) => {
   const { voiceResponsesEnabled } = req.body;
   voicePreferences.voiceResponsesEnabled = !!voiceResponsesEnabled;
@@ -371,6 +469,7 @@ app.post("/api/speak", async (req, res) => {
       u.status = "responded";
       debugLog(`[Queue] marked as responded: "${u.text}" [id: ${u.id}]`);
     });
+    lastSpeakTimestamp = /* @__PURE__ */ new Date();
     res.json({
       success: true,
       message: "Text spoken successfully",
@@ -408,14 +507,17 @@ app.post("/api/speak-system", async (req, res) => {
 app.get("/", (_req, res) => {
   res.sendFile(path.join(__dirname, "..", "public", "index.html"));
 });
-var HTTP_PORT = process.env.MCP_VOICE_HOOKS_PORT ? parseInt(process.env.MCP_VOICE_HOOKS_PORT) : 5111;
 app.listen(HTTP_PORT, async () => {
   if (!IS_MCP_MANAGED) {
     console.log(`[HTTP] Server listening on http://localhost:${HTTP_PORT}`);
     console.log(`[Mode] Running in ${IS_MCP_MANAGED ? "MCP-managed" : "standalone"} mode`);
+    console.log(`[Auto-deliver] Voice input auto-delivery is ${AUTO_DELIVER_VOICE_INPUT ? "enabled (tools hidden)" : "disabled (tools shown)"}`);
+    console.log(`[Pre-tool Hook] Auto-deliver voice input before tools is ${AUTO_DELIVER_VOICE_INPUT_BEFORE_TOOLS ? "enabled" : "disabled"}`);
   } else {
     console.error(`[HTTP] Server listening on http://localhost:${HTTP_PORT}`);
     console.error(`[Mode] Running in MCP-managed mode`);
+    console.error(`[Auto-deliver] Voice input auto-delivery is ${AUTO_DELIVER_VOICE_INPUT ? "enabled (tools hidden)" : "disabled (tools shown)"}`);
+    console.error(`[Pre-tool Hook] Auto-deliver voice input before tools is ${AUTO_DELIVER_VOICE_INPUT_BEFORE_TOOLS ? "enabled" : "disabled"}`);
   }
   const autoOpenBrowser = process.env.MCP_VOICE_HOOKS_AUTO_OPEN_BROWSER !== "false";
   if (IS_MCP_MANAGED && autoOpenBrowser) {
@@ -452,8 +554,9 @@ if (IS_MCP_MANAGED) {
     }
   );
   mcpServer.setRequestHandler(ListToolsRequestSchema, async () => {
-
-
+    const tools = [];
+    if (!AUTO_DELIVER_VOICE_INPUT) {
+      tools.push(
         {
           name: "dequeue_utterances",
           description: "Dequeue pending utterances and mark them as delivered",
@@ -469,23 +572,24 @@ if (IS_MCP_MANAGED) {
             type: "object",
             properties: {}
           }
-        },
-        {
-          name: "speak",
-          description: "Speak text using text-to-speech and mark delivered utterances as responded",
-          inputSchema: {
-            type: "object",
-            properties: {
-              text: {
-                type: "string",
-                description: "The text to speak"
-              }
-            },
-            required: ["text"]
-          }
         }
-
-    }
+      );
+    }
+    tools.push({
+      name: "speak",
+      description: "Speak text using text-to-speech and mark delivered utterances as responded",
+      inputSchema: {
+        type: "object",
+        properties: {
+          text: {
+            type: "string",
+            description: "The text to speak"
+          }
+        },
+        required: ["text"]
+      }
+    });
+    return { tools };
   });
   mcpServer.setRequestHandler(CallToolRequestSchema, async (request) => {
     const { name, arguments: args } = request.params;
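`notifyWaitStatus()` reuses the existing `/api/tts-events` Server-Sent Events stream, which is also why `public/app.js` changes in this release. A browser-side consumer might look like the sketch below (the real handling lives in `public/app.js`, which is not shown in this diff):

```js
// Hypothetical browser sketch: subscribe to the SSE stream and react to the
// new waitStatus messages emitted by notifyWaitStatus().
const events = new EventSource("/api/tts-events");

events.onmessage = (event) => {
  const msg = JSON.parse(event.data);
  if (msg.type === "waitStatus") {
    // true while waitForUtteranceCore() is polling, false once it returns
    console.log(msg.isWaiting ? "Claude is waiting for voice input" : "Wait ended");
  } else if (msg.type === "speak") {
    // existing TTS path: speak msg.text with the selected voice
  }
};
```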
package/dist/unified-server.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"sources":["../src/unified-server.ts"],"sourcesContent":["..."],"mappings":"...","names":[]}
+{"version":3,"sources":["../src/unified-server.ts"],"sourcesContent":["..."],"mappings":"...","names":[]}
res.status(400).json({ error: 'Text is required' });\n return;\n }\n\n const parsedTimestamp = timestamp ? new Date(timestamp) : undefined;\n const utterance = queue.add(text, parsedTimestamp);\n res.json({\n success: true,\n utterance: {\n id: utterance.id,\n text: utterance.text,\n timestamp: utterance.timestamp,\n status: utterance.status,\n },\n });\n});\n\napp.get('/api/utterances', (req: Request, res: Response) => {\n const limit = parseInt(req.query.limit as string) || 10;\n const utterances = queue.getRecent(limit);\n\n res.json({\n utterances: utterances.map(u => ({\n id: u.id,\n text: u.text,\n timestamp: u.timestamp,\n status: u.status,\n })),\n });\n});\n\napp.get('/api/utterances/status', (_req: Request, res: Response) => {\n const total = queue.utterances.length;\n const pending = queue.utterances.filter(u => u.status === 'pending').length;\n const delivered = queue.utterances.filter(u => u.status === 'delivered').length;\n\n res.json({\n total,\n pending,\n delivered,\n });\n});\n\n// Shared dequeue logic\nfunction dequeueUtterancesCore() {\n // Check if voice input is active\n if (!voicePreferences.voiceInputActive) {\n return {\n success: false,\n error: 'Voice input is not active. Cannot dequeue utterances when voice input is disabled.'\n };\n }\n\n const pendingUtterances = queue.utterances\n .filter(u => u.status === 'pending')\n .sort((a, b) => b.timestamp.getTime() - a.timestamp.getTime());\n\n // Mark as delivered\n pendingUtterances.forEach(u => {\n queue.markDelivered(u.id);\n });\n\n return {\n success: true,\n utterances: pendingUtterances.map(u => ({\n text: u.text,\n timestamp: u.timestamp,\n })),\n };\n}\n\n// MCP server integration\napp.post('/api/dequeue-utterances', (_req: Request, res: Response) => {\n const result = dequeueUtterancesCore();\n\n if (!result.success && result.error) {\n res.status(400).json(result);\n return;\n }\n\n res.json(result);\n});\n\n// Shared wait for utterance logic\nasync function waitForUtteranceCore() {\n // Check if voice input is active\n if (!voicePreferences.voiceInputActive) {\n return {\n success: false,\n error: 'Voice input is not active. 
Cannot wait for utterances when voice input is disabled.'\n };\n }\n\n const secondsToWait = WAIT_TIMEOUT_SECONDS;\n const maxWaitMs = secondsToWait * 1000;\n const startTime = Date.now();\n\n debugLog(`[WaitCore] Starting wait_for_utterance (${secondsToWait}s)`);\n\n // Notify frontend that wait has started\n notifyWaitStatus(true);\n\n let firstTime = true;\n\n // Poll for utterances\n while (Date.now() - startTime < maxWaitMs) {\n // Check if voice input is still active\n if (!voicePreferences.voiceInputActive) {\n debugLog('[WaitCore] Voice input deactivated during wait_for_utterance');\n notifyWaitStatus(false); // Notify wait has ended\n return {\n success: true,\n utterances: [],\n message: 'Voice input was deactivated',\n waitTime: Date.now() - startTime,\n };\n }\n\n const pendingUtterances = queue.utterances.filter(\n u => u.status === 'pending'\n );\n\n if (pendingUtterances.length > 0) {\n // Found utterances\n\n // Sort by timestamp (oldest first)\n const sortedUtterances = pendingUtterances\n .sort((a, b) => a.timestamp.getTime() - b.timestamp.getTime());\n\n // Mark utterances as delivered\n sortedUtterances.forEach(u => {\n queue.markDelivered(u.id);\n });\n\n notifyWaitStatus(false); // Notify wait has ended\n return {\n success: true,\n utterances: sortedUtterances.map(u => ({\n id: u.id,\n text: u.text,\n timestamp: u.timestamp,\n status: 'delivered', // They are now delivered\n })),\n count: pendingUtterances.length,\n waitTime: Date.now() - startTime,\n };\n }\n\n if (firstTime) {\n firstTime = false;\n // Play notification sound since we're about to start waiting\n await playNotificationSound();\n }\n\n // Wait 100ms before checking again\n await new Promise(resolve => setTimeout(resolve, 100));\n }\n\n // Timeout reached - no utterances found\n notifyWaitStatus(false); // Notify wait has ended\n return {\n success: true,\n utterances: [],\n message: `No utterances found after waiting ${secondsToWait} seconds.`,\n waitTime: maxWaitMs,\n };\n}\n\n// Wait for utterance endpoint\napp.post('/api/wait-for-utterances', async (_req: Request, res: Response) => {\n const result = await waitForUtteranceCore();\n\n // If error response, return 400 status\n if (!result.success && result.error) {\n res.status(400).json(result);\n return;\n }\n\n res.json(result);\n});\n\n\n// API for pre-tool hook to check for pending utterances\napp.get('/api/has-pending-utterances', (_req: Request, res: Response) => {\n const pendingCount = queue.utterances.filter(u => u.status === 'pending').length;\n const hasPending = pendingCount > 0;\n\n res.json({\n hasPending,\n pendingCount\n });\n});\n\n// Unified action validation endpoint\napp.post('/api/validate-action', (req: Request, res: Response) => {\n const { action } = req.body;\n const voiceResponsesEnabled = voicePreferences.voiceResponsesEnabled;\n\n if (!action || !['tool-use', 'stop'].includes(action)) {\n res.status(400).json({ error: 'Invalid action. Must be \"tool-use\" or \"stop\"' });\n return;\n }\n\n // Only check for pending utterances if voice input is active\n if (voicePreferences.voiceInputActive) {\n const pendingUtterances = queue.utterances.filter(u => u.status === 'pending');\n if (pendingUtterances.length > 0) {\n res.json({\n allowed: false,\n requiredAction: 'dequeue_utterances',\n reason: `${pendingUtterances.length} pending utterance(s) must be dequeued first. 
Please use dequeue_utterances to process them.`\n });\n return;\n }\n }\n\n // Check for delivered but unresponded utterances (when voice enabled)\n if (voiceResponsesEnabled) {\n const deliveredUtterances = queue.utterances.filter(u => u.status === 'delivered');\n if (deliveredUtterances.length > 0) {\n res.json({\n allowed: false,\n requiredAction: 'speak',\n reason: `${deliveredUtterances.length} delivered utterance(s) require voice response. Please use the speak tool to respond before proceeding.`\n });\n return;\n }\n }\n\n // For stop action, check if we should wait (only if voice input is active)\n if (action === 'stop' && voicePreferences.voiceInputActive) {\n if (queue.utterances.length > 0) {\n res.json({\n allowed: false,\n requiredAction: 'wait_for_utterance',\n reason: 'Assistant tried to end its response. Stopping is not allowed without first checking for voice input. Assistant should now use wait_for_utterance to check for voice input'\n });\n return;\n }\n }\n\n // All checks passed - action is allowed\n res.json({\n allowed: true\n });\n});\n\n// Unified hook handler\nfunction handleHookRequest(attemptedAction: 'tool' | 'speak' | 'wait' | 'stop' | 'post-tool'): { decision: 'approve' | 'block', reason?: string } | Promise<{ decision: 'approve' | 'block', reason?: string }> {\n const voiceResponsesEnabled = voicePreferences.voiceResponsesEnabled;\n const voiceInputActive = voicePreferences.voiceInputActive;\n\n // 1. Check for pending utterances (different behavior based on action and settings)\n if (voiceInputActive) {\n const pendingUtterances = queue.utterances.filter(u => u.status === 'pending');\n if (pendingUtterances.length > 0) {\n if (AUTO_DELIVER_VOICE_INPUT) {\n // Auto mode: check if we should auto-deliver\n if (attemptedAction === 'tool' && !AUTO_DELIVER_VOICE_INPUT_BEFORE_TOOLS) {\n // Skip auto-delivery for tools when disabled\n } else {\n // Auto-dequeue for non-tool actions, or for tools when enabled\n const dequeueResult = dequeueUtterancesCore();\n\n if (dequeueResult.success && dequeueResult.utterances && dequeueResult.utterances.length > 0) {\n // Reverse to show oldest first\n const reversedUtterances = dequeueResult.utterances.reverse();\n\n return {\n decision: 'block',\n reason: formatVoiceUtterances(reversedUtterances)\n };\n }\n }\n } else {\n // Manual mode: always block and tell assistant to use dequeue_utterances tool\n return {\n decision: 'block',\n reason: `${pendingUtterances.length} pending utterance(s) available. Use the dequeue_utterances tool to retrieve them.`\n };\n }\n }\n }\n\n // 2. Check for delivered utterances (when voice enabled)\n if (voiceResponsesEnabled) {\n const deliveredUtterances = queue.utterances.filter(u => u.status === 'delivered');\n if (deliveredUtterances.length > 0) {\n // Only allow speak to proceed\n if (attemptedAction === 'speak') {\n return { decision: 'approve' };\n }\n return {\n decision: 'block',\n reason: `${deliveredUtterances.length} delivered utterance(s) require voice response. Please use the speak tool to respond before proceeding.`\n };\n }\n }\n\n // 3. Handle tool and post-tool actions\n if (attemptedAction === 'tool' || attemptedAction === 'post-tool') {\n lastToolUseTimestamp = new Date();\n return { decision: 'approve' };\n }\n\n // 4. 
Handle wait for utterance\n if (attemptedAction === 'wait') {\n if (voiceResponsesEnabled && lastToolUseTimestamp &&\n (!lastSpeakTimestamp || lastSpeakTimestamp < lastToolUseTimestamp)) {\n return {\n decision: 'block',\n reason: 'Assistant must speak after using tools. Please use the speak tool to respond before waiting for utterances.'\n };\n }\n return { decision: 'approve' };\n }\n\n // 5. Handle speak\n if (attemptedAction === 'speak') {\n return { decision: 'approve' };\n }\n\n // 6. Handle stop\n if (attemptedAction === 'stop') {\n // Check if must speak after tool use\n if (voiceResponsesEnabled && lastToolUseTimestamp &&\n (!lastSpeakTimestamp || lastSpeakTimestamp < lastToolUseTimestamp)) {\n return {\n decision: 'block',\n reason: 'Assistant must speak after using tools. Please use the speak tool to respond before proceeding.'\n };\n }\n\n // Check if should wait for utterances (only if voice input is active)\n if (voiceInputActive) {\n if (AUTO_DELIVER_VOICE_INPUT) {\n // Auto-wait for utterances\n return (async () => {\n try {\n debugLog(`[Stop Hook] Auto-calling wait_for_utterance...`);\n const data = await waitForUtteranceCore();\n debugLog(`[Stop Hook] wait_for_utterance response: ${JSON.stringify(data)}`);\n\n // If error (voice input not active), treat as no utterances found\n if (!data.success && data.error) {\n return {\n decision: 'approve' as const,\n reason: data.error\n };\n }\n\n // If utterances were found, block and return them\n if (data.utterances && data.utterances.length > 0) {\n return {\n decision: 'block' as const,\n reason: formatVoiceUtterances(data.utterances)\n };\n }\n\n // If no utterances found (including when voice was deactivated), approve stop\n return {\n decision: 'approve' as const,\n reason: data.message || 'No utterances found during wait'\n };\n } catch (error) {\n debugLog(`[Stop Hook] Error calling wait_for_utterance: ${error}`);\n // In auto-deliver mode, fail open on errors\n return {\n decision: 'approve' as const,\n reason: 'Auto-wait encountered an error, proceeding'\n };\n }\n })();\n } else {\n // Manual mode: block and tell assistant to use wait_for_utterance tool\n return {\n decision: 'block',\n reason: 'Assistant tried to end its response, but voice input is active. Stopping is not allowed without first checking for voice input. 
Assistant should now use wait_for_utterance to check for voice input'\n };\n }\n }\n\n return {\n decision: 'approve',\n reason: 'No utterances since last timeout'\n };\n }\n\n // Default to approve (shouldn't reach here)\n return { decision: 'approve' };\n}\n\n// Dedicated hook endpoints that return in Claude's expected format\napp.post('/api/hooks/pre-tool', (_req: Request, res: Response) => {\n const result = handleHookRequest('tool');\n res.json(result);\n});\n\napp.post('/api/hooks/stop', async (_req: Request, res: Response) => {\n const result = await handleHookRequest('stop');\n res.json(result);\n});\n\n// Pre-speak hook endpoint\napp.post('/api/hooks/pre-speak', (_req: Request, res: Response) => {\n const result = handleHookRequest('speak');\n res.json(result);\n});\n\n// Pre-wait hook endpoint\napp.post('/api/hooks/pre-wait', (_req: Request, res: Response) => {\n const result = handleHookRequest('wait');\n res.json(result);\n});\n\n// Post-tool hook endpoint\napp.post('/api/hooks/post-tool', (_req: Request, res: Response) => {\n // Use the unified handler with 'post-tool' action\n const result = handleHookRequest('post-tool');\n res.json(result);\n});\n\n// API to clear all utterances\napp.delete('/api/utterances', (_req: Request, res: Response) => {\n const clearedCount = queue.utterances.length;\n queue.clear();\n\n res.json({\n success: true,\n message: `Cleared ${clearedCount} utterances`,\n clearedCount\n });\n});\n\n// Server-Sent Events for TTS notifications\nconst ttsClients = new Set<Response>();\n\napp.get('/api/tts-events', (_req: Request, res: Response) => {\n res.writeHead(200, {\n 'Content-Type': 'text/event-stream',\n 'Cache-Control': 'no-cache',\n 'Connection': 'keep-alive',\n });\n\n // Send initial connection message\n res.write('data: {\"type\":\"connected\"}\\n\\n');\n\n // Add client to set\n ttsClients.add(res);\n\n // Remove client on disconnect\n res.on('close', () => {\n ttsClients.delete(res);\n });\n});\n\n// Helper function to notify all connected TTS clients\nfunction notifyTTSClients(text: string) {\n const message = JSON.stringify({ type: 'speak', text });\n ttsClients.forEach(client => {\n client.write(`data: ${message}\\n\\n`);\n });\n}\n\n// Helper function to notify all connected clients about wait status\nfunction notifyWaitStatus(isWaiting: boolean) {\n const message = JSON.stringify({ type: 'waitStatus', isWaiting });\n ttsClients.forEach(client => {\n client.write(`data: ${message}\\n\\n`);\n });\n}\n\n// Helper function to format voice utterances for display\nfunction formatVoiceUtterances(utterances: any[]): string {\n const utteranceTexts = utterances\n .map(u => `\"${u.text}\"`)\n .join('\\n');\n\n return `Assistant received voice input from the user (${utterances.length} utterance${utterances.length !== 1 ? 
's' : ''}):\\n\\n${utteranceTexts}${getVoiceResponseReminder()}`;\n}\n\n// API for voice preferences\napp.post('/api/voice-preferences', (req: Request, res: Response) => {\n const { voiceResponsesEnabled } = req.body;\n\n // Update preferences\n voicePreferences.voiceResponsesEnabled = !!voiceResponsesEnabled;\n\n debugLog(`[Preferences] Updated: voiceResponses=${voicePreferences.voiceResponsesEnabled}`);\n\n res.json({\n success: true,\n preferences: voicePreferences\n });\n});\n\n// API for voice input state\napp.post('/api/voice-input-state', (req: Request, res: Response) => {\n const { active } = req.body;\n\n // Update voice input state\n voicePreferences.voiceInputActive = !!active;\n\n debugLog(`[Voice Input] ${voicePreferences.voiceInputActive ? 'Started' : 'Stopped'} listening`);\n\n res.json({\n success: true,\n voiceInputActive: voicePreferences.voiceInputActive\n });\n});\n\n// API for text-to-speech\napp.post('/api/speak', async (req: Request, res: Response) => {\n const { text } = req.body;\n\n if (!text || !text.trim()) {\n res.status(400).json({ error: 'Text is required' });\n return;\n }\n\n // Check if voice responses are enabled\n if (!voicePreferences.voiceResponsesEnabled) {\n debugLog(`[Speak] Voice responses disabled, returning error`);\n res.status(400).json({\n error: 'Voice responses are disabled',\n message: 'Cannot speak when voice responses are disabled'\n });\n return;\n }\n\n try {\n // Always notify browser clients - they decide how to speak\n notifyTTSClients(text);\n debugLog(`[Speak] Sent text to browser for TTS: \"${text}\"`);\n\n // Note: The browser will decide whether to use system voice or browser voice\n\n // Mark all delivered utterances as responded\n const deliveredUtterances = queue.utterances.filter(u => u.status === 'delivered');\n deliveredUtterances.forEach(u => {\n u.status = 'responded';\n debugLog(`[Queue] marked as responded: \"${u.text}\"\t[id: ${u.id}]`);\n });\n\n lastSpeakTimestamp = new Date();\n\n res.json({\n success: true,\n message: 'Text spoken successfully',\n respondedCount: deliveredUtterances.length\n });\n } catch (error) {\n debugLog(`[Speak] Failed to speak text: ${error}`);\n res.status(500).json({\n error: 'Failed to speak text',\n details: error instanceof Error ? error.message : String(error)\n });\n }\n});\n\n// API for system text-to-speech (always uses Mac say command)\napp.post('/api/speak-system', async (req: Request, res: Response) => {\n const { text, rate = 150 } = req.body;\n\n if (!text || !text.trim()) {\n res.status(400).json({ error: 'Text is required' });\n return;\n }\n\n try {\n // Execute text-to-speech using macOS say command\n // Note: Mac say command doesn't support volume control\n await execAsync(`say -r ${rate} \"${text.replace(/\"/g, '\\\\\"')}\"`);\n debugLog(`[Speak System] Spoke text using macOS say: \"${text}\" (rate: ${rate})`);\n\n res.json({\n success: true,\n message: 'Text spoken successfully via system voice'\n });\n } catch (error) {\n debugLog(`[Speak System] Failed to speak text: ${error}`);\n res.status(500).json({\n error: 'Failed to speak text via system voice',\n details: error instanceof Error ? error.message : String(error)\n });\n }\n});\n\napp.get('/', (_req: Request, res: Response) => {\n res.sendFile(path.join(__dirname, '..', 'public', 'index.html'));\n});\n\n// Start HTTP server\napp.listen(HTTP_PORT, async () => {\n if (!IS_MCP_MANAGED) {\n console.log(`[HTTP] Server listening on http://localhost:${HTTP_PORT}`);\n console.log(`[Mode] Running in ${IS_MCP_MANAGED ? 
'MCP-managed' : 'standalone'} mode`);\n console.log(`[Auto-deliver] Voice input auto-delivery is ${AUTO_DELIVER_VOICE_INPUT ? 'enabled (tools hidden)' : 'disabled (tools shown)'}`);\n console.log(`[Pre-tool Hook] Auto-deliver voice input before tools is ${AUTO_DELIVER_VOICE_INPUT_BEFORE_TOOLS ? 'enabled' : 'disabled'}`);\n } else {\n // In MCP mode, write to stderr to avoid interfering with protocol\n console.error(`[HTTP] Server listening on http://localhost:${HTTP_PORT}`);\n console.error(`[Mode] Running in MCP-managed mode`);\n console.error(`[Auto-deliver] Voice input auto-delivery is ${AUTO_DELIVER_VOICE_INPUT ? 'enabled (tools hidden)' : 'disabled (tools shown)'}`);\n console.error(`[Pre-tool Hook] Auto-deliver voice input before tools is ${AUTO_DELIVER_VOICE_INPUT_BEFORE_TOOLS ? 'enabled' : 'disabled'}`);\n }\n\n // Auto-open browser if no frontend connects within 3 seconds\n const autoOpenBrowser = process.env.MCP_VOICE_HOOKS_AUTO_OPEN_BROWSER !== 'false'; // Default to true\n if (IS_MCP_MANAGED && autoOpenBrowser) {\n setTimeout(async () => {\n if (ttsClients.size === 0) {\n debugLog('[Browser] No frontend connected, opening browser...');\n try {\n const open = (await import('open')).default;\n await open(`http://localhost:${HTTP_PORT}`);\n } catch (error) {\n debugLog('[Browser] Failed to open browser:', error);\n }\n } else {\n debugLog(`[Browser] Frontend already connected (${ttsClients.size} client(s))`)\n }\n }, 3000);\n }\n});\n\n// Helper function to get voice response reminder\nfunction getVoiceResponseReminder(): string {\n const voiceResponsesEnabled = voicePreferences.voiceResponsesEnabled;\n return voiceResponsesEnabled\n ? '\\n\\nThe user has enabled voice responses, so use the \\'speak\\' tool to respond to the user\\'s voice input before proceeding.'\n : '';\n}\n\n// MCP Server Setup (only if MCP-managed)\nif (IS_MCP_MANAGED) {\n // Use stderr in MCP mode to avoid interfering with protocol\n console.error('[MCP] Initializing MCP server...');\n\n const mcpServer = new Server(\n {\n name: 'voice-hooks',\n version: '1.0.0',\n },\n {\n capabilities: {\n tools: {},\n },\n }\n );\n\n // Tool handlers\n mcpServer.setRequestHandler(ListToolsRequestSchema, async () => {\n const tools = [];\n\n // Only show dequeue_utterances and wait_for_utterance if auto-deliver is disabled\n if (!AUTO_DELIVER_VOICE_INPUT) {\n tools.push(\n {\n name: 'dequeue_utterances',\n description: 'Dequeue pending utterances and mark them as delivered',\n inputSchema: {\n type: 'object',\n properties: {},\n },\n },\n {\n name: 'wait_for_utterance',\n description: 'Wait for an utterance to be available or until timeout',\n inputSchema: {\n type: 'object',\n properties: {},\n },\n }\n );\n }\n\n // Always show the speak tool\n tools.push({\n name: 'speak',\n description: 'Speak text using text-to-speech and mark delivered utterances as responded',\n inputSchema: {\n type: 'object',\n properties: {\n text: {\n type: 'string',\n description: 'The text to speak',\n },\n },\n required: ['text'],\n },\n });\n\n return { tools };\n });\n\n mcpServer.setRequestHandler(CallToolRequestSchema, async (request) => {\n const { name, arguments: args } = request.params;\n\n try {\n if (name === 'dequeue_utterances') {\n const response = await fetch(`http://localhost:${HTTP_PORT}/api/dequeue-utterances`, {\n method: 'POST',\n headers: { 'Content-Type': 'application/json' },\n body: JSON.stringify({}),\n });\n\n const data = await response.json() as any;\n\n // Check if the request was successful\n if (!response.ok) {\n 
return {\n content: [\n {\n type: 'text',\n text: `Error: ${data.error || 'Failed to dequeue utterances'}`,\n },\n ],\n };\n }\n\n if (data.utterances.length === 0) {\n return {\n content: [\n {\n type: 'text',\n text: 'No recent utterances found.',\n },\n ],\n };\n }\n\n return {\n content: [\n {\n type: 'text',\n text: `Dequeued ${data.utterances.length} utterance(s):\\n\\n${data.utterances.reverse().map((u: any) => `\"${u.text}\"\\t[time: ${new Date(u.timestamp).toISOString()}]`).join('\\n')\n }${getVoiceResponseReminder()}`,\n },\n ],\n };\n }\n\n if (name === 'wait_for_utterance') {\n debugLog(`[MCP] Calling wait_for_utterance`);\n\n const response = await fetch(`http://localhost:${HTTP_PORT}/api/wait-for-utterances`, {\n method: 'POST',\n headers: { 'Content-Type': 'application/json' },\n body: JSON.stringify({}),\n });\n\n const data = await response.json() as any;\n\n // Check if the request was successful\n if (!response.ok) {\n return {\n content: [\n {\n type: 'text',\n text: `Error: ${data.error || 'Failed to wait for utterances'}`,\n },\n ],\n };\n }\n\n if (data.utterances && data.utterances.length > 0) {\n const utteranceTexts = data.utterances\n .map((u: any) => `[${u.timestamp}] \"${u.text}\"`)\n .join('\\n');\n\n return {\n content: [\n {\n type: 'text',\n text: `Found ${data.count} utterance(s):\\n\\n${utteranceTexts}${getVoiceResponseReminder()}`,\n },\n ],\n };\n } else {\n return {\n content: [\n {\n type: 'text',\n text: data.message || `No utterances found. Timed out.`,\n },\n ],\n };\n }\n }\n\n if (name === 'speak') {\n const text = args?.text as string;\n\n if (!text || !text.trim()) {\n return {\n content: [\n {\n type: 'text',\n text: 'Error: Text is required for speak tool',\n },\n ],\n isError: true,\n };\n }\n\n const response = await fetch(`http://localhost:${HTTP_PORT}/api/speak`, {\n method: 'POST',\n headers: { 'Content-Type': 'application/json' },\n body: JSON.stringify({ text }),\n });\n\n const data = await response.json() as any;\n\n if (response.ok) {\n return {\n content: [\n {\n type: 'text',\n text: '', // Return empty string for success\n },\n ],\n };\n } else {\n return {\n content: [\n {\n type: 'text',\n text: `Error speaking text: ${data.error || 'Unknown error'}`,\n },\n ],\n isError: true,\n };\n }\n }\n\n throw new Error(`Unknown tool: ${name}`);\n } catch (error) {\n return {\n content: [\n {\n type: 'text',\n text: `Error: ${error instanceof Error ? 
error.message : String(error)}`,\n },\n ],\n isError: true,\n };\n }\n });\n\n // Connect via stdio\n const transport = new StdioServerTransport();\n mcpServer.connect(transport);\n // Use stderr in MCP mode to avoid interfering with protocol\n console.error('[MCP] Server connected via stdio');\n} else {\n // Only log in standalone mode\n if (!IS_MCP_MANAGED) {\n console.log('[MCP] Skipping MCP server initialization (not in MCP-managed mode)');\n }\n}"],"mappings":";;;;;;AAEA,OAAO,aAAa;AAEpB,OAAO,UAAU;AACjB,OAAO,UAAU;AACjB,SAAS,qBAAqB;AAC9B,SAAS,kBAAkB;AAC3B,SAAS,YAAY;AACrB,SAAS,iBAAiB;AAC1B,SAAS,cAAc;AAEvB,SAAS,4BAA4B;AACrC;AAAA,EACE;AAAA,EACA;AAAA,OACK;AAEP,IAAM,aAAa,cAAc,YAAY,GAAG;AAChD,IAAM,YAAY,KAAK,QAAQ,UAAU;AAGzC,IAAM,uBAAuB;AAC7B,IAAM,YAAY,QAAQ,IAAI,uBAAuB,SAAS,QAAQ,IAAI,oBAAoB,IAAI;AAClG,IAAM,2BAA2B,QAAQ,IAAI,6CAA6C;AAC1F,IAAM,wCAAwC,QAAQ,IAAI,0DAA0D;AAGpH,IAAM,YAAY,UAAU,IAAI;AAGhC,eAAe,wBAAwB;AACrC,MAAI;AAEF,UAAM,UAAU,yCAAyC;AACzD,aAAS,mCAAmC;AAAA,EAC9C,SAAS,OAAO;AACd,aAAS,iCAAiC,KAAK,EAAE;AAAA,EAEnD;AACF;AAUA,IAAM,iBAAN,MAAqB;AAAA,EACnB,aAA0B,CAAC;AAAA,EAE3B,IAAI,MAAc,WAA6B;AAC7C,UAAM,YAAuB;AAAA,MAC3B,IAAI,WAAW;AAAA,MACf,MAAM,KAAK,KAAK;AAAA,MAChB,WAAW,aAAa,oBAAI,KAAK;AAAA,MACjC,QAAQ;AAAA,IACV;AAEA,SAAK,WAAW,KAAK,SAAS;AAC9B,aAAS,oBAAoB,UAAU,IAAI,UAAU,UAAU,EAAE,GAAG;AACpE,WAAO;AAAA,EACT;AAAA,EAEA,UAAU,QAAgB,IAAiB;AACzC,WAAO,KAAK,WACT,KAAK,CAAC,GAAG,MAAM,EAAE,UAAU,QAAQ,IAAI,EAAE,UAAU,QAAQ,CAAC,EAC5D,MAAM,GAAG,KAAK;AAAA,EACnB;AAAA,EAEA,cAAc,IAAkB;AAC9B,UAAM,YAAY,KAAK,WAAW,KAAK,OAAK,EAAE,OAAO,EAAE;AACvD,QAAI,WAAW;AACb,gBAAU,SAAS;AACnB,eAAS,uBAAuB,UAAU,IAAI,UAAU,EAAE,GAAG;AAAA,IAC/D;AAAA,EACF;AAAA,EAEA,QAAc;AACZ,UAAM,QAAQ,KAAK,WAAW;AAC9B,SAAK,aAAa,CAAC;AACnB,aAAS,mBAAmB,KAAK,aAAa;AAAA,EAChD;AACF;AAGA,IAAM,iBAAiB,QAAQ,KAAK,SAAS,eAAe;AAG5D,IAAM,QAAQ,IAAI,eAAe;AACjC,IAAI,uBAAoC;AACxC,IAAI,qBAAkC;AAGtC,IAAI,mBAAmB;AAAA,EACrB,uBAAuB;AAAA,EACvB,kBAAkB;AACpB;AAGA,IAAM,MAAM,QAAQ;AACpB,IAAI,IAAI,KAAK,CAAC;AACd,IAAI,IAAI,QAAQ,KAAK,CAAC;AACtB,IAAI,IAAI,QAAQ,OAAO,KAAK,KAAK,WAAW,MAAM,QAAQ,CAAC,CAAC;AAG5D,IAAI,KAAK,6BAA6B,CAAC,KAAc,QAAkB;AACrE,QAAM,EAAE,MAAM,UAAU,IAAI,IAAI;AAEhC,MAAI,CAAC,QAAQ,CAAC,KAAK,KAAK,GAAG;AACzB,QAAI,OAAO,GAAG,EAAE,KAAK,EAAE,OAAO,mBAAmB,CAAC;AAClD;AAAA,EACF;AAEA,QAAM,kBAAkB,YAAY,IAAI,KAAK,SAAS,IAAI;AAC1D,QAAM,YAAY,MAAM,IAAI,MAAM,eAAe;AACjD,MAAI,KAAK;AAAA,IACP,SAAS;AAAA,IACT,WAAW;AAAA,MACT,IAAI,UAAU;AAAA,MACd,MAAM,UAAU;AAAA,MAChB,WAAW,UAAU;AAAA,MACrB,QAAQ,UAAU;AAAA,IACpB;AAAA,EACF,CAAC;AACH,CAAC;AAED,IAAI,IAAI,mBAAmB,CAAC,KAAc,QAAkB;AAC1D,QAAM,QAAQ,SAAS,IAAI,MAAM,KAAe,KAAK;AACrD,QAAM,aAAa,MAAM,UAAU,KAAK;AAExC,MAAI,KAAK;AAAA,IACP,YAAY,WAAW,IAAI,QAAM;AAAA,MAC/B,IAAI,EAAE;AAAA,MACN,MAAM,EAAE;AAAA,MACR,WAAW,EAAE;AAAA,MACb,QAAQ,EAAE;AAAA,IACZ,EAAE;AAAA,EACJ,CAAC;AACH,CAAC;AAED,IAAI,IAAI,0BAA0B,CAAC,MAAe,QAAkB;AAClE,QAAM,QAAQ,MAAM,WAAW;AAC/B,QAAM,UAAU,MAAM,WAAW,OAAO,OAAK,EAAE,WAAW,SAAS,EAAE;AACrE,QAAM,YAAY,MAAM,WAAW,OAAO,OAAK,EAAE,WAAW,WAAW,EAAE;AAEzE,MAAI,KAAK;AAAA,IACP;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AACH,CAAC;AAGD,SAAS,wBAAwB;AAE/B,MAAI,CAAC,iBAAiB,kBAAkB;AACtC,WAAO;AAAA,MACL,SAAS;AAAA,MACT,OAAO;AAAA,IACT;AAAA,EACF;AAEA,QAAM,oBAAoB,MAAM,WAC7B,OAAO,OAAK,EAAE,WAAW,SAAS,EAClC,KAAK,CAAC,GAAG,MAAM,EAAE,UAAU,QAAQ,IAAI,EAAE,UAAU,QAAQ,CAAC;AAG/D,oBAAkB,QAAQ,OAAK;AAC7B,UAAM,cAAc,EAAE,EAAE;AAAA,EAC1B,CAAC;AAED,SAAO;AAAA,IACL,SAAS;AAAA,IACT,YAAY,kBAAkB,IAAI,QAAM;AAAA,MACtC,MAAM,EAAE;AAAA,MACR,WAAW,EAAE;AAAA,IACf,EAAE;AAAA,EACJ;AACF;AAGA,IAAI,KAAK,2BAA2B,CAAC,MAAe,QAAkB;AACpE,QAAM,SAAS,sBAAsB;AAErC,MAAI,CAAC,OAAO,WAAW,OAAO,OAAO;AACnC,QAAI,OAAO,GAAG,EAAE,KAAK,MAAM;AAC3B;AAAA,EACF;AAEA,MAAI
,KAAK,MAAM;AACjB,CAAC;AAGD,eAAe,uBAAuB;AAEpC,MAAI,CAAC,iBAAiB,kBAAkB;AACtC,WAAO;AAAA,MACL,SAAS;AAAA,MACT,OAAO;AAAA,IACT;AAAA,EACF;AAEA,QAAM,gBAAgB;AACtB,QAAM,YAAY,gBAAgB;AAClC,QAAM,YAAY,KAAK,IAAI;AAE3B,WAAS,2CAA2C,aAAa,IAAI;AAGrE,mBAAiB,IAAI;AAErB,MAAI,YAAY;AAGhB,SAAO,KAAK,IAAI,IAAI,YAAY,WAAW;AAEzC,QAAI,CAAC,iBAAiB,kBAAkB;AACtC,eAAS,8DAA8D;AACvE,uBAAiB,KAAK;AACtB,aAAO;AAAA,QACL,SAAS;AAAA,QACT,YAAY,CAAC;AAAA,QACb,SAAS;AAAA,QACT,UAAU,KAAK,IAAI,IAAI;AAAA,MACzB;AAAA,IACF;AAEA,UAAM,oBAAoB,MAAM,WAAW;AAAA,MACzC,OAAK,EAAE,WAAW;AAAA,IACpB;AAEA,QAAI,kBAAkB,SAAS,GAAG;AAIhC,YAAM,mBAAmB,kBACtB,KAAK,CAAC,GAAG,MAAM,EAAE,UAAU,QAAQ,IAAI,EAAE,UAAU,QAAQ,CAAC;AAG/D,uBAAiB,QAAQ,OAAK;AAC5B,cAAM,cAAc,EAAE,EAAE;AAAA,MAC1B,CAAC;AAED,uBAAiB,KAAK;AACtB,aAAO;AAAA,QACL,SAAS;AAAA,QACT,YAAY,iBAAiB,IAAI,QAAM;AAAA,UACrC,IAAI,EAAE;AAAA,UACN,MAAM,EAAE;AAAA,UACR,WAAW,EAAE;AAAA,UACb,QAAQ;AAAA;AAAA,QACV,EAAE;AAAA,QACF,OAAO,kBAAkB;AAAA,QACzB,UAAU,KAAK,IAAI,IAAI;AAAA,MACzB;AAAA,IACF;AAEA,QAAI,WAAW;AACb,kBAAY;AAEZ,YAAM,sBAAsB;AAAA,IAC9B;AAGA,UAAM,IAAI,QAAQ,aAAW,WAAW,SAAS,GAAG,CAAC;AAAA,EACvD;AAGA,mBAAiB,KAAK;AACtB,SAAO;AAAA,IACL,SAAS;AAAA,IACT,YAAY,CAAC;AAAA,IACb,SAAS,qCAAqC,aAAa;AAAA,IAC3D,UAAU;AAAA,EACZ;AACF;AAGA,IAAI,KAAK,4BAA4B,OAAO,MAAe,QAAkB;AAC3E,QAAM,SAAS,MAAM,qBAAqB;AAG1C,MAAI,CAAC,OAAO,WAAW,OAAO,OAAO;AACnC,QAAI,OAAO,GAAG,EAAE,KAAK,MAAM;AAC3B;AAAA,EACF;AAEA,MAAI,KAAK,MAAM;AACjB,CAAC;AAID,IAAI,IAAI,+BAA+B,CAAC,MAAe,QAAkB;AACvE,QAAM,eAAe,MAAM,WAAW,OAAO,OAAK,EAAE,WAAW,SAAS,EAAE;AAC1E,QAAM,aAAa,eAAe;AAElC,MAAI,KAAK;AAAA,IACP;AAAA,IACA;AAAA,EACF,CAAC;AACH,CAAC;AAGD,IAAI,KAAK,wBAAwB,CAAC,KAAc,QAAkB;AAChE,QAAM,EAAE,OAAO,IAAI,IAAI;AACvB,QAAM,wBAAwB,iBAAiB;AAE/C,MAAI,CAAC,UAAU,CAAC,CAAC,YAAY,MAAM,EAAE,SAAS,MAAM,GAAG;AACrD,QAAI,OAAO,GAAG,EAAE,KAAK,EAAE,OAAO,+CAA+C,CAAC;AAC9E;AAAA,EACF;AAGA,MAAI,iBAAiB,kBAAkB;AACrC,UAAM,oBAAoB,MAAM,WAAW,OAAO,OAAK,EAAE,WAAW,SAAS;AAC7E,QAAI,kBAAkB,SAAS,GAAG;AAChC,UAAI,KAAK;AAAA,QACP,SAAS;AAAA,QACT,gBAAgB;AAAA,QAChB,QAAQ,GAAG,kBAAkB,MAAM;AAAA,MACrC,CAAC;AACD;AAAA,IACF;AAAA,EACF;AAGA,MAAI,uBAAuB;AACzB,UAAM,sBAAsB,MAAM,WAAW,OAAO,OAAK,EAAE,WAAW,WAAW;AACjF,QAAI,oBAAoB,SAAS,GAAG;AAClC,UAAI,KAAK;AAAA,QACP,SAAS;AAAA,QACT,gBAAgB;AAAA,QAChB,QAAQ,GAAG,oBAAoB,MAAM;AAAA,MACvC,CAAC;AACD;AAAA,IACF;AAAA,EACF;AAGA,MAAI,WAAW,UAAU,iBAAiB,kBAAkB;AAC1D,QAAI,MAAM,WAAW,SAAS,GAAG;AAC/B,UAAI,KAAK;AAAA,QACP,SAAS;AAAA,QACT,gBAAgB;AAAA,QAChB,QAAQ;AAAA,MACV,CAAC;AACD;AAAA,IACF;AAAA,EACF;AAGA,MAAI,KAAK;AAAA,IACP,SAAS;AAAA,EACX,CAAC;AACH,CAAC;AAGD,SAAS,kBAAkB,iBAAqL;AAC9M,QAAM,wBAAwB,iBAAiB;AAC/C,QAAM,mBAAmB,iBAAiB;AAG1C,MAAI,kBAAkB;AACpB,UAAM,oBAAoB,MAAM,WAAW,OAAO,OAAK,EAAE,WAAW,SAAS;AAC7E,QAAI,kBAAkB,SAAS,GAAG;AAChC,UAAI,0BAA0B;AAE5B,YAAI,oBAAoB,UAAU,CAAC,uCAAuC;AAAA,QAE1E,OAAO;AAEL,gBAAM,gBAAgB,sBAAsB;AAE5C,cAAI,cAAc,WAAW,cAAc,cAAc,cAAc,WAAW,SAAS,GAAG;AAE5F,kBAAM,qBAAqB,cAAc,WAAW,QAAQ;AAE5D,mBAAO;AAAA,cACL,UAAU;AAAA,cACV,QAAQ,sBAAsB,kBAAkB;AAAA,YAClD;AAAA,UACF;AAAA,QACF;AAAA,MACF,OAAO;AAEL,eAAO;AAAA,UACL,UAAU;AAAA,UACV,QAAQ,GAAG,kBAAkB,MAAM;AAAA,QACrC;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,MAAI,uBAAuB;AACzB,UAAM,sBAAsB,MAAM,WAAW,OAAO,OAAK,EAAE,WAAW,WAAW;AACjF,QAAI,oBAAoB,SAAS,GAAG;AAElC,UAAI,oBAAoB,SAAS;AAC/B,eAAO,EAAE,UAAU,UAAU;AAAA,MAC/B;AACA,aAAO;AAAA,QACL,UAAU;AAAA,QACV,QAAQ,GAAG,oBAAoB,MAAM;AAAA,MACvC;AAAA,IACF;AAAA,EACF;AAGA,MAAI,oBAAoB,UAAU,oBAAoB,aAAa;AACjE,2BAAuB,oBAAI,KAAK;AAChC,WAAO,EAAE,UAAU,UAAU;AAAA,EAC/B;AAGA,MAAI,oBAAoB,QAAQ;AAC9B,QAAI,yBAAyB,yBAC1B,CAAC,sBAAsB,qBAAqB,uBAAuB;AACpE,aAAO;AAAA,QACL,UAAU;AAAA,QACV,QAAQ;AAAA,MACV;AAAA,IACF;AACA,WAAO,EAAE,UAAU,UAAU;AAAA,EAC/B;AAGA,MAAI,oBAAoB,SAAS;AAC/B,WAAO,EAAE,UAAU,UAAU
;AAAA,EAC/B;AAGA,MAAI,oBAAoB,QAAQ;AAE9B,QAAI,yBAAyB,yBAC1B,CAAC,sBAAsB,qBAAqB,uBAAuB;AACpE,aAAO;AAAA,QACL,UAAU;AAAA,QACV,QAAQ;AAAA,MACV;AAAA,IACF;AAGA,QAAI,kBAAkB;AACpB,UAAI,0BAA0B;AAE5B,gBAAQ,YAAY;AAClB,cAAI;AACF,qBAAS,gDAAgD;AACzD,kBAAM,OAAO,MAAM,qBAAqB;AACxC,qBAAS,4CAA4C,KAAK,UAAU,IAAI,CAAC,EAAE;AAG3E,gBAAI,CAAC,KAAK,WAAW,KAAK,OAAO;AAC/B,qBAAO;AAAA,gBACL,UAAU;AAAA,gBACV,QAAQ,KAAK;AAAA,cACf;AAAA,YACF;AAGA,gBAAI,KAAK,cAAc,KAAK,WAAW,SAAS,GAAG;AACjD,qBAAO;AAAA,gBACL,UAAU;AAAA,gBACV,QAAQ,sBAAsB,KAAK,UAAU;AAAA,cAC/C;AAAA,YACF;AAGA,mBAAO;AAAA,cACL,UAAU;AAAA,cACV,QAAQ,KAAK,WAAW;AAAA,YAC1B;AAAA,UACF,SAAS,OAAO;AACd,qBAAS,iDAAiD,KAAK,EAAE;AAEjE,mBAAO;AAAA,cACL,UAAU;AAAA,cACV,QAAQ;AAAA,YACV;AAAA,UACF;AAAA,QACF,GAAG;AAAA,MACL,OAAO;AAEL,eAAO;AAAA,UACL,UAAU;AAAA,UACV,QAAQ;AAAA,QACV;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,MACL,UAAU;AAAA,MACV,QAAQ;AAAA,IACV;AAAA,EACF;AAGA,SAAO,EAAE,UAAU,UAAU;AAC/B;AAGA,IAAI,KAAK,uBAAuB,CAAC,MAAe,QAAkB;AAChE,QAAM,SAAS,kBAAkB,MAAM;AACvC,MAAI,KAAK,MAAM;AACjB,CAAC;AAED,IAAI,KAAK,mBAAmB,OAAO,MAAe,QAAkB;AAClE,QAAM,SAAS,MAAM,kBAAkB,MAAM;AAC7C,MAAI,KAAK,MAAM;AACjB,CAAC;AAGD,IAAI,KAAK,wBAAwB,CAAC,MAAe,QAAkB;AACjE,QAAM,SAAS,kBAAkB,OAAO;AACxC,MAAI,KAAK,MAAM;AACjB,CAAC;AAGD,IAAI,KAAK,uBAAuB,CAAC,MAAe,QAAkB;AAChE,QAAM,SAAS,kBAAkB,MAAM;AACvC,MAAI,KAAK,MAAM;AACjB,CAAC;AAGD,IAAI,KAAK,wBAAwB,CAAC,MAAe,QAAkB;AAEjE,QAAM,SAAS,kBAAkB,WAAW;AAC5C,MAAI,KAAK,MAAM;AACjB,CAAC;AAGD,IAAI,OAAO,mBAAmB,CAAC,MAAe,QAAkB;AAC9D,QAAM,eAAe,MAAM,WAAW;AACtC,QAAM,MAAM;AAEZ,MAAI,KAAK;AAAA,IACP,SAAS;AAAA,IACT,SAAS,WAAW,YAAY;AAAA,IAChC;AAAA,EACF,CAAC;AACH,CAAC;AAGD,IAAM,aAAa,oBAAI,IAAc;AAErC,IAAI,IAAI,mBAAmB,CAAC,MAAe,QAAkB;AAC3D,MAAI,UAAU,KAAK;AAAA,IACjB,gBAAgB;AAAA,IAChB,iBAAiB;AAAA,IACjB,cAAc;AAAA,EAChB,CAAC;AAGD,MAAI,MAAM,gCAAgC;AAG1C,aAAW,IAAI,GAAG;AAGlB,MAAI,GAAG,SAAS,MAAM;AACpB,eAAW,OAAO,GAAG;AAAA,EACvB,CAAC;AACH,CAAC;AAGD,SAAS,iBAAiB,MAAc;AACtC,QAAM,UAAU,KAAK,UAAU,EAAE,MAAM,SAAS,KAAK,CAAC;AACtD,aAAW,QAAQ,YAAU;AAC3B,WAAO,MAAM,SAAS,OAAO;AAAA;AAAA,CAAM;AAAA,EACrC,CAAC;AACH;AAGA,SAAS,iBAAiB,WAAoB;AAC5C,QAAM,UAAU,KAAK,UAAU,EAAE,MAAM,cAAc,UAAU,CAAC;AAChE,aAAW,QAAQ,YAAU;AAC3B,WAAO,MAAM,SAAS,OAAO;AAAA;AAAA,CAAM;AAAA,EACrC,CAAC;AACH;AAGA,SAAS,sBAAsB,YAA2B;AACxD,QAAM,iBAAiB,WACpB,IAAI,OAAK,IAAI,EAAE,IAAI,GAAG,EACtB,KAAK,IAAI;AAEZ,SAAO,iDAAiD,WAAW,MAAM,aAAa,WAAW,WAAW,IAAI,MAAM,EAAE;AAAA;AAAA,EAAS,cAAc,GAAG,yBAAyB,CAAC;AAC9K;AAGA,IAAI,KAAK,0BAA0B,CAAC,KAAc,QAAkB;AAClE,QAAM,EAAE,sBAAsB,IAAI,IAAI;AAGtC,mBAAiB,wBAAwB,CAAC,CAAC;AAE3C,WAAS,yCAAyC,iBAAiB,qBAAqB,EAAE;AAE1F,MAAI,KAAK;AAAA,IACP,SAAS;AAAA,IACT,aAAa;AAAA,EACf,CAAC;AACH,CAAC;AAGD,IAAI,KAAK,0BAA0B,CAAC,KAAc,QAAkB;AAClE,QAAM,EAAE,OAAO,IAAI,IAAI;AAGvB,mBAAiB,mBAAmB,CAAC,CAAC;AAEtC,WAAS,iBAAiB,iBAAiB,mBAAmB,YAAY,SAAS,YAAY;AAE/F,MAAI,KAAK;AAAA,IACP,SAAS;AAAA,IACT,kBAAkB,iBAAiB;AAAA,EACrC,CAAC;AACH,CAAC;AAGD,IAAI,KAAK,cAAc,OAAO,KAAc,QAAkB;AAC5D,QAAM,EAAE,KAAK,IAAI,IAAI;AAErB,MAAI,CAAC,QAAQ,CAAC,KAAK,KAAK,GAAG;AACzB,QAAI,OAAO,GAAG,EAAE,KAAK,EAAE,OAAO,mBAAmB,CAAC;AAClD;AAAA,EACF;AAGA,MAAI,CAAC,iBAAiB,uBAAuB;AAC3C,aAAS,mDAAmD;AAC5D,QAAI,OAAO,GAAG,EAAE,KAAK;AAAA,MACnB,OAAO;AAAA,MACP,SAAS;AAAA,IACX,CAAC;AACD;AAAA,EACF;AAEA,MAAI;AAEF,qBAAiB,IAAI;AACrB,aAAS,0CAA0C,IAAI,GAAG;AAK1D,UAAM,sBAAsB,MAAM,WAAW,OAAO,OAAK,EAAE,WAAW,WAAW;AACjF,wBAAoB,QAAQ,OAAK;AAC/B,QAAE,SAAS;AACX,eAAS,iCAAiC,EAAE,IAAI,UAAU,EAAE,EAAE,GAAG;AAAA,IACnE,CAAC;AAED,yBAAqB,oBAAI,KAAK;AAE9B,QAAI,KAAK;AAAA,MACP,SAAS;AAAA,MACT,SAAS;AAAA,MACT,gBAAgB,oBAAoB;AAAA,IACtC,CAAC;AAAA,EACH,SAAS,OAAO;AACd,aAAS,iCAAiC,KAAK,EAAE;AACjD,QAAI,OAAO,GAAG,EAAE,KAAK;AAAA,MACnB,OAAO;AAAA,MACP,SAAS,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,IACh
E,CAAC;AAAA,EACH;AACF,CAAC;AAGD,IAAI,KAAK,qBAAqB,OAAO,KAAc,QAAkB;AACnE,QAAM,EAAE,MAAM,OAAO,IAAI,IAAI,IAAI;AAEjC,MAAI,CAAC,QAAQ,CAAC,KAAK,KAAK,GAAG;AACzB,QAAI,OAAO,GAAG,EAAE,KAAK,EAAE,OAAO,mBAAmB,CAAC;AAClD;AAAA,EACF;AAEA,MAAI;AAGF,UAAM,UAAU,UAAU,IAAI,KAAK,KAAK,QAAQ,MAAM,KAAK,CAAC,GAAG;AAC/D,aAAS,+CAA+C,IAAI,YAAY,IAAI,GAAG;AAE/E,QAAI,KAAK;AAAA,MACP,SAAS;AAAA,MACT,SAAS;AAAA,IACX,CAAC;AAAA,EACH,SAAS,OAAO;AACd,aAAS,wCAAwC,KAAK,EAAE;AACxD,QAAI,OAAO,GAAG,EAAE,KAAK;AAAA,MACnB,OAAO;AAAA,MACP,SAAS,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,IAChE,CAAC;AAAA,EACH;AACF,CAAC;AAED,IAAI,IAAI,KAAK,CAAC,MAAe,QAAkB;AAC7C,MAAI,SAAS,KAAK,KAAK,WAAW,MAAM,UAAU,YAAY,CAAC;AACjE,CAAC;AAGD,IAAI,OAAO,WAAW,YAAY;AAChC,MAAI,CAAC,gBAAgB;AACnB,YAAQ,IAAI,+CAA+C,SAAS,EAAE;AACtE,YAAQ,IAAI,qBAAqB,iBAAiB,gBAAgB,YAAY,OAAO;AACrF,YAAQ,IAAI,+CAA+C,2BAA2B,2BAA2B,wBAAwB,EAAE;AAC3I,YAAQ,IAAI,4DAA4D,wCAAwC,YAAY,UAAU,EAAE;AAAA,EAC1I,OAAO;AAEL,YAAQ,MAAM,+CAA+C,SAAS,EAAE;AACxE,YAAQ,MAAM,oCAAoC;AAClD,YAAQ,MAAM,+CAA+C,2BAA2B,2BAA2B,wBAAwB,EAAE;AAC7I,YAAQ,MAAM,4DAA4D,wCAAwC,YAAY,UAAU,EAAE;AAAA,EAC5I;AAGA,QAAM,kBAAkB,QAAQ,IAAI,sCAAsC;AAC1E,MAAI,kBAAkB,iBAAiB;AACrC,eAAW,YAAY;AACrB,UAAI,WAAW,SAAS,GAAG;AACzB,iBAAS,qDAAqD;AAC9D,YAAI;AACF,gBAAM,QAAQ,MAAM,OAAO,MAAM,GAAG;AACpC,gBAAM,KAAK,oBAAoB,SAAS,EAAE;AAAA,QAC5C,SAAS,OAAO;AACd,mBAAS,qCAAqC,KAAK;AAAA,QACrD;AAAA,MACF,OAAO;AACL,iBAAS,yCAAyC,WAAW,IAAI,aAAa;AAAA,MAChF;AAAA,IACF,GAAG,GAAI;AAAA,EACT;AACF,CAAC;AAGD,SAAS,2BAAmC;AAC1C,QAAM,wBAAwB,iBAAiB;AAC/C,SAAO,wBACH,8HACA;AACN;AAGA,IAAI,gBAAgB;AAElB,UAAQ,MAAM,kCAAkC;AAEhD,QAAM,YAAY,IAAI;AAAA,IACpB;AAAA,MACE,MAAM;AAAA,MACN,SAAS;AAAA,IACX;AAAA,IACA;AAAA,MACE,cAAc;AAAA,QACZ,OAAO,CAAC;AAAA,MACV;AAAA,IACF;AAAA,EACF;AAGA,YAAU,kBAAkB,wBAAwB,YAAY;AAC9D,UAAM,QAAQ,CAAC;AAGf,QAAI,CAAC,0BAA0B;AAC7B,YAAM;AAAA,QACJ;AAAA,UACE,MAAM;AAAA,UACN,aAAa;AAAA,UACb,aAAa;AAAA,YACX,MAAM;AAAA,YACN,YAAY,CAAC;AAAA,UACf;AAAA,QACF;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,aAAa;AAAA,UACb,aAAa;AAAA,YACX,MAAM;AAAA,YACN,YAAY,CAAC;AAAA,UACf;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAGA,UAAM,KAAK;AAAA,MACT,MAAM;AAAA,MACN,aAAa;AAAA,MACb,aAAa;AAAA,QACX,MAAM;AAAA,QACN,YAAY;AAAA,UACV,MAAM;AAAA,YACJ,MAAM;AAAA,YACN,aAAa;AAAA,UACf;AAAA,QACF;AAAA,QACA,UAAU,CAAC,MAAM;AAAA,MACnB;AAAA,IACF,CAAC;AAED,WAAO,EAAE,MAAM;AAAA,EACjB,CAAC;AAED,YAAU,kBAAkB,uBAAuB,OAAO,YAAY;AACpE,UAAM,EAAE,MAAM,WAAW,KAAK,IAAI,QAAQ;AAE1C,QAAI;AACF,UAAI,SAAS,sBAAsB;AACjC,cAAM,WAAW,MAAM,MAAM,oBAAoB,SAAS,2BAA2B;AAAA,UACnF,QAAQ;AAAA,UACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,UAC9C,MAAM,KAAK,UAAU,CAAC,CAAC;AAAA,QACzB,CAAC;AAED,cAAM,OAAO,MAAM,SAAS,KAAK;AAGjC,YAAI,CAAC,SAAS,IAAI;AAChB,iBAAO;AAAA,YACL,SAAS;AAAA,cACP;AAAA,gBACE,MAAM;AAAA,gBACN,MAAM,UAAU,KAAK,SAAS,8BAA8B;AAAA,cAC9D;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,YAAI,KAAK,WAAW,WAAW,GAAG;AAChC,iBAAO;AAAA,YACL,SAAS;AAAA,cACP;AAAA,gBACE,MAAM;AAAA,gBACN,MAAM;AAAA,cACR;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,eAAO;AAAA,UACL,SAAS;AAAA,YACP;AAAA,cACE,MAAM;AAAA,cACN,MAAM,YAAY,KAAK,WAAW,MAAM;AAAA;AAAA,EAAqB,KAAK,WAAW,QAAQ,EAAE,IAAI,CAAC,MAAW,IAAI,EAAE,IAAI,YAAa,IAAI,KAAK,EAAE,SAAS,EAAE,YAAY,CAAC,GAAG,EAAE,KAAK,IAAI,CAC7K,GAAG,yBAAyB,CAAC;AAAA,YACjC;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAEA,UAAI,SAAS,sBAAsB;AACjC,iBAAS,kCAAkC;AAE3C,cAAM,WAAW,MAAM,MAAM,oBAAoB,SAAS,4BAA4B;AAAA,UACpF,QAAQ;AAAA,UACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,UAC9C,MAAM,KAAK,UAAU,CAAC,CAAC;AAAA,QACzB,CAAC;AAED,cAAM,OAAO,MAAM,SAAS,KAAK;AAGjC,YAAI,CAAC,SAAS,IAAI;AAChB,iBAAO;AAAA,YACL,SAAS;AAAA,cACP;AAAA,gBACE,MAAM;AAAA,gBACN,MAAM,UAAU,KAAK,SAAS,+BAA+B;AAAA,cAC/D;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,YAAI,KAAK,cAAc,KAAK,WAAW,SAAS,GAAG;AACjD,gBAAM,iBAAiB,KAAK,WACzB,IAAI,CAAC,MAAW,IA
AI,EAAE,SAAS,MAAM,EAAE,IAAI,GAAG,EAC9C,KAAK,IAAI;AAEZ,iBAAO;AAAA,YACL,SAAS;AAAA,cACP;AAAA,gBACE,MAAM;AAAA,gBACN,MAAM,SAAS,KAAK,KAAK;AAAA;AAAA,EAAqB,cAAc,GAAG,yBAAyB,CAAC;AAAA,cAC3F;AAAA,YACF;AAAA,UACF;AAAA,QACF,OAAO;AACL,iBAAO;AAAA,YACL,SAAS;AAAA,cACP;AAAA,gBACE,MAAM;AAAA,gBACN,MAAM,KAAK,WAAW;AAAA,cACxB;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAEA,UAAI,SAAS,SAAS;AACpB,cAAM,OAAO,MAAM;AAEnB,YAAI,CAAC,QAAQ,CAAC,KAAK,KAAK,GAAG;AACzB,iBAAO;AAAA,YACL,SAAS;AAAA,cACP;AAAA,gBACE,MAAM;AAAA,gBACN,MAAM;AAAA,cACR;AAAA,YACF;AAAA,YACA,SAAS;AAAA,UACX;AAAA,QACF;AAEA,cAAM,WAAW,MAAM,MAAM,oBAAoB,SAAS,cAAc;AAAA,UACtE,QAAQ;AAAA,UACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,UAC9C,MAAM,KAAK,UAAU,EAAE,KAAK,CAAC;AAAA,QAC/B,CAAC;AAED,cAAM,OAAO,MAAM,SAAS,KAAK;AAEjC,YAAI,SAAS,IAAI;AACf,iBAAO;AAAA,YACL,SAAS;AAAA,cACP;AAAA,gBACE,MAAM;AAAA,gBACN,MAAM;AAAA;AAAA,cACR;AAAA,YACF;AAAA,UACF;AAAA,QACF,OAAO;AACL,iBAAO;AAAA,YACL,SAAS;AAAA,cACP;AAAA,gBACE,MAAM;AAAA,gBACN,MAAM,wBAAwB,KAAK,SAAS,eAAe;AAAA,cAC7D;AAAA,YACF;AAAA,YACA,SAAS;AAAA,UACX;AAAA,QACF;AAAA,MACF;AAEA,YAAM,IAAI,MAAM,iBAAiB,IAAI,EAAE;AAAA,IACzC,SAAS,OAAO;AACd,aAAO;AAAA,QACL,SAAS;AAAA,UACP;AAAA,YACE,MAAM;AAAA,YACN,MAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,UACxE;AAAA,QACF;AAAA,QACA,SAAS;AAAA,MACX;AAAA,IACF;AAAA,EACF,CAAC;AAGD,QAAM,YAAY,IAAI,qBAAqB;AAC3C,YAAU,QAAQ,SAAS;AAE3B,UAAQ,MAAM,kCAAkC;AAClD,OAAO;AAEL,MAAI,CAAC,gBAAgB;AACnB,YAAQ,IAAI,oEAAoE;AAAA,EAClF;AACF;","names":[]}
|
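The TypeScript embedded in the regenerated map's sourcesContent shows the server-side counterpart of the app.js changes below: while the server is blocked in wait_for_utterance it now broadcasts a wait status to connected browser clients over the same SSE channel used for text-to-speech. A trimmed excerpt from that embedded source (not the full file):

```ts
// Trimmed from the embedded src/unified-server.ts: the set of connected SSE
// clients (the browser UI) and the wait-status broadcast they receive.
import type { Response } from 'express';

const ttsClients = new Set<Response>();

// Helper function to notify all connected clients about wait status
function notifyWaitStatus(isWaiting: boolean) {
  const message = JSON.stringify({ type: 'waitStatus', isWaiting });
  ttsClients.forEach(client => {
    client.write(`data: ${message}\n\n`);
  });
}
```

In that same embedded source, waitForUtteranceCore() calls notifyWaitStatus(true) when it starts polling for voice input and notifyWaitStatus(false) on every return path, which is what drives the waitStatus events handled in app.js below.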
package/package.json
CHANGED
package/public/app.js
CHANGED
@@ -153,7 +153,7 @@ class VoiceHooksClient {
         });
 
         this.testTTSBtn.addEventListener('click', () => {
-            this.speakText('
+            this.speakText('This is Voice Mode for Claude Code. How can I help you today?');
         });
 
         // Voice toggle listeners
@@ -369,6 +369,8 @@ class VoiceHooksClient {
 
                 if (data.type === 'speak' && data.text) {
                     this.speakText(data.text);
+                } else if (data.type === 'waitStatus') {
+                    this.handleWaitStatus(data.isWaiting);
                 }
             } catch (error) {
                 console.error('Failed to parse TTS event:', error);
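The speak and waitStatus messages handled above arrive on the server's /api/tts-events SSE stream. As a minimal sketch for observing that stream, assuming the server is running on the default port 5111, the following can be pasted into the browser console on the voice-hooks page:

```js
// Subscribe to the SSE stream and log the events the UI reacts to.
const events = new EventSource('http://localhost:5111/api/tts-events');
events.onmessage = (event) => {
  const data = JSON.parse(event.data);
  if (data.type === 'speak') {
    console.log('speak:', data.text);
  } else if (data.type === 'waitStatus') {
    console.log('waitStatus, isWaiting =', data.isWaiting);
  } else {
    console.log('event:', data); // e.g. the initial {"type":"connected"} message
  }
};
```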
@@ -452,7 +454,40 @@ class VoiceHooksClient {
             this.voiceSelect.value = savedVoice;
             this.selectedVoice = savedVoice;
         } else {
-
+            // Look for Google US English Male voice first
+            let googleUSMaleIndex = -1;
+            let microsoftAndrewIndex = -1;
+
+            this.voices.forEach((voice, index) => {
+                const voiceName = voice.name.toLowerCase();
+
+                // Check for Google US English Male
+                if (voiceName.includes('google') &&
+                    voiceName.includes('us') &&
+                    voiceName.includes('english')) {
+                    googleUSMaleIndex = index;
+                }
+
+                // Check for Microsoft Andrew Online
+                if (voiceName.includes('microsoft') &&
+                    voiceName.includes('andrew') &&
+                    voiceName.includes('online')) {
+                    microsoftAndrewIndex = index;
+                }
+            });
+
+            if (googleUSMaleIndex !== -1) {
+                this.selectedVoice = `browser:${googleUSMaleIndex}`;
+                this.voiceSelect.value = this.selectedVoice;
+                this.debugLog('Defaulting to Google US English Male voice');
+            } else if (microsoftAndrewIndex !== -1) {
+                this.selectedVoice = `browser:${microsoftAndrewIndex}`;
+                this.voiceSelect.value = this.selectedVoice;
+                this.debugLog('Google US English Male not found, defaulting to Microsoft Andrew Online');
+            } else {
+                this.selectedVoice = 'system';
+                this.debugLog('Preferred voices not found, using system default');
+            }
         }
 
         // Update warnings based on selected voice
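Whether the Google or Microsoft voice gets auto-selected depends on which voices the browser exposes through the Web Speech API. A quick sketch for checking the available names from the browser console (some browsers populate the list asynchronously, hence the voiceschanged listener):

```js
// Log the voice names the default-selection logic above matches against
// ("google"/"us"/"english", or "microsoft"/"andrew"/"online").
function logVoiceNames() {
  console.log(speechSynthesis.getVoices().map(voice => voice.name));
}
logVoiceNames();
speechSynthesis.addEventListener('voiceschanged', logVoiceNames);
```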
@@ -658,6 +693,20 @@ class VoiceHooksClient {
             this.systemVoiceInfo.style.display = 'none';
         }
     }
+
+    handleWaitStatus(isWaiting) {
+        const listeningIndicatorText = this.listeningIndicator.querySelector('span');
+
+        if (isWaiting) {
+            // Claude is waiting for voice input
+            listeningIndicatorText.textContent = 'Claude is paused and waiting for voice input';
+            this.debugLog('Claude is waiting for voice input');
+        } else {
+            // Back to normal listening state
+            listeningIndicatorText.textContent = 'Listening...';
+            this.debugLog('Claude finished waiting');
+        }
+    }
 }
 
 // Initialize the client when the page loads