@ouro.bot/cli 0.1.0-alpha.561 → 0.1.0-alpha.563
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/changelog.json +16 -0
- package/dist/heart/daemon/cli-exec.js +17 -2
- package/dist/heart/daemon/sense-manager.js +3 -0
- package/dist/heart/identity.js +2 -2
- package/dist/heart/turn-context.js +5 -1
- package/dist/mind/prompt.js +6 -2
- package/dist/repertoire/tools-session.js +6 -0
- package/dist/repertoire/tools-surface.js +17 -0
- package/dist/senses/bluebubbles/index.js +50 -0
- package/dist/senses/bluebubbles-meta-guard.js +40 -0
- package/dist/senses/voice/index.js +1 -0
- package/dist/senses/voice/twilio-phone.js +462 -0
- package/dist/senses/voice-twilio-entry.js +216 -0
- package/package.json +1 -1
package/README.md
CHANGED
|
@@ -105,7 +105,7 @@ Task docs do not live in this repo anymore. Planning and doing docs live in the
|
|
|
105
105
|
- Human TTY commands share one CLI surface family: bare `ouro` opens the home deck, `ouro up` uses the boot checklist, `ouro connect`/`ouro auth verify`/`ouro repair` agree on provider and vault truth, and `ouro help`/`ouro whoami`/`ouro versions`/`ouro hatch` render through the same Ouro-branded wizard/guide language instead of raw transcript walls. Orientation commands such as root `ouro connect` may use shorter live probes, while startup and verification commands own durable readiness updates.
|
|
106
106
|
- Human-facing CLI commands that can wait on browser auth, vault IO, daemon startup, daemon restart, provider checks, or connector setup use a shared progress checklist. If a cursor may blink for more than a few seconds, the command should print or animate the current step instead of going quiet.
|
|
107
107
|
- CLI commands that mutate bundle config, such as vault setup or `ouro connect bluebubbles`, run bundle sync after the change when `sync.enabled` is true and report a compact `bundle sync:` line.
|
|
108
|
-
- Voice is transcript-first: voice sessions use the ordinary `state/sessions/<friend>/voice/<key>.json` session path and appear in Ouro Mailbox as text transcripts. ElevenLabs API credentials live in portable `runtime/config` at `integrations.elevenLabsApiKey`; Whisper.cpp CLI/model paths live in the machine runtime item at `voice.whisperCliPath` and `voice.whisperModelPath`.
|
|
108
|
+
- Voice is transcript-first: voice sessions use the ordinary `state/sessions/<friend>/voice/<key>.json` session path and appear in Ouro Mailbox as text transcripts. ElevenLabs API credentials live in portable `runtime/config` at `integrations.elevenLabsApiKey` and `integrations.elevenLabsVoiceId`; Whisper.cpp CLI/model paths live in the machine runtime item at `voice.whisperCliPath` and `voice.whisperModelPath`. Phone calls, browser meetings, and local microphone capture are transports under the single `voice` sense, not separate senses; the Twilio phone transport uses Twilio Record -> Whisper.cpp -> voice session -> ElevenLabs -> Twilio Play.
|
|
109
109
|
- The daemon discovers bundles dynamically from `~/AgentBundles`.
|
|
110
110
|
- `ouro status` reports version, last-updated time, discovered agents, senses, and workers.
|
|
111
111
|
- `bundle-meta.json` tracks the runtime version that last touched a bundle.
|
package/changelog.json
CHANGED
|
@@ -1,6 +1,22 @@
|
|
|
1
1
|
{
|
|
2
2
|
"_note": "This changelog is maintained as part of the PR/version-bump workflow. Agent-curated, not auto-generated. Agents read this file directly via read_file to understand what changed between versions.",
|
|
3
3
|
"versions": [
|
|
4
|
+
{
|
|
5
|
+
"version": "0.1.0-alpha.563",
|
|
6
|
+
"changes": [
|
|
7
|
+
"The default agent credential vault host now points at the work-substrate Vaultwarden endpoint `https://vault.ouro.bot`.",
|
|
8
|
+
"Vault config normalization keeps `https://vault.ouroboros.bot` and the underlying Azure Container Apps hostname as legacy unlock-source candidates during the hostname migration.",
|
|
9
|
+
"Vault config and unlock tests now cover the `vault.ouro.bot` canonical host while preserving side-effect-free fallback lookup for legacy local unlock material."
|
|
10
|
+
]
|
|
11
|
+
},
|
|
12
|
+
{
|
|
13
|
+
"version": "0.1.0-alpha.562",
|
|
14
|
+
"changes": [
|
|
15
|
+
"BlueBubbles outbound delivery now blocks narrow internal/meta markers such as `[surfaced from inner dialog]`, `[pending from ...]:`, routing-control prompt sections, and `<think>` tags before they can reach iMessage.",
|
|
16
|
+
"The guard fails closed across surface, proactive BlueBubbles sends, pending-drain retries, normal flush, and speak/flushNow paths so blocked internal text is logged instead of queued for later delivery.",
|
|
17
|
+
"Regression coverage now keeps ordinary user-facing prose about inner-dialog concepts deliverable while preventing the reported internal surfaced-thought leakage path."
|
|
18
|
+
]
|
|
19
|
+
},
|
|
4
20
|
{
|
|
5
21
|
"version": "0.1.0-alpha.561",
|
|
6
22
|
"changes": [
|
|
@@ -2540,6 +2540,10 @@ async function buildConnectMenu(agent, deps, onProgress) {
|
|
|
2540
2540
|
const elevenLabsApiKey = runtimeConfig.ok
|
|
2541
2541
|
? readRuntimeConfigString(runtimeConfig.config, "integrations.elevenLabsApiKey")
|
|
2542
2542
|
: null;
|
|
2543
|
+
const elevenLabsVoiceId = runtimeConfig.ok
|
|
2544
|
+
? readRuntimeConfigString(runtimeConfig.config, "integrations.elevenLabsVoiceId")
|
|
2545
|
+
?? readRuntimeConfigString(runtimeConfig.config, "voice.elevenLabsVoiceId")
|
|
2546
|
+
: null;
|
|
2543
2547
|
const shouldVerifyPerplexity = runtimeConfig.ok && !!perplexityApiKey;
|
|
2544
2548
|
const shouldVerifyEmbeddings = runtimeConfig.ok && !!embeddingsApiKey;
|
|
2545
2549
|
let perplexityVerification;
|
|
@@ -2613,6 +2617,7 @@ async function buildConnectMenu(agent, deps, onProgress) {
|
|
|
2613
2617
|
const voiceStatus = runtimeConfig.ok
|
|
2614
2618
|
? machineRuntime.ok
|
|
2615
2619
|
? elevenLabsApiKey
|
|
2620
|
+
&& elevenLabsVoiceId
|
|
2616
2621
|
&& hasRuntimeConfigValue(machineRuntime.config, "voice.whisperCliPath")
|
|
2617
2622
|
&& hasRuntimeConfigValue(machineRuntime.config, "voice.whisperModelPath")
|
|
2618
2623
|
&& voiceEnabled
|
|
@@ -2719,6 +2724,7 @@ async function buildConnectMenu(agent, deps, onProgress) {
|
|
|
2719
2724
|
detailLines: runtimeConfig.ok && machineRuntime.ok
|
|
2720
2725
|
? [
|
|
2721
2726
|
elevenLabsApiKey ? "ElevenLabs API key saved in portable runtime config" : "missing integrations.elevenLabsApiKey",
|
|
2727
|
+
elevenLabsVoiceId ? "ElevenLabs voice ID saved in portable runtime config" : "missing integrations.elevenLabsVoiceId",
|
|
2722
2728
|
hasRuntimeConfigValue(machineRuntime.config, "voice.whisperCliPath") ? "Whisper.cpp CLI path saved for this machine" : "missing voice.whisperCliPath",
|
|
2723
2729
|
hasRuntimeConfigValue(machineRuntime.config, "voice.whisperModelPath") ? "Whisper.cpp model path saved for this machine" : "missing voice.whisperModelPath",
|
|
2724
2730
|
]
|
|
@@ -4270,13 +4276,22 @@ function connectMenuTarget(answer) {
|
|
|
4270
4276
|
async function executeConnectVoice(agent, deps) {
|
|
4271
4277
|
const message = [
|
|
4272
4278
|
`Voice foundation for ${agent}`,
|
|
4273
|
-
"Configure
|
|
4279
|
+
"Configure portable ElevenLabs settings with:",
|
|
4274
4280
|
` ouro vault config set --agent ${agent} --key integrations.elevenLabsApiKey`,
|
|
4281
|
+
` ouro vault config set --agent ${agent} --key integrations.elevenLabsVoiceId`,
|
|
4275
4282
|
"Configure this machine's Whisper.cpp attachment with:",
|
|
4276
4283
|
` ouro vault config set --agent ${agent} --scope machine --key voice.whisperCliPath`,
|
|
4277
4284
|
` ouro vault config set --agent ${agent} --scope machine --key voice.whisperModelPath`,
|
|
4285
|
+
"Optional Twilio phone testing setup:",
|
|
4286
|
+
` ouro vault config set --agent ${agent} --key voice.twilioAccountSid`,
|
|
4287
|
+
` ouro vault config set --agent ${agent} --key voice.twilioAuthToken`,
|
|
4288
|
+
` ouro vault config set --agent ${agent} --scope machine --key voice.twilioPublicUrl`,
|
|
4289
|
+
` ouro vault config set --agent ${agent} --scope machine --key voice.twilioPort --value 18910`,
|
|
4290
|
+
` ouro vault config set --agent ${agent} --scope machine --key voice.twilioDefaultFriendId --value ari`,
|
|
4291
|
+
` node dist/senses/voice-twilio-entry.js --agent ${agent} --port 18910 --public-url https://<cloudflare-tunnel>`,
|
|
4292
|
+
`Set the Twilio number's Voice webhook to POST https://<cloudflare-tunnel>/voice/twilio/incoming.`,
|
|
4278
4293
|
"Then enable agent.json: senses.voice.enabled = true and restart with `ouro up`.",
|
|
4279
|
-
"Meeting links use URL intake plus BlackHole/Multi-Output readiness checks.
|
|
4294
|
+
"Meeting links use URL intake plus BlackHole/Multi-Output readiness checks. Phone testing uses Twilio Record -> Whisper.cpp -> voice session -> ElevenLabs -> Twilio Play.",
|
|
4280
4295
|
].join("\n");
|
|
4281
4296
|
deps.writeStdout(message);
|
|
4282
4297
|
return message;
|
|
@@ -197,6 +197,9 @@ function senseFactsFromRuntimeConfig(agent, senses, runtimeConfig, machineRuntim
|
|
|
197
197
|
const missing = [];
|
|
198
198
|
if (!textField(integrations, "elevenLabsApiKey"))
|
|
199
199
|
missing.push("integrations.elevenLabsApiKey");
|
|
200
|
+
if (!textField(integrations, "elevenLabsVoiceId") && !textField(payload.voice, "elevenLabsVoiceId")) {
|
|
201
|
+
missing.push("integrations.elevenLabsVoiceId");
|
|
202
|
+
}
|
|
200
203
|
if (!textField(voice, "whisperCliPath"))
|
|
201
204
|
missing.push("voice.whisperCliPath");
|
|
202
205
|
if (!textField(voice, "whisperModelPath"))
|
package/dist/heart/identity.js
CHANGED
|
@@ -79,9 +79,9 @@ exports.DEFAULT_AGENT_PHRASES = {
|
|
|
79
79
|
tool: ["running tool"],
|
|
80
80
|
followup: ["processing"],
|
|
81
81
|
};
|
|
82
|
-
exports.DEFAULT_VAULT_SERVER_URL = "https://vault.
|
|
82
|
+
exports.DEFAULT_VAULT_SERVER_URL = "https://vault.ouro.bot";
|
|
83
83
|
exports.LEGACY_VAULT_SERVER_URL_ALIASES = [
|
|
84
|
-
"https://vault.
|
|
84
|
+
"https://vault.ouroboros.bot",
|
|
85
85
|
"https://ouro-vault.gentleflower-74452a1e.eastus2.azurecontainerapps.io",
|
|
86
86
|
];
|
|
87
87
|
function normalizeVaultServerUrl(serverUrl) {
|
|
@@ -165,6 +165,7 @@ function readSenseStatusLines() {
|
|
|
165
165
|
const bluebubbles = recordOrUndefined(machinePayload.bluebubbles) ?? recordOrUndefined(payload.bluebubbles);
|
|
166
166
|
const mailroom = recordOrUndefined(runtimePayload.mailroom) ?? recordOrUndefined(payload.mailroom);
|
|
167
167
|
const voice = recordOrUndefined(machinePayload.voice) ?? recordOrUndefined(payload.voice);
|
|
168
|
+
const portableVoice = recordOrUndefined(runtimePayload.voice) ?? recordOrUndefined(payload.voice);
|
|
168
169
|
const integrations = recordOrUndefined(runtimePayload.integrations) ?? recordOrUndefined(payload.integrations);
|
|
169
170
|
const privateKeys = mailroom?.privateKeys;
|
|
170
171
|
const configured = {
|
|
@@ -172,7 +173,10 @@ function readSenseStatusLines() {
|
|
|
172
173
|
teams: hasTextField(teams, "clientId") && hasTextField(teams, "clientSecret") && hasTextField(teams, "tenantId"),
|
|
173
174
|
bluebubbles: hasTextField(bluebubbles, "serverUrl") && hasTextField(bluebubbles, "password"),
|
|
174
175
|
mail: hasTextField(mailroom, "mailboxAddress") && !!privateKeys && typeof privateKeys === "object" && !Array.isArray(privateKeys),
|
|
175
|
-
voice: hasTextField(integrations, "elevenLabsApiKey")
|
|
176
|
+
voice: hasTextField(integrations, "elevenLabsApiKey")
|
|
177
|
+
&& (hasTextField(integrations, "elevenLabsVoiceId") || hasTextField(portableVoice, "elevenLabsVoiceId"))
|
|
178
|
+
&& hasTextField(voice, "whisperCliPath")
|
|
179
|
+
&& hasTextField(voice, "whisperModelPath"),
|
|
176
180
|
};
|
|
177
181
|
const rows = [
|
|
178
182
|
{ label: "CLI", status: "interactive" },
|
package/dist/mind/prompt.js
CHANGED
|
@@ -441,6 +441,7 @@ function localSenseStatusLines() {
|
|
|
441
441
|
const bluebubbles = recordOrUndefined(machinePayload.bluebubbles) ?? recordOrUndefined(payload.bluebubbles);
|
|
442
442
|
const mailroom = recordOrUndefined(runtimePayload.mailroom) ?? recordOrUndefined(payload.mailroom);
|
|
443
443
|
const voice = recordOrUndefined(machinePayload.voice) ?? recordOrUndefined(payload.voice);
|
|
444
|
+
const portableVoice = recordOrUndefined(runtimePayload.voice) ?? recordOrUndefined(payload.voice);
|
|
444
445
|
const integrations = recordOrUndefined(runtimePayload.integrations) ?? recordOrUndefined(payload.integrations);
|
|
445
446
|
const privateKeys = mailroom?.privateKeys;
|
|
446
447
|
const configured = {
|
|
@@ -448,7 +449,10 @@ function localSenseStatusLines() {
|
|
|
448
449
|
teams: hasTextField(teams, "clientId") && hasTextField(teams, "clientSecret") && hasTextField(teams, "tenantId"),
|
|
449
450
|
bluebubbles: hasTextField(bluebubbles, "serverUrl") && hasTextField(bluebubbles, "password"),
|
|
450
451
|
mail: hasTextField(mailroom, "mailboxAddress") && !!privateKeys && typeof privateKeys === "object" && !Array.isArray(privateKeys),
|
|
451
|
-
voice: hasTextField(integrations, "elevenLabsApiKey")
|
|
452
|
+
voice: hasTextField(integrations, "elevenLabsApiKey")
|
|
453
|
+
&& (hasTextField(integrations, "elevenLabsVoiceId") || hasTextField(portableVoice, "elevenLabsVoiceId"))
|
|
454
|
+
&& hasTextField(voice, "whisperCliPath")
|
|
455
|
+
&& hasTextField(voice, "whisperModelPath"),
|
|
452
456
|
};
|
|
453
457
|
const rows = [
|
|
454
458
|
{ label: "CLI", status: "interactive" },
|
|
@@ -502,7 +506,7 @@ function senseRuntimeGuidance(channel, preReadStatusLines) {
|
|
|
502
506
|
lines.push("mail validation diagnostics: health checks, bounded mail tools, access logs, and UI inspection can support validation, but they are evidence inside those paths, not additional paths. If asked to name golden paths, do not include diagnostic commands, tool names, or status checks in the answer.");
|
|
503
507
|
lines.push("mail diagnostic naming: `ouro doctor` is installation-wide; do not invent `ouro doctor --agent <agent>`.");
|
|
504
508
|
lines.push("mail setup boundaries: do not invent `ouro auth verify --provider mail`, HEY OAuth, HEY IMAP, `ouro mcp call mail ...`, policy flags, autonomous sending, destructive mail actions, or production MX/DNS/forwarding changes. HEY export, HEY forwarding, DNS, MX cutover, sending, and destructive actions require explicit human confirmation.");
|
|
505
|
-
lines.push("voice setup truth: voice sessions are transcript-first local sessions. ElevenLabs credentials belong in portable runtime/config at `integrations.elevenLabsApiKey`; Whisper.cpp CLI/model paths belong in the machine runtime item under `voice.whisperCliPath` and `voice.whisperModelPath`. Meeting links
|
|
509
|
+
lines.push("voice setup truth: voice sessions are transcript-first local sessions. ElevenLabs credentials belong in portable runtime/config at `integrations.elevenLabsApiKey` and `integrations.elevenLabsVoiceId`; Whisper.cpp CLI/model paths belong in the machine runtime item under `voice.whisperCliPath` and `voice.whisperModelPath`. Meeting links have URL intake and local BlackHole/Multi-Output readiness checks; phone testing uses Twilio Record -> Whisper.cpp -> voice session -> ElevenLabs -> Twilio Play. Live browser join/injection remains an explicit handoff edge until provider automation lands.");
|
|
506
510
|
if (channel === "cli") {
|
|
507
511
|
lines.push("cli is interactive: it is available when the user opens it, not something `ouro up` daemonizes.");
|
|
508
512
|
}
|
|
@@ -656,6 +656,12 @@ exports.sessionToolDefinitions = [
|
|
|
656
656
|
detail: "bluebubbles could not resolve a routable target for that session",
|
|
657
657
|
};
|
|
658
658
|
}
|
|
659
|
+
if (result.reason === "blocked_meta_content") {
|
|
660
|
+
return {
|
|
661
|
+
status: "blocked",
|
|
662
|
+
detail: "blocked: contains internal meta markers",
|
|
663
|
+
};
|
|
664
|
+
}
|
|
659
665
|
if (result.reason === "send_error") {
|
|
660
666
|
return {
|
|
661
667
|
status: "failed",
|
|
@@ -39,6 +39,8 @@ const identity_1 = require("../heart/identity");
|
|
|
39
39
|
const surface_tool_1 = require("../senses/surface-tool");
|
|
40
40
|
const obligations_1 = require("../arc/obligations");
|
|
41
41
|
const session_activity_1 = require("../heart/session-activity");
|
|
42
|
+
const bluebubbles_meta_guard_1 = require("../senses/bluebubbles-meta-guard");
|
|
43
|
+
const runtime_1 = require("../nerves/runtime");
|
|
42
44
|
const path = __importStar(require("path"));
|
|
43
45
|
// Surface tool schema — canonical home. Handler lives in senses/surface-tool.ts.
|
|
44
46
|
exports.surfaceToolDef = {
|
|
@@ -71,6 +73,21 @@ exports.surfaceToolDef = {
|
|
|
71
73
|
exports.surfaceToolDefinition = {
|
|
72
74
|
tool: exports.surfaceToolDef,
|
|
73
75
|
handler: async (args, ctx) => {
|
|
76
|
+
const rawContent = args.content ?? "";
|
|
77
|
+
if ((0, bluebubbles_meta_guard_1.containsInternalMetaMarkers)(rawContent)) {
|
|
78
|
+
(0, runtime_1.emitNervesEvent)({
|
|
79
|
+
level: "warn",
|
|
80
|
+
component: "repertoire",
|
|
81
|
+
event: "tools.surface_meta_blocked",
|
|
82
|
+
message: "surface tool blocked: internal meta markers in content",
|
|
83
|
+
meta: {
|
|
84
|
+
hasDelegationId: Boolean(args.delegationId),
|
|
85
|
+
hasFriendId: Boolean(args.friendId),
|
|
86
|
+
contentLength: rawContent.length,
|
|
87
|
+
},
|
|
88
|
+
});
|
|
89
|
+
return "failed — blocked: contains internal meta markers";
|
|
90
|
+
}
|
|
74
91
|
const queue = ctx?.delegatedOrigins ?? [];
|
|
75
92
|
const agentName = (() => { try {
|
|
76
93
|
return (0, identity_1.getAgentName)();
|
|
@@ -69,6 +69,7 @@ const prompt_1 = require("../../mind/prompt");
|
|
|
69
69
|
const mcp_manager_1 = require("../../repertoire/mcp-manager");
|
|
70
70
|
const runtime_1 = require("../../nerves/runtime");
|
|
71
71
|
const proactive_content_guard_1 = require("../proactive-content-guard");
|
|
72
|
+
const bluebubbles_meta_guard_1 = require("../bluebubbles-meta-guard");
|
|
72
73
|
const model_1 = require("./model");
|
|
73
74
|
const client_1 = require("./client");
|
|
74
75
|
const inbound_log_1 = require("./inbound-log");
|
|
@@ -616,6 +617,17 @@ function createBlueBubblesCallbacks(client, chat, replyTarget, isGroupChat, onVi
|
|
|
616
617
|
if (!trimmed)
|
|
617
618
|
return;
|
|
618
619
|
textBuffer = "";
|
|
620
|
+
if ((0, bluebubbles_meta_guard_1.containsInternalMetaMarkers)(trimmed)) {
|
|
621
|
+
(0, bluebubbles_meta_guard_1.emitBluebubblesMetaBlocked)({
|
|
622
|
+
site: "flushNow",
|
|
623
|
+
message: "bluebubbles speak text blocked: internal meta markers",
|
|
624
|
+
meta: {
|
|
625
|
+
chatGuid: chat.chatGuid ?? null,
|
|
626
|
+
messageLength: trimmed.length,
|
|
627
|
+
},
|
|
628
|
+
});
|
|
629
|
+
return;
|
|
630
|
+
}
|
|
619
631
|
await client.sendText({
|
|
620
632
|
chat,
|
|
621
633
|
text: trimmed,
|
|
@@ -650,6 +662,17 @@ function createBlueBubblesCallbacks(client, chat, replyTarget, isGroupChat, onVi
|
|
|
650
662
|
enqueue("typing_stop", async () => { await client.setTyping(chat, false); });
|
|
651
663
|
await queue;
|
|
652
664
|
}
|
|
665
|
+
if ((0, bluebubbles_meta_guard_1.containsInternalMetaMarkers)(trimmed)) {
|
|
666
|
+
(0, bluebubbles_meta_guard_1.emitBluebubblesMetaBlocked)({
|
|
667
|
+
site: "flush",
|
|
668
|
+
message: "bluebubbles outbound text blocked: internal meta markers",
|
|
669
|
+
meta: {
|
|
670
|
+
chatGuid: chat.chatGuid ?? null,
|
|
671
|
+
messageLength: trimmed.length,
|
|
672
|
+
},
|
|
673
|
+
});
|
|
674
|
+
return;
|
|
675
|
+
}
|
|
653
676
|
await client.sendText({
|
|
654
677
|
chat,
|
|
655
678
|
text: trimmed,
|
|
@@ -1925,6 +1948,17 @@ function buildChatRefForSessionKey(friend, sessionKey) {
|
|
|
1925
1948
|
};
|
|
1926
1949
|
}
|
|
1927
1950
|
async function sendProactiveBlueBubblesMessageToSession(params, deps = {}) {
|
|
1951
|
+
if ((0, bluebubbles_meta_guard_1.containsInternalMetaMarkers)(params.text)) {
|
|
1952
|
+
(0, bluebubbles_meta_guard_1.emitBluebubblesMetaBlocked)({
|
|
1953
|
+
site: "proactive",
|
|
1954
|
+
message: "bluebubbles proactive send blocked: internal meta markers",
|
|
1955
|
+
meta: {
|
|
1956
|
+
friendId: params.friendId,
|
|
1957
|
+
sessionKey: params.sessionKey,
|
|
1958
|
+
},
|
|
1959
|
+
});
|
|
1960
|
+
return { delivered: false, reason: "blocked_meta_content" };
|
|
1961
|
+
}
|
|
1928
1962
|
const resolvedDeps = { ...defaultDeps, ...deps };
|
|
1929
1963
|
const client = resolvedDeps.createClient();
|
|
1930
1964
|
const store = resolvedDeps.createFriendStore();
|
|
@@ -2131,6 +2165,22 @@ async function drainAndSendPendingBlueBubbles(deps = {}, pendingRoot) {
|
|
|
2131
2165
|
catch { /* ignore */ }
|
|
2132
2166
|
continue;
|
|
2133
2167
|
}
|
|
2168
|
+
if ((0, bluebubbles_meta_guard_1.containsInternalMetaMarkers)(messageText)) {
|
|
2169
|
+
result.skipped++;
|
|
2170
|
+
try {
|
|
2171
|
+
fs.unlinkSync(filePath);
|
|
2172
|
+
}
|
|
2173
|
+
catch { /* ignore */ }
|
|
2174
|
+
(0, bluebubbles_meta_guard_1.emitBluebubblesMetaBlocked)({
|
|
2175
|
+
site: "drain",
|
|
2176
|
+
message: "bluebubbles drain blocked: internal meta markers",
|
|
2177
|
+
meta: {
|
|
2178
|
+
friendId,
|
|
2179
|
+
filePath,
|
|
2180
|
+
},
|
|
2181
|
+
});
|
|
2182
|
+
continue;
|
|
2183
|
+
}
|
|
2134
2184
|
const internalBlockReason = (0, proactive_content_guard_1.getProactiveInternalContentBlockReason)(messageText);
|
|
2135
2185
|
if (internalBlockReason) {
|
|
2136
2186
|
result.skipped++;
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
// Outbound BlueBubbles meta-content guard.
|
|
3
|
+
//
|
|
4
|
+
// Blocks accidental delivery of internal/meta text — pipeline section markers,
|
|
5
|
+
// surfacing-mechanics prefixes, reasoning tags — to the live iMessage channel.
|
|
6
|
+
// Failure mode is "drop and log", never queue for later delivery.
|
|
7
|
+
//
|
|
8
|
+
// Patterns are deliberately narrow: bracketed system markers and angle-bracket
|
|
9
|
+
// reasoning tags. Plain prose mentioning "inner dialog" or "attention queue"
|
|
10
|
+
// is NOT blocked, so user-facing replies that legitimately discuss those
|
|
11
|
+
// concepts still pass.
|
|
12
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
13
|
+
exports.containsInternalMetaMarkers = containsInternalMetaMarkers;
|
|
14
|
+
exports.emitBluebubblesMetaBlocked = emitBluebubblesMetaBlocked;
|
|
15
|
+
const runtime_1 = require("../nerves/runtime");
|
|
16
|
+
const META_CONTENT_PATTERNS = [
|
|
17
|
+
/\[surfaced from inner dialog\]/i,
|
|
18
|
+
/\[pending from [^\]]+\]:/i,
|
|
19
|
+
/\[conversation scope:/i,
|
|
20
|
+
/\[recent active lanes\]/i,
|
|
21
|
+
/\[routing control:/i,
|
|
22
|
+
/<\/?think>/i,
|
|
23
|
+
];
|
|
24
|
+
function containsInternalMetaMarkers(text) {
|
|
25
|
+
if (!text)
|
|
26
|
+
return false;
|
|
27
|
+
return META_CONTENT_PATTERNS.some((pattern) => pattern.test(text));
|
|
28
|
+
}
|
|
29
|
+
function emitBluebubblesMetaBlocked(options) {
|
|
30
|
+
(0, runtime_1.emitNervesEvent)({
|
|
31
|
+
level: "warn",
|
|
32
|
+
component: "senses",
|
|
33
|
+
event: "senses.bluebubbles_meta_blocked",
|
|
34
|
+
message: options.message,
|
|
35
|
+
meta: {
|
|
36
|
+
site: options.site,
|
|
37
|
+
...options.meta,
|
|
38
|
+
},
|
|
39
|
+
});
|
|
40
|
+
}
|
|
@@ -0,0 +1,462 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
14
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
15
|
+
}) : function(o, v) {
|
|
16
|
+
o["default"] = v;
|
|
17
|
+
});
|
|
18
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
19
|
+
var ownKeys = function(o) {
|
|
20
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
21
|
+
var ar = [];
|
|
22
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
23
|
+
return ar;
|
|
24
|
+
};
|
|
25
|
+
return ownKeys(o);
|
|
26
|
+
};
|
|
27
|
+
return function (mod) {
|
|
28
|
+
if (mod && mod.__esModule) return mod;
|
|
29
|
+
var result = {};
|
|
30
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
31
|
+
__setModuleDefault(result, mod);
|
|
32
|
+
return result;
|
|
33
|
+
};
|
|
34
|
+
})();
|
|
35
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
36
|
+
exports.TWILIO_PHONE_WEBHOOK_BASE_PATH = exports.DEFAULT_TWILIO_RECORD_MAX_LENGTH_SECONDS = exports.DEFAULT_TWILIO_RECORD_TIMEOUT_SECONDS = exports.DEFAULT_TWILIO_PHONE_PORT = void 0;
|
|
37
|
+
exports.computeTwilioSignature = computeTwilioSignature;
|
|
38
|
+
exports.validateTwilioSignature = validateTwilioSignature;
|
|
39
|
+
exports.twilioRecordingMediaUrl = twilioRecordingMediaUrl;
|
|
40
|
+
exports.defaultTwilioRecordingDownloader = defaultTwilioRecordingDownloader;
|
|
41
|
+
exports.createTwilioPhoneBridge = createTwilioPhoneBridge;
|
|
42
|
+
exports.startTwilioPhoneBridgeServer = startTwilioPhoneBridgeServer;
|
|
43
|
+
const crypto = __importStar(require("node:crypto"));
|
|
44
|
+
const fs = __importStar(require("fs/promises"));
|
|
45
|
+
const http = __importStar(require("http"));
|
|
46
|
+
const path = __importStar(require("path"));
|
|
47
|
+
const runtime_1 = require("../../nerves/runtime");
|
|
48
|
+
const playback_1 = require("./playback");
|
|
49
|
+
const turn_1 = require("./turn");
|
|
50
|
+
exports.DEFAULT_TWILIO_PHONE_PORT = 18910;
|
|
51
|
+
exports.DEFAULT_TWILIO_RECORD_TIMEOUT_SECONDS = 2;
|
|
52
|
+
exports.DEFAULT_TWILIO_RECORD_MAX_LENGTH_SECONDS = 30;
|
|
53
|
+
exports.TWILIO_PHONE_WEBHOOK_BASE_PATH = "/voice/twilio";
|
|
54
|
+
function bodyText(body) {
|
|
55
|
+
if (body === undefined)
|
|
56
|
+
return "";
|
|
57
|
+
if (typeof body === "string")
|
|
58
|
+
return body;
|
|
59
|
+
return Buffer.from(body).toString("utf8");
|
|
60
|
+
}
|
|
61
|
+
function formParams(rawBody) {
|
|
62
|
+
const parsed = new URLSearchParams(rawBody);
|
|
63
|
+
const params = {};
|
|
64
|
+
for (const [key, value] of parsed) {
|
|
65
|
+
params[key] = value;
|
|
66
|
+
}
|
|
67
|
+
return params;
|
|
68
|
+
}
|
|
69
|
+
function headerValue(headers, name) {
|
|
70
|
+
const wanted = name.toLowerCase();
|
|
71
|
+
for (const [key, value] of Object.entries(headers)) {
|
|
72
|
+
if (key.toLowerCase() !== wanted)
|
|
73
|
+
continue;
|
|
74
|
+
if (Array.isArray(value))
|
|
75
|
+
return value[0] ?? "";
|
|
76
|
+
return value ?? "";
|
|
77
|
+
}
|
|
78
|
+
return "";
|
|
79
|
+
}
|
|
80
|
+
function xmlResponse(body) {
|
|
81
|
+
return {
|
|
82
|
+
statusCode: 200,
|
|
83
|
+
headers: { "content-type": "text/xml; charset=utf-8" },
|
|
84
|
+
body: `<?xml version="1.0" encoding="UTF-8"?><Response>${body}</Response>`,
|
|
85
|
+
};
|
|
86
|
+
}
|
|
87
|
+
function textResponse(statusCode, body) {
|
|
88
|
+
return {
|
|
89
|
+
statusCode,
|
|
90
|
+
headers: { "content-type": "text/plain; charset=utf-8" },
|
|
91
|
+
body,
|
|
92
|
+
};
|
|
93
|
+
}
|
|
94
|
+
function binaryResponse(body, contentType) {
|
|
95
|
+
return {
|
|
96
|
+
statusCode: 200,
|
|
97
|
+
headers: {
|
|
98
|
+
"content-type": contentType,
|
|
99
|
+
"cache-control": "private, max-age=300",
|
|
100
|
+
},
|
|
101
|
+
body,
|
|
102
|
+
};
|
|
103
|
+
}
|
|
104
|
+
function escapeXml(input) {
|
|
105
|
+
return input
|
|
106
|
+
.replace(/&/g, "&")
|
|
107
|
+
.replace(/</g, "<")
|
|
108
|
+
.replace(/>/g, ">")
|
|
109
|
+
.replace(/"/g, """)
|
|
110
|
+
.replace(/'/g, "'");
|
|
111
|
+
}
|
|
112
|
+
function routeUrl(publicBaseUrl, route) {
|
|
113
|
+
return new URL(route, publicBaseUrl).toString();
|
|
114
|
+
}
|
|
115
|
+
function requestPublicUrl(publicBaseUrl, requestPath) {
|
|
116
|
+
return routeUrl(publicBaseUrl, requestPath);
|
|
117
|
+
}
|
|
118
|
+
function recordTwiml(options) {
|
|
119
|
+
return `<Record action="${escapeXml(routeUrl(options.publicBaseUrl, `${exports.TWILIO_PHONE_WEBHOOK_BASE_PATH}/recording`))}" method="POST" playBeep="false" timeout="${options.timeoutSeconds}" maxLength="${options.maxLengthSeconds}" trim="trim-silence" />`;
|
|
120
|
+
}
|
|
121
|
+
function redirectTwiml(publicBaseUrl) {
|
|
122
|
+
return `<Redirect method="POST">${escapeXml(routeUrl(publicBaseUrl, `${exports.TWILIO_PHONE_WEBHOOK_BASE_PATH}/listen`))}</Redirect>`;
|
|
123
|
+
}
|
|
124
|
+
function sayTwiml(message) {
|
|
125
|
+
return `<Say>${escapeXml(message)}</Say>`;
|
|
126
|
+
}
|
|
127
|
+
function playTwiml(url) {
|
|
128
|
+
return `<Play>${escapeXml(url)}</Play>`;
|
|
129
|
+
}
|
|
130
|
+
function safeSegment(input) {
|
|
131
|
+
const cleaned = input.trim().replace(/[^A-Za-z0-9._-]+/g, "-").replace(/^-+|-+$/g, "");
|
|
132
|
+
return cleaned || "unknown";
|
|
133
|
+
}
|
|
134
|
+
function decodeSafeSegment(input) {
|
|
135
|
+
try {
|
|
136
|
+
const decoded = decodeURIComponent(input);
|
|
137
|
+
if (!/^[A-Za-z0-9._-]+$/.test(decoded))
|
|
138
|
+
return null;
|
|
139
|
+
if (decoded === "." || decoded === "..")
|
|
140
|
+
return null;
|
|
141
|
+
return decoded;
|
|
142
|
+
}
|
|
143
|
+
catch {
|
|
144
|
+
return null;
|
|
145
|
+
}
|
|
146
|
+
}
|
|
147
|
+
function contentTypeForAudio(fileName) {
|
|
148
|
+
const ext = path.extname(fileName).toLowerCase();
|
|
149
|
+
if (ext === ".mp3")
|
|
150
|
+
return "audio/mpeg";
|
|
151
|
+
if (ext === ".wav")
|
|
152
|
+
return "audio/wav";
|
|
153
|
+
if (ext === ".pcm")
|
|
154
|
+
return "audio/pcm";
|
|
155
|
+
return "application/octet-stream";
|
|
156
|
+
}
|
|
157
|
+
function friendIdFromCaller(from, callSid) {
|
|
158
|
+
const phoneish = from.replace(/[^0-9A-Za-z]+/g, "");
|
|
159
|
+
return phoneish ? `twilio-${phoneish}` : `twilio-${safeSegment(callSid)}`;
|
|
160
|
+
}
|
|
161
|
+
function parseRecordingParams(params) {
|
|
162
|
+
const callSid = params.CallSid?.trim();
|
|
163
|
+
const recordingSid = params.RecordingSid?.trim();
|
|
164
|
+
const recordingUrl = params.RecordingUrl?.trim();
|
|
165
|
+
if (!callSid || !recordingSid || !recordingUrl)
|
|
166
|
+
return null;
|
|
167
|
+
return {
|
|
168
|
+
callSid,
|
|
169
|
+
recordingSid,
|
|
170
|
+
recordingUrl,
|
|
171
|
+
from: params.From?.trim() ?? "",
|
|
172
|
+
};
|
|
173
|
+
}
|
|
174
|
+
function recordAgainResponse(publicBaseUrl, message) {
|
|
175
|
+
return xmlResponse(`${sayTwiml(message)}${recordTwiml({
|
|
176
|
+
publicBaseUrl,
|
|
177
|
+
timeoutSeconds: exports.DEFAULT_TWILIO_RECORD_TIMEOUT_SECONDS,
|
|
178
|
+
maxLengthSeconds: exports.DEFAULT_TWILIO_RECORD_MAX_LENGTH_SECONDS,
|
|
179
|
+
})}`);
|
|
180
|
+
}
|
|
181
|
+
function errorMessage(error) {
|
|
182
|
+
return error instanceof Error ? error.message : String(error);
|
|
183
|
+
}
|
|
184
|
+
function computeTwilioSignature(input) {
|
|
185
|
+
const payload = input.url + Object.keys(input.params)
|
|
186
|
+
.sort()
|
|
187
|
+
.map((key) => `${key}${input.params[key]}`)
|
|
188
|
+
.join("");
|
|
189
|
+
return crypto.createHmac("sha1", input.authToken).update(payload).digest("base64");
|
|
190
|
+
}
|
|
191
|
+
// Validates an X-Twilio-Signature header. A blank auth token disables
// validation entirely (returns true); a blank signature always fails.
// The comparison is constant-time; the explicit length check guards
// crypto.timingSafeEqual, which throws on unequal buffer lengths.
function validateTwilioSignature(input) {
    if (!input.authToken.trim()) {
        return true;
    }
    if (!input.signature.trim()) {
        return false;
    }
    const expectedBytes = Buffer.from(computeTwilioSignature(input));
    const actualBytes = Buffer.from(input.signature);
    if (actualBytes.length !== expectedBytes.length) {
        return false;
    }
    return crypto.timingSafeEqual(actualBytes, expectedBytes);
}
|
|
200
|
+
// Twilio's RecordingUrl omits the media extension; appending ".wav" selects
// the WAV rendition of the recording. URLs that already end in an extension
// pass through unchanged. Query strings are preserved.
function twilioRecordingMediaUrl(recordingUrl) {
    const parsed = new URL(recordingUrl);
    const hasExtension = /\.[A-Za-z0-9]+$/.test(parsed.pathname);
    if (!hasExtension) {
        parsed.pathname += ".wav";
    }
    return parsed.toString();
}
|
|
207
|
+
// Downloads a Twilio recording into a Buffer. When both account SID and
// auth token are supplied they are sent as HTTP Basic credentials;
// otherwise the request goes out unauthenticated. Throws on non-2xx.
async function defaultTwilioRecordingDownloader(request) {
    const headers = {};
    const { accountSid, authToken } = request;
    if (accountSid && authToken) {
        const basic = Buffer.from(`${accountSid}:${authToken}`).toString("base64");
        headers.Authorization = `Basic ${basic}`;
    }
    const response = await fetch(request.recordingUrl, { headers });
    if (!response.ok) {
        const detail = `${response.status} ${response.statusText}`.trim();
        throw new Error(`Twilio recording download failed: ${detail}`);
    }
    const bytes = await response.arrayBuffer();
    return Buffer.from(bytes);
}
|
|
218
|
+
// Verifies the Twilio signature on an inbound webhook request. With no
// configured auth token, verification is skipped and the request passes.
function verifyRequest(options, request, params) {
    const authToken = options.twilioAuthToken?.trim();
    if (!authToken) {
        return true;
    }
    const url = requestPublicUrl(options.publicBaseUrl, request.path);
    const signature = headerValue(request.headers, "x-twilio-signature");
    return validateTwilioSignature({ authToken, url, params, signature });
}
|
|
229
|
+
// Answers a new inbound call: emits a telemetry event, greets the caller
// (configurable via greetingText), and arms the first <Record> so the
// caller's utterance gets captured.
async function handleIncoming(options) {
    const greeting = options.greetingText ?? "Connected to Ouro voice. Speak after the prompt.";
    (0, runtime_1.emitNervesEvent)({
        component: "senses",
        event: "senses.voice_twilio_incoming",
        message: "Twilio voice call connected",
        meta: { agentName: options.agentName },
    });
    const record = recordTwiml({
        publicBaseUrl: options.publicBaseUrl,
        timeoutSeconds: options.recordTimeoutSeconds ?? exports.DEFAULT_TWILIO_RECORD_TIMEOUT_SECONDS,
        maxLengthSeconds: options.recordMaxLengthSeconds ?? exports.DEFAULT_TWILIO_RECORD_MAX_LENGTH_SECONDS,
    });
    return xmlResponse(`${sayTwiml(greeting)}${record}`);
}
|
|
243
|
+
// Continues a call by re-arming <Record> without any spoken prompt.
async function handleListen(options) {
    const record = recordTwiml({
        publicBaseUrl: options.publicBaseUrl,
        timeoutSeconds: options.recordTimeoutSeconds ?? exports.DEFAULT_TWILIO_RECORD_TIMEOUT_SECONDS,
        maxLengthSeconds: options.recordMaxLengthSeconds ?? exports.DEFAULT_TWILIO_RECORD_MAX_LENGTH_SECONDS,
    });
    return xmlResponse(record);
}
|
|
250
|
+
// Handles Twilio's recording-complete callback for one caller utterance:
// download the audio, transcribe it, run one agent turn, synthesize the
// reply, then return TwiML that plays the reply and redirects back into the
// listen loop. Any failure is logged and answered with a spoken retry
// prompt instead of an HTTP error, so the call stays alive.
async function handleRecording(options, params) {
    const recording = parseRecordingParams(params);
    if (!recording) {
        (0, runtime_1.emitNervesEvent)({
            level: "warn",
            component: "senses",
            event: "senses.voice_twilio_recording_rejected",
            message: "Twilio recording callback was missing required fields",
            meta: { agentName: options.agentName },
        });
        return recordAgainResponse(options.publicBaseUrl, "I did not receive audio. Please try again.");
    }
    // Sanitize both SIDs before they are used in paths, ids, and log meta.
    const safeCallSid = safeSegment(recording.callSid);
    const safeRecordingSid = safeSegment(recording.recordingSid);
    const callDir = path.join(options.outputDir, safeCallSid);
    const inputPath = path.join(callDir, `${safeRecordingSid}.wav`);
    const utteranceId = `twilio-${safeCallSid}-${safeRecordingSid}`;
    // Tests may inject a downloader; production uses the authenticated fetch.
    const downloadRecording = options.downloadRecording ?? defaultTwilioRecordingDownloader;
    (0, runtime_1.emitNervesEvent)({
        component: "senses",
        event: "senses.voice_twilio_turn_start",
        message: "starting Twilio voice turn",
        meta: { agentName: options.agentName, callSid: safeCallSid, recordingSid: safeRecordingSid },
    });
    try {
        await fs.mkdir(callDir, { recursive: true });
        const mediaUrl = twilioRecordingMediaUrl(recording.recordingUrl);
        const audio = await downloadRecording({
            recordingUrl: mediaUrl,
            // Blank credentials are normalized to undefined so the downloader
            // skips the Authorization header entirely.
            accountSid: options.twilioAccountSid?.trim() || undefined,
            authToken: options.twilioAuthToken?.trim() || undefined,
        });
        await fs.writeFile(inputPath, audio);
        const transcript = await options.transcriber.transcribe({
            utteranceId,
            audioPath: inputPath,
        });
        const turn = await (0, turn_1.runVoiceLoopbackTurn)({
            agentName: options.agentName,
            // Explicitly configured friend id wins; otherwise derive one from
            // the caller's number (or call SID as a last resort).
            friendId: options.defaultFriendId?.trim() || friendIdFromCaller(recording.from, recording.callSid),
            sessionKey: `twilio-${safeCallSid}`,
            transcript,
            tts: options.tts,
            runSenseTurn: options.runSenseTurn,
        });
        if (turn.tts.status !== "delivered") {
            // Text turn succeeded but TTS did not: tell the caller and loop.
            return xmlResponse(`${sayTwiml("voice output failed after the text response was captured.")}${redirectTwiml(options.publicBaseUrl)}`);
        }
        const playback = await (0, playback_1.writeVoicePlaybackArtifact)({
            utteranceId,
            delivery: turn.tts,
            outputDir: callDir,
        });
        // Public URL Twilio will fetch via the GET audio route below.
        const audioUrl = routeUrl(options.publicBaseUrl, `${exports.TWILIO_PHONE_WEBHOOK_BASE_PATH}/audio/${encodeURIComponent(safeCallSid)}/${encodeURIComponent(path.basename(playback.audioPath))}`);
        (0, runtime_1.emitNervesEvent)({
            component: "senses",
            event: "senses.voice_twilio_turn_end",
            message: "finished Twilio voice turn",
            meta: { agentName: options.agentName, callSid: safeCallSid, recordingSid: safeRecordingSid, audioPath: playback.audioPath },
        });
        return xmlResponse(`${playTwiml(audioUrl)}${redirectTwiml(options.publicBaseUrl)}`);
    }
    catch (error) {
        (0, runtime_1.emitNervesEvent)({
            level: "error",
            component: "senses",
            event: "senses.voice_twilio_turn_error",
            message: "Twilio voice turn failed",
            meta: {
                agentName: options.agentName,
                callSid: safeCallSid,
                recordingSid: safeRecordingSid,
                error: errorMessage(error),
            },
        });
        return xmlResponse(`${sayTwiml("I could not process that audio. Please try again.")}${redirectTwiml(options.publicBaseUrl)}`);
    }
}
|
|
328
|
+
// Serves a synthesized reply artifact back to Twilio's <Play> verb.
// Expects GET <base>/audio/<callSid>/<fileName>; both segments must decode
// via decodeSafeSegment. Missing or unreadable files answer 404 so callers
// cannot probe the filesystem.
async function handleAudio(options, requestPath) {
    const prefix = `${exports.TWILIO_PHONE_WEBHOOK_BASE_PATH}/audio/`;
    const pathOnly = requestPath.split("?")[0];
    const rest = pathOnly.slice(prefix.length);
    const parts = rest.split("/");
    if (parts.length !== 2)
        return textResponse(404, "not found");
    const [callSidPart, fileNamePart] = parts;
    const callSid = decodeSafeSegment(callSidPart);
    const fileName = decodeSafeSegment(fileNamePart);
    if (!callSid || !fileName)
        return textResponse(404, "not found");
    const rootDir = path.resolve(options.outputDir);
    const baseDir = path.resolve(rootDir, callSid);
    const audioPath = path.resolve(baseDir, fileName);
    // Defense in depth against path traversal: even if decodeSafeSegment ever
    // admits ".." or separators, refuse any resolved path that escapes the
    // per-call directory under the configured output root.
    if (!baseDir.startsWith(rootDir + path.sep) || !audioPath.startsWith(baseDir + path.sep)) {
        return textResponse(404, "not found");
    }
    try {
        const audio = await fs.readFile(audioPath);
        (0, runtime_1.emitNervesEvent)({
            component: "senses",
            event: "senses.voice_twilio_audio_served",
            message: "served Twilio voice audio artifact",
            meta: { agentName: options.agentName, callSid, fileName },
        });
        return binaryResponse(audio, contentTypeForAudio(fileName));
    }
    catch {
        // ENOENT, EACCES, etc. all collapse to 404 — no detail leaks out.
        return textResponse(404, "not found");
    }
}
|
|
356
|
+
// Builds the Twilio webhook router. publicBaseUrl is validated eagerly
// (throws on a malformed URL). GET audio/health routes bypass signature
// checks; every POST must carry a valid Twilio signature before it is
// dispatched to the incoming/listen/recording handlers.
function createTwilioPhoneBridge(options) {
    new URL(options.publicBaseUrl); // fail fast on a malformed public base URL
    const handle = async (request) => {
        const method = request.method.toUpperCase();
        const requestPath = request.path.startsWith("/") ? request.path : `/${request.path}`;
        const routePath = requestPath.split("?")[0];
        const basePath = exports.TWILIO_PHONE_WEBHOOK_BASE_PATH;
        if (method === "GET") {
            if (requestPath.startsWith(`${basePath}/audio/`)) {
                return handleAudio(options, requestPath);
            }
            if (routePath === `${basePath}/health`) {
                return textResponse(200, "ok");
            }
        }
        if (method !== "POST") {
            return textResponse(405, "method not allowed");
        }
        const params = formParams(bodyText(request.body));
        if (!verifyRequest(options, { ...request, path: requestPath }, params)) {
            (0, runtime_1.emitNervesEvent)({
                level: "warn",
                component: "senses",
                event: "senses.voice_twilio_signature_rejected",
                message: "rejected Twilio webhook with invalid signature",
                meta: { agentName: options.agentName, path: requestPath },
            });
            return textResponse(403, "invalid Twilio signature");
        }
        switch (routePath) {
            case `${basePath}/incoming`:
                return handleIncoming(options);
            case `${basePath}/listen`:
                return handleListen(options);
            case `${basePath}/recording`:
                return handleRecording(options, params);
            default:
                return textResponse(404, "not found");
        }
    };
    return { handle };
}
|
|
392
|
+
// Buffers an incoming HTTP request body into a single Buffer, rejecting
// (and destroying the request) as soon as it exceeds `limitBytes` to bound
// memory usage per request.
function readRequestBody(req, limitBytes = 1_000_000) {
    return new Promise((resolve, reject) => {
        const collected = [];
        let total = 0;
        req.on("data", (chunk) => {
            total += chunk.byteLength;
            if (total > limitBytes) {
                reject(new Error("request body too large"));
                req.destroy();
                return;
            }
            collected.push(chunk);
        });
        req.on("end", () => {
            resolve(Buffer.concat(collected));
        });
        req.on("error", reject);
    });
}
|
|
409
|
+
// Starts an HTTP server that feeds requests into the Twilio phone bridge.
// Resolves once the socket is listening; rejects if binding fails (e.g.
// EADDRINUSE). Handler errors are logged and answered with a plain 500.
// Fix: the start event previously logged the *requested* port, which is 0
// when an ephemeral port is asked for; it now logs the actually bound port.
async function startTwilioPhoneBridgeServer(options) {
    const port = options.port ?? exports.DEFAULT_TWILIO_PHONE_PORT;
    const host = options.host ?? "127.0.0.1";
    const bridge = createTwilioPhoneBridge(options);
    const server = http.createServer(async (req, res) => {
        try {
            const body = await readRequestBody(req);
            const response = await bridge.handle({
                method: req.method,
                path: req.url,
                headers: req.headers,
                body,
            });
            res.writeHead(response.statusCode, response.headers);
            res.end(response.body);
        }
        catch (error) {
            (0, runtime_1.emitNervesEvent)({
                level: "error",
                component: "senses",
                event: "senses.voice_twilio_server_error",
                message: "Twilio voice bridge server failed a request",
                meta: { agentName: options.agentName, error: errorMessage(error) },
            });
            res.writeHead(500, { "content-type": "text/plain; charset=utf-8" });
            res.end("internal server error");
        }
    });
    await new Promise((resolve, reject) => {
        // One-shot listeners: whichever fires first detaches the other so
        // neither settles the promise twice.
        const onError = (error) => {
            server.off("listening", onListening);
            reject(error);
        };
        const onListening = () => {
            server.off("error", onError);
            resolve();
        };
        server.once("error", onError);
        server.once("listening", onListening);
        server.listen(port, host);
    });
    // Read the bound port before logging so an ephemeral request (port 0)
    // reports the real port instead of 0.
    const actualPort = server.address().port;
    (0, runtime_1.emitNervesEvent)({
        component: "senses",
        event: "senses.voice_twilio_server_start",
        message: "Twilio voice bridge server started",
        meta: { agentName: options.agentName, host, port: actualPort, publicBaseUrl: options.publicBaseUrl },
    });
    return {
        bridge,
        server,
        localUrl: `http://${host}:${actualPort}`,
    };
}
|
|
@@ -0,0 +1,216 @@
|
|
|
1
|
+
"use strict";
// --- tsc-generated CommonJS interop helpers (standard TypeScript emit). ---
// __createBinding: re-exposes property `k` of module `m` as `k2` on `o`,
// using a live getter when property descriptors are available so later
// mutations of the source module stay visible.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    // Legacy engines without Object.create: plain copy, no live binding.
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// __setModuleDefault: attaches the CommonJS module object as `default` on
// the synthesized namespace.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// __importStar: emulates `import * as ns` over a CommonJS module — copies
// every own key except "default", then tags the result via
// __setModuleDefault. Genuine ES modules (`__esModule`) pass through as-is.
var __importStar = (this && this.__importStar) || (function () {
    // ownKeys self-replaces on first call with the best available key lister.
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
Object.defineProperty(exports, "__esModule", { value: true });
|
|
36
|
+
// Reads the mandatory `--agent <name>` CLI argument; exits the process with
// usage guidance when it is absent.
// Fix: a following flag (e.g. `--agent --public-url ...`) no longer counts
// as the agent name, matching argValue()'s handling of flag values.
function readRequiredAgentName() {
    const agentArgIndex = process.argv.indexOf("--agent");
    const value = agentArgIndex >= 0 ? process.argv[agentArgIndex + 1] : undefined;
    if (value && !value.startsWith("--"))
        return value;
    process.stderr.write("Missing required --agent <name> argument.\nUsage: node dist/senses/voice-twilio-entry.js --agent ouroboros --public-url https://<tunnel>\n");
    process.exit(1);
}
|
|
44
|
+
// Resolve the agent name before any project modules load so a bad
// invocation fails fast with usage text instead of a deep import error.
const agentName = readRequiredAgentName();
const path = __importStar(require("path"));
// Project-internal modules: identity/config, machine identity, logging,
// credential plumbing, telemetry, and the voice building blocks.
const identity_1 = require("../heart/identity");
const machine_identity_1 = require("../heart/machine-identity");
const runtime_logging_1 = require("../heart/daemon/runtime-logging");
const provider_credentials_1 = require("../heart/provider-credentials");
const runtime_credentials_1 = require("../heart/runtime-credentials");
const runtime_1 = require("../nerves/runtime");
const elevenlabs_1 = require("./voice/elevenlabs");
const whisper_1 = require("./voice/whisper");
const twilio_phone_1 = require("./voice/twilio-phone");
|
|
55
|
+
// Returns the value following a `--flag` argument, or undefined when the
// flag is absent or is immediately followed by another flag.
function argValue(name) {
    const index = process.argv.indexOf(name);
    if (index < 0) {
        return undefined;
    }
    const next = process.argv[index + 1];
    if (!next || next.startsWith("--")) {
        return undefined;
    }
    return next;
}
|
|
62
|
+
// Narrows an unknown value to a plain object record (non-null, non-array);
// anything else yields null.
function asRecord(value) {
    if (value === null || typeof value !== "object" || Array.isArray(value)) {
        return null;
    }
    return value;
}
|
|
65
|
+
// Walks `config` along a dot-separated path and returns the trimmed string
// at the leaf, or undefined when any segment is missing, a non-record is
// traversed, or the leaf is not a non-blank string. (Record check inlined
// from asRecord: non-null object, not an array.)
function configString(config, dottedPath) {
    let cursor = config;
    for (const segment of dottedPath.split(".")) {
        if (cursor === null || typeof cursor !== "object" || Array.isArray(cursor)) {
            return undefined;
        }
        cursor = cursor[segment];
    }
    if (typeof cursor !== "string") {
        return undefined;
    }
    const trimmed = cursor.trim();
    return trimmed ? trimmed : undefined;
}
|
|
75
|
+
// Walks `config` along a dot-separated path and returns the leaf as a
// finite number. Accepts actual numbers and non-blank numeric strings;
// everything else (missing path, NaN/Infinity, non-numeric text) yields
// undefined. (Record check inlined from asRecord.)
function configNumber(config, dottedPath) {
    let cursor = config;
    for (const segment of dottedPath.split(".")) {
        if (cursor === null || typeof cursor !== "object" || Array.isArray(cursor)) {
            return undefined;
        }
        cursor = cursor[segment];
    }
    if (typeof cursor === "number") {
        return Number.isFinite(cursor) ? cursor : undefined;
    }
    if (typeof cursor !== "string" || !cursor.trim()) {
        return undefined;
    }
    const parsed = Number(cursor);
    return Number.isFinite(parsed) ? parsed : undefined;
}
|
|
91
|
+
// Unwraps a config-load result, throwing a labeled error on failure.
function requireConfig(result, label) {
    if (!result.ok) {
        throw new Error(`${label} unavailable: ${result.error}`);
    }
    return result.config;
}
|
|
96
|
+
// Returns `value` when truthy; otherwise throws the supplied operator
// guidance as the error message.
function required(value, guidance) {
    if (!value) {
        throw new Error(guidance);
    }
    return value;
}
|
|
101
|
+
// Reads a numeric `--flag value` CLI argument. Returns undefined when the
// flag is absent or has no usable value; throws when the value is present
// but not a finite number. (Flag-value lookup inlined from argValue: a
// following `--`-prefixed token does not count as a value.)
function numberArg(name) {
    const index = process.argv.indexOf(name);
    const candidate = index >= 0 ? process.argv[index + 1] : undefined;
    const raw = candidate && !candidate.startsWith("--") ? candidate : undefined;
    if (!raw) {
        return undefined;
    }
    const parsed = Number(raw);
    if (!Number.isFinite(parsed)) {
        throw new Error(`${name} must be a number`);
    }
    return parsed;
}
|
|
110
|
+
// Collects the distinct provider ids referenced by an agent config: the
// human-facing and agent-facing providers plus an optional top-level one.
function selectedAgentProviders(config) {
    const selected = [config.humanFacing.provider, config.agentFacing.provider];
    if (config.provider) {
        selected.push(config.provider);
    }
    return [...new Set(selected)];
}
|
|
118
|
+
// Refreshes the provider credential pool for every provider the agent's
// config selects, throwing when the pool cannot be loaded or when any
// selected provider ends up without credentials.
async function cacheSelectedProviderCredentials(agentName) {
    const providers = selectedAgentProviders((0, identity_1.loadAgentConfig)());
    const pool = await (0, provider_credentials_1.refreshProviderCredentialPool)(agentName, { providers });
    if (!pool.ok) {
        throw new Error(`provider credentials unavailable for phone voice: ${pool.error}`);
    }
    const missing = [];
    for (const provider of providers) {
        if (!pool.pool.providers[provider]) {
            missing.push(provider);
        }
    }
    if (missing.length > 0) {
        throw new Error(`missing provider credentials for phone voice: ${missing.join(", ")}`);
    }
}
|
|
129
|
+
// Prints operator-facing startup instructions, including the exact webhook
// URL to paste into the Twilio console.
function writeReadyInstructions(localUrl, publicBaseUrl) {
    const webhookUrl = new URL(`${twilio_phone_1.TWILIO_PHONE_WEBHOOK_BASE_PATH}/incoming`, publicBaseUrl).toString();
    const lines = [
        "Twilio phone voice bridge ready.",
        `local: ${localUrl}`,
        `public: ${publicBaseUrl}`,
        `Twilio Voice webhook: POST ${webhookUrl}`,
        "",
    ];
    process.stdout.write(lines.join("\n"));
}
|
|
138
|
+
// Route this process's runtime logs through the shared "voice" daemon
// logger, then announce the boot so the daemon can track this entrypoint.
(0, runtime_logging_1.configureDaemonRuntimeLogger)("voice");
(0, runtime_1.emitNervesEvent)({
    component: "senses",
    event: "senses.entry_boot",
    message: "booting Twilio Voice entrypoint",
    meta: { entry: "voice-twilio", agentName },
});
|
|
145
|
+
// Entrypoint: resolves all configuration (CLI flags override machine/runtime
// config, which override defaults), builds the Whisper transcriber and
// ElevenLabs TTS client, starts the Twilio phone bridge server, and prints
// operator instructions. Throws (and the top-level catch exits) on any
// missing required setting.
async function main() {
    await (0, runtime_credentials_1.waitForRuntimeCredentialBootstrap)(agentName);
    const machine = (0, machine_identity_1.loadOrCreateMachineIdentity)();
    // Best-effort refresh of both credential configs; failures fall back to
    // whatever is cached (errors deliberately swallowed here).
    await Promise.all([
        (0, runtime_credentials_1.refreshRuntimeCredentialConfig)(agentName, { preserveCachedOnFailure: true }).catch(() => undefined),
        (0, runtime_credentials_1.refreshMachineRuntimeCredentialConfig)(agentName, machine.machineId, { preserveCachedOnFailure: true }).catch(() => undefined),
    ]);
    await cacheSelectedProviderCredentials(agentName);
    const runtimeConfig = requireConfig((0, runtime_credentials_1.readRuntimeCredentialConfig)(agentName), "portable runtime/config");
    const machineConfig = requireConfig((0, runtime_credentials_1.readMachineRuntimeCredentialConfig)(agentName), "machine runtime config");
    // Precedence for each setting: CLI flag, then config, then default.
    const port = numberArg("--port")
        ?? configNumber(machineConfig, "voice.twilioPort")
        ?? twilio_phone_1.DEFAULT_TWILIO_PHONE_PORT;
    const host = argValue("--host")
        ?? configString(machineConfig, "voice.twilioHost")
        ?? "127.0.0.1";
    const publicBaseUrl = required(argValue("--public-url") ?? configString(machineConfig, "voice.twilioPublicUrl"), `missing public URL; run 'cloudflared tunnel --url http://127.0.0.1:${port}' and restart with --public-url https://<tunnel>`);
    const elevenLabsApiKey = required(configString(runtimeConfig, "integrations.elevenLabsApiKey"), "missing integrations.elevenLabsApiKey; run 'ouro connect voice --agent <agent>' for setup guidance");
    const elevenLabsVoiceId = required(argValue("--elevenlabs-voice-id")
        ?? configString(runtimeConfig, "integrations.elevenLabsVoiceId")
        ?? configString(runtimeConfig, "voice.elevenLabsVoiceId"), "missing integrations.elevenLabsVoiceId; save the ElevenLabs voice ID before starting phone voice");
    // Whisper paths are machine-local (binary + model live on this host).
    const whisperCliPath = required(configString(machineConfig, "voice.whisperCliPath"), "missing voice.whisperCliPath in this machine's runtime config");
    const whisperModelPath = required(configString(machineConfig, "voice.whisperModelPath"), "missing voice.whisperModelPath in this machine's runtime config");
    const outputDir = argValue("--output-dir")
        ?? configString(machineConfig, "voice.twilioOutputDir")
        ?? path.join((0, identity_1.getAgentRoot)(agentName), "state", "voice", "twilio-phone");
    const defaultFriendId = argValue("--friend")
        ?? configString(machineConfig, "voice.twilioDefaultFriendId");
    // Twilio credentials are optional; without them signature verification
    // and authenticated downloads are skipped by the bridge.
    const twilioAccountSid = configString(runtimeConfig, "voice.twilioAccountSid");
    const twilioAuthToken = configString(runtimeConfig, "voice.twilioAuthToken");
    const recordTimeoutSeconds = numberArg("--record-timeout")
        ?? configNumber(machineConfig, "voice.twilioRecordTimeoutSeconds")
        ?? twilio_phone_1.DEFAULT_TWILIO_RECORD_TIMEOUT_SECONDS;
    const recordMaxLengthSeconds = numberArg("--record-max-length")
        ?? configNumber(machineConfig, "voice.twilioRecordMaxLengthSeconds")
        ?? twilio_phone_1.DEFAULT_TWILIO_RECORD_MAX_LENGTH_SECONDS;
    const transcriber = (0, whisper_1.createWhisperCppTranscriber)({
        whisperCliPath,
        modelPath: whisperModelPath,
    });
    const tts = (0, elevenlabs_1.createElevenLabsTtsClient)({
        apiKey: elevenLabsApiKey,
        voiceId: elevenLabsVoiceId,
        outputFormat: "mp3_44100_128",
    });
    const bridge = await (0, twilio_phone_1.startTwilioPhoneBridgeServer)({
        agentName,
        publicBaseUrl,
        outputDir,
        transcriber,
        tts,
        port,
        host,
        twilioAccountSid,
        twilioAuthToken,
        defaultFriendId,
        recordTimeoutSeconds,
        recordMaxLengthSeconds,
    });
    writeReadyInstructions(bridge.localUrl, publicBaseUrl);
}
|
|
206
|
+
// Top-level failure handler: emit a structured error event, mirror the
// message to stderr for the operator, and exit non-zero.
main().catch((error) => {
    const message = error instanceof Error ? error.message : String(error);
    (0, runtime_1.emitNervesEvent)({
        level: "error",
        component: "senses",
        event: "senses.entry_error",
        message: "Twilio Voice entrypoint failed",
        meta: { entry: "voice-twilio", agentName, error: message },
    });
    process.stderr.write(`${message}\n`);
    process.exit(1);
});
|