@ouro.bot/cli 0.1.0-alpha.560 → 0.1.0-alpha.562
This diff shows the changes between two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only.
- package/README.md +1 -1
- package/changelog.json +16 -0
- package/dist/heart/daemon/cli-exec.js +17 -2
- package/dist/heart/daemon/sense-manager.js +3 -0
- package/dist/heart/turn-context.js +5 -1
- package/dist/mind/prompt.js +6 -2
- package/dist/repertoire/tools-session.js +6 -0
- package/dist/repertoire/tools-surface.js +17 -0
- package/dist/senses/bluebubbles/index.js +50 -0
- package/dist/senses/bluebubbles-meta-guard.js +40 -0
- package/dist/senses/shared-turn.js +4 -1
- package/dist/senses/voice/audio-routing.js +119 -0
- package/dist/senses/voice/elevenlabs.js +54 -1
- package/dist/senses/voice/golden-path.js +116 -0
- package/dist/senses/voice/index.js +5 -0
- package/dist/senses/voice/meeting.js +113 -0
- package/dist/senses/voice/playback.js +139 -0
- package/dist/senses/voice/twilio-phone.js +462 -0
- package/dist/senses/voice/whisper.js +29 -1
- package/dist/senses/voice-twilio-entry.js +216 -0
- package/package.json +1 -1
package/dist/senses/voice/golden-path.js
@@ -0,0 +1,116 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.runVoiceGoldenPath = runVoiceGoldenPath;
+const runtime_1 = require("../../nerves/runtime");
+const audio_routing_1 = require("./audio-routing");
+const meeting_1 = require("./meeting");
+const playback_1 = require("./playback");
+const turn_1 = require("./turn");
+const defaultVoiceMeetingJoiner = {
+    async join(request) {
+        const result = {
+            status: "manual_required",
+            detail: `Open ${request.meeting.redactedUrl} in a browser profile whose meeting audio is routed through Multi-Output Device.`,
+        };
+        (0, runtime_1.emitNervesEvent)({
+            component: "senses",
+            event: "senses.voice_meeting_join_manual_required",
+            message: "voice meeting join requires browser handoff",
+            meta: {
+                provider: request.meeting.provider,
+                sessionKey: request.meeting.sessionKey,
+                routingStatus: request.audioRouting.status,
+            },
+        });
+        return result;
+    },
+};
+function defaultUtteranceId() {
+    return `voice-${Date.now().toString(36)}`;
+}
+function playbackSkipped() {
+    (0, runtime_1.emitNervesEvent)({
+        component: "senses",
+        event: "senses.voice_golden_path_playback_skipped",
+        message: "voice golden path skipped playback because TTS failed",
+        meta: { reason: "tts_failed" },
+    });
+    return {
+        status: "skipped",
+        reason: "tts_failed",
+        playbackAttempted: false,
+    };
+}
+async function runVoiceGoldenPath(options) {
+    (0, runtime_1.emitNervesEvent)({
+        component: "senses",
+        event: "senses.voice_golden_path_start",
+        message: "starting voice golden path",
+        meta: { agentName: options.agentName, friendId: options.friendId },
+    });
+    try {
+        const meeting = (0, meeting_1.parseVoiceMeetingUrl)(options.meetingUrl);
+        const audioRouting = await (options.inspectAudioRouting ?? audio_routing_1.inspectVoiceAudioRouting)();
+        const joiner = options.meetingJoiner ?? defaultVoiceMeetingJoiner;
+        const join = await joiner.join({ meeting, audioRouting });
+        const utteranceId = options.utteranceId ?? defaultUtteranceId();
+        const transcript = await options.transcriber.transcribe({
+            utteranceId,
+            audioPath: options.audioPath,
+            language: options.language,
+        });
+        const sessionKey = options.sessionKey ?? meeting.sessionKey;
+        const runSenseTurn = options.runSenseTurn;
+        const turn = await (0, turn_1.runVoiceLoopbackTurn)({
+            agentName: options.agentName,
+            friendId: options.friendId,
+            sessionKey,
+            transcript,
+            tts: options.tts,
+            runSenseTurn,
+        });
+        const writePlaybackArtifact = options.writePlaybackArtifact ?? playback_1.writeVoicePlaybackArtifact;
+        const playback = turn.tts.status === "delivered"
+            ? await writePlaybackArtifact({
+                utteranceId,
+                delivery: turn.tts,
+                outputDir: options.outputDir,
+                playAudio: options.playAudio ?? false,
+            })
+            : playbackSkipped();
+        const result = {
+            meeting,
+            audioRouting,
+            join,
+            transcript,
+            responseText: turn.responseText,
+            ponderDeferred: turn.ponderDeferred,
+            tts: turn.tts,
+            playback,
+            sessionKey,
+        };
+        (0, runtime_1.emitNervesEvent)({
+            component: "senses",
+            event: "senses.voice_golden_path_end",
+            message: "finished voice golden path",
+            meta: {
+                sessionKey,
+                joinStatus: join.status,
+                ttsStatus: turn.tts.status,
+                playbackStatus: playback.status,
+            },
+        });
+        return result;
+    }
+    catch (error) {
+        const message = error instanceof Error ? error.message : String(error);
+        (0, runtime_1.emitNervesEvent)({
+            level: "error",
+            component: "senses",
+            event: "senses.voice_golden_path_error",
+            message: "voice golden path failed",
+            meta: { error: message },
+        });
+        throw new Error(`voice golden path failed: ${message}`);
+    }
+}
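For orientation, here is a minimal sketch of how runVoiceGoldenPath can be driven with injected fakes. The option names are read directly off the diff above; the transcript shape, the tts object, and the runSenseTurn callback are contracts defined in ./turn (not shown in this diff), so the values used for them here are assumptions.

const { runVoiceGoldenPath } = require("./golden-path");

async function demo() {
    const result = await runVoiceGoldenPath({
        agentName: "demo-agent",                // hypothetical identifiers
        friendId: "friend-1",
        meetingUrl: "https://riverside.fm/studio/demo-room",
        audioPath: "/tmp/utterance.wav",
        outputDir: "/tmp/voice-artifacts",
        playAudio: false,                       // write the artifact, skip playback
        inspectAudioRouting: async () => ({ status: "ok" }), // assumed shape
        transcriber: {
            // stand-in transcriber; the shipped one is presumably the whisper module
            async transcribe({ utteranceId, audioPath, language }) {
                return "hello from the meeting"; // assumed transcript shape
            },
        },
        tts: { /* consumed by runVoiceLoopbackTurn; contract not in this diff */ },
        runSenseTurn: async () => { /* likewise defined in ./turn */ },
    });
    console.log(result.sessionKey, result.join.status, result.playback.status);
}

Note the seams: every external dependency (routing inspection, meeting join, transcription, playback) is overridable through options, with production defaults filled in when omitted.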
package/dist/senses/voice/index.js
@@ -20,3 +20,8 @@ __exportStar(require("./transcript"), exports);
 __exportStar(require("./whisper"), exports);
 __exportStar(require("./elevenlabs"), exports);
 __exportStar(require("./turn"), exports);
+__exportStar(require("./meeting"), exports);
+__exportStar(require("./audio-routing"), exports);
+__exportStar(require("./playback"), exports);
+__exportStar(require("./golden-path"), exports);
+__exportStar(require("./twilio-phone"), exports);
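With these re-exports in place, all of the new voice modules are reachable from the single senses/voice barrel. A hedged consumer sketch, assuming the compiled dist path is requirable (the package's exports map is not part of this diff):

const voice = require("@ouro.bot/cli/dist/senses/voice");
// runVoiceGoldenPath, parseVoiceMeetingUrl, redactVoiceMeetingUrl, and
// writeVoicePlaybackArtifact all now resolve through the barrel.
console.log(typeof voice.runVoiceGoldenPath); // "function"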
package/dist/senses/voice/meeting.js
@@ -0,0 +1,113 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.redactVoiceMeetingUrl = redactVoiceMeetingUrl;
+exports.parseVoiceMeetingUrl = parseVoiceMeetingUrl;
+const crypto_1 = require("crypto");
+const runtime_1 = require("../../nerves/runtime");
+function isRiversideHost(host) {
+    return host === "riverside.fm" || host === "riverside.com";
+}
+function stableMeetingHash(provider, url) {
+    return (0, crypto_1.createHash)("sha256")
+        .update(`${provider}:${url.protocol}//${url.host}${url.pathname}`)
+        .digest("hex")
+        .slice(0, 12);
+}
+function redactPath(pathname) {
+    const parts = pathname.split("/").filter(Boolean);
+    if (parts.length === 0)
+        return "/";
+    if (parts.length === 1)
+        return "/:redacted";
+    return `/${parts[0]}/:redacted`;
+}
+function redactVoiceMeetingUrl(input) {
+    try {
+        const url = new URL(input);
+        return `${url.protocol}//${url.host}${redactPath(url.pathname)}`;
+    }
+    catch {
+        return ":invalid";
+    }
+}
+function parseVoiceMeetingUrl(input) {
+    const trimmed = input.trim();
+    if (!trimmed) {
+        (0, runtime_1.emitNervesEvent)({
+            level: "error",
+            component: "senses",
+            event: "senses.voice_meeting_rejected",
+            message: "voice meeting URL is empty",
+            meta: { reason: "empty" },
+        });
+        throw new Error("voice meeting URL is empty");
+    }
+    let url;
+    try {
+        url = new URL(trimmed);
+    }
+    catch (error) {
+        (0, runtime_1.emitNervesEvent)({
+            level: "error",
+            component: "senses",
+            event: "senses.voice_meeting_rejected",
+            message: "voice meeting URL is invalid",
+            meta: { reason: "invalid_url" },
+        });
+        throw new Error("voice meeting URL is invalid");
+    }
+    if (url.protocol !== "http:" && url.protocol !== "https:") {
+        (0, runtime_1.emitNervesEvent)({
+            level: "error",
+            component: "senses",
+            event: "senses.voice_meeting_rejected",
+            message: "voice meeting URL must be http or https",
+            meta: { protocol: url.protocol },
+        });
+        throw new Error("voice meeting URL must be http or https");
+    }
+    const host = url.hostname.toLowerCase();
+    const provider = isRiversideHost(host) ? "riverside" : "generic";
+    if (provider === "riverside" && !url.pathname.startsWith("/studio/")) {
+        (0, runtime_1.emitNervesEvent)({
+            level: "error",
+            component: "senses",
+            event: "senses.voice_meeting_rejected",
+            message: "Riverside voice meeting URLs must use /studio/",
+            meta: { host, redactedUrl: redactVoiceMeetingUrl(trimmed) },
+        });
+        throw new Error("Riverside voice meeting URLs must use /studio/");
+    }
+    if (provider === "generic" && url.protocol !== "https:") {
+        (0, runtime_1.emitNervesEvent)({
+            level: "error",
+            component: "senses",
+            event: "senses.voice_meeting_rejected",
+            message: "generic voice meeting URLs must use https",
+            meta: { host, redactedUrl: redactVoiceMeetingUrl(trimmed) },
+        });
+        throw new Error("generic voice meeting URLs must use https");
+    }
+    const hash = stableMeetingHash(provider, url);
+    const meeting = {
+        originalUrl: trimmed,
+        provider,
+        host,
+        pathname: url.pathname,
+        redactedUrl: redactVoiceMeetingUrl(trimmed),
+        sessionKey: `voice-${provider}-${hash}`,
+        requiresBrowserJoin: true,
+    };
+    (0, runtime_1.emitNervesEvent)({
+        component: "senses",
+        event: "senses.voice_meeting_parsed",
+        message: "voice meeting URL parsed",
+        meta: {
+            provider: meeting.provider,
+            host: meeting.host,
+            sessionKey: meeting.sessionKey,
+            redactedUrl: meeting.redactedUrl,
+        },
+    });
+    return meeting;
+}
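The parsing and redaction logic above is self-contained, so its observable contract can be illustrated directly (the example URL is hypothetical):

const { parseVoiceMeetingUrl, redactVoiceMeetingUrl } = require("./meeting");

// Redaction keeps protocol, host, and at most the first path segment:
redactVoiceMeetingUrl("https://riverside.fm/studio/secret-room-token");
// -> "https://riverside.fm/studio/:redacted"

// Parsing derives a stable session key from a sha256 over provider + protocol + host + path:
const meeting = parseVoiceMeetingUrl("https://riverside.fm/studio/secret-room-token");
// meeting.provider === "riverside"
// meeting.sessionKey === "voice-riverside-" + 12 hex chars
// meeting.requiresBrowserJoin === true

// Rejections (each also emits a senses.voice_meeting_rejected nerves event):
// empty input, unparseable URLs, non-http(s) protocols,
// Riverside URLs outside /studio/, and generic (non-Riverside) URLs over plain http.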
package/dist/senses/voice/playback.js
@@ -0,0 +1,139 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.writeVoicePlaybackArtifact = writeVoicePlaybackArtifact;
+const fs = __importStar(require("fs/promises"));
+const path = __importStar(require("path"));
+const runtime_1 = require("../../nerves/runtime");
+const audio_routing_1 = require("./audio-routing");
+function audioExtension(mimeType) {
+    if (mimeType === "audio/mpeg")
+        return "mp3";
+    if (mimeType === "audio/wav" || mimeType === "audio/x-wav")
+        return "wav";
+    if (mimeType.startsWith("audio/pcm"))
+        return "pcm";
+    return "audio";
+}
+function safeFileStem(input) {
+    const stem = input
+        .trim()
+        .toLowerCase()
+        .replace(/[^a-z0-9]+/g, "-")
+        .replace(/^-+|-+$/g, "");
+    return stem || "utterance";
+}
+function commandFailureMessage(exitCode, result) {
+    const stderr = result.stderr?.trim();
+    if (stderr)
+        return `exit ${exitCode}: ${stderr}`;
+    const stdout = result.stdout?.trim();
+    if (stdout)
+        return `exit ${exitCode}: ${stdout}`;
+    return `exit ${exitCode}`;
+}
+async function writeVoicePlaybackArtifact(request) {
+    const mkdir = request.mkdir ?? fs.mkdir;
+    const writeFile = request.writeFile ?? fs.writeFile;
+    const commandRunner = request.commandRunner ?? (0, audio_routing_1.createNodeVoiceCommandRunner)();
+    const timeoutMs = request.timeoutMs ?? 30_000;
+    const playbackCommandPath = request.playbackCommandPath ?? "afplay";
+    const audioPath = path.join(request.outputDir, `${safeFileStem(request.utteranceId)}.${audioExtension(request.delivery.mimeType)}`);
+    await mkdir(request.outputDir, { recursive: true });
+    await writeFile(audioPath, request.delivery.audio);
+    (0, runtime_1.emitNervesEvent)({
+        component: "senses",
+        event: "senses.voice_playback_artifact_written",
+        message: "voice playback artifact written",
+        meta: {
+            utteranceId: request.utteranceId,
+            audioPath,
+            byteLength: request.delivery.byteLength,
+            mimeType: request.delivery.mimeType,
+        },
+    });
+    if (request.playAudio !== true) {
+        return {
+            status: "written",
+            audioPath,
+            byteLength: request.delivery.byteLength,
+            mimeType: request.delivery.mimeType,
+            playbackAttempted: false,
+        };
+    }
+    (0, runtime_1.emitNervesEvent)({
+        component: "senses",
+        event: "senses.voice_playback_start",
+        message: "starting voice playback",
+        meta: { utteranceId: request.utteranceId, audioPath, playbackCommandPath },
+    });
+    try {
+        const result = await commandRunner(playbackCommandPath, [audioPath], { timeoutMs });
+        if (typeof result.exitCode === "number" && result.exitCode !== 0) {
+            throw new Error(commandFailureMessage(result.exitCode, result));
+        }
+        (0, runtime_1.emitNervesEvent)({
+            component: "senses",
+            event: "senses.voice_playback_end",
+            message: "finished voice playback",
+            meta: { utteranceId: request.utteranceId, audioPath },
+        });
+        return {
+            status: "played",
+            audioPath,
+            byteLength: request.delivery.byteLength,
+            mimeType: request.delivery.mimeType,
+            playbackAttempted: true,
+        };
+    }
+    catch (error) {
+        const message = error instanceof Error ? error.message : String(error);
+        (0, runtime_1.emitNervesEvent)({
+            level: "error",
+            component: "senses",
+            event: "senses.voice_playback_error",
+            message: "voice playback failed",
+            meta: { utteranceId: request.utteranceId, audioPath, error: message },
+        });
+        return {
+            status: "failed",
+            audioPath,
+            byteLength: request.delivery.byteLength,
+            mimeType: request.delivery.mimeType,
+            playbackAttempted: true,
+            error: message,
+        };
+    }
+}
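Because writeVoicePlaybackArtifact accepts injected mkdir, writeFile, and commandRunner seams, it can be exercised without touching disk or spawning afplay. A sketch, assuming a delivery object with the audio, byteLength, and mimeType fields the module reads (the real object comes from the TTS turn):

const { writeVoicePlaybackArtifact } = require("./playback");

async function demo() {
    const result = await writeVoicePlaybackArtifact({
        utteranceId: "Voice Demo 01",           // sanitized to "voice-demo-01.mp3"
        delivery: { audio: Buffer.from("fake"), byteLength: 4, mimeType: "audio/mpeg" },
        outputDir: "/tmp/voice-artifacts",
        playAudio: true,
        mkdir: async () => {},                  // injected fs fakes
        writeFile: async () => {},
        commandRunner: async (cmd, args, opts) => ({ exitCode: 0, stdout: "", stderr: "" }),
    });
    console.log(result.status);                 // "played"
}

With playAudio omitted or false the function stops after writing the artifact and returns status "written"; a non-zero exit code from the command runner yields status "failed" with the stderr/stdout folded into the error message.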