@conversionpros/aiva 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +148 -0
- package/auto-deploy.js +190 -0
- package/bin/aiva.js +81 -0
- package/cli-sync.js +126 -0
- package/d2a-prompt-template.txt +106 -0
- package/diagnostics-api.js +304 -0
- package/docs/ara-dedup-fix-scope.md +112 -0
- package/docs/ara-fix-round2-scope.md +61 -0
- package/docs/ara-greeting-fix-scope.md +70 -0
- package/docs/calendar-date-fix-scope.md +28 -0
- package/docs/getting-started.md +115 -0
- package/docs/network-architecture-rollout-scope.md +43 -0
- package/docs/scope-google-oauth-integration.md +351 -0
- package/docs/settings-page-scope.md +50 -0
- package/docs/xai-imagine-scope.md +116 -0
- package/docs/xai-voice-integration-scope.md +115 -0
- package/docs/xai-voice-tools-scope.md +165 -0
- package/email-router.js +512 -0
- package/follow-up-handler.js +606 -0
- package/gateway-monitor.js +158 -0
- package/google-email.js +379 -0
- package/google-oauth.js +310 -0
- package/grok-imagine.js +97 -0
- package/health-reporter.js +287 -0
- package/invisible-prefix-base.txt +206 -0
- package/invisible-prefix-owner.txt +26 -0
- package/invisible-prefix-slim.txt +10 -0
- package/invisible-prefix.txt +43 -0
- package/knowledge-base.js +472 -0
- package/lib/cli.js +19 -0
- package/lib/config.js +124 -0
- package/lib/health.js +57 -0
- package/lib/process.js +207 -0
- package/lib/server.js +42 -0
- package/lib/setup.js +472 -0
- package/meta-capi.js +206 -0
- package/meta-leads.js +411 -0
- package/notion-oauth.js +323 -0
- package/package.json +61 -0
- package/public/agent-config.html +241 -0
- package/public/aiva-avatar-anime.png +0 -0
- package/public/css/docs.css.bak +688 -0
- package/public/css/onboarding.css +543 -0
- package/public/diagrams/claude-subscription-pool.html +329 -0
- package/public/diagrams/claude-subscription-pool.png +0 -0
- package/public/docs-icon.png +0 -0
- package/public/escalation.html +237 -0
- package/public/group-config.html +300 -0
- package/public/icon-192.png +0 -0
- package/public/icon-512.png +0 -0
- package/public/icons/agents.svg +1 -0
- package/public/icons/attach.svg +1 -0
- package/public/icons/characters.svg +1 -0
- package/public/icons/chat.svg +1 -0
- package/public/icons/docs.svg +1 -0
- package/public/icons/heartbeat.svg +1 -0
- package/public/icons/messages.svg +1 -0
- package/public/icons/mic.svg +1 -0
- package/public/icons/notes.svg +1 -0
- package/public/icons/settings.svg +1 -0
- package/public/icons/tasks.svg +1 -0
- package/public/images/onboarding/p0-communication-layer.png +0 -0
- package/public/images/onboarding/p0-infinite-surface.png +0 -0
- package/public/images/onboarding/p0-learning-model.png +0 -0
- package/public/images/onboarding/p0-meet-aiva.png +0 -0
- package/public/images/onboarding/p4-contact-intelligence.png +0 -0
- package/public/images/onboarding/p4-context-compounds.png +0 -0
- package/public/images/onboarding/p4-message-router.png +0 -0
- package/public/images/onboarding/p4-per-contact-rules.png +0 -0
- package/public/images/onboarding/p4-send-messages.png +0 -0
- package/public/images/onboarding/p6-be-precise.png +0 -0
- package/public/images/onboarding/p6-review-escalations.png +0 -0
- package/public/images/onboarding/p6-voice-input.png +0 -0
- package/public/images/onboarding/p7-completion.png +0 -0
- package/public/index.html +11594 -0
- package/public/js/onboarding.js +699 -0
- package/public/manifest.json +24 -0
- package/public/messages-v2.html +2824 -0
- package/public/permission-approve.html.bak +107 -0
- package/public/permissions.html +150 -0
- package/public/styles/design-system.css +68 -0
- package/router-db.js +604 -0
- package/router-utils.js +28 -0
- package/router-v2/adapters/imessage.js +191 -0
- package/router-v2/adapters/quo.js +82 -0
- package/router-v2/adapters/whatsapp.js +192 -0
- package/router-v2/contact-manager.js +234 -0
- package/router-v2/conversation-engine.js +498 -0
- package/router-v2/data/knowledge-base.json +176 -0
- package/router-v2/data/router-v2.db +0 -0
- package/router-v2/data/router-v2.db-shm +0 -0
- package/router-v2/data/router-v2.db-wal +0 -0
- package/router-v2/data/router.db +0 -0
- package/router-v2/db.js +457 -0
- package/router-v2/escalation-bridge.js +540 -0
- package/router-v2/follow-up-engine.js +347 -0
- package/router-v2/index.js +441 -0
- package/router-v2/ingestion.js +213 -0
- package/router-v2/knowledge-base.js +231 -0
- package/router-v2/lead-qualifier.js +152 -0
- package/router-v2/learning-loop.js +202 -0
- package/router-v2/outbound-sender.js +160 -0
- package/router-v2/package.json +13 -0
- package/router-v2/permission-gate.js +86 -0
- package/router-v2/playbook.js +177 -0
- package/router-v2/prompts/base.js +52 -0
- package/router-v2/prompts/first-contact.js +38 -0
- package/router-v2/prompts/lead-qualification.js +37 -0
- package/router-v2/prompts/scheduling.js +72 -0
- package/router-v2/prompts/style-overrides.js +22 -0
- package/router-v2/scheduler.js +301 -0
- package/router-v2/scripts/migrate-v1-to-v2.js +215 -0
- package/router-v2/scripts/seed-faq.js +67 -0
- package/router-v2/seed-knowledge-base.js +39 -0
- package/router-v2/utils/ai.js +129 -0
- package/router-v2/utils/phone.js +52 -0
- package/router-v2/utils/response-validator.js +98 -0
- package/router-v2/utils/sanitize.js +222 -0
- package/router.js +5005 -0
- package/routes/google-calendar.js +186 -0
- package/scripts/deploy.sh +62 -0
- package/scripts/macos-calendar.sh +232 -0
- package/scripts/onboard-device.sh +466 -0
- package/server.js +5131 -0
- package/start.sh +24 -0
- package/templates/AGENTS.md +548 -0
- package/templates/IDENTITY.md +15 -0
- package/templates/docs-agents.html +132 -0
- package/templates/docs-app.html +130 -0
- package/templates/docs-home.html +83 -0
- package/templates/docs-imessage.html +121 -0
- package/templates/docs-tasks.html +123 -0
- package/templates/docs-tips.html +175 -0
- package/templates/getting-started.html +809 -0
- package/templates/invisible-prefix-base.txt +171 -0
- package/templates/invisible-prefix-owner.txt +282 -0
- package/templates/invisible-prefix.txt +338 -0
- package/templates/manifest.json +61 -0
- package/templates/memory-org/clients.md +7 -0
- package/templates/memory-org/credentials.md +9 -0
- package/templates/memory-org/devices.md +7 -0
- package/templates/updates.html +464 -0
- package/templates/workspace/AGENTS.md.tmpl +161 -0
- package/templates/workspace/HEARTBEAT.md.tmpl +17 -0
- package/templates/workspace/IDENTITY.md.tmpl +15 -0
- package/templates/workspace/MEMORY.md.tmpl +16 -0
- package/templates/workspace/SOUL.md.tmpl +51 -0
- package/templates/workspace/USER.md.tmpl +25 -0
- package/tts-proxy.js +96 -0
- package/voice-call-local.js +731 -0
- package/voice-call.js +732 -0
- package/wa-listener.js +354 -0
package/voice-call.js
ADDED
|
@@ -0,0 +1,732 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* In-App Voice Call Module for AIVA — xAI Voice Agent API
|
|
3
|
+
*
|
|
4
|
+
* Real-time bidirectional voice via xAI's Realtime WebSocket API (Ara voice).
|
|
5
|
+
* Client streams PCM16 audio → server proxies to xAI → streams audio back.
|
|
6
|
+
* xAI handles VAD (voice activity detection) — no push-to-talk needed.
|
|
7
|
+
*/
|
|
8
|
+
|
|
9
|
+
const WebSocket = require('ws');
|
|
10
|
+
const fs = require('fs');
|
|
11
|
+
const path = require('path');
|
|
12
|
+
const { exec: execCb } = require('child_process');
|
|
13
|
+
|
|
14
|
+
const { execSync } = require('child_process');
|
|
15
|
+
|
|
16
|
+
// SECURITY: the API key must come from the environment only. The previous
// version shipped a live xAI key as a hard-coded fallback — a leaked secret
// in published package source. Never embed credentials in code.
const XAI_API_KEY = process.env.XAI_API_KEY || '';
if (!XAI_API_KEY) {
  // Warn (don't throw) so requiring this module never crashes the server;
  // the xAI WebSocket handshake will simply fail to authenticate.
  console.error('[voice-call] XAI_API_KEY is not set; voice calls cannot authenticate to xAI.');
}

// xAI Realtime (voice) WebSocket endpoint.
const XAI_WS_URL = 'wss://api.x.ai/v1/realtime';
// Local AIVA server endpoint that supplies call context (tasks/chat/calendar).
const CONTEXT_API = 'http://localhost:3847/api/context/voice';

// Active call sessions, keyed by socket id.
const activeCalls = new Map();

// (dedup tracking is now per-session — see callSession)
|
|
24
|
+
|
|
25
|
+
/**
 * Fetch the voice-call context payload from the local AIVA context API.
 * Best-effort: any network or HTTP failure is logged and yields null so
 * call setup can proceed without context.
 *
 * @returns {Promise<object|null>} Parsed context JSON, or null on failure.
 */
async function fetchContext() {
  try {
    const response = await fetch(CONTEXT_API);
    if (response.ok) return await response.json();
    throw new Error(`Context API ${response.status}`);
  } catch (err) {
    console.error('[voice-call] Failed to fetch context:', err.message);
    return null;
  }
}
|
|
35
|
+
|
|
36
|
+
/**
 * Pre-fetch live data (open tasks + today's and this week's calendar) to
 * inject into the voice system prompt.
 *
 * Best-effort: each section independently falls back to '' on failure so a
 * partial outage never blocks call setup.
 *
 * @returns {Promise<{tasks: string, todayCalendar: string, weekCalendar: string}>}
 */
async function fetchLiveData() {
  const result = { tasks: '', todayCalendar: '', weekCalendar: '' };
  const now = new Date();
  // NOTE(review): toISOString() yields the UTC date, but prompts elsewhere
  // treat "today" as Pacific Time — near midnight the date window can be off
  // by a day. TODO confirm intended timezone for the query window.
  const fmt = (d) => d.toISOString().split('T')[0];
  const addDays = (d, n) => { const r = new Date(d); r.setDate(r.getDate() + n); return r; };

  // 1. Fetch open (non-done) tasks from the local task API.
  try {
    const res = await fetch(TASK_API, { headers: INTERNAL_HEADERS });
    if (res.ok) {
      let tasks = await res.json();
      // API may return either a bare array or { tasks: [...] }.
      if (Array.isArray(tasks.tasks)) tasks = tasks.tasks;
      const open = (Array.isArray(tasks) ? tasks : []).filter(t => t.status !== 'done');
      if (open.length) {
        result.tasks = open.map(t => `- ${t.title} (${t.status}, assigned to ${t.assignee || 'unassigned'})`).join('\n');
      }
    }
  } catch (e) {
    console.error('[voice-call] Failed to pre-fetch tasks:', e.message);
  }

  // 2. Today's calendar — remaining (future) events only.
  try {
    const remaining = _filterPastEvents(_fetchCalendarLines(fmt(now), fmt(addDays(now, 1))), now);
    if (remaining.length) result.todayCalendar = remaining.join('\n');
  } catch (e) {
    console.error('[voice-call] Failed to pre-fetch today calendar:', e.message);
  }

  // 3. This week's calendar — remaining (future) events only.
  try {
    const weekLines = _filterPastEvents(_fetchCalendarLines(fmt(now), fmt(addDays(now, 7))), now);
    if (weekLines.length) result.weekCalendar = weekLines.join('\n');
  } catch (e) {
    console.error('[voice-call] Failed to pre-fetch week calendar:', e.message);
  }

  return result;
}

// Fetch raw `gog calendar list` output for all configured accounts between two
// ISO dates, dropping header rows, comment lines, blanks, and "Busy"
// free/busy placeholder entries. A failing account is silently skipped.
function _fetchCalendarLines(from, to) {
  const accounts = ['brandon@conversionmarketingpros.com', 'burgan.brandon@gmail.com'];
  const collected = [];
  for (const acct of accounts) {
    try {
      const raw = execSync(`gog calendar list --account ${acct} --from ${from} --to ${to}`, { timeout: 10000, encoding: 'utf-8' }).trim();
      if (raw) {
        const lines = raw.split('\n').filter(l => !l.startsWith('ID ') && !l.startsWith('#') && l.trim());
        // Skip "Busy" entries (free/busy from secondary calendars).
        collected.push(...lines.filter(l => !l.includes('Busy')));
      }
    } catch (e) { /* skip failed account */ }
  }
  return collected;
}

// Keep only lines whose embedded timestamp is in the future. Lines without a
// parseable `YYYY-MM-DD HH:MM` timestamp are kept (e.g. all-day events).
function _filterPastEvents(lines, now) {
  return lines.filter(line => {
    const timeMatch = line.match(/(\d{4}-\d{2}-\d{2}[T ]\d{2}:\d{2})/);
    if (timeMatch) {
      const eventTime = new Date(timeMatch[1].replace(' ', 'T'));
      return eventTime > now;
    }
    return true;
  });
}
|
|
120
|
+
|
|
121
|
+
/**
 * Read the workspace personality files (SOUL/USER/IDENTITY) fresh from disk.
 * A missing or unreadable file degrades to '' (with a logged error) so prompt
 * construction never throws.
 *
 * @returns {Object<string, string>} File name -> file contents (or '').
 */
function readPersonalityFiles() {
  const workspacePath = path.join(process.env.HOME || '', '.openclaw', 'workspace');
  const contents = {};
  ['SOUL.md', 'USER.md', 'IDENTITY.md'].forEach((name) => {
    try {
      contents[name] = fs.readFileSync(path.join(workspacePath, name), 'utf-8');
    } catch (err) {
      console.error(`[voice-call] Failed to read ${name}:`, err.message);
      contents[name] = '';
    }
  });
  return contents;
}
|
|
135
|
+
|
|
136
|
+
/**
 * Build the complete system prompt for the xAI voice session:
 * base voice rules + freshly-read personality files (SOUL/USER/IDENTITY) +
 * optional dynamic context (tasks, recent chat, upcoming calendar events).
 *
 * @param {object|null} ctx - Context payload from the local context API; may
 *   contain activeTasks, recentChat, and calendar arrays. null when the
 *   context fetch failed.
 * @returns {string} Full system prompt text to send in session.update.
 */
function buildSystemPrompt(ctx) {
  // Read personality files fresh at call time
  const personality = readPersonalityFiles();

  // Render the current wall-clock time in Pacific Time for the prompt.
  const now = new Date();
  const timeStr = now.toLocaleString('en-US', { timeZone: 'America/Los_Angeles', weekday: 'long', year: 'numeric', month: 'long', day: 'numeric', hour: 'numeric', minute: '2-digit', hour12: true });

  let prompt = `You are AIVA (AI-VA), Brandon Burgan's AI assistant. You are speaking on a voice call through the AIVA app using the Ara voice. Embody the personality below completely — you ARE Aiva.

CURRENT DATE AND TIME: ${timeStr} (Pacific Time)
Use this to speak intelligently about schedules — if calendar events are in the past today, don't mention them as upcoming. Only reference events that haven't happened yet.
When discussing calendar events, speak naturally using the event names directly — say "You've got a meeting with Daniel Wee at 9" not "You have a calendar event called Daniel Wee." Treat events like a human assistant would describe a schedule.

TASK RULES:
- Unless explicitly asked, don't mention finished/done tasks. Brandon only cares about what's still open (to do, in progress, needs review).
- When listing tasks, default to non-done tasks only.

CRITICAL VOICE RULES:
- Keep responses concise (1-3 sentences). This is a voice call, not a text chat.
- Use contractions. Be casual and natural.
- No markdown, bullet points, or formatting — everything is spoken aloud.
- No emojis or special characters.
- Start the conversation with a brief, casual greeting like "Hey Brandon, what's up?" and WAIT for the user to speak. Do NOT volunteer any information (calendar, tasks, etc.) until the user tells you what they need. Your job is to listen first, then assist. Only bring up calendar/tasks/context when the user asks or when it's directly relevant to what they're discussing.`;

  // Append personality sections only for files that were actually readable
  // (readPersonalityFiles degrades missing files to '').
  if (personality['SOUL.md']) prompt += `\n\n--- PERSONALITY & VALUES ---\n${personality['SOUL.md']}`;
  if (personality['USER.md']) prompt += `\n\n--- ABOUT BRANDON ---\n${personality['USER.md']}`;
  if (personality['IDENTITY.md']) prompt += `\n\n--- IDENTITY ---\n${personality['IDENTITY.md']}`;

  // Append dynamic context
  if (ctx) {
    // Cap at 10 tasks to keep the prompt short for a voice session.
    if (ctx.activeTasks?.length) {
      const tasks = ctx.activeTasks.slice(0, 10).map(t => `- ${t.title} (${t.status})`).join('\n');
      prompt += `\n\nActive tasks (reference only — do NOT mention unless asked):\n${tasks}`;
    }
    // Last 10 chat messages, each truncated to 100 characters.
    if (ctx.recentChat?.length) {
      const recent = ctx.recentChat.slice(-10).map(m => `${m.from}: ${m.text?.slice(0, 100)}`).join('\n');
      prompt += `\n\nRecent chat context (reference only):\n${recent}`;
    }
    if (ctx.calendar?.length) {
      const now = new Date();
      // Keep only events still in the future; events with a missing time
      // field are kept (presumably all-day entries — TODO confirm).
      const futureEvents = ctx.calendar.filter(e => {
        const eventTime = e.time || e.start || '';
        if (!eventTime) return true;
        try {
          const eventDate = new Date(eventTime);
          return eventDate > now;
        } catch { return true; }
      });
      if (futureEvents.length) {
        // At most 5 upcoming events, tolerating either title/summary and
        // time/start field names.
        const events = futureEvents.slice(0, 5).map(e => `- ${e.title || e.summary} (${e.time || e.start || ''})`).join('\n');
        prompt += `\n\nUpcoming calendar events (reference only — do NOT mention unless asked):\n${events}`;
      }
    }
  }

  return prompt;
}
|
|
193
|
+
|
|
194
|
+
// ── Tool definitions for xAI voice agent ──
// Function-calling schemas handed to the realtime session via session.update.
// When the model invokes one of these tools, executeTool() performs the
// actual work against the local AIVA server / CLI and returns a result object.
const VOICE_TOOLS = [
  // Create a task on the local AIVA task board.
  {
    type: 'function',
    name: 'create_task',
    description: "Create a new task on the AIVA task board. Use this when Brandon asks you to add a task, create a to-do, or remember to do something.",
    parameters: {
      type: 'object',
      properties: {
        title: { type: 'string', description: 'Task title' },
        description: { type: 'string', description: 'Task description or details' },
        priority: { type: 'string', enum: ['low', 'normal', 'high'], description: 'Task priority' },
        assignee: { type: 'string', description: 'Who to assign to (default: aiva)' },
      },
      required: ['title'],
    },
  },
  // List tasks, optionally filtered by status.
  {
    type: 'function',
    name: 'list_tasks',
    description: "List tasks on the AIVA task board. Use when Brandon asks what's on the board, what tasks are pending, or what's in progress.",
    parameters: {
      type: 'object',
      properties: {
        status: { type: 'string', enum: ['todo', 'in-progress', 'needs-review', 'done', 'all'], description: 'Filter by status. Use "todo" for to-do items, "in-progress" for in progress, etc.' },
      },
    },
  },
  // Update an existing task found by (partial) title match.
  {
    type: 'function',
    name: 'update_task',
    description: 'Update an existing task (change status, add notes, reassign). Use when Brandon says to move a task, mark something done, or add details to a task.',
    parameters: {
      type: 'object',
      properties: {
        task_title: { type: 'string', description: 'Title or partial title to find the task' },
        status: { type: 'string', enum: ['todo', 'in-progress', 'needs-review', 'done'] },
        notes: { type: 'string', description: 'Note to add to the task' },
        assignee: { type: 'string', description: 'Reassign to someone' },
      },
      required: ['task_title'],
    },
  },
  // Query calendars via the `gog` CLI for a given timeframe.
  {
    type: 'function',
    name: 'check_calendar',
    description: "Check Brandon's calendar for today or upcoming events. Use when he asks about his schedule, meetings, or what's coming up.",
    parameters: {
      type: 'object',
      properties: {
        timeframe: { type: 'string', enum: ['today', 'tomorrow', 'this_week', 'this_month'], description: 'Time range to check' },
      },
    },
  },
  // Post a text message into the AIVA app chat.
  {
    type: 'function',
    name: 'send_message',
    description: 'Send a text message in the AIVA app chat. Use when Brandon asks you to write something down, send a note, or post a message.',
    parameters: {
      type: 'object',
      properties: {
        message: { type: 'string', description: 'The message to send' },
      },
      required: ['message'],
    },
  },
  // Grep workspace memory files for a query string.
  {
    type: 'function',
    name: 'search_documents',
    description: "Search through AIVA's workspace documents and memory files. Use when Brandon asks about past conversations, notes, or stored information.",
    parameters: {
      type: 'object',
      properties: {
        query: { type: 'string', description: 'Search query' },
      },
      required: ['query'],
    },
  },
  // Generate an image or video through the local /api/generate endpoint.
  {
    type: 'function',
    name: 'generate_media',
    description: 'Generate an image or video using AI. Use when Brandon asks you to create, generate, or make an image or video.',
    parameters: {
      type: 'object',
      properties: {
        type: { type: 'string', enum: ['image', 'video'], description: 'Type of media to generate' },
        prompt: { type: 'string', description: 'Description of what to generate' },
        duration: { type: 'number', description: 'Video duration in seconds (1-15, default 5)' },
        aspect_ratio: { type: 'string', description: 'Aspect ratio (16:9, 9:16, 1:1, 4:3, etc.)' },
      },
      required: ['type', 'prompt'],
    },
  },
];
|
|
288
|
+
|
|
289
|
+
// Local AIVA server task endpoint used by the voice tools and live-data fetch.
const TASK_API = 'http://localhost:3847/api/tasks';
// Headers marking requests as trusted internal traffic to the local server.
const INTERNAL_HEADERS = { 'Content-Type': 'application/json', 'x-aiva-internal': 'true' };
|
|
291
|
+
|
|
292
|
+
/**
 * Execute a voice-agent tool call by name.
 *
 * Dispatches to the local AIVA HTTP API (tasks, chat, media generation), the
 * `gog` calendar CLI, or a filesystem grep depending on the tool. Always
 * resolves to a plain result object — failures are caught and reported as
 * { success: false } so a tool error never crashes the voice session.
 *
 * @param {string} name - Tool name as declared in VOICE_TOOLS.
 * @param {object} args - Parsed tool arguments supplied by the model.
 * @returns {Promise<object>} Result object containing at least { success }.
 */
async function executeTool(name, args) {
  console.log(`[voice-call] Executing tool: ${name}`, args);
  try {
    switch (name) {
      case 'create_task': {
        const body = {
          title: args.title,
          description: args.description || '',
          priority: args.priority || 'normal',
          assignee: args.assignee || 'aiva',
          status: 'todo',
          requestedFor: 'brandon',
        };
        const res = await fetch(TASK_API, { method: 'POST', headers: INTERNAL_HEADERS, body: JSON.stringify(body) });
        if (!res.ok) throw new Error(`API ${res.status}`);
        const task = await res.json();
        return { success: true, message: `Task "${args.title}" created successfully.`, taskId: task.id || task._id };
      }

      case 'list_tasks': {
        const res = await fetch(TASK_API, { headers: INTERNAL_HEADERS });
        if (!res.ok) throw new Error(`API ${res.status}`);
        let tasks = await res.json();
        // API may return a bare array or { tasks: [...] }.
        if (Array.isArray(tasks.tasks)) tasks = tasks.tasks;
        if (args.status && args.status !== 'all') {
          tasks = tasks.filter(t => t.status === args.status);
        }
        // Speakable status labels for the voice response.
        const statusLabel = { 'todo': 'to do', 'in-progress': 'in progress', 'needs-review': 'needs review', 'done': 'done' };
        const summary = tasks.slice(0, 15).map(t => `${t.title} (${statusLabel[t.status] || t.status}, ${t.assignee || 'unassigned'})`);
        return { success: true, count: tasks.length, tasks: summary };
      }

      case 'update_task': {
        // Find task by case-insensitive partial title match.
        const res = await fetch(TASK_API, { headers: INTERNAL_HEADERS });
        if (!res.ok) throw new Error(`API ${res.status}`);
        let tasks = await res.json();
        if (Array.isArray(tasks.tasks)) tasks = tasks.tasks;
        const needle = args.task_title.toLowerCase();
        const match = tasks.find(t => t.title.toLowerCase().includes(needle));
        if (!match) return { success: false, message: `No task found matching "${args.task_title}"` };

        const updates = {};
        if (args.status) updates.status = args.status;
        if (args.assignee) updates.assignee = args.assignee;
        // Notes are appended, never overwritten.
        if (args.notes) updates.notes = (match.notes || '') + '\n' + args.notes;

        const id = match.id || match._id;
        const putRes = await fetch(`${TASK_API}/${id}`, { method: 'PUT', headers: INTERNAL_HEADERS, body: JSON.stringify(updates) });
        if (!putRes.ok) throw new Error(`Update API ${putRes.status}`);
        return { success: true, message: `Task "${match.title}" updated.` };
      }

      case 'check_calendar': {
        const tf = args.timeframe || 'today';
        const now = new Date();
        // NOTE(review): UTC date window (toISOString) vs Pacific-time prompts —
        // may be off by a day near midnight. TODO confirm intended timezone.
        const fmt = (d) => d.toISOString().split('T')[0];
        const addDays = (d, n) => { const r = new Date(d); r.setDate(r.getDate() + n); return r; };
        let fromDate, toDate;
        if (tf === 'today') { fromDate = fmt(now); toDate = fmt(addDays(now, 1)); }
        else if (tf === 'tomorrow') { fromDate = fmt(addDays(now, 1)); toDate = fmt(addDays(now, 2)); }
        else if (tf === 'this_month') { fromDate = fmt(now); toDate = fmt(addDays(now, 30)); }
        else { fromDate = fmt(now); toDate = fmt(addDays(now, 7)); }
        const calAccounts = ['brandon@conversionmarketingpros.com', 'burgan.brandon@gmail.com'];
        let calResults = [];
        for (const acct of calAccounts) {
          try {
            const r = execSync(`gog calendar list --account ${acct} --from ${fromDate} --to ${toDate}`, { timeout: 10000, encoding: 'utf-8' }).trim();
            // Drop header rows, comments, blanks, and "Busy" free/busy entries.
            if (r) calResults.push(...r.split('\n').filter(l => !l.startsWith('ID ') && !l.startsWith('#') && l.trim() && !l.includes('Busy')));
          } catch (e) { /* skip */ }
        }
        // Filter past events for today view
        if (tf === 'today') {
          calResults = calResults.filter(line => {
            const timeMatch = line.match(/(\d{4}-\d{2}-\d{2}[T ]\d{2}:\d{2})/);
            if (timeMatch) {
              const eventTime = new Date(timeMatch[1].replace(' ', 'T'));
              return eventTime > now;
            }
            return true;
          });
        }
        return { success: true, calendar: calResults.join('\n') || 'No upcoming events today.' };
      }

      case 'send_message': {
        const res = await fetch('http://localhost:3847/api/chat/aiva-reply', {
          method: 'POST',
          headers: INTERNAL_HEADERS,
          body: JSON.stringify({ userId: 'brandon', text: args.message }),
        });
        if (!res.ok) throw new Error(`Chat API ${res.status}`);
        return { success: true, message: 'Message sent to chat.' };
      }

      case 'search_documents': {
        // SECURITY FIX: the query is interpolated into a shell command. The
        // previous sanitizer only stripped quotes/backslashes, which still
        // allowed `$(...)` and backtick command injection inside the double
        // quotes. Reduce the query to a safe character whitelist instead.
        const q = args.query.replace(/[^\w\s.@-]/g, '').trim();
        if (!q) return { success: true, results: 'No matching documents found.' };
        const searchPaths = [
          path.join(process.env.HOME || '', '.openclaw', 'workspace', 'memory'),
          path.join(process.env.HOME || '', '.openclaw', 'workspace', 'MEMORY.md'),
        ];
        return new Promise((resolve) => {
          // Paths are quoted so a HOME containing spaces doesn't split args.
          execCb(`grep -r -i -l "${q}" ${searchPaths.map(p => `"${p}"`).join(' ')} 2>/dev/null | head -5`, { timeout: 5000 }, (err, stdout) => {
            const files = (stdout || '').trim().split('\n').filter(Boolean);
            if (!files.length) {
              resolve({ success: true, results: 'No matching documents found.' });
              return;
            }
            // Read up to 3 matching lines from each matching file.
            const snippets = files.map(f => {
              try {
                const content = fs.readFileSync(f, 'utf-8');
                const lines = content.split('\n');
                const matchLines = lines.filter(l => l.toLowerCase().includes(q.toLowerCase())).slice(0, 3);
                return { file: path.basename(f), matches: matchLines.join(' | ') };
              } catch { return { file: path.basename(f), matches: '(could not read)' }; }
            });
            resolve({ success: true, results: snippets });
          });
        });
      }

      case 'generate_media': {
        const opts = {};
        if (args.duration) opts.duration = args.duration;
        if (args.aspect_ratio) opts.aspectRatio = args.aspect_ratio;
        const genRes = await fetch('http://localhost:3847/api/generate', {
          method: 'POST',
          headers: INTERNAL_HEADERS,
          body: JSON.stringify({ type: args.type, prompt: args.prompt, options: opts }),
        });
        if (!genRes.ok) {
          const err = await genRes.text();
          throw new Error(`Generate API ${genRes.status}: ${err}`);
        }
        const genData = await genRes.json();
        const mediaType = args.type === 'video' ? 'video' : 'image';
        return { success: true, message: `${mediaType} generated and saved to chat. Brandon can see it in the app.`, url: genData.url };
      }

      default:
        return { success: false, message: `Unknown tool: ${name}` };
    }
  } catch (e) {
    console.error(`[voice-call] Tool ${name} error:`, e.message);
    return { success: false, message: `Error: ${e.message}` };
  }
}
|
|
441
|
+
|
|
442
|
+
function setupVoiceCall(io) {
|
|
443
|
+
io.on('connection', (socket) => {
|
|
444
|
+
|
|
445
|
+
socket.on('voice-call-start', async (data) => {
|
|
446
|
+
console.log('[voice-call] Call started by:', data?.userId || 'unknown');
|
|
447
|
+
|
|
448
|
+
// Fetch context and live data for system prompt
|
|
449
|
+
const [ctx, liveData] = await Promise.all([
|
|
450
|
+
fetchContext().catch(() => null),
|
|
451
|
+
fetchLiveData().catch(() => ({ tasks: '', todayCalendar: '', weekCalendar: '' })),
|
|
452
|
+
]);
|
|
453
|
+
let systemPrompt = buildSystemPrompt(ctx);
|
|
454
|
+
|
|
455
|
+
// Inject pre-fetched live data
|
|
456
|
+
const liveDataSections = [];
|
|
457
|
+
if (liveData.tasks) liveDataSections.push(`CURRENT TASKS (reference only — do NOT mention unless asked):\n${liveData.tasks}`);
|
|
458
|
+
if (liveData.todayCalendar) liveDataSections.push(`TODAY'S REMAINING CALENDAR (reference only — do NOT mention unless asked):\n${liveData.todayCalendar}`);
|
|
459
|
+
if (liveData.weekCalendar) liveDataSections.push(`THIS WEEK'S CALENDAR (reference only — do NOT mention unless asked):\n${liveData.weekCalendar}`);
|
|
460
|
+
if (liveDataSections.length) {
|
|
461
|
+
systemPrompt += '\n\n' + liveDataSections.join('\n\n');
|
|
462
|
+
}
|
|
463
|
+
|
|
464
|
+
// Open WebSocket to xAI
|
|
465
|
+
let xaiWs;
|
|
466
|
+
try {
|
|
467
|
+
xaiWs = new WebSocket(XAI_WS_URL, {
|
|
468
|
+
headers: { 'Authorization': `Bearer ${XAI_API_KEY}` },
|
|
469
|
+
});
|
|
470
|
+
} catch (e) {
|
|
471
|
+
console.error('[voice-call] Failed to create xAI WebSocket:', e.message);
|
|
472
|
+
socket.emit('voice-call-error', { error: 'Failed to connect to voice service' });
|
|
473
|
+
return;
|
|
474
|
+
}
|
|
475
|
+
|
|
476
|
+
const callSession = {
|
|
477
|
+
id: Date.now().toString(),
|
|
478
|
+
userId: data?.userId || 'brandon',
|
|
479
|
+
xaiWs,
|
|
480
|
+
startedAt: new Date().toISOString(),
|
|
481
|
+
transcript: [], // {role, text}
|
|
482
|
+
currentResponseText: '',
|
|
483
|
+
completedResponseTexts: [], // track completed response texts
|
|
484
|
+
currentResponseId: null, // track current response ID
|
|
485
|
+
suppressCurrentResponse: false,
|
|
486
|
+
};
|
|
487
|
+
activeCalls.set(socket.id, callSession);
|
|
488
|
+
|
|
489
|
+
xaiWs.on('open', () => {
|
|
490
|
+
console.log('[voice-call] xAI WebSocket connected');
|
|
491
|
+
|
|
492
|
+
// Configure session
|
|
493
|
+
const sessionConfig = {
|
|
494
|
+
type: 'session.update',
|
|
495
|
+
session: {
|
|
496
|
+
voice: 'Ara',
|
|
497
|
+
instructions: systemPrompt,
|
|
498
|
+
turn_detection: { type: 'server_vad' },
|
|
499
|
+
audio: {
|
|
500
|
+
input: { format: { type: 'audio/pcm', rate: 24000 } },
|
|
501
|
+
output: { format: { type: 'audio/pcm', rate: 24000 } },
|
|
502
|
+
},
|
|
503
|
+
tools: VOICE_TOOLS,
|
|
504
|
+
},
|
|
505
|
+
};
|
|
506
|
+
xaiWs.send(JSON.stringify(sessionConfig));
|
|
507
|
+
});
|
|
508
|
+
|
|
509
|
+
// Handle realtime events from xAI. Audio and transcript deltas are relayed
// to the browser; tool calls are executed server-side and their results fed
// back so the model can continue the turn. Duplicate responses (same first
// 50 chars as a recent response) are suppressed.
xaiWs.on('message', async (rawMsg) => {
  let msg;
  try {
    msg = JSON.parse(rawMsg.toString());
  } catch {
    return; // ignore non-JSON frames
  }

  // Send a tool result (success or error payload) back to xAI and request
  // a follow-up response. Extracted: this sequence was duplicated verbatim
  // in the try and catch arms of the function-call case.
  const sendToolOutput = (callId, output) => {
    xaiWs.send(JSON.stringify({
      type: 'conversation.item.create',
      item: {
        type: 'function_call_output',
        call_id: callId,
        output: JSON.stringify(output),
      },
    }));
    xaiWs.send(JSON.stringify({ type: 'response.create' }));
  };

  switch (msg.type) {
    case 'session.updated':
      console.log('[voice-call] Session configured, ready');
      socket.emit('voice-call-ready', { callId: callSession.id });
      break;

    case 'input_audio_buffer.speech_started':
      socket.emit('voice-call-status', { status: 'user-speaking' });
      break;

    case 'input_audio_buffer.speech_stopped':
      socket.emit('voice-call-status', { status: 'processing' });
      break;

    case 'conversation.item.input_audio_transcription.completed':
      if (msg.transcript) {
        callSession.transcript.push({ role: 'user', text: msg.transcript });
        socket.emit('voice-call-user-transcript', { text: msg.transcript });
      }
      break;

    case 'response.created':
      callSession.currentResponseText = '';
      callSession.currentResponseId = msg.response?.id || null;
      socket.emit('voice-call-status', { status: 'speaking' });
      break;

    case 'response.output_audio.delta':
      // Forward audio unless this response has been flagged as a duplicate.
      if (msg.delta && !callSession.suppressCurrentResponse) {
        socket.emit('voice-call-audio-delta', { audio: msg.delta });
      }
      break;

    case 'response.output_audio_transcript.delta':
      if (msg.delta) {
        callSession.currentResponseText += msg.delta;
        // Once we have 50+ chars, check if this matches a previous response;
        // if so, mute the rest of its audio and transcript.
        if (!callSession.suppressCurrentResponse && callSession.currentResponseText.length >= 50) {
          const prefix = callSession.currentResponseText.slice(0, 50);
          if (callSession.completedResponseTexts.some(t => t.slice(0, 50) === prefix)) {
            callSession.suppressCurrentResponse = true;
            console.log('[voice-call] Suppressing duplicate response audio');
          }
        }
        if (!callSession.suppressCurrentResponse) {
          socket.emit('voice-call-transcript-delta', { text: msg.delta });
        }
      }
      break;

    case 'response.output_audio_transcript.done':
      if (msg.transcript && !callSession.suppressCurrentResponse) {
        callSession.transcript.push({ role: 'assistant', text: msg.transcript });
      }
      break;

    case 'response.done': {
      socket.emit('voice-call-status', { status: 'listening' });
      const respText = callSession.currentResponseText;
      if (respText && !callSession.suppressCurrentResponse) {
        callSession.completedResponseTexts.push(respText);
        // Keep only last 5 to prevent memory growth.
        if (callSession.completedResponseTexts.length > 5) {
          callSession.completedResponseTexts.shift();
        }
        socket.emit('voice-call-response-done', { text: respText });
      }
      // Reset for next response.
      callSession.suppressCurrentResponse = false;
      break;
    }

    case 'response.function_call_arguments.done': {
      const { name: fnName, call_id, arguments: argsStr } = msg;
      console.log(`[voice-call] Function call: ${fnName} (${call_id})`);
      try {
        const fnArgs = JSON.parse(argsStr || '{}');
        const result = await executeTool(fnName, fnArgs);
        sendToolOutput(call_id, result);
      } catch (toolErr) {
        console.error('[voice-call] Tool execution error:', toolErr);
        sendToolOutput(call_id, { success: false, message: toolErr.message });
      }
      break;
    }

    case 'error':
      console.error('[voice-call] xAI error:', msg.error);
      socket.emit('voice-call-error', { error: msg.error?.message || 'Voice service error' });
      break;
  }
});
|
|
627
|
+
|
|
628
|
+
// Surface transport-level failures to the browser.
xaiWs.on('error', (err) => {
  console.error('[voice-call] xAI WebSocket error:', err.message);
  socket.emit('voice-call-error', { error: 'Voice connection error' });
});

// xAI hung up first: notify the client, but only if the call is still
// tracked (a normal teardown removes it from activeCalls before closing).
xaiWs.on('close', (code, reason) => {
  console.log(`[voice-call] xAI WebSocket closed: ${code} ${reason}`);
  if (activeCalls.has(socket.id)) {
    socket.emit('voice-call-ended', { reason: 'Voice service disconnected' });
  }
});
|
|
639
|
+
});
|
|
640
|
+
|
|
641
|
+
// Client sends PCM16 audio chunks as base64; forward them upstream while
// the xAI socket is open.
socket.on('voice-call-audio-chunk', (data) => {
  const session = activeCalls.get(socket.id);
  const upstream = session?.xaiWs;
  if (!upstream || upstream.readyState !== WebSocket.OPEN) return;
  if (!data?.audio) return;

  upstream.send(JSON.stringify({
    type: 'input_audio_buffer.append',
    audio: data.audio,
  }));
});
|
|
653
|
+
|
|
654
|
+
// Explicit hang-up from the client.
socket.on('voice-call-end', () => {
  const session = activeCalls.get(socket.id);
  if (session) {
    cleanupCall(socket.id, session);
  }
});
|
|
659
|
+
|
|
660
|
+
// Browser went away mid-call: tear the call down too.
socket.on('disconnect', () => {
  const session = activeCalls.get(socket.id);
  if (!session) return;
  console.log('[voice-call] Socket disconnected, cleaning up call');
  cleanupCall(socket.id, session);
});
|
|
667
|
+
});
|
|
668
|
+
|
|
669
|
+
/**
 * Tear down a finished call: close the upstream xAI socket, persist the
 * transcript to disk, and notify the AIVA/OpenClaw wake hooks so action
 * items get processed.
 *
 * @param {string} socketId - Socket.IO id used as the activeCalls key.
 * @param {object} session - Call session created during voice-call-start.
 */
function cleanupCall(socketId, session) {
  console.log(`[voice-call] Call ended (${session.transcript.length} turns)`);

  // Close xAI WebSocket (best-effort; it may already be closed).
  if (session.xaiWs) {
    try { session.xaiWs.close(); } catch {}
  }

  // Save transcript if there was conversation.
  if (session.transcript.length > 0) {
    try {
      const logDir = path.join(process.env.HOME || '', '.openclaw', 'workspace', 'memory', 'call-logs');
      fs.mkdirSync(logDir, { recursive: true });
      const ts = new Date().toISOString().replace(/[:.]/g, '-');
      const pendingFile = path.join(logDir, `pending_ara_${ts}.json`);

      // Call duration in whole seconds since the session started.
      const startTime = new Date(session.startedAt).getTime();
      const durationSeconds = Math.round((Date.now() - startTime) / 1000);
      const durationMin = Math.floor(durationSeconds / 60);
      const durationSec = durationSeconds % 60;

      fs.writeFileSync(pendingFile, JSON.stringify({
        type: 'ara-voice-call',
        timestamp: new Date().toISOString(),
        duration: durationSeconds,
        transcript: session.transcript,
      }, null, 2));
      console.log(`[voice-call] Transcript saved: ${pendingFile}`);

      const relPath = `memory/call-logs/pending_ara_${ts}.json`;
      const wakeText = `[VOICE-CALL-COMPLETE] Ara voice call ended. Duration: ${durationMin}m ${durationSec}s. Transcript saved to ${relPath}. Process action items.`;
      fireWakeHooks(wakeText);
    } catch (e) {
      console.error('[voice-call] Failed to save transcript:', e.message);
    }
  }

  activeCalls.delete(socketId);
}

/**
 * Fire-and-forget wake notifications: the primary AIVA hook, plus the
 * OpenClaw hook as a backup. Failures are logged, never thrown.
 * Extracted from cleanupCall, where the two POSTs were duplicated inline.
 *
 * @param {string} wakeText - Message delivered to both hooks.
 */
function fireWakeHooks(wakeText) {
  fetch('http://localhost:3847/hooks/wake', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'x-aiva-internal': 'true',
    },
    body: JSON.stringify({ text: wakeText }),
  }).catch(e => console.error('[voice-call] Failed to fire wake hook:', e.message));

  // SECURITY(review): hard-coded bearer token shipped in source — should be
  // loaded from an environment variable or secret store instead.
  fetch('http://localhost:18789/hooks/wake', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json', 'Authorization': 'Bearer aiva-hook-secret-2026' },
    body: JSON.stringify({ text: wakeText, mode: 'now' }),
  }).catch(e => console.error('[voice-call] OpenClaw wake hook failed:', e.message));
}
|
|
728
|
+
|
|
729
|
+
console.log('[voice-call] Voice call handler initialized (xAI Voice Agent - Ara)');
|
|
730
|
+
}
|
|
731
|
+
|
|
732
|
+
// Public API: wires the voice-call handlers onto a Socket.IO server.
module.exports = { setupVoiceCall };
|