@hivegpt/hiveai-angular 0.0.585 → 0.0.586

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (28)
  1. package/bundles/hivegpt-hiveai-angular.umd.js +653 -251
  2. package/bundles/hivegpt-hiveai-angular.umd.js.map +1 -1
  3. package/bundles/hivegpt-hiveai-angular.umd.min.js +1 -1
  4. package/bundles/hivegpt-hiveai-angular.umd.min.js.map +1 -1
  5. package/esm2015/hivegpt-hiveai-angular.js +6 -4
  6. package/esm2015/lib/components/voice-agent/components/voice-agent-modal/voice-agent-modal.component.js +24 -17
  7. package/esm2015/lib/components/voice-agent/services/audio-analyzer.service.js +3 -3
  8. package/esm2015/lib/components/voice-agent/services/daily-voice-client.service.js +312 -0
  9. package/esm2015/lib/components/voice-agent/services/voice-agent.service.js +155 -189
  10. package/esm2015/lib/components/voice-agent/services/websocket-voice-client.service.js +95 -0
  11. package/esm2015/lib/components/voice-agent/voice-agent.module.js +7 -3
  12. package/fesm2015/hivegpt-hiveai-angular.js +578 -207
  13. package/fesm2015/hivegpt-hiveai-angular.js.map +1 -1
  14. package/hivegpt-hiveai-angular.d.ts +5 -3
  15. package/hivegpt-hiveai-angular.d.ts.map +1 -1
  16. package/hivegpt-hiveai-angular.metadata.json +1 -1
  17. package/lib/components/voice-agent/components/voice-agent-modal/voice-agent-modal.component.d.ts +4 -7
  18. package/lib/components/voice-agent/components/voice-agent-modal/voice-agent-modal.component.d.ts.map +1 -1
  19. package/lib/components/voice-agent/services/audio-analyzer.service.d.ts +2 -2
  20. package/lib/components/voice-agent/services/daily-voice-client.service.d.ts +65 -0
  21. package/lib/components/voice-agent/services/daily-voice-client.service.d.ts.map +1 -0
  22. package/lib/components/voice-agent/services/voice-agent.service.d.ts +17 -23
  23. package/lib/components/voice-agent/services/voice-agent.service.d.ts.map +1 -1
  24. package/lib/components/voice-agent/services/websocket-voice-client.service.d.ts +49 -0
  25. package/lib/components/voice-agent/services/websocket-voice-client.service.d.ts.map +1 -0
  26. package/lib/components/voice-agent/voice-agent.module.d.ts +2 -2
  27. package/lib/components/voice-agent/voice-agent.module.d.ts.map +1 -1
  28. package/package.json +1 -1
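
Summary of the change, with an illustrative sketch: this release replaces the @pipecat-ai/client-js + @pipecat-ai/websocket-transport voice stack with native WebSocket signaling plus @daily-co/daily-js for WebRTC audio. The compiled bundle diff below shows the full flow; the following minimal TypeScript sketch condenses it under stated assumptions (startVoiceCall, askVoiceUrl, and authHeaders are illustrative names, not package exports; the real logic lives in VoiceAgentService, WebSocketVoiceClientService, and DailyVoiceClientService):

import Daily from '@daily-co/daily-js';

async function startVoiceCall(askVoiceUrl: string, authHeaders: Record<string, string>): Promise<void> {
  // 1. POST /ai/ask-voice returns the signaling WebSocket URL (rn_ws_url in the response body).
  const res = await fetch(askVoiceUrl, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json', ...authHeaders },
    body: JSON.stringify({ bot_id: 'example-bot', conversation_id: 'example-conv', voice: 'alloy' }),
  });
  if (!res.ok) throw new Error(`HTTP ${res.status}`);
  const { rn_ws_url } = await res.json();

  // 2. The native WebSocket carries JSON signaling only (room_created, transcripts); no audio frames.
  const ws = new WebSocket(rn_ws_url);
  ws.onmessage = async (event) => {
    const msg = JSON.parse(event.data);
    if (msg.type === 'room_created') {
      // 3. All audio flows over WebRTC: join the Daily room announced by the backend.
      const call = Daily.createCallObject({ videoSource: false });
      await call.join({ url: msg.room_url });
      call.setLocalAudio(false); // start muted; the service auto-unmutes after the first bot audio frame
    }
  };
}

The sketch mirrors the ordering visible in the bundle: signaling and audio are fully separated, audio never touches the WebSocket, and the mic starts muted until bot audio is heard.
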
@@ -1,8 +1,10 @@
  (function (global, factory) {
- typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('@angular/cdk/overlay'), require('@angular/cdk/portal'), require('@angular/common/http'), require('@angular/core'), require('@angular/platform-browser'), require('rxjs'), require('rxjs/operators'), require('@angular/common'), require('ngx-socket-io'), require('@angular/forms'), require('microsoft-cognitiveservices-speech-sdk'), require('marked'), require('@pipecat-ai/client-js'), require('@pipecat-ai/websocket-transport'), require('@angular/material/icon'), require('@angular/material/sidenav'), require('ngx-quill')) :
- typeof define === 'function' && define.amd ? define('@hivegpt/hiveai-angular', ['exports', '@angular/cdk/overlay', '@angular/cdk/portal', '@angular/common/http', '@angular/core', '@angular/platform-browser', 'rxjs', 'rxjs/operators', '@angular/common', 'ngx-socket-io', '@angular/forms', 'microsoft-cognitiveservices-speech-sdk', 'marked', '@pipecat-ai/client-js', '@pipecat-ai/websocket-transport', '@angular/material/icon', '@angular/material/sidenav', 'ngx-quill'], factory) :
- (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory((global.hivegpt = global.hivegpt || {}, global.hivegpt["hiveai-angular"] = {}), global.ng.cdk.overlay, global.ng.cdk.portal, global.ng.common.http, global.ng.core, global.ng.platformBrowser, global.rxjs, global.rxjs.operators, global.ng.common, global.ngxSocketIo, global.ng.forms, global.SpeechSDK, global.marked, global.clientJs, global.websocketTransport, global.ng.material.icon, global.ng.material.sidenav, global.ngxQuill));
- })(this, (function (exports, overlay, portal, i1, i0, platformBrowser, rxjs, operators, common, ngxSocketIo, forms, SpeechSDK, marked, clientJs, websocketTransport, icon, sidenav, ngxQuill) { 'use strict';
+ typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('@angular/cdk/overlay'), require('@angular/cdk/portal'), require('@angular/common/http'), require('@angular/core'), require('@angular/platform-browser'), require('rxjs'), require('rxjs/operators'), require('@angular/common'), require('ngx-socket-io'), require('@angular/forms'), require('microsoft-cognitiveservices-speech-sdk'), require('marked'), require('@daily-co/daily-js'), require('@angular/material/icon'), require('@angular/material/sidenav'), require('ngx-quill')) :
+ typeof define === 'function' && define.amd ? define('@hivegpt/hiveai-angular', ['exports', '@angular/cdk/overlay', '@angular/cdk/portal', '@angular/common/http', '@angular/core', '@angular/platform-browser', 'rxjs', 'rxjs/operators', '@angular/common', 'ngx-socket-io', '@angular/forms', 'microsoft-cognitiveservices-speech-sdk', 'marked', '@daily-co/daily-js', '@angular/material/icon', '@angular/material/sidenav', 'ngx-quill'], factory) :
+ (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory((global.hivegpt = global.hivegpt || {}, global.hivegpt["hiveai-angular"] = {}), global.ng.cdk.overlay, global.ng.cdk.portal, global.ng.common.http, global.ng.core, global.ng.platformBrowser, global.rxjs, global.rxjs.operators, global.ng.common, global.ngxSocketIo, global.ng.forms, global.SpeechSDK, global.marked, global.Daily, global.ng.material.icon, global.ng.material.sidenav, global.ngxQuill));
+ })(this, (function (exports, overlay, portal, i1, i0, platformBrowser, rxjs, operators, common, ngxSocketIo, forms, SpeechSDK, marked, Daily, icon, sidenav, ngxQuill) { 'use strict';
+
+ function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
 
  function _interopNamespace(e) {
  if (e && e.__esModule) return e;
@@ -26,6 +28,7 @@
  var i0__namespace = /*#__PURE__*/_interopNamespace(i0);
  var SpeechSDK__namespace = /*#__PURE__*/_interopNamespace(SpeechSDK);
  var marked__namespace = /*#__PURE__*/_interopNamespace(marked);
+ var Daily__default = /*#__PURE__*/_interopDefaultLegacy(Daily);

  /******************************************************************************
  Copyright (c) Microsoft Corporation.
@@ -1215,8 +1218,8 @@
  ]; };

  /**
- * Audio analyzer for waveform visualization and local (mic) speaking detection.
- * VoiceAgentService may combine this with WebSocket server events for call state.
+ * Audio analyzer for waveform visualization only.
+ * Do NOT use isUserSpeaking$ for call state; speaking state must come from Daily.js.
  */
  var AudioAnalyzerService = /** @class */ (function () {
  function AudioAnalyzerService() {
@@ -1339,24 +1342,455 @@
  ];

  /**
- * Voice agent orchestrator using the official PipecatClient SDK.
+ * WebSocket-only client for voice agent signaling.
+ * CRITICAL: Uses native WebSocket only. NO Socket.IO, NO ngx-socket-io.
  *
- * Audio flow (mirrors the React reference implementation):
- * - Local mic: acquired by PipecatClient.initDevices(); local track fed to
- * AudioAnalyzerService for waveform visualisation.
- * - Bot audio: received as a MediaStreamTrack via RTVIEvent.TrackStarted,
- * played through a hidden <audio> element.
- * - All binary protobuf framing / RTVI protocol handled by
- * @pipecat-ai/client-js + @pipecat-ai/websocket-transport.
+ * Responsibilities:
+ * - Connect to ws_url (from POST /ai/ask-voice response)
+ * - Parse JSON messages (room_created, user_transcript, bot_transcript)
+ * - Emit roomCreated$, userTranscript$, botTranscript$
+ * - NO audio logic, NO mic logic. Audio is handled by Daily.js (WebRTC).
+ */
+ var WebSocketVoiceClientService = /** @class */ (function () {
+ function WebSocketVoiceClientService() {
+ this.ws = null;
+ this.roomCreatedSubject = new rxjs.Subject();
+ this.userTranscriptSubject = new rxjs.Subject();
+ this.botTranscriptSubject = new rxjs.Subject();
+ /** Emits room_url when backend sends room_created. */
+ this.roomCreated$ = this.roomCreatedSubject.asObservable();
+ /** Emits user transcript updates. */
+ this.userTranscript$ = this.userTranscriptSubject.asObservable();
+ /** Emits bot transcript updates. */
+ this.botTranscript$ = this.botTranscriptSubject.asObservable();
+ }
+ /** Connect to signaling WebSocket. No audio over this connection. */
+ WebSocketVoiceClientService.prototype.connect = function (wsUrl) {
+ var _this = this;
+ var _a;
+ if (((_a = this.ws) === null || _a === void 0 ? void 0 : _a.readyState) === WebSocket.OPEN) {
+ return;
+ }
+ if (this.ws) {
+ this.ws.close();
+ this.ws = null;
+ }
+ try {
+ this.ws = new WebSocket(wsUrl);
+ this.ws.onmessage = function (event) {
+ var _a;
+ try {
+ var msg = JSON.parse(event.data);
+ if ((msg === null || msg === void 0 ? void 0 : msg.type) === 'room_created') {
+ var roomUrl = ((_a = msg.room_url) !== null && _a !== void 0 ? _a : msg.roomUrl);
+ if (typeof roomUrl === 'string') {
+ _this.roomCreatedSubject.next(roomUrl);
+ }
+ }
+ else if ((msg === null || msg === void 0 ? void 0 : msg.type) === 'user_transcript' && typeof msg.text === 'string') {
+ _this.userTranscriptSubject.next({
+ text: msg.text,
+ final: msg.final === true,
+ });
+ }
+ else if ((msg === null || msg === void 0 ? void 0 : msg.type) === 'bot_transcript' && typeof msg.text === 'string') {
+ _this.botTranscriptSubject.next(msg.text);
+ }
+ }
+ catch (_b) {
+ // Ignore non-JSON or unknown messages
+ }
+ };
+ this.ws.onerror = function () {
+ _this.disconnect();
+ };
+ this.ws.onclose = function () {
+ _this.ws = null;
+ };
+ }
+ catch (err) {
+ console.error('WebSocketVoiceClient: connect failed', err);
+ this.ws = null;
+ throw err;
+ }
+ };
+ /** Disconnect and cleanup. */
+ WebSocketVoiceClientService.prototype.disconnect = function () {
+ if (this.ws) {
+ this.ws.close();
+ this.ws = null;
+ }
+ };
+ Object.defineProperty(WebSocketVoiceClientService.prototype, "isConnected", {
+ /** Whether the WebSocket is open. */
+ get: function () {
+ var _a;
+ return ((_a = this.ws) === null || _a === void 0 ? void 0 : _a.readyState) === WebSocket.OPEN;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ return WebSocketVoiceClientService;
+ }());
+ WebSocketVoiceClientService.ɵprov = i0__namespace.ɵɵdefineInjectable({ factory: function WebSocketVoiceClientService_Factory() { return new WebSocketVoiceClientService(); }, token: WebSocketVoiceClientService, providedIn: "root" });
+ WebSocketVoiceClientService.decorators = [
+ { type: i0.Injectable, args: [{
+ providedIn: 'root',
+ },] }
+ ];
+
+ /**
+ * Daily.js WebRTC client for voice agent audio.
+ * Responsibilities:
+ * - Create and manage Daily CallObject
+ * - Join Daily room using room_url
+ * - Handle mic capture + speaker playback
+ * - Bot speaking detection via AnalyserNode on remote track (instant)
+ * - User speaking detection via active-speaker-change
+ * - Expose speaking$ (bot speaking), userSpeaking$ (user speaking), micMuted$
+ * - Expose localStream$ for waveform visualization (AudioAnalyzerService)
+ */
+ var DailyVoiceClientService = /** @class */ (function () {
+ function DailyVoiceClientService(ngZone) {
+ this.ngZone = ngZone;
+ this.callObject = null;
+ this.localStream = null;
+ this.localSessionId = null;
+ /** Explicit playback of remote (bot) audio; required in some browsers. */
+ this.remoteAudioElement = null;
+ /** AnalyserNode-based remote audio monitor for instant bot speaking detection. */
+ this.remoteAudioContext = null;
+ this.remoteSpeakingRAF = null;
+ this.speakingSubject = new rxjs.BehaviorSubject(false);
+ this.userSpeakingSubject = new rxjs.BehaviorSubject(false);
+ this.micMutedSubject = new rxjs.BehaviorSubject(false);
+ this.localStreamSubject = new rxjs.BehaviorSubject(null);
+ this.firstRemoteAudioFrameSubject = new rxjs.BehaviorSubject(false);
+ /** True when bot (remote participant) is the active speaker. */
+ this.speaking$ = this.speakingSubject.asObservable();
+ /** True when user (local participant) is the active speaker. */
+ this.userSpeaking$ = this.userSpeakingSubject.asObservable();
+ /** True when mic is muted. */
+ this.micMuted$ = this.micMutedSubject.asObservable();
+ /** Emits local mic stream for waveform visualization. */
+ this.localStream$ = this.localStreamSubject.asObservable();
+ /** Emits true once when first remote audio frame starts playing. */
+ this.firstRemoteAudioFrame$ = this.firstRemoteAudioFrameSubject.asObservable();
+ }
+ /**
+ * Connect to Daily room. Acquires mic first for waveform, then joins with audio.
+ * @param roomUrl Daily room URL (from room_created)
+ * @param token Optional meeting token
+ */
+ DailyVoiceClientService.prototype.connect = function (roomUrl, token) {
+ return __awaiter(this, void 0, void 0, function () {
+ var stream, audioTrack, callObject, joinOptions, participants, err_1;
+ return __generator(this, function (_e) {
+ switch (_e.label) {
+ case 0:
+ if (!this.callObject) return [3 /*break*/, 2];
+ return [4 /*yield*/, this.disconnect()];
+ case 1:
+ _e.sent();
+ _e.label = 2;
+ case 2:
+ _e.trys.push([2, 5, , 6]);
+ return [4 /*yield*/, navigator.mediaDevices.getUserMedia({ audio: true })];
+ case 3:
+ stream = _e.sent();
+ audioTrack = stream.getAudioTracks()[0];
+ if (!audioTrack) {
+ stream.getTracks().forEach(function (t) { return t.stop(); });
+ throw new Error('No audio track');
+ }
+ this.localStream = stream;
+ this.localStreamSubject.next(stream);
+ callObject = Daily__default["default"].createCallObject({
+ videoSource: false,
+ audioSource: audioTrack,
+ });
+ this.callObject = callObject;
+ this.setupEventHandlers(callObject);
+ joinOptions = { url: roomUrl };
+ if (typeof token === 'string' && token.trim() !== '') {
+ joinOptions.token = token;
+ }
+ return [4 /*yield*/, callObject.join(joinOptions)];
+ case 4:
+ _e.sent();
+ console.log("[VoiceDebug] Room connected (Daily join complete) \u2014 " + new Date().toISOString());
+ participants = callObject.participants();
+ if (participants === null || participants === void 0 ? void 0 : participants.local) {
+ this.localSessionId = participants.local.session_id;
+ }
+ // Start with mic muted; VoiceAgentService auto-unmutes after first remote audio frame.
+ callObject.setLocalAudio(false);
+ this.micMutedSubject.next(true);
+ return [3 /*break*/, 6];
+ case 5:
+ err_1 = _e.sent();
+ this.cleanup();
+ throw err_1;
+ case 6: return [2 /*return*/];
+ }
+ });
+ });
+ };
+ DailyVoiceClientService.prototype.setupEventHandlers = function (call) {
+ var _this = this;
+ // active-speaker-change: used ONLY for user speaking detection.
+ // Bot speaking is detected by our own AnalyserNode (instant, no debounce).
+ call.on('active-speaker-change', function (event) {
+ _this.ngZone.run(function () {
+ var _a;
+ var peerId = (_a = event === null || event === void 0 ? void 0 : event.activeSpeaker) === null || _a === void 0 ? void 0 : _a.peerId;
+ if (!peerId || !_this.localSessionId) {
+ _this.userSpeakingSubject.next(false);
+ return;
+ }
+ var isLocal = peerId === _this.localSessionId;
+ _this.userSpeakingSubject.next(isLocal);
+ });
+ });
+ // track-started / track-stopped: set up remote audio playback + AnalyserNode monitor.
+ call.on('track-started', function (event) {
+ _this.ngZone.run(function () {
+ var _a, _b, _c, _d;
+ var p = event === null || event === void 0 ? void 0 : event.participant;
+ var type = (_a = event === null || event === void 0 ? void 0 : event.type) !== null && _a !== void 0 ? _a : (_b = event === null || event === void 0 ? void 0 : event.track) === null || _b === void 0 ? void 0 : _b.kind;
+ var track = event === null || event === void 0 ? void 0 : event.track;
+ if (p && !p.local && type === 'audio') {
+ console.log("[VoiceDebug] Got audio track from backend (track-started) \u2014 readyState=" + (track === null || track === void 0 ? void 0 : track.readyState) + ", muted=" + (track === null || track === void 0 ? void 0 : track.muted) + " \u2014 " + new Date().toISOString());
+ var audioTrack = track !== null && track !== void 0 ? track : (_d = (_c = p.tracks) === null || _c === void 0 ? void 0 : _c.audio) === null || _d === void 0 ? void 0 : _d.track;
+ if (audioTrack && typeof audioTrack === 'object') {
+ _this.playRemoteTrack(audioTrack);
+ _this.monitorRemoteAudio(audioTrack);
+ }
+ }
+ });
+ });
+ call.on('track-stopped', function (event) {
+ _this.ngZone.run(function () {
+ var _a, _b;
+ var p = event === null || event === void 0 ? void 0 : event.participant;
+ var type = (_a = event === null || event === void 0 ? void 0 : event.type) !== null && _a !== void 0 ? _a : (_b = event === null || event === void 0 ? void 0 : event.track) === null || _b === void 0 ? void 0 : _b.kind;
+ if (p && !p.local && type === 'audio') {
+ _this.stopRemoteAudioMonitor();
+ _this.stopRemoteAudio();
+ }
+ });
+ });
+ call.on('left-meeting', function () {
+ _this.ngZone.run(function () { return _this.cleanup(); });
+ });
+ call.on('error', function (event) {
+ _this.ngZone.run(function () {
+ var _a;
+ console.error('DailyVoiceClient: Daily error', (_a = event === null || event === void 0 ? void 0 : event.errorMsg) !== null && _a !== void 0 ? _a : event);
+ _this.cleanup();
+ });
+ });
+ };
+ /**
+ * Play remote (bot) audio track via a dedicated audio element.
+ * Required in many browsers where Daily's internal playback does not output to speakers.
+ */
+ DailyVoiceClientService.prototype.playRemoteTrack = function (track) {
+ var _this = this;
+ this.stopRemoteAudio();
+ try {
+ console.log("[VoiceDebug] playRemoteTrack called \u2014 track.readyState=" + track.readyState + ", track.muted=" + track.muted + " \u2014 " + new Date().toISOString());
+ track.onunmute = function () {
+ console.log("[VoiceDebug] Remote audio track UNMUTED (audio data arriving) \u2014 " + new Date().toISOString());
+ };
+ var stream = new MediaStream([track]);
+ var audio = new Audio();
+ audio.autoplay = true;
+ audio.srcObject = stream;
+ this.remoteAudioElement = audio;
+ audio.onplaying = function () {
+ console.log("[VoiceDebug] Audio element PLAYING (browser started playback) \u2014 " + new Date().toISOString());
+ };
+ var firstTimeUpdate_1 = true;
+ audio.ontimeupdate = function () {
+ if (firstTimeUpdate_1) {
+ firstTimeUpdate_1 = false;
+ console.log("[VoiceDebug] Audio element first TIMEUPDATE (actual audio output) \u2014 " + new Date().toISOString());
+ _this.firstRemoteAudioFrameSubject.next(true);
+ }
+ };
+ var p = audio.play();
+ if (p && typeof p.then === 'function') {
+ p.then(function () {
+ console.log("[VoiceDebug] audio.play() resolved \u2014 " + new Date().toISOString());
+ _this.firstRemoteAudioFrameSubject.next(true);
+ }).catch(function (err) {
+ console.warn('DailyVoiceClient: remote audio play failed (may need user gesture)', err);
+ });
+ }
+ }
+ catch (err) {
+ console.warn('DailyVoiceClient: failed to create remote audio element', err);
+ }
+ };
+ /**
+ * Monitor remote audio track energy via AnalyserNode.
+ * Polls at ~60fps and flips speakingSubject based on actual audio energy.
+ */
+ DailyVoiceClientService.prototype.monitorRemoteAudio = function (track) {
+ var _this = this;
+ this.stopRemoteAudioMonitor();
+ try {
+ var ctx = new AudioContext();
+ var source = ctx.createMediaStreamSource(new MediaStream([track]));
+ var analyser_1 = ctx.createAnalyser();
+ analyser_1.fftSize = 256;
+ source.connect(analyser_1);
+ this.remoteAudioContext = ctx;
+ var dataArray_1 = new Uint8Array(analyser_1.frequencyBinCount);
+ var THRESHOLD_1 = 5;
+ var SILENCE_MS_1 = 1500;
+ var lastSoundTime_1 = 0;
+ var isSpeaking_1 = false;
+ var poll_1 = function () {
+ if (!_this.remoteAudioContext)
+ return;
+ analyser_1.getByteFrequencyData(dataArray_1);
+ var sum = 0;
+ for (var i = 0; i < dataArray_1.length; i++) {
+ sum += dataArray_1[i];
+ }
+ var avg = sum / dataArray_1.length;
+ var now = Date.now();
+ if (avg > THRESHOLD_1) {
+ lastSoundTime_1 = now;
+ if (!isSpeaking_1) {
+ isSpeaking_1 = true;
+ console.log("[VoiceDebug] Bot audio energy detected (speaking=true) \u2014 avg=" + avg.toFixed(1) + " \u2014 " + new Date().toISOString());
+ _this.ngZone.run(function () {
+ _this.userSpeakingSubject.next(false);
+ _this.speakingSubject.next(true);
+ });
+ }
+ }
+ else if (isSpeaking_1 && now - lastSoundTime_1 > SILENCE_MS_1) {
+ isSpeaking_1 = false;
+ console.log("[VoiceDebug] Bot audio silence detected (speaking=false) \u2014 " + new Date().toISOString());
+ _this.ngZone.run(function () { return _this.speakingSubject.next(false); });
+ }
+ _this.remoteSpeakingRAF = requestAnimationFrame(poll_1);
+ };
+ this.remoteSpeakingRAF = requestAnimationFrame(poll_1);
+ }
+ catch (err) {
+ console.warn('DailyVoiceClient: failed to create remote audio monitor', err);
+ }
+ };
+ DailyVoiceClientService.prototype.stopRemoteAudioMonitor = function () {
+ if (this.remoteSpeakingRAF) {
+ cancelAnimationFrame(this.remoteSpeakingRAF);
+ this.remoteSpeakingRAF = null;
+ }
+ if (this.remoteAudioContext) {
+ this.remoteAudioContext.close().catch(function () { });
+ this.remoteAudioContext = null;
+ }
+ };
+ DailyVoiceClientService.prototype.stopRemoteAudio = function () {
+ if (this.remoteAudioElement) {
+ try {
+ this.remoteAudioElement.pause();
+ this.remoteAudioElement.srcObject = null;
+ }
+ catch (_) { }
+ this.remoteAudioElement = null;
+ }
+ };
+ /** Set mic muted state. */
+ DailyVoiceClientService.prototype.setMuted = function (muted) {
+ if (!this.callObject)
+ return;
+ this.callObject.setLocalAudio(!muted);
+ this.micMutedSubject.next(muted);
+ };
+ /** Disconnect and cleanup. */
+ DailyVoiceClientService.prototype.disconnect = function () {
+ return __awaiter(this, void 0, void 0, function () {
+ var e_1;
+ return __generator(this, function (_e) {
+ switch (_e.label) {
+ case 0:
+ if (!this.callObject) {
+ this.cleanup();
+ return [2 /*return*/];
+ }
+ _e.label = 1;
+ case 1:
+ _e.trys.push([1, 3, , 4]);
+ return [4 /*yield*/, this.callObject.leave()];
+ case 2:
+ _e.sent();
+ return [3 /*break*/, 4];
+ case 3:
+ e_1 = _e.sent();
+ return [3 /*break*/, 4];
+ case 4:
+ this.cleanup();
+ return [2 /*return*/];
+ }
+ });
+ });
+ };
+ DailyVoiceClientService.prototype.cleanup = function () {
+ this.stopRemoteAudioMonitor();
+ this.stopRemoteAudio();
+ if (this.callObject) {
+ this.callObject.destroy().catch(function () { });
+ this.callObject = null;
+ }
+ if (this.localStream) {
+ this.localStream.getTracks().forEach(function (t) { return t.stop(); });
+ this.localStream = null;
+ }
+ this.localSessionId = null;
+ this.speakingSubject.next(false);
+ this.userSpeakingSubject.next(false);
+ this.localStreamSubject.next(null);
+ this.firstRemoteAudioFrameSubject.next(false);
+ // Keep last micMuted state; will reset on next connect
+ };
+ return DailyVoiceClientService;
+ }());
+ DailyVoiceClientService.ɵprov = i0__namespace.ɵɵdefineInjectable({ factory: function DailyVoiceClientService_Factory() { return new DailyVoiceClientService(i0__namespace.ɵɵinject(i0__namespace.NgZone)); }, token: DailyVoiceClientService, providedIn: "root" });
+ DailyVoiceClientService.decorators = [
+ { type: i0.Injectable, args: [{
+ providedIn: 'root',
+ },] }
+ ];
+ DailyVoiceClientService.ctorParameters = function () { return [
+ { type: i0.NgZone }
+ ]; };
+
+ /**
+ * Voice agent orchestrator. Coordinates WebSocket (signaling) and Daily.js (WebRTC audio).
+ *
+ * CRITICAL: This service must NEVER use Socket.IO or ngx-socket-io. Voice flow uses only:
+ * - Native WebSocket (WebSocketVoiceClientService) for signaling (room_created, transcripts)
+ * - Daily.js (DailyVoiceClientService) for WebRTC audio. Audio does NOT flow over WebSocket.
+ *
+ * - Maintains callState, statusText, duration, isMicMuted, isUserSpeaking, audioLevels
+ * - Uses WebSocket for room_created and transcripts only (no audio)
+ * - Uses Daily.js for all audio, mic, and real-time speaking detection
  */
  var VoiceAgentService = /** @class */ (function () {
- function VoiceAgentService(audioAnalyzer, platformTokenRefresh, ngZone,
+ function VoiceAgentService(audioAnalyzer, wsClient, dailyClient, platformTokenRefresh,
  /** `Object` not `object` — ngc metadata collection rejects the `object` type in DI params. */
  platformId) {
  var _this = this;
  this.audioAnalyzer = audioAnalyzer;
+ this.wsClient = wsClient;
+ this.dailyClient = dailyClient;
  this.platformTokenRefresh = platformTokenRefresh;
- this.ngZone = ngZone;
  this.platformId = platformId;
  this.callStateSubject = new rxjs.BehaviorSubject('idle');
  this.statusTextSubject = new rxjs.BehaviorSubject('');
@@ -1368,10 +1802,9 @@
  this.botTranscriptSubject = new rxjs.Subject();
  this.callStartTime = 0;
  this.durationInterval = null;
- this.pcClient = null;
- this.botAudioElement = null;
  this.subscriptions = new rxjs.Subscription();
  this.destroy$ = new rxjs.Subject();
+ this.hasAutoUnmutedAfterFirstAudio = false;
  this.callState$ = this.callStateSubject.asObservable();
  this.statusText$ = this.statusTextSubject.asObservable();
  this.duration$ = this.durationSubject.asObservable();
@@ -1380,135 +1813,141 @@
  this.audioLevels$ = this.audioLevelsSubject.asObservable();
  this.userTranscript$ = this.userTranscriptSubject.asObservable();
  this.botTranscript$ = this.botTranscriptSubject.asObservable();
+ // Waveform visualization only - do NOT use for speaking state
  this.subscriptions.add(this.audioAnalyzer.audioLevels$.subscribe(function (levels) { return _this.audioLevelsSubject.next(levels); }));
  }
  VoiceAgentService.prototype.ngOnDestroy = function () {
  this.destroy$.next();
  this.subscriptions.unsubscribe();
- void this.disconnect();
+ this.disconnect();
  };
- /** Reset to idle (e.g. when modal re-opens so user can click Start Call). */
+ /** Reset to idle state (e.g. when modal opens so user can click Start Call). */
  VoiceAgentService.prototype.resetToIdle = function () {
  if (this.callStateSubject.value === 'idle')
  return;
- void this.disconnect();
+ this.stopDurationTimer();
+ this.audioAnalyzer.stop();
+ this.wsClient.disconnect();
+ // Fire-and-forget: Daily disconnect is async; connect() will await if needed
+ void this.dailyClient.disconnect();
  this.callStateSubject.next('idle');
  this.statusTextSubject.next('');
  this.durationSubject.next('0:00');
+ this.hasAutoUnmutedAfterFirstAudio = false;
  };
  VoiceAgentService.prototype.connect = function (apiUrl, token, botId, conversationId, apiKey, eventToken, eventId, eventUrl, domainAuthority, usersApiUrl) {
  return __awaiter(this, void 0, void 0, function () {
- var accessToken, ensured, e_1, baseUrl, pcClient, requestHeaders, error_1;
+ var accessToken, ensured, e_1, baseUrl, postUrl, headers, res, json, wsUrl, error_1;
  var _this = this;
- return __generator(this, function (_d) {
- switch (_d.label) {
+ return __generator(this, function (_a) {
+ switch (_a.label) {
  case 0:
  if (this.callStateSubject.value !== 'idle') {
- console.warn('[HiveGpt Voice] Call already in progress');
+ console.warn('Call already in progress');
  return [2 /*return*/];
  }
- _d.label = 1;
+ _a.label = 1;
  case 1:
- _d.trys.push([1, 8, , 10]);
+ _a.trys.push([1, 8, , 10]);
  this.callStateSubject.next('connecting');
  this.statusTextSubject.next('Connecting...');
  accessToken = token;
  if (!(usersApiUrl && common.isPlatformBrowser(this.platformId))) return [3 /*break*/, 5];
- _d.label = 2;
+ _a.label = 2;
  case 2:
- _d.trys.push([2, 4, , 5]);
+ _a.trys.push([2, 4, , 5]);
  return [4 /*yield*/, this.platformTokenRefresh
  .ensureValidAccessToken(token, usersApiUrl)
  .pipe(operators.take(1))
  .toPromise()];
  case 3:
- ensured = _d.sent();
- if (ensured === null || ensured === void 0 ? void 0 : ensured.accessToken)
+ ensured = _a.sent();
+ if (ensured === null || ensured === void 0 ? void 0 : ensured.accessToken) {
  accessToken = ensured.accessToken;
+ }
  return [3 /*break*/, 5];
  case 4:
- e_1 = _d.sent();
- console.warn('[HiveGpt Voice] Token refresh failed', e_1);
+ e_1 = _a.sent();
+ console.warn('[HiveGpt Voice] Token refresh before connect failed', e_1);
  return [3 /*break*/, 5];
  case 5:
  baseUrl = apiUrl.replace(/\/$/, '');
- pcClient = new clientJs.PipecatClient({
- transport: new websocketTransport.WebSocketTransport(),
- enableMic: true,
- enableCam: false,
- callbacks: {
- onConnected: function () { return _this.ngZone.run(function () { return _this.onPipecatConnected(); }); },
- onDisconnected: function () { return _this.ngZone.run(function () { return _this.onPipecatDisconnected(); }); },
- onBotReady: function () { return _this.ngZone.run(function () { return _this.onBotReady(); }); },
- onUserTranscript: function (data) { return _this.ngZone.run(function () { return _this.userTranscriptSubject.next({ text: data.text, final: !!data.final }); }); },
- onBotTranscript: function (data) { return _this.ngZone.run(function () { return _this.botTranscriptSubject.next(data.text); }); },
- onError: function (err) {
- _this.ngZone.run(function () {
- console.error('[HiveGpt Voice] PipecatClient error', err);
- _this.callStateSubject.next('ended');
- _this.statusTextSubject.next('Connection failed');
- });
- },
- },
- });
- this.pcClient = pcClient;
- // Bot audio arrives as a MediaStreamTrack — wire to a hidden <audio> element
- pcClient.on(clientJs.RTVIEvent.TrackStarted, function (track, participant) {
- if (!(participant === null || participant === void 0 ? void 0 : participant.local) && track.kind === 'audio') {
- _this.ngZone.run(function () { return _this.setupBotAudioTrack(track); });
- }
- });
- // Speaking state comes straight from RTVI events
- pcClient.on(clientJs.RTVIEvent.BotStartedSpeaking, function () { return _this.ngZone.run(function () { return _this.onBotStartedSpeaking(); }); });
- pcClient.on(clientJs.RTVIEvent.BotStoppedSpeaking, function () { return _this.ngZone.run(function () { return _this.onBotStoppedSpeaking(); }); });
- pcClient.on(clientJs.RTVIEvent.UserStartedSpeaking, function () { return _this.ngZone.run(function () {
- _this.isUserSpeakingSubject.next(true);
- _this.callStateSubject.next('listening');
- _this.statusTextSubject.next('Listening...');
- }); });
- pcClient.on(clientJs.RTVIEvent.UserStoppedSpeaking, function () { return _this.ngZone.run(function () {
- _this.isUserSpeakingSubject.next(false);
- if (_this.callStateSubject.value === 'listening') {
- // Brief 'Processing...' while we wait for the bot to respond.
- _this.callStateSubject.next('connected');
- _this.statusTextSubject.next('Processing...');
- }
- }); });
- // Acquire mic (triggers browser permission prompt)
- return [4 /*yield*/, pcClient.initDevices()];
- case 6:
- // Acquire mic (triggers browser permission prompt)
- _d.sent();
- requestHeaders = new Headers();
- requestHeaders.append('Authorization', "Bearer " + accessToken);
- requestHeaders.append('x-api-key', apiKey);
- requestHeaders.append('hive-bot-id', botId);
- requestHeaders.append('domain-authority', domainAuthority);
- requestHeaders.append('eventUrl', eventUrl);
- requestHeaders.append('eventId', eventId);
- requestHeaders.append('eventToken', eventToken);
- requestHeaders.append('ngrok-skip-browser-warning', 'true');
- // POST to /ai/ask-voice-socket → receives { ws_url } → WebSocketTransport connects
- return [4 /*yield*/, pcClient.startBotAndConnect({
- endpoint: baseUrl + "/ai/ask-voice-socket",
- headers: requestHeaders,
- requestData: {
+ postUrl = baseUrl + "/ai/ask-voice";
+ headers = {
+ 'Content-Type': 'application/json',
+ Authorization: "Bearer " + accessToken,
+ 'x-api-key': apiKey,
+ 'hive-bot-id': botId,
+ 'domain-authority': domainAuthority,
+ eventUrl: eventUrl,
+ eventId: eventId,
+ eventToken: eventToken,
+ 'ngrok-skip-browser-warning': 'true',
+ };
+ return [4 /*yield*/, fetch(postUrl, {
+ method: 'POST',
+ headers: headers,
+ body: JSON.stringify({
  bot_id: botId,
  conversation_id: conversationId,
  voice: 'alloy',
- },
+ }),
  })];
+ case 6:
+ res = _a.sent();
+ if (!res.ok) {
+ throw new Error("HTTP " + res.status);
+ }
+ return [4 /*yield*/, res.json()];
  case 7:
- // POST to /ai/ask-voice-socket → receives { ws_url } → WebSocketTransport connects
- _d.sent();
+ json = _a.sent();
+ wsUrl = json === null || json === void 0 ? void 0 : json.rn_ws_url;
+ if (!wsUrl || typeof wsUrl !== 'string') {
+ throw new Error('No ws_url in response');
+ }
+ // Subscribe to room_created BEFORE connecting to avoid race
+ this.wsClient.roomCreated$
+ .pipe(operators.take(1), operators.takeUntil(this.destroy$))
+ .subscribe(function (roomUrl) { return __awaiter(_this, void 0, void 0, function () {
+ var err_1;
+ return __generator(this, function (_a) {
+ switch (_a.label) {
+ case 0:
+ _a.trys.push([0, 2, , 4]);
+ return [4 /*yield*/, this.onRoomCreated(roomUrl)];
+ case 1:
+ _a.sent();
+ return [3 /*break*/, 4];
+ case 2:
+ err_1 = _a.sent();
+ console.error('Daily join failed:', err_1);
+ this.callStateSubject.next('ended');
+ this.statusTextSubject.next('Connection failed');
+ return [4 /*yield*/, this.disconnect()];
+ case 3:
+ _a.sent();
+ throw err_1;
+ case 4: return [2 /*return*/];
+ }
+ });
+ }); });
+ // Forward transcripts from WebSocket
+ this.subscriptions.add(this.wsClient.userTranscript$
+ .pipe(operators.takeUntil(this.destroy$))
+ .subscribe(function (t) { return _this.userTranscriptSubject.next(t); }));
+ this.subscriptions.add(this.wsClient.botTranscript$
+ .pipe(operators.takeUntil(this.destroy$))
+ .subscribe(function (t) { return _this.botTranscriptSubject.next(t); }));
+ // Connect signaling WebSocket (no audio over WS)
+ this.wsClient.connect(wsUrl);
  return [3 /*break*/, 10];
  case 8:
- error_1 = _d.sent();
- console.error('[HiveGpt Voice] connect failed', error_1);
+ error_1 = _a.sent();
+ console.error('Error connecting voice agent:', error_1);
  this.callStateSubject.next('ended');
- return [4 /*yield*/, this.cleanupPipecatClient()];
+ return [4 /*yield*/, this.disconnect()];
  case 9:
- _d.sent();
+ _a.sent();
  this.statusTextSubject.next('Connection failed');
  throw error_1;
  case 10: return [2 /*return*/];
@@ -1516,143 +1955,106 @@
  });
  });
  };
- VoiceAgentService.prototype.onPipecatConnected = function () {
- // Start the duration timer from the moment the session is live.
- this.callStartTime = Date.now();
- this.startDurationTimer();
- this.callStateSubject.next('connected');
- this.statusTextSubject.next('Connected');
- this.isMicMutedSubject.next(false);
- this.startLocalMicAnalyzer();
- };
- VoiceAgentService.prototype.onPipecatDisconnected = function () {
- this.stopDurationTimer();
- this.callStartTime = 0;
- this.audioAnalyzer.stop();
- this.stopBotAudio();
- this.callStateSubject.next('ended');
- this.statusTextSubject.next('Call Ended');
- };
- VoiceAgentService.prototype.onBotReady = function () {
- var _a, _b, _c;
- // Retry track wiring in case tracks weren't ready at onConnected.
- this.startLocalMicAnalyzer();
- var botTrack = (_c = (_b = (_a = this.pcClient) === null || _a === void 0 ? void 0 : _a.tracks()) === null || _b === void 0 ? void 0 : _b.bot) === null || _c === void 0 ? void 0 : _c.audio;
- if (botTrack)
- this.setupBotAudioTrack(botTrack);
- // Bot is initialised — signal that we're now waiting for user input.
- this.statusTextSubject.next('Listening...');
- };
- VoiceAgentService.prototype.startLocalMicAnalyzer = function () {
- var _a, _b, _c;
- var localTrack = (_c = (_b = (_a = this.pcClient) === null || _a === void 0 ? void 0 : _a.tracks()) === null || _b === void 0 ? void 0 : _b.local) === null || _c === void 0 ? void 0 : _c.audio;
- if (localTrack) {
- this.audioAnalyzer.start(new MediaStream([localTrack]));
- }
- };
- VoiceAgentService.prototype.onBotStartedSpeaking = function () {
- this.callStateSubject.next('talking');
- this.statusTextSubject.next('Talking...');
- // Mark user as no longer speaking when bot takes the turn.
- this.isUserSpeakingSubject.next(false);
- };
- VoiceAgentService.prototype.onBotStoppedSpeaking = function () {
- if (this.callStateSubject.value === 'talking') {
- this.callStateSubject.next('connected');
- this.statusTextSubject.next('Listening...');
- }
- };
- VoiceAgentService.prototype.setupBotAudioTrack = function (track) {
- var _a;
- if (!this.botAudioElement) {
- this.botAudioElement = new Audio();
- this.botAudioElement.autoplay = true;
- }
- var existing = (_a = this.botAudioElement.srcObject) === null || _a === void 0 ? void 0 : _a.getAudioTracks()[0];
- if ((existing === null || existing === void 0 ? void 0 : existing.id) === track.id)
- return;
- this.botAudioElement.srcObject = new MediaStream([track]);
- this.botAudioElement.play().catch(function (err) { return console.warn('[HiveGpt Voice] Bot audio play blocked', err); });
- };
- VoiceAgentService.prototype.stopBotAudio = function () {
- var _a;
- if (this.botAudioElement) {
- try {
- this.botAudioElement.pause();
- (_a = this.botAudioElement.srcObject) === null || _a === void 0 ? void 0 : _a.getAudioTracks().forEach(function (t) { return t.stop(); });
- this.botAudioElement.srcObject = null;
- }
- catch (_b) {
- // ignore
- }
- this.botAudioElement = null;
- }
- };
- VoiceAgentService.prototype.disconnect = function () {
+ VoiceAgentService.prototype.onRoomCreated = function (roomUrl) {
  return __awaiter(this, void 0, void 0, function () {
- return __generator(this, function (_d) {
- switch (_d.label) {
- case 0:
- this.stopDurationTimer();
- this.callStartTime = 0;
- this.audioAnalyzer.stop();
- this.stopBotAudio();
- return [4 /*yield*/, this.cleanupPipecatClient()];
+ var _this = this;
+ return __generator(this, function (_a) {
+ switch (_a.label) {
+ case 0:
+ // Connect Daily.js for WebRTC audio
+ return [4 /*yield*/, this.dailyClient.connect(roomUrl)];
  case 1:
- _d.sent();
- this.callStateSubject.next('ended');
- this.statusTextSubject.next('Call Ended');
+ // Connect Daily.js for WebRTC audio
+ _a.sent();
+ this.hasAutoUnmutedAfterFirstAudio = false;
+ // Waveform: use local mic stream from Daily client
+ this.dailyClient.localStream$
+ .pipe(operators.filter(function (s) { return s != null; }), operators.take(1))
+ .subscribe(function (stream) {
+ _this.audioAnalyzer.start(stream);
+ });
+ this.subscriptions.add(this.dailyClient.userSpeaking$.subscribe(function (s) { return _this.isUserSpeakingSubject.next(s); }));
+ this.subscriptions.add(rxjs.combineLatest([
+ this.dailyClient.speaking$,
+ this.dailyClient.userSpeaking$,
+ ]).subscribe(function (_a) {
+ var _b = __read(_a, 2), bot = _b[0], user = _b[1];
+ var current = _this.callStateSubject.value;
+ if (current === 'connecting' && !bot) {
+ return;
+ }
+ if (current === 'connecting' && bot) {
+ _this.callStartTime = Date.now();
+ _this.startDurationTimer();
+ _this.callStateSubject.next('talking');
+ return;
+ }
+ if (user) {
+ _this.callStateSubject.next('listening');
+ }
+ else if (bot) {
+ _this.callStateSubject.next('talking');
+ }
+ else if (current === 'talking' || current === 'listening') {
+ _this.callStateSubject.next('connected');
+ }
+ }));
+ this.subscriptions.add(this.dailyClient.micMuted$.subscribe(function (muted) { return _this.isMicMutedSubject.next(muted); }));
+ // One-time auto-unmute after first remote audio frame starts playing.
+ // This keeps initial capture muted until bot audio is heard, then restores normal mic flow.
+ this.subscriptions.add(this.dailyClient.firstRemoteAudioFrame$
+ .pipe(operators.filter(function (hasFirstFrame) { return hasFirstFrame; }), operators.take(1))
+ .subscribe(function () {
+ if (_this.hasAutoUnmutedAfterFirstAudio)
+ return;
+ _this.hasAutoUnmutedAfterFirstAudio = true;
+ if (_this.isMicMutedSubject.value) {
+ _this.dailyClient.setMuted(false);
+ }
+ }));
+ this.statusTextSubject.next('Connecting...');
  return [2 /*return*/];
  }
  });
  });
  };
- VoiceAgentService.prototype.cleanupPipecatClient = function () {
+ VoiceAgentService.prototype.disconnect = function () {
  return __awaiter(this, void 0, void 0, function () {
- var _a_1;
- return __generator(this, function (_d) {
- switch (_d.label) {
+ return __generator(this, function (_a) {
+ switch (_a.label) {
  case 0:
- if (!this.pcClient) return [3 /*break*/, 5];
- _d.label = 1;
+ this.stopDurationTimer();
+ this.audioAnalyzer.stop();
+ // Daily first, then WebSocket
+ return [4 /*yield*/, this.dailyClient.disconnect()];
  case 1:
- _d.trys.push([1, 3, , 4]);
- return [4 /*yield*/, this.pcClient.disconnect()];
- case 2:
- _d.sent();
- return [3 /*break*/, 4];
- case 3:
- _a_1 = _d.sent();
- return [3 /*break*/, 4];
- case 4:
- this.pcClient = null;
- _d.label = 5;
- case 5: return [2 /*return*/];
+ // Daily first, then WebSocket
+ _a.sent();
+ this.wsClient.disconnect();
+ this.callStateSubject.next('ended');
+ this.statusTextSubject.next('Call Ended');
+ this.hasAutoUnmutedAfterFirstAudio = false;
+ return [2 /*return*/];
  }
  });
  });
  };
  VoiceAgentService.prototype.toggleMic = function () {
- if (!this.pcClient)
- return;
- var nextMuted = !this.isMicMutedSubject.value;
- this.pcClient.enableMic(!nextMuted);
- this.isMicMutedSubject.next(nextMuted);
- if (nextMuted)
- this.isUserSpeakingSubject.next(false);
+ var current = this.isMicMutedSubject.value;
+ this.dailyClient.setMuted(!current);
  };
  VoiceAgentService.prototype.startDurationTimer = function () {
  var _this = this;
- var tick = function () {
+ var updateDuration = function () {
  if (_this.callStartTime > 0) {
  var elapsed = Math.floor((Date.now() - _this.callStartTime) / 1000);
- var m = Math.floor(elapsed / 60);
- var s = elapsed % 60;
- _this.durationSubject.next(m + ":" + String(s).padStart(2, '0'));
+ var minutes = Math.floor(elapsed / 60);
+ var seconds = elapsed % 60;
+ _this.durationSubject.next(minutes + ":" + String(seconds).padStart(2, '0'));
  }
  };
- tick();
- this.durationInterval = setInterval(tick, 1000);
+ updateDuration();
+ this.durationInterval = setInterval(updateDuration, 1000);
  };
  VoiceAgentService.prototype.stopDurationTimer = function () {
  if (this.durationInterval) {
@@ -1662,7 +2064,7 @@
  };
  return VoiceAgentService;
  }());
- VoiceAgentService.ɵprov = i0__namespace.ɵɵdefineInjectable({ factory: function VoiceAgentService_Factory() { return new VoiceAgentService(i0__namespace.ɵɵinject(AudioAnalyzerService), i0__namespace.ɵɵinject(PlatformTokenRefreshService), i0__namespace.ɵɵinject(i0__namespace.NgZone), i0__namespace.ɵɵinject(i0__namespace.PLATFORM_ID)); }, token: VoiceAgentService, providedIn: "root" });
+ VoiceAgentService.ɵprov = i0__namespace.ɵɵdefineInjectable({ factory: function VoiceAgentService_Factory() { return new VoiceAgentService(i0__namespace.ɵɵinject(AudioAnalyzerService), i0__namespace.ɵɵinject(WebSocketVoiceClientService), i0__namespace.ɵɵinject(DailyVoiceClientService), i0__namespace.ɵɵinject(PlatformTokenRefreshService), i0__namespace.ɵɵinject(i0__namespace.PLATFORM_ID)); }, token: VoiceAgentService, providedIn: "root" });
  VoiceAgentService.decorators = [
  { type: i0.Injectable, args: [{
  providedIn: 'root',
@@ -1670,8 +2072,9 @@
  ];
  VoiceAgentService.ctorParameters = function () { return [
  { type: AudioAnalyzerService },
+ { type: WebSocketVoiceClientService },
+ { type: DailyVoiceClientService },
  { type: PlatformTokenRefreshService },
- { type: i0.NgZone },
  { type: Object, decorators: [{ type: i0.Inject, args: [i0.PLATFORM_ID,] }] }
  ]; };

@@ -1702,27 +2105,12 @@
  this.isMicMuted = false;
  this.isUserSpeaking = false;
  this.audioLevels = [];
+ this.isSpeaking = false;
+ /** Track whether call has transitioned out of initial connected state. */
+ this.hasLeftConnectedOnce = false;
  this.subscriptions = [];
  this.isConnecting = false;
  }
- Object.defineProperty(VoiceAgentModalComponent.prototype, "isBotTalking", {
- /** True while the bot is speaking — drives avatar pulse animation and voice visualizer. */
- get: function () { return this.callState === 'talking'; },
- enumerable: false,
- configurable: true
- });
- Object.defineProperty(VoiceAgentModalComponent.prototype, "isUserActive", {
- /** True while the user is actively speaking — drives waveform active color. */
- get: function () { return this.callState === 'listening' && this.isUserSpeaking && !this.isMicMuted; },
- enumerable: false,
- configurable: true
- });
- Object.defineProperty(VoiceAgentModalComponent.prototype, "isProcessing", {
- /** True during the brief processing pause between user speech and bot response. */
- get: function () { return this.callState === 'connected' && this.statusText === 'Processing...'; },
- enumerable: false,
- configurable: true
- });
  VoiceAgentModalComponent.prototype.ngOnInit = function () {
  var _this = this;
  var _a, _b, _c, _d, _e, _f, _g, _h;
@@ -1744,8 +2132,16 @@
  this.agentAvatar = this.injectedConfig.agentAvatar;
  this.usersApiUrl = (_h = this.injectedConfig.usersApiUrl) !== null && _h !== void 0 ? _h : this.usersApiUrl;
  }
+ // Subscribe to observables
  this.subscriptions.push(this.voiceAgentService.callState$.subscribe(function (state) {
  _this.callState = state;
+ _this.isSpeaking = state === 'talking';
+ if (state === 'listening' || state === 'talking') {
+ _this.hasLeftConnectedOnce = true;
+ }
+ if (state === 'idle' || state === 'ended') {
+ _this.hasLeftConnectedOnce = false;
+ }
  }));
  this.subscriptions.push(this.voiceAgentService.statusText$.subscribe(function (text) {
  _this.statusText = text;
@@ -1830,16 +2226,18 @@
  return minHeight + (n / 100) * (maxHeight - minHeight);
  };
  Object.defineProperty(VoiceAgentModalComponent.prototype, "statusLabel", {
- /** Status label for active call — driven by callState + service statusText. */
+ /** Status label for active call. */
  get: function () {
- switch (this.callState) {
- case 'connecting': return 'Connecting...';
- case 'talking': return 'Talking...';
- case 'listening': return 'Listening...';
- // 'connected' covers: initial connect, between turns (Listening / Processing)
- case 'connected': return this.statusText || 'Connected';
- default: return this.statusText || '';
+ if (this.callState === 'connecting')
+ return this.statusText || 'Connecting...';
+ if (this.callState === 'talking')
+ return 'Talking...';
+ if (this.callState === 'listening')
+ return 'Listening';
+ if (this.callState === 'connected') {
+ return this.hasLeftConnectedOnce ? 'Talking...' : 'Connected';
  }
+ return this.statusText || '';
  },
  enumerable: false,
  configurable: true
@@ -1868,8 +2266,8 @@
  VoiceAgentModalComponent.decorators = [
  { type: i0.Component, args: [{
  selector: 'hivegpt-voice-agent-modal',
- template: "<div class=\"voice-agent-modal-overlay\" (click)=\"endCall()\">\n <div\n class=\"voice-container voice-agent-modal\"\n (click)=\"$event.stopPropagation()\"\n >\n <!-- Header -->\n <div class=\"header\">\n <div class=\"header-left\">\n <div class=\"header-icon\">\n <svg\n width=\"16\"\n height=\"16\"\n viewBox=\"0 0 24 24\"\n fill=\"none\"\n xmlns=\"http://www.w3.org/2000/svg\"\n >\n <path\n d=\"M12 1C8.13 1 5 4.13 5 8V14C5 17.87 8.13 21 12 21C15.87 21 19 17.87 19 14V8C19 4.13 15.87 1 12 1Z\"\n fill=\"currentColor\"\n />\n <path\n d=\"M12 23C10.34 23 9 21.66 9 20H15C15 21.66 13.66 23 12 23Z\"\n fill=\"currentColor\"\n />\n </svg>\n </div>\n <span class=\"header-title\">Voice</span>\n </div>\n <button\n class=\"close-button\"\n (click)=\"endCall()\"\n type=\"button\"\n aria-label=\"Close\"\n >\n <svg\n width=\"24\"\n height=\"24\"\n viewBox=\"0 0 24 24\"\n fill=\"none\"\n xmlns=\"http://www.w3.org/2000/svg\"\n >\n <path\n d=\"M18 6L6 18M6 6L18 18\"\n stroke=\"currentColor\"\n stroke-width=\"2\"\n stroke-linecap=\"round\"\n />\n </svg>\n </button>\n </div>\n\n <!-- Avatar Section with glow -->\n <div class=\"avatar-section\">\n <div class=\"avatar-glow\" [class.glow-talking]=\"isBotTalking\" [class.glow-listening]=\"callState === 'listening'\"></div>\n\n <!-- Particle ring \u2014 visible while bot is talking -->\n <div *ngIf=\"isBotTalking\" class=\"particles-container\">\n <span *ngFor=\"let i of [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15]\"\n class=\"particle\"\n [style.--i]=\"i\"\n [style.animationDelay]=\"(i * 0.15) + 's'\">\n </span>\n </div>\n\n <div class=\"avatar-wrapper\" [class.speaking]=\"isBotTalking\" [class.listening]=\"callState === 'listening'\">\n <img class=\"avatar-image\" [src]=\"displayAvatarUrl\" alt=\"Nia\" />\n </div>\n </div>\n\n <!-- Agent Info: Nia + Collaboration Manager AI Agent Specialist -->\n <div class=\"agent-info\">\n <div class=\"agent-name\">\n Nia\n <span class=\"ai-badge\">AI</span>\n </div>\n <p class=\"agent-role\">COP30 AI Agent </p>\n </div>\n\n <!-- Start Call (when idle only) -->\n <div *ngIf=\"callState === 'idle'\" class=\"start-call-section\">\n <p *ngIf=\"statusText === 'Connection failed'\" class=\"error-message\">\n {{ statusText }}\n </p>\n <button\n class=\"start-call-button\"\n type=\"button\"\n [disabled]=\"isConnecting\"\n (click)=\"startCall()\"\n >\n <span *ngIf=\"isConnecting\">Connecting...</span>\n <span *ngIf=\"!isConnecting && statusText === 'Connection failed'\"\n >Retry</span\n >\n <span *ngIf=\"!isConnecting && statusText !== 'Connection failed'\"\n >Start Call</span\n >\n </button>\n </div>\n\n <!-- Call Ended: status + Call Again / Back to Chat -->\n <div *ngIf=\"callState === 'ended'\" class=\"call-ended-section\">\n <p class=\"call-ended-status\">\n <span class=\"status-text\">Call Ended</span>\n <span class=\"status-timer\">{{ duration }}</span>\n </p>\n <div class=\"call-ended-controls\">\n <button\n class=\"action-btn\"\n type=\"button\"\n (click)=\"callAgain()\"\n title=\"Call Again\"\n >\n <svg\n width=\"18\"\n height=\"18\"\n viewBox=\"0 0 24 24\"\n fill=\"none\"\n stroke=\"currentColor\"\n stroke-width=\"2\"\n stroke-linecap=\"round\"\n stroke-linejoin=\"round\"\n xmlns=\"http://www.w3.org/2000/svg\"\n >\n <path d=\"M3 12a9 9 0 1 0 9-9 9.75 9.75 0 0 0-6.74 2.74L3 8\" />\n <path d=\"M3 3v5h5\" />\n </svg>\n Call Again\n </button>\n <button\n class=\"action-btn\"\n type=\"button\"\n (click)=\"backToChat()\"\n title=\"Back to Chat\"\n >\n <svg\n width=\"18\"\n height=\"18\"\n viewBox=\"0 0 24 24\"\n 
fill=\"none\"\n stroke=\"currentColor\"\n stroke-width=\"2\"\n stroke-linecap=\"round\"\n stroke-linejoin=\"round\"\n xmlns=\"http://www.w3.org/2000/svg\"\n >\n <path d=\"M21 15a2 2 0 0 1-2 2H7l-4 4V5a2 2 0 0 1 2-2h14a2 2 0 0 1 2 2z\" />\n </svg>\n Back to Chat\n </button>\n </div>\n </div>\n\n <!-- Status (when connecting or in-call: Talking... / Listening / Connected + timer) -->\n <div\n class=\"status-indicator status-inline\"\n *ngIf=\"callState !== 'idle' && callState !== 'ended'\"\n >\n <div *ngIf=\"callState === 'connecting'\" class=\"status-connecting\">\n <svg\n class=\"spinner\"\n width=\"24\"\n height=\"24\"\n viewBox=\"0 0 24 24\"\n fill=\"none\"\n xmlns=\"http://www.w3.org/2000/svg\"\n >\n <circle\n cx=\"12\"\n cy=\"12\"\n r=\"10\"\n stroke=\"currentColor\"\n stroke-width=\"2\"\n stroke-linecap=\"round\"\n stroke-dasharray=\"31.416\"\n stroke-dashoffset=\"31.416\"\n >\n <animate\n attributeName=\"stroke-dasharray\"\n dur=\"2s\"\n values=\"0 31.416;15.708 15.708;0 31.416;0 31.416\"\n repeatCount=\"indefinite\"\n />\n <animate\n attributeName=\"stroke-dashoffset\"\n dur=\"2s\"\n values=\"0;-15.708;-31.416;-31.416\"\n repeatCount=\"indefinite\"\n />\n </circle>\n </svg>\n <span class=\"status-text\">{{ statusText }}</span>\n </div>\n <div\n *ngIf=\"callState !== 'connecting'\"\n class=\"status-connected status-inline-row\"\n >\n <span class=\"status-text\" [class.status-talking]=\"isBotTalking\" [class.status-listening]=\"callState === 'listening'\" [class.status-processing]=\"isProcessing\">\n {{ statusLabel }}\n </span>\n\n <!-- Animated bars \u2014 visible while bot is talking -->\n <div *ngIf=\"isBotTalking\" class=\"voice-visualizer\">\n <div class=\"vbar\"></div>\n <div class=\"vbar\"></div>\n <div class=\"vbar\"></div>\n <div class=\"vbar\"></div>\n </div>\n\n <!-- Bouncing dots \u2014 visible during processing pause -->\n <div *ngIf=\"isProcessing\" class=\"processing-dots\">\n <span></span><span></span><span></span>\n </div>\n\n <span class=\"status-timer\">{{ duration }}</span>\n </div>\n </div>\n\n <!-- Waveform: always visible during an active call, active (coloured) when user speaks -->\n <div\n *ngIf=\"callState === 'connected' || callState === 'listening' || callState === 'talking'\"\n class=\"waveform-container\"\n >\n <div class=\"waveform-bars\">\n <div\n *ngFor=\"let level of audioLevels; let i = index\"\n class=\"waveform-bar\"\n [class.active]=\"isUserActive\"\n [style.height.px]=\"getWaveformHeight(level, i)\"\n ></div>\n </div>\n </div>\n\n <!-- Call Controls (when connected) -->\n <div\n class=\"controls\"\n *ngIf=\"\n callState === 'connected' ||\n callState === 'listening' ||\n callState === 'talking'\n \"\n >\n <div\n style=\"\n display: flex;\n align-items: center;\n gap: 2px;\n flex-direction: column;\n \"\n >\n <button\n class=\"control-btn mic-btn\"\n [class.muted]=\"isMicMuted\"\n (click)=\"toggleMic()\"\n type=\"button\"\n [title]=\"isMicMuted ? 
'Unmute' : 'Mute'\"\n >\n <!-- Microphone icon (unmuted) -->\n <svg\n *ngIf=\"!isMicMuted\"\n width=\"24\"\n height=\"24\"\n viewBox=\"-5 0 32 32\"\n xmlns=\"http://www.w3.org/2000/svg\"\n fill=\"currentColor\"\n >\n <g transform=\"translate(-105, -307)\">\n <path\n d=\"M111,314 C111,311.238 113.239,309 116,309 C118.761,309 121,311.238 121,314 L121,324 C121,326.762 118.761,329 116,329 C113.239,329 111,326.762 111,324 L111,314 L111,314 Z M116,331 C119.866,331 123,327.866 123,324 L123,314 C123,310.134 119.866,307 116,307 C112.134,307 109,310.134 109,314 L109,324 C109,327.866 112.134,331 116,331 L116,331 Z M127,326 L125,326 C124.089,330.007 120.282,333 116,333 C111.718,333 107.911,330.007 107,326 L105,326 C105.883,330.799 110.063,334.51 115,334.955 L115,337 L114,337 C113.448,337 113,337.448 113,338 C113,338.553 113.448,339 114,339 L118,339 C118.552,339 119,338.553 119,338 C119,337.448 118.552,337 118,337 L117,337 L117,334.955 C121.937,334.51 126.117,330.799 127,326 L127,326 Z\"\n />\n </g>\n </svg>\n <!-- Microphone icon (muted) -->\n <svg\n *ngIf=\"isMicMuted\"\n width=\"24\"\n height=\"24\"\n viewBox=\"-5 0 32 32\"\n xmlns=\"http://www.w3.org/2000/svg\"\n fill=\"currentColor\"\n >\n <g transform=\"translate(-105, -307)\">\n <path\n d=\"M111,314 C111,311.238 113.239,309 116,309 C118.761,309 121,311.238 121,314 L121,324 C121,326.762 118.761,329 116,329 C113.239,329 111,326.762 111,324 L111,314 L111,314 Z M116,331 C119.866,331 123,327.866 123,324 L123,314 C123,310.134 119.866,307 116,307 C112.134,307 109,310.134 109,314 L109,324 C109,327.866 112.134,331 116,331 L116,331 Z M127,326 L125,326 C124.089,330.007 120.282,333 116,333 C111.718,333 107.911,330.007 107,326 L105,326 C105.883,330.799 110.063,334.51 115,334.955 L115,337 L114,337 C113.448,337 113,337.448 113,338 C113,338.553 113.448,339 114,339 L118,339 C118.552,339 119,338.553 119,338 C119,337.448 118.552,337 118,337 L117,337 L117,334.955 C121.937,334.51 126.117,330.799 127,326 L127,326 Z\"\n />\n </g>\n <path\n d=\"M2 2 L30 30\"\n stroke=\"currentColor\"\n stroke-width=\"2\"\n stroke-linecap=\"round\"\n />\n </svg>\n </button>\n <span class=\"control-label\">Mute</span>\n </div>\n\n <div\n style=\"\n display: flex;\n align-items: center;\n gap: 2px;\n flex-direction: column;\n \"\n >\n <button\n class=\"control-btn end-call-btn\"\n (click)=\"hangUp()\"\n type=\"button\"\n title=\"End Call\"\n >\n <svg\n width=\"24\"\n height=\"24\"\n viewBox=\"0 0 24 24\"\n fill=\"none\"\n xmlns=\"http://www.w3.org/2000/svg\"\n >\n <path\n d=\"M18 6L6 18M6 6L18 18\"\n stroke=\"currentColor\"\n stroke-width=\"2\"\n stroke-linecap=\"round\"\n />\n </svg>\n </button>\n <span class=\"control-label\">End Call</span>\n </div>\n </div>\n </div>\n</div>\n",
- styles: [":host{display:block}.voice-agent-modal-overlay{align-items:flex-end;backdrop-filter:blur(4px);background:rgba(0,0,0,.5);bottom:0;display:flex;font-family:Segoe UI,Tahoma,Geneva,Verdana,sans-serif;justify-content:flex-end;left:0;padding:24px;position:fixed;right:0;top:0;z-index:99999}.voice-container.voice-agent-modal{align-items:center;animation:modalEnter .3s ease-out;background:#fff;border-radius:30px;box-shadow:0 10px 40px rgba(0,0,0,.1);display:flex;flex-direction:column;max-width:440px;min-height:600px;padding:30px;position:relative;text-align:center;width:100%}@keyframes modalEnter{0%{opacity:0;transform:translateY(20px)}to{opacity:1;transform:translateY(0)}}.header{justify-content:space-between;margin-bottom:5px;width:100%}.header,.header-left{align-items:center;display:flex}.header-left{gap:8px}.header-icon{align-items:center;background:#0f172a;border-radius:50%;color:#fff;display:flex;height:28px;justify-content:center;width:28px}.header-title{color:#0f172a;font-size:18px;font-weight:500}.close-button{align-items:center;background:none;border:none;color:#0f172a;cursor:pointer;display:flex;justify-content:center;padding:8px;transition:color .2s}.close-button:hover{color:#475569}.avatar-section{margin-bottom:24px;position:relative}.avatar-wrapper{align-items:center;background:#0ea5a4;background:linear-gradient(135deg,#ccfbf1,#0ea5a4);border-radius:50%;display:flex;height:180px;justify-content:center;padding:6px;position:relative;width:180px}.avatar-image{-o-object-fit:cover;border:4px solid #fff;border-radius:50%;height:100%;object-fit:cover;width:100%}.avatar-glow{background:radial-gradient(circle,rgba(14,165,164,.2) 0,transparent 70%);height:240px;left:50%;pointer-events:none;position:absolute;top:50%;transform:translate(-50%,-50%);transition:opacity .4s ease;width:240px;z-index:-1}.avatar-glow.glow-talking{animation:glowPulse 1.5s ease-in-out infinite;background:radial-gradient(circle,rgba(14,165,164,.35) 0,transparent 65%);height:280px;width:280px}.avatar-glow.glow-listening{background:radial-gradient(circle,rgba(99,102,241,.25) 0,transparent 65%)}@keyframes glowPulse{0%,to{opacity:.7;transform:translate(-50%,-50%) scale(1)}50%{opacity:1;transform:translate(-50%,-50%) scale(1.08)}}.avatar-wrapper.speaking{animation:avatarPulse 1.4s ease-in-out infinite}.avatar-wrapper.listening{animation:avatarListenPulse 1.8s ease-in-out infinite}@keyframes avatarPulse{0%,to{box-shadow:0 0 0 0 rgba(14,165,164,.5)}50%{box-shadow:0 0 0 18px rgba(14,165,164,0)}}@keyframes avatarListenPulse{0%,to{box-shadow:0 0 0 0 rgba(99,102,241,.4)}50%{box-shadow:0 0 0 14px rgba(99,102,241,0)}}.particles-container{height:0;left:50%;pointer-events:none;position:absolute;top:50%;width:0;z-index:2}.particle{animation:particleOrbit 2.4s ease-in-out infinite;animation-delay:var(--delay,0s);background:#0ea5a4;border-radius:50%;height:7px;opacity:0;position:absolute;transform-origin:0 0;width:7px}@keyframes particleOrbit{0%{opacity:0;transform:rotate(calc(var(--i, 0)*22.5deg)) translateY(-108px) scale(.4)}25%{opacity:.9}75%{opacity:.9}to{opacity:0;transform:rotate(calc(var(--i, 0)*22.5deg + 45deg)) translateY(-108px) scale(.4)}}.agent-info{margin-bottom:40px}.agent-name{align-items:center;color:#0f172a;display:flex;font-size:24px;font-weight:700;gap:8px;justify-content:center;margin-bottom:8px}.ai-badge{background:#0ea5a4;border-radius:6px;color:#fff;font-size:10px;font-weight:700;padding:2px 
6px}.agent-role{color:#0f172a;font-size:16px;font-weight:500;margin:0}.start-call-section{align-items:center;display:flex;flex-direction:column;gap:16px;margin-bottom:24px}.error-message{color:#dc2626;font-size:14px;margin:0}.start-call-button{background:#0ea5a4;border:none;border-radius:12px;color:#fff;cursor:pointer;font-size:16px;font-weight:600;padding:14px 32px;transition:background .2s}.start-call-button:hover:not(:disabled){background:#0d9488}.start-call-button:disabled{cursor:not-allowed!important;opacity:.7}.status-indicator{justify-content:center;margin-bottom:10px}.status-connecting,.status-indicator{align-items:center;display:flex;gap:12px}.spinner{animation:spin 1s linear infinite;color:#0ea5a4}@keyframes spin{0%{transform:rotate(0deg)}to{transform:rotate(1turn)}}.status-text{color:#0f172a;font-size:16px;font-weight:400;transition:color .25s ease}.status-text.status-talking{color:#0ea5a4;font-weight:500}.status-text.status-listening{color:#6366f1;font-weight:500}.status-text.status-processing{color:#94a3b8}.status-timer{color:#0f172a;font-size:16px;font-weight:500}.voice-visualizer{align-items:center;display:flex;gap:3px;height:18px}.vbar{animation:vbarBounce 1s ease-in-out infinite;background:#0ea5a4;border-radius:2px;height:6px;width:3px}.vbar:first-child{animation-delay:0s}.vbar:nth-child(2){animation-delay:.15s}.vbar:nth-child(3){animation-delay:.3s}.vbar:nth-child(4){animation-delay:.45s}@keyframes vbarBounce{0%,to{height:4px;opacity:.5}50%{height:16px;opacity:1}}.processing-dots{align-items:center;display:flex;gap:4px}.processing-dots span{animation:dotFade 1.2s ease-in-out infinite;background:#94a3b8;border-radius:50%;display:inline-block;height:5px;width:5px}.processing-dots span:first-child{animation-delay:0s}.processing-dots span:nth-child(2){animation-delay:.2s}.processing-dots span:nth-child(3){animation-delay:.4s}@keyframes dotFade{0%,80%,to{opacity:.4;transform:scale(.8)}40%{opacity:1;transform:scale(1.2)}}.status-connected{align-items:center;display:flex;flex-direction:column;gap:4px}.status-inline .status-inline-row{align-items:center;flex-direction:row;gap:8px}.call-ended-section{align-items:center;display:flex;flex-direction:column;gap:16px;margin-bottom:24px}.call-ended-status{align-items:center;color:#0f172a;display:flex;font-size:16px;gap:8px;justify-content:center;margin:0}.call-ended-status .status-text{font-weight:400}.call-ended-status .status-timer{font-weight:500}.call-ended-controls{align-items:center;display:flex;flex-wrap:wrap;gap:16px;justify-content:center}.action-btn{align-items:center;background:#fff;border:1px solid #e2e8f0;border-radius:24px;color:#0f172a;cursor:pointer;display:flex;font-size:14px;font-weight:500;gap:8px;padding:12px 24px;transition:background .2s ease}.action-btn:hover{background:#f8fafc}.waveform-container{margin-bottom:10px;padding:0 8px}.waveform-bars,.waveform-container{align-items:center;display:flex;height:56px;justify-content:center;width:100%}.waveform-bars{gap:2px}.waveform-bar{background:#cbd5e1;border-radius:99px;flex:0 0 2px;min-height:2px;transition:height .1s ease-out;width:2px}.waveform-bar.active{background:linear-gradient(180deg,#0ea5a4,#0d9488);box-shadow:0 0 4px rgba(14,165,164,.5)}.controls{gap:24px;width:100%}.control-btn,.controls{align-items:center;display:flex;justify-content:center}.control-btn{border:none;border-radius:50%;cursor:pointer;flex-direction:column;gap:4px;height:60px;transition:transform .2s 
ease;width:60px}.control-btn:hover{transform:scale(1.05)}.control-btn:active{transform:scale(.95)}.control-label{color:#0f172a;font-size:12px;font-weight:500}.mic-btn{background:#e2e8f0}.mic-btn,.mic-btn .control-label{color:#475569}.mic-btn.muted{background:#e2e8f0;color:#475569}.end-call-btn{background:#ef4444;color:#fff}.end-call-btn .control-label{color:#fff}.end-call-btn:hover{background:#dc2626}"]
+ template: "<div class=\"voice-agent-modal-overlay\" (click)=\"endCall()\">\n <div\n class=\"voice-container voice-agent-modal\"\n (click)=\"$event.stopPropagation()\"\n >\n <!-- Header -->\n <div class=\"header\">\n <div class=\"header-left\">\n <div class=\"header-icon\">\n <svg\n width=\"16\"\n height=\"16\"\n viewBox=\"0 0 24 24\"\n fill=\"none\"\n xmlns=\"http://www.w3.org/2000/svg\"\n >\n <path\n d=\"M12 1C8.13 1 5 4.13 5 8V14C5 17.87 8.13 21 12 21C15.87 21 19 17.87 19 14V8C19 4.13 15.87 1 12 1Z\"\n fill=\"currentColor\"\n />\n <path\n d=\"M12 23C10.34 23 9 21.66 9 20H15C15 21.66 13.66 23 12 23Z\"\n fill=\"currentColor\"\n />\n </svg>\n </div>\n <span class=\"header-title\">Voice</span>\n </div>\n <button\n class=\"close-button\"\n (click)=\"endCall()\"\n type=\"button\"\n aria-label=\"Close\"\n >\n <svg\n width=\"24\"\n height=\"24\"\n viewBox=\"0 0 24 24\"\n fill=\"none\"\n xmlns=\"http://www.w3.org/2000/svg\"\n >\n <path\n d=\"M18 6L6 18M6 6L18 18\"\n stroke=\"currentColor\"\n stroke-width=\"2\"\n stroke-linecap=\"round\"\n />\n </svg>\n </button>\n </div>\n\n <!-- Avatar Section with glow -->\n <div class=\"avatar-section\">\n <div class=\"avatar-glow\"></div>\n <div class=\"avatar-wrapper\" [class.speaking]=\"isSpeaking\">\n <img class=\"avatar-image\" [src]=\"displayAvatarUrl\" alt=\"Nia\" />\n </div>\n </div>\n\n <!-- Agent Info: Nia + Collaboration Manager AI Agent Specialist -->\n <div class=\"agent-info\">\n <div class=\"agent-name\">\n Nia\n <span class=\"ai-badge\">AI</span>\n </div>\n <p class=\"agent-role\">COP30 AI Agent </p>\n </div>\n\n <!-- Start Call (when idle only) -->\n <div *ngIf=\"callState === 'idle'\" class=\"start-call-section\">\n <p *ngIf=\"statusText === 'Connection failed'\" class=\"error-message\">\n {{ statusText }}\n </p>\n <button\n class=\"start-call-button\"\n type=\"button\"\n [disabled]=\"isConnecting\"\n (click)=\"startCall()\"\n >\n <span *ngIf=\"isConnecting\">Connecting...</span>\n <span *ngIf=\"!isConnecting && statusText === 'Connection failed'\"\n >Retry</span\n >\n <span *ngIf=\"!isConnecting && statusText !== 'Connection failed'\"\n >Start Call</span\n >\n </button>\n </div>\n\n <!-- Call Ended: status + Call Again / Back to Chat -->\n <div *ngIf=\"callState === 'ended'\" class=\"call-ended-section\">\n <p class=\"call-ended-status\">\n <span class=\"status-text\">Call Ended</span>\n <span class=\"status-timer\">{{ duration }}</span>\n </p>\n <div class=\"call-ended-controls\">\n <button\n class=\"action-btn\"\n type=\"button\"\n (click)=\"callAgain()\"\n title=\"Call Again\"\n >\n <svg\n width=\"18\"\n height=\"18\"\n viewBox=\"0 0 24 24\"\n fill=\"none\"\n stroke=\"currentColor\"\n stroke-width=\"2\"\n stroke-linecap=\"round\"\n stroke-linejoin=\"round\"\n xmlns=\"http://www.w3.org/2000/svg\"\n >\n <path d=\"M3 12a9 9 0 1 0 9-9 9.75 9.75 0 0 0-6.74 2.74L3 8\" />\n <path d=\"M3 3v5h5\" />\n </svg>\n Call Again\n </button>\n <button\n class=\"action-btn\"\n type=\"button\"\n (click)=\"backToChat()\"\n title=\"Back to Chat\"\n >\n <svg\n width=\"18\"\n height=\"18\"\n viewBox=\"0 0 24 24\"\n fill=\"none\"\n stroke=\"currentColor\"\n stroke-width=\"2\"\n stroke-linecap=\"round\"\n stroke-linejoin=\"round\"\n xmlns=\"http://www.w3.org/2000/svg\"\n >\n <path d=\"M21 15a2 2 0 0 1-2 2H7l-4 4V5a2 2 0 0 1 2-2h14a2 2 0 0 1 2 2z\" />\n </svg>\n Back to Chat\n </button>\n </div>\n </div>\n\n <!-- Status (when connecting or in-call: Talking... 
/ Listening / Connected + timer) -->\n <div\n class=\"status-indicator status-inline\"\n *ngIf=\"callState !== 'idle' && callState !== 'ended'\"\n >\n <div *ngIf=\"callState === 'connecting'\" class=\"status-connecting\">\n <svg\n class=\"spinner\"\n width=\"24\"\n height=\"24\"\n viewBox=\"0 0 24 24\"\n fill=\"none\"\n xmlns=\"http://www.w3.org/2000/svg\"\n >\n <circle\n cx=\"12\"\n cy=\"12\"\n r=\"10\"\n stroke=\"currentColor\"\n stroke-width=\"2\"\n stroke-linecap=\"round\"\n stroke-dasharray=\"31.416\"\n stroke-dashoffset=\"31.416\"\n >\n <animate\n attributeName=\"stroke-dasharray\"\n dur=\"2s\"\n values=\"0 31.416;15.708 15.708;0 31.416;0 31.416\"\n repeatCount=\"indefinite\"\n />\n <animate\n attributeName=\"stroke-dashoffset\"\n dur=\"2s\"\n values=\"0;-15.708;-31.416;-31.416\"\n repeatCount=\"indefinite\"\n />\n </circle>\n </svg>\n <span class=\"status-text\">{{ statusText }}</span>\n </div>\n <div\n *ngIf=\"callState !== 'connecting'\"\n class=\"status-connected status-inline-row\"\n >\n <span class=\"status-text\">{{ statusLabel }}</span>\n <span class=\"status-timer\">{{ duration }}</span>\n </div>\n </div>\n\n <!-- Waveform -->\n <div\n *ngIf=\"callState === 'listening'\"\n class=\"waveform-container\"\n >\n <div class=\"waveform-bars\">\n <div\n *ngFor=\"let level of audioLevels; let i = index\"\n class=\"waveform-bar\"\n [class.active]=\"isUserSpeaking\"\n [style.height.px]=\"getWaveformHeight(level, i)\"\n ></div>\n </div>\n </div>\n\n <!-- Call Controls (when connected) -->\n <div\n class=\"controls\"\n *ngIf=\"\n callState === 'connected' ||\n callState === 'listening' ||\n callState === 'talking'\n \"\n >\n <div\n style=\"\n display: flex;\n align-items: center;\n gap: 2px;\n flex-direction: column;\n \"\n >\n <button\n class=\"control-btn mic-btn\"\n [class.muted]=\"isMicMuted\"\n (click)=\"toggleMic()\"\n type=\"button\"\n [title]=\"isMicMuted ? 
'Unmute' : 'Mute'\"\n >\n <!-- Microphone icon (unmuted) -->\n <svg\n *ngIf=\"!isMicMuted\"\n width=\"24\"\n height=\"24\"\n viewBox=\"-5 0 32 32\"\n xmlns=\"http://www.w3.org/2000/svg\"\n fill=\"currentColor\"\n >\n <g transform=\"translate(-105, -307)\">\n <path\n d=\"M111,314 C111,311.238 113.239,309 116,309 C118.761,309 121,311.238 121,314 L121,324 C121,326.762 118.761,329 116,329 C113.239,329 111,326.762 111,324 L111,314 L111,314 Z M116,331 C119.866,331 123,327.866 123,324 L123,314 C123,310.134 119.866,307 116,307 C112.134,307 109,310.134 109,314 L109,324 C109,327.866 112.134,331 116,331 L116,331 Z M127,326 L125,326 C124.089,330.007 120.282,333 116,333 C111.718,333 107.911,330.007 107,326 L105,326 C105.883,330.799 110.063,334.51 115,334.955 L115,337 L114,337 C113.448,337 113,337.448 113,338 C113,338.553 113.448,339 114,339 L118,339 C118.552,339 119,338.553 119,338 C119,337.448 118.552,337 118,337 L117,337 L117,334.955 C121.937,334.51 126.117,330.799 127,326 L127,326 Z\"\n />\n </g>\n </svg>\n <!-- Microphone icon (muted) -->\n <svg\n *ngIf=\"isMicMuted\"\n width=\"24\"\n height=\"24\"\n viewBox=\"-5 0 32 32\"\n xmlns=\"http://www.w3.org/2000/svg\"\n fill=\"currentColor\"\n >\n <g transform=\"translate(-105, -307)\">\n <path\n d=\"M111,314 C111,311.238 113.239,309 116,309 C118.761,309 121,311.238 121,314 L121,324 C121,326.762 118.761,329 116,329 C113.239,329 111,326.762 111,324 L111,314 L111,314 Z M116,331 C119.866,331 123,327.866 123,324 L123,314 C123,310.134 119.866,307 116,307 C112.134,307 109,310.134 109,314 L109,324 C109,327.866 112.134,331 116,331 L116,331 Z M127,326 L125,326 C124.089,330.007 120.282,333 116,333 C111.718,333 107.911,330.007 107,326 L105,326 C105.883,330.799 110.063,334.51 115,334.955 L115,337 L114,337 C113.448,337 113,337.448 113,338 C113,338.553 113.448,339 114,339 L118,339 C118.552,339 119,338.553 119,338 C119,337.448 118.552,337 118,337 L117,337 L117,334.955 C121.937,334.51 126.117,330.799 127,326 L127,326 Z\"\n />\n </g>\n <path\n d=\"M2 2 L30 30\"\n stroke=\"currentColor\"\n stroke-width=\"2\"\n stroke-linecap=\"round\"\n />\n </svg>\n </button>\n <span class=\"control-label\">Mute</span>\n </div>\n\n <div\n style=\"\n display: flex;\n align-items: center;\n gap: 2px;\n flex-direction: column;\n \"\n >\n <button\n class=\"control-btn end-call-btn\"\n (click)=\"hangUp()\"\n type=\"button\"\n title=\"End Call\"\n >\n <svg\n width=\"24\"\n height=\"24\"\n viewBox=\"0 0 24 24\"\n fill=\"none\"\n xmlns=\"http://www.w3.org/2000/svg\"\n >\n <path\n d=\"M18 6L6 18M6 6L18 18\"\n stroke=\"currentColor\"\n stroke-width=\"2\"\n stroke-linecap=\"round\"\n />\n </svg>\n </button>\n <span class=\"control-label\">End Call</span>\n </div>\n </div>\n </div>\n</div>\n",
+ styles: [":host{display:block}.voice-agent-modal-overlay{align-items:flex-end;backdrop-filter:blur(4px);background:rgba(0,0,0,.5);bottom:0;display:flex;font-family:Segoe UI,Tahoma,Geneva,Verdana,sans-serif;justify-content:flex-end;left:0;padding:24px;position:fixed;right:0;top:0;z-index:99999}.voice-container.voice-agent-modal{align-items:center;animation:modalEnter .3s ease-out;background:#fff;border-radius:30px;box-shadow:0 10px 40px rgba(0,0,0,.1);display:flex;flex-direction:column;max-width:440px;min-height:600px;padding:30px;position:relative;text-align:center;width:100%}@keyframes modalEnter{0%{opacity:0;transform:translateY(20px)}to{opacity:1;transform:translateY(0)}}.header{justify-content:space-between;margin-bottom:5px;width:100%}.header,.header-left{align-items:center;display:flex}.header-left{gap:8px}.header-icon{align-items:center;background:#0f172a;border-radius:50%;color:#fff;display:flex;height:28px;justify-content:center;width:28px}.header-title{color:#0f172a;font-size:18px;font-weight:500}.close-button{align-items:center;background:none;border:none;color:#0f172a;cursor:pointer;display:flex;justify-content:center;padding:8px;transition:color .2s}.close-button:hover{color:#475569}.avatar-section{margin-bottom:24px;position:relative}.avatar-wrapper{align-items:center;background:#0ea5a4;background:linear-gradient(135deg,#ccfbf1,#0ea5a4);border-radius:50%;display:flex;height:180px;justify-content:center;padding:6px;position:relative;width:180px}.avatar-image{-o-object-fit:cover;border:4px solid #fff;border-radius:50%;height:100%;object-fit:cover;width:100%}.avatar-glow{background:radial-gradient(circle,rgba(14,165,164,.2) 0,transparent 70%);height:240px;left:50%;pointer-events:none;position:absolute;top:50%;transform:translate(-50%,-50%);width:240px;z-index:-1}.avatar-wrapper.speaking{animation:avatarPulse 2s ease-in-out infinite}@keyframes avatarPulse{0%,to{box-shadow:0 0 0 0 rgba(14,165,164,.4)}50%{box-shadow:0 0 0 15px rgba(14,165,164,0)}}.agent-info{margin-bottom:40px}.agent-name{align-items:center;color:#0f172a;display:flex;font-size:24px;font-weight:700;gap:8px;justify-content:center;margin-bottom:8px}.ai-badge{background:#0ea5a4;border-radius:6px;color:#fff;font-size:10px;font-weight:700;padding:2px 6px}.agent-role{color:#0f172a;font-size:16px;font-weight:500;margin:0}.start-call-section{align-items:center;display:flex;flex-direction:column;gap:16px;margin-bottom:24px}.error-message{color:#dc2626;font-size:14px;margin:0}.start-call-button{background:#0ea5a4;border:none;border-radius:12px;color:#fff;cursor:pointer;font-size:16px;font-weight:600;padding:14px 32px;transition:background .2s}.start-call-button:hover:not(:disabled){background:#0d9488}.start-call-button:disabled{cursor:not-allowed!important;opacity:.7}.status-indicator{justify-content:center;margin-bottom:10px}.status-connecting,.status-indicator{align-items:center;display:flex;gap:12px}.spinner{animation:spin 1s linear infinite;color:#0ea5a4}@keyframes spin{0%{transform:rotate(0deg)}to{transform:rotate(1turn)}}.status-text{font-weight:400}.status-text,.status-timer{color:#0f172a;font-size:16px}.status-timer{font-weight:500}.status-connected{align-items:center;display:flex;flex-direction:column;gap:4px}.status-inline 
.status-inline-row{align-items:center;flex-direction:row;gap:8px}.call-ended-section{align-items:center;display:flex;flex-direction:column;gap:16px;margin-bottom:24px}.call-ended-status{align-items:center;color:#0f172a;display:flex;font-size:16px;gap:8px;justify-content:center;margin:0}.call-ended-status .status-text{font-weight:400}.call-ended-status .status-timer{font-weight:500}.call-ended-controls{align-items:center;display:flex;flex-wrap:wrap;gap:16px;justify-content:center}.action-btn{align-items:center;background:#fff;border:1px solid #e2e8f0;border-radius:24px;color:#0f172a;cursor:pointer;display:flex;font-size:14px;font-weight:500;gap:8px;padding:12px 24px;transition:background .2s ease}.action-btn:hover{background:#f8fafc}.waveform-container{margin-bottom:10px;padding:0 8px}.waveform-bars,.waveform-container{align-items:center;display:flex;height:56px;justify-content:center;width:100%}.waveform-bars{gap:2px}.waveform-bar{background:#cbd5e1;border-radius:99px;flex:0 0 2px;min-height:2px;transition:height .1s ease-out;width:2px}.waveform-bar.active{background:linear-gradient(180deg,#0ea5a4,#0d9488);box-shadow:0 0 4px rgba(14,165,164,.5)}.controls{gap:24px;width:100%}.control-btn,.controls{align-items:center;display:flex;justify-content:center}.control-btn{border:none;border-radius:50%;cursor:pointer;flex-direction:column;gap:4px;height:60px;transition:transform .2s ease;width:60px}.control-btn:hover{transform:scale(1.05)}.control-btn:active{transform:scale(.95)}.control-label{color:#0f172a;font-size:12px;font-weight:500}.mic-btn{background:#e2e8f0}.mic-btn,.mic-btn .control-label{color:#475569}.mic-btn.muted{background:#e2e8f0;color:#475569}.end-call-btn{background:#ef4444;color:#fff}.end-call-btn .control-label{color:#fff}.end-call-btn:hover{background:#dc2626}"]
  },] }
  ];
  VoiceAgentModalComponent.ctorParameters = function () { return [
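
Reader's note on the template hunk above: both versions branch on the same six-value call-state machine; the 0.0.586 template mainly renames the speech bindings (isBotTalking becomes isSpeaking, isUserActive becomes isUserSpeaking), restricts the waveform to the 'listening' state, and drops the particle ring, glow variants, voice visualizer, and processing dots. A sketch of the state union those bindings imply (inferred from the template, since the component's own declaration is minified away):

type CallState =
  | 'idle'        // Start Call / Retry button visible
  | 'connecting'  // spinner plus statusText
  | 'connected'   // status row with timer
  | 'listening'   // waveform reacts to the user's audio levels
  | 'talking'     // bot audio is playing
  | 'ended';      // Call Ended summary with Call Again / Back to Chat
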
@@ -5857,8 +6255,8 @@
  };
 
  /**
- * Voice agent module. Uses @pipecat-ai/client-js + @pipecat-ai/websocket-transport
- * (peer dependencies) for WebSocket transport, RTVI protocol, and audio.
+ * Voice agent module. Uses native WebSocket + Daily.js only.
+ * Does NOT use Socket.IO or ngx-socket-io.
  */
  var VoiceAgentModule = /** @class */ (function () {
  function VoiceAgentModule() {
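
The rewritten doc comment records the transport swap: the @pipecat-ai/client-js and @pipecat-ai/websocket-transport peer dependencies are dropped in favor of the browser's native WebSocket API (WebSocketVoiceClientService) and Daily.js (DailyVoiceClientService). A minimal sketch of the native-WebSocket half of that pattern; the class name, method names, and binary wire format below are assumptions, since the real service appears in this diff only in minified form:

// Hypothetical skeleton of a native-WebSocket voice client.
class VoiceSocketSketch {
  private ws?: WebSocket;

  connect(url: string, onAudio: (frame: ArrayBuffer) => void): Promise<void> {
    return new Promise((resolve, reject) => {
      const ws = new WebSocket(url);
      ws.binaryType = 'arraybuffer';     // expect raw binary audio frames
      ws.onopen = () => resolve();
      ws.onerror = (ev) => reject(ev);
      ws.onmessage = (ev) => {
        if (ev.data instanceof ArrayBuffer) {
          onAudio(ev.data);              // hand bot audio to the playback pipeline
        }
      };
      this.ws = ws;
    });
  }

  sendAudio(chunk: ArrayBuffer): void {
    // Silently drop chunks while the socket is not open.
    if (this.ws?.readyState === WebSocket.OPEN) {
      this.ws.send(chunk);
    }
  }

  disconnect(): void {
    this.ws?.close();
    this.ws = undefined;
  }
}
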
@@ -5876,6 +6274,8 @@
  providers: [
  VoiceAgentService,
  AudioAnalyzerService,
+ WebSocketVoiceClientService,
+ DailyVoiceClientService
  ],
  exports: [
  VoiceAgentModalComponent
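
With both new client services registered in the module's providers, a consumer only needs to import the module to inject either client. A usage sketch, assuming VoiceAgentModule is re-exported from the package's public entry point:

import { NgModule } from '@angular/core';
import { VoiceAgentModule } from '@hivegpt/hiveai-angular';

@NgModule({
  // Makes WebSocketVoiceClientService and DailyVoiceClientService
  // injectable without any extra provider wiring.
  imports: [VoiceAgentModule],
})
export class AppModule {}
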
@@ -6176,9 +6576,11 @@
  exports["ɵc"] = ConversationService;
  exports["ɵd"] = NotificationSocket;
  exports["ɵe"] = TranslationService;
- exports["ɵf"] = VideoPlayerComponent;
- exports["ɵg"] = SafeHtmlPipe;
- exports["ɵh"] = BotHtmlEditorComponent;
+ exports["ɵf"] = WebSocketVoiceClientService;
+ exports["ɵg"] = DailyVoiceClientService;
+ exports["ɵh"] = VideoPlayerComponent;
+ exports["ɵi"] = SafeHtmlPipe;
+ exports["ɵj"] = BotHtmlEditorComponent;
 
  Object.defineProperty(exports, '__esModule', { value: true });
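
A caution implied by the export hunk above: the ɵ-prefixed names follow Angular's convention for private API, and this release reassigns them (ɵf pointed at VideoPlayerComponent in 0.0.585 but points at WebSocketVoiceClientService in 0.0.586, with ɵg through ɵj shifting accordingly). Any consumer reaching into these internals breaks silently on upgrade:

// Fragile: the meaning of ɵf changed between 0.0.585 and 0.0.586.
import { ɵf } from '@hivegpt/hiveai-angular';

// Safer, assuming the component is also re-exported under its public name:
import { VideoPlayerComponent } from '@hivegpt/hiveai-angular';
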