@primoia/vocall-react 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs ADDED
@@ -0,0 +1,2056 @@
1
+ "use strict";
2
+ var __defProp = Object.defineProperty;
3
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
4
+ var __getOwnPropNames = Object.getOwnPropertyNames;
5
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
6
+ var __export = (target, all) => {
7
+ for (var name in all)
8
+ __defProp(target, name, { get: all[name], enumerable: true });
9
+ };
10
+ var __copyProps = (to, from, except, desc) => {
11
+ if (from && typeof from === "object" || typeof from === "function") {
12
+ for (let key of __getOwnPropNames(from))
13
+ if (!__hasOwnProp.call(to, key) && key !== except)
14
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
15
+ }
16
+ return to;
17
+ };
18
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
19
+
20
+ // src/index.ts
21
+ var index_exports = {};
22
+ __export(index_exports, {
23
+ ChatRole: () => ChatRole,
24
+ FieldType: () => FieldType,
25
+ FrameSplitter: () => FrameSplitter,
26
+ VocallChat: () => VocallChat,
27
+ VocallClient: () => VocallClient,
28
+ VocallFab: () => VocallFab,
29
+ VocallProvider: () => VocallProvider,
30
+ VocallStatus: () => VocallStatus,
31
+ VocallStatusPill: () => VocallStatusPill,
32
+ WebVoiceService: () => WebVoiceService,
33
+ useVocall: () => useVocall,
34
+ useVocallAction: () => useVocallAction,
35
+ useVocallClient: () => useVocallClient,
36
+ useVocallField: () => useVocallField,
37
+ useVocallVoice: () => useVocallVoice
38
+ });
39
+ module.exports = __toCommonJS(index_exports);
40
+
41
+ // src/protocol/types.ts
42
+ var FieldType = /* @__PURE__ */ ((FieldType2) => {
43
+ FieldType2["Text"] = "text";
44
+ FieldType2["Number"] = "number";
45
+ FieldType2["Currency"] = "currency";
46
+ FieldType2["Date"] = "date";
47
+ FieldType2["Datetime"] = "datetime";
48
+ FieldType2["Email"] = "email";
49
+ FieldType2["Phone"] = "phone";
50
+ FieldType2["Masked"] = "masked";
51
+ FieldType2["Select"] = "select";
52
+ FieldType2["Autocomplete"] = "autocomplete";
53
+ FieldType2["Checkbox"] = "checkbox";
54
+ FieldType2["Radio"] = "radio";
55
+ FieldType2["Textarea"] = "textarea";
56
+ FieldType2["File"] = "file";
57
+ FieldType2["Hidden"] = "hidden";
58
+ return FieldType2;
59
+ })(FieldType || {});
60
+ var ChatRole = /* @__PURE__ */ ((ChatRole2) => {
61
+ ChatRole2["User"] = "user";
62
+ ChatRole2["Agent"] = "agent";
63
+ ChatRole2["System"] = "system";
64
+ return ChatRole2;
65
+ })(ChatRole || {});
66
+ var VocallStatus = /* @__PURE__ */ ((VocallStatus2) => {
67
+ VocallStatus2["Disconnected"] = "disconnected";
68
+ VocallStatus2["Idle"] = "idle";
69
+ VocallStatus2["Listening"] = "listening";
70
+ VocallStatus2["Recording"] = "recording";
71
+ VocallStatus2["Thinking"] = "thinking";
72
+ VocallStatus2["Speaking"] = "speaking";
73
+ VocallStatus2["Executing"] = "executing";
74
+ return VocallStatus2;
75
+ })(VocallStatus || {});
76
+
77
+ // src/voice/frame-splitter.ts
78
+ var FrameSplitter = class {
79
+ constructor(frameSizeBytes = 640) {
80
+ this.offset = 0;
81
+ this.frameSizeBytes = frameSizeBytes;
82
+ this.buffer = new Uint8Array(frameSizeBytes);
83
+ }
84
+ /**
85
+ * Feed arbitrary-length PCM bytes into the splitter.
86
+ * Whenever a full frame is assembled, `emit` is called with a copy.
87
+ */
88
+ feed(pcmBytes, emit) {
89
+ let srcPos = 0;
90
+ const srcLen = pcmBytes.length;
91
+ while (srcPos < srcLen) {
92
+ const remaining = this.frameSizeBytes - this.offset;
93
+ const available = srcLen - srcPos;
94
+ const toCopy = Math.min(available, remaining);
95
+ this.buffer.set(pcmBytes.subarray(srcPos, srcPos + toCopy), this.offset);
96
+ this.offset += toCopy;
97
+ srcPos += toCopy;
98
+ if (this.offset === this.frameSizeBytes) {
99
+ emit(new Uint8Array(this.buffer));
100
+ this.offset = 0;
101
+ }
102
+ }
103
+ }
104
+ /** Reset the internal buffer, discarding any partial frame. */
105
+ reset() {
106
+ this.offset = 0;
107
+ }
108
+ /** Number of bytes currently buffered (incomplete frame). */
109
+ get buffered() {
110
+ return this.offset;
111
+ }
112
+ };
113
+
114
+ // src/voice/web-voice-service.ts
115
+ var TARGET_SAMPLE_RATE = 16e3;
116
+ var FRAME_SIZE_BYTES = 640;
117
+ var SCRIPT_BUFFER_SIZE = 4096;
118
+ var FADE_IN_SAMPLES = 128;
119
+ var SILENT_FRAME = new Uint8Array(FRAME_SIZE_BYTES);
120
+ var WebVoiceService = class {
121
+ constructor() {
122
+ // -------------------------------------------------------------------------
123
+ // Public state
124
+ // -------------------------------------------------------------------------
125
+ this.onAudioLevel = null;
126
+ this.onPlaybackComplete = null;
127
+ this._isCapturing = false;
128
+ this._isPlaying = false;
129
+ this._isMonitoring = false;
130
+ this._isMuted = false;
131
+ // -------------------------------------------------------------------------
132
+ // Private capture state
133
+ // -------------------------------------------------------------------------
134
+ this._audioCtx = null;
135
+ this._stream = null;
136
+ this._sourceNode = null;
137
+ this._scriptNode = null;
138
+ this._splitter = new FrameSplitter(FRAME_SIZE_BYTES);
139
+ this._sendChunk = null;
140
+ // -------------------------------------------------------------------------
141
+ // Private playback state
142
+ // -------------------------------------------------------------------------
143
+ this._playbackCtx = null;
144
+ this._playbackQueue = [];
145
+ this._currentSource = null;
146
+ this._pendingDecodes = 0;
147
+ }
148
+ get isSupported() {
149
+ return typeof navigator !== "undefined" && typeof navigator.mediaDevices !== "undefined" && typeof navigator.mediaDevices.getUserMedia === "function" && typeof AudioContext !== "undefined";
150
+ }
151
+ get isCapturing() {
152
+ return this._isCapturing;
153
+ }
154
+ get isPlaying() {
155
+ return this._isPlaying;
156
+ }
157
+ get isMonitoring() {
158
+ return this._isMonitoring;
159
+ }
160
+ // -------------------------------------------------------------------------
161
+ // Capture
162
+ // -------------------------------------------------------------------------
163
+ async startCapture(sendChunk) {
164
+ if (this._isCapturing) return;
165
+ this._sendChunk = sendChunk;
166
+ await this._initCapturePipeline(false);
167
+ this._isCapturing = true;
168
+ }
169
+ stopCapture() {
170
+ this._teardownCapturePipeline();
171
+ this._isCapturing = false;
172
+ this._sendChunk = null;
173
+ }
174
+ // -------------------------------------------------------------------------
175
+ // Monitor mode (capture pipeline without sending)
176
+ // -------------------------------------------------------------------------
177
+ async startMonitor() {
178
+ if (this._isMonitoring) return;
179
+ await this._initCapturePipeline(true);
180
+ this._isMonitoring = true;
181
+ }
182
+ stopMonitor() {
183
+ this._teardownCapturePipeline();
184
+ this._isMonitoring = false;
185
+ }
186
+ // -------------------------------------------------------------------------
187
+ // Mute
188
+ // -------------------------------------------------------------------------
189
+ muteMic(muted) {
190
+ this._isMuted = muted;
191
+ if (this._stream) {
192
+ const tracks = this._stream.getAudioTracks();
193
+ for (const track of tracks) {
194
+ track.enabled = !muted;
195
+ }
196
+ }
197
+ }
198
+ // -------------------------------------------------------------------------
199
+ // Playback
200
+ // -------------------------------------------------------------------------
201
+ playAudio(wavData) {
202
+ this._playbackQueue.push(wavData);
203
+ this._pendingDecodes++;
204
+ this._isPlaying = true;
205
+ this._decodeAndEnqueue(wavData);
206
+ }
207
+ stopPlayback() {
208
+ if (this._currentSource) {
209
+ try {
210
+ this._currentSource.onended = null;
211
+ this._currentSource.stop();
212
+ } catch {
213
+ }
214
+ this._currentSource = null;
215
+ }
216
+ this._playbackQueue = [];
217
+ this._pendingDecodes = 0;
218
+ this._isPlaying = false;
219
+ }
220
+ // -------------------------------------------------------------------------
221
+ // Dispose
222
+ // -------------------------------------------------------------------------
223
+ dispose() {
224
+ this.stopCapture();
225
+ this.stopMonitor();
226
+ this.stopPlayback();
227
+ if (this._audioCtx) {
228
+ this._audioCtx.close().catch(() => {
229
+ });
230
+ this._audioCtx = null;
231
+ }
232
+ if (this._playbackCtx) {
233
+ this._playbackCtx.close().catch(() => {
234
+ });
235
+ this._playbackCtx = null;
236
+ }
237
+ this.onAudioLevel = null;
238
+ this.onPlaybackComplete = null;
239
+ }
240
+ // -------------------------------------------------------------------------
241
+ // Private: capture pipeline
242
+ // -------------------------------------------------------------------------
243
+ async _initCapturePipeline(monitorOnly) {
244
+ this._stream = await navigator.mediaDevices.getUserMedia({
245
+ audio: {
246
+ channelCount: 1,
247
+ echoCancellation: true,
248
+ noiseSuppression: false,
249
+ autoGainControl: true
250
+ }
251
+ });
252
+ if (!this._audioCtx) {
253
+ this._audioCtx = new AudioContext();
254
+ }
255
+ if (this._audioCtx.state === "suspended") {
256
+ await this._audioCtx.resume();
257
+ }
258
+ const nativeRate = this._audioCtx.sampleRate;
259
+ this._sourceNode = this._audioCtx.createMediaStreamSource(this._stream);
260
+ this._scriptNode = this._audioCtx.createScriptProcessor(SCRIPT_BUFFER_SIZE, 1, 1);
261
+ this._splitter.reset();
262
+ this._scriptNode.onaudioprocess = (event) => {
263
+ const inputData = event.inputBuffer.getChannelData(0);
264
+ this._computeAndEmitLevel(inputData);
265
+ if (this._isMuted) {
266
+ if (!monitorOnly && this._sendChunk) {
267
+ const silentPcm = this._downsampleToS16LE(new Float32Array(inputData.length), nativeRate);
268
+ this._splitter.feed(silentPcm, (frame) => {
269
+ this._sendChunk(new Uint8Array(SILENT_FRAME));
270
+ });
271
+ }
272
+ return;
273
+ }
274
+ const pcmBytes = this._downsampleToS16LE(inputData, nativeRate);
275
+ this._splitter.feed(pcmBytes, (frame) => {
276
+ if (!monitorOnly && this._sendChunk) {
277
+ this._sendChunk(frame);
278
+ }
279
+ });
280
+ };
281
+ this._sourceNode.connect(this._scriptNode);
282
+ this._scriptNode.connect(this._audioCtx.destination);
283
+ }
284
+ _teardownCapturePipeline() {
285
+ if (this._scriptNode) {
286
+ this._scriptNode.onaudioprocess = null;
287
+ this._scriptNode.disconnect();
288
+ this._scriptNode = null;
289
+ }
290
+ if (this._sourceNode) {
291
+ this._sourceNode.disconnect();
292
+ this._sourceNode = null;
293
+ }
294
+ if (this._stream) {
295
+ for (const track of this._stream.getTracks()) {
296
+ track.stop();
297
+ }
298
+ this._stream = null;
299
+ }
300
+ this._splitter.reset();
301
+ this._isMuted = false;
302
+ }
303
+ // -------------------------------------------------------------------------
304
+ // Private: audio processing
305
+ // -------------------------------------------------------------------------
306
+ /**
307
+ * Downsample Float32 audio at native sample rate to S16LE at TARGET_SAMPLE_RATE
308
+ * using linear interpolation.
309
+ */
310
+ _downsampleToS16LE(input, fromRate) {
311
+ const ratio = fromRate / TARGET_SAMPLE_RATE;
312
+ const outputLen = Math.floor(input.length / ratio);
313
+ const output = new Uint8Array(outputLen * 2);
314
+ const view = new DataView(output.buffer);
315
+ for (let i = 0; i < outputLen; i++) {
316
+ const srcIdx = i * ratio;
317
+ const srcFloor = Math.floor(srcIdx);
318
+ const srcCeil = Math.min(srcFloor + 1, input.length - 1);
319
+ const frac = srcIdx - srcFloor;
320
+ let sample = input[srcFloor] + (input[srcCeil] - input[srcFloor]) * frac;
321
+ if (sample > 1) sample = 1;
322
+ else if (sample < -1) sample = -1;
323
+ const s16 = Math.round(sample * 32767);
324
+ view.setInt16(i * 2, s16, true);
325
+ }
326
+ return output;
327
+ }
328
+ /**
329
+ * Compute RMS level from Float32 audio samples.
330
+ * Result is scaled by 4 and clamped to [0, 1].
331
+ */
332
+ _computeAndEmitLevel(samples) {
333
+ if (!this.onAudioLevel) return;
334
+ let sum = 0;
335
+ for (let i = 0; i < samples.length; i++) {
336
+ sum += samples[i] * samples[i];
337
+ }
338
+ const rms = Math.sqrt(sum / samples.length);
339
+ const level = Math.min(rms * 4, 1);
340
+ this.onAudioLevel(level);
341
+ }
342
+ // -------------------------------------------------------------------------
343
+ // Private: playback pipeline
344
+ // -------------------------------------------------------------------------
345
+ async _decodeAndEnqueue(wavData) {
346
+ if (!this._playbackCtx) {
347
+ this._playbackCtx = new AudioContext();
348
+ }
349
+ if (this._playbackCtx.state === "suspended") {
350
+ await this._playbackCtx.resume();
351
+ }
352
+ try {
353
+ const arrayBuffer = wavData.buffer.slice(
354
+ wavData.byteOffset,
355
+ wavData.byteOffset + wavData.byteLength
356
+ );
357
+ const audioBuffer = await this._playbackCtx.decodeAudioData(arrayBuffer);
358
+ this._applyFadeIn(audioBuffer);
359
+ this._pendingDecodes--;
360
+ if (!this._isPlaying) return;
361
+ if (!this._currentSource) {
362
+ this._playBuffer(audioBuffer);
363
+ }
364
+ } catch {
365
+ this._pendingDecodes--;
366
+ this._checkPlaybackComplete();
367
+ }
368
+ }
369
+ _applyFadeIn(audioBuffer) {
370
+ const fadeLen = Math.min(FADE_IN_SAMPLES, audioBuffer.length);
371
+ for (let ch = 0; ch < audioBuffer.numberOfChannels; ch++) {
372
+ const data = audioBuffer.getChannelData(ch);
373
+ for (let i = 0; i < fadeLen; i++) {
374
+ data[i] *= i / fadeLen;
375
+ }
376
+ }
377
+ }
378
+ _playBuffer(audioBuffer) {
379
+ if (!this._playbackCtx) return;
380
+ const source = this._playbackCtx.createBufferSource();
381
+ source.buffer = audioBuffer;
382
+ source.connect(this._playbackCtx.destination);
383
+ source.onended = () => {
384
+ this._currentSource = null;
385
+ this._playNextChunk();
386
+ };
387
+ this._currentSource = source;
388
+ source.start();
389
+ }
390
+ _playNextChunk() {
391
+ if (this._playbackQueue.length > 0) {
392
+ this._playbackQueue.shift();
393
+ }
394
+ if (this._playbackQueue.length > 0) {
395
+ const nextData = this._playbackQueue[0];
396
+ this._pendingDecodes++;
397
+ this._decodeAndEnqueue(nextData);
398
+ } else {
399
+ this._checkPlaybackComplete();
400
+ }
401
+ }
402
+ _checkPlaybackComplete() {
403
+ if (this._playbackQueue.length === 0 && this._pendingDecodes <= 0) {
404
+ this._isPlaying = false;
405
+ this.onPlaybackComplete?.();
406
+ }
407
+ }
408
+ };
409
+
410
+ // src/client/vocall-client.ts
411
+ var TARGET_SAMPLE_RATE2 = 16e3;
412
+ var _VocallClient = class _VocallClient {
413
+ // -----------------------------------------------------------------------
414
+ // Constructor
415
+ // -----------------------------------------------------------------------
416
+ constructor(serverUrl, options) {
417
+ // -----------------------------------------------------------------------
418
+ // Observable state (getters + onChange)
419
+ // -----------------------------------------------------------------------
420
+ this._status = "disconnected" /* Disconnected */;
421
+ this._connected = false;
422
+ this._messages = [];
423
+ this._sessionId = null;
424
+ // -----------------------------------------------------------------------
425
+ // Voice state (public getters)
426
+ // -----------------------------------------------------------------------
427
+ this._voiceEnabled = false;
428
+ this._voiceState = "idle";
429
+ this._recording = false;
430
+ this._partialTranscription = null;
431
+ this._audioLevel = 0;
432
+ // -----------------------------------------------------------------------
433
+ // Voice internals
434
+ // -----------------------------------------------------------------------
435
+ this._voiceWs = null;
436
+ this._alwaysListening = false;
437
+ this._ttsActive = false;
438
+ this._ttsEndReceived = false;
439
+ this._llmDone = false;
440
+ this._pendingTtsChunks = 0;
441
+ this._voice = null;
442
+ // -----------------------------------------------------------------------
443
+ // Change notification (React hooks subscribe here)
444
+ // -----------------------------------------------------------------------
445
+ this._listeners = /* @__PURE__ */ new Set();
446
+ // -----------------------------------------------------------------------
447
+ // Callbacks (set by the host app)
448
+ // -----------------------------------------------------------------------
449
+ this.onNavigate = null;
450
+ this.onToast = null;
451
+ this.onConfirm = null;
452
+ this.onOpenModal = null;
453
+ this.onCloseModal = null;
454
+ // -----------------------------------------------------------------------
455
+ // Field & action registries
456
+ // -----------------------------------------------------------------------
457
+ this._fields = /* @__PURE__ */ new Map();
458
+ this._actions = /* @__PURE__ */ new Map();
459
+ // -----------------------------------------------------------------------
460
+ // Internal state
461
+ // -----------------------------------------------------------------------
462
+ this._ws = null;
463
+ this._manifest = null;
464
+ this._tokenBuffer = "";
465
+ this._streamingMessage = null;
466
+ this._reconnectTimer = null;
467
+ this._reconnectAttempts = 0;
468
+ this._intentionalDisconnect = false;
469
+ this._pendingConfirmSeq = -1;
470
+ this.serverUrl = serverUrl;
471
+ this.token = options?.token;
472
+ this._visitorId = options?.visitorId ?? _VocallClient._loadOrCreateVisitorId();
473
+ }
474
+ get visitorId() {
475
+ return this._visitorId;
476
+ }
477
+ set visitorId(id) {
478
+ this._visitorId = id;
479
+ }
480
+ get status() {
481
+ return this._status;
482
+ }
483
+ get connected() {
484
+ return this._connected;
485
+ }
486
+ get messages() {
487
+ return this._messages;
488
+ }
489
+ get sessionId() {
490
+ return this._sessionId;
491
+ }
492
+ get voiceEnabled() {
493
+ return this._voiceEnabled;
494
+ }
495
+ /** Whether the platform supports voice capture (Web Audio API). */
496
+ get voiceSupported() {
497
+ return this._getVoice().isSupported;
498
+ }
499
+ get voiceState() {
500
+ return this._voiceState;
501
+ }
502
+ get recording() {
503
+ return this._recording;
504
+ }
505
+ get partialTranscription() {
506
+ return this._partialTranscription;
507
+ }
508
+ get audioLevel() {
509
+ return this._audioLevel;
510
+ }
511
+ _getVoice() {
512
+ if (!this._voice) {
513
+ this._voice = new WebVoiceService();
514
+ this._voice.onAudioLevel = (level) => {
515
+ this._audioLevel = level;
516
+ this._notify();
517
+ };
518
+ this._voice.onPlaybackComplete = () => {
519
+ this._onPlaybackComplete();
520
+ };
521
+ }
522
+ return this._voice;
523
+ }
524
+ subscribe(listener) {
525
+ this._listeners.add(listener);
526
+ return () => {
527
+ this._listeners.delete(listener);
528
+ };
529
+ }
530
+ _notify() {
531
+ for (const listener of this._listeners) {
532
+ listener();
533
+ }
534
+ }
535
+ // -----------------------------------------------------------------------
536
+ // Visitor ID persistence
537
+ // -----------------------------------------------------------------------
538
+ static _generateUuid() {
539
+ if (typeof crypto !== "undefined" && crypto.randomUUID) {
540
+ return crypto.randomUUID();
541
+ }
542
+ return "xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".replace(/[xy]/g, (c) => {
543
+ const r = Math.random() * 16 | 0;
544
+ const v = c === "x" ? r : r & 3 | 8;
545
+ return v.toString(16);
546
+ });
547
+ }
548
+ static _loadOrCreateVisitorId() {
549
+ const STORAGE_KEY = "vocall_visitor_id";
550
+ try {
551
+ const existing = localStorage.getItem(STORAGE_KEY);
552
+ if (existing) return existing;
553
+ const id = _VocallClient._generateUuid();
554
+ localStorage.setItem(STORAGE_KEY, id);
555
+ return id;
556
+ } catch {
557
+ return _VocallClient._generateUuid();
558
+ }
559
+ }
560
+ // -----------------------------------------------------------------------
561
+ // Field & action registration
562
+ // -----------------------------------------------------------------------
563
+ registerField(screenId, fieldId, entry) {
564
+ if (!this._fields.has(screenId)) {
565
+ this._fields.set(screenId, /* @__PURE__ */ new Map());
566
+ }
567
+ this._fields.get(screenId).set(fieldId, entry);
568
+ }
569
+ unregisterField(screenId, fieldId) {
570
+ this._fields.get(screenId)?.delete(fieldId);
571
+ if (this._fields.get(screenId)?.size === 0) {
572
+ this._fields.delete(screenId);
573
+ }
574
+ }
575
+ unregisterScreen(screenId) {
576
+ this._fields.delete(screenId);
577
+ this._actions.delete(screenId);
578
+ }
579
+ registerAction(screenId, actionId, callback) {
580
+ if (!this._actions.has(screenId)) {
581
+ this._actions.set(screenId, /* @__PURE__ */ new Map());
582
+ }
583
+ this._actions.get(screenId).set(actionId, callback);
584
+ }
585
+ unregisterAction(screenId, actionId) {
586
+ this._actions.get(screenId)?.delete(actionId);
587
+ }
588
+ findField(fieldId) {
589
+ for (const screen of this._fields.values()) {
590
+ if (screen.has(fieldId)) return screen.get(fieldId);
591
+ }
592
+ return void 0;
593
+ }
594
+ findAction(actionId) {
595
+ for (const screen of this._actions.values()) {
596
+ if (screen.has(actionId)) return screen.get(actionId);
597
+ }
598
+ return void 0;
599
+ }
600
+ // -----------------------------------------------------------------------
601
+ // Connection
602
+ // -----------------------------------------------------------------------
603
+ connect(manifest) {
604
+ this._manifest = manifest;
605
+ this._intentionalDisconnect = false;
606
+ this._doConnect();
607
+ }
608
+ _doConnect() {
609
+ this.disconnect(false);
610
+ const url = new URL(this.serverUrl);
611
+ url.searchParams.set("visitor_id", this._visitorId);
612
+ if (this.token) {
613
+ url.searchParams.set("token", this.token);
614
+ }
615
+ try {
616
+ this._ws = new WebSocket(url.toString());
617
+ this._ws.onmessage = (event) => {
618
+ this._onMessage(event.data);
619
+ };
620
+ this._ws.onclose = () => {
621
+ this._onDone();
622
+ };
623
+ this._ws.onerror = () => {
624
+ this._onError();
625
+ };
626
+ } catch {
627
+ this._scheduleReconnect();
628
+ }
629
+ }
630
+ disconnect(intentional = true) {
631
+ if (intentional) this._intentionalDisconnect = true;
632
+ if (this._reconnectTimer) {
633
+ clearTimeout(this._reconnectTimer);
634
+ this._reconnectTimer = null;
635
+ }
636
+ this._closeVoiceWs();
637
+ if (this._voice) {
638
+ this._voice.stopCapture();
639
+ this._voice.stopPlayback();
640
+ }
641
+ this._voiceState = "idle";
642
+ this._recording = false;
643
+ this._partialTranscription = null;
644
+ this._audioLevel = 0;
645
+ this._alwaysListening = false;
646
+ this._resetTtsState();
647
+ if (this._ws) {
648
+ this._ws.onclose = null;
649
+ this._ws.onerror = null;
650
+ this._ws.onmessage = null;
651
+ this._ws.close();
652
+ this._ws = null;
653
+ }
654
+ this._sessionId = null;
655
+ this._tokenBuffer = "";
656
+ this._streamingMessage = null;
657
+ this._pendingConfirmSeq = -1;
658
+ this._status = "disconnected" /* Disconnected */;
659
+ this._connected = false;
660
+ this._notify();
661
+ }
662
+ // -----------------------------------------------------------------------
663
+ // Sending messages
664
+ // -----------------------------------------------------------------------
665
+ sendText(text) {
666
+ if (!text.trim() || !this._ws) return;
667
+ this._addMessage({ role: "user" /* User */, text, timestamp: /* @__PURE__ */ new Date() });
668
+ this._wsSend({ type: "text", message: text });
669
+ }
670
+ sendConfirm(seq, confirmed) {
671
+ this._wsSend({ type: "confirm", seq, confirmed });
672
+ this._pendingConfirmSeq = -1;
673
+ }
674
+ sendResult(seq, results, state) {
675
+ const msg = { type: "result", seq, results };
676
+ if (state) msg.state = state;
677
+ this._wsSend(msg);
678
+ }
679
+ sendState(state) {
680
+ this._wsSend(state);
681
+ }
682
+ _wsSend(obj) {
683
+ if (this._ws && this._ws.readyState === WebSocket.OPEN) {
684
+ this._ws.send(JSON.stringify(obj));
685
+ }
686
+ }
687
+ // -----------------------------------------------------------------------
688
+ // Incoming message handling
689
+ // -----------------------------------------------------------------------
690
+ _onMessage(raw) {
691
+ let json;
692
+ try {
693
+ json = JSON.parse(raw);
694
+ } catch {
695
+ return;
696
+ }
697
+ const type = json["type"];
698
+ if (!type) return;
699
+ switch (type) {
700
+ case "config":
701
+ this._handleConfig(json);
702
+ break;
703
+ case "chat":
704
+ this._handleChat(json);
705
+ break;
706
+ case "chat_token":
707
+ this._handleChatToken(json);
708
+ break;
709
+ case "chat_end":
710
+ this._flushTokenBuffer();
711
+ break;
712
+ case "status":
713
+ this._handleStatus(json);
714
+ break;
715
+ case "command":
716
+ this._handleCommand(json);
717
+ break;
718
+ case "error":
719
+ this._handleError(json);
720
+ break;
721
+ case "history":
722
+ this._handleHistory(json);
723
+ break;
724
+ }
725
+ }
726
+ _handleConfig(config) {
727
+ this._sessionId = config.sessionId;
728
+ this._status = "idle" /* Idle */;
729
+ this._connected = true;
730
+ this._reconnectAttempts = 0;
731
+ this._voiceEnabled = config.features?.voice === true;
732
+ this._notify();
733
+ if (this._manifest) {
734
+ this._wsSend(this._manifest);
735
+ }
736
+ }
737
+ _handleHistory(json) {
738
+ const items = json["messages"];
739
+ if (!items || !items.length) return;
740
+ this._messages = [];
741
+ for (const item of items) {
742
+ const role = item["role"];
743
+ const content = item["content"];
744
+ if (!content) continue;
745
+ const chatRole = role === "user" ? "user" /* User */ : role === "assistant" ? "agent" /* Agent */ : null;
746
+ if (!chatRole) continue;
747
+ this._messages.push({ role: chatRole, text: content, timestamp: /* @__PURE__ */ new Date() });
748
+ }
749
+ this._notify();
750
+ }
751
+ _handleChat(chat) {
752
+ this._flushTokenBuffer();
753
+ this._addMessage({
754
+ role: "agent" /* Agent */,
755
+ text: chat.message,
756
+ timestamp: /* @__PURE__ */ new Date()
757
+ });
758
+ }
759
+ _handleChatToken(token) {
760
+ this._tokenBuffer += token.token;
761
+ if (this._streamingMessage) {
762
+ this._streamingMessage.text = this._tokenBuffer;
763
+ } else {
764
+ this._streamingMessage = {
765
+ role: "agent" /* Agent */,
766
+ text: this._tokenBuffer,
767
+ timestamp: /* @__PURE__ */ new Date()
768
+ };
769
+ this._messages.push(this._streamingMessage);
770
+ }
771
+ this._notify();
772
+ }
773
+ _flushTokenBuffer() {
774
+ this._tokenBuffer = "";
775
+ this._streamingMessage = null;
776
+ }
777
+ _handleStatus(status) {
778
+ this._status = this._mapStatus(status.status);
779
+ this._notify();
780
+ }
781
+ _mapStatus(status) {
782
+ switch (status) {
783
+ case "idle":
784
+ return "idle" /* Idle */;
785
+ case "listening":
786
+ return "listening" /* Listening */;
787
+ case "recording":
788
+ return "recording" /* Recording */;
789
+ case "thinking":
790
+ return "thinking" /* Thinking */;
791
+ case "speaking":
792
+ return "speaking" /* Speaking */;
793
+ case "executing":
794
+ return "executing" /* Executing */;
795
+ default:
796
+ return "idle" /* Idle */;
797
+ }
798
+ }
799
+ _handleError(error) {
800
+ this._addMessage({
801
+ role: "system" /* System */,
802
+ text: error.message,
803
+ timestamp: /* @__PURE__ */ new Date()
804
+ });
805
+ this._status = "idle" /* Idle */;
806
+ this._notify();
807
+ }
808
+ // -----------------------------------------------------------------------
809
+ // Command execution
810
+ // -----------------------------------------------------------------------
811
+ async _handleCommand(cmd) {
812
+ const results = new Array(cmd.actions.length);
813
+ const sequentialIndices = [];
814
+ const parallelIndices = [];
815
+ for (let i = 0; i < cmd.actions.length; i++) {
816
+ if (_VocallClient.SEQUENTIAL_ACTIONS.has(cmd.actions[i].do)) {
817
+ sequentialIndices.push(i);
818
+ } else {
819
+ parallelIndices.push(i);
820
+ }
821
+ }
822
+ for (const i of sequentialIndices) {
823
+ try {
824
+ await this._executeAction(cmd.actions[i], cmd.seq);
825
+ results[i] = { index: i, success: true };
826
+ } catch (e) {
827
+ results[i] = { index: i, success: false, error: String(e) };
828
+ }
829
+ }
830
+ if (parallelIndices.length > 0) {
831
+ await Promise.all(
832
+ parallelIndices.map(async (i) => {
833
+ try {
834
+ await this._executeAction(cmd.actions[i], cmd.seq);
835
+ results[i] = { index: i, success: true };
836
+ } catch (e) {
837
+ results[i] = { index: i, success: false, error: String(e) };
838
+ }
839
+ })
840
+ );
841
+ }
842
+ for (let i = 0; i < results.length; i++) {
843
+ if (!results[i]) {
844
+ results[i] = { index: i, success: false, error: "not executed" };
845
+ }
846
+ }
847
+ this.sendResult(cmd.seq, results);
848
+ }
849
+ async _executeAction(action, seq) {
850
+ switch (action.do) {
851
+ case "navigate":
852
+ await this._execNavigate(action);
853
+ break;
854
+ case "fill":
855
+ await this._execFill(action);
856
+ break;
857
+ case "clear":
858
+ this._execClear(action);
859
+ break;
860
+ case "select":
861
+ this._execSelect(action);
862
+ break;
863
+ case "click":
864
+ await this._execClick(action);
865
+ break;
866
+ case "focus":
867
+ this._execFocus(action);
868
+ break;
869
+ case "highlight":
870
+ this._execHighlight(action);
871
+ break;
872
+ case "scroll_to":
873
+ this._execScrollTo(action);
874
+ break;
875
+ case "show_toast":
876
+ this._execShowToast(action);
877
+ break;
878
+ case "ask_confirm":
879
+ this._execAskConfirm(action, seq);
880
+ break;
881
+ case "open_modal":
882
+ this._execOpenModal(action);
883
+ break;
884
+ case "close_modal":
885
+ this._execCloseModal();
886
+ break;
887
+ case "enable":
888
+ this._execEnable(action);
889
+ break;
890
+ case "disable":
891
+ this._execDisable(action);
892
+ break;
893
+ }
894
+ }
895
+ async _execNavigate(action) {
896
+ const screenId = action.screen;
897
+ if (!screenId) return;
898
+ this.onNavigate?.(screenId);
899
+ await this._sleep(_VocallClient.RETRY_DELAY_MS);
900
+ }
901
+ async _execFill(action) {
902
+ const fieldId = action.field;
903
+ if (!fieldId) return;
904
+ let entry = this.findField(fieldId);
905
+ for (let i = 0; i < _VocallClient.MAX_RETRIES && !entry; i++) {
906
+ await this._sleep(_VocallClient.RETRY_DELAY_MS);
907
+ entry = this.findField(fieldId);
908
+ }
909
+ if (!entry) {
910
+ throw new Error(`field "${fieldId}" not registered`);
911
+ }
912
+ const value = action.value != null ? String(action.value) : "";
913
+ const animate = action.animate ?? "typewriter";
914
+ if (animate === "typewriter" && value.length > 1) {
915
+ await this._typewriterFill(entry, value, action.speed ?? 40);
916
+ } else {
917
+ entry.setValue(value);
918
+ }
919
+ }
920
+ async _typewriterFill(entry, value, speedMs) {
921
+ entry.element.classList.add("filling");
922
+ entry.element.focus();
923
+ entry.setValue("");
924
+ for (let i = 0; i < value.length; i++) {
925
+ entry.setValue(value.substring(0, i + 1));
926
+ await this._sleep(speedMs);
927
+ }
928
+ entry.element.classList.remove("filling");
929
+ }
930
+ _execClear(action) {
931
+ const fieldId = action.field;
932
+ if (!fieldId) return;
933
+ const entry = this.findField(fieldId);
934
+ if (entry) entry.setValue("");
935
+ }
936
+ _execSelect(action) {
937
+ const fieldId = action.field;
938
+ if (!fieldId) return;
939
+ const entry = this.findField(fieldId);
940
+ if (entry) entry.setValue(String(action.value ?? ""));
941
+ }
942
+ async _execClick(action) {
943
+ const actionId = action.action;
944
+ if (!actionId) return;
945
+ let callback = this.findAction(actionId);
946
+ for (let i = 0; i < _VocallClient.MAX_RETRIES && !callback; i++) {
947
+ await this._sleep(_VocallClient.RETRY_DELAY_MS);
948
+ callback = this.findAction(actionId);
949
+ }
950
+ if (!callback) {
951
+ throw new Error(`action "${actionId}" not registered`);
952
+ }
953
+ await callback();
954
+ }
955
+ _execFocus(action) {
956
+ const fieldId = action.field;
957
+ if (!fieldId) return;
958
+ const entry = this.findField(fieldId);
959
+ if (entry) entry.element.focus();
960
+ }
961
+ _execHighlight(action) {
962
+ const fieldId = action.field;
963
+ if (!fieldId) return;
964
+ const entry = this.findField(fieldId);
965
+ if (!entry) return;
966
+ entry.element.scrollIntoView({ behavior: "smooth", block: "center" });
967
+ entry.element.classList.add("highlighted");
968
+ const duration = action.duration ?? 2e3;
969
+ setTimeout(() => entry.element.classList.remove("highlighted"), duration);
970
+ }
971
+ _execScrollTo(action) {
972
+ const fieldId = action.field;
973
+ if (!fieldId) return;
974
+ const entry = this.findField(fieldId);
975
+ if (entry) entry.element.scrollIntoView({ behavior: "smooth", block: "center" });
976
+ }
977
+ _execShowToast(action) {
978
+ const message = action.message ?? "";
979
+ const level = action.level ?? "info";
980
+ const duration = action.duration;
981
+ this.onToast?.(message, level, duration);
982
+ }
983
+ _execAskConfirm(action, seq) {
984
+ this._pendingConfirmSeq = seq;
985
+ const message = action.message ?? "Confirmar?";
986
+ this.onConfirm?.(seq, message);
987
+ }
988
+ _execOpenModal(action) {
989
+ const modalId = action.modal;
990
+ if (!modalId) return;
991
+ this.onOpenModal?.(modalId, action.query);
992
+ }
993
+ _execCloseModal() {
994
+ this.onCloseModal?.();
995
+ }
996
+ _execEnable(action) {
997
+ const entry = action.field ? this.findField(action.field) : void 0;
998
+ if (entry && entry.element instanceof HTMLInputElement) {
999
+ entry.element.disabled = false;
1000
+ }
1001
+ }
1002
+ _execDisable(action) {
1003
+ const entry = action.field ? this.findField(action.field) : void 0;
1004
+ if (entry && entry.element instanceof HTMLInputElement) {
1005
+ entry.element.disabled = true;
1006
+ }
1007
+ }
1008
+ // -----------------------------------------------------------------------
1009
+ // Voice: public methods
1010
+ // -----------------------------------------------------------------------
1011
+ async startAlwaysListening() {
1012
+ if (!this._getVoice().isSupported || !this._connected) return;
1013
+ this._alwaysListening = true;
1014
+ await this._openVoiceWs(false);
1015
+ const voice = this._getVoice();
1016
+ await voice.startCapture((chunk) => {
1017
+ if (this._voiceWs && this._voiceWs.readyState === WebSocket.OPEN) {
1018
+ this._voiceWs.send(chunk);
1019
+ }
1020
+ });
1021
+ this._voiceState = "listening";
1022
+ this._recording = false;
1023
+ this._notify();
1024
+ }
1025
+ stopAlwaysListening() {
1026
+ this._alwaysListening = false;
1027
+ const voice = this._getVoice();
1028
+ voice.stopCapture();
1029
+ voice.stopPlayback();
1030
+ this._closeVoiceWs();
1031
+ this._voiceState = "idle";
1032
+ this._recording = false;
1033
+ this._partialTranscription = null;
1034
+ this._audioLevel = 0;
1035
+ this._resetTtsState();
1036
+ this._notify();
1037
+ }
1038
+ async startRecording() {
1039
+ if (!this._getVoice().isSupported || !this._connected) return;
1040
+ this._alwaysListening = false;
1041
+ await this._openVoiceWs(true);
1042
+ const voice = this._getVoice();
1043
+ await voice.startCapture((chunk) => {
1044
+ if (this._voiceWs && this._voiceWs.readyState === WebSocket.OPEN) {
1045
+ this._voiceWs.send(chunk);
1046
+ }
1047
+ });
1048
+ this._voiceState = "recording";
1049
+ this._recording = true;
1050
+ this._notify();
1051
+ }
1052
+ stopRecording() {
1053
+ const voice = this._getVoice();
1054
+ voice.stopCapture();
1055
+ this._recording = false;
1056
+ this._voiceState = "thinking";
1057
+ this._notify();
1058
+ if (this._voiceWs && this._voiceWs.readyState === WebSocket.OPEN) {
1059
+ this._voiceWs.send(JSON.stringify({ type: "eof" }));
1060
+ }
1061
+ }
1062
+ interrupt() {
1063
+ const voice = this._getVoice();
1064
+ voice.stopPlayback();
1065
+ this._ttsActive = false;
1066
+ if (this._voiceWs && this._voiceWs.readyState === WebSocket.OPEN) {
1067
+ this._voiceWs.send(JSON.stringify({ type: "interrupt" }));
1068
+ this._voiceWs.send(JSON.stringify({ type: "tts_state", active: false }));
1069
+ }
1070
+ if (this._alwaysListening) {
1071
+ voice.muteMic(false);
1072
+ this._voiceState = "listening";
1073
+ } else {
1074
+ this._voiceState = "idle";
1075
+ }
1076
+ this._partialTranscription = null;
1077
+ this._resetTtsState();
1078
+ this._notify();
1079
+ }
1080
+ // -----------------------------------------------------------------------
1081
+ // Voice: private WS management
1082
+ // -----------------------------------------------------------------------
1083
+ async _openVoiceWs(directMode) {
1084
+ this._closeVoiceWs();
1085
+ const url = new URL(this.serverUrl);
1086
+ url.pathname = "/ws/stream";
1087
+ url.searchParams.set("visitor_id", this._visitorId);
1088
+ if (this.token) {
1089
+ url.searchParams.set("token", this.token);
1090
+ }
1091
+ return new Promise((resolve, reject) => {
1092
+ try {
1093
+ this._voiceWs = new WebSocket(url.toString());
1094
+ this._voiceWs.binaryType = "arraybuffer";
1095
+ this._voiceWs.onopen = () => {
1096
+ const config = {
1097
+ type: "config",
1098
+ sample_rate: TARGET_SAMPLE_RATE2
1099
+ };
1100
+ if (directMode) {
1101
+ config["mode"] = "direct";
1102
+ }
1103
+ this._voiceWs.send(JSON.stringify(config));
1104
+ resolve();
1105
+ };
1106
+ this._voiceWs.onmessage = (event) => {
1107
+ this._onVoiceStreamMessage(event);
1108
+ };
1109
+ this._voiceWs.onclose = () => {
1110
+ this._voiceWs = null;
1111
+ };
1112
+ this._voiceWs.onerror = () => {
1113
+ this._voiceWs = null;
1114
+ reject(new Error("Voice WebSocket connection failed"));
1115
+ };
1116
+ } catch (e) {
1117
+ reject(e);
1118
+ }
1119
+ });
1120
+ }
1121
+ _closeVoiceWs() {
1122
+ if (this._voiceWs) {
1123
+ this._voiceWs.onclose = null;
1124
+ this._voiceWs.onerror = null;
1125
+ this._voiceWs.onmessage = null;
1126
+ this._voiceWs.close();
1127
+ this._voiceWs = null;
1128
+ }
1129
+ }
1130
+ _resetTtsState() {
1131
+ this._ttsActive = false;
1132
+ this._ttsEndReceived = false;
1133
+ this._llmDone = false;
1134
+ this._pendingTtsChunks = 0;
1135
+ }
1136
+ // -----------------------------------------------------------------------
1137
+ // Voice: stream message handler
1138
+ // -----------------------------------------------------------------------
1139
  // Handle one frame from the /ws/stream voice socket.
  // Binary frames are raw audio to play; text frames are JSON control
  // messages driving the voice-state machine. Malformed/unknown frames
  // are ignored.
  _onVoiceStreamMessage(event) {
    const { data } = event;
    // Binary frame: TTS audio chunk — hand straight to the player.
    if (data instanceof ArrayBuffer) {
      const voice = this._getVoice();
      voice.playAudio(new Uint8Array(data));
      return;
    }
    let json;
    try {
      json = JSON.parse(data);
    } catch {
      // Not JSON — drop silently.
      return;
    }
    const type = json["type"];
    if (!type) return;
    switch (type) {
      // Server detected the wake word: switch UI into recording mode.
      case "wake_word":
        this._voiceState = "recording";
        this._recording = true;
        this._partialTranscription = null;
        this._notify();
        break;
      // Interim ASR hypothesis for live display.
      case "partial":
        this._partialTranscription = json["text"] ?? null;
        this._notify();
        break;
      // Final transcript received: utterance done, waiting on the LLM.
      case "transcription":
        this._partialTranscription = null;
        this._voiceState = "thinking";
        this._recording = false;
        this._notify();
        break;
      case "llm_start":
        this._llmDone = false;
        break;
      // Streamed LLM token: accumulate into a single in-place-updated
      // agent message so the chat shows text as it arrives.
      case "llm_token": {
        const text = json["text"];
        if (text) {
          this._tokenBuffer += text;
          if (this._streamingMessage) {
            this._streamingMessage.text = this._tokenBuffer;
          } else {
            this._streamingMessage = {
              role: "agent" /* Agent */,
              text: this._tokenBuffer,
              timestamp: /* @__PURE__ */ new Date()
            };
            this._messages = [...this._messages, this._streamingMessage];
          }
          this._notify();
        }
        break;
      }
      // LLM finished: finalize the streamed message; if no TTS chunks are
      // still pending, the whole TTS turn is also over.
      case "llm_end":
        this._llmDone = true;
        this._flushTokenBuffer();
        this._ttsEndReceived = this._pendingTtsChunks <= 0 && this._llmDone;
        break;
      // A TTS chunk began: mute the mic so the assistant doesn't hear
      // itself; count chunks so tts_end/llm_end can detect completion.
      case "tts_start":
        this._pendingTtsChunks++;
        this._ttsActive = true;
        this._ttsEndReceived = false;
        this._voiceState = "speaking";
        this._getVoice().muteMic(true);
        this._notify();
        break;
      case "tts_end":
        this._pendingTtsChunks--;
        // Turn is complete only when every chunk ended AND the LLM is done.
        if (this._pendingTtsChunks <= 0 && this._llmDone) {
          this._ttsEndReceived = true;
        }
        break;
      // Server confirmed a barge-in: drop playback and return to
      // listening (hands-free) or idle.
      case "interrupted":
        this._getVoice().stopPlayback();
        this._ttsActive = false;
        this._resetTtsState();
        if (this._alwaysListening) {
          this._getVoice().muteMic(false);
          this._voiceState = "listening";
        } else {
          this._voiceState = "idle";
        }
        this._partialTranscription = null;
        this._notify();
        break;
      // Server closed the conversation. In push-to-talk mode also tear
      // down the socket and capture; hands-free keeps listening.
      case "conversation_end":
        this._getVoice().stopPlayback();
        this._ttsActive = false;
        this._resetTtsState();
        if (this._alwaysListening) {
          this._getVoice().muteMic(false);
          this._voiceState = "listening";
        } else {
          this._voiceState = "idle";
          this._closeVoiceWs();
          this._getVoice().stopCapture();
          this._recording = false;
        }
        this._partialTranscription = null;
        this._notify();
        break;
    }
  }
1242
+ // -----------------------------------------------------------------------
1243
+ // Voice: playback completion handler
1244
+ // -----------------------------------------------------------------------
1245
+ _onPlaybackComplete() {
1246
+ if (this._alwaysListening) {
1247
+ if (!this._ttsEndReceived) return;
1248
+ this._finalizeTts();
1249
+ } else {
1250
+ }
1251
+ }
1252
+ _finalizeTts() {
1253
+ this._ttsActive = false;
1254
+ if (this._voiceWs && this._voiceWs.readyState === WebSocket.OPEN) {
1255
+ this._voiceWs.send(JSON.stringify({ type: "tts_state", active: false }));
1256
+ }
1257
+ this._getVoice().muteMic(false);
1258
+ this._voiceState = "listening";
1259
+ this._recording = false;
1260
+ this._partialTranscription = null;
1261
+ this._resetTtsState();
1262
+ this._notify();
1263
+ }
1264
+ // -----------------------------------------------------------------------
1265
+ // Message management
1266
+ // -----------------------------------------------------------------------
1267
+ _addMessage(msg) {
1268
+ this._messages = [...this._messages, msg];
1269
+ this._notify();
1270
+ }
1271
+ clearMessages() {
1272
+ this._messages = [];
1273
+ this._tokenBuffer = "";
1274
+ this._streamingMessage = null;
1275
+ this._notify();
1276
+ }
1277
+ // -----------------------------------------------------------------------
1278
+ // Reconnection
1279
+ // -----------------------------------------------------------------------
1280
+ _onDone() {
1281
+ this._connected = false;
1282
+ this._ws = null;
1283
+ if (this._intentionalDisconnect) {
1284
+ this._notify();
1285
+ return;
1286
+ }
1287
+ this._status = "disconnected" /* Disconnected */;
1288
+ this._notify();
1289
+ this._scheduleReconnect();
1290
+ }
1291
+ _onError() {
1292
+ if (this._intentionalDisconnect) return;
1293
+ this._status = "disconnected" /* Disconnected */;
1294
+ this._connected = false;
1295
+ this._notify();
1296
+ this._scheduleReconnect();
1297
+ }
1298
+ _scheduleReconnect() {
1299
+ if (this._intentionalDisconnect) return;
1300
+ if (this._reconnectAttempts >= _VocallClient.MAX_RECONNECT_ATTEMPTS) return;
1301
+ if (this._reconnectTimer) {
1302
+ clearTimeout(this._reconnectTimer);
1303
+ }
1304
+ const delaySec = Math.min(this._reconnectAttempts + 1, 15);
1305
+ this._reconnectAttempts++;
1306
+ this._reconnectTimer = setTimeout(() => this._doConnect(), delaySec * 1e3);
1307
+ }
1308
+ // -----------------------------------------------------------------------
1309
+ // Helpers
1310
+ // -----------------------------------------------------------------------
1311
+ _sleep(ms) {
1312
+ return new Promise((resolve) => setTimeout(resolve, ms));
1313
+ }
1314
+ // -----------------------------------------------------------------------
1315
+ // Dispose
1316
+ // -----------------------------------------------------------------------
1317
+ destroy() {
1318
+ this.disconnect(true);
1319
+ if (this._voice) {
1320
+ this._voice.dispose();
1321
+ this._voice = null;
1322
+ }
1323
+ this._listeners.clear();
1324
+ }
1325
+ };
1326
// Hard cap on automatic reconnect attempts before giving up.
_VocallClient.MAX_RECONNECT_ATTEMPTS = 10;
// Poll interval (ms) used when waiting for fields/actions to register and
// as the post-navigate settle delay.
_VocallClient.RETRY_DELAY_MS = 300;
// How many polls _execFill/_execClick make before throwing "not registered".
_VocallClient.MAX_RETRIES = 3;
// Sequential actions that must not run in parallel
_VocallClient.SEQUENTIAL_ACTIONS = /* @__PURE__ */ new Set([
  "navigate",
  "click",
  "open_modal",
  "close_modal",
  "ask_confirm",
  "show_toast"
]);
var VocallClient = _VocallClient;
1339
+
1340
+ // src/context/vocall-provider.tsx
1341
+ var import_react = require("react");
1342
+ var import_jsx_runtime = require("react/jsx-runtime");
1343
// Context carrying the shared VocallClient; null outside a <VocallProvider>.
var VocallContext = (0, import_react.createContext)(null);
1344
// React provider that owns a VocallClient for its subtree.
// The client is recreated only when `serverUrl` changes — `token` and
// `visitorId` changes deliberately do NOT recreate it (deps = [serverUrl]).
function VocallProvider({ serverUrl, token, visitorId, children }) {
  const clientRef = (0, import_react.useRef)(null);
  const client = (0, import_react.useMemo)(() => {
    // NOTE(review): destroying the previous client inside useMemo is a
    // render-time side effect; under React StrictMode the memo body may
    // run twice — assumed safe only if destroy() is idempotent. Confirm.
    clientRef.current?.destroy();
    const c = new VocallClient(serverUrl, { token, visitorId });
    clientRef.current = c;
    return c;
  }, [serverUrl]);
  // Keep the latest token on the existing client without reconnecting.
  // NOTE(review): plain property mutation during render — presumed safe
  // because it triggers no notification; verify against VocallClient.
  client.token = token;
  (0, import_react.useEffect)(() => {
    // Unmount-only cleanup: release the last client instance.
    return () => {
      clientRef.current?.destroy();
      clientRef.current = null;
    };
  }, []);
  return /* @__PURE__ */ (0, import_jsx_runtime.jsx)(VocallContext.Provider, { value: client, children });
}
1361
// Return the VocallClient from context; throws when used outside a
// <VocallProvider>.
function useVocallClient() {
  const client = (0, import_react.useContext)(VocallContext);
  if (client) return client;
  throw new Error("useVocallClient must be used within a <VocallProvider>");
}
1368
+
1369
+ // src/hooks/use-vocall.ts
1370
+ var import_react2 = require("react");
1371
// Main hook: exposes the client's reactive state (via useSyncExternalStore
// snapshots) plus stable, memoized command wrappers. The third argument of
// each store call is the server-side snapshot used during SSR.
function useVocall() {
  const client = useVocallClient();
  // One shared subscribe function so every snapshot below reuses the same
  // client subscription.
  const subscribe = (0, import_react2.useCallback)(
    (onStoreChange) => client.subscribe(onStoreChange),
    [client]
  );
  const status = (0, import_react2.useSyncExternalStore)(
    subscribe,
    () => client.status,
    () => "disconnected" /* Disconnected */
  );
  const connected = (0, import_react2.useSyncExternalStore)(
    subscribe,
    () => client.connected,
    () => false
  );
  // NOTE(review): the SSR snapshot returns a fresh [] per call — assumed
  // acceptable because it is only read during server rendering.
  const messages = (0, import_react2.useSyncExternalStore)(
    subscribe,
    () => client.messages,
    () => []
  );
  const sessionId = (0, import_react2.useSyncExternalStore)(
    subscribe,
    () => client.sessionId,
    () => null
  );
  const voiceEnabled = (0, import_react2.useSyncExternalStore)(
    subscribe,
    () => client.voiceEnabled,
    () => false
  );
  const voiceSupported = (0, import_react2.useSyncExternalStore)(
    subscribe,
    () => client.voiceSupported,
    () => false
  );
  const voiceState = (0, import_react2.useSyncExternalStore)(
    subscribe,
    () => client.voiceState,
    () => "idle"
  );
  const recording = (0, import_react2.useSyncExternalStore)(
    subscribe,
    () => client.recording,
    () => false
  );
  const partialTranscription = (0, import_react2.useSyncExternalStore)(
    subscribe,
    () => client.partialTranscription,
    () => null
  );
  // Stable command wrappers (identity changes only when client changes).
  const sendText = (0, import_react2.useCallback)(
    (text) => client.sendText(text),
    [client]
  );
  const connect = (0, import_react2.useCallback)(
    (manifest) => client.connect(manifest),
    [client]
  );
  const disconnect = (0, import_react2.useCallback)(
    () => client.disconnect(),
    [client]
  );
  const clearMessages = (0, import_react2.useCallback)(
    () => client.clearMessages(),
    [client]
  );
  return {
    client,
    status,
    messages,
    connected,
    sessionId,
    voiceEnabled,
    voiceSupported,
    voiceState,
    recording,
    partialTranscription,
    sendText,
    connect,
    disconnect,
    clearMessages
  };
}
1455
+
1456
+ // src/hooks/use-vocall-field.ts
1457
+ var import_react3 = require("react");
1458
// Register a DOM element as a voice-controllable field. Returns a callback
// ref to attach to the element plus a `registered` flag. Custom
// setValue/getValue may be supplied via `options`; the defaults write/read
// `element.value` and fire synthetic input/change events so controlled
// React inputs pick up programmatic fills.
function useVocallField(screenId, fieldId, options) {
  const client = useVocallClient();
  const [registered, setRegistered] = (0, import_react3.useState)(false);
  const elementRef = (0, import_react3.useRef)(null);
  const ref = (0, import_react3.useCallback)(
    (element) => {
      // Detach callback (element = null) or re-attach: drop the previous
      // registration first.
      if (elementRef.current && registered) {
        client.unregisterField(screenId, fieldId);
        setRegistered(false);
      }
      elementRef.current = element;
      if (element) {
        const isInput = element instanceof HTMLInputElement || element instanceof HTMLTextAreaElement || element instanceof HTMLSelectElement;
        client.registerField(screenId, fieldId, {
          element,
          setValue: options?.setValue ?? ((value) => {
            if (isInput) {
              element.value = value;
              // Synthetic events so frameworks observing the input react.
              element.dispatchEvent(new Event("input", { bubbles: true }));
              element.dispatchEvent(new Event("change", { bubbles: true }));
            }
          }),
          getValue: options?.getValue ?? (() => {
            if (isInput) {
              return element.value;
            }
            return "";
          })
        });
        setRegistered(true);
      }
    },
    // NOTE(review): `registered` in the deps gives the ref callback a new
    // identity after registration, which makes React detach/re-attach the
    // ref (null then element) — presumed intentional re-registration
    // churn; confirm against the React callback-ref contract.
    [client, screenId, fieldId, options?.setValue, options?.getValue, registered]
  );
  (0, import_react3.useEffect)(() => {
    // Safety net: always unregister on unmount or identity change.
    return () => {
      client.unregisterField(screenId, fieldId);
    };
  }, [client, screenId, fieldId]);
  return { ref, registered };
}
1499
+
1500
+ // src/hooks/use-vocall-action.ts
1501
+ var import_react4 = require("react");
1502
// Register `callback` as a voice-invokable action for (screenId, actionId).
// A ref indirection keeps the newest callback live without re-registering
// on every render; registration follows the effect lifecycle.
function useVocallAction(screenId, actionId, callback) {
  const client = useVocallClient();
  const latest = (0, import_react4.useRef)(callback);
  latest.current = callback;
  (0, import_react4.useEffect)(() => {
    client.registerAction(screenId, actionId, () => latest.current());
    return () => client.unregisterAction(screenId, actionId);
  }, [client, screenId, actionId]);
}
1514
+
1515
+ // src/hooks/use-vocall-voice.ts
1516
+ var import_react5 = require("react");
1517
// Voice-focused hook: reactive voice state snapshots plus stable wrappers
// for the client's voice commands (hands-free, push-to-talk, barge-in).
function useVocallVoice() {
  const client = useVocallClient();
  // Single shared subscription for all snapshots below.
  const subscribe = (0, import_react5.useCallback)(
    (onStoreChange) => client.subscribe(onStoreChange),
    [client]
  );
  const voiceEnabled = (0, import_react5.useSyncExternalStore)(
    subscribe,
    () => client.voiceEnabled,
    () => false
  );
  const voiceState = (0, import_react5.useSyncExternalStore)(
    subscribe,
    () => client.voiceState,
    () => "idle"
  );
  const recording = (0, import_react5.useSyncExternalStore)(
    subscribe,
    () => client.recording,
    () => false
  );
  const partialTranscription = (0, import_react5.useSyncExternalStore)(
    subscribe,
    () => client.partialTranscription,
    () => null
  );
  const audioLevel = (0, import_react5.useSyncExternalStore)(
    subscribe,
    () => client.audioLevel,
    () => 0
  );
  // Stable command wrappers.
  const startAlwaysListening = (0, import_react5.useCallback)(
    () => client.startAlwaysListening(),
    [client]
  );
  const stopAlwaysListening = (0, import_react5.useCallback)(
    () => client.stopAlwaysListening(),
    [client]
  );
  const startRecording = (0, import_react5.useCallback)(
    () => client.startRecording(),
    [client]
  );
  const stopRecording = (0, import_react5.useCallback)(
    () => client.stopRecording(),
    [client]
  );
  const interrupt = (0, import_react5.useCallback)(
    () => client.interrupt(),
    [client]
  );
  return {
    voiceEnabled,
    voiceState,
    recording,
    partialTranscription,
    audioLevel,
    startAlwaysListening,
    stopAlwaysListening,
    startRecording,
    stopRecording,
    interrupt
  };
}
1581
+
1582
+ // src/components/VocallChat.tsx
1583
+ var import_react6 = require("react");
1584
+ var import_jsx_runtime2 = require("react/jsx-runtime");
1585
// Portuguese status captions shown in the chat header; statuses without an
// entry (idle, disconnected) render no caption.
var STATUS_LABELS = {
  thinking: "Pensando...",
  executing: "Executando...",
  speaking: "Falando...",
  listening: "Ouvindo...",
  recording: "Gravando..."
};
1592
// Floating chat panel: header (connection dot, assistant name, status
// caption, clear/close buttons), scrolling message list, and an input row
// with optional mic (push-to-talk) and send buttons. Renders nothing when
// `open` is false. Compiled JSX — structure kept byte-identical.
function VocallChat({
  open,
  onClose,
  assistantName = "Emma",
  className,
  style
}) {
  const { status, messages, connected, voiceSupported, recording, sendText, clearMessages, client } = useVocall();
  const [input, setInput] = (0, import_react6.useState)("");
  const [micActive, setMicActive] = (0, import_react6.useState)(false);
  const messagesEndRef = (0, import_react6.useRef)(null);
  // Keep local mic toggle in sync when the client stops recording on its
  // own (e.g. server-side end of turn).
  (0, import_react6.useEffect)(() => {
    if (micActive && !recording) {
      setMicActive(false);
    }
  }, [micActive, recording]);
  const toggleMic = (0, import_react6.useCallback)(() => {
    if (micActive) {
      client.stopRecording();
      setMicActive(false);
    } else {
      client.startRecording();
      setMicActive(true);
    }
  }, [micActive, client]);
  // Auto-scroll to the newest message.
  (0, import_react6.useEffect)(() => {
    messagesEndRef.current?.scrollIntoView({ behavior: "smooth" });
  }, [messages]);
  const handleSend = (0, import_react6.useCallback)(() => {
    const text = input.trim();
    if (!text || !connected) return;
    sendText(text);
    setInput("");
  }, [input, connected, sendText]);
  // Enter sends; Shift+Enter is left to the input's default behavior.
  const handleKeyDown = (0, import_react6.useCallback)(
    (e) => {
      if (e.key === "Enter" && !e.shiftKey) {
        e.preventDefault();
        handleSend();
      }
    },
    [handleSend]
  );
  if (!open) return null;
  const statusLabel = STATUS_LABELS[status] || "";
  const statusClass = status !== "idle" /* Idle */ ? status : "";
  return /* @__PURE__ */ (0, import_jsx_runtime2.jsxs)(
    "div",
    {
      className: `vocall-chat-panel ${className || ""}`,
      style: {
        position: "fixed",
        bottom: 76,
        right: 16,
        width: 360,
        height: 560,
        background: "#fff",
        borderRadius: 16,
        boxShadow: "0 8px 32px rgba(0,0,0,0.15)",
        display: "flex",
        flexDirection: "column",
        zIndex: 999,
        overflow: "hidden",
        ...style
      },
      children: [
        // Header: connection dot, name, status caption, clear + close.
        /* @__PURE__ */ (0, import_jsx_runtime2.jsxs)(
          "div",
          {
            className: `vocall-chat-header ${statusClass}`,
            style: {
              padding: "8px 12px",
              background: "#1E293B",
              borderRadius: "16px 16px 0 0",
              display: "flex",
              alignItems: "center",
              gap: 8,
              borderBottom: `2px solid ${statusClass ? _statusBorderColor(status) : "transparent"}`,
              transition: "border-color 0.3s"
            },
            children: [
              /* @__PURE__ */ (0, import_jsx_runtime2.jsx)(
                "div",
                {
                  style: {
                    width: 8,
                    height: 8,
                    borderRadius: "50%",
                    background: connected ? "#22C55E" : "#EF4444",
                    boxShadow: connected ? "0 0 6px rgba(34,197,94,0.6)" : "none",
                    flexShrink: 0
                  }
                }
              ),
              /* @__PURE__ */ (0, import_jsx_runtime2.jsx)("span", { style: { fontSize: 13, fontWeight: 600, color: "#fff" }, children: assistantName }),
              statusLabel && /* @__PURE__ */ (0, import_jsx_runtime2.jsxs)(import_jsx_runtime2.Fragment, { children: [
                /* @__PURE__ */ (0, import_jsx_runtime2.jsx)("span", { style: { fontSize: 13, color: "#CBD5E1" }, children: "\xB7" }),
                /* @__PURE__ */ (0, import_jsx_runtime2.jsx)("span", { style: { fontSize: 12, fontWeight: 500, color: _statusColor(status) }, children: statusLabel })
              ] }),
              /* @__PURE__ */ (0, import_jsx_runtime2.jsx)("div", { style: { flex: 1 } }),
              /* @__PURE__ */ (0, import_jsx_runtime2.jsx)(
                "button",
                {
                  onClick: clearMessages,
                  title: "Limpar",
                  style: {
                    width: 28,
                    height: 28,
                    display: "flex",
                    alignItems: "center",
                    justifyContent: "center",
                    border: "none",
                    background: "none",
                    color: "#CBD5E1",
                    cursor: "pointer",
                    borderRadius: 6,
                    fontSize: 14
                  },
                  children: "\u{1F5D1}"
                }
              ),
              /* @__PURE__ */ (0, import_jsx_runtime2.jsx)(
                "button",
                {
                  onClick: onClose,
                  title: "Fechar",
                  style: {
                    width: 28,
                    height: 28,
                    display: "flex",
                    alignItems: "center",
                    justifyContent: "center",
                    border: "none",
                    background: "none",
                    color: "#CBD5E1",
                    cursor: "pointer",
                    borderRadius: 6,
                    fontSize: 14
                  },
                  children: "\u2715"
                }
              )
            ]
          }
        ),
        // Message list (empty-state placeholder when there are no messages).
        /* @__PURE__ */ (0, import_jsx_runtime2.jsxs)(
          "div",
          {
            style: {
              flex: 1,
              overflowY: "auto",
              padding: 12,
              display: "flex",
              flexDirection: "column",
              gap: 6
            },
            children: [
              messages.length === 0 ? /* @__PURE__ */ (0, import_jsx_runtime2.jsxs)(
                "div",
                {
                  style: {
                    flex: 1,
                    display: "flex",
                    flexDirection: "column",
                    alignItems: "center",
                    justifyContent: "center",
                    color: "#94A3B8",
                    gap: 8
                  },
                  children: [
                    /* @__PURE__ */ (0, import_jsx_runtime2.jsx)("span", { style: { fontSize: 32 }, children: "\u{1F4AC}" }),
                    /* @__PURE__ */ (0, import_jsx_runtime2.jsx)("span", { style: { fontSize: 13 }, children: connected ? `Diga algo para a ${assistantName}` : "Conecte-se para conversar" })
                  ]
                }
              ) : messages.map((msg, i) => /* @__PURE__ */ (0, import_jsx_runtime2.jsx)(
                "div",
                {
                  style: {
                    display: "flex",
                    maxWidth: 280,
                    alignSelf: msg.role === "user" /* User */ ? "flex-end" : msg.role === "system" /* System */ ? "center" : "flex-start"
                  },
                  children: /* @__PURE__ */ (0, import_jsx_runtime2.jsx)(
                    "div",
                    {
                      style: {
                        padding: "8px 12px",
                        borderRadius: 12,
                        fontSize: 13,
                        lineHeight: 1.5,
                        wordBreak: "break-word",
                        ..._bubbleStyle(msg.role)
                      },
                      children: msg.text
                    }
                  )
                },
                i
              )),
              /* @__PURE__ */ (0, import_jsx_runtime2.jsx)("div", { ref: messagesEndRef })
            ]
          }
        ),
        // Input row: text field, optional mic toggle, send button.
        /* @__PURE__ */ (0, import_jsx_runtime2.jsxs)(
          "div",
          {
            style: {
              padding: "6px 8px 10px 12px",
              display: "flex",
              alignItems: "center",
              gap: 6,
              borderTop: "1px solid #E2E8F0"
            },
            children: [
              /* @__PURE__ */ (0, import_jsx_runtime2.jsx)(
                "input",
                {
                  type: "text",
                  value: input,
                  onChange: (e) => setInput(e.target.value),
                  onKeyDown: handleKeyDown,
                  disabled: !connected || micActive,
                  placeholder: micActive ? "Gravando... clique para parar" : "Digite uma mensagem...",
                  style: {
                    flex: 1,
                    padding: "10px 12px",
                    fontSize: 13,
                    fontFamily: "inherit",
                    border: "1px solid #E2E8F0",
                    borderRadius: 10,
                    background: "#F8FAFC",
                    color: micActive ? "#FF4060" : "#1E293B",
                    outline: "none",
                    opacity: connected ? 1 : 0.5
                  }
                }
              ),
              voiceSupported && /* @__PURE__ */ (0, import_jsx_runtime2.jsx)(
                "button",
                {
                  onClick: toggleMic,
                  disabled: !connected,
                  style: {
                    width: 36,
                    height: 36,
                    display: "flex",
                    alignItems: "center",
                    justifyContent: "center",
                    border: "none",
                    borderRadius: "50%",
                    background: micActive ? "#FF4060" : "#3B82F6",
                    color: "#fff",
                    cursor: connected ? "pointer" : "not-allowed",
                    flexShrink: 0
                  },
                  children: /* @__PURE__ */ (0, import_jsx_runtime2.jsx)("svg", { width: "18", height: "18", viewBox: "0 0 24 24", fill: "currentColor", children: micActive ? /* @__PURE__ */ (0, import_jsx_runtime2.jsx)("rect", { x: "6", y: "6", width: "12", height: "12", rx: "2" }) : /* @__PURE__ */ (0, import_jsx_runtime2.jsx)("path", { d: "M12 14c1.66 0 3-1.34 3-3V5c0-1.66-1.34-3-3-3S9 3.34 9 5v6c0 1.66 1.34 3 3 3zm-1-9c0-.55.45-1 1-1s1 .45 1 1v6c0 .55-.45 1-1 1s-1-.45-1-1V5zm6 6c0 2.76-2.24 5-5 5s-5-2.24-5-5H5c0 3.53 2.61 6.43 6 6.92V21h2v-3.08c3.39-.49 6-3.39 6-6.92h-2z" }) })
                }
              ),
              /* @__PURE__ */ (0, import_jsx_runtime2.jsx)(
                "button",
                {
                  onClick: handleSend,
                  disabled: !connected || !input.trim() || micActive,
                  style: {
                    width: 36,
                    height: 36,
                    display: "flex",
                    alignItems: "center",
                    justifyContent: "center",
                    border: "none",
                    borderRadius: "50%",
                    background: connected && input.trim() && !micActive ? "#3B82F6" : "#CBD5E1",
                    color: "#fff",
                    cursor: connected && input.trim() && !micActive ? "pointer" : "not-allowed",
                    flexShrink: 0,
                    fontSize: 16
                  },
                  children: "\u27A4"
                }
              )
            ]
          }
        )
      ]
    }
  );
}
1879
// Chat bubble styling by role: user = blue, agent = light slate, system =
// amber italic centered note. Unknown roles yield undefined, which the
// caller's object spread safely ignores.
function _bubbleStyle(role) {
  if (role === "user" /* User */) {
    return { background: "#3B82F6", color: "#fff" };
  }
  if (role === "agent" /* Agent */) {
    return { background: "#F1F5F9", color: "#1E293B" };
  }
  if (role === "system" /* System */) {
    return {
      background: "rgba(245,158,11,0.08)",
      color: "#F59E0B",
      fontSize: 11,
      fontStyle: "italic",
      textAlign: "center",
      maxWidth: "100%"
    };
  }
}
1896
// Header underline color per active status; "transparent" for statuses
// without a dedicated color (idle, disconnected, unknown).
function _statusBorderColor(status) {
  const palette = new Map([
    ["thinking", "#8B5CF6"],
    ["executing", "#F59E0B"],
    ["speaking", "#FFB347"],
    ["listening", "#00D4FF"],
    ["recording", "#FF4060"]
  ]);
  return palette.get(status) ?? "transparent";
}
1912
// Status caption text color; muted slate for statuses without a dedicated
// color (idle, disconnected, unknown).
function _statusColor(status) {
  const palette = new Map([
    ["thinking", "#8B5CF6"],
    ["executing", "#F59E0B"],
    ["speaking", "#FFB347"],
    ["listening", "#00D4FF"],
    ["recording", "#FF4060"]
  ]);
  return palette.get(status) ?? "#94A3B8";
}
1928
+
1929
+ // src/components/VocallFab.tsx
1930
+ var import_jsx_runtime3 = require("react/jsx-runtime");
1931
// FAB background + glow shadow per client status; "disconnected" overrides
// any status while the control socket is down (see VocallFab).
var FAB_COLORS = {
  disconnected: { bg: "#94A3B8", shadow: "rgba(148,163,184,0.3)" },
  idle: { bg: "#3B82F6", shadow: "rgba(59,130,246,0.35)" },
  thinking: { bg: "#8B5CF6", shadow: "rgba(139,92,246,0.4)" },
  executing: { bg: "#F59E0B", shadow: "rgba(245,158,11,0.4)" },
  speaking: { bg: "#FFB347", shadow: "rgba(255,179,71,0.4)" },
  listening: { bg: "#00D4FF", shadow: "rgba(0,212,255,0.4)" },
  recording: { bg: "#FF4060", shadow: "rgba(255,64,96,0.4)" }
};
1940
// Floating action button reflecting the assistant's connection/status:
// color comes from FAB_COLORS, and the icon is a spinner (busy states),
// a microphone (voice states), or a robot emoji (everything else).
// `style` overrides are spread last so callers can reposition/resize it.
function VocallFab({ onClick, className, style }) {
  const { status, connected } = useVocall();
  const paletteKey = connected ? status : "disconnected";
  const palette = FAB_COLORS[paletteKey] ?? FAB_COLORS.idle;
  const busy = status === "thinking" /* Thinking */ || status === "executing" /* Executing */;
  const voiceActive = status === "listening" /* Listening */ || status === "recording" /* Recording */ || status === "speaking" /* Speaking */;
  let icon;
  if (busy) {
    icon = /* @__PURE__ */ (0, import_jsx_runtime3.jsx)(
      "div",
      {
        style: {
          width: 24,
          height: 24,
          border: "2px solid rgba(255,255,255,0.3)",
          borderTopColor: "#fff",
          borderRadius: "50%",
          animation: "vocall-spin 0.8s linear infinite"
        }
      }
    );
  } else if (voiceActive) {
    icon = /* @__PURE__ */ (0, import_jsx_runtime3.jsx)("svg", { width: "24", height: "24", viewBox: "0 0 24 24", fill: "currentColor", children: /* @__PURE__ */ (0, import_jsx_runtime3.jsx)("path", { d: "M12 14c1.66 0 3-1.34 3-3V5c0-1.66-1.34-3-3-3S9 3.34 9 5v6c0 1.66 1.34 3 3 3zm-1-9c0-.55.45-1 1-1s1 .45 1 1v6c0 .55-.45 1-1 1s-1-.45-1-1V5zm6 6c0 2.76-2.24 5-5 5s-5-2.24-5-5H5c0 3.53 2.61 6.43 6 6.92V21h2v-3.08c3.39-.49 6-3.39 6-6.92h-2z" }) });
  } else {
    icon = /* @__PURE__ */ (0, import_jsx_runtime3.jsx)("span", { children: "\u{1F916}" });
  }
  return /* @__PURE__ */ (0, import_jsx_runtime3.jsx)(
    "button",
    {
      className: `vocall-fab ${className || ""}`,
      onClick,
      style: {
        position: "fixed",
        bottom: 16,
        right: 16,
        width: 52,
        height: 52,
        borderRadius: "50%",
        background: palette.bg,
        color: "#fff",
        border: "none",
        cursor: "pointer",
        display: "flex",
        alignItems: "center",
        justifyContent: "center",
        boxShadow: `0 4px 12px ${palette.shadow}`,
        transition: "all 0.3s ease-in-out",
        zIndex: 1e3,
        fontSize: 24,
        ...style
      },
      children: icon
    }
  );
}
1987
+
1988
// src/components/VocallStatus.tsx
var import_jsx_runtime4 = require("react/jsx-runtime");
// Pill label plus background/foreground colors per transient status.
// Labels are Portuguese UI strings shipped by the package. Statuses with
// no entry here (e.g. idle, disconnected) cause VocallStatusPill to render
// nothing.
var STATUS_CONFIG = {
  thinking: { label: "Pensando...", bg: "rgba(139,92,246,0.12)", color: "#8B5CF6" },
  executing: { label: "Executando...", bg: "rgba(245,158,11,0.12)", color: "#F59E0B" },
  speaking: { label: "Falando...", bg: "rgba(255,179,71,0.12)", color: "#FFB347" },
  listening: { label: "Ouvindo...", bg: "rgba(0,212,255,0.12)", color: "#00D4FF" },
  recording: { label: "Gravando...", bg: "rgba(239,68,68,0.12)", color: "#FF4060" }
};
1997
// Small rounded pill showing the current transient status (spinner + label).
// Renders nothing when disconnected, when the status is idle/disconnected,
// or when the status has no entry in STATUS_CONFIG.
function VocallStatusPill({ className, style }) {
  const { status, connected } = useVocall();
  const config = STATUS_CONFIG[status];
  const hidden = !config || !connected || status === "idle" /* Idle */ || status === "disconnected" /* Disconnected */;
  if (hidden) {
    return null;
  }
  // Tiny ring spinner tinted via currentColor so it follows config.color.
  const spinner = /* @__PURE__ */ (0, import_jsx_runtime4.jsx)(
    "div",
    {
      style: {
        width: 12,
        height: 12,
        border: "1.5px solid currentColor",
        borderTopColor: "transparent",
        borderRadius: "50%",
        animation: "vocall-spin 0.8s linear infinite"
      }
    }
  );
  const label = /* @__PURE__ */ (0, import_jsx_runtime4.jsx)("span", { children: config.label });
  return /* @__PURE__ */ (0, import_jsx_runtime4.jsxs)(
    "div",
    {
      className: `vocall-status-pill ${className || ""}`,
      style: {
        display: "inline-flex",
        alignItems: "center",
        gap: 6,
        padding: "4px 10px",
        borderRadius: 12,
        fontSize: 11,
        fontWeight: 500,
        background: config.bg,
        color: config.color,
        ...style
      },
      children: [spinner, label]
    }
  );
}
2038
// Annotate the CommonJS export names for ESM import in node:
// NOTE: `0 &&` makes this assignment dead code at runtime; it exists only so
// Node's static CJS named-export detection can see the export names and allow
// `import { VocallClient } from "..."` from ESM. Do not remove or "simplify".
0 && (module.exports = {
  ChatRole,
  FieldType,
  FrameSplitter,
  VocallChat,
  VocallClient,
  VocallFab,
  VocallProvider,
  VocallStatus,
  VocallStatusPill,
  WebVoiceService,
  useVocall,
  useVocallAction,
  useVocallClient,
  useVocallField,
  useVocallVoice
});
//# sourceMappingURL=index.cjs.map