ttp-agent-sdk 2.34.3 → 2.34.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/agent-widget.dev.js
CHANGED
@@ -23031,9 +23031,18 @@ var AudioPlayer = /*#__PURE__*/function (_EventEmitter) {
 
 // Queue for prepared AudioBuffers (ready to schedule)
 _this.preparedBuffer = [];
+
+// Maximum buffer sizes to prevent unbounded memory growth
+// If backend sends sentences faster than playback, oldest frames are dropped
+_this.MAX_PREPARED_BUFFER_SIZE = 200; // Max prepared frames (~10-12 seconds at 600ms per frame)
+_this.MAX_PCM_CHUNK_QUEUE_SIZE = 50; // Max raw PCM chunks
+
 _this.isProcessingPcmQueue = false;
 _this.isSchedulingFrames = false;
 
+// Timeout to detect empty sentences (audio_start but no chunks)
+_this._emptySentenceTimeout = null;
+
 // Minimal scheduling delay to avoid scheduling audio in the past
 // REMOVED: Lookahead buffering was causing quality degradation due to browser resampling/timing issues
 // Now we only schedule with minimal delay (20ms) just enough to avoid gaps

@@ -23049,6 +23058,10 @@ var AudioPlayer = /*#__PURE__*/function (_EventEmitter) {
 // Cleared when markNewSentence() is called (signals new audio is starting)
 _this._isStopped = false;
 
+// Track current sentence ID to reject chunks from previous sentences
+// Incremented each time markNewSentence() is called
+_this._currentSentenceId = 0;
+
 // Codec registry
 
 _this.codecs = {
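The hunk above only declares `_currentSentenceId`; the code that consumes it is not part of the hunks shown here. As a rough illustration of the pattern the comment describes (reject chunks that belong to a sentence that has already been superseded), here is a minimal, hypothetical sketch — the wrapper names are illustrative and not taken from the SDK:

    // Hypothetical sketch, not SDK source: tag chunks with the sentence id that was
    // current when they were produced, and drop any chunk whose id is stale.
    class SentenceGate {
      constructor() {
        this.currentSentenceId = 0; // incremented on every new sentence
      }
      markNewSentence() {
        this.currentSentenceId += 1;
        return this.currentSentenceId;
      }
      accepts(chunkSentenceId) {
        return chunkSentenceId === this.currentSentenceId;
      }
    }

    const gate = new SentenceGate();
    const firstId = gate.markNewSentence();
    const secondId = gate.markNewSentence(); // barge-in / next sentence arrives
    console.log(gate.accepts(firstId));  // false - chunk from the interrupted sentence
    console.log(gate.accepts(secondId)); // true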
@@ -23185,7 +23198,7 @@ var AudioPlayer = /*#__PURE__*/function (_EventEmitter) {
 value: (function () {
 var _playChunk = _asyncToGenerator(/*#__PURE__*/_regenerator().m(function _callee(pcmData) {
 var _this3 = this;
-var preparedFrame, _this$audioContext;
+var preparedFrame, dropped, _this$audioContext;
 return _regenerator().w(function (_context) {
 while (1) switch (_context.n) {
 case 0:

@@ -23199,6 +23212,20 @@ var AudioPlayer = /*#__PURE__*/function (_EventEmitter) {
 // Pre-process frame immediately (convert to AudioBuffer)
 preparedFrame = this.prepareChunk(pcmData);
 if (preparedFrame) {
+// CRITICAL: Clear empty sentence timeout since chunks are arriving
+// This resets the timer for the current sentence
+if (this._emptySentenceTimeout) {
+clearTimeout(this._emptySentenceTimeout);
+this._emptySentenceTimeout = null;
+}
+
+// CRITICAL: Prevent unbounded buffer growth
+// If backend sends sentences faster than playback, drop oldest frames
+if (this.preparedBuffer.length >= this.MAX_PREPARED_BUFFER_SIZE) {
+dropped = this.preparedBuffer.shift(); // Drop oldest frame
+console.warn("\u26A0\uFE0F AudioPlayer: preparedBuffer at max size (".concat(this.MAX_PREPARED_BUFFER_SIZE, "), dropped oldest frame"));
+}
+
 // Add prepared frame to buffer
 this.preparedBuffer.push(preparedFrame);
 
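The guard added to `playChunk` above is a plain drop-oldest bound on the prepared-frame queue. A stand-alone sketch of the same policy (the constant mirrors `MAX_PREPARED_BUFFER_SIZE` from the diff; the helper function itself is hypothetical):

    const MAX_PREPARED_BUFFER_SIZE = 200;

    // Keep memory bounded even when the backend produces audio faster than playback:
    // once the cap is hit, the oldest prepared frame is discarded before pushing.
    function pushBounded(preparedBuffer, frame) {
      if (preparedBuffer.length >= MAX_PREPARED_BUFFER_SIZE) {
        const dropped = preparedBuffer.shift();
        console.warn(`preparedBuffer at max size (${MAX_PREPARED_BUFFER_SIZE}), dropped oldest frame`, dropped);
      }
      preparedBuffer.push(frame);
    }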
@@ -23206,7 +23233,7 @@ var AudioPlayer = /*#__PURE__*/function (_EventEmitter) {
 
 // Use requestAnimationFrame to avoid blocking, but ensure scheduling happens
 
-if (!this.isSchedulingFrames) {
+if (!this.isSchedulingFrames && !this._isStopped) {
 // Schedule immediately if not already scheduling
 
 this.schedulePreparedFrames();

@@ -23218,7 +23245,7 @@ var AudioPlayer = /*#__PURE__*/function (_EventEmitter) {
 // Use a short timeout to ensure we check again after current scheduling completes
 
 setTimeout(function () {
-if (_this3.preparedBuffer.length > 0 && !_this3.isSchedulingFrames) {
+if (_this3.preparedBuffer.length > 0 && !_this3.isSchedulingFrames && !_this3._isStopped) {
 _this3.schedulePreparedFrames();
 }
 }, 5); // Very short delay to check after current scheduling completes

@@ -23357,7 +23384,7 @@ var AudioPlayer = /*#__PURE__*/function (_EventEmitter) {
 value: (function () {
 var _schedulePreparedFrames = _asyncToGenerator(/*#__PURE__*/_regenerator().m(function _callee2() {
 var _this4 = this;
-var queuedFrames, targetLookaheadFrames, _this$audioContext2, _this$audioContext3, _this$audioContext4, _this$audioContext5, scheduledCount, _loop, _t;
+var queuedFrames, targetLookaheadFrames, _this$audioContext2, _this$audioContext3, _this$audioContext4, _this$audioContext5, scheduledCount, _loop, _ret, _t;
 return _regenerator().w(function (_context3) {
 while (1) switch (_context3.p = _context3.n) {
 case 0:

@@ -23409,14 +23436,21 @@ var AudioPlayer = /*#__PURE__*/function (_EventEmitter) {
 return _regenerator().w(function (_context2) {
 while (1) switch (_context2.n) {
 case 0:
+if (!_this4._isStopped) {
+_context2.n = 1;
+break;
+}
+console.log('🛑 AudioPlayer: Stopping frame scheduling - playback was stopped');
+return _context2.a(2, 0);
+case 1:
 // Get next prepared frame
 preparedFrame = _this4.preparedBuffer.shift();
 if (preparedFrame) {
-_context2.n =
+_context2.n = 2;
 break;
 }
-return _context2.a(2,
-case
+return _context2.a(2, 0);
+case 2:
 // Create source and schedule playback
 source = _this4.audioContext.createBufferSource();
 source.buffer = preparedFrame.buffer;
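The `case` renumbering inside `_loop` is Babel's regenerator state machine absorbing one new early exit; in un-transpiled form the change is roughly a stop check at the top of the per-frame body that returns a sentinel the outer loop can test (the `_ret === 0` check added a few hunks below). The sketch below is an approximation with hypothetical names, not the SDK's actual source:

    // Approximate pre-transpilation shape of the loop body in schedulePreparedFrames().
    function scheduleNextFrame(player) {
      if (player._isStopped) {
        console.log('AudioPlayer: Stopping frame scheduling - playback was stopped');
        return 0; // sentinel: tells the outer loop to stop scheduling
      }
      const preparedFrame = player.preparedBuffer.shift();
      if (!preparedFrame) {
        return 0; // nothing left to schedule
      }
      const source = player.audioContext.createBufferSource();
      source.buffer = preparedFrame.buffer;
      // ...connect, attach onended, and source.start(...) as in the surrounding hunks
      return 1;
    }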
@@ -23493,29 +23527,46 @@ var AudioPlayer = /*#__PURE__*/function (_EventEmitter) {
 // Track when this buffer finishes (for cleanup only)
 
 source.onended = function () {
-//
+// CRITICAL: Check if playback was stopped before processing cleanup
+// This prevents race conditions where onended fires after stopImmediate() was called
+if (_this4._isStopped) {
+// Playback was stopped, ignore this callback
+return;
+}
 
+// CRITICAL: Only process if source is still in scheduledSources set
+// This prevents race conditions where stopImmediate() cleared the set but callback fires later
+if (!_this4.scheduledSources.has(source)) {
+// Source was already removed (probably by stopImmediate), ignore this callback
+return;
+}
+
+// Remove from tracked sources
 _this4.scheduledSources.delete(source);
-
+
+// Only decrement if we're still playing and buffer count is positive
+if (!_this4._isStopped && _this4.scheduledBuffers > 0) {
+_this4.scheduledBuffers--;
+}
 
 // If no more scheduled buffers and no prepared frames, playback is complete
 
-if (_this4.scheduledBuffers === 0 && _this4.preparedBuffer.length === 0 && _this4.pcmChunkQueue.length === 0) {
+if (_this4.scheduledBuffers === 0 && _this4.preparedBuffer.length === 0 && _this4.pcmChunkQueue.length === 0 && !_this4._isStopped) {
 _this4.isPlaying = false;
 _this4.isSchedulingFrames = false;
 console.log('🛑 AudioPlayer: Emitting playbackStopped event (all buffers finished)');
 _this4.emit('playbackStopped');
-} else if (_this4.preparedBuffer.length > 0) {
+} else if (_this4.preparedBuffer.length > 0 && !_this4._isStopped) {
 // More frames available, schedule them immediately
 
 // Use setTimeout to avoid blocking, but schedule quickly
 
 setTimeout(function () {
-if (_this4.preparedBuffer.length > 0 && !_this4.isSchedulingFrames) {
+if (_this4.preparedBuffer.length > 0 && !_this4.isSchedulingFrames && !_this4._isStopped) {
 _this4.schedulePreparedFrames();
 }
 }, 0);
-} else if (_this4.scheduledBuffers > 0) {
+} else if (_this4.scheduledBuffers > 0 && !_this4._isStopped) {
 // No more prepared frames but still have scheduled buffers playing
 
 // Set up a check to schedule new frames when they arrive
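A stopped `AudioBufferSourceNode` still fires `onended`, so the callback now re-checks shared state before touching any counters. The distilled pattern, using the field names visible in the diff inside a hypothetical helper (a simplification, not the SDK's source):

    function attachEndedCleanup(player, source) {
      source.onended = () => {
        // Late callback after stopImmediate(): do nothing.
        if (player._isStopped) return;
        // stopImmediate() already removed this source from the set: do nothing.
        if (!player.scheduledSources.has(source)) return;

        player.scheduledSources.delete(source);
        // Never drive the counter negative if callbacks race with a stop.
        if (player.scheduledBuffers > 0) {
          player.scheduledBuffers--;
        }
        // Simplified end-of-playback check; the real code also inspects
        // preparedBuffer and pcmChunkQueue before emitting.
        if (player.scheduledBuffers === 0 && player.preparedBuffer.length === 0) {
          player.emit('playbackStopped');
        }
      };
    }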
@@ -23523,9 +23574,13 @@ var AudioPlayer = /*#__PURE__*/function (_EventEmitter) {
 // Keep checking periodically until we have no more scheduled buffers
 
 var _checkForMoreFrames = function checkForMoreFrames() {
+// CRITICAL: Check if stopped before scheduling
+if (_this4._isStopped) {
+return;
+}
 if (_this4.preparedBuffer.length > 0 && !_this4.isSchedulingFrames && _this4.scheduledBuffers > 0) {
 _this4.schedulePreparedFrames();
-} else if (_this4.scheduledBuffers > 0) {
+} else if (_this4.scheduledBuffers > 0 && !_this4._isStopped) {
 // Keep checking - frames might arrive soon
 
 setTimeout(_checkForMoreFrames, 10);

@@ -23539,7 +23594,7 @@ var AudioPlayer = /*#__PURE__*/function (_EventEmitter) {
 console.log('🎵 AudioPlayer: Emitting playbackStarted event');
 _this4.emit('playbackStarted');
 }
-case
+case 3:
 return _context2.a(2);
 }
 }, _loop);

@@ -23551,7 +23606,8 @@ var AudioPlayer = /*#__PURE__*/function (_EventEmitter) {
 }
 return _context3.d(_regeneratorValues(_loop()), 6);
 case 6:
-
+_ret = _context3.v;
+if (!(_ret === 0)) {
 _context3.n = 7;
 break;
 }

@@ -23568,11 +23624,11 @@ var AudioPlayer = /*#__PURE__*/function (_EventEmitter) {
 
 // Use requestAnimationFrame for smooth scheduling without blocking
 
-if (this.preparedBuffer.length > 0) {
+if (this.preparedBuffer.length > 0 && !this._isStopped) {
 // More frames arrived, schedule them immediately
 
 requestAnimationFrame(function () {
-if (_this4.preparedBuffer.length > 0 && !_this4.isSchedulingFrames) {
+if (_this4.preparedBuffer.length > 0 && !_this4.isSchedulingFrames && !_this4._isStopped) {
 _this4.schedulePreparedFrames();
 }
 });

@@ -23580,19 +23636,19 @@ var AudioPlayer = /*#__PURE__*/function (_EventEmitter) {
 
 // Always set up a periodic check if we have scheduled buffers playing
 // This ensures continuous playback even if frames arrive slowly
-if (this.scheduledBuffers > 0) {
+if (this.scheduledBuffers > 0 && !this._isStopped) {
 // Set up a periodic check to schedule new frames as they arrive
 // Use a shorter interval to catch new frames quickly
 setTimeout(function () {
-if (_this4.preparedBuffer.length > 0 && !_this4.isSchedulingFrames && _this4.scheduledBuffers > 0) {
+if (_this4.preparedBuffer.length > 0 && !_this4.isSchedulingFrames && _this4.scheduledBuffers > 0 && !_this4._isStopped) {
 _this4.schedulePreparedFrames();
-} else if (_this4.scheduledBuffers > 0) {
+} else if (_this4.scheduledBuffers > 0 && !_this4._isStopped) {
 // Keep checking even if no frames yet - they might arrive soon
 
 // Recursively check until we have no more scheduled buffers
 
 setTimeout(function () {
-if (_this4.preparedBuffer.length > 0 && !_this4.isSchedulingFrames && _this4.scheduledBuffers > 0) {
+if (_this4.preparedBuffer.length > 0 && !_this4.isSchedulingFrames && _this4.scheduledBuffers > 0 && !_this4._isStopped) {
 _this4.schedulePreparedFrames();
 }
 }, 10);

@@ -23659,16 +23715,23 @@ var AudioPlayer = /*#__PURE__*/function (_EventEmitter) {
 return this.waitForAudioContextReady();
 case 4:
 if (!(this.pcmChunkQueue.length > 0)) {
-_context4.n =
+_context4.n = 7;
+break;
+}
+if (!this._isStopped) {
+_context4.n = 5;
 break;
 }
+console.log('🛑 AudioPlayer: Stopping PCM queue processing - playback was stopped');
+return _context4.a(3, 7);
+case 5:
 pcmData = this.pcmChunkQueue.shift();
 if (pcmData) {
-_context4.n =
+_context4.n = 6;
 break;
 }
 return _context4.a(3, 4);
-case
+case 6:
 // Ensure even byte count for 16-bit PCM
 processedData = pcmData;
 if (pcmData.byteLength % 2 !== 0) {

@@ -23762,23 +23825,23 @@ var AudioPlayer = /*#__PURE__*/function (_EventEmitter) {
 }
 _context4.n = 4;
 break;
-case
+case 7:
 // end while loop
 
 // All chunks scheduled, reset processing flag
 this.isProcessingPcmQueue = false;
-_context4.n =
+_context4.n = 9;
 break;
-case
-_context4.p =
+case 8:
+_context4.p = 8;
 _t2 = _context4.v;
 console.error('❌ AudioPlayer v2: Error playing chunk:', _t2);
 this.emit('playbackError', _t2);
 this.isProcessingPcmQueue = false;
-case
+case 9:
 return _context4.a(2);
 }
-}, _callee3, this, [[3,
+}, _callee3, this, [[3, 8]]);
 }));
 function processPcmQueue() {
 return _processPcmQueue.apply(this, arguments);

@@ -24638,6 +24701,15 @@ var AudioPlayer = /*#__PURE__*/function (_EventEmitter) {
 console.log(' scheduledSources.size:', this.scheduledSources.size);
 console.log(' scheduledBuffers:', this.scheduledBuffers);
 
+// CRITICAL: Set stopped flag FIRST to prevent new audio from being queued/scheduled
+// This prevents race conditions where audio chunks arrive after stop but before sources are stopped
+// The flag will be cleared when markNewSentence() is called (signals new audio is starting)
+this._isStopped = true;
+
+// CRITICAL: Stop scheduling immediately to prevent frames from being scheduled
+// This ensures schedulePreparedFrames() loop will exit on next _isStopped check
+this.isSchedulingFrames = false;
+
 // Stop current source (legacy queue-based system)
 
 if (this.currentSource) {
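The ordering in `stopImmediate()` matters more than the individual assignments: `_isStopped` is published before any source is touched, so every timer, `requestAnimationFrame` callback, and regenerator loop guarded in the earlier hunks bails out the next time it runs. A compressed sketch of that ordering (hypothetical wrappers; field names follow the diff):

    function beginStop(player) {
      // 1. Publish the stop first - anything async that wakes up later sees it.
      player._isStopped = true;
      player.isSchedulingFrames = false;
      // 2. Only then tear down the already-scheduled sources (next hunk).
    }

    // Every async entry point touched in this release re-checks the flag:
    function onLateTimerOrFrame(player) {
      if (player._isStopped) return; // would otherwise schedule more audio
      // ...scheduling work
    }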
@@ -24659,27 +24731,31 @@ var AudioPlayer = /*#__PURE__*/function (_EventEmitter) {
 if (this.scheduledSources.size > 0) {
 console.log(" Stopping ".concat(this.scheduledSources.size, " scheduled sources..."));
 var stoppedCount = 0;
-var _iterator = _createForOfIteratorHelper(this.scheduledSources),
-_step;
-try {
-for (_iterator.s(); !(_step = _iterator.n()).done;) {
-var source = _step.value;
-try {
-source.stop();
-stoppedCount++;
-} catch (e) {
-// Ignore if already stopped or not started yet
 
-
-
+// Store sources to stop and count before clearing the set
+var sourcesToStop = Array.from(this.scheduledSources);
+var sourcesCount = sourcesToStop.length;
+
+// Clear the set BEFORE stopping sources to prevent onended callbacks from modifying it
+// This ensures onended callbacks will see empty set and return early
+this.scheduledSources.clear();
+
+// CRITICAL: Reset scheduledBuffers to 0 BEFORE stopping sources
+// This prevents onended callbacks from decrementing it below 0
+// Any onended callbacks that fire will see scheduledSources is empty and return early
+this.scheduledBuffers = 0;
+for (var _i = 0, _sourcesToStop = sourcesToStop; _i < _sourcesToStop.length; _i++) {
+var source = _sourcesToStop[_i];
+try {
+source.stop();
+stoppedCount++;
+} catch (e) {
+// Ignore if already stopped or not started yet
+
+console.log(' Source already stopped or not started:', e.message);
 }
-} catch (err) {
-_iterator.e(err);
-} finally {
-_iterator.f();
 }
-console.log(" Stopped ".concat(stoppedCount, " sources"));
-this.scheduledSources.clear();
+console.log(" Stopped ".concat(stoppedCount, " sources (cleared ").concat(sourcesCount, " from scheduledSources)"));
 }
 
 // Clear state
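The rewritten teardown snapshots the set, clears it, and zeroes the counter before calling `stop()`, because `stop()` makes each source fire `onended`, and those callbacks (see the earlier hunk) bail out once the source is no longer in `scheduledSources`. A condensed sketch of that order of operations, with a hypothetical wrapper:

    function stopScheduledSources(player) {
      const sourcesToStop = Array.from(player.scheduledSources); // snapshot first
      player.scheduledSources.clear();   // onended callbacks now see an empty set
      player.scheduledBuffers = 0;       // reset before stop() so callbacks cannot go below 0

      let stoppedCount = 0;
      for (const source of sourcesToStop) {
        try {
          source.stop();
          stoppedCount++;
        } catch (e) {
          // Already stopped, or start() was never called - safe to ignore.
        }
      }
      return stoppedCount;
    }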
@@ -24695,15 +24771,15 @@ var AudioPlayer = /*#__PURE__*/function (_EventEmitter) {
 this.isProcessingPcmQueue = false;
 this.isSchedulingFrames = false;
 
-//
-
-
-
+// Clear empty sentence timeout (barge-in means sentence was interrupted, not empty)
+if (this._emptySentenceTimeout) {
+clearTimeout(this._emptySentenceTimeout);
+this._emptySentenceTimeout = null;
+}
 
 // Reset scheduling properties
-
+// Note: scheduledBuffers was already reset to 0 above when clearing scheduledSources
 this.nextStartTime = 0;
-this.scheduledBuffers = 0;
 
 // Clear transcript state
 this.clearTranscriptState();

@@ -24723,14 +24799,77 @@ var AudioPlayer = /*#__PURE__*/function (_EventEmitter) {
 }, {
 key: "markNewSentence",
 value: function markNewSentence(text) {
-
+var _this9 = this;
+var wasStopped = this._isStopped;
+var isCurrentlyPlaying = this.isPlaying || this.scheduledSources.size > 0;
+
+// CRITICAL: Clear stopped flag when new audio starts (after barge-in)
 // This allows new audio chunks to be queued after barge-in
 if (this._isStopped) {
-console.log('🛑 AudioPlayer: Clearing stopped flag - new audio starting');
+console.log('🛑 AudioPlayer: Clearing stopped flag - new audio starting after barge-in');
 this._isStopped = false;
+
+// CRITICAL: Reset scheduling state when starting a new sentence after a stop
+// This ensures the new sentence starts immediately without delay
+// Reset nextStartTime so first chunk schedules immediately (not in the past)
+this.nextStartTime = 0;
+
+// CRITICAL: Reset scheduledBuffers to 0, but ensure it's not negative
+// This accounts for any onended callbacks that might fire from stopped sources
+// If scheduledBuffers is negative, it means onended callbacks fired after stopImmediate()
+// In that case, we should reset to 0 to start fresh
+if (this.scheduledBuffers < 0) {
+console.log("\uD83D\uDD04 AudioPlayer: scheduledBuffers was negative (".concat(this.scheduledBuffers, "), resetting to 0"));
+}
+this.scheduledBuffers = 0;
+
+// CRITICAL: ALWAYS clear preparedBuffer after barge-in - any remaining chunks are from previous sentence
+// This prevents old audio from playing when new sentence starts after interruption
+if (this.preparedBuffer.length > 0) {
+console.log("\uD83D\uDD04 AudioPlayer: Clearing ".concat(this.preparedBuffer.length, " prepared frames from previous sentence (after barge-in)"));
+}
+this.preparedBuffer = [];
+
+// CRITICAL: Also clear pcmChunkQueue to prevent raw chunks from previous sentence being processed
+if (this.pcmChunkQueue.length > 0) {
+console.log("\uD83D\uDD04 AudioPlayer: Clearing ".concat(this.pcmChunkQueue.length, " raw PCM chunks from previous sentence (after barge-in)"));
+}
+this.pcmChunkQueue = [];
+this.isProcessingPcmQueue = false;
+console.log('🔄 AudioPlayer: Reset scheduling state for new sentence (after barge-in)');
+} else if (isCurrentlyPlaying) {
+// New sentence received while audio is currently playing (no barge-in)
+// Don't clear buffers or reset state - let current sentence finish, then new one will play
+console.log("\uD83D\uDCDD AudioPlayer: New sentence queued while audio playing - will start after current sentence finishes");
 }
+
+// Always update pending sentence text (for transcript display)
 this.pendingSentenceText = text;
-console.log("\uD83D\uDCDD AudioPlayer: New sentence marked: \"".concat(text.substring(0, 40), "...\""));
+console.log("\uD83D\uDCDD AudioPlayer: New sentence marked: \"".concat(text.substring(0, 40), "...\" (wasStopped: ").concat(wasStopped, ", isPlaying: ").concat(isCurrentlyPlaying, ")"));
+
+// CRITICAL: Set timeout to detect empty sentences (audio_start but no chunks)
+// This prevents queue blocking if backend sends audio_start but no audio chunks follow
+if (this._emptySentenceTimeout) {
+clearTimeout(this._emptySentenceTimeout);
+}
+var sentenceText = text; // Capture for timeout callback
+this._emptySentenceTimeout = setTimeout(function () {
+// Check if this sentence still has no chunks after timeout
+if (_this9.pendingSentenceText === sentenceText && _this9.scheduledBuffers === 0 && _this9.preparedBuffer.length === 0 && _this9.pcmChunkQueue.length === 0 && !_this9._isStopped) {
+console.warn("\u26A0\uFE0F AudioPlayer: Empty sentence detected after 5s timeout - no chunks received for: \"".concat(sentenceText.substring(0, 40), "...\""));
+// Clear pending sentence to unblock next sentence
+if (_this9.pendingSentenceText === sentenceText) {
+_this9.pendingSentenceText = null;
+}
+// Emit playbackStopped to allow next sentence to start
+// Only if we're not currently playing (to avoid interrupting real playback)
+if (!_this9.isPlaying && _this9.scheduledSources.size === 0) {
+console.log('🛑 AudioPlayer: Emitting playbackStopped for empty sentence timeout');
+_this9.emit('playbackStopped');
+}
+}
+_this9._emptySentenceTimeout = null;
+}, 5000); // 5 second timeout - adjust based on expected chunk arrival rate
 }
 
 /**
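Two behaviours are folded into the new `markNewSentence()`: after a barge-in (`_isStopped` set) all leftover audio state is wiped so the next sentence starts cleanly, and a watchdog is armed so that an `audio_start` never followed by chunks cannot block the queue. A condensed, hypothetical rendering of that flow (the 5-second value mirrors the diff; everything else is illustrative, not SDK source):

    function markNewSentence(player, text, watchdogMs = 5000) {
      if (player._isStopped) {
        // Barge-in happened: anything still buffered belongs to the interrupted sentence.
        player._isStopped = false;
        player.nextStartTime = 0;
        player.scheduledBuffers = 0;       // also repairs a negative count from late onended
        player.preparedBuffer = [];
        player.pcmChunkQueue = [];
        player.isProcessingPcmQueue = false;
      }
      // With no barge-in and audio still playing, nothing is cleared:
      // the new sentence simply queues behind the current one.

      player.pendingSentenceText = text;

      // Watchdog: if no chunk ever arrives for this sentence, unblock the queue.
      if (player._emptySentenceTimeout) clearTimeout(player._emptySentenceTimeout);
      player._emptySentenceTimeout = setTimeout(() => {
        const stillEmpty = player.pendingSentenceText === text &&
          player.scheduledBuffers === 0 &&
          player.preparedBuffer.length === 0 &&
          player.pcmChunkQueue.length === 0 &&
          !player._isStopped;
        if (stillEmpty) {
          player.pendingSentenceText = null;
          if (!player.isPlaying && player.scheduledSources.size === 0) {
            player.emit('playbackStopped'); // lets the next sentence start
          }
        }
        player._emptySentenceTimeout = null;
      }, watchdogMs);
    }

The timer is disarmed again in the `playChunk` hunk above, where `_emptySentenceTimeout` is cleared as soon as a chunk for the sentence actually arrives.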
@@ -24739,35 +24878,35 @@ var AudioPlayer = /*#__PURE__*/function (_EventEmitter) {
 }, {
 key: "startTranscriptChecker",
 value: function startTranscriptChecker() {
-var
+var _this0 = this;
 if (this.isCheckingTranscripts) return;
 this.isCheckingTranscripts = true;
 console.log('📝 AudioPlayer: Transcript checker started');
 var _checkLoop = function checkLoop() {
-if (!
-var currentTime =
-var
-
+if (!_this0.isCheckingTranscripts || !_this0.audioContext) return;
+var currentTime = _this0.audioContext.currentTime;
+var _iterator = _createForOfIteratorHelper(_this0.sentenceTimings),
+_step;
 try {
-for (
-var timing =
+for (_iterator.s(); !(_step = _iterator.n()).done;) {
+var timing = _step.value;
 if (!timing.displayed && currentTime >= timing.startTime) {
 timing.displayed = true;
 console.log("\uD83D\uDCDD AudioPlayer: Display transcript at ".concat(currentTime.toFixed(3), "s: \"").concat(timing.text.substring(0, 40), "...\""));
-
+_this0.emit('transcriptDisplay', {
 text: timing.text
 });
 }
 }
 } catch (err) {
-
+_iterator.e(err);
 } finally {
-
+_iterator.f();
 }
-if (
+if (_this0.isPlaying || _this0.scheduledBuffers > 0) {
 requestAnimationFrame(_checkLoop);
 } else {
-
+_this0.isCheckingTranscripts = false;
 console.log('📝 AudioPlayer: Transcript checker stopped');
 }
 };
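Most of this hunk is the transpiler renaming the captured `this` alias (the removed lines are truncated in the viewer), but the resulting loop shape is visible: a `requestAnimationFrame` poll that flushes each sentence's transcript once `audioContext.currentTime` passes its start time. An approximate, hypothetical rendering:

    function startTranscriptChecker(player) {
      if (player.isCheckingTranscripts) return;
      player.isCheckingTranscripts = true;

      const checkLoop = () => {
        if (!player.isCheckingTranscripts || !player.audioContext) return;
        const currentTime = player.audioContext.currentTime;
        for (const timing of player.sentenceTimings) {
          if (!timing.displayed && currentTime >= timing.startTime) {
            timing.displayed = true;
            player.emit('transcriptDisplay', { text: timing.text });
          }
        }
        if (player.isPlaying || player.scheduledBuffers > 0) {
          requestAnimationFrame(checkLoop); // keep polling while audio is active
        } else {
          player.isCheckingTranscripts = false;
        }
      };
      checkLoop();
    }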
@@ -26129,15 +26268,16 @@ var VoiceSDK_v2 = /*#__PURE__*/function (_EventEmitter) {
 // Store the text in AudioPlayer for synced display when audio actually starts playing
 console.log('📝 VoiceSDK v2: Received audio_start with text:', message.text);
 
-//
-//
+// NOTE: We do NOT stop current audio here - that only happens on user barge-in (stop_playing)
+// If audio is already playing, the new sentence will queue and wait for current one to finish
+// This allows sentences to play sequentially without interruption
 if (this.audioPlayer && (this.audioPlayer.isPlaying || ((_this$audioPlayer$sch = this.audioPlayer.scheduledSources) === null || _this$audioPlayer$sch === void 0 ? void 0 : _this$audioPlayer$sch.size) > 0)) {
-console.log('
-this.audioPlayer.stopImmediate();
+console.log('📝 VoiceSDK v2: New sentence received while audio playing - will queue and wait for current sentence to finish');
 }
 if (message.text && this.audioPlayer) {
 // Use AudioPlayer's transcript sync mechanism
 // AudioPlayer will emit transcriptDisplay when audio actually starts playing (synced with audioContext.currentTime)
+// If audio is currently playing, markNewSentence will queue this sentence
 this.audioPlayer.markNewSentence(message.text);
 }
 // Also emit as message for other listeners
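The behavioural change in the `VoiceSDK_v2` handler: in 2.34.3 an `audio_start` received mid-playback called `stopImmediate()`, while in 2.34.4 it only logs and lets `markNewSentence()` queue the sentence behind the one playing. A condensed sketch of the new handling (the `message` shape is assumed from the surrounding code; the wrapper name is hypothetical):

    function handleAudioStart(sdk, message) {
      const player = sdk.audioPlayer;
      const playing = player && (player.isPlaying || player.scheduledSources?.size > 0);
      if (playing) {
        // Previously: player.stopImmediate() - the new sentence cut the current one off.
        console.log('New sentence received while audio playing - will queue');
      }
      if (message.text && player) {
        // Queues the transcript text and arms the empty-sentence watchdog.
        player.markNewSentence(message.text);
      }
    }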
|