@dialtribe/react-sdk 0.1.0-alpha.19 → 0.1.0-alpha.21
- package/dist/{dialtribe-streamer-jxyxtG7Z.d.ts → dialtribe-streamer-D9ulVBVb.d.ts} +39 -25
- package/dist/{dialtribe-streamer-Bb6LLFG2.d.mts → dialtribe-streamer-DH23BseY.d.mts} +39 -25
- package/dist/dialtribe-streamer.d.mts +1 -1
- package/dist/dialtribe-streamer.d.ts +1 -1
- package/dist/dialtribe-streamer.js +264 -87
- package/dist/dialtribe-streamer.js.map +1 -1
- package/dist/dialtribe-streamer.mjs +264 -87
- package/dist/dialtribe-streamer.mjs.map +1 -1
- package/dist/index.d.mts +1 -1
- package/dist/index.d.ts +1 -1
- package/dist/index.js +264 -87
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +264 -87
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
@@ -316,6 +316,9 @@ var WebSocketStreamer = class {
     this.isHotSwapping = false;
     // Track if we're swapping media streams
     this.startTime = 0;
+    // Canvas-based rendering for seamless camera flips
+    // MediaRecorder records from canvas stream, so track changes don't affect it
+    this.canvasState = null;
     this.streamKey = options.streamKey;
     this.mediaStream = options.mediaStream;
     this.isVideo = options.isVideo;
@@ -324,6 +327,36 @@ var WebSocketStreamer = class {
     this.onStateChange = options.onStateChange;
     this.onError = options.onError;
   }
+  /**
+   * Calculate scaled dimensions for fitting video into canvas.
+   * @param mode - "contain" fits video inside canvas, "cover" fills canvas (cropping)
+   */
+  calculateScaledDimensions(videoWidth, videoHeight, canvasWidth, canvasHeight, mode) {
+    const videoAspect = videoWidth / videoHeight;
+    const canvasAspect = canvasWidth / canvasHeight;
+    const useWidthBased = mode === "contain" ? videoAspect > canvasAspect : videoAspect <= canvasAspect;
+    if (useWidthBased) {
+      const width = canvasWidth;
+      const height = canvasWidth / videoAspect;
+      return { x: 0, y: (canvasHeight - height) / 2, width, height };
+    } else {
+      const height = canvasHeight;
+      const width = canvasHeight * videoAspect;
+      return { x: (canvasWidth - width) / 2, y: 0, width, height };
+    }
+  }
+  /**
+   * Invalidate cached scaling dimensions (call when video source changes)
+   */
+  invalidateScalingCache() {
+    if (this.canvasState) {
+      this.canvasState.cachedContain = null;
+      this.canvasState.cachedCover = null;
+      this.canvasState.cachedNeedsBackground = false;
+      this.canvasState.lastVideoWidth = 0;
+      this.canvasState.lastVideoHeight = 0;
+    }
+  }
   /**
    * Validate stream key format
    * Stream keys must follow format: {tierCode}{foreignId}_{randomKey}
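
The `calculateScaledDimensions` helper added above is plain aspect-ratio math. A minimal standalone sketch of the same logic (the function name and the numbers below are illustrative, not taken from the package) shows how "contain" letterboxes or pillarboxes while "cover" overfills and gets cropped by the canvas:

function fit(videoWidth, videoHeight, canvasWidth, canvasHeight, mode) {
  const videoAspect = videoWidth / videoHeight;
  const canvasAspect = canvasWidth / canvasHeight;
  // "contain" scales by width only when the video is wider than the canvas;
  // "cover" does the opposite so the shorter dimension always fills the canvas.
  const useWidthBased = mode === "contain" ? videoAspect > canvasAspect : videoAspect <= canvasAspect;
  if (useWidthBased) {
    const width = canvasWidth;
    const height = canvasWidth / videoAspect;
    return { x: 0, y: (canvasHeight - height) / 2, width, height };
  }
  const height = canvasHeight;
  const width = canvasHeight * videoAspect;
  return { x: (canvasWidth - width) / 2, y: 0, width, height };
}

// Portrait 1080x1920 camera onto a landscape 1280x720 canvas:
fit(1080, 1920, 1280, 720, "contain"); // { x: 437.5, y: 0, width: 405, height: 720 } - pillarboxed, hence the blur background
fit(1080, 1920, 1280, 720, "cover");   // { x: 0, y: ~-777.8, width: 1280, height: ~2275.6 } - overfills, cropped by the canvas
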
@@ -350,6 +383,130 @@ var WebSocketStreamer = class {
       isVIP: tierCode === "b" || tierCode === "w"
     });
   }
+  /**
+   * Set up canvas-based rendering pipeline for video streams.
+   * This allows seamless camera flips by changing the video source
+   * without affecting MediaRecorder (which records from the canvas).
+   */
+  setupCanvasRendering() {
+    console.log("\u{1F3A8} Setting up canvas-based rendering for seamless camera flips");
+    const videoTrack = this.mediaStream.getVideoTracks()[0];
+    const settings = videoTrack?.getSettings() || {};
+    const width = settings.width || 1280;
+    const height = settings.height || 720;
+    console.log(`\u{1F4D0} Video dimensions: ${width}x${height}`);
+    const canvas = document.createElement("canvas");
+    canvas.width = width;
+    canvas.height = height;
+    const ctx = canvas.getContext("2d");
+    if (!ctx) {
+      throw new Error("Failed to get 2D canvas context - canvas rendering unavailable");
+    }
+    const videoElement = document.createElement("video");
+    videoElement.srcObject = this.mediaStream;
+    videoElement.muted = true;
+    videoElement.playsInline = true;
+    videoElement.play().catch((e) => console.warn("Video autoplay warning:", e));
+    const frameRate = settings.frameRate || 30;
+    const stream = canvas.captureStream(frameRate);
+    const audioTracks = this.mediaStream.getAudioTracks();
+    audioTracks.forEach((track) => {
+      stream.addTrack(track);
+    });
+    console.log(`\u{1F3AC} Canvas stream created with ${frameRate}fps video + ${audioTracks.length} audio track(s)`);
+    this.canvasState = {
+      canvas,
+      ctx,
+      videoElement,
+      stream,
+      renderLoopId: 0,
+      // Will be set below
+      useBlurBackground: true,
+      slowFrameCount: 0,
+      cachedContain: null,
+      cachedCover: null,
+      cachedNeedsBackground: false,
+      lastVideoWidth: 0,
+      lastVideoHeight: 0
+    };
+    const state = this.canvasState;
+    const renderFrame = () => {
+      if (!this.canvasState || state !== this.canvasState) return;
+      const { ctx: ctx2, canvas: canvas2, videoElement: videoElement2 } = state;
+      if (videoElement2.paused) {
+        state.renderLoopId = requestAnimationFrame(renderFrame);
+        return;
+      }
+      const canvasWidth = canvas2.width;
+      const canvasHeight = canvas2.height;
+      const videoWidth = videoElement2.videoWidth;
+      const videoHeight = videoElement2.videoHeight;
+      if (videoWidth === 0 || videoHeight === 0) {
+        state.renderLoopId = requestAnimationFrame(renderFrame);
+        return;
+      }
+      if (videoWidth !== state.lastVideoWidth || videoHeight !== state.lastVideoHeight) {
+        state.lastVideoWidth = videoWidth;
+        state.lastVideoHeight = videoHeight;
+        state.cachedContain = this.calculateScaledDimensions(
+          videoWidth,
+          videoHeight,
+          canvasWidth,
+          canvasHeight,
+          "contain"
+        );
+        state.cachedCover = this.calculateScaledDimensions(
+          videoWidth,
+          videoHeight,
+          canvasWidth,
+          canvasHeight,
+          "cover"
+        );
+        state.cachedNeedsBackground = Math.abs(state.cachedContain.width - canvasWidth) > 1 || Math.abs(state.cachedContain.height - canvasHeight) > 1;
+        console.log(`\u{1F4D0} Video dimensions changed: ${videoWidth}x${videoHeight}, needsBackground: ${state.cachedNeedsBackground}`);
+      }
+      const contain = state.cachedContain;
+      const cover = state.cachedCover;
+      const frameStart = performance.now();
+      if (state.cachedNeedsBackground && state.useBlurBackground) {
+        ctx2.save();
+        ctx2.filter = "blur(20px)";
+        ctx2.drawImage(videoElement2, cover.x, cover.y, cover.width, cover.height);
+        ctx2.restore();
+        ctx2.fillStyle = "rgba(0, 0, 0, 0.5)";
+        ctx2.fillRect(0, 0, canvasWidth, canvasHeight);
+      } else if (state.cachedNeedsBackground) {
+        ctx2.fillStyle = "#000";
+        ctx2.fillRect(0, 0, canvasWidth, canvasHeight);
+      }
+      ctx2.drawImage(videoElement2, contain.x, contain.y, contain.width, contain.height);
+      const frameDuration = performance.now() - frameStart;
+      if (frameDuration > 16 && state.useBlurBackground) {
+        state.slowFrameCount++;
+        if (state.slowFrameCount > 5) {
+          console.log("\u26A1 Disabling blur background for performance");
+          state.useBlurBackground = false;
+        }
+      } else if (frameDuration <= 16) {
+        state.slowFrameCount = 0;
+      }
+      state.renderLoopId = requestAnimationFrame(renderFrame);
+    };
+    state.renderLoopId = requestAnimationFrame(renderFrame);
+    console.log("\u2705 Canvas rendering pipeline ready (with adaptive blur background)");
+    return stream;
+  }
+  /**
+   * Clean up canvas rendering resources
+   */
+  cleanupCanvasRendering() {
+    if (!this.canvasState) return;
+    cancelAnimationFrame(this.canvasState.renderLoopId);
+    this.canvasState.videoElement.pause();
+    this.canvasState.videoElement.srcObject = null;
+    this.canvasState.stream.getTracks().forEach((track) => track.stop());
+    this.canvasState = null;
+  }
   /**
    * Build WebSocket URL from stream key
    */
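
The heart of `setupCanvasRendering` is an indirection: MediaRecorder never consumes the camera track itself, it consumes a `canvas.captureStream()` track, while a `requestAnimationFrame` loop copies frames from a hidden video element onto the canvas. A stripped-down sketch of that pipeline (browser environment assumed; the blur background, caching, and cleanup in the real method are omitted, and the function name is illustrative):

function createCanvasRecorder(cameraStream, width = 1280, height = 720, fps = 30) {
  const canvas = document.createElement("canvas");
  canvas.width = width;
  canvas.height = height;
  const ctx = canvas.getContext("2d");
  const video = document.createElement("video");
  video.srcObject = cameraStream;
  video.muted = true;
  video.playsInline = true;
  video.play().catch(() => {});
  const canvasStream = canvas.captureStream(fps);
  cameraStream.getAudioTracks().forEach((t) => canvasStream.addTrack(t));
  const draw = () => {
    ctx.drawImage(video, 0, 0, width, height);
    requestAnimationFrame(draw);
  };
  requestAnimationFrame(draw);
  // The recorder is bound to the canvas track, so later doing
  // `video.srcObject = someOtherCameraStream` never interrupts it.
  const recorder = new MediaRecorder(canvasStream);
  return { recorder, video, canvasStream };
}
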
@@ -369,6 +526,10 @@ var WebSocketStreamer = class {
    */
   async start() {
     try {
+      this.userStopped = false;
+      this.chunksSent = 0;
+      this.bytesSent = 0;
+      this.startTime = 0;
       this.validateStreamKeyFormat();
       this.onStateChange?.("connecting");
       const wsUrl = this.buildWebSocketUrl();
@@ -379,8 +540,15 @@ var WebSocketStreamer = class {
           reject(new Error("WebSocket not initialized"));
           return;
         }
-
+        const timeoutId = setTimeout(() => {
+          reject(new Error(`WebSocket connection timeout. URL: ${wsUrl}`));
+        }, 1e4);
+        this.websocket.addEventListener("open", () => {
+          clearTimeout(timeoutId);
+          resolve();
+        }, { once: true });
         this.websocket.addEventListener("error", (event) => {
+          clearTimeout(timeoutId);
          console.error("\u274C WebSocket error event:", event);
          console.error("\u{1F50D} Connection diagnostics:", {
            url: wsUrl.replace(this.streamKey, "***"),
@@ -399,16 +567,17 @@ Common causes:
 Please check encoder server logs and DATABASE_URL configuration.`
          ));
        }, { once: true });
-        setTimeout(() => {
-          reject(new Error(`WebSocket connection timeout. URL: ${wsUrl}`));
-        }, 1e4);
      });
      console.log("\u2705 WebSocket connected");
      this.setupWebSocketHandlers();
+      const streamToRecord = this.isVideo ? this.setupCanvasRendering() : this.mediaStream;
      const recorderOptions = getMediaRecorderOptions(this.isVideo);
      this.mimeType = recorderOptions.mimeType;
-      this.mediaRecorder = new MediaRecorder(
+      this.mediaRecorder = new MediaRecorder(streamToRecord, recorderOptions);
      console.log("\u{1F399}\uFE0F MediaRecorder created with options:", recorderOptions);
+      if (this.isVideo) {
+        console.log("\u{1F3A8} Recording from canvas stream (enables seamless camera flips)");
+      }
      this.setupMediaRecorderHandlers();
      this.mediaRecorder.start(300);
      this.startTime = Date.now();
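
The handshake above also fixes a timeout leak: the 10-second timer is now cleared on both the "open" and "error" paths, so a connection that eventually succeeds can no longer be rejected afterwards by the stale timer. The pattern in isolation (a hedged sketch, not the package's exact code):

function waitForOpen(ws, timeoutMs = 10000) {
  return new Promise((resolve, reject) => {
    const timeoutId = setTimeout(() => reject(new Error("WebSocket connection timeout")), timeoutMs);
    ws.addEventListener("open", () => {
      clearTimeout(timeoutId);
      resolve();
    }, { once: true });
    ws.addEventListener("error", () => {
      clearTimeout(timeoutId);
      reject(new Error("WebSocket error before open"));
    }, { once: true });
  });
}
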
@@ -445,9 +614,9 @@ Please check encoder server logs and DATABASE_URL configuration.`
     } else {
       console.log("\u26A0\uFE0F No WebSocket to close");
     }
+    this.cleanupCanvasRendering();
     this.mediaRecorder = null;
     this.websocket = null;
-    this.bytesSent = 0;
     this.onStateChange?.("stopped");
   }
   /**
@@ -456,6 +625,13 @@ Please check encoder server logs and DATABASE_URL configuration.`
   getBytesSent() {
     return this.bytesSent;
   }
+  /**
+   * Get the current source media stream.
+   * This may change after replaceVideoTrack() is called.
+   */
+  getMediaStream() {
+    return this.mediaStream;
+  }
   /**
    * Get current diagnostics
    */
@@ -470,71 +646,73 @@ Please check encoder server logs and DATABASE_URL configuration.`
     };
   }
   /**
-   *
-   *
-   *
+   * Replace the video track for camera flips.
+   *
+   * When using canvas-based rendering (video streams), this updates the video
+   * element source. The canvas continues drawing, and MediaRecorder is unaffected.
    *
-   *
-   * event to fire BEFORE proceeding. Otherwise there's a race condition where
-   * the stop event fires after isHotSwapping is set to false, causing the
-   * WebSocket to close unexpectedly.
+   * @param newVideoTrack - The new video track from the flipped camera
    */
-
-    console.log("\u{1F504}
-    this.
-
-
-
-
-
-
-
-
-
+  replaceVideoTrack(newVideoTrack) {
+    console.log("\u{1F504} Replacing video track");
+    if (this.canvasState) {
+      console.log("\u{1F3A8} Using canvas-based swap (MediaRecorder unaffected)");
+      const audioTracks = this.mediaStream.getAudioTracks();
+      const newStream = new MediaStream([newVideoTrack, ...audioTracks]);
+      this.mediaStream.getVideoTracks().forEach((track) => track.stop());
+      this.canvasState.videoElement.srcObject = newStream;
+      this.canvasState.videoElement.play().catch((e) => console.warn("Video play warning:", e));
+      this.mediaStream = newStream;
+      this.invalidateScalingCache();
+      const settings = newVideoTrack.getSettings();
+      if (settings.width && settings.height) {
+        console.log(`\u{1F4D0} New camera resolution: ${settings.width}x${settings.height}`);
+      }
+      console.log("\u2705 Video source swapped - canvas continues seamlessly");
+    } else {
+      console.warn("\u26A0\uFE0F Canvas not available - attempting direct track replacement");
+      const oldVideoTracks = this.mediaStream.getVideoTracks();
+      this.mediaStream.addTrack(newVideoTrack);
+      console.log("\u2795 New video track added");
+      oldVideoTracks.forEach((track) => {
+        this.mediaStream.removeTrack(track);
+        track.stop();
       });
+      console.log("\u2796 Old video track(s) removed");
+      console.log("\u2705 Video track replaced");
     }
   }
   /**
-   *
-   * Restarts the MediaRecorder with the existing stream
-   */
-  cancelHotSwap() {
-    console.log("\u21A9\uFE0F Canceling hot-swap - restarting with original stream");
-    const recorderOptions = getMediaRecorderOptions(this.isVideo);
-    this.mediaRecorder = new MediaRecorder(this.mediaStream, recorderOptions);
-    this.setupMediaRecorderHandlers();
-    this.mediaRecorder.start(300);
-    this.isHotSwapping = false;
-    console.log("\u2705 Original stream restored");
-  }
-  /**
-   * Complete the hot-swap with a new media stream
-   * Call this AFTER successfully obtaining a new camera stream
+   * Replace the audio track in the current MediaStream without stopping MediaRecorder.
    *
-   *
-   * This allows the caller to handle camera flip failures gracefully
+   * @param newAudioTrack - The new audio track
    */
-
-    console.log("\u{1F504}
-    this.mediaStream
-
-
-
-
-
-
-    console.log("\
+  replaceAudioTrack(newAudioTrack) {
+    console.log("\u{1F504} Replacing audio track (no MediaRecorder restart)");
+    const oldAudioTracks = this.mediaStream.getAudioTracks();
+    this.mediaStream.addTrack(newAudioTrack);
+    console.log("\u2795 New audio track added to source stream");
+    oldAudioTracks.forEach((track) => {
+      this.mediaStream.removeTrack(track);
+      track.stop();
+    });
+    console.log("\u2796 Old audio track(s) removed from source stream");
+    if (this.canvasState) {
+      this.canvasState.stream.getAudioTracks().forEach((track) => {
+        this.canvasState.stream.removeTrack(track);
+      });
+      this.canvasState.stream.addTrack(newAudioTrack);
+      console.log("\u{1F3A8} Audio track synced to canvas stream");
+    }
+    console.log("\u2705 Audio track replaced - streaming continues seamlessly");
   }
   /**
-   * Update the media stream (e.g., when
-   * This keeps the WebSocket connection alive while swapping the media source
+   * Update the media stream (e.g., when switching devices from settings)
+   * This keeps the WebSocket connection alive while swapping the media source.
+   * Restarts the MediaRecorder with the new stream.
    *
-   * Note:
-   *
-   *
-   * Note: Errors are thrown to the caller, not sent to onError callback
-   * This allows the caller to handle camera flip failures gracefully
+   * Note: For camera flips, prefer replaceVideoTrack() which doesn't restart MediaRecorder.
+   * Note: Errors are thrown to the caller, not sent to onError callback.
    */
   async updateMediaStream(newMediaStream) {
     console.log("\u{1F504} Updating media stream (hot-swap)");
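
Taken together, `replaceVideoTrack`, `replaceAudioTrack`, and `getMediaStream` let a consumer flip cameras mid-broadcast without touching the recorder. A hedged usage sketch (the `streamer` instance and preview element are assumed to exist; the constraints and function name are illustrative):

// Flip to the back camera while live, then rebind the local preview.
async function flipToEnvironmentCamera(streamer, previewVideoEl) {
  const newStream = await navigator.mediaDevices.getUserMedia({
    video: { facingMode: "environment" },
    audio: false
  });
  const newVideoTrack = newStream.getVideoTracks()[0];
  if (newVideoTrack) {
    streamer.replaceVideoTrack(newVideoTrack);
  }
  // The streamer now owns a new source stream; point the preview at it.
  previewVideoEl.srcObject = streamer.getMediaStream();
}
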
@@ -544,8 +722,14 @@ Please check encoder server logs and DATABASE_URL configuration.`
       console.log("\u23F9\uFE0F Old MediaRecorder stopped");
     }
     this.mediaStream = newMediaStream;
+    let streamToRecord = this.mediaStream;
+    if (this.isVideo) {
+      this.cleanupCanvasRendering();
+      streamToRecord = this.setupCanvasRendering();
+      console.log("\u{1F3A8} Canvas rendering recreated for new stream");
+    }
     const recorderOptions = getMediaRecorderOptions(this.isVideo);
-    this.mediaRecorder = new MediaRecorder(
+    this.mediaRecorder = new MediaRecorder(streamToRecord, recorderOptions);
     console.log("\u{1F399}\uFE0F New MediaRecorder created");
     this.setupMediaRecorderHandlers();
     this.mediaRecorder.start(300);
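
Unlike `replaceVideoTrack`, `updateMediaStream` tears down and recreates the canvas pipeline and restarts MediaRecorder, which suits full device switches rather than quick camera flips. A hedged sketch of calling it from a settings dialog (the function name and constraints are illustrative):

async function switchDevices(streamer, videoDeviceId, audioDeviceId) {
  const newStream = await navigator.mediaDevices.getUserMedia({
    video: { deviceId: { exact: videoDeviceId } },
    audio: { deviceId: { exact: audioDeviceId } }
  });
  // Restarts MediaRecorder against the new stream; the WebSocket stays connected.
  await streamer.updateMediaStream(newStream);
}
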
@@ -598,7 +782,9 @@ Please check encoder server logs and DATABASE_URL configuration.`
         this.bytesSent += event.data.size;
         this.chunksSent += 1;
         this.onBytesUpdate?.(this.bytesSent);
-
+        if (this.chunksSent % 10 === 0) {
+          console.log(`\u{1F4E4} Sent ${this.chunksSent} chunks (${(this.bytesSent / 1024 / 1024).toFixed(2)} MB total)`);
+        }
       }
     });
     this.mediaRecorder.addEventListener("error", (event) => {
@@ -1868,41 +2054,32 @@ function DialtribeStreamer({
   const handleFlipCamera = async () => {
     if (!isVideoKey || !hasMultipleCameras) return;
     const newFacingMode = facingMode === "user" ? "environment" : "user";
-    setFacingMode(newFacingMode);
     if (state === "live" && streamer) {
-      console.log("\u{1F504}
-      await streamer.prepareForHotSwap();
-      if (mediaStream) {
-        mediaStream.getTracks().forEach((track) => track.stop());
-      }
+      console.log("\u{1F504} Flipping camera during live broadcast (canvas-based swap)");
       try {
         const constraints = getMediaConstraints({
           isVideo: true,
           facingMode: newFacingMode
         });
         const newStream = await navigator.mediaDevices.getUserMedia(constraints);
-        console.log("\u{1F4F7}
-
-
-
-      } catch (err) {
-        console.error("\u274C Failed to get new camera stream:", err);
-        setFacingMode(facingMode);
-        try {
-          const originalConstraints = getMediaConstraints({
-            isVideo: true,
-            facingMode
-          });
-          const restoredStream = await navigator.mediaDevices.getUserMedia(originalConstraints);
-          streamer.completeHotSwap(restoredStream);
-          setMediaStream(restoredStream);
-          console.warn("\u26A0\uFE0F Camera flip failed - restored original camera");
-        } catch (restoreErr) {
-          console.error("\u274C Failed to restore original camera:", restoreErr);
-          streamer.cancelHotSwap();
+        console.log("\u{1F4F7} Got new camera stream:", newFacingMode);
+        const newVideoTrack = newStream.getVideoTracks()[0];
+        if (newVideoTrack) {
+          streamer.replaceVideoTrack(newVideoTrack);
         }
+        const updatedStream = streamer.getMediaStream();
+        setMediaStream(updatedStream);
+        setFacingMode(newFacingMode);
+        if (videoRef.current) {
+          videoRef.current.srcObject = updatedStream;
+        }
+        console.log("\u2705 Camera flipped successfully - broadcast continues seamlessly");
+      } catch (err) {
+        console.error("\u274C Failed to flip camera:", err);
+        console.warn("\u26A0\uFE0F Camera flip failed - continuing with current camera");
       }
     } else {
+      setFacingMode(newFacingMode);
       try {
         const constraints = getMediaConstraints({
           isVideo: true,