@dialtribe/react-sdk 0.1.0-alpha.19 → 0.1.0-alpha.21
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{dialtribe-streamer-jxyxtG7Z.d.ts → dialtribe-streamer-D9ulVBVb.d.ts} +39 -25
- package/dist/{dialtribe-streamer-Bb6LLFG2.d.mts → dialtribe-streamer-DH23BseY.d.mts} +39 -25
- package/dist/dialtribe-streamer.d.mts +1 -1
- package/dist/dialtribe-streamer.d.ts +1 -1
- package/dist/dialtribe-streamer.js +264 -87
- package/dist/dialtribe-streamer.js.map +1 -1
- package/dist/dialtribe-streamer.mjs +264 -87
- package/dist/dialtribe-streamer.mjs.map +1 -1
- package/dist/index.d.mts +1 -1
- package/dist/index.d.ts +1 -1
- package/dist/index.js +264 -87
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +264 -87
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
@@ -323,6 +323,9 @@ var WebSocketStreamer = class {
     this.isHotSwapping = false;
     // Track if we're swapping media streams
     this.startTime = 0;
+    // Canvas-based rendering for seamless camera flips
+    // MediaRecorder records from canvas stream, so track changes don't affect it
+    this.canvasState = null;
     this.streamKey = options.streamKey;
     this.mediaStream = options.mediaStream;
     this.isVideo = options.isVideo;
@@ -331,6 +334,36 @@ var WebSocketStreamer = class {
     this.onStateChange = options.onStateChange;
     this.onError = options.onError;
   }
+  /**
+   * Calculate scaled dimensions for fitting video into canvas.
+   * @param mode - "contain" fits video inside canvas, "cover" fills canvas (cropping)
+   */
+  calculateScaledDimensions(videoWidth, videoHeight, canvasWidth, canvasHeight, mode) {
+    const videoAspect = videoWidth / videoHeight;
+    const canvasAspect = canvasWidth / canvasHeight;
+    const useWidthBased = mode === "contain" ? videoAspect > canvasAspect : videoAspect <= canvasAspect;
+    if (useWidthBased) {
+      const width = canvasWidth;
+      const height = canvasWidth / videoAspect;
+      return { x: 0, y: (canvasHeight - height) / 2, width, height };
+    } else {
+      const height = canvasHeight;
+      const width = canvasHeight * videoAspect;
+      return { x: (canvasWidth - width) / 2, y: 0, width, height };
+    }
+  }
+  /**
+   * Invalidate cached scaling dimensions (call when video source changes)
+   */
+  invalidateScalingCache() {
+    if (this.canvasState) {
+      this.canvasState.cachedContain = null;
+      this.canvasState.cachedCover = null;
+      this.canvasState.cachedNeedsBackground = false;
+      this.canvasState.lastVideoWidth = 0;
+      this.canvasState.lastVideoHeight = 0;
+    }
+  }
   /**
    * Validate stream key format
    * Stream keys must follow format: {tierCode}{foreignId}_{randomKey}
@@ -357,6 +390,130 @@ var WebSocketStreamer = class {
       isVIP: tierCode === "b" || tierCode === "w"
     });
   }
+  /**
+   * Set up canvas-based rendering pipeline for video streams.
+   * This allows seamless camera flips by changing the video source
+   * without affecting MediaRecorder (which records from the canvas).
+   */
+  setupCanvasRendering() {
+    console.log("\u{1F3A8} Setting up canvas-based rendering for seamless camera flips");
+    const videoTrack = this.mediaStream.getVideoTracks()[0];
+    const settings = videoTrack?.getSettings() || {};
+    const width = settings.width || 1280;
+    const height = settings.height || 720;
+    console.log(`\u{1F4D0} Video dimensions: ${width}x${height}`);
+    const canvas = document.createElement("canvas");
+    canvas.width = width;
+    canvas.height = height;
+    const ctx = canvas.getContext("2d");
+    if (!ctx) {
+      throw new Error("Failed to get 2D canvas context - canvas rendering unavailable");
+    }
+    const videoElement = document.createElement("video");
+    videoElement.srcObject = this.mediaStream;
+    videoElement.muted = true;
+    videoElement.playsInline = true;
+    videoElement.play().catch((e) => console.warn("Video autoplay warning:", e));
+    const frameRate = settings.frameRate || 30;
+    const stream = canvas.captureStream(frameRate);
+    const audioTracks = this.mediaStream.getAudioTracks();
+    audioTracks.forEach((track) => {
+      stream.addTrack(track);
+    });
+    console.log(`\u{1F3AC} Canvas stream created with ${frameRate}fps video + ${audioTracks.length} audio track(s)`);
+    this.canvasState = {
+      canvas,
+      ctx,
+      videoElement,
+      stream,
+      renderLoopId: 0,
+      // Will be set below
+      useBlurBackground: true,
+      slowFrameCount: 0,
+      cachedContain: null,
+      cachedCover: null,
+      cachedNeedsBackground: false,
+      lastVideoWidth: 0,
+      lastVideoHeight: 0
+    };
+    const state = this.canvasState;
+    const renderFrame = () => {
+      if (!this.canvasState || state !== this.canvasState) return;
+      const { ctx: ctx2, canvas: canvas2, videoElement: videoElement2 } = state;
+      if (videoElement2.paused) {
+        state.renderLoopId = requestAnimationFrame(renderFrame);
+        return;
+      }
+      const canvasWidth = canvas2.width;
+      const canvasHeight = canvas2.height;
+      const videoWidth = videoElement2.videoWidth;
+      const videoHeight = videoElement2.videoHeight;
+      if (videoWidth === 0 || videoHeight === 0) {
+        state.renderLoopId = requestAnimationFrame(renderFrame);
+        return;
+      }
+      if (videoWidth !== state.lastVideoWidth || videoHeight !== state.lastVideoHeight) {
+        state.lastVideoWidth = videoWidth;
+        state.lastVideoHeight = videoHeight;
+        state.cachedContain = this.calculateScaledDimensions(
+          videoWidth,
+          videoHeight,
+          canvasWidth,
+          canvasHeight,
+          "contain"
+        );
+        state.cachedCover = this.calculateScaledDimensions(
+          videoWidth,
+          videoHeight,
+          canvasWidth,
+          canvasHeight,
+          "cover"
+        );
+        state.cachedNeedsBackground = Math.abs(state.cachedContain.width - canvasWidth) > 1 || Math.abs(state.cachedContain.height - canvasHeight) > 1;
+        console.log(`\u{1F4D0} Video dimensions changed: ${videoWidth}x${videoHeight}, needsBackground: ${state.cachedNeedsBackground}`);
+      }
+      const contain = state.cachedContain;
+      const cover = state.cachedCover;
+      const frameStart = performance.now();
+      if (state.cachedNeedsBackground && state.useBlurBackground) {
+        ctx2.save();
+        ctx2.filter = "blur(20px)";
+        ctx2.drawImage(videoElement2, cover.x, cover.y, cover.width, cover.height);
+        ctx2.restore();
+        ctx2.fillStyle = "rgba(0, 0, 0, 0.5)";
+        ctx2.fillRect(0, 0, canvasWidth, canvasHeight);
+      } else if (state.cachedNeedsBackground) {
+        ctx2.fillStyle = "#000";
+        ctx2.fillRect(0, 0, canvasWidth, canvasHeight);
+      }
+      ctx2.drawImage(videoElement2, contain.x, contain.y, contain.width, contain.height);
+      const frameDuration = performance.now() - frameStart;
+      if (frameDuration > 16 && state.useBlurBackground) {
+        state.slowFrameCount++;
+        if (state.slowFrameCount > 5) {
+          console.log("\u26A1 Disabling blur background for performance");
+          state.useBlurBackground = false;
+        }
+      } else if (frameDuration <= 16) {
+        state.slowFrameCount = 0;
+      }
+      state.renderLoopId = requestAnimationFrame(renderFrame);
+    };
+    state.renderLoopId = requestAnimationFrame(renderFrame);
+    console.log("\u2705 Canvas rendering pipeline ready (with adaptive blur background)");
+    return stream;
+  }
+  /**
+   * Clean up canvas rendering resources
+   */
+  cleanupCanvasRendering() {
+    if (!this.canvasState) return;
+    cancelAnimationFrame(this.canvasState.renderLoopId);
+    this.canvasState.videoElement.pause();
+    this.canvasState.videoElement.srcObject = null;
+    this.canvasState.stream.getTracks().forEach((track) => track.stop());
+    this.canvasState = null;
+  }
   /**
    * Build WebSocket URL from stream key
    */
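The pipeline above is an instance of the standard canvas-capture pattern: MediaRecorder is bound to the canvas's own captureStream() track, so later changes to the video element's srcObject never invalidate the recorded track. A minimal, self-contained sketch of that pattern, independent of this SDK (all identifiers below are illustrative):

    // Minimal sketch of the canvas-capture pattern used above, not the SDK's API.
    // The recorder is bound to the canvas track, so later changes to video.srcObject
    // (for example a camera flip) do not interrupt recording.
    async function recordViaCanvas(source: MediaStream): Promise<{ video: HTMLVideoElement; recorder: MediaRecorder }> {
      const settings: MediaTrackSettings = source.getVideoTracks()[0]?.getSettings() ?? {};
      const video = document.createElement("video");
      video.srcObject = source;
      video.muted = true;
      video.playsInline = true;
      await video.play();

      const canvas = document.createElement("canvas");
      canvas.width = settings.width ?? 1280;
      canvas.height = settings.height ?? 720;
      const ctx = canvas.getContext("2d");
      if (!ctx) throw new Error("2D canvas context unavailable");

      const draw = () => {
        ctx.drawImage(video, 0, 0, canvas.width, canvas.height);
        requestAnimationFrame(draw);
      };
      requestAnimationFrame(draw);

      const mixed = canvas.captureStream(settings.frameRate ?? 30); // canvas video track
      source.getAudioTracks().forEach((t) => mixed.addTrack(t));    // keep the original audio
      const recorder = new MediaRecorder(mixed);
      recorder.start(300); // timesliced chunks, like the SDK above
      return { video, recorder };
    }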
@@ -376,6 +533,10 @@ var WebSocketStreamer = class {
    */
   async start() {
     try {
+      this.userStopped = false;
+      this.chunksSent = 0;
+      this.bytesSent = 0;
+      this.startTime = 0;
       this.validateStreamKeyFormat();
       this.onStateChange?.("connecting");
       const wsUrl = this.buildWebSocketUrl();
@@ -386,8 +547,15 @@ var WebSocketStreamer = class {
           reject(new Error("WebSocket not initialized"));
           return;
         }
-
+        const timeoutId = setTimeout(() => {
+          reject(new Error(`WebSocket connection timeout. URL: ${wsUrl}`));
+        }, 1e4);
+        this.websocket.addEventListener("open", () => {
+          clearTimeout(timeoutId);
+          resolve();
+        }, { once: true });
         this.websocket.addEventListener("error", (event) => {
+          clearTimeout(timeoutId);
          console.error("\u274C WebSocket error event:", event);
          console.error("\u{1F50D} Connection diagnostics:", {
            url: wsUrl.replace(this.streamKey, "***"),
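This hunk also fixes the connection timeout: the 10 s setTimeout is now cleared on both "open" and "error", so a successful connection can no longer be followed by a spurious timeout rejection. A standalone sketch of the same connect-with-timeout pattern (an illustrative helper, not an export of the package):

    // Sketch of the connect-with-timeout pattern this hunk adopts (illustrative only).
    function connectWithTimeout(url: string, timeoutMs = 10_000): Promise<WebSocket> {
      return new Promise((resolve, reject) => {
        const ws = new WebSocket(url);
        const timeoutId = setTimeout(
          () => reject(new Error(`WebSocket connection timeout. URL: ${url}`)),
          timeoutMs,
        );
        // Whichever event fires first clears the timer, so a late timeout can no longer
        // reject a promise that already resolved on "open".
        ws.addEventListener("open", () => { clearTimeout(timeoutId); resolve(ws); }, { once: true });
        ws.addEventListener("error", () => { clearTimeout(timeoutId); reject(new Error("WebSocket error before open")); }, { once: true });
      });
    }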
@@ -406,16 +574,17 @@ Common causes:
 Please check encoder server logs and DATABASE_URL configuration.`
           ));
         }, { once: true });
-        setTimeout(() => {
-          reject(new Error(`WebSocket connection timeout. URL: ${wsUrl}`));
-        }, 1e4);
       });
       console.log("\u2705 WebSocket connected");
       this.setupWebSocketHandlers();
+      const streamToRecord = this.isVideo ? this.setupCanvasRendering() : this.mediaStream;
       const recorderOptions = getMediaRecorderOptions(this.isVideo);
       this.mimeType = recorderOptions.mimeType;
-      this.mediaRecorder = new MediaRecorder(
+      this.mediaRecorder = new MediaRecorder(streamToRecord, recorderOptions);
       console.log("\u{1F399}\uFE0F MediaRecorder created with options:", recorderOptions);
+      if (this.isVideo) {
+        console.log("\u{1F3A8} Recording from canvas stream (enables seamless camera flips)");
+      }
       this.setupMediaRecorderHandlers();
       this.mediaRecorder.start(300);
       this.startTime = Date.now();
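getMediaRecorderOptions() itself is not part of this diff; helpers of that shape typically probe MediaRecorder.isTypeSupported() to pick a recordable mimeType. A hypothetical sketch for context only (the codec list below is an assumption, not the package's actual implementation):

    // Hypothetical shape of such a helper, for context only; the real implementation
    // is not shown in this diff.
    function getMediaRecorderOptionsSketch(isVideo: boolean): { mimeType: string } {
      const candidates = isVideo
        ? ["video/webm;codecs=vp9,opus", "video/webm;codecs=vp8,opus", "video/webm"]
        : ["audio/webm;codecs=opus", "audio/webm"];
      // Pick the first container/codec combination the browser can actually record.
      const mimeType = candidates.find((t) => MediaRecorder.isTypeSupported(t)) ?? candidates[candidates.length - 1];
      return { mimeType };
    }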
@@ -452,9 +621,9 @@ Please check encoder server logs and DATABASE_URL configuration.`
     } else {
       console.log("\u26A0\uFE0F No WebSocket to close");
     }
+    this.cleanupCanvasRendering();
     this.mediaRecorder = null;
     this.websocket = null;
-    this.bytesSent = 0;
     this.onStateChange?.("stopped");
   }
   /**
@@ -463,6 +632,13 @@ Please check encoder server logs and DATABASE_URL configuration.`
   getBytesSent() {
     return this.bytesSent;
   }
+  /**
+   * Get the current source media stream.
+   * This may change after replaceVideoTrack() is called.
+   */
+  getMediaStream() {
+    return this.mediaStream;
+  }
   /**
    * Get current diagnostics
    */
@@ -477,71 +653,73 @@ Please check encoder server logs and DATABASE_URL configuration.`
     };
   }
   /**
-   *
-   *
-   *
+   * Replace the video track for camera flips.
+   *
+   * When using canvas-based rendering (video streams), this updates the video
+   * element source. The canvas continues drawing, and MediaRecorder is unaffected.
    *
-   *
-   * event to fire BEFORE proceeding. Otherwise there's a race condition where
-   * the stop event fires after isHotSwapping is set to false, causing the
-   * WebSocket to close unexpectedly.
+   * @param newVideoTrack - The new video track from the flipped camera
    */
-
-    console.log("\u{1F504}
-    this.
-
-
-
-
-
-
-
-
-
-
+  replaceVideoTrack(newVideoTrack) {
+    console.log("\u{1F504} Replacing video track");
+    if (this.canvasState) {
+      console.log("\u{1F3A8} Using canvas-based swap (MediaRecorder unaffected)");
+      const audioTracks = this.mediaStream.getAudioTracks();
+      const newStream = new MediaStream([newVideoTrack, ...audioTracks]);
+      this.mediaStream.getVideoTracks().forEach((track) => track.stop());
+      this.canvasState.videoElement.srcObject = newStream;
+      this.canvasState.videoElement.play().catch((e) => console.warn("Video play warning:", e));
+      this.mediaStream = newStream;
+      this.invalidateScalingCache();
+      const settings = newVideoTrack.getSettings();
+      if (settings.width && settings.height) {
+        console.log(`\u{1F4D0} New camera resolution: ${settings.width}x${settings.height}`);
+      }
+      console.log("\u2705 Video source swapped - canvas continues seamlessly");
+    } else {
+      console.warn("\u26A0\uFE0F Canvas not available - attempting direct track replacement");
+      const oldVideoTracks = this.mediaStream.getVideoTracks();
+      this.mediaStream.addTrack(newVideoTrack);
+      console.log("\u2795 New video track added");
+      oldVideoTracks.forEach((track) => {
+        this.mediaStream.removeTrack(track);
+        track.stop();
       });
+      console.log("\u2796 Old video track(s) removed");
+      console.log("\u2705 Video track replaced");
     }
   }
   /**
-   *
-   * Restarts the MediaRecorder with the existing stream
-   */
-  cancelHotSwap() {
-    console.log("\u21A9\uFE0F Canceling hot-swap - restarting with original stream");
-    const recorderOptions = getMediaRecorderOptions(this.isVideo);
-    this.mediaRecorder = new MediaRecorder(this.mediaStream, recorderOptions);
-    this.setupMediaRecorderHandlers();
-    this.mediaRecorder.start(300);
-    this.isHotSwapping = false;
-    console.log("\u2705 Original stream restored");
-  }
-  /**
-   * Complete the hot-swap with a new media stream
-   * Call this AFTER successfully obtaining a new camera stream
+   * Replace the audio track in the current MediaStream without stopping MediaRecorder.
    *
-   *
-   * This allows the caller to handle camera flip failures gracefully
+   * @param newAudioTrack - The new audio track
    */
-
-    console.log("\u{1F504}
-    this.mediaStream
-
-
-
-
-
-
-    console.log("\
+  replaceAudioTrack(newAudioTrack) {
+    console.log("\u{1F504} Replacing audio track (no MediaRecorder restart)");
+    const oldAudioTracks = this.mediaStream.getAudioTracks();
+    this.mediaStream.addTrack(newAudioTrack);
+    console.log("\u2795 New audio track added to source stream");
+    oldAudioTracks.forEach((track) => {
+      this.mediaStream.removeTrack(track);
+      track.stop();
+    });
+    console.log("\u2796 Old audio track(s) removed from source stream");
+    if (this.canvasState) {
+      this.canvasState.stream.getAudioTracks().forEach((track) => {
+        this.canvasState.stream.removeTrack(track);
+      });
+      this.canvasState.stream.addTrack(newAudioTrack);
+      console.log("\u{1F3A8} Audio track synced to canvas stream");
+    }
+    console.log("\u2705 Audio track replaced - streaming continues seamlessly");
   }
   /**
-   * Update the media stream (e.g., when
-   * This keeps the WebSocket connection alive while swapping the media source
+   * Update the media stream (e.g., when switching devices from settings)
+   * This keeps the WebSocket connection alive while swapping the media source.
+   * Restarts the MediaRecorder with the new stream.
    *
-   * Note:
-   *
-   *
-   * Note: Errors are thrown to the caller, not sent to onError callback
-   * This allows the caller to handle camera flip failures gracefully
+   * Note: For camera flips, prefer replaceVideoTrack() which doesn't restart MediaRecorder.
+   * Note: Errors are thrown to the caller, not sent to onError callback.
    */
   async updateMediaStream(newMediaStream) {
     console.log("\u{1F504} Updating media stream (hot-swap)");
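Caller-side use of the new replaceAudioTrack() might look like the sketch below when switching microphones mid-broadcast (assumed application code, not shipped by the package; replaceVideoTrack() usage appears in the DialtribeStreamer hunk further down):

    // Assumed caller-side code: switching microphones mid-stream with replaceAudioTrack().
    async function switchMicrophone(
      streamer: { replaceAudioTrack(track: MediaStreamTrack): void },
      deviceId: string,
    ): Promise<void> {
      const stream = await navigator.mediaDevices.getUserMedia({
        audio: { deviceId: { exact: deviceId } },
      });
      const track = stream.getAudioTracks()[0];
      if (track) streamer.replaceAudioTrack(track);
    }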
@@ -551,8 +729,14 @@ Please check encoder server logs and DATABASE_URL configuration.`
       console.log("\u23F9\uFE0F Old MediaRecorder stopped");
     }
     this.mediaStream = newMediaStream;
+    let streamToRecord = this.mediaStream;
+    if (this.isVideo) {
+      this.cleanupCanvasRendering();
+      streamToRecord = this.setupCanvasRendering();
+      console.log("\u{1F3A8} Canvas rendering recreated for new stream");
+    }
     const recorderOptions = getMediaRecorderOptions(this.isVideo);
-    this.mediaRecorder = new MediaRecorder(
+    this.mediaRecorder = new MediaRecorder(streamToRecord, recorderOptions);
     console.log("\u{1F399}\uFE0F New MediaRecorder created");
     this.setupMediaRecorderHandlers();
     this.mediaRecorder.start(300);
@@ -605,7 +789,9 @@ Please check encoder server logs and DATABASE_URL configuration.`
         this.bytesSent += event.data.size;
         this.chunksSent += 1;
         this.onBytesUpdate?.(this.bytesSent);
-
+        if (this.chunksSent % 10 === 0) {
+          console.log(`\u{1F4E4} Sent ${this.chunksSent} chunks (${(this.bytesSent / 1024 / 1024).toFixed(2)} MB total)`);
+        }
       }
     });
     this.mediaRecorder.addEventListener("error", (event) => {
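The new progress log counts the Blob chunks MediaRecorder emits on its 300 ms timeslice; the unchanged surrounding handler forwards each chunk over the WebSocket. An illustrative sketch of that handler shape (not the SDK's exact code):

    // Illustrative shape of a timesliced MediaRecorder-to-WebSocket pipeline.
    function pipeRecorderToSocket(recorder: MediaRecorder, ws: WebSocket): void {
      let chunksSent = 0;
      let bytesSent = 0;
      recorder.addEventListener("dataavailable", (event) => {
        if (event.data.size === 0 || ws.readyState !== WebSocket.OPEN) return;
        ws.send(event.data);
        bytesSent += event.data.size;
        chunksSent += 1;
        if (chunksSent % 10 === 0) {
          console.log(`Sent ${chunksSent} chunks (${(bytesSent / 1024 / 1024).toFixed(2)} MB total)`);
        }
      });
      recorder.start(300); // emit a Blob roughly every 300 ms
    }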
@@ -1875,41 +2061,32 @@ function DialtribeStreamer({
   const handleFlipCamera = async () => {
     if (!isVideoKey || !hasMultipleCameras) return;
     const newFacingMode = facingMode === "user" ? "environment" : "user";
-    setFacingMode(newFacingMode);
     if (state === "live" && streamer) {
-      console.log("\u{1F504}
-      await streamer.prepareForHotSwap();
-      if (mediaStream) {
-        mediaStream.getTracks().forEach((track) => track.stop());
-      }
+      console.log("\u{1F504} Flipping camera during live broadcast (canvas-based swap)");
       try {
         const constraints = getMediaConstraints({
           isVideo: true,
           facingMode: newFacingMode
         });
         const newStream = await navigator.mediaDevices.getUserMedia(constraints);
-        console.log("\u{1F4F7}
-
-
-
-      } catch (err) {
-        console.error("\u274C Failed to get new camera stream:", err);
-        setFacingMode(facingMode);
-        try {
-          const originalConstraints = getMediaConstraints({
-            isVideo: true,
-            facingMode
-          });
-          const restoredStream = await navigator.mediaDevices.getUserMedia(originalConstraints);
-          streamer.completeHotSwap(restoredStream);
-          setMediaStream(restoredStream);
-          console.warn("\u26A0\uFE0F Camera flip failed - restored original camera");
-        } catch (restoreErr) {
-          console.error("\u274C Failed to restore original camera:", restoreErr);
-          streamer.cancelHotSwap();
+        console.log("\u{1F4F7} Got new camera stream:", newFacingMode);
+        const newVideoTrack = newStream.getVideoTracks()[0];
+        if (newVideoTrack) {
+          streamer.replaceVideoTrack(newVideoTrack);
         }
+        const updatedStream = streamer.getMediaStream();
+        setMediaStream(updatedStream);
+        setFacingMode(newFacingMode);
+        if (videoRef.current) {
+          videoRef.current.srcObject = updatedStream;
+        }
+        console.log("\u2705 Camera flipped successfully - broadcast continues seamlessly");
+      } catch (err) {
+        console.error("\u274C Failed to flip camera:", err);
+        console.warn("\u26A0\uFE0F Camera flip failed - continuing with current camera");
       }
     } else {
+      setFacingMode(newFacingMode);
       try {
         const constraints = getMediaConstraints({
           isVideo: true,