node-mac-recorder 2.21.32 → 2.21.34
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/settings.local.json +3 -9
- package/canvas-player.html +235 -41
- package/index.js +7 -1
- package/package.json +1 -1
- package/src/audio_recorder.mm +41 -4
- package/src/avfoundation_recorder.mm +26 -0
- package/src/camera_recorder.mm +188 -22
- package/src/mac_recorder.mm +56 -27
- package/src/sync_timeline.h +10 -0
- package/src/sync_timeline.mm +19 -3

package/.claude/settings.local.json
CHANGED

@@ -1,15 +1,9 @@
 {
   "permissions": {
     "allow": [
-      "Bash(
-      "Bash(
-      "Bash(
-      "Bash(node:*)",
-      "Bash(ALLOW_CONTINUITY_CAMERA=1 node:*)",
-      "Bash(awk:*)",
-      "Bash(ffprobe:*)",
-      "Bash(sw_vers:*)",
-      "Bash(system_profiler:*)"
+      "Bash(cat:*)",
+      "Bash(pkill:*)",
+      "Bash(for f in test-output/*1761946670140.mov)"
     ],
     "deny": [],
     "ask": []

package/canvas-player.html
CHANGED

@@ -347,6 +347,22 @@
         cursor: true
       };

+      // SYNC FIX: Track video durations for sync compensation
+      this.videoDurations = {
+        screen: 0,
+        camera: 0,
+        audio: 0
+      };
+      this.maxDuration = 0;
+      this.cameraStartOffset = 0; // Time offset for camera sync
+      this.cameraAvailableUntil = 0; // Absolute time (s) where camera footage ends on master timeline
+      this.cameraEndGap = 0; // How much camera ends before master
+      this.mediaLoaded = {
+        screen: false,
+        camera: false,
+        audio: false
+      };
+
       // SYNC FIX: Ensure all videos have same playback settings
       [this.screenVideo, this.cameraVideo, this.audioVideo].forEach(video => {
         video.playbackRate = 1.0;
@@ -362,12 +378,14 @@
       const response = await fetch('test-output/recording-metadata.json');
       this.metadata = await response.json();

-      this.displayInfo();
       this.showLayerControls();

       // Load videos
       await this.loadVideos();

+      // Update info after media durations are known
+      this.displayInfo();
+
       // Load cursor data
       if (this.metadata.files.cursor) {
        await this.loadCursorData();
@@ -418,6 +436,16 @@

       console.log('✅ All videos loaded and ready for playback');

+      this.mediaLoaded.screen = screenLoaded;
+      this.mediaLoaded.camera = cameraLoaded;
+      this.mediaLoaded.audio = audioLoaded;
+
+      this.videoDurations.screen = screenLoaded ? (this.screenVideo.duration || 0) : 0;
+      this.videoDurations.camera = cameraLoaded ? (this.cameraVideo.duration || 0) : 0;
+      this.videoDurations.audio = audioLoaded ? (this.audioVideo.duration || 0) : 0;
+
+      this.calculateSyncOffsets();
+
       // SYNC FIX: Set all videos to frame 0 and wait for seek complete
       const seekToStart = (video) => {
         return new Promise(resolve => {
@@ -448,6 +476,95 @@
       }
     }

+    calculateSyncOffsets() {
+      const tolerance = 0.1; // seconds
+
+      const screenDuration = this.videoDurations.screen || 0;
+      const cameraDuration = this.videoDurations.camera || 0;
+      const audioDuration = this.videoDurations.audio || 0;
+
+      console.log('📏 Media durations (s):', {
+        screen: screenDuration ? screenDuration.toFixed(2) : '—',
+        camera: cameraDuration ? cameraDuration.toFixed(2) : '—',
+        audio: audioDuration ? audioDuration.toFixed(2) : '—'
+      });
+
+      // Reset derived sync state
+      this.cameraStartOffset = 0;
+      this.cameraAvailableUntil = cameraDuration;
+      this.cameraEndGap = 0;
+
+      // Choose a sensible master duration (screen dominates, then audio, then camera)
+      const candidateDurations = [
+        screenDuration,
+        audioDuration,
+        cameraDuration
+      ].filter(value => value && value > 0);
+
+      this.maxDuration = candidateDurations.length > 0 ? Math.max(...candidateDurations) : 0;
+
+      const audioMatchesScreen =
+        screenDuration > 0 && audioDuration > 0 && Math.abs(audioDuration - screenDuration) <= tolerance;
+
+      if (cameraDuration > 0) {
+        let explained = false;
+
+        if (screenDuration > 0) {
+          const cameraVsScreen = screenDuration - cameraDuration;
+
+          if (cameraVsScreen > tolerance) {
+            // Screen is longer than camera. Only treat this as a late camera start
+            // when audio corroborates the screen duration or camera is the outlier.
+            const cameraVsAudio = audioDuration > 0 ? Math.abs(audioDuration - cameraDuration) : Infinity;
+            const screenVsAudio = audioDuration > 0 ? Math.abs(audioDuration - screenDuration) : Infinity;
+            const audioSupportsScreen = audioMatchesScreen || screenVsAudio <= cameraVsAudio;
+
+            if (audioSupportsScreen) {
+              this.cameraStartOffset = cameraVsScreen;
+              this.cameraAvailableUntil = this.cameraStartOffset + cameraDuration;
+              explained = true;
+              console.log('✂️ Camera appears to start late by', (this.cameraStartOffset * 1000).toFixed(0), 'ms');
+            }
+          } else if (cameraVsScreen < -tolerance) {
+            // Camera is longer than screen - we do not support negative offsets yet.
+            this.cameraAvailableUntil = cameraDuration;
+            console.log('ℹ️ Camera duration exceeds screen by', Math.abs(cameraVsScreen * 1000).toFixed(0), 'ms; leaving offset at 0');
+            explained = true;
+          }
+        }
+
+        if (!explained) {
+          // Either no screen video or difference is negligible - camera aligns to master at t=0
+          this.cameraAvailableUntil = cameraDuration;
+        }
+      }
+
+      // Determine how far the camera footage extends on the master timeline
+      if (this.cameraAvailableUntil > 0) {
+        this.cameraAvailableUntil = Math.max(this.cameraAvailableUntil, cameraDuration);
+      }
+
+      const masterCandidates = [
+        this.maxDuration,
+        this.cameraAvailableUntil,
+        screenDuration,
+        audioDuration
+      ].filter(value => value && value > 0);
+
+      this.maxDuration = masterCandidates.length > 0 ? Math.max(...masterCandidates) : 0;
+
+      if (this.maxDuration > 0 && this.cameraAvailableUntil > 0) {
+        this.cameraEndGap = this.maxDuration - this.cameraAvailableUntil;
+        if (this.cameraEndGap < tolerance) {
+          this.cameraEndGap = 0;
+        } else {
+          console.log('ℹ️ Camera footage ends', this.cameraEndGap.toFixed(2), 's before master timeline');
+        }
+      } else {
+        this.cameraEndGap = 0;
+      }
+    }
+
     async loadCursorData() {
       try {
         const response = await fetch('test-output/' + this.metadata.files.cursor);
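The heuristic only reads a duration gap as a late camera start when the audio track sides with the screen duration. A minimal standalone sketch of that decision (hypothetical inputs; the 0.1 s tolerance mirrors the code above, with the audioMatchesScreen shortcut folded into the comparison):

    // Sketch of the offset heuristic; durations in seconds.
    function cameraStartOffset(screen, camera, audio, tolerance = 0.1) {
      const gap = screen - camera;
      if (screen <= 0 || camera <= 0 || gap <= tolerance) return 0;
      // Only treat the gap as a late camera start when audio corroborates screen.
      const cameraVsAudio = audio > 0 ? Math.abs(audio - camera) : Infinity;
      const screenVsAudio = audio > 0 ? Math.abs(audio - screen) : Infinity;
      return screenVsAudio <= cameraVsAudio ? gap : 0;
    }

    console.log(cameraStartOffset(10.0, 9.2, 10.05)); // 0.8 - audio matches screen
    console.log(cameraStartOffset(10.0, 9.2, 9.18));  // 0   - audio matches camera instead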
@@ -544,7 +661,11 @@
       const targetTime = this.pausedTime / 1000;
       this.screenVideo.currentTime = targetTime;
       if (this.metadata.files.camera && this.cameraVideo.readyState >= 2) {
-        this.cameraVideo.
+        const cameraDuration = this.cameraVideo.duration || this.videoDurations.camera || 0;
+        const cameraTarget = Math.max(0, targetTime - this.cameraStartOffset);
+        this.cameraVideo.currentTime = cameraDuration > 0
+          ? Math.min(cameraTarget, cameraDuration)
+          : cameraTarget;
       }
       if (this.metadata.files.audio && this.audioVideo.readyState >= 2) {
         this.audioVideo.currentTime = targetTime;
@@ -606,18 +727,24 @@
     }

     async seek(percent) {
-      const
+      const masterDuration = this.screenVideo.duration || this.maxDuration || 0;
+      const time = masterDuration * percent;
+      const screenTarget = Math.min(time, this.screenVideo.duration || time);

       // SYNC FIX: Set all video times simultaneously
       const seekPromises = [];

-      this.screenVideo.currentTime =
+      this.screenVideo.currentTime = screenTarget;
       seekPromises.push(new Promise(resolve => {
         this.screenVideo.onseeked = resolve;
       }));

       if (this.metadata.files.camera) {
-        this.cameraVideo.
+        const cameraDuration = this.cameraVideo.duration || this.videoDurations.camera || 0;
+        const cameraTarget = Math.max(0, time - this.cameraStartOffset);
+        this.cameraVideo.currentTime = cameraDuration > 0
+          ? Math.min(cameraTarget, cameraDuration)
+          : cameraTarget;
         seekPromises.push(new Promise(resolve => {
           this.cameraVideo.onseeked = resolve;
         }));
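Pause-resume and scrubbing now map master time onto the camera's own clock the same way. A small helper capturing that mapping (hypothetical, not part of the package):

    // Master timeline (s) -> camera currentTime (s), clamped to the footage.
    function masterToCameraTime(masterTime, startOffset, cameraDuration) {
      const target = Math.max(0, masterTime - startOffset);
      return cameraDuration > 0 ? Math.min(target, cameraDuration) : target;
    }

    masterToCameraTime(5.0, 0.8, 9.2);  // 4.2 - camera runs 0.8s behind master
    masterToCameraTime(0.3, 0.8, 9.2);  // 0   - before camera footage begins
    masterToCameraTime(11.0, 0.8, 9.2); // 9.2 - clamped to last camera frame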
@@ -638,13 +765,13 @@
         console.warn('⚠️ Seek error:', error);
       }

-      this.pausedTime =
+      this.pausedTime = screenTarget * 1000;
       if (this.isPlaying) {
         this.startTime = performance.now() - this.pausedTime;
       }

       // Update cursor
-      this.updateCursor(
+      this.updateCursor(screenTarget * 1000);
     }

     animate() {
@@ -681,28 +808,46 @@
       const syncThreshold = 30; // 30ms tolerance (very tight sync)
       const masterSeconds = masterTime / 1000;

-      // Sync camera video
+      // Sync camera video with offset compensation
       if (this.metadata.files.camera && this.cameraVideo.readyState >= 2 && !this.cameraVideo.paused) {
-        const
+        const cameraStart = this.cameraStartOffset || 0;
+        const cameraDuration = this.cameraVideo.duration || this.videoDurations.camera || 0;
+        const adjustedMasterTime = masterSeconds - cameraStart;
+
+        // Before camera start - keep it parked at frame 0
+        if (adjustedMasterTime < -0.05) {
+          if (this.cameraVideo.currentTime > 0.05) {
+            this.cameraVideo.currentTime = 0;
+          }
+          return;
+        }
+
+        // After camera footage ends - nothing to sync
+        if (cameraDuration > 0 && adjustedMasterTime > cameraDuration + 0.05) {
+          return;
+        }
+
+        // Keep within valid playback range
+        const targetTime = Math.max(0, Math.min(adjustedMasterTime, cameraDuration || adjustedMasterTime));
+
+        if (cameraDuration > 0) {
+          const cameraDiff = Math.abs(this.cameraVideo.currentTime - targetTime) * 1000;
+          if (cameraDiff > syncThreshold) {
+            if (cameraDiff > 200) {
+              this.cameraVideo.currentTime = targetTime;
+              console.log('🎥 Camera hard sync:', cameraDiff.toFixed(0) + 'ms → ' + targetTime.toFixed(2) + 's');
+            } else if (cameraDiff > syncThreshold) {
+              if (this.cameraVideo.currentTime > targetTime) {
+                this.cameraVideo.playbackRate = 0.95;
+              } else {
+                this.cameraVideo.playbackRate = 1.05;
               }
+              setTimeout(() => {
+                if (this.cameraVideo) {
+                  this.cameraVideo.playbackRate = 1.0;
+                }
+              }, 500);
+            }
           }
         }
       }
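The correction policy above is two-tiered: drift beyond 200 ms forces a hard seek, while drift between the 30 ms threshold and 200 ms is absorbed by nudging playbackRate and restoring it 500 ms later. Condensed into a sketch (hypothetical helper):

    // Corrective action for a camera frame that is `diffMs` away from target.
    function driftAction(diffMs, cameraIsBehind) {
      if (diffMs > 200) return { seek: true };                        // hard resync
      if (diffMs > 30) return { rate: cameraIsBehind ? 1.05 : 0.95 }; // gentle catch-up / slow-down
      return { rate: 1.0 };                                           // within tolerance
    }

    driftAction(350, true);  // { seek: true }
    driftAction(80, false);  // { rate: 0.95 }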
@@ -744,21 +889,48 @@
       this.ctx.drawImage(this.screenVideo, 0, 0, this.canvas.width, this.canvas.height);
     }

-    // Draw camera video overlay (layer 2)
+    // Draw camera video overlay (layer 2) with offset compensation
     if (this.layers.camera && this.metadata.files.camera && this.cameraVideo.readyState >= 2) {
+      // SYNC FIX: Apply camera start offset
+      const timeInSeconds = currentTime / 1000;
       const cameraWidth = 240;
       const cameraHeight = 180;
       const padding = 20;
       const x = this.canvas.width - cameraWidth - padding;
       const y = this.canvas.height - cameraHeight - padding;
+      const cameraDuration = this.videoDurations.camera || (this.cameraVideo.duration || 0);
+      const cameraStart = this.cameraStartOffset || 0;
+      const cameraEnd = this.cameraAvailableUntil > 0
+        ? this.cameraAvailableUntil
+        : cameraStart + cameraDuration;
+      const beforeStart = timeInSeconds + 0.05 < cameraStart; // small tolerance
+      const afterEnd = cameraDuration > 0 && timeInSeconds - 0.05 > cameraEnd;
+
+      const drawPlaceholder = (message, border = '#666') => {
+        this.ctx.strokeStyle = border;
+        this.ctx.lineWidth = 3;
+        this.ctx.strokeRect(x - 2, y - 2, cameraWidth + 4, cameraHeight + 4);
+        this.ctx.fillStyle = '#333';
+        this.ctx.fillRect(x, y, cameraWidth, cameraHeight);
+        if (message) {
+          this.ctx.fillStyle = '#999';
+          this.ctx.font = '14px sans-serif';
+          this.ctx.textAlign = 'center';
+          this.ctx.textBaseline = 'middle';
+          this.ctx.fillText(message, x + cameraWidth / 2, y + cameraHeight / 2);
+        }
+      };

+      if (beforeStart) {
+        drawPlaceholder('Camera Starting...');
+      } else if (afterEnd) {
+        drawPlaceholder('Camera Finished', '#555');
+      } else {
+        this.ctx.strokeStyle = '#4a9eff';
+        this.ctx.lineWidth = 3;
+        this.ctx.strokeRect(x - 2, y - 2, cameraWidth + 4, cameraHeight + 4);
+        this.ctx.drawImage(this.cameraVideo, x, y, cameraWidth, cameraHeight);
+      }
     }
   }

@@ -813,7 +985,8 @@
     }

     updateProgress(masterTime) {
-
+      // SYNC FIX: Use maxDuration for accurate timeline
+      const duration = (this.maxDuration || this.screenVideo.duration) * 1000;
       const percent = (masterTime / duration) * 100;

       document.getElementById('progressFill').style.width = Math.min(100, percent) + '%';
@@ -832,20 +1005,41 @@
     }

     displayInfo() {
+      // Build duration display with offset info if present
+      let durationDisplay = this.maxDuration > 0
+        ? `${this.maxDuration.toFixed(1)}s`
+        : 'Unknown';
+
+      const offsetMs = Math.round(this.cameraStartOffset * 1000);
+      const earlyStopMs = Math.round(this.cameraEndGap * 1000);
+
+      if (offsetMs > 100) {
+        durationDisplay += ` (Camera +${offsetMs}ms offset)`;
+      }
+
+      if (earlyStopMs > 100) {
+        durationDisplay += ` (Camera ends ${earlyStopMs}ms early)`;
+      }
+
+      const statusIcon = (expected, loaded) => {
+        if (!expected) return '—';
+        return loaded ? '✅' : '⚠️';
+      };
+
       const infoHtml = `
         <div class="info">
           <div class="info-grid">
             <div class="info-item">
               <span class="info-label">🖥️ Screen:</span>
-              <span class="info-value">${this.metadata.files.screen
+              <span class="info-value">${statusIcon(this.metadata.files.screen, this.mediaLoaded.screen)}</span>
             </div>
             <div class="info-item">
               <span class="info-label">📹 Camera:</span>
-              <span class="info-value">${this.metadata.files.camera
+              <span class="info-value">${statusIcon(this.metadata.files.camera, this.mediaLoaded.camera)}</span>
             </div>
             <div class="info-item">
               <span class="info-label">🎙️ Audio:</span>
-              <span class="info-value">${this.metadata.files.audio
+              <span class="info-value">${statusIcon(this.metadata.files.audio, this.mediaLoaded.audio)}</span>
             </div>
             <div class="info-item">
               <span class="info-label">🖱️ Cursor:</span>
@@ -853,7 +1047,7 @@
             </div>
             <div class="info-item">
              <span class="info-label">⏱️ Duration:</span>
-              <span class="info-value">${
+              <span class="info-value">${durationDisplay}</span>
             </div>
             <div class="info-item">
               <span class="info-label">🎞️ Frame Rate:</span>
@@ -861,7 +1055,7 @@
             </div>
             <div class="info-item">
               <span class="info-label">🔄 Sync Mode:</span>
-              <span class="info-value">Master-Slave (30ms) + Adaptive Rate</span>
+              <span class="info-value">Master-Slave (30ms) + Adaptive Rate + Offset</span>
             </div>
           </div>
         </div>

package/index.js
CHANGED

@@ -784,6 +784,11 @@ class MacRecorder extends EventEmitter {
     }

     return new Promise(async (resolve, reject) => {
+      const stopRequestedAt = Date.now();
+      const elapsedSeconds =
+        this.recordingStartTime && this.recordingStartTime > 0
+          ? (stopRequestedAt - this.recordingStartTime) / 1000
+          : -1;
       try {
         console.log('🛑 SYNC: Stopping all recording components simultaneously');

@@ -804,7 +809,8 @@ class MacRecorder extends EventEmitter {
         // 2. Stop native screen recording
         try {
           console.log('🛑 SYNC: Stopping screen recording');
+          const stopLimit = elapsedSeconds > 0 ? elapsedSeconds : 0;
+          success = nativeBinding.stopRecording(stopLimit);
           if (success) {
             console.log('✅ SYNC: Screen recording stopped');
           }

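On the JS side, the stop limit is simply the wall-clock time elapsed since recording started, handed to the native binding so each track can trim frames past it. The calling pattern as a minimal sketch (`binding` stands in for the loaded native addon; hypothetical wiring):

    // Compute the elapsed-time cap and pass it to the native stop call.
    function stopWithLimit(binding, recordingStartTime) {
      const elapsedSeconds = recordingStartTime > 0
        ? (Date.now() - recordingStartTime) / 1000
        : -1;
      // Non-positive values mean "no limit" on the native side.
      return binding.stopRecording(elapsedSeconds > 0 ? elapsedSeconds : 0);
    }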
package/package.json
CHANGED
package/src/audio_recorder.mm
CHANGED

@@ -264,13 +264,20 @@ static dispatch_queue_t g_audioCaptureQueue = nil;
         dispatch_semaphore_signal(semaphore);
     }];

-    //
+    // SYNC FIX: Match camera timeout (3 seconds) for consistent finish timing
+    const int64_t primaryWaitSeconds = 3;
+    dispatch_time_t timeout = dispatch_time(DISPATCH_TIME_NOW, (int64_t)(primaryWaitSeconds * NSEC_PER_SEC));
     long result = dispatch_semaphore_wait(semaphore, timeout);

     if (result != 0 || !finished) {
-        MRLog(@"⚠️ AudioRecorder:
+        MRLog(@"⚠️ AudioRecorder: Writer still finishing after %ds - waiting longer", (int)primaryWaitSeconds);
+        const int64_t extendedWaitSeconds = 5;
+        dispatch_time_t extendedTimeout = dispatch_time(DISPATCH_TIME_NOW, (int64_t)(extendedWaitSeconds * NSEC_PER_SEC));
+        result = dispatch_semaphore_wait(semaphore, extendedTimeout);
+    }
+
+    if (result != 0 || !finished) {
+        MRLog(@"⚠️ AudioRecorder: Writer did not finish after extended wait - forcing cancel");
         [self.writer cancelWriting];
     } else {
         MRLog(@"✅ AudioRecorder writer finished successfully");
@@ -333,6 +340,9 @@ static dispatch_queue_t g_audioCaptureQueue = nil;
     CMItemCount timingEntryCount = 0;
     OSStatus timingStatus = CMSampleBufferGetSampleTimingInfoArray(sampleBuffer, 0, NULL, &timingEntryCount);
     CMSampleTimingInfo *timingInfo = NULL;
+    double stopLimit = MRSyncGetStopLimitSeconds();
+    double audioTolerance = 0.02;
+    BOOL shouldDropBuffer = NO;

     if (timingStatus == noErr && timingEntryCount > 0) {
         timingInfo = (CMSampleTimingInfo *)malloc(sizeof(CMSampleTimingInfo) * timingEntryCount);
@@ -348,6 +358,15 @@ static dispatch_queue_t g_audioCaptureQueue = nil;
                 adjustedPTS = kCMTimeZero;
             }
             timingInfo[i].presentationTimeStamp = adjustedPTS;
+
+            if (stopLimit > 0) {
+                double sampleStart = CMTimeGetSeconds(adjustedPTS);
+                double sampleDuration = CMTIME_IS_VALID(timingInfo[i].duration) ? CMTimeGetSeconds(timingInfo[i].duration) : 0.0;
+                if (sampleStart > stopLimit + audioTolerance ||
+                    (sampleDuration > 0.0 && (sampleStart + sampleDuration) > stopLimit + audioTolerance)) {
+                    shouldDropBuffer = YES;
+                }
+            }
         } else {
             timingInfo[i].presentationTimeStamp = kCMTimeZero;
         }
@@ -376,6 +395,24 @@ static dispatch_queue_t g_audioCaptureQueue = nil;
             timingInfo = NULL;
         }
     }
+
+    if (stopLimit > 0 && !shouldDropBuffer && bufferToAppend == sampleBuffer) {
+        // No timing info available; approximate using buffer timestamp.
+        CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
+        if (CMTIME_IS_VALID(pts)) {
+            double relativeStart = CMTimeGetSeconds(CMTimeSubtract(pts, self.startTime));
+            if (relativeStart > stopLimit + audioTolerance) {
+                shouldDropBuffer = YES;
+            }
+        }
+    }
+
+    if (shouldDropBuffer) {
+        if (bufferToAppend != sampleBuffer) {
+            CFRelease(bufferToAppend);
+        }
+        return;
+    }

     if (![self.writerInput appendSampleBuffer:bufferToAppend]) {
         NSLog(@"⚠️ Failed appending audio buffer: %@", self.writer.error);
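The audio writer drops any buffer whose adjusted start, or end, would land past the stop limit plus a fixed 20 ms tolerance. The same predicate in isolation (a minimal sketch; times in seconds):

    // True when an audio buffer should be dropped under the stop limit.
    function shouldDropAudio(sampleStart, sampleDuration, stopLimit, tolerance = 0.02) {
      if (stopLimit <= 0) return false; // no limit set
      if (sampleStart > stopLimit + tolerance) return true;
      return sampleDuration > 0 && (sampleStart + sampleDuration) > stopLimit + tolerance;
    }

    shouldDropAudio(10.01, 0.023, 10.0); // true  - the buffer's tail crosses 10.02s
    shouldDropAudio(9.99, 0.023, 10.0);  // false - ends at 10.013s, inside tolerance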
package/src/avfoundation_recorder.mm
CHANGED

@@ -80,6 +80,16 @@ extern "C" bool startAVFoundationRecording(const std::string& outputPath,
     // CRITICAL FIX: Use actual captured image dimensions for pixel buffer
     // CGDisplayCreateImage returns physical pixels on Retina displays
     CGImageRef testImage = CGDisplayCreateImage(displayID);
+    if (!testImage) {
+        NSLog(@"❌ AVFoundation: Failed to create display image for displayID %u", displayID);
+        NSLog(@"   Display may be invalid. Retrying with main display...");
+        displayID = CGMainDisplayID();
+        testImage = CGDisplayCreateImage(displayID);
+        if (!testImage) {
+            NSLog(@"❌ AVFoundation: Failed to create image even for main display");
+            return false;
+        }
+    }
     CGSize actualImageSize = CGSizeMake(CGImageGetWidth(testImage), CGImageGetHeight(testImage));
     CGImageRelease(testImage);
@@ -397,6 +407,22 @@ extern "C" bool startAVFoundationRecording(const std::string& outputPath,
     }
     CMTime presentationTime = CMTimeMakeWithSeconds(CMTimeGetSeconds(relativeTime), 600);

+    double stopLimit = MRSyncGetStopLimitSeconds();
+    if (stopLimit > 0) {
+        double secondsFromStart = CMTimeGetSeconds(presentationTime);
+        double tolerance = fps > 0 ? (1.5 / fps) : 0.02;
+        if (tolerance < 0.02) {
+            tolerance = 0.02;
+        }
+        if (secondsFromStart > stopLimit + tolerance) {
+            MRLog(@"ℹ️ Screen frame skipped at %.3fs (limit %.3fs)", secondsFromStart, stopLimit);
+            CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
+            CVPixelBufferRelease(pixelBuffer);
+            CGImageRelease(screenImage);
+            return;
+        }
+    }
+
     BOOL appendSuccess = [localPixelBufferAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:presentationTime];
     if (appendSuccess) {
         g_avFrameNumber++;

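Unlike the audio path's fixed 20 ms, the video paths scale the drop tolerance with the frame rate: 1.5 frame periods, floored at 20 ms. The same rule in isolation (sketch):

    // Tolerance for video frame-drop decisions: 1.5 frame periods, min 20 ms.
    function videoStopTolerance(fps) {
      const t = fps > 0 ? 1.5 / fps : 0.02;
      return Math.max(t, 0.02);
    }

    videoStopTolerance(30);  // 0.05 - allows ~1.5 trailing frames at 30 fps
    videoStopTolerance(120); // 0.02 - the floor kicks in at high frame rates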
package/src/camera_recorder.mm
CHANGED

@@ -90,6 +90,7 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
 @property (nonatomic, assign) int32_t expectedHeight;
 @property (nonatomic, assign) double expectedFrameRate;
 @property (atomic, assign) BOOL needsReconfiguration;
+@property (nonatomic, strong) NSMutableArray<NSValue *> *pendingSampleBuffers;

 + (instancetype)sharedRecorder;
 + (NSArray<NSDictionary *> *)availableCameraDevices;
@@ -102,6 +103,14 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {

 @implementation CameraRecorder

+- (instancetype)init {
+    self = [super init];
+    if (self) {
+        _pendingSampleBuffers = [NSMutableArray array];
+    }
+    return self;
+}
+
 + (instancetype)sharedRecorder {
     static CameraRecorder *recorder = nil;
     static dispatch_once_t onceToken;
@@ -256,6 +265,22 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
     return devicesInfo;
 }

+- (void)clearPendingSampleBuffers {
+    id container = self.pendingSampleBuffers;
+    if (![container isKindOfClass:[NSArray class]]) {
+        MRLog(@"⚠️ CameraRecorder: pendingSampleBuffers corrupted (%@) - resetting", NSStringFromClass([container class]));
+        self.pendingSampleBuffers = [NSMutableArray array];
+        return;
+    }
+    for (NSValue *value in (NSArray *)container) {
+        CMSampleBufferRef buffer = (CMSampleBufferRef)[value pointerValue];
+        if (buffer) {
+            CFRelease(buffer);
+        }
+    }
+    [self.pendingSampleBuffers removeAllObjects];
+}
+
 - (void)resetState {
     self.writerStarted = NO;
     self.isRecording = NO;
@@ -269,6 +294,7 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
     self.pixelBufferAdaptor = nil;
     self.outputPath = nil;
     self.captureQueue = nil;
+    [self clearPendingSampleBuffers];
 }

 - (AVCaptureDevice *)deviceForId:(NSString *)deviceId {
@@ -600,6 +626,7 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
         MRLog(@"⚠️ CameraRecorder: Failed to remove existing camera file: %@", removeError);
     }

+    [self clearPendingSampleBuffers];
     AVCaptureDevice *device = [self deviceForId:deviceId];
     if (!device) {
         if (error) {
@@ -719,6 +746,21 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
         return YES;
     }

+    // Delay stop slightly so camera ends close to audio length.
+    // Tunable via env var CAMERA_TAIL_SECONDS (default 0.11s)
+    NSTimeInterval cameraTailSeconds = 1.7;
+    const char *tailEnv = getenv("CAMERA_TAIL_SECONDS");
+    if (tailEnv) {
+        double parsed = atof(tailEnv);
+        if (parsed >= 0.0 && parsed <= 1.0) {
+            cameraTailSeconds = parsed;
+        }
+    }
+    MRLog(@"⏳ CameraRecorder: Delaying stop by %.3fs for tail capture", cameraTailSeconds);
+    if (cameraTailSeconds > 0) {
+        [NSThread sleepForTimeInterval:cameraTailSeconds];
+    }
+
     // CRITICAL FIX: For external cameras (especially Continuity Camera/iPhone),
     // stopRunning can hang if device is disconnected. Use async approach.
     MRLog(@"🛑 CameraRecorder: Stopping session (external device safe)...");
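As the hunk shows, the CAMERA_TAIL_SECONDS override is only accepted for values between 0.0 and 1.0, so in practice it can only shorten the built-in 1.7 s tail. A hypothetical usage sketch from the Node side (process.env assignments are visible to the native getenv in the same process):

    // Shorten the camera tail before starting a recording session.
    process.env.CAMERA_TAIL_SECONDS = '0.25'; // accepted: inside [0.0, 1.0]
    // A value such as '2.5' falls outside the accepted range and is ignored.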
@@ -814,24 +856,101 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
     return success;
 }

-- (void)
-               fromConnection:(AVCaptureConnection *)connection {
-    if (!self.isRecording) {
+- (void)enqueueSampleBuffer:(CMSampleBufferRef)sampleBuffer {
+    if (!sampleBuffer) {
         return;
     }
+    if (![self.pendingSampleBuffers isKindOfClass:[NSMutableArray class]]) {
+        MRLog(@"⚠️ CameraRecorder: pendingSampleBuffers not NSMutableArray (%@) - reinitializing",
+              NSStringFromClass([self.pendingSampleBuffers class]));
+        self.pendingSampleBuffers = [NSMutableArray array];
+    }
+    CMSampleBufferRef bufferCopy = NULL;
+    OSStatus status = CMSampleBufferCreateCopy(kCFAllocatorDefault, sampleBuffer, &bufferCopy);
+    if (status == noErr && bufferCopy) {
+        [self.pendingSampleBuffers addObject:[NSValue valueWithPointer:bufferCopy]];
+    } else if (bufferCopy) {
+        CFRelease(bufferCopy);
+    }
+}

+- (void)flushPendingSampleBuffers {
+    id container = self.pendingSampleBuffers;
+    if (![container isKindOfClass:[NSArray class]]) {
+        MRLog(@"⚠️ CameraRecorder: pendingSampleBuffers corrupted (%@) - resetting",
+              NSStringFromClass([container class]));
+        self.pendingSampleBuffers = [NSMutableArray array];
+        return;
+    }
+    if ([(NSArray *)container count] == 0) {
         return;
     }

+    NSArray<NSValue *> *queued = [(NSArray *)container copy];
+    [self.pendingSampleBuffers removeAllObjects];

+    CMTime audioStart = MRSyncAudioFirstTimestamp();
+    BOOL hasAudioStart = CMTIME_IS_VALID(audioStart);
+
+    double stopLimit = MRSyncGetStopLimitSeconds();
+
+    for (NSValue *value in queued) {
+        CMSampleBufferRef buffer = (CMSampleBufferRef)[value pointerValue];
+        if (!buffer) {
+            continue;
+        }
+
+        CMTime bufferTime = CMSampleBufferGetPresentationTimeStamp(buffer);
+        if (hasAudioStart && CMTIME_IS_VALID(bufferTime)) {
+            // Drop frames captured before audio actually began to keep durations aligned.
+            if (CMTIME_COMPARE_INLINE(bufferTime, <, audioStart)) {
+                CFRelease(buffer);
+                continue;
+            }
+        }
+
+        if (stopLimit > 0 && CMTIME_IS_VALID(bufferTime)) {
+            CMTime baseline = kCMTimeInvalid;
+            if (CMTIME_IS_VALID(self.firstSampleTime)) {
+                baseline = self.firstSampleTime;
+            } else if (hasAudioStart) {
+                baseline = audioStart;
+            }
+            double frameSeconds = 0.0;
+            if (CMTIME_IS_VALID(baseline)) {
+                frameSeconds = CMTimeGetSeconds(CMTimeSubtract(bufferTime, baseline));
+            }
+            // Adjust camera stop limit by start offset relative to audio
+            double effectiveStopLimit = stopLimit;
+            if (hasAudioStart && CMTIME_IS_VALID(baseline)) {
+                CMTime startDeltaTime = CMTimeSubtract(baseline, audioStart);
+                double startDelta = CMTimeGetSeconds(startDeltaTime);
+                if (startDelta > 0) {
+                    effectiveStopLimit += startDelta;
+                }
+            }
+            double tolerance = self.expectedFrameRate > 0 ? (1.5 / self.expectedFrameRate) : 0.02;
+            if (tolerance < 0.02) {
+                tolerance = 0.02;
+            }
+            if (frameSeconds > effectiveStopLimit + tolerance) {
+                CFRelease(buffer);
+                continue;
+            }
+        }
+
+        [self processSampleBufferReadyForWriting:buffer];
+        CFRelease(buffer);
+    }
+}
+
+- (void)processSampleBufferReadyForWriting:(CMSampleBufferRef)sampleBuffer {
+    if (!sampleBuffer) {
         return;
     }

+    CMTime timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
+
     // Lazy initialization - setup writer with actual frame dimensions
     if (!self.assetWriter) {
         CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
@@ -846,11 +965,9 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
         MRLog(@"🎬 First frame received: %zux%zu (format said %dx%d)",
               actualWidth, actualHeight, self.expectedWidth, self.expectedHeight);

-        // Use ACTUAL dimensions from the frame, not format dimensions
         NSURL *outputURL = [NSURL fileURLWithPath:self.outputPath];
         NSError *setupError = nil;

-        // Use frame rate from device configuration
         double frameRate = self.expectedFrameRate > 0 ? self.expectedFrameRate : 30.0;

         if (![self setupWriterWithURL:outputURL
@@ -880,37 +997,58 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
             }
         }
     }

     if (!self.writerStarted || self.assetWriter.status != AVAssetWriterStatusWriting) {
         return;
     }

     if (!self.assetWriterInput.readyForMoreMediaData) {
         return;
     }
-
-    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
-    if (!pixelBuffer) {
-        return;
-    }
-
+
     if (CMTIME_IS_INVALID(self.firstSampleTime)) {
         self.firstSampleTime = timestamp;
     }

     CMTime relativeTimestamp = timestamp;
     if (CMTIME_IS_VALID(self.firstSampleTime)) {
-        // Align camera frames to a zero-based timeline so multi-track compositions stay in sync
         relativeTimestamp = CMTimeSubtract(timestamp, self.firstSampleTime);
         if (CMTIME_COMPARE_INLINE(relativeTimestamp, <, kCMTimeZero)) {
             relativeTimestamp = kCMTimeZero;
         }
     }

+    double stopLimit = MRSyncGetStopLimitSeconds();
+    if (stopLimit > 0) {
+        // Adjust by camera start vs audio start so durations align closely
+        CMTime audioStartTS = MRSyncAudioFirstTimestamp();
+        if (CMTIME_IS_VALID(audioStartTS) && CMTIME_IS_VALID(self.firstSampleTime)) {
+            CMTime startDeltaTS = CMTimeSubtract(self.firstSampleTime, audioStartTS);
+            double startDelta = CMTimeGetSeconds(startDeltaTS);
+            if (startDelta > 0) {
+                stopLimit += startDelta;
+            }
+        }
+
+        double frameSeconds = CMTimeGetSeconds(relativeTimestamp);
+        double tolerance = self.expectedFrameRate > 0 ? (1.5 / self.expectedFrameRate) : 0.02;
+        if (tolerance < 0.02) {
+            tolerance = 0.02;
+        }
+        if (frameSeconds > stopLimit + tolerance) {
+            return;
+        }
+    }
+
+    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+    if (!pixelBuffer) {
+        return;
+    }
+
     CVPixelBufferRetain(pixelBuffer);
     BOOL appended = [self.pixelBufferAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:relativeTimestamp];
     CVPixelBufferRelease(pixelBuffer);

     if (!appended) {
         MRLog(@"⚠️ CameraRecorder: Failed to append camera frame at time %.2f (status %ld)",
               CMTimeGetSeconds(relativeTimestamp), (long)self.assetWriter.status);
@@ -921,6 +1059,34 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
     }
 }

+- (void)captureOutput:(AVCaptureOutput *)output
+didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
+       fromConnection:(AVCaptureConnection *)connection {
+    if (!self.isRecording) {
+        return;
+    }
+
+    if (!sampleBuffer) {
+        return;
+    }
+
+    CMTime timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
+
+    // If audio is expected but not yet flowing, hold video frames to keep timeline aligned.
+    if (MRSyncShouldHoldVideoFrame(timestamp)) {
+        [self enqueueSampleBuffer:sampleBuffer];
+        if (CMTIME_IS_INVALID(self.firstSampleTime)) {
+            self.firstSampleTime = timestamp;
+        }
+        return;
+    }
+
+    // Flush any buffered frames now that audio is ready
+    [self flushPendingSampleBuffers];
+
+    [self processSampleBufferReadyForWriting:sampleBuffer];
+}
+
 @end

 // MARK: - C Interface

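Taken together, enqueue/flush/process give captureOutput a hold-then-flush pipeline: frames that arrive before audio are copied and parked, and the first frame after audio starts drains the queue in order before being written itself. A minimal sketch of that control flow (JS stand-in; shouldHoldForAudio, copyFrame, and writeFrame are hypothetical):

    // Hold-then-flush flow from captureOutput, reduced to a sketch.
    function onCameraFrame(frame, state, { shouldHoldForAudio, copyFrame, writeFrame }) {
      if (!state.isRecording) return;
      if (shouldHoldForAudio(frame.timestamp)) {
        state.pending.push(copyFrame(frame)); // buffered until audio begins
        return;
      }
      state.pending.splice(0).forEach(writeFrame); // flush queued frames in order
      writeFrame(frame);
    }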
package/src/mac_recorder.mm
CHANGED

@@ -383,17 +383,14 @@ Napi::Value StartRecording(const Napi::CallbackInfo& info) {
         MRLog(@"   Reason: ScreenCaptureKit has thread safety issues in Electron (SIGTRAP crashes)");
     }

-    // CRITICAL FIX:
-    //
-    //
+    // CRITICAL FIX: Always use AVFoundation for stability
+    // ScreenCaptureKit has file writing issues in Node.js environment
+    // AVFoundation works reliably in both Node.js and Electron
     BOOL forceAVFoundation = YES;

-    MRLog(@"🔧
-    MRLog(@"
-
-    if (isElectron) {
-        MRLog(@"⚡ Electron environment detected - using stable AVFoundation");
-    }
+    MRLog(@"🔧 FRAMEWORK SELECTION: Using AVFoundation for stability");
+    MRLog(@"   Environment: %@", isElectron ? @"Electron" : @"Node.js");
+    MRLog(@"   macOS: %ld.%ld.%ld", (long)osVersion.majorVersion, (long)osVersion.minorVersion, (long)osVersion.patchVersion);

     // Electron-first priority: ALWAYS use AVFoundation in Electron for stability
     // ScreenCaptureKit has severe thread safety issues in Electron causing SIGTRAP crashes
@@ -592,6 +589,19 @@ Napi::Value StopRecording(const Napi::CallbackInfo& info) {
     Napi::Env env = info.Env();

     MRLog(@"🛑 StopRecording native method called");
+
+    double stopLimitSeconds = -1.0;
+    if (info.Length() > 0 && info[0].IsNumber()) {
+        stopLimitSeconds = info[0].As<Napi::Number>().DoubleValue();
+        if (stopLimitSeconds > 0) {
+            MRLog(@"⏲️ Requested stop limit: %.3f seconds", stopLimitSeconds);
+            MRSyncSetStopLimitSeconds(stopLimitSeconds);
+        } else {
+            MRSyncSetStopLimitSeconds(-1.0);
+        }
+    } else {
+        MRSyncSetStopLimitSeconds(-1.0);
+    }

     // Try ScreenCaptureKit first
     if (@available(macOS 12.3, *)) {
@@ -617,6 +627,7 @@ Napi::Value StopRecording(const Napi::CallbackInfo& info) {
         // DO NOT set g_isRecording here - let ScreenCaptureKit completion handler do it
         // Otherwise we have a race condition where JS thinks recording stopped but it's still running
         g_usingStandaloneAudio = false;
+        MRSyncSetStopLimitSeconds(-1.0);
         return Napi::Boolean::New(env, true);
     }
 }
@@ -631,44 +642,61 @@ Napi::Value StopRecording(const Napi::CallbackInfo& info) {
     MRLog(@"🛑 Stopping AVFoundation recording");

     BOOL cameraWasRecording = isCameraRecording();
+    BOOL audioWasRecording = g_usingStandaloneAudio && isStandaloneAudioRecording();
     __block BOOL cameraStopResult = YES;
+    __block BOOL audioStopResult = YES;
+
+    // SYNC FIX: Create unified stop group for camera and audio
+    dispatch_group_t stopGroup = dispatch_group_create();

+    // SYNC FIX: Stop camera and audio SIMULTANEOUSLY in parallel
     if (cameraWasRecording) {
-        MRLog(@"🛑 Stopping camera recording...");
+        MRLog(@"🛑 SYNC: Stopping camera recording...");
         cameraStopResult = NO;
-        dispatch_group_enter(cameraStopGroup);
-        // Stop camera on a background queue so audio/screen shutdown can proceed immediately.
+        dispatch_group_enter(stopGroup);
         dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), ^{
             cameraStopResult = stopCameraRecording() ? YES : NO;
-            dispatch_group_leave(
+            dispatch_group_leave(stopGroup);
         });
     }

+    if (audioWasRecording) {
+        MRLog(@"🛑 SYNC: Stopping audio recording...");
+        audioStopResult = NO;
+        dispatch_group_enter(stopGroup);
+        dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), ^{
+            audioStopResult = stopStandaloneAudioRecording() ? YES : NO;
+            dispatch_group_leave(stopGroup);
+        });
     }

     bool avFoundationStopped = stopAVFoundationRecording();

+    // SYNC FIX: Wait for both camera AND audio to finish (increased timeout to 5s)
+    if (cameraWasRecording || audioWasRecording) {
+        dispatch_time_t waitTime = dispatch_time(DISPATCH_TIME_NOW, (int64_t)(5 * NSEC_PER_SEC));
+        long waitResult = dispatch_group_wait(stopGroup, waitTime);
        if (waitResult != 0) {
-            MRLog(@"⚠️ Camera stop did not finish within
-            cameraStopResult
+            MRLog(@"⚠️ SYNC: Camera/Audio stop did not finish within 5 seconds");
+            if (cameraWasRecording && !cameraStopResult) {
+                MRLog(@"   ⚠️ Camera stop timed out");
+            }
+            if (audioWasRecording && !audioStopResult) {
+                MRLog(@"   ⚠️ Audio stop timed out");
+            }
        } else {
+            if (cameraWasRecording) {
+                MRLog(@"✅ SYNC: Camera stopped successfully");
+            }
+            if (audioWasRecording) {
+                MRLog(@"✅ SYNC: Audio stopped successfully");
+            }
        }
     }

     g_isRecording = false;
     g_usingStandaloneAudio = false;
+    MRSyncSetStopLimitSeconds(-1.0);

     if (avFoundationStopped && (!cameraWasRecording || cameraStopResult)) {
         MRLog(@"✅ AVFoundation recording stopped");
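The stop path now parks camera and audio shutdown on one dispatch group and waits once, for at most five seconds, instead of waiting on the camera alone. The equivalent shape in JS terms (a sketch; stopCamera and stopAudio are hypothetical stand-ins for the native calls):

    // Parallel stop with a shared 5-second budget.
    async function stopAll(stopCamera, stopAudio) {
      const timeout = new Promise(resolve => setTimeout(resolve, 5000, 'timeout'));
      const stops = Promise.all([stopCamera(), stopAudio()]);
      if (await Promise.race([stops, timeout]) === 'timeout') {
        console.warn('Camera/Audio stop did not finish within 5 seconds');
      }
    }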
@@ -690,6 +718,7 @@ Napi::Value StopRecording(const Napi::CallbackInfo& info) {
         stopCameraRecording();
     }
     g_isRecording = false;
+    MRSyncSetStopLimitSeconds(-1.0);
     return Napi::Boolean::New(env, true);
 }

package/src/sync_timeline.h
CHANGED

@@ -20,6 +20,16 @@ BOOL MRSyncShouldHoldVideoFrame(CMTime timestamp);
 // so both tracks share the same starting point.
 void MRSyncMarkAudioSample(CMTime timestamp);

+// Returns the offset between audio and video start timestamps when available.
+CMTime MRSyncVideoAlignmentOffset(void);
+
+// Returns the first audio timestamp observed for the current session.
+CMTime MRSyncAudioFirstTimestamp(void);
+
+// Optional hard stop limit (seconds) shared across capture components.
+void MRSyncSetStopLimitSeconds(double seconds);
+double MRSyncGetStopLimitSeconds(void);
+
 #ifdef __cplusplus
 }
 #endif

package/src/sync_timeline.mm
CHANGED

@@ -16,6 +16,7 @@ static CMTime g_videoFirstTimestamp = kCMTimeInvalid;
 static BOOL g_videoHoldLogged = NO;
 static CMTime g_audioFirstTimestamp = kCMTimeInvalid;
 static CMTime g_alignmentDelta = kCMTimeInvalid;
+static double g_stopLimitSeconds = -1.0;

 void MRSyncConfigure(BOOL expectAudio) {
     dispatch_sync(MRSyncQueue(), ^{

@@ -25,6 +26,7 @@ void MRSyncConfigure(BOOL expectAudio) {
         g_videoHoldLogged = NO;
         g_audioFirstTimestamp = kCMTimeInvalid;
         g_alignmentDelta = kCMTimeInvalid;
+        g_stopLimitSeconds = -1.0;
     });
 }

@@ -71,7 +73,7 @@ BOOL MRSyncShouldHoldVideoFrame(CMTime timestamp) {
     }

     CMTime elapsed = CMTimeSubtract(timestamp, g_videoFirstTimestamp);
-    CMTime maxWait = CMTimeMakeWithSeconds(0.25, 600);
+    CMTime maxWait = CMTimeMakeWithSeconds(1.0, 600); // SYNC FIX: Increased from 0.25s to 1.0s for better sync tolerance
     if (CMTIME_COMPARE_INLINE(elapsed, >, maxWait)) {
         g_audioReady = YES;
         g_videoFirstTimestamp = kCMTimeInvalid;

@@ -85,9 +87,9 @@ BOOL MRSyncShouldHoldVideoFrame(CMTime timestamp) {
     });

     if (logHold) {
-        MRLog(@"⏸️ Video pipeline waiting for audio to begin (holding frames up to 0.25s)");
+        MRLog(@"⏸️ Video pipeline waiting for audio to begin (holding frames up to 1.0s)");
     } else if (logRelease) {
-        MRLog(@"▶️ Video pipeline resume forced (audio not detected within 0.25s)");
+        MRLog(@"▶️ Video pipeline resume forced (audio not detected within 1.0s)");
     }

     return shouldHold;

@@ -142,3 +144,17 @@ CMTime MRSyncAudioFirstTimestamp(void) {
     });
     return ts;
 }
+
+void MRSyncSetStopLimitSeconds(double seconds) {
+    dispatch_sync(MRSyncQueue(), ^{
+        g_stopLimitSeconds = seconds;
+    });
+}
+
+double MRSyncGetStopLimitSeconds(void) {
+    __block double seconds = -1.0;
+    dispatch_sync(MRSyncQueue(), ^{
+        seconds = g_stopLimitSeconds;
+    });
+    return seconds;
+}