node-mac-recorder 2.21.52 → 2.21.54
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -42,7 +42,13 @@
       "Bash(echo:*)",
       "Bash(timeout 3 node:*)",
       "Bash(timeout 25 node:*)",
-      "Bash(ls:*)"
+      "Bash(ls:*)",
+      "Bash(for f in test-output/temp_camera_1764838803672.mov test-output/temp_audio_1764838803672.mov test-output/duration-test-1764838803672.mov)",
+      "Bash(do)",
+      "Bash(grep:*)",
+      "Bash(timeout 10 ffprobe:*)",
+      "Bash(ffmpeg:*)",
+      "Bash(timeout 30 node:*)"
     ],
     "deny": [],
     "ask": []
package/package.json    CHANGED
package/src/camera_recorder.mm    CHANGED
@@ -525,8 +525,10 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
 
     CMTime timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
 
-    //
-
+    // Hold camera frames until we see audio so timelines stay aligned
+    if (MRSyncShouldHoldVideoFrame(timestamp)) {
+        return;
+    }
 
     // Start writer session on first frame
     if (!self.writerStarted) {
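The gate above calls MRSyncShouldHoldVideoFrame(), whose implementation is not included in this diff. Purely as a mental model, here is a minimal sketch of how such a helper could be backed by shared state that the audio callback fills in via MRSyncMarkAudioSample() (called in a later hunk below); the function bodies are an assumption, not the package's actual code.

    // Illustrative sketch only -- the real MRSync* implementations are not part of this diff.
    #import <Foundation/Foundation.h>
    #import <CoreMedia/CoreMedia.h>
    #import <os/lock.h>

    static os_unfair_lock g_mrSyncLock = OS_UNFAIR_LOCK_INIT;
    static CMTime g_mrSyncFirstAudioPTS;      // meaningful only once the flag below is set
    static BOOL   g_mrSyncHasAudio = NO;

    // Audio callbacks record the first audio presentation timestamp they see.
    void MRSyncMarkAudioSample(CMTime pts) {
        os_unfair_lock_lock(&g_mrSyncLock);
        if (!g_mrSyncHasAudio) {
            g_mrSyncFirstAudioPTS = pts;
            g_mrSyncHasAudio = YES;
        }
        os_unfair_lock_unlock(&g_mrSyncLock);
    }

    // Video callbacks drop frames until audio has started flowing.
    // (videoPTS is unused here; a fuller version might also compare it to the audio PTS.)
    BOOL MRSyncShouldHoldVideoFrame(CMTime videoPTS) {
        os_unfair_lock_lock(&g_mrSyncLock);
        BOOL hold = !g_mrSyncHasAudio;
        os_unfair_lock_unlock(&g_mrSyncLock);
        return hold;
    }

    // The camera writer reads the stored timestamp to anchor its own timeline.
    CMTime MRSyncAudioFirstTimestamp(void) {
        os_unfair_lock_lock(&g_mrSyncLock);
        CMTime first = g_mrSyncHasAudio ? g_mrSyncFirstAudioPTS : kCMTimeInvalid;
        os_unfair_lock_unlock(&g_mrSyncLock);
        return first;
    }

The real helpers may also track an alignment offset (MRSyncVideoAlignmentOffset(), used in the next hunk), which this sketch omits.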
@@ -536,11 +538,31 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
         }
         [self.writer startSessionAtSourceTime:kCMTimeZero]; // CRITICAL: t=0 timeline
         self.writerStarted = YES;
-
+
+        // LIP SYNC FIX: Align camera startTime with audio's first timestamp for perfect lip sync
+        // This ensures camera and audio start from the same reference point
+        CMTime audioFirstTimestamp = MRSyncAudioFirstTimestamp();
+        CMTime alignmentOffset = MRSyncVideoAlignmentOffset();
+
+        if (CMTIME_IS_VALID(audioFirstTimestamp)) {
+            // Use audio's first timestamp as reference - this is the key to lip sync
+            self.startTime = audioFirstTimestamp;
+            CMTime offset = CMTimeSubtract(timestamp, audioFirstTimestamp);
+            double offsetMs = CMTimeGetSeconds(offset) * 1000.0;
+            MRLog(@"🎥 Camera writer started @ t=0 (aligned with audio first timestamp, offset: %.1fms)", offsetMs);
+        } else if (CMTIME_IS_VALID(alignmentOffset)) {
+            // If audio came first, use the alignment offset to sync
+            self.startTime = CMTimeSubtract(timestamp, alignmentOffset);
+            double offsetMs = CMTimeGetSeconds(alignmentOffset) * 1000.0;
+            MRLog(@"🎥 Camera writer started @ t=0 (using alignment offset: %.1fms)", offsetMs);
+        } else {
+            // Fallback: use camera's own timestamp (should not happen if sync is configured)
+            self.startTime = timestamp;
+            MRLog(@"🎥 Camera writer started @ t=0 (source PTS: %.3fs, no audio sync available)", CMTimeGetSeconds(timestamp));
+        }
+
         g_cameraStartTimestamp = CFAbsoluteTimeGetCurrent();
 
-        MRLog(@"🎥 Camera writer started @ t=0 (source PTS: %.3fs)", CMTimeGetSeconds(timestamp));
-
         // Signal start completion
         [self completeStart:YES token:self.activeToken];
     }
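To make the effect of this branch concrete, here is a worked example with invented timestamps (the 10.000 s / 10.042 s values are illustrative, not measured from the package):

    #import <CoreMedia/CoreMedia.h>
    #include <stdio.h>

    int main(void) {
        // Invented values: first audio sample at 10.000 s, first camera frame at 10.042 s
        // on the shared device clock.
        CMTime audioFirst  = CMTimeMake(10000, 1000);
        CMTime cameraFirst = CMTimeMake(10042, 1000);

        // With the fix, the camera timeline is anchored to the audio's first timestamp,
        // so the first camera frame lands at 0.042 s -- its real distance from the audio.
        CMTime adjusted = CMTimeSubtract(cameraFirst, audioFirst);
        printf("aligned camera PTS: %.3f s\n", CMTimeGetSeconds(adjusted));   // 0.042

        // Before the fix (startTime taken from the camera's own first PTS, per the removed
        // log line), the same frame was written at 0.000 s -- 42 ms ahead of its audio.
        return 0;
    }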
@@ -551,12 +573,17 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
     }
 
     // TIMESTAMP NORMALIZATION (audio_recorder.mm pattern)
+    // LIP SYNC FIX: Use audio-aligned startTime for perfect synchronization
     CMTime adjustedTimestamp = kCMTimeZero;
     if (CMTIME_IS_VALID(self.startTime)) {
         adjustedTimestamp = CMTimeSubtract(timestamp, self.startTime);
         if (CMTIME_COMPARE_INLINE(adjustedTimestamp, <, kCMTimeZero)) {
             adjustedTimestamp = kCMTimeZero;
         }
+    } else {
+        // Fallback: if startTime not set, use current timestamp as base
+        // This should not happen if sync is working correctly
+        adjustedTimestamp = kCMTimeZero;
     }
 
     // Get pixel buffer from sample
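The clamp to kCMTimeZero matters once startTime is taken from the audio track: a camera frame captured just before the first audio sample would otherwise receive a negative presentation time. A small illustration with invented values (the hold gate normally drops such frames, so this is a defensive path):

    #import <CoreMedia/CoreMedia.h>
    #include <stdio.h>

    int main(void) {
        // Invented values: a camera frame at 9.980 s arrives just before the first
        // audio sample at 10.000 s.
        CMTime startTime = CMTimeMake(10000, 1000);   // audio-aligned start
        CMTime framePTS  = CMTimeMake(9980, 1000);    // early camera frame

        CMTime adjusted = CMTimeSubtract(framePTS, startTime);   // -0.020 s
        if (CMTIME_COMPARE_INLINE(adjusted, <, kCMTimeZero)) {
            // The writer session starts at t=0, so an earlier PTS would fall outside the
            // session; pinning it to zero keeps the frame at the very start of the timeline.
            adjusted = kCMTimeZero;
        }
        printf("written PTS: %.3f s\n", CMTimeGetSeconds(adjusted));   // 0.000
        return 0;
    }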
@@ -475,8 +475,7 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
     }
 
     CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
-
-
+
     // Wait for audio to arrive before starting screen video to prevent leading frames.
     if (MRSyncShouldHoldVideoFrame(presentationTime)) {
         return;
@@ -632,6 +631,7 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
     }
 
     CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
+    MRSyncMarkAudioSample(presentationTime);
 
     if (!g_audioWriterStarted) {
        if (![g_audioWriter startWriting]) {
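The camera hunks above cite the "audio_recorder.mm pattern" for timestamp normalization, and that file is not included in this diff. For context, a common way to rebase an audio sample buffer onto a t=0 timeline is to copy it with adjusted timing, as in the sketch below; the helper name is hypothetical and whether the package does exactly this is an assumption on my part.

    #import <CoreMedia/CoreMedia.h>

    // Sketch: return a copy of `original` whose presentation time is relative to `firstPTS`.
    // The caller owns the returned buffer and releases it with CFRelease().
    static CMSampleBufferRef MRCopyRebasedSampleBuffer(CMSampleBufferRef original, CMTime firstPTS) {
        CMSampleTimingInfo timing;
        if (CMSampleBufferGetSampleTimingInfo(original, 0, &timing) != noErr) {
            return NULL;
        }
        timing.presentationTimeStamp = CMTimeSubtract(timing.presentationTimeStamp, firstPTS);
        if (CMTIME_COMPARE_INLINE(timing.presentationTimeStamp, <, kCMTimeZero)) {
            timing.presentationTimeStamp = kCMTimeZero;   // same defensive clamp as the video path
        }

        CMSampleBufferRef rebased = NULL;
        OSStatus status = CMSampleBufferCreateCopyWithNewTiming(kCFAllocatorDefault, original,
                                                                1, &timing, &rebased);
        return (status == noErr) ? rebased : NULL;
    }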
@@ -1 +0,0 @@
-[{"x":549,"y":1055,"timestamp":23,"unixTimeMs":1764800556470,"cursorType":"text","type":"move","coordinateSystem":"video-relative","recordingType":"display","videoInfo":{"width":2048,"height":1330,"offsetX":0,"offsetY":0},"displayInfo":{"displayId":1,"width":2048,"height":1330},"_syncMetadata":{"videoStartTime":1764800554355,"cursorStartTime":1764800556447,"offsetMs":2092}}]

@@ -1 +0,0 @@
-[{"x":412,"y":989,"timestamp":25,"unixTimeMs":1764800788789,"cursorType":"text","type":"move","coordinateSystem":"video-relative","recordingType":"display","videoInfo":{"width":2048,"height":1330,"offsetX":0,"offsetY":0},"displayInfo":{"displayId":1,"width":2048,"height":1330},"_syncMetadata":{"videoStartTime":1764800784424,"cursorStartTime":1764800788764,"offsetMs":4340}}]