node-mac-recorder 2.21.25 → 2.21.26
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.js +48 -17
- package/package.json +1 -1
- package/src/avfoundation_recorder.mm +8 -2
- package/src/mac_recorder.mm +24 -39
package/index.js
CHANGED

```diff
@@ -37,6 +37,7 @@ class MacRecorder extends EventEmitter {
     this.cameraCaptureFile = null;
     this.cameraCaptureActive = false;
     this.sessionTimestamp = null;
+    this.syncTimestamp = null;
     this.audioCaptureFile = null;
     this.audioCaptureActive = false;

@@ -553,6 +554,11 @@ class MacRecorder extends EventEmitter {

         // Only start cursor if native recording started successfully
         if (success) {
+          this.sessionTimestamp = sessionTimestamp;
+          const syncTimestamp = Date.now();
+          this.syncTimestamp = syncTimestamp;
+          this.recordingStartTime = syncTimestamp;
+
           const standardCursorOptions = {
             videoRelative: true,
             displayInfo: this.recordingDisplayInfo,
@@ -560,11 +566,11 @@ class MacRecorder extends EventEmitter {
               this.options.captureArea ? 'area' : 'display',
             captureArea: this.options.captureArea,
             windowId: this.options.windowId,
-            startTimestamp:
+            startTimestamp: syncTimestamp // Align cursor timeline to actual start
           };

           try {
-            console.log('🎯 SYNC: Starting cursor tracking at timestamp:',
+            console.log('🎯 SYNC: Starting cursor tracking at timestamp:', syncTimestamp);
             await this.startCursorCapture(cursorFilePath, standardCursorOptions);
             console.log('✅ SYNC: Cursor tracking started successfully');
           } catch (cursorError) {
@@ -574,6 +580,9 @@ class MacRecorder extends EventEmitter {
         }

         if (success) {
+          const timelineTimestamp = this.syncTimestamp || sessionTimestamp;
+          const fileTimestamp = this.sessionTimestamp || sessionTimestamp;
+
           if (this.options.captureCamera === true) {
             try {
               const nativeCameraPath = nativeBinding.getCameraRecordingPath
@@ -601,31 +610,33 @@ class MacRecorder extends EventEmitter {
             }
           }
           this.isRecording = true;
-          // SYNC FIX: Use session timestamp for consistent timing across all components
-          this.recordingStartTime = sessionTimestamp;

           if (this.options.captureCamera === true && cameraFilePath) {
             this.cameraCaptureActive = true;
-            console.log('📹 SYNC: Camera recording started at timestamp:',
+            console.log('📹 SYNC: Camera recording started at timestamp:', timelineTimestamp);
             this.emit("cameraCaptureStarted", {
               outputPath: cameraFilePath,
               deviceId: this.options.cameraDeviceId || null,
-              timestamp:
-                sessionTimestamp,
+              timestamp: timelineTimestamp,
+              sessionTimestamp: fileTimestamp,
+              syncTimestamp: timelineTimestamp,
+              fileTimestamp,
             });
           }

           if (captureAudio && audioFilePath) {
             this.audioCaptureActive = true;
-            console.log('🎙️ SYNC: Audio recording started at timestamp:',
+            console.log('🎙️ SYNC: Audio recording started at timestamp:', timelineTimestamp);
             this.emit("audioCaptureStarted", {
               outputPath: audioFilePath,
               deviceIds: {
                 microphone: this.options.audioDeviceId || null,
                 system: this.options.systemAudioDeviceId || null,
               },
-              timestamp:
-                sessionTimestamp,
+              timestamp: timelineTimestamp,
+              sessionTimestamp: fileTimestamp,
+              syncTimestamp: timelineTimestamp,
+              fileTimestamp,
             });
           }

@@ -638,7 +649,7 @@ class MacRecorder extends EventEmitter {
           if (this.cursorCaptureInterval) activeComponents.push('Cursor');
           if (this.cameraCaptureActive) activeComponents.push('Camera');
           if (this.audioCaptureActive) activeComponents.push('Audio');
-          console.log(`✅ SYNC COMPLETE: All components synchronized at timestamp ${
+          console.log(`✅ SYNC COMPLETE: All components synchronized at timestamp ${timelineTimestamp}`);
           console.log(`   Active components: ${activeComponents.join(', ')}`);

           // Timer başlat (progress tracking için)
@@ -659,15 +670,19 @@ class MacRecorder extends EventEmitter {
               clearInterval(checkRecordingStatus);

               // Kayıt gerçekten başladığı anda event emit et
+              const startTimestampPayload = this.syncTimestamp || this.recordingStartTime || Date.now();
+              const fileTimestampPayload = this.sessionTimestamp;
               this.emit("recordingStarted", {
                 outputPath: this.outputPath,
-                timestamp:
+                timestamp: startTimestampPayload,
                 options: this.options,
                 nativeConfirmed: true,
                 cameraOutputPath: this.cameraCaptureFile || null,
                 audioOutputPath: this.audioCaptureFile || null,
                 cursorOutputPath: cursorFilePath,
-                sessionTimestamp:
+                sessionTimestamp: fileTimestampPayload,
+                syncTimestamp: startTimestampPayload,
+                fileTimestamp: fileTimestampPayload,
               });
             }
           } catch (error) {
@@ -675,15 +690,19 @@ class MacRecorder extends EventEmitter {
             if (!recordingStartedEmitted) {
               recordingStartedEmitted = true;
               clearInterval(checkRecordingStatus);
+              const startTimestampPayload = this.syncTimestamp || this.recordingStartTime || Date.now();
+              const fileTimestampPayload = this.sessionTimestamp;
               this.emit("recordingStarted", {
                 outputPath: this.outputPath,
-                timestamp:
+                timestamp: startTimestampPayload,
                 options: this.options,
                 nativeConfirmed: false,
                 cameraOutputPath: this.cameraCaptureFile || null,
                 audioOutputPath: this.audioCaptureFile || null,
                 cursorOutputPath: cursorFilePath,
-                sessionTimestamp:
+                sessionTimestamp: fileTimestampPayload,
+                syncTimestamp: startTimestampPayload,
+                fileTimestamp: fileTimestampPayload,
               });
             }
           }
@@ -694,15 +713,19 @@ class MacRecorder extends EventEmitter {
           if (!recordingStartedEmitted) {
             recordingStartedEmitted = true;
             clearInterval(checkRecordingStatus);
+            const startTimestampPayload = this.syncTimestamp || this.recordingStartTime || Date.now();
+            const fileTimestampPayload = this.sessionTimestamp;
             this.emit("recordingStarted", {
               outputPath: this.outputPath,
-              timestamp:
+              timestamp: startTimestampPayload,
               options: this.options,
               nativeConfirmed: false,
               cameraOutputPath: this.cameraCaptureFile || null,
               audioOutputPath: this.audioCaptureFile || null,
               cursorOutputPath: cursorFilePath,
-              sessionTimestamp:
+              sessionTimestamp: fileTimestampPayload,
+              syncTimestamp: startTimestampPayload,
+              fileTimestamp: fileTimestampPayload,
             });
           }
         }, 5000);
@@ -735,6 +758,7 @@ class MacRecorder extends EventEmitter {
           }

           this.sessionTimestamp = null;
+          this.syncTimestamp = null;

           reject(
             new Error(
@@ -744,6 +768,7 @@ class MacRecorder extends EventEmitter {
         }
       } catch (error) {
         this.sessionTimestamp = null;
+        this.syncTimestamp = null;
         reject(error);
       }
     });
@@ -822,6 +847,7 @@ class MacRecorder extends EventEmitter {
         outputPath: this.cameraCaptureFile || null,
         success: success === true,
         sessionTimestamp: this.sessionTimestamp,
+        syncTimestamp: this.syncTimestamp,
       });
     }

@@ -832,6 +858,7 @@ class MacRecorder extends EventEmitter {
         outputPath: this.audioCaptureFile || null,
         success: success === true,
         sessionTimestamp: this.sessionTimestamp,
+        syncTimestamp: this.syncTimestamp,
       });
     }

@@ -857,6 +884,7 @@ class MacRecorder extends EventEmitter {
         cameraOutputPath: this.cameraCaptureFile || null,
         audioOutputPath: this.audioCaptureFile || null,
         sessionTimestamp: sessionId,
+        syncTimestamp: this.syncTimestamp,
       };

       this.emit("stopped", result);
@@ -871,6 +899,7 @@ class MacRecorder extends EventEmitter {
       }

       this.sessionTimestamp = null;
+      this.syncTimestamp = null;
       resolve(result);
     } catch (error) {
       this.isRecording = false;
@@ -879,6 +908,7 @@ class MacRecorder extends EventEmitter {
       this.audioCaptureActive = false;
       this.audioCaptureFile = null;
       this.sessionTimestamp = null;
+      this.syncTimestamp = null;
       if (this.recordingTimer) {
         clearInterval(this.recordingTimer);
         this.recordingTimer = null;
@@ -901,6 +931,7 @@ class MacRecorder extends EventEmitter {
       cameraCapturing: this.cameraCaptureActive,
       audioCapturing: this.audioCaptureActive,
       sessionTimestamp: this.sessionTimestamp,
+      syncTimestamp: this.syncTimestamp,
       options: this.options,
       recordingTime: this.recordingStartTime
         ? Math.floor((Date.now() - this.recordingStartTime) / 1000)
```
package/package.json
CHANGED

One-line change: the "version" field is bumped from 2.21.25 to 2.21.26.

package/src/avfoundation_recorder.mm
CHANGED

```diff
@@ -189,8 +189,7 @@ extern "C" bool startAVFoundationRecording(const std::string& outputPath,
         return false;
     }

-    g_avStartTime =
-    [g_avWriter startSessionAtSourceTime:g_avStartTime];
+    g_avStartTime = kCMTimeInvalid;

     // Store recording parameters with scaling correction
     g_avDisplayID = displayID;
@@ -377,6 +376,12 @@ extern "C" bool startAVFoundationRecording(const std::string& outputPath,

         // Write frame only if input is ready
         if (localVideoInput && localVideoInput.readyForMoreMediaData) {
+            if (CMTIME_IS_INVALID(g_avStartTime)) {
+                g_avStartTime = CMTimeMakeWithSeconds(CACurrentMediaTime(), 600);
+                [g_avWriter startSessionAtSourceTime:g_avStartTime];
+                MRLog(@"🎞️ AVFoundation writer session started @ %.3f", CMTimeGetSeconds(g_avStartTime));
+            }
+
             CMTime frameTime = CMTimeAdd(g_avStartTime, CMTimeMakeWithSeconds(((double)g_avFrameNumber) / fps, 600));
             BOOL appendSuccess = [localPixelBufferAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:frameTime];
             if (appendSuccess) {
@@ -479,6 +484,7 @@ extern "C" bool stopAVFoundationRecording() {
     g_avVideoInput = nil;
     g_avPixelBufferAdaptor = nil;
     g_avFrameNumber = 0;
+    g_avStartTime = kCMTimeInvalid;

     MRLog(@"✅ AVFoundation recording stopped");
     return true;
```
package/src/mac_recorder.mm
CHANGED

```diff
@@ -444,20 +444,7 @@ Napi::Value StartRecording(const Napi::CallbackInfo& info) {
     // Set timeout for ScreenCaptureKit initialization
     // Attempt to start ScreenCaptureKit with safety wrapper
     @try {
-
-        bool cameraStarted = true;
-        if (captureCamera) {
-            MRLog(@"🎯 SYNC: Starting camera recording first for parallel sync");
-            cameraStarted = startCameraIfRequested(captureCamera, &cameraOutputPath, cameraDeviceId, outputPath, sessionTimestamp);
-            if (!cameraStarted) {
-                MRLog(@"❌ Camera start failed - aborting");
-                return Napi::Boolean::New(env, false);
-            }
-            MRLog(@"✅ SYNC: Camera recording started");
-        }
-
-        // Now start ScreenCaptureKit immediately after camera
-        MRLog(@"🎯 SYNC: Starting ScreenCaptureKit recording immediately");
+        MRLog(@"🎯 SYNC: Starting ScreenCaptureKit recording");
         if ([ScreenCaptureKitRecorder startRecordingWithConfiguration:sckConfig
                                                               delegate:g_delegate
                                                                  error:&sckError]) {
@@ -466,16 +453,22 @@ Napi::Value StartRecording(const Napi::CallbackInfo& info) {
             MRLog(@"🎬 RECORDING METHOD: ScreenCaptureKit");
             MRLog(@"✅ SYNC: ScreenCaptureKit recording started successfully");

+            if (captureCamera) {
+                MRLog(@"🎯 SYNC: Starting camera recording after screen start");
+                bool cameraStarted = startCameraIfRequested(captureCamera, &cameraOutputPath, cameraDeviceId, outputPath, sessionTimestamp);
+                if (!cameraStarted) {
+                    MRLog(@"❌ Camera start failed - stopping ScreenCaptureKit recording");
+                    [ScreenCaptureKitRecorder stopRecording];
+                    return Napi::Boolean::New(env, false);
+                }
+                MRLog(@"✅ SYNC: Camera recording started");
+            }
+
             g_isRecording = true;
             return Napi::Boolean::New(env, true);
         } else {
             NSLog(@"❌ ScreenCaptureKit failed to start");
             NSLog(@"❌ Error: %@", sckError ? sckError.localizedDescription : @"Unknown error");
-
-            // Cleanup camera if ScreenCaptureKit failed
-            if (cameraStarted && isCameraRecording()) {
-                stopCameraRecording();
-            }
         }
     } @catch (NSException *sckException) {
         NSLog(@"❌ Exception during ScreenCaptureKit startup: %@", sckException.reason);
@@ -542,21 +535,7 @@ Napi::Value StartRecording(const Napi::CallbackInfo& info) {
                                                 NSString* audioOutputPath,
                                                 double frameRate);

-
-    // This ensures both capture their first frame at approximately the same time
-    bool cameraStarted = true;
-    if (captureCamera) {
-        MRLog(@"🎯 SYNC: Starting camera recording first for parallel sync");
-        cameraStarted = startCameraIfRequested(captureCamera, &cameraOutputPath, cameraDeviceId, outputPath, sessionTimestamp);
-        if (!cameraStarted) {
-            MRLog(@"❌ Camera start failed - aborting");
-            return Napi::Boolean::New(env, false);
-        }
-        MRLog(@"✅ SYNC: Camera recording started");
-    }
-
-    // Now start screen recording immediately after camera
-    MRLog(@"🎯 SYNC: Starting screen recording immediately");
+    MRLog(@"🎯 SYNC: Starting screen recording");
     bool avResult = startAVFoundationRecording(outputPath, displayID, windowID, captureRect,
                                                captureCursor, includeMicrophone, includeSystemAudio,
                                                audioDeviceId, audioOutputPath, frameRate);
@@ -565,6 +544,17 @@ Napi::Value StartRecording(const Napi::CallbackInfo& info) {
         MRLog(@"🎥 RECORDING METHOD: AVFoundation");
         MRLog(@"✅ SYNC: Screen recording started successfully");

+        if (captureCamera) {
+            MRLog(@"🎯 SYNC: Starting camera recording after screen start");
+            bool cameraStarted = startCameraIfRequested(captureCamera, &cameraOutputPath, cameraDeviceId, outputPath, sessionTimestamp);
+            if (!cameraStarted) {
+                MRLog(@"❌ Camera start failed - stopping screen recording");
+                stopAVFoundationRecording();
+                return Napi::Boolean::New(env, false);
+            }
+            MRLog(@"✅ SYNC: Camera recording started");
+        }
+
         // NOTE: Audio is handled internally by AVFoundation, no need for standalone audio
         // AVFoundation integrates audio recording directly

@@ -573,11 +563,6 @@ Napi::Value StartRecording(const Napi::CallbackInfo& info) {
     } else {
         NSLog(@"❌ AVFoundation recording failed to start");
         NSLog(@"❌ Check permissions and output path validity");
-
-        // Cleanup camera if screen recording failed
-        if (cameraStarted && isCameraRecording()) {
-            stopCameraRecording();
-        }
     }
 } @catch (NSException *avException) {
     NSLog(@"❌ Exception during AVFoundation startup: %@", avException.reason);
```