node-mac-recorder 2.21.1 → 2.21.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/src/camera_recorder.mm +105 -28
- package/src/screen_capture_kit.mm +82 -41
- package/test-electron-fix.js +90 -0
package/package.json
CHANGED
package/src/camera_recorder.mm
CHANGED
@@ -82,6 +82,10 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
 @property (atomic, assign) BOOL writerStarted;
 @property (atomic, assign) BOOL isShuttingDown;
 @property (nonatomic, assign) CMTime firstSampleTime;
+@property (nonatomic, assign) int32_t expectedWidth;
+@property (nonatomic, assign) int32_t expectedHeight;
+@property (nonatomic, assign) double expectedFrameRate;
+@property (atomic, assign) BOOL needsReconfiguration;

 + (instancetype)sharedRecorder;
 + (NSArray<NSDictionary *> *)availableCameraDevices;
@@ -242,23 +246,28 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
     AVCaptureDeviceFormat *bestFormat = nil;
     int64_t bestResolutionScore = 0;
     double bestFrameRate = 0.0;
-
+
+    MRLog(@"🔍 Scanning formats for device: %@", device.localizedName);
+
     for (AVCaptureDeviceFormat *format in device.formats) {
         CMVideoDimensions dims = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
         if (dims.width <= 0 || dims.height <= 0) {
             continue;
         }
-
+
+        // No filtering - use whatever the device supports
+        // The device knows best what it can capture
+
         int64_t score = (int64_t)dims.width * (int64_t)dims.height;
-
+
         double maxFrameRate = 0.0;
         for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
             maxFrameRate = MAX(maxFrameRate, range.maxFrameRate);
         }
-
+
         BOOL usesBetterResolution = score > bestResolutionScore;
         BOOL sameResolutionHigherFps = (score == bestResolutionScore) && (maxFrameRate > bestFrameRate);
-
+
         if (!bestFormat || usesBetterResolution || sameResolutionHigherFps) {
             bestFormat = format;
             bestResolutionScore = score;
@@ -266,9 +275,17 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
             if (widthOut) *widthOut = dims.width;
             if (heightOut) *heightOut = dims.height;
             if (frameRateOut) *frameRateOut = bestFrameRate;
+            MRLog(@"   ✅ New best: %dx%d @ %.0ffps (score=%lld)",
+                  dims.width, dims.height, maxFrameRate, score);
         }
     }
-
+
+    if (bestFormat) {
+        MRLog(@"📹 Selected format: %dx%d @ %.0ffps", *widthOut, *heightOut, *frameRateOut);
+    } else {
+        MRLog(@"❌ No suitable format found");
+    }
+
     return bestFormat;
 }

@@ -415,17 +432,28 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
         MRLog(@"ℹ️ CameraRecorder: WebM unavailable, storing data in QuickTime container");
     }

-
-
+    // Calculate bitrate based on resolution for high quality
+    // Use higher multiplier for better quality (10 instead of 6)
+    NSInteger bitrate = (NSInteger)(width * height * 10);
+    bitrate = MAX(bitrate, 8 * 1000 * 1000); // Minimum 8 Mbps for quality
+    bitrate = MIN(bitrate, 50 * 1000 * 1000); // Maximum 50 Mbps to avoid excessive file size
+
+    MRLog(@"🎬 Camera encoder settings: %dx%d @ %.2ffps, bitrate=%.2fMbps",
+          width, height, frameRate, bitrate / (1000.0 * 1000.0));

     NSMutableDictionary *compressionProps = [@{
         AVVideoAverageBitRateKey: @(bitrate),
         AVVideoMaxKeyFrameIntervalKey: @(MAX(1, (int)round(frameRate))),
-        AVVideoAllowFrameReorderingKey: @YES
+        AVVideoAllowFrameReorderingKey: @YES,
+        AVVideoExpectedSourceFrameRateKey: @(frameRate),
+        // Add quality hint for better encoding
+        AVVideoQualityKey: @(0.9) // 0.0-1.0, higher is better quality
     } mutableCopy];
-
+
     if ([codec isEqualToString:AVVideoCodecTypeH264]) {
         compressionProps[AVVideoProfileLevelKey] = AVVideoProfileLevelH264HighAutoLevel;
+        // Use Main profile for better quality
+        compressionProps[AVVideoH264EntropyModeKey] = AVVideoH264EntropyModeCABAC;
     }

     NSDictionary *videoSettings = @{
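The bitrate rule above is simple enough to check by hand: bits per second ≈ width × height × 10, clamped to [8 Mbps, 50 Mbps]. A minimal standalone sketch of that arithmetic (the helper name MRClampedBitrate and the sample resolutions are ours, not part of the package):

#import <Foundation/Foundation.h>

// Mirrors the rule in the hunk above: width * height * 10,
// clamped between 8 Mbps and 50 Mbps.
static NSInteger MRClampedBitrate(int32_t width, int32_t height) {
    NSInteger bitrate = (NSInteger)width * height * 10;
    bitrate = MAX(bitrate, 8 * 1000 * 1000);   // floor: 8 Mbps
    bitrate = MIN(bitrate, 50 * 1000 * 1000);  // ceiling: 50 Mbps
    return bitrate;
}

int main(void) {
    @autoreleasepool {
        NSLog(@"720p:  %ld", (long)MRClampedBitrate(1280, 720));   // 9,216,000  (~9.2 Mbps)
        NSLog(@"1080p: %ld", (long)MRClampedBitrate(1920, 1080));  // 20,736,000 (~20.7 Mbps)
        NSLog(@"4K:    %ld", (long)MRClampedBitrate(3840, 2160));  // 50,000,000 (capped)
    }
    return 0;
}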
@@ -440,9 +468,12 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
     self.assetWriterInput.expectsMediaDataInRealTime = YES;

     NSDictionary *pixelBufferAttributes = @{
-        (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange),
+        (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange),
         (NSString *)kCVPixelBufferWidthKey: @(width),
-        (NSString *)kCVPixelBufferHeightKey: @(height)
+        (NSString *)kCVPixelBufferHeightKey: @(height),
+        // Preserve aspect ratio and use high quality scaling
+        (NSString *)kCVPixelBufferCGImageCompatibilityKey: @YES,
+        (NSString *)kCVPixelBufferCGBitmapContextCompatibilityKey: @YES
     };

     self.pixelBufferAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:self.assetWriterInput
@@ -572,8 +603,10 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {

     self.videoOutput = [[AVCaptureVideoDataOutput alloc] init];
     self.videoOutput.alwaysDiscardsLateVideoFrames = NO;
+    // Use video range (not full range) for better compatibility and quality
+    // YpCbCr 4:2:0 biplanar is the native format for most cameras
     self.videoOutput.videoSettings = @{
-        (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
+        (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)
     };

     self.captureQueue = dispatch_queue_create("node_mac_recorder.camera.queue", DISPATCH_QUEUE_SERIAL);
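For reference, the two pixel formats swapped in these hunks differ only in signal range, not layout: video range confines luma to 16-235, while full range uses the whole 0-255 span; both are 4:2:0 biplanar (NV12-style) buffers. A sketch, not package code:

#import <CoreVideo/CoreVideo.h>

// Four-char codes: '420v' = video range, '420f' = full range.
static OSType PreferredCaptureFormat(void) {
    return kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange; // matches the diff above
}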
@@ -594,31 +627,39 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {

     AVCaptureConnection *connection = [self.videoOutput connectionWithMediaType:AVMediaTypeVideo];
     if (connection) {
-
-
-
+        // DON'T set orientation - let the camera use its natural orientation
+        // The device knows best (portrait for phones, landscape for webcams)
+        // We just capture whatever comes through
+
+        // Mirror front cameras for natural preview
         if (connection.isVideoMirroringSupported && device.position == AVCaptureDevicePositionFront) {
             if ([connection respondsToSelector:@selector(setAutomaticallyAdjustsVideoMirroring:)]) {
                 connection.automaticallyAdjustsVideoMirroring = NO;
             }
             connection.videoMirrored = YES;
         }
+
+        // Log actual connection properties for debugging
+        MRLog(@"📐 Camera connection: orientation=%ld (native), mirrored=%d, format=%dx%d",
+              (long)connection.videoOrientation,
+              connection.isVideoMirrored,
+              width, height);
     }

-
-
-        [self.session stopRunning];
-        [self resetState];
-        return NO;
-    }
-
+    // DON'T setup writer yet - wait for first frame to get actual dimensions
+    // Store configuration for lazy initialization
     self.outputPath = outputPath;
     self.isRecording = YES;
     self.isShuttingDown = NO;
-
+    self.expectedWidth = width;
+    self.expectedHeight = height;
+    self.expectedFrameRate = frameRate;
+    self.needsReconfiguration = NO;
+
     [self.session startRunning];
-
-    MRLog(@"🎥 CameraRecorder started: %@ (
+
+    MRLog(@"🎥 CameraRecorder started: %@ (will use actual frame dimensions)", device.localizedName);
+    MRLog(@"   Format reports: %dx%d @ %.2ffps", width, height, frameRate);
     return YES;
 }

@@ -674,18 +715,54 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
     if (!self.isRecording || self.isShuttingDown) {
         return;
     }
-
+
     if (!sampleBuffer) {
         return;
     }
-
+
     CMTime timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
+
+    // Lazy initialization - setup writer with actual frame dimensions
+    if (!self.assetWriter) {
+        CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+        if (!pixelBuffer) {
+            MRLog(@"❌ No pixel buffer in first frame");
+            return;
+        }
+
+        size_t actualWidth = CVPixelBufferGetWidth(pixelBuffer);
+        size_t actualHeight = CVPixelBufferGetHeight(pixelBuffer);
+
+        MRLog(@"🎬 First frame received: %zux%zu (format said %dx%d)",
+              actualWidth, actualHeight, self.expectedWidth, self.expectedHeight);
+
+        // Use ACTUAL dimensions from the frame, not format dimensions
+        NSURL *outputURL = [NSURL fileURLWithPath:self.outputPath];
+        NSError *setupError = nil;
+
+        // Use frame rate from device configuration
+        double frameRate = self.expectedFrameRate > 0 ? self.expectedFrameRate : 30.0;
+
+        if (![self setupWriterWithURL:outputURL
+                                width:(int32_t)actualWidth
+                               height:(int32_t)actualHeight
+                            frameRate:frameRate
+                                error:&setupError]) {
+            MRLog(@"❌ Failed to setup writer with actual dimensions: %@", setupError);
+            self.isRecording = NO;
+            return;
+        }
+
+        MRLog(@"✅ Writer configured with ACTUAL dimensions: %zux%zu", actualWidth, actualHeight);
+    }
+
     if (!self.writerStarted) {
         if (self.assetWriter.status == AVAssetWriterStatusUnknown) {
             if ([self.assetWriter startWriting]) {
                 [self.assetWriter startSessionAtSourceTime:timestamp];
                 self.writerStarted = YES;
                 self.firstSampleTime = timestamp;
+                MRLog(@"✅ Camera writer started");
             } else {
                 MRLog(@"❌ CameraRecorder: Failed to start asset writer: %@", self.assetWriter.error);
                 self.isRecording = NO;
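This lazy initialization is the heart of the camera fix: the writer is sized from the first delivered pixel buffer rather than the negotiated device format, and the two can disagree (a phone used as a Continuity Camera can, for example, deliver portrait buffers). A distilled sketch of the pattern under assumed names (LazyWriter and ensureWriterForBuffer:outputURL: are ours, not the package's API):

#import <AVFoundation/AVFoundation.h>

// Sketch: create the AVAssetWriter only when the first frame arrives,
// sized from the CVPixelBuffer itself rather than the device format.
@interface LazyWriter : NSObject
@property (nonatomic, strong) AVAssetWriter *writer;
@property (nonatomic, strong) AVAssetWriterInput *input;
@end

@implementation LazyWriter
- (BOOL)ensureWriterForBuffer:(CVPixelBufferRef)pixelBuffer outputURL:(NSURL *)url {
    if (self.writer) return YES;                      // already configured
    size_t w = CVPixelBufferGetWidth(pixelBuffer);    // actual frame width
    size_t h = CVPixelBufferGetHeight(pixelBuffer);   // actual frame height
    NSError *error = nil;
    self.writer = [AVAssetWriter assetWriterWithURL:url
                                           fileType:AVFileTypeQuickTimeMovie
                                              error:&error];
    if (!self.writer) return NO;
    self.input = [AVAssetWriterInput
        assetWriterInputWithMediaType:AVMediaTypeVideo
                       outputSettings:@{ AVVideoCodecKey:  AVVideoCodecTypeH264,
                                         AVVideoWidthKey:  @(w),
                                         AVVideoHeightKey: @(h) }];
    self.input.expectsMediaDataInRealTime = YES;
    if (![self.writer canAddInput:self.input]) return NO;
    [self.writer addInput:self.input];
    return YES;
}
@end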
package/src/screen_capture_kit.mm
CHANGED
@@ -121,27 +121,30 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
 @implementation PureScreenCaptureDelegate
 - (void)stream:(SCStream * API_AVAILABLE(macos(12.3)))stream didStopWithError:(NSError *)error API_AVAILABLE(macos(12.3)) {
     MRLog(@"🛑 Pure ScreenCapture stream stopped");
-
+
     // Prevent recursive calls during cleanup
     if (g_isCleaningUp) {
         MRLog(@"⚠️ Already cleaning up, ignoring delegate callback");
         return;
     }
-
-
-
+
+    @synchronized([ScreenCaptureKitRecorder class]) {
+        g_isRecording = NO;
+    }
+
     if (error) {
         NSLog(@"❌ Stream error: %@", error);
     } else {
         MRLog(@"✅ Stream stopped cleanly");
     }
-
-    //
-
-
+
+    // ELECTRON FIX: Don't use dispatch_async to main queue - it can cause crashes
+    // Instead, finalize directly on current thread with synchronization
+    @synchronized([ScreenCaptureKitRecorder class]) {
+        if (!g_isCleaningUp) {
             [ScreenCaptureKitRecorder finalizeRecording];
         }
-    }
+    }
 }
 @end

@@ -450,17 +453,23 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
 + (BOOL)startRecordingWithConfiguration:(NSDictionary *)config delegate:(id)delegate error:(NSError **)error {
     @synchronized([ScreenCaptureKitRecorder class]) {
         if (g_isRecording || g_isCleaningUp) {
-
+            MRLog(@"⚠️ Already recording or cleaning up (recording:%d cleaning:%d)", g_isRecording, g_isCleaningUp);
             return NO;
         }
-
+
         // Reset any stale state
         g_isCleaningUp = NO;
+
+        // Set flag early to prevent race conditions in Electron
+        g_isRecording = YES;
     }
-
+
     NSString *outputPath = config[@"outputPath"];
     if (!outputPath || [outputPath length] == 0) {
         NSLog(@"❌ Invalid output path provided");
+        @synchronized([ScreenCaptureKitRecorder class]) {
+            g_isRecording = NO;
+        }
         return NO;
     }
     g_outputPath = outputPath;
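Setting g_isRecording = YES under the lock before any asynchronous work starts, and rolling it back on every failure path, is the concurrency pattern this hunk and the ones below repeat. Reduced to a sketch with our own names (not the package's code):

#import <Foundation/Foundation.h>

static BOOL g_busy = NO; // analogous to g_isRecording

// Claim the slot under a lock BEFORE kicking off async work, so a second
// caller racing in sees the flag already set.
static BOOL ClaimRecordingSlot(void) {
    @synchronized([NSObject class]) {
        if (g_busy) return NO;  // someone else got here first
        g_busy = YES;           // claim early to close the race window
        return YES;
    }
}

// Every failure path (and the final stop handler) must release the slot,
// otherwise no further recording can ever start.
static void ReleaseRecordingSlot(void) {
    @synchronized([NSObject class]) {
        g_busy = NO;
    }
}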
@@ -483,12 +492,17 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
     // CRITICAL DEBUG: Log EXACT audio parameter values
     MRLog(@"🔍 AUDIO DEBUG: includeMicrophone type=%@ value=%d", [includeMicrophone class], [includeMicrophone boolValue]);
     MRLog(@"🔍 AUDIO DEBUG: includeSystemAudio type=%@ value=%d", [includeSystemAudio class], [includeSystemAudio boolValue]);
-
-    // Get shareable content
+
+    // ELECTRON FIX: Get shareable content FULLY ASYNCHRONOUSLY
+    // NO semaphores, NO blocking - pure async to prevent Electron crashes
     [SCShareableContent getShareableContentWithCompletionHandler:^(SCShareableContent *content, NSError *contentError) {
+        @autoreleasepool {
         if (contentError) {
             NSLog(@"❌ Content error: %@", contentError);
-
+            @synchronized([ScreenCaptureKitRecorder class]) {
+                g_isRecording = NO;
+            }
+            return; // Early return from completion handler block
         }

         MRLog(@"✅ Got %lu displays, %lu windows for pure recording",
@@ -529,7 +543,10 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
             recordingHeight = (NSInteger)targetWindow.frame.size.height;
         } else {
             NSLog(@"❌ Window ID %@ not found", windowId);
-
+            @synchronized([ScreenCaptureKitRecorder class]) {
+                g_isRecording = NO;
+            }
+            return; // Early return from completion handler block
         }
     }
     // DISPLAY RECORDING
@@ -558,7 +575,10 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {

         if (!targetDisplay) {
             NSLog(@"❌ Display not found");
-
+            @synchronized([ScreenCaptureKitRecorder class]) {
+                g_isRecording = NO;
+            }
+            return; // Early return from completion handler block
         }

         MRLog(@"🖥️ Recording display %u (%dx%d)",
@@ -660,7 +680,10 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
         NSError *writerError = nil;
         if (![ScreenCaptureKitRecorder prepareVideoWriterWithWidth:recordingWidth height:recordingHeight error:&writerError]) {
             NSLog(@"❌ Failed to prepare video writer: %@", writerError);
-
+            @synchronized([ScreenCaptureKitRecorder class]) {
+                g_isRecording = NO;
+            }
+            return; // Early return from completion handler block
         }

         g_videoQueue = dispatch_queue_create("screen_capture_video_queue", DISPATCH_QUEUE_SERIAL);
@@ -678,7 +701,10 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
         if (!g_stream) {
             NSLog(@"❌ Failed to create pure stream");
             CleanupWriters();
-
+            @synchronized([ScreenCaptureKitRecorder class]) {
+                g_isRecording = NO;
+            }
+            return; // Early return from completion handler block
         }

         NSError *outputError = nil;
@@ -686,7 +712,10 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
         if (!videoOutputAdded || outputError) {
             NSLog(@"❌ Failed to add video output: %@", outputError);
             CleanupWriters();
-
+            @synchronized([ScreenCaptureKitRecorder class]) {
+                g_isRecording = NO;
+            }
+            return; // Early return from completion handler block
         }

         if (g_shouldCaptureAudio) {
@@ -696,7 +725,10 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
             if (!audioOutputAdded || audioError) {
                 NSLog(@"❌ Failed to add audio output: %@", audioError);
                 CleanupWriters();
-
+                @synchronized([ScreenCaptureKitRecorder class]) {
+                    g_isRecording = NO;
+                }
+                return; // Early return from completion handler block
             }
         } else {
             NSLog(@"⚠️ Audio capture requested but requires macOS 13.0+");
@@ -708,19 +740,24 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
         if (sessionTimestampNumber) {
             MRLog(@"🕐 Session timestamp: %@", sessionTimestampNumber);
         }
-
+
+        // ELECTRON FIX: Start capture FULLY ASYNCHRONOUSLY - NO blocking
         [g_stream startCaptureWithCompletionHandler:^(NSError *startError) {
             if (startError) {
                 NSLog(@"❌ Failed to start pure capture: %@", startError);
-                g_isRecording = NO;
                 CleanupWriters();
+                @synchronized([ScreenCaptureKitRecorder class]) {
+                    g_isRecording = NO;
+                }
             } else {
                 MRLog(@"🚀 PURE ScreenCaptureKit recording started successfully!");
-                g_isRecording = YES;
+                // g_isRecording already set to YES at the beginning
             }
         }];
-
-
+        } // End of autoreleasepool
+    }]; // End of getShareableContentWithCompletionHandler
+
+    // Return immediately - async completion will handle success/failure
     return YES;
 }

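The "ELECTRON FIX" comments all come down to one rule: never park the calling thread waiting on a ScreenCaptureKit completion handler, because in Electron that thread is typically the main/JS thread, and blocking it (for instance with dispatch_semaphore_wait) can deadlock or crash the host. The async shape used above, reduced to a sketch (FetchContentAsync is our name, not the package's):

#import <ScreenCaptureKit/ScreenCaptureKit.h>

// Return immediately; ScreenCaptureKit invokes the handler later on a
// background queue, so the caller's thread is never blocked.
API_AVAILABLE(macos(12.3))
static void FetchContentAsync(void (^done)(SCShareableContent * _Nullable content)) {
    [SCShareableContent getShareableContentWithCompletionHandler:
        ^(SCShareableContent *content, NSError *error) {
            done(error ? nil : content);
        }];
}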
@@ -729,26 +766,30 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
         NSLog(@"⚠️ Cannot stop: recording=%d stream=%@ cleaning=%d", g_isRecording, g_stream, g_isCleaningUp);
         return;
     }
-
+
     MRLog(@"🛑 Stopping pure ScreenCaptureKit recording");
-
+
     // Store stream reference to prevent it from being deallocated
     SCStream *streamToStop = g_stream;
-
-
-
-
-
-
-
-
-
-
-
-
+
+    // ELECTRON FIX: Stop FULLY ASYNCHRONOUSLY - NO blocking, NO semaphores
+    [streamToStop stopCaptureWithCompletionHandler:^(NSError *stopError) {
+        @autoreleasepool {
+            if (stopError) {
+                NSLog(@"❌ Stop error: %@", stopError);
+            } else {
+                MRLog(@"✅ Pure stream stopped");
+            }
+
+            // Reset recording state to allow new recordings
+            @synchronized([ScreenCaptureKitRecorder class]) {
+                g_isRecording = NO;
+            }
+
+            // Cleanup after stop completes
             CleanupWriters();
             [ScreenCaptureKitRecorder cleanupVideoWriter];
-    }
+        }
     }];
 }

package/test-electron-fix.js
ADDED
@@ -0,0 +1,90 @@
+#!/usr/bin/env node
+/**
+ * Test script for Electron crash fix
+ * Tests ScreenCaptureKit recording with synchronous semaphore-based approach
+ */
+
+const MacRecorder = require('./index.js');
+const path = require('path');
+const fs = require('fs');
+
+async function testRecording() {
+  console.log('🧪 Testing ScreenCaptureKit Electron crash fix...\n');
+
+  const recorder = new MacRecorder();
+
+  // Check permissions first
+  console.log('1️⃣ Checking permissions...');
+  const permissions = await recorder.checkPermissions();
+  console.log('   Permissions:', permissions);
+
+  if (!permissions.screenRecording) {
+    console.error('❌ Screen recording permission not granted');
+    console.log('   Please enable screen recording in System Settings > Privacy & Security');
+    process.exit(1);
+  }
+
+  // Get displays
+  console.log('\n2️⃣ Getting displays...');
+  const displays = await recorder.getDisplays();
+  console.log(`   Found ${displays.length} display(s):`);
+  displays.forEach(d => {
+    console.log(`   - Display ${d.id}: ${d.width}x${d.height} (Primary: ${d.isPrimary})`);
+  });
+
+  // Prepare output path
+  const outputDir = path.join(__dirname, 'test-output');
+  if (!fs.existsSync(outputDir)) {
+    fs.mkdirSync(outputDir, { recursive: true });
+  }
+
+  const outputPath = path.join(outputDir, `electron-fix-test-${Date.now()}.mov`);
+
+  try {
+    // Start recording
+    console.log('\n3️⃣ Starting recording...');
+    console.log(`   Output: ${outputPath}`);
+
+    await recorder.startRecording(outputPath, {
+      displayId: displays[0].id,
+      captureCursor: true,
+      includeMicrophone: false,
+      includeSystemAudio: false
+    });
+
+    console.log('✅ Recording started successfully!');
+    console.log('   Recording for 3 seconds...\n');
+
+    // Record for 3 seconds
+    await new Promise(resolve => setTimeout(resolve, 3000));
+
+    // Stop recording
+    console.log('4️⃣ Stopping recording...');
+    const result = await recorder.stopRecording();
+    console.log('✅ Recording stopped successfully!');
+    console.log('   Result:', result);
+
+    // Check output file
+    if (fs.existsSync(outputPath)) {
+      const stats = fs.statSync(outputPath);
+      console.log(`\n✅ Output file created: ${outputPath}`);
+      console.log(`   File size: ${(stats.size / 1024).toFixed(2)} KB`);
+    } else {
+      console.log('\n⚠️ Output file not found (may still be finalizing)');
+    }
+
+    console.log('\n🎉 Test completed successfully! No crashes detected.');
+    console.log('   The Electron crash fix appears to be working.\n');
+
+  } catch (error) {
+    console.error('\n❌ Test failed:', error.message);
+    console.error('   Stack:', error.stack);
+    process.exit(1);
+  }
+}
+
+// Run test
+testRecording().catch(error => {
+  console.error('Fatal error:', error);
+  process.exit(1);
+});
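Usage note (ours, not from the package): with the native addon built, the smoke test runs from the package root as node test-electron-fix.js. It records the primary display for three seconds and writes a timestamped .mov into test-output/, exercising the start/stop paths changed above.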