node-mac-recorder 2.21.2 → 2.21.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/settings.local.json +5 -2
- package/package.json +1 -1
- package/src/camera_recorder.mm +122 -37
- package/src/screen_capture_kit.mm +71 -71
package/.claude/settings.local.json
CHANGED

@@ -5,9 +5,12 @@
       "Bash(node:*)",
       "Bash(timeout:*)",
       "Bash(open:*)",
-      "Read(//Users/onur/codes/**)"
+      "Read(//Users/onur/codes/**)",
+      "Bash(log show:*)",
+      "Bash(MAC_RECORDER_DEBUG=1 node:*)",
+      "Read(//private/tmp/test-recording/**)"
     ],
     "deny": [],
     "ask": []
   }
-}
+}
package/package.json
CHANGED
package/src/camera_recorder.mm
CHANGED
@@ -82,6 +82,10 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
 @property (atomic, assign) BOOL writerStarted;
 @property (atomic, assign) BOOL isShuttingDown;
 @property (nonatomic, assign) CMTime firstSampleTime;
+@property (nonatomic, assign) int32_t expectedWidth;
+@property (nonatomic, assign) int32_t expectedHeight;
+@property (nonatomic, assign) double expectedFrameRate;
+@property (atomic, assign) BOOL needsReconfiguration;

 + (instancetype)sharedRecorder;
 + (NSArray<NSDictionary *> *)availableCameraDevices;
@@ -105,34 +109,42 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {

 + (NSArray<NSDictionary *> *)availableCameraDevices {
     NSMutableArray<NSDictionary *> *devicesInfo = [NSMutableArray array];
-
+
     NSMutableArray<AVCaptureDeviceType> *deviceTypes = [NSMutableArray array];
     BOOL allowContinuity = MRAllowContinuityCamera();
-
+
+    // Always include built-in and external cameras
     if (@available(macOS 10.15, *)) {
         [deviceTypes addObject:AVCaptureDeviceTypeBuiltInWideAngleCamera];
     } else {
         [deviceTypes addObject:AVCaptureDeviceTypeBuiltInWideAngleCamera];
     }
-
+
+    // ALWAYS add external cameras - they should be available regardless of Continuity permission
+    if (@available(macOS 14.0, *)) {
+        [deviceTypes addObject:AVCaptureDeviceTypeExternal];
+    } else {
+        [deviceTypes addObject:AVCaptureDeviceTypeExternalUnknown];
+    }
+
+    // Only add Continuity Camera type if allowed
     if (allowContinuity) {
         if (@available(macOS 14.0, *)) {
             [deviceTypes addObject:AVCaptureDeviceTypeContinuityCamera];
-            [deviceTypes addObject:AVCaptureDeviceTypeExternal];
-        } else {
-            [deviceTypes addObject:AVCaptureDeviceTypeExternalUnknown];
         }
     }
-
+
     AVCaptureDeviceDiscoverySession *discoverySession =
         [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:deviceTypes
                                                                 mediaType:AVMediaTypeVideo
                                                                  position:AVCaptureDevicePositionUnspecified];
-
+
     for (AVCaptureDevice *device in discoverySession.devices) {
         BOOL continuityCamera = MRIsContinuityCamera(device);
+        // ONLY skip Continuity cameras when permission is missing
+        // Regular USB/external cameras should ALWAYS be listed
         if (continuityCamera && !allowContinuity) {
-
+            MRLog(@"⏭️ Skipping Continuity Camera (permission required): %@", device.localizedName);
             continue;
         }

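
A minimal sketch of the discovery logic in the hunk above, restated as a standalone function. The device types and availability gates are taken from the diff; MRAllowContinuityCamera is the package's own permission check, stubbed here as a parameter:

    #import <AVFoundation/AVFoundation.h>

    // Sketch: build the device-type list the way the new code does, so external
    // USB cameras are always discoverable and only Continuity Camera is gated.
    static NSArray<AVCaptureDevice *> *EnumerateCameras(BOOL allowContinuity) {
        NSMutableArray<AVCaptureDeviceType> *types = [NSMutableArray array];
        [types addObject:AVCaptureDeviceTypeBuiltInWideAngleCamera];
        if (@available(macOS 14.0, *)) {
            [types addObject:AVCaptureDeviceTypeExternal];        // USB/external cameras
            if (allowContinuity) {
                [types addObject:AVCaptureDeviceTypeContinuityCamera];
            }
        } else {
            [types addObject:AVCaptureDeviceTypeExternalUnknown]; // pre-macOS 14 fallback
        }
        AVCaptureDeviceDiscoverySession *session =
            [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:types
                                                                   mediaType:AVMediaTypeVideo
                                                                    position:AVCaptureDevicePositionUnspecified];
        return session.devices;
    }
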
@@ -242,23 +254,28 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
     AVCaptureDeviceFormat *bestFormat = nil;
     int64_t bestResolutionScore = 0;
     double bestFrameRate = 0.0;
-
+
+    MRLog(@"🔍 Scanning formats for device: %@", device.localizedName);
+
     for (AVCaptureDeviceFormat *format in device.formats) {
         CMVideoDimensions dims = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
         if (dims.width <= 0 || dims.height <= 0) {
             continue;
         }
-
+
+        // No filtering - use whatever the device supports
+        // The device knows best what it can capture
+
         int64_t score = (int64_t)dims.width * (int64_t)dims.height;
-
+
         double maxFrameRate = 0.0;
         for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
             maxFrameRate = MAX(maxFrameRate, range.maxFrameRate);
         }
-
+
         BOOL usesBetterResolution = score > bestResolutionScore;
         BOOL sameResolutionHigherFps = (score == bestResolutionScore) && (maxFrameRate > bestFrameRate);
-
+
         if (!bestFormat || usesBetterResolution || sameResolutionHigherFps) {
             bestFormat = format;
             bestResolutionScore = score;
@@ -266,9 +283,17 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
             if (widthOut) *widthOut = dims.width;
             if (heightOut) *heightOut = dims.height;
             if (frameRateOut) *frameRateOut = bestFrameRate;
+            MRLog(@" ✅ New best: %dx%d @ %.0ffps (score=%lld)",
+                  dims.width, dims.height, maxFrameRate, score);
         }
     }
-
+
+    if (bestFormat) {
+        MRLog(@"📹 Selected format: %dx%d @ %.0ffps", *widthOut, *heightOut, *frameRateOut);
+    } else {
+        MRLog(@"❌ No suitable format found");
+    }
+
     return bestFormat;
 }

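
The selection rule used by the two hunks above, in isolation: largest pixel count wins, and the higher peak frame rate breaks ties. A self-contained sketch under the same assumptions as the diff:

    #import <AVFoundation/AVFoundation.h>

    // Sketch of the "resolution first, fps as tie-break" rule used above.
    static AVCaptureDeviceFormat *BestFormatForDevice(AVCaptureDevice *device) {
        AVCaptureDeviceFormat *best = nil;
        int64_t bestScore = 0;
        double bestFps = 0.0;
        for (AVCaptureDeviceFormat *format in device.formats) {
            CMVideoDimensions dims = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
            if (dims.width <= 0 || dims.height <= 0) continue;
            int64_t score = (int64_t)dims.width * (int64_t)dims.height; // pixel count
            double fps = 0.0;
            for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
                fps = MAX(fps, range.maxFrameRate);
            }
            if (!best || score > bestScore || (score == bestScore && fps > bestFps)) {
                best = format;
                bestScore = score;
                bestFps = fps;
            }
        }
        return best;
    }
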
@@ -415,17 +440,28 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
         MRLog(@"ℹ️ CameraRecorder: WebM unavailable, storing data in QuickTime container");
     }

-
-
+    // Calculate bitrate based on resolution for high quality
+    // Use higher multiplier for better quality (10 instead of 6)
+    NSInteger bitrate = (NSInteger)(width * height * 10);
+    bitrate = MAX(bitrate, 8 * 1000 * 1000); // Minimum 8 Mbps for quality
+    bitrate = MIN(bitrate, 50 * 1000 * 1000); // Maximum 50 Mbps to avoid excessive file size
+
+    MRLog(@"🎬 Camera encoder settings: %dx%d @ %.2ffps, bitrate=%.2fMbps",
+          width, height, frameRate, bitrate / (1000.0 * 1000.0));

     NSMutableDictionary *compressionProps = [@{
         AVVideoAverageBitRateKey: @(bitrate),
         AVVideoMaxKeyFrameIntervalKey: @(MAX(1, (int)round(frameRate))),
-        AVVideoAllowFrameReorderingKey: @YES
+        AVVideoAllowFrameReorderingKey: @YES,
+        AVVideoExpectedSourceFrameRateKey: @(frameRate),
+        // Add quality hint for better encoding
+        AVVideoQualityKey: @(0.9) // 0.0-1.0, higher is better quality
     } mutableCopy];
-
+
     if ([codec isEqualToString:AVVideoCodecTypeH264]) {
         compressionProps[AVVideoProfileLevelKey] = AVVideoProfileLevelH264HighAutoLevel;
+        // Use Main profile for better quality
+        compressionProps[AVVideoH264EntropyModeKey] = AVVideoH264EntropyModeCABAC;
     }

     NSDictionary *videoSettings = @{
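
The bitrate rule above works out to roughly 10 bits per pixel per second, clamped to an 8-50 Mbps window; for 1080p that is 1920 × 1080 × 10 = 20,736,000 bps ≈ 20.7 Mbps, comfortably inside the clamp. The same arithmetic as a standalone helper (width/height are illustrative parameters):

    #import <Foundation/Foundation.h>

    // Sketch of the clamped bitrate rule above.
    static NSInteger CameraBitrate(int32_t width, int32_t height) {
        NSInteger bitrate = (NSInteger)width * height * 10; // ~10 bits/pixel/second
        bitrate = MAX(bitrate, 8 * 1000 * 1000);            // floor: 8 Mbps
        bitrate = MIN(bitrate, 50 * 1000 * 1000);           // ceiling: 50 Mbps
        return bitrate;                                     // 1920x1080 -> 20,736,000
    }
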
@@ -440,9 +476,12 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
     self.assetWriterInput.expectsMediaDataInRealTime = YES;

     NSDictionary *pixelBufferAttributes = @{
-        (NSString *)kCVPixelBufferPixelFormatTypeKey: @(
+        (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange),
         (NSString *)kCVPixelBufferWidthKey: @(width),
-        (NSString *)kCVPixelBufferHeightKey: @(height)
+        (NSString *)kCVPixelBufferHeightKey: @(height),
+        // Preserve aspect ratio and use high quality scaling
+        (NSString *)kCVPixelBufferCGImageCompatibilityKey: @YES,
+        (NSString *)kCVPixelBufferCGBitmapContextCompatibilityKey: @YES
     };

     self.pixelBufferAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:self.assetWriterInput
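
For reference, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange ('420v') is biplanar 4:2:0 YCbCr with video-range luma, which most camera hardware emits natively, so capture and encode can share one format with no conversion pass. A sketch of the same attribute dictionary with illustrative dimensions:

    #import <AVFoundation/AVFoundation.h>

    // Sketch: pixel buffer attributes matching the diff, for a hypothetical 1920x1080 writer.
    static NSDictionary *PixelBufferAttributes(void) {
        return @{
            (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange),
            (NSString *)kCVPixelBufferWidthKey: @(1920),  // illustrative
            (NSString *)kCVPixelBufferHeightKey: @(1080), // illustrative
            (NSString *)kCVPixelBufferCGImageCompatibilityKey: @YES,
            (NSString *)kCVPixelBufferCGBitmapContextCompatibilityKey: @YES,
        };
    }
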
@@ -572,8 +611,10 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {

     self.videoOutput = [[AVCaptureVideoDataOutput alloc] init];
     self.videoOutput.alwaysDiscardsLateVideoFrames = NO;
+    // Use video range (not full range) for better compatibility and quality
+    // YpCbCr 4:2:0 biplanar is the native format for most cameras
     self.videoOutput.videoSettings = @{
-        (NSString *)kCVPixelBufferPixelFormatTypeKey: @(
+        (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)
     };

     self.captureQueue = dispatch_queue_create("node_mac_recorder.camera.queue", DISPATCH_QUEUE_SERIAL);
@@ -594,31 +635,39 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {

     AVCaptureConnection *connection = [self.videoOutput connectionWithMediaType:AVMediaTypeVideo];
     if (connection) {
-
-
-
+        // DON'T set orientation - let the camera use its natural orientation
+        // The device knows best (portrait for phones, landscape for webcams)
+        // We just capture whatever comes through
+
+        // Mirror front cameras for natural preview
         if (connection.isVideoMirroringSupported && device.position == AVCaptureDevicePositionFront) {
             if ([connection respondsToSelector:@selector(setAutomaticallyAdjustsVideoMirroring:)]) {
                 connection.automaticallyAdjustsVideoMirroring = NO;
             }
             connection.videoMirrored = YES;
         }
+
+        // Log actual connection properties for debugging
+        MRLog(@"📐 Camera connection: orientation=%ld (native), mirrored=%d, format=%dx%d",
+              (long)connection.videoOrientation,
+              connection.isVideoMirrored,
+              width, height);
     }

-
-
-        [self.session stopRunning];
-        [self resetState];
-        return NO;
-    }
-
+    // DON'T setup writer yet - wait for first frame to get actual dimensions
+    // Store configuration for lazy initialization
     self.outputPath = outputPath;
     self.isRecording = YES;
     self.isShuttingDown = NO;
-
+    self.expectedWidth = width;
+    self.expectedHeight = height;
+    self.expectedFrameRate = frameRate;
+    self.needsReconfiguration = NO;
+
     [self.session startRunning];
-
-    MRLog(@"🎥 CameraRecorder started: %@ (
+
+    MRLog(@"🎥 CameraRecorder started: %@ (will use actual frame dimensions)", device.localizedName);
+    MRLog(@" Format reports: %dx%d @ %.2ffps", width, height, frameRate);
     return YES;
 }

@@ -674,18 +723,54 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
     if (!self.isRecording || self.isShuttingDown) {
         return;
     }
-
+
     if (!sampleBuffer) {
         return;
     }
-
+
     CMTime timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
+
+    // Lazy initialization - setup writer with actual frame dimensions
+    if (!self.assetWriter) {
+        CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+        if (!pixelBuffer) {
+            MRLog(@"❌ No pixel buffer in first frame");
+            return;
+        }
+
+        size_t actualWidth = CVPixelBufferGetWidth(pixelBuffer);
+        size_t actualHeight = CVPixelBufferGetHeight(pixelBuffer);
+
+        MRLog(@"🎬 First frame received: %zux%zu (format said %dx%d)",
+              actualWidth, actualHeight, self.expectedWidth, self.expectedHeight);
+
+        // Use ACTUAL dimensions from the frame, not format dimensions
+        NSURL *outputURL = [NSURL fileURLWithPath:self.outputPath];
+        NSError *setupError = nil;
+
+        // Use frame rate from device configuration
+        double frameRate = self.expectedFrameRate > 0 ? self.expectedFrameRate : 30.0;
+
+        if (![self setupWriterWithURL:outputURL
+                                width:(int32_t)actualWidth
+                               height:(int32_t)actualHeight
+                            frameRate:frameRate
+                                error:&setupError]) {
+            MRLog(@"❌ Failed to setup writer with actual dimensions: %@", setupError);
+            self.isRecording = NO;
+            return;
+        }
+
+        MRLog(@"✅ Writer configured with ACTUAL dimensions: %zux%zu", actualWidth, actualHeight);
+    }
+
     if (!self.writerStarted) {
         if (self.assetWriter.status == AVAssetWriterStatusUnknown) {
             if ([self.assetWriter startWriting]) {
                 [self.assetWriter startSessionAtSourceTime:timestamp];
                 self.writerStarted = YES;
                 self.firstSampleTime = timestamp;
+                MRLog(@"✅ Camera writer started");
             } else {
                 MRLog(@"❌ CameraRecorder: Failed to start asset writer: %@", self.assetWriter.error);
                 self.isRecording = NO;
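
The lazy-initialization pattern above, reduced to its core: the writer is only created once a real frame arrives, because the pixel buffer's true dimensions can differ from what the format advertised (the diff's own comments point at phone cameras delivering portrait frames). A sketch of reading the real size off the first sample, using only CoreMedia/CoreVideo calls and no recorder state:

    #import <CoreMedia/CoreMedia.h>
    #import <CoreVideo/CoreVideo.h>

    // Sketch: recover the actual frame size from the first sample buffer,
    // as the lazy writer setup above does before configuring the encoder.
    static BOOL ActualFrameSize(CMSampleBufferRef sampleBuffer, int32_t *width, int32_t *height) {
        CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        if (!pixelBuffer) {
            return NO; // no image data in this sample; wait for the next one
        }
        *width  = (int32_t)CVPixelBufferGetWidth(pixelBuffer);
        *height = (int32_t)CVPixelBufferGetHeight(pixelBuffer);
        return YES;
    }
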
package/src/screen_capture_kit.mm
CHANGED

@@ -120,31 +120,33 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {

 @implementation PureScreenCaptureDelegate
 - (void)stream:(SCStream * API_AVAILABLE(macos(12.3)))stream didStopWithError:(NSError *)error API_AVAILABLE(macos(12.3)) {
-
+    // ELECTRON FIX: Run cleanup on background thread to avoid blocking Electron
+    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), ^{
+        MRLog(@"🛑 Pure ScreenCapture stream stopped");

-
-
-
-
-
+        // Prevent recursive calls during cleanup
+        if (g_isCleaningUp) {
+            MRLog(@"⚠️ Already cleaning up, ignoring delegate callback");
+            return;
+        }

-
-
-
+        @synchronized([ScreenCaptureKitRecorder class]) {
+            g_isRecording = NO;
+        }

-
-
-
-
-
+        if (error) {
+            NSLog(@"❌ Stream error: %@", error);
+        } else {
+            MRLog(@"✅ Stream stopped cleanly");
+        }

-
-
-
-
-
+        // Finalize on background thread with synchronization
+        @synchronized([ScreenCaptureKitRecorder class]) {
+            if (!g_isCleaningUp) {
+                [ScreenCaptureKitRecorder finalizeRecording];
+            }
         }
-    }
+    });
 }
 @end

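
The shape of the delegate fix above, as a standalone skeleton. The globals mirror the file's g_isCleaningUp / g_isRecording, and the sketch class stands in for ScreenCaptureKitRecorder as the @synchronized token:

    #import <ScreenCaptureKit/ScreenCaptureKit.h>

    static BOOL g_isCleaningUp = NO; // mirrors the file's globals
    static BOOL g_isRecording  = NO;

    API_AVAILABLE(macos(12.3))
    @interface StopDelegateSketch : NSObject <SCStreamDelegate>
    @end

    @implementation StopDelegateSketch
    - (void)stream:(SCStream *)stream didStopWithError:(NSError *)error {
        // Hop off the delegate's callback thread before touching shared state,
        // so an embedding host (Electron) is never blocked or re-entered.
        dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), ^{
            if (g_isCleaningUp) {
                return; // ignore re-entrant callbacks during teardown
            }
            @synchronized([StopDelegateSketch class]) {
                g_isRecording = NO;
            }
            // ...finalize writers here, still off the delegate thread...
        });
    }
    @end
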
@@ -460,16 +462,13 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
         // Reset any stale state
         g_isCleaningUp = NO;

-        //
-
+        // DON'T set g_isRecording here - wait for stream to actually start
+        // This prevents the "recording=1 stream=null" issue
     }

     NSString *outputPath = config[@"outputPath"];
     if (!outputPath || [outputPath length] == 0) {
         NSLog(@"❌ Invalid output path provided");
-        @synchronized([ScreenCaptureKitRecorder class]) {
-            g_isRecording = NO;
-        }
         return NO;
     }
     g_outputPath = outputPath;
@@ -493,18 +492,17 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
     MRLog(@"🔍 AUDIO DEBUG: includeMicrophone type=%@ value=%d", [includeMicrophone class], [includeMicrophone boolValue]);
     MRLog(@"🔍 AUDIO DEBUG: includeSystemAudio type=%@ value=%d", [includeSystemAudio class], [includeSystemAudio boolValue]);

-    // ELECTRON FIX: Get shareable content
-    //
-
-
-
-
-
-
-    g_isRecording
+    // ELECTRON FIX: Get shareable content FULLY ASYNCHRONOUSLY
+    // NO semaphores, NO blocking - pure async to prevent Electron crashes
+    // CRITICAL: Run on background queue to avoid blocking Electron's main thread
+    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), ^{
+        [SCShareableContent getShareableContentWithCompletionHandler:^(SCShareableContent *content, NSError *contentError) {
+            @autoreleasepool {
+                if (contentError) {
+                    NSLog(@"❌ Content error: %@", contentError);
+                    // No need to set g_isRecording=NO since it was never set to YES
+                    return; // Early return from completion handler block
                 }
-                return; // Early return from completion handler block
-            }

                 MRLog(@"✅ Got %lu displays, %lu windows for pure recording",
                       content.displays.count, content.windows.count);
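
The fully asynchronous fetch above as a standalone sketch: the caller returns immediately, and all failure handling lives inside the completion handler, which is the property that keeps Electron's main thread unblocked:

    #import <ScreenCaptureKit/ScreenCaptureKit.h>

    API_AVAILABLE(macos(12.3))
    static void FetchShareableContentAsync(void) {
        dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), ^{
            [SCShareableContent getShareableContentWithCompletionHandler:^(SCShareableContent *content,
                                                                           NSError *contentError) {
                @autoreleasepool {
                    if (contentError) {
                        NSLog(@"content error: %@", contentError);
                        return; // nothing to undo: no recording flag was ever set
                    }
                    NSLog(@"%lu displays, %lu windows",
                          (unsigned long)content.displays.count,
                          (unsigned long)content.windows.count);
                }
            }];
        });
    }
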
@@ -544,9 +542,7 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
                     recordingHeight = (NSInteger)targetWindow.frame.size.height;
                 } else {
                     NSLog(@"❌ Window ID %@ not found", windowId);
-
-                    g_isRecording = NO;
-                    }
+                    // No need to set g_isRecording=NO since it was never set to YES
                     return; // Early return from completion handler block
                 }
             }
@@ -576,9 +572,7 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {

                 if (!targetDisplay) {
                     NSLog(@"❌ Display not found");
-
-                    g_isRecording = NO;
-                    }
+                    // No need to set g_isRecording=NO since it was never set to YES
                     return; // Early return from completion handler block
                 }

@@ -681,9 +675,7 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
                 NSError *writerError = nil;
                 if (![ScreenCaptureKitRecorder prepareVideoWriterWithWidth:recordingWidth height:recordingHeight error:&writerError]) {
                     NSLog(@"❌ Failed to prepare video writer: %@", writerError);
-
-                    g_isRecording = NO;
-                    }
+                    // No need to set g_isRecording=NO since it was never set to YES
                     return; // Early return from completion handler block
                 }

@@ -696,29 +688,30 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
                     g_audioStreamOutput = nil;
                 }

+                // Create stream outputs and delegate
                 g_streamDelegate = [[PureScreenCaptureDelegate alloc] init];
                 g_stream = [[SCStream alloc] initWithFilter:filter configuration:streamConfig delegate:g_streamDelegate];
-
+
+                // Check if stream was created successfully
                 if (!g_stream) {
                     NSLog(@"❌ Failed to create pure stream");
                     CleanupWriters();
-                    @synchronized([ScreenCaptureKitRecorder class]) {
-                        g_isRecording = NO;
-                    }
                     return; // Early return from completion handler block
                 }
-
+
+                MRLog(@"✅ Stream created successfully");
+
                 NSError *outputError = nil;
                 BOOL videoOutputAdded = [g_stream addStreamOutput:g_videoStreamOutput type:SCStreamOutputTypeScreen sampleHandlerQueue:g_videoQueue error:&outputError];
                 if (!videoOutputAdded || outputError) {
                     NSLog(@"❌ Failed to add video output: %@", outputError);
                     CleanupWriters();
                     @synchronized([ScreenCaptureKitRecorder class]) {
-
+                        g_stream = nil;
                     }
                     return; // Early return from completion handler block
                 }
-
+
                 if (g_shouldCaptureAudio) {
                     if (@available(macOS 13.0, *)) {
                         NSError *audioError = nil;
@@ -727,7 +720,7 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
                             NSLog(@"❌ Failed to add audio output: %@", audioError);
                             CleanupWriters();
                             @synchronized([ScreenCaptureKitRecorder class]) {
-
+                                g_stream = nil;
                             }
                             return; // Early return from completion handler block
                         }
@@ -736,27 +729,32 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
                         g_shouldCaptureAudio = NO;
                     }
                 }
-
+
                 MRLog(@"✅ Stream outputs configured (audio=%d)", g_shouldCaptureAudio);
                 if (sessionTimestampNumber) {
                     MRLog(@"🕒 Session timestamp: %@", sessionTimestampNumber);
                 }

-                //
+                // Start capture - can be async
                 [g_stream startCaptureWithCompletionHandler:^(NSError *startError) {
                     if (startError) {
                         NSLog(@"❌ Failed to start pure capture: %@", startError);
                         CleanupWriters();
                         @synchronized([ScreenCaptureKitRecorder class]) {
                             g_isRecording = NO;
+                            g_stream = nil;
                         }
                     } else {
                         MRLog(@"🎉 PURE ScreenCaptureKit recording started successfully!");
-                        //
+                        // NOW set recording flag - stream is actually running
+                        @synchronized([ScreenCaptureKitRecorder class]) {
+                            g_isRecording = YES;
+                        }
                     }
-                }];
+                }]; // End of startCaptureWithCompletionHandler
             } // End of autoreleasepool
-
+        }]; // End of getShareableContentWithCompletionHandler
+    }); // End of dispatch_async

     // Return immediately - async completion will handle success/failure
     return YES;
@@ -773,22 +771,24 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
     // Store stream reference to prevent it from being deallocated
     SCStream *streamToStop = g_stream;

-    // ELECTRON FIX: Stop
+    // ELECTRON FIX: Stop FULLY ASYNCHRONOUSLY - NO blocking, NO semaphores
     [streamToStop stopCaptureWithCompletionHandler:^(NSError *stopError) {
-
-
-
-
-
+        @autoreleasepool {
+            if (stopError) {
+                NSLog(@"❌ Stop error: %@", stopError);
+            } else {
+                MRLog(@"✅ Pure stream stopped");
+            }

-
-
-
-
+            // Reset recording state to allow new recordings
+            @synchronized([ScreenCaptureKitRecorder class]) {
+                g_isRecording = NO;
+            }

-
-
-
+            // Cleanup after stop completes
+            CleanupWriters();
+            [ScreenCaptureKitRecorder cleanupVideoWriter];
+        }
     }];
 }

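
And the matching stop path as a sketch: the recording flag is reset and the writers torn down only inside the stop completion handler, never on the caller's thread. The stream parameter stands in for the file's retained g_stream reference; the handler body is illustrative:

    #import <ScreenCaptureKit/ScreenCaptureKit.h>

    API_AVAILABLE(macos(12.3))
    static void StopStreamAsync(SCStream *stream) {
        [stream stopCaptureWithCompletionHandler:^(NSError *stopError) {
            @autoreleasepool {
                if (stopError) {
                    NSLog(@"stop error: %@", stopError);
                }
                // Reset recording flags, then tear down writers - all after
                // the stream has actually stopped.
            }
        }];
    }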