node-mac-recorder 2.21.39 → 2.21.41

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -6,40 +6,245 @@
6
6
  #import <CoreMedia/CoreMedia.h>
7
7
  #import <AudioToolbox/AudioToolbox.h>
8
8
 
9
- // Pure ScreenCaptureKit implementation - NO AVFoundation
9
+ // MULTI-SESSION RECORDING: Session-based state management
10
+ @interface RecordingSession : NSObject
11
+ @property (nonatomic, strong) NSString *sessionId;
12
+ @property (nonatomic, strong) SCStream *stream API_AVAILABLE(macos(12.3));
13
+ @property (nonatomic, strong) id<SCStreamDelegate> streamDelegate API_AVAILABLE(macos(12.3));
14
+ @property (nonatomic, assign) BOOL isRecording;
15
+ @property (nonatomic, assign) BOOL isCleaningUp;
16
+ @property (nonatomic, assign) BOOL isScheduling;
17
+ @property (nonatomic, strong) NSString *outputPath;
18
+
19
+ // Frame tracking
20
+ @property (nonatomic, assign) BOOL firstFrameReceived;
21
+ @property (nonatomic, assign) NSInteger frameCountSinceStart;
22
+
23
+ // Queues and outputs
24
+ @property (nonatomic, strong) dispatch_queue_t videoQueue;
25
+ @property (nonatomic, strong) dispatch_queue_t audioQueue;
26
+ @property (nonatomic, strong) id videoStreamOutput;
27
+ @property (nonatomic, strong) id audioStreamOutput;
28
+
29
+ // Video writer state
30
+ @property (nonatomic, strong) AVAssetWriter *videoWriter;
31
+ @property (nonatomic, strong) AVAssetWriterInput *videoInput;
32
+ @property (nonatomic, assign) CFTypeRef pixelBufferAdaptorRef;
33
+ @property (nonatomic, assign) CMTime videoStartTime;
34
+ @property (nonatomic, assign) BOOL videoWriterStarted;
35
+
36
+ // Audio state
37
+ @property (nonatomic, assign) BOOL shouldCaptureAudio;
38
+ @property (nonatomic, strong) NSString *audioOutputPath;
39
+ @property (nonatomic, strong) AVAssetWriter *audioWriter;
40
+ @property (nonatomic, strong) AVAssetWriterInput *systemAudioInput;
41
+ @property (nonatomic, strong) AVAssetWriterInput *microphoneAudioInput;
42
+ @property (nonatomic, assign) CMTime audioStartTime;
43
+ @property (nonatomic, assign) BOOL audioWriterStarted;
44
+ @property (nonatomic, assign) BOOL captureMicrophoneEnabled;
45
+ @property (nonatomic, assign) BOOL captureSystemAudioEnabled;
46
+ @property (nonatomic, assign) BOOL mixAudioEnabled;
47
+ @property (nonatomic, assign) float mixMicGain;
48
+ @property (nonatomic, assign) float mixSystemGain;
49
+
50
+ // Configuration
51
+ @property (nonatomic, assign) NSInteger configuredSampleRate;
52
+ @property (nonatomic, assign) NSInteger configuredChannelCount;
53
+ @property (nonatomic, assign) NSInteger targetFPS;
54
+
55
+ // Frame rate debugging
56
+ @property (nonatomic, assign) NSInteger frameCount;
57
+ @property (nonatomic, assign) CFAbsoluteTime firstFrameTime;
58
+
59
+ - (instancetype)initWithSessionId:(NSString *)sessionId;
60
+ - (void)cleanup;
61
+ @end
62
+
63
+ @implementation RecordingSession
64
+
65
+ - (instancetype)initWithSessionId:(NSString *)sessionId {
66
+ self = [super init];
67
+ if (self) {
68
+ _sessionId = sessionId;
69
+ _isRecording = NO;
70
+ _isCleaningUp = NO;
71
+ _isScheduling = NO;
72
+ _firstFrameReceived = NO;
73
+ _frameCountSinceStart = 0;
74
+ _videoStartTime = kCMTimeInvalid;
75
+ _videoWriterStarted = NO;
76
+ _audioStartTime = kCMTimeInvalid;
77
+ _audioWriterStarted = NO;
78
+ _shouldCaptureAudio = NO;
79
+ _captureMicrophoneEnabled = NO;
80
+ _captureSystemAudioEnabled = NO;
81
+ _mixAudioEnabled = YES;
82
+ _mixMicGain = 0.8f;
83
+ _mixSystemGain = 0.4f;
84
+ _configuredSampleRate = 48000;
85
+ _configuredChannelCount = 2;
86
+ _targetFPS = 60;
87
+ _frameCount = 0;
88
+ _firstFrameTime = 0;
89
+ _pixelBufferAdaptorRef = NULL;
90
+ }
91
+ return self;
92
+ }
93
+
94
+ - (void)cleanup {
95
+ MRLog(@"๐Ÿงน Cleaning up session: %@", _sessionId);
96
+
97
+ if (_pixelBufferAdaptorRef) {
98
+ CFRelease(_pixelBufferAdaptorRef);
99
+ _pixelBufferAdaptorRef = NULL;
100
+ }
101
+
102
+ _stream = nil;
103
+ _streamDelegate = nil;
104
+ _videoWriter = nil;
105
+ _videoInput = nil;
106
+ _audioWriter = nil;
107
+ _systemAudioInput = nil;
108
+ _microphoneAudioInput = nil;
109
+ _videoStreamOutput = nil;
110
+ _audioStreamOutput = nil;
111
+ _videoQueue = nil;
112
+ _audioQueue = nil;
113
+ _outputPath = nil;
114
+ _audioOutputPath = nil;
115
+ _isRecording = NO;
116
+ _isCleaningUp = NO;
117
+ _isScheduling = NO;
118
+ _firstFrameReceived = NO;
119
+ _frameCountSinceStart = 0;
120
+ }
121
+
122
+ - (void)dealloc {
123
+ [self cleanup];
124
+ }
125
+
126
+ @end
127
+
128
+ // Session registry - thread-safe access
129
+ static NSMutableDictionary<NSString *, RecordingSession *> *g_sessions = nil;
130
+ static dispatch_queue_t g_sessionsQueue = nil;
131
+
132
+ // Legacy global state for backward compatibility (points to first/default session)
10
133
  static SCStream * API_AVAILABLE(macos(12.3)) g_stream = nil;
11
134
  static id<SCStreamDelegate> API_AVAILABLE(macos(12.3)) g_streamDelegate = nil;
12
135
  static BOOL g_isRecording = NO;
13
- static BOOL g_isCleaningUp = NO; // Prevent recursive cleanup
136
+ static BOOL g_isCleaningUp = NO;
137
+ static BOOL g_isScheduling = NO;
14
138
  static NSString *g_outputPath = nil;
15
-
139
+ static BOOL g_firstFrameReceived = NO;
140
+ static NSInteger g_frameCountSinceStart = 0;
16
141
  static dispatch_queue_t g_videoQueue = nil;
17
142
  static dispatch_queue_t g_audioQueue = nil;
18
143
  static id g_videoStreamOutput = nil;
19
144
  static id g_audioStreamOutput = nil;
20
-
21
145
  static AVAssetWriter *g_videoWriter = nil;
22
146
  static AVAssetWriterInput *g_videoInput = nil;
23
147
  static CFTypeRef g_pixelBufferAdaptorRef = NULL;
24
148
  static CMTime g_videoStartTime = kCMTimeInvalid;
25
149
  static BOOL g_videoWriterStarted = NO;
26
-
27
150
  static BOOL g_shouldCaptureAudio = NO;
28
151
  static NSString *g_audioOutputPath = nil;
29
152
  static AVAssetWriter *g_audioWriter = nil;
30
- static AVAssetWriterInput *g_audioInput = nil;
153
+ static AVAssetWriterInput *g_systemAudioInput = nil;
154
+ static AVAssetWriterInput *g_microphoneAudioInput = nil;
31
155
  static CMTime g_audioStartTime = kCMTimeInvalid;
32
156
  static BOOL g_audioWriterStarted = NO;
33
-
157
+ static BOOL g_captureMicrophoneEnabled = NO;
158
+ static BOOL g_captureSystemAudioEnabled = NO;
159
+ static BOOL g_mixAudioEnabled = YES;
160
+ static float g_mixMicGain = 0.8f;
161
+ static float g_mixSystemGain = 0.4f;
34
162
  static NSInteger g_configuredSampleRate = 48000;
35
163
  static NSInteger g_configuredChannelCount = 2;
36
164
  static NSInteger g_targetFPS = 60;
37
-
38
- // Frame rate debugging
39
165
  static NSInteger g_frameCount = 0;
40
166
  static CFAbsoluteTime g_firstFrameTime = 0;
41
167
 
168
+ static dispatch_queue_t ScreenCaptureControlQueue(void);
169
+ static void SCKMarkSchedulingComplete(void);
170
+ static void SCKFailScheduling(void);
171
+ static void SCKPerformRecordingSetup(NSDictionary *config, SCShareableContent *content) API_AVAILABLE(macos(12.3));
172
+
42
173
  static void CleanupWriters(void);
174
+
175
+ // SESSION MANAGEMENT FUNCTIONS
176
+ static void InitializeSessionRegistry(void) {
177
+ static dispatch_once_t onceToken;
178
+ dispatch_once(&onceToken, ^{
179
+ g_sessions = [[NSMutableDictionary alloc] init];
180
+ g_sessionsQueue = dispatch_queue_create("com.macrecorder.sessions", DISPATCH_QUEUE_CONCURRENT);
181
+ MRLog(@"๐Ÿ“ฆ Session registry initialized");
182
+ });
183
+ }
184
+
185
+ static NSString *GenerateSessionId(void) {
186
+ return [NSString stringWithFormat:@"rec_%lld", (long long)([[NSDate date] timeIntervalSince1970] * 1000)];
187
+ }
188
+
189
+ static RecordingSession * _Nullable GetSession(NSString *sessionId) {
190
+ if (!sessionId) return nil;
191
+ InitializeSessionRegistry();
192
+
193
+ __block RecordingSession *session = nil;
194
+ dispatch_sync(g_sessionsQueue, ^{
195
+ session = g_sessions[sessionId];
196
+ });
197
+ return session;
198
+ }
199
+
200
+ static NSString *CreateSession(void) {
201
+ InitializeSessionRegistry();
202
+
203
+ NSString *sessionId = GenerateSessionId();
204
+ RecordingSession *session = [[RecordingSession alloc] initWithSessionId:sessionId];
205
+
206
+ dispatch_barrier_async(g_sessionsQueue, ^{
207
+ g_sessions[sessionId] = session;
208
+ MRLog(@"โž• Session created: %@", sessionId);
209
+ });
210
+
211
+ return sessionId;
212
+ }
213
+
214
+ static void RemoveSession(NSString *sessionId) {
215
+ if (!sessionId) return;
216
+ InitializeSessionRegistry();
217
+
218
+ dispatch_barrier_async(g_sessionsQueue, ^{
219
+ RecordingSession *session = g_sessions[sessionId];
220
+ if (session) {
221
+ [session cleanup];
222
+ [g_sessions removeObjectForKey:sessionId];
223
+ MRLog(@"โž– Session removed: %@", sessionId);
224
+ }
225
+ });
226
+ }
227
+
228
+ static NSArray<NSString *> *GetAllSessionIds(void) {
229
+ InitializeSessionRegistry();
230
+
231
+ __block NSArray<NSString *> *sessionIds = nil;
232
+ dispatch_sync(g_sessionsQueue, ^{
233
+ sessionIds = [g_sessions allKeys];
234
+ });
235
+ return sessionIds ?: @[];
236
+ }
237
+
238
+ static NSInteger GetActiveSessionCount(void) {
239
+ InitializeSessionRegistry();
240
+
241
+ __block NSInteger count = 0;
242
+ dispatch_sync(g_sessionsQueue, ^{
243
+ count = g_sessions.count;
244
+ });
245
+ return count;
246
+ }
247
+
43
248
  static AVAssetWriterInputPixelBufferAdaptor * _Nullable CurrentPixelBufferAdaptor(void) {
44
249
  if (!g_pixelBufferAdaptorRef) {
45
250
  return nil;
@@ -103,17 +308,39 @@ static void CleanupWriters(void) {
103
308
  }
104
309
 
105
310
  if (g_audioWriter) {
106
- FinishWriter(g_audioWriter, g_audioInput);
311
+ if (g_systemAudioInput) {
312
+ [g_systemAudioInput markAsFinished];
313
+ }
314
+ if (g_microphoneAudioInput) {
315
+ [g_microphoneAudioInput markAsFinished];
316
+ }
317
+ FinishWriter(g_audioWriter, nil);
107
318
  g_audioWriter = nil;
108
- g_audioInput = nil;
319
+ g_systemAudioInput = nil;
320
+ g_microphoneAudioInput = nil;
109
321
  g_audioWriterStarted = NO;
110
322
  g_audioStartTime = kCMTimeInvalid;
323
+ g_captureMicrophoneEnabled = NO;
324
+ g_captureSystemAudioEnabled = NO;
111
325
  }
112
326
  }
113
327
 
114
328
  @interface PureScreenCaptureDelegate : NSObject <SCStreamDelegate>
115
329
  @end
116
330
 
331
+ // External helpers for mixing/muxing
332
+ extern "C" NSString *currentStandaloneAudioRecordingPath(void);
333
+ extern "C" NSString *lastStandaloneAudioRecordingPath(void);
334
+ extern "C" BOOL MRMixAudioToSingleTrack(NSString *primaryAudioPath,
335
+ NSString *externalMicPath,
336
+ BOOL preferInternalTracks);
337
+ extern "C" BOOL MRMixAudioToSingleTrackWithGains(NSString *primaryAudioPath,
338
+ NSString *externalMicPath,
339
+ BOOL preferInternalTracks,
340
+ float micGain,
341
+ float systemGain);
342
+ extern "C" BOOL MRMuxAudioIntoVideo(NSString *videoPath, NSString *audioPath);
343
+
117
344
  extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
118
345
  if (!g_audioOutputPath) {
119
346
  return nil;
@@ -161,7 +388,8 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
161
388
  @end
162
389
 
163
390
  @interface ScreenCaptureKitRecorder (Private)
164
- + (BOOL)prepareAudioWriterIfNeededWithSampleBuffer:(CMSampleBufferRef)sampleBuffer;
391
+ + (BOOL)prepareAudioWriterIfNeededWithSampleBuffer:(CMSampleBufferRef)sampleBuffer
392
+ isMicrophone:(BOOL)isMicrophone;
165
393
  @end
166
394
 
167
395
  @interface ScreenCaptureVideoOutput : NSObject <SCStreamOutput>
@@ -182,6 +410,7 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
182
410
  }
183
411
 
184
412
  CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
413
+ MRSyncMarkAudioSample(presentationTime);
185
414
 
186
415
  // Wait for audio to arrive before starting screen video to prevent leading frames.
187
416
  if (MRSyncShouldHoldVideoFrame(presentationTime)) {
@@ -196,8 +425,18 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
196
425
  [g_videoWriter startSessionAtSourceTime:kCMTimeZero];
197
426
  g_videoStartTime = presentationTime;
198
427
  g_videoWriterStarted = YES;
428
+ g_frameCountSinceStart = 0;
199
429
  MRLog(@"๐ŸŽž๏ธ Video writer session started @ %.3f (zero-based timeline)", CMTimeGetSeconds(presentationTime));
200
430
  }
431
+
432
+ // ELECTRON FIX: Track frame count to ensure ScreenCaptureKit is fully running
433
+ if (!g_firstFrameReceived) {
434
+ g_frameCountSinceStart++;
435
+ if (g_frameCountSinceStart >= 10) { // Wait for 10 frames (~150ms at 60fps)
436
+ g_firstFrameReceived = YES;
437
+ MRLog(@"โœ… ScreenCaptureKit fully initialized after %ld frames", (long)g_frameCountSinceStart);
438
+ }
439
+ }
201
440
 
202
441
  if (!g_videoInput.readyForMoreMediaData) {
203
442
  return;
@@ -234,6 +473,18 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
234
473
  relativePresentation = kCMTimeZero;
235
474
  }
236
475
  }
476
+
477
+ double stopLimit = MRSyncGetStopLimitSeconds();
478
+ if (stopLimit > 0) {
479
+ double frameSeconds = CMTimeGetSeconds(relativePresentation);
480
+ double tolerance = g_targetFPS > 0 ? (1.5 / g_targetFPS) : 0.02;
481
+ if (tolerance < 0.02) {
482
+ tolerance = 0.02;
483
+ }
484
+ if (frameSeconds > stopLimit + tolerance) {
485
+ return;
486
+ }
487
+ }
237
488
 
238
489
  AVAssetWriterInputPixelBufferAdaptor *adaptor = adaptorCandidate;
239
490
  BOOL appended = [adaptor appendPixelBuffer:pixelBuffer withPresentationTime:relativePresentation];
@@ -268,24 +519,50 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
268
519
  return;
269
520
  }
270
521
 
271
- if (@available(macOS 13.0, *)) {
272
- if (type != SCStreamOutputTypeAudio) {
273
- return;
522
+ BOOL isMicrophoneSample = NO;
523
+ BOOL isSupportedSample = NO;
524
+ if (@available(macOS 15.0, *)) {
525
+ if (type == SCStreamOutputTypeAudio) {
526
+ isSupportedSample = YES;
527
+ } else if (type == SCStreamOutputTypeMicrophone) {
528
+ isSupportedSample = YES;
529
+ isMicrophoneSample = YES;
274
530
  }
275
- } else {
531
+ } else if (@available(macOS 13.0, *)) {
532
+ if (type == SCStreamOutputTypeAudio) {
533
+ isSupportedSample = YES;
534
+ }
535
+ }
536
+
537
+ if (!isSupportedSample) {
276
538
  return;
277
539
  }
278
540
 
541
+ BOOL routeToMicrophoneTrack = isMicrophoneSample;
542
+ if (!routeToMicrophoneTrack) {
543
+ if (!g_captureSystemAudioEnabled && g_captureMicrophoneEnabled) {
544
+ // Only microphone requested (e.g., macOS < 15), so treat stream as microphone.
545
+ routeToMicrophoneTrack = YES;
546
+ }
547
+ }
548
+
279
549
  if (!CMSampleBufferDataIsReady(sampleBuffer)) {
280
- MRLog(@"โš ๏ธ Audio sample buffer data not ready");
550
+ MRLog(@"โš ๏ธ %@ audio sample buffer not ready",
551
+ routeToMicrophoneTrack ? @"Microphone" : @"System");
281
552
  return;
282
553
  }
283
554
 
284
- if (![ScreenCaptureKitRecorder prepareAudioWriterIfNeededWithSampleBuffer:sampleBuffer]) {
555
+ if (![ScreenCaptureKitRecorder prepareAudioWriterIfNeededWithSampleBuffer:sampleBuffer
556
+ isMicrophone:routeToMicrophoneTrack]) {
285
557
  return;
286
558
  }
287
559
 
288
- if (!g_audioWriter || !g_audioInput) {
560
+ if (!g_audioWriter) {
561
+ return;
562
+ }
563
+
564
+ AVAssetWriterInput *targetInput = routeToMicrophoneTrack ? g_microphoneAudioInput : g_systemAudioInput;
565
+ if (!targetInput) {
289
566
  return;
290
567
  }
291
568
 
@@ -299,13 +576,20 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
299
576
  [g_audioWriter startSessionAtSourceTime:kCMTimeZero];
300
577
  g_audioStartTime = presentationTime;
301
578
  g_audioWriterStarted = YES;
302
- MRLog(@"๐Ÿ”Š Audio writer session started @ %.3f (zero-based timeline)", CMTimeGetSeconds(presentationTime));
579
+ MRLog(@"๐Ÿ”Š Audio writer session started @ %.3f (source=%@)",
580
+ CMTimeGetSeconds(presentationTime),
581
+ routeToMicrophoneTrack ? @"microphone" : @"system");
303
582
  }
304
583
 
305
- if (!g_audioInput.readyForMoreMediaData) {
306
- static int notReadyCount = 0;
307
- if (notReadyCount++ % 100 == 0) {
308
- MRLog(@"โš ๏ธ Audio input not ready for data (count: %d)", notReadyCount);
584
+ static int systemNotReadyCount = 0;
585
+ static int microphoneNotReadyCount = 0;
586
+ int *notReadyCounter = routeToMicrophoneTrack ? &microphoneNotReadyCount : &systemNotReadyCount;
587
+
588
+ if (!targetInput.readyForMoreMediaData) {
589
+ if ((*notReadyCounter)++ % 100 == 0) {
590
+ MRLog(@"โš ๏ธ %@ audio input not ready for data (count: %d)",
591
+ routeToMicrophoneTrack ? @"Microphone" : @"System",
592
+ *notReadyCounter);
309
593
  }
310
594
  return;
311
595
  }
@@ -362,13 +646,34 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
362
646
  }
363
647
  }
364
648
 
365
- BOOL success = [g_audioInput appendSampleBuffer:bufferToAppend];
649
+ double stopLimit = MRSyncGetStopLimitSeconds();
650
+ if (stopLimit > 0) {
651
+ CMTime sampleStart = CMSampleBufferGetPresentationTimeStamp(bufferToAppend);
652
+ double sampleSeconds = CMTimeGetSeconds(sampleStart);
653
+ double sampleDuration = CMTIME_IS_VALID(CMSampleBufferGetDuration(bufferToAppend))
654
+ ? CMTimeGetSeconds(CMSampleBufferGetDuration(bufferToAppend))
655
+ : 0.0;
656
+ double tolerance = 0.02;
657
+ if (sampleSeconds > stopLimit + tolerance ||
658
+ (sampleDuration > 0.0 && (sampleSeconds + sampleDuration) > stopLimit + tolerance)) {
659
+ if (bufferToAppend != sampleBuffer) {
660
+ CFRelease(bufferToAppend);
661
+ }
662
+ return;
663
+ }
664
+ }
665
+
666
+ BOOL success = [targetInput appendSampleBuffer:bufferToAppend];
366
667
  if (!success) {
367
668
  NSLog(@"โš ๏ธ Failed appending audio sample buffer: %@", g_audioWriter.error);
368
669
  } else {
369
- static int appendCount = 0;
370
- if (appendCount++ % 100 == 0) {
371
- MRLog(@"โœ… Audio sample appended successfully (count: %d)", appendCount);
670
+ static int systemAppendCount = 0;
671
+ static int microphoneAppendCount = 0;
672
+ int *appendCount = routeToMicrophoneTrack ? &microphoneAppendCount : &systemAppendCount;
673
+ if ((*appendCount)++ % 100 == 0) {
674
+ MRLog(@"โœ… %@ audio sample appended (count: %d)",
675
+ routeToMicrophoneTrack ? @"Microphone" : @"System",
676
+ *appendCount);
372
677
  }
373
678
  }
374
679
 
@@ -462,61 +767,135 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
462
767
  return YES;
463
768
  }
464
769
 
465
- + (BOOL)prepareAudioWriterIfNeededWithSampleBuffer:(CMSampleBufferRef)sampleBuffer {
466
- if (!g_shouldCaptureAudio || g_audioWriter || !g_audioOutputPath) {
770
+ + (BOOL)prepareAudioWriterIfNeededWithSampleBuffer:(CMSampleBufferRef)sampleBuffer
771
+ isMicrophone:(BOOL)isMicrophone {
772
+ if (!g_shouldCaptureAudio || !g_audioOutputPath) {
467
773
  return g_audioWriter != nil || !g_shouldCaptureAudio;
468
774
  }
469
-
775
+
470
776
  CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer);
471
777
  if (!formatDescription) {
472
778
  NSLog(@"โš ๏ธ Missing audio format description");
473
779
  return NO;
474
780
  }
475
-
476
- const AudioStreamBasicDescription *asbd = CMAudioFormatDescriptionGetStreamBasicDescription(formatDescription);
781
+
782
+ const AudioStreamBasicDescription *asbd =
783
+ CMAudioFormatDescriptionGetStreamBasicDescription(formatDescription);
477
784
  if (!asbd) {
478
785
  NSLog(@"โš ๏ธ Unsupported audio format description");
479
786
  return NO;
480
787
  }
481
-
482
- g_configuredSampleRate = (NSInteger)asbd->mSampleRate;
483
- g_configuredChannelCount = asbd->mChannelsPerFrame;
484
-
485
- NSString *originalPath = g_audioOutputPath ?: @"";
486
- NSURL *audioURL = [NSURL fileURLWithPath:originalPath];
487
- [[NSFileManager defaultManager] removeItemAtURL:audioURL error:nil];
488
-
489
- NSError *writerError = nil;
490
- // CRITICAL FIX: AVAssetWriter does NOT support WebM for audio
491
- // Always use QuickTime Movie format (.mov) for audio files
492
- AVFileType requestedFileType = AVFileTypeQuickTimeMovie;
493
-
494
- // Ensure path has .mov extension for audio
495
- NSString *audioPath = originalPath;
496
- if (![audioPath.pathExtension.lowercaseString isEqualToString:@"mov"]) {
497
- MRLog(@"โš ๏ธ Audio path has wrong extension '%@', changing to .mov", audioPath.pathExtension);
498
- audioPath = [[audioPath stringByDeletingPathExtension] stringByAppendingPathExtension:@"mov"];
499
- g_audioOutputPath = audioPath;
500
- }
501
- audioURL = [NSURL fileURLWithPath:audioPath];
502
- [[NSFileManager defaultManager] removeItemAtURL:audioURL error:nil];
503
-
504
- @try {
505
- g_audioWriter = [[AVAssetWriter alloc] initWithURL:audioURL fileType:requestedFileType error:&writerError];
506
- } @catch (NSException *exception) {
507
- NSDictionary *info = @{
508
- NSLocalizedDescriptionKey: exception.reason ?: @"Failed to initialize audio writer"
509
- };
510
- writerError = [NSError errorWithDomain:@"ScreenCaptureKitRecorder" code:-201 userInfo:info];
511
- g_audioWriter = nil;
788
+
789
+ if (!g_audioWriter) {
790
+ g_configuredSampleRate = (NSInteger)asbd->mSampleRate;
791
+ g_configuredChannelCount = asbd->mChannelsPerFrame;
792
+
793
+ NSString *originalPath = g_audioOutputPath ?: @"";
794
+ NSURL *audioURL = [NSURL fileURLWithPath:originalPath];
795
+ [[NSFileManager defaultManager] removeItemAtURL:audioURL error:nil];
796
+
797
+ NSError *writerError = nil;
798
+ AVFileType requestedFileType = AVFileTypeQuickTimeMovie;
799
+
800
+ NSString *audioPath = originalPath;
801
+ if (![audioPath.pathExtension.lowercaseString isEqualToString:@"mov"]) {
802
+ MRLog(@"โš ๏ธ Audio path has wrong extension '%@', changing to .mov", audioPath.pathExtension);
803
+ audioPath = [[audioPath stringByDeletingPathExtension] stringByAppendingPathExtension:@"mov"];
804
+ g_audioOutputPath = audioPath;
805
+ }
806
+ audioURL = [NSURL fileURLWithPath:audioPath];
807
+ [[NSFileManager defaultManager] removeItemAtURL:audioURL error:nil];
808
+
809
+ @try {
810
+ g_audioWriter = [[AVAssetWriter alloc] initWithURL:audioURL
811
+ fileType:requestedFileType
812
+ error:&writerError];
813
+ } @catch (NSException *exception) {
814
+ NSDictionary *info = @{
815
+ NSLocalizedDescriptionKey: exception.reason ?: @"Failed to initialize audio writer"
816
+ };
817
+ writerError = [NSError errorWithDomain:@"ScreenCaptureKitRecorder" code:-201 userInfo:info];
818
+ g_audioWriter = nil;
819
+ }
820
+
821
+ if (!g_audioWriter || writerError) {
822
+ NSLog(@"โŒ Failed to create audio writer: %@", writerError);
823
+ return NO;
824
+ }
825
+
826
+ // Reset tracking flags whenever we create a new writer
827
+ g_audioWriterStarted = NO;
828
+ g_audioStartTime = kCMTimeInvalid;
829
+
830
+ // CRITICAL FIX: Add BOTH system and microphone inputs NOW (before startWriting)
831
+ // if both are enabled. AVAssetWriter cannot add inputs after startWriting() is called.
832
+ NSLog(@"๐ŸŽ™๏ธ Creating audio writer - system=%d, microphone=%d",
833
+ g_captureSystemAudioEnabled, g_captureMicrophoneEnabled);
512
834
  }
513
-
514
- if (!g_audioWriter || writerError) {
515
- NSLog(@"โŒ Failed to create audio writer: %@", writerError);
516
- return NO;
835
+
836
+ AVAssetWriterInput **targetInput = isMicrophone ? &g_microphoneAudioInput : &g_systemAudioInput;
837
+ if (*targetInput) {
838
+ return YES;
517
839
  }
518
-
519
- NSInteger channelCount = MAX(1, g_configuredChannelCount);
840
+
841
+ // If writer was just created and BOTH sources are enabled, create BOTH inputs now
842
+ if (g_audioWriter && g_captureSystemAudioEnabled && g_captureMicrophoneEnabled) {
843
+ if (!g_systemAudioInput && !g_microphoneAudioInput) {
844
+ NSLog(@"๐ŸŽ™๏ธ Both audio sources enabled - creating both inputs from first sample");
845
+
846
+ NSUInteger channelCount = MAX((NSUInteger)1, (NSUInteger)asbd->mChannelsPerFrame);
847
+ AudioChannelLayout layout = {0};
848
+ size_t layoutSize = 0;
849
+ if (channelCount == 1) {
850
+ layout.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
851
+ layoutSize = sizeof(AudioChannelLayout);
852
+ } else if (channelCount == 2) {
853
+ layout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
854
+ layoutSize = sizeof(AudioChannelLayout);
855
+ }
856
+
857
+ NSMutableDictionary *audioSettings = [@{
858
+ AVFormatIDKey: @(kAudioFormatMPEG4AAC),
859
+ AVSampleRateKey: @(asbd->mSampleRate),
860
+ AVNumberOfChannelsKey: @(channelCount),
861
+ AVEncoderBitRateKey: @(192000)
862
+ } mutableCopy];
863
+
864
+ if (layoutSize > 0) {
865
+ audioSettings[AVChannelLayoutKey] = [NSData dataWithBytes:&layout length:layoutSize];
866
+ }
867
+
868
+ // ELECTRON FIX: Create microphone input FIRST (stream 0)
869
+ // Electron plays stream 0 by default, so put the more important audio first
870
+ // TODO: Mix both streams into single track for full Electron compatibility
871
+ AVAssetWriterInput *micInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
872
+ outputSettings:audioSettings];
873
+ micInput.expectsMediaDataInRealTime = YES;
874
+ if ([g_audioWriter canAddInput:micInput]) {
875
+ [g_audioWriter addInput:micInput];
876
+ g_microphoneAudioInput = micInput;
877
+ NSLog(@"โœ… Microphone audio input created (stream 0 - Electron default)");
878
+ } else {
879
+ NSLog(@"โŒ Cannot add microphone audio input");
880
+ }
881
+
882
+ // Create system audio input (stream 1)
883
+ AVAssetWriterInput *systemInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
884
+ outputSettings:audioSettings];
885
+ systemInput.expectsMediaDataInRealTime = YES;
886
+ if ([g_audioWriter canAddInput:systemInput]) {
887
+ [g_audioWriter addInput:systemInput];
888
+ g_systemAudioInput = systemInput;
889
+ NSLog(@"โœ… System audio input created (stream 1)");
890
+ } else {
891
+ NSLog(@"โŒ Cannot add system audio input");
892
+ }
893
+
894
+ return YES;
895
+ }
896
+ }
897
+
898
+ NSUInteger channelCount = MAX((NSUInteger)1, (NSUInteger)asbd->mChannelsPerFrame);
520
899
  AudioChannelLayout layout = {0};
521
900
  size_t layoutSize = 0;
522
901
  if (channelCount == 1) {
@@ -529,7 +908,7 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
529
908
 
530
909
  NSMutableDictionary *audioSettings = [@{
531
910
  AVFormatIDKey: @(kAudioFormatMPEG4AAC),
532
- AVSampleRateKey: @(g_configuredSampleRate),
911
+ AVSampleRateKey: @(asbd->mSampleRate),
533
912
  AVNumberOfChannelsKey: @(channelCount),
534
913
  AVEncoderBitRateKey: @(192000)
535
914
  } mutableCopy];
@@ -537,22 +916,27 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
537
916
  if (layoutSize > 0) {
538
917
  audioSettings[AVChannelLayoutKey] = [NSData dataWithBytes:&layout length:layoutSize];
539
918
  }
540
-
541
- g_audioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioSettings];
542
- g_audioInput.expectsMediaDataInRealTime = YES;
543
919
 
544
- MRLog(@"๐ŸŽ™๏ธ Audio input created: sampleRate=%ld, channels=%ld, bitrate=192k",
545
- (long)g_configuredSampleRate, (long)channelCount);
920
+ AVAssetWriterInput *newInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
921
+ outputSettings:audioSettings];
922
+ newInput.expectsMediaDataInRealTime = YES;
546
923
 
547
- if (![g_audioWriter canAddInput:g_audioInput]) {
548
- NSLog(@"โŒ Audio writer cannot add input");
924
+ if (![g_audioWriter canAddInput:newInput]) {
925
+ NSLog(@"โŒ Audio writer cannot add %@ input", isMicrophone ? @"microphone" : @"system");
549
926
  return NO;
550
927
  }
551
- [g_audioWriter addInput:g_audioInput];
552
- g_audioWriterStarted = NO;
553
- g_audioStartTime = kCMTimeInvalid;
928
+ [g_audioWriter addInput:newInput];
929
+
930
+ if (isMicrophone) {
931
+ g_microphoneAudioInput = newInput;
932
+ MRLog(@"๐ŸŽ™๏ธ Microphone audio input created: sampleRate=%.0f, channels=%ld",
933
+ asbd->mSampleRate, (long)channelCount);
934
+ } else {
935
+ g_systemAudioInput = newInput;
936
+ MRLog(@"๐Ÿ”ˆ System audio input created: sampleRate=%.0f, channels=%ld",
937
+ asbd->mSampleRate, (long)channelCount);
938
+ }
554
939
 
555
- MRLog(@"โœ… Audio writer prepared successfully (path: %@)", g_audioOutputPath);
556
940
  return YES;
557
941
  }
558
942
 
@@ -564,90 +948,353 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
564
948
  }
565
949
 
566
950
  + (BOOL)startRecordingWithConfiguration:(NSDictionary *)config delegate:(id)delegate error:(NSError **)error {
567
- @synchronized([ScreenCaptureKitRecorder class]) {
568
- if (g_isRecording || g_isCleaningUp) {
569
- MRLog(@"โš ๏ธ Already recording or cleaning up (recording:%d cleaning:%d)", g_isRecording, g_isCleaningUp);
570
- return NO;
571
- }
951
+ if (!config) {
952
+ return NO;
953
+ }
572
954
 
573
- // Reset any stale state
574
- g_isCleaningUp = NO;
955
+ NSDictionary *configCopy = [config copy];
956
+ dispatch_queue_t controlQueue = ScreenCaptureControlQueue();
957
+ __block BOOL accepted = NO;
575
958
 
576
- // DON'T set g_isRecording here - wait for stream to actually start
577
- // This prevents the "recording=1 stream=null" issue
578
- }
959
+ dispatch_sync(controlQueue, ^{
960
+ if (g_isRecording || g_isCleaningUp || g_isScheduling) {
961
+ MRLog(@"โš ๏ธ ScreenCaptureKit busy (recording:%d cleaning:%d scheduling:%d)", g_isRecording, g_isCleaningUp, g_isScheduling);
962
+ accepted = NO;
963
+ return;
964
+ }
965
+ g_isCleaningUp = NO;
966
+ g_isScheduling = YES;
967
+ accepted = YES;
968
+ });
579
969
 
580
- NSString *outputPath = config[@"outputPath"];
581
- if (!outputPath || [outputPath length] == 0) {
582
- NSLog(@"โŒ Invalid output path provided");
970
+ if (!accepted) {
583
971
  return NO;
584
972
  }
585
- g_outputPath = outputPath;
586
-
587
- // Extract configuration options
588
- NSNumber *displayId = config[@"displayId"];
589
- NSNumber *windowId = config[@"windowId"];
590
- NSDictionary *captureRect = config[@"captureRect"];
591
- NSNumber *captureCursor = config[@"captureCursor"];
592
- NSNumber *includeMicrophone = config[@"includeMicrophone"];
593
- NSNumber *includeSystemAudio = config[@"includeSystemAudio"];
594
- NSString *microphoneDeviceId = config[@"microphoneDeviceId"];
595
- NSString *audioOutputPath = MRNormalizePath(config[@"audioOutputPath"]);
596
- NSNumber *sessionTimestampNumber = config[@"sessionTimestamp"];
597
-
598
- // Extract requested frame rate
599
- NSNumber *frameRateNumber = config[@"frameRate"];
600
- if (frameRateNumber && [frameRateNumber respondsToSelector:@selector(intValue)]) {
601
- NSInteger fps = [frameRateNumber intValue];
602
- if (fps < 1) fps = 1;
603
- if (fps > 120) fps = 120;
604
- g_targetFPS = fps;
605
- } else {
606
- g_targetFPS = 60;
607
- }
608
-
609
- MRLog(@"๐ŸŽฌ Starting PURE ScreenCaptureKit recording (NO AVFoundation)");
610
- MRLog(@"๐Ÿ”ง Config: cursor=%@ mic=%@ system=%@ display=%@ window=%@ crop=%@",
611
- captureCursor, includeMicrophone, includeSystemAudio, displayId, windowId, captureRect);
612
-
613
- // CRITICAL DEBUG: Log EXACT audio parameter values
614
- MRLog(@"๐Ÿ” AUDIO DEBUG: includeMicrophone type=%@ value=%d", [includeMicrophone class], [includeMicrophone boolValue]);
615
- MRLog(@"๐Ÿ” AUDIO DEBUG: includeSystemAudio type=%@ value=%d", [includeSystemAudio class], [includeSystemAudio boolValue]);
616
973
 
617
- // ELECTRON FIX: Get shareable content FULLY ASYNCHRONOUSLY
618
- // NO semaphores, NO blocking - pure async to prevent Electron crashes
619
- // CRITICAL: Run on background queue to avoid blocking Electron's main thread
974
+ // CRITICAL FIX: Use dispatch_get_global_queue instead of main_queue
975
+ // because Node.js standalone doesn't run macOS main event loop (only Electron does)
976
+ NSLog(@"๐Ÿš€ Requesting shareable content...");
620
977
  dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), ^{
621
978
  [SCShareableContent getShareableContentWithCompletionHandler:^(SCShareableContent *content, NSError *contentError) {
622
- @autoreleasepool {
623
- if (contentError) {
979
+ if (contentError || !content) {
624
980
  NSLog(@"โŒ Content error: %@", contentError);
625
- // No need to set g_isRecording=NO since it was never set to YES
626
- return; // Early return from completion handler block
981
+ SCKFailScheduling();
982
+ return;
627
983
  }
984
+ NSLog(@"โœ… Got shareable content, starting recording setup...");
985
+ dispatch_async(controlQueue, ^{
986
+ SCKPerformRecordingSetup(configCopy, content);
987
+ });
988
+ }];
989
+ });
990
+
991
+ return YES;
992
+ }
993
+
994
+ + (void)stopRecording {
995
+ if (!g_isRecording || !g_stream || g_isCleaningUp) {
996
+ NSLog(@"โš ๏ธ Cannot stop: recording=%d stream=%@ cleaning=%d", g_isRecording, g_stream, g_isCleaningUp);
997
+ SCKMarkSchedulingComplete();
998
+ return;
999
+ }
1000
+
1001
+ MRLog(@"๐Ÿ›‘ Stopping pure ScreenCaptureKit recording");
1002
+
1003
+ // CRITICAL FIX: Set cleanup flag IMMEDIATELY to prevent race conditions
1004
+ // This prevents startRecording from being called while stop is in progress
1005
+ @synchronized([ScreenCaptureKitRecorder class]) {
1006
+ g_isCleaningUp = YES;
1007
+ }
1008
+
1009
+ // Store stream reference to prevent it from being deallocated
1010
+ SCStream *streamToStop = g_stream;
1011
+
1012
+ // ELECTRON FIX: Stop FULLY ASYNCHRONOUSLY - NO blocking, NO semaphores
1013
+ [streamToStop stopCaptureWithCompletionHandler:^(NSError *stopError) {
1014
+ @autoreleasepool {
1015
+ if (stopError) {
1016
+ NSLog(@"โŒ Stop error: %@", stopError);
1017
+ } else {
1018
+ MRLog(@"โœ… Pure stream stopped");
1019
+ }
1020
+
1021
+ // Reset recording state to allow new recordings
1022
+ @synchronized([ScreenCaptureKitRecorder class]) {
1023
+ g_isRecording = NO;
1024
+ g_isCleaningUp = NO; // CRITICAL: Reset cleanup flag when done
1025
+ }
1026
+
1027
+ // Cleanup after stop completes
1028
+ CleanupWriters();
1029
+ [ScreenCaptureKitRecorder cleanupVideoWriter];
1030
+
1031
+ // Post-process: mix (if enabled) then mux audio into video file
1032
+ if (g_shouldCaptureAudio && g_audioOutputPath) {
1033
+ NSString *primaryAudioPath = ScreenCaptureKitCurrentAudioPath();
1034
+ if ([primaryAudioPath isKindOfClass:[NSArray class]]) {
1035
+ id first = [(NSArray *)primaryAudioPath firstObject];
1036
+ if ([first isKindOfClass:[NSString class]]) {
1037
+ primaryAudioPath = (NSString *)first;
1038
+ } else {
1039
+ primaryAudioPath = nil;
1040
+ }
1041
+ }
1042
+ if (primaryAudioPath && [primaryAudioPath length] > 0) {
1043
+ BOOL preferInternal = NO;
1044
+ if (@available(macOS 15.0, *)) {
1045
+ preferInternal = (g_captureSystemAudioEnabled && g_captureMicrophoneEnabled);
1046
+ }
1047
+ NSString *externalMicPath = nil;
1048
+ if (currentStandaloneAudioRecordingPath) {
1049
+ externalMicPath = currentStandaloneAudioRecordingPath();
1050
+ }
1051
+ if (!externalMicPath || [externalMicPath length] == 0) {
1052
+ if (lastStandaloneAudioRecordingPath) {
1053
+ externalMicPath = lastStandaloneAudioRecordingPath();
1054
+ }
1055
+ }
1056
+ dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_BACKGROUND, 0), ^{
1057
+ NSString *audioForMux = primaryAudioPath;
1058
+ if (g_mixAudioEnabled) {
1059
+ BOOL mixed = NO;
1060
+ // Try gain-aware mix first
1061
+ mixed = MRMixAudioToSingleTrackWithGains(primaryAudioPath, externalMicPath, preferInternal, g_mixMicGain, g_mixSystemGain);
1062
+ if (!mixed) {
1063
+ mixed = MRMixAudioToSingleTrack(primaryAudioPath, externalMicPath, preferInternal);
1064
+ }
1065
+ if (mixed) {
1066
+ MRLog(@"๐ŸŽง Post-mix completed: %@", primaryAudioPath);
1067
+ } else {
1068
+ MRLog(@"โ„น๏ธ Post-mix skipped or failed; proceeding to mux");
1069
+ }
1070
+ }
1071
+ if (g_outputPath && [g_outputPath length] > 0) {
1072
+ BOOL muxed = MRMuxAudioIntoVideo(g_outputPath, audioForMux);
1073
+ if (muxed) {
1074
+ MRLog(@"๐Ÿ”— Muxed audio into video: %@", g_outputPath);
1075
+ } else {
1076
+ MRLog(@"โš ๏ธ Failed to mux audio into video %@", g_outputPath);
1077
+ }
1078
+ }
1079
+ });
1080
+ }
1081
+ }
1082
+
1083
+ SCKMarkSchedulingComplete();
1084
+ }
1085
+ }];
1086
+ }
1087
+
1088
+ + (BOOL)isRecording {
1089
+ return g_isRecording;
1090
+ }
1091
+
1092
+ + (BOOL)isFullyInitialized {
1093
+ return g_firstFrameReceived;
1094
+ }
1095
+
1096
+ + (NSTimeInterval)getVideoStartTimestamp {
1097
+ if (!CMTIME_IS_VALID(g_videoStartTime)) {
1098
+ return 0;
1099
+ }
1100
+ // Return as milliseconds since epoch - approximate using current time
1101
+ // and relative offset from video start
1102
+ NSDate *now = [NSDate date];
1103
+ NSTimeInterval currentTimestamp = [now timeIntervalSince1970] * 1000;
1104
+
1105
+ // Calculate time elapsed since video start
1106
+ CMTime currentCMTime = CMClockGetTime(CMClockGetHostTimeClock());
1107
+ CMTime elapsedCMTime = CMTimeSubtract(currentCMTime, g_videoStartTime);
1108
+ NSTimeInterval elapsedSeconds = CMTimeGetSeconds(elapsedCMTime);
1109
+
1110
+ // Video start timestamp = current timestamp - elapsed time
1111
+ NSTimeInterval videoStartTimestamp = currentTimestamp - (elapsedSeconds * 1000);
1112
+ return videoStartTimestamp;
1113
+ }
1114
+
1115
+ + (BOOL)isCleaningUp {
1116
+ return g_isCleaningUp;
1117
+ }
1118
+
1119
+ @end
1120
+
1121
+ // Export C function for checking cleanup state
1122
+ BOOL isScreenCaptureKitCleaningUp() API_AVAILABLE(macos(12.3)) {
1123
+ return [ScreenCaptureKitRecorder isCleaningUp];
1124
+ }
1125
+
1126
+ @implementation ScreenCaptureKitRecorder (Methods)
1127
+
1128
+ + (BOOL)setupVideoWriter {
1129
+ // No setup needed - SCRecordingOutput handles everything
1130
+ return YES;
1131
+ }
1132
+
1133
+ + (void)finalizeRecording {
1134
+ @synchronized([ScreenCaptureKitRecorder class]) {
1135
+ MRLog(@"๐ŸŽฌ Finalizing pure ScreenCaptureKit recording");
628
1136
 
629
- MRLog(@"โœ… Got %lu displays, %lu windows for pure recording",
630
- content.displays.count, content.windows.count);
1137
+ // Set cleanup flag now that we're actually cleaning up
1138
+ g_isCleaningUp = YES;
1139
+ g_isRecording = NO;
1140
+
1141
+ [ScreenCaptureKitRecorder cleanupVideoWriter];
1142
+ }
1143
+ }
1144
+
1145
+ + (void)finalizeVideoWriter {
1146
+ // Alias for finalizeRecording to maintain compatibility
1147
+ [ScreenCaptureKitRecorder finalizeRecording];
1148
+ }
1149
+
1150
+ + (void)cleanupVideoWriter {
1151
+ @synchronized([ScreenCaptureKitRecorder class]) {
1152
+ MRLog(@"๐Ÿงน Starting ScreenCaptureKit cleanup");
1153
+
1154
+ // Clean up in proper order to prevent crashes
1155
+ if (g_stream) {
1156
+ g_stream = nil;
1157
+ MRLog(@"โœ… Stream reference cleared");
1158
+ }
1159
+
1160
+ if (g_streamDelegate) {
1161
+ g_streamDelegate = nil;
1162
+ MRLog(@"โœ… Stream delegate reference cleared");
1163
+ }
1164
+
1165
+ g_videoStreamOutput = nil;
1166
+ g_audioStreamOutput = nil;
1167
+ g_videoQueue = nil;
1168
+ g_audioQueue = nil;
1169
+ if (g_pixelBufferAdaptorRef) {
1170
+ CFRelease(g_pixelBufferAdaptorRef);
1171
+ g_pixelBufferAdaptorRef = NULL;
1172
+ }
1173
+ g_audioOutputPath = nil;
1174
+ g_shouldCaptureAudio = NO;
1175
+ g_captureMicrophoneEnabled = NO;
1176
+ g_captureSystemAudioEnabled = NO;
1177
+
1178
+ g_isRecording = NO;
1179
+ g_isCleaningUp = NO; // Reset cleanup flag
1180
+ g_outputPath = nil;
1181
+
1182
+ // ELECTRON FIX: Reset frame tracking
1183
+ g_firstFrameReceived = NO;
1184
+ g_frameCountSinceStart = 0;
631
1185
 
632
- // CRITICAL DEBUG: List all available displays in ScreenCaptureKit
1186
+ MRLog(@"๐Ÿงน Pure ScreenCaptureKit cleanup complete");
1187
+ }
1188
+ }
1189
+
1190
+ @end
1191
+ static dispatch_queue_t ScreenCaptureControlQueue(void) {
1192
+ static dispatch_queue_t controlQueue = NULL;
1193
+ static dispatch_once_t onceToken;
1194
+ dispatch_once(&onceToken, ^{
1195
+ controlQueue = dispatch_queue_create("com.macrecorder.screencapture.control", DISPATCH_QUEUE_SERIAL);
1196
+ });
1197
+ return controlQueue;
1198
+ }
1199
+
1200
+ static void SCKMarkSchedulingComplete(void) {
1201
+ g_isScheduling = NO;
1202
+ }
1203
+
1204
+ static void SCKFailScheduling(void) {
1205
+ g_isScheduling = NO;
1206
+ g_isRecording = NO;
1207
+ }
1208
+
1209
+ static void SCKPerformRecordingSetup(NSDictionary *config, SCShareableContent *content) API_AVAILABLE(macos(12.3)) {
1210
+ @autoreleasepool {
1211
+ if (!config || !content) {
1212
+ SCKFailScheduling();
1213
+ return;
1214
+ }
1215
+
1216
+ // CRITICAL FIX: Reset frame tracking at START of new recording
1217
+ g_firstFrameReceived = NO;
1218
+ g_frameCountSinceStart = 0;
1219
+ NSLog(@"๐Ÿ”„ Frame tracking reset for new recording");
1220
+
1221
+ NSString *outputPath = config[@"outputPath"];
1222
+ if (!outputPath || [outputPath length] == 0) {
1223
+ NSLog(@"โŒ Invalid output path provided");
1224
+ SCKFailScheduling();
1225
+ return;
1226
+ }
1227
+ g_outputPath = outputPath;
1228
+
1229
+ NSNumber *displayId = config[@"displayId"];
1230
+ NSNumber *windowId = config[@"windowId"];
1231
+ NSDictionary *captureRect = config[@"captureRect"];
1232
+ NSNumber *captureCursor = config[@"captureCursor"];
1233
+ NSNumber *includeMicrophone = config[@"includeMicrophone"];
1234
+ NSNumber *includeSystemAudio = config[@"includeSystemAudio"];
1235
+ NSString *microphoneDeviceId = config[@"microphoneDeviceId"];
1236
+ NSString *audioOutputPath = MRNormalizePath(config[@"audioOutputPath"]);
1237
+ NSNumber *sessionTimestampNumber = config[@"sessionTimestamp"];
1238
+ NSNumber *frameRateNumber = config[@"frameRate"];
1239
+ NSNumber *mixAudioNumber = config[@"mixAudio"];
1240
+ g_mixAudioEnabled = mixAudioNumber ? [mixAudioNumber boolValue] : YES;
1241
+ NSNumber *mixMicGainNumber = config[@"mixMicGain"];
1242
+ NSNumber *mixSystemGainNumber = config[@"mixSystemGain"];
1243
+ if (mixMicGainNumber && [mixMicGainNumber respondsToSelector:@selector(floatValue)]) {
1244
+ g_mixMicGain = [mixMicGainNumber floatValue];
1245
+ if (g_mixMicGain < 0.f) g_mixMicGain = 0.f;
1246
+ if (g_mixMicGain > 2.f) g_mixMicGain = 2.f;
1247
+ }
1248
+ if (mixSystemGainNumber && [mixSystemGainNumber respondsToSelector:@selector(floatValue)]) {
1249
+ g_mixSystemGain = [mixSystemGainNumber floatValue];
1250
+ if (g_mixSystemGain < 0.f) g_mixSystemGain = 0.f;
1251
+ if (g_mixSystemGain > 2.f) g_mixSystemGain = 2.f;
1252
+ }
1253
+ NSNumber *captureCamera = config[@"captureCamera"];
1254
+
1255
+ if (frameRateNumber && [frameRateNumber respondsToSelector:@selector(intValue)]) {
1256
+ NSInteger fps = [frameRateNumber intValue];
1257
+ if (fps < 1) fps = 1;
1258
+ if (fps > 120) fps = 120;
1259
+ g_targetFPS = fps;
1260
+ } else {
1261
+ g_targetFPS = 60;
1262
+ }
1263
+
1264
+ // CRITICAL ELECTRON FIX: Lower FPS to 30 when recording with camera
1265
+ // This prevents resource conflicts and crashes when running both simultaneously
1266
+ BOOL isCameraEnabled = captureCamera && [captureCamera boolValue];
1267
+ if (isCameraEnabled && g_targetFPS > 30) {
1268
+ MRLog(@"๐Ÿ“น Camera recording detected - lowering ScreenCaptureKit FPS from %ld to 30 for stability", (long)g_targetFPS);
1269
+ g_targetFPS = 30;
1270
+ }
1271
+
1272
+ MRLog(@"๐ŸŽฌ Starting PURE ScreenCaptureKit recording (NO AVFoundation)");
1273
+ MRLog(@"๐Ÿ”ง Config: cursor=%@ mic=%@ system=%@ display=%@ window=%@ crop=%@",
1274
+ captureCursor, includeMicrophone, includeSystemAudio, displayId, windowId, captureRect);
1275
+ MRLog(@"๐Ÿ” AUDIO DEBUG: includeMicrophone type=%@ value=%d", [includeMicrophone class], [includeMicrophone boolValue]);
1276
+ MRLog(@"๐Ÿ” AUDIO DEBUG: includeSystemAudio type=%@ value=%d", [includeSystemAudio class], [includeSystemAudio boolValue]);
1277
+ MRLog(@"๐ŸŽš๏ธ Post-mix enabled: %@ (mic=%.2f, sys=%.2f)", g_mixAudioEnabled ? @"YES" : @"NO", g_mixMicGain, g_mixSystemGain);
1278
+
1279
+ MRLog(@"โœ… Got %lu displays, %lu windows for pure recording",
1280
+ content.displays.count, content.windows.count);
633
1281
  MRLog(@"๐Ÿ” ScreenCaptureKit available displays:");
634
1282
  for (SCDisplay *display in content.displays) {
635
- MRLog(@" Display ID=%u, Size=%dx%d, Frame=(%.0f,%.0f,%.0fx%.0f)",
1283
+ MRLog(@" Display ID=%u, Size=%dx%d, Frame=(%.0f,%.0f,%.0fx%.0f)",
636
1284
  display.displayID, (int)display.width, (int)display.height,
637
1285
  display.frame.origin.x, display.frame.origin.y,
638
1286
  display.frame.size.width, display.frame.size.height);
639
1287
  }
640
-
1288
+
641
1289
  SCContentFilter *filter = nil;
642
1290
  NSInteger recordingWidth = 0;
643
1291
  NSInteger recordingHeight = 0;
644
- SCDisplay *targetDisplay = nil; // Move to shared scope
645
-
646
- // WINDOW RECORDING
1292
+ SCDisplay *targetDisplay = nil;
1293
+
647
1294
  if (windowId && [windowId integerValue] != 0) {
648
1295
  SCRunningApplication *targetApp = nil;
649
1296
  SCWindow *targetWindow = nil;
650
-
1297
+
651
1298
  for (SCWindow *window in content.windows) {
652
1299
  if (window.windowID == [windowId unsignedIntValue]) {
653
1300
  targetWindow = window;
@@ -655,90 +1302,86 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
655
1302
  break;
656
1303
  }
657
1304
  }
658
-
1305
+
659
1306
  if (targetWindow && targetApp) {
660
- MRLog(@"๐ŸชŸ Recording window: %@ (%ux%u)",
1307
+ MRLog(@"๐ŸชŸ Recording window: %@ (%ux%u)",
661
1308
  targetWindow.title, (unsigned)targetWindow.frame.size.width, (unsigned)targetWindow.frame.size.height);
662
1309
  filter = [[SCContentFilter alloc] initWithDesktopIndependentWindow:targetWindow];
663
1310
  recordingWidth = (NSInteger)targetWindow.frame.size.width;
664
1311
  recordingHeight = (NSInteger)targetWindow.frame.size.height;
665
1312
  } else {
666
1313
  NSLog(@"โŒ Window ID %@ not found", windowId);
667
- // No need to set g_isRecording=NO since it was never set to YES
668
- return; // Early return from completion handler block
1314
+ SCKFailScheduling();
1315
+ return;
669
1316
  }
670
- }
671
- // DISPLAY RECORDING
672
- else {
673
-
674
- if (displayId && [displayId integerValue] != 0) {
675
- // Find specific display
676
- MRLog(@"๐ŸŽฏ Looking for display ID=%@ in ScreenCaptureKit list", displayId);
1317
+ } else {
1318
+ if (displayId) {
677
1319
  for (SCDisplay *display in content.displays) {
678
- MRLog(@" Checking display ID=%u vs requested=%u", display.displayID, [displayId unsignedIntValue]);
679
1320
  if (display.displayID == [displayId unsignedIntValue]) {
680
1321
  targetDisplay = display;
681
- MRLog(@"โœ… FOUND matching display ID=%u", display.displayID);
682
1322
  break;
683
1323
  }
684
1324
  }
685
-
686
- if (!targetDisplay) {
687
- NSLog(@"โŒ Display ID=%@ NOT FOUND in ScreenCaptureKit - using first display as fallback", displayId);
688
- targetDisplay = content.displays.firstObject;
1325
+
1326
+ if (!targetDisplay && content.displays.count > 0) {
1327
+ NSUInteger count = content.displays.count;
1328
+ NSUInteger idx0 = (NSUInteger)[displayId unsignedIntValue];
1329
+ if (idx0 < count) {
1330
+ targetDisplay = content.displays[idx0];
1331
+ } else if ([displayId unsignedIntegerValue] > 0) {
1332
+ NSUInteger idx1 = [displayId unsignedIntegerValue] - 1;
1333
+ if (idx1 < count) {
1334
+ targetDisplay = content.displays[idx1];
1335
+ }
1336
+ }
689
1337
  }
690
- } else {
691
- // Use first display
1338
+ }
1339
+
1340
+ if (!targetDisplay && content.displays.count > 0) {
692
1341
  targetDisplay = content.displays.firstObject;
693
1342
  }
694
-
695
- if (!targetDisplay) {
696
- NSLog(@"โŒ Display not found");
697
- // No need to set g_isRecording=NO since it was never set to YES
698
- return; // Early return from completion handler block
1343
+
1344
+ if (targetDisplay) {
1345
+ MRLog(@"๐Ÿ–ฅ๏ธ Recording display %u (%dx%d)",
1346
+ targetDisplay.displayID, (int)targetDisplay.width, (int)targetDisplay.height);
1347
+ filter = [[SCContentFilter alloc] initWithDisplay:targetDisplay excludingWindows:@[]];
1348
+ recordingWidth = targetDisplay.width;
1349
+ recordingHeight = targetDisplay.height;
1350
+ } else {
1351
+ NSLog(@"โŒ No display available");
1352
+ SCKFailScheduling();
1353
+ return;
699
1354
  }
700
-
701
- MRLog(@"๐Ÿ–ฅ๏ธ Recording display %u (%dx%d)",
702
- targetDisplay.displayID, (int)targetDisplay.width, (int)targetDisplay.height);
703
- filter = [[SCContentFilter alloc] initWithDisplay:targetDisplay excludingWindows:@[]];
704
- recordingWidth = targetDisplay.width;
705
- recordingHeight = targetDisplay.height;
706
1355
  }
707
-
708
- // CROP AREA SUPPORT - Adjust dimensions and source rect
1356
+
709
1357
  if (captureRect && captureRect[@"width"] && captureRect[@"height"]) {
710
1358
  CGFloat cropWidth = [captureRect[@"width"] doubleValue];
711
1359
  CGFloat cropHeight = [captureRect[@"height"] doubleValue];
712
-
713
1360
  if (cropWidth > 0 && cropHeight > 0) {
714
- MRLog(@"๐Ÿ”ฒ Crop area specified: %.0fx%.0f at (%.0f,%.0f)",
715
- cropWidth, cropHeight,
1361
+ MRLog(@"๐Ÿ”ฒ Crop area specified: %.0fx%.0f at (%.0f,%.0f)",
1362
+ cropWidth, cropHeight,
716
1363
  [captureRect[@"x"] doubleValue], [captureRect[@"y"] doubleValue]);
717
1364
  recordingWidth = (NSInteger)cropWidth;
718
1365
  recordingHeight = (NSInteger)cropHeight;
719
1366
  }
720
1367
  }
721
-
722
- // Configure stream with HIGH QUALITY settings
1368
+
723
1369
  SCStreamConfiguration *streamConfig = [[SCStreamConfiguration alloc] init];
724
1370
  streamConfig.width = recordingWidth;
725
1371
  streamConfig.height = recordingHeight;
726
1372
  streamConfig.minimumFrameInterval = CMTimeMake(1, (int)MAX(1, g_targetFPS));
727
1373
  streamConfig.pixelFormat = kCVPixelFormatType_32BGRA;
728
1374
  streamConfig.scalesToFit = NO;
729
-
730
- // QUALITY FIX: Set high quality encoding parameters
731
1375
  if (@available(macOS 13.0, *)) {
732
- streamConfig.queueDepth = 8; // Larger queue for smoother capture
1376
+ streamConfig.queueDepth = 8;
733
1377
  }
734
1378
 
735
- MRLog(@"๐ŸŽฌ ScreenCaptureKit config: %ldx%ld @ %ldfps", (long)recordingWidth, (long)recordingHeight, (long)g_targetFPS);
736
-
737
1379
  BOOL shouldCaptureMic = includeMicrophone ? [includeMicrophone boolValue] : NO;
738
1380
  BOOL shouldCaptureSystemAudio = includeSystemAudio ? [includeSystemAudio boolValue] : NO;
739
- g_shouldCaptureAudio = shouldCaptureMic || shouldCaptureSystemAudio;
1381
+ g_shouldCaptureAudio = shouldCaptureSystemAudio || shouldCaptureMic;
1382
+ g_captureMicrophoneEnabled = shouldCaptureMic;
1383
+ g_captureSystemAudioEnabled = shouldCaptureSystemAudio;
740
1384
 
741
- // SAFETY: Ensure audioOutputPath is NSString, not NSURL or other type
742
1385
  if (audioOutputPath && ![audioOutputPath isKindOfClass:[NSString class]]) {
743
1386
  MRLog(@"โš ๏ธ audioOutputPath type mismatch: %@, converting...", NSStringFromClass([audioOutputPath class]));
744
1387
  g_audioOutputPath = nil;
@@ -749,84 +1392,82 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
749
1392
  if (g_shouldCaptureAudio && (!g_audioOutputPath || [g_audioOutputPath length] == 0)) {
750
1393
  NSLog(@"โš ๏ธ Audio capture requested but no audio output path supplied โ€“ audio will be disabled");
751
1394
  g_shouldCaptureAudio = NO;
1395
+ g_captureMicrophoneEnabled = NO;
1396
+ g_captureSystemAudioEnabled = NO;
752
1397
  }
753
-
1398
+
754
1399
  if (@available(macos 13.0, *)) {
755
- // capturesAudio enables audio capture (both mic and system audio)
756
1400
  streamConfig.capturesAudio = g_shouldCaptureAudio;
757
1401
  streamConfig.sampleRate = g_configuredSampleRate ?: 48000;
758
1402
  streamConfig.channelCount = g_configuredChannelCount ?: 2;
759
-
760
- // excludesCurrentProcessAudio = YES means ONLY microphone
761
- // excludesCurrentProcessAudio = NO means system audio + mic
762
1403
  streamConfig.excludesCurrentProcessAudio = !shouldCaptureSystemAudio;
763
-
764
- MRLog(@"๐ŸŽค Audio config (macOS 13+): capturesAudio=%d, excludeProcess=%d (mic=%d sys=%d)",
1404
+ NSLog(@"๐ŸŽค Audio config (macOS 13+): capturesAudio=%d, excludeProcess=%d (mic=%d sys=%d)",
765
1405
  g_shouldCaptureAudio, streamConfig.excludesCurrentProcessAudio,
766
1406
  shouldCaptureMic, shouldCaptureSystemAudio);
767
1407
  }
768
1408
 
769
1409
  if (@available(macos 15.0, *)) {
770
- // macOS 15+ has explicit microphone control
771
1410
  streamConfig.captureMicrophone = shouldCaptureMic;
772
- if (microphoneDeviceId && microphoneDeviceId.length > 0) {
773
- streamConfig.microphoneCaptureDeviceID = microphoneDeviceId;
1411
+ NSString *micIdToUse = microphoneDeviceId;
1412
+ if (micIdToUse && micIdToUse.length > 0) {
1413
+ // Validate UniqueID; if invalid, fall back to default to avoid silencing mic
1414
+ AVCaptureDevice *dev = [AVCaptureDevice deviceWithUniqueID:micIdToUse];
1415
+ if (!dev) {
1416
+ NSLog(@"โš ๏ธ Invalid microphone deviceID '%@' โ€“ falling back to default", micIdToUse);
1417
+ micIdToUse = nil;
1418
+ }
1419
+ }
1420
+ if (micIdToUse && micIdToUse.length > 0) {
1421
+ streamConfig.microphoneCaptureDeviceID = micIdToUse;
774
1422
  }
775
- MRLog(@"๐ŸŽค Microphone (macOS 15+): enabled=%d, deviceID=%@",
776
- shouldCaptureMic, microphoneDeviceId ?: @"default");
1423
+ NSLog(@"๐ŸŽค Microphone (macOS 15+): enabled=%d, deviceID=%@",
1424
+ shouldCaptureMic, micIdToUse ?: @"default");
777
1425
  }
778
-
779
- // Apply crop area using sourceRect - CONVERT GLOBAL TO DISPLAY-RELATIVE COORDINATES
780
- if (captureRect && captureRect[@"x"] && captureRect[@"y"] && captureRect[@"width"] && captureRect[@"height"]) {
781
- CGFloat globalX = [captureRect[@"x"] doubleValue];
782
- CGFloat globalY = [captureRect[@"y"] doubleValue];
1426
+
1427
+ if (captureRect && captureRect[@"x"] && captureRect[@"y"] && captureRect[@"width"] && captureRect[@"height"] && targetDisplay) {
1428
+ // CRITICAL FIX: captureRect comes from index.js as ALREADY display-relative coordinates
1429
+ // (see index.js:371-376 where global coords are converted to display-relative)
1430
+ // So we should NOT subtract display origin again - just use them directly!
1431
+ CGFloat displayRelativeX = [captureRect[@"x"] doubleValue];
1432
+ CGFloat displayRelativeY = [captureRect[@"y"] doubleValue];
783
1433
  CGFloat cropWidth = [captureRect[@"width"] doubleValue];
784
1434
  CGFloat cropHeight = [captureRect[@"height"] doubleValue];
785
-
786
- if (cropWidth > 0 && cropHeight > 0 && targetDisplay) {
787
- // Convert global coordinates to display-relative coordinates
788
- CGRect displayBounds = targetDisplay.frame;
789
- CGFloat displayRelativeX = globalX - displayBounds.origin.x;
790
- CGFloat displayRelativeY = globalY - displayBounds.origin.y;
791
-
792
- MRLog(@"๐ŸŒ Global coords: (%.0f,%.0f) on Display ID=%u", globalX, globalY, targetDisplay.displayID);
793
- MRLog(@"๐Ÿ–ฅ๏ธ Display bounds: (%.0f,%.0f,%.0fx%.0f)",
794
- displayBounds.origin.x, displayBounds.origin.y,
1435
+ CGRect displayBounds = targetDisplay.frame;
1436
+
1437
+ MRLog(@"๐Ÿ” CROP DEBUG: Input coords=(%.0f,%.0f) size=(%.0fx%.0f)",
1438
+ displayRelativeX, displayRelativeY, cropWidth, cropHeight);
1439
+ MRLog(@"๐Ÿ” CROP DEBUG: Display bounds origin=(%.0f,%.0f) size=(%.0fx%.0f)",
1440
+ displayBounds.origin.x, displayBounds.origin.y,
1441
+ displayBounds.size.width, displayBounds.size.height);
1442
+
1443
+ if (displayRelativeX >= 0 && displayRelativeY >= 0 &&
1444
+ displayRelativeX + cropWidth <= displayBounds.size.width &&
1445
+ displayRelativeY + cropHeight <= displayBounds.size.height) {
1446
+ CGRect sourceRect = CGRectMake(displayRelativeX, displayRelativeY, cropWidth, cropHeight);
1447
+ streamConfig.sourceRect = sourceRect;
1448
+ MRLog(@"โœ‚๏ธ Crop sourceRect applied: (%.0f,%.0f) %.0fx%.0f (display-relative)",
1449
+ displayRelativeX, displayRelativeY, cropWidth, cropHeight);
1450
+ } else {
1451
+ NSLog(@"โŒ Crop coordinates out of display bounds - skipping crop");
1452
+ MRLog(@" Coords: (%.0f,%.0f) size:(%.0fx%.0f) vs display:(%.0fx%.0f)",
1453
+ displayRelativeX, displayRelativeY, cropWidth, cropHeight,
795
1454
  displayBounds.size.width, displayBounds.size.height);
796
- MRLog(@"๐Ÿ“ Display-relative: (%.0f,%.0f) -> SourceRect", displayRelativeX, displayRelativeY);
797
-
798
- // Validate coordinates are within display bounds
799
- if (displayRelativeX >= 0 && displayRelativeY >= 0 &&
800
- displayRelativeX + cropWidth <= displayBounds.size.width &&
801
- displayRelativeY + cropHeight <= displayBounds.size.height) {
802
-
803
- CGRect sourceRect = CGRectMake(displayRelativeX, displayRelativeY, cropWidth, cropHeight);
804
- streamConfig.sourceRect = sourceRect;
805
- MRLog(@"โœ‚๏ธ Crop sourceRect applied: (%.0f,%.0f) %.0fx%.0f (display-relative)",
806
- displayRelativeX, displayRelativeY, cropWidth, cropHeight);
807
- } else {
808
- NSLog(@"โŒ Crop coordinates out of display bounds - skipping crop");
809
- MRLog(@" Relative: (%.0f,%.0f) size:(%.0fx%.0f) vs display:(%.0fx%.0f)",
810
- displayRelativeX, displayRelativeY, cropWidth, cropHeight,
811
- displayBounds.size.width, displayBounds.size.height);
812
- }
813
1455
  }
814
1456
  }
815
-
816
- // CURSOR SUPPORT
1457
+
817
1458
  BOOL shouldShowCursor = captureCursor ? [captureCursor boolValue] : YES;
818
1459
  streamConfig.showsCursor = shouldShowCursor;
819
-
820
- MRLog(@"๐ŸŽฅ Pure ScreenCapture config: %ldx%ld @ %ldfps, cursor=%d",
1460
+ MRLog(@"๐ŸŽฅ Pure ScreenCapture config: %ldx%ld @ %ldfps, cursor=%d",
821
1461
  recordingWidth, recordingHeight, (long)g_targetFPS, shouldShowCursor);
822
-
1462
+
823
1463
  NSError *writerError = nil;
824
1464
  if (![ScreenCaptureKitRecorder prepareVideoWriterWithWidth:recordingWidth height:recordingHeight error:&writerError]) {
825
1465
  NSLog(@"โŒ Failed to prepare video writer: %@", writerError);
826
- // No need to set g_isRecording=NO since it was never set to YES
827
- return; // Early return from completion handler block
1466
+ SCKFailScheduling();
1467
+ CleanupWriters();
1468
+ return;
828
1469
  }
829
-
1470
+
830
1471
  g_videoQueue = dispatch_queue_create("screen_capture_video_queue", DISPATCH_QUEUE_SERIAL);
831
1472
  g_audioQueue = dispatch_queue_create("screen_capture_audio_queue", DISPATCH_QUEUE_SERIAL);
832
1473
  g_videoStreamOutput = [[ScreenCaptureVideoOutput alloc] init];
@@ -835,20 +1476,16 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
835
1476
  } else {
836
1477
  g_audioStreamOutput = nil;
837
1478
  }
838
-
839
- // Create stream outputs and delegate
1479
+
840
1480
  g_streamDelegate = [[PureScreenCaptureDelegate alloc] init];
841
1481
  g_stream = [[SCStream alloc] initWithFilter:filter configuration:streamConfig delegate:g_streamDelegate];
842
-
843
- // Check if stream was created successfully
844
1482
  if (!g_stream) {
845
1483
  NSLog(@"โŒ Failed to create pure stream");
846
1484
  CleanupWriters();
847
- return; // Early return from completion handler block
1485
+ SCKFailScheduling();
1486
+ return;
848
1487
  }
849
1488
 
850
- MRLog(@"โœ… Stream created successfully");
851
-
852
1489
  NSError *outputError = nil;
853
1490
  BOOL videoOutputAdded = [g_stream addStreamOutput:g_videoStreamOutput type:SCStreamOutputTypeScreen sampleHandlerQueue:g_videoQueue error:&outputError];
854
1491
  if (!videoOutputAdded || outputError) {
@@ -857,24 +1494,81 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
857
1494
  @synchronized([ScreenCaptureKitRecorder class]) {
858
1495
  g_stream = nil;
859
1496
  }
860
- return; // Early return from completion handler block
1497
+ SCKFailScheduling();
1498
+ return;
861
1499
  }
862
1500
 
863
1501
  if (g_shouldCaptureAudio) {
864
1502
  if (@available(macOS 13.0, *)) {
865
1503
  NSError *audioError = nil;
866
- BOOL audioOutputAdded = [g_stream addStreamOutput:g_audioStreamOutput type:SCStreamOutputTypeAudio sampleHandlerQueue:g_audioQueue error:&audioError];
867
- if (!audioOutputAdded || audioError) {
868
- NSLog(@"โŒ Failed to add audio output: %@", audioError);
869
- CleanupWriters();
870
- @synchronized([ScreenCaptureKitRecorder class]) {
871
- g_stream = nil;
1504
+ BOOL anyAudioAdded = NO;
1505
+ if (@available(macOS 15.0, *)) {
1506
+ // On macOS 15+, microphone has its own output type
1507
+ if (g_captureMicrophoneEnabled) {
1508
+ NSLog(@"โž• Adding microphone output stream...");
1509
+ audioError = nil;
1510
+ BOOL micAdded = [g_stream addStreamOutput:g_audioStreamOutput
1511
+ type:SCStreamOutputTypeMicrophone
1512
+ sampleHandlerQueue:g_audioQueue
1513
+ error:&audioError];
1514
+ if (!micAdded || audioError) {
1515
+ NSLog(@"โŒ Failed to add microphone output: %@", audioError);
1516
+ CleanupWriters();
1517
+ @synchronized([ScreenCaptureKitRecorder class]) { g_stream = nil; }
1518
+ SCKFailScheduling();
1519
+ return;
1520
+ }
1521
+ anyAudioAdded = YES;
1522
+ NSLog(@"โœ… Microphone output added successfully");
872
1523
  }
873
- return; // Early return from completion handler block
1524
+ if (g_captureSystemAudioEnabled) {
1525
+ NSLog(@"โž• Adding system audio output stream...");
1526
+ audioError = nil;
1527
+ BOOL sysAdded = [g_stream addStreamOutput:g_audioStreamOutput
1528
+ type:SCStreamOutputTypeAudio
1529
+ sampleHandlerQueue:g_audioQueue
1530
+ error:&audioError];
1531
+ if (!sysAdded || audioError) {
1532
+ NSLog(@"โŒ Failed to add system audio output: %@", audioError);
1533
+ CleanupWriters();
1534
+ @synchronized([ScreenCaptureKitRecorder class]) { g_stream = nil; }
1535
+ SCKFailScheduling();
1536
+ return;
1537
+ }
1538
+ anyAudioAdded = YES;
1539
+ NSLog(@"โœ… System audio output added successfully");
1540
+ }
1541
+ } else {
1542
+ // macOS 13/14: only SCStreamOutputTypeAudio exists
1543
+ NSLog(@"โž• Adding audio output stream (macOS 13/14)...");
1544
+ audioError = nil;
1545
+ BOOL audAdded = [g_stream addStreamOutput:g_audioStreamOutput
1546
+ type:SCStreamOutputTypeAudio
1547
+ sampleHandlerQueue:g_audioQueue
1548
+ error:&audioError];
1549
+ if (!audAdded || audioError) {
1550
+ NSLog(@"โŒ Failed to add audio output: %@", audioError);
1551
+ CleanupWriters();
1552
+ @synchronized([ScreenCaptureKitRecorder class]) { g_stream = nil; }
1553
+ SCKFailScheduling();
1554
+ return;
1555
+ }
1556
+ anyAudioAdded = YES;
1557
+ NSLog(@"โœ… Audio output added successfully");
1558
+ }
1559
+
1560
+ if (!anyAudioAdded) {
1561
+ NSLog(@"โŒ No audio outputs added (unexpected configuration)");
1562
+ CleanupWriters();
1563
+ @synchronized([ScreenCaptureKitRecorder class]) { g_stream = nil; }
1564
+ SCKFailScheduling();
1565
+ return;
874
1566
  }
875
1567
  } else {
876
1568
  NSLog(@"โš ๏ธ Audio capture requested but requires macOS 13.0+");
877
1569
  g_shouldCaptureAudio = NO;
1570
+ g_captureMicrophoneEnabled = NO;
1571
+ g_captureSystemAudioEnabled = NO;
878
1572
  }
879
1573
  }
880
1574
 
@@ -883,141 +1577,28 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
883
1577
  MRLog(@"๐Ÿ•’ Session timestamp: %@", sessionTimestampNumber);
884
1578
  }
885
1579
 
886
- // Start capture - can be async
1580
+ NSLog(@"๐Ÿš€ CALLING startCaptureWithCompletionHandler (async)...");
887
1581
  [g_stream startCaptureWithCompletionHandler:^(NSError *startError) {
888
- if (startError) {
889
- NSLog(@"โŒ Failed to start pure capture: %@", startError);
890
- CleanupWriters();
891
- @synchronized([ScreenCaptureKitRecorder class]) {
892
- g_isRecording = NO;
893
- g_stream = nil;
894
- }
895
- } else {
896
- MRLog(@"๐ŸŽ‰ PURE ScreenCaptureKit recording started successfully!");
897
- // NOW set recording flag - stream is actually running
898
- @synchronized([ScreenCaptureKitRecorder class]) {
899
- g_isRecording = YES;
1582
+ dispatch_async(ScreenCaptureControlQueue(), ^{
1583
+ if (startError) {
1584
+ NSLog(@"โŒ Failed to start pure capture: %@", startError);
1585
+ NSLog(@"โŒ Error domain: %@, code: %ld", startError.domain, (long)startError.code);
1586
+ NSLog(@"โŒ Error userInfo: %@", startError.userInfo);
1587
+ CleanupWriters();
1588
+ @synchronized([ScreenCaptureKitRecorder class]) {
1589
+ g_isRecording = NO;
1590
+ g_stream = nil;
1591
+ }
1592
+ SCKFailScheduling();
1593
+ } else {
1594
+ NSLog(@"๐ŸŽ‰ PURE ScreenCaptureKit recording started successfully!");
1595
+ NSLog(@"๐ŸŽค Audio capture enabled: %d (mic=%d, system=%d)", g_shouldCaptureAudio, g_captureMicrophoneEnabled, g_captureSystemAudioEnabled);
1596
+ @synchronized([ScreenCaptureKitRecorder class]) {
1597
+ g_isRecording = YES;
1598
+ }
1599
+ SCKMarkSchedulingComplete();
900
1600
  }
901
- }
902
- }]; // End of startCaptureWithCompletionHandler
903
- } // End of autoreleasepool
904
- }]; // End of getShareableContentWithCompletionHandler
905
- }); // End of dispatch_async
906
-
907
- // Return immediately - async completion will handle success/failure
908
- return YES;
909
- }
910
-
911
- + (void)stopRecording {
912
- if (!g_isRecording || !g_stream || g_isCleaningUp) {
913
- NSLog(@"โš ๏ธ Cannot stop: recording=%d stream=%@ cleaning=%d", g_isRecording, g_stream, g_isCleaningUp);
914
- return;
915
- }
916
-
917
- MRLog(@"๐Ÿ›‘ Stopping pure ScreenCaptureKit recording");
918
-
919
- // CRITICAL FIX: Set cleanup flag IMMEDIATELY to prevent race conditions
920
- // This prevents startRecording from being called while stop is in progress
921
- @synchronized([ScreenCaptureKitRecorder class]) {
922
- g_isCleaningUp = YES;
1601
+ });
1602
+ }];
923
1603
  }
924
-
925
- // Store stream reference to prevent it from being deallocated
926
- SCStream *streamToStop = g_stream;
927
-
928
- // ELECTRON FIX: Stop FULLY ASYNCHRONOUSLY - NO blocking, NO semaphores
929
- [streamToStop stopCaptureWithCompletionHandler:^(NSError *stopError) {
930
- @autoreleasepool {
931
- if (stopError) {
932
- NSLog(@"โŒ Stop error: %@", stopError);
933
- } else {
934
- MRLog(@"โœ… Pure stream stopped");
935
- }
936
-
937
- // Reset recording state to allow new recordings
938
- @synchronized([ScreenCaptureKitRecorder class]) {
939
- g_isRecording = NO;
940
- g_isCleaningUp = NO; // CRITICAL: Reset cleanup flag when done
941
- }
942
-
943
- // Cleanup after stop completes
944
- CleanupWriters();
945
- [ScreenCaptureKitRecorder cleanupVideoWriter];
946
- }
947
- }];
948
- }
949
-
950
- + (BOOL)isRecording {
951
- return g_isRecording;
952
1604
  }
953
-
954
- + (BOOL)isCleaningUp {
955
- return g_isCleaningUp;
956
- }
957
-
958
- @end
959
-
960
- // Export C function for checking cleanup state
961
- BOOL isScreenCaptureKitCleaningUp() API_AVAILABLE(macos(12.3)) {
962
- return [ScreenCaptureKitRecorder isCleaningUp];
963
- }
964
-
965
- @implementation ScreenCaptureKitRecorder (Methods)
966
-
967
- + (BOOL)setupVideoWriter {
968
- // No setup needed - SCRecordingOutput handles everything
969
- return YES;
970
- }
971
-
972
- + (void)finalizeRecording {
973
- @synchronized([ScreenCaptureKitRecorder class]) {
974
- MRLog(@"๐ŸŽฌ Finalizing pure ScreenCaptureKit recording");
975
-
976
- // Set cleanup flag now that we're actually cleaning up
977
- g_isCleaningUp = YES;
978
- g_isRecording = NO;
979
-
980
- [ScreenCaptureKitRecorder cleanupVideoWriter];
981
- }
982
- }
983
-
984
- + (void)finalizeVideoWriter {
985
- // Alias for finalizeRecording to maintain compatibility
986
- [ScreenCaptureKitRecorder finalizeRecording];
987
- }
988
-
989
- + (void)cleanupVideoWriter {
990
- @synchronized([ScreenCaptureKitRecorder class]) {
991
- MRLog(@"๐Ÿงน Starting ScreenCaptureKit cleanup");
992
-
993
- // Clean up in proper order to prevent crashes
994
- if (g_stream) {
995
- g_stream = nil;
996
- MRLog(@"โœ… Stream reference cleared");
997
- }
998
-
999
- if (g_streamDelegate) {
1000
- g_streamDelegate = nil;
1001
- MRLog(@"โœ… Stream delegate reference cleared");
1002
- }
1003
-
1004
- g_videoStreamOutput = nil;
1005
- g_audioStreamOutput = nil;
1006
- g_videoQueue = nil;
1007
- g_audioQueue = nil;
1008
- if (g_pixelBufferAdaptorRef) {
1009
- CFRelease(g_pixelBufferAdaptorRef);
1010
- g_pixelBufferAdaptorRef = NULL;
1011
- }
1012
- g_audioOutputPath = nil;
1013
- g_shouldCaptureAudio = NO;
1014
-
1015
- g_isRecording = NO;
1016
- g_isCleaningUp = NO; // Reset cleanup flag
1017
- g_outputPath = nil;
1018
-
1019
- MRLog(@"๐Ÿงน Pure ScreenCaptureKit cleanup complete");
1020
- }
1021
- }
1022
-
1023
- @end