node-mac-recorder 2.20.16 → 2.21.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,14 +1,76 @@
1
1
  #import "screen_capture_kit.h"
2
2
  #import "logging.h"
3
+ #import <AVFoundation/AVFoundation.h>
4
+ #import <CoreVideo/CoreVideo.h>
5
+ #import <CoreMedia/CoreMedia.h>
6
+ #import <AudioToolbox/AudioToolbox.h>
3
7
 
4
8
  // Pure ScreenCaptureKit implementation - NO AVFoundation
5
9
  static SCStream * API_AVAILABLE(macos(12.3)) g_stream = nil;
6
- static SCRecordingOutput * API_AVAILABLE(macos(15.0)) g_recordingOutput = nil;
7
10
  static id<SCStreamDelegate> API_AVAILABLE(macos(12.3)) g_streamDelegate = nil;
8
11
  static BOOL g_isRecording = NO;
9
12
  static BOOL g_isCleaningUp = NO; // Prevent recursive cleanup
10
13
  static NSString *g_outputPath = nil;
11
14
 
15
+ static dispatch_queue_t g_videoQueue = nil;
16
+ static dispatch_queue_t g_audioQueue = nil;
17
+ static id g_videoStreamOutput = nil;
18
+ static id g_audioStreamOutput = nil;
19
+
20
+ static AVAssetWriter *g_videoWriter = nil;
21
+ static AVAssetWriterInput *g_videoInput = nil;
22
+ static AVAssetWriterInputPixelBufferAdaptor *g_pixelBufferAdaptor = nil;
23
+ static CMTime g_videoStartTime = kCMTimeInvalid;
24
+ static BOOL g_videoWriterStarted = NO;
25
+
26
+ static BOOL g_shouldCaptureAudio = NO;
27
+ static NSString *g_audioOutputPath = nil;
28
+ static AVAssetWriter *g_audioWriter = nil;
29
+ static AVAssetWriterInput *g_audioInput = nil;
30
+ static CMTime g_audioStartTime = kCMTimeInvalid;
31
+ static BOOL g_audioWriterStarted = NO;
32
+
33
+ static NSInteger g_configuredSampleRate = 48000;
34
+ static NSInteger g_configuredChannelCount = 2;
35
+
36
+ static void CleanupWriters(void);
37
+
38
// Synchronously finalizes an AVAssetWriter, waiting up to 5 seconds for the
// container to be written out.
//
// FIX: AVAssetWriter raises an NSInternalInconsistencyException if
// -finishWritingWithCompletionHandler: (or -markAsFinished on its input) is
// called while the writer is still in AVAssetWriterStatusUnknown. CleanupWriters
// runs on error paths before -startWriting has ever been called, so guard on
// the writer's status and only finalize an active session.
static void FinishWriter(AVAssetWriter *writer, AVAssetWriterInput *input) {
    if (!writer) {
        return;
    }

    // Nothing to finalize unless a writing session was actually started.
    if (writer.status != AVAssetWriterStatusWriting) {
        return;
    }

    if (input) {
        [input markAsFinished];
    }

    // Block the caller until finalization completes, with a bounded wait so a
    // stalled writer cannot hang teardown forever.
    dispatch_semaphore_t semaphore = dispatch_semaphore_create(0);
    [writer finishWritingWithCompletionHandler:^{
        dispatch_semaphore_signal(semaphore);
    }];
    dispatch_time_t timeout = dispatch_time(DISPATCH_TIME_NOW, (int64_t)(5 * NSEC_PER_SEC));
    dispatch_semaphore_wait(semaphore, timeout);
}
54
+
55
// Finalizes and releases both asset-writer pipelines (video and audio),
// clearing every associated global so a subsequent recording starts clean.
static void CleanupWriters(void) {
    if (g_videoWriter != nil) {
        FinishWriter(g_videoWriter, g_videoInput);
        // Drop the pipeline objects and reset session bookkeeping.
        g_videoWriter = nil;
        g_videoInput = nil;
        g_pixelBufferAdaptor = nil;
        g_videoWriterStarted = NO;
        g_videoStartTime = kCMTimeInvalid;
    }

    if (g_audioWriter != nil) {
        FinishWriter(g_audioWriter, g_audioInput);
        g_audioWriter = nil;
        g_audioInput = nil;
        g_audioWriterStarted = NO;
        g_audioStartTime = kCMTimeInvalid;
    }
}
73
+
12
74
  @interface PureScreenCaptureDelegate : NSObject <SCStreamDelegate>
13
75
  @end
14
76
 
@@ -39,8 +101,228 @@ static NSString *g_outputPath = nil;
39
101
  }
40
102
  @end
41
103
 
104
+ @interface ScreenCaptureKitRecorder (Private)
105
+ + (BOOL)prepareAudioWriterIfNeededWithSampleBuffer:(CMSampleBufferRef)sampleBuffer;
106
+ @end
107
+
108
@interface ScreenCaptureVideoOutput : NSObject <SCStreamOutput>
@end

@implementation ScreenCaptureVideoOutput

// SCStreamOutput callback: receives screen sample buffers and feeds them into
// the AVAssetWriter pipeline via the pixel buffer adaptor. The writer session
// is started lazily on the first usable frame so its clock is anchored to the
// stream's own timestamps.
- (void)stream:(SCStream *)stream didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer ofType:(SCStreamOutputType)type API_AVAILABLE(macos(12.3)) {
    BOOL isScreenSample = (type == SCStreamOutputTypeScreen);
    if (!g_isRecording || !isScreenSample) {
        return;
    }

    if (!CMSampleBufferDataIsReady(sampleBuffer)) {
        return; // Frame payload not complete yet; skip it.
    }

    if (g_videoWriter == nil || g_videoInput == nil) {
        return; // Writer pipeline has not been prepared.
    }

    CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);

    if (!g_videoWriterStarted) {
        if (![g_videoWriter startWriting]) {
            NSLog(@"❌ ScreenCaptureKit video writer failed to start: %@", g_videoWriter.error);
            return;
        }
        [g_videoWriter startSessionAtSourceTime:pts];
        g_videoStartTime = pts;
        g_videoWriterStarted = YES;
        MRLog(@"🎞️ Video writer session started @ %.3f", CMTimeGetSeconds(pts));
    }

    if (!g_videoInput.readyForMoreMediaData) {
        return; // Encoder is backed up; drop this frame.
    }

    CVPixelBufferRef frame = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (frame == NULL) {
        return; // Buffer carries no image payload (metadata-only).
    }

    if (g_pixelBufferAdaptor == nil) {
        NSLog(@"❌ Pixel buffer adaptor is nil – cannot append video frames");
        return;
    }

    BOOL appendedOK = [g_pixelBufferAdaptor appendPixelBuffer:frame withPresentationTime:pts];
    if (!appendedOK) {
        NSLog(@"⚠️ Failed appending pixel buffer: %@", g_videoWriter.error);
    }
}

@end
158
+
159
@interface ScreenCaptureAudioOutput : NSObject <SCStreamOutput>
@end

@implementation ScreenCaptureAudioOutput

// SCStreamOutput callback: receives audio sample buffers, lazily prepares and
// starts the audio asset writer, then appends each buffer to its input.
- (void)stream:(SCStream *)stream didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer ofType:(SCStreamOutputType)type API_AVAILABLE(macos(12.3)) {
    if (!g_isRecording || !g_shouldCaptureAudio) {
        return;
    }

    // SCStreamOutputTypeAudio only exists on macOS 13+; earlier systems never
    // deliver audio buffers, so bail out entirely there.
    if (@available(macOS 13.0, *)) {
        if (type != SCStreamOutputTypeAudio) {
            return;
        }
    } else {
        return;
    }

    if (!CMSampleBufferDataIsReady(sampleBuffer)) {
        return;
    }

    // Creates the audio writer from this buffer's format on first use.
    if (![ScreenCaptureKitRecorder prepareAudioWriterIfNeededWithSampleBuffer:sampleBuffer]) {
        return;
    }

    if (g_audioWriter == nil || g_audioInput == nil) {
        return;
    }

    CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);

    if (!g_audioWriterStarted) {
        if (![g_audioWriter startWriting]) {
            NSLog(@"❌ Audio writer failed to start: %@", g_audioWriter.error);
            return;
        }
        [g_audioWriter startSessionAtSourceTime:pts];
        g_audioStartTime = pts;
        g_audioWriterStarted = YES;
        MRLog(@"🔊 Audio writer session started @ %.3f", CMTimeGetSeconds(pts));
    }

    if (!g_audioInput.readyForMoreMediaData) {
        return; // Encoder busy; drop this buffer.
    }

    BOOL appendedOK = [g_audioInput appendSampleBuffer:sampleBuffer];
    if (!appendedOK) {
        NSLog(@"⚠️ Failed appending audio sample buffer: %@", g_audioWriter.error);
    }
}

@end
210
+
42
211
  @implementation ScreenCaptureKitRecorder
43
212
 
213
// Creates the video AVAssetWriter, input, and pixel buffer adaptor for the
// current g_outputPath.
//
// @param width  Target frame width in pixels.
// @param height Target frame height in pixels.
// @param error  Out-parameter, populated on failure when non-NULL.
// @return YES when the pipeline is ready; NO on any failure.
//
// FIX: on failure the video globals are now fully reset. Previously a failed
// -canAddInput: (or writer alloc failure) left g_videoWriter/g_videoInput set,
// so the video callback would later call -startWriting on a writer with no
// attached input.
+ (BOOL)prepareVideoWriterWithWidth:(NSInteger)width height:(NSInteger)height error:(NSError **)error {
    if (!g_outputPath) {
        return NO;
    }

    NSURL *outputURL = [NSURL fileURLWithPath:g_outputPath];
    // Overwrite any stale file left over from a previous session.
    [[NSFileManager defaultManager] removeItemAtURL:outputURL error:nil];

    g_videoWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:AVFileTypeQuickTimeMovie error:error];
    if (!g_videoWriter || (error && *error)) {
        g_videoWriter = nil;
        return NO;
    }

    // ~6 bits per pixel is a reasonable H.264 rate for screen content.
    NSDictionary *compressionProps = @{
        AVVideoAverageBitRateKey: @(width * height * 6),
        AVVideoMaxKeyFrameIntervalKey: @30
    };

    NSDictionary *videoSettings = @{
        AVVideoCodecKey: AVVideoCodecTypeH264,
        AVVideoWidthKey: @(width),
        AVVideoHeightKey: @(height),
        AVVideoCompressionPropertiesKey: compressionProps
    };

    g_videoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
    g_videoInput.expectsMediaDataInRealTime = YES;

    // BGRA matches the stream configuration's pixelFormat (kCVPixelFormatType_32BGRA).
    g_pixelBufferAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:g_videoInput sourcePixelBufferAttributes:@{
        (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA),
        (NSString *)kCVPixelBufferWidthKey: @(width),
        (NSString *)kCVPixelBufferHeightKey: @(height),
        (NSString *)kCVPixelBufferCGImageCompatibilityKey: @YES,
        (NSString *)kCVPixelBufferCGBitmapContextCompatibilityKey: @YES
    }];

    if (![g_videoWriter canAddInput:g_videoInput]) {
        if (error) {
            *error = [NSError errorWithDomain:@"ScreenCaptureKitRecorder" code:-100 userInfo:@{NSLocalizedDescriptionKey: @"Cannot add video input to writer"}];
        }
        // Reset so a failed attempt is never mistaken for a live pipeline.
        g_videoWriter = nil;
        g_videoInput = nil;
        g_pixelBufferAdaptor = nil;
        return NO;
    }

    [g_videoWriter addInput:g_videoInput];
    g_videoWriterStarted = NO;
    g_videoStartTime = kCMTimeInvalid;

    return YES;
}
262
+
263
// Lazily creates the audio AVAssetWriter the first time an audio sample buffer
// arrives, deriving sample rate and channel count from the buffer's format
// description.
//
// @param sampleBuffer The first audio buffer delivered by the stream.
// @return YES when the writer is ready (or audio capture is disabled),
//         NO on failure. Failure paths fully reset the audio globals so a
//         broken attempt is retried on the next buffer.
//
// FIXES vs previous version:
//  1. Dropped the macOS 15 branch that set fileType = @"public.webm" —
//     AVAssetWriter has no WebM muxer on any macOS release, so audio writer
//     creation always failed there. Always write a QuickTime container.
//  2. The channel layout is now derived from the actual channel count; the
//     old code forced a stereo layout even for mono sources, which makes the
//     writer reject the input (AVNumberOfChannelsKey/AVChannelLayoutKey
//     mismatch).
//  3. On -canAddInput: failure the half-configured writer is discarded so the
//     early `g_audioWriter != nil` guard cannot report it as ready.
+ (BOOL)prepareAudioWriterIfNeededWithSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    if (!g_shouldCaptureAudio) {
        return YES; // Audio disabled – nothing to prepare.
    }
    if (g_audioWriter) {
        return YES; // Already prepared.
    }
    if (!g_audioOutputPath) {
        return NO; // Capture requested but no destination path configured.
    }

    CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer);
    if (!formatDescription) {
        NSLog(@"⚠️ Missing audio format description");
        return NO;
    }

    const AudioStreamBasicDescription *asbd = CMAudioFormatDescriptionGetStreamBasicDescription(formatDescription);
    if (!asbd) {
        NSLog(@"⚠️ Unsupported audio format description");
        return NO;
    }

    g_configuredSampleRate = (NSInteger)asbd->mSampleRate;
    g_configuredChannelCount = asbd->mChannelsPerFrame;

    NSURL *audioURL = [NSURL fileURLWithPath:g_audioOutputPath];
    // Overwrite any stale file from a previous session.
    [[NSFileManager defaultManager] removeItemAtURL:audioURL error:nil];

    NSError *writerError = nil;
    g_audioWriter = [[AVAssetWriter alloc] initWithURL:audioURL fileType:AVFileTypeQuickTimeMovie error:&writerError];
    if (!g_audioWriter || writerError) {
        NSLog(@"❌ Failed to create audio writer: %@", writerError);
        g_audioWriter = nil;
        return NO;
    }

    // AAC here supports mono or stereo; clamp the channel count and pick a
    // matching layout tag so the two settings keys cannot disagree.
    NSInteger channelCount = MIN(2, MAX(1, g_configuredChannelCount));
    AudioChannelLayout channelLayout;
    memset(&channelLayout, 0, sizeof(channelLayout));
    channelLayout.mChannelLayoutTag = (channelCount == 2) ? kAudioChannelLayoutTag_Stereo
                                                          : kAudioChannelLayoutTag_Mono;

    NSDictionary *audioSettings = @{
        AVFormatIDKey: @(kAudioFormatMPEG4AAC),
        AVSampleRateKey: @(g_configuredSampleRate),
        AVNumberOfChannelsKey: @(channelCount),
        AVChannelLayoutKey: [NSData dataWithBytes:&channelLayout length:sizeof(AudioChannelLayout)],
        AVEncoderBitRateKey: @(192000)
    };

    g_audioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioSettings];
    g_audioInput.expectsMediaDataInRealTime = YES;

    if (![g_audioWriter canAddInput:g_audioInput]) {
        NSLog(@"❌ Audio writer cannot add input");
        g_audioWriter = nil;
        g_audioInput = nil;
        return NO;
    }
    [g_audioWriter addInput:g_audioInput];
    g_audioWriterStarted = NO;
    g_audioStartTime = kCMTimeInvalid;

    return YES;
}
325
+
44
326
  + (BOOL)isScreenCaptureKitAvailable {
45
327
  if (@available(macOS 15.0, *)) {
46
328
  return [SCShareableContent class] != nil && [SCStream class] != nil && [SCRecordingOutput class] != nil;
@@ -73,6 +355,9 @@ static NSString *g_outputPath = nil;
73
355
  NSNumber *captureCursor = config[@"captureCursor"];
74
356
  NSNumber *includeMicrophone = config[@"includeMicrophone"];
75
357
  NSNumber *includeSystemAudio = config[@"includeSystemAudio"];
358
+ NSString *microphoneDeviceId = config[@"microphoneDeviceId"];
359
+ NSString *audioOutputPath = config[@"audioOutputPath"];
360
+ NSNumber *sessionTimestampNumber = config[@"sessionTimestamp"];
76
361
 
77
362
  MRLog(@"🎬 Starting PURE ScreenCaptureKit recording (NO AVFoundation)");
78
363
  MRLog(@"🔧 Config: cursor=%@ mic=%@ system=%@ display=%@ window=%@ crop=%@",
@@ -188,6 +473,29 @@ static NSString *g_outputPath = nil;
188
473
  streamConfig.pixelFormat = kCVPixelFormatType_32BGRA;
189
474
  streamConfig.scalesToFit = NO;
190
475
 
476
// Resolve the requested audio sources and decide whether audio capture is
// viable (it requires a destination path for the separate audio file).
BOOL shouldCaptureMic = includeMicrophone ? [includeMicrophone boolValue] : NO;
BOOL shouldCaptureSystemAudio = includeSystemAudio ? [includeSystemAudio boolValue] : NO;
g_shouldCaptureAudio = shouldCaptureMic || shouldCaptureSystemAudio;
g_audioOutputPath = audioOutputPath;
if (g_shouldCaptureAudio && (!g_audioOutputPath || [g_audioOutputPath length] == 0)) {
    NSLog(@"⚠️ Audio capture requested but no audio output path supplied – audio will be disabled");
    g_shouldCaptureAudio = NO;
}

// NOTE: platform name capitalized as `macOS` for consistency with every
// other @available check in this file.
if (@available(macOS 13.0, *)) {
    streamConfig.capturesAudio = g_shouldCaptureAudio;
    streamConfig.sampleRate = g_configuredSampleRate;
    streamConfig.channelCount = g_configuredChannelCount;
    // Exclude our own process output unless system audio was requested.
    streamConfig.excludesCurrentProcessAudio = !shouldCaptureSystemAudio;
}

if (@available(macOS 15.0, *)) {
    streamConfig.captureMicrophone = shouldCaptureMic;
    if (microphoneDeviceId && microphoneDeviceId.length > 0) {
        streamConfig.microphoneCaptureDeviceID = microphoneDeviceId;
    }
}
498
+
191
499
  // Apply crop area using sourceRect - CONVERT GLOBAL TO DISPLAY-RELATIVE COORDINATES
192
500
  if (captureRect && captureRect[@"x"] && captureRect[@"y"] && captureRect[@"width"] && captureRect[@"height"]) {
193
501
  CGFloat globalX = [captureRect[@"x"] doubleValue];
@@ -232,110 +540,63 @@ static NSString *g_outputPath = nil;
232
540
  MRLog(@"🎥 Pure ScreenCapture config: %ldx%ld @ 30fps, cursor=%d",
233
541
  recordingWidth, recordingHeight, shouldShowCursor);
234
542
 
235
- // AUDIO SUPPORT - Enable both microphone and system audio
236
- MRLog(@"🔍 AUDIO PROCESSING: includeMicrophone=%@ includeSystemAudio=%@", includeMicrophone, includeSystemAudio);
237
- BOOL shouldCaptureMic = includeMicrophone ? [includeMicrophone boolValue] : NO;
238
- BOOL shouldCaptureSystemAudio = includeSystemAudio ? [includeSystemAudio boolValue] : NO;
239
- MRLog(@"🔍 AUDIO COMPUTED: shouldCaptureMic=%d shouldCaptureSystemAudio=%d", shouldCaptureMic, shouldCaptureSystemAudio);
240
-
241
- // Enable audio if either microphone or system audio is requested
242
- if (@available(macOS 13.0, *)) {
243
- if (shouldCaptureMic || shouldCaptureSystemAudio) {
244
- streamConfig.capturesAudio = YES;
245
- streamConfig.sampleRate = 44100;
246
- streamConfig.channelCount = 2;
247
-
248
- if (shouldCaptureMic && shouldCaptureSystemAudio) {
249
- MRLog(@"🎵 Both microphone and system audio enabled");
250
- } else if (shouldCaptureMic) {
251
- MRLog(@"🎤 Microphone audio enabled");
252
- } else {
253
- MRLog(@"🔊 System audio enabled");
254
- }
255
- } else {
256
- streamConfig.capturesAudio = NO;
257
- MRLog(@"🔇 Audio disabled");
258
- }
259
- } else {
260
- streamConfig.capturesAudio = NO;
261
- MRLog(@"🔇 Audio disabled (macOS < 13.0)");
262
- }
263
-
264
- // Create pure ScreenCaptureKit recording output
265
- // Use local copy to prevent race conditions
266
- NSString *safeOutputPath = outputPath; // Local variable from outer scope
267
- if (!safeOutputPath || [safeOutputPath length] == 0) {
268
- NSLog(@"❌ Output path is nil or empty");
543
+ NSError *writerError = nil;
544
+ if (![ScreenCaptureKitRecorder prepareVideoWriterWithWidth:recordingWidth height:recordingHeight error:&writerError]) {
545
+ NSLog(@"❌ Failed to prepare video writer: %@", writerError);
269
546
  return;
270
547
  }
271
548
 
272
- NSURL *outputURL = [NSURL fileURLWithPath:safeOutputPath];
273
- if (!outputURL) {
274
- NSLog(@"❌ Failed to create output URL from path: %@", safeOutputPath);
275
- return;
276
- }
277
-
278
- if (@available(macOS 15.0, *)) {
279
- // Create recording output configuration
280
- SCRecordingOutputConfiguration *recordingConfig = [[SCRecordingOutputConfiguration alloc] init];
281
- recordingConfig.outputURL = outputURL;
282
- recordingConfig.videoCodecType = AVVideoCodecTypeH264;
283
-
284
- // Audio configuration - using available properties
285
- // Note: Specific audio routing handled by ScreenCaptureKit automatically
286
-
287
- // Create recording output with correct initializer
288
- g_recordingOutput = [[SCRecordingOutput alloc] initWithConfiguration:recordingConfig
289
- delegate:nil];
290
- if (shouldCaptureMic && shouldCaptureSystemAudio) {
291
- NSLog(@"🔧 Created SCRecordingOutput with microphone and system audio");
292
- } else if (shouldCaptureMic) {
293
- NSLog(@"🔧 Created SCRecordingOutput with microphone audio");
294
- } else if (shouldCaptureSystemAudio) {
295
- NSLog(@"🔧 Created SCRecordingOutput with system audio");
296
- } else {
297
- NSLog(@"🔧 Created SCRecordingOutput (audio disabled)");
298
- }
299
- }
300
-
301
- if (!g_recordingOutput) {
302
- NSLog(@"❌ Failed to create SCRecordingOutput");
303
- return;
549
+ g_videoQueue = dispatch_queue_create("screen_capture_video_queue", DISPATCH_QUEUE_SERIAL);
550
+ g_audioQueue = dispatch_queue_create("screen_capture_audio_queue", DISPATCH_QUEUE_SERIAL);
551
+ g_videoStreamOutput = [[ScreenCaptureVideoOutput alloc] init];
552
+ if (g_shouldCaptureAudio) {
553
+ g_audioStreamOutput = [[ScreenCaptureAudioOutput alloc] init];
554
+ } else {
555
+ g_audioStreamOutput = nil;
304
556
  }
305
557
 
306
- NSLog(@"✅ Pure ScreenCaptureKit recording output created");
307
-
308
- // Create delegate
309
558
  g_streamDelegate = [[PureScreenCaptureDelegate alloc] init];
310
-
311
- // Create and configure stream
312
559
  g_stream = [[SCStream alloc] initWithFilter:filter configuration:streamConfig delegate:g_streamDelegate];
313
560
 
314
561
  if (!g_stream) {
315
562
  NSLog(@"❌ Failed to create pure stream");
563
+ CleanupWriters();
316
564
  return;
317
565
  }
318
566
 
319
- // Add recording output directly to stream
320
567
  NSError *outputError = nil;
321
- BOOL outputAdded = NO;
322
-
323
- if (@available(macOS 15.0, *)) {
324
- outputAdded = [g_stream addRecordingOutput:g_recordingOutput error:&outputError];
568
+ BOOL videoOutputAdded = [g_stream addStreamOutput:g_videoStreamOutput type:SCStreamOutputTypeScreen sampleHandlerQueue:g_videoQueue error:&outputError];
569
+ if (!videoOutputAdded || outputError) {
570
+ NSLog(@"❌ Failed to add video output: %@", outputError);
571
+ CleanupWriters();
572
+ return;
325
573
  }
326
574
 
327
- if (!outputAdded || outputError) {
328
- NSLog(@"❌ Failed to add recording output: %@", outputError);
329
- return;
575
+ if (g_shouldCaptureAudio) {
576
+ if (@available(macOS 13.0, *)) {
577
+ NSError *audioError = nil;
578
+ BOOL audioOutputAdded = [g_stream addStreamOutput:g_audioStreamOutput type:SCStreamOutputTypeAudio sampleHandlerQueue:g_audioQueue error:&audioError];
579
+ if (!audioOutputAdded || audioError) {
580
+ NSLog(@"❌ Failed to add audio output: %@", audioError);
581
+ CleanupWriters();
582
+ return;
583
+ }
584
+ } else {
585
+ NSLog(@"⚠️ Audio capture requested but requires macOS 13.0+");
586
+ g_shouldCaptureAudio = NO;
587
+ }
330
588
  }
331
589
 
332
- MRLog(@"✅ Pure recording output added to stream");
590
+ MRLog(@"✅ Stream outputs configured (audio=%d)", g_shouldCaptureAudio);
591
+ if (sessionTimestampNumber) {
592
+ MRLog(@"🕒 Session timestamp: %@", sessionTimestampNumber);
593
+ }
333
594
 
334
- // Start capture with recording
335
595
  [g_stream startCaptureWithCompletionHandler:^(NSError *startError) {
336
596
  if (startError) {
337
597
  NSLog(@"❌ Failed to start pure capture: %@", startError);
338
598
  g_isRecording = NO;
599
+ CleanupWriters();
339
600
  } else {
340
601
  MRLog(@"🎉 PURE ScreenCaptureKit recording started successfully!");
341
602
  g_isRecording = YES;
@@ -368,6 +629,7 @@ static NSString *g_outputPath = nil;
368
629
 
369
630
  // Finalize on main queue to prevent threading issues
370
631
  dispatch_async(dispatch_get_main_queue(), ^{
632
+ CleanupWriters();
371
633
  [ScreenCaptureKitRecorder cleanupVideoWriter];
372
634
  });
373
635
  }];
@@ -390,11 +652,6 @@ static NSString *g_outputPath = nil;
390
652
  g_isCleaningUp = YES;
391
653
  g_isRecording = NO;
392
654
 
393
- if (g_recordingOutput) {
394
- // SCRecordingOutput finalizes automatically
395
- MRLog(@"✅ Pure recording output finalized");
396
- }
397
-
398
655
  [ScreenCaptureKitRecorder cleanupVideoWriter];
399
656
  }
400
657
  }
@@ -414,16 +671,18 @@ static NSString *g_outputPath = nil;
414
671
  MRLog(@"✅ Stream reference cleared");
415
672
  }
416
673
 
417
- if (g_recordingOutput) {
418
- g_recordingOutput = nil;
419
- MRLog(@"✅ Recording output reference cleared");
420
- }
421
-
422
674
  if (g_streamDelegate) {
423
675
  g_streamDelegate = nil;
424
676
  MRLog(@"✅ Stream delegate reference cleared");
425
677
  }
426
678
 
679
+ g_videoStreamOutput = nil;
680
+ g_audioStreamOutput = nil;
681
+ g_videoQueue = nil;
682
+ g_audioQueue = nil;
683
+ g_audioOutputPath = nil;
684
+ g_shouldCaptureAudio = NO;
685
+
427
686
  g_isRecording = NO;
428
687
  g_isCleaningUp = NO; // Reset cleanup flag
429
688
  g_outputPath = nil;
@@ -58,7 +58,7 @@ static NSInteger g_currentActiveScreenIndex = -1;
58
58
 
59
59
  // Record icon helpers
60
60
  static NSImage *CreateRecordIconImage(CGFloat size) {
61
- const CGFloat leadingInset = 8.0;
61
+ const CGFloat leadingInset = 24.0;
62
62
  const CGFloat trailingSpacing = 6.0;
63
63
  CGFloat width = leadingInset + size + trailingSpacing;
64
64
  NSImage *image = [[[NSImage alloc] initWithSize:NSMakeSize(width, size)] autorelease];
@@ -1,41 +0,0 @@
1
- #import <Foundation/Foundation.h>
2
-
3
- @interface AudioCapture : NSObject
4
-
5
- + (NSArray *)getAudioDevices;
6
- + (BOOL)hasAudioPermission;
7
- + (void)requestAudioPermission:(void(^)(BOOL granted))completion;
8
-
9
- @end
10
-
11
- @implementation AudioCapture
12
-
13
- + (NSArray *)getAudioDevices {
14
- NSMutableArray *devices = [NSMutableArray array];
15
-
16
- // ScreenCaptureKit handles audio internally - return default device
17
- NSDictionary *deviceInfo = @{
18
- @"id": @"default",
19
- @"name": @"Default Audio Device",
20
- @"manufacturer": @"System",
21
- @"isDefault": @YES
22
- };
23
-
24
- [devices addObject:deviceInfo];
25
-
26
- return devices;
27
- }
28
-
29
- + (BOOL)hasAudioPermission {
30
- // ScreenCaptureKit handles audio permissions internally
31
- return YES;
32
- }
33
-
34
- + (void)requestAudioPermission:(void(^)(BOOL granted))completion {
35
- // ScreenCaptureKit handles audio permissions internally
36
- if (completion) {
37
- completion(YES);
38
- }
39
- }
40
-
41
- @end