node-mac-recorder 2.21.0 → 2.21.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -24,6 +24,8 @@ This project relies on system-level frameworks (ScreenCaptureKit, AVFoundation)
 > `NSCameraUseContinuityCameraDeviceType` removes the runtime warning when Continuity Camera devices are detected. macOS 14+ expects this key whenever Continuity Camera APIs are used.
 > The `com.apple.security.*` entitlements are only required for sandboxed / hardened runtime builds. Omit them if your distribution does not use the macOS sandbox.
 
+During local development you can temporarily bypass the Continuity Camera check by running with `ALLOW_CONTINUITY_CAMERA=1`, but Apple still recommends setting the Info.plist key for shipping applications.
+
 ### Screen recording
 
 Screen recording permissions are granted by the user via the OS **Screen Recording** privacy panel. There is no `Info.plist` key to request it, but your app should guide the user to approve it.
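
A minimal sketch of the development-only bypass described in the README hunk above, assuming the package's default export is the `MacRecorder` class from index.js (the file name `record.js` is illustrative):

```js
// record.js - development only. Launch as:
//   ALLOW_CONTINUITY_CAMERA=1 node record.js
// Shipping builds should set NSCameraUseContinuityCameraDeviceType in
// Info.plist instead of relying on this environment variable.
const MacRecorder = require("node-mac-recorder");

if (process.env.ALLOW_CONTINUITY_CAMERA !== undefined) {
  console.warn("Continuity Camera Info.plist check bypassed (dev only)");
}

const recorder = new MacRecorder();
// ...start a recording with captureCamera enabled as usual
```

Note that the native check is `getenv("ALLOW_CONTINUITY_CAMERA")`, which only tests that the variable is set, so any value enables the bypass.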
package/index.js CHANGED
@@ -425,12 +425,12 @@ class MacRecorder extends EventEmitter {
     this.sessionTimestamp = sessionTimestamp;
     const outputDir = path.dirname(outputPath);
     const cursorFilePath = path.join(outputDir, `temp_cursor_${sessionTimestamp}.json`);
-    const cameraFilePath =
+    let cameraFilePath =
       this.options.captureCamera === true
         ? path.join(outputDir, `temp_camera_${sessionTimestamp}.webm`)
         : null;
     const captureAudio = this.options.includeMicrophone === true || this.options.includeSystemAudio === true;
-    const audioFilePath = captureAudio
+    let audioFilePath = captureAudio
       ? path.join(outputDir, `temp_audio_${sessionTimestamp}.webm`)
       : null;
 
@@ -451,11 +451,11 @@ class MacRecorder extends EventEmitter {
     }
 
     // Start the native recording
-    const recordingOptions = {
-      includeMicrophone: this.options.includeMicrophone === true, // Only if explicitly enabled
-      includeSystemAudio: this.options.includeSystemAudio === true, // Only if explicitly enabled
-      captureCursor: this.options.captureCursor || false,
-      displayId: this.options.displayId || null, // null = main screen
+    let recordingOptions = {
+      includeMicrophone: this.options.includeMicrophone === true, // Only if explicitly enabled
+      includeSystemAudio: this.options.includeSystemAudio === true, // Only if explicitly enabled
+      captureCursor: this.options.captureCursor || false,
+      displayId: this.options.displayId || null, // null = main screen
       windowId: this.options.windowId || null, // null = full screen
       audioDeviceId: this.options.audioDeviceId || null, // null = default device
       systemAudioDeviceId: this.options.systemAudioDeviceId || null, // null = auto-detect system audio device
@@ -464,12 +464,18 @@ class MacRecorder extends EventEmitter {
       sessionTimestamp,
     };
 
-    if (cameraFilePath) {
-      recordingOptions.cameraOutputPath = cameraFilePath;
-    }
+    if (cameraFilePath) {
+      recordingOptions = {
+        ...recordingOptions,
+        cameraOutputPath: cameraFilePath,
+      };
+    }
 
-    if (audioFilePath) {
-      recordingOptions.audioOutputPath = audioFilePath;
+    if (audioFilePath) {
+      recordingOptions = {
+        ...recordingOptions,
+        audioOutputPath: audioFilePath,
+      };
     }
 
     // If a manual captureArea is set, use it
@@ -494,6 +500,32 @@ class MacRecorder extends EventEmitter {
     }
 
     if (success) {
+      if (this.options.captureCamera === true) {
+        try {
+          const nativeCameraPath = nativeBinding.getCameraRecordingPath
+            ? nativeBinding.getCameraRecordingPath()
+            : null;
+          if (typeof nativeCameraPath === "string" && nativeCameraPath.length > 0) {
+            this.cameraCaptureFile = nativeCameraPath;
+            cameraFilePath = nativeCameraPath;
+          }
+        } catch (pathError) {
+          console.warn("Camera output path sync failed:", pathError.message);
+        }
+      }
+      if (captureAudio) {
+        try {
+          const nativeAudioPath = nativeBinding.getAudioRecordingPath
+            ? nativeBinding.getAudioRecordingPath()
+            : null;
+          if (typeof nativeAudioPath === "string" && nativeAudioPath.length > 0) {
+            this.audioCaptureFile = nativeAudioPath;
+            audioFilePath = nativeAudioPath;
+          }
+        } catch (pathError) {
+          console.warn("Audio output path sync failed:", pathError.message);
+        }
+      }
      this.isRecording = true;
      this.recordingStartTime = Date.now();
 
@@ -664,6 +696,33 @@ class MacRecorder extends EventEmitter {
       success = true; // Assume success to avoid throwing
     }
 
+    if (this.options.captureCamera === true) {
+      try {
+        const nativeCameraPath = nativeBinding.getCameraRecordingPath
+          ? nativeBinding.getCameraRecordingPath()
+          : null;
+        if (typeof nativeCameraPath === "string" && nativeCameraPath.length > 0) {
+          this.cameraCaptureFile = nativeCameraPath;
+        }
+      } catch (pathError) {
+        console.warn("Camera output path sync failed:", pathError.message);
+      }
+    }
+
+    const captureAudio = this.options.includeMicrophone === true || this.options.includeSystemAudio === true;
+    if (captureAudio) {
+      try {
+        const nativeAudioPath = nativeBinding.getAudioRecordingPath
+          ? nativeBinding.getAudioRecordingPath()
+          : null;
+        if (typeof nativeAudioPath === "string" && nativeAudioPath.length > 0) {
+          this.audioCaptureFile = nativeAudioPath;
+        }
+      } catch (pathError) {
+        console.warn("Audio output path sync failed:", pathError.message);
+      }
+    }
+
     if (this.cameraCaptureActive) {
       this.cameraCaptureActive = false;
       this.emit("cameraCaptureStopped", {
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "node-mac-recorder",
-  "version": "2.21.0",
+  "version": "2.21.1",
   "description": "Native macOS screen recording package for Node.js applications",
   "main": "index.js",
   "keywords": [
@@ -55,13 +55,49 @@ static dispatch_queue_t g_audioCaptureQueue = nil;
     NSURL *outputURL = [NSURL fileURLWithPath:self.outputPath];
     [[NSFileManager defaultManager] removeItemAtURL:outputURL error:nil];
 
-    AVFileType fileType = AVFileTypeQuickTimeMovie;
+    NSError *writerError = nil;
+    AVFileType requestedFileType = AVFileTypeQuickTimeMovie;
+    BOOL requestedWebM = NO;
     if (@available(macOS 15.0, *)) {
-        fileType = @"public.webm";
+        requestedFileType = @"public.webm";
+        requestedWebM = YES;
+    }
+
+    @try {
+        self.writer = [[AVAssetWriter alloc] initWithURL:outputURL fileType:requestedFileType error:&writerError];
+    } @catch (NSException *exception) {
+        NSDictionary *info = @{
+            NSLocalizedDescriptionKey: exception.reason ?: @"Failed to initialize audio writer"
+        };
+        writerError = [NSError errorWithDomain:@"NativeAudioRecorder" code:-30 userInfo:info];
+        self.writer = nil;
     }
 
-    self.writer = [[AVAssetWriter alloc] initWithURL:outputURL fileType:fileType error:error];
-    if (!self.writer || (*error)) {
+    if ((!self.writer || writerError) && requestedWebM) {
+        NSString *fallbackPath = [[self.outputPath stringByDeletingPathExtension] stringByAppendingPathExtension:@"mov"];
+        if (!fallbackPath || [fallbackPath length] == 0) {
+            fallbackPath = [self.outputPath stringByAppendingString:@".mov"];
+        }
+        [[NSFileManager defaultManager] removeItemAtPath:fallbackPath error:nil];
+        NSURL *fallbackURL = [NSURL fileURLWithPath:fallbackPath];
+        self.outputPath = fallbackPath;
+        writerError = nil;
+        @try {
+            self.writer = [[AVAssetWriter alloc] initWithURL:fallbackURL fileType:AVFileTypeQuickTimeMovie error:&writerError];
+        } @catch (NSException *exception) {
+            NSDictionary *info = @{
+                NSLocalizedDescriptionKey: exception.reason ?: @"Failed to initialize audio writer"
+            };
+            writerError = [NSError errorWithDomain:@"NativeAudioRecorder" code:-31 userInfo:info];
+            self.writer = nil;
+        }
+        outputURL = fallbackURL;
+    }
+
+    if (!self.writer || writerError) {
+        if (error) {
+            *error = writerError;
+        }
         return NO;
     }
 
@@ -70,20 +106,28 @@ static dispatch_queue_t g_audioCaptureQueue = nil;
 
     double sampleRate = asbd ? asbd->mSampleRate : 44100.0;
     NSUInteger channels = asbd ? asbd->mChannelsPerFrame : 2;
-
-    AudioChannelLayout stereoLayout = {
-        .mChannelLayoutTag = kAudioChannelLayoutTag_Stereo,
-        .mChannelBitmap = 0,
-        .mNumberChannelDescriptions = 0
-    };
-
-    NSDictionary *audioSettings = @{
+    channels = MAX((NSUInteger)1, channels);
+
+    AudioChannelLayout layout = {0};
+    size_t layoutSize = 0;
+    if (channels == 1) {
+        layout.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
+        layoutSize = sizeof(AudioChannelLayout);
+    } else if (channels == 2) {
+        layout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
+        layoutSize = sizeof(AudioChannelLayout);
+    }
+
+    NSMutableDictionary *audioSettings = [@{
         AVFormatIDKey: @(kAudioFormatMPEG4AAC),
         AVSampleRateKey: @(sampleRate),
-        AVNumberOfChannelsKey: @(MAX((NSUInteger)1, channels)),
-        AVChannelLayoutKey: [NSData dataWithBytes:&stereoLayout length:sizeof(AudioChannelLayout)],
+        AVNumberOfChannelsKey: @(channels),
         AVEncoderBitRateKey: @(192000)
-    };
+    } mutableCopy];
+
+    if (layoutSize > 0) {
+        audioSettings[AVChannelLayoutKey] = [NSData dataWithBytes:&layout length:layoutSize];
+    }
 
     self.writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioSettings];
     self.writerInput.expectsMediaDataInRealTime = YES;
@@ -318,4 +362,11 @@ void requestAudioPermission(void (^completion)(BOOL granted)) {
     [AVCaptureDevice requestAccessForMediaType:AVMediaTypeAudio completionHandler:completion];
 }
 
+NSString *currentStandaloneAudioRecordingPath() {
+    if (!g_audioRecorder) {
+        return nil;
+    }
+    return g_audioRecorder.outputPath;
+}
+
 }
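
The consumer-visible consequence of the fallback above: when the WebM writer cannot be created, the recorder silently swaps the requested `.webm` extension for a `.mov` QuickTime container. If you need to predict that name, the extension swap translates directly to Node's `path` module (illustrative only; the authoritative value is whatever `getAudioRecordingPath()` returns):

```js
const path = require("path");

// Mirror of the native fallback: swap the extension for .mov, or append
// .mov when stripping the extension would leave an empty name.
function webmFallbackPath(requestedPath) {
  const parsed = path.parse(requestedPath);
  if (!parsed.name) {
    return requestedPath + ".mov";
  }
  return path.join(parsed.dir, parsed.name + ".mov");
}

console.log(webmFallbackPath("/tmp/temp_audio_1700000000000.webm"));
// -> /tmp/temp_audio_1700000000000.mov
```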
@@ -8,6 +8,21 @@
 static AVVideoCodecType const AVVideoCodecTypeVP9 = @"vp09";
 #endif
 
+static BOOL MRAllowContinuityCamera() {
+    static dispatch_once_t onceToken;
+    static BOOL allowContinuity = NO;
+    dispatch_once(&onceToken, ^{
+        id continuityKey = [[NSBundle mainBundle] objectForInfoDictionaryKey:@"NSCameraUseContinuityCameraDeviceType"];
+        if ([continuityKey respondsToSelector:@selector(boolValue)] && [continuityKey boolValue]) {
+            allowContinuity = YES;
+        }
+        if (!allowContinuity && getenv("ALLOW_CONTINUITY_CAMERA")) {
+            allowContinuity = YES;
+        }
+    });
+    return allowContinuity;
+}
+
 static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
     if (!device) {
         return NO;
@@ -27,16 +42,28 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
     BOOL nameMentionsContinuity = [localizedName rangeOfString:@"Continuity" options:NSCaseInsensitiveSearch].location != NSNotFound ||
                                   [modelId rangeOfString:@"Continuity" options:NSCaseInsensitiveSearch].location != NSNotFound;
 
-    if ([deviceType isEqualToString:AVCaptureDeviceTypeExternal] && nameMentionsContinuity) {
-        return YES;
+    if (@available(macOS 14.0, *)) {
+        if ([deviceType isEqualToString:AVCaptureDeviceTypeExternal] && nameMentionsContinuity) {
+            return YES;
+        }
     }
 
-    if ([deviceType isEqualToString:AVCaptureDeviceTypeExternal] &&
-        [manufacturer rangeOfString:@"Apple" options:NSCaseInsensitiveSearch].location != NSNotFound &&
-        nameMentionsContinuity) {
+    if ([deviceType isEqualToString:AVCaptureDeviceTypeExternalUnknown] && nameMentionsContinuity) {
         return YES;
     }
 
+    BOOL isApple = [manufacturer rangeOfString:@"Apple" options:NSCaseInsensitiveSearch].location != NSNotFound;
+    if (isApple && nameMentionsContinuity) {
+        if (@available(macOS 14.0, *)) {
+            if ([deviceType isEqualToString:AVCaptureDeviceTypeExternal]) {
+                return YES;
+            }
+        }
+        if ([deviceType isEqualToString:AVCaptureDeviceTypeExternalUnknown]) {
+            return YES;
+        }
+    }
+
     return NO;
 }
 
@@ -80,14 +107,21 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
     NSMutableArray<NSDictionary *> *devicesInfo = [NSMutableArray array];
 
     NSMutableArray<AVCaptureDeviceType> *deviceTypes = [NSMutableArray array];
-    [deviceTypes addObject:AVCaptureDeviceTypeExternalUnknown];
+    BOOL allowContinuity = MRAllowContinuityCamera();
+
     if (@available(macOS 10.15, *)) {
         [deviceTypes addObject:AVCaptureDeviceTypeBuiltInWideAngleCamera];
+    } else {
+        [deviceTypes addObject:AVCaptureDeviceTypeBuiltInWideAngleCamera];
+    }
+
+    if (allowContinuity) {
         if (@available(macOS 14.0, *)) {
             [deviceTypes addObject:AVCaptureDeviceTypeContinuityCamera];
+            [deviceTypes addObject:AVCaptureDeviceTypeExternal];
+        } else {
+            [deviceTypes addObject:AVCaptureDeviceTypeExternalUnknown];
         }
-    } else {
-        [deviceTypes addObject:AVCaptureDeviceTypeBuiltInWideAngleCamera];
     }
 
     AVCaptureDeviceDiscoverySession *discoverySession =
@@ -97,6 +131,10 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
 
     for (AVCaptureDevice *device in discoverySession.devices) {
         BOOL continuityCamera = MRIsContinuityCamera(device);
+        if (continuityCamera && !allowContinuity) {
+            // Skip Continuity cameras when entitlement/env flag is missing
+            continue;
+        }
 
         // Determine the best (maximum) resolution format for this device
         CMVideoDimensions bestDimensions = {0, 0};
@@ -264,12 +302,25 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
         }
     }
 
-    if (bestRange) {
-        double clampedRate = MIN(bestRange.maxFrameRate, MAX(bestRange.minFrameRate, targetFrameRate));
-        CMTime frameDuration = CMTimeMake(1, (int32_t)round(clampedRate));
-        device.activeVideoMinFrameDuration = frameDuration;
-        device.activeVideoMaxFrameDuration = frameDuration;
+    if (bestRange) {
+        double clampedRate = MIN(bestRange.maxFrameRate, MAX(bestRange.minFrameRate, targetFrameRate));
+        double durationSeconds = clampedRate > 0.0 ? (1.0 / clampedRate) : CMTimeGetSeconds(bestRange.maxFrameDuration);
+        int32_t preferredTimescale = bestRange.minFrameDuration.timescale > 0 ? bestRange.minFrameDuration.timescale : 600;
+        CMTime desiredDuration = CMTimeMakeWithSeconds(durationSeconds, preferredTimescale);
+
+        if (!CMTIME_IS_NUMERIC(desiredDuration)) {
+            desiredDuration = bestRange.maxFrameDuration;
+        }
+
+        if (CMTimeCompare(desiredDuration, bestRange.minFrameDuration) < 0) {
+            desiredDuration = bestRange.minFrameDuration;
+        } else if (CMTimeCompare(desiredDuration, bestRange.maxFrameDuration) > 0) {
+            desiredDuration = bestRange.maxFrameDuration;
         }
+
+        device.activeVideoMinFrameDuration = desiredDuration;
+        device.activeVideoMaxFrameDuration = desiredDuration;
+    }
 } @catch (NSException *exception) {
     if (error) {
         NSDictionary *userInfo = @{
@@ -298,6 +349,7 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
 
     NSString *extension = outputURL.pathExtension.lowercaseString;
     BOOL wantsWebM = [extension isEqualToString:@"webm"];
+    NSString *originalPath = outputURL.path ?: @"";
 
     NSString *codec = AVVideoCodecTypeH264;
     AVFileType fileType = AVFileTypeQuickTimeMovie;
@@ -315,7 +367,42 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
     }
 
     NSError *writerError = nil;
-    self.assetWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:fileType error:&writerError];
+    @try {
+        self.assetWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:fileType error:&writerError];
+    } @catch (NSException *exception) {
+        NSDictionary *info = @{
+            NSLocalizedDescriptionKey: exception.reason ?: @"Failed to initialize asset writer"
+        };
+        writerError = [NSError errorWithDomain:@"CameraRecorder" code:-100 userInfo:info];
+        self.assetWriter = nil;
+    }
+
+    if ((!self.assetWriter || writerError) && wantsWebM) {
+        MRLog(@"⚠️ CameraRecorder: WebM writer unavailable (%@) – falling back to QuickTime container", writerError.localizedDescription);
+        codec = AVVideoCodecTypeH264;
+        fileType = AVFileTypeQuickTimeMovie;
+        webMSupported = NO;
+        writerError = nil;
+        NSString *fallbackPath = [[originalPath stringByDeletingPathExtension] stringByAppendingPathExtension:@"mov"];
+        if (!fallbackPath || [fallbackPath length] == 0) {
+            fallbackPath = [originalPath stringByAppendingString:@".mov"];
+        }
+        [[NSFileManager defaultManager] removeItemAtPath:fallbackPath error:nil];
+        NSURL *fallbackURL = [NSURL fileURLWithPath:fallbackPath];
+        self.outputPath = fallbackPath;
+        @try {
+            self.assetWriter = [[AVAssetWriter alloc] initWithURL:fallbackURL fileType:fileType error:&writerError];
+        } @catch (NSException *exception) {
+            NSDictionary *info = @{
+                NSLocalizedDescriptionKey: exception.reason ?: @"Failed to initialize asset writer"
+            };
+            writerError = [NSError errorWithDomain:@"CameraRecorder" code:-100 userInfo:info];
+            self.assetWriter = nil;
+        }
+        outputURL = fallbackURL;
+    } else {
+        self.outputPath = originalPath;
+    }
 
     if (!self.assetWriter || writerError) {
         if (error) {
@@ -324,7 +411,6 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
         return NO;
     }
 
-    // On fallback, if WebM was requested but not supported, log and switch extension to .mov
     if (wantsWebM && !webMSupported) {
         MRLog(@"ℹ️ CameraRecorder: WebM unavailable, storing data in QuickTime container");
     }
@@ -349,7 +435,6 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
         AVVideoCompressionPropertiesKey: compressionProps
     };
 
-    // Video-only writer input (camera recordings remain silent by design)
     self.assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                outputSettings:videoSettings];
     self.assetWriterInput.expectsMediaDataInRealTime = YES;
@@ -444,26 +529,15 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
         return NO;
     }
 
-    BOOL isContinuityCamera = MRIsContinuityCamera(device);
-    if (isContinuityCamera) {
-        id continuityKey = [[NSBundle mainBundle] objectForInfoDictionaryKey:@"NSCameraUseContinuityCameraDeviceType"];
-        BOOL allowContinuity = NO;
-        if ([continuityKey respondsToSelector:@selector(boolValue)]) {
-            allowContinuity = [continuityKey boolValue];
-        }
-        if (!allowContinuity && getenv("ALLOW_CONTINUITY_CAMERA")) {
-            allowContinuity = YES;
-        }
-        if (!allowContinuity) {
-            if (error) {
-                NSDictionary *userInfo = @{
-                    NSLocalizedDescriptionKey: @"Continuity Camera requires NSCameraUseContinuityCameraDeviceType=true in Info.plist"
-                };
-                *error = [NSError errorWithDomain:@"CameraRecorder" code:-5 userInfo:userInfo];
-            }
-            MRLog(@"⚠️ Continuity Camera access denied - missing Info.plist entitlement");
-            return NO;
+    if (MRIsContinuityCamera(device) && !MRAllowContinuityCamera()) {
+        if (error) {
+            NSDictionary *userInfo = @{
+                NSLocalizedDescriptionKey: @"Continuity Camera requires NSCameraUseContinuityCameraDeviceType=true in Info.plist"
+            };
+            *error = [NSError errorWithDomain:@"CameraRecorder" code:-5 userInfo:userInfo];
         }
+        MRLog(@"⚠️ Continuity Camera access denied - missing Info.plist entitlement");
+        return NO;
     }
 
     int32_t width = 0;
@@ -521,9 +595,12 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
     AVCaptureConnection *connection = [self.videoOutput connectionWithMediaType:AVMediaTypeVideo];
     if (connection) {
         if (connection.isVideoOrientationSupported) {
-            connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight;
+            connection.videoOrientation = AVCaptureVideoOrientationPortrait;
         }
         if (connection.isVideoMirroringSupported && device.position == AVCaptureDevicePositionFront) {
+            if ([connection respondsToSelector:@selector(setAutomaticallyAdjustsVideoMirroring:)]) {
+                connection.automaticallyAdjustsVideoMirroring = NO;
+            }
            connection.videoMirrored = YES;
         }
     }
@@ -651,17 +728,13 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
 extern "C" {
 
 NSArray<NSDictionary *> *listCameraDevices() {
-    @autoreleasepool {
-        return [CameraRecorder availableCameraDevices];
-    }
+    return [CameraRecorder availableCameraDevices];
 }
 
 bool startCameraRecording(NSString *outputPath, NSString *deviceId, NSError **error) {
-    @autoreleasepool {
-        return [[CameraRecorder sharedRecorder] startRecordingWithDeviceId:deviceId
-                                                                outputPath:outputPath
-                                                                     error:error];
-    }
+    return [[CameraRecorder sharedRecorder] startRecordingWithDeviceId:deviceId
                                                             outputPath:outputPath
                                                                  error:error];
 }
 
 bool stopCameraRecording() {
@@ -674,4 +747,8 @@ bool isCameraRecording() {
     return [CameraRecorder sharedRecorder].isRecording;
 }
 
+NSString *currentCameraRecordingPath() {
+    return [CameraRecorder sharedRecorder].outputPath;
+}
+
 }
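
Taken together, the camera changes mean enumeration now skips Continuity devices entirely when neither the Info.plist key nor the environment flag is present, instead of failing later at start time. A sketch of observing that from JavaScript; forwarding `getCameraDevices()` on the recorder instance is an assumption based on the NAPI export elsewhere in this diff:

```js
const MacRecorder = require("node-mac-recorder");

async function listCameras() {
  const recorder = new MacRecorder();
  const devices = await recorder.getCameraDevices();

  for (const device of devices) {
    console.log(device.name, device.id, device.maxResolution);
  }

  if (devices.length === 0) {
    console.log(
      "No cameras listed. If you expected a Continuity Camera, set " +
        "NSCameraUseContinuityCameraDeviceType=true or run with " +
        "ALLOW_CONTINUITY_CAMERA=1 (development only)."
    );
  }
}

listCameras().catch(console.error);
```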
@@ -1088,43 +1088,13 @@ NSDictionary* getDisplayScalingInfo(CGPoint globalPoint) {
 
     // Check if point is within this display
     if (isInBounds) {
-        // Get physical dimensions
-        CGImageRef testImage = CGDisplayCreateImage(displayID);
-        CGSize imageSize = CGSizeMake(CGImageGetWidth(testImage), CGImageGetHeight(testImage));
-        CGImageRelease(testImage);
-
-        CGSize actualPhysicalSize = imageSize;
-        CFArrayRef displayModes = CGDisplayCopyAllDisplayModes(displayID, NULL);
-        if (displayModes) {
-            CFIndex modeCount = CFArrayGetCount(displayModes);
-            CGSize maxResolution = CGSizeMake(0, 0);
-
-            for (CFIndex i = 0; i < modeCount; i++) {
-                CGDisplayModeRef mode = (CGDisplayModeRef)CFArrayGetValueAtIndex(displayModes, i);
-                CGSize modeSize = CGSizeMake(CGDisplayModeGetWidth(mode), CGDisplayModeGetHeight(mode));
-
-                if (modeSize.width > maxResolution.width ||
-                    (modeSize.width == maxResolution.width && modeSize.height > maxResolution.height)) {
-                    maxResolution = modeSize;
-                }
-            }
-
-            if (maxResolution.width > imageSize.width * 1.5 || maxResolution.height > imageSize.height * 1.5) {
-                actualPhysicalSize = maxResolution;
-            } else {
-                actualPhysicalSize = imageSize;
-            }
-
-            CFRelease(displayModes);
-        } else {
-            actualPhysicalSize = imageSize;
-        }
-
+        // Compute physical dimensions using pixel counts to avoid heavy APIs
         CGSize logicalSize = displayBounds.size;
-        CGSize reportedPhysicalSize = CGSizeMake(CGDisplayPixelsWide(displayID), CGDisplayPixelsHigh(displayID));
+        CGSize actualPhysicalSize = CGSizeMake(CGDisplayPixelsWide(displayID), CGDisplayPixelsHigh(displayID));
+        CGSize reportedPhysicalSize = actualPhysicalSize;
 
-        CGFloat scaleX = actualPhysicalSize.width / logicalSize.width;
-        CGFloat scaleY = actualPhysicalSize.height / logicalSize.height;
+        CGFloat scaleX = logicalSize.width > 0 ? actualPhysicalSize.width / logicalSize.width : 1.0;
+        CGFloat scaleY = logicalSize.height > 0 ? actualPhysicalSize.height / logicalSize.height : 1.0;
         CGFloat scaleFactor = MAX(scaleX, scaleY);
 
         return @{
@@ -1141,36 +1111,15 @@ NSDictionary* getDisplayScalingInfo(CGPoint globalPoint) {
     CGDirectDisplayID mainDisplay = CGMainDisplayID();
     CGRect displayBounds = CGDisplayBounds(mainDisplay);
 
-    CGImageRef testImage = CGDisplayCreateImage(mainDisplay);
-    CGSize imageSize = CGSizeMake(CGImageGetWidth(testImage), CGImageGetHeight(testImage));
-    CGImageRelease(testImage);
-
-    CGSize actualPhysicalSize = imageSize;
-    CFArrayRef displayModes = CGDisplayCopyAllDisplayModes(mainDisplay, NULL);
-    if (displayModes) {
-        CFIndex modeCount = CFArrayGetCount(displayModes);
-        CGSize maxResolution = CGSizeMake(0, 0);
-
-        for (CFIndex i = 0; i < modeCount; i++) {
-            CGDisplayModeRef mode = (CGDisplayModeRef)CFArrayGetValueAtIndex(displayModes, i);
-            CGSize modeSize = CGSizeMake(CGDisplayModeGetWidth(mode), CGDisplayModeGetHeight(mode));
-
-            if (modeSize.width > maxResolution.width ||
-                (modeSize.width == maxResolution.width && modeSize.height > maxResolution.height)) {
-                maxResolution = modeSize;
-            }
-        }
-
-        if (maxResolution.width > imageSize.width * 1.5 || maxResolution.height > imageSize.height * 1.5) {
-            actualPhysicalSize = maxResolution;
-        }
-
-        CFRelease(displayModes);
+    CGSize logicalSize = displayBounds.size;
+    CGSize actualPhysicalSize = CGSizeMake(CGDisplayPixelsWide(mainDisplay), CGDisplayPixelsHigh(mainDisplay));
+    CGFloat scaleFactor = 1.0;
+    if (logicalSize.width > 0 && logicalSize.height > 0) {
+        CGFloat scaleX = actualPhysicalSize.width / logicalSize.width;
+        CGFloat scaleY = actualPhysicalSize.height / logicalSize.height;
+        scaleFactor = MAX(scaleX, scaleY);
     }
 
-    CGSize logicalSize = displayBounds.size;
-    CGFloat scaleFactor = MAX(actualPhysicalSize.width / logicalSize.width, actualPhysicalSize.height / logicalSize.height);
-
     return @{
         @"displayID": @(mainDisplay),
         @"logicalSize": [NSValue valueWithSize:NSMakeSize(logicalSize.width, logicalSize.height)],
@@ -26,12 +26,16 @@ extern "C" {
 bool startCameraRecording(NSString *outputPath, NSString *deviceId, NSError **error);
 bool stopCameraRecording();
 bool isCameraRecording();
+NSString *currentCameraRecordingPath();
+NSString *currentStandaloneAudioRecordingPath();
 
 NSArray<NSDictionary *> *listAudioCaptureDevices();
 bool startStandaloneAudioRecording(NSString *outputPath, NSString *preferredDeviceId, NSError **error);
 bool stopStandaloneAudioRecording();
 bool isStandaloneAudioRecording();
 bool hasAudioPermission();
+
+NSString *ScreenCaptureKitCurrentAudioPath(void);
 }
 
 // Cursor tracker function declarations
@@ -56,7 +60,7 @@ static bool g_isRecording = false;
 static bool g_usingStandaloneAudio = false;
 
 static bool startCameraIfRequested(bool captureCamera,
-                                   NSString *cameraOutputPath,
+                                   NSString **cameraOutputPathRef,
                                    NSString *cameraDeviceId,
                                    const std::string &screenOutputPath,
                                    int64_t sessionTimestampMs) {
@@ -64,7 +68,7 @@ static bool startCameraIfRequested(bool captureCamera,
         return true;
     }
 
-    NSString *resolvedOutputPath = cameraOutputPath;
+    NSString *resolvedOutputPath = cameraOutputPathRef ? *cameraOutputPathRef : nil;
     if (!resolvedOutputPath || [resolvedOutputPath length] == 0) {
         NSString *screenPath = [NSString stringWithUTF8String:screenOutputPath.c_str()];
         NSString *directory = nil;
@@ -91,6 +95,13 @@ static bool startCameraIfRequested(bool captureCamera,
         }
         return false;
     }
+    NSString *actualPath = currentCameraRecordingPath();
+    if (actualPath && [actualPath length] > 0) {
+        resolvedOutputPath = actualPath;
+        if (cameraOutputPathRef) {
+            *cameraOutputPathRef = actualPath;
+        }
+    }
 
     MRLog(@"🎥 Camera recording started (output: %@)", resolvedOutputPath);
     return true;
@@ -396,7 +407,7 @@ Napi::Value StartRecording(const Napi::CallbackInfo& info) {
     MRLog(@"🎬 RECORDING METHOD: ScreenCaptureKit");
     MRLog(@"✅ ScreenCaptureKit recording started successfully");
 
-    if (!startCameraIfRequested(captureCamera, cameraOutputPath, cameraDeviceId, outputPath, sessionTimestamp)) {
+    if (!startCameraIfRequested(captureCamera, &cameraOutputPath, cameraDeviceId, outputPath, sessionTimestamp)) {
        MRLog(@"❌ Camera start failed - stopping ScreenCaptureKit session");
        [ScreenCaptureKitRecorder stopRecording];
        g_isRecording = false;
@@ -474,7 +485,7 @@ Napi::Value StartRecording(const Napi::CallbackInfo& info) {
     MRLog(@"🎥 RECORDING METHOD: AVFoundation");
     MRLog(@"✅ AVFoundation recording started successfully");
 
-    if (!startCameraIfRequested(captureCamera, cameraOutputPath, cameraDeviceId, outputPath, sessionTimestamp)) {
+    if (!startCameraIfRequested(captureCamera, &cameraOutputPath, cameraDeviceId, outputPath, sessionTimestamp)) {
        MRLog(@"❌ Camera start failed - stopping AVFoundation session");
        stopAVFoundationRecording();
        g_isRecording = false;
@@ -724,92 +735,94 @@ Napi::Value GetCameraDevices(const Napi::CallbackInfo& info) {
     Napi::Array result = Napi::Array::New(env);
 
     @try {
-        NSArray<NSDictionary *> *devices = listCameraDevices();
-        if (!devices) {
-            return result;
-        }
-
-        NSUInteger index = 0;
-        for (id entry in devices) {
-            if (![entry isKindOfClass:[NSDictionary class]]) {
-                continue;
-            }
-
-            NSDictionary *camera = (NSDictionary *)entry;
-            Napi::Object cameraObj = Napi::Object::New(env);
-
-            NSString *identifier = camera[@"id"];
-            NSString *name = camera[@"name"];
-            NSString *model = camera[@"model"];
-            NSString *manufacturer = camera[@"manufacturer"];
-            NSString *position = camera[@"position"];
-            NSNumber *transportType = camera[@"transportType"];
-            NSNumber *isConnected = camera[@"isConnected"];
-            NSNumber *hasFlash = camera[@"hasFlash"];
-            NSNumber *supportsDepth = camera[@"supportsDepth"];
-
-            if (identifier && [identifier isKindOfClass:[NSString class]]) {
-                cameraObj.Set("id", Napi::String::New(env, [identifier UTF8String]));
-            } else {
-                cameraObj.Set("id", Napi::String::New(env, ""));
-            }
-
-            if (name && [name isKindOfClass:[NSString class]]) {
-                cameraObj.Set("name", Napi::String::New(env, [name UTF8String]));
-            } else {
-                cameraObj.Set("name", Napi::String::New(env, "Unknown Camera"));
-            }
-
-            if (model && [model isKindOfClass:[NSString class]]) {
-                cameraObj.Set("model", Napi::String::New(env, [model UTF8String]));
-            }
-
-            if (manufacturer && [manufacturer isKindOfClass:[NSString class]]) {
-                cameraObj.Set("manufacturer", Napi::String::New(env, [manufacturer UTF8String]));
-            }
-
-            if (position && [position isKindOfClass:[NSString class]]) {
-                cameraObj.Set("position", Napi::String::New(env, [position UTF8String]));
-            }
-
-            if (transportType && [transportType isKindOfClass:[NSNumber class]]) {
-                cameraObj.Set("transportType", Napi::Number::New(env, [transportType integerValue]));
-            }
-
-            if (isConnected && [isConnected isKindOfClass:[NSNumber class]]) {
-                cameraObj.Set("isConnected", Napi::Boolean::New(env, [isConnected boolValue]));
-            }
-
-            if (hasFlash && [hasFlash isKindOfClass:[NSNumber class]]) {
-                cameraObj.Set("hasFlash", Napi::Boolean::New(env, [hasFlash boolValue]));
-            }
-
-            if (supportsDepth && [supportsDepth isKindOfClass:[NSNumber class]]) {
-                cameraObj.Set("supportsDepth", Napi::Boolean::New(env, [supportsDepth boolValue]));
-            }
-
-            NSDictionary *maxResolution = camera[@"maxResolution"];
-            if (maxResolution && [maxResolution isKindOfClass:[NSDictionary class]]) {
-                Napi::Object maxResObj = Napi::Object::New(env);
-
-                NSNumber *width = maxResolution[@"width"];
-                NSNumber *height = maxResolution[@"height"];
-                NSNumber *frameRate = maxResolution[@"maxFrameRate"];
-
-                if (width && [width isKindOfClass:[NSNumber class]]) {
-                    maxResObj.Set("width", Napi::Number::New(env, [width integerValue]));
-                }
-                if (height && [height isKindOfClass:[NSNumber class]]) {
-                    maxResObj.Set("height", Napi::Number::New(env, [height integerValue]));
-                }
-                if (frameRate && [frameRate isKindOfClass:[NSNumber class]]) {
-                    maxResObj.Set("maxFrameRate", Napi::Number::New(env, [frameRate doubleValue]));
-                }
-
-                cameraObj.Set("maxResolution", maxResObj);
-            }
-
-            result[index++] = cameraObj;
-        }
+        @autoreleasepool {
+            NSArray<NSDictionary *> *devices = listCameraDevices();
+            if (!devices) {
+                return result;
+            }
+
+            NSUInteger index = 0;
+            for (id entry in devices) {
+                if (![entry isKindOfClass:[NSDictionary class]]) {
+                    continue;
+                }
+
+                NSDictionary *camera = (NSDictionary *)entry;
+                Napi::Object cameraObj = Napi::Object::New(env);
+
+                NSString *identifier = camera[@"id"];
+                NSString *name = camera[@"name"];
+                NSString *model = camera[@"model"];
+                NSString *manufacturer = camera[@"manufacturer"];
+                NSString *position = camera[@"position"];
+                NSNumber *transportType = camera[@"transportType"];
+                NSNumber *isConnected = camera[@"isConnected"];
+                NSNumber *hasFlash = camera[@"hasFlash"];
+                NSNumber *supportsDepth = camera[@"supportsDepth"];
+
+                if (identifier && [identifier isKindOfClass:[NSString class]]) {
+                    cameraObj.Set("id", Napi::String::New(env, [identifier UTF8String]));
+                } else {
+                    cameraObj.Set("id", Napi::String::New(env, ""));
+                }
+
+                if (name && [name isKindOfClass:[NSString class]]) {
+                    cameraObj.Set("name", Napi::String::New(env, [name UTF8String]));
+                } else {
+                    cameraObj.Set("name", Napi::String::New(env, "Unknown Camera"));
+                }
+
+                if (model && [model isKindOfClass:[NSString class]]) {
+                    cameraObj.Set("model", Napi::String::New(env, [model UTF8String]));
+                }
+
+                if (manufacturer && [manufacturer isKindOfClass:[NSString class]]) {
+                    cameraObj.Set("manufacturer", Napi::String::New(env, [manufacturer UTF8String]));
+                }
+
+                if (position && [position isKindOfClass:[NSString class]]) {
+                    cameraObj.Set("position", Napi::String::New(env, [position UTF8String]));
+                }
+
+                if (transportType && [transportType isKindOfClass:[NSNumber class]]) {
+                    cameraObj.Set("transportType", Napi::Number::New(env, [transportType integerValue]));
+                }
+
+                if (isConnected && [isConnected isKindOfClass:[NSNumber class]]) {
+                    cameraObj.Set("isConnected", Napi::Boolean::New(env, [isConnected boolValue]));
+                }
+
+                if (hasFlash && [hasFlash isKindOfClass:[NSNumber class]]) {
+                    cameraObj.Set("hasFlash", Napi::Boolean::New(env, [hasFlash boolValue]));
+                }
+
+                if (supportsDepth && [supportsDepth isKindOfClass:[NSNumber class]]) {
+                    cameraObj.Set("supportsDepth", Napi::Boolean::New(env, [supportsDepth boolValue]));
+                }
+
+                NSDictionary *maxResolution = camera[@"maxResolution"];
+                if (maxResolution && [maxResolution isKindOfClass:[NSDictionary class]]) {
+                    Napi::Object maxResObj = Napi::Object::New(env);
+
+                    NSNumber *width = maxResolution[@"width"];
+                    NSNumber *height = maxResolution[@"height"];
+                    NSNumber *frameRate = maxResolution[@"maxFrameRate"];
+
+                    if (width && [width isKindOfClass:[NSNumber class]]) {
+                        maxResObj.Set("width", Napi::Number::New(env, [width integerValue]));
+                    }
+                    if (height && [height isKindOfClass:[NSNumber class]]) {
+                        maxResObj.Set("height", Napi::Number::New(env, [height integerValue]));
+                    }
+                    if (frameRate && [frameRate isKindOfClass:[NSNumber class]]) {
+                        maxResObj.Set("maxFrameRate", Napi::Number::New(env, [frameRate doubleValue]));
+                    }
+
+                    cameraObj.Set("maxResolution", maxResObj);
+                }
+
+                result[index++] = cameraObj;
+            }
+        }
 
     return result;
@@ -819,6 +832,43 @@ Napi::Value GetCameraDevices(const Napi::CallbackInfo& info) {
     }
 }
 
+Napi::Value GetCameraRecordingPath(const Napi::CallbackInfo& info) {
+    Napi::Env env = info.Env();
+    @try {
+        NSString *path = currentCameraRecordingPath();
+        if (!path || [path length] == 0) {
+            return env.Null();
+        }
+        return Napi::String::New(env, [path UTF8String]);
+    } @catch (NSException *exception) {
+        NSLog(@"❌ Exception while reading camera output path: %@", exception.reason);
+        return env.Null();
+    }
+}
+
+Napi::Value GetAudioRecordingPath(const Napi::CallbackInfo& info) {
+    Napi::Env env = info.Env();
+    @try {
+        NSString *path = nil;
+        if (@available(macOS 12.3, *)) {
+            path = ScreenCaptureKitCurrentAudioPath();
+        }
+        if ([path isKindOfClass:[NSArray class]]) {
+            path = [(NSArray *)path firstObject];
+        }
+        if (!path || [path length] == 0) {
+            path = currentStandaloneAudioRecordingPath();
+        }
+        if (!path || [path length] == 0) {
+            return env.Null();
+        }
+        return Napi::String::New(env, [path UTF8String]);
+    } @catch (NSException *exception) {
+        NSLog(@"❌ Exception while reading audio output path: %@", exception.reason);
+        return env.Null();
+    }
+}
+
 // NAPI Function: Get Displays
 Napi::Value GetDisplays(const Napi::CallbackInfo& info) {
     Napi::Env env = info.Env();
@@ -1222,6 +1272,8 @@ Napi::Object Init(Napi::Env env, Napi::Object exports) {
 
     exports.Set(Napi::String::New(env, "getAudioDevices"), Napi::Function::New(env, GetAudioDevices));
     exports.Set(Napi::String::New(env, "getCameraDevices"), Napi::Function::New(env, GetCameraDevices));
+    exports.Set(Napi::String::New(env, "getCameraRecordingPath"), Napi::Function::New(env, GetCameraRecordingPath));
+    exports.Set(Napi::String::New(env, "getAudioRecordingPath"), Napi::Function::New(env, GetAudioRecordingPath));
     exports.Set(Napi::String::New(env, "getDisplays"), Napi::Function::New(env, GetDisplays));
     exports.Set(Napi::String::New(env, "getWindows"), Napi::Function::New(env, GetWindows));
     exports.Set(Napi::String::New(env, "getRecordingStatus"), Napi::Function::New(env, GetRecordingStatus));
@@ -19,7 +19,7 @@ static id g_audioStreamOutput = nil;
 
 static AVAssetWriter *g_videoWriter = nil;
 static AVAssetWriterInput *g_videoInput = nil;
-static AVAssetWriterInputPixelBufferAdaptor *g_pixelBufferAdaptor = nil;
+static CFTypeRef g_pixelBufferAdaptorRef = NULL;
 static CMTime g_videoStartTime = kCMTimeInvalid;
 static BOOL g_videoWriterStarted = NO;
 
@@ -34,6 +34,33 @@ static NSInteger g_configuredSampleRate = 48000;
 static NSInteger g_configuredChannelCount = 2;
 
 static void CleanupWriters(void);
+static AVAssetWriterInputPixelBufferAdaptor * _Nullable CurrentPixelBufferAdaptor(void) {
+    if (!g_pixelBufferAdaptorRef) {
+        return nil;
+    }
+    return (__bridge AVAssetWriterInputPixelBufferAdaptor *)g_pixelBufferAdaptorRef;
+}
+
+static NSString *MRNormalizePath(id value) {
+    if (!value || value == (id)kCFNull) {
+        return nil;
+    }
+    if ([value isKindOfClass:[NSString class]]) {
+        return (NSString *)value;
+    }
+    if ([value isKindOfClass:[NSURL class]]) {
+        return [(NSURL *)value path];
+    }
+    if ([value isKindOfClass:[NSArray class]]) {
+        for (id entry in (NSArray *)value) {
+            NSString *candidate = MRNormalizePath(entry);
+            if (candidate.length > 0) {
+                return candidate;
+            }
+        }
+    }
+    return nil;
+}
 
 static void FinishWriter(AVAssetWriter *writer, AVAssetWriterInput *input) {
     if (!writer) {
@@ -57,7 +84,10 @@ static void CleanupWriters(void) {
     FinishWriter(g_videoWriter, g_videoInput);
     g_videoWriter = nil;
     g_videoInput = nil;
-    g_pixelBufferAdaptor = nil;
+    if (g_pixelBufferAdaptorRef) {
+        CFRelease(g_pixelBufferAdaptorRef);
+        g_pixelBufferAdaptorRef = NULL;
+    }
     g_videoWriterStarted = NO;
     g_videoStartTime = kCMTimeInvalid;
 }
@@ -74,6 +104,20 @@ static void CleanupWriters(void) {
 @interface PureScreenCaptureDelegate : NSObject <SCStreamDelegate>
 @end
 
+extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
+    if (!g_audioOutputPath) {
+        return nil;
+    }
+    if ([g_audioOutputPath isKindOfClass:[NSArray class]]) {
+        id first = [(NSArray *)g_audioOutputPath firstObject];
+        if ([first isKindOfClass:[NSString class]]) {
+            return first;
+        }
+        return nil;
+    }
+    return g_audioOutputPath;
+}
+
 @implementation PureScreenCaptureDelegate
 - (void)stream:(SCStream * API_AVAILABLE(macos(12.3)))stream didStopWithError:(NSError *)error API_AVAILABLE(macos(12.3)) {
     MRLog(@"🛑 Pure ScreenCapture stream stopped");
@@ -144,12 +188,27 @@ static void CleanupWriters(void) {
         return;
     }
 
-    if (!g_pixelBufferAdaptor) {
+    AVAssetWriterInputPixelBufferAdaptor *adaptorCandidate = CurrentPixelBufferAdaptor();
+    if ([adaptorCandidate isKindOfClass:[NSArray class]]) {
+        id first = [(NSArray *)adaptorCandidate firstObject];
+        if ([first isKindOfClass:[AVAssetWriterInputPixelBufferAdaptor class]]) {
+            adaptorCandidate = first;
+            if (g_pixelBufferAdaptorRef) {
+                CFRelease(g_pixelBufferAdaptorRef);
+            }
+            g_pixelBufferAdaptorRef = CFBridgingRetain(adaptorCandidate);
+        }
+    }
+    if (![adaptorCandidate isKindOfClass:[AVAssetWriterInputPixelBufferAdaptor class]]) {
+        if (adaptorCandidate) {
+            MRLog(@"⚠️ Pixel buffer adaptor invalid (%@) – skipping frame", NSStringFromClass([adaptorCandidate class]));
+        }
         NSLog(@"❌ Pixel buffer adaptor is nil – cannot append video frames");
         return;
     }
 
-    BOOL appended = [g_pixelBufferAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:presentationTime];
+    AVAssetWriterInputPixelBufferAdaptor *adaptor = adaptorCandidate;
+    BOOL appended = [adaptor appendPixelBuffer:pixelBuffer withPresentationTime:presentationTime];
     if (!appended) {
         NSLog(@"⚠️ Failed appending pixel buffer: %@", g_videoWriter.error);
     }
@@ -211,7 +270,13 @@ static void CleanupWriters(void) {
 @implementation ScreenCaptureKitRecorder
 
 + (BOOL)prepareVideoWriterWithWidth:(NSInteger)width height:(NSInteger)height error:(NSError **)error {
+    MRLog(@"🎬 Preparing video writer %ldx%ld", (long)width, (long)height);
     if (!g_outputPath) {
+        MRLog(@"❌ Video writer failed: missing output path");
+        return NO;
+    }
+    if (width <= 0 || height <= 0) {
+        MRLog(@"❌ Video writer invalid dimensions %ldx%ld", (long)width, (long)height);
         return NO;
     }
 
@@ -220,6 +285,7 @@ static void CleanupWriters(void) {
 
     g_videoWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:AVFileTypeQuickTimeMovie error:error];
     if (!g_videoWriter || (error && *error)) {
+        MRLog(@"❌ Failed creating video writer: %@", error && *error ? (*error).localizedDescription : @"unknown");
         return NO;
     }
 
@@ -238,15 +304,16 @@ static void CleanupWriters(void) {
     g_videoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
     g_videoInput.expectsMediaDataInRealTime = YES;
 
-    g_pixelBufferAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:g_videoInput sourcePixelBufferAttributes:@{
+    AVAssetWriterInputPixelBufferAdaptor *pixelAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:g_videoInput sourcePixelBufferAttributes:@{
         (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA),
         (NSString *)kCVPixelBufferWidthKey: @(width),
         (NSString *)kCVPixelBufferHeightKey: @(height),
        (NSString *)kCVPixelBufferCGImageCompatibilityKey: @YES,
        (NSString *)kCVPixelBufferCGBitmapContextCompatibilityKey: @YES
     }];
-
+
     if (![g_videoWriter canAddInput:g_videoInput]) {
+        MRLog(@"❌ Cannot add video input to writer");
         if (error) {
             *error = [NSError errorWithDomain:@"ScreenCaptureKitRecorder" code:-100 userInfo:@{NSLocalizedDescriptionKey: @"Cannot add video input to writer"}];
         }
@@ -254,9 +321,17 @@ static void CleanupWriters(void) {
     }
 
     [g_videoWriter addInput:g_videoInput];
+    if (g_pixelBufferAdaptorRef) {
+        CFRelease(g_pixelBufferAdaptorRef);
+        g_pixelBufferAdaptorRef = NULL;
+    }
+    if (pixelAdaptor) {
+        g_pixelBufferAdaptorRef = CFBridgingRetain(pixelAdaptor);
+    }
     g_videoWriterStarted = NO;
     g_videoStartTime = kCMTimeInvalid;
-
+    MRLog(@"✅ Video writer ready %ldx%ld", (long)width, (long)height);
+
     return YES;
 }
 
@@ -280,34 +355,76 @@ static void CleanupWriters(void) {
     g_configuredSampleRate = (NSInteger)asbd->mSampleRate;
     g_configuredChannelCount = asbd->mChannelsPerFrame;
 
-    NSURL *audioURL = [NSURL fileURLWithPath:g_audioOutputPath];
+    NSString *originalPath = g_audioOutputPath ?: @"";
+    NSURL *audioURL = [NSURL fileURLWithPath:originalPath];
     [[NSFileManager defaultManager] removeItemAtURL:audioURL error:nil];
 
     NSError *writerError = nil;
-    AVFileType fileType = AVFileTypeQuickTimeMovie;
+    AVFileType requestedFileType = AVFileTypeQuickTimeMovie;
+    BOOL requestedWebM = NO;
     if (@available(macOS 15.0, *)) {
-        fileType = @"public.webm";
+        requestedFileType = @"public.webm";
+        requestedWebM = YES;
+    }
+
+    @try {
+        g_audioWriter = [[AVAssetWriter alloc] initWithURL:audioURL fileType:requestedFileType error:&writerError];
+    } @catch (NSException *exception) {
+        NSDictionary *info = @{
+            NSLocalizedDescriptionKey: exception.reason ?: @"Failed to initialize audio writer"
+        };
+        writerError = [NSError errorWithDomain:@"ScreenCaptureKitRecorder" code:-201 userInfo:info];
+        g_audioWriter = nil;
+    }
+
+    if ((!g_audioWriter || writerError) && requestedWebM) {
+        MRLog(@"⚠️ ScreenCaptureKit audio writer unavailable (%@) – falling back to QuickTime container", writerError.localizedDescription);
+        NSString *fallbackPath = [[originalPath stringByDeletingPathExtension] stringByAppendingPathExtension:@"mov"];
+        if (!fallbackPath || [fallbackPath length] == 0) {
+            fallbackPath = [originalPath stringByAppendingString:@".mov"];
+        }
+        [[NSFileManager defaultManager] removeItemAtPath:fallbackPath error:nil];
+        NSURL *fallbackURL = [NSURL fileURLWithPath:fallbackPath];
+        g_audioOutputPath = fallbackPath;
+        writerError = nil;
+        @try {
+            g_audioWriter = [[AVAssetWriter alloc] initWithURL:fallbackURL fileType:AVFileTypeQuickTimeMovie error:&writerError];
+        } @catch (NSException *exception) {
+            NSDictionary *info = @{
+                NSLocalizedDescriptionKey: exception.reason ?: @"Failed to initialize audio writer"
+            };
+            writerError = [NSError errorWithDomain:@"ScreenCaptureKitRecorder" code:-202 userInfo:info];
+            g_audioWriter = nil;
+        }
+        audioURL = fallbackURL;
     }
 
-    g_audioWriter = [[AVAssetWriter alloc] initWithURL:audioURL fileType:fileType error:&writerError];
     if (!g_audioWriter || writerError) {
         NSLog(@"❌ Failed to create audio writer: %@", writerError);
         return NO;
     }
 
-    AudioChannelLayout stereoLayout = {
-        .mChannelLayoutTag = kAudioChannelLayoutTag_Stereo,
-        .mChannelBitmap = 0,
-        .mNumberChannelDescriptions = 0
-    };
-
-    NSDictionary *audioSettings = @{
+    NSInteger channelCount = MAX(1, g_configuredChannelCount);
+    AudioChannelLayout layout = {0};
+    size_t layoutSize = 0;
+    if (channelCount == 1) {
+        layout.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
+        layoutSize = sizeof(AudioChannelLayout);
+    } else if (channelCount == 2) {
+        layout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
+        layoutSize = sizeof(AudioChannelLayout);
+    }
+
+    NSMutableDictionary *audioSettings = [@{
         AVFormatIDKey: @(kAudioFormatMPEG4AAC),
         AVSampleRateKey: @(g_configuredSampleRate),
-        AVNumberOfChannelsKey: @(MAX(1, g_configuredChannelCount)),
-        AVChannelLayoutKey: [NSData dataWithBytes:&stereoLayout length:sizeof(AudioChannelLayout)],
+        AVNumberOfChannelsKey: @(channelCount),
         AVEncoderBitRateKey: @(192000)
-    };
+    } mutableCopy];
+
+    if (layoutSize > 0) {
+        audioSettings[AVChannelLayoutKey] = [NSData dataWithBytes:&layout length:layoutSize];
+    }
 
     g_audioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioSettings];
     g_audioInput.expectsMediaDataInRealTime = YES;
@@ -356,7 +473,7 @@ static void CleanupWriters(void) {
     NSNumber *includeMicrophone = config[@"includeMicrophone"];
     NSNumber *includeSystemAudio = config[@"includeSystemAudio"];
     NSString *microphoneDeviceId = config[@"microphoneDeviceId"];
-    NSString *audioOutputPath = config[@"audioOutputPath"];
+    NSString *audioOutputPath = MRNormalizePath(config[@"audioOutputPath"]);
     NSNumber *sessionTimestampNumber = config[@"sessionTimestamp"];
 
     MRLog(@"🎬 Starting PURE ScreenCaptureKit recording (NO AVFoundation)");
@@ -680,6 +797,10 @@ static void CleanupWriters(void) {
     g_audioStreamOutput = nil;
     g_videoQueue = nil;
     g_audioQueue = nil;
+    if (g_pixelBufferAdaptorRef) {
+        CFRelease(g_pixelBufferAdaptorRef);
+        g_pixelBufferAdaptorRef = NULL;
+    }
     g_audioOutputPath = nil;
     g_shouldCaptureAudio = NO;