node-mac-recorder 2.21.0 → 2.21.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -24,6 +24,8 @@ This project relies on system-level frameworks (ScreenCaptureKit, AVFoundation)
24
24
  > `NSCameraUseContinuityCameraDeviceType` removes the runtime warning when Continuity Camera devices are detected. macOS 14+ expects this key whenever Continuity Camera APIs are used.
25
25
  > The `com.apple.security.*` entitlements are only required for sandboxed / hardened runtime builds. Omit them if your distribution does not use the macOS sandbox.
26
26
 
27
+ During local development you can temporarily bypass the Continuity Camera check by setting the `ALLOW_CONTINUITY_CAMERA` environment variable (the check only tests that the variable is set — e.g. `ALLOW_CONTINUITY_CAMERA=1`), but Apple still recommends setting the Info.plist key for shipping applications.
28
+
27
29
  ### Screen recording
28
30
 
29
31
  Screen recording permissions are granted by the user via the OS **Screen Recording** privacy panel. There is no `Info.plist` key to request it, but your app should guide the user to approve it.
package/index.js CHANGED
@@ -425,12 +425,12 @@ class MacRecorder extends EventEmitter {
425
425
  this.sessionTimestamp = sessionTimestamp;
426
426
  const outputDir = path.dirname(outputPath);
427
427
  const cursorFilePath = path.join(outputDir, `temp_cursor_${sessionTimestamp}.json`);
428
- const cameraFilePath =
428
+ let cameraFilePath =
429
429
  this.options.captureCamera === true
430
430
  ? path.join(outputDir, `temp_camera_${sessionTimestamp}.webm`)
431
431
  : null;
432
432
  const captureAudio = this.options.includeMicrophone === true || this.options.includeSystemAudio === true;
433
- const audioFilePath = captureAudio
433
+ let audioFilePath = captureAudio
434
434
  ? path.join(outputDir, `temp_audio_${sessionTimestamp}.webm`)
435
435
  : null;
436
436
 
@@ -451,11 +451,11 @@ class MacRecorder extends EventEmitter {
451
451
  }
452
452
 
453
453
  // Native kayıt başlat
454
- const recordingOptions = {
455
- includeMicrophone: this.options.includeMicrophone === true, // Only if explicitly enabled
456
- includeSystemAudio: this.options.includeSystemAudio === true, // Only if explicitly enabled
457
- captureCursor: this.options.captureCursor || false,
458
- displayId: this.options.displayId || null, // null = ana ekran
454
+ let recordingOptions = {
455
+ includeMicrophone: this.options.includeMicrophone === true, // Only if explicitly enabled
456
+ includeSystemAudio: this.options.includeSystemAudio === true, // Only if explicitly enabled
457
+ captureCursor: this.options.captureCursor || false,
458
+ displayId: this.options.displayId || null, // null = ana ekran
459
459
  windowId: this.options.windowId || null, // null = tam ekran
460
460
  audioDeviceId: this.options.audioDeviceId || null, // null = default device
461
461
  systemAudioDeviceId: this.options.systemAudioDeviceId || null, // null = auto-detect system audio device
@@ -464,12 +464,18 @@ class MacRecorder extends EventEmitter {
464
464
  sessionTimestamp,
465
465
  };
466
466
 
467
- if (cameraFilePath) {
468
- recordingOptions.cameraOutputPath = cameraFilePath;
469
- }
467
+ if (cameraFilePath) {
468
+ recordingOptions = {
469
+ ...recordingOptions,
470
+ cameraOutputPath: cameraFilePath,
471
+ };
472
+ }
470
473
 
471
- if (audioFilePath) {
472
- recordingOptions.audioOutputPath = audioFilePath;
474
+ if (audioFilePath) {
475
+ recordingOptions = {
476
+ ...recordingOptions,
477
+ audioOutputPath: audioFilePath,
478
+ };
473
479
  }
474
480
 
475
481
  // Manuel captureArea varsa onu kullan
@@ -494,6 +500,32 @@ class MacRecorder extends EventEmitter {
494
500
  }
495
501
 
496
502
  if (success) {
503
+ if (this.options.captureCamera === true) {
504
+ try {
505
+ const nativeCameraPath = nativeBinding.getCameraRecordingPath
506
+ ? nativeBinding.getCameraRecordingPath()
507
+ : null;
508
+ if (typeof nativeCameraPath === "string" && nativeCameraPath.length > 0) {
509
+ this.cameraCaptureFile = nativeCameraPath;
510
+ cameraFilePath = nativeCameraPath;
511
+ }
512
+ } catch (pathError) {
513
+ console.warn("Camera output path sync failed:", pathError.message);
514
+ }
515
+ }
516
+ if (captureAudio) {
517
+ try {
518
+ const nativeAudioPath = nativeBinding.getAudioRecordingPath
519
+ ? nativeBinding.getAudioRecordingPath()
520
+ : null;
521
+ if (typeof nativeAudioPath === "string" && nativeAudioPath.length > 0) {
522
+ this.audioCaptureFile = nativeAudioPath;
523
+ audioFilePath = nativeAudioPath;
524
+ }
525
+ } catch (pathError) {
526
+ console.warn("Audio output path sync failed:", pathError.message);
527
+ }
528
+ }
497
529
  this.isRecording = true;
498
530
  this.recordingStartTime = Date.now();
499
531
 
@@ -664,6 +696,33 @@ class MacRecorder extends EventEmitter {
664
696
  success = true; // Assume success to avoid throwing
665
697
  }
666
698
 
699
+ if (this.options.captureCamera === true) {
700
+ try {
701
+ const nativeCameraPath = nativeBinding.getCameraRecordingPath
702
+ ? nativeBinding.getCameraRecordingPath()
703
+ : null;
704
+ if (typeof nativeCameraPath === "string" && nativeCameraPath.length > 0) {
705
+ this.cameraCaptureFile = nativeCameraPath;
706
+ }
707
+ } catch (pathError) {
708
+ console.warn("Camera output path sync failed:", pathError.message);
709
+ }
710
+ }
711
+
712
+ const captureAudio = this.options.includeMicrophone === true || this.options.includeSystemAudio === true;
713
+ if (captureAudio) {
714
+ try {
715
+ const nativeAudioPath = nativeBinding.getAudioRecordingPath
716
+ ? nativeBinding.getAudioRecordingPath()
717
+ : null;
718
+ if (typeof nativeAudioPath === "string" && nativeAudioPath.length > 0) {
719
+ this.audioCaptureFile = nativeAudioPath;
720
+ }
721
+ } catch (pathError) {
722
+ console.warn("Audio output path sync failed:", pathError.message);
723
+ }
724
+ }
725
+
667
726
  if (this.cameraCaptureActive) {
668
727
  this.cameraCaptureActive = false;
669
728
  this.emit("cameraCaptureStopped", {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "node-mac-recorder",
3
- "version": "2.21.0",
3
+ "version": "2.21.2",
4
4
  "description": "Native macOS screen recording package for Node.js applications",
5
5
  "main": "index.js",
6
6
  "keywords": [
@@ -55,13 +55,49 @@ static dispatch_queue_t g_audioCaptureQueue = nil;
55
55
  NSURL *outputURL = [NSURL fileURLWithPath:self.outputPath];
56
56
  [[NSFileManager defaultManager] removeItemAtURL:outputURL error:nil];
57
57
 
58
- AVFileType fileType = AVFileTypeQuickTimeMovie;
58
+ NSError *writerError = nil;
59
+ AVFileType requestedFileType = AVFileTypeQuickTimeMovie;
60
+ BOOL requestedWebM = NO;
59
61
  if (@available(macOS 15.0, *)) {
60
- fileType = @"public.webm";
62
+ requestedFileType = @"public.webm";
63
+ requestedWebM = YES;
64
+ }
65
+
66
+ @try {
67
+ self.writer = [[AVAssetWriter alloc] initWithURL:outputURL fileType:requestedFileType error:&writerError];
68
+ } @catch (NSException *exception) {
69
+ NSDictionary *info = @{
70
+ NSLocalizedDescriptionKey: exception.reason ?: @"Failed to initialize audio writer"
71
+ };
72
+ writerError = [NSError errorWithDomain:@"NativeAudioRecorder" code:-30 userInfo:info];
73
+ self.writer = nil;
61
74
  }
62
75
 
63
- self.writer = [[AVAssetWriter alloc] initWithURL:outputURL fileType:fileType error:error];
64
- if (!self.writer || (*error)) {
76
+ if ((!self.writer || writerError) && requestedWebM) {
77
+ NSString *fallbackPath = [[self.outputPath stringByDeletingPathExtension] stringByAppendingPathExtension:@"mov"];
78
+ if (!fallbackPath || [fallbackPath length] == 0) {
79
+ fallbackPath = [self.outputPath stringByAppendingString:@".mov"];
80
+ }
81
+ [[NSFileManager defaultManager] removeItemAtPath:fallbackPath error:nil];
82
+ NSURL *fallbackURL = [NSURL fileURLWithPath:fallbackPath];
83
+ self.outputPath = fallbackPath;
84
+ writerError = nil;
85
+ @try {
86
+ self.writer = [[AVAssetWriter alloc] initWithURL:fallbackURL fileType:AVFileTypeQuickTimeMovie error:&writerError];
87
+ } @catch (NSException *exception) {
88
+ NSDictionary *info = @{
89
+ NSLocalizedDescriptionKey: exception.reason ?: @"Failed to initialize audio writer"
90
+ };
91
+ writerError = [NSError errorWithDomain:@"NativeAudioRecorder" code:-31 userInfo:info];
92
+ self.writer = nil;
93
+ }
94
+ outputURL = fallbackURL;
95
+ }
96
+
97
+ if (!self.writer || writerError) {
98
+ if (error) {
99
+ *error = writerError;
100
+ }
65
101
  return NO;
66
102
  }
67
103
 
@@ -70,20 +106,28 @@ static dispatch_queue_t g_audioCaptureQueue = nil;
70
106
 
71
107
  double sampleRate = asbd ? asbd->mSampleRate : 44100.0;
72
108
  NSUInteger channels = asbd ? asbd->mChannelsPerFrame : 2;
73
-
74
- AudioChannelLayout stereoLayout = {
75
- .mChannelLayoutTag = kAudioChannelLayoutTag_Stereo,
76
- .mChannelBitmap = 0,
77
- .mNumberChannelDescriptions = 0
78
- };
79
-
80
- NSDictionary *audioSettings = @{
109
+ channels = MAX((NSUInteger)1, channels);
110
+
111
+ AudioChannelLayout layout = {0};
112
+ size_t layoutSize = 0;
113
+ if (channels == 1) {
114
+ layout.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
115
+ layoutSize = sizeof(AudioChannelLayout);
116
+ } else if (channels == 2) {
117
+ layout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
118
+ layoutSize = sizeof(AudioChannelLayout);
119
+ }
120
+
121
+ NSMutableDictionary *audioSettings = [@{
81
122
  AVFormatIDKey: @(kAudioFormatMPEG4AAC),
82
123
  AVSampleRateKey: @(sampleRate),
83
- AVNumberOfChannelsKey: @(MAX((NSUInteger)1, channels)),
84
- AVChannelLayoutKey: [NSData dataWithBytes:&stereoLayout length:sizeof(AudioChannelLayout)],
124
+ AVNumberOfChannelsKey: @(channels),
85
125
  AVEncoderBitRateKey: @(192000)
86
- };
126
+ } mutableCopy];
127
+
128
+ if (layoutSize > 0) {
129
+ audioSettings[AVChannelLayoutKey] = [NSData dataWithBytes:&layout length:layoutSize];
130
+ }
87
131
 
88
132
  self.writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioSettings];
89
133
  self.writerInput.expectsMediaDataInRealTime = YES;
@@ -318,4 +362,11 @@ void requestAudioPermission(void (^completion)(BOOL granted)) {
318
362
  [AVCaptureDevice requestAccessForMediaType:AVMediaTypeAudio completionHandler:completion];
319
363
  }
320
364
 
365
+ NSString *currentStandaloneAudioRecordingPath() {
366
+ if (!g_audioRecorder) {
367
+ return nil;
368
+ }
369
+ return g_audioRecorder.outputPath;
370
+ }
371
+
321
372
  }
@@ -8,6 +8,21 @@
8
8
  static AVVideoCodecType const AVVideoCodecTypeVP9 = @"vp09";
9
9
  #endif
10
10
 
11
+ static BOOL MRAllowContinuityCamera() {
12
+ static dispatch_once_t onceToken;
13
+ static BOOL allowContinuity = NO;
14
+ dispatch_once(&onceToken, ^{
15
+ id continuityKey = [[NSBundle mainBundle] objectForInfoDictionaryKey:@"NSCameraUseContinuityCameraDeviceType"];
16
+ if ([continuityKey respondsToSelector:@selector(boolValue)] && [continuityKey boolValue]) {
17
+ allowContinuity = YES;
18
+ }
19
+ if (!allowContinuity && getenv("ALLOW_CONTINUITY_CAMERA")) {
20
+ allowContinuity = YES;
21
+ }
22
+ });
23
+ return allowContinuity;
24
+ }
25
+
11
26
  static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
12
27
  if (!device) {
13
28
  return NO;
@@ -27,16 +42,28 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
27
42
  BOOL nameMentionsContinuity = [localizedName rangeOfString:@"Continuity" options:NSCaseInsensitiveSearch].location != NSNotFound ||
28
43
  [modelId rangeOfString:@"Continuity" options:NSCaseInsensitiveSearch].location != NSNotFound;
29
44
 
30
- if ([deviceType isEqualToString:AVCaptureDeviceTypeExternal] && nameMentionsContinuity) {
31
- return YES;
45
+ if (@available(macOS 14.0, *)) {
46
+ if ([deviceType isEqualToString:AVCaptureDeviceTypeExternal] && nameMentionsContinuity) {
47
+ return YES;
48
+ }
32
49
  }
33
50
 
34
- if ([deviceType isEqualToString:AVCaptureDeviceTypeExternal] &&
35
- [manufacturer rangeOfString:@"Apple" options:NSCaseInsensitiveSearch].location != NSNotFound &&
36
- nameMentionsContinuity) {
51
+ if ([deviceType isEqualToString:AVCaptureDeviceTypeExternalUnknown] && nameMentionsContinuity) {
37
52
  return YES;
38
53
  }
39
54
 
55
+ BOOL isApple = [manufacturer rangeOfString:@"Apple" options:NSCaseInsensitiveSearch].location != NSNotFound;
56
+ if (isApple && nameMentionsContinuity) {
57
+ if (@available(macOS 14.0, *)) {
58
+ if ([deviceType isEqualToString:AVCaptureDeviceTypeExternal]) {
59
+ return YES;
60
+ }
61
+ }
62
+ if ([deviceType isEqualToString:AVCaptureDeviceTypeExternalUnknown]) {
63
+ return YES;
64
+ }
65
+ }
66
+
40
67
  return NO;
41
68
  }
42
69
 
@@ -80,14 +107,21 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
80
107
  NSMutableArray<NSDictionary *> *devicesInfo = [NSMutableArray array];
81
108
 
82
109
  NSMutableArray<AVCaptureDeviceType> *deviceTypes = [NSMutableArray array];
83
- [deviceTypes addObject:AVCaptureDeviceTypeExternalUnknown];
110
+ BOOL allowContinuity = MRAllowContinuityCamera();
111
+
84
112
  if (@available(macOS 10.15, *)) {
85
113
  [deviceTypes addObject:AVCaptureDeviceTypeBuiltInWideAngleCamera];
114
+ } else {
115
+ [deviceTypes addObject:AVCaptureDeviceTypeBuiltInWideAngleCamera];
116
+ }
117
+
118
+ if (allowContinuity) {
86
119
  if (@available(macOS 14.0, *)) {
87
120
  [deviceTypes addObject:AVCaptureDeviceTypeContinuityCamera];
121
+ [deviceTypes addObject:AVCaptureDeviceTypeExternal];
122
+ } else {
123
+ [deviceTypes addObject:AVCaptureDeviceTypeExternalUnknown];
88
124
  }
89
- } else {
90
- [deviceTypes addObject:AVCaptureDeviceTypeBuiltInWideAngleCamera];
91
125
  }
92
126
 
93
127
  AVCaptureDeviceDiscoverySession *discoverySession =
@@ -97,6 +131,10 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
97
131
 
98
132
  for (AVCaptureDevice *device in discoverySession.devices) {
99
133
  BOOL continuityCamera = MRIsContinuityCamera(device);
134
+ if (continuityCamera && !allowContinuity) {
135
+ // Skip Continuity cameras when entitlement/env flag is missing
136
+ continue;
137
+ }
100
138
 
101
139
  // Determine the best (maximum) resolution format for this device
102
140
  CMVideoDimensions bestDimensions = {0, 0};
@@ -264,12 +302,25 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
264
302
  }
265
303
  }
266
304
 
267
- if (bestRange) {
268
- double clampedRate = MIN(bestRange.maxFrameRate, MAX(bestRange.minFrameRate, targetFrameRate));
269
- CMTime frameDuration = CMTimeMake(1, (int32_t)round(clampedRate));
270
- device.activeVideoMinFrameDuration = frameDuration;
271
- device.activeVideoMaxFrameDuration = frameDuration;
305
+ if (bestRange) {
306
+ double clampedRate = MIN(bestRange.maxFrameRate, MAX(bestRange.minFrameRate, targetFrameRate));
307
+ double durationSeconds = clampedRate > 0.0 ? (1.0 / clampedRate) : CMTimeGetSeconds(bestRange.maxFrameDuration);
308
+ int32_t preferredTimescale = bestRange.minFrameDuration.timescale > 0 ? bestRange.minFrameDuration.timescale : 600;
309
+ CMTime desiredDuration = CMTimeMakeWithSeconds(durationSeconds, preferredTimescale);
310
+
311
+ if (!CMTIME_IS_NUMERIC(desiredDuration)) {
312
+ desiredDuration = bestRange.maxFrameDuration;
313
+ }
314
+
315
+ if (CMTimeCompare(desiredDuration, bestRange.minFrameDuration) < 0) {
316
+ desiredDuration = bestRange.minFrameDuration;
317
+ } else if (CMTimeCompare(desiredDuration, bestRange.maxFrameDuration) > 0) {
318
+ desiredDuration = bestRange.maxFrameDuration;
272
319
  }
320
+
321
+ device.activeVideoMinFrameDuration = desiredDuration;
322
+ device.activeVideoMaxFrameDuration = desiredDuration;
323
+ }
273
324
  } @catch (NSException *exception) {
274
325
  if (error) {
275
326
  NSDictionary *userInfo = @{
@@ -298,6 +349,7 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
298
349
 
299
350
  NSString *extension = outputURL.pathExtension.lowercaseString;
300
351
  BOOL wantsWebM = [extension isEqualToString:@"webm"];
352
+ NSString *originalPath = outputURL.path ?: @"";
301
353
 
302
354
  NSString *codec = AVVideoCodecTypeH264;
303
355
  AVFileType fileType = AVFileTypeQuickTimeMovie;
@@ -315,7 +367,42 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
315
367
  }
316
368
 
317
369
  NSError *writerError = nil;
318
- self.assetWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:fileType error:&writerError];
370
+ @try {
371
+ self.assetWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:fileType error:&writerError];
372
+ } @catch (NSException *exception) {
373
+ NSDictionary *info = @{
374
+ NSLocalizedDescriptionKey: exception.reason ?: @"Failed to initialize asset writer"
375
+ };
376
+ writerError = [NSError errorWithDomain:@"CameraRecorder" code:-100 userInfo:info];
377
+ self.assetWriter = nil;
378
+ }
379
+
380
+ if ((!self.assetWriter || writerError) && wantsWebM) {
381
+ MRLog(@"⚠️ CameraRecorder: WebM writer unavailable (%@) – falling back to QuickTime container", writerError.localizedDescription);
382
+ codec = AVVideoCodecTypeH264;
383
+ fileType = AVFileTypeQuickTimeMovie;
384
+ webMSupported = NO;
385
+ writerError = nil;
386
+ NSString *fallbackPath = [[originalPath stringByDeletingPathExtension] stringByAppendingPathExtension:@"mov"];
387
+ if (!fallbackPath || [fallbackPath length] == 0) {
388
+ fallbackPath = [originalPath stringByAppendingString:@".mov"];
389
+ }
390
+ [[NSFileManager defaultManager] removeItemAtPath:fallbackPath error:nil];
391
+ NSURL *fallbackURL = [NSURL fileURLWithPath:fallbackPath];
392
+ self.outputPath = fallbackPath;
393
+ @try {
394
+ self.assetWriter = [[AVAssetWriter alloc] initWithURL:fallbackURL fileType:fileType error:&writerError];
395
+ } @catch (NSException *exception) {
396
+ NSDictionary *info = @{
397
+ NSLocalizedDescriptionKey: exception.reason ?: @"Failed to initialize asset writer"
398
+ };
399
+ writerError = [NSError errorWithDomain:@"CameraRecorder" code:-100 userInfo:info];
400
+ self.assetWriter = nil;
401
+ }
402
+ outputURL = fallbackURL;
403
+ } else {
404
+ self.outputPath = originalPath;
405
+ }
319
406
 
320
407
  if (!self.assetWriter || writerError) {
321
408
  if (error) {
@@ -324,7 +411,6 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
324
411
  return NO;
325
412
  }
326
413
 
327
- // On fallback, if WebM was requested but not supported, log and switch extension to .mov
328
414
  if (wantsWebM && !webMSupported) {
329
415
  MRLog(@"ℹ️ CameraRecorder: WebM unavailable, storing data in QuickTime container");
330
416
  }
@@ -349,7 +435,6 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
349
435
  AVVideoCompressionPropertiesKey: compressionProps
350
436
  };
351
437
 
352
- // Video-only writer input (camera recordings remain silent by design)
353
438
  self.assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
354
439
  outputSettings:videoSettings];
355
440
  self.assetWriterInput.expectsMediaDataInRealTime = YES;
@@ -444,26 +529,15 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
444
529
  return NO;
445
530
  }
446
531
 
447
- BOOL isContinuityCamera = MRIsContinuityCamera(device);
448
- if (isContinuityCamera) {
449
- id continuityKey = [[NSBundle mainBundle] objectForInfoDictionaryKey:@"NSCameraUseContinuityCameraDeviceType"];
450
- BOOL allowContinuity = NO;
451
- if ([continuityKey respondsToSelector:@selector(boolValue)]) {
452
- allowContinuity = [continuityKey boolValue];
453
- }
454
- if (!allowContinuity && getenv("ALLOW_CONTINUITY_CAMERA")) {
455
- allowContinuity = YES;
456
- }
457
- if (!allowContinuity) {
458
- if (error) {
459
- NSDictionary *userInfo = @{
460
- NSLocalizedDescriptionKey: @"Continuity Camera requires NSCameraUseContinuityCameraDeviceType=true in Info.plist"
461
- };
462
- *error = [NSError errorWithDomain:@"CameraRecorder" code:-5 userInfo:userInfo];
463
- }
464
- MRLog(@"⚠️ Continuity Camera access denied - missing Info.plist entitlement");
465
- return NO;
532
+ if (MRIsContinuityCamera(device) && !MRAllowContinuityCamera()) {
533
+ if (error) {
534
+ NSDictionary *userInfo = @{
535
+ NSLocalizedDescriptionKey: @"Continuity Camera requires NSCameraUseContinuityCameraDeviceType=true in Info.plist"
536
+ };
537
+ *error = [NSError errorWithDomain:@"CameraRecorder" code:-5 userInfo:userInfo];
466
538
  }
539
+ MRLog(@"⚠️ Continuity Camera access denied - missing Info.plist entitlement");
540
+ return NO;
467
541
  }
468
542
 
469
543
  int32_t width = 0;
@@ -521,9 +595,12 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
521
595
  AVCaptureConnection *connection = [self.videoOutput connectionWithMediaType:AVMediaTypeVideo];
522
596
  if (connection) {
523
597
  if (connection.isVideoOrientationSupported) {
524
- connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight;
598
+ connection.videoOrientation = AVCaptureVideoOrientationPortrait;
525
599
  }
526
600
  if (connection.isVideoMirroringSupported && device.position == AVCaptureDevicePositionFront) {
601
+ if ([connection respondsToSelector:@selector(setAutomaticallyAdjustsVideoMirroring:)]) {
602
+ connection.automaticallyAdjustsVideoMirroring = NO;
603
+ }
527
604
  connection.videoMirrored = YES;
528
605
  }
529
606
  }
@@ -651,17 +728,13 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
651
728
  extern "C" {
652
729
 
653
730
  NSArray<NSDictionary *> *listCameraDevices() {
654
- @autoreleasepool {
655
- return [CameraRecorder availableCameraDevices];
656
- }
731
+ return [CameraRecorder availableCameraDevices];
657
732
  }
658
733
 
659
734
  bool startCameraRecording(NSString *outputPath, NSString *deviceId, NSError **error) {
660
- @autoreleasepool {
661
- return [[CameraRecorder sharedRecorder] startRecordingWithDeviceId:deviceId
662
- outputPath:outputPath
663
- error:error];
664
- }
735
+ return [[CameraRecorder sharedRecorder] startRecordingWithDeviceId:deviceId
736
+ outputPath:outputPath
737
+ error:error];
665
738
  }
666
739
 
667
740
  bool stopCameraRecording() {
@@ -674,4 +747,8 @@ bool isCameraRecording() {
674
747
  return [CameraRecorder sharedRecorder].isRecording;
675
748
  }
676
749
 
750
+ NSString *currentCameraRecordingPath() {
751
+ return [CameraRecorder sharedRecorder].outputPath;
752
+ }
753
+
677
754
  }
@@ -1088,43 +1088,13 @@ NSDictionary* getDisplayScalingInfo(CGPoint globalPoint) {
1088
1088
 
1089
1089
  // Check if point is within this display
1090
1090
  if (isInBounds) {
1091
- // Get physical dimensions
1092
- CGImageRef testImage = CGDisplayCreateImage(displayID);
1093
- CGSize imageSize = CGSizeMake(CGImageGetWidth(testImage), CGImageGetHeight(testImage));
1094
- CGImageRelease(testImage);
1095
-
1096
- CGSize actualPhysicalSize = imageSize;
1097
- CFArrayRef displayModes = CGDisplayCopyAllDisplayModes(displayID, NULL);
1098
- if (displayModes) {
1099
- CFIndex modeCount = CFArrayGetCount(displayModes);
1100
- CGSize maxResolution = CGSizeMake(0, 0);
1101
-
1102
- for (CFIndex i = 0; i < modeCount; i++) {
1103
- CGDisplayModeRef mode = (CGDisplayModeRef)CFArrayGetValueAtIndex(displayModes, i);
1104
- CGSize modeSize = CGSizeMake(CGDisplayModeGetWidth(mode), CGDisplayModeGetHeight(mode));
1105
-
1106
- if (modeSize.width > maxResolution.width ||
1107
- (modeSize.width == maxResolution.width && modeSize.height > maxResolution.height)) {
1108
- maxResolution = modeSize;
1109
- }
1110
- }
1111
-
1112
- if (maxResolution.width > imageSize.width * 1.5 || maxResolution.height > imageSize.height * 1.5) {
1113
- actualPhysicalSize = maxResolution;
1114
- } else {
1115
- actualPhysicalSize = imageSize;
1116
- }
1117
-
1118
- CFRelease(displayModes);
1119
- } else {
1120
- actualPhysicalSize = imageSize;
1121
- }
1122
-
1091
+ // Compute physical dimensions using pixel counts to avoid heavy APIs
1123
1092
  CGSize logicalSize = displayBounds.size;
1124
- CGSize reportedPhysicalSize = CGSizeMake(CGDisplayPixelsWide(displayID), CGDisplayPixelsHigh(displayID));
1093
+ CGSize actualPhysicalSize = CGSizeMake(CGDisplayPixelsWide(displayID), CGDisplayPixelsHigh(displayID));
1094
+ CGSize reportedPhysicalSize = actualPhysicalSize;
1125
1095
 
1126
- CGFloat scaleX = actualPhysicalSize.width / logicalSize.width;
1127
- CGFloat scaleY = actualPhysicalSize.height / logicalSize.height;
1096
+ CGFloat scaleX = logicalSize.width > 0 ? actualPhysicalSize.width / logicalSize.width : 1.0;
1097
+ CGFloat scaleY = logicalSize.height > 0 ? actualPhysicalSize.height / logicalSize.height : 1.0;
1128
1098
  CGFloat scaleFactor = MAX(scaleX, scaleY);
1129
1099
 
1130
1100
  return @{
@@ -1141,36 +1111,15 @@ NSDictionary* getDisplayScalingInfo(CGPoint globalPoint) {
1141
1111
  CGDirectDisplayID mainDisplay = CGMainDisplayID();
1142
1112
  CGRect displayBounds = CGDisplayBounds(mainDisplay);
1143
1113
 
1144
- CGImageRef testImage = CGDisplayCreateImage(mainDisplay);
1145
- CGSize imageSize = CGSizeMake(CGImageGetWidth(testImage), CGImageGetHeight(testImage));
1146
- CGImageRelease(testImage);
1147
-
1148
- CGSize actualPhysicalSize = imageSize;
1149
- CFArrayRef displayModes = CGDisplayCopyAllDisplayModes(mainDisplay, NULL);
1150
- if (displayModes) {
1151
- CFIndex modeCount = CFArrayGetCount(displayModes);
1152
- CGSize maxResolution = CGSizeMake(0, 0);
1153
-
1154
- for (CFIndex i = 0; i < modeCount; i++) {
1155
- CGDisplayModeRef mode = (CGDisplayModeRef)CFArrayGetValueAtIndex(displayModes, i);
1156
- CGSize modeSize = CGSizeMake(CGDisplayModeGetWidth(mode), CGDisplayModeGetHeight(mode));
1157
-
1158
- if (modeSize.width > maxResolution.width ||
1159
- (modeSize.width == maxResolution.width && modeSize.height > maxResolution.height)) {
1160
- maxResolution = modeSize;
1161
- }
1162
- }
1163
-
1164
- if (maxResolution.width > imageSize.width * 1.5 || maxResolution.height > imageSize.height * 1.5) {
1165
- actualPhysicalSize = maxResolution;
1166
- }
1167
-
1168
- CFRelease(displayModes);
1114
+ CGSize logicalSize = displayBounds.size;
1115
+ CGSize actualPhysicalSize = CGSizeMake(CGDisplayPixelsWide(mainDisplay), CGDisplayPixelsHigh(mainDisplay));
1116
+ CGFloat scaleFactor = 1.0;
1117
+ if (logicalSize.width > 0 && logicalSize.height > 0) {
1118
+ CGFloat scaleX = actualPhysicalSize.width / logicalSize.width;
1119
+ CGFloat scaleY = actualPhysicalSize.height / logicalSize.height;
1120
+ scaleFactor = MAX(scaleX, scaleY);
1169
1121
  }
1170
1122
 
1171
- CGSize logicalSize = displayBounds.size;
1172
- CGFloat scaleFactor = MAX(actualPhysicalSize.width / logicalSize.width, actualPhysicalSize.height / logicalSize.height);
1173
-
1174
1123
  return @{
1175
1124
  @"displayID": @(mainDisplay),
1176
1125
  @"logicalSize": [NSValue valueWithSize:NSMakeSize(logicalSize.width, logicalSize.height)],