node-mac-recorder 2.21.2 → 2.21.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "node-mac-recorder",
-  "version": "2.21.2",
+  "version": "2.21.3",
   "description": "Native macOS screen recording package for Node.js applications",
   "main": "index.js",
   "keywords": [
@@ -82,6 +82,10 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
 @property (atomic, assign) BOOL writerStarted;
 @property (atomic, assign) BOOL isShuttingDown;
 @property (nonatomic, assign) CMTime firstSampleTime;
+@property (nonatomic, assign) int32_t expectedWidth;
+@property (nonatomic, assign) int32_t expectedHeight;
+@property (nonatomic, assign) double expectedFrameRate;
+@property (atomic, assign) BOOL needsReconfiguration;
 
 + (instancetype)sharedRecorder;
 + (NSArray<NSDictionary *> *)availableCameraDevices;
@@ -242,23 +246,28 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
     AVCaptureDeviceFormat *bestFormat = nil;
     int64_t bestResolutionScore = 0;
     double bestFrameRate = 0.0;
-
+
+    MRLog(@"🔍 Scanning formats for device: %@", device.localizedName);
+
     for (AVCaptureDeviceFormat *format in device.formats) {
         CMVideoDimensions dims = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
         if (dims.width <= 0 || dims.height <= 0) {
             continue;
         }
-
+
+        // No filtering - use whatever the device supports
+        // The device knows best what it can capture
+
         int64_t score = (int64_t)dims.width * (int64_t)dims.height;
-
+
         double maxFrameRate = 0.0;
         for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
             maxFrameRate = MAX(maxFrameRate, range.maxFrameRate);
         }
-
+
         BOOL usesBetterResolution = score > bestResolutionScore;
         BOOL sameResolutionHigherFps = (score == bestResolutionScore) && (maxFrameRate > bestFrameRate);
-
+
         if (!bestFormat || usesBetterResolution || sameResolutionHigherFps) {
             bestFormat = format;
             bestResolutionScore = score;
@@ -266,9 +275,17 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
             if (widthOut) *widthOut = dims.width;
             if (heightOut) *heightOut = dims.height;
             if (frameRateOut) *frameRateOut = bestFrameRate;
+            MRLog(@" ✅ New best: %dx%d @ %.0ffps (score=%lld)",
+                  dims.width, dims.height, maxFrameRate, score);
         }
     }
-
+
+    if (bestFormat) {
+        MRLog(@"📹 Selected format: %dx%d @ %.0ffps", *widthOut, *heightOut, *frameRateOut);
+    } else {
+        MRLog(@"❌ No suitable format found");
+    }
+
     return bestFormat;
 }
 
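
Note: the helper above only selects a format; applying it to the device is outside this diff. Under the usual AVFoundation pattern, the caller would presumably do something like the sketch below. The lockForConfiguration call and frame-duration setup are assumptions about the surrounding code, not lines from this package.

    NSError *lockError = nil;
    if (bestFormat && [device lockForConfiguration:&lockError]) {
        device.activeFormat = bestFormat;
        if (bestFrameRate > 0) {
            // Ask for the format's highest reported frame rate
            CMTime frameDuration = CMTimeMake(1, (int32_t)round(bestFrameRate));
            device.activeVideoMinFrameDuration = frameDuration;
            device.activeVideoMaxFrameDuration = frameDuration;
        }
        [device unlockForConfiguration];
    }
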
@@ -415,17 +432,28 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
         MRLog(@"ℹ️ CameraRecorder: WebM unavailable, storing data in QuickTime container");
     }
 
-    NSInteger bitrate = (NSInteger)(width * height * 6); // Empirical bitrate multiplier
-    bitrate = MAX(bitrate, 5 * 1000 * 1000); // Minimum 5 Mbps
+    // Calculate bitrate based on resolution for high quality
+    // Use higher multiplier for better quality (10 instead of 6)
+    NSInteger bitrate = (NSInteger)(width * height * 10);
+    bitrate = MAX(bitrate, 8 * 1000 * 1000); // Minimum 8 Mbps for quality
+    bitrate = MIN(bitrate, 50 * 1000 * 1000); // Maximum 50 Mbps to avoid excessive file size
+
+    MRLog(@"🎬 Camera encoder settings: %dx%d @ %.2ffps, bitrate=%.2fMbps",
+          width, height, frameRate, bitrate / (1000.0 * 1000.0));
 
     NSMutableDictionary *compressionProps = [@{
         AVVideoAverageBitRateKey: @(bitrate),
         AVVideoMaxKeyFrameIntervalKey: @(MAX(1, (int)round(frameRate))),
-        AVVideoAllowFrameReorderingKey: @YES
+        AVVideoAllowFrameReorderingKey: @YES,
+        AVVideoExpectedSourceFrameRateKey: @(frameRate),
+        // Add quality hint for better encoding
+        AVVideoQualityKey: @(0.9) // 0.0-1.0, higher is better quality
     } mutableCopy];
-
+
     if ([codec isEqualToString:AVVideoCodecTypeH264]) {
         compressionProps[AVVideoProfileLevelKey] = AVVideoProfileLevelH264HighAutoLevel;
+        // Use Main profile for better quality
+        compressionProps[AVVideoH264EntropyModeKey] = AVVideoH264EntropyModeCABAC;
     }
 
     NSDictionary *videoSettings = @{
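
As a worked example of the new bitrate formula (multiplier 10, clamped to 8–50 Mbps) at a few common camera resolutions:

    // 1920x1080: 1920 * 1080 * 10 = 20,736,000 bps (~20.7 Mbps) - inside the clamp range
    // 640x480:    640 *  480 * 10 =  3,072,000 bps            - raised to the 8 Mbps floor
    // 3840x2160: 3840 * 2160 * 10 = 82,944,000 bps (~82.9 Mbps) - capped at 50 Mbps
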
@@ -440,9 +468,12 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
     self.assetWriterInput.expectsMediaDataInRealTime = YES;
 
     NSDictionary *pixelBufferAttributes = @{
-        (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange),
+        (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange),
         (NSString *)kCVPixelBufferWidthKey: @(width),
-        (NSString *)kCVPixelBufferHeightKey: @(height)
+        (NSString *)kCVPixelBufferHeightKey: @(height),
+        // Preserve aspect ratio and use high quality scaling
+        (NSString *)kCVPixelBufferCGImageCompatibilityKey: @YES,
+        (NSString *)kCVPixelBufferCGBitmapContextCompatibilityKey: @YES
     };
 
     self.pixelBufferAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:self.assetWriterInput
@@ -572,8 +603,10 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
 
     self.videoOutput = [[AVCaptureVideoDataOutput alloc] init];
     self.videoOutput.alwaysDiscardsLateVideoFrames = NO;
+    // Use video range (not full range) for better compatibility and quality
+    // YpCbCr 4:2:0 biplanar is the native format for most cameras
     self.videoOutput.videoSettings = @{
-        (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
+        (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)
     };
 
     self.captureQueue = dispatch_queue_create("node_mac_recorder.camera.queue", DISPATCH_QUEUE_SERIAL);
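
For reference, the two pixel formats swapped in this release differ only in nominal component range: kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange ('420v') encodes luma in 16–235 and chroma in 16–240, while kCVPixelFormatType_420YpCbCr8BiPlanarFullRange ('420f') uses the full 0–255 range; both are bi-planar 4:2:0 layouts. A small illustrative check on a delivered buffer (not part of the package):

    OSType fmt = CVPixelBufferGetPixelFormatType(pixelBuffer);
    if (fmt == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange) {
        // '420v': video-range YCbCr, the format requested above
    } else if (fmt == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) {
        // '420f': full-range YCbCr
    }
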
@@ -594,31 +627,39 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
 
     AVCaptureConnection *connection = [self.videoOutput connectionWithMediaType:AVMediaTypeVideo];
     if (connection) {
-        if (connection.isVideoOrientationSupported) {
-            connection.videoOrientation = AVCaptureVideoOrientationPortrait;
-        }
+        // DON'T set orientation - let the camera use its natural orientation
+        // The device knows best (portrait for phones, landscape for webcams)
+        // We just capture whatever comes through
+
+        // Mirror front cameras for natural preview
         if (connection.isVideoMirroringSupported && device.position == AVCaptureDevicePositionFront) {
             if ([connection respondsToSelector:@selector(setAutomaticallyAdjustsVideoMirroring:)]) {
                 connection.automaticallyAdjustsVideoMirroring = NO;
             }
             connection.videoMirrored = YES;
         }
+
+        // Log actual connection properties for debugging
+        MRLog(@"📐 Camera connection: orientation=%ld (native), mirrored=%d, format=%dx%d",
+              (long)connection.videoOrientation,
+              connection.isVideoMirrored,
+              width, height);
     }
 
-    NSURL *outputURL = [NSURL fileURLWithPath:outputPath];
-    if (![self setupWriterWithURL:outputURL width:width height:height frameRate:frameRate error:error]) {
-        [self.session stopRunning];
-        [self resetState];
-        return NO;
-    }
-
+    // DON'T setup writer yet - wait for first frame to get actual dimensions
+    // Store configuration for lazy initialization
     self.outputPath = outputPath;
     self.isRecording = YES;
     self.isShuttingDown = NO;
-
+    self.expectedWidth = width;
+    self.expectedHeight = height;
+    self.expectedFrameRate = frameRate;
+    self.needsReconfiguration = NO;
+
     [self.session startRunning];
-
-    MRLog(@"🎥 CameraRecorder started: %@ (%dx%d @ %.2ffps)", device.localizedName, width, height, frameRate);
+
+    MRLog(@"🎥 CameraRecorder started: %@ (will use actual frame dimensions)", device.localizedName);
+    MRLog(@" Format reports: %dx%d @ %.2ffps", width, height, frameRate);
     return YES;
 }
 
@@ -674,18 +715,54 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
     if (!self.isRecording || self.isShuttingDown) {
         return;
     }
-
+
     if (!sampleBuffer) {
         return;
     }
-
+
     CMTime timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
+
+    // Lazy initialization - setup writer with actual frame dimensions
+    if (!self.assetWriter) {
+        CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+        if (!pixelBuffer) {
+            MRLog(@"❌ No pixel buffer in first frame");
+            return;
+        }
+
+        size_t actualWidth = CVPixelBufferGetWidth(pixelBuffer);
+        size_t actualHeight = CVPixelBufferGetHeight(pixelBuffer);
+
+        MRLog(@"🎬 First frame received: %zux%zu (format said %dx%d)",
+              actualWidth, actualHeight, self.expectedWidth, self.expectedHeight);
+
+        // Use ACTUAL dimensions from the frame, not format dimensions
+        NSURL *outputURL = [NSURL fileURLWithPath:self.outputPath];
+        NSError *setupError = nil;
+
+        // Use frame rate from device configuration
+        double frameRate = self.expectedFrameRate > 0 ? self.expectedFrameRate : 30.0;
+
+        if (![self setupWriterWithURL:outputURL
+                                width:(int32_t)actualWidth
+                               height:(int32_t)actualHeight
+                            frameRate:frameRate
+                                error:&setupError]) {
+            MRLog(@"❌ Failed to setup writer with actual dimensions: %@", setupError);
+            self.isRecording = NO;
+            return;
+        }
+
+        MRLog(@"✅ Writer configured with ACTUAL dimensions: %zux%zu", actualWidth, actualHeight);
+    }
+
     if (!self.writerStarted) {
         if (self.assetWriter.status == AVAssetWriterStatusUnknown) {
             if ([self.assetWriter startWriting]) {
                 [self.assetWriter startSessionAtSourceTime:timestamp];
                 self.writerStarted = YES;
                 self.firstSampleTime = timestamp;
+                MRLog(@"✅ Camera writer started");
             } else {
                 MRLog(@"❌ CameraRecorder: Failed to start asset writer: %@", self.assetWriter.error);
                 self.isRecording = NO;
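
The append path for subsequent frames is outside this hunk; with the AVAssetWriterInputPixelBufferAdaptor created earlier it presumably follows the usual pattern sketched below (an assumption about the unshown code, not lines from this diff):

    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (pixelBuffer && self.assetWriterInput.isReadyForMoreMediaData) {
        if (![self.pixelBufferAdaptor appendPixelBuffer:pixelBuffer
                                   withPresentationTime:timestamp]) {
            MRLog(@"❌ Failed to append camera frame: %@", self.assetWriter.error);
        }
    }
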
@@ -493,10 +493,9 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
     MRLog(@"🔍 AUDIO DEBUG: includeMicrophone type=%@ value=%d", [includeMicrophone class], [includeMicrophone boolValue]);
     MRLog(@"🔍 AUDIO DEBUG: includeSystemAudio type=%@ value=%d", [includeSystemAudio class], [includeSystemAudio boolValue]);
 
-    // ELECTRON FIX: Get shareable content asynchronously without blocking
-    // This prevents deadlocks in Electron's event loop
+    // ELECTRON FIX: Get shareable content FULLY ASYNCHRONOUSLY
+    // NO semaphores, NO blocking - pure async to prevent Electron crashes
     [SCShareableContent getShareableContentWithCompletionHandler:^(SCShareableContent *content, NSError *contentError) {
-        // This block runs asynchronously - safe for Electron
         @autoreleasepool {
             if (contentError) {
                 NSLog(@"❌ Content error: %@", contentError);
@@ -742,7 +741,7 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
         MRLog(@"🕒 Session timestamp: %@", sessionTimestampNumber);
     }
 
-    // ELECTRON FIX: Start capture asynchronously
+    // ELECTRON FIX: Start capture FULLY ASYNCHRONOUSLY - NO blocking
     [g_stream startCaptureWithCompletionHandler:^(NSError *startError) {
         if (startError) {
             NSLog(@"❌ Failed to start pure capture: %@", startError);
@@ -773,22 +772,24 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
     // Store stream reference to prevent it from being deallocated
     SCStream *streamToStop = g_stream;
 
-    // ELECTRON FIX: Stop asynchronously without blocking
+    // ELECTRON FIX: Stop FULLY ASYNCHRONOUSLY - NO blocking, NO semaphores
     [streamToStop stopCaptureWithCompletionHandler:^(NSError *stopError) {
-        if (stopError) {
-            NSLog(@"❌ Stop error: %@", stopError);
-        } else {
-            MRLog(@"✅ Pure stream stopped");
-        }
+        @autoreleasepool {
+            if (stopError) {
+                NSLog(@"❌ Stop error: %@", stopError);
+            } else {
+                MRLog(@"✅ Pure stream stopped");
+            }
 
-        // Reset recording state to allow new recordings
-        @synchronized([ScreenCaptureKitRecorder class]) {
-            g_isRecording = NO;
-        }
+            // Reset recording state to allow new recordings
+            @synchronized([ScreenCaptureKitRecorder class]) {
+                g_isRecording = NO;
+            }
 
-        // Cleanup after stop completes
-        CleanupWriters();
-        [ScreenCaptureKitRecorder cleanupVideoWriter];
+            // Cleanup after stop completes
+            CleanupWriters();
+            [ScreenCaptureKitRecorder cleanupVideoWriter];
+        }
     }];
 }
 