node-mac-recorder 2.21.1 → 2.21.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "node-mac-recorder",
3
- "version": "2.21.1",
3
+ "version": "2.21.3",
4
4
  "description": "Native macOS screen recording package for Node.js applications",
5
5
  "main": "index.js",
6
6
  "keywords": [
@@ -82,6 +82,10 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
82
82
  @property (atomic, assign) BOOL writerStarted;
83
83
  @property (atomic, assign) BOOL isShuttingDown;
84
84
  @property (nonatomic, assign) CMTime firstSampleTime;
85
+ @property (nonatomic, assign) int32_t expectedWidth;
86
+ @property (nonatomic, assign) int32_t expectedHeight;
87
+ @property (nonatomic, assign) double expectedFrameRate;
88
+ @property (atomic, assign) BOOL needsReconfiguration;
85
89
 
86
90
  + (instancetype)sharedRecorder;
87
91
  + (NSArray<NSDictionary *> *)availableCameraDevices;
@@ -242,23 +246,28 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
242
246
  AVCaptureDeviceFormat *bestFormat = nil;
243
247
  int64_t bestResolutionScore = 0;
244
248
  double bestFrameRate = 0.0;
245
-
249
+
250
+ MRLog(@"๐Ÿ” Scanning formats for device: %@", device.localizedName);
251
+
246
252
  for (AVCaptureDeviceFormat *format in device.formats) {
247
253
  CMVideoDimensions dims = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
248
254
  if (dims.width <= 0 || dims.height <= 0) {
249
255
  continue;
250
256
  }
251
-
257
+
258
+ // No filtering - use whatever the device supports
259
+ // The device knows best what it can capture
260
+
252
261
  int64_t score = (int64_t)dims.width * (int64_t)dims.height;
253
-
262
+
254
263
  double maxFrameRate = 0.0;
255
264
  for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
256
265
  maxFrameRate = MAX(maxFrameRate, range.maxFrameRate);
257
266
  }
258
-
267
+
259
268
  BOOL usesBetterResolution = score > bestResolutionScore;
260
269
  BOOL sameResolutionHigherFps = (score == bestResolutionScore) && (maxFrameRate > bestFrameRate);
261
-
270
+
262
271
  if (!bestFormat || usesBetterResolution || sameResolutionHigherFps) {
263
272
  bestFormat = format;
264
273
  bestResolutionScore = score;
@@ -266,9 +275,17 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
266
275
  if (widthOut) *widthOut = dims.width;
267
276
  if (heightOut) *heightOut = dims.height;
268
277
  if (frameRateOut) *frameRateOut = bestFrameRate;
278
+ MRLog(@" โœ… New best: %dx%d @ %.0ffps (score=%lld)",
279
+ dims.width, dims.height, maxFrameRate, score);
269
280
  }
270
281
  }
271
-
282
+
283
+ if (bestFormat) {
284
+ MRLog(@"๐Ÿ“น Selected format: %dx%d @ %.0ffps", *widthOut, *heightOut, *frameRateOut);
285
+ } else {
286
+ MRLog(@"โŒ No suitable format found");
287
+ }
288
+
272
289
  return bestFormat;
273
290
  }
274
291
 
@@ -415,17 +432,28 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
415
432
  MRLog(@"โ„น๏ธ CameraRecorder: WebM unavailable, storing data in QuickTime container");
416
433
  }
417
434
 
418
- NSInteger bitrate = (NSInteger)(width * height * 6); // Empirical bitrate multiplier
419
- bitrate = MAX(bitrate, 5 * 1000 * 1000); // Minimum 5 Mbps
435
+ // Calculate bitrate based on resolution for high quality
436
+ // Use higher multiplier for better quality (10 instead of 6)
437
+ NSInteger bitrate = (NSInteger)(width * height * 10);
438
+ bitrate = MAX(bitrate, 8 * 1000 * 1000); // Minimum 8 Mbps for quality
439
+ bitrate = MIN(bitrate, 50 * 1000 * 1000); // Maximum 50 Mbps to avoid excessive file size
440
+
441
+ MRLog(@"๐ŸŽฌ Camera encoder settings: %dx%d @ %.2ffps, bitrate=%.2fMbps",
442
+ width, height, frameRate, bitrate / (1000.0 * 1000.0));
420
443
 
421
444
  NSMutableDictionary *compressionProps = [@{
422
445
  AVVideoAverageBitRateKey: @(bitrate),
423
446
  AVVideoMaxKeyFrameIntervalKey: @(MAX(1, (int)round(frameRate))),
424
- AVVideoAllowFrameReorderingKey: @YES
447
+ AVVideoAllowFrameReorderingKey: @YES,
448
+ AVVideoExpectedSourceFrameRateKey: @(frameRate),
449
+ // Add quality hint for better encoding
450
+ AVVideoQualityKey: @(0.9) // 0.0-1.0, higher is better quality
425
451
  } mutableCopy];
426
-
452
+
427
453
  if ([codec isEqualToString:AVVideoCodecTypeH264]) {
428
454
  compressionProps[AVVideoProfileLevelKey] = AVVideoProfileLevelH264HighAutoLevel;
455
+ // Use Main profile for better quality
456
+ compressionProps[AVVideoH264EntropyModeKey] = AVVideoH264EntropyModeCABAC;
429
457
  }
430
458
 
431
459
  NSDictionary *videoSettings = @{
@@ -440,9 +468,12 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
440
468
  self.assetWriterInput.expectsMediaDataInRealTime = YES;
441
469
 
442
470
  NSDictionary *pixelBufferAttributes = @{
443
- (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange),
471
+ (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange),
444
472
  (NSString *)kCVPixelBufferWidthKey: @(width),
445
- (NSString *)kCVPixelBufferHeightKey: @(height)
473
+ (NSString *)kCVPixelBufferHeightKey: @(height),
474
+ // Preserve aspect ratio and use high quality scaling
475
+ (NSString *)kCVPixelBufferCGImageCompatibilityKey: @YES,
476
+ (NSString *)kCVPixelBufferCGBitmapContextCompatibilityKey: @YES
446
477
  };
447
478
 
448
479
  self.pixelBufferAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:self.assetWriterInput
@@ -572,8 +603,10 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
572
603
 
573
604
  self.videoOutput = [[AVCaptureVideoDataOutput alloc] init];
574
605
  self.videoOutput.alwaysDiscardsLateVideoFrames = NO;
606
+ // Use video range (not full range) for better compatibility and quality
607
+ // YpCbCr 4:2:0 biplanar is the native format for most cameras
575
608
  self.videoOutput.videoSettings = @{
576
- (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
609
+ (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)
577
610
  };
578
611
 
579
612
  self.captureQueue = dispatch_queue_create("node_mac_recorder.camera.queue", DISPATCH_QUEUE_SERIAL);
@@ -594,31 +627,39 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
594
627
 
595
628
  AVCaptureConnection *connection = [self.videoOutput connectionWithMediaType:AVMediaTypeVideo];
596
629
  if (connection) {
597
- if (connection.isVideoOrientationSupported) {
598
- connection.videoOrientation = AVCaptureVideoOrientationPortrait;
599
- }
630
+ // DON'T set orientation - let the camera use its natural orientation
631
+ // The device knows best (portrait for phones, landscape for webcams)
632
+ // We just capture whatever comes through
633
+
634
+ // Mirror front cameras for natural preview
600
635
  if (connection.isVideoMirroringSupported && device.position == AVCaptureDevicePositionFront) {
601
636
  if ([connection respondsToSelector:@selector(setAutomaticallyAdjustsVideoMirroring:)]) {
602
637
  connection.automaticallyAdjustsVideoMirroring = NO;
603
638
  }
604
639
  connection.videoMirrored = YES;
605
640
  }
641
+
642
+ // Log actual connection properties for debugging
643
+ MRLog(@"๐Ÿ“ Camera connection: orientation=%ld (native), mirrored=%d, format=%dx%d",
644
+ (long)connection.videoOrientation,
645
+ connection.isVideoMirrored,
646
+ width, height);
606
647
  }
607
648
 
608
- NSURL *outputURL = [NSURL fileURLWithPath:outputPath];
609
- if (![self setupWriterWithURL:outputURL width:width height:height frameRate:frameRate error:error]) {
610
- [self.session stopRunning];
611
- [self resetState];
612
- return NO;
613
- }
614
-
649
+ // DON'T setup writer yet - wait for first frame to get actual dimensions
650
+ // Store configuration for lazy initialization
615
651
  self.outputPath = outputPath;
616
652
  self.isRecording = YES;
617
653
  self.isShuttingDown = NO;
618
-
654
+ self.expectedWidth = width;
655
+ self.expectedHeight = height;
656
+ self.expectedFrameRate = frameRate;
657
+ self.needsReconfiguration = NO;
658
+
619
659
  [self.session startRunning];
620
-
621
- MRLog(@"๐ŸŽฅ CameraRecorder started: %@ (%dx%d @ %.2ffps)", device.localizedName, width, height, frameRate);
660
+
661
+ MRLog(@"๐ŸŽฅ CameraRecorder started: %@ (will use actual frame dimensions)", device.localizedName);
662
+ MRLog(@" Format reports: %dx%d @ %.2ffps", width, height, frameRate);
622
663
  return YES;
623
664
  }
624
665
 
@@ -674,18 +715,54 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
674
715
  if (!self.isRecording || self.isShuttingDown) {
675
716
  return;
676
717
  }
677
-
718
+
678
719
  if (!sampleBuffer) {
679
720
  return;
680
721
  }
681
-
722
+
682
723
  CMTime timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
724
+
725
+ // Lazy initialization - setup writer with actual frame dimensions
726
+ if (!self.assetWriter) {
727
+ CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
728
+ if (!pixelBuffer) {
729
+ MRLog(@"โŒ No pixel buffer in first frame");
730
+ return;
731
+ }
732
+
733
+ size_t actualWidth = CVPixelBufferGetWidth(pixelBuffer);
734
+ size_t actualHeight = CVPixelBufferGetHeight(pixelBuffer);
735
+
736
+ MRLog(@"๐ŸŽฌ First frame received: %zux%zu (format said %dx%d)",
737
+ actualWidth, actualHeight, self.expectedWidth, self.expectedHeight);
738
+
739
+ // Use ACTUAL dimensions from the frame, not format dimensions
740
+ NSURL *outputURL = [NSURL fileURLWithPath:self.outputPath];
741
+ NSError *setupError = nil;
742
+
743
+ // Use frame rate from device configuration
744
+ double frameRate = self.expectedFrameRate > 0 ? self.expectedFrameRate : 30.0;
745
+
746
+ if (![self setupWriterWithURL:outputURL
747
+ width:(int32_t)actualWidth
748
+ height:(int32_t)actualHeight
749
+ frameRate:frameRate
750
+ error:&setupError]) {
751
+ MRLog(@"โŒ Failed to setup writer with actual dimensions: %@", setupError);
752
+ self.isRecording = NO;
753
+ return;
754
+ }
755
+
756
+ MRLog(@"โœ… Writer configured with ACTUAL dimensions: %zux%zu", actualWidth, actualHeight);
757
+ }
758
+
683
759
  if (!self.writerStarted) {
684
760
  if (self.assetWriter.status == AVAssetWriterStatusUnknown) {
685
761
  if ([self.assetWriter startWriting]) {
686
762
  [self.assetWriter startSessionAtSourceTime:timestamp];
687
763
  self.writerStarted = YES;
688
764
  self.firstSampleTime = timestamp;
765
+ MRLog(@"โœ… Camera writer started");
689
766
  } else {
690
767
  MRLog(@"โŒ CameraRecorder: Failed to start asset writer: %@", self.assetWriter.error);
691
768
  self.isRecording = NO;
@@ -121,27 +121,30 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
121
121
  @implementation PureScreenCaptureDelegate
122
122
  - (void)stream:(SCStream * API_AVAILABLE(macos(12.3)))stream didStopWithError:(NSError *)error API_AVAILABLE(macos(12.3)) {
123
123
  MRLog(@"๐Ÿ›‘ Pure ScreenCapture stream stopped");
124
-
124
+
125
125
  // Prevent recursive calls during cleanup
126
126
  if (g_isCleaningUp) {
127
127
  MRLog(@"โš ๏ธ Already cleaning up, ignoring delegate callback");
128
128
  return;
129
129
  }
130
-
131
- g_isRecording = NO;
132
-
130
+
131
+ @synchronized([ScreenCaptureKitRecorder class]) {
132
+ g_isRecording = NO;
133
+ }
134
+
133
135
  if (error) {
134
136
  NSLog(@"โŒ Stream error: %@", error);
135
137
  } else {
136
138
  MRLog(@"โœ… Stream stopped cleanly");
137
139
  }
138
-
139
- // Use dispatch_async to prevent potential deadlocks in Electron
140
- dispatch_async(dispatch_get_main_queue(), ^{
141
- if (!g_isCleaningUp) { // Double-check before finalizing
140
+
141
+ // ELECTRON FIX: Don't use dispatch_async to main queue - it can cause crashes
142
+ // Instead, finalize directly on current thread with synchronization
143
+ @synchronized([ScreenCaptureKitRecorder class]) {
144
+ if (!g_isCleaningUp) {
142
145
  [ScreenCaptureKitRecorder finalizeRecording];
143
146
  }
144
- });
147
+ }
145
148
  }
146
149
  @end
147
150
 
@@ -450,17 +453,23 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
450
453
  + (BOOL)startRecordingWithConfiguration:(NSDictionary *)config delegate:(id)delegate error:(NSError **)error {
451
454
  @synchronized([ScreenCaptureKitRecorder class]) {
452
455
  if (g_isRecording || g_isCleaningUp) {
453
- MRLog(@"โš ๏ธ Already recording or cleaning up (recording:%d cleaning:%d)", g_isRecording, g_isCleaningUp);
456
+ MRLog(@"โš ๏ธ Already recording or cleaning up (recording:%d cleaning:%d)", g_isRecording, g_isCleaningUp);
454
457
  return NO;
455
458
  }
456
-
459
+
457
460
  // Reset any stale state
458
461
  g_isCleaningUp = NO;
462
+
463
+ // Set flag early to prevent race conditions in Electron
464
+ g_isRecording = YES;
459
465
  }
460
-
466
+
461
467
  NSString *outputPath = config[@"outputPath"];
462
468
  if (!outputPath || [outputPath length] == 0) {
463
469
  NSLog(@"โŒ Invalid output path provided");
470
+ @synchronized([ScreenCaptureKitRecorder class]) {
471
+ g_isRecording = NO;
472
+ }
464
473
  return NO;
465
474
  }
466
475
  g_outputPath = outputPath;
@@ -483,12 +492,17 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
483
492
  // CRITICAL DEBUG: Log EXACT audio parameter values
484
493
  MRLog(@"๐Ÿ” AUDIO DEBUG: includeMicrophone type=%@ value=%d", [includeMicrophone class], [includeMicrophone boolValue]);
485
494
  MRLog(@"๐Ÿ” AUDIO DEBUG: includeSystemAudio type=%@ value=%d", [includeSystemAudio class], [includeSystemAudio boolValue]);
486
-
487
- // Get shareable content
495
+
496
+ // ELECTRON FIX: Get shareable content FULLY ASYNCHRONOUSLY
497
+ // NO semaphores, NO blocking - pure async to prevent Electron crashes
488
498
  [SCShareableContent getShareableContentWithCompletionHandler:^(SCShareableContent *content, NSError *contentError) {
499
+ @autoreleasepool {
489
500
  if (contentError) {
490
501
  NSLog(@"โŒ Content error: %@", contentError);
491
- return;
502
+ @synchronized([ScreenCaptureKitRecorder class]) {
503
+ g_isRecording = NO;
504
+ }
505
+ return; // Early return from completion handler block
492
506
  }
493
507
 
494
508
  MRLog(@"โœ… Got %lu displays, %lu windows for pure recording",
@@ -529,7 +543,10 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
529
543
  recordingHeight = (NSInteger)targetWindow.frame.size.height;
530
544
  } else {
531
545
  NSLog(@"โŒ Window ID %@ not found", windowId);
532
- return;
546
+ @synchronized([ScreenCaptureKitRecorder class]) {
547
+ g_isRecording = NO;
548
+ }
549
+ return; // Early return from completion handler block
533
550
  }
534
551
  }
535
552
  // DISPLAY RECORDING
@@ -558,7 +575,10 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
558
575
 
559
576
  if (!targetDisplay) {
560
577
  NSLog(@"โŒ Display not found");
561
- return;
578
+ @synchronized([ScreenCaptureKitRecorder class]) {
579
+ g_isRecording = NO;
580
+ }
581
+ return; // Early return from completion handler block
562
582
  }
563
583
 
564
584
  MRLog(@"๐Ÿ–ฅ๏ธ Recording display %u (%dx%d)",
@@ -660,7 +680,10 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
660
680
  NSError *writerError = nil;
661
681
  if (![ScreenCaptureKitRecorder prepareVideoWriterWithWidth:recordingWidth height:recordingHeight error:&writerError]) {
662
682
  NSLog(@"โŒ Failed to prepare video writer: %@", writerError);
663
- return;
683
+ @synchronized([ScreenCaptureKitRecorder class]) {
684
+ g_isRecording = NO;
685
+ }
686
+ return; // Early return from completion handler block
664
687
  }
665
688
 
666
689
  g_videoQueue = dispatch_queue_create("screen_capture_video_queue", DISPATCH_QUEUE_SERIAL);
@@ -678,7 +701,10 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
678
701
  if (!g_stream) {
679
702
  NSLog(@"โŒ Failed to create pure stream");
680
703
  CleanupWriters();
681
- return;
704
+ @synchronized([ScreenCaptureKitRecorder class]) {
705
+ g_isRecording = NO;
706
+ }
707
+ return; // Early return from completion handler block
682
708
  }
683
709
 
684
710
  NSError *outputError = nil;
@@ -686,7 +712,10 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
686
712
  if (!videoOutputAdded || outputError) {
687
713
  NSLog(@"โŒ Failed to add video output: %@", outputError);
688
714
  CleanupWriters();
689
- return;
715
+ @synchronized([ScreenCaptureKitRecorder class]) {
716
+ g_isRecording = NO;
717
+ }
718
+ return; // Early return from completion handler block
690
719
  }
691
720
 
692
721
  if (g_shouldCaptureAudio) {
@@ -696,7 +725,10 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
696
725
  if (!audioOutputAdded || audioError) {
697
726
  NSLog(@"โŒ Failed to add audio output: %@", audioError);
698
727
  CleanupWriters();
699
- return;
728
+ @synchronized([ScreenCaptureKitRecorder class]) {
729
+ g_isRecording = NO;
730
+ }
731
+ return; // Early return from completion handler block
700
732
  }
701
733
  } else {
702
734
  NSLog(@"โš ๏ธ Audio capture requested but requires macOS 13.0+");
@@ -708,19 +740,24 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
708
740
  if (sessionTimestampNumber) {
709
741
  MRLog(@"๐Ÿ•’ Session timestamp: %@", sessionTimestampNumber);
710
742
  }
711
-
743
+
744
+ // ELECTRON FIX: Start capture FULLY ASYNCHRONOUSLY - NO blocking
712
745
  [g_stream startCaptureWithCompletionHandler:^(NSError *startError) {
713
746
  if (startError) {
714
747
  NSLog(@"โŒ Failed to start pure capture: %@", startError);
715
- g_isRecording = NO;
716
748
  CleanupWriters();
749
+ @synchronized([ScreenCaptureKitRecorder class]) {
750
+ g_isRecording = NO;
751
+ }
717
752
  } else {
718
753
  MRLog(@"๐ŸŽ‰ PURE ScreenCaptureKit recording started successfully!");
719
- g_isRecording = YES;
754
+ // g_isRecording already set to YES at the beginning
720
755
  }
721
756
  }];
722
- }];
723
-
757
+ } // End of autoreleasepool
758
+ }]; // End of getShareableContentWithCompletionHandler
759
+
760
+ // Return immediately - async completion will handle success/failure
724
761
  return YES;
725
762
  }
726
763
 
@@ -729,26 +766,30 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
729
766
  NSLog(@"โš ๏ธ Cannot stop: recording=%d stream=%@ cleaning=%d", g_isRecording, g_stream, g_isCleaningUp);
730
767
  return;
731
768
  }
732
-
769
+
733
770
  MRLog(@"๐Ÿ›‘ Stopping pure ScreenCaptureKit recording");
734
-
771
+
735
772
  // Store stream reference to prevent it from being deallocated
736
773
  SCStream *streamToStop = g_stream;
737
-
738
- [streamToStop stopCaptureWithCompletionHandler:^(NSError *error) {
739
- if (error) {
740
- NSLog(@"โŒ Stop error: %@", error);
741
- }
742
- MRLog(@"โœ… Pure stream stopped");
743
-
744
- // Immediately reset recording state to allow new recordings
745
- g_isRecording = NO;
746
-
747
- // Finalize on main queue to prevent threading issues
748
- dispatch_async(dispatch_get_main_queue(), ^{
774
+
775
+ // ELECTRON FIX: Stop FULLY ASYNCHRONOUSLY - NO blocking, NO semaphores
776
+ [streamToStop stopCaptureWithCompletionHandler:^(NSError *stopError) {
777
+ @autoreleasepool {
778
+ if (stopError) {
779
+ NSLog(@"โŒ Stop error: %@", stopError);
780
+ } else {
781
+ MRLog(@"โœ… Pure stream stopped");
782
+ }
783
+
784
+ // Reset recording state to allow new recordings
785
+ @synchronized([ScreenCaptureKitRecorder class]) {
786
+ g_isRecording = NO;
787
+ }
788
+
789
+ // Cleanup after stop completes
749
790
  CleanupWriters();
750
791
  [ScreenCaptureKitRecorder cleanupVideoWriter];
751
- });
792
+ }
752
793
  }];
753
794
  }
754
795
 
@@ -0,0 +1,90 @@
1
+ #!/usr/bin/env node
2
+ /**
3
+ * Test script for Electron crash fix
4
+ * Tests ScreenCaptureKit recording with the fully asynchronous (non-blocking, semaphore-free) approach
5
+ */
6
+
7
+ const MacRecorder = require('./index.js');
8
+ const path = require('path');
9
+ const fs = require('fs');
10
+
11
+ async function testRecording() {
12
+ console.log('๐Ÿงช Testing ScreenCaptureKit Electron crash fix...\n');
13
+
14
+ const recorder = new MacRecorder();
15
+
16
+ // Check permissions first
17
+ console.log('1๏ธโƒฃ Checking permissions...');
18
+ const permissions = await recorder.checkPermissions();
19
+ console.log(' Permissions:', permissions);
20
+
21
+ if (!permissions.screenRecording) {
22
+ console.error('โŒ Screen recording permission not granted');
23
+ console.log(' Please enable screen recording in System Settings > Privacy & Security');
24
+ process.exit(1);
25
+ }
26
+
27
+ // Get displays
28
+ console.log('\n2๏ธโƒฃ Getting displays...');
29
+ const displays = await recorder.getDisplays();
30
+ console.log(` Found ${displays.length} display(s):`);
31
+ displays.forEach(d => {
32
+ console.log(` - Display ${d.id}: ${d.width}x${d.height} (Primary: ${d.isPrimary})`);
33
+ });
34
+
35
+ // Prepare output path
36
+ const outputDir = path.join(__dirname, 'test-output');
37
+ if (!fs.existsSync(outputDir)) {
38
+ fs.mkdirSync(outputDir, { recursive: true });
39
+ }
40
+
41
+ const outputPath = path.join(outputDir, `electron-fix-test-${Date.now()}.mov`);
42
+
43
+ try {
44
+ // Start recording
45
+ console.log('\n3๏ธโƒฃ Starting recording...');
46
+ console.log(` Output: ${outputPath}`);
47
+
48
+ await recorder.startRecording(outputPath, {
49
+ displayId: displays[0].id,
50
+ captureCursor: true,
51
+ includeMicrophone: false,
52
+ includeSystemAudio: false
53
+ });
54
+
55
+ console.log('โœ… Recording started successfully!');
56
+ console.log(' Recording for 3 seconds...\n');
57
+
58
+ // Record for 3 seconds
59
+ await new Promise(resolve => setTimeout(resolve, 3000));
60
+
61
+ // Stop recording
62
+ console.log('4๏ธโƒฃ Stopping recording...');
63
+ const result = await recorder.stopRecording();
64
+ console.log('โœ… Recording stopped successfully!');
65
+ console.log(' Result:', result);
66
+
67
+ // Check output file
68
+ if (fs.existsSync(outputPath)) {
69
+ const stats = fs.statSync(outputPath);
70
+ console.log(`\nโœ… Output file created: ${outputPath}`);
71
+ console.log(` File size: ${(stats.size / 1024).toFixed(2)} KB`);
72
+ } else {
73
+ console.log('\nโš ๏ธ Output file not found (may still be finalizing)');
74
+ }
75
+
76
+ console.log('\n๐ŸŽ‰ Test completed successfully! No crashes detected.');
77
+ console.log(' The Electron crash fix appears to be working.\n');
78
+
79
+ } catch (error) {
80
+ console.error('\nโŒ Test failed:', error.message);
81
+ console.error(' Stack:', error.stack);
82
+ process.exit(1);
83
+ }
84
+ }
85
+
86
+ // Run test
87
+ testRecording().catch(error => {
88
+ console.error('Fatal error:', error);
89
+ process.exit(1);
90
+ });