node-mac-recorder 2.21.51 → 2.21.52

This diff shows the changes between package versions that have been published to one of the supported public registries. It is provided for informational purposes only and reflects the package contents as they appear in those registries.
@@ -98,12 +98,18 @@ static void MRCameraRemoveFileIfExists(NSString *path) {
98
98
  }
99
99
  }
100
100
 
101
- @interface CameraRecorder : NSObject<AVCaptureFileOutputRecordingDelegate>
101
+ @interface CameraRecorder : NSObject<AVCaptureVideoDataOutputSampleBufferDelegate>
102
102
 
103
103
  @property (nonatomic, strong) dispatch_queue_t workQueue;
104
+ @property (nonatomic, strong) dispatch_queue_t videoQueue;
104
105
  @property (nonatomic, strong) AVCaptureSession *session;
105
106
  @property (nonatomic, strong) AVCaptureDeviceInput *deviceInput;
106
- @property (nonatomic, strong) AVCaptureMovieFileOutput *fileOutput;
107
+ @property (nonatomic, strong) AVCaptureVideoDataOutput *videoOutput;
108
+ @property (nonatomic, strong) AVAssetWriter *writer;
109
+ @property (nonatomic, strong) AVAssetWriterInput *writerInput;
110
+ @property (nonatomic, strong) AVAssetWriterInputPixelBufferAdaptor *pixelBufferAdaptor;
111
+ @property (nonatomic, assign) CMTime startTime;
112
+ @property (nonatomic, assign) BOOL writerStarted;
107
113
  @property (nonatomic, copy) NSString *outputPath;
108
114
  @property (nonatomic, copy) NSString *lastFinishedOutputPath;
109
115
 
@@ -135,10 +141,13 @@ static void MRCameraRemoveFileIfExists(NSString *path) {
135
141
  self = [super init];
136
142
  if (self) {
137
143
  _workQueue = dispatch_queue_create("com.macrecorder.camera", DISPATCH_QUEUE_SERIAL);
144
+ _videoQueue = dispatch_queue_create("com.macrecorder.camera.video", DISPATCH_QUEUE_SERIAL);
138
145
  _startCompleted = YES;
139
146
  _startSucceeded = NO;
140
147
  _activeToken = 0;
141
148
  _unexpectedRestartAttempted = NO;
149
+ _writerStarted = NO;
150
+ _startTime = kCMTimeInvalid;
142
151
  }
143
152
  return self;
144
153
  }
@@ -399,6 +408,172 @@ static void MRCameraRemoveFileIfExists(NSString *path) {
399
408
  return YES;
400
409
  }
401
410
 
411
+ #pragma mark - AVAssetWriter Setup
412
+
413
+ - (BOOL)setupWriterWithSampleBuffer:(CMSampleBufferRef)sampleBuffer error:(NSError **)error {
414
+ if (self.writer) {
415
+ return YES; // Already initialized
416
+ }
417
+
418
+ if (!self.outputPath || [self.outputPath length] == 0) {
419
+ if (error) {
420
+ *error = MRCameraError(-100, @"Output path not set");
421
+ }
422
+ return NO;
423
+ }
424
+
425
+ NSURL *outputURL = [NSURL fileURLWithPath:self.outputPath];
426
+ MRCameraRemoveFileIfExists(self.outputPath);
427
+
428
+ NSError *writerError = nil;
429
+ self.writer = [[AVAssetWriter alloc] initWithURL:outputURL
430
+ fileType:AVFileTypeQuickTimeMovie
431
+ error:&writerError];
432
+ if (!self.writer || writerError) {
433
+ if (error) {
434
+ *error = writerError;
435
+ }
436
+ MRLog(@"❌ Failed to create camera AVAssetWriter: %@", writerError);
437
+ return NO;
438
+ }
439
+
440
+ // Get video dimensions from sample buffer
441
+ CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
442
+ if (!pixelBuffer) {
443
+ if (error) {
444
+ *error = MRCameraError(-101, @"No pixel buffer in sample");
445
+ }
446
+ return NO;
447
+ }
448
+
449
+ size_t width = CVPixelBufferGetWidth(pixelBuffer);
450
+ size_t height = CVPixelBufferGetHeight(pixelBuffer);
451
+
452
+ MRLog(@"🎥 Camera recording dimensions: %zux%zu", width, height);
453
+
454
+ // H.264 video settings (matching current quality)
455
+ NSInteger bitrate = (NSInteger)(width * height * 24); // 24fps target
456
+ bitrate = MAX(bitrate, 5 * 1000 * 1000); // Min 5 Mbps
457
+ bitrate = MIN(bitrate, 30 * 1000 * 1000); // Max 30 Mbps
458
+
459
+ NSDictionary *compressionProps = @{
460
+ AVVideoAverageBitRateKey: @(bitrate),
461
+ AVVideoMaxKeyFrameIntervalKey: @(24),
462
+ AVVideoAllowFrameReorderingKey: @YES,
463
+ AVVideoExpectedSourceFrameRateKey: @(24),
464
+ AVVideoProfileLevelKey: AVVideoProfileLevelH264HighAutoLevel
465
+ };
466
+
467
+ NSDictionary *videoSettings = @{
468
+ AVVideoCodecKey: AVVideoCodecTypeH264,
469
+ AVVideoWidthKey: @(width),
470
+ AVVideoHeightKey: @(height),
471
+ AVVideoCompressionPropertiesKey: compressionProps
472
+ };
473
+
474
+ self.writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
475
+ outputSettings:videoSettings];
476
+ self.writerInput.expectsMediaDataInRealTime = YES;
477
+
478
+ if (![self.writer canAddInput:self.writerInput]) {
479
+ if (error) {
480
+ *error = MRCameraError(-102, @"Cannot add video input to writer");
481
+ }
482
+ return NO;
483
+ }
484
+ [self.writer addInput:self.writerInput];
485
+
486
+ // Create pixel buffer adaptor
487
+ NSDictionary *pixelBufferAttributes = @{
488
+ (NSString*)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA),
489
+ (NSString*)kCVPixelBufferWidthKey: @(width),
490
+ (NSString*)kCVPixelBufferHeightKey: @(height),
491
+ (NSString*)kCVPixelBufferCGImageCompatibilityKey: @YES,
492
+ (NSString*)kCVPixelBufferCGBitmapContextCompatibilityKey: @YES
493
+ };
494
+
495
+ self.pixelBufferAdaptor = [AVAssetWriterInputPixelBufferAdaptor
496
+ assetWriterInputPixelBufferAdaptorWithAssetWriterInput:self.writerInput
497
+ sourcePixelBufferAttributes:pixelBufferAttributes];
498
+
499
+ MRLog(@"✅ Camera AVAssetWriter configured: %.2f Mbps, H.264", bitrate / (1000.0 * 1000.0));
500
+ return YES;
501
+ }
502
+
503
+ #pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate
504
+
505
+ - (void)captureOutput:(AVCaptureOutput *)output
506
+ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
507
+ fromConnection:(AVCaptureConnection *)connection {
508
+
509
+ if (!CMSampleBufferDataIsReady(sampleBuffer)) {
510
+ return;
511
+ }
512
+
513
+ // Setup writer on first frame
514
+ NSError *setupError = nil;
515
+ if (![self setupWriterWithSampleBuffer:sampleBuffer error:&setupError]) {
516
+ if (setupError) {
517
+ MRLog(@"❌ Camera writer setup failed: %@", setupError);
518
+ }
519
+ return;
520
+ }
521
+
522
+ if (!self.writer || !self.writerInput || !self.pixelBufferAdaptor) {
523
+ return;
524
+ }
525
+
526
+ CMTime timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
527
+
528
+ // SYNC INTEGRATION: Register camera as active stream
529
+ MRSyncMarkAudioSample(timestamp);
530
+
531
+ // Start writer session on first frame
532
+ if (!self.writerStarted) {
533
+ if (![self.writer startWriting]) {
534
+ MRLog(@"❌ Camera writer failed to start: %@", self.writer.error);
535
+ return;
536
+ }
537
+ [self.writer startSessionAtSourceTime:kCMTimeZero]; // CRITICAL: t=0 timeline
538
+ self.writerStarted = YES;
539
+ self.startTime = timestamp;
540
+ g_cameraStartTimestamp = CFAbsoluteTimeGetCurrent();
541
+
542
+ MRLog(@"🎥 Camera writer started @ t=0 (source PTS: %.3fs)", CMTimeGetSeconds(timestamp));
543
+
544
+ // Signal start completion
545
+ [self completeStart:YES token:self.activeToken];
546
+ }
547
+
548
+ if (!self.writerInput.readyForMoreMediaData) {
549
+ // Drop frame if writer is not ready (prevents blocking)
550
+ return;
551
+ }
552
+
553
+ // TIMESTAMP NORMALIZATION (audio_recorder.mm pattern)
554
+ CMTime adjustedTimestamp = kCMTimeZero;
555
+ if (CMTIME_IS_VALID(self.startTime)) {
556
+ adjustedTimestamp = CMTimeSubtract(timestamp, self.startTime);
557
+ if (CMTIME_COMPARE_INLINE(adjustedTimestamp, <, kCMTimeZero)) {
558
+ adjustedTimestamp = kCMTimeZero;
559
+ }
560
+ }
561
+
562
+ // Get pixel buffer from sample
563
+ CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
564
+ if (!pixelBuffer) {
565
+ MRLog(@"⚠️ No pixel buffer in camera sample");
566
+ return;
567
+ }
568
+
569
+ // Append to writer with normalized timestamp
570
+ BOOL success = [self.pixelBufferAdaptor appendPixelBuffer:pixelBuffer
571
+ withPresentationTime:adjustedTimestamp];
572
+ if (!success) {
573
+ MRLog(@"⚠️ Failed to append camera pixel buffer: %@", self.writer.error);
574
+ }
575
+ }
576
+
402
577
  #pragma mark - Synchronization helpers
403
578
 
404
579
  - (uint64_t)nextToken {
@@ -445,7 +620,12 @@ static void MRCameraRemoveFileIfExists(NSString *path) {
445
620
  - (void)cleanupAfterStopOnQueue {
446
621
  self.session = nil;
447
622
  self.deviceInput = nil;
448
- self.fileOutput = nil;
623
+ self.videoOutput = nil;
624
+ self.writer = nil;
625
+ self.writerInput = nil;
626
+ self.pixelBufferAdaptor = nil;
627
+ self.writerStarted = NO;
628
+ self.startTime = kCMTimeInvalid;
449
629
  self.isRecording = NO;
450
630
  self.stopInFlight = NO;
451
631
  self.outputPath = nil;
@@ -453,60 +633,6 @@ static void MRCameraRemoveFileIfExists(NSString *path) {
453
633
  g_cameraStartTimestamp = 0.0;
454
634
  }
455
635
 
456
- - (BOOL)attemptRestartAfterUnexpectedStop {
457
- if (self.unexpectedRestartAttempted) {
458
- MRLog(@"⚠️ Camera already retried after unexpected stop; skipping restart");
459
- return NO;
460
- }
461
- self.unexpectedRestartAttempted = YES;
462
-
463
- if (!self.outputPath || [self.outputPath length] == 0) {
464
- MRLog(@"⚠️ Cannot restart camera: missing output path");
465
- return NO;
466
- }
467
- if (!self.session || !self.fileOutput) {
468
- MRLog(@"⚠️ Cannot restart camera: session/output unavailable");
469
- return NO;
470
- }
471
-
472
- if (![self.session isRunning]) {
473
- [self.session startRunning];
474
- }
475
-
476
- NSURL *outputURL = [NSURL fileURLWithPath:self.outputPath];
477
- if (!outputURL) {
478
- MRLog(@"⚠️ Cannot restart camera: invalid output URL");
479
- return NO;
480
- }
481
-
482
- // Move existing clip aside so we don't lose it if restart fails
483
- NSString *backupPath = [self.outputPath stringByAppendingPathExtension:@"bak"];
484
- [[NSFileManager defaultManager] removeItemAtPath:backupPath error:nil];
485
- if ([[NSFileManager defaultManager] fileExistsAtPath:self.outputPath]) {
486
- [[NSFileManager defaultManager] moveItemAtPath:self.outputPath toPath:backupPath error:nil];
487
- }
488
-
489
- MRLog(@"🔁 Attempting automatic camera restart after unexpected stop");
490
- @try {
491
- self.stopInFlight = NO;
492
- self.isRecording = YES;
493
- g_cameraStartTimestamp = 0.0;
494
- [self.fileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
495
- // Remove backup since restart succeeded
496
- [[NSFileManager defaultManager] removeItemAtPath:backupPath error:nil];
497
- return YES;
498
- } @catch (NSException *exception) {
499
- MRLog(@"❌ Camera auto-restart failed: %@", exception.reason);
500
- // Restore previous clip if we created a backup
501
- if ([[NSFileManager defaultManager] fileExistsAtPath:backupPath]) {
502
- [[NSFileManager defaultManager] removeItemAtPath:self.outputPath error:nil];
503
- [[NSFileManager defaultManager] moveItemAtPath:backupPath toPath:self.outputPath error:nil];
504
- }
505
- self.isRecording = NO;
506
- return NO;
507
- }
508
- }
509
-
510
636
  #pragma mark - Recording lifecycle
511
637
 
512
638
  - (void)performStartWithDeviceId:(NSString *)deviceId
@@ -575,26 +701,28 @@ static void MRCameraRemoveFileIfExists(NSString *path) {
575
701
  }
576
702
  [session addInput:deviceInput];
577
703
 
578
- AVCaptureMovieFileOutput *fileOutput = [[AVCaptureMovieFileOutput alloc] init];
579
- if (![session canAddOutput:fileOutput]) {
580
- MRLog(@"❌ Unable to add movie file output to capture session");
704
+ // Setup video data output with delegate pattern (realtime sync)
705
+ AVCaptureVideoDataOutput *videoOutput = [[AVCaptureVideoDataOutput alloc] init];
706
+
707
+ NSDictionary *videoSettings = @{
708
+ (NSString*)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA)
709
+ };
710
+ [videoOutput setVideoSettings:videoSettings];
711
+ videoOutput.alwaysDiscardsLateVideoFrames = NO; // Preserve all frames
712
+
713
+ if (![session canAddOutput:videoOutput]) {
714
+ MRLog(@"❌ Unable to add video data output to capture session");
581
715
  [session commitConfiguration];
582
716
  [self completeStart:NO token:token];
583
717
  return;
584
718
  }
585
- [session addOutput:fileOutput];
719
+ [session addOutput:videoOutput];
586
720
 
587
- // Ensure the file output does not auto-stop due to implicit limits
588
- fileOutput.movieFragmentInterval = kCMTimeInvalid;
589
- fileOutput.maxRecordedDuration = kCMTimeInvalid;
590
- fileOutput.maxRecordedFileSize = 0;
721
+ // Set delegate for per-frame processing
722
+ [videoOutput setSampleBufferDelegate:self queue:self.videoQueue];
591
723
 
592
- AVCaptureConnection *audioConnection = [fileOutput connectionWithMediaType:AVMediaTypeAudio];
593
- if (audioConnection) {
594
- audioConnection.enabled = NO;
595
- }
596
-
597
- AVCaptureConnection *videoConnection = [fileOutput connectionWithMediaType:AVMediaTypeVideo];
724
+ // Configure video mirroring for front camera
725
+ AVCaptureConnection *videoConnection = [videoOutput connectionWithMediaType:AVMediaTypeVideo];
598
726
  if (videoConnection && videoConnection.isVideoMirroringSupported && device.position == AVCaptureDevicePositionFront) {
599
727
  if ([videoConnection respondsToSelector:@selector(setAutomaticallyAdjustsVideoMirroring:)]) {
600
728
  videoConnection.automaticallyAdjustsVideoMirroring = NO;
@@ -611,12 +739,14 @@ static void MRCameraRemoveFileIfExists(NSString *path) {
611
739
 
612
740
  self.session = session;
613
741
  self.deviceInput = deviceInput;
614
- self.fileOutput = fileOutput;
742
+ self.videoOutput = videoOutput;
615
743
  self.outputPath = normalizedPath;
744
+ self.writerStarted = NO;
745
+ self.startTime = kCMTimeInvalid;
616
746
 
617
747
  [session startRunning];
618
748
 
619
- // Give session a brief moment to warm up to avoid false start timeouts on slower devices
749
+ // Give session a brief moment to warm up
620
750
  [NSThread sleepForTimeInterval:0.5];
621
751
 
622
752
  if (self.stopInFlight || token != self.activeToken) {
@@ -625,22 +755,8 @@ static void MRCameraRemoveFileIfExists(NSString *path) {
625
755
  return;
626
756
  }
627
757
 
628
- NSURL *outputURL = [NSURL fileURLWithPath:normalizedPath];
629
- if (!outputURL) {
630
- MRLog(@"❌ Failed to create output URL for camera recording");
631
- [self completeStart:NO token:token];
632
- return;
633
- }
634
-
635
- MRLog(@"🎥 Starting camera recording to %@", normalizedPath);
636
- @try {
637
- [fileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
638
- MRLog(@"📤 Camera setup scheduled on background queue (non-blocking)");
639
- } @catch (NSException *exception) {
640
- MRLog(@"❌ Exception while starting camera recording: %@", exception.reason);
641
- [self completeStart:NO token:token];
642
- return;
643
- }
758
+ MRLog(@"🎥 Camera session running - writer will start on first frame");
759
+ // Note: Recording confirmation will be triggered by first video frame in delegate
644
760
  }
645
761
  }
646
762
 
@@ -706,8 +822,8 @@ static void MRCameraRemoveFileIfExists(NSString *path) {
706
822
 
707
823
  - (BOOL)stopRecording {
708
824
  BOOL hasActiveSession = (self.session && [self.session isRunning]);
709
- BOOL outputRecording = (self.fileOutput && [self.fileOutput isRecording]);
710
- if (!self.isRecording && !hasActiveSession && !outputRecording) {
825
+ BOOL writerActive = (self.writer && self.writerStarted);
826
+ if (!self.isRecording && !hasActiveSession && !writerActive) {
711
827
  [self waitForStopCompletion:5.0];
712
828
  return YES;
713
829
  }
@@ -722,13 +838,38 @@ static void MRCameraRemoveFileIfExists(NSString *path) {
722
838
  self.stopSemaphore = stopSemaphore;
723
839
 
724
840
  dispatch_async(self.workQueue, ^{
725
- if (self.fileOutput && [self.fileOutput isRecording]) {
726
- MRLog(@"🛑 Movie file output stop requested");
727
- [self.fileOutput stopRecording];
728
- } else {
729
- dispatch_semaphore_signal(stopSemaphore);
841
+ // Stop video delegate
842
+ if (self.videoOutput) {
843
+ [self.videoOutput setSampleBufferDelegate:nil queue:nil];
730
844
  }
731
845
 
846
+ // Finalize writer (audio_recorder.mm pattern)
847
+ if (self.writer && self.writerStarted) {
848
+ if (self.writerInput) {
849
+ [self.writerInput markAsFinished];
850
+ }
851
+
852
+ dispatch_semaphore_t writerSemaphore = dispatch_semaphore_create(0);
853
+ [self.writer finishWritingWithCompletionHandler:^{
854
+ if (self.writer.status == AVAssetWriterStatusCompleted) {
855
+ MRLog(@"✅ Camera writer finished");
856
+ } else if (self.writer.status == AVAssetWriterStatusFailed) {
857
+ MRLog(@"❌ Camera writer failed: %@", self.writer.error);
858
+ }
859
+ dispatch_semaphore_signal(writerSemaphore);
860
+ }];
861
+
862
+ // 3 second timeout (matching audio_recorder.mm:269)
863
+ dispatch_time_t timeout = dispatch_time(DISPATCH_TIME_NOW, 3 * NSEC_PER_SEC);
864
+ if (dispatch_semaphore_wait(writerSemaphore, timeout) != 0) {
865
+ MRLog(@"⚠️ Camera writer timeout – canceling");
866
+ [self.writer cancelWriting];
867
+ }
868
+ }
869
+
870
+ dispatch_semaphore_signal(stopSemaphore);
871
+
872
+ // Session cleanup
732
873
  if (self.session && [self.session isRunning]) {
733
874
  [self.session stopRunning];
734
875
  }
@@ -736,13 +877,11 @@ static void MRCameraRemoveFileIfExists(NSString *path) {
736
877
  if (self.session && self.deviceInput && [self.session.inputs containsObject:self.deviceInput]) {
737
878
  [self.session removeInput:self.deviceInput];
738
879
  }
739
- if (self.session && self.fileOutput && [self.session.outputs containsObject:self.fileOutput]) {
740
- [self.session removeOutput:self.fileOutput];
880
+ if (self.session && self.videoOutput && [self.session.outputs containsObject:self.videoOutput]) {
881
+ [self.session removeOutput:self.videoOutput];
741
882
  }
742
883
 
743
- if (!self.fileOutput || ![self.fileOutput isRecording]) {
744
- [self cleanupAfterStopOnQueue];
745
- }
884
+ [self cleanupAfterStopOnQueue];
746
885
  });
747
886
 
748
887
  dispatch_time_t waitTime = dispatch_time(DISPATCH_TIME_NOW, (int64_t)(5 * NSEC_PER_SEC));
@@ -775,49 +914,6 @@ static void MRCameraRemoveFileIfExists(NSString *path) {
775
914
  return self.startSucceeded;
776
915
  }
777
916
 
778
- #pragma mark - AVCaptureFileOutputRecordingDelegate
779
-
780
- - (void)captureOutput:(AVCaptureFileOutput *)output
781
- didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
782
- fromConnections:(NSArray<AVCaptureConnection *> *)connections
783
- error:(NSError *)error {
784
- double elapsedTime = g_cameraStartTimestamp > 0 ? (CFAbsoluteTimeGetCurrent() - g_cameraStartTimestamp) : 0;
785
- MRLog(@"🎬 Camera recording finished (elapsed: %.2fs)", elapsedTime);
786
- if (error) {
787
- MRLog(@"❌ Camera recording finished with error: %@", error);
788
- } else {
789
- MRLog(@"✅ Camera recording finished successfully");
790
- }
791
- self.lastFinishedOutputPath = outputFileURL.path ?: self.outputPath;
792
-
793
- dispatch_semaphore_t stopSemaphore = self.stopSemaphore;
794
- BOOL expectedStop = self.stopInFlight || (stopSemaphore != nil);
795
- dispatch_async(self.workQueue, ^{
796
- if (!expectedStop) {
797
- BOOL restarted = [self attemptRestartAfterUnexpectedStop];
798
- if (restarted) {
799
- MRLog(@"🔁 Camera auto-restart initiated after unexpected stop");
800
- return;
801
- } else {
802
- MRLog(@"⚠️ Camera could not auto-restart after unexpected stop");
803
- }
804
- }
805
-
806
- [self cleanupAfterStopOnQueue];
807
- if (stopSemaphore) {
808
- dispatch_semaphore_signal(stopSemaphore);
809
- }
810
- });
811
- }
812
-
813
- - (void)captureOutput:(AVCaptureFileOutput *)output
814
- didStartRecordingToOutputFileAtURL:(NSURL *)fileURL
815
- fromConnections:(NSArray<AVCaptureConnection *> *)connections {
816
- MRLog(@"✅ Camera file recording started: %@", fileURL.path);
817
- g_cameraStartTimestamp = CFAbsoluteTimeGetCurrent();
818
- [self completeStart:YES token:self.activeToken];
819
- }
820
-
821
917
  @end
822
918
 
823
919
  // MARK: - C Interface