node-mac-recorder 2.13.0 → 2.13.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "node-mac-recorder",
-  "version": "2.13.0",
+  "version": "2.13.2",
   "description": "Native macOS screen recording package for Node.js applications",
   "main": "index.js",
   "keywords": [
@@ -168,9 +168,9 @@ Napi::Value StartRecording(const Napi::CallbackInfo& info) {
     }
 
     @try {
-        // Try ScreenCaptureKit first (macOS 12.3+)
-        NSLog(@"🔍 System Version Check - macOS availability for ScreenCaptureKit");
-        if (@available(macOS 12.3, *)) {
+        // Use stable AVFoundation approach - ScreenCaptureKit disabled until Electron crash resolved
+        NSLog(@"🔍 Using stable AVFoundation approach (ScreenCaptureKit disabled for Electron compatibility)");
+        if (false) { // Disable ScreenCaptureKit completely for now
             NSLog(@"✅ macOS 12.3+ detected - ScreenCaptureKit should be available");
             if ([ScreenCaptureKitRecorder isScreenCaptureKitAvailable]) {
                 NSLog(@"✅ ScreenCaptureKit availability check passed");
@@ -11,6 +11,8 @@ API_AVAILABLE(macos(12.3))
                  error:(NSError **)error;
 + (void)stopRecording;
 + (BOOL)isRecording;
-+ (void)createVideoFromFrames;
++ (void)setupVideoWriter;
++ (void)finalizeVideoWriter;
++ (void)cleanupVideoWriter;
 
 @end
@@ -6,51 +6,67 @@ static id<SCStreamDelegate> g_streamDelegate = nil;
 static id<SCStreamOutput> g_streamOutput = nil;
 static BOOL g_isRecording = NO;
 
-// Frame-based approach for working video
-static NSMutableArray<NSImage *> *g_capturedFrames = nil;
+// Electron-safe direct writing approach
+static AVAssetWriter *g_assetWriter = nil;
+static AVAssetWriterInput *g_assetWriterInput = nil;
+static AVAssetWriterInputPixelBufferAdaptor *g_pixelBufferAdaptor = nil;
 static NSString *g_outputPath = nil;
-static NSInteger g_maxFrames = 150; // 5 seconds at 30fps
+static CMTime g_startTime;
+static CMTime g_currentTime;
+static BOOL g_writerStarted = NO;
 
-@interface FrameCapturDelegate : NSObject <SCStreamDelegate>
+@interface ElectronSafeDelegate : NSObject <SCStreamDelegate>
 @end
 
-@implementation FrameCapturDelegate
+@implementation ElectronSafeDelegate
 - (void)stream:(SCStream *)stream didStopWithError:(NSError *)error {
-    NSLog(@"🛑 Stream stopped in delegate");
+    NSLog(@"🛑 ScreenCaptureKit stream stopped in delegate");
     g_isRecording = NO;
 
     if (error) {
         NSLog(@"❌ Stream stopped with error: %@", error);
+    } else {
+        NSLog(@"✅ ScreenCaptureKit stream stopped successfully");
     }
+
+    // Finalize video writer
+    [ScreenCaptureKitRecorder finalizeVideoWriter];
 }
 @end
 
-@interface FrameCaptureOutput : NSObject <SCStreamOutput>
+@interface ElectronSafeOutput : NSObject <SCStreamOutput>
 @end
 
-@implementation FrameCaptureOutput
+@implementation ElectronSafeOutput
 - (void)stream:(SCStream *)stream didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer ofType:(SCStreamOutputType)type {
-    if (!g_isRecording || type != SCStreamOutputTypeScreen) {
+    if (!g_isRecording || type != SCStreamOutputTypeScreen || !g_assetWriterInput) {
         return;
     }
 
     @autoreleasepool {
-        CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
-        if (pixelBuffer && g_capturedFrames.count < g_maxFrames) {
-
-            // Convert pixel buffer to UIImage
-            CIContext *context = [CIContext context];
-            CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];
-            CGImageRef cgImage = [context createCGImage:ciImage fromRect:ciImage.extent];
+        // Initialize video writer on first frame
+        if (!g_writerStarted && g_assetWriter && g_assetWriterInput) {
+            g_startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
+            g_currentTime = g_startTime;
 
-            if (cgImage) {
-                NSImage *image = [[NSImage alloc] initWithCGImage:cgImage size:NSZeroSize];
-                [g_capturedFrames addObject:image];
-
-                CGImageRelease(cgImage);
+            [g_assetWriter startWriting];
+            [g_assetWriter startSessionAtSourceTime:g_startTime];
+            g_writerStarted = YES;
+            NSLog(@"✅ Electron-safe video writer started");
+        }
+
+        // Write sample buffer directly (Electron-safe approach)
+        if (g_writerStarted && g_assetWriterInput.isReadyForMoreMediaData) {
+            CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+            if (pixelBuffer && g_pixelBufferAdaptor) {
+                CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
+                CMTime relativeTime = CMTimeSubtract(presentationTime, g_startTime);
 
-                if (g_capturedFrames.count % 30 == 0) {
-                    NSLog(@"📸 Captured %lu frames", (unsigned long)g_capturedFrames.count);
+                BOOL success = [g_pixelBufferAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:relativeTime];
+                if (success) {
+                    g_currentTime = relativeTime;
+                } else {
+                    NSLog(@"⚠️ Failed to append pixel buffer");
                 }
             }
         }
@@ -73,9 +89,12 @@ static NSInteger g_maxFrames = 150; // 5 seconds at 30fps
     }
 
     g_outputPath = config[@"outputPath"];
-    g_capturedFrames = [NSMutableArray array];
+    g_writerStarted = NO;
 
-    NSLog(@"🎬 Starting simple frame capture approach");
+    // Setup Electron-safe video writer
+    [ScreenCaptureKitRecorder setupVideoWriter];
+
+    NSLog(@"🎬 Starting Electron-safe ScreenCaptureKit recording");
 
     [SCShareableContent getShareableContentWithCompletionHandler:^(SCShareableContent *content, NSError *contentError) {
         if (contentError) {
@@ -96,9 +115,9 @@ static NSInteger g_maxFrames = 150; // 5 seconds at 30fps
         streamConfig.minimumFrameInterval = CMTimeMake(1, 30);
         streamConfig.pixelFormat = kCVPixelFormatType_32BGRA;
 
-        // Create delegates
-        g_streamDelegate = [[FrameCapturDelegate alloc] init];
-        g_streamOutput = [[FrameCaptureOutput alloc] init];
+        // Create Electron-safe delegates
+        g_streamDelegate = [[ElectronSafeDelegate alloc] init];
+        g_streamOutput = [[ElectronSafeOutput alloc] init];
 
         // Create stream
         g_stream = [[SCStream alloc] initWithFilter:filter configuration:streamConfig delegate:g_streamDelegate];
@@ -126,18 +145,15 @@ static NSInteger g_maxFrames = 150; // 5 seconds at 30fps
         return;
     }
 
-    NSLog(@"🛑 Stopping frame capture");
+    NSLog(@"🛑 Stopping Electron-safe ScreenCaptureKit recording");
 
     [g_stream stopCaptureWithCompletionHandler:^(NSError *stopError) {
         if (stopError) {
            NSLog(@"❌ Stop error: %@", stopError);
        } else {
-            NSLog(@"✅ Stream stopped successfully in completion handler");
+            NSLog(@"✅ ScreenCaptureKit stream stopped in completion handler");
        }
-
-        // Call video creation directly since delegate might not be called
-        NSLog(@"🚀 About to call createVideoFromFrames");
-        [ScreenCaptureKitRecorder createVideoFromFrames];
+        // Video finalization happens in delegate
     }];
 }
 
@@ -145,145 +161,86 @@ static NSInteger g_maxFrames = 150; // 5 seconds at 30fps
     return g_isRecording;
 }
 
-+ (void)createVideoFromFrames {
-    NSLog(@"🎬 createVideoFromFrames called with %lu frames", (unsigned long)g_capturedFrames.count);
-
-    if (g_capturedFrames.count == 0) {
-        NSLog(@"❌ No frames captured");
-        return;
++ (void)setupVideoWriter {
+    if (g_assetWriter) {
+        return; // Already setup
     }
 
-    NSLog(@"🎬 Creating video from %lu frames to path: %@", (unsigned long)g_capturedFrames.count, g_outputPath);
+    NSLog(@"🔧 Setting up Electron-safe video writer");
 
-    // Use simple approach - write first frame as image to test
-    NSImage *firstFrame = g_capturedFrames.firstObject;
-    if (firstFrame) {
-        NSString *testImagePath = [g_outputPath stringByReplacingOccurrencesOfString:@".mov" withString:@"_test.png"];
-
-        // Convert NSImage to PNG data
-        CGImageRef cgImage = [firstFrame CGImageForProposedRect:NULL context:NULL hints:NULL];
-        NSBitmapImageRep *bitmapRep = [[NSBitmapImageRep alloc] initWithCGImage:cgImage];
-        NSData *pngData = [bitmapRep representationUsingType:NSBitmapImageFileTypePNG properties:@{}];
-        [pngData writeToFile:testImagePath atomically:YES];
-        NSLog(@"✅ Test image saved: %@", testImagePath);
-    }
-
-    // For now, just create a simple video file that works
     NSURL *outputURL = [NSURL fileURLWithPath:g_outputPath];
-
-    // Create a working video using AVAssetWriter with frames
     NSError *error = nil;
-    AVAssetWriter *assetWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:AVFileTypeQuickTimeMovie error:&error];
 
-    if (error) {
-        NSLog(@"❌ Asset writer error: %@", error);
+    g_assetWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:AVFileTypeQuickTimeMovie error:&error];
+
+    if (error || !g_assetWriter) {
+        NSLog(@"❌ Failed to create asset writer: %@", error);
         return;
     }
 
-    // Simple video settings that definitely work
+    // Electron-safe video settings
     NSDictionary *videoSettings = @{
         AVVideoCodecKey: AVVideoCodecTypeH264,
         AVVideoWidthKey: @1280,
         AVVideoHeightKey: @720,
         AVVideoCompressionPropertiesKey: @{
-            AVVideoAverageBitRateKey: @(1280 * 720 * 3)
+            AVVideoAverageBitRateKey: @(1280 * 720 * 2),
+            AVVideoMaxKeyFrameIntervalKey: @30
         }
     };
 
-    AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
-    writerInput.expectsMediaDataInRealTime = NO;
+    g_assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
+    g_assetWriterInput.expectsMediaDataInRealTime = YES; // Important for live capture
 
-    // Create pixel buffer pool
+    // Pixel buffer attributes matching ScreenCaptureKit format
     NSDictionary *pixelBufferAttributes = @{
-        (NSString*)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32ARGB),
+        (NSString*)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA),
         (NSString*)kCVPixelBufferWidthKey: @1280,
         (NSString*)kCVPixelBufferHeightKey: @720
     };
 
-    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput sourcePixelBufferAttributes:pixelBufferAttributes];
+    g_pixelBufferAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:g_assetWriterInput sourcePixelBufferAttributes:pixelBufferAttributes];
 
-    if ([assetWriter canAddInput:writerInput]) {
-        [assetWriter addInput:writerInput];
+    if ([g_assetWriter canAddInput:g_assetWriterInput]) {
+        [g_assetWriter addInput:g_assetWriterInput];
+        NSLog(@"✅ Electron-safe video writer setup complete");
+    } else {
+        NSLog(@"❌ Failed to add input to asset writer");
     }
+}
+
++ (void)finalizeVideoWriter {
+    NSLog(@"🎬 Finalizing Electron-safe video writer");
 
-    // Start writing session
-    [assetWriter startWriting];
-    [assetWriter startSessionAtSourceTime:kCMTimeZero];
+    if (!g_assetWriter || !g_writerStarted) {
+        NSLog(@"⚠️ Video writer not started, cleaning up");
+        [ScreenCaptureKitRecorder cleanupVideoWriter];
+        return;
+    }
 
-    // Create simple 1-second video with first frame
-    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), ^{
-
-        // Add captured frames
-        for (int i = 0; i < g_capturedFrames.count && i < 30; i++) { // Limit to 30 frames for now
-            if (writerInput.isReadyForMoreMediaData) {
-
-                NSImage *frameImage = g_capturedFrames[i];
-
-                // Create pixel buffer
-                CVPixelBufferRef pixelBuffer = NULL;
-                CVPixelBufferCreate(kCFAllocatorDefault, 1280, 720, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef)pixelBufferAttributes, &pixelBuffer);
-
-                if (pixelBuffer) {
-                    // Lock pixel buffer for writing
-                    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
-
-                    // Get pixel buffer info
-                    void *pixelData = CVPixelBufferGetBaseAddress(pixelBuffer);
-                    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
-
-                    // Create graphics context on pixel buffer
-                    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
-                    CGContextRef context = CGBitmapContextCreate(pixelData, 1280, 720, 8, bytesPerRow, colorSpace, kCGImageAlphaPremultipliedFirst | kCGBitmapByteOrder32Host);
-
-                    if (context) {
-                        // Clear the context
-                        CGContextClearRect(context, CGRectMake(0, 0, 1280, 720));
-
-                        // Get CGImage from NSImage
-                        CGImageRef cgImage = [frameImage CGImageForProposedRect:NULL context:NULL hints:NULL];
-                        if (cgImage) {
-                            // Draw the image to fill the entire frame
-                            CGContextDrawImage(context, CGRectMake(0, 0, 1280, 720), cgImage);
-                        }
-
-                        CGContextRelease(context);
-                    }
-
-                    CGColorSpaceRelease(colorSpace);
-                    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
-
-                    // Add frame with timing
-                    CMTime frameTime = CMTimeMake(i, 30);
-                    BOOL success = [adaptor appendPixelBuffer:pixelBuffer withPresentationTime:frameTime];
-
-                    if (!success) {
-                        NSLog(@"❌ Failed to append frame %d", i);
-                    }
-
-                    CVPixelBufferRelease(pixelBuffer);
-                }
-            } else {
-                // Wait for writer to be ready
-                [NSThread sleepForTimeInterval:0.01];
-                i--; // Retry this frame
-            }
+    [g_assetWriterInput markAsFinished];
+
+    [g_assetWriter finishWritingWithCompletionHandler:^{
+        if (g_assetWriter.status == AVAssetWriterStatusCompleted) {
+            NSLog(@"✅ Electron-safe video created successfully: %@", g_outputPath);
+        } else {
+            NSLog(@"❌ Video creation failed: %@", g_assetWriter.error);
         }
 
-        [writerInput markAsFinished];
-        [assetWriter finishWritingWithCompletionHandler:^{
-            if (assetWriter.status == AVAssetWriterStatusCompleted) {
-                NSLog(@"✅ Simple video created: %@", g_outputPath);
-            } else {
-                NSLog(@"❌ Video creation failed: %@", assetWriter.error);
-            }
-
-            // Cleanup
-            g_capturedFrames = nil;
-            g_stream = nil;
-            g_streamDelegate = nil;
-            g_streamOutput = nil;
-        }];
-    });
+        [ScreenCaptureKitRecorder cleanupVideoWriter];
+    }];
+}
+
++ (void)cleanupVideoWriter {
+    g_assetWriter = nil;
+    g_assetWriterInput = nil;
+    g_pixelBufferAdaptor = nil;
+    g_writerStarted = NO;
+    g_stream = nil;
+    g_streamDelegate = nil;
+    g_streamOutput = nil;
+
+    NSLog(@"🧹 Video writer cleanup complete");
 }
 
 @end