node-mac-recorder 2.16.11 → 2.16.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,7 +1,7 @@
 {
   "permissions": {
     "allow": [
-      "Bash(FORCE_AVFOUNDATION=1 node -e \"\nconst MacRecorder = require(''./index.js'');\nconst recorder = new MacRecorder();\nrecorder.startRecording(''/tmp/test-final.mov'')\n .then(success => {\n console.log(''Recording start:'', success ? ''SUCCESS'' : ''FAILED'');\n if (success) {\n setTimeout(() => {\n recorder.stopRecording().then(() => {\n console.log(''Recording stopped'');\n const fs = require(''fs'');\n if (fs.existsSync(''/tmp/test-final.mov'')) {\n console.log(''File created:'', Math.round(fs.statSync(''/tmp/test-final.mov'').size/1024) + ''KB'');\n }\n });\n }, 2000);\n }\n })\n .catch(console.error);\n\")"
+      "Bash(FORCE_AVFOUNDATION=1 node -e \"\nconsole.log(''🧪 Testing fixed AVFoundation...'');\nconst MacRecorder = require(''./index.js'');\nconst recorder = new MacRecorder();\n\nrecorder.startRecording(''/tmp/electron-test2.mov'')\n .then(success => {\n console.log(''Start:'', success ? ''✅ SUCCESS'' : ''❌ FAILED'');\n if (success) {\n setTimeout(() => {\n console.log(''⏹️ Stopping...'');\n recorder.stopRecording().then(() => {\n console.log(''✅ Stop completed'');\n const fs = require(''fs'');\n if (fs.existsSync(''/tmp/electron-test2.mov'')) {\n console.log(''📹 File:'', Math.round(fs.statSync(''/tmp/electron-test2.mov'').size/1024) + ''KB'');\n console.log(''🎉 Fix successful!'');\n }\n });\n }, 2000);\n }\n })\n .catch(console.error);\n\")"
     ],
     "deny": [],
     "ask": []
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "node-mac-recorder",
-  "version": "2.16.11",
+  "version": "2.16.12",
   "description": "Native macOS screen recording package for Node.js applications",
   "main": "index.js",
   "keywords": [
@@ -123,7 +123,7 @@ extern "C" bool startAVFoundationRecording(const std::string& outputPath,
     g_avCaptureRect = captureRect;
     g_avFrameNumber = 0;
 
-    // Start capture timer (15 FPS for compatibility)
+    // Start capture timer (10 FPS for Electron compatibility)
     dispatch_queue_t captureQueue = dispatch_queue_create("AVFoundationCaptureQueue", DISPATCH_QUEUE_SERIAL);
     g_avTimer = dispatch_source_create(DISPATCH_SOURCE_TYPE_TIMER, 0, 0, captureQueue);
 
@@ -132,78 +132,107 @@ extern "C" bool startAVFoundationRecording(const std::string& outputPath,
         return false;
     }
 
-    uint64_t interval = NSEC_PER_SEC / 15; // 15 FPS
+    uint64_t interval = NSEC_PER_SEC / 10; // 10 FPS for Electron stability
     dispatch_source_set_timer(g_avTimer, dispatch_time(DISPATCH_TIME_NOW, 0), interval, interval / 10);
 
     dispatch_source_set_event_handler(g_avTimer, ^{
         if (!g_avIsRecording) return;
 
         @autoreleasepool {
-            // Capture screen
-            CGImageRef screenImage = nil;
-            if (CGRectIsEmpty(g_avCaptureRect)) {
-                screenImage = CGDisplayCreateImage(g_avDisplayID);
-            } else {
-                CGImageRef fullScreen = CGDisplayCreateImage(g_avDisplayID);
-                if (fullScreen) {
-                    screenImage = CGImageCreateWithImageInRect(fullScreen, g_avCaptureRect);
-                    CGImageRelease(fullScreen);
+            @try {
+                // Capture screen with Electron-safe error handling
+                CGImageRef screenImage = nil;
+                if (CGRectIsEmpty(g_avCaptureRect)) {
+                    screenImage = CGDisplayCreateImage(g_avDisplayID);
+                } else {
+                    CGImageRef fullScreen = CGDisplayCreateImage(g_avDisplayID);
+                    if (fullScreen) {
+                        screenImage = CGImageCreateWithImageInRect(fullScreen, g_avCaptureRect);
+                        CGImageRelease(fullScreen);
+                    }
                 }
-            }
-
-            if (!screenImage) return;
-
-            // Convert to pixel buffer
-            CVPixelBufferRef pixelBuffer = nil;
-            CVReturn cvRet = CVPixelBufferPoolCreatePixelBuffer(NULL, g_avPixelBufferAdaptor.pixelBufferPool, &pixelBuffer);
-
-            if (cvRet == kCVReturnSuccess && pixelBuffer) {
-                CVPixelBufferLockBaseAddress(pixelBuffer, 0);
-
-                void *pixelData = CVPixelBufferGetBaseAddress(pixelBuffer);
-                size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
-
-                CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
 
-                // Match bitmap info to pixel format for compatibility
-                CGBitmapInfo bitmapInfo;
-                OSType currentPixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer);
-                if (currentPixelFormat == kCVPixelFormatType_32ARGB) {
-                    bitmapInfo = kCGImageAlphaPremultipliedFirst | kCGBitmapByteOrder32Big;
-                } else { // kCVPixelFormatType_32BGRA
-                    bitmapInfo = kCGImageAlphaPremultipliedFirst | kCGBitmapByteOrder32Little;
+                if (!screenImage) {
+                    NSLog(@"⚠️ Failed to capture screen image, skipping frame");
+                    return;
                 }
+
+                // Convert to pixel buffer with Electron-safe error handling
+                CVPixelBufferRef pixelBuffer = nil;
+                CVReturn cvRet = CVPixelBufferPoolCreatePixelBuffer(NULL, g_avPixelBufferAdaptor.pixelBufferPool, &pixelBuffer);
 
-                CGContextRef context = CGBitmapContextCreate(pixelData,
-                                                             CVPixelBufferGetWidth(pixelBuffer),
-                                                             CVPixelBufferGetHeight(pixelBuffer),
-                                                             8, bytesPerRow, colorSpace, bitmapInfo);
-
-                if (context) {
-                    CGContextDrawImage(context, CGRectMake(0, 0, CVPixelBufferGetWidth(pixelBuffer), CVPixelBufferGetHeight(pixelBuffer)), screenImage);
-                    CGContextRelease(context);
-                }
-                CGColorSpaceRelease(colorSpace);
-                CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
-
-                // Write frame
-                if (g_avVideoInput.readyForMoreMediaData) {
-                    CMTime frameTime = CMTimeAdd(g_avStartTime, CMTimeMakeWithSeconds(g_avFrameNumber / 15.0, 600));
-                    [g_avPixelBufferAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:frameTime];
-                    g_avFrameNumber++;
+                if (cvRet == kCVReturnSuccess && pixelBuffer) {
+                    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
+
+                    void *pixelData = CVPixelBufferGetBaseAddress(pixelBuffer);
+                    if (!pixelData) {
+                        NSLog(@"⚠️ Failed to get pixel buffer base address");
+                        CVPixelBufferRelease(pixelBuffer);
+                        CGImageRelease(screenImage);
+                        return;
+                    }
+
+                    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
+                    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
+                    if (!colorSpace) {
+                        NSLog(@"⚠️ Failed to create color space");
+                        CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
+                        CVPixelBufferRelease(pixelBuffer);
+                        CGImageRelease(screenImage);
+                        return;
+                    }
+
+                    // Match bitmap info to pixel format for compatibility
+                    CGBitmapInfo bitmapInfo;
+                    OSType currentPixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer);
+                    if (currentPixelFormat == kCVPixelFormatType_32ARGB) {
+                        bitmapInfo = kCGImageAlphaPremultipliedFirst | kCGBitmapByteOrder32Big;
+                    } else { // kCVPixelFormatType_32BGRA
+                        bitmapInfo = kCGImageAlphaPremultipliedFirst | kCGBitmapByteOrder32Little;
+                    }
+
+                    CGContextRef context = CGBitmapContextCreate(pixelData,
+                                                                 CVPixelBufferGetWidth(pixelBuffer),
+                                                                 CVPixelBufferGetHeight(pixelBuffer),
+                                                                 8, bytesPerRow, colorSpace, bitmapInfo);
+
+                    if (context) {
+                        CGContextDrawImage(context, CGRectMake(0, 0, CVPixelBufferGetWidth(pixelBuffer), CVPixelBufferGetHeight(pixelBuffer)), screenImage);
+                        CGContextRelease(context);
+
+                        // Write frame only if input is ready
+                        if (g_avVideoInput && g_avVideoInput.readyForMoreMediaData) {
+                            CMTime frameTime = CMTimeAdd(g_avStartTime, CMTimeMakeWithSeconds(g_avFrameNumber / 10.0, 600));
+                            BOOL appendSuccess = [g_avPixelBufferAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:frameTime];
+                            if (appendSuccess) {
+                                g_avFrameNumber++;
+                            } else {
+                                NSLog(@"⚠️ Failed to append pixel buffer");
+                            }
+                        }
+                    } else {
+                        NSLog(@"⚠️ Failed to create bitmap context");
+                    }
+
+                    CGColorSpaceRelease(colorSpace);
+                    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
+                    CVPixelBufferRelease(pixelBuffer);
+                } else {
+                    NSLog(@"⚠️ Failed to create pixel buffer: %d", cvRet);
                 }
 
-                CVPixelBufferRelease(pixelBuffer);
+                CGImageRelease(screenImage);
+            } @catch (NSException *exception) {
+                NSLog(@"❌ Exception in AVFoundation capture loop: %@", exception.reason);
+                g_avIsRecording = false; // Stop recording on exception to prevent crash
             }
-
-            CGImageRelease(screenImage);
         }
     });
 
     dispatch_resume(g_avTimer);
     g_avIsRecording = true;
 
-    NSLog(@"🎥 AVFoundation recording started: %dx%d @ 15fps",
+    NSLog(@"🎥 AVFoundation recording started: %dx%d @ 10fps",
           (int)recordingSize.width, (int)recordingSize.height);
 
     return true;
@@ -222,10 +251,11 @@ extern "C" bool stopAVFoundationRecording() {
     g_avIsRecording = false;
 
     @try {
-        // Stop timer
+        // Stop timer with Electron-safe cleanup
        if (g_avTimer) {
            dispatch_source_cancel(g_avTimer);
            g_avTimer = nil;
+            NSLog(@"✅ AVFoundation timer stopped safely");
        }
 
        // Finish writing