node-mac-recorder 2.21.21 → 2.21.23
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/electron-safe-index.js +2 -2
- package/index.js +12 -2
- package/package.json +1 -1
- package/src/audio_recorder.mm +56 -9
- package/src/avfoundation_recorder.mm +14 -8
- package/src/camera_recorder.mm +72 -15
- package/src/electron_safe/screen_capture_electron.mm +54 -3
- package/src/mac_recorder.mm +51 -16
- package/src/screen_capture_kit.mm +18 -6
package/electron-safe-index.js
CHANGED

@@ -44,8 +44,8 @@ class ElectronSafeMacRecorder extends EventEmitter {
     this.options = {
       includeMicrophone: false,
       includeSystemAudio: false,
-      quality: "
-      frameRate:
+      quality: "high",
+      frameRate: 60,
       captureArea: null,
       captureCursor: false,
       showClicks: false,
package/index.js
CHANGED

@@ -43,8 +43,8 @@ class MacRecorder extends EventEmitter {
     this.options = {
       includeMicrophone: false, // Microphone off by default
       includeSystemAudio: false, // System audio off by default - the user must enable it explicitly
-      quality: "
-      frameRate:
+      quality: "high",
+      frameRate: 60,
       captureArea: null, // { x, y, width, height }
       captureCursor: false, // Cursor hidden by default
       showClicks: false,

@@ -103,6 +103,7 @@ class MacRecorder extends EventEmitter {
       position: device?.position ?? "unspecified",
       transportType: device?.transportType ?? null,
       isConnected: device?.isConnected ?? false,
+      isDefault: device?.isDefault === true,
       hasFlash: device?.hasFlash ?? false,
       supportsDepth: device?.supportsDepth ?? false,
       deviceType: device?.deviceType ?? null,

@@ -180,6 +181,13 @@ class MacRecorder extends EventEmitter {
     if (options.captureCamera !== undefined) {
       this.options.captureCamera = options.captureCamera === true;
     }
+    if (options.frameRate !== undefined) {
+      const fps = parseInt(options.frameRate, 10);
+      if (!Number.isNaN(fps) && fps > 0) {
+        // Clamp to a reasonable range (1-120)
+        this.options.frameRate = Math.min(Math.max(fps, 1), 120);
+      }
+    }
     if (options.cameraDeviceId !== undefined) {
       this.options.cameraDeviceId =
         typeof options.cameraDeviceId === "string" && options.cameraDeviceId.length > 0

@@ -496,6 +504,8 @@ class MacRecorder extends EventEmitter {
       captureCamera: this.options.captureCamera === true,
       cameraDeviceId: this.options.cameraDeviceId || null,
       sessionTimestamp,
+      frameRate: this.options.frameRate || 60,
+      quality: this.options.quality || "high",
     };

     if (cameraFilePath) {
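The index.js changes above wire a caller-supplied frameRate (clamped to 1-120) and the "high" quality default through to the native recorders. A minimal usage sketch follows; the require name matches the package, but the startRecording/stopRecording method names and signatures are assumptions drawn from the class shown above, not a verified API reference.

```js
// Hedged sketch: exercises the 2.21.23 frameRate/quality handling.
// Assumptions: the package's default export is the MacRecorder class shown
// above, and it exposes startRecording(outputPath, options) / stopRecording().
const MacRecorder = require("node-mac-recorder");

async function recordFiveSeconds() {
  const recorder = new MacRecorder();

  await recorder.startRecording("./capture.mov", {
    frameRate: 30,     // parsed with parseInt and clamped to 1-120
    quality: "high",   // matches the new default
    captureCursor: true,
  });

  // Invalid values (0, NaN, "abc") are ignored, so the default of 60 fps applies.
  await new Promise((resolve) => setTimeout(resolve, 5000));
  await recorder.stopRecording();
}

recordFiveSeconds().catch(console.error);
```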
package/package.json
CHANGED
package/src/audio_recorder.mm
CHANGED

@@ -224,10 +224,30 @@ static dispatch_queue_t g_audioCaptureQueue = nil;
         return YES;
     }

-
+    // CRITICAL FIX: For external devices (especially Continuity Microphone),
+    // stopRunning can hang if device is disconnected. Use async approach.
+    MRLog(@"🛑 AudioRecorder: Stopping session (external device safe)...");
+
+    // Stop session on background thread to avoid blocking
+    AVCaptureSession *sessionToStop = self.session;
+    AVCaptureAudioDataOutput *outputToStop = self.audioOutput;
+
+    // Clear references FIRST to prevent new samples
     self.session = nil;
     self.audioOutput = nil;

+    // Stop session asynchronously with timeout protection
+    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), ^{
+        @autoreleasepool {
+            if ([sessionToStop isRunning]) {
+                MRLog(@"🛑 Stopping AVCaptureSession...");
+                [sessionToStop stopRunning];
+                MRLog(@"✅ AVCaptureSession stopped");
+            }
+            // Release happens automatically when block completes
+        }
+    });
+
     // CRITICAL FIX: Check if writer exists before trying to finish it
     if (self.writer) {
         // Only mark as finished if writerInput exists

@@ -243,16 +263,16 @@ static dispatch_queue_t g_audioCaptureQueue = nil;
             dispatch_semaphore_signal(semaphore);
         }];

-        // Reduced timeout to
-        dispatch_time_t timeout = dispatch_time(DISPATCH_TIME_NOW, (int64_t)(
-        dispatch_semaphore_wait(semaphore, timeout);
+        // Reduced timeout to 1 second for external devices
+        dispatch_time_t timeout = dispatch_time(DISPATCH_TIME_NOW, (int64_t)(1 * NSEC_PER_SEC));
+        long result = dispatch_semaphore_wait(semaphore, timeout);

-        if (!finished) {
-            MRLog(@"⚠️ AudioRecorder: Timed out waiting for writer
+        if (result != 0 || !finished) {
+            MRLog(@"⚠️ AudioRecorder: Timed out waiting for writer (external device?) - forcing cancel");
             // Force cancel if timeout
             [self.writer cancelWriting];
         } else {
-            MRLog(@"✅ AudioRecorder
+            MRLog(@"✅ AudioRecorder writer finished successfully");
         }
     } else {
         MRLog(@"⚠️ AudioRecorder: No writer to finish (no audio captured)");

@@ -264,6 +284,7 @@ static dispatch_queue_t g_audioCaptureQueue = nil;
     self.startTime = kCMTimeInvalid;
     self.outputPath = nil;

+    MRLog(@"✅ AudioRecorder stopped (safe for external devices)");
     return YES;
 }

@@ -334,11 +355,37 @@ NSArray<NSDictionary *> *listAudioCaptureDevices() {
                                         position:AVCaptureDevicePositionUnspecified];

    for (AVCaptureDevice *device in session.devices) {
+        // PRIORITY FIX: MacBook built-in devices should be default, not external devices
+        // Check if this is a built-in device (MacBook's own microphone)
+        NSString *deviceName = device.localizedName ?: @"";
+        BOOL isBuiltIn = NO;
+
+        // Built-in detection: Check for "MacBook", "iMac", "Mac Studio", "Mac mini", "Mac Pro" in name
+        if ([deviceName rangeOfString:@"MacBook" options:NSCaseInsensitiveSearch].location != NSNotFound ||
+            [deviceName rangeOfString:@"iMac" options:NSCaseInsensitiveSearch].location != NSNotFound ||
+            [deviceName rangeOfString:@"Mac Studio" options:NSCaseInsensitiveSearch].location != NSNotFound ||
+            [deviceName rangeOfString:@"Mac mini" options:NSCaseInsensitiveSearch].location != NSNotFound ||
+            [deviceName rangeOfString:@"Mac Pro" options:NSCaseInsensitiveSearch].location != NSNotFound) {
+            isBuiltIn = YES;
+        }
+
+        // Also check for generic "Built-in" in name
+        if ([deviceName rangeOfString:@"Built-in" options:NSCaseInsensitiveSearch].location != NSNotFound) {
+            isBuiltIn = YES;
+        }
+
+        // External devices (Continuity, USB, etc.) should NOT be default
+        if ([deviceName rangeOfString:@"Continuity" options:NSCaseInsensitiveSearch].location != NSNotFound ||
+            [deviceName rangeOfString:@"iPhone" options:NSCaseInsensitiveSearch].location != NSNotFound ||
+            [deviceName rangeOfString:@"iPad" options:NSCaseInsensitiveSearch].location != NSNotFound) {
+            isBuiltIn = NO;
+        }
+
        NSDictionary *info = @{
            @"id": device.uniqueID ?: @"",
-            @"name":
+            @"name": deviceName,
            @"manufacturer": device.manufacturer ?: @"",
-            @"isDefault": @(
+            @"isDefault": @(isBuiltIn), // Only built-in devices are default
            @"transportType": @(device.transportType)
        };
        [devicesInfo addObject:info];
package/src/avfoundation_recorder.mm
CHANGED

@@ -34,7 +34,8 @@ extern "C" bool startAVFoundationRecording(const std::string& outputPath,
                                            bool includeMicrophone,
                                            bool includeSystemAudio,
                                            NSString* audioDeviceId,
-                                           NSString* audioOutputPath
+                                           NSString* audioOutputPath,
+                                           double requestedFrameRate) {

    if (g_avIsRecording) {
        NSLog(@"❌ AVFoundation recording already in progress");

@@ -129,15 +130,20 @@ extern "C" bool startAVFoundationRecording(const std::string& outputPath,
    NSLog(@"🎬 ULTRA QUALITY AVFoundation: %dx%d, bitrate=%.2fMbps",
          (int)recordingSize.width, (int)recordingSize.height, bitrate / (1000.0 * 1000.0));

+    // Resolve target FPS
+    double fps = requestedFrameRate > 0 ? requestedFrameRate : 60.0;
+    if (fps < 1.0) fps = 1.0;
+    if (fps > 120.0) fps = 120.0;
+
    NSDictionary *videoSettings = @{
        AVVideoCodecKey: codecKey,
        AVVideoWidthKey: @((int)recordingSize.width),
        AVVideoHeightKey: @((int)recordingSize.height),
        AVVideoCompressionPropertiesKey: @{
            AVVideoAverageBitRateKey: @(bitrate),
-            AVVideoMaxKeyFrameIntervalKey: @
+            AVVideoMaxKeyFrameIntervalKey: @((int)fps),
            AVVideoAllowFrameReorderingKey: @YES,
-            AVVideoExpectedSourceFrameRateKey: @
+            AVVideoExpectedSourceFrameRateKey: @((int)fps),
            AVVideoQualityKey: @(0.95), // 0.0-1.0, higher is better
            AVVideoProfileLevelKey: AVVideoProfileLevelH264HighAutoLevel,
            AVVideoH264EntropyModeKey: AVVideoH264EntropyModeCABAC

@@ -266,7 +272,7 @@ extern "C" bool startAVFoundationRecording(const std::string& outputPath,
        }
    }

-    // Start capture timer
+    // Start capture timer using target FPS
    dispatch_queue_t captureQueue = dispatch_queue_create("AVFoundationCaptureQueue", DISPATCH_QUEUE_SERIAL);
    g_avTimer = dispatch_source_create(DISPATCH_SOURCE_TYPE_TIMER, 0, 0, captureQueue);

@@ -275,7 +281,7 @@ extern "C" bool startAVFoundationRecording(const std::string& outputPath,
        return false;
    }

-    uint64_t interval = NSEC_PER_SEC /
+    uint64_t interval = (uint64_t)(NSEC_PER_SEC / fps);
    dispatch_source_set_timer(g_avTimer, dispatch_time(DISPATCH_TIME_NOW, 0), interval, interval / 10);

    // Retain objects before passing to block to prevent deallocation

@@ -371,7 +377,7 @@ extern "C" bool startAVFoundationRecording(const std::string& outputPath,

        // Write frame only if input is ready
        if (localVideoInput && localVideoInput.readyForMoreMediaData) {
-            CMTime frameTime = CMTimeAdd(g_avStartTime, CMTimeMakeWithSeconds(g_avFrameNumber /
+            CMTime frameTime = CMTimeAdd(g_avStartTime, CMTimeMakeWithSeconds(((double)g_avFrameNumber) / fps, 600));
            BOOL appendSuccess = [localPixelBufferAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:frameTime];
            if (appendSuccess) {
                g_avFrameNumber++;

@@ -401,8 +407,8 @@ extern "C" bool startAVFoundationRecording(const std::string& outputPath,
    dispatch_resume(g_avTimer);
    g_avIsRecording = true;

-    MRLog(@"🎥 AVFoundation recording started: %dx%d @
-          (int)recordingSize.width, (int)recordingSize.height);
+    MRLog(@"🎥 AVFoundation recording started: %dx%d @ %.0ffps",
+          (int)recordingSize.width, (int)recordingSize.height, fps);

    return true;
package/src/camera_recorder.mm
CHANGED

@@ -189,18 +189,58 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
                position = @"unspecified";
                break;
        }
-
+
+        // PRIORITY FIX: MacBook built-in cameras should be default, not external cameras
+        // Check if this is a built-in camera (MacBook's own camera)
+        NSString *deviceName = device.localizedName ?: @"";
+        NSString *deviceType = device.deviceType ?: @"";
+        BOOL isBuiltIn = NO;
+
+        // Built-in detection: Check for common built-in camera names
+        if ([deviceName rangeOfString:@"FaceTime" options:NSCaseInsensitiveSearch].location != NSNotFound ||
+            [deviceName rangeOfString:@"iSight" options:NSCaseInsensitiveSearch].location != NSNotFound ||
+            [deviceName rangeOfString:@"Built-in" options:NSCaseInsensitiveSearch].location != NSNotFound) {
+            isBuiltIn = YES;
+        }
+
+        // Check device type for built-in wide angle camera
+        if (@available(macOS 10.15, *)) {
+            if ([deviceType isEqualToString:AVCaptureDeviceTypeBuiltInWideAngleCamera]) {
+                isBuiltIn = YES;
+            }
+        }
+
+        // External devices (Continuity Camera, iPhone, iPad, USB) should NOT be default
+        if (continuityCamera ||
+            [deviceName rangeOfString:@"iPhone" options:NSCaseInsensitiveSearch].location != NSNotFound ||
+            [deviceName rangeOfString:@"iPad" options:NSCaseInsensitiveSearch].location != NSNotFound ||
+            [deviceName rangeOfString:@"Continuity" options:NSCaseInsensitiveSearch].location != NSNotFound) {
+            isBuiltIn = NO;
+        }
+
+        // External device types should not be default
+        if (@available(macOS 14.0, *)) {
+            if ([deviceType isEqualToString:AVCaptureDeviceTypeExternal] ||
+                [deviceType isEqualToString:AVCaptureDeviceTypeContinuityCamera]) {
+                isBuiltIn = NO;
+            }
+        }
+        if ([deviceType isEqualToString:AVCaptureDeviceTypeExternalUnknown]) {
+            isBuiltIn = NO;
+        }
+
        NSDictionary *deviceInfo = @{
            @"id": device.uniqueID ?: @"",
-            @"name":
+            @"name": deviceName,
            @"model": device.modelID ?: @"",
            @"manufacturer": device.manufacturer ?: @"",
            @"position": position ?: @"unspecified",
            @"transportType": @(device.transportType),
            @"isConnected": @(device.isConnected),
+            @"isDefault": @(isBuiltIn), // Only built-in cameras are default
            @"hasFlash": @(device.hasFlash),
            @"supportsDepth": @NO,
-            @"deviceType":
+            @"deviceType": deviceType,
            @"requiresContinuityCameraPermission": @(continuityCamera),
            @"maxResolution": @{
                @"width": @(bestDimensions.width),

@@ -678,17 +718,33 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
        return YES;
    }

+    // CRITICAL FIX: For external cameras (especially Continuity Camera/iPhone),
+    // stopRunning can hang if device is disconnected. Use async approach.
+    MRLog(@"🛑 CameraRecorder: Stopping session (external device safe)...");
+
    self.isShuttingDown = YES;
    self.isRecording = NO;

-
-        [self.session stopRunning];
-    } @catch (NSException *exception) {
-        MRLog(@"⚠️ CameraRecorder: Exception while stopping session: %@", exception.reason);
-    }
-
+    // Stop delegate FIRST to prevent new frames
    [self.videoOutput setSampleBufferDelegate:nil queue:nil];

+    // Stop session on background thread to avoid blocking
+    AVCaptureSession *sessionToStop = self.session;
+    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), ^{
+        @autoreleasepool {
+            @try {
+                if ([sessionToStop isRunning]) {
+                    MRLog(@"🛑 Stopping AVCaptureSession (camera)...");
+                    [sessionToStop stopRunning];
+                    MRLog(@"✅ AVCaptureSession stopped (camera)");
+                }
+            } @catch (NSException *exception) {
+                MRLog(@"⚠️ CameraRecorder: Exception while stopping session: %@", exception.reason);
+            }
+            // Release happens automatically when block completes
+        }
+    });
+
    // CRITICAL FIX: Check if assetWriter exists before trying to finish it
    // If no frames were captured, assetWriter will be nil
    if (!self.assetWriter) {

@@ -709,12 +765,12 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
            dispatch_semaphore_signal(semaphore);
        }];

-        // Reduced timeout to
-        dispatch_time_t timeout = dispatch_time(DISPATCH_TIME_NOW, (int64_t)(
-        dispatch_semaphore_wait(semaphore, timeout);
+        // Reduced timeout to 1 second for external devices
+        dispatch_time_t timeout = dispatch_time(DISPATCH_TIME_NOW, (int64_t)(1 * NSEC_PER_SEC));
+        long result = dispatch_semaphore_wait(semaphore, timeout);

-        if (!finished) {
-            MRLog(@"⚠️ CameraRecorder: Timed out waiting for writer
+        if (result != 0 || !finished) {
+            MRLog(@"⚠️ CameraRecorder: Timed out waiting for writer (external device?) - forcing cancel");
            // Force cancel if timeout
            [self.assetWriter cancelWriting];
        }

@@ -723,10 +779,11 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
    if (!success) {
        MRLog(@"⚠️ CameraRecorder: Writer finished with status %ld error %@", (long)self.assetWriter.status, self.assetWriter.error);
    } else {
-        MRLog(@"✅ CameraRecorder
+        MRLog(@"✅ CameraRecorder writer finished successfully");
    }

    [self resetState];
+    MRLog(@"✅ CameraRecorder stopped (safe for external devices)");
    return success;
}
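The new isDefault flag above (mirrored in the index.js device mapping and in GetCameraDevices in mac_recorder.mm) marks only built-in cameras as default. A hedged selection sketch follows; recorder.getCameraDevices() is an assumed JS method name for the native GetCameraDevices binding.

```js
// Hedged sketch: prefer the built-in camera, fall back to any connected device.
// Assumes the device entries carry id, isConnected and (since 2.21.23) isDefault,
// as built in index.js; getCameraDevices() is an assumed method name.
async function pickCameraDeviceId(recorder) {
  const devices = await recorder.getCameraDevices();

  // Built-in FaceTime-style cameras report isDefault: true;
  // Continuity Camera / iPhone / iPad / external cameras report false.
  const builtIn = devices.find((d) => d.isDefault && d.isConnected);
  const fallback = devices.find((d) => d.isConnected);

  return (builtIn ?? fallback)?.id ?? null;
}
```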
package/src/electron_safe/screen_capture_electron.mm
CHANGED

@@ -121,12 +121,27 @@ static void initializeSafeQueue() {
    SCDisplay *targetDisplay = nil;

    if (displayId) {
+        // First, try matching by real CGDirectDisplayID
        for (SCDisplay *display in content.displays) {
            if (display.displayID == [displayId unsignedIntValue]) {
                targetDisplay = display;
                break;
            }
        }
+
+        // If not matched, treat provided value as index (0-based or 1-based)
+        if (!targetDisplay && content.displays.count > 0) {
+            NSUInteger count = content.displays.count;
+            NSUInteger idx0 = (NSUInteger)[displayId unsignedIntValue];
+            if (idx0 < count) {
+                targetDisplay = content.displays[idx0];
+            } else if ([displayId unsignedIntegerValue] > 0) {
+                NSUInteger idx1 = [displayId unsignedIntegerValue] - 1;
+                if (idx1 < count) {
+                    targetDisplay = content.displays[idx1];
+                }
+            }
+        }
    }

    if (!targetDisplay && content.displays.count > 0) {

@@ -154,9 +169,45 @@ static void initializeSafeQueue() {
    }

    // Video configuration
-
-
-
+    // Prefer the target display's native resolution when available
+    if (filter && [filter isKindOfClass:[SCContentFilter class]]) {
+        // Try to infer dimensions from selected display or capture area
+        NSDictionary *captureArea = options[@"captureArea"];
+        if (captureArea) {
+            config.width = (size_t)[captureArea[@"width"] doubleValue];
+            config.height = (size_t)[captureArea[@"height"] doubleValue];
+        } else {
+            // Find the selected display again to get dimensions
+            NSNumber *displayId = options[@"displayId"];
+            if (displayId) {
+                for (SCDisplay *display in content.displays) {
+                    if (display.displayID == [displayId unsignedIntValue]) {
+                        config.width = (size_t)display.width;
+                        config.height = (size_t)display.height;
+                        break;
+                    }
+                }
+            }
+        }
+    }
+
+    // Fallback default resolution if not set above
+    if (config.width == 0 || config.height == 0) {
+        config.width = 1920;
+        config.height = 1080;
+    }
+
+    // Frame rate from options (default 60)
+    NSInteger fps = 60;
+    if (options[@"frameRate"]) {
+        NSInteger v = [options[@"frameRate"] integerValue];
+        if (v > 0) {
+            if (v < 1) v = 1;
+            if (v > 120) v = 120;
+            fps = v;
+        }
+    }
+    config.minimumFrameInterval = CMTimeMake(1, (int)fps);
    config.queueDepth = 8;

    // Capture area if specified
package/src/mac_recorder.mm
CHANGED

@@ -19,7 +19,8 @@ extern "C" {
                                       bool includeMicrophone,
                                       bool includeSystemAudio,
                                       NSString* audioDeviceId,
-                                      NSString* audioOutputPath
+                                      NSString* audioOutputPath,
+                                      double frameRate);
    bool stopAVFoundationRecording();
    bool isAVFoundationRecording();
    NSString* getAVFoundationAudioPath();

@@ -204,6 +205,7 @@ Napi::Value StartRecording(const Napi::CallbackInfo& info) {
    NSString *cameraOutputPath = nil;
    int64_t sessionTimestamp = 0;
    NSString *audioOutputPath = nil;
+    double frameRate = 60.0;

    if (info.Length() > 1 && info[1].IsObject()) {
        Napi::Object options = info[1].As<Napi::Object>();

@@ -271,33 +273,57 @@ Napi::Value StartRecording(const Napi::CallbackInfo& info) {
        if (options.Has("sessionTimestamp") && options.Get("sessionTimestamp").IsNumber()) {
            sessionTimestamp = options.Get("sessionTimestamp").As<Napi::Number>().Int64Value();
        }
+
+        // Frame rate
+        if (options.Has("frameRate") && options.Get("frameRate").IsNumber()) {
+            double fps = options.Get("frameRate").As<Napi::Number>().DoubleValue();
+            if (fps > 0) {
+                // Clamp to reasonable range
+                if (fps < 1.0) fps = 1.0;
+                if (fps > 120.0) fps = 120.0;
+                frameRate = fps;
+            }
+        }

-        // Display ID
+        // Display ID (accepts either real CGDirectDisplayID or index [0-based or 1-based])
        if (options.Has("displayId") && !options.Get("displayId").IsNull()) {
            double displayIdNum = options.Get("displayId").As<Napi::Number>().DoubleValue();

-            //
-
-            displayID = (CGDirectDisplayID)displayIdNum;
+            // First, assume the provided value is a real CGDirectDisplayID
+            CGDirectDisplayID candidateID = (CGDirectDisplayID)displayIdNum;

-            // Verify
-            uint32_t displayCount;
+            // Verify against active displays
+            uint32_t displayCount = 0;
            CGGetActiveDisplayList(0, NULL, &displayCount);
            if (displayCount > 0) {
                CGDirectDisplayID *displays = (CGDirectDisplayID*)malloc(displayCount * sizeof(CGDirectDisplayID));
                CGGetActiveDisplayList(displayCount, displays, &displayCount);

-                bool
+                bool matchedByID = false;
                for (uint32_t i = 0; i < displayCount; i++) {
-                    if (displays[i] ==
-
+                    if (displays[i] == candidateID) {
+                        matchedByID = true;
+                        displayID = candidateID;
                        break;
                    }
                }

-                if (!
-                    //
-
+                if (!matchedByID) {
+                    // Tolerant mapping: allow passing index instead of CGDirectDisplayID
+                    // Try 0-based index
+                    int idx0 = (int)displayIdNum;
+                    if (idx0 >= 0 && idx0 < (int)displayCount) {
+                        displayID = displays[idx0];
+                    } else {
+                        // Try 1-based index (common in user examples)
+                        int idx1 = (int)displayIdNum - 1;
+                        if (idx1 >= 0 && idx1 < (int)displayCount) {
+                            displayID = displays[idx1];
+                        } else {
+                            // Fallback to main display
+                            displayID = CGMainDisplayID();
+                        }
+                    }
                }

                free(displays);

@@ -400,6 +426,8 @@ Napi::Value StartRecording(const Napi::CallbackInfo& info) {
    if (sessionTimestamp != 0) {
        sckConfig[@"sessionTimestamp"] = @(sessionTimestamp);
    }
+    // Pass requested frame rate
+    sckConfig[@"frameRate"] = @(frameRate);

    if (!CGRectIsNull(captureRect)) {
        sckConfig[@"captureRect"] = @{

@@ -511,7 +539,8 @@ Napi::Value StartRecording(const Napi::CallbackInfo& info) {
                                          bool includeMicrophone,
                                          bool includeSystemAudio,
                                          NSString* audioDeviceId,
-                                          NSString* audioOutputPath
+                                          NSString* audioOutputPath,
+                                          double frameRate);

    // CRITICAL SYNC FIX: Start camera BEFORE screen recording for perfect sync
    // This ensures both capture their first frame at approximately the same time

@@ -529,7 +558,8 @@ Napi::Value StartRecording(const Napi::CallbackInfo& info) {
    // Now start screen recording immediately after camera
    MRLog(@"🎯 SYNC: Starting screen recording immediately");
    bool avResult = startAVFoundationRecording(outputPath, displayID, windowID, captureRect,
-                                               captureCursor, includeMicrophone, includeSystemAudio,
+                                               captureCursor, includeMicrophone, includeSystemAudio,
+                                               audioDeviceId, audioOutputPath, frameRate);

    if (avResult) {
        MRLog(@"🎥 RECORDING METHOD: AVFoundation");

@@ -827,6 +857,7 @@ Napi::Value GetCameraDevices(const Napi::CallbackInfo& info) {
    NSString *position = camera[@"position"];
    NSNumber *transportType = camera[@"transportType"];
    NSNumber *isConnected = camera[@"isConnected"];
+    NSNumber *isDefault = camera[@"isDefault"];
    NSNumber *hasFlash = camera[@"hasFlash"];
    NSNumber *supportsDepth = camera[@"supportsDepth"];

@@ -861,7 +892,11 @@ Napi::Value GetCameraDevices(const Napi::CallbackInfo& info) {
    if (isConnected && [isConnected isKindOfClass:[NSNumber class]]) {
        cameraObj.Set("isConnected", Napi::Boolean::New(env, [isConnected boolValue]));
    }
-
+
+    if (isDefault && [isDefault isKindOfClass:[NSNumber class]]) {
+        cameraObj.Set("isDefault", Napi::Boolean::New(env, [isDefault boolValue]));
+    }
+
    if (hasFlash && [hasFlash isKindOfClass:[NSNumber class]]) {
        cameraObj.Set("hasFlash", Napi::Boolean::New(env, [hasFlash boolValue]));
    }
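The display-ID handling above first checks the supplied value against the active CGDirectDisplayID list, then falls back to interpreting it as a 0-based or 1-based index, and finally to the main display. A hedged sketch of what that tolerance means for callers; as before, the MacRecorder/startRecording names are assumptions based on index.js.

```js
// Hedged sketch: both forms of displayId are accepted as of 2.21.23.
// Assumes the MacRecorder class and startRecording(outputPath, options)
// entry point from index.js.
const MacRecorder = require("node-mac-recorder");
const recorder = new MacRecorder();

async function recordDisplay() {
  // 1) A real CGDirectDisplayID (for example, one obtained from macOS APIs)
  //    is matched against the active display list, as before.
  // 2) A small integer that does not match any active display ID is treated
  //    as an index (0-based first, then 1-based), with a final fallback to
  //    the main display, so { displayId: 0 } records the first active display.
  await recorder.startRecording("./display.mov", {
    displayId: 0,
    frameRate: 60,
  });
}

recordDisplay().catch(console.error);
```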
package/src/screen_capture_kit.mm
CHANGED

@@ -32,6 +32,7 @@ static BOOL g_audioWriterStarted = NO;

static NSInteger g_configuredSampleRate = 48000;
static NSInteger g_configuredChannelCount = 2;
+static NSInteger g_targetFPS = 60;

// Frame rate debugging
static NSInteger g_frameCount = 0;

@@ -342,9 +343,9 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {

    NSDictionary *compressionProps = @{
        AVVideoAverageBitRateKey: @(bitrate),
-        AVVideoMaxKeyFrameIntervalKey: @
+        AVVideoMaxKeyFrameIntervalKey: @(MAX(1, g_targetFPS)),
        AVVideoAllowFrameReorderingKey: @YES,
-        AVVideoExpectedSourceFrameRateKey: @
+        AVVideoExpectedSourceFrameRateKey: @(MAX(1, g_targetFPS)),
        AVVideoQualityKey: @(0.95), // 0.0-1.0, higher is better (0.95 = excellent)
        AVVideoProfileLevelKey: AVVideoProfileLevelH264HighAutoLevel,
        AVVideoH264EntropyModeKey: AVVideoH264EntropyModeCABAC

@@ -524,6 +525,17 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
    NSString *audioOutputPath = MRNormalizePath(config[@"audioOutputPath"]);
    NSNumber *sessionTimestampNumber = config[@"sessionTimestamp"];

+    // Extract requested frame rate
+    NSNumber *frameRateNumber = config[@"frameRate"];
+    if (frameRateNumber && [frameRateNumber respondsToSelector:@selector(intValue)]) {
+        NSInteger fps = [frameRateNumber intValue];
+        if (fps < 1) fps = 1;
+        if (fps > 120) fps = 120;
+        g_targetFPS = fps;
+    } else {
+        g_targetFPS = 60;
+    }
+
    MRLog(@"🎬 Starting PURE ScreenCaptureKit recording (NO AVFoundation)");
    MRLog(@"🔧 Config: cursor=%@ mic=%@ system=%@ display=%@ window=%@ crop=%@",
          captureCursor, includeMicrophone, includeSystemAudio, displayId, windowId, captureRect);

@@ -641,7 +653,7 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
    SCStreamConfiguration *streamConfig = [[SCStreamConfiguration alloc] init];
    streamConfig.width = recordingWidth;
    streamConfig.height = recordingHeight;
-    streamConfig.minimumFrameInterval = CMTimeMake(1,
+    streamConfig.minimumFrameInterval = CMTimeMake(1, (int)MAX(1, g_targetFPS));
    streamConfig.pixelFormat = kCVPixelFormatType_32BGRA;
    streamConfig.scalesToFit = NO;

@@ -650,7 +662,7 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
        streamConfig.queueDepth = 8; // Larger queue for smoother capture
    }

-    MRLog(@"🎬 ScreenCaptureKit config: %ldx%ld @
+    MRLog(@"🎬 ScreenCaptureKit config: %ldx%ld @ %ldfps", (long)recordingWidth, (long)recordingHeight, (long)g_targetFPS);

    BOOL shouldCaptureMic = includeMicrophone ? [includeMicrophone boolValue] : NO;
    BOOL shouldCaptureSystemAudio = includeSystemAudio ? [includeSystemAudio boolValue] : NO;

@@ -735,8 +747,8 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
    BOOL shouldShowCursor = captureCursor ? [captureCursor boolValue] : YES;
    streamConfig.showsCursor = shouldShowCursor;

-    MRLog(@"🎥 Pure ScreenCapture config: %ldx%ld @
-          recordingWidth, recordingHeight, shouldShowCursor);
+    MRLog(@"🎥 Pure ScreenCapture config: %ldx%ld @ %ldfps, cursor=%d",
+          recordingWidth, recordingHeight, (long)g_targetFPS, shouldShowCursor);

    NSError *writerError = nil;
    if (![ScreenCaptureKitRecorder prepareVideoWriterWithWidth:recordingWidth height:recordingHeight error:&writerError]) {