node-mac-recorder 2.21.7 → 2.21.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/settings.local.json +3 -1
- package/index.js +32 -13
- package/package.json +1 -1
- package/src/audio_recorder.mm +33 -0
- package/src/avfoundation_recorder.mm +71 -8
- package/src/mac_recorder.mm +30 -25
package/.claude/settings.local.json CHANGED

@@ -11,7 +11,9 @@
       "Read(//private/tmp/test-recording/**)",
       "Bash(MAC_RECORDER_DEBUG=1 timeout 5 node:*)",
       "Read(//private/tmp/**)",
-      "Bash(ffprobe:*)"
+      "Bash(ffprobe:*)",
+      "Bash(MAC_RECORDER_DEBUG=1 timeout 8 node:*)",
+      "Bash(cat:*)"
     ],
     "deny": [],
     "ask": []
package/index.js CHANGED

@@ -152,21 +152,40 @@ class MacRecorder extends EventEmitter {
    * Kayıt seçeneklerini ayarlar
    */
   setOptions(options = {}) {
-    … (old lines 155–165 are not rendered in the source diff view)
+    // Merge options instead of replacing to preserve previously set values
+    if (options.includeMicrophone !== undefined) {
+      this.options.includeMicrophone = options.includeMicrophone === true;
+    }
+    if (options.includeSystemAudio !== undefined) {
+      this.options.includeSystemAudio = options.includeSystemAudio === true;
+    }
+    if (options.captureCursor !== undefined) {
+      this.options.captureCursor = options.captureCursor || false;
+    }
+    if (options.displayId !== undefined) {
+      this.options.displayId = options.displayId || null;
+    }
+    if (options.windowId !== undefined) {
+      this.options.windowId = options.windowId || null;
+    }
+    if (options.audioDeviceId !== undefined) {
+      this.options.audioDeviceId = options.audioDeviceId || null;
+    }
+    if (options.systemAudioDeviceId !== undefined) {
+      this.options.systemAudioDeviceId = options.systemAudioDeviceId || null;
+    }
+    if (options.captureArea !== undefined) {
+      this.options.captureArea = options.captureArea || null;
+    }
+    if (options.captureCamera !== undefined) {
+      this.options.captureCamera = options.captureCamera === true;
+    }
+    if (options.cameraDeviceId !== undefined) {
+      this.options.cameraDeviceId =
         typeof options.cameraDeviceId === "string" && options.cameraDeviceId.length > 0
           ? options.cameraDeviceId
-          : null
-    }
+          : null;
+    }
   }
 
   /**
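The rewritten setOptions (its Turkish doc comment, "Kayıt seçeneklerini ayarlar", means "sets the recording options") now merges each incoming key into this.options instead of rebuilding the whole object, so values from earlier calls survive later partial calls. A minimal usage sketch of that behaviour; the require() shape and the option values are assumptions, only setOptions() itself appears in the hunk above:

```js
// Hypothetical usage; assumes the package's default export is the
// MacRecorder class named in the hunk header.
const MacRecorder = require("node-mac-recorder");
const recorder = new MacRecorder();

recorder.setOptions({ includeMicrophone: true, audioDeviceId: "builtin-mic" });
recorder.setOptions({ captureCursor: true }); // partial update

// With the merge logic above, includeMicrophone and audioDeviceId are still
// set here: a key is only touched when it appears in the new options object.
```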
package/package.json CHANGED
package/src/audio_recorder.mm CHANGED

@@ -369,4 +369,37 @@ NSString *currentStandaloneAudioRecordingPath() {
     return g_audioRecorder.outputPath;
 }
 
+// C API for AVFoundation integration
+void* createNativeAudioRecorder() {
+    return (__bridge_retained void*)[[NativeAudioRecorder alloc] init];
+}
+
+bool startNativeAudioRecording(void* recorder, const char* deviceId, const char* outputPath) {
+    if (!recorder || !outputPath) {
+        return false;
+    }
+
+    NativeAudioRecorder* audioRecorder = (__bridge NativeAudioRecorder*)recorder;
+    NSString* deviceIdStr = deviceId ? [NSString stringWithUTF8String:deviceId] : nil;
+    NSString* outputPathStr = [NSString stringWithUTF8String:outputPath];
+
+    NSError* error = nil;
+    return [audioRecorder startRecordingWithDeviceId:deviceIdStr outputPath:outputPathStr error:&error];
+}
+
+bool stopNativeAudioRecording(void* recorder) {
+    if (!recorder) {
+        return false;
+    }
+
+    NativeAudioRecorder* audioRecorder = (__bridge NativeAudioRecorder*)recorder;
+    return [audioRecorder stopRecording];
+}
+
+void destroyNativeAudioRecorder(void* recorder) {
+    if (recorder) {
+        CFRelease(recorder);
+    }
+}
+
 }
package/src/avfoundation_recorder.mm CHANGED

@@ -7,6 +7,12 @@
 #include <string>
 #import "logging.h"
 
+// Import audio recorder
+extern "C" void* createNativeAudioRecorder(void);
+extern "C" bool startNativeAudioRecording(void* recorder, const char* deviceId, const char* outputPath);
+extern "C" bool stopNativeAudioRecording(void* recorder);
+extern "C" void destroyNativeAudioRecorder(void* recorder);
+
 static AVAssetWriter *g_avWriter = nil;
 static AVAssetWriterInput *g_avVideoInput = nil;
 static AVAssetWriterInputPixelBufferAdaptor *g_avPixelBufferAdaptor = nil;
@@ -16,25 +22,28 @@ static CGRect g_avCaptureRect = CGRectZero;
 static bool g_avIsRecording = false;
 static int64_t g_avFrameNumber = 0;
 static CMTime g_avStartTime;
+static void* g_avAudioRecorder = nil;
+static NSString* g_avAudioOutputPath = nil;
 
 // AVFoundation screen recording implementation
-extern "C" bool startAVFoundationRecording(const std::string& outputPath, 
+extern "C" bool startAVFoundationRecording(const std::string& outputPath,
                                            CGDirectDisplayID displayID,
                                            uint32_t windowID,
                                            CGRect captureRect,
                                            bool captureCursor,
-                                           bool includeMicrophone, 
+                                           bool includeMicrophone,
                                            bool includeSystemAudio,
-                                           NSString* audioDeviceId
+                                           NSString* audioDeviceId,
+                                           NSString* audioOutputPath) {
 
     if (g_avIsRecording) {
         NSLog(@"❌ AVFoundation recording already in progress");
         return false;
     }
-    
+
     @try {
         MRLog(@"🎬 AVFoundation: Starting recording initialization");
-        
+
         // Create output URL
         NSString *outputPathStr = [NSString stringWithUTF8String:outputPath.c_str()];
         NSURL *outputURL = [NSURL fileURLWithPath:outputPathStr];
@@ -197,7 +206,48 @@ extern "C" bool startAVFoundationRecording(const std::string& outputPath,
     }
 
     g_avFrameNumber = 0;
-    
+
+    // Start audio recording if requested
+    if (includeMicrophone || includeSystemAudio) {
+        MRLog(@"🎤 Starting audio capture (mic=%d, system=%d)", includeMicrophone, includeSystemAudio);
+
+        // Use provided audio output path or generate one
+        if (audioOutputPath && [audioOutputPath length] > 0) {
+            g_avAudioOutputPath = audioOutputPath;
+            MRLog(@"🎤 Using provided audio path: %@", g_avAudioOutputPath);
+        } else {
+            NSString *videoDir = [outputPathStr stringByDeletingLastPathComponent];
+            NSString *audioFilename = [NSString stringWithFormat:@"avf_audio_%ld.mov", (long)[[NSDate date] timeIntervalSince1970]];
+            g_avAudioOutputPath = [videoDir stringByAppendingPathComponent:audioFilename];
+            MRLog(@"🎤 Generated audio path: %@", g_avAudioOutputPath);
+        }
+
+        // Ensure .mov extension
+        if (![g_avAudioOutputPath.pathExtension.lowercaseString isEqualToString:@"mov"]) {
+            g_avAudioOutputPath = [[g_avAudioOutputPath stringByDeletingPathExtension] stringByAppendingPathExtension:@"mov"];
+            MRLog(@"🔧 Fixed audio extension to .mov: %@", g_avAudioOutputPath);
+        }
+
+        // Create audio recorder
+        g_avAudioRecorder = createNativeAudioRecorder();
+
+        if (g_avAudioRecorder) {
+            const char* deviceIdCStr = audioDeviceId ? [audioDeviceId UTF8String] : NULL;
+            const char* outputPathCStr = [g_avAudioOutputPath UTF8String];
+
+            if (startNativeAudioRecording(g_avAudioRecorder, deviceIdCStr, outputPathCStr)) {
+                MRLog(@"✅ Audio recording started to: %@", g_avAudioOutputPath);
+            } else {
+                NSLog(@"❌ Failed to start audio recording");
+                destroyNativeAudioRecorder(g_avAudioRecorder);
+                g_avAudioRecorder = nil;
+                g_avAudioOutputPath = nil;
+            }
+        } else {
+            NSLog(@"❌ Failed to create audio recorder");
+        }
+    }
+
     // Start capture timer (10 FPS for Electron compatibility)
     dispatch_queue_t captureQueue = dispatch_queue_create("AVFoundationCaptureQueue", DISPATCH_QUEUE_SERIAL);
     g_avTimer = dispatch_source_create(DISPATCH_SOURCE_TYPE_TIMER, 0, 0, captureQueue);
@@ -348,9 +398,18 @@ extern "C" bool stopAVFoundationRecording() {
     if (!g_avIsRecording) {
         return true;
     }
-    
+
     g_avIsRecording = false;
-    
+
+    // Stop audio recording if active
+    if (g_avAudioRecorder) {
+        MRLog(@"🛑 Stopping audio recording");
+        stopNativeAudioRecording(g_avAudioRecorder);
+        destroyNativeAudioRecorder(g_avAudioRecorder);
+        g_avAudioRecorder = nil;
+        MRLog(@"✅ Audio recording stopped");
+    }
+
     @try {
         // Stop timer with Electron-safe cleanup
         if (g_avTimer) {
@@ -404,3 +463,7 @@ extern "C" bool stopAVFoundationRecording() {
 extern "C" bool isAVFoundationRecording() {
     return g_avIsRecording;
 }
+
+extern "C" NSString* getAVFoundationAudioPath() {
+    return g_avAudioOutputPath;
+}
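When microphone or system audio is requested, the AVFoundation recorder now writes audio to a sidecar file next to the video: the caller-supplied audioOutputPath if present, otherwise a generated avf_audio_<unix-timestamp>.mov in the video's directory, with the extension forced to .mov in both cases. Below is a rough JavaScript rendering of that path fallback, for illustration only (defaultAudioPath is a made-up helper name; the real logic is the Objective-C++ shown above):

```js
const path = require("path");

// Mirrors the native fallback: use the provided path if non-empty, otherwise
// derive one from the video's directory, and always end up with a .mov file.
function defaultAudioPath(videoOutputPath, audioOutputPath) {
  let audioPath =
    audioOutputPath && audioOutputPath.length > 0
      ? audioOutputPath
      : path.join(
          path.dirname(videoOutputPath),
          `avf_audio_${Math.floor(Date.now() / 1000)}.mov`
        );
  if (!audioPath.toLowerCase().endsWith(".mov")) {
    const parsed = path.parse(audioPath);
    audioPath = path.join(parsed.dir, parsed.name + ".mov");
  }
  return audioPath;
}

defaultAudioPath("/tmp/test-recording/capture.mov", null);
// → "/tmp/test-recording/avf_audio_<timestamp>.mov"
```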
package/src/mac_recorder.mm CHANGED

@@ -11,16 +11,18 @@
 
 // AVFoundation fallback declarations
 extern "C" {
-    bool startAVFoundationRecording(const std::string& outputPath, 
+    bool startAVFoundationRecording(const std::string& outputPath,
                                     CGDirectDisplayID displayID,
                                     uint32_t windowID,
                                     CGRect captureRect,
                                     bool captureCursor,
-                                    bool includeMicrophone, 
+                                    bool includeMicrophone,
                                     bool includeSystemAudio,
-                                    NSString* audioDeviceId
+                                    NSString* audioDeviceId,
+                                    NSString* audioOutputPath);
     bool stopAVFoundationRecording();
     bool isAVFoundationRecording();
+    NSString* getAVFoundationAudioPath();
 
     NSArray<NSDictionary *> *listCameraDevices();
     bool startCameraRecording(NSString *outputPath, NSString *deviceId, NSError **error);
@@ -196,7 +198,7 @@ Napi::Value StartRecording(const Napi::CallbackInfo& info) {
 
     if (info.Length() > 1 && info[1].IsObject()) {
         Napi::Object options = info[1].As<Napi::Object>();
-        
+
         // Capture area
         if (options.Has("captureArea") && options.Get("captureArea").IsObject()) {
             Napi::Object rectObj = options.Get("captureArea").As<Napi::Object>();
@@ -217,15 +219,15 @@ Napi::Value StartRecording(const Napi::CallbackInfo& info) {
 
         // Microphone
         if (options.Has("includeMicrophone")) {
-            includeMicrophone = options.Get("includeMicrophone").As<Napi::Boolean>();
+            includeMicrophone = options.Get("includeMicrophone").As<Napi::Boolean>().Value();
         }
-        
+
         // Audio device ID
        if (options.Has("audioDeviceId") && !options.Get("audioDeviceId").IsNull()) {
             std::string deviceId = options.Get("audioDeviceId").As<Napi::String>().Utf8Value();
             audioDeviceId = [NSString stringWithUTF8String:deviceId.c_str()];
         }
-        
+
         // System audio
         if (options.Has("includeSystemAudio")) {
             includeSystemAudio = options.Get("includeSystemAudio").As<Napi::Boolean>();
@@ -343,18 +345,16 @@ Napi::Value StartRecording(const Napi::CallbackInfo& info) {
         MRLog(@" Reason: ScreenCaptureKit has thread safety issues in Electron (SIGTRAP crashes)");
     }
 
-    // 
-
-
-
-
+    // CRITICAL FIX: ScreenCaptureKit causes segmentation faults
+    // Forcing AVFoundation for ALL environments until issue is resolved
+    // TODO: Implement audio capture in AVFoundation
+    BOOL forceAVFoundation = YES;
+
+    MRLog(@"🔧 CRITICAL: ScreenCaptureKit disabled globally (segfault issue)");
+    MRLog(@" Using AVFoundation for stability with integrated audio capture");
 
-
-
-    forceAVFoundation = YES;
-    if (forceAVFoundation) {
-        MRLog(@"🔧 CRITICAL: ScreenCaptureKit disabled due to segmentation faults");
-        MRLog(@" Using AVFoundation for stability in ALL environments");
+    if (isElectron) {
+        MRLog(@"⚡ Electron environment detected - using stable AVFoundation");
     }
 
     // Electron-first priority: ALWAYS use AVFoundation in Electron for stability
@@ -474,20 +474,21 @@ Napi::Value StartRecording(const Napi::CallbackInfo& info) {
 
     // AVFoundation recording (either fallback from ScreenCaptureKit or direct)
     MRLog(@"🎥 Starting AVFoundation recording...");
-    
+
     @try {
         // Import AVFoundation recording functions (if available)
-        extern bool startAVFoundationRecording(const std::string& outputPath, 
+        extern bool startAVFoundationRecording(const std::string& outputPath,
                                                CGDirectDisplayID displayID,
                                                uint32_t windowID,
                                                CGRect captureRect,
                                                bool captureCursor,
-                                               bool includeMicrophone, 
+                                               bool includeMicrophone,
                                                bool includeSystemAudio,
-                                               NSString* audioDeviceId
-        … (old lines 488–490 are not rendered in the source diff view)
+                                               NSString* audioDeviceId,
+                                               NSString* audioOutputPath);
+
+        bool avResult = startAVFoundationRecording(outputPath, displayID, windowID, captureRect,
+                                                   captureCursor, includeMicrophone, includeSystemAudio, audioDeviceId, audioOutputPath);
 
         if (avResult) {
             MRLog(@"🎥 RECORDING METHOD: AVFoundation");
@@ -554,6 +555,7 @@ Napi::Value StopRecording(const Napi::CallbackInfo& info) {
     // Try AVFoundation fallback (supports both Node.js and Electron)
     extern bool isAVFoundationRecording();
     extern bool stopAVFoundationRecording();
+    extern NSString* getAVFoundationAudioPath();
 
     @try {
         if (isAVFoundationRecording()) {
@@ -867,6 +869,9 @@ Napi::Value GetAudioRecordingPath(const Napi::CallbackInfo& info) {
     if (!path || [path length] == 0) {
         path = currentStandaloneAudioRecordingPath();
     }
+    if (!path || [path length] == 0) {
+        path = getAVFoundationAudioPath();
+    }
     if (!path || [path length] == 0) {
         return env.Null();
     }