node-mac-recorder 2.15.12 → 2.16.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/settings.local.json +2 -1
- package/binding.gyp +5 -0
- package/package.json +1 -1
- package/src/avfoundation_recorder.mm +222 -0
- package/src/mac_recorder.mm +113 -32
- package/src/screen_capture_kit.mm +0 -10
- package/test-display-id-debug.js +0 -42
- package/test-electron-env.js +0 -38
package/.claude/settings.local.json
CHANGED

@@ -42,7 +42,8 @@
       "Bash(ELECTRON_RUN_AS_NODE=1 node -e \"\nconsole.log(''Final stability test with all features'');\nconst MacRecorder = require(''./index'');\nconst recorder = new MacRecorder();\n\nasync function finalTest() {\n try {\n const outputPath = ''./test-output/final-stability-test.mov'';\n console.log(''Starting final stability test...'');\n \n const success = await recorder.startRecording(outputPath, {\n captureCursor: true,\n includeMicrophone: false,\n includeSystemAudio: true,\n captureArea: {\n x: 200,\n y: 200,\n width: 600,\n height: 400\n }\n });\n \n if (success) {\n console.log(''✅ Recording started - waiting 4 seconds'');\n await new Promise(resolve => setTimeout(resolve, 4000));\n \n console.log(''Stopping recording...'');\n await recorder.stopRecording();\n console.log(''FINAL TEST COMPLETED SUCCESSFULLY - NO CRASH!'');\n } else {\n console.log(''❌ Recording failed to start'');\n }\n } catch (error) {\n console.log(''❌ Error:'', error.message);\n }\n}\n\nfinalTest();\n\")",
       "Bash(ELECTRON_RUN_AS_NODE=1 node -e \"\nconsole.log(''Testing both microphone and system audio'');\nconst MacRecorder = require(''./index'');\nconst recorder = new MacRecorder();\n\nasync function testBothAudio() {\n try {\n const outputPath = ''./test-output/both-audio-test.mov'';\n console.log(''Starting recording with both audio sources...'');\n \n const success = await recorder.startRecording(outputPath, {\n captureCursor: true,\n includeMicrophone: true, // Microphone ON\n includeSystemAudio: true // System audio ON\n });\n \n if (success) {\n console.log(''✅ Recording started - waiting 5 seconds'');\n await new Promise(resolve => setTimeout(resolve, 5000));\n \n console.log(''Stopping recording...'');\n await recorder.stopRecording();\n console.log(''✅ Both audio test completed!'');\n } else {\n console.log(''❌ Recording failed to start'');\n }\n } catch (error) {\n console.log(''❌ Error:'', error.message);\n console.log(''Stack:'', error.stack);\n }\n}\n\ntestBothAudio();\n\")",
       "Bash(ELECTRON_RUN_AS_NODE=1 node -e \"\nconsole.log(''Testing microphone only'');\nconst MacRecorder = require(''./index'');\nconst recorder = new MacRecorder();\n\nasync function testMicrophoneOnly() {\n try {\n const outputPath = ''./test-output/microphone-only-test.mov'';\n console.log(''Starting microphone only recording...'');\n \n const success = await recorder.startRecording(outputPath, {\n captureCursor: false,\n includeMicrophone: true, // Only microphone ON\n includeSystemAudio: false // System audio OFF\n });\n \n if (success) {\n console.log(''✅ Microphone recording started - waiting 3 seconds'');\n await new Promise(resolve => setTimeout(resolve, 3000));\n \n console.log(''Stopping microphone recording...'');\n await recorder.stopRecording();\n console.log(''✅ Microphone only test completed!'');\n } else {\n console.log(''❌ Recording failed to start'');\n }\n } catch (error) {\n console.log(''❌ Error:'', error.message);\n }\n}\n\ntestMicrophoneOnly();\n\")",
-      "Bash(ELECTRON_RUN_AS_NODE=1 node -e \"\nconsole.log(''Full stress test - All features + Both audio sources'');\nconst MacRecorder = require(''./index'');\nconst recorder = new MacRecorder();\n\nasync function fullStressTest() {\n try {\n const outputPath = ''./test-output/full-stress-test.mov'';\n console.log(''Starting full featured recording (stress test)...'');\n \n const success = await recorder.startRecording(outputPath, {\n captureCursor: true, // Cursor capture\n includeMicrophone: true, // Microphone capture\n includeSystemAudio: true, // System audio capture\n captureArea: { // Crop area\n x: 300,\n y: 300,\n width: 800,\n height: 500\n }\n });\n \n if (success) {\n console.log(''✅ Full stress test started - waiting 6 seconds'');\n await new Promise(resolve => setTimeout(resolve, 6000));\n \n console.log(''Stopping full stress test...'');\n await recorder.stopRecording();\n console.log(''FULL STRESS TEST COMPLETED WITHOUT CRASH!'');\n } else {\n console.log(''❌ Recording failed to start'');\n }\n } catch (error) {\n console.log(''❌ Error:'', error.message);\n console.log(''Stack:'', error.stack);\n }\n}\n\nfullStressTest();\n\")"
+      "Bash(ELECTRON_RUN_AS_NODE=1 node -e \"\nconsole.log(''Full stress test - All features + Both audio sources'');\nconst MacRecorder = require(''./index'');\nconst recorder = new MacRecorder();\n\nasync function fullStressTest() {\n try {\n const outputPath = ''./test-output/full-stress-test.mov'';\n console.log(''Starting full featured recording (stress test)...'');\n \n const success = await recorder.startRecording(outputPath, {\n captureCursor: true, // Cursor capture\n includeMicrophone: true, // Microphone capture\n includeSystemAudio: true, // System audio capture\n captureArea: { // Crop area\n x: 300,\n y: 300,\n width: 800,\n height: 500\n }\n });\n \n if (success) {\n console.log(''✅ Full stress test started - waiting 6 seconds'');\n await new Promise(resolve => setTimeout(resolve, 6000));\n \n console.log(''Stopping full stress test...'');\n await recorder.stopRecording();\n console.log(''FULL STRESS TEST COMPLETED WITHOUT CRASH!'');\n } else {\n console.log(''❌ Recording failed to start'');\n }\n } catch (error) {\n console.log(''❌ Error:'', error.message);\n console.log(''Stack:'', error.stack);\n }\n}\n\nfullStressTest();\n\")",
+      "Bash(ELECTRON_RUN_AS_NODE=1 node -e \"\nconsole.log(''Testing version detection and fallback system'');\nconst MacRecorder = require(''./index'');\nconst recorder = new MacRecorder();\n\nasync function testVersionDetection() {\n try {\n const outputPath = ''./test-output/version-detection-test.mov'';\n console.log(''Starting recording with version detection...'');\n \n const success = await recorder.startRecording(outputPath, {\n captureCursor: true,\n includeMicrophone: false,\n includeSystemAudio: true\n });\n \n if (success) {\n console.log(''✅ Recording started - waiting 3 seconds'');\n await new Promise(resolve => setTimeout(resolve, 3000));\n \n console.log(''Stopping recording...'');\n await recorder.stopRecording();\n console.log(''✅ Version detection test completed successfully!'');\n } else {\n console.log(''❌ Recording failed to start'');\n }\n } catch (error) {\n console.log(''❌ Error:'', error.message);\n }\n}\n\ntestVersionDetection();\n\")"
     ],
     "deny": []
   }
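For readability, the newly allowed entry (the version-detection test added at line 46) corresponds to roughly the following standalone script. Everything here is taken from the permission string above; only the suggested file name is illustrative, and the exact behaviour depends on the package's index.js wrapper.

// version-detection-test.js - a standalone rendering of the new allow-list entry (sketch)
const MacRecorder = require('./index');
const recorder = new MacRecorder();

async function testVersionDetection() {
  try {
    const outputPath = './test-output/version-detection-test.mov';
    const success = await recorder.startRecording(outputPath, {
      captureCursor: true,
      includeMicrophone: false,
      includeSystemAudio: true
    });

    if (success) {
      // Record for three seconds, then stop and confirm the process did not crash.
      await new Promise(resolve => setTimeout(resolve, 3000));
      await recorder.stopRecording();
      console.log('Version detection test completed successfully!');
    } else {
      console.log('Recording failed to start');
    }
  } catch (error) {
    console.log('Error:', error.message);
  }
}

testVersionDetection();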
package/binding.gyp
CHANGED
@@ -5,6 +5,7 @@
       "sources": [
         "src/mac_recorder.mm",
         "src/screen_capture_kit.mm",
+        "src/avfoundation_recorder.mm",
         "src/audio_capture.mm",
         "src/cursor_tracker.mm",
         "src/window_selector.mm"
@@ -30,6 +31,10 @@
         "-framework Foundation",
         "-framework AppKit",
         "-framework ScreenCaptureKit",
+        "-framework AVFoundation",
+        "-framework CoreMedia",
+        "-framework CoreVideo",
+        "-framework QuartzCore",
         "-framework ApplicationServices",
         "-framework Carbon",
         "-framework Accessibility",
package/src/avfoundation_recorder.mm
ADDED
@@ -0,0 +1,222 @@
+#import <Foundation/Foundation.h>
+#import <AVFoundation/AVFoundation.h>
+#import <CoreMedia/CoreMedia.h>
+#import <CoreVideo/CoreVideo.h>
+#import <QuartzCore/QuartzCore.h>
+#import <AppKit/AppKit.h>
+#include <string>
+
+static AVAssetWriter *g_avWriter = nil;
+static AVAssetWriterInput *g_avVideoInput = nil;
+static AVAssetWriterInputPixelBufferAdaptor *g_avPixelBufferAdaptor = nil;
+static dispatch_source_t g_avTimer = nil;
+static CGDirectDisplayID g_avDisplayID = 0;
+static CGRect g_avCaptureRect = CGRectZero;
+static bool g_avIsRecording = false;
+static int64_t g_avFrameNumber = 0;
+static CMTime g_avStartTime;
+
+// AVFoundation screen recording implementation
+bool startAVFoundationRecording(const std::string& outputPath,
+                                CGDirectDisplayID displayID,
+                                uint32_t windowID,
+                                CGRect captureRect,
+                                bool captureCursor,
+                                bool includeMicrophone,
+                                bool includeSystemAudio,
+                                NSString* audioDeviceId) {
+
+    if (g_avIsRecording) {
+        NSLog(@"❌ AVFoundation recording already in progress");
+        return false;
+    }
+
+    @try {
+        // Create output URL
+        NSString *outputPathStr = [NSString stringWithUTF8String:outputPath.c_str()];
+        NSURL *outputURL = [NSURL fileURLWithPath:outputPathStr];
+
+        // Remove existing file
+        [[NSFileManager defaultManager] removeItemAtURL:outputURL error:nil];
+
+        // Create asset writer
+        NSError *error = nil;
+        g_avWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:AVFileTypeQuickTimeMovie error:&error];
+        if (!g_avWriter || error) {
+            NSLog(@"❌ Failed to create AVAssetWriter: %@", error);
+            return false;
+        }
+
+        // Get display dimensions
+        CGRect displayBounds = CGDisplayBounds(displayID);
+        CGSize recordingSize = captureRect.size.width > 0 ? captureRect.size : displayBounds.size;
+
+        // Video settings
+        NSDictionary *videoSettings = @{
+            AVVideoCodecKey: AVVideoCodecTypeH264,
+            AVVideoWidthKey: @((int)recordingSize.width),
+            AVVideoHeightKey: @((int)recordingSize.height),
+            AVVideoCompressionPropertiesKey: @{
+                AVVideoAverageBitRateKey: @(recordingSize.width * recordingSize.height * 8),
+                AVVideoMaxKeyFrameIntervalKey: @30
+            }
+        };
+
+        // Create video input
+        g_avVideoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
+        g_avVideoInput.expectsMediaDataInRealTime = YES;
+
+        // Create pixel buffer adaptor
+        NSDictionary *pixelBufferAttributes = @{
+            (NSString*)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32ARGB),
+            (NSString*)kCVPixelBufferWidthKey: @((int)recordingSize.width),
+            (NSString*)kCVPixelBufferHeightKey: @((int)recordingSize.height),
+            (NSString*)kCVPixelBufferCGImageCompatibilityKey: @YES,
+            (NSString*)kCVPixelBufferCGBitmapContextCompatibilityKey: @YES
+        };
+
+        g_avPixelBufferAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:g_avVideoInput sourcePixelBufferAttributes:pixelBufferAttributes];
+
+        // Add input to writer
+        if (![g_avWriter canAddInput:g_avVideoInput]) {
+            NSLog(@"❌ Cannot add video input to AVAssetWriter");
+            return false;
+        }
+        [g_avWriter addInput:g_avVideoInput];
+
+        // Start writing
+        if (![g_avWriter startWriting]) {
+            NSLog(@"❌ Failed to start AVAssetWriter: %@", g_avWriter.error);
+            return false;
+        }
+
+        g_avStartTime = CMTimeMakeWithSeconds(CACurrentMediaTime(), 600);
+        [g_avWriter startSessionAtSourceTime:g_avStartTime];
+
+        // Store recording parameters
+        g_avDisplayID = displayID;
+        g_avCaptureRect = captureRect;
+        g_avFrameNumber = 0;
+
+        // Start capture timer (15 FPS for compatibility)
+        dispatch_queue_t captureQueue = dispatch_queue_create("AVFoundationCaptureQueue", DISPATCH_QUEUE_SERIAL);
+        g_avTimer = dispatch_source_create(DISPATCH_SOURCE_TYPE_TIMER, 0, 0, captureQueue);
+
+        uint64_t interval = NSEC_PER_SEC / 15; // 15 FPS
+        dispatch_source_set_timer(g_avTimer, dispatch_time(DISPATCH_TIME_NOW, 0), interval, interval / 10);
+
+        dispatch_source_set_event_handler(g_avTimer, ^{
+            if (!g_avIsRecording) return;
+
+            @autoreleasepool {
+                // Capture screen
+                CGImageRef screenImage = nil;
+                if (CGRectIsEmpty(g_avCaptureRect)) {
+                    screenImage = CGDisplayCreateImage(g_avDisplayID);
+                } else {
+                    CGImageRef fullScreen = CGDisplayCreateImage(g_avDisplayID);
+                    if (fullScreen) {
+                        screenImage = CGImageCreateWithImageInRect(fullScreen, g_avCaptureRect);
+                        CGImageRelease(fullScreen);
+                    }
+                }
+
+                if (!screenImage) return;
+
+                // Convert to pixel buffer
+                CVPixelBufferRef pixelBuffer = nil;
+                CVReturn cvRet = CVPixelBufferPoolCreatePixelBuffer(NULL, g_avPixelBufferAdaptor.pixelBufferPool, &pixelBuffer);
+
+                if (cvRet == kCVReturnSuccess && pixelBuffer) {
+                    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
+
+                    void *pixelData = CVPixelBufferGetBaseAddress(pixelBuffer);
+                    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
+
+                    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
+                    CGContextRef context = CGBitmapContextCreate(pixelData,
+                                                                 CVPixelBufferGetWidth(pixelBuffer),
+                                                                 CVPixelBufferGetHeight(pixelBuffer),
+                                                                 8, bytesPerRow, colorSpace,
+                                                                 kCGImageAlphaPremultipliedFirst | kCGBitmapByteOrder32Little);
+
+                    if (context) {
+                        CGContextDrawImage(context, CGRectMake(0, 0, CVPixelBufferGetWidth(pixelBuffer), CVPixelBufferGetHeight(pixelBuffer)), screenImage);
+                        CGContextRelease(context);
+                    }
+                    CGColorSpaceRelease(colorSpace);
+                    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
+
+                    // Write frame
+                    if (g_avVideoInput.readyForMoreMediaData) {
+                        CMTime frameTime = CMTimeAdd(g_avStartTime, CMTimeMakeWithSeconds(g_avFrameNumber / 15.0, 600));
+                        [g_avPixelBufferAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:frameTime];
+                        g_avFrameNumber++;
+                    }
+
+                    CVPixelBufferRelease(pixelBuffer);
+                }
+
+                CGImageRelease(screenImage);
+            }
+        });
+
+        dispatch_resume(g_avTimer);
+        g_avIsRecording = true;
+
+        NSLog(@"AVFoundation recording started: %dx%d @ 15fps",
+              (int)recordingSize.width, (int)recordingSize.height);
+
+        return true;
+
+    } @catch (NSException *exception) {
+        NSLog(@"❌ Exception in AVFoundation recording: %@", exception.reason);
+        return false;
+    }
+}
+
+bool stopAVFoundationRecording() {
+    if (!g_avIsRecording) {
+        return true;
+    }
+
+    g_avIsRecording = false;
+
+    @try {
+        // Stop timer
+        if (g_avTimer) {
+            dispatch_source_cancel(g_avTimer);
+            g_avTimer = nil;
+        }
+
+        // Finish writing
+        if (g_avVideoInput) {
+            [g_avVideoInput markAsFinished];
+        }
+
+        if (g_avWriter && g_avWriter.status == AVAssetWriterStatusWriting) {
+            dispatch_semaphore_t semaphore = dispatch_semaphore_create(0);
+            [g_avWriter finishWritingWithCompletionHandler:^{
+                dispatch_semaphore_signal(semaphore);
+            }];
+            dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER);
+        }
+
+        // Cleanup
+        g_avWriter = nil;
+        g_avVideoInput = nil;
+        g_avPixelBufferAdaptor = nil;
+        g_avFrameNumber = 0;
+
+        NSLog(@"✅ AVFoundation recording stopped");
+        return true;
+
+    } @catch (NSException *exception) {
+        NSLog(@"❌ Exception stopping AVFoundation recording: %@", exception.reason);
+        return false;
+    }
+}
+
+bool isAVFoundationRecording() {
+    return g_avIsRecording;
+}
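Two constants in the new fallback recorder are worth spelling out: the target bitrate is computed as width × height × 8 bits per second, and each frame's presentation time advances by 1/15 s on the 600-unit CMTime timescale. The snippet below is a quick illustration of that arithmetic only; it is not part of the package, and the function name is made up for the example.

// Illustration of the bitrate and frame-timing math used by avfoundation_recorder.mm (sketch)
function avFallbackNumbers(width, height, frameNumber) {
  const averageBitRate = width * height * 8;                    // e.g. 1920x1080 -> 16,588,800 bit/s (~16.6 Mbit/s)
  const fps = 15;                                               // the capture timer fires 15 times per second
  const timescale = 600;                                        // CMTime timescale passed to CMTimeMakeWithSeconds
  const presentationOffset = (frameNumber / fps) * timescale;   // CMTime value added to the session start time
  return { averageBitRate, presentationOffset };
}

console.log(avFallbackNumbers(1920, 1080, 30)); // { averageBitRate: 16588800, presentationOffset: 1200 }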
package/src/mac_recorder.mm
CHANGED
@@ -8,6 +8,20 @@
 // Import screen capture (ScreenCaptureKit only)
 #import "screen_capture_kit.h"
 
+// AVFoundation fallback declarations
+extern "C" {
+    bool startAVFoundationRecording(const std::string& outputPath,
+                                    CGDirectDisplayID displayID,
+                                    uint32_t windowID,
+                                    CGRect captureRect,
+                                    bool captureCursor,
+                                    bool includeMicrophone,
+                                    bool includeSystemAudio,
+                                    NSString* audioDeviceId);
+    bool stopAVFoundationRecording();
+    bool isAVFoundationRecording();
+}
+
 // Cursor tracker function declarations
 Napi::Object InitCursorTracker(Napi::Env env, Napi::Object exports);
 
@@ -30,12 +44,18 @@ static bool g_isRecording = false;
 
 // Helper function to cleanup recording resources
 void cleanupRecording() {
-    // ScreenCaptureKit cleanup
+    // ScreenCaptureKit cleanup
     if (@available(macOS 12.3, *)) {
        if ([ScreenCaptureKitRecorder isRecording]) {
            [ScreenCaptureKitRecorder stopRecording];
        }
    }
+
+    // AVFoundation cleanup
+    if (isAVFoundationRecording()) {
+        stopAVFoundationRecording();
+    }
+
    g_isRecording = false;
 }
 
@@ -164,18 +184,24 @@ Napi::Value StartRecording(const Napi::CallbackInfo& info) {
        NSLog(@"⚠️ Warning: ScreenCaptureKit in Electron may require additional stability measures");
    }
 
-    //
-
-
-
-
-
-
-
+    // Check macOS version for ScreenCaptureKit compatibility
+    NSOperatingSystemVersion osVersion = [[NSProcessInfo processInfo] operatingSystemVersion];
+    BOOL isM15Plus = (osVersion.majorVersion > 15 ||
+                      (osVersion.majorVersion == 15 && osVersion.minorVersion >= 0));
+    BOOL isM14Plus = (osVersion.majorVersion > 14 ||
+                      (osVersion.majorVersion == 14 && osVersion.minorVersion >= 0));
+
+    NSLog(@"macOS Version: %ld.%ld.%ld",
+          (long)osVersion.majorVersion, (long)osVersion.minorVersion, (long)osVersion.patchVersion);
+
+    // Try ScreenCaptureKit on macOS 14+ (with better compatibility on 15+)
+    if (@available(macOS 12.3, *) && isM14Plus) {
+        NSLog(@"✅ macOS 14+ detected - ScreenCaptureKit available with %@ compatibility",
+              isM15Plus ? @"full" : @"limited");
 
-    // Try ScreenCaptureKit with extensive safety measures
+        // Try ScreenCaptureKit with extensive safety measures
        @try {
-            if (
+            if ([ScreenCaptureKitRecorder isScreenCaptureKitAvailable]) {
                NSLog(@"✅ ScreenCaptureKit availability check passed");
                NSLog(@"Using ScreenCaptureKit - overlay windows will be automatically excluded");
 
@@ -234,25 +260,49 @@ Napi::Value StartRecording(const Napi::CallbackInfo& info) {
                }
 
                NSLog(@"❌ ScreenCaptureKit failed or unsafe");
-
+
            } else {
-                NSLog(@"❌ ScreenCaptureKit
+                NSLog(@"❌ ScreenCaptureKit availability check failed");
+                NSLog(@"❌ ScreenCaptureKit not available");
            }
        } @catch (NSException *availabilityException) {
-            NSLog(@"
-
-            NSLog(@"Expected in Electron - using AVFoundation fallback");
-            } else {
-            NSLog(@"Falling back to AVFoundation");
-            }
+            NSLog(@"❌ Exception during ScreenCaptureKit availability check: %@", availabilityException.reason);
+            return Napi::Boolean::New(env, false);
        }
    } else {
-        NSLog(@"
+        NSLog(@"❌ macOS version too old for ScreenCaptureKit (< 12.3) - Recording not supported");
+        return Napi::Boolean::New(env, false);
+    }
+
+    // If we get here, ScreenCaptureKit failed - try AVFoundation fallback
+    NSLog(@"ScreenCaptureKit failed - attempting AVFoundation fallback");
+
+    @try {
+        // Import AVFoundation recording functions (if available)
+        extern bool startAVFoundationRecording(const std::string& outputPath,
+                                               CGDirectDisplayID displayID,
+                                               uint32_t windowID,
+                                               CGRect captureRect,
+                                               bool captureCursor,
+                                               bool includeMicrophone,
+                                               bool includeSystemAudio,
+                                               NSString* audioDeviceId);
+
+        if (startAVFoundationRecording(outputPath, displayID, windowID, captureRect,
+                                       captureCursor, includeMicrophone, includeSystemAudio, audioDeviceId)) {
+            NSLog(@"RECORDING METHOD: AVFoundation (Fallback)");
+            NSLog(@"✅ AVFoundation recording started successfully");
+            g_isRecording = true;
+            return Napi::Boolean::New(env, true);
+        } else {
+            NSLog(@"❌ AVFoundation recording also failed to start");
+        }
+    } @catch (NSException *avException) {
+        NSLog(@"❌ Exception during AVFoundation startup: %@", avException.reason);
    }
 
-    //
-    NSLog(@"
-    NSLog(@"❌ AVFoundation recording not implemented in this version");
+    // Both ScreenCaptureKit and AVFoundation failed
+    NSLog(@"❌ All recording methods failed - no recording available");
    return Napi::Boolean::New(env, false);
 
 } @catch (NSException *exception) {
@@ -267,22 +317,39 @@ Napi::Value StopRecording(const Napi::CallbackInfo& info) {
 
    NSLog(@"StopRecording native method called");
 
-    // ScreenCaptureKit
+    // Try ScreenCaptureKit first
    if (@available(macOS 12.3, *)) {
        if ([ScreenCaptureKitRecorder isRecording]) {
            NSLog(@"Stopping ScreenCaptureKit recording");
            [ScreenCaptureKitRecorder stopRecording];
            g_isRecording = false;
            return Napi::Boolean::New(env, true);
-        } else {
-            NSLog(@"⚠️ ScreenCaptureKit not recording");
-            g_isRecording = false;
-            return Napi::Boolean::New(env, true);
        }
-    } else {
-        NSLog(@"❌ ScreenCaptureKit not available - cannot stop recording");
-        return Napi::Boolean::New(env, false);
    }
+
+    // Try AVFoundation fallback
+    extern bool isAVFoundationRecording();
+    extern bool stopAVFoundationRecording();
+
+    @try {
+        if (isAVFoundationRecording()) {
+            NSLog(@"Stopping AVFoundation recording");
+            if (stopAVFoundationRecording()) {
+                g_isRecording = false;
+                return Napi::Boolean::New(env, true);
+            } else {
+                NSLog(@"❌ Failed to stop AVFoundation recording");
+                g_isRecording = false;
+                return Napi::Boolean::New(env, false);
+            }
+        }
+    } @catch (NSException *exception) {
+        NSLog(@"❌ Exception stopping AVFoundation: %@", exception.reason);
+    }
+
+    NSLog(@"⚠️ No active recording found to stop");
+    g_isRecording = false;
+    return Napi::Boolean::New(env, true);
 }
 
 
@@ -538,7 +605,21 @@ Napi::Value GetDisplays(const Napi::CallbackInfo& info) {
 // NAPI Function: Get Recording Status
 Napi::Value GetRecordingStatus(const Napi::CallbackInfo& info) {
    Napi::Env env = info.Env();
-
+
+    // Check both recording methods
+    bool isRecording = g_isRecording;
+
+    if (@available(macOS 12.3, *)) {
+        if ([ScreenCaptureKitRecorder isRecording]) {
+            isRecording = true;
+        }
+    }
+
+    if (isAVFoundationRecording()) {
+        isRecording = true;
+    }
+
+    return Napi::Boolean::New(env, isRecording);
 }
 
 // NAPI Function: Get Window Thumbnail
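From the JavaScript side the fallback is transparent: there is no option to select a backend, ScreenCaptureKit is tried first, and the native layer switches to AVFoundation only if that fails, while stopRecording now stops whichever backend is active. A minimal sketch, assuming the package's index.js forwards these calls to the native addon the same way the test scripts above do; the output path is illustrative.

// Sketch of a recording session that may land on either backend
const MacRecorder = require('./index');

async function recordWithFallback() {
  const recorder = new MacRecorder();
  const started = await recorder.startRecording('./test-output/fallback-demo.mov', {
    captureCursor: true,
    includeMicrophone: false,
    includeSystemAudio: true
  });

  if (started) {
    await new Promise(resolve => setTimeout(resolve, 3000));
    await recorder.stopRecording(); // stops ScreenCaptureKit or the AVFoundation fallback, whichever is running
  }
}

recordWithFallback().catch(err => console.error(err.message));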
package/src/screen_capture_kit.mm
CHANGED

@@ -121,25 +121,15 @@ static NSString *g_outputPath = nil;
 
    if (displayId && [displayId integerValue] != 0) {
        // Find specific display
-        NSLog(@"Looking for display ID: %@", displayId);
-        NSLog(@"Available displays:");
        for (SCDisplay *display in content.displays) {
-            NSLog(@"  - Display %u: %dx%d", display.displayID, (int)display.width, (int)display.height);
            if (display.displayID == [displayId unsignedIntValue]) {
                targetDisplay = display;
-                NSLog(@"✅ Found matching display %u", display.displayID);
                break;
            }
        }
-
-        if (!targetDisplay) {
-            NSLog(@"❌ Display ID %@ not found in ScreenCaptureKit displays", displayId);
-            NSLog(@"Available IDs: %@", [content.displays valueForKey:@"displayID"]);
-        }
    } else {
        // Use first display
        targetDisplay = content.displays.firstObject;
-        NSLog(@"Using first available display: %u", targetDisplay.displayID);
    }
 
    if (!targetDisplay) {
package/test-display-id-debug.js
DELETED
@@ -1,42 +0,0 @@
-const MacRecorder = require('./index');
-
-async function testDisplayIdDebug() {
-  console.log('Testing display ID debugging...');
-
-  const recorder = new MacRecorder();
-  const displays = await recorder.getDisplays();
-
-  console.log('JavaScript display IDs:');
-  displays.forEach(display => {
-    console.log(`  ${display.name}: ID=${display.id} ${display.isPrimary ? '[PRIMARY]' : ''}`);
-  });
-
-  // Test recording on secondary display to trigger the debug logs
-  const secondaryDisplay = displays.find(d => !d.isPrimary);
-  if (secondaryDisplay) {
-    console.log(`\nTesting recording on ${secondaryDisplay.name} (ID: ${secondaryDisplay.id})`);
-
-    try {
-      await recorder.startRecording('./test-output/display-id-debug.mov', {
-        displayId: secondaryDisplay.id,
-        captureCursor: true,
-        includeMicrophone: false,
-        includeSystemAudio: false
-      });
-
-      console.log('✅ Recording started - check native logs for display ID matching');
-
-      await new Promise(resolve => setTimeout(resolve, 2000));
-
-      await recorder.stopRecording();
-      console.log('✅ Recording completed');
-
-    } catch (error) {
-      console.log(`❌ Recording failed: ${error.message}`);
-    }
-  } else {
-    console.log('⚠️ No secondary display found');
-  }
-}
-
-testDisplayIdDebug();
package/test-electron-env.js
DELETED
@@ -1,38 +0,0 @@
-const MacRecorder = require('./index');
-
-// Simulate Electron environment
-process.env.ELECTRON_RUN_AS_NODE = '1';
-
-async function testElectronFallback() {
-  console.log('Testing Electron environment fallback...');
-  console.log('Environment variables:');
-  console.log('  ELECTRON_RUN_AS_NODE:', process.env.ELECTRON_RUN_AS_NODE);
-
-  const recorder = new MacRecorder();
-
-  try {
-    console.log('\nAttempting to start recording in simulated Electron environment...');
-
-    await recorder.startRecording('./test-output/electron-fallback-test.mov', {
-      displayId: 1,
-      captureCursor: true,
-      includeMicrophone: false,
-      includeSystemAudio: false
-    });
-
-    console.log('✅ Recording started (unexpected - should have failed)');
-
-    await new Promise(resolve => setTimeout(resolve, 1000));
-
-    await recorder.stopRecording();
-    console.log('✅ Recording completed');
-
-  } catch (error) {
-    console.log(`❌ Recording failed as expected in Electron: ${error.message}`);
-  }
-}
-
-testElectronFallback().finally(() => {
-  // Clean up
-  delete process.env.ELECTRON_RUN_AS_NODE;
-});