node-mac-recorder 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +492 -0
- package/binding.gyp +39 -0
- package/index.js +372 -0
- package/package.json +45 -0
- package/src/audio_capture.mm +116 -0
- package/src/mac_recorder.mm +472 -0
- package/src/screen_capture.mm +141 -0
|
@@ -0,0 +1,472 @@
|
|
|
1
|
+
#import <napi.h>
|
|
2
|
+
#import <AVFoundation/AVFoundation.h>
|
|
3
|
+
#import <CoreMedia/CoreMedia.h>
|
|
4
|
+
#import <AppKit/AppKit.h>
|
|
5
|
+
#import <Foundation/Foundation.h>
|
|
6
|
+
#import <CoreGraphics/CoreGraphics.h>
|
|
7
|
+
#import <ImageIO/ImageIO.h>
|
|
8
|
+
#import <CoreAudio/CoreAudio.h>
|
|
9
|
+
|
|
10
|
+
// Receives AVCaptureMovieFileOutput callbacks and forwards the final file URL
// (and any error) to an optional block supplied by the owner.
@interface MacRecorderDelegate : NSObject <AVCaptureFileOutputRecordingDelegate>
@property (nonatomic, copy) void (^completionHandler)(NSURL *outputURL, NSError *error);
@end

@implementation MacRecorderDelegate

- (void)captureOutput:(AVCaptureFileOutput *)output
didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
      fromConnections:(NSArray<AVCaptureConnection *> *)connections
                error:(NSError *)error {
    // Copy the property into a local so the block cannot change between the
    // nil check and the invocation.
    void (^handler)(NSURL *, NSError *) = self.completionHandler;
    if (handler != nil) {
        handler(outputFileURL, error);
    }
}

@end
|
|
24
|
+
|
|
25
|
+
// Global state for recording.
// Exactly one recording session is supported at a time; these objects live
// from StartRecording() until cleanupRecording() resets them.
static AVCaptureSession *g_captureSession = nil;          // owns the capture pipeline
static AVCaptureMovieFileOutput *g_movieFileOutput = nil; // writes the output movie file
static AVCaptureScreenInput *g_screenInput = nil;         // screen source
static AVCaptureDeviceInput *g_audioInput = nil;          // microphone source (optional)
static MacRecorderDelegate *g_delegate = nil;             // recording-finished callback target
static bool g_isRecording = false;                        // true between start and stop
|
|
32
|
+
|
|
33
|
+
// Stops the active capture session (if any) and resets every recording global
// so a fresh session can be configured later.
void cleanupRecording() {
    if (g_captureSession != nil) {
        [g_captureSession stopRunning];
    }
    g_captureSession = nil;
    g_movieFileOutput = nil;
    g_screenInput = nil;
    g_audioInput = nil;
    g_delegate = nil;
    g_isRecording = false;
}
|
|
45
|
+
|
|
46
|
+
// NAPI Function: Start Recording
// startRecording(outputPath [, options]) -> bool
//
// options:
//   captureArea        {x, y, width, height} crop rect in display coordinates
//   captureCursor      bool, default false (cursor hidden)
//   includeMicrophone  bool, default false
//   includeSystemAudio bool, default true (currently a no-op — see NOTE below)
//   displayId          0-based INDEX into the active display list (not a raw
//                      CGDirectDisplayID); out-of-range falls back to main
//   windowId           reserved; the JS layer maps a window to captureArea
//
// Returns true when recording started; false if a recording is already active
// or on any configuration failure. Throws a TypeError when outputPath is
// missing.
Napi::Value StartRecording(const Napi::CallbackInfo& info) {
    Napi::Env env = info.Env();

    if (info.Length() < 1) {
        Napi::TypeError::New(env, "Output path required").ThrowAsJavaScriptException();
        return env.Null();
    }

    // Only one session at a time.
    if (g_isRecording) {
        return Napi::Boolean::New(env, false);
    }

    std::string outputPath = info[0].As<Napi::String>().Utf8Value();

    // Option defaults.
    CGRect captureRect = CGRectNull;                  // CGRectNull => full display
    bool captureCursor = false;                       // cursor hidden by default
    bool includeMicrophone = false;                   // microphone off by default
    bool includeSystemAudio = true;                   // see NOTE below
    CGDirectDisplayID displayID = CGMainDisplayID();  // main display by default

    if (info.Length() > 1 && info[1].IsObject()) {
        Napi::Object options = info[1].As<Napi::Object>();

        // Optional crop rectangle.
        if (options.Has("captureArea") && options.Get("captureArea").IsObject()) {
            Napi::Object rectObj = options.Get("captureArea").As<Napi::Object>();
            if (rectObj.Has("x") && rectObj.Has("y") && rectObj.Has("width") && rectObj.Has("height")) {
                captureRect = CGRectMake(
                    rectObj.Get("x").As<Napi::Number>().DoubleValue(),
                    rectObj.Get("y").As<Napi::Number>().DoubleValue(),
                    rectObj.Get("width").As<Napi::Number>().DoubleValue(),
                    rectObj.Get("height").As<Napi::Number>().DoubleValue()
                );
            }
        }

        // Cursor visibility.
        if (options.Has("captureCursor")) {
            captureCursor = options.Get("captureCursor").As<Napi::Boolean>();
        }

        // Microphone.
        if (options.Has("includeMicrophone")) {
            includeMicrophone = options.Get("includeMicrophone").As<Napi::Boolean>();
        }

        // System audio flag.
        if (options.Has("includeSystemAudio")) {
            includeSystemAudio = options.Get("includeSystemAudio").As<Napi::Boolean>();
        }

        // Display selection: displayId is an index into the active display
        // list, matching the ordering GetDisplays reports.
        if (options.Has("displayId") && !options.Get("displayId").IsNull()) {
            double displayIdNum = options.Get("displayId").As<Napi::Number>().DoubleValue();

            uint32_t displayCount = 0;
            CGGetActiveDisplayList(0, NULL, &displayCount);
            if (displayCount > 0) {
                CGDirectDisplayID *displays = (CGDirectDisplayID*)malloc(displayCount * sizeof(CGDirectDisplayID));
                // Fixed: the original passed an unchecked malloc result to
                // CGGetActiveDisplayList.
                if (displays) {
                    CGGetActiveDisplayList(displayCount, displays, &displayCount);
                    if (displayIdNum >= 0 && displayIdNum < displayCount) {
                        displayID = displays[(int)displayIdNum];
                    }
                    free(displays);
                }
            }
        }

        // windowId: reserved for future native-level window capture; today the
        // JS layer translates a window into captureArea before calling here.
        if (options.Has("windowId") && !options.Get("windowId").IsNull()) {
            // Intentionally unused at the native level.
        }
    }

    @try {
        // Build the capture pipeline: screen input (+ optional mic) -> movie file.
        g_captureSession = [[AVCaptureSession alloc] init];
        [g_captureSession beginConfiguration];
        g_captureSession.sessionPreset = AVCaptureSessionPresetHigh;

        g_screenInput = [[AVCaptureScreenInput alloc] initWithDisplayID:displayID];

        if (!CGRectIsNull(captureRect)) {
            g_screenInput.cropRect = captureRect;
        }

        g_screenInput.capturesCursor = captureCursor;

        if ([g_captureSession canAddInput:g_screenInput]) {
            [g_captureSession addInput:g_screenInput];
        } else {
            cleanupRecording();
            return Napi::Boolean::New(env, false);
        }

        // Optional microphone input (best effort: failure to add it does not
        // abort the recording).
        if (includeMicrophone) {
            AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
            if (audioDevice) {
                NSError *error;
                g_audioInput = [[AVCaptureDeviceInput alloc] initWithDevice:audioDevice error:&error];
                if (g_audioInput && [g_captureSession canAddInput:g_audioInput]) {
                    [g_captureSession addInput:g_audioInput];
                }
            }
        }

        // NOTE(review): AVCaptureScreenInput does NOT capture system audio.
        // The original code set capturesMouseClicks = YES here (an unrelated
        // visual click-highlight effect) under the mistaken belief that it
        // enabled system audio; that bug has been removed. Real system-audio
        // capture requires ScreenCaptureKit (macOS 13+) or a loopback device.
        (void)includeSystemAudio;

        g_movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
        if ([g_captureSession canAddOutput:g_movieFileOutput]) {
            [g_captureSession addOutput:g_movieFileOutput];
        } else {
            cleanupRecording();
            return Napi::Boolean::New(env, false);
        }

        [g_captureSession commitConfiguration];
        [g_captureSession startRunning];

        // Delegate receives the did-finish callback once recording stops.
        g_delegate = [[MacRecorderDelegate alloc] init];

        NSURL *outputURL = [NSURL fileURLWithPath:[NSString stringWithUTF8String:outputPath.c_str()]];
        [g_movieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:g_delegate];

        g_isRecording = true;
        return Napi::Boolean::New(env, true);

    } @catch (NSException *exception) {
        cleanupRecording();
        return Napi::Boolean::New(env, false);
    }
}
|
|
197
|
+
|
|
198
|
+
// NAPI Function: Stop Recording
// stopRecording() -> bool
// Returns false when no recording is active; true once stop was requested.
Napi::Value StopRecording(const Napi::CallbackInfo& info) {
    Napi::Env env = info.Env();

    // Nothing to stop (also guards against a torn-down output object).
    if (!g_isRecording || !g_movieFileOutput) {
        return Napi::Boolean::New(env, false);
    }

    @try {
        // NOTE(review): stopRecording is asynchronous — the file is finalized
        // in the delegate callback. Stopping the session immediately after
        // may race that finalization and could truncate the output; confirm
        // against the AVCaptureFileOutput documentation before reordering.
        [g_movieFileOutput stopRecording];
        [g_captureSession stopRunning];

        // Globals (session/inputs/delegate) are deliberately left alive here;
        // cleanupRecording() is only invoked on the error path below.
        g_isRecording = false;
        return Napi::Boolean::New(env, true);

    } @catch (NSException *exception) {
        cleanupRecording();
        return Napi::Boolean::New(env, false);
    }
}
|
|
218
|
+
|
|
219
|
+
|
|
220
|
+
|
|
221
|
+
// Converts a CFString to a UTF-8 std::string. Returns "" for NULL input or a
// failed conversion. Tries the zero-copy fast path first, then falls back to
// an explicit buffer (needed for non-ASCII strings).
static std::string CFStringToStdString(CFStringRef stringRef) {
    if (!stringRef) {
        return "";
    }
    const char *fastPath = CFStringGetCStringPtr(stringRef, kCFStringEncodingUTF8);
    if (fastPath) {
        return std::string(fastPath);
    }
    CFIndex length = CFStringGetLength(stringRef);
    CFIndex maxSize = CFStringGetMaximumSizeForEncoding(length, kCFStringEncodingUTF8) + 1;
    char *buffer = (char*)malloc(maxSize);
    // Fixed: the original wrote into an unchecked malloc result.
    if (!buffer) {
        return "";
    }
    std::string result;
    if (CFStringGetCString(stringRef, buffer, maxSize, kCFStringEncodingUTF8)) {
        result = buffer;
    }
    free(buffer);
    return result;
}

// NAPI Function: Get Windows List
// getWindows() -> [{id, name, appName, x, y, width, height}]
// Lists on-screen, non-desktop windows. Unnamed windows and windows smaller
// than 50x50 are skipped. NOTE(review): kCGWindowName is only populated when
// the process has Screen Recording permission on macOS 10.15+ — confirm in
// the target deployment.
Napi::Value GetWindows(const Napi::CallbackInfo& info) {
    Napi::Env env = info.Env();
    Napi::Array windowArray = Napi::Array::New(env);

    @try {
        CFArrayRef windowList = CGWindowListCopyWindowInfo(
            kCGWindowListOptionOnScreenOnly | kCGWindowListExcludeDesktopElements,
            kCGNullWindowID
        );

        if (!windowList) {
            return windowArray;
        }

        CFIndex windowCount = CFArrayGetCount(windowList);
        uint32_t arrayIndex = 0;

        for (CFIndex i = 0; i < windowCount; i++) {
            CFDictionaryRef window = (CFDictionaryRef)CFArrayGetValueAtIndex(windowList, i);

            CFNumberRef windowIDRef = (CFNumberRef)CFDictionaryGetValue(window, kCGWindowNumber);
            if (!windowIDRef) continue;

            uint32_t windowID;
            CFNumberGetValue(windowIDRef, kCFNumberSInt32Type, &windowID);

            // The two duplicated inline CFString conversions were factored
            // into CFStringToStdString above.
            std::string windowName = CFStringToStdString(
                (CFStringRef)CFDictionaryGetValue(window, kCGWindowName));
            std::string appName = CFStringToStdString(
                (CFStringRef)CFDictionaryGetValue(window, kCGWindowOwnerName));

            CFDictionaryRef boundsRef = (CFDictionaryRef)CFDictionaryGetValue(window, kCGWindowBounds);
            CGRect bounds = CGRectZero;
            if (boundsRef) {
                CGRectMakeWithDictionaryRepresentation(boundsRef, &bounds);
            }

            // Skip unnamed or tiny (likely decoration/helper) windows.
            if (windowName.empty() || bounds.size.width < 50 || bounds.size.height < 50) {
                continue;
            }

            Napi::Object windowObj = Napi::Object::New(env);
            windowObj.Set("id", Napi::Number::New(env, windowID));
            windowObj.Set("name", Napi::String::New(env, windowName));
            windowObj.Set("appName", Napi::String::New(env, appName));
            windowObj.Set("x", Napi::Number::New(env, bounds.origin.x));
            windowObj.Set("y", Napi::Number::New(env, bounds.origin.y));
            windowObj.Set("width", Napi::Number::New(env, bounds.size.width));
            windowObj.Set("height", Napi::Number::New(env, bounds.size.height));

            windowArray.Set(arrayIndex++, windowObj);
        }

        CFRelease(windowList);
        return windowArray;

    } @catch (NSException *exception) {
        return windowArray;
    }
}
|
|
319
|
+
|
|
320
|
+
// NAPI Function: Get Audio Devices
// getAudioDevices() -> [{id, name, manufacturer, isDefault}]
// Enumerates every audio capture device known to AVFoundation. On any
// exception an empty array is returned.
Napi::Value GetAudioDevices(const Napi::CallbackInfo& info) {
    Napi::Env env = info.Env();

    @try {
        NSArray *audioDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];
        AVCaptureDevice *defaultDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];

        // Build the NAPI array directly from the devices (no intermediate
        // NSDictionary round-trip).
        Napi::Array result = Napi::Array::New(env, audioDevices.count);
        uint32_t index = 0;
        for (AVCaptureDevice *device in audioDevices) {
            NSString *manufacturer = device.manufacturer ?: @"Unknown";

            Napi::Object entry = Napi::Object::New(env);
            entry.Set("id", Napi::String::New(env, [device.uniqueID UTF8String]));
            entry.Set("name", Napi::String::New(env, [device.localizedName UTF8String]));
            entry.Set("manufacturer", Napi::String::New(env, [manufacturer UTF8String]));
            entry.Set("isDefault", Napi::Boolean::New(env, [device isEqual:defaultDevice]));
            result[index++] = entry;
        }

        return result;

    } @catch (NSException *exception) {
        return Napi::Array::New(env, 0);
    }
}
|
|
357
|
+
|
|
358
|
+
// NAPI Function: Get Displays
// getDisplays() -> [{id, name, width, height, x, y, isPrimary}]
// `id` is the raw CGDirectDisplayID; `name` is a synthetic "Display N" label
// in active-display-list order (the same ordering StartRecording's displayId
// index refers to).
Napi::Value GetDisplays(const Napi::CallbackInfo& info) {
    Napi::Env env = info.Env();

    @try {
        // Fixed: displayCount was uninitialized — a CGGetActiveDisplayList
        // failure would have left garbage driving the malloc below.
        uint32_t displayCount = 0;
        if (CGGetActiveDisplayList(0, NULL, &displayCount) != kCGErrorSuccess || displayCount == 0) {
            return Napi::Array::New(env, 0);
        }

        CGDirectDisplayID *displayList = (CGDirectDisplayID *)malloc(displayCount * sizeof(CGDirectDisplayID));
        // Fixed: malloc result was used unchecked.
        if (!displayList) {
            return Napi::Array::New(env, 0);
        }
        CGGetActiveDisplayList(displayCount, displayList, &displayCount);

        // Build the result directly (the original round-tripped through an
        // NSDictionary array).
        Napi::Array result = Napi::Array::New(env, displayCount);
        for (uint32_t i = 0; i < displayCount; i++) {
            CGDirectDisplayID displayID = displayList[i];
            CGRect bounds = CGDisplayBounds(displayID);

            Napi::Object displayObj = Napi::Object::New(env);
            displayObj.Set("id", Napi::Number::New(env, displayID));
            displayObj.Set("name", Napi::String::New(env,
                [[NSString stringWithFormat:@"Display %d", i + 1] UTF8String]));
            displayObj.Set("width", Napi::Number::New(env, bounds.size.width));
            displayObj.Set("height", Napi::Number::New(env, bounds.size.height));
            displayObj.Set("x", Napi::Number::New(env, bounds.origin.x));
            displayObj.Set("y", Napi::Number::New(env, bounds.origin.y));
            displayObj.Set("isPrimary", Napi::Boolean::New(env, CGDisplayIsMain(displayID) != 0));
            result[i] = displayObj;
        }

        free(displayList);
        return result;

    } @catch (NSException *exception) {
        return Napi::Array::New(env, 0);
    }
}
|
|
409
|
+
|
|
410
|
+
// NAPI Function: Get Recording Status
// getRecordingStatus() -> bool, true while a recording session is active.
Napi::Value GetRecordingStatus(const Napi::CallbackInfo& info) {
    return Napi::Boolean::New(info.Env(), g_isRecording);
}
|
|
415
|
+
|
|
416
|
+
// NAPI Function: Check Permissions
// checkPermissions() -> bool
// True only when BOTH the Screen Recording permission (macOS 10.15+) and the
// microphone authorization (macOS 10.14+) are granted. On older systems the
// corresponding check is skipped (treated as granted). Never prompts.
Napi::Value CheckPermissions(const Napi::CallbackInfo& info) {
    Napi::Env env = info.Env();

    @try {
        // Screen Recording permission.
        bool hasScreenPermission = true;
        if (@available(macOS 10.15, *)) {
            // CGPreflightScreenCaptureAccess is the documented, prompt-free
            // way to test Screen Recording access. The original probed by
            // creating a CGDisplayStream, which is deprecated and
            // heavier-weight.
            hasScreenPermission = CGPreflightScreenCaptureAccess();
        }

        // Microphone authorization.
        bool hasAudioPermission = true;
        if (@available(macOS 10.14, *)) {
            AVAuthorizationStatus audioStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeAudio];
            hasAudioPermission = (audioStatus == AVAuthorizationStatusAuthorized);
        }

        return Napi::Boolean::New(env, hasScreenPermission && hasAudioPermission);

    } @catch (NSException *exception) {
        return Napi::Boolean::New(env, false);
    }
}
|
|
457
|
+
|
|
458
|
+
// Initialize NAPI Module
// Registers every exported native function on the module's exports object.
Napi::Object Init(Napi::Env env, Napi::Object exports) {
    struct ExportEntry {
        const char *name;
        Napi::Value (*fn)(const Napi::CallbackInfo&);
    };
    static const ExportEntry kExports[] = {
        {"startRecording",     StartRecording},
        {"stopRecording",      StopRecording},
        {"getAudioDevices",    GetAudioDevices},
        {"getDisplays",        GetDisplays},
        {"getWindows",         GetWindows},
        {"getRecordingStatus", GetRecordingStatus},
        {"checkPermissions",   CheckPermissions},
    };

    for (const ExportEntry &entry : kExports) {
        exports.Set(Napi::String::New(env, entry.name),
                    Napi::Function::New(env, entry.fn));
    }

    return exports;
}

NODE_API_MODULE(mac_recorder, Init)
|
|
@@ -0,0 +1,141 @@
|
|
|
1
|
+
#import <AVFoundation/AVFoundation.h>
|
|
2
|
+
#import <CoreGraphics/CoreGraphics.h>
|
|
3
|
+
#import <AppKit/AppKit.h>
|
|
4
|
+
|
|
5
|
+
@interface ScreenCapture : NSObject

/// Returns an array of dictionaries describing every active display:
/// {id, name, width, height, x, y, isPrimary}.
+ (NSArray *)getAvailableDisplays;

/// Captures a display (optionally cropped to `rect`; CGRectNull = whole
/// display) to a PNG file at `filePath`. When `includeCursor` is YES a small
/// red dot is composited at the cursor position. Returns YES on success.
+ (BOOL)captureDisplay:(CGDirectDisplayID)displayID
                toFile:(NSString *)filePath
                  rect:(CGRect)rect
         includeCursor:(BOOL)includeCursor;

/// Creates a CGImage of the display (caller must CGImageRelease), or NULL on
/// failure. CGRectNull means the whole display.
+ (CGImageRef)createScreenshotFromDisplay:(CGDirectDisplayID)displayID
                                     rect:(CGRect)rect;

@end

@implementation ScreenCapture

+ (NSArray *)getAvailableDisplays {
    NSMutableArray *displays = [NSMutableArray array];

    // Fixed: displayCount was uninitialized — a CGGetActiveDisplayList
    // failure would have left garbage driving the malloc below.
    uint32_t displayCount = 0;
    if (CGGetActiveDisplayList(0, NULL, &displayCount) != kCGErrorSuccess || displayCount == 0) {
        return [displays copy];
    }

    CGDirectDisplayID *displayList = (CGDirectDisplayID *)malloc(displayCount * sizeof(CGDirectDisplayID));
    // Fixed: malloc result was dereferenced unchecked.
    if (!displayList) {
        return [displays copy];
    }
    CGGetActiveDisplayList(displayCount, displayList, &displayCount);

    for (uint32_t i = 0; i < displayCount; i++) {
        CGDirectDisplayID displayID = displayList[i];
        CGRect bounds = CGDisplayBounds(displayID);

        [displays addObject:@{
            @"id": @(displayID),
            @"name": [NSString stringWithFormat:@"Display %d", i + 1],
            @"width": @(bounds.size.width),
            @"height": @(bounds.size.height),
            @"x": @(bounds.origin.x),
            @"y": @(bounds.origin.y),
            @"isPrimary": @(CGDisplayIsMain(displayID))
        }];
    }

    free(displayList);
    return [displays copy];
}

+ (BOOL)captureDisplay:(CGDirectDisplayID)displayID
                toFile:(NSString *)filePath
                  rect:(CGRect)rect
         includeCursor:(BOOL)includeCursor {

    CGImageRef screenshot = [self createScreenshotFromDisplay:displayID rect:rect];
    if (!screenshot) {
        return NO;
    }

    // PNG destination for the final image.
    NSURL *fileURL = [NSURL fileURLWithPath:filePath];
    CGImageDestinationRef destination = CGImageDestinationCreateWithURL(
        (__bridge CFURLRef)fileURL,
        kUTTypePNG,
        1,
        NULL
    );

    if (!destination) {
        CGImageRelease(screenshot);
        return NO;
    }

    if (includeCursor) {
        // Fixed: the original leaked the CGEventRef it created to read the
        // cursor position.
        CGEventRef locationEvent = CGEventCreate(NULL);
        CGPoint cursorPos = CGEventGetLocation(locationEvent);
        if (locationEvent) {
            CFRelease(locationEvent);
        }

        size_t width = CGImageGetWidth(screenshot);
        size_t height = CGImageGetHeight(screenshot);

        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
        CGContextRef context = CGBitmapContextCreate(
            NULL, width, height, 8, width * 4,
            colorSpace, kCGImageAlphaPremultipliedFirst
        );

        if (context) {
            // Composite the screenshot, then a simplified cursor marker
            // (small red dot) at the cursor position translated to flipped,
            // crop-relative image coordinates.
            CGContextDrawImage(context, CGRectMake(0, 0, width, height), screenshot);

            CGRect displayBounds = CGDisplayBounds(displayID);
            CGFloat relativeX = cursorPos.x - displayBounds.origin.x;
            CGFloat relativeY = height - (cursorPos.y - displayBounds.origin.y);

            if (!CGRectIsNull(rect)) {
                relativeX -= rect.origin.x;
                relativeY -= rect.origin.y;
            }

            if (relativeX >= 0 && relativeX < width && relativeY >= 0 && relativeY < height) {
                CGContextSetRGBFillColor(context, 1.0, 0.0, 0.0, 0.8); // Red cursor
                CGContextFillEllipseInRect(context, CGRectMake(relativeX - 5, relativeY - 5, 10, 10));
            }

            CGImageRef finalImage = CGBitmapContextCreateImage(context);
            CGContextRelease(context);
            // Fixed: only swap in the composited image when it was actually
            // created; the original could end up passing NULL to
            // CGImageDestinationAddImage below.
            if (finalImage) {
                CGImageRelease(screenshot);
                screenshot = finalImage;
            }
        }

        CGColorSpaceRelease(colorSpace);
    }

    // Write and finalize the PNG.
    CGImageDestinationAddImage(destination, screenshot, NULL);
    BOOL success = CGImageDestinationFinalize(destination);

    CFRelease(destination);
    CGImageRelease(screenshot);

    return success;
}

+ (CGImageRef)createScreenshotFromDisplay:(CGDirectDisplayID)displayID
                                     rect:(CGRect)rect {
    // CGRectNull means "whole display".
    return CGRectIsNull(rect) ? CGDisplayCreateImage(displayID)
                              : CGDisplayCreateImageForRect(displayID, rect);
}

@end
|