node-mac-recorder 2.4.11 → 2.4.13

This diff shows the contents of publicly available package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
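The headline change across these releases: the native recorder drops its ScreenCaptureKit pipeline (an SCStream feeding an AVAssetWriter) and falls back to a simpler AVFoundation pipeline built on AVCaptureSession, AVCaptureScreenInput, and AVCaptureMovieFileOutput. One JavaScript-visible consequence is sketched below; this is an illustration under stated assumptions (loading the compiled addon directly from a build path is hypothetical), with the exported names taken from the Init function in the diff.

    // Hypothetical sketch: the require path is an assumption; the exported
    // names match Init() in the diff below.
    const native = require("./build/Release/mac_recorder.node");

    // 2.4.13 no longer exports `isRecording`; poll getRecordingStatus() instead.
    const recording: boolean = native.getRecordingStatus();

    // New in this range: thumbnail helpers that return Base64-encoded PNG data.
    const displays = native.getDisplays();
    const thumb: string | null =
        displays.length > 0 ? native.getDisplayThumbnail(displays[0].id, 300, 200) : null;
    console.log({ recording, hasThumbnail: thumb !== null });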
@@ -1,5 +1,4 @@
  #import <napi.h>
- #import <ScreenCaptureKit/ScreenCaptureKit.h>
  #import <AVFoundation/AVFoundation.h>
  #import <CoreMedia/CoreMedia.h>
  #import <AppKit/AppKit.h>
@@ -10,7 +9,6 @@
 
  // Import screen capture
  #import "screen_capture.h"
- #import "screen_capture_kit.h"
 
  // Cursor tracker function declarations
  Napi::Object InitCursorTracker(Napi::Env env, Napi::Object exports);
@@ -18,884 +16,806 @@ Napi::Object InitCursorTracker(Napi::Env env, Napi::Object exports);
  // Window selector function declarations
  Napi::Object InitWindowSelector(Napi::Env env, Napi::Object exports);
 
- // ScreenCaptureKit Recording Delegate
- API_AVAILABLE(macos(12.3))
- @interface SCKRecorderDelegate : NSObject <SCStreamDelegate, SCStreamOutput>
+ @interface MacRecorderDelegate : NSObject <AVCaptureFileOutputRecordingDelegate>
  @property (nonatomic, copy) void (^completionHandler)(NSURL *outputURL, NSError *error);
- @property (nonatomic, copy) void (^startedHandler)(void);
- @property (nonatomic, strong) AVAssetWriter *assetWriter;
- @property (nonatomic, strong) AVAssetWriterInput *videoInput;
- @property (nonatomic, strong) AVAssetWriterInput *audioInput;
- @property (nonatomic, strong) AVAssetWriterInputPixelBufferAdaptor *pixelBufferAdaptor;
- @property (nonatomic, strong) NSURL *outputURL;
- @property (nonatomic, assign) BOOL isWriting;
- @property (nonatomic, assign) CMTime startTime;
- @property (nonatomic, assign) BOOL hasStartTime;
- @property (nonatomic, assign) BOOL startAttempted;
- @property (nonatomic, assign) BOOL startFailed;
  @end
 
- @implementation SCKRecorderDelegate
-
- // Standard SCStreamDelegate method - should be called automatically
- - (void)stream:(SCStream *)stream didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer ofType:(SCStreamOutputType)type {
-     NSLog(@"📹 SCStreamDelegate received sample buffer of type: %ld", (long)type);
-     [self handleSampleBuffer:sampleBuffer ofType:type fromStream:stream];
- }
-
- - (void)stream:(SCStream *)stream didStopWithError:(NSError *)error {
-     NSLog(@"🛑 Stream stopped with error: %@", error ? error.localizedDescription : @"none");
+ @implementation MacRecorderDelegate
+ - (void)captureOutput:(AVCaptureFileOutput *)output
+       didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
+       fromConnections:(NSArray<AVCaptureConnection *> *)connections
+       error:(NSError *)error {
      if (self.completionHandler) {
-         self.completionHandler(self.outputURL, error);
+         self.completionHandler(outputFileURL, error);
      }
  }
-
-
- // Main sample buffer handler (renamed to avoid conflicts)
- - (void)handleSampleBuffer:(CMSampleBufferRef)sampleBuffer ofType:(SCStreamOutputType)type fromStream:(SCStream *)stream {
-     NSLog(@"📹 Handling sample buffer of type: %ld", (long)type);
-
-     if (!self.isWriting || !self.assetWriter) {
-         NSLog(@"⚠️ Not writing or no asset writer available");
-         return;
-     }
-     if (self.startFailed) {
-         NSLog(@"⚠️ Asset writer start previously failed; ignoring buffers");
-         return;
-     }
-
-     // Check asset writer status before processing
-     if (self.assetWriter.status == AVAssetWriterStatusFailed) {
-         NSLog(@"❌ Asset writer has failed status: %@", self.assetWriter.error.localizedDescription);
-         self.startFailed = YES;
-         return;
-     }
-
-     // Start asset writer on first sample buffer
-     if (!self.hasStartTime) {
-         NSLog(@"🚀 Starting asset writer with first sample buffer");
-         if (self.startAttempted) {
-             // Another thread already attempted start; wait for success/fail flag to flip
-             return;
-         }
-         self.startAttempted = YES;
-         if (![self.assetWriter startWriting]) {
-             NSLog(@"❌ Failed to start asset writer: %@", self.assetWriter.error.localizedDescription);
-             self.startFailed = YES;
-             return;
-         }
-
-         NSLog(@"✅ Asset writer started successfully");
-         self.startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
-         self.hasStartTime = YES;
-         [self.assetWriter startSessionAtSourceTime:self.startTime];
-         NSLog(@"✅ Asset writer session started at time: %lld", self.startTime.value);
-     }
-
-     switch (type) {
-         case SCStreamOutputTypeScreen: {
-             NSLog(@"📺 Processing screen sample buffer");
-             if (self.videoInput && self.videoInput.isReadyForMoreMediaData) {
-                 // Check sample buffer validity
-                 if (!CMSampleBufferIsValid(sampleBuffer)) {
-                     NSLog(@"⚠️ Invalid sample buffer received");
-                     return;
-                 }
-
-                 // Check timing - ensure presentation time is advancing
-                 CMTime currentTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
-                 NSLog(@"📺 Sample buffer PTS: %lld", currentTime.value);
-
-                 BOOL success = NO;
-
-                 // Try using pixel buffer adaptor for better compatibility
-                 if (self.pixelBufferAdaptor && self.pixelBufferAdaptor.assetWriterInput.isReadyForMoreMediaData) {
-                     // Extract pixel buffer from sample buffer
-                     CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
-                     if (pixelBuffer) {
-                         success = [self.pixelBufferAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:currentTime];
-                         NSLog(@"📺 Pixel buffer appended via adaptor: %@", success ? @"SUCCESS" : @"FAILED");
-                     } else {
-                         // Fallback to direct sample buffer append
-                         success = [self.videoInput appendSampleBuffer:sampleBuffer];
-                         NSLog(@"📺 Sample buffer appended directly: %@", success ? @"SUCCESS" : @"FAILED");
-                     }
-                 } else {
-                     // Fallback to direct sample buffer append
-                     success = [self.videoInput appendSampleBuffer:sampleBuffer];
-                     NSLog(@"📺 Video sample buffer appended (fallback): %@", success ? @"SUCCESS" : @"FAILED");
-                 }
-
-                 if (!success) {
-                     // Log detailed error information
-                     NSLog(@"❌ Video input append failed - Asset Writer Status: %ld", (long)self.assetWriter.status);
-                     if (self.assetWriter.error) {
-                         NSLog(@"❌ Asset Writer Error: %@", self.assetWriter.error.localizedDescription);
-                     }
-
-                     // Check if asset writer has failed and mark for cleanup
-                     if (self.assetWriter.status == AVAssetWriterStatusFailed) {
-                         self.startFailed = YES;
-                     }
-                 }
-             } else {
-                 NSLog(@"⚠️ Video input not ready for more data - isReadyForMoreMediaData: %@",
-                       self.videoInput.isReadyForMoreMediaData ? @"YES" : @"NO");
-
-                 // Also check pixel buffer adaptor readiness
-                 if (self.pixelBufferAdaptor) {
-                     NSLog(@"📊 Pixel buffer adaptor ready: %@",
-                           self.pixelBufferAdaptor.assetWriterInput.isReadyForMoreMediaData ? @"YES" : @"NO");
-                 }
-
-                 // Log asset writer input status
-                 NSLog(@"📊 Asset Writer Status: %ld, Video Input Status: readyForMoreMediaData=%@",
-                       (long)self.assetWriter.status,
-                       self.videoInput.isReadyForMoreMediaData ? @"YES" : @"NO");
-             }
-             break;
-         }
-         case SCStreamOutputTypeAudio: {
-             NSLog(@"🔊 Processing audio sample buffer");
-             if (self.audioInput && self.audioInput.isReadyForMoreMediaData) {
-                 BOOL success = [self.audioInput appendSampleBuffer:sampleBuffer];
-                 NSLog(@"🔊 Audio sample buffer appended: %@", success ? @"SUCCESS" : @"FAILED");
-
-                 if (!success && self.assetWriter.error) {
-                     NSLog(@"❌ Audio append error: %@", self.assetWriter.error.localizedDescription);
-                 }
-             } else {
-                 NSLog(@"⚠️ Audio input not ready for more data (or no audio input)");
-             }
-             break;
-         }
-         case SCStreamOutputTypeMicrophone: {
-             NSLog(@"🎤 Processing microphone sample buffer");
-             if (self.audioInput && self.audioInput.isReadyForMoreMediaData) {
-                 BOOL success = [self.audioInput appendSampleBuffer:sampleBuffer];
-                 NSLog(@"🎤 Microphone sample buffer appended: %@", success ? @"SUCCESS" : @"FAILED");
-
-                 if (!success && self.assetWriter.error) {
-                     NSLog(@"❌ Microphone append error: %@", self.assetWriter.error.localizedDescription);
-                 }
-             } else {
-                 NSLog(@"⚠️ Microphone input not ready for more data (or no audio input)");
-             }
-             break;
-         }
-     }
- }
-
  @end
 
- // Global state for ScreenCaptureKit recording
- static SCStream *g_scStream = nil;
- static SCKRecorderDelegate *g_scDelegate = nil;
+ // Global state for recording
+ static AVCaptureSession *g_captureSession = nil;
+ static AVCaptureMovieFileOutput *g_movieFileOutput = nil;
+ static AVCaptureScreenInput *g_screenInput = nil;
+ static AVCaptureDeviceInput *g_audioInput = nil;
+ static MacRecorderDelegate *g_delegate = nil;
  static bool g_isRecording = false;
- static BOOL g_screenOutputAttached = NO;
- static BOOL g_audioOutputAttached = NO;
- static dispatch_queue_t g_outputQueue = NULL; // use a dedicated serial queue for sample handling
 
- // Helper function to cleanup ScreenCaptureKit recording resources
- void cleanupSCKRecording() {
-     NSLog(@"🛑 Cleaning up ScreenCaptureKit recording");
-
-     // Detach outputs first to prevent further callbacks into the delegate
-     if (g_scStream && g_scDelegate) {
-         NSError *rmError = nil;
-         if (g_screenOutputAttached) {
-             [g_scStream removeStreamOutput:g_scDelegate type:SCStreamOutputTypeScreen error:&rmError];
-             g_screenOutputAttached = NO;
-         }
-         if (g_audioOutputAttached) {
-             rmError = nil;
-             [g_scStream removeStreamOutput:g_scDelegate type:SCStreamOutputTypeAudio error:&rmError];
-             g_audioOutputAttached = NO;
-         }
-     }
-
-     if (g_scStream) {
-         NSLog(@"🛑 Stopping SCStream");
-         SCStream *streamToStop = g_scStream; // keep local until stop completes
-         [streamToStop stopCaptureWithCompletionHandler:^(NSError * _Nullable error) {
-             if (error) {
-                 NSLog(@"❌ Error stopping SCStream: %@", error.localizedDescription);
-             } else {
-                 NSLog(@"✅ SCStream stopped successfully");
-             }
-
-             // Finish writer after stream has stopped to ensure no further buffers arrive
-             if (g_scDelegate && g_scDelegate.assetWriter && g_scDelegate.isWriting) {
-                 NSLog(@"🛑 Finishing asset writer (status: %ld)", (long)g_scDelegate.assetWriter.status);
-                 g_scDelegate.isWriting = NO;
-
-                 if (g_scDelegate.assetWriter.status == AVAssetWriterStatusWriting) {
-                     if (g_scDelegate.videoInput) {
-                         [g_scDelegate.videoInput markAsFinished];
-                     }
-                     if (g_scDelegate.audioInput) {
-                         [g_scDelegate.audioInput markAsFinished];
-                     }
-
-                     [g_scDelegate.assetWriter finishWritingWithCompletionHandler:^{
-                         NSLog(@"✅ Asset writer finished. Status: %ld", (long)g_scDelegate.assetWriter.status);
-                         if (g_scDelegate.assetWriter.error) {
-                             NSLog(@"❌ Asset writer error: %@", g_scDelegate.assetWriter.error.localizedDescription);
-                         }
-                     }];
-                 } else if (g_scDelegate.assetWriter.status == AVAssetWriterStatusFailed) {
-                     NSLog(@"❌ Asset writer failed: %@", g_scDelegate.assetWriter.error.localizedDescription);
-                 }
-             }
-
-             g_isRecording = false;
-             g_scStream = nil;
-             g_scDelegate = nil;
-         }];
-     } else {
-         // No stream, just finalize writer if needed
-         if (g_scDelegate && g_scDelegate.assetWriter && g_scDelegate.isWriting) {
-             NSLog(@"🛑 Finishing asset writer (status: %ld)", (long)g_scDelegate.assetWriter.status);
-             g_scDelegate.isWriting = NO;
-             if (g_scDelegate.assetWriter.status == AVAssetWriterStatusWriting) {
-                 if (g_scDelegate.videoInput) {
-                     [g_scDelegate.videoInput markAsFinished];
-                 }
-                 if (g_scDelegate.audioInput) {
-                     [g_scDelegate.audioInput markAsFinished];
-                 }
-                 [g_scDelegate.assetWriter finishWritingWithCompletionHandler:^{}];
-             }
-         }
-         g_isRecording = false;
-         g_scDelegate = nil;
-     }
+ // Helper function to cleanup recording resources
+ void cleanupRecording() {
+     if (g_captureSession) {
+         [g_captureSession stopRunning];
+         g_captureSession = nil;
+     }
+     g_movieFileOutput = nil;
+     g_screenInput = nil;
+     g_audioInput = nil;
+     g_delegate = nil;
+     g_isRecording = false;
  }
 
- // Check if ScreenCaptureKit is available
- bool isScreenCaptureKitAvailable() {
-     if (@available(macOS 12.3, *)) {
-         return true;
-     }
-     return false;
- }
-
- // NAPI Function: Start Recording with ScreenCaptureKit
+ // NAPI Function: Start Recording
  Napi::Value StartRecording(const Napi::CallbackInfo& info) {
      Napi::Env env = info.Env();
-     @autoreleasepool {
-     if (!isScreenCaptureKitAvailable()) {
-         NSLog(@"ScreenCaptureKit requires macOS 12.3 or later");
-         return Napi::Boolean::New(env, false);
-     }
 
      if (info.Length() < 1) {
-         NSLog(@"Output path required");
-         return Napi::Boolean::New(env, false);
+         Napi::TypeError::New(env, "Output path required").ThrowAsJavaScriptException();
+         return env.Null();
      }
 
      if (g_isRecording) {
-         NSLog(@"⚠️ Already recording");
          return Napi::Boolean::New(env, false);
      }
 
-     // Verify permissions before starting
-     if (!CGPreflightScreenCaptureAccess()) {
-         NSLog(@"❌ Screen recording permission not granted - requesting access");
-         bool requestResult = CGRequestScreenCaptureAccess();
-         NSLog(@"📋 Permission request result: %@", requestResult ? @"SUCCESS" : @"FAILED");
-
-         if (!CGPreflightScreenCaptureAccess()) {
-             NSLog(@"❌ Screen recording permission still not available");
-             return Napi::Boolean::New(env, false);
-         }
-     }
-     NSLog(@"✅ Screen recording permission verified");
-
      std::string outputPath = info[0].As<Napi::String>().Utf8Value();
 
-     // Default options
-     bool captureCursor = false;
-     bool includeSystemAudio = true;
-     CGDirectDisplayID displayID = 0; // Will be set to first available display
-     uint32_t windowID = 0;
+     // Options parsing
      CGRect captureRect = CGRectNull;
+     bool captureCursor = false;          // Cursor hidden by default
+     bool includeMicrophone = false;      // Microphone off by default
+     bool includeSystemAudio = true;      // System audio on by default
+     CGDirectDisplayID displayID = CGMainDisplayID(); // Main display by default
+     NSString *audioDeviceId = nil;       // Default audio device ID
+     NSString *systemAudioDeviceId = nil; // System audio device ID
 
-     // Parse options
      if (info.Length() > 1 && info[1].IsObject()) {
          Napi::Object options = info[1].As<Napi::Object>();
 
+         // Capture area
+         if (options.Has("captureArea") && options.Get("captureArea").IsObject()) {
+             Napi::Object rectObj = options.Get("captureArea").As<Napi::Object>();
+             if (rectObj.Has("x") && rectObj.Has("y") && rectObj.Has("width") && rectObj.Has("height")) {
+                 captureRect = CGRectMake(
+                     rectObj.Get("x").As<Napi::Number>().DoubleValue(),
+                     rectObj.Get("y").As<Napi::Number>().DoubleValue(),
+                     rectObj.Get("width").As<Napi::Number>().DoubleValue(),
+                     rectObj.Get("height").As<Napi::Number>().DoubleValue()
+                 );
+             }
+         }
+
+         // Capture cursor
          if (options.Has("captureCursor")) {
              captureCursor = options.Get("captureCursor").As<Napi::Boolean>();
          }
 
+         // Microphone
+         if (options.Has("includeMicrophone")) {
+             includeMicrophone = options.Get("includeMicrophone").As<Napi::Boolean>();
+         }
+
+         // Audio device ID
+         if (options.Has("audioDeviceId") && !options.Get("audioDeviceId").IsNull()) {
+             std::string deviceId = options.Get("audioDeviceId").As<Napi::String>().Utf8Value();
+             audioDeviceId = [NSString stringWithUTF8String:deviceId.c_str()];
+         }
 
+         // System audio
          if (options.Has("includeSystemAudio")) {
              includeSystemAudio = options.Get("includeSystemAudio").As<Napi::Boolean>();
          }
 
+         // System audio device ID
+         if (options.Has("systemAudioDeviceId") && !options.Get("systemAudioDeviceId").IsNull()) {
+             std::string sysDeviceId = options.Get("systemAudioDeviceId").As<Napi::String>().Utf8Value();
+             systemAudioDeviceId = [NSString stringWithUTF8String:sysDeviceId.c_str()];
+         }
+
+         // Display ID
          if (options.Has("displayId") && !options.Get("displayId").IsNull()) {
-             uint32_t tempDisplayID = options.Get("displayId").As<Napi::Number>().Uint32Value();
-             if (tempDisplayID != 0) {
-                 displayID = tempDisplayID;
+             double displayIdNum = options.Get("displayId").As<Napi::Number>().DoubleValue();
+
+             // Use the display ID directly (not as an index)
+             // The JavaScript layer passes the actual CGDirectDisplayID
+             displayID = (CGDirectDisplayID)displayIdNum;
+
+             // Verify that this display ID is valid
+             uint32_t displayCount;
+             CGGetActiveDisplayList(0, NULL, &displayCount);
+             if (displayCount > 0) {
+                 CGDirectDisplayID *displays = (CGDirectDisplayID*)malloc(displayCount * sizeof(CGDirectDisplayID));
+                 CGGetActiveDisplayList(displayCount, displays, &displayCount);
+
+                 bool validDisplay = false;
+                 for (uint32_t i = 0; i < displayCount; i++) {
+                     if (displays[i] == displayID) {
+                         validDisplay = true;
+                         break;
+                     }
+                 }
+
+                 if (!validDisplay) {
+                     // Fallback to main display if invalid ID provided
+                     displayID = CGMainDisplayID();
+                 }
+
+                 free(displays);
              }
          }
 
+         // Window ID is reserved for future use (for now this is handled via captureArea)
          if (options.Has("windowId") && !options.Get("windowId").IsNull()) {
-             windowID = options.Get("windowId").As<Napi::Number>().Uint32Value();
-         }
-
-         if (options.Has("captureArea") && options.Get("captureArea").IsObject()) {
-             Napi::Object rectObj = options.Get("captureArea").As<Napi::Object>();
-             if (rectObj.Has("x") && rectObj.Has("y") && rectObj.Has("width") && rectObj.Has("height")) {
-                 captureRect = CGRectMake(
-                     rectObj.Get("x").As<Napi::Number>().DoubleValue(),
-                     rectObj.Get("y").As<Napi::Number>().DoubleValue(),
-                     rectObj.Get("width").As<Napi::Number>().DoubleValue(),
-                     rectObj.Get("height").As<Napi::Number>().DoubleValue()
-                 );
-             }
+             // windowId was provided, but captureArea is computed on the JavaScript side
+             // This parameter may be used for native-level window selection in the future
          }
      }
 
-     // Create output URL
-     NSURL *outputURL = [NSURL fileURLWithPath:[NSString stringWithUTF8String:outputPath.c_str()]];
-     NSLog(@"📁 Output URL: %@", outputURL.absoluteString);
-
-     // Remove existing file if present to avoid AVAssetWriter "Cannot Save" error
-     NSFileManager *fm = [NSFileManager defaultManager];
-     if ([fm fileExistsAtPath:outputURL.path]) {
-         NSError *rmErr = nil;
-         [fm removeItemAtURL:outputURL error:&rmErr];
-         if (rmErr) {
-             NSLog(@"⚠️ Failed to remove existing output file (%@): %@", outputURL.path, rmErr.localizedDescription);
-         }
-     }
-
-     // Get shareable content
-     dispatch_semaphore_t semaphore = dispatch_semaphore_create(0);
-     __block NSError *contentError = nil;
-     __block SCShareableContent *shareableContent = nil;
-
-     [SCShareableContent getShareableContentWithCompletionHandler:^(SCShareableContent * _Nullable content, NSError * _Nullable error) {
-         shareableContent = content;
-         contentError = error;
-         dispatch_semaphore_signal(semaphore);
-     }];
-
-     dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER);
-
-     if (contentError) {
-         NSLog(@"ScreenCaptureKit error: %@", contentError.localizedDescription);
-         NSLog(@"This is likely due to missing screen recording permissions");
-         return Napi::Boolean::New(env, false);
-     }
-
-     // Find target display or window
-     SCContentFilter *contentFilter = nil;
-
-     if (windowID > 0) {
-         // Window recording
-         SCWindow *targetWindow = nil;
-         for (SCWindow *window in shareableContent.windows) {
-             if (window.windowID == windowID) {
-                 targetWindow = window;
-                 break;
-             }
-         }
+     @try {
+         // Create capture session
+         g_captureSession = [[AVCaptureSession alloc] init];
+         [g_captureSession beginConfiguration];
 
-         if (!targetWindow) {
-             NSLog(@"Window not found with ID: %u", windowID);
-             return Napi::Boolean::New(env, false);
-         }
+         // Set session preset
+         g_captureSession.sessionPreset = AVCaptureSessionPresetHigh;
+
+         // Create screen input with selected display
+         g_screenInput = [[AVCaptureScreenInput alloc] initWithDisplayID:displayID];
 
-         contentFilter = [[SCContentFilter alloc] initWithDesktopIndependentWindow:targetWindow];
-     } else {
-         // Display recording
-         NSLog(@"🔍 Selecting display among %lu available displays", (unsigned long)shareableContent.displays.count);
+         if (!CGRectIsNull(captureRect)) {
+             g_screenInput.cropRect = captureRect;
+         }
 
-         SCDisplay *targetDisplay = nil;
+         // Set cursor capture
+         g_screenInput.capturesCursor = captureCursor;
 
-         // Log all available displays first
-         for (SCDisplay *display in shareableContent.displays) {
-             NSLog(@"📺 Available display: ID=%u, width=%d, height=%d", display.displayID, (int)display.width, (int)display.height);
+         if ([g_captureSession canAddInput:g_screenInput]) {
+             [g_captureSession addInput:g_screenInput];
+         } else {
+             cleanupRecording();
+             return Napi::Boolean::New(env, false);
          }
 
-         if (displayID != 0) {
-             // Look for specific display ID
-             for (SCDisplay *display in shareableContent.displays) {
-                 if (display.displayID == displayID) {
-                     targetDisplay = display;
-                     break;
+         // Add microphone input if requested
+         if (includeMicrophone) {
+             AVCaptureDevice *audioDevice = nil;
+
+             if (audioDeviceId) {
+                 // Try to find the specified device
+                 NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];
+                 NSLog(@"[DEBUG] Looking for audio device with ID: %@", audioDeviceId);
+                 NSLog(@"[DEBUG] Available audio devices:");
+                 for (AVCaptureDevice *device in devices) {
+                     NSLog(@"[DEBUG] - Device: %@ (ID: %@)", device.localizedName, device.uniqueID);
+                     if ([device.uniqueID isEqualToString:audioDeviceId]) {
+                         NSLog(@"[DEBUG] Found matching device: %@", device.localizedName);
+                         audioDevice = device;
+                         break;
+                     }
+                 }
+
+                 if (!audioDevice) {
+                     NSLog(@"[DEBUG] Specified audio device not found, falling back to default");
                  }
              }
 
-             if (!targetDisplay) {
-                 NSLog(@"❌ Display not found with ID: %u", displayID);
+             // Fallback to default device if specified device not found
+             if (!audioDevice) {
+                 audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
+                 NSLog(@"[DEBUG] Using default audio device: %@ (ID: %@)", audioDevice.localizedName, audioDevice.uniqueID);
             }
-         }
-
-         // If no specific display was requested or found, use the first available
-         if (!targetDisplay) {
-             if (shareableContent.displays.count > 0) {
-                 targetDisplay = shareableContent.displays.firstObject;
-                 NSLog(@" Using first available display: ID=%u, %dx%d", targetDisplay.displayID, (int)targetDisplay.width, (int)targetDisplay.height);
-             } else {
-                 NSLog(@" No displays available at all");
-                 return Napi::Boolean::New(env, false);
+
+             if (audioDevice) {
+                 NSError *error;
+                 g_audioInput = [[AVCaptureDeviceInput alloc] initWithDevice:audioDevice error:&error];
+                 if (g_audioInput && [g_captureSession canAddInput:g_audioInput]) {
+                     [g_captureSession addInput:g_audioInput];
+                     NSLog(@"[DEBUG] Successfully added audio input device");
+                 } else {
+                     NSLog(@"[DEBUG] Failed to add audio input device: %@", error);
+                 }
              }
-         } else {
-             NSLog(@"✅ Using specified display: ID=%u, %dx%d", targetDisplay.displayID, (int)targetDisplay.width, (int)targetDisplay.height);
          }
 
-         // Update displayID for subsequent use
-         displayID = targetDisplay.displayID;
-
-         // Build exclusion windows array if provided
-         NSMutableArray<SCWindow *> *excluded = [NSMutableArray array];
-         BOOL excludeCurrentApp = NO;
-         if (info.Length() > 1 && info[1].IsObject()) {
-             Napi::Object options = info[1].As<Napi::Object>();
-             if (options.Has("excludeCurrentApp")) {
-                 excludeCurrentApp = options.Get("excludeCurrentApp").As<Napi::Boolean>();
-             }
-             if (options.Has("excludeWindowIds") && options.Get("excludeWindowIds").IsArray()) {
-                 Napi::Array arr = options.Get("excludeWindowIds").As<Napi::Array>();
-                 for (uint32_t i = 0; i < arr.Length(); i++) {
-                     Napi::Value v = arr.Get(i);
-                     if (v.IsNumber()) {
-                         uint32_t wid = v.As<Napi::Number>().Uint32Value();
-                         for (SCWindow *w in shareableContent.windows) {
-                             if (w.windowID == wid) {
-                                 [excluded addObject:w];
-                                 break;
-                             }
+         // System audio configuration
+         if (includeSystemAudio) {
+             // Enable audio capture in screen input
+             g_screenInput.capturesMouseClicks = YES;
+
+             // Try to add system audio input using Core Audio
+             // This approach captures system audio by creating a virtual audio device
+             if (@available(macOS 10.15, *)) {
+                 // Configure screen input for better audio capture
+                 g_screenInput.capturesCursor = captureCursor;
+                 g_screenInput.capturesMouseClicks = YES;
+
+                 // Try to find and add system audio device (like Soundflower, BlackHole, etc.)
+                 NSArray *audioDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];
+                 AVCaptureDevice *systemAudioDevice = nil;
+
+                 // If specific system audio device ID is provided, try to find it first
+                 if (systemAudioDeviceId) {
+                     for (AVCaptureDevice *device in audioDevices) {
+                         if ([device.uniqueID isEqualToString:systemAudioDeviceId]) {
+                             systemAudioDevice = device;
+                             NSLog(@"[DEBUG] Found specified system audio device: %@ (ID: %@)", device.localizedName, device.uniqueID);
+                             break;
+                         }
+                     }
+                 }
+
+                 // If no specific device found or specified, look for known system audio devices
+                 if (!systemAudioDevice) {
+                     for (AVCaptureDevice *device in audioDevices) {
+                         NSString *deviceName = [device.localizedName lowercaseString];
+                         // Check for common system audio capture devices
+                         if ([deviceName containsString:@"soundflower"] ||
+                             [deviceName containsString:@"blackhole"] ||
+                             [deviceName containsString:@"loopback"] ||
+                             [deviceName containsString:@"system audio"] ||
+                             [deviceName containsString:@"aggregate"]) {
+                             systemAudioDevice = device;
+                             NSLog(@"[DEBUG] Auto-detected system audio device: %@", device.localizedName);
+                             break;
                          }
                      }
                  }
+
+                 // If we found a system audio device, add it as an additional input
+                 if (systemAudioDevice && !includeMicrophone) {
+                     // Only add system audio device if microphone is not already added
+                     NSError *error;
+                     AVCaptureDeviceInput *systemAudioInput = [[AVCaptureDeviceInput alloc] initWithDevice:systemAudioDevice error:&error];
+                     if (systemAudioInput && [g_captureSession canAddInput:systemAudioInput]) {
+                         [g_captureSession addInput:systemAudioInput];
+                         NSLog(@"[DEBUG] Successfully added system audio device: %@", systemAudioDevice.localizedName);
+                     } else if (error) {
+                         NSLog(@"[DEBUG] Failed to add system audio device: %@", error.localizedDescription);
+                     }
+                 } else if (includeSystemAudio && !systemAudioDevice) {
+                     NSLog(@"[DEBUG] System audio requested but no suitable device found. Available devices:");
+                     for (AVCaptureDevice *device in audioDevices) {
+                         NSLog(@"[DEBUG] - %@ (ID: %@)", device.localizedName, device.uniqueID);
+                     }
+                 }
              }
+         } else {
+             // Explicitly disable audio capture if not requested
+             g_screenInput.capturesMouseClicks = NO;
          }
 
-         if (excludeCurrentApp) {
-             pid_t pid = [[NSProcessInfo processInfo] processIdentifier];
-             for (SCWindow *w in shareableContent.windows) {
-                 if (w.owningApplication && w.owningApplication.processID == pid) {
-                     [excluded addObject:w];
-                 }
-             }
+         // Create movie file output
+         g_movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
+         if ([g_captureSession canAddOutput:g_movieFileOutput]) {
+             [g_captureSession addOutput:g_movieFileOutput];
+         } else {
+             cleanupRecording();
+             return Napi::Boolean::New(env, false);
          }
 
-         contentFilter = [[SCContentFilter alloc] initWithDisplay:targetDisplay excludingWindows:excluded];
-         NSLog(@"✅ Content filter created for display recording");
-     }
-
-     // Get actual display dimensions for proper video configuration
-     CGRect displayBounds = CGDisplayBounds(displayID);
-     NSSize videoSize = NSMakeSize(displayBounds.size.width, displayBounds.size.height);
-
-     // Create stream configuration
-     SCStreamConfiguration *config = [[SCStreamConfiguration alloc] init];
-     config.width = videoSize.width;
-     config.height = videoSize.height;
-     config.minimumFrameInterval = CMTimeMake(1, 30); // 30 FPS
-
-     // Try a more compatible pixel format
-     config.pixelFormat = kCVPixelFormatType_32BGRA;
-
-     NSLog(@"📐 Stream configuration: %dx%d, FPS=30, cursor=%@", (int)config.width, (int)config.height, captureCursor ? @"YES" : @"NO");
-
-     if (@available(macOS 13.0, *)) {
-         config.capturesAudio = includeSystemAudio;
-         config.excludesCurrentProcessAudio = YES;
-         NSLog(@"🔊 Audio configuration: capture=%@, excludeProcess=%@", includeSystemAudio ? @"YES" : @"NO", @"YES");
-     } else {
-         NSLog(@"⚠️ macOS 13.0+ features not available");
-     }
-     config.showsCursor = captureCursor;
-
-     if (!CGRectIsNull(captureRect)) {
-         config.sourceRect = captureRect;
-         // Update video size if capture rect is specified
-         videoSize = NSMakeSize(captureRect.size.width, captureRect.size.height);
-     }
-
-     // Create delegate
-     g_scDelegate = [[SCKRecorderDelegate alloc] init];
-     g_scDelegate.outputURL = outputURL;
-     g_scDelegate.hasStartTime = NO;
-     g_scDelegate.startAttempted = NO;
-     g_scDelegate.startFailed = NO;
-
-     // Setup AVAssetWriter
-     NSError *writerError = nil;
-     g_scDelegate.assetWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:AVFileTypeQuickTimeMovie error:&writerError];
-
-     if (writerError) {
-         NSLog(@"❌ Failed to create asset writer: %@", writerError.localizedDescription);
+         [g_captureSession commitConfiguration];
+
+         // Start session
+         [g_captureSession startRunning];
+
+         // Create delegate
+         g_delegate = [[MacRecorderDelegate alloc] init];
+
+         // Start recording
+         NSURL *outputURL = [NSURL fileURLWithPath:[NSString stringWithUTF8String:outputPath.c_str()]];
+         [g_movieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:g_delegate];
+
+         g_isRecording = true;
+         return Napi::Boolean::New(env, true);
+
+     } @catch (NSException *exception) {
+         cleanupRecording();
          return Napi::Boolean::New(env, false);
      }
-
-     NSLog(@"✅ Asset writer created successfully");
-
-     // Video input settings using actual dimensions
-     NSLog(@"📺 Setting up video input: %dx%d", (int)videoSize.width, (int)videoSize.height);
-     NSDictionary *videoSettings = @{
-         AVVideoCodecKey: AVVideoCodecTypeH264,
-         AVVideoWidthKey: @((NSInteger)videoSize.width),
-         AVVideoHeightKey: @((NSInteger)videoSize.height),
-         AVVideoCompressionPropertiesKey: @{
-             AVVideoAverageBitRateKey: @(2000000), // 2 Mbps
-             AVVideoMaxKeyFrameIntervalKey: @30
-         }
-     };
-
-     g_scDelegate.videoInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
-     g_scDelegate.videoInput.expectsMediaDataInRealTime = YES;
-
-     // Create pixel buffer adaptor for more robust handling
-     NSDictionary *pixelBufferAttributes = @{
-         (NSString*)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA),
-         (NSString*)kCVPixelBufferWidthKey: @((NSInteger)videoSize.width),
-         (NSString*)kCVPixelBufferHeightKey: @((NSInteger)videoSize.height),
-     };
-
-     g_scDelegate.pixelBufferAdaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc]
-                                        initWithAssetWriterInput:g_scDelegate.videoInput
-                                        sourcePixelBufferAttributes:pixelBufferAttributes];
-
-     if ([g_scDelegate.assetWriter canAddInput:g_scDelegate.videoInput]) {
-         [g_scDelegate.assetWriter addInput:g_scDelegate.videoInput];
-         NSLog(@"✅ Video input added to asset writer with pixel buffer adaptor");
-     } else {
-         NSLog(@"❌ Cannot add video input to asset writer");
-     }
-
-     // Audio input settings (if needed)
-     if (includeSystemAudio) {
-         NSDictionary *audioSettings = @{
-             AVFormatIDKey: @(kAudioFormatMPEG4AAC),
-             AVSampleRateKey: @44100,
-             AVNumberOfChannelsKey: @2
-         };
-
-         g_scDelegate.audioInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeAudio outputSettings:audioSettings];
-         g_scDelegate.audioInput.expectsMediaDataInRealTime = YES;
-
-         if ([g_scDelegate.assetWriter canAddInput:g_scDelegate.audioInput]) {
-             [g_scDelegate.assetWriter addInput:g_scDelegate.audioInput];
-         }
-     }
-
-     // Create a dedicated serial queue for output callbacks
-     if (g_outputQueue == NULL) {
-         g_outputQueue = dispatch_queue_create("com.node-mac-recorder.stream-output", DISPATCH_QUEUE_SERIAL);
-     }
+ }
 
-     // Create and start stream first
-     g_scStream = [[SCStream alloc] initWithFilter:contentFilter configuration:config delegate:g_scDelegate];
-
-     // Attach outputs to actually receive sample buffers
-     NSLog(@"✅ Setting up stream output callback for sample buffers");
-     dispatch_queue_t outputQueue = g_outputQueue;
-     NSError *outputError = nil;
-     BOOL addedScreenOutput = [g_scStream addStreamOutput:g_scDelegate type:SCStreamOutputTypeScreen sampleHandlerQueue:outputQueue error:&outputError];
-     if (addedScreenOutput) {
-         NSLog(@"✅ Screen output attached to SCStream");
-         g_screenOutputAttached = YES;
-     } else {
-         NSLog(@"❌ Failed to attach screen output to SCStream: %@", outputError.localizedDescription);
-     }
-     if (includeSystemAudio) {
-         outputError = nil;
-         BOOL addedAudioOutput = [g_scStream addStreamOutput:g_scDelegate type:SCStreamOutputTypeAudio sampleHandlerQueue:outputQueue error:&outputError];
-         if (addedAudioOutput) {
-             NSLog(@"✅ Audio output attached to SCStream");
-             g_audioOutputAttached = YES;
-         } else {
-             NSLog(@"⚠️ Failed to attach audio output to SCStream (audio may be disabled): %@", outputError.localizedDescription);
-         }
-     }
+ // NAPI Function: Stop Recording
+ Napi::Value StopRecording(const Napi::CallbackInfo& info) {
+     Napi::Env env = info.Env();
 
-     if (!g_scStream) {
-         NSLog(@"❌ Failed to create SCStream");
+     if (!g_isRecording || !g_movieFileOutput) {
          return Napi::Boolean::New(env, false);
      }
 
-     NSLog(@"✅ SCStream created successfully");
-
-     // Add callback queue for sample buffers (this might be important)
-     if (@available(macOS 14.0, *)) {
-         // In macOS 14+, we can set a specific queue
-         // For now, we'll rely on the default behavior
-     }
-
-     // Start capture and wait for it to begin
-     dispatch_semaphore_t startSemaphore = dispatch_semaphore_create(0);
-     __block NSError *startError = nil;
-
-     NSLog(@"🚀 Starting ScreenCaptureKit capture");
-     [g_scStream startCaptureWithCompletionHandler:^(NSError * _Nullable error) {
-         startError = error;
-         dispatch_semaphore_signal(startSemaphore);
-     }];
-
-     dispatch_semaphore_wait(startSemaphore, dispatch_time(DISPATCH_TIME_NOW, 5 * NSEC_PER_SEC));
-
-     if (startError) {
-         NSLog(@"❌ Failed to start capture: %@", startError.localizedDescription);
+     @try {
+         [g_movieFileOutput stopRecording];
+         [g_captureSession stopRunning];
+
+         g_isRecording = false;
+         return Napi::Boolean::New(env, true);
+
+     } @catch (NSException *exception) {
+         cleanupRecording();
          return Napi::Boolean::New(env, false);
      }
-
-     NSLog(@"✅ ScreenCaptureKit capture started successfully");
-
-     // Mark that we're ready to write (asset writer will be started in first sample buffer)
-     g_scDelegate.isWriting = YES;
-     g_isRecording = true;
-
-     // Wait a moment to see if we get any sample buffers
-     NSLog(@"⏱️ Waiting 1 second for sample buffers to arrive...");
-     dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(1.0 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
-         if (g_scDelegate && !g_scDelegate.hasStartTime) {
-             NSLog(@"⚠️ No sample buffers received after 1 second - this might indicate a permission or configuration issue");
-         } else if (g_scDelegate && g_scDelegate.hasStartTime) {
-             NSLog(@"✅ Sample buffers are being received successfully");
+ }
+
+
+
+ // NAPI Function: Get Windows List
+ Napi::Value GetWindows(const Napi::CallbackInfo& info) {
+     Napi::Env env = info.Env();
+     Napi::Array windowArray = Napi::Array::New(env);
+
+     @try {
+         // Get window list
+         CFArrayRef windowList = CGWindowListCopyWindowInfo(
+             kCGWindowListOptionOnScreenOnly | kCGWindowListExcludeDesktopElements,
+             kCGNullWindowID
+         );
+
+         if (!windowList) {
+             return windowArray;
          }
-     });
-
-     NSLog(@"🎬 Recording initialized successfully");
-     return Napi::Boolean::New(env, true);
+
+         CFIndex windowCount = CFArrayGetCount(windowList);
+         uint32_t arrayIndex = 0;
+
+         for (CFIndex i = 0; i < windowCount; i++) {
+             CFDictionaryRef window = (CFDictionaryRef)CFArrayGetValueAtIndex(windowList, i);
+
+             // Get window ID
+             CFNumberRef windowIDRef = (CFNumberRef)CFDictionaryGetValue(window, kCGWindowNumber);
+             if (!windowIDRef) continue;
+
+             uint32_t windowID;
+             CFNumberGetValue(windowIDRef, kCFNumberSInt32Type, &windowID);
+
+             // Get window name
+             CFStringRef windowNameRef = (CFStringRef)CFDictionaryGetValue(window, kCGWindowName);
+             std::string windowName = "";
+             if (windowNameRef) {
+                 const char* windowNameCStr = CFStringGetCStringPtr(windowNameRef, kCFStringEncodingUTF8);
+                 if (windowNameCStr) {
+                     windowName = std::string(windowNameCStr);
+                 } else {
+                     // Fallback for non-ASCII characters
+                     CFIndex length = CFStringGetLength(windowNameRef);
+                     CFIndex maxSize = CFStringGetMaximumSizeForEncoding(length, kCFStringEncodingUTF8) + 1;
+                     char* buffer = (char*)malloc(maxSize);
+                     if (CFStringGetCString(windowNameRef, buffer, maxSize, kCFStringEncodingUTF8)) {
+                         windowName = std::string(buffer);
+                     }
+                     free(buffer);
+                 }
+             }
+
+             // Get application name
+             CFStringRef appNameRef = (CFStringRef)CFDictionaryGetValue(window, kCGWindowOwnerName);
+             std::string appName = "";
+             if (appNameRef) {
+                 const char* appNameCStr = CFStringGetCStringPtr(appNameRef, kCFStringEncodingUTF8);
+                 if (appNameCStr) {
+                     appName = std::string(appNameCStr);
+                 } else {
+                     CFIndex length = CFStringGetLength(appNameRef);
+                     CFIndex maxSize = CFStringGetMaximumSizeForEncoding(length, kCFStringEncodingUTF8) + 1;
+                     char* buffer = (char*)malloc(maxSize);
+                     if (CFStringGetCString(appNameRef, buffer, maxSize, kCFStringEncodingUTF8)) {
+                         appName = std::string(buffer);
+                     }
+                     free(buffer);
+                 }
+             }
+
+             // Get window bounds
+             CFDictionaryRef boundsRef = (CFDictionaryRef)CFDictionaryGetValue(window, kCGWindowBounds);
+             CGRect bounds = CGRectZero;
+             if (boundsRef) {
+                 CGRectMakeWithDictionaryRepresentation(boundsRef, &bounds);
+             }
+
+             // Skip windows without name or very small windows
+             if (windowName.empty() || bounds.size.width < 50 || bounds.size.height < 50) {
+                 continue;
+             }
+
+             // Create window object
+             Napi::Object windowObj = Napi::Object::New(env);
+             windowObj.Set("id", Napi::Number::New(env, windowID));
+             windowObj.Set("name", Napi::String::New(env, windowName));
+             windowObj.Set("appName", Napi::String::New(env, appName));
+             windowObj.Set("x", Napi::Number::New(env, bounds.origin.x));
+             windowObj.Set("y", Napi::Number::New(env, bounds.origin.y));
+             windowObj.Set("width", Napi::Number::New(env, bounds.size.width));
+             windowObj.Set("height", Napi::Number::New(env, bounds.size.height));
+
+             windowArray.Set(arrayIndex++, windowObj);
+         }
+
+         CFRelease(windowList);
+         return windowArray;
+
+     } @catch (NSException *exception) {
+         return windowArray;
      }
  }
 
- // NAPI Function: Stop Recording
- Napi::Value StopRecording(const Napi::CallbackInfo& info) {
+ // NAPI Function: Get Audio Devices
+ Napi::Value GetAudioDevices(const Napi::CallbackInfo& info) {
      Napi::Env env = info.Env();
 
-     if (!g_isRecording) {
-         return Napi::Boolean::New(env, false);
+     @try {
+         NSMutableArray *devices = [NSMutableArray array];
+
+         // Get all audio devices
+         NSArray *audioDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];
+
+         for (AVCaptureDevice *device in audioDevices) {
+             [devices addObject:@{
+                 @"id": device.uniqueID,
+                 @"name": device.localizedName,
+                 @"manufacturer": device.manufacturer ?: @"Unknown",
+                 @"isDefault": @([device isEqual:[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio]])
+             }];
+         }
+
+         // Convert to NAPI array
+         Napi::Array result = Napi::Array::New(env, devices.count);
+         for (NSUInteger i = 0; i < devices.count; i++) {
+             NSDictionary *device = devices[i];
+             Napi::Object deviceObj = Napi::Object::New(env);
+             deviceObj.Set("id", Napi::String::New(env, [device[@"id"] UTF8String]));
+             deviceObj.Set("name", Napi::String::New(env, [device[@"name"] UTF8String]));
+             deviceObj.Set("manufacturer", Napi::String::New(env, [device[@"manufacturer"] UTF8String]));
+             deviceObj.Set("isDefault", Napi::Boolean::New(env, [device[@"isDefault"] boolValue]));
+             result[i] = deviceObj;
+         }
+
+         return result;
+
+     } @catch (NSException *exception) {
+         return Napi::Array::New(env, 0);
      }
-
-     cleanupSCKRecording();
-     return Napi::Boolean::New(env, true);
  }
 
- // NAPI Function: Get Recording Status (for JS compatibility)
- Napi::Value GetRecordingStatus(const Napi::CallbackInfo& info) {
+ // NAPI Function: Get Displays
+ Napi::Value GetDisplays(const Napi::CallbackInfo& info) {
      Napi::Env env = info.Env();
-     return Napi::Boolean::New(env, g_isRecording);
+
+     @try {
+         NSArray *displays = [ScreenCapture getAvailableDisplays];
+         Napi::Array result = Napi::Array::New(env, displays.count);
+
+         NSLog(@"Found %lu displays", (unsigned long)displays.count);
+
+         for (NSUInteger i = 0; i < displays.count; i++) {
+             NSDictionary *display = displays[i];
+             NSLog(@"Display %lu: ID=%u, Name=%@, Size=%@x%@",
+                   (unsigned long)i,
+                   [display[@"id"] unsignedIntValue],
+                   display[@"name"],
+                   display[@"width"],
+                   display[@"height"]);
+
+             Napi::Object displayObj = Napi::Object::New(env);
+             displayObj.Set("id", Napi::Number::New(env, [display[@"id"] unsignedIntValue]));
+             displayObj.Set("name", Napi::String::New(env, [display[@"name"] UTF8String]));
+             displayObj.Set("width", Napi::Number::New(env, [display[@"width"] doubleValue]));
+             displayObj.Set("height", Napi::Number::New(env, [display[@"height"] doubleValue]));
+             displayObj.Set("x", Napi::Number::New(env, [display[@"x"] doubleValue]));
+             displayObj.Set("y", Napi::Number::New(env, [display[@"y"] doubleValue]));
+             displayObj.Set("isPrimary", Napi::Boolean::New(env, [display[@"isPrimary"] boolValue]));
+             result[i] = displayObj;
+         }
+
+         return result;
+
+     } @catch (NSException *exception) {
+         NSLog(@"Exception in GetDisplays: %@", exception);
+         return Napi::Array::New(env, 0);
+     }
  }
 
  // NAPI Function: Get Recording Status
- Napi::Value IsRecording(const Napi::CallbackInfo& info) {
+ Napi::Value GetRecordingStatus(const Napi::CallbackInfo& info) {
      Napi::Env env = info.Env();
      return Napi::Boolean::New(env, g_isRecording);
  }
 
- // NAPI Function: Get Displays
- Napi::Value GetDisplays(const Napi::CallbackInfo& info) {
+ // NAPI Function: Get Window Thumbnail
+ Napi::Value GetWindowThumbnail(const Napi::CallbackInfo& info) {
      Napi::Env env = info.Env();
 
-     if (!isScreenCaptureKitAvailable()) {
-         // Fallback to legacy method
-         return GetAvailableDisplays(info);
+     if (info.Length() < 1) {
+         Napi::TypeError::New(env, "Window ID is required").ThrowAsJavaScriptException();
+         return env.Null();
      }
 
-     // Use ScreenCaptureKit
-     dispatch_semaphore_t semaphore = dispatch_semaphore_create(0);
-     __block SCShareableContent *shareableContent = nil;
-     __block NSError *error = nil;
+     uint32_t windowID = info[0].As<Napi::Number>().Uint32Value();
 
-     [SCShareableContent getShareableContentWithCompletionHandler:^(SCShareableContent * _Nullable content, NSError * _Nullable err) {
-         shareableContent = content;
-         error = err;
-         dispatch_semaphore_signal(semaphore);
-     }];
+     // Optional parameters
+     int maxWidth = 300;  // Default thumbnail width
+     int maxHeight = 200; // Default thumbnail height
 
-     dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER);
-
-     if (error) {
-         NSLog(@"Failed to get displays: %@", error.localizedDescription);
-         return Napi::Array::New(env, 0);
+     if (info.Length() >= 2 && !info[1].IsNull()) {
+         maxWidth = info[1].As<Napi::Number>().Int32Value();
      }
-
-     Napi::Array displaysArray = Napi::Array::New(env);
-     uint32_t index = 0;
-
-     for (SCDisplay *display in shareableContent.displays) {
-         Napi::Object displayObj = Napi::Object::New(env);
-         displayObj.Set("id", Napi::Number::New(env, display.displayID));
-         displayObj.Set("width", Napi::Number::New(env, display.width));
-         displayObj.Set("height", Napi::Number::New(env, display.height));
-         displayObj.Set("frame", Napi::Object::New(env)); // TODO: Add frame details
-
-         displaysArray.Set(index++, displayObj);
+     if (info.Length() >= 3 && !info[2].IsNull()) {
+         maxHeight = info[2].As<Napi::Number>().Int32Value();
      }
 
-     return displaysArray;
+     @try {
+         // Create window image
+         CGImageRef windowImage = CGWindowListCreateImage(
+             CGRectNull,
+             kCGWindowListOptionIncludingWindow,
+             windowID,
+             kCGWindowImageBoundsIgnoreFraming | kCGWindowImageShouldBeOpaque
+         );
+
+         if (!windowImage) {
+             return env.Null();
+         }
+
+         // Get original dimensions
+         size_t originalWidth = CGImageGetWidth(windowImage);
+         size_t originalHeight = CGImageGetHeight(windowImage);
+
+         // Calculate scaled dimensions maintaining aspect ratio
+         double scaleX = (double)maxWidth / originalWidth;
+         double scaleY = (double)maxHeight / originalHeight;
+         double scale = std::min(scaleX, scaleY);
+
+         size_t thumbnailWidth = (size_t)(originalWidth * scale);
+         size_t thumbnailHeight = (size_t)(originalHeight * scale);
+
+         // Create scaled image
+         CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
+         CGContextRef context = CGBitmapContextCreate(
+             NULL,
+             thumbnailWidth,
+             thumbnailHeight,
+             8,
+             thumbnailWidth * 4,
+             colorSpace,
+             kCGImageAlphaPremultipliedLast
+         );
+
+         if (context) {
+             CGContextDrawImage(context, CGRectMake(0, 0, thumbnailWidth, thumbnailHeight), windowImage);
+             CGImageRef thumbnailImage = CGBitmapContextCreateImage(context);
+
+             if (thumbnailImage) {
+                 // Convert to PNG data
+                 NSBitmapImageRep *imageRep = [[NSBitmapImageRep alloc] initWithCGImage:thumbnailImage];
+                 NSData *pngData = [imageRep representationUsingType:NSBitmapImageFileTypePNG properties:@{}];
+
+                 if (pngData) {
+                     // Convert to Base64
+                     NSString *base64String = [pngData base64EncodedStringWithOptions:0];
+                     std::string base64Std = [base64String UTF8String];
+
+                     CGImageRelease(thumbnailImage);
+                     CGContextRelease(context);
+                     CGColorSpaceRelease(colorSpace);
+                     CGImageRelease(windowImage);
+
+                     return Napi::String::New(env, base64Std);
+                 }
+
+                 CGImageRelease(thumbnailImage);
+             }
+
+             CGContextRelease(context);
+         }
+
+         CGColorSpaceRelease(colorSpace);
+         CGImageRelease(windowImage);
+
+         return env.Null();
+
+     } @catch (NSException *exception) {
+         return env.Null();
+     }
  }
 
-
- // NAPI Function: Get Windows
- Napi::Value GetWindows(const Napi::CallbackInfo& info) {
+ // NAPI Function: Get Display Thumbnail
+ Napi::Value GetDisplayThumbnail(const Napi::CallbackInfo& info) {
      Napi::Env env = info.Env();
 
-     if (!isScreenCaptureKitAvailable()) {
-         // Use legacy CGWindowList method
-         return GetWindowList(info);
+     if (info.Length() < 1) {
+         Napi::TypeError::New(env, "Display ID is required").ThrowAsJavaScriptException();
+         return env.Null();
      }
 
-     // Use ScreenCaptureKit
-     dispatch_semaphore_t semaphore = dispatch_semaphore_create(0);
-     __block SCShareableContent *shareableContent = nil;
-     __block NSError *error = nil;
-
-     [SCShareableContent getShareableContentWithCompletionHandler:^(SCShareableContent * _Nullable content, NSError * _Nullable err) {
-         shareableContent = content;
-         error = err;
-         dispatch_semaphore_signal(semaphore);
-     }];
+     uint32_t displayID = info[0].As<Napi::Number>().Uint32Value();
 
-     dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER);
+     // Optional parameters
+     int maxWidth = 300;  // Default thumbnail width
+     int maxHeight = 200; // Default thumbnail height
 
-     if (error) {
-         NSLog(@"Failed to get windows: %@", error.localizedDescription);
-         return Napi::Array::New(env, 0);
+     if (info.Length() >= 2 && !info[1].IsNull()) {
+         maxWidth = info[1].As<Napi::Number>().Int32Value();
+     }
+     if (info.Length() >= 3 && !info[2].IsNull()) {
+         maxHeight = info[2].As<Napi::Number>().Int32Value();
      }
 
-     Napi::Array windowsArray = Napi::Array::New(env);
-     uint32_t index = 0;
-
-     for (SCWindow *window in shareableContent.windows) {
-         if (window.isOnScreen && window.frame.size.width > 50 && window.frame.size.height > 50) {
-             Napi::Object windowObj = Napi::Object::New(env);
-             windowObj.Set("id", Napi::Number::New(env, window.windowID));
-             windowObj.Set("title", Napi::String::New(env, window.title ? [window.title UTF8String] : ""));
-
-             // Safely get application information (can be nil for some windows)
-             NSString *appName = @"";
-             NSString *bundleId = @"";
-             if (window.owningApplication) {
-                 appName = window.owningApplication.applicationName ?: @"";
-                 bundleId = window.owningApplication.bundleIdentifier ?: @"";
+     @try {
+         // Verify display exists
+         CGDirectDisplayID activeDisplays[32];
+         uint32_t displayCount;
+         CGError err = CGGetActiveDisplayList(32, activeDisplays, &displayCount);
+
+         if (err != kCGErrorSuccess) {
+             NSLog(@"Failed to get active display list: %d", err);
+             return env.Null();
+         }
+
+         bool displayFound = false;
+         for (uint32_t i = 0; i < displayCount; i++) {
+             if (activeDisplays[i] == displayID) {
+                 displayFound = true;
+                 break;
              }
-
-             windowObj.Set("appName", Napi::String::New(env, [appName UTF8String]));
-             windowObj.Set("ownerName", Napi::String::New(env, [appName UTF8String]));
-             windowObj.Set("bundleId", Napi::String::New(env, [bundleId UTF8String]));
-
-             // Add frame details
-             CGRect frame = window.frame;
-             windowObj.Set("x", Napi::Number::New(env, (int)frame.origin.x));
-             windowObj.Set("y", Napi::Number::New(env, (int)frame.origin.y));
-             windowObj.Set("width", Napi::Number::New(env, (int)frame.size.width));
-             windowObj.Set("height", Napi::Number::New(env, (int)frame.size.height));
-
-             // Legacy bounds object for compatibility
-             Napi::Object boundsObj = Napi::Object::New(env);
-             boundsObj.Set("x", Napi::Number::New(env, (int)frame.origin.x));
-             boundsObj.Set("y", Napi::Number::New(env, (int)frame.origin.y));
-             boundsObj.Set("width", Napi::Number::New(env, (int)frame.size.width));
-             boundsObj.Set("height", Napi::Number::New(env, (int)frame.size.height));
-             windowObj.Set("bounds", boundsObj);
-
-             windowsArray.Set(index++, windowObj);
          }
+
+         if (!displayFound) {
+             NSLog(@"Display ID %u not found in active displays", displayID);
+             return env.Null();
+         }
+
+         // Create display image
+         CGImageRef displayImage = CGDisplayCreateImage(displayID);
+
+         if (!displayImage) {
+             NSLog(@"CGDisplayCreateImage failed for display ID: %u", displayID);
+             return env.Null();
+         }
+
+         // Get original dimensions
+         size_t originalWidth = CGImageGetWidth(displayImage);
+         size_t originalHeight = CGImageGetHeight(displayImage);
+
+         NSLog(@"Original dimensions: %zux%zu", originalWidth, originalHeight);
+
+         // Calculate scaled dimensions maintaining aspect ratio
+         double scaleX = (double)maxWidth / originalWidth;
+         double scaleY = (double)maxHeight / originalHeight;
+         double scale = std::min(scaleX, scaleY);
+
+         size_t thumbnailWidth = (size_t)(originalWidth * scale);
+         size_t thumbnailHeight = (size_t)(originalHeight * scale);
+
+         NSLog(@"Thumbnail dimensions: %zux%zu (scale: %f)", thumbnailWidth, thumbnailHeight, scale);
+
+         // Create scaled image
+         CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
+         CGContextRef context = CGBitmapContextCreate(
+             NULL,
+             thumbnailWidth,
+             thumbnailHeight,
+             8,
+             thumbnailWidth * 4,
+             colorSpace,
+             kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big
+         );
+
+         if (!context) {
+             NSLog(@"Failed to create bitmap context");
+             CGImageRelease(displayImage);
+             CGColorSpaceRelease(colorSpace);
+             return env.Null();
+         }
+
+         // Set interpolation quality for better scaling
+         CGContextSetInterpolationQuality(context, kCGInterpolationHigh);
+
+         // Draw the image
+         CGContextDrawImage(context, CGRectMake(0, 0, thumbnailWidth, thumbnailHeight), displayImage);
+         CGImageRef thumbnailImage = CGBitmapContextCreateImage(context);
+
+         if (!thumbnailImage) {
+             NSLog(@"Failed to create thumbnail image");
+             CGContextRelease(context);
+             CGImageRelease(displayImage);
+             CGColorSpaceRelease(colorSpace);
+             return env.Null();
+         }
+
+         // Convert to PNG data
+         NSBitmapImageRep *imageRep = [[NSBitmapImageRep alloc] initWithCGImage:thumbnailImage];
+         NSDictionary *properties = @{NSImageCompressionFactor: @0.8};
+         NSData *pngData = [imageRep representationUsingType:NSBitmapImageFileTypePNG properties:properties];
+
+         if (!pngData) {
+             NSLog(@"Failed to convert image to PNG data");
+             CGImageRelease(thumbnailImage);
+             CGContextRelease(context);
+             CGImageRelease(displayImage);
+             CGColorSpaceRelease(colorSpace);
+             return env.Null();
+         }
+
+         // Convert to Base64
+         NSString *base64String = [pngData base64EncodedStringWithOptions:0];
+         std::string base64Std = [base64String UTF8String];
+
+         NSLog(@"Successfully created thumbnail with base64 length: %lu", (unsigned long)base64Std.length());
+
+         // Cleanup
+         CGImageRelease(thumbnailImage);
+         CGContextRelease(context);
+         CGColorSpaceRelease(colorSpace);
+         CGImageRelease(displayImage);
+
+         return Napi::String::New(env, base64Std);
+
+     } @catch (NSException *exception) {
+         NSLog(@"Exception in GetDisplayThumbnail: %@", exception);
+         return env.Null();
      }
-
-     return windowsArray;
  }
 
  // NAPI Function: Check Permissions
  Napi::Value CheckPermissions(const Napi::CallbackInfo& info) {
      Napi::Env env = info.Env();
 
-     // Check screen recording permission
-     bool hasPermission = CGPreflightScreenCaptureAccess();
-
-     // If we don't have permission, try to request it
-     if (!hasPermission) {
-         NSLog(@"⚠️ Screen recording permission not granted, requesting access");
-         bool requestResult = CGRequestScreenCaptureAccess();
-         NSLog(@"📋 Permission request result: %@", requestResult ? @"SUCCESS" : @"FAILED");
-
-         // Check again after request
-         hasPermission = CGPreflightScreenCaptureAccess();
-     }
-
-     return Napi::Boolean::New(env, hasPermission);
- }
-
- // NAPI Function: Get Audio Devices
- Napi::Value GetAudioDevices(const Napi::CallbackInfo& info) {
-     Napi::Env env = info.Env();
-
-     Napi::Array devices = Napi::Array::New(env);
-     uint32_t index = 0;
-
-     AudioObjectPropertyAddress propertyAddress = {
-         kAudioHardwarePropertyDevices,
-         kAudioObjectPropertyScopeGlobal,
-         kAudioObjectPropertyElementMain
-     };
-
-     UInt32 dataSize = 0;
-     OSStatus status = AudioObjectGetPropertyDataSize(kAudioObjectSystemObject, &propertyAddress, 0, NULL, &dataSize);
-
-     if (status != noErr) {
-         return devices;
-     }
-
-     UInt32 deviceCount = dataSize / sizeof(AudioDeviceID);
-     AudioDeviceID *audioDevices = (AudioDeviceID *)malloc(dataSize);
-
-     status = AudioObjectGetPropertyData(kAudioObjectSystemObject, &propertyAddress, 0, NULL, &dataSize, audioDevices);
-
-     if (status == noErr) {
-         for (UInt32 i = 0; i < deviceCount; ++i) {
-             AudioDeviceID deviceID = audioDevices[i];
-
-             // Get device name
-             CFStringRef deviceName = NULL;
-             UInt32 size = sizeof(deviceName);
-             AudioObjectPropertyAddress nameAddress = {
-                 kAudioDevicePropertyDeviceNameCFString,
-                 kAudioDevicePropertyScopeInput,
-                 kAudioObjectPropertyElementMain
-             };
-
-             status = AudioObjectGetPropertyData(deviceID, &nameAddress, 0, NULL, &size, &deviceName);
-
-             if (status == noErr && deviceName) {
-                 Napi::Object deviceObj = Napi::Object::New(env);
-                 deviceObj.Set("id", Napi::String::New(env, std::to_string(deviceID)));
-
-                 const char *name = CFStringGetCStringPtr(deviceName, kCFStringEncodingUTF8);
-                 if (name) {
-                     deviceObj.Set("name", Napi::String::New(env, name));
-                 } else {
-                     deviceObj.Set("name", Napi::String::New(env, "Unknown Device"));
+     @try {
+         // Check screen recording permission
+         bool hasScreenPermission = true;
+
+         if (@available(macOS 10.15, *)) {
+             // Try to create a display stream to test permissions
+             CGDisplayStreamRef stream = CGDisplayStreamCreate(
+                 CGMainDisplayID(),
+                 1, 1,
+                 kCVPixelFormatType_32BGRA,
+                 nil,
+                 ^(CGDisplayStreamFrameStatus status, uint64_t displayTime, IOSurfaceRef frameSurface, CGDisplayStreamUpdateRef updateRef) {
+                     // Empty handler
                  }
-
-                 devices.Set(index++, deviceObj);
-                 CFRelease(deviceName);
+             );
+
+             if (stream) {
+                 CFRelease(stream);
+                 hasScreenPermission = true;
+             } else {
+                 hasScreenPermission = false;
              }
          }
+
+         // Check audio permission
+         bool hasAudioPermission = true;
+         if (@available(macOS 10.14, *)) {
+             AVAuthorizationStatus audioStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeAudio];
+             hasAudioPermission = (audioStatus == AVAuthorizationStatusAuthorized);
+         }
+
+         return Napi::Boolean::New(env, hasScreenPermission && hasAudioPermission);
+
+     } @catch (NSException *exception) {
+         return Napi::Boolean::New(env, false);
      }
-
-     free(audioDevices);
-     return devices;
  }
 
- // Initialize the addon
+ // Initialize NAPI Module
  Napi::Object Init(Napi::Env env, Napi::Object exports) {
-     exports.Set("startRecording", Napi::Function::New(env, StartRecording));
-     exports.Set("stopRecording", Napi::Function::New(env, StopRecording));
-     exports.Set("isRecording", Napi::Function::New(env, IsRecording));
-     exports.Set("getRecordingStatus", Napi::Function::New(env, GetRecordingStatus));
-     exports.Set("getDisplays", Napi::Function::New(env, GetDisplays));
-     exports.Set("getWindows", Napi::Function::New(env, GetWindows));
-     exports.Set("checkPermissions", Napi::Function::New(env, CheckPermissions));
-     exports.Set("getAudioDevices", Napi::Function::New(env, GetAudioDevices));
+     exports.Set(Napi::String::New(env, "startRecording"), Napi::Function::New(env, StartRecording));
+     exports.Set(Napi::String::New(env, "stopRecording"), Napi::Function::New(env, StopRecording));
+
+     exports.Set(Napi::String::New(env, "getAudioDevices"), Napi::Function::New(env, GetAudioDevices));
+     exports.Set(Napi::String::New(env, "getDisplays"), Napi::Function::New(env, GetDisplays));
+     exports.Set(Napi::String::New(env, "getWindows"), Napi::Function::New(env, GetWindows));
+     exports.Set(Napi::String::New(env, "getRecordingStatus"), Napi::Function::New(env, GetRecordingStatus));
+     exports.Set(Napi::String::New(env, "checkPermissions"), Napi::Function::New(env, CheckPermissions));
+
+     // Thumbnail functions
+     exports.Set(Napi::String::New(env, "getWindowThumbnail"), Napi::Function::New(env, GetWindowThumbnail));
+     exports.Set(Napi::String::New(env, "getDisplayThumbnail"), Napi::Function::New(env, GetDisplayThumbnail));
 
      // Initialize cursor tracker
      InitCursorTracker(env, exports);
@@ -906,4 +826,4 @@ Napi::Object Init(Napi::Env env, Napi::Object exports) {
      return exports;
  }
 
- NODE_API_MODULE(mac_recorder, Init)
+ NODE_API_MODULE(mac_recorder, Init)
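For orientation after the full diff, a minimal usage sketch of the rewritten recorder as driven from JavaScript. This is an illustration under assumptions, not the package's documented API: the require path is hypothetical, and the option keys simply mirror the parsing in the new StartRecording above. Note that with AVCaptureScreenInput, system audio is only captured when a loopback device is installed; the native code matches device names against "soundflower", "blackhole", "loopback", "system audio", and "aggregate".

    // Minimal sketch (assumed require path). The synchronous boolean returns
    // correspond to the Napi::Boolean values in the diff above.
    const native = require("./build/Release/mac_recorder.node");

    if (!native.checkPermissions()) {
        throw new Error("screen recording and/or microphone permission missing");
    }

    // Option keys mirror those parsed by the new StartRecording():
    const started: boolean = native.startRecording("/tmp/capture.mov", {
        captureCursor: true,       // cursor is hidden by default
        includeMicrophone: false,  // microphone is off by default
        includeSystemAudio: true,  // effective only with a loopback device (e.g. BlackHole)
        displayId: native.getDisplays()[0].id, // a real CGDirectDisplayID, not an index
        captureArea: { x: 0, y: 0, width: 1280, height: 720 },
    });
    if (!started) throw new Error("startRecording returned false");

    // Stop after five seconds; the MacRecorderDelegate finalizes the movie file
    // in captureOutput:didFinishRecordingToOutputFileAtURL:.
    setTimeout(() => {
        if (native.getRecordingStatus()) native.stopRecording();
    }, 5000);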