node-mac-recorder 2.15.2 → 2.15.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/settings.local.json +2 -1
- package/binding.gyp +2 -5
- package/package.json +1 -1
- package/src/audio_capture.mm +14 -89
- package/src/mac_recorder.mm +32 -35
- package/src/screen_capture.h +0 -19
- package/src/screen_capture.mm +0 -162
package/.claude/settings.local.json
CHANGED
@@ -33,7 +33,8 @@
       "Bash(ELECTRON_RUN_AS_NODE=1 node -e \"\nconsole.log(''🚀 Testing with proper permissions and Electron env'');\nconst MacRecorder = require(''./index'');\nconst recorder = new MacRecorder();\n\nasync function test() {\n try {\n const outputPath = ''./test-output/proper-test.mov'';\n console.log(''📹 Starting recording...'');\n const success = await recorder.startRecording(outputPath, {\n captureCursor: true,\n includeMicrophone: false,\n includeSystemAudio: false\n });\n \n if (success) {\n console.log(''✅ Recording started - waiting 2 seconds'');\n await new Promise(resolve => setTimeout(resolve, 2000));\n console.log(''🛑 Stopping recording...'');\n await recorder.stopRecording();\n console.log(''✅ Test completed'');\n } else {\n console.log(''❌ Recording start failed'');\n }\n } catch (error) {\n console.log(''❌ Error:'', error.message);\n }\n}\n\ntest();\n\")",
       "Bash(ELECTRON_RUN_AS_NODE=1 node -e \"\nconsole.log(''''🔍 Debugging frame writing...'''');\nconst MacRecorder = require(''''./index'''');\nconst recorder = new MacRecorder();\n\nasync function debugFrameWriting() {\n try {\n const outputPath = ''''./test-output/frame-debug.mov'''';\n console.log(''''📹 Starting debug test...'''');\n \n const success = await recorder.startRecording(outputPath);\n \n if (success) {\n console.log(''''⏱️ Recording for 2 seconds...'''');\n await new Promise(resolve => setTimeout(resolve, 2000));\n \n console.log(''''🛑 Stopping...'''');\n await recorder.stopRecording();\n \n // Wait for finalization\n await new Promise(resolve => setTimeout(resolve, 1000));\n \n } else {\n console.log(''''❌ Failed to start'''');\n }\n } catch (error) {\n console.log(''''❌ Error:'''', error);\n }\n}\n\ndebugFrameWriting();\n\")",
       "Bash(ELECTRON_RUN_AS_NODE=1 node -e \"\nconsole.log(''🚀 Testing with proper permissions and Electron env'');\nconst MacRecorder = require(''./index'');\nconst recorder = new MacRecorder();\n\nasync function test() {\n try {\n const outputPath = ''./test-output/crash-test.mov'';\n console.log(''📹 Starting recording...'');\n const success = await recorder.startRecording(outputPath, {\n captureCursor: true,\n includeMicrophone: false,\n includeSystemAudio: false\n });\n \n if (success) {\n console.log(''✅ Recording started - waiting 3 seconds'');\n await new Promise(resolve => setTimeout(resolve, 3000));\n console.log(''🛑 Stopping recording...'');\n await recorder.stopRecording();\n console.log(''✅ Test completed without crash'');\n } else {\n console.log(''❌ Recording start failed'');\n }\n } catch (error) {\n console.log(''❌ Error:'', error.message);\n console.log(''Stack:'', error.stack);\n }\n}\n\ntest();\n\")",
-      "Bash(ELECTRON_RUN_AS_NODE=1 node -e \"\nconsole.log(''🔍 Debugging frame writing...'');\nconst MacRecorder = require(''./index'');\nconst recorder = new MacRecorder();\n\nasync function debugFrameWriting() {\n try {\n const outputPath = ''./test-output/frame-debug.mov'';\n console.log(''📹 Starting debug test...'');\n \n const success = await recorder.startRecording(outputPath);\n \n if (success) {\n console.log(''⏱️ Recording for 2 seconds...'');\n await new Promise(resolve => setTimeout(resolve, 2000));\n \n console.log(''🛑 Stopping...'');\n await recorder.stopRecording();\n \n // Wait for finalization\n await new Promise(resolve => setTimeout(resolve, 1000));\n \n } else {\n console.log(''❌ Failed to start'');\n }\n } catch (error) {\n console.log(''❌ Error:'', error);\n }\n}\n\ndebugFrameWriting();\n\")"
+      "Bash(ELECTRON_RUN_AS_NODE=1 node -e \"\nconsole.log(''🔍 Debugging frame writing...'');\nconst MacRecorder = require(''./index'');\nconst recorder = new MacRecorder();\n\nasync function debugFrameWriting() {\n try {\n const outputPath = ''./test-output/frame-debug.mov'';\n console.log(''📹 Starting debug test...'');\n \n const success = await recorder.startRecording(outputPath);\n \n if (success) {\n console.log(''⏱️ Recording for 2 seconds...'');\n await new Promise(resolve => setTimeout(resolve, 2000));\n \n console.log(''🛑 Stopping...'');\n await recorder.stopRecording();\n \n // Wait for finalization\n await new Promise(resolve => setTimeout(resolve, 1000));\n \n } else {\n console.log(''❌ Failed to start'');\n }\n } catch (error) {\n console.log(''❌ Error:'', error);\n }\n}\n\ndebugFrameWriting();\n\")",
+      "Bash(ELECTRON_RUN_AS_NODE=1 node -e \"\nconsole.log(''🧪 Testing without AVFoundation - Pure ScreenCaptureKit'');\nconst MacRecorder = require(''./index'');\nconst recorder = new MacRecorder();\n\nasync function testPureScreenCaptureKit() {\n try {\n const outputPath = ''./test-output/pure-sck.mov'';\n console.log(''📹 Starting pure ScreenCaptureKit test...'');\n \n const success = await recorder.startRecording(outputPath, {\n captureCursor: true,\n includeMicrophone: false,\n includeSystemAudio: false\n });\n \n if (success) {\n console.log(''✅ Recording started - waiting 2 seconds'');\n await new Promise(resolve => setTimeout(resolve, 2000));\n \n console.log(''🛑 Stopping recording...'');\n await recorder.stopRecording();\n console.log(''✅ Pure ScreenCaptureKit test completed successfully!'');\n } else {\n console.log(''❌ Recording failed to start'');\n }\n } catch (error) {\n console.log(''❌ Error:'', error.message);\n console.log(''Stack:'', error.stack);\n }\n}\n\ntestPureScreenCaptureKit();\n\")"
     ],
     "deny": []
   }
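For readability, the newly allowed pure-ScreenCaptureKit check is shown below unescaped as a standalone Node script, rather than as the quoted one-liner stored in the allow list above. The standalone file name is hypothetical; run it from the package root with ELECTRON_RUN_AS_NODE=1, as in the allow-list entry.

// Equivalent of the new "pure ScreenCaptureKit" allow-list entry above,
// unescaped into a standalone script (a file name such as test/pure-sck.js is hypothetical).
const MacRecorder = require('./index');
const recorder = new MacRecorder();

async function testPureScreenCaptureKit() {
  try {
    const outputPath = './test-output/pure-sck.mov';
    console.log('📹 Starting pure ScreenCaptureKit test...');

    const success = await recorder.startRecording(outputPath, {
      captureCursor: true,
      includeMicrophone: false,
      includeSystemAudio: false,
    });

    if (success) {
      console.log('✅ Recording started - waiting 2 seconds');
      await new Promise((resolve) => setTimeout(resolve, 2000));

      console.log('🛑 Stopping recording...');
      await recorder.stopRecording();
      console.log('✅ Pure ScreenCaptureKit test completed successfully!');
    } else {
      console.log('❌ Recording failed to start');
    }
  } catch (error) {
    console.log('❌ Error:', error.message);
    console.log('Stack:', error.stack);
  }
}

testPureScreenCaptureKit();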
package/binding.gyp
CHANGED
@@ -4,7 +4,6 @@
       "target_name": "mac_recorder",
       "sources": [
         "src/mac_recorder.mm",
-        "src/screen_capture.mm",
         "src/screen_capture_kit.mm",
         "src/audio_capture.mm",
         "src/cursor_tracker.mm",
@@ -28,15 +27,13 @@
       },
       "link_settings": {
        "libraries": [
-          "-framework AVFoundation",
-          "-framework CoreMedia",
-          "-framework CoreVideo",
           "-framework Foundation",
           "-framework AppKit",
           "-framework ScreenCaptureKit",
           "-framework ApplicationServices",
           "-framework Carbon",
-          "-framework Accessibility"
+          "-framework Accessibility",
+          "-framework CoreAudio"
        ]
      },
      "defines": [ "NAPI_DISABLE_CPP_EXCEPTIONS" ]
package/package.json
CHANGED
package/src/audio_capture.mm
CHANGED
@@ -1,5 +1,4 @@
-#import <AVFoundation/AVFoundation.h>
-#import <CoreAudio/CoreAudio.h>
+#import <Foundation/Foundation.h>
 
 @interface AudioCapture : NSObject
 
@@ -14,103 +13,29 @@
 + (NSArray *)getAudioDevices {
     NSMutableArray *devices = [NSMutableArray array];
 
-    //
-
-
-
-
-
-            @"name": device.localizedName,
-            @"manufacturer": device.manufacturer ?: @"Unknown",
-            @"isDefault": @([device isEqual:[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio]])
-        };
-
-        [devices addObject:deviceInfo];
-    }
-
-    // Also get system audio devices using Core Audio
-    AudioObjectPropertyAddress propertyAddress = {
-        kAudioHardwarePropertyDevices,
-        kAudioObjectPropertyScopeGlobal,
-        kAudioObjectPropertyElementMaster
+    // ScreenCaptureKit handles audio internally - return default device
+    NSDictionary *deviceInfo = @{
+        @"id": @"default",
+        @"name": @"Default Audio Device",
+        @"manufacturer": @"System",
+        @"isDefault": @YES
     };
 
-
-    OSStatus status = AudioObjectGetPropertyDataSize(kAudioObjectSystemObject, &propertyAddress, 0, NULL, &dataSize);
+    [devices addObject:deviceInfo];
 
-
-    UInt32 deviceCount = dataSize / sizeof(AudioDeviceID);
-    AudioDeviceID *audioDeviceIDs = (AudioDeviceID *)malloc(dataSize);
-
-    status = AudioObjectGetPropertyData(kAudioObjectSystemObject, &propertyAddress, 0, NULL, &dataSize, audioDeviceIDs);
-
-    if (status == kAudioHardwareNoError) {
-        for (UInt32 i = 0; i < deviceCount; i++) {
-            AudioDeviceID deviceID = audioDeviceIDs[i];
-
-            // Get device name
-            propertyAddress.mSelector = kAudioDevicePropertyDeviceNameCFString;
-            propertyAddress.mScope = kAudioObjectPropertyScopeGlobal;
-
-            CFStringRef deviceName = NULL;
-            dataSize = sizeof(deviceName);
-
-            status = AudioObjectGetPropertyData(deviceID, &propertyAddress, 0, NULL, &dataSize, &deviceName);
-
-            if (status == kAudioHardwareNoError && deviceName) {
-                // Check if it's an input device
-                propertyAddress.mSelector = kAudioDevicePropertyStreamConfiguration;
-                propertyAddress.mScope = kAudioDevicePropertyScopeInput;
-
-                AudioObjectGetPropertyDataSize(deviceID, &propertyAddress, 0, NULL, &dataSize);
-
-                if (dataSize > 0) {
-                    AudioBufferList *bufferList = (AudioBufferList *)malloc(dataSize);
-                    AudioObjectGetPropertyData(deviceID, &propertyAddress, 0, NULL, &dataSize, bufferList);
-
-                    if (bufferList->mNumberBuffers > 0) {
-                        NSDictionary *deviceInfo = @{
-                            @"id": @(deviceID),
-                            @"name": (__bridge NSString *)deviceName,
-                            @"type": @"System Audio Input",
-                            @"isSystemDevice": @YES
-                        };
-
-                        [devices addObject:deviceInfo];
-                    }
-
-                    free(bufferList);
-                }
-
-                CFRelease(deviceName);
-            }
-            }
-        }
-
-        free(audioDeviceIDs);
-    }
-
-    return [devices copy];
+    return devices;
 }
 
 + (BOOL)hasAudioPermission {
-
-
-        return status == AVAuthorizationStatusAuthorized;
-    }
-    return YES; // Older versions don't require explicit permission
+    // ScreenCaptureKit handles audio permissions internally
+    return YES;
 }
 
 + (void)requestAudioPermission:(void(^)(BOOL granted))completion {
-
-
-            dispatch_async(dispatch_get_main_queue(), ^{
-                completion(granted);
-            });
-        }];
-    } else {
+    // ScreenCaptureKit handles audio permissions internally
+    if (completion) {
         completion(YES);
     }
 }
 
-@end
+@end
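With the AVFoundation and Core Audio enumeration removed, the native getAudioDevices stub now reports only a single default entry and audio permissions are treated as granted. A minimal consumer-side sketch follows, assuming index.js exposes the native binding as an async getAudioDevices() method (the wrapper name and signature are not confirmed by this diff); the recording option names are taken from the test scripts in the settings diff above.

// Sketch only: adapting a consumer to 2.15.3, where getAudioDevices returns a
// single "default" placeholder and audio routing is handled by ScreenCaptureKit.
// The getAudioDevices() wrapper name and its async signature are assumptions.
const MacRecorder = require('node-mac-recorder');

async function recordWithSystemAudio(outputPath) {
  const recorder = new MacRecorder();

  // In 2.15.3 this is expected to be [{ id: 'default', name: 'Default Audio Device', ... }].
  if (typeof recorder.getAudioDevices === 'function') {
    const devices = await recorder.getAudioDevices();
    console.log('Audio devices reported by the addon:', devices);
  }

  // Option names come from the test scripts in .claude/settings.local.json.
  const started = await recorder.startRecording(outputPath, {
    captureCursor: true,
    includeMicrophone: false,
    includeSystemAudio: true, // audio is captured by ScreenCaptureKit internally
  });
  if (!started) throw new Error('Recording failed to start');

  await new Promise((resolve) => setTimeout(resolve, 2000));
  await recorder.stopRecording();
}

recordWithSystemAudio('./test-output/sck-audio.mov').catch(console.error);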
package/src/mac_recorder.mm
CHANGED
@@ -1,14 +1,11 @@
 #import <napi.h>
-#import <AVFoundation/AVFoundation.h>
-#import <CoreMedia/CoreMedia.h>
 #import <AppKit/AppKit.h>
 #import <Foundation/Foundation.h>
 #import <CoreGraphics/CoreGraphics.h>
 #import <ImageIO/ImageIO.h>
 #import <CoreAudio/CoreAudio.h>
 
-// Import screen capture
-#import "screen_capture.h"
+// Import screen capture (ScreenCaptureKit only)
 #import "screen_capture_kit.h"
 
 // Cursor tracker function declarations
@@ -21,26 +18,13 @@ Napi::Object InitWindowSelector(Napi::Env env, Napi::Object exports);
 extern "C" void hideOverlays();
 extern "C" void showOverlays();
 
-@interface MacRecorderDelegate : NSObject <AVCaptureFileOutputRecordingDelegate>
-@property (nonatomic, copy) void (^completionHandler)(NSURL *outputURL, NSError *error);
+@interface MacRecorderDelegate : NSObject
 @end
 
 @implementation MacRecorderDelegate
-- (void)captureOutput:(AVCaptureFileOutput *)output
-didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
-      fromConnections:(NSArray<AVCaptureConnection *> *)connections
-                error:(NSError *)error {
-    if (self.completionHandler) {
-        self.completionHandler(outputFileURL, error);
-    }
-}
 @end
 
-// Global state for recording
-static AVCaptureSession *g_captureSession = nil;
-static AVCaptureMovieFileOutput *g_movieFileOutput = nil;
-static AVCaptureScreenInput *g_screenInput = nil;
-static AVCaptureDeviceInput *g_audioInput = nil;
+// Global state for recording (ScreenCaptureKit only)
 static MacRecorderDelegate *g_delegate = nil;
 static bool g_isRecording = false;
 
@@ -244,11 +228,11 @@ Napi::Value StartRecording(const Napi::CallbackInfo& info) {
                 NSLog(@"❌ Exception during ScreenCaptureKit startup: %@", sckException.reason);
             }
 
-            NSLog(@"
+            NSLog(@"❌ ScreenCaptureKit failed or unsafe");
 
         } else {
             NSLog(@"❌ ScreenCaptureKit availability check failed");
-            NSLog(@"
+            NSLog(@"❌ ScreenCaptureKit not available");
         }
     } @catch (NSException *availabilityException) {
         NSLog(@"❌ Exception during ScreenCaptureKit availability check: %@", availabilityException.reason);
@@ -275,7 +259,7 @@ Napi::Value StopRecording(const Napi::CallbackInfo& info) {
 
     NSLog(@"🛑 StopRecording native method called");
 
-    // ScreenCaptureKit ONLY
+    // ScreenCaptureKit ONLY
     if (@available(macOS 12.3, *)) {
         if ([ScreenCaptureKitRecorder isRecording]) {
             NSLog(@"🛑 Stopping ScreenCaptureKit recording");
@@ -402,16 +386,15 @@ Napi::Value GetAudioDevices(const Napi::CallbackInfo& info) {
     NSMutableArray *devices = [NSMutableArray array];
 
     // Get all audio devices
-
-
-
-
-
-
-
-
-
-    }
+    // Audio device enumeration removed - ScreenCaptureKit handles audio internally
+    NSLog(@"🎵 Audio device enumeration disabled - using ScreenCaptureKit internal audio");
+
+    // Add default system audio entry
+    [devices addObject:@{
+        @"id": @"default",
+        @"name": @"Default Audio Device",
+        @"isDefault": @YES
+    }];
 
     // Convert to NAPI array
     Napi::Array result = Napi::Array::New(env, devices.count);
@@ -437,7 +420,20 @@ Napi::Value GetDisplays(const Napi::CallbackInfo& info) {
     Napi::Env env = info.Env();
 
     @try {
-
+        // Get displays using NSScreen instead of ScreenCapture
+        NSArray *screens = [NSScreen screens];
+        NSMutableArray *displays = [NSMutableArray array];
+
+        for (NSUInteger i = 0; i < [screens count]; i++) {
+            NSScreen *screen = [screens objectAtIndex:i];
+            NSDictionary *displayInfo = @{
+                @"id": @(i + 1),
+                @"name": [NSString stringWithFormat:@"Display %lu", (unsigned long)(i + 1)],
+                @"width": @((int)screen.frame.size.width),
+                @"height": @((int)screen.frame.size.height)
+            };
+            [displays addObject:displayInfo];
+        }
         Napi::Array result = Napi::Array::New(env, displays.count);
 
         NSLog(@"Found %lu displays", (unsigned long)displays.count);
@@ -742,8 +738,9 @@ Napi::Value CheckPermissions(const Napi::CallbackInfo& info) {
     // Check audio permission
     bool hasAudioPermission = true;
     if (@available(macOS 10.14, *)) {
-
-
+        // Audio permissions handled by ScreenCaptureKit internally
+        BOOL audioAuthorized = YES; // Assume authorized since SCK handles it
+        hasAudioPermission = audioAuthorized;
     }
 
     return Napi::Boolean::New(env, hasScreenPermission && hasAudioPermission);
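GetDisplays now enumerates [NSScreen screens] and reports sequential ids (1, 2, ...) with width and height only, instead of the CGDirectDisplayID-based entries that the deleted ScreenCapture helper produced. A hedged sketch of how a caller might adapt follows, assuming index.js exposes this as an async getDisplays() method returning the same keys (the wrapper name and signature are assumptions):

// Sketch only: in 2.15.3 the display list carries sequential ids rather than
// CGDirectDisplayID values, so cached ids from 2.15.2 should not be reused.
// The getDisplays() wrapper name and async signature are assumptions.
const MacRecorder = require('node-mac-recorder');

async function pickDisplay(preferredWidth, preferredHeight) {
  const recorder = new MacRecorder();
  const displays = await recorder.getDisplays();

  // Re-resolve the target display by geometry instead of by a stored id.
  const match = displays.find(
    (d) => d.width === preferredWidth && d.height === preferredHeight
  );
  return match || displays[0]; // fall back to the first (id: 1) display
}

pickDisplay(1920, 1080).then((d) => {
  console.log(`Using display ${d.id}: ${d.width}x${d.height}`);
});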
package/src/screen_capture.h
DELETED
@@ -1,19 +0,0 @@
-#ifndef SCREEN_CAPTURE_H
-#define SCREEN_CAPTURE_H
-
-#import <Foundation/Foundation.h>
-#import <CoreGraphics/CoreGraphics.h>
-
-@interface ScreenCapture : NSObject
-
-+ (NSArray *)getAvailableDisplays;
-+ (BOOL)captureDisplay:(CGDirectDisplayID)displayID
-                toFile:(NSString *)filePath
-                  rect:(CGRect)rect
-         includeCursor:(BOOL)includeCursor;
-+ (CGImageRef)createScreenshotFromDisplay:(CGDirectDisplayID)displayID
-                                     rect:(CGRect)rect;
-
-@end
-
-#endif // SCREEN_CAPTURE_H
package/src/screen_capture.mm
DELETED
@@ -1,162 +0,0 @@
-#import <AVFoundation/AVFoundation.h>
-#import <CoreGraphics/CoreGraphics.h>
-#import <AppKit/AppKit.h>
-
-@interface ScreenCapture : NSObject
-
-+ (NSArray *)getAvailableDisplays;
-+ (BOOL)captureDisplay:(CGDirectDisplayID)displayID
-                toFile:(NSString *)filePath
-                  rect:(CGRect)rect
-         includeCursor:(BOOL)includeCursor;
-+ (CGImageRef)createScreenshotFromDisplay:(CGDirectDisplayID)displayID
-                                     rect:(CGRect)rect;
-
-@end
-
-@implementation ScreenCapture
-
-+ (NSArray *)getAvailableDisplays {
-    NSMutableArray *displays = [NSMutableArray array];
-
-    uint32_t displayCount;
-    CGGetActiveDisplayList(0, NULL, &displayCount);
-
-    CGDirectDisplayID *displayList = (CGDirectDisplayID *)malloc(displayCount * sizeof(CGDirectDisplayID));
-    CGGetActiveDisplayList(displayCount, displayList, &displayCount);
-
-    // Get NSScreen list for consistent coordinate system
-    NSArray<NSScreen *> *screens = [NSScreen screens];
-
-    for (uint32_t i = 0; i < displayCount; i++) {
-        CGDirectDisplayID displayID = displayList[i];
-
-        // Find corresponding NSScreen for this display ID
-        NSScreen *matchingScreen = nil;
-        for (NSScreen *screen in screens) {
-            // Match by display ID (requires screen.deviceDescription lookup)
-            NSDictionary *deviceDescription = [screen deviceDescription];
-            NSNumber *screenDisplayID = [deviceDescription objectForKey:@"NSScreenNumber"];
-            if (screenDisplayID && [screenDisplayID unsignedIntValue] == displayID) {
-                matchingScreen = screen;
-                break;
-            }
-        }
-
-        // Use NSScreen.frame if found, fallback to CGDisplayBounds
-        CGRect bounds;
-        if (matchingScreen) {
-            NSRect screenFrame = [matchingScreen frame];
-            bounds = CGRectMake(screenFrame.origin.x, screenFrame.origin.y, screenFrame.size.width, screenFrame.size.height);
-        } else {
-            bounds = CGDisplayBounds(displayID);
-        }
-
-        // Create display info dictionary
-        NSDictionary *displayInfo = @{
-            @"id": @(displayID),
-            @"name": [NSString stringWithFormat:@"Display %d", i + 1],
-            @"width": @(bounds.size.width),
-            @"height": @(bounds.size.height),
-            @"x": @(bounds.origin.x),
-            @"y": @(bounds.origin.y),
-            @"isPrimary": @(CGDisplayIsMain(displayID))
-        };
-
-        [displays addObject:displayInfo];
-    }
-
-    free(displayList);
-    return [displays copy];
-}
-
-+ (BOOL)captureDisplay:(CGDirectDisplayID)displayID
-                toFile:(NSString *)filePath
-                  rect:(CGRect)rect
-         includeCursor:(BOOL)includeCursor {
-
-    CGImageRef screenshot = [self createScreenshotFromDisplay:displayID rect:rect];
-    if (!screenshot) {
-        return NO;
-    }
-
-    // Create image destination
-    NSURL *fileURL = [NSURL fileURLWithPath:filePath];
-    CGImageDestinationRef destination = CGImageDestinationCreateWithURL(
-        (__bridge CFURLRef)fileURL,
-        kUTTypePNG,
-        1,
-        NULL
-    );
-
-    if (!destination) {
-        CGImageRelease(screenshot);
-        return NO;
-    }
-
-    // Add cursor if requested
-    if (includeCursor) {
-        // Get cursor position
-        CGPoint cursorPos = CGEventGetLocation(CGEventCreate(NULL));
-
-        // Create mutable image context
-        size_t width = CGImageGetWidth(screenshot);
-        size_t height = CGImageGetHeight(screenshot);
-
-        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
-        CGContextRef context = CGBitmapContextCreate(
-            NULL, width, height, 8, width * 4,
-            colorSpace, kCGImageAlphaPremultipliedFirst
-        );
-
-        if (context) {
-            // Draw original screenshot
-            CGContextDrawImage(context, CGRectMake(0, 0, width, height), screenshot);
-
-            // Draw cursor (simplified - just a small circle)
-            CGRect displayBounds = CGDisplayBounds(displayID);
-            CGFloat relativeX = cursorPos.x - displayBounds.origin.x;
-            CGFloat relativeY = height - (cursorPos.y - displayBounds.origin.y);
-
-            if (!CGRectIsNull(rect)) {
-                relativeX -= rect.origin.x;
-                relativeY -= rect.origin.y;
-            }
-
-            if (relativeX >= 0 && relativeX < width && relativeY >= 0 && relativeY < height) {
-                CGContextSetRGBFillColor(context, 1.0, 0.0, 0.0, 0.8); // Red cursor
-                CGContextFillEllipseInRect(context, CGRectMake(relativeX - 5, relativeY - 5, 10, 10));
-            }
-
-            CGImageRef finalImage = CGBitmapContextCreateImage(context);
-            CGContextRelease(context);
-            CGImageRelease(screenshot);
-            screenshot = finalImage;
-        }
-
-        CGColorSpaceRelease(colorSpace);
-    }
-
-    // Save image
-    CGImageDestinationAddImage(destination, screenshot, NULL);
-    BOOL success = CGImageDestinationFinalize(destination);
-
-    CFRelease(destination);
-    CGImageRelease(screenshot);
-
-    return success;
-}
-
-+ (CGImageRef)createScreenshotFromDisplay:(CGDirectDisplayID)displayID
-                                     rect:(CGRect)rect {
-
-    if (CGRectIsNull(rect)) {
-        // Capture entire display
-        return CGDisplayCreateImage(displayID);
-    } else {
-        // Capture specific rect
-        return CGDisplayCreateImageForRect(displayID, rect);
-    }
-}
-
-@end