node-mac-recorder 2.12.5 → 2.13.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/src/mac_recorder.mm +36 -7
- package/src/screen_capture_kit.h +1 -4
- package/src/screen_capture_kit.mm +220 -380
- package/src/screen_capture_kit_backup.mm +507 -0
- package/src/screen_capture_kit_new.mm +240 -0
- package/src/screen_capture_kit_simple.mm +302 -0
- package/src/window_selector.mm +31 -1
|
@@ -0,0 +1,240 @@
|
|
|
1
|
+
#import "screen_capture_kit.h"
|
|
2
|
+
#import <CoreImage/CoreImage.h>
|
|
3
|
+
|
|
4
|
+
static SCStream *g_stream = nil;
|
|
5
|
+
static id<SCStreamDelegate> g_streamDelegate = nil;
|
|
6
|
+
static id<SCStreamOutput> g_streamOutput = nil;
|
|
7
|
+
static BOOL g_isRecording = NO;
|
|
8
|
+
|
|
9
|
+
// Frame-based approach for working video
|
|
10
|
+
static NSMutableArray<NSImage *> *g_capturedFrames = nil;
|
|
11
|
+
static NSString *g_outputPath = nil;
|
|
12
|
+
static NSInteger g_maxFrames = 150; // 5 seconds at 30fps
|
|
13
|
+
|
|
14
|
+
#pragma mark - Stream delegate

// NOTE: the identifier keeps the original (misspelled) name because other
// code in this file instantiates it by this exact name.
@interface FrameCapturDelegate : NSObject <SCStreamDelegate>
@end

@implementation FrameCapturDelegate

/// Called by ScreenCaptureKit when the stream ends (normally or on error).
/// Marks the recording as finished, then kicks off assembly of the frames
/// buffered so far into the output movie.
- (void)stream:(SCStream *)stream didStopWithError:(NSError *)error {
    g_isRecording = NO;
    NSLog(@"🛑 Stream stopped");

    // Hand the buffered frames over to the encoder.
    [ScreenCaptureKitRecorder createVideoFromFrames];
}

@end
|
|
26
|
+
|
|
27
|
+
#pragma mark - Stream output

@interface FrameCaptureOutput : NSObject <SCStreamOutput>
@end

@implementation FrameCaptureOutput

/// Receives each screen sample from the SCStream, converts it to an NSImage
/// and appends it to g_capturedFrames, capped at g_maxFrames. Runs on the
/// sample-handler queue supplied when the output was attached.
- (void)stream:(SCStream *)stream didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer ofType:(SCStreamOutputType)type {
    if (!g_isRecording || type != SCStreamOutputTypeScreen) {
        return;
    }

    @autoreleasepool {  // drain per-frame temporaries (~30 callbacks/sec)
        CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        if (pixelBuffer && g_capturedFrames.count < g_maxFrames) {

            // FIX: CIContext is expensive to create; the original allocated a
            // new one per frame. Build it once and reuse it for every frame.
            static CIContext *sharedContext = nil;
            static dispatch_once_t onceToken;
            dispatch_once(&onceToken, ^{
                sharedContext = [CIContext context];
            });

            CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];
            CGImageRef cgImage = [sharedContext createCGImage:ciImage fromRect:ciImage.extent];

            if (cgImage) {
                NSImage *image = [[NSImage alloc] initWithCGImage:cgImage size:NSZeroSize];
                [g_capturedFrames addObject:image];

                CGImageRelease(cgImage);

                if (g_capturedFrames.count % 30 == 0) {
                    NSLog(@"📸 Captured %lu frames", (unsigned long)g_capturedFrames.count);
                }
            }
        }
    }
}

@end
|
|
59
|
+
|
|
60
|
+
#pragma mark - Recorder

/// Draws `image` scaled to fill `pixelBuffer` (assumed 32ARGB, as allocated
/// by the writer loop below). Without this step appended buffers contain
/// uninitialized memory and the resulting movie is garbage — the original
/// implementation never drew into the buffers at all.
static void MRDrawImageIntoPixelBuffer(NSImage *image, CVPixelBufferRef pixelBuffer) {
    CGImageRef cgImage = [image CGImageForProposedRect:NULL context:NULL hints:NULL];
    if (!cgImage) {
        return;
    }

    CVPixelBufferLockBaseAddress(pixelBuffer, 0);

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    // ARGB big-endian layout matches kCVPixelFormatType_32ARGB.
    CGContextRef context = CGBitmapContextCreate(CVPixelBufferGetBaseAddress(pixelBuffer),
                                                 CVPixelBufferGetWidth(pixelBuffer),
                                                 CVPixelBufferGetHeight(pixelBuffer),
                                                 8,
                                                 CVPixelBufferGetBytesPerRow(pixelBuffer),
                                                 colorSpace,
                                                 (CGBitmapInfo)kCGImageAlphaNoneSkipFirst | kCGBitmapByteOrder32Big);
    if (context) {
        CGContextDrawImage(context,
                           CGRectMake(0, 0,
                                      CVPixelBufferGetWidth(pixelBuffer),
                                      CVPixelBufferGetHeight(pixelBuffer)),
                           cgImage);
        CGContextRelease(context);
    }
    CGColorSpaceRelease(colorSpace);

    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
}

@implementation ScreenCaptureKitRecorder

/// YES when ScreenCaptureKit (macOS 12.3+) is present and its classes are
/// linked.
+ (BOOL)isScreenCaptureKitAvailable {
    if (@available(macOS 12.3, *)) {
        return [SCShareableContent class] != nil && [SCStream class] != nil;
    }
    return NO;
}

/// Starts an asynchronous display capture that buffers frames in memory
/// (see FrameCaptureOutput).
///
/// @param config   Expects @"outputPath" (NSString, .mov destination).
/// @param delegate Currently unused; kept for interface compatibility.
/// @param error    Currently unused; async failures are only logged.
/// @return NO if a recording is already in progress; YES once setup has been
///         *initiated* — the capture itself starts asynchronously and may
///         still fail (watch the logs).
+ (BOOL)startRecordingWithConfiguration:(NSDictionary *)config delegate:(id)delegate error:(NSError **)error {
    if (g_isRecording) {
        return NO;
    }

    g_outputPath = config[@"outputPath"];
    g_capturedFrames = [NSMutableArray array];

    NSLog(@"🎬 Starting simple frame capture approach");

    [SCShareableContent getShareableContentWithCompletionHandler:^(SCShareableContent *content, NSError *contentError) {
        if (contentError) {
            NSLog(@"❌ Failed to get content: %@", contentError);
            return;
        }

        // Primary display; FIX: bail out instead of building a filter
        // around nil (headless / no-display case).
        SCDisplay *targetDisplay = content.displays.firstObject;
        if (!targetDisplay) {
            NSLog(@"❌ No display available for capture");
            return;
        }

        // Simple content filter - no exclusions for now
        SCContentFilter *filter = [[SCContentFilter alloc] initWithDisplay:targetDisplay excludingWindows:@[]];

        SCStreamConfiguration *streamConfig = [[SCStreamConfiguration alloc] init];
        streamConfig.width = 1280;
        streamConfig.height = 720;
        streamConfig.minimumFrameInterval = CMTimeMake(1, 30); // ~30 fps
        streamConfig.pixelFormat = kCVPixelFormatType_32BGRA;

        // Keep delegate/output in globals so ARC does not release them while
        // the stream is live.
        g_streamDelegate = [[FrameCapturDelegate alloc] init];
        g_streamOutput = [[FrameCaptureOutput alloc] init];

        g_stream = [[SCStream alloc] initWithFilter:filter configuration:streamConfig delegate:g_streamDelegate];

        // FIX: surface attachment failures instead of passing error:nil and
        // ignoring the BOOL result.
        NSError *outputError = nil;
        if (![g_stream addStreamOutput:g_streamOutput
                                  type:SCStreamOutputTypeScreen
                    sampleHandlerQueue:dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0)
                                 error:&outputError]) {
            NSLog(@"❌ Failed to add stream output: %@", outputError);
            return;
        }

        [g_stream startCaptureWithCompletionHandler:^(NSError *startError) {
            if (startError) {
                NSLog(@"❌ Failed to start capture: %@", startError);
            } else {
                NSLog(@"✅ Frame capture started");
                g_isRecording = YES;
            }
        }];
    }];

    return YES;
}

/// Asks the stream to stop. Video assembly is triggered from the delegate's
/// -stream:didStopWithError: callback, not here.
+ (void)stopRecording {
    if (!g_isRecording || !g_stream) {
        return;
    }

    NSLog(@"🛑 Stopping frame capture");

    [g_stream stopCaptureWithCompletionHandler:^(NSError *stopError) {
        if (stopError) {
            NSLog(@"❌ Stop error: %@", stopError);
        }
        // Video creation happens in delegate
    }];
}

/// Whether a capture is currently running.
+ (BOOL)isRecording {
    return g_isRecording;
}

/// Assembles the buffered frames into an H.264 QuickTime movie at
/// g_outputPath, then releases all global capture state.
+ (void)createVideoFromFrames {
    if (g_capturedFrames.count == 0) {
        NSLog(@"❌ No frames captured");
        return;
    }

    NSLog(@"🎬 Creating video from %lu frames", (unsigned long)g_capturedFrames.count);

    // Diagnostic: also dump the first frame as a PNG next to the movie.
    NSImage *firstFrame = g_capturedFrames.firstObject;
    if (firstFrame) {
        NSString *testImagePath = [g_outputPath stringByReplacingOccurrencesOfString:@".mov" withString:@"_test.png"];

        CGImageRef cgImage = [firstFrame CGImageForProposedRect:NULL context:NULL hints:NULL];
        if (cgImage) {
            NSBitmapImageRep *bitmapRep = [[NSBitmapImageRep alloc] initWithCGImage:cgImage];
            NSData *pngData = [bitmapRep representationUsingType:NSBitmapImageFileTypePNG properties:@{}];
            [pngData writeToFile:testImagePath atomically:YES];
            NSLog(@"✅ Test image saved: %@", testImagePath);
        }
    }

    NSURL *outputURL = [NSURL fileURLWithPath:g_outputPath];

    NSError *error = nil;
    AVAssetWriter *assetWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:AVFileTypeQuickTimeMovie error:&error];

    // FIX: check the returned object, not the error out-parameter.
    if (!assetWriter) {
        NSLog(@"❌ Asset writer error: %@", error);
        return;
    }

    NSDictionary *videoSettings = @{
        AVVideoCodecKey: AVVideoCodecTypeH264,
        AVVideoWidthKey: @1280,
        AVVideoHeightKey: @720,
        AVVideoCompressionPropertiesKey: @{
            AVVideoAverageBitRateKey: @(1280 * 720 * 3)
        }
    };

    AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
    writerInput.expectsMediaDataInRealTime = NO;

    if ([assetWriter canAddInput:writerInput]) {
        [assetWriter addInput:writerInput];
    }

    [assetWriter startWriting];
    [assetWriter startSessionAtSourceTime:kCMTimeZero];

    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), ^{
        NSDictionary *pixelBufferAttributes = @{
            (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32ARGB),
            (NSString *)kCVPixelBufferWidthKey: @1280,
            (NSString *)kCVPixelBufferHeightKey: @720
        };

        AVAssetWriterInputPixelBufferAdaptor *adaptor =
            [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                                             sourcePixelBufferAttributes:pixelBufferAttributes];

        // FIX: the original (a) wrote only the first 30 frames, (b) skipped
        // frames whenever the writer wasn't ready, and (c) appended *empty*
        // pixel buffers — producing a blank movie. Write every captured
        // frame, waiting for the writer, and actually render each image.
        for (NSUInteger i = 0; i < g_capturedFrames.count; i++) {
            while (!writerInput.isReadyForMoreMediaData) {
                [NSThread sleepForTimeInterval:0.005];
            }

            CVPixelBufferRef pixelBuffer = NULL;
            CVPixelBufferCreate(kCFAllocatorDefault, 1280, 720, kCVPixelFormatType_32ARGB,
                                (__bridge CFDictionaryRef)pixelBufferAttributes, &pixelBuffer);

            if (pixelBuffer) {
                MRDrawImageIntoPixelBuffer(g_capturedFrames[i], pixelBuffer);
                CMTime frameTime = CMTimeMake((int64_t)i, 30);
                [adaptor appendPixelBuffer:pixelBuffer withPresentationTime:frameTime];
                CVPixelBufferRelease(pixelBuffer);
            }
        }

        [writerInput markAsFinished];
        [assetWriter finishWritingWithCompletionHandler:^{
            if (assetWriter.status == AVAssetWriterStatusCompleted) {
                NSLog(@"✅ Simple video created: %@", g_outputPath);
            } else {
                NSLog(@"❌ Video creation failed: %@", assetWriter.error);
            }

            // Release all capture state.
            g_capturedFrames = nil;
            g_stream = nil;
            g_streamDelegate = nil;
            g_streamOutput = nil;
        }];
    });
}

@end
|
|
@@ -0,0 +1,302 @@
|
|
|
1
|
+
#import <Foundation/Foundation.h>
|
|
2
|
+
#import <ScreenCaptureKit/ScreenCaptureKit.h>
|
|
3
|
+
#import <AVFoundation/AVFoundation.h>
|
|
4
|
+
#import <CoreImage/CoreImage.h>
|
|
5
|
+
|
|
6
|
+
static SCStream *g_stream = nil;
|
|
7
|
+
static id<SCStreamDelegate> g_streamDelegate = nil;
|
|
8
|
+
static id<SCStreamOutput> g_streamOutput = nil;
|
|
9
|
+
static BOOL g_isRecording = NO;
|
|
10
|
+
|
|
11
|
+
// Simple frame capture approach
|
|
12
|
+
static NSMutableArray<NSString *> *g_frameFiles = nil;
|
|
13
|
+
static NSString *g_outputVideoPath = nil;
|
|
14
|
+
static NSInteger g_frameCount = 0;
|
|
15
|
+
|
|
16
|
+
#pragma mark - Stream delegate

@interface SimpleScreenCaptureDelegate : NSObject <SCStreamDelegate>
@end

@implementation SimpleScreenCaptureDelegate

/// Invoked by ScreenCaptureKit when the stream ends. Clears the recording
/// flag and logs the outcome.
- (void)stream:(SCStream *)stream didStopWithError:(NSError *)error {
    g_isRecording = NO;

    if (!error) {
        NSLog(@"✅ ScreenCaptureKit stream stopped successfully");
        return;
    }
    NSLog(@"❌ ScreenCaptureKit stream stopped with error: %@", error);
}

@end
|
|
31
|
+
|
|
32
|
+
#pragma mark - Stream output

@interface SimpleScreenCaptureOutput : NSObject <SCStreamOutput>
@end

@implementation SimpleScreenCaptureOutput

/// Receives each screen sample, encodes it to PNG and writes it into a
/// "frames" directory next to the output movie. Paths of successfully
/// written frames are recorded in g_frameFiles for later video assembly.
- (void)stream:(SCStream *)stream didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer ofType:(SCStreamOutputType)type {
    if (!g_isRecording || type != SCStreamOutputTypeScreen) {
        return;
    }

    @autoreleasepool {  // FIX: drain per-frame temporaries (~30 callbacks/sec)
        // Extract pixel buffer from sample buffer
        CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        if (!pixelBuffer) {
            return;
        }

        // FIX: CIContext is expensive; the original created one per frame.
        // Build it once and reuse it.
        static CIContext *sharedContext = nil;
        static dispatch_once_t onceToken;
        dispatch_once(&onceToken, ^{
            sharedContext = [CIContext context];
        });

        CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];
        CGImageRef cgImage = [sharedContext createCGImage:ciImage fromRect:ciImage.extent];
        if (!cgImage) {
            return;
        }

        NSString *frameFilename = [NSString stringWithFormat:@"frame_%04ld.png", (long)g_frameCount++];
        NSString *frameDir = [g_outputVideoPath stringByDeletingLastPathComponent];
        NSString *framesDir = [frameDir stringByAppendingPathComponent:@"frames"];

        // Idempotent: creating an existing directory is a no-op.
        [[NSFileManager defaultManager] createDirectoryAtPath:framesDir
                                  withIntermediateDirectories:YES
                                                   attributes:nil
                                                        error:nil];

        NSString *framePath = [framesDir stringByAppendingPathComponent:frameFilename];
        NSURL *frameURL = [NSURL fileURLWithPath:framePath];

        // NOTE(review): kUTTypePNG is deprecated in favor of UTTypePNG
        // (UniformTypeIdentifiers framework); kept here to avoid adding a
        // new framework dependency in this change.
        CGImageDestinationRef destination = CGImageDestinationCreateWithURL(
            (__bridge CFURLRef)frameURL,
            kUTTypePNG,
            1,
            NULL
        );

        if (destination) {
            CGImageDestinationAddImage(destination, cgImage, NULL);
            BOOL success = CGImageDestinationFinalize(destination);
            CFRelease(destination);

            if (success) {
                [g_frameFiles addObject:framePath];
                if (g_frameCount % 30 == 0) { // Log every 30 frames
                    NSLog(@"📸 Captured %ld frames", (long)g_frameCount);
                }
            }
        }

        CGImageRelease(cgImage);
    }
}

@end
|
|
94
|
+
|
|
95
|
+
#pragma mark - SimpleScreenCaptureKit

API_AVAILABLE(macos(12.3))
@interface SimpleScreenCaptureKit : NSObject
/// YES when ScreenCaptureKit is usable on this OS (macOS 12.3+).
+ (BOOL)isAvailable;
/// Starts capture; config expects @"outputPath" and optionally @"displayId".
+ (BOOL)startRecordingWithConfiguration:(NSDictionary *)config;
/// Stops the stream and assembles the captured frames into a movie.
+ (void)stopRecording;
+ (BOOL)isRecording;
/// Builds the output movie from the PNG frames captured so far.
+ (void)createVideoFromFrames;
@end

/// Draws `image` scaled to fill `pixelBuffer` (kCVPixelFormatType_32BGRA, as
/// allocated by createVideoFromFrames). Without this the appended buffers
/// are uninitialized memory and the movie comes out blank.
static void SSCDrawImageIntoPixelBuffer(NSImage *image, CVPixelBufferRef pixelBuffer) {
    CGImageRef cgImage = [image CGImageForProposedRect:NULL context:NULL hints:NULL];
    if (!cgImage) {
        return;
    }

    CVPixelBufferLockBaseAddress(pixelBuffer, 0);

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    // Little-endian "alpha first" == BGRA byte order.
    CGContextRef context = CGBitmapContextCreate(CVPixelBufferGetBaseAddress(pixelBuffer),
                                                 CVPixelBufferGetWidth(pixelBuffer),
                                                 CVPixelBufferGetHeight(pixelBuffer),
                                                 8,
                                                 CVPixelBufferGetBytesPerRow(pixelBuffer),
                                                 colorSpace,
                                                 (CGBitmapInfo)kCGImageAlphaPremultipliedFirst | kCGBitmapByteOrder32Little);
    if (context) {
        CGContextDrawImage(context,
                           CGRectMake(0, 0,
                                      CVPixelBufferGetWidth(pixelBuffer),
                                      CVPixelBufferGetHeight(pixelBuffer)),
                           cgImage);
        CGContextRelease(context);
    }
    CGColorSpaceRelease(colorSpace);

    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
}

@implementation SimpleScreenCaptureKit

+ (BOOL)isAvailable {
    if (@available(macOS 12.3, *)) {
        return [SCShareableContent class] != nil;
    }
    return NO;
}

/// Starts capture asynchronously. Returns NO if already recording; YES once
/// setup has been *initiated* (the stream itself starts asynchronously and
/// may still fail — watch the logs).
+ (BOOL)startRecordingWithConfiguration:(NSDictionary *)config {
    if (g_isRecording) {
        return NO;
    }

    g_outputVideoPath = config[@"outputPath"];
    g_frameFiles = [NSMutableArray array];
    g_frameCount = 0;

    // Get shareable content
    [SCShareableContent getShareableContentWithCompletionHandler:^(SCShareableContent *content, NSError *error) {
        if (error) {
            NSLog(@"❌ Failed to get shareable content: %@", error);
            return;
        }

        // Find target display (default: first), honoring config[@"displayId"].
        SCDisplay *targetDisplay = content.displays.firstObject;
        if (config[@"displayId"]) {
            CGDirectDisplayID displayID = [config[@"displayId"] unsignedIntValue];
            for (SCDisplay *display in content.displays) {
                if (display.displayID == displayID) {
                    targetDisplay = display;
                    break;
                }
            }
        }

        if (!targetDisplay) {
            NSLog(@"❌ No target display found");
            return;
        }

        // Create content filter (no window exclusion for now - keep it simple)
        SCContentFilter *filter = [[SCContentFilter alloc] initWithDisplay:targetDisplay excludingWindows:@[]];

        // Stream configuration
        SCStreamConfiguration *streamConfig = [[SCStreamConfiguration alloc] init];
        streamConfig.width = 1920;
        streamConfig.height = 1080;
        streamConfig.minimumFrameInterval = CMTimeMake(1, 30); // 30 FPS
        streamConfig.pixelFormat = kCVPixelFormatType_32BGRA; // Simple BGRA format

        // Globals keep the delegate/output alive for the stream's lifetime.
        g_streamDelegate = [[SimpleScreenCaptureDelegate alloc] init];
        g_streamOutput = [[SimpleScreenCaptureOutput alloc] init];

        g_stream = [[SCStream alloc] initWithFilter:filter
                                      configuration:streamConfig
                                           delegate:g_streamDelegate];

        // FIX: surface attachment failures instead of passing error:nil.
        NSError *outputError = nil;
        if (![g_stream addStreamOutput:g_streamOutput
                                  type:SCStreamOutputTypeScreen
                    sampleHandlerQueue:dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0)
                                 error:&outputError]) {
            NSLog(@"❌ Failed to add stream output: %@", outputError);
            return;
        }

        [g_stream startCaptureWithCompletionHandler:^(NSError *startError) {
            if (startError) {
                NSLog(@"❌ Failed to start ScreenCaptureKit: %@", startError);
            } else {
                NSLog(@"✅ Simple ScreenCaptureKit recording started");
                g_isRecording = YES;
            }
        }];
    }];

    return YES;
}

/// Stops the stream, assembles the movie, then clears all global state.
+ (void)stopRecording {
    if (!g_isRecording || !g_stream) {
        return;
    }

    [g_stream stopCaptureWithCompletionHandler:^(NSError *error) {
        if (error) {
            NSLog(@"❌ Error stopping stream: %@", error);
        } else {
            NSLog(@"✅ Stream stopped, captured %ld frames", (long)g_frameCount);
        }

        // Create video from frames
        [self createVideoFromFrames];

        // Cleanup
        g_stream = nil;
        g_streamDelegate = nil;
        g_streamOutput = nil;
        g_isRecording = NO;
    }];
}

+ (BOOL)isRecording {
    return g_isRecording;
}

/// Reads the PNG frames back from disk, encodes them into an H.264 QuickTime
/// movie at g_outputVideoPath, and deletes the frames directory on completion.
+ (void)createVideoFromFrames {
    if (g_frameFiles.count == 0) {
        NSLog(@"❌ No frames to create video");
        return;
    }

    NSLog(@"🎬 Creating video from %lu frames", (unsigned long)g_frameFiles.count);

    // Create AVAssetWriter
    NSURL *outputURL = [NSURL fileURLWithPath:g_outputVideoPath];
    NSError *error = nil;
    AVAssetWriter *assetWriter = [[AVAssetWriter alloc] initWithURL:outputURL
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];

    if (error || !assetWriter) {
        NSLog(@"❌ Failed to create asset writer: %@", error);
        return;
    }

    // Video settings
    NSDictionary *videoSettings = @{
        AVVideoCodecKey: AVVideoCodecTypeH264,
        AVVideoWidthKey: @1920,
        AVVideoHeightKey: @1080
    };

    AVAssetWriterInput *videoInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo
                                                                    outputSettings:videoSettings];
    videoInput.expectsMediaDataInRealTime = NO;

    // Pixel buffer adaptor
    NSDictionary *pixelBufferAttributes = @{
        (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA),
        (NSString *)kCVPixelBufferWidthKey: @1920,
        (NSString *)kCVPixelBufferHeightKey: @1080
    };

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc]
        initWithAssetWriterInput:videoInput
     sourcePixelBufferAttributes:pixelBufferAttributes];

    // FIX: guard with canAddInput: before adding.
    if ([assetWriter canAddInput:videoInput]) {
        [assetWriter addInput:videoInput];
    }

    // Start writing
    [assetWriter startWriting];
    [assetWriter startSessionAtSourceTime:kCMTimeZero];

    // Pull frames as the writer becomes ready.
    __block NSInteger frameIndex = 0;
    [videoInput requestMediaDataWhenReadyOnQueue:dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0)
                                      usingBlock:^{
        while (videoInput.isReadyForMoreMediaData && frameIndex < g_frameFiles.count) {
            NSString *framePath = g_frameFiles[frameIndex];

            // Load image
            NSImage *image = [[NSImage alloc] initWithContentsOfFile:framePath];
            if (image) {
                CVPixelBufferRef pixelBuffer = NULL;
                CVPixelBufferCreate(kCFAllocatorDefault, 1920, 1080, kCVPixelFormatType_32BGRA,
                                    (__bridge CFDictionaryRef)pixelBufferAttributes, &pixelBuffer);

                if (pixelBuffer) {
                    // FIX: actually render the frame into the buffer. The
                    // original appended uninitialized buffers ("simplified"),
                    // producing a blank movie.
                    SSCDrawImageIntoPixelBuffer(image, pixelBuffer);
                    CMTime frameTime = CMTimeMake(frameIndex, 30); // 30 FPS
                    [adaptor appendPixelBuffer:pixelBuffer withPresentationTime:frameTime];
                    CVPixelBufferRelease(pixelBuffer);
                }
            }

            frameIndex++;
        }

        if (frameIndex >= g_frameFiles.count) {
            [videoInput markAsFinished];
            [assetWriter finishWritingWithCompletionHandler:^{
                if (assetWriter.status == AVAssetWriterStatusCompleted) {
                    NSLog(@"✅ Video created successfully: %@", g_outputVideoPath);
                } else {
                    NSLog(@"❌ Video creation failed: %@", assetWriter.error);
                }

                // Clean up frame files
                NSString *frameDir = [g_outputVideoPath stringByDeletingLastPathComponent];
                NSString *framesDir = [frameDir stringByAppendingPathComponent:@"frames"];
                [[NSFileManager defaultManager] removeItemAtPath:framesDir error:nil];
            }];
        }
    }];
}

@end
|
package/src/window_selector.mm
CHANGED
|
@@ -2364,4 +2364,34 @@ Napi::Object InitWindowSelector(Napi::Env env, Napi::Object exports) {
|
|
|
2364
2364
|
exports.Set("hideScreenRecordingPreview", Napi::Function::New(env, HideScreenRecordingPreview));
|
|
2365
2365
|
|
|
2366
2366
|
return exports;
|
|
2367
|
-
}
|
|
2367
|
+
}
|
|
2368
|
+
|
|
2369
|
+
#pragma mark - Overlay visibility (C entry points)

/// Hides the selector overlay windows so they do not appear in the recording.
/// Safe to call from any thread: all AppKit work is dispatched to the main
/// queue asynchronously.
extern "C" void hideOverlays() {
    dispatch_async(dispatch_get_main_queue(), ^{
        if (g_screenOverlayWindows) {
            NSLog(@"🙈 Hiding overlay windows for recording");
            for (NSWindow *overlay in g_screenOverlayWindows) {
                [overlay setIsVisible:NO];
            }
        }
        if (g_overlayWindow) {
            [g_overlayWindow setIsVisible:NO];
        }
    });
}
|
|
2383
|
+
|
|
2384
|
+
extern "C" void showOverlays() {
|
|
2385
|
+
dispatch_async(dispatch_get_main_queue(), ^{
|
|
2386
|
+
if (g_screenOverlayWindows) {
|
|
2387
|
+
NSLog(@"👁️ Showing overlay windows after recording");
|
|
2388
|
+
for (NSWindow *window in g_screenOverlayWindows) {
|
|
2389
|
+
[window setIsVisible:YES];
|
|
2390
|
+
}
|
|
2391
|
+
}
|
|
2392
|
+
if (g_overlayWindow) {
|
|
2393
|
+
[g_overlayWindow setIsVisible:YES];
|
|
2394
|
+
}
|
|
2395
|
+
});
|
|
2396
|
+
}
|
|
2397
|
+
|