node-mac-recorder 2.13.11 → 2.14.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/settings.local.json +3 -1
- package/package.json +1 -1
- package/src/mac_recorder.mm +5 -3
- package/src/screen_capture_kit.h +4 -3
- package/src/screen_capture_kit.mm +172 -388
package/.claude/settings.local.json
CHANGED
@@ -29,7 +29,9 @@
       "Bash(chmod:*)",
       "Bash(ffprobe:*)",
       "Bash(ffmpeg:*)",
-      "WebSearch"
+      "WebSearch",
+      "Bash(ELECTRON_RUN_AS_NODE=1 node -e \"\nconsole.log(''🔍 Testing with proper permissions and Electron env'');\nconst MacRecorder = require(''./index'');\nconst recorder = new MacRecorder();\n\nasync function test() {\n try {\n const outputPath = ''./test-output/proper-test.mov'';\n console.log(''📹 Starting recording...'');\n const success = await recorder.startRecording(outputPath, {\n captureCursor: true,\n includeMicrophone: false,\n includeSystemAudio: false\n });\n \n if (success) {\n console.log(''✅ Recording started - waiting 2 seconds'');\n await new Promise(resolve => setTimeout(resolve, 2000));\n console.log(''🛑 Stopping recording...'');\n await recorder.stopRecording();\n console.log(''✅ Test completed'');\n } else {\n console.log(''❌ Recording start failed'');\n }\n } catch (error) {\n console.log(''❌ Error:'', error.message);\n }\n}\n\ntest();\n\")",
+      "Bash(ELECTRON_RUN_AS_NODE=1 node -e \"\nconsole.log(''''🔍 Debugging frame writing...'''');\nconst MacRecorder = require(''''./index'''');\nconst recorder = new MacRecorder();\n\nasync function debugFrameWriting() {\n try {\n const outputPath = ''''./test-output/frame-debug.mov'''';\n console.log(''''📹 Starting debug test...'''');\n \n const success = await recorder.startRecording(outputPath);\n \n if (success) {\n console.log(''''⏱️ Recording for 2 seconds...'''');\n await new Promise(resolve => setTimeout(resolve, 2000));\n \n console.log(''''🛑 Stopping...'''');\n await recorder.stopRecording();\n \n // Wait for finalization\n await new Promise(resolve => setTimeout(resolve, 1000));\n \n } else {\n console.log(''''❌ Failed to start'''');\n }\n } catch (error) {\n console.log(''''❌ Error:'''', error);\n }\n}\n\ndebugFrameWriting();\n\")"
     ],
     "deny": []
   }
package/package.json
CHANGED
package/src/mac_recorder.mm
CHANGED
@@ -76,6 +76,7 @@ Napi::Value StartRecording(const Napi::CallbackInfo& info) {
     bool includeMicrophone = false; // Microphone off by default
     bool includeSystemAudio = true; // System audio on by default
     CGDirectDisplayID displayID = CGMainDisplayID(); // Main display by default
+    uint32_t windowID = 0; // Default no window selection
     NSString *audioDeviceId = nil; // Default audio device ID
     NSString *systemAudioDeviceId = nil; // System audio device ID
 
@@ -154,10 +155,10 @@ Napi::Value StartRecording(const Napi::CallbackInfo& info) {
         }
     }
 
-    // Window ID
+    // Window ID support
     if (options.Has("windowId") && !options.Get("windowId").IsNull()) {
-
-
+        windowID = options.Get("windowId").As<Napi::Number>().Uint32Value();
+        NSLog(@"🪟 Window ID specified: %u", windowID);
    }
    }
 
@@ -192,6 +193,7 @@ Napi::Value StartRecording(const Napi::CallbackInfo& info) {
    // Create configuration for ScreenCaptureKit
    NSMutableDictionary *sckConfig = [NSMutableDictionary dictionary];
    sckConfig[@"displayId"] = @(displayID);
+   sckConfig[@"windowId"] = @(windowID);
    sckConfig[@"captureCursor"] = @(captureCursor);
    sckConfig[@"includeSystemAudio"] = @(includeSystemAudio);
    sckConfig[@"includeMicrophone"] = @(includeMicrophone);
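For orientation, here is a minimal sketch (not part of the diff) of the configuration dictionary that StartRecording builds and hands to the ScreenCaptureKit layer. The keys and the startRecordingWithConfiguration:delegate:error: call are the ones visible in this diff; the helper function, its literal values, and the nil delegate are illustrative assumptions only.

    #import <CoreGraphics/CoreGraphics.h>
    #import "screen_capture_kit.h"

    // Hypothetical helper mirroring StartRecording's sckConfig; values are examples only.
    static BOOL StartDisplayRecording(NSString *outputPath) {
        if (@available(macOS 15.0, *)) {
            NSMutableDictionary *sckConfig = [NSMutableDictionary dictionary];
            sckConfig[@"outputPath"] = outputPath;
            sckConfig[@"displayId"] = @(CGMainDisplayID());
            sckConfig[@"windowId"] = @(0);              // 0 = no window selection, record the display
            sckConfig[@"captureCursor"] = @(YES);
            sckConfig[@"includeSystemAudio"] = @(YES);
            sckConfig[@"includeMicrophone"] = @(NO);

            NSError *error = nil;
            return [ScreenCaptureKitRecorder startRecordingWithConfiguration:sckConfig
                                                                     delegate:nil
                                                                        error:&error];
        }
        return NO;
    }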
package/src/screen_capture_kit.h
CHANGED
@@ -1,8 +1,8 @@
 #import <Foundation/Foundation.h>
 #import <ScreenCaptureKit/ScreenCaptureKit.h>
-
+// NO AVFoundation - Pure ScreenCaptureKit implementation
 
-API_AVAILABLE(macos(
+API_AVAILABLE(macos(15.0))
 @interface ScreenCaptureKitRecorder : NSObject
 
 + (BOOL)isScreenCaptureKitAvailable;
@@ -11,7 +11,8 @@ API_AVAILABLE(macos(12.3))
                       error:(NSError **)error;
 + (void)stopRecording;
 + (BOOL)isRecording;
-+ (
++ (BOOL)setupVideoWriter;
++ (void)finalizeRecording;
 + (void)finalizeVideoWriter;
 + (void)cleanupVideoWriter;
 
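The header now gates the whole interface on macOS 15.0 (the second hunk's context still shows the previous 12.3 annotation), so callers on older systems need an availability guard. A minimal caller-side sketch with a hypothetical RecordingSupported() helper; only the @available check and the isScreenCaptureKitAvailable call come from the diff.

    #import "screen_capture_kit.h"

    // Hypothetical caller-side guard mirroring the availability check in the diff.
    static BOOL RecordingSupported(void) {
        if (@available(macOS 15.0, *)) {
            // SCRecordingOutput-based recording requires macOS 15.0 or later.
            return [ScreenCaptureKitRecorder isScreenCaptureKitAvailable];
        }
        return NO;
    }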
package/src/screen_capture_kit.mm
CHANGED
@@ -1,371 +1,215 @@
 #import "screen_capture_kit.h"
-#import <CoreImage/CoreImage.h>
 
-
-static
-static
+// Pure ScreenCaptureKit implementation - NO AVFoundation
+static SCStream * API_AVAILABLE(macos(12.3)) g_stream = nil;
+static SCRecordingOutput * API_AVAILABLE(macos(15.0)) g_recordingOutput = nil;
+static id<SCStreamDelegate> API_AVAILABLE(macos(12.3)) g_streamDelegate = nil;
 static BOOL g_isRecording = NO;
-
-// Electron-safe direct writing approach
-static AVAssetWriter *g_assetWriter = nil;
-static AVAssetWriterInput *g_assetWriterInput = nil;
-static AVAssetWriterInputPixelBufferAdaptor *g_pixelBufferAdaptor = nil;
 static NSString *g_outputPath = nil;
-static CMTime g_startTime;
-static CMTime g_currentTime;
-static BOOL g_writerStarted = NO;
-static int g_frameNumber = 0;
 
-@interface
+@interface PureScreenCaptureDelegate : NSObject <SCStreamDelegate>
 @end
 
-@implementation
-- (void)stream:(SCStream *)stream didStopWithError:(NSError *)error {
-    NSLog(@"🛑
+@implementation PureScreenCaptureDelegate
+- (void)stream:(SCStream * API_AVAILABLE(macos(12.3)))stream didStopWithError:(NSError *)error API_AVAILABLE(macos(12.3)) {
+    NSLog(@"🛑 Pure ScreenCapture stream stopped");
     g_isRecording = NO;
 
     if (error) {
-        NSLog(@"❌ Stream
+        NSLog(@"❌ Stream error: %@", error);
     } else {
-        NSLog(@"✅
-    }
-
-    // Finalize video writer
-    NSLog(@"🎬 Delegate calling finalizeVideoWriter...");
-    [ScreenCaptureKitRecorder finalizeVideoWriter];
-    NSLog(@"🎬 Delegate finished calling finalizeVideoWriter");
-}
-@end
-
-@interface ElectronSafeOutput : NSObject <SCStreamOutput>
-- (void)processSampleBufferSafely:(CMSampleBufferRef)sampleBuffer ofType:(SCStreamOutputType)type;
-@end
-
-@implementation ElectronSafeOutput
-- (void)stream:(SCStream *)stream didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer ofType:(SCStreamOutputType)type {
-    // EXTREME SAFETY: Complete isolation with separate thread
-    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_LOW, 0), ^{
-        @autoreleasepool {
-            [self processSampleBufferSafely:sampleBuffer ofType:type];
-        }
-    });
-}
-
-- (void)processSampleBufferSafely:(CMSampleBufferRef)sampleBuffer ofType:(SCStreamOutputType)type {
-    // ELECTRON CRASH PROTECTION: Multiple layers of safety
-    if (!g_isRecording || !g_assetWriterInput) {
-        NSLog(@"🔍 ProcessSampleBuffer: isRecording=%d, type=%d, writerInput=%p", g_isRecording, (int)type, g_assetWriterInput);
-        return;
-    }
-
-    NSLog(@"🔍 ProcessSampleBuffer: Processing frame, type=%d (Screen=%d, Audio=%d)...", (int)type, (int)SCStreamOutputTypeScreen, (int)SCStreamOutputTypeAudio);
-
-    // Process both screen and audio if available
-    if (type == SCStreamOutputTypeAudio) {
-        NSLog(@"🔊 Received audio sample buffer - skipping for video-only recording");
-        return;
-    }
-
-    if (type != SCStreamOutputTypeScreen) {
-        NSLog(@"⚠️ Unknown sample buffer type: %d", (int)type);
-        return;
+        NSLog(@"✅ Stream stopped cleanly");
     }
 
-
-    if (!sampleBuffer || !CMSampleBufferIsValid(sampleBuffer)) {
-        NSLog(@"❌ LAYER 1 FAIL: Invalid sample buffer");
-        return;
-    }
-    NSLog(@"✅ LAYER 1 PASS: Sample buffer valid");
-
-    // SAFETY LAYER 2: Try-catch with complete isolation
-    @try {
-        @autoreleasepool {
-            // SAFETY LAYER 3: Initialize writer safely (only once)
-            static BOOL initializationAttempted = NO;
-            if (!g_writerStarted && !initializationAttempted && g_assetWriter && g_assetWriterInput) {
-                initializationAttempted = YES;
-                @try {
-                    CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
-
-                    // SAFETY CHECK: Ensure valid time
-                    if (CMTIME_IS_VALID(presentationTime) && CMTIME_IS_NUMERIC(presentationTime)) {
-                        g_startTime = presentationTime;
-                        g_currentTime = g_startTime;
-
-                        // SAFETY LAYER 4: Writer state validation
-                        if (g_assetWriter.status == AVAssetWriterStatusUnknown) {
-                            [g_assetWriter startWriting];
-                            [g_assetWriter startSessionAtSourceTime:g_startTime];
-                            g_writerStarted = YES;
-                            NSLog(@"✅ Ultra-safe ScreenCaptureKit writer started");
-                        }
-                    } else {
-                        // Use current time if sample buffer time is invalid
-                        NSLog(@"⚠️ Invalid sample buffer time, using current time");
-                        g_startTime = CMTimeMakeWithSeconds(CACurrentMediaTime(), 600);
-                        g_currentTime = g_startTime;
-
-                        if (g_assetWriter.status == AVAssetWriterStatusUnknown) {
-                            [g_assetWriter startWriting];
-                            [g_assetWriter startSessionAtSourceTime:g_startTime];
-                            g_writerStarted = YES;
-                            NSLog(@"✅ Ultra-safe ScreenCaptureKit writer started with current time");
-                        }
-                    }
-                } @catch (NSException *writerException) {
-                    NSLog(@"⚠️ Writer initialization failed safely: %@", writerException.reason);
-                    return;
-                }
-            }
-
-            // SAFETY LAYER 5: Frame processing with isolation
-            if (!g_writerStarted || !g_assetWriterInput || !g_pixelBufferAdaptor) {
-                NSLog(@"❌ LAYER 5 FAIL: writer=%d, input=%p, adaptor=%p", g_writerStarted, g_assetWriterInput, g_pixelBufferAdaptor);
-                return;
-            }
-            NSLog(@"✅ LAYER 5 PASS: Writer components ready");
-
-            // SAFETY LAYER 6: Higher frame rate for video
-            static NSTimeInterval lastProcessTime = 0;
-            NSTimeInterval currentTime = [NSDate timeIntervalSinceReferenceDate];
-            if (currentTime - lastProcessTime < 0.033) { // Max 30 FPS
-                NSLog(@"❌ LAYER 6 FAIL: Rate limited (%.3fs since last)", currentTime - lastProcessTime);
-                return;
-            }
-            lastProcessTime = currentTime;
-            NSLog(@"✅ LAYER 6 PASS: Rate limiting OK");
-
-            // SAFETY LAYER 7: Input readiness check
-            if (!g_assetWriterInput.isReadyForMoreMediaData) {
-                NSLog(@"❌ LAYER 7 FAIL: Writer not ready for data");
-                return;
-            }
-            NSLog(@"✅ LAYER 7 PASS: Writer ready for data");
-
-            // SAFETY LAYER 8: Get pixel buffer from sample buffer
-            CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
-            BOOL createdDummyBuffer = NO;
-
-            if (!pixelBuffer) {
-                // Try alternative methods to get pixel buffer
-                CMFormatDescriptionRef formatDesc = CMSampleBufferGetFormatDescription(sampleBuffer);
-                if (formatDesc) {
-                    CMMediaType mediaType = CMFormatDescriptionGetMediaType(formatDesc);
-                    NSLog(@"🔍 Sample buffer media type: %u (Video=%u)", (unsigned int)mediaType, (unsigned int)kCMMediaType_Video);
-                    return; // Skip processing if no pixel buffer
-                } else {
-                    NSLog(@"❌ No pixel buffer and no format description - permissions issue");
-
-                    // Create a dummy pixel buffer using the pool from adaptor
-                    CVPixelBufferRef dummyBuffer = NULL;
-
-                    // Try to get a pixel buffer from the adaptor's buffer pool
-                    CVPixelBufferPoolRef bufferPool = g_pixelBufferAdaptor.pixelBufferPool;
-                    if (bufferPool) {
-                        CVReturn poolResult = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, bufferPool, &dummyBuffer);
-                        if (poolResult == kCVReturnSuccess && dummyBuffer) {
-                            pixelBuffer = dummyBuffer;
-                            createdDummyBuffer = YES;
-                            NSLog(@"✅ Created dummy buffer from adaptor pool");
-
-                            // Fill buffer with black pixels
-                            CVPixelBufferLockBaseAddress(pixelBuffer, 0);
-                            void *baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer);
-                            size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
-                            size_t height = CVPixelBufferGetHeight(pixelBuffer);
-                            if (baseAddress) {
-                                memset(baseAddress, 0, bytesPerRow * height);
-                            }
-                            CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
-                        } else {
-                            NSLog(@"❌ Failed to create buffer from pool: %d", poolResult);
-                        }
-                    }
-
-                    // Fallback: create manual buffer if pool method failed
-                    if (!dummyBuffer) {
-                        CVReturn result = CVPixelBufferCreate(kCFAllocatorDefault,
-                                                              1920, 1080,
-                                                              kCVPixelFormatType_32BGRA,
-                                                              NULL, &dummyBuffer);
-                        if (result == kCVReturnSuccess && dummyBuffer) {
-                            pixelBuffer = dummyBuffer;
-                            createdDummyBuffer = YES;
-                            NSLog(@"✅ Created manual dummy buffer");
-
-                            // Fill buffer with black pixels
-                            CVPixelBufferLockBaseAddress(pixelBuffer, 0);
-                            void *baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer);
-                            size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
-                            size_t height = CVPixelBufferGetHeight(pixelBuffer);
-                            if (baseAddress) {
-                                memset(baseAddress, 0, bytesPerRow * height);
-                            }
-                            CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
-                        } else {
-                            NSLog(@"❌ Failed to create dummy pixel buffer");
-                            return;
-                        }
-                    }
-                }
-            }
-            NSLog(@"✅ LAYER 8 PASS: Pixel buffer ready (dummy=%d)", createdDummyBuffer);
-
-            // SAFETY LAYER 9: Dimension validation - flexible this time
-            size_t width = CVPixelBufferGetWidth(pixelBuffer);
-            size_t height = CVPixelBufferGetHeight(pixelBuffer);
-            if (width == 0 || height == 0 || width > 4096 || height > 4096) {
-                NSLog(@"❌ LAYER 9 FAIL: Invalid dimensions %zux%zu", width, height);
-                return; // Skip only if clearly invalid
-            }
-            NSLog(@"✅ LAYER 9 PASS: Valid dimensions %zux%zu", width, height);
-
-            // SAFETY LAYER 10: Time validation - use sequential timing
-            g_frameNumber++;
-
-            // Create sequential time stamps
-            CMTime relativeTime = CMTimeMake(g_frameNumber, 30); // 30 FPS sequential
-
-            if (!CMTIME_IS_VALID(relativeTime)) {
-                return;
-            }
-
-            double seconds = CMTimeGetSeconds(relativeTime);
-            if (seconds > 30.0) { // Max 30 seconds
-                return;
-            }
-
-            // SAFETY LAYER 11: Append with complete exception handling
-            @try {
-                // Use pixel buffer directly - copy was causing errors
-                NSLog(@"🔍 Attempting to append frame %d with time %.3fs", g_frameNumber, seconds);
-                BOOL success = [g_pixelBufferAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:relativeTime];
-
-                if (success) {
-                    g_currentTime = relativeTime;
-                    static int ultraSafeFrameCount = 0;
-                    ultraSafeFrameCount++;
-                    NSLog(@"✅ Frame %d appended successfully! (%.1fs)", ultraSafeFrameCount, seconds);
-                } else {
-                    NSLog(@"❌ Failed to append frame %d - adaptor rejected", g_frameNumber);
-                }
-            } @catch (NSException *appendException) {
-                NSLog(@"🛡️ Append exception handled safely: %@", appendException.reason);
-                // Continue gracefully - don't crash
-            }
-
-            // Cleanup dummy pixel buffer if we created one
-            if (pixelBuffer && createdDummyBuffer) {
-                CVPixelBufferRelease(pixelBuffer);
-                NSLog(@"🧹 Released dummy pixel buffer");
-            }
-        }
-    } @catch (NSException *outerException) {
-        NSLog(@"🛡️ Outer exception handled: %@", outerException.reason);
-        // Ultimate safety - graceful continue
-    } @catch (...) {
-        NSLog(@"🛡️ Unknown exception caught and handled safely");
-        // Catch any C++ exceptions too
-    }
+    [ScreenCaptureKitRecorder finalizeRecording];
 }
 @end
 
 @implementation ScreenCaptureKitRecorder
 
 + (BOOL)isScreenCaptureKitAvailable {
-    if (@available(macOS
-        return [SCShareableContent class] != nil && [SCStream class] != nil;
+    if (@available(macOS 15.0, *)) {
+        return [SCShareableContent class] != nil && [SCStream class] != nil && [SCRecordingOutput class] != nil;
     }
     return NO;
 }
 
 + (BOOL)startRecordingWithConfiguration:(NSDictionary *)config delegate:(id)delegate error:(NSError **)error {
     if (g_isRecording) {
+        NSLog(@"⚠️ Already recording");
         return NO;
     }
 
     g_outputPath = config[@"outputPath"];
-    g_writerStarted = NO;
-    g_frameNumber = 0; // Reset frame counter for new recording
 
-    //
-    [
+    // Extract configuration options
+    NSNumber *displayId = config[@"displayId"];
+    NSNumber *windowId = config[@"windowId"];
+    NSValue *captureAreaValue = config[@"captureArea"];
+    NSNumber *captureCursor = config[@"captureCursor"];
+    NSNumber *includeMicrophone = config[@"includeMicrophone"];
+    NSNumber *includeSystemAudio = config[@"includeSystemAudio"];
 
-    NSLog(@"🎬 Starting
+    NSLog(@"🎬 Starting PURE ScreenCaptureKit recording (NO AVFoundation)");
+    NSLog(@"🔧 Config: cursor=%@ mic=%@ system=%@ display=%@ window=%@",
+          captureCursor, includeMicrophone, includeSystemAudio, displayId, windowId);
 
+    // Get shareable content
     [SCShareableContent getShareableContentWithCompletionHandler:^(SCShareableContent *content, NSError *contentError) {
         if (contentError) {
-            NSLog(@"❌
+            NSLog(@"❌ Content error: %@", contentError);
             return;
         }
 
-        NSLog(@"✅ Got
+        NSLog(@"✅ Got %lu displays, %lu windows for pure recording",
+              content.displays.count, content.windows.count);
 
-
-
-
-        }
+        SCContentFilter *filter = nil;
+        NSInteger recordingWidth = 0;
+        NSInteger recordingHeight = 0;
 
-        //
-
-
-
-
+        // WINDOW RECORDING
+        if (windowId && [windowId integerValue] != 0) {
+            SCRunningApplication *targetApp = nil;
+            SCWindow *targetWindow = nil;
+
+            for (SCWindow *window in content.windows) {
+                if (window.windowID == [windowId unsignedIntValue]) {
+                    targetWindow = window;
+                    targetApp = window.owningApplication;
+                    break;
+                }
+            }
+
+            if (targetWindow && targetApp) {
+                NSLog(@"🪟 Recording window: %@ (%ux%u)",
+                      targetWindow.title, (unsigned)targetWindow.frame.size.width, (unsigned)targetWindow.frame.size.height);
+                filter = [[SCContentFilter alloc] initWithDesktopIndependentWindow:targetWindow];
+                recordingWidth = (NSInteger)targetWindow.frame.size.width;
+                recordingHeight = (NSInteger)targetWindow.frame.size.height;
+            } else {
+                NSLog(@"❌ Window ID %@ not found", windowId);
+                return;
+            }
+        }
+        // DISPLAY RECORDING
+        else {
+            SCDisplay *targetDisplay = nil;
+
+            if (displayId && [displayId integerValue] != 0) {
+                // Find specific display
+                for (SCDisplay *display in content.displays) {
+                    if (display.displayID == [displayId unsignedIntValue]) {
+                        targetDisplay = display;
+                        break;
+                    }
+                }
+            } else {
+                // Use first display
+                targetDisplay = content.displays.firstObject;
+            }
+
+            if (!targetDisplay) {
+                NSLog(@"❌ Display not found");
+                return;
+            }
+
+            NSLog(@"🖥️ Recording display %u (%dx%d)",
+                  targetDisplay.displayID, (int)targetDisplay.width, (int)targetDisplay.height);
+            filter = [[SCContentFilter alloc] initWithDisplay:targetDisplay excludingWindows:@[]];
+            recordingWidth = targetDisplay.width;
+            recordingHeight = targetDisplay.height;
         }
 
-
-
-        // Create content filter for entire display - NO exclusions
-        SCContentFilter *filter = [[SCContentFilter alloc] initWithDisplay:targetDisplay excludingWindows:@[]];
-        NSLog(@"✅ Content filter created for display");
-
-        // Stream configuration - fixed resolution to avoid permissions issues
+        // Configure stream with extracted options
         SCStreamConfiguration *streamConfig = [[SCStreamConfiguration alloc] init];
-        streamConfig.width =
-        streamConfig.height =
+        streamConfig.width = recordingWidth;
+        streamConfig.height = recordingHeight;
         streamConfig.minimumFrameInterval = CMTimeMake(1, 30); // 30 FPS
         streamConfig.pixelFormat = kCVPixelFormatType_32BGRA;
-        streamConfig.
+        streamConfig.scalesToFit = NO;
 
-
+        // CURSOR SUPPORT
+        BOOL shouldShowCursor = captureCursor ? [captureCursor boolValue] : YES;
+        streamConfig.showsCursor = shouldShowCursor;
 
-
-
-        g_streamOutput = [[ElectronSafeOutput alloc] init];
+        NSLog(@"🎥 Pure ScreenCapture config: %ldx%ld @ 30fps, cursor=%d",
+              recordingWidth, recordingHeight, shouldShowCursor);
 
-
+        // AUDIO SUPPORT - Configure stream audio settings
+        BOOL shouldCaptureMic = includeMicrophone ? [includeMicrophone boolValue] : NO;
+        BOOL shouldCaptureSystemAudio = includeSystemAudio ? [includeSystemAudio boolValue] : NO;
+
+        if (@available(macOS 13.0, *)) {
+            if (shouldCaptureMic || shouldCaptureSystemAudio) {
+                streamConfig.capturesAudio = YES;
+                streamConfig.sampleRate = 44100;
+                streamConfig.channelCount = 2;
+                NSLog(@"🎵 Audio enabled: mic=%d system=%d", shouldCaptureMic, shouldCaptureSystemAudio);
+            } else {
+                streamConfig.capturesAudio = NO;
+                NSLog(@"🔇 Audio disabled");
+            }
+        }
+
+        // Create pure ScreenCaptureKit recording output
+        NSURL *outputURL = [NSURL fileURLWithPath:g_outputPath];
+        if (@available(macOS 15.0, *)) {
+            // Create recording output configuration
+            SCRecordingOutputConfiguration *recordingConfig = [[SCRecordingOutputConfiguration alloc] init];
+            recordingConfig.outputURL = outputURL;
+            recordingConfig.videoCodecType = AVVideoCodecTypeH264;
+
+            // Audio configuration - using available properties
+            // Note: Specific audio routing handled by ScreenCaptureKit automatically
+
+            // Create recording output with correct initializer
+            g_recordingOutput = [[SCRecordingOutput alloc] initWithConfiguration:recordingConfig
+                                                                         delegate:nil];
+            NSLog(@"🔧 Created SCRecordingOutput with audio config: mic=%d system=%d",
+                  shouldCaptureMic, shouldCaptureSystemAudio);
+        }
+
+        if (!g_recordingOutput) {
+            NSLog(@"❌ Failed to create SCRecordingOutput");
+            return;
+        }
 
-
-
+        NSLog(@"✅ Pure ScreenCaptureKit recording output created");
+
+        // Create delegate
+        g_streamDelegate = [[PureScreenCaptureDelegate alloc] init];
+
+        // Create and configure stream
         g_stream = [[SCStream alloc] initWithFilter:filter configuration:streamConfig delegate:g_streamDelegate];
 
         if (!g_stream) {
-            NSLog(@"❌ Failed to create stream");
+            NSLog(@"❌ Failed to create pure stream");
             return;
         }
 
-
+        // Add recording output directly to stream
+        NSError *outputError = nil;
+        BOOL outputAdded = NO;
 
-
-
-
-                                 sampleHandlerQueue:dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)
-                                               error:&streamError];
+        if (@available(macOS 15.0, *)) {
+            outputAdded = [g_stream addRecordingOutput:g_recordingOutput error:&outputError];
+        }
 
-        if (!
-            NSLog(@"❌ Failed to add
+        if (!outputAdded || outputError) {
+            NSLog(@"❌ Failed to add recording output: %@", outputError);
             return;
         }
 
-        NSLog(@"✅
+        NSLog(@"✅ Pure recording output added to stream");
 
+        // Start capture with recording
         [g_stream startCaptureWithCompletionHandler:^(NSError *startError) {
             if (startError) {
-                NSLog(@"❌ Failed to start capture: %@", startError);
+                NSLog(@"❌ Failed to start pure capture: %@", startError);
                 g_isRecording = NO;
             } else {
-                NSLog(@"
+                NSLog(@"🎉 PURE ScreenCaptureKit recording started successfully!");
                 g_isRecording = YES;
             }
         }];
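Condensed into one sketch, the recording path added in this hunk looks roughly as follows (display recording only, no audio, error handling and the global g_stream / g_recordingOutput bookkeeping elided). The ScreenCaptureKit calls are the ones used in the diff; the wrapper function and its literal values are illustrative assumptions, and a real caller must keep strong references to the stream and recording output (the package stores them in file-level globals) or they may be deallocated while recording.

    #import <ScreenCaptureKit/ScreenCaptureKit.h>
    #import <AVFoundation/AVFoundation.h>   // only for the AVVideoCodecTypeH264 constant

    // Hypothetical condensed version of the added startRecordingWithConfiguration: path.
    static void StartPureRecording(NSString *outputPath) API_AVAILABLE(macos(15.0)) {
        [SCShareableContent getShareableContentWithCompletionHandler:^(SCShareableContent *content, NSError *contentError) {
            if (contentError || content.displays.count == 0) { return; }

            SCDisplay *display = content.displays.firstObject;
            SCContentFilter *filter = [[SCContentFilter alloc] initWithDisplay:display excludingWindows:@[]];

            SCStreamConfiguration *config = [[SCStreamConfiguration alloc] init];
            config.width = display.width;
            config.height = display.height;
            config.minimumFrameInterval = CMTimeMake(1, 30);   // 30 FPS
            config.pixelFormat = kCVPixelFormatType_32BGRA;
            config.showsCursor = YES;

            // SCRecordingOutput writes the movie file itself - no AVAssetWriter involved.
            SCRecordingOutputConfiguration *recConfig = [[SCRecordingOutputConfiguration alloc] init];
            recConfig.outputURL = [NSURL fileURLWithPath:outputPath];
            recConfig.videoCodecType = AVVideoCodecTypeH264;
            SCRecordingOutput *recOutput = [[SCRecordingOutput alloc] initWithConfiguration:recConfig delegate:nil];

            SCStream *stream = [[SCStream alloc] initWithFilter:filter configuration:config delegate:nil];
            NSError *outputError = nil;
            if (![stream addRecordingOutput:recOutput error:&outputError]) { return; }

            // NOTE: keep strong references to stream and recOutput for the lifetime of the recording.
            [stream startCaptureWithCompletionHandler:^(NSError *startError) {
                if (startError) { NSLog(@"start failed: %@", startError); }
            }];
        }];
    }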
@@ -379,19 +223,14 @@ static int g_frameNumber = 0;
         return;
     }
 
-    NSLog(@"🛑 Stopping
+    NSLog(@"🛑 Stopping pure ScreenCaptureKit recording");
 
-    [g_stream stopCaptureWithCompletionHandler:^(NSError *
-        if (
-            NSLog(@"❌ Stop error: %@",
-        } else {
-            NSLog(@"✅ ScreenCaptureKit stream stopped in completion handler");
+    [g_stream stopCaptureWithCompletionHandler:^(NSError *error) {
+        if (error) {
+            NSLog(@"❌ Stop error: %@", error);
         }
-
-
-        NSLog(@"🎬 Completion handler calling finalizeVideoWriter...");
-        [ScreenCaptureKitRecorder finalizeVideoWriter];
-        NSLog(@"🎬 Completion handler finished calling finalizeVideoWriter");
+        NSLog(@"✅ Pure stream stopped");
+        [ScreenCaptureKitRecorder finalizeRecording];
     }];
 }
 
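Because stopCaptureWithCompletionHandler: is asynchronous, finalizeRecording only runs once that handler (or the stream delegate) fires, so a caller that wants to read the finished file should wait briefly after calling stopRecording, as the debug script in settings.local.json does with its one-second delay. A hedged sketch of that pattern; the helper function is hypothetical and the one-second figure simply mirrors that script.

    #import "screen_capture_kit.h"

    // Hypothetical stop sequence; the delay mirrors the 1000 ms wait in the embedded debug script.
    static void StopAndCheck(void) API_AVAILABLE(macos(15.0)) {
        [ScreenCaptureKitRecorder stopRecording];
        dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(1 * NSEC_PER_SEC)),
                       dispatch_get_main_queue(), ^{
            NSLog(@"Still recording? %d", [ScreenCaptureKitRecorder isRecording]);
        });
    }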
@@ -399,92 +238,37 @@ static int g_frameNumber = 0;
     return g_isRecording;
 }
 
-+ (
-
-
-
-
-
-
-    NSURL *outputURL = [NSURL fileURLWithPath:g_outputPath];
-    NSError *error = nil;
++ (BOOL)setupVideoWriter {
+    // No setup needed - SCRecordingOutput handles everything
+    return YES;
+}
+
++ (void)finalizeRecording {
+    NSLog(@"🎬 Finalizing pure ScreenCaptureKit recording");
 
-
+    g_isRecording = NO;
 
-    if (
-
-
+    if (g_recordingOutput) {
+        // SCRecordingOutput finalizes automatically
+        NSLog(@"✅ Pure recording output finalized");
     }
 
-
-    NSDictionary *videoSettings = @{
-        AVVideoCodecKey: AVVideoCodecTypeH264,
-        AVVideoWidthKey: @1920,
-        AVVideoHeightKey: @1080,
-        AVVideoCompressionPropertiesKey: @{
-            AVVideoAverageBitRateKey: @(1920 * 1080 * 2), // 2 bits per pixel
-            AVVideoMaxKeyFrameIntervalKey: @30,
-            AVVideoProfileLevelKey: AVVideoProfileLevelH264BaselineAutoLevel
-        }
-    };
-
-    g_assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
-    g_assetWriterInput.expectsMediaDataInRealTime = NO; // Safer for Electron
-
-    // Pixel buffer attributes matching ScreenCaptureKit format
-    NSDictionary *pixelBufferAttributes = @{
-        (NSString*)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA),
-        (NSString*)kCVPixelBufferWidthKey: @1920,
-        (NSString*)kCVPixelBufferHeightKey: @1080
-    };
-
-    g_pixelBufferAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:g_assetWriterInput sourcePixelBufferAttributes:pixelBufferAttributes];
-
-    if ([g_assetWriter canAddInput:g_assetWriterInput]) {
-        [g_assetWriter addInput:g_assetWriterInput];
-        NSLog(@"✅ Electron-safe video writer setup complete");
-    } else {
-        NSLog(@"❌ Failed to add input to asset writer");
-    }
+    [ScreenCaptureKitRecorder cleanupVideoWriter];
 }
 
 + (void)finalizeVideoWriter {
-
-
-    if (!g_assetWriter || !g_writerStarted) {
-        NSLog(@"⚠️ Video writer not started properly - writer: %p, started: %d", g_assetWriter, g_writerStarted);
-        [ScreenCaptureKitRecorder cleanupVideoWriter];
-        return;
-    }
-
-    NSLog(@"🎬 Marking input as finished and finalizing...");
-    [g_assetWriterInput markAsFinished];
-
-    [g_assetWriter finishWritingWithCompletionHandler:^{
-        NSLog(@"🎬 Finalization completion handler called");
-        if (g_assetWriter.status == AVAssetWriterStatusCompleted) {
-            NSLog(@"✅ Video finalization successful: %@", g_outputPath);
-        } else {
-            NSLog(@"❌ Video finalization failed - status: %ld, error: %@", (long)g_assetWriter.status, g_assetWriter.error);
-        }
-
-        [ScreenCaptureKitRecorder cleanupVideoWriter];
-    }];
-
-    NSLog(@"🎬 Finalization request submitted, waiting for completion...");
+    // Alias for finalizeRecording to maintain compatibility
+    [ScreenCaptureKitRecorder finalizeRecording];
 }
 
 + (void)cleanupVideoWriter {
-    g_assetWriter = nil;
-    g_assetWriterInput = nil;
-    g_pixelBufferAdaptor = nil;
-    g_writerStarted = NO;
-    g_frameNumber = 0; // Reset frame counter
     g_stream = nil;
+    g_recordingOutput = nil;
     g_streamDelegate = nil;
-
+    g_isRecording = NO;
+    g_outputPath = nil;
 
-    NSLog(@"🧹
+    NSLog(@"🧹 Pure ScreenCaptureKit cleanup complete");
 }
 
 @end