node-mac-recorder 2.13.12 → 2.15.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/settings.local.json +2 -1
- package/package.json +1 -1
- package/src/mac_recorder.mm +5 -3
- package/src/screen_capture_kit.h +3 -2
- package/src/screen_capture_kit.mm +193 -209
package/.claude/settings.local.json CHANGED

@@ -30,7 +30,8 @@
       "Bash(ffprobe:*)",
       "Bash(ffmpeg:*)",
       "WebSearch",
-      "Bash(ELECTRON_RUN_AS_NODE=1 node -e \"\nconsole.log(''🔍 Testing with proper permissions and Electron env'');\nconst MacRecorder = require(''./index'');\nconst recorder = new MacRecorder();\n\nasync function test() {\n try {\n const outputPath = ''./test-output/proper-test.mov'';\n console.log(''📹 Starting recording...'');\n const success = await recorder.startRecording(outputPath, {\n captureCursor: true,\n includeMicrophone: false,\n includeSystemAudio: false\n });\n \n if (success) {\n console.log(''✅ Recording started - waiting 2 seconds'');\n await new Promise(resolve => setTimeout(resolve, 2000));\n console.log(''🛑 Stopping recording...'');\n await recorder.stopRecording();\n console.log(''✅ Test completed'');\n } else {\n console.log(''❌ Recording start failed'');\n }\n } catch (error) {\n console.log(''❌ Error:'', error.message);\n }\n}\n\ntest();\n\")"
+      "Bash(ELECTRON_RUN_AS_NODE=1 node -e \"\nconsole.log(''🔍 Testing with proper permissions and Electron env'');\nconst MacRecorder = require(''./index'');\nconst recorder = new MacRecorder();\n\nasync function test() {\n try {\n const outputPath = ''./test-output/proper-test.mov'';\n console.log(''📹 Starting recording...'');\n const success = await recorder.startRecording(outputPath, {\n captureCursor: true,\n includeMicrophone: false,\n includeSystemAudio: false\n });\n \n if (success) {\n console.log(''✅ Recording started - waiting 2 seconds'');\n await new Promise(resolve => setTimeout(resolve, 2000));\n console.log(''🛑 Stopping recording...'');\n await recorder.stopRecording();\n console.log(''✅ Test completed'');\n } else {\n console.log(''❌ Recording start failed'');\n }\n } catch (error) {\n console.log(''❌ Error:'', error.message);\n }\n}\n\ntest();\n\")",
+      "Bash(ELECTRON_RUN_AS_NODE=1 node -e \"\nconsole.log(''''🔍 Debugging frame writing...'''');\nconst MacRecorder = require(''''./index'''');\nconst recorder = new MacRecorder();\n\nasync function debugFrameWriting() {\n try {\n const outputPath = ''''./test-output/frame-debug.mov'''';\n console.log(''''📹 Starting debug test...'''');\n \n const success = await recorder.startRecording(outputPath);\n \n if (success) {\n console.log(''''⏱️ Recording for 2 seconds...'''');\n await new Promise(resolve => setTimeout(resolve, 2000));\n \n console.log(''''🛑 Stopping...'''');\n await recorder.stopRecording();\n \n // Wait for finalization\n await new Promise(resolve => setTimeout(resolve, 1000));\n \n } else {\n console.log(''''❌ Failed to start'''');\n }\n } catch (error) {\n console.log(''''❌ Error:'''', error);\n }\n}\n\ndebugFrameWriting();\n\")"
     ],
     "deny": []
   }
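The new allow-list entry embeds a frame-writing debug script as an escaped one-liner. Unpacked, it reads roughly as follows (reconstructed from the escaped command above; the output path and timings are the ones embedded there):

```js
// Reconstruction of the frame-debug one-liner allow-listed above.
const MacRecorder = require('./index');
const recorder = new MacRecorder();

async function debugFrameWriting() {
  try {
    const outputPath = './test-output/frame-debug.mov';
    console.log('📹 Starting debug test...');

    const success = await recorder.startRecording(outputPath);

    if (success) {
      console.log('⏱️ Recording for 2 seconds...');
      await new Promise(resolve => setTimeout(resolve, 2000));

      console.log('🛑 Stopping...');
      await recorder.stopRecording();

      // Give the recorder a moment to finalize the file.
      await new Promise(resolve => setTimeout(resolve, 1000));
    } else {
      console.log('❌ Failed to start');
    }
  } catch (error) {
    console.log('❌ Error:', error);
  }
}

debugFrameWriting();
```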
package/package.json CHANGED
package/src/mac_recorder.mm CHANGED

@@ -76,6 +76,7 @@ Napi::Value StartRecording(const Napi::CallbackInfo& info) {
     bool includeMicrophone = false; // Default olarak mikrofon kapalı
     bool includeSystemAudio = true; // Default olarak sistem sesi açık
     CGDirectDisplayID displayID = CGMainDisplayID(); // Default ana ekran
+    uint32_t windowID = 0; // Default no window selection
    NSString *audioDeviceId = nil; // Default audio device ID
    NSString *systemAudioDeviceId = nil; // System audio device ID
 
@@ -154,10 +155,10 @@ Napi::Value StartRecording(const Napi::CallbackInfo& info) {
             }
         }
 
-        // Window ID
+        // Window ID support
         if (options.Has("windowId") && !options.Get("windowId").IsNull()) {
-
-
+            windowID = options.Get("windowId").As<Napi::Number>().Uint32Value();
+            NSLog(@"🪟 Window ID specified: %u", windowID);
         }
     }
 
@@ -192,6 +193,7 @@ Napi::Value StartRecording(const Napi::CallbackInfo& info) {
     // Create configuration for ScreenCaptureKit
     NSMutableDictionary *sckConfig = [NSMutableDictionary dictionary];
     sckConfig[@"displayId"] = @(displayID);
+    sckConfig[@"windowId"] = @(windowID);
     sckConfig[@"captureCursor"] = @(captureCursor);
     sckConfig[@"includeSystemAudio"] = @(includeSystemAudio);
     sckConfig[@"includeMicrophone"] = @(includeMicrophone);
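The net effect of these changes is that `startRecording` now forwards a `windowId` option to the native ScreenCaptureKit layer. A minimal consumer-side sketch, assuming a valid CGWindowID has already been obtained (window enumeration is not part of this diff):

```js
const MacRecorder = require('./index');
const recorder = new MacRecorder();

async function recordSingleWindow() {
  // 12345 is a placeholder window ID; substitute a real CGWindowID from
  // whatever window-listing facility you use.
  const success = await recorder.startRecording('./test-output/window.mov', {
    windowId: 12345,            // new in this release: recorded via a window content filter
    captureCursor: true,
    includeMicrophone: false,
    includeSystemAudio: false
  });

  if (success) {
    await new Promise(resolve => setTimeout(resolve, 2000));
    await recorder.stopRecording();
  }
}

recordSingleWindow();
```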
package/src/screen_capture_kit.h CHANGED

@@ -1,8 +1,8 @@
 #import <Foundation/Foundation.h>
 #import <ScreenCaptureKit/ScreenCaptureKit.h>
-
+// NO AVFoundation - Pure ScreenCaptureKit implementation
 
-API_AVAILABLE(macos(12.3))
+API_AVAILABLE(macos(15.0))
 @interface ScreenCaptureKitRecorder : NSObject
 
 + (BOOL)isScreenCaptureKitAvailable;
@@ -12,6 +12,7 @@ API_AVAILABLE(macos(12.3))
 + (void)stopRecording;
 + (BOOL)isRecording;
 + (BOOL)setupVideoWriter;
++ (void)finalizeRecording;
 + (void)finalizeVideoWriter;
 + (void)cleanupVideoWriter;
 
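Both the header and the implementation below now gate on macOS 15.0 (for `SCRecordingOutput`) instead of 12.3. A defensive pattern on the JavaScript side, assuming `startRecording` resolves to `false` or rejects on older systems (that exact failure mode is an assumption, not something this diff shows):

```js
const MacRecorder = require('./index');
const recorder = new MacRecorder();

async function tryStart(outputPath) {
  try {
    // The rewritten native recorder requires SCRecordingOutput (macOS 15.0+),
    // so assume starting can fail on older systems.
    const started = await recorder.startRecording(outputPath);
    if (!started) {
      console.log('Recording unavailable on this macOS version');
      return false;
    }
    return true;
  } catch (error) {
    console.log('Recording failed:', error.message);
    return false;
  }
}

tryStart('./test-output/guarded.mov');
```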
package/src/screen_capture_kit.mm CHANGED

@@ -1,114 +1,35 @@
 #import "screen_capture_kit.h"
-#import <CoreImage/CoreImage.h>
 
-
-static
-static
+// Pure ScreenCaptureKit implementation - NO AVFoundation
+static SCStream * API_AVAILABLE(macos(12.3)) g_stream = nil;
+static SCRecordingOutput * API_AVAILABLE(macos(15.0)) g_recordingOutput = nil;
+static id<SCStreamDelegate> API_AVAILABLE(macos(12.3)) g_streamDelegate = nil;
 static BOOL g_isRecording = NO;
-
-// Modern ScreenCaptureKit writer
-static AVAssetWriter *g_assetWriter = nil;
-static AVAssetWriterInput *g_assetWriterInput = nil;
-static AVAssetWriterInputPixelBufferAdaptor *g_pixelBufferAdaptor = nil;
 static NSString *g_outputPath = nil;
-static BOOL g_writerStarted = NO;
-static int g_frameCount = 0;
 
-@interface
+@interface PureScreenCaptureDelegate : NSObject <SCStreamDelegate>
 @end
 
-@implementation
-- (void)stream:(SCStream *)stream didStopWithError:(NSError *)error {
-    NSLog(@"🛑
+@implementation PureScreenCaptureDelegate
+- (void)stream:(SCStream * API_AVAILABLE(macos(12.3)))stream didStopWithError:(NSError *)error API_AVAILABLE(macos(12.3)) {
+    NSLog(@"🛑 Pure ScreenCapture stream stopped");
     g_isRecording = NO;
 
     if (error) {
         NSLog(@"❌ Stream error: %@", error);
-    }
-
-    [ScreenCaptureKitRecorder finalizeVideoWriter];
-}
-@end
-
-@interface ModernStreamOutput : NSObject <SCStreamOutput>
-@end
-
-@implementation ModernStreamOutput
-- (void)stream:(SCStream *)stream didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer ofType:(SCStreamOutputType)type {
-    if (!g_isRecording) return;
-
-    // Only process screen frames
-    if (type != SCStreamOutputTypeScreen) return;
-
-    // Validate sample buffer
-    if (!sampleBuffer || !CMSampleBufferIsValid(sampleBuffer)) {
-        NSLog(@"⚠️ Invalid sample buffer");
-        return;
-    }
-
-    // Get pixel buffer
-    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
-    if (!pixelBuffer) {
-        NSLog(@"⚠️ No pixel buffer in sample");
-        return;
-    }
-
-    // Initialize writer on first frame
-    static dispatch_once_t onceToken;
-    dispatch_once(&onceToken, ^{
-        [self initializeWriterWithSampleBuffer:sampleBuffer];
-    });
-
-    if (!g_writerStarted) {
-        return;
-    }
-
-    // Write frame
-    [self writePixelBuffer:pixelBuffer];
-}
-
-- (void)initializeWriterWithSampleBuffer:(CMSampleBufferRef)sampleBuffer {
-    if (!g_assetWriter) return;
-
-    NSLog(@"🎬 Initializing writer with first sample");
-
-    CMTime startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
-    if (!CMTIME_IS_VALID(startTime)) {
-        startTime = CMTimeMakeWithSeconds(0, 600);
-    }
-
-    [g_assetWriter startWriting];
-    [g_assetWriter startSessionAtSourceTime:startTime];
-    g_writerStarted = YES;
-
-    NSLog(@"✅ Writer initialized");
-}
-
-- (void)writePixelBuffer:(CVPixelBufferRef)pixelBuffer {
-    if (!g_assetWriterInput.isReadyForMoreMediaData) {
-        return;
-    }
-
-    // Create time for this frame
-    CMTime frameTime = CMTimeMakeWithSeconds(g_frameCount / 30.0, 600);
-    g_frameCount++;
-
-    // Write the frame
-    BOOL success = [g_pixelBufferAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:frameTime];
-
-    if (success) {
-        NSLog(@"✅ Frame %d written", g_frameCount);
     } else {
-        NSLog(@"
+        NSLog(@"✅ Stream stopped cleanly");
     }
+
+    [ScreenCaptureKitRecorder finalizeRecording];
 }
 @end
 
 @implementation ScreenCaptureKitRecorder
 
 + (BOOL)isScreenCaptureKitAvailable {
-    if (@available(macOS
-        return [SCShareableContent class] != nil && [SCStream class] != nil;
+    if (@available(macOS 15.0, *)) {
+        return [SCShareableContent class] != nil && [SCStream class] != nil && [SCRecordingOutput class] != nil;
     }
     return NO;
 }
@@ -120,15 +41,18 @@ static int g_frameCount = 0;
     }
 
     g_outputPath = config[@"outputPath"];
-    g_frameCount = 0;
 
-
+    // Extract configuration options
+    NSNumber *displayId = config[@"displayId"];
+    NSNumber *windowId = config[@"windowId"];
+    NSDictionary *captureRect = config[@"captureRect"];
+    NSNumber *captureCursor = config[@"captureCursor"];
+    NSNumber *includeMicrophone = config[@"includeMicrophone"];
+    NSNumber *includeSystemAudio = config[@"includeSystemAudio"];
 
-
-
-
-        return NO;
-    }
+    NSLog(@"🎬 Starting PURE ScreenCaptureKit recording (NO AVFoundation)");
+    NSLog(@"🔧 Config: cursor=%@ mic=%@ system=%@ display=%@ window=%@ crop=%@",
+          captureCursor, includeMicrophone, includeSystemAudio, displayId, windowId, captureRect);
 
     // Get shareable content
     [SCShareableContent getShareableContentWithCompletionHandler:^(SCShareableContent *content, NSError *contentError) {
@@ -137,69 +61,183 @@ static int g_frameCount = 0;
             return;
         }
 
-        NSLog(@"✅ Got %lu displays",
+        NSLog(@"✅ Got %lu displays, %lu windows for pure recording",
+              content.displays.count, content.windows.count);
 
-
-
-
-        }
+        SCContentFilter *filter = nil;
+        NSInteger recordingWidth = 0;
+        NSInteger recordingHeight = 0;
 
-        //
-
-
+        // WINDOW RECORDING
+        if (windowId && [windowId integerValue] != 0) {
+            SCRunningApplication *targetApp = nil;
+            SCWindow *targetWindow = nil;
+
+            for (SCWindow *window in content.windows) {
+                if (window.windowID == [windowId unsignedIntValue]) {
+                    targetWindow = window;
+                    targetApp = window.owningApplication;
+                    break;
+                }
+            }
+
+            if (targetWindow && targetApp) {
+                NSLog(@"🪟 Recording window: %@ (%ux%u)",
+                      targetWindow.title, (unsigned)targetWindow.frame.size.width, (unsigned)targetWindow.frame.size.height);
+                filter = [[SCContentFilter alloc] initWithDesktopIndependentWindow:targetWindow];
+                recordingWidth = (NSInteger)targetWindow.frame.size.width;
+                recordingHeight = (NSInteger)targetWindow.frame.size.height;
+            } else {
+                NSLog(@"❌ Window ID %@ not found", windowId);
+                return;
+            }
+        }
+        // DISPLAY RECORDING
+        else {
+            SCDisplay *targetDisplay = nil;
+
+            if (displayId && [displayId integerValue] != 0) {
+                // Find specific display
+                for (SCDisplay *display in content.displays) {
+                    if (display.displayID == [displayId unsignedIntValue]) {
+                        targetDisplay = display;
+                        break;
+                    }
+                }
+            } else {
+                // Use first display
+                targetDisplay = content.displays.firstObject;
+            }
+
+            if (!targetDisplay) {
+                NSLog(@"❌ Display not found");
+                return;
+            }
+
+            NSLog(@"🖥️ Recording display %u (%dx%d)",
+                  targetDisplay.displayID, (int)targetDisplay.width, (int)targetDisplay.height);
+            filter = [[SCContentFilter alloc] initWithDisplay:targetDisplay excludingWindows:@[]];
+            recordingWidth = targetDisplay.width;
+            recordingHeight = targetDisplay.height;
+        }
 
-        //
-
+        // CROP AREA SUPPORT - Adjust dimensions and source rect
+        if (captureRect && captureRect[@"width"] && captureRect[@"height"]) {
+            CGFloat cropWidth = [captureRect[@"width"] doubleValue];
+            CGFloat cropHeight = [captureRect[@"height"] doubleValue];
+
+            if (cropWidth > 0 && cropHeight > 0) {
+                NSLog(@"🔲 Crop area specified: %.0fx%.0f at (%.0f,%.0f)",
+                      cropWidth, cropHeight,
+                      [captureRect[@"x"] doubleValue], [captureRect[@"y"] doubleValue]);
+                recordingWidth = (NSInteger)cropWidth;
+                recordingHeight = (NSInteger)cropHeight;
+            }
+        }
 
-        // Configure stream
+        // Configure stream with extracted options
         SCStreamConfiguration *streamConfig = [[SCStreamConfiguration alloc] init];
-
-
-        NSInteger targetWidth = MIN(display.width, 1920);
-        NSInteger targetHeight = MIN(display.height, 1080);
-
-        streamConfig.width = targetWidth;
-        streamConfig.height = targetHeight;
+        streamConfig.width = recordingWidth;
+        streamConfig.height = recordingHeight;
         streamConfig.minimumFrameInterval = CMTimeMake(1, 30); // 30 FPS
         streamConfig.pixelFormat = kCVPixelFormatType_32BGRA;
-        streamConfig.
-
+        streamConfig.scalesToFit = NO;
+
+        // Apply crop area using sourceRect
+        if (captureRect && captureRect[@"x"] && captureRect[@"y"] && captureRect[@"width"] && captureRect[@"height"]) {
+            CGFloat cropX = [captureRect[@"x"] doubleValue];
+            CGFloat cropY = [captureRect[@"y"] doubleValue];
+            CGFloat cropWidth = [captureRect[@"width"] doubleValue];
+            CGFloat cropHeight = [captureRect[@"height"] doubleValue];
+
+            if (cropWidth > 0 && cropHeight > 0) {
+                CGRect sourceRect = CGRectMake(cropX, cropY, cropWidth, cropHeight);
+                streamConfig.sourceRect = sourceRect;
+                NSLog(@"✂️ Crop sourceRect applied: (%.0f,%.0f) %.0fx%.0f", cropX, cropY, cropWidth, cropHeight);
+            }
+        }
+
+        // CURSOR SUPPORT
+        BOOL shouldShowCursor = captureCursor ? [captureCursor boolValue] : YES;
+        streamConfig.showsCursor = shouldShowCursor;
+
+        NSLog(@"🎥 Pure ScreenCapture config: %ldx%ld @ 30fps, cursor=%d",
+              recordingWidth, recordingHeight, shouldShowCursor);
+
+        // AUDIO SUPPORT - Configure stream audio settings
+        BOOL shouldCaptureMic = includeMicrophone ? [includeMicrophone boolValue] : NO;
+        BOOL shouldCaptureSystemAudio = includeSystemAudio ? [includeSystemAudio boolValue] : NO;
+
+        if (@available(macOS 13.0, *)) {
+            if (shouldCaptureMic || shouldCaptureSystemAudio) {
+                streamConfig.capturesAudio = YES;
+                streamConfig.sampleRate = 44100;
+                streamConfig.channelCount = 2;
+                NSLog(@"🎵 Audio enabled: mic=%d system=%d", shouldCaptureMic, shouldCaptureSystemAudio);
+            } else {
+                streamConfig.capturesAudio = NO;
+                NSLog(@"🔇 Audio disabled");
+            }
+        }
+
+        // Create pure ScreenCaptureKit recording output
+        NSURL *outputURL = [NSURL fileURLWithPath:g_outputPath];
+        if (@available(macOS 15.0, *)) {
+            // Create recording output configuration
+            SCRecordingOutputConfiguration *recordingConfig = [[SCRecordingOutputConfiguration alloc] init];
+            recordingConfig.outputURL = outputURL;
+            recordingConfig.videoCodecType = AVVideoCodecTypeH264;
+
+            // Audio configuration - using available properties
+            // Note: Specific audio routing handled by ScreenCaptureKit automatically
+
+            // Create recording output with correct initializer
+            g_recordingOutput = [[SCRecordingOutput alloc] initWithConfiguration:recordingConfig
+                                                                        delegate:nil];
+            NSLog(@"🔧 Created SCRecordingOutput with audio config: mic=%d system=%d",
+                  shouldCaptureMic, shouldCaptureSystemAudio);
+        }
+
+        if (!g_recordingOutput) {
+            NSLog(@"❌ Failed to create SCRecordingOutput");
+            return;
+        }
 
-        NSLog(@"
+        NSLog(@"✅ Pure ScreenCaptureKit recording output created");
 
-        // Create
-        g_streamDelegate = [[
-        g_streamOutput = [[ModernStreamOutput alloc] init];
+        // Create delegate
+        g_streamDelegate = [[PureScreenCaptureDelegate alloc] init];
 
-        // Create and
+        // Create and configure stream
         g_stream = [[SCStream alloc] initWithFilter:filter configuration:streamConfig delegate:g_streamDelegate];
 
         if (!g_stream) {
-            NSLog(@"❌ Failed to create stream");
+            NSLog(@"❌ Failed to create pure stream");
             return;
         }
 
-        // Add output
+        // Add recording output directly to stream
         NSError *outputError = nil;
-        BOOL outputAdded =
-
-
-
+        BOOL outputAdded = NO;
+
+        if (@available(macOS 15.0, *)) {
+            outputAdded = [g_stream addRecordingOutput:g_recordingOutput error:&outputError];
+        }
 
         if (!outputAdded || outputError) {
-            NSLog(@"❌
+            NSLog(@"❌ Failed to add recording output: %@", outputError);
             return;
         }
 
-        NSLog(@"✅
+        NSLog(@"✅ Pure recording output added to stream");
 
-        // Start capture
+        // Start capture with recording
         [g_stream startCaptureWithCompletionHandler:^(NSError *startError) {
             if (startError) {
-                NSLog(@"❌
+                NSLog(@"❌ Failed to start pure capture: %@", startError);
                 g_isRecording = NO;
             } else {
-                NSLog(@"
+                NSLog(@"🎉 PURE ScreenCaptureKit recording started successfully!");
                 g_isRecording = YES;
             }
         }];
@@ -213,14 +251,14 @@ static int g_frameCount = 0;
         return;
     }
 
-    NSLog(@"🛑 Stopping recording");
+    NSLog(@"🛑 Stopping pure ScreenCaptureKit recording");
 
     [g_stream stopCaptureWithCompletionHandler:^(NSError *error) {
         if (error) {
             NSLog(@"❌ Stop error: %@", error);
         }
-        NSLog(@"✅
-        [ScreenCaptureKitRecorder
+        NSLog(@"✅ Pure stream stopped");
+        [ScreenCaptureKitRecorder finalizeRecording];
     }];
 }
 
@@ -229,90 +267,36 @@ static int g_frameCount = 0;
 }
 
 + (BOOL)setupVideoWriter {
-
-
-    NSLog(@"🔧 Setting up video writer");
-
-    NSURL *outputURL = [NSURL fileURLWithPath:g_outputPath];
-    NSError *error = nil;
-
-    g_assetWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:AVFileTypeQuickTimeMovie error:&error];
-
-    if (error || !g_assetWriter) {
-        NSLog(@"❌ Writer creation error: %@", error);
-        return NO;
-    }
-
-    // Video settings
-    NSDictionary *videoSettings = @{
-        AVVideoCodecKey: AVVideoCodecTypeH264,
-        AVVideoWidthKey: @1920,
-        AVVideoHeightKey: @1080,
-        AVVideoCompressionPropertiesKey: @{
-            AVVideoAverageBitRateKey: @(5000000), // 5 Mbps
-            AVVideoMaxKeyFrameIntervalKey: @30
-        }
-    };
-
-    g_assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
-    g_assetWriterInput.expectsMediaDataInRealTime = YES;
-
-    // Pixel buffer adaptor
-    NSDictionary *pixelBufferAttributes = @{
-        (NSString*)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA),
-        (NSString*)kCVPixelBufferWidthKey: @1920,
-        (NSString*)kCVPixelBufferHeightKey: @1080
-    };
-
-    g_pixelBufferAdaptor = [AVAssetWriterInputPixelBufferAdaptor
-        assetWriterInputPixelBufferAdaptorWithAssetWriterInput:g_assetWriterInput
-        sourcePixelBufferAttributes:pixelBufferAttributes];
-
-    if ([g_assetWriter canAddInput:g_assetWriterInput]) {
-        [g_assetWriter addInput:g_assetWriterInput];
-        NSLog(@"✅ Video writer ready");
-        return YES;
-    } else {
-        NSLog(@"❌ Cannot add input to writer");
-        return NO;
-    }
+    // No setup needed - SCRecordingOutput handles everything
+    return YES;
 }
 
-+ (void)
-    NSLog(@"🎬 Finalizing
++ (void)finalizeRecording {
+    NSLog(@"🎬 Finalizing pure ScreenCaptureKit recording");
 
     g_isRecording = NO;
 
-    if (
-
-
-        return;
+    if (g_recordingOutput) {
+        // SCRecordingOutput finalizes automatically
+        NSLog(@"✅ Pure recording output finalized");
     }
 
-    [
-
-
-
-
-
-        NSLog(@"❌ Write failed: %@", g_assetWriter.error);
-    }
-
-    [ScreenCaptureKitRecorder cleanupVideoWriter];
-    }];
+    [ScreenCaptureKitRecorder cleanupVideoWriter];
+}
+
++ (void)finalizeVideoWriter {
+    // Alias for finalizeRecording to maintain compatibility
+    [ScreenCaptureKitRecorder finalizeRecording];
 }
 
 + (void)cleanupVideoWriter {
-    g_assetWriter = nil;
-    g_assetWriterInput = nil;
-    g_pixelBufferAdaptor = nil;
-    g_writerStarted = NO;
-    g_frameCount = 0;
     g_stream = nil;
+    g_recordingOutput = nil;
     g_streamDelegate = nil;
-
+    g_isRecording = NO;
+    g_outputPath = nil;
 
-    NSLog(@"🧹
+    NSLog(@"🧹 Pure ScreenCaptureKit cleanup complete");
 }
 
 @end
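The rewritten recorder also reads a `captureRect` dictionary (x, y, width, height) and applies it through `SCStreamConfiguration.sourceRect`. How that option is spelled on the JavaScript side is not shown in this diff, so the sketch below assumes a hypothetical pass-through `captureRect` option:

```js
const MacRecorder = require('./index');
const recorder = new MacRecorder();

async function recordCroppedRegion() {
  // `captureRect` is an assumed option name; the native layer maps the
  // rectangle to SCStreamConfiguration.sourceRect and the output dimensions.
  const success = await recorder.startRecording('./test-output/cropped.mov', {
    captureRect: { x: 0, y: 0, width: 1280, height: 720 },
    captureCursor: true
  });

  if (success) {
    await new Promise(resolve => setTimeout(resolve, 2000));
    await recorder.stopRecording();
  }
}

recordCroppedRegion();
```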