node-mac-recorder 2.13.12 → 2.14.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/settings.local.json +2 -1
- package/package.json +1 -1
- package/src/mac_recorder.mm +5 -3
- package/src/screen_capture_kit.h +3 -2
- package/src/screen_capture_kit.mm +165 -209
package/.claude/settings.local.json
CHANGED
@@ -30,7 +30,8 @@
       "Bash(ffprobe:*)",
       "Bash(ffmpeg:*)",
       "WebSearch",
-      "Bash(ELECTRON_RUN_AS_NODE=1 node -e \"\nconsole.log(''🔍 Testing with proper permissions and Electron env'');\nconst MacRecorder = require(''./index'');\nconst recorder = new MacRecorder();\n\nasync function test() {\n try {\n const outputPath = ''./test-output/proper-test.mov'';\n console.log(''📹 Starting recording...'');\n const success = await recorder.startRecording(outputPath, {\n captureCursor: true,\n includeMicrophone: false,\n includeSystemAudio: false\n });\n \n if (success) {\n console.log(''✅ Recording started - waiting 2 seconds'');\n await new Promise(resolve => setTimeout(resolve, 2000));\n console.log(''🛑 Stopping recording...'');\n await recorder.stopRecording();\n console.log(''✅ Test completed'');\n } else {\n console.log(''❌ Recording start failed'');\n }\n } catch (error) {\n console.log(''❌ Error:'', error.message);\n }\n}\n\ntest();\n\")"
+      "Bash(ELECTRON_RUN_AS_NODE=1 node -e \"\nconsole.log(''🔍 Testing with proper permissions and Electron env'');\nconst MacRecorder = require(''./index'');\nconst recorder = new MacRecorder();\n\nasync function test() {\n try {\n const outputPath = ''./test-output/proper-test.mov'';\n console.log(''📹 Starting recording...'');\n const success = await recorder.startRecording(outputPath, {\n captureCursor: true,\n includeMicrophone: false,\n includeSystemAudio: false\n });\n \n if (success) {\n console.log(''✅ Recording started - waiting 2 seconds'');\n await new Promise(resolve => setTimeout(resolve, 2000));\n console.log(''🛑 Stopping recording...'');\n await recorder.stopRecording();\n console.log(''✅ Test completed'');\n } else {\n console.log(''❌ Recording start failed'');\n }\n } catch (error) {\n console.log(''❌ Error:'', error.message);\n }\n}\n\ntest();\n\")",
+      "Bash(ELECTRON_RUN_AS_NODE=1 node -e \"\nconsole.log(''''🔍 Debugging frame writing...'''');\nconst MacRecorder = require(''''./index'''');\nconst recorder = new MacRecorder();\n\nasync function debugFrameWriting() {\n try {\n const outputPath = ''''./test-output/frame-debug.mov'''';\n console.log(''''📹 Starting debug test...'''');\n \n const success = await recorder.startRecording(outputPath);\n \n if (success) {\n console.log(''''⏱️ Recording for 2 seconds...'''');\n await new Promise(resolve => setTimeout(resolve, 2000));\n \n console.log(''''🛑 Stopping...'''');\n await recorder.stopRecording();\n \n // Wait for finalization\n await new Promise(resolve => setTimeout(resolve, 1000));\n \n } else {\n console.log(''''❌ Failed to start'''');\n }\n } catch (error) {\n console.log(''''❌ Error:'''', error);\n }\n}\n\ndebugFrameWriting();\n\")"
     ],
     "deny": []
   }
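For reference, the newly allow-listed debug command above is the following Node script, shown here de-escaped for readability; it uses only the recorder API already exercised by the existing test entry:

```js
// Readable form of the frame-writing debug script added to the allowlist above.
const MacRecorder = require('./index');
const recorder = new MacRecorder();

async function debugFrameWriting() {
  try {
    const outputPath = './test-output/frame-debug.mov';
    console.log('📹 Starting debug test...');

    const success = await recorder.startRecording(outputPath);

    if (success) {
      console.log('⏱️ Recording for 2 seconds...');
      await new Promise((resolve) => setTimeout(resolve, 2000));

      console.log('🛑 Stopping...');
      await recorder.stopRecording();

      // Wait for finalization
      await new Promise((resolve) => setTimeout(resolve, 1000));
    } else {
      console.log('❌ Failed to start');
    }
  } catch (error) {
    console.log('❌ Error:', error);
  }
}

debugFrameWriting();
```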
package/package.json
CHANGED
package/src/mac_recorder.mm
CHANGED
@@ -76,6 +76,7 @@ Napi::Value StartRecording(const Napi::CallbackInfo& info) {
     bool includeMicrophone = false; // Microphone off by default
     bool includeSystemAudio = true; // System audio on by default
     CGDirectDisplayID displayID = CGMainDisplayID(); // Main display by default
+    uint32_t windowID = 0; // Default no window selection
     NSString *audioDeviceId = nil; // Default audio device ID
     NSString *systemAudioDeviceId = nil; // System audio device ID
 
@@ -154,10 +155,10 @@ Napi::Value StartRecording(const Napi::CallbackInfo& info) {
         }
     }
 
-    // Window ID
+    // Window ID support
     if (options.Has("windowId") && !options.Get("windowId").IsNull()) {
-
-
+        windowID = options.Get("windowId").As<Napi::Number>().Uint32Value();
+        NSLog(@"🪟 Window ID specified: %u", windowID);
     }
     }
 
@@ -192,6 +193,7 @@ Napi::Value StartRecording(const Napi::CallbackInfo& info) {
     // Create configuration for ScreenCaptureKit
     NSMutableDictionary *sckConfig = [NSMutableDictionary dictionary];
     sckConfig[@"displayId"] = @(displayID);
+    sckConfig[@"windowId"] = @(windowID);
     sckConfig[@"captureCursor"] = @(captureCursor);
     sckConfig[@"includeSystemAudio"] = @(includeSystemAudio);
     sckConfig[@"includeMicrophone"] = @(includeMicrophone);
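The native binding now reads a `windowId` option and forwards it to the ScreenCaptureKit layer. A minimal usage sketch from the JavaScript side, assuming the package's index.js wrapper (not part of this diff) passes the options object through unchanged to the native StartRecording shown above:

```js
// Sketch: record a single window by its window ID (assumes index.js forwards
// the options object to the native binding, which reads options.Get("windowId")).
const MacRecorder = require('node-mac-recorder');
const recorder = new MacRecorder();

async function recordWindow(windowId) {
  // windowId === 0 (or omitting it) falls back to display recording,
  // matching the native default `uint32_t windowID = 0`.
  const started = await recorder.startRecording('./test-output/window.mov', {
    windowId,            // new in 2.14.0
    captureCursor: true,
    includeMicrophone: false,
    includeSystemAudio: false,
  });
  if (!started) throw new Error('Recording failed to start');

  await new Promise((resolve) => setTimeout(resolve, 2000));
  await recorder.stopRecording();
}

recordWindow(12345); // hypothetical window ID; obtain a real one from the OS
```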
package/src/screen_capture_kit.h
CHANGED
@@ -1,8 +1,8 @@
 #import <Foundation/Foundation.h>
 #import <ScreenCaptureKit/ScreenCaptureKit.h>
-
+// NO AVFoundation - Pure ScreenCaptureKit implementation
 
-API_AVAILABLE(macos(12.3))
+API_AVAILABLE(macos(15.0))
 @interface ScreenCaptureKitRecorder : NSObject
 
 + (BOOL)isScreenCaptureKitAvailable;
@@ -12,6 +12,7 @@ API_AVAILABLE(macos(12.3))
 + (void)stopRecording;
 + (BOOL)isRecording;
 + (BOOL)setupVideoWriter;
++ (void)finalizeRecording;
 + (void)finalizeVideoWriter;
 + (void)cleanupVideoWriter;
 
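The availability bump from `API_AVAILABLE(macos(12.3))` to `API_AVAILABLE(macos(15.0))` means the new SCRecordingOutput-based recorder only reports itself available on macOS 15 or newer. A hedged sketch of a version guard a consumer might add before recording, assuming the usual Darwin-to-macOS mapping (Darwin 24.x corresponds to macOS 15); the package may also expose its own availability check, which is not shown in this diff:

```js
// Sketch: skip recording on macOS versions older than 15, where the
// pure-ScreenCaptureKit path (SCRecordingOutput) is unavailable.
const os = require('os');

function supportsPureScreenCaptureKit() {
  if (process.platform !== 'darwin') return false;
  // os.release() returns the Darwin kernel version, e.g. '24.1.0' on macOS 15.
  const darwinMajor = parseInt(os.release().split('.')[0], 10);
  return darwinMajor >= 24; // assumption: Darwin 24 <=> macOS 15
}

if (!supportsPureScreenCaptureKit()) {
  console.warn('⚠️ node-mac-recorder 2.14.0 requires macOS 15+ for recording');
}
```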
package/src/screen_capture_kit.mm
CHANGED
@@ -1,114 +1,35 @@
 #import "screen_capture_kit.h"
-#import <CoreImage/CoreImage.h>
 
-
-static
-static
+// Pure ScreenCaptureKit implementation - NO AVFoundation
+static SCStream * API_AVAILABLE(macos(12.3)) g_stream = nil;
+static SCRecordingOutput * API_AVAILABLE(macos(15.0)) g_recordingOutput = nil;
+static id<SCStreamDelegate> API_AVAILABLE(macos(12.3)) g_streamDelegate = nil;
 static BOOL g_isRecording = NO;
-
-// Modern ScreenCaptureKit writer
-static AVAssetWriter *g_assetWriter = nil;
-static AVAssetWriterInput *g_assetWriterInput = nil;
-static AVAssetWriterInputPixelBufferAdaptor *g_pixelBufferAdaptor = nil;
 static NSString *g_outputPath = nil;
-static BOOL g_writerStarted = NO;
-static int g_frameCount = 0;
 
-@interface
+@interface PureScreenCaptureDelegate : NSObject <SCStreamDelegate>
 @end
 
-@implementation
-- (void)stream:(SCStream *)stream didStopWithError:(NSError *)error {
-    NSLog(@"🛑
+@implementation PureScreenCaptureDelegate
+- (void)stream:(SCStream * API_AVAILABLE(macos(12.3)))stream didStopWithError:(NSError *)error API_AVAILABLE(macos(12.3)) {
+    NSLog(@"🛑 Pure ScreenCapture stream stopped");
     g_isRecording = NO;
 
     if (error) {
         NSLog(@"❌ Stream error: %@", error);
-    }
-
-    [ScreenCaptureKitRecorder finalizeVideoWriter];
-}
-@end
-
-@interface ModernStreamOutput : NSObject <SCStreamOutput>
-@end
-
-@implementation ModernStreamOutput
-- (void)stream:(SCStream *)stream didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer ofType:(SCStreamOutputType)type {
-    if (!g_isRecording) return;
-
-    // Only process screen frames
-    if (type != SCStreamOutputTypeScreen) return;
-
-    // Validate sample buffer
-    if (!sampleBuffer || !CMSampleBufferIsValid(sampleBuffer)) {
-        NSLog(@"⚠️ Invalid sample buffer");
-        return;
-    }
-
-    // Get pixel buffer
-    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
-    if (!pixelBuffer) {
-        NSLog(@"⚠️ No pixel buffer in sample");
-        return;
-    }
-
-    // Initialize writer on first frame
-    static dispatch_once_t onceToken;
-    dispatch_once(&onceToken, ^{
-        [self initializeWriterWithSampleBuffer:sampleBuffer];
-    });
-
-    if (!g_writerStarted) {
-        return;
-    }
-
-    // Write frame
-    [self writePixelBuffer:pixelBuffer];
-}
-
-- (void)initializeWriterWithSampleBuffer:(CMSampleBufferRef)sampleBuffer {
-    if (!g_assetWriter) return;
-
-    NSLog(@"🎬 Initializing writer with first sample");
-
-    CMTime startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
-    if (!CMTIME_IS_VALID(startTime)) {
-        startTime = CMTimeMakeWithSeconds(0, 600);
-    }
-
-    [g_assetWriter startWriting];
-    [g_assetWriter startSessionAtSourceTime:startTime];
-    g_writerStarted = YES;
-
-    NSLog(@"✅ Writer initialized");
-}
-
-- (void)writePixelBuffer:(CVPixelBufferRef)pixelBuffer {
-    if (!g_assetWriterInput.isReadyForMoreMediaData) {
-        return;
-    }
-
-    // Create time for this frame
-    CMTime frameTime = CMTimeMakeWithSeconds(g_frameCount / 30.0, 600);
-    g_frameCount++;
-
-    // Write the frame
-    BOOL success = [g_pixelBufferAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:frameTime];
-
-    if (success) {
-        NSLog(@"✅ Frame %d written", g_frameCount);
     } else {
-        NSLog(@"
+        NSLog(@"✅ Stream stopped cleanly");
     }
+
+    [ScreenCaptureKitRecorder finalizeRecording];
 }
 @end
 
 @implementation ScreenCaptureKitRecorder
 
 + (BOOL)isScreenCaptureKitAvailable {
-    if (@available(macOS
-        return [SCShareableContent class] != nil && [SCStream class] != nil;
+    if (@available(macOS 15.0, *)) {
+        return [SCShareableContent class] != nil && [SCStream class] != nil && [SCRecordingOutput class] != nil;
     }
     return NO;
 }
@@ -120,15 +41,18 @@ static int g_frameCount = 0
     }
 
     g_outputPath = config[@"outputPath"];
-    g_frameCount = 0;
 
-
+    // Extract configuration options
+    NSNumber *displayId = config[@"displayId"];
+    NSNumber *windowId = config[@"windowId"];
+    NSValue *captureAreaValue = config[@"captureArea"];
+    NSNumber *captureCursor = config[@"captureCursor"];
+    NSNumber *includeMicrophone = config[@"includeMicrophone"];
+    NSNumber *includeSystemAudio = config[@"includeSystemAudio"];
 
-
-
-
-        return NO;
-    }
+    NSLog(@"🎬 Starting PURE ScreenCaptureKit recording (NO AVFoundation)");
+    NSLog(@"🔧 Config: cursor=%@ mic=%@ system=%@ display=%@ window=%@",
+          captureCursor, includeMicrophone, includeSystemAudio, displayId, windowId);
 
     // Get shareable content
     [SCShareableContent getShareableContentWithCompletionHandler:^(SCShareableContent *content, NSError *contentError) {
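This hunk defines the full set of keys the native layer now reads from the config dictionary. Assuming the JavaScript options object maps onto these keys one-to-one (the JS wrapper is not part of this diff, and the expected shape of `captureArea` is not shown), the corresponding call would look roughly like:

```js
// Sketch: options mirroring the config keys extracted above.
const MacRecorder = require('node-mac-recorder');
const recorder = new MacRecorder();

recorder.startRecording('./test-output/full-options.mov', {
  displayId: 0,            // 0 / omitted -> first available display
  windowId: 0,             // non-zero -> window recording
  // captureArea: ...,     // read natively as an NSValue; JS shape not shown in this diff
  captureCursor: true,
  includeMicrophone: false,
  includeSystemAudio: true,
}).then((ok) => console.log(ok ? '✅ started' : '❌ failed'));
```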
@@ -137,69 +61,155 @@ static int g_frameCount = 0
             return;
         }
 
-        NSLog(@"✅ Got %lu displays",
+        NSLog(@"✅ Got %lu displays, %lu windows for pure recording",
+              content.displays.count, content.windows.count);
 
-
-
-
+        SCContentFilter *filter = nil;
+        NSInteger recordingWidth = 0;
+        NSInteger recordingHeight = 0;
+
+        // WINDOW RECORDING
+        if (windowId && [windowId integerValue] != 0) {
+            SCRunningApplication *targetApp = nil;
+            SCWindow *targetWindow = nil;
+
+            for (SCWindow *window in content.windows) {
+                if (window.windowID == [windowId unsignedIntValue]) {
+                    targetWindow = window;
+                    targetApp = window.owningApplication;
+                    break;
+                }
+            }
+
+            if (targetWindow && targetApp) {
+                NSLog(@"🪟 Recording window: %@ (%ux%u)",
+                      targetWindow.title, (unsigned)targetWindow.frame.size.width, (unsigned)targetWindow.frame.size.height);
+                filter = [[SCContentFilter alloc] initWithDesktopIndependentWindow:targetWindow];
+                recordingWidth = (NSInteger)targetWindow.frame.size.width;
+                recordingHeight = (NSInteger)targetWindow.frame.size.height;
+            } else {
+                NSLog(@"❌ Window ID %@ not found", windowId);
+                return;
+            }
+        }
+        // DISPLAY RECORDING
+        else {
+            SCDisplay *targetDisplay = nil;
+
+            if (displayId && [displayId integerValue] != 0) {
+                // Find specific display
+                for (SCDisplay *display in content.displays) {
+                    if (display.displayID == [displayId unsignedIntValue]) {
+                        targetDisplay = display;
+                        break;
+                    }
+                }
+            } else {
+                // Use first display
+                targetDisplay = content.displays.firstObject;
+            }
+
+            if (!targetDisplay) {
+                NSLog(@"❌ Display not found");
+                return;
+            }
+
+            NSLog(@"🖥️ Recording display %u (%dx%d)",
+                  targetDisplay.displayID, (int)targetDisplay.width, (int)targetDisplay.height);
+            filter = [[SCContentFilter alloc] initWithDisplay:targetDisplay excludingWindows:@[]];
+            recordingWidth = targetDisplay.width;
+            recordingHeight = targetDisplay.height;
         }
 
-        //
-
-
+        // Configure stream with extracted options
+        SCStreamConfiguration *streamConfig = [[SCStreamConfiguration alloc] init];
+        streamConfig.width = recordingWidth;
+        streamConfig.height = recordingHeight;
+        streamConfig.minimumFrameInterval = CMTimeMake(1, 30); // 30 FPS
+        streamConfig.pixelFormat = kCVPixelFormatType_32BGRA;
+        streamConfig.scalesToFit = NO;
 
-        //
-
+        // CURSOR SUPPORT
+        BOOL shouldShowCursor = captureCursor ? [captureCursor boolValue] : YES;
+        streamConfig.showsCursor = shouldShowCursor;
 
-
-
+        NSLog(@"🎥 Pure ScreenCapture config: %ldx%ld @ 30fps, cursor=%d",
+              recordingWidth, recordingHeight, shouldShowCursor);
 
-        //
-
-
+        // AUDIO SUPPORT - Configure stream audio settings
+        BOOL shouldCaptureMic = includeMicrophone ? [includeMicrophone boolValue] : NO;
+        BOOL shouldCaptureSystemAudio = includeSystemAudio ? [includeSystemAudio boolValue] : NO;
 
-
-
-
-
-
-
+        if (@available(macOS 13.0, *)) {
+            if (shouldCaptureMic || shouldCaptureSystemAudio) {
+                streamConfig.capturesAudio = YES;
+                streamConfig.sampleRate = 44100;
+                streamConfig.channelCount = 2;
+                NSLog(@"🎵 Audio enabled: mic=%d system=%d", shouldCaptureMic, shouldCaptureSystemAudio);
+            } else {
+                streamConfig.capturesAudio = NO;
+                NSLog(@"🔇 Audio disabled");
+            }
+        }
 
-
+        // Create pure ScreenCaptureKit recording output
+        NSURL *outputURL = [NSURL fileURLWithPath:g_outputPath];
+        if (@available(macOS 15.0, *)) {
+            // Create recording output configuration
+            SCRecordingOutputConfiguration *recordingConfig = [[SCRecordingOutputConfiguration alloc] init];
+            recordingConfig.outputURL = outputURL;
+            recordingConfig.videoCodecType = AVVideoCodecTypeH264;
+
+            // Audio configuration - using available properties
+            // Note: Specific audio routing handled by ScreenCaptureKit automatically
+
+            // Create recording output with correct initializer
+            g_recordingOutput = [[SCRecordingOutput alloc] initWithConfiguration:recordingConfig
+                                                                        delegate:nil];
+            NSLog(@"🔧 Created SCRecordingOutput with audio config: mic=%d system=%d",
+                  shouldCaptureMic, shouldCaptureSystemAudio);
+        }
+
+        if (!g_recordingOutput) {
+            NSLog(@"❌ Failed to create SCRecordingOutput");
+            return;
+        }
+
+        NSLog(@"✅ Pure ScreenCaptureKit recording output created");
 
-        // Create
-        g_streamDelegate = [[
-        g_streamOutput = [[ModernStreamOutput alloc] init];
+        // Create delegate
+        g_streamDelegate = [[PureScreenCaptureDelegate alloc] init];
 
-        // Create and
+        // Create and configure stream
         g_stream = [[SCStream alloc] initWithFilter:filter configuration:streamConfig delegate:g_streamDelegate];
 
         if (!g_stream) {
-            NSLog(@"❌ Failed to create stream");
+            NSLog(@"❌ Failed to create pure stream");
             return;
         }
 
-        // Add output
+        // Add recording output directly to stream
         NSError *outputError = nil;
-        BOOL outputAdded =
-
-
-
+        BOOL outputAdded = NO;
+
+        if (@available(macOS 15.0, *)) {
+            outputAdded = [g_stream addRecordingOutput:g_recordingOutput error:&outputError];
+        }
 
         if (!outputAdded || outputError) {
-            NSLog(@"❌
+            NSLog(@"❌ Failed to add recording output: %@", outputError);
             return;
         }
 
-        NSLog(@"✅
+        NSLog(@"✅ Pure recording output added to stream");
 
-        // Start capture
+        // Start capture with recording
         [g_stream startCaptureWithCompletionHandler:^(NSError *startError) {
             if (startError) {
-                NSLog(@"❌
+                NSLog(@"❌ Failed to start pure capture: %@", startError);
                 g_isRecording = NO;
             } else {
-                NSLog(@"
+                NSLog(@"🎉 PURE ScreenCaptureKit recording started successfully!");
                 g_isRecording = YES;
             }
         }];
@@ -213,14 +223,14 @@ static int g_frameCount = 0
         return;
     }
 
-    NSLog(@"🛑 Stopping recording");
+    NSLog(@"🛑 Stopping pure ScreenCaptureKit recording");
 
     [g_stream stopCaptureWithCompletionHandler:^(NSError *error) {
         if (error) {
             NSLog(@"❌ Stop error: %@", error);
         }
-        NSLog(@"✅
-        [ScreenCaptureKitRecorder
+        NSLog(@"✅ Pure stream stopped");
+        [ScreenCaptureKitRecorder finalizeRecording];
     }];
 }
 
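Since finalization is now delegated to SCRecordingOutput inside the stop completion handler, the output file may still be closing for a moment after `stopRecording()` resolves. The frame-debug script added to the allowlist above already accounts for this with a short wait; a condensed sketch:

```js
// Sketch: stop and give SCRecordingOutput a moment to finalize the file,
// mirroring the 1-second wait used by the frame-debug script above.
async function stopAndFinalize(recorder) {
  await recorder.stopRecording();
  await new Promise((resolve) => setTimeout(resolve, 1000)); // wait for finalization
}

module.exports = { stopAndFinalize };
```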
@@ -229,90 +239,36 @@ static int g_frameCount = 0
 }
 
 + (BOOL)setupVideoWriter {
-
-
-    NSLog(@"🔧 Setting up video writer");
-
-    NSURL *outputURL = [NSURL fileURLWithPath:g_outputPath];
-    NSError *error = nil;
-
-    g_assetWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:AVFileTypeQuickTimeMovie error:&error];
-
-    if (error || !g_assetWriter) {
-        NSLog(@"❌ Writer creation error: %@", error);
-        return NO;
-    }
-
-    // Video settings
-    NSDictionary *videoSettings = @{
-        AVVideoCodecKey: AVVideoCodecTypeH264,
-        AVVideoWidthKey: @1920,
-        AVVideoHeightKey: @1080,
-        AVVideoCompressionPropertiesKey: @{
-            AVVideoAverageBitRateKey: @(5000000), // 5 Mbps
-            AVVideoMaxKeyFrameIntervalKey: @30
-        }
-    };
-
-    g_assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
-    g_assetWriterInput.expectsMediaDataInRealTime = YES;
-
-    // Pixel buffer adaptor
-    NSDictionary *pixelBufferAttributes = @{
-        (NSString*)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA),
-        (NSString*)kCVPixelBufferWidthKey: @1920,
-        (NSString*)kCVPixelBufferHeightKey: @1080
-    };
-
-    g_pixelBufferAdaptor = [AVAssetWriterInputPixelBufferAdaptor
-        assetWriterInputPixelBufferAdaptorWithAssetWriterInput:g_assetWriterInput
-        sourcePixelBufferAttributes:pixelBufferAttributes];
-
-    if ([g_assetWriter canAddInput:g_assetWriterInput]) {
-        [g_assetWriter addInput:g_assetWriterInput];
-        NSLog(@"✅ Video writer ready");
-        return YES;
-    } else {
-        NSLog(@"❌ Cannot add input to writer");
-        return NO;
-    }
+    // No setup needed - SCRecordingOutput handles everything
+    return YES;
 }
 
-+ (void)
-    NSLog(@"🎬 Finalizing
++ (void)finalizeRecording {
+    NSLog(@"🎬 Finalizing pure ScreenCaptureKit recording");
 
     g_isRecording = NO;
 
-    if (
-
-
-        return;
+    if (g_recordingOutput) {
+        // SCRecordingOutput finalizes automatically
+        NSLog(@"✅ Pure recording output finalized");
     }
 
-    [
-
-
-
-
-
-        NSLog(@"❌ Write failed: %@", g_assetWriter.error);
-    }
-
-        [ScreenCaptureKitRecorder cleanupVideoWriter];
-    }];
+    [ScreenCaptureKitRecorder cleanupVideoWriter];
+}
+
++ (void)finalizeVideoWriter {
+    // Alias for finalizeRecording to maintain compatibility
+    [ScreenCaptureKitRecorder finalizeRecording];
 }
 
 + (void)cleanupVideoWriter {
-    g_assetWriter = nil;
-    g_assetWriterInput = nil;
-    g_pixelBufferAdaptor = nil;
-    g_writerStarted = NO;
-    g_frameCount = 0;
     g_stream = nil;
+    g_recordingOutput = nil;
     g_streamDelegate = nil;
-
+    g_isRecording = NO;
+    g_outputPath = nil;
 
-    NSLog(@"🧹
+    NSLog(@"🧹 Pure ScreenCaptureKit cleanup complete");
 }
 
 @end