@rejourneyco/react-native 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/android/build.gradle.kts +135 -0
- package/android/consumer-rules.pro +10 -0
- package/android/proguard-rules.pro +1 -0
- package/android/src/main/AndroidManifest.xml +15 -0
- package/android/src/main/java/com/rejourney/RejourneyModuleImpl.kt +2981 -0
- package/android/src/main/java/com/rejourney/capture/ANRHandler.kt +206 -0
- package/android/src/main/java/com/rejourney/capture/ActivityTracker.kt +98 -0
- package/android/src/main/java/com/rejourney/capture/CaptureEngine.kt +1553 -0
- package/android/src/main/java/com/rejourney/capture/CaptureHeuristics.kt +375 -0
- package/android/src/main/java/com/rejourney/capture/CrashHandler.kt +153 -0
- package/android/src/main/java/com/rejourney/capture/MotionEvent.kt +215 -0
- package/android/src/main/java/com/rejourney/capture/SegmentUploader.kt +512 -0
- package/android/src/main/java/com/rejourney/capture/VideoEncoder.kt +773 -0
- package/android/src/main/java/com/rejourney/capture/ViewHierarchyScanner.kt +633 -0
- package/android/src/main/java/com/rejourney/capture/ViewSerializer.kt +286 -0
- package/android/src/main/java/com/rejourney/core/Constants.kt +117 -0
- package/android/src/main/java/com/rejourney/core/Logger.kt +93 -0
- package/android/src/main/java/com/rejourney/core/Types.kt +124 -0
- package/android/src/main/java/com/rejourney/lifecycle/SessionLifecycleService.kt +162 -0
- package/android/src/main/java/com/rejourney/network/DeviceAuthManager.kt +747 -0
- package/android/src/main/java/com/rejourney/network/HttpClientProvider.kt +16 -0
- package/android/src/main/java/com/rejourney/network/NetworkMonitor.kt +272 -0
- package/android/src/main/java/com/rejourney/network/UploadManager.kt +1363 -0
- package/android/src/main/java/com/rejourney/network/UploadWorker.kt +492 -0
- package/android/src/main/java/com/rejourney/privacy/PrivacyMask.kt +645 -0
- package/android/src/main/java/com/rejourney/touch/GestureClassifier.kt +233 -0
- package/android/src/main/java/com/rejourney/touch/KeyboardTracker.kt +158 -0
- package/android/src/main/java/com/rejourney/touch/TextInputTracker.kt +181 -0
- package/android/src/main/java/com/rejourney/touch/TouchInterceptor.kt +591 -0
- package/android/src/main/java/com/rejourney/utils/EventBuffer.kt +284 -0
- package/android/src/main/java/com/rejourney/utils/OEMDetector.kt +154 -0
- package/android/src/main/java/com/rejourney/utils/PerfTiming.kt +235 -0
- package/android/src/main/java/com/rejourney/utils/Telemetry.kt +297 -0
- package/android/src/main/java/com/rejourney/utils/WindowUtils.kt +84 -0
- package/android/src/newarch/java/com/rejourney/RejourneyModule.kt +187 -0
- package/android/src/newarch/java/com/rejourney/RejourneyPackage.kt +40 -0
- package/android/src/oldarch/java/com/rejourney/RejourneyModule.kt +218 -0
- package/android/src/oldarch/java/com/rejourney/RejourneyPackage.kt +23 -0
- package/ios/Capture/RJANRHandler.h +42 -0
- package/ios/Capture/RJANRHandler.m +328 -0
- package/ios/Capture/RJCaptureEngine.h +275 -0
- package/ios/Capture/RJCaptureEngine.m +2062 -0
- package/ios/Capture/RJCaptureHeuristics.h +80 -0
- package/ios/Capture/RJCaptureHeuristics.m +903 -0
- package/ios/Capture/RJCrashHandler.h +46 -0
- package/ios/Capture/RJCrashHandler.m +313 -0
- package/ios/Capture/RJMotionEvent.h +183 -0
- package/ios/Capture/RJMotionEvent.m +183 -0
- package/ios/Capture/RJPerformanceManager.h +100 -0
- package/ios/Capture/RJPerformanceManager.m +373 -0
- package/ios/Capture/RJPixelBufferDownscaler.h +42 -0
- package/ios/Capture/RJPixelBufferDownscaler.m +85 -0
- package/ios/Capture/RJSegmentUploader.h +146 -0
- package/ios/Capture/RJSegmentUploader.m +778 -0
- package/ios/Capture/RJVideoEncoder.h +247 -0
- package/ios/Capture/RJVideoEncoder.m +1036 -0
- package/ios/Capture/RJViewControllerTracker.h +73 -0
- package/ios/Capture/RJViewControllerTracker.m +508 -0
- package/ios/Capture/RJViewHierarchyScanner.h +215 -0
- package/ios/Capture/RJViewHierarchyScanner.m +1464 -0
- package/ios/Capture/RJViewSerializer.h +119 -0
- package/ios/Capture/RJViewSerializer.m +498 -0
- package/ios/Core/RJConstants.h +124 -0
- package/ios/Core/RJConstants.m +88 -0
- package/ios/Core/RJLifecycleManager.h +85 -0
- package/ios/Core/RJLifecycleManager.m +308 -0
- package/ios/Core/RJLogger.h +61 -0
- package/ios/Core/RJLogger.m +211 -0
- package/ios/Core/RJTypes.h +176 -0
- package/ios/Core/RJTypes.m +66 -0
- package/ios/Core/Rejourney.h +64 -0
- package/ios/Core/Rejourney.mm +2495 -0
- package/ios/Network/RJDeviceAuthManager.h +94 -0
- package/ios/Network/RJDeviceAuthManager.m +967 -0
- package/ios/Network/RJNetworkMonitor.h +68 -0
- package/ios/Network/RJNetworkMonitor.m +267 -0
- package/ios/Network/RJRetryManager.h +73 -0
- package/ios/Network/RJRetryManager.m +325 -0
- package/ios/Network/RJUploadManager.h +267 -0
- package/ios/Network/RJUploadManager.m +2296 -0
- package/ios/Privacy/RJPrivacyMask.h +163 -0
- package/ios/Privacy/RJPrivacyMask.m +922 -0
- package/ios/Rejourney.h +63 -0
- package/ios/Touch/RJGestureClassifier.h +130 -0
- package/ios/Touch/RJGestureClassifier.m +333 -0
- package/ios/Touch/RJTouchInterceptor.h +169 -0
- package/ios/Touch/RJTouchInterceptor.m +772 -0
- package/ios/Utils/RJEventBuffer.h +112 -0
- package/ios/Utils/RJEventBuffer.m +358 -0
- package/ios/Utils/RJGzipUtils.h +33 -0
- package/ios/Utils/RJGzipUtils.m +89 -0
- package/ios/Utils/RJKeychainManager.h +48 -0
- package/ios/Utils/RJKeychainManager.m +111 -0
- package/ios/Utils/RJPerfTiming.h +209 -0
- package/ios/Utils/RJPerfTiming.m +264 -0
- package/ios/Utils/RJTelemetry.h +92 -0
- package/ios/Utils/RJTelemetry.m +320 -0
- package/ios/Utils/RJWindowUtils.h +66 -0
- package/ios/Utils/RJWindowUtils.m +133 -0
- package/lib/commonjs/NativeRejourney.js +40 -0
- package/lib/commonjs/components/Mask.js +79 -0
- package/lib/commonjs/index.js +1381 -0
- package/lib/commonjs/sdk/autoTracking.js +1259 -0
- package/lib/commonjs/sdk/constants.js +151 -0
- package/lib/commonjs/sdk/errorTracking.js +199 -0
- package/lib/commonjs/sdk/index.js +50 -0
- package/lib/commonjs/sdk/metricsTracking.js +204 -0
- package/lib/commonjs/sdk/navigation.js +151 -0
- package/lib/commonjs/sdk/networkInterceptor.js +412 -0
- package/lib/commonjs/sdk/utils.js +363 -0
- package/lib/commonjs/types/expo-router.d.js +2 -0
- package/lib/commonjs/types/index.js +2 -0
- package/lib/module/NativeRejourney.js +38 -0
- package/lib/module/components/Mask.js +72 -0
- package/lib/module/index.js +1284 -0
- package/lib/module/sdk/autoTracking.js +1233 -0
- package/lib/module/sdk/constants.js +145 -0
- package/lib/module/sdk/errorTracking.js +189 -0
- package/lib/module/sdk/index.js +12 -0
- package/lib/module/sdk/metricsTracking.js +187 -0
- package/lib/module/sdk/navigation.js +143 -0
- package/lib/module/sdk/networkInterceptor.js +401 -0
- package/lib/module/sdk/utils.js +342 -0
- package/lib/module/types/expo-router.d.js +2 -0
- package/lib/module/types/index.js +2 -0
- package/lib/typescript/NativeRejourney.d.ts +147 -0
- package/lib/typescript/components/Mask.d.ts +39 -0
- package/lib/typescript/index.d.ts +117 -0
- package/lib/typescript/sdk/autoTracking.d.ts +204 -0
- package/lib/typescript/sdk/constants.d.ts +120 -0
- package/lib/typescript/sdk/errorTracking.d.ts +32 -0
- package/lib/typescript/sdk/index.d.ts +9 -0
- package/lib/typescript/sdk/metricsTracking.d.ts +58 -0
- package/lib/typescript/sdk/navigation.d.ts +33 -0
- package/lib/typescript/sdk/networkInterceptor.d.ts +47 -0
- package/lib/typescript/sdk/utils.d.ts +148 -0
- package/lib/typescript/types/index.d.ts +624 -0
- package/package.json +102 -0
- package/rejourney.podspec +21 -0
- package/src/NativeRejourney.ts +165 -0
- package/src/components/Mask.tsx +80 -0
- package/src/index.ts +1459 -0
- package/src/sdk/autoTracking.ts +1373 -0
- package/src/sdk/constants.ts +134 -0
- package/src/sdk/errorTracking.ts +231 -0
- package/src/sdk/index.ts +11 -0
- package/src/sdk/metricsTracking.ts +232 -0
- package/src/sdk/navigation.ts +157 -0
- package/src/sdk/networkInterceptor.ts +440 -0
- package/src/sdk/utils.ts +369 -0
- package/src/types/expo-router.d.ts +7 -0
- package/src/types/index.ts +739 -0
|
@@ -0,0 +1,2062 @@
|
|
|
1
|
+
//
|
|
2
|
+
// RJCaptureEngine.m
|
|
3
|
+
// Rejourney
|
|
4
|
+
//
|
|
5
|
+
// Video capture orchestrator with H.264 encoding.
|
|
6
|
+
//
|
|
7
|
+
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
8
|
+
// you may not use this file except in compliance with the License.
|
|
9
|
+
// You may obtain a copy of the License at
|
|
10
|
+
//
|
|
11
|
+
// http://www.apache.org/licenses/LICENSE-2.0
|
|
12
|
+
//
|
|
13
|
+
// Unless required by applicable law or agreed to in writing, software
|
|
14
|
+
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
15
|
+
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
16
|
+
// See the License for the specific language governing permissions and
|
|
17
|
+
// limitations under the License.
|
|
18
|
+
//
|
|
19
|
+
// Copyright (c) 2026 Rejourney
|
|
20
|
+
//
|
|
21
|
+
|
|
22
|
+
#import "RJCaptureEngine.h"
|
|
23
|
+
#import "../Core/RJConstants.h"
|
|
24
|
+
#import "../Core/RJLogger.h"
|
|
25
|
+
#import "../Privacy/RJPrivacyMask.h"
|
|
26
|
+
#import "../Utils/RJPerfTiming.h"
|
|
27
|
+
#import "RJCaptureHeuristics.h"
|
|
28
|
+
#import "RJPerformanceManager.h"
|
|
29
|
+
#import "RJPixelBufferDownscaler.h"
|
|
30
|
+
#import "RJSegmentUploader.h"
|
|
31
|
+
#import "RJVideoEncoder.h"
|
|
32
|
+
#import "RJViewHierarchyScanner.h"
|
|
33
|
+
#import "RJViewSerializer.h"
|
|
34
|
+
#import <CoreFoundation/CoreFoundation.h>
|
|
35
|
+
#import <QuartzCore/CABase.h>
|
|
36
|
+
#import <UIKit/UIKit.h>
|
|
37
|
+
#import <mach/mach.h>
|
|
38
|
+
|
|
39
|
+
// Queue-specific key used with dispatch_queue_set_specific /
// dispatch_get_specific to detect when code is already running on the
// encoding queue (the key's own address doubles as its value).
static void *kRJEncodingQueueKey = &kRJEncodingQueueKey;

// Delays (seconds) before a "defensive" re-capture is scheduled after a UI
// event, giving the view hierarchy time to settle before a frame is taken.
static const NSTimeInterval kRJDefensiveCaptureDelayNavigation = 0.2;
static const NSTimeInterval kRJDefensiveCaptureDelayInteraction = 0.15;
static const NSTimeInterval kRJDefensiveCaptureDelayScroll = 0.2;
// Maps animate longer (tiles load asynchronously), so they get a larger delay.
static const NSTimeInterval kRJDefensiveCaptureDelayMap = 0.55;
// How long (seconds) after a map was last seen it is still treated as present.
static const NSTimeInterval kRJMapPresenceWindowSeconds = 2.0;
|
|
46
|
+
|
|
47
|
+
// Resolved capture geometry for one frame/segment: the encoder's pixel
// dimensions plus the effective point->pixel scale and encoder settings.
typedef struct {
  size_t pixelWidth;       // encoded frame width in pixels (mod-2 aligned)
  size_t pixelHeight;      // encoded frame height in pixels (mod-2 aligned)
  CGFloat unifiedScale;    // effective pixels-per-point scale (e.g. 0.5)
  NSInteger fps;           // target frames per second for the encoder
  NSInteger bitrate;       // target H.264 bitrate in bits per second
} RJCaptureLayout;
|
|
54
|
+
|
|
55
|
+
/// Value object describing a capture request that has been scheduled but not
/// yet rendered. Pure data holder — no behavior beyond its properties.
@interface RJCapturePendingCapture : NSObject

// When the capture was requested (CACurrentMediaTime-style timestamp).
@property(nonatomic, assign) NSTimeInterval wantedAt;
// Latest time by which the capture should be performed.
@property(nonatomic, assign) NSTimeInterval deadline;
// Presentation timestamp to stamp onto the encoded frame.
@property(nonatomic, assign) NSTimeInterval timestamp;
// When the associated view-hierarchy scan was last taken.
@property(nonatomic, assign) NSTimeInterval lastScanTime;
// Generation counter used to invalidate stale pending captures.
@property(nonatomic, assign) NSInteger generation;
// Scan result to use for privacy masking, if one was captured.
@property(nonatomic, strong, nullable) RJViewHierarchyScanResult *scanResult;
// Signature of the layout at scan time, for change detection.
@property(nonatomic, copy, nullable) NSString *layoutSignature;

@end

@implementation RJCapturePendingCapture
@end
|
|
69
|
+
|
|
70
|
+
#pragma mark - Private Interface
|
|
71
|
+
|
|
72
|
+
/// Private interface: internal state, helper declarations, and the
/// RJPerformanceManagerDelegate conformance kept out of the public header.
@interface RJCaptureEngine () <RJPerformanceManagerDelegate>

// Computes the capture geometry (pixel size, scale, fps, bitrate) for a
// window, applying performance-level clamps and H.264 alignment.
- (RJCaptureLayout)currentCaptureLayoutForWindow:(UIWindow *)window;

// Renders the window into a pixel buffer using the given layout and the
// scan result for privacy masking. Caller owns the returned buffer.
- (CVPixelBufferRef)capturePixelBufferFromWindow:(UIWindow *)window
                                      withLayout:(RJCaptureLayout)layout
                                      scanResult:(RJViewHierarchyScanResult *)
                                                     scanResult;

// Schedules a follow-up capture after `delay`, tagged with `reason` for logs.
- (void)requestDefensiveCaptureAfterDelay:(NSTimeInterval)delay
                                   reason:(NSString *)reason;

// Supplies the UIWindow to capture; injected at init.
@property(nonatomic, copy) RJWindowProvider windowProvider;

// Backing storage for the public read-only `sessionId`.
@property(nonatomic, copy) NSString *internalSessionId;

// Backing storage for the public read-only `isRecording` (atomic: read from
// multiple queues).
@property(atomic, assign) BOOL internalIsRecording;

// Set once the UI has rendered at least once and frames are safe to take.
@property(atomic, assign) BOOL uiReadyForCapture;

// Set during teardown to short-circuit in-flight work.
@property(atomic, assign) BOOL isShuttingDown;

// Backing storage for the public read-only component accessors.
@property(nonatomic, strong, nullable) RJVideoEncoder *internalVideoEncoder;

@property(nonatomic, strong, nullable) RJViewSerializer *internalViewSerializer;

@property(nonatomic, strong, nullable)
    RJSegmentUploader *internalSegmentUploader;

// Scans the view hierarchy for sensitive surfaces (text inputs, camera, etc.).
@property(nonatomic, strong) RJViewHierarchyScanner *viewScanner;

// Frames captured since the last hierarchy snapshot was recorded.
@property(nonatomic, assign) NSInteger framesSinceHierarchy;

// Serialized hierarchy snapshots pending upload.
@property(nonatomic, strong) NSMutableArray<NSDictionary *> *hierarchySnapshots;

// Backing storage for the public read-only `currentPerformanceLevel`.
@property(atomic, assign) RJPerformanceLevel internalPerformanceLevel;

// GCD source that downgrades quality on system memory-pressure events.
@property(nonatomic, strong, nullable) dispatch_source_t memoryPressureSource;

@property(nonatomic, copy, nullable) NSString *currentScreenName;

// Guards against overlapping capture passes.
@property(atomic, assign) BOOL captureInProgress;

// Last layout signature that was serialized, for dedup.
@property(nonatomic, copy, nullable) NSString *lastSerializedSignature;

// Serial utility-QoS queue on which encoding work runs.
@property(nonatomic, strong) dispatch_queue_t encodingQueue;

@property(nonatomic, assign) NSTimeInterval lastIntentTime;
@property(nonatomic, assign) NSTimeInterval lastMapPresenceTime;

@property(nonatomic, assign) BOOL didPrewarmScanner;

@property(nonatomic, assign) NSInteger framesSinceSessionStart;

// Decides when a frame is worth capturing (change detection, pacing).
@property(nonatomic, strong) RJCaptureHeuristics *captureHeuristics;

@property(nonatomic, strong, nullable) RJCapturePendingCapture *pendingCapture;

@property(nonatomic, assign) NSInteger pendingCaptureGeneration;

@property(nonatomic, assign) NSTimeInterval pendingDefensiveCaptureTime;
@property(nonatomic, assign) NSInteger pendingDefensiveCaptureGeneration;

/// Tracks whether app is in background (skip rendering while inactive)
@property(atomic, assign) BOOL isInBackground;

// Cached window list to avoid rescanning UIApplication on every frame.
@property(nonatomic, strong, nullable) NSArray<UIWindow *> *cachedWindows;
@property(nonatomic, assign) NSTimeInterval lastWindowScanTime;

/// Pixel Buffer Pool for direct-to-buffer rendering (Encoding/Downscaled)
@property(nonatomic, assign) CVPixelBufferPoolRef pixelBufferPool;

/// Pixel Buffer Pool for NATIVE (Screen) dimensions
@property(nonatomic, assign) CVPixelBufferPoolRef nativePixelBufferPool;

/// Last captured pixel buffer for frame reuse (manually CVPixelBufferRelease'd)
@property(nonatomic, assign) CVPixelBufferRef lastCapturedPixelBuffer;
@property(nonatomic, assign) CVPixelBufferRef lastSafePixelBuffer;

// Scan results paired with the buffers above, for privacy masking.
@property(nonatomic, strong, nullable)
    RJViewHierarchyScanResult *lastMaskScanResult;
@property(nonatomic, strong, nullable)
    RJViewHierarchyScanResult *lastSafeMaskScanResult;
@property(nonatomic, assign) BOOL lastCapturedHadBlockedSurface;

// Shared device-RGB color space, created once (released in dealloc).
@property(nonatomic, assign) CGColorSpaceRef commonColorSpace;

// Dimensions the encoding pool was created with, to detect size changes.
@property(nonatomic, assign) size_t poolWidth;
@property(nonatomic, assign) size_t poolHeight;

// Dimensions the native pool was created with.
@property(nonatomic, assign) size_t nativePoolWidth;
@property(nonatomic, assign) size_t nativePoolHeight;

@property(nonatomic, strong, nullable) CADisplayLink *displayLink;

// Run-loop observer used to coalesce captures at end-of-run-loop.
@property(nonatomic, assign) CFRunLoopObserverRef runLoopObserver;
@property(nonatomic, assign) BOOL runLoopCapturePending;

@end
|
|
172
|
+
|
|
173
|
+
#pragma mark - Implementation
|
|
174
|
+
|
|
175
|
+
@implementation RJCaptureEngine
|
|
176
|
+
|
|
177
|
+
#pragma mark - Initialization
|
|
178
|
+
|
|
179
|
+
/// Resolves the capture geometry for `window`: encoder pixel dimensions,
/// the effective point->pixel scale, and the target fps/bitrate.
/// Falls back to the window provider, then UIApplication's first window,
/// then a hard-coded 390x844 point size if no window can be found.
- (RJCaptureLayout)currentCaptureLayoutForWindow:(UIWindow *)window {
  if (!window) {
    // Try to find a default window if nil
    if (self.windowProvider) {
      window = self.windowProvider();
    }
    if (!window) {
      // NOTE(review): UIApplication.windows is deprecated since iOS 15;
      // consider the connected-scenes API — confirm minimum deployment target.
      window = [[UIApplication sharedApplication] windows].firstObject;
    }
  }

  CGSize screenSize = CGSizeZero;

  if (window) {
    screenSize = window.bounds.size;
  } else {
    // Fallback point size (iPhone 12/13/14-class screen) if no window found.
    screenSize = CGSizeMake(390, 844);
  }
  // Guard against NaN/inf/non-positive sizes from a mid-transition window.
  if (!isfinite(screenSize.width) || !isfinite(screenSize.height) ||
      screenSize.width <= 0 || screenSize.height <= 0) {
    screenSize = CGSizeMake(390, 844);
  }

  // Always use main screen scale to avoid UIWindow casting header issues
  // and ensure consistent native resolution (@2x or @3x)
  CGFloat screenScale = [UIScreen mainScreen].scale;
  if (!isfinite(screenScale) || screenScale <= 0) {
    screenScale = 1.0;
  }

  CGFloat scaleToUse = self.captureScale; // Configured value (e.g. 0.72)
  if (!isfinite(scaleToUse) || scaleToUse <= 0) {
    scaleToUse = RJDefaultCaptureScale;
  }

  NSInteger targetFPS = self.videoFPS;
  NSInteger targetBitrate = self.videoBitrate;

  // Performance-level overrides: shrink the capture under pressure.
  // NOTE(review): assumes the enum orders Normal < Reduced < Minimal so the
  // Minimal clamp below also passes the >= Reduced check — confirm in RJTypes.
  if (self.internalPerformanceLevel >= RJPerformanceLevelReduced) {
    scaleToUse = MIN(scaleToUse, 0.25);
  }

  if (self.internalPerformanceLevel == RJPerformanceLevelMinimal) {
    scaleToUse = MIN(scaleToUse, 0.15);
  }

  // Removed aggressive warmup/scroll downscaling multipliers (0.3x) which
  // caused fuzziness

  // Clamp to a sane range.
  scaleToUse = MIN(MAX(scaleToUse, 0.05), 1.0);

  // Pixel dimensions = points * native scale * configured capture scale.
  size_t width = (size_t)(screenSize.width * screenScale * scaleToUse);
  size_t height = (size_t)(screenSize.height * screenScale * scaleToUse);

  // H.264 requires even dimensions (mod-2 alignment).
  width = (width / 2) * 2;
  height = (height / 2) * 2;

  // Cap the longest edge; 1920 keeps high-res capture while bounding cost.
  CGFloat maxDimension =
      1920.0; // Increased from 800.0 to support high-res capture
  if (width > maxDimension || height > maxDimension) {
    CGFloat ratio = MIN(maxDimension / width, maxDimension / height);
    width = (size_t)(width * ratio);
    height = (size_t)(height * ratio);
    width = (width / 2) * 2;
    height = (height / 2) * 2;
  }

  // Recalculate effective scale so consumers (PrivacyMask) know the real
  // point->pixel mapping: pixel width / point width. This matches how the
  // context is scaled later (CGContextScale).
  if (screenSize.width > 0) {
    scaleToUse = (CGFloat)width / screenSize.width;
  }

  RJCaptureLayout layout;
  layout.pixelWidth = width;
  layout.pixelHeight = height;
  layout.unifiedScale = scaleToUse;
  layout.fps = targetFPS;
  layout.bitrate = targetBitrate;

  return layout;
}
|
|
270
|
+
|
|
271
|
+
/// Designated initializer. Wires up privacy masking, hierarchy scanning,
/// capture heuristics, the serial encoding queue, performance monitoring,
/// system/background observers, and pre-warms the pixel-buffer pools and
/// render server. Initialization order matters: configuration must be
/// applied before monitoring/prewarm steps that read it.
- (instancetype)initWithWindowProvider:(RJWindowProvider)windowProvider {
  self = [super init];
  if (self) {
    if (windowProvider) {
      _windowProvider = [windowProvider copy];
    } else {
      // Engine still initializes; capture will rely on fallback window lookup.
      RJLogWarning(@"CaptureEngine initialized without window provider");
    }

    _privacyMask = [[RJPrivacyMask alloc] init];
    _viewScanner = [[RJViewHierarchyScanner alloc] init];
    _captureHeuristics = [[RJCaptureHeuristics alloc] init];

    // Serial utility-QoS queue for encoding; the queue-specific key lets
    // later code detect when it is already on this queue.
    dispatch_queue_attr_t encodeAttr = dispatch_queue_attr_make_with_qos_class(
        DISPATCH_QUEUE_SERIAL, QOS_CLASS_UTILITY, 0);
    _encodingQueue =
        dispatch_queue_create("com.rejourney.capture.encoding", encodeAttr);
    dispatch_queue_set_specific(_encodingQueue, kRJEncodingQueueKey,
                                kRJEncodingQueueKey, NULL);
    _captureInProgress = NO;
    _lastIntentTime = 0;
    _lastMapPresenceTime = 0;
    _pendingCaptureGeneration = 0;
    _isInBackground = NO;

    // NOTE(review): delegate is set before init completes; callbacks arriving
    // immediately would see a partially-configured engine — confirm the
    // manager only calls back after startMonitoring settles.
    [[RJPerformanceManager sharedManager] setDelegate:self];
    [[RJPerformanceManager sharedManager] startMonitoring];
    RJLogDebug(@"CaptureEngine: Performance monitoring started");
    _hierarchySnapshots = [NSMutableArray new];
    _isShuttingDown = NO;
    _uiReadyForCapture = NO;
    _framesSinceHierarchy = 0;

    [self applyDefaultConfiguration];

    // Pre-warm H.264 encoder is now handled via prepareEncoderWithSize later
    // in prewarmRenderServer. [RJVideoEncoder prewarmEncoderAsync]; // Removed
    // static call

    // Best-effort: monitoring setup failure must not abort engine creation.
    @try {
      [self setupSystemMonitoring];
    } @catch (NSException *exception) {
      RJLogWarning(@"System monitoring setup failed: %@", exception);
    }

    // Subscribe to background/foreground notifications
    [self setupBackgroundTracking];

    // Pre-warm pixel buffer pool
    [self prewarmPixelBufferPool];

    // Create common color space once; released in dealloc.
    _commonColorSpace = CGColorSpaceCreateDeviceRGB();

    // Pre-warm render server (GPU context)
    [self prewarmRenderServer];
  }
  return self;
}
|
|
330
|
+
|
|
331
|
+
/// Seeds all tunable settings with their defaults. Writes ivars directly
/// (bypassing the custom privacy setters) and then mirrors the privacy flags
/// onto the mask object manually, so keep the two in sync when editing.
- (void)applyDefaultConfiguration {

  _videoFPS = 1; // Default to 1 FPS intent clock
  _framesPerSegment = 60;
  _videoBitrate = 1500000; // 1.5 Mbps - optimized for quality capture
  _hierarchyCaptureInterval = 5;
  _captureScale = RJDefaultCaptureScale;
  _uploadsEnabled = YES;

  _adaptiveQualityEnabled = YES;
  _thermalThrottleEnabled = YES;
  _batteryAwareEnabled = YES;

  // Privacy defaults: everything masked.
  _privacyMaskTextInputs = YES;
  _privacyMaskCameraViews = YES;
  _privacyMaskWebViews = YES;
  _privacyMaskVideoLayers = YES;
  _privacyMask.maskTextInputs = YES;
  _privacyMask.maskCameraViews = YES;
  _privacyMask.maskWebViews = YES;
  _privacyMask.maskVideoLayers = YES;
  // NOTE(review): only detectVideoLayers is enabled on the scanner here,
  // while the custom setters also toggle detectCameraViews/detectWebViews.
  // Presumably the scanner's other detect flags default to YES — confirm in
  // RJViewHierarchyScanner; otherwise camera/webview masking can't find
  // those surfaces until a setter is called.
  self.viewScanner.config.detectVideoLayers = YES;

  _internalPerformanceLevel = RJPerformanceLevelNormal;
}
|
|
356
|
+
|
|
357
|
+
#pragma mark - Public Property Accessors
|
|
358
|
+
|
|
359
|
+
- (RJVideoEncoder *)videoEncoder {
  // Public read-only view of the privately held encoder.
  return [self internalVideoEncoder];
}
|
|
362
|
+
|
|
363
|
+
- (RJViewSerializer *)viewSerializer {
  // Public read-only view of the privately held serializer.
  return [self internalViewSerializer];
}
|
|
366
|
+
|
|
367
|
+
- (RJSegmentUploader *)segmentUploader {
  // Public read-only view of the privately held uploader.
  return [self internalSegmentUploader];
}
|
|
370
|
+
|
|
371
|
+
- (RJPerformanceLevel)currentPerformanceLevel {
  // Exposes the atomic internal performance level.
  return [self internalPerformanceLevel];
}
|
|
374
|
+
|
|
375
|
+
- (BOOL)isRecording {
  // Exposes the atomic internal recording flag.
  return [self internalIsRecording];
}
|
|
378
|
+
|
|
379
|
+
- (NSString *)sessionId {
  // Exposes the privately stored session identifier.
  return [self internalSessionId];
}
|
|
382
|
+
|
|
383
|
+
- (void)setPrivacyMaskTextInputs:(BOOL)enabled {
  // Propagate to the mask first, then record the flag; the two stores are
  // independent, so order is immaterial.
  _privacyMask.maskTextInputs = enabled;
  _privacyMaskTextInputs = enabled;
}
|
|
387
|
+
|
|
388
|
+
- (void)setPrivacyMaskCameraViews:(BOOL)enabled {
  // Record the flag and keep both the privacy mask and the hierarchy
  // scanner's camera detection in lockstep so camera surfaces are found
  // and redacted together.
  _privacyMaskCameraViews = enabled;
  self.viewScanner.config.detectCameraViews = enabled;
  _privacyMask.maskCameraViews = enabled;
}
|
|
393
|
+
|
|
394
|
+
- (void)setPrivacyMaskWebViews:(BOOL)enabled {
  // Record the flag and keep both the privacy mask and the hierarchy
  // scanner's web-view detection in lockstep.
  _privacyMaskWebViews = enabled;
  self.viewScanner.config.detectWebViews = enabled;
  _privacyMask.maskWebViews = enabled;
}
|
|
399
|
+
|
|
400
|
+
- (void)setPrivacyMaskVideoLayers:(BOOL)enabled {
  // Record the flag and keep both the privacy mask and the hierarchy
  // scanner's video-layer detection in lockstep.
  _privacyMaskVideoLayers = enabled;
  self.viewScanner.config.detectVideoLayers = enabled;
  _privacyMask.maskVideoLayers = enabled;
}
|
|
405
|
+
|
|
406
|
+
#pragma mark - Deallocation
|
|
407
|
+
|
|
408
|
+
/// Teardown: stops capture, flushes the encoder, unhooks observers and the
/// memory-pressure source, then releases all manually managed CoreVideo /
/// CoreGraphics resources. Order matters: the display link and encoder are
/// stopped before the buffers they might touch are released.
- (void)dealloc {
  _isShuttingDown = YES;
  _internalIsRecording = NO;

  [self teardownDisplayLink];

  // Flush any partially written segment (nil encoder is a safe no-op).
  [_internalVideoEncoder finishSegment];

  // Defensive @try: removeObserver during teardown should never take the
  // engine down even if an observation is in an unexpected state.
  @try {
    [[NSNotificationCenter defaultCenter] removeObserver:self];
  } @catch (NSException *e) {
  }

  // Cancel releases the source's handlers (which capture the source itself).
  if (_memoryPressureSource) {
    dispatch_source_cancel(_memoryPressureSource);
    _memoryPressureSource = nil;
  }

  @try {
    [_privacyMask forceCleanup];
  } @catch (NSException *e) {
  }

  // CF objects below are +assign properties; release them explicitly.
  if (_pixelBufferPool) {
    CVPixelBufferPoolRelease(_pixelBufferPool);
    _pixelBufferPool = NULL;
  }

  if (_nativePixelBufferPool) {
    CVPixelBufferPoolRelease(_nativePixelBufferPool);
    _nativePixelBufferPool = NULL;
  }

  if (_lastCapturedPixelBuffer) {
    CVPixelBufferRelease(_lastCapturedPixelBuffer);
    _lastCapturedPixelBuffer = NULL;
  }

  if (_lastSafePixelBuffer) {
    CVPixelBufferRelease(_lastSafePixelBuffer);
    _lastSafePixelBuffer = NULL;
  }

  if (_commonColorSpace) {
    CGColorSpaceRelease(_commonColorSpace);
    _commonColorSpace = NULL;
  }
}
|
|
456
|
+
|
|
457
|
+
#pragma mark - Background State Tracking
|
|
458
|
+
|
|
459
|
+
- (void)setupBackgroundTracking {
  // Observe the full foreground/background transition set. will-resign is
  // included so capture can stop before iOS snapshots the app switcher.
  NSNotificationCenter *noteCenter = [NSNotificationCenter defaultCenter];

  NSDictionary<NSNotificationName, NSString *> *handlers = @{
    UIApplicationWillResignActiveNotification :
        NSStringFromSelector(@selector(appWillEnterBackground:)),
    UIApplicationDidEnterBackgroundNotification :
        NSStringFromSelector(@selector(appDidEnterBackground:)),
    UIApplicationWillEnterForegroundNotification :
        NSStringFromSelector(@selector(appWillEnterForeground:)),
    UIApplicationDidBecomeActiveNotification :
        NSStringFromSelector(@selector(appDidBecomeActive:)),
  };

  [handlers enumerateKeysAndObjectsUsingBlock:^(
                NSNotificationName name, NSString *selectorName, BOOL *stop) {
    [noteCenter addObserver:self
                   selector:NSSelectorFromString(selectorName)
                       name:name
                     object:nil];
  }];
}
|
|
479
|
+
|
|
480
|
+
- (void)appWillEnterBackground:(NSNotification *)notification {
  // Raise the background flag BEFORE iOS takes the app-switcher snapshot,
  // so no rendering happens during the app-switch transition.
  self.isInBackground = YES;
  RJLogDebug(@"CaptureEngine: App will enter background - suspending capture");
}
|
|
486
|
+
|
|
487
|
+
- (void)appDidEnterBackground:(NSNotification *)notification {
  // Idempotent with appWillEnterBackground:; ensures the flag is set even
  // if the will-resign notification was missed.
  self.isInBackground = YES;
}
|
|
490
|
+
|
|
491
|
+
/// Intentionally a no-op: the background flag stays set until the app is
/// fully active (appDidBecomeActive:), because views have not re-rendered
/// yet at will-enter-foreground time.
- (void)appWillEnterForeground:(NSNotification *)notification {
  // Keep background flag set until fully active - views haven't rendered yet
}
|
|
494
|
+
|
|
495
|
+
- (void)appDidBecomeActive:(NSNotification *)notification {
  // Clear the background flag only once the app is fully active; capture
  // resumes after warmup.
  self.isInBackground = NO;
  RJLogDebug(@"CaptureEngine: App became active - warmup before capture");
}
|
|
499
|
+
|
|
500
|
+
#pragma mark - System Monitoring
|
|
501
|
+
|
|
502
|
+
- (void)setupSystemMonitoring {
  // Two independent pressure signals feed quality downgrades: the GCD
  // memory-pressure source and UIKit's memory-warning notification.
  [self setupMemoryPressureMonitoring];

  NSNotificationCenter *noteCenter = [NSNotificationCenter defaultCenter];
  [noteCenter addObserver:self
                 selector:@selector(handleMemoryWarning)
                     name:UIApplicationDidReceiveMemoryWarningNotification
                   object:nil];
}
|
|
511
|
+
|
|
512
|
+
/// Installs a GCD memory-pressure source on the main queue that downgrades
/// the internal performance level on WARN/CRITICAL events. Self is captured
/// weakly; the handler captures `source` strongly (a source->handler->source
/// cycle that is broken when dealloc cancels the source).
- (void)setupMemoryPressureMonitoring {
  dispatch_source_t source = dispatch_source_create(
      DISPATCH_SOURCE_TYPE_MEMORYPRESSURE, 0,
      DISPATCH_MEMORYPRESSURE_WARN | DISPATCH_MEMORYPRESSURE_CRITICAL,
      dispatch_get_main_queue());

  // Weak/strong dance: the handler must not retain the engine.
  __weak typeof(self) weakSelf = self;
  dispatch_source_set_event_handler(source, ^{
    __strong typeof(weakSelf) strongSelf = weakSelf;
    if (!strongSelf)
      return;

    // For a memory-pressure source, dispatch_source_get_data returns the
    // pressure flags that triggered this event.
    dispatch_source_memorypressure_flags_t level =
        dispatch_source_get_data(source);

    // Check CRITICAL first so it wins when both flags are set.
    if (level & DISPATCH_MEMORYPRESSURE_CRITICAL) {
      RJLogWarning(@"Critical memory pressure - reducing capture quality");
      strongSelf.internalPerformanceLevel = RJPerformanceLevelMinimal;
    } else if (level & DISPATCH_MEMORYPRESSURE_WARN) {
      RJLogInfo(@"Memory pressure warning - reducing quality");
      strongSelf.internalPerformanceLevel = RJPerformanceLevelReduced;
    }
  });

  // Sources are created suspended; resume before publishing.
  dispatch_resume(source);
  self.memoryPressureSource = source;
}
|
|
539
|
+
|
|
540
|
+
- (void)handleMemoryWarning {
  // A UIKit memory warning forces the most conservative capture settings.
  self.internalPerformanceLevel = RJPerformanceLevelMinimal;
  RJLogWarning(@"Memory warning received");
}
|
|
544
|
+
|
|
545
|
+
#pragma mark - Configuration
|
|
546
|
+
|
|
547
|
+
- (void)configureSegmentUploaderWithBaseURL:(NSString *)baseURL
                                     apiKey:(NSString *)apiKey
                                  projectId:(NSString *)projectId {
  // Build the uploader fully configured, then publish it in one step.
  RJLogInfo(@"Configuring segment uploader: baseURL=%@, projectId=%@", baseURL,
            projectId);

  RJSegmentUploader *uploader =
      [[RJSegmentUploader alloc] initWithBaseURL:baseURL];
  uploader.apiKey = apiKey;
  uploader.projectId = projectId;
  self.internalSegmentUploader = uploader;
}
|
|
558
|
+
|
|
559
|
+
#pragma mark - Pixel Buffer Management
|
|
560
|
+
|
|
561
|
+
/// Pre-creates both pixel-buffer pools so the first real capture does not
/// pay allocation cost: the encoding pool at the layout's (downscaled)
/// dimensions, and the native pool at full screen resolution.
- (void)prewarmPixelBufferPool {
  RJCaptureLayout layout = [self currentCaptureLayoutForWindow:nil];
  size_t width = layout.pixelWidth;
  size_t height = layout.pixelHeight;

  [self createPixelBufferPoolWithWidth:width height:height];

  // Native dimensions: pixelWidth / unifiedScale recovers the point size
  // (unifiedScale = pixelWidth / pointWidth), then * screenScale gives
  // native pixels. unifiedScale is clamped > 0 by the layout computation.
  CGFloat screenScale = [UIScreen mainScreen].scale;
  size_t nativeW = (size_t)(width / layout.unifiedScale * screenScale);
  size_t nativeH = (size_t)(height / layout.unifiedScale * screenScale);
  // H.264-style mod-2 alignment, kept consistent with the encoding pool.
  nativeW = (nativeW / 2) * 2;
  nativeH = (nativeH / 2) * 2;

  [self createNativePixelBufferPoolWithWidth:nativeW height:nativeH];

  RJLogDebug(@"CaptureEngine: Pre-warmed pixel buffer pools (Enc: %zux%zu, "
             @"Native: %zux%zu)",
             width, height, nativeW, nativeH);
}
|
|
582
|
+
|
|
583
|
+
/// (Re)creates the encoder-sized CVPixelBuffer pool. No-op when the existing
/// pool already matches the requested dimensions.
/// @param width  Requested buffer width in pixels.
/// @param height Requested buffer height in pixels.
- (void)createPixelBufferPoolWithWidth:(size_t)width height:(size_t)height {
  if (_pixelBufferPool) {
    if (_poolWidth == width && _poolHeight == height) {
      return; // Already matched
    }
    CVPixelBufferPoolRelease(_pixelBufferPool);
    _pixelBufferPool = NULL;
  }

  NSDictionary *poolAttributes = @{
    (id)kCVPixelBufferPoolMinimumBufferCountKey : @(10), // Increased from 3
  };

  // BGRA + IOSurface backing + CG compatibility so buffers can back a
  // CGBitmapContext directly.
  NSDictionary *pixelBufferAttributes = @{
    (id)kCVPixelBufferWidthKey : @(width),
    (id)kCVPixelBufferHeightKey : @(height),
    (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA),
    (id)kCVPixelBufferCGImageCompatibilityKey : @YES,
    (id)kCVPixelBufferCGBitmapContextCompatibilityKey : @YES,
    (id)kCVPixelBufferIOSurfacePropertiesKey : @{},
  };

  CVReturn ret = CVPixelBufferPoolCreate(
      kCFAllocatorDefault, (__bridge CFDictionaryRef)poolAttributes,
      (__bridge CFDictionaryRef)pixelBufferAttributes, &_pixelBufferPool);

  if (ret == kCVReturnSuccess) {
    _poolWidth = width;
    _poolHeight = height;
  } else {
    // Fix: failure was previously silent and left stale _poolWidth/_poolHeight
    // values describing a pool that no longer exists.
    RJLogWarning(@"CaptureEngine: CVPixelBufferPoolCreate failed (%d)",
                 (int)ret);
    _poolWidth = 0;
    _poolHeight = 0;
  }
}
|
|
614
|
+
|
|
615
|
+
/// Wakes up the iOS render server by executing one dummy 100x100 render
/// through the exact production capture pipeline (native pixel-buffer pool ->
/// CGBitmapContext -> drawViewHierarchyInRect:), then pre-initializes the
/// video encoder with the real expected frame size. Dispatched to the main
/// queue because UIKit rendering must happen there.
- (void)prewarmRenderServer {
  // Perform a dummy 100x100 render on the main thread to wake up the render
  // server. CRITICAL: We must use the EXACT same pipeline (PixelPool ->
  // BitmapContext) as the real capture to ensure the specific internal
  // toggles for CGBitmapContext and CVPixelBuffer interactions are warmed up.
  // Using UIGraphicsBeginImageContext here is useless because we don't use it
  // anymore.

  dispatch_async(dispatch_get_main_queue(), ^{
    @try {
      RJLogDebug(@"CaptureEngine: Pre-warming Render Server (Direct-Buffer "
                 @"Path)...");

      // 1. Get a dummy buffer from the NATIVE pool
      // Using 100x100 is fine, it will trigger pool creation if needed
      CVPixelBufferRef pixelBuffer =
          [self createNativePixelBufferFromPoolWithWidth:100 height:100];
      if (pixelBuffer) {
        CVPixelBufferLockBaseAddress(pixelBuffer, 0);
        void *baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer);
        size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);

        // Build a bitmap context directly over the pixel buffer's memory,
        // matching the BGRA little-endian layout used by the capture path.
        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
        CGContextRef context = CGBitmapContextCreate(
            baseAddress, 100, 100, 8, bytesPerRow, colorSpace,
            kCGImageAlphaNoneSkipFirst | kCGBitmapByteOrder32Little);
        CGColorSpaceRelease(colorSpace);

        if (context) {
          // 2. Perform the render
          // We need a valid view. Pushing context is what UIKit needs.
          UIGraphicsPushContext(context);

          UIWindow *window = self.windowProvider ? self.windowProvider() : nil;
          if (!window)
            window = [[UIApplication sharedApplication] windows].firstObject;

          if (window) {
            // Force the heavy lifting
            [window drawViewHierarchyInRect:CGRectMake(0, 0, 100, 100)
                         afterScreenUpdates:NO];

            // 3. Pre-warm the Encoder with the REAL expected size
            // This is the most critical fix for the 650ms first-frame spike.
            // We calculate what the first frame size WILL be and initialize
            // the AVAssetWriter now.
            RJCaptureLayout layout =
                [self currentCaptureLayoutForWindow:window];
            CGSize expectedSize =
                CGSizeMake(layout.pixelWidth, layout.pixelHeight);

            RJLogDebug(
                @"CaptureEngine: Pre-warming Encoder with expected size: %@",
                NSStringFromCGSize(expectedSize));
            [self.internalVideoEncoder prepareEncoderWithSize:expectedSize];
          }

          UIGraphicsPopContext();
          CGContextRelease(context);
        }
        // Balance the lock above, then release the +1 buffer returned by the
        // pool helper.
        CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
        CVPixelBufferRelease(pixelBuffer);
      }
      RJLogDebug(@"CaptureEngine: Render Server pre-warmed");
    } @catch (NSException *e) {
      RJLogWarning(@"CaptureEngine: Failed to pre-warm render server: %@", e);
    }
  });
}
|
|
684
|
+
|
|
685
|
+
/// (Re)creates the native-resolution CVPixelBuffer pool. No-op when the
/// existing pool already matches the requested dimensions.
/// @param width  Requested buffer width in pixels.
/// @param height Requested buffer height in pixels.
- (void)createNativePixelBufferPoolWithWidth:(size_t)width
                                      height:(size_t)height {
  if (_nativePixelBufferPool) {
    if (_nativePoolWidth == width && _nativePoolHeight == height) {
      return; // Already matched
    }
    CVPixelBufferPoolRelease(_nativePixelBufferPool);
    _nativePixelBufferPool = NULL;
  }

  NSDictionary *poolAttributes = @{
    (id)kCVPixelBufferPoolMinimumBufferCountKey : @(10),
  };

  // Same attribute set as the encoder pool: BGRA, IOSurface-backed, usable as
  // CGBitmapContext storage.
  NSDictionary *pixelBufferAttributes = @{
    (id)kCVPixelBufferWidthKey : @(width),
    (id)kCVPixelBufferHeightKey : @(height),
    (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA),
    (id)kCVPixelBufferCGImageCompatibilityKey : @YES,
    (id)kCVPixelBufferCGBitmapContextCompatibilityKey : @YES,
    (id)kCVPixelBufferIOSurfacePropertiesKey : @{},
  };

  CVReturn ret = CVPixelBufferPoolCreate(
      kCFAllocatorDefault, (__bridge CFDictionaryRef)poolAttributes,
      (__bridge CFDictionaryRef)pixelBufferAttributes, &_nativePixelBufferPool);

  if (ret == kCVReturnSuccess) {
    _nativePoolWidth = width;
    _nativePoolHeight = height;
  } else {
    // Fix: failure was previously silent and left stale dimension fields that
    // no longer described a live pool.
    RJLogWarning(@"CaptureEngine: native CVPixelBufferPoolCreate failed (%d)",
                 (int)ret);
    _nativePoolWidth = 0;
    _nativePoolHeight = 0;
  }
}
|
|
717
|
+
|
|
718
|
+
/// Vends a native-resolution pixel buffer, (re)building the pool on demand and
/// falling back to a one-off allocation when the pool cannot deliver.
/// @return A +1-retained CVPixelBufferRef the caller must release, or NULL.
- (CVPixelBufferRef)createNativePixelBufferFromPoolWithWidth:(size_t)width
                                                      height:(size_t)height {
  // Fix: the NULL-pool check and the size-mismatch check were two sequential
  // `if` statements, which could invoke pool creation twice back-to-back when
  // the first attempt failed; a single combined guard covers both cases (the
  // retry below still handles allocation failure).
  if (!_nativePixelBufferPool || _nativePoolWidth != width ||
      _nativePoolHeight != height) {
    [self createNativePixelBufferPoolWithWidth:width height:height];
  }

  CVPixelBufferRef pixelBuffer = NULL;
  CVReturn status = CVPixelBufferPoolCreatePixelBuffer(
      kCFAllocatorDefault, _nativePixelBufferPool, &pixelBuffer);

  if (status != kCVReturnSuccess) {
    // One retry with a freshly rebuilt pool before giving up on pooling.
    [self createNativePixelBufferPoolWithWidth:width height:height];
    status = CVPixelBufferPoolCreatePixelBuffer(
        kCFAllocatorDefault, _nativePixelBufferPool, &pixelBuffer);
  }

  if (status != kCVReturnSuccess) {
    // Fallback: allocate a standalone buffer outside the pool.
    RJLogWarning(@"CaptureEngine: Native Pool allocation failed");
    NSDictionary *options = @{
      (id)kCVPixelBufferCGImageCompatibilityKey : @YES,
      (id)kCVPixelBufferCGBitmapContextCompatibilityKey : @YES,
    };
    CVPixelBufferCreate(kCFAllocatorDefault, width, height,
                        kCVPixelFormatType_32BGRA,
                        (__bridge CFDictionaryRef)options, &pixelBuffer);
  }
  return pixelBuffer;
}
|
|
750
|
+
|
|
751
|
+
#pragma mark - Session Lifecycle
|
|
752
|
+
|
|
753
|
+
// B8: Cached Window Enumeration
/// Returns the scannable window list, re-enumerating at most once per 100 ms.
- (NSArray<UIWindow *> *)cachedWindows {
  NSTimeInterval now = CACurrentMediaTime();
  BOOL cacheStale = (now - self.lastWindowScanTime > 0.1); // Cache for 100ms
  if (!_cachedWindows || cacheStale) {
    _cachedWindows = [self scanForWindows];
    self.lastWindowScanTime = now;
  }
  return _cachedWindows;
}
|
|
763
|
+
|
|
764
|
+
/// Enumerates application windows worth capturing: visible, non-system windows
/// from every connected window scene (iOS 13+), or the provider's primary
/// window on older systems. Falls back to the primary window if nothing
/// qualified.
- (NSArray<UIWindow *> *)scanForWindows {
  NSMutableArray<UIWindow *> *windowsToScan =
      [NSMutableArray arrayWithCapacity:4];
  UIWindow *primaryWindow = self.windowProvider ? self.windowProvider() : nil;

  if (@available(iOS 13.0, *)) {
    for (UIScene *scene in [UIApplication sharedApplication].connectedScenes) {
      if (![scene isKindOfClass:[UIWindowScene class]])
        continue;
      UIWindowScene *windowScene = (UIWindowScene *)scene;
      // Fix: removed a dead `if (windowScene.activationState != (NSInteger)0)`
      // check whose body was empty — it had no effect. All connected window
      // scenes are scanned regardless of activation state, which matches the
      // previous effective behavior.

      for (UIWindow *window in windowScene.windows) {
        // Skip invisible windows.
        if (!window || window.isHidden || window.alpha <= 0.01)
          continue;

        // Skip UIKit-internal windows (keyboard, text effects, status bar).
        NSString *windowClass = NSStringFromClass([window class]);
        BOOL isSystemWindow = ([windowClass containsString:@"Keyboard"] ||
                               [windowClass containsString:@"TextEffects"] ||
                               [windowClass containsString:@"InputWindow"] ||
                               [windowClass containsString:@"RemoteKeyboard"] ||
                               [windowClass containsString:@"StatusBar"]);
        if (isSystemWindow)
          continue;

        [windowsToScan addObject:window];
      }
    }
  } else {
    if (primaryWindow)
      [windowsToScan addObject:primaryWindow];
  }

  // Fallback if empty
  if (windowsToScan.count == 0 && primaryWindow) {
    [windowsToScan addObject:primaryWindow];
  }

  return windowsToScan;
}
|
|
807
|
+
|
|
808
|
+
/// Begins a capture session for the given identifier. A previously active
/// session is stopped first; the video pipeline itself is started
/// asynchronously on the main queue.
/// @param sessionId Non-empty session identifier; empty/nil is rejected.
- (void)startSessionWithId:(NSString *)sessionId {
  if (self.isShuttingDown)
    return;

  // Messaging nil returns 0, so this also rejects a nil sessionId.
  if (sessionId.length == 0) {
    RJLogWarning(@"Cannot start session with empty ID");
    return;
  }

  if (self.internalIsRecording) {
    RJLogWarning(@"Session already active, stopping previous");
    [self stopSession];
  }

  RJLogInfo(@"Starting video capture session: %@", sessionId);
  self.internalSessionId = sessionId;

  @try {
    self.internalIsRecording = YES;
    self.lastMapPresenceTime = 0;

    // Kick off the pipeline on the main queue; re-check state inside the
    // block since shutdown/stop may have happened in the meantime.
    dispatch_async(dispatch_get_main_queue(), ^{
      if (!self.isShuttingDown && self.internalIsRecording) {
        [self startVideoCapture];
      }
    });

  } @catch (NSException *exception) {
    RJLogError(@"Session start failed: %@", exception);
    self.internalIsRecording = NO;
    self.internalSessionId = nil;
  }
}
|
|
842
|
+
|
|
843
|
+
/// Stops the session without blocking; segment finalization runs
/// asynchronously on the encoding queue.
- (void)stopSession {
  [self stopSessionWithSynchronousFinish:NO];
}
|
|
846
|
+
|
|
847
|
+
/// Stops the session and blocks until the current segment has been finalized
/// and pending uploads have had a bounded chance to drain.
- (void)stopSessionSync {
  [self stopSessionWithSynchronousFinish:YES];
}
|
|
850
|
+
|
|
851
|
+
/// Core session-stop path shared by stopSession / stopSessionSync.
/// @param synchronous When YES, the current segment is finalized on the
///        encoding queue before returning and pending uploads are given up to
///        5 s to drain; when NO, finalization is dispatched asynchronously.
- (void)stopSessionWithSynchronousFinish:(BOOL)synchronous {
  // Nothing to stop if no session state exists at all.
  if (!self.internalIsRecording && !self.internalSessionId) {
    return;
  }

  RJLogInfo(@"Stopping session: %@ (sync=%d)", self.internalSessionId,
            synchronous);

  @try {
    self.internalIsRecording = NO;

    // Always invalidate display link synchronously to prevent race conditions
    if ([NSThread isMainThread]) {
      [self teardownDisplayLink];
    } else {
      dispatch_sync(dispatch_get_main_queue(), ^{
        [self teardownDisplayLink];
      });
    }

    if (self.internalVideoEncoder) {
      if (synchronous) {
        void (^finishSync)(void) = ^{
          RJLogInfo(
              @"CaptureEngine: Finishing segment synchronously (session stop)");
          [self.internalVideoEncoder finishSegmentSync];
          self.internalVideoEncoder =
              nil; // Force full re-creation on next session
        };
        // Deadlock guard: if we are already on the encoding queue, run the
        // block inline instead of dispatch_sync'ing onto the same queue.
        if (dispatch_get_specific(kRJEncodingQueueKey)) {
          finishSync();
        } else {
          dispatch_sync(self.encodingQueue, finishSync);
        }
        [self waitForPendingSegmentUploadsWithTimeout:5.0];
      } else {
        dispatch_async(self.encodingQueue, ^{
          RJLogInfo(@"CaptureEngine: Finishing segment (async) (session stop)");
          [self.internalVideoEncoder finishSegment];
          self.internalVideoEncoder = nil;
        });
      }
    }

    // Flush any view-hierarchy snapshots collected during the session.
    if (self.hierarchySnapshots.count > 0) {
      [self uploadCurrentHierarchySnapshots];
    }

    // Reset all per-session capture state.
    self.framesSinceHierarchy = 0;
    self.lastIntentTime = 0;
    self.lastMapPresenceTime = 0;
    self.captureInProgress = NO;
    self.framesSinceSessionStart = 0;
    self.lastSerializedSignature = nil;
    self.pendingCapture = nil;
    self.pendingCaptureGeneration = 0;
    self.lastMaskScanResult = nil;
    self.lastSafeMaskScanResult = nil;
    self.lastCapturedHadBlockedSurface = NO;
    // CVPixelBufferRef is manually retained (not ARC-managed) — release it.
    if (self.lastSafePixelBuffer) {
      CVPixelBufferRelease(self.lastSafePixelBuffer);
      self.lastSafePixelBuffer = NULL;
    }
    [self.captureHeuristics reset];

    self.internalSessionId = nil;
    self.currentScreenName = nil;

  } @catch (NSException *exception) {
    RJLogError(@"Session stop error: %@", exception);
    self.internalIsRecording = NO;
    self.internalSessionId = nil;
  }
}
|
|
925
|
+
|
|
926
|
+
#pragma mark - Video Capture
|
|
927
|
+
|
|
928
|
+
/// Starts the frame-capture pipeline: validates uploader configuration,
/// resets per-session state, builds a fresh encoder + serializer, schedules
/// the first frame ~300 ms out, and attaches the CADisplayLink.
/// NOTE(review): appears to expect the main thread (dispatched there by
/// startSessionWithId:) — confirm before calling from elsewhere.
- (void)startVideoCapture {

  // Bail out early if uploads cannot happen; recording without an upload
  // target would only accumulate data.
  if (!self.internalSegmentUploader || !self.internalSegmentUploader.apiKey) {
    RJLogError(@"Segment uploader not configured!");
    self.internalIsRecording = NO;
    return;
  }

  // Reset per-session capture/heuristics state.
  self.framesSinceSessionStart = 0;
  [self.captureHeuristics reset];
  self.lastMaskScanResult = nil;
  self.lastSafeMaskScanResult = nil;
  self.lastCapturedHadBlockedSurface = NO;
  // Release the manually-retained pixel buffer left over from a prior session.
  if (self.lastSafePixelBuffer) {
    CVPixelBufferRelease(self.lastSafePixelBuffer);
    self.lastSafePixelBuffer = NULL;
  }

  // Pre-warm view scanner class caches
  if (!self.didPrewarmScanner) {
    [self.viewScanner prewarmClassCaches];
    self.didPrewarmScanner = YES;
  }

  // Fresh encoder per session, configured from the engine's current settings.
  self.internalVideoEncoder = [[RJVideoEncoder alloc] init];
  self.internalVideoEncoder.delegate = self;
  self.internalVideoEncoder.fps = self.videoFPS;
  self.internalVideoEncoder.framesPerSegment = self.framesPerSegment;
  self.internalVideoEncoder.targetBitrate = self.videoBitrate;
  self.internalVideoEncoder.captureScale = self.captureScale;
  [self.internalVideoEncoder setSessionId:self.internalSessionId];

  self.internalViewSerializer = [[RJViewSerializer alloc] init];

  [self.hierarchySnapshots removeAllObjects];
  self.framesSinceHierarchy = 0;

  __weak typeof(self) weakSelf = self;

  // OPTIMIZATION: Capture first frame after 300ms delay
  dispatch_after(
      dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.3 * NSEC_PER_SEC)),
      dispatch_get_main_queue(), ^{
        __strong typeof(weakSelf) strongSelf = weakSelf;
        if (strongSelf && strongSelf.internalIsRecording &&
            !strongSelf.isShuttingDown) {
          // Auto-enable capture readiness if JS hasn't called notifyUIReady yet
          strongSelf.uiReadyForCapture = YES;
          RJLogDebug(@"Capturing initial frame after session start");
          [strongSelf captureVideoFrame];
        }
      });

  // Use CADisplayLink for frame-synchronized capture
  [self setupDisplayLink];

  RJLogInfo(@"Video capture started: %ld FPS, %ld frames/segment "
            @"(CADisplayLink Mode)",
            (long)self.videoFPS, (long)self.framesPerSegment);
}
|
|
988
|
+
|
|
989
|
+
/// Creates and schedules the CADisplayLink that drives frame capture,
/// tearing down any previous link/observer first.
- (void)setupDisplayLink {
  [self teardownDisplayLink];

  // Fix: removed an unused `__weak typeof(self) weakSelf` local — the display
  // link targets `self` directly. Note that CADisplayLink retains its target,
  // so teardownDisplayLink must run before this object can be released (the
  // stop-session paths do this).
  _displayLink =
      [CADisplayLink displayLinkWithTarget:self
                                  selector:@selector(displayLinkCallback:)];

  // Set preferred frame rate (iOS 15+)
  if (@available(iOS 15.0, *)) {
    // Hard cap at target FPS to avoid 60Hz tick capability
    _displayLink.preferredFrameRateRange =
        CAFrameRateRangeMake(self.videoFPS, self.videoFPS, self.videoFPS);
  } else {
    // For older iOS, use frameInterval
    NSInteger interval = (NSInteger)(60.0 / self.videoFPS);
    _displayLink.frameInterval = MAX(1, interval);
  }

  // Add to RunLoop in CommonModes to capture during scroll
  [_displayLink addToRunLoop:[NSRunLoop mainRunLoop]
                     forMode:NSRunLoopCommonModes];

  [self setupRunLoopObserver];

  RJLogDebug(@"CADisplayLink attached (target FPS: %ld)", (long)self.videoFPS);
}
|
|
1018
|
+
|
|
1019
|
+
/// Installs a main-run-loop observer (kCFRunLoopBeforeWaiting) that performs
/// the deferred capture flagged by displayLinkCallback:. Idempotent — a
/// second call while an observer exists is a no-op.
- (void)setupRunLoopObserver {
  if (self.runLoopObserver) {
    return;
  }

  // Weak capture so the observer does not keep the engine alive.
  __weak typeof(self) weakSelf = self;
  CFRunLoopObserverRef observer = CFRunLoopObserverCreateWithHandler(
      kCFAllocatorDefault, kCFRunLoopBeforeWaiting, true, 0,
      ^(CFRunLoopObserverRef observer, CFRunLoopActivity activity) {
        __strong typeof(weakSelf) strongSelf = weakSelf;
        if (!strongSelf) {
          return;
        }
        // Only fire when a capture was requested by the display-link tick.
        if (!strongSelf.runLoopCapturePending) {
          return;
        }
        strongSelf.runLoopCapturePending = NO;
        if (!strongSelf.internalIsRecording || strongSelf.isShuttingDown) {
          return;
        }
        [strongSelf captureVideoFrame];
      });

  if (!observer) {
    return;
  }

  // The observer is a Create-rule CF object (+1); ownership is held in
  // self.runLoopObserver and released by teardownRunLoopObserver.
  CFRunLoopAddObserver(CFRunLoopGetMain(), observer, kCFRunLoopCommonModes);
  self.runLoopObserver = observer;
}
|
|
1049
|
+
|
|
1050
|
+
/// Removes and releases the main-run-loop observer and clears any capture
/// request that was pending on it. Safe to call when no observer exists.
- (void)teardownRunLoopObserver {
  if (!self.runLoopObserver)
    return;

  CFRunLoopRemoveObserver(CFRunLoopGetMain(), self.runLoopObserver,
                          kCFRunLoopCommonModes);
  // Balances the +1 from CFRunLoopObserverCreateWithHandler.
  CFRelease(self.runLoopObserver);
  self.runLoopObserver = NULL;
  self.runLoopCapturePending = NO;
}
|
|
1061
|
+
|
|
1062
|
+
/// Invalidates the display link (if any) and tears down the companion
/// run-loop observer.
- (void)teardownDisplayLink {
  // Messaging nil is a no-op, so no guard is needed around invalidate.
  [_displayLink invalidate];
  _displayLink = nil;
  [self teardownRunLoopObserver];
}
|
|
1069
|
+
|
|
1070
|
+
/// CADisplayLink tick handler. Throttles to the configured FPS and defers the
/// actual capture to the next run-loop idle cycle rather than doing heavy
/// work inside the VSYNC callback.
- (void)displayLinkCallback:(CADisplayLink *)displayLink {
  if (!self.internalIsRecording || self.isShuttingDown)
    return;

  NSTimeInterval now = CACurrentMediaTime();
  NSTimeInterval minInterval = 1.0 / (CGFloat)self.videoFPS;

  // Drop ticks arriving faster than the target frame interval.
  if (now - self.lastIntentTime < minInterval)
    return;
  self.lastIntentTime = now;

  // Flag the capture for the kCFRunLoopBeforeWaiting observer installed by
  // setupRunLoopObserver (re-installed here in case it was torn down).
  self.runLoopCapturePending = YES;
  [self setupRunLoopObserver];
}
|
|
1088
|
+
|
|
1089
|
+
/// Entry point for capturing one frame. Validates recording state,
/// performance level and UI readiness, then creates a "pending capture" which
/// the heuristics engine either renders immediately or defers until layout
/// settles (bounded by a grace deadline).
- (void)captureVideoFrame {
  if (!self.internalIsRecording || self.isShuttingDown)
    return;
  if (!self.uiReadyForCapture) {
    // Fix: this log was throttled on framesSinceSessionStart % 60, but that
    // counter is never incremented on this path, so the message either fired
    // on every skipped tick or never. Use a skip counter like the Paused
    // branch below.
    static NSInteger notReadySkipCount = 0;
    if (++notReadySkipCount % 60 == 0)
      RJLogDebug(@"Skipping capture: UI not ready");
    return;
  }
  if (self.internalPerformanceLevel == RJPerformanceLevelPaused) {
    static NSInteger pauseSkipCount = 0;
    if (++pauseSkipCount % 60 == 0)
      RJLogDebug(@"Skipping capture: Performance Paused");
    return;
  }
  NSTimeInterval now = CACurrentMediaTime();

  // Flush any still-outstanding pending capture by forcing its deadline to
  // "now" before starting a new one.
  if (self.pendingCapture) {
    self.pendingCapture.deadline = now;
    [self attemptPendingCapture:self.pendingCapture fullScan:NO];
  }

  RJCapturePendingCapture *pending = [[RJCapturePendingCapture alloc] init];
  pending.wantedAt = now;
  NSTimeInterval grace = self.captureHeuristics.captureGraceSeconds;
  // Cap the settle window at 300 ms while animation/scroll/keyboard activity
  // is detected, so frames are still emitted during sustained motion.
  if (self.captureHeuristics.animationBlocking ||
      self.captureHeuristics.scrollActive ||
      self.captureHeuristics.keyboardAnimating) {
    grace = MIN(grace, 0.3);
  }
  pending.deadline = now + grace;
  pending.timestamp = [self currentTimestamp];
  // Bumping the generation invalidates deferred attempts for older captures.
  pending.generation = ++self.pendingCaptureGeneration;

  self.pendingCapture = pending;

  [self attemptPendingCapture:pending fullScan:YES];
}
|
|
1126
|
+
|
|
1127
|
+
/// Drives one attempt at completing a pending capture. Scans the view
/// hierarchy (full or stability-probe), consults the heuristics engine, and
/// either emits a frame, re-schedules itself, or drops the capture.
/// @param pending  The capture request; ignored if superseded.
/// @param fullScan YES to force a full hierarchy scan (first attempt),
///        NO to reuse the previous scan and run a cheaper stability probe.
- (void)attemptPendingCapture:(RJCapturePendingCapture *)pending
                     fullScan:(BOOL)fullScan {
  // Ignore stale requests that were replaced by a newer pending capture.
  if (!pending || pending != self.pendingCapture)
    return;
  if (!self.internalIsRecording || self.isShuttingDown)
    return;

  NSTimeInterval now = CACurrentMediaTime();
  // Past the grace deadline: emit whatever we have (without re-rendering)
  // rather than deferring further.
  if (now > pending.deadline) {
    UIWindow *window = self.windowProvider ? self.windowProvider() : nil;
    if (window) {
      [self emitFrameForPendingCapture:pending
                                window:window
                          shouldRender:NO
                                reason:RJCaptureHeuristicsReasonDeadlineExpired
                                   now:now];
    } else {
      self.pendingCapture = nil;
    }
    return;
  }

  // Another capture is mid-flight; poll again shortly.
  if (self.captureInProgress) {
    [self schedulePendingCaptureAttemptWithDelay:self.captureHeuristics
                                                     .pollIntervalSeconds
                                      generation:pending.generation];
    return;
  }

  UIWindow *window = self.windowProvider ? self.windowProvider() : nil;
  if (!window) {
    // No window yet — retry while the deadline allows, otherwise give up.
    NSTimeInterval pollInterval = self.captureHeuristics.pollIntervalSeconds;
    if (now + pollInterval <= pending.deadline) {
      [self schedulePendingCaptureAttemptWithDelay:pollInterval
                                        generation:pending.generation];
    } else {
      self.pendingCapture = nil;
    }
    return;
  }

  if (fullScan || !pending.scanResult) {
    // Full hierarchy scan: feeds both masking data and the heuristics engine.
    RJ_TIME_START_NAMED(viewScan);

    RJViewHierarchyScanResult *scanResult = nil;
    @try {
      NSArray<UIWindow *> *windows = [self cachedWindows];
      scanResult = [self.viewScanner scanWindows:windows
                                relativeToWindow:window];
    } @catch (NSException *exception) {
      RJLogWarning(@"CaptureEngine: View scan failed: %@", exception);
    }
    // Fall back to an empty result so downstream code never sees nil.
    if (!scanResult) {
      scanResult = [[RJViewHierarchyScanResult alloc] init];
    }
    pending.scanResult = scanResult;
    pending.layoutSignature = scanResult.layoutSignature;
    pending.lastScanTime = now;

    if (scanResult.hasMapView || scanResult.mapViewFrames.count > 0) {
      self.lastMapPresenceTime = now;
    }

    RJ_TIME_END_NAMED(viewScan, RJPerfMetricViewScan);

    @try {
      [self.captureHeuristics updateWithScanResult:scanResult
                                            window:window
                                               now:now];
    } @catch (NSException *exception) {
      RJLogWarning(@"CaptureEngine: Heuristics update failed: %@", exception);
    }
  } else {
    // Re-attempt with an existing scan: run the cheaper stability probe only.
    @try {
      [self.captureHeuristics updateWithStabilityProbeForWindow:window now:now];
    } @catch (NSException *exception) {
      RJLogWarning(@"CaptureEngine: Stability probe failed: %@", exception);
    }
  }

  RJCaptureHeuristicsDecision *decision = [self.captureHeuristics
      decisionForSignature:pending.layoutSignature
                       now:now
              hasLastFrame:(self.lastCapturedPixelBuffer != NULL)];

  // About to render from a possibly-stale scan: refresh the hierarchy first
  // so masks/signature match what actually gets rendered.
  if (decision.action == RJCaptureHeuristicsActionRenderNow && !fullScan) {
    RJ_TIME_START_NAMED(viewScan);
    RJViewHierarchyScanResult *refreshResult = nil;
    @try {
      NSArray<UIWindow *> *windows = [self cachedWindows];
      refreshResult = [self.viewScanner scanWindows:windows
                                   relativeToWindow:window];
    } @catch (NSException *exception) {
      RJLogWarning(@"CaptureEngine: View refresh scan failed: %@", exception);
    }
    if (!refreshResult) {
      refreshResult = [[RJViewHierarchyScanResult alloc] init];
    }
    pending.scanResult = refreshResult;
    pending.layoutSignature = refreshResult.layoutSignature;
    pending.lastScanTime = now;

    if (refreshResult.hasMapView || refreshResult.mapViewFrames.count > 0) {
      self.lastMapPresenceTime = now;
    }
    RJ_TIME_END_NAMED(viewScan, RJPerfMetricViewScan);

    @try {
      [self.captureHeuristics updateWithScanResult:refreshResult
                                            window:window
                                               now:now];
    } @catch (NSException *exception) {
      RJLogWarning(@"CaptureEngine: Heuristics refresh failed: %@", exception);
    }
    // Re-evaluate the decision with the refreshed signature.
    decision = [self.captureHeuristics
        decisionForSignature:pending.layoutSignature
                         now:now
                hasLastFrame:(self.lastCapturedPixelBuffer != NULL)];
  }

  // Only log the defer decision on the first (full-scan) attempt.
  if (decision.action == RJCaptureHeuristicsActionDefer && fullScan) {
    [self logCaptureDecision:decision pending:pending];
  }

  if (decision.action == RJCaptureHeuristicsActionDefer) {
    NSTimeInterval pollInterval = self.captureHeuristics.pollIntervalSeconds;
    NSTimeInterval deferUntil = MAX(decision.deferUntil, now + pollInterval);
    // Deferring past the deadline is pointless — emit without rendering now.
    if (deferUntil > pending.deadline) {
      [self emitFrameForPendingCapture:pending
                                window:window
                          shouldRender:NO
                                reason:RJCaptureHeuristicsReasonDeadlineExpired
                                   now:now];
      return;
    }
    [self schedulePendingCaptureAttemptWithDelay:(deferUntil - now)
                                      generation:pending.generation];
    return;
  }

  BOOL shouldRender = (decision.action == RJCaptureHeuristicsActionRenderNow);
  [self emitFrameForPendingCapture:pending
                            window:window
                      shouldRender:shouldRender
                            reason:decision.reason
                               now:now];
}
|
|
1274
|
+
|
|
1275
|
+
/// Re-runs attemptPendingCapture on the main queue after `delay` seconds,
/// but only if the pending capture with the given generation is still the
/// current one (a newer capture invalidates the retry).
- (void)schedulePendingCaptureAttemptWithDelay:(NSTimeInterval)delay
                                    generation:(NSInteger)generation {
  NSTimeInterval safeDelay = MAX(0.0, delay);
  dispatch_after(
      dispatch_time(DISPATCH_TIME_NOW, (int64_t)(safeDelay * NSEC_PER_SEC)),
      dispatch_get_main_queue(), ^{
        if (!self.pendingCapture ||
            self.pendingCapture.generation != generation) {
          return; // Superseded by a newer capture request.
        }
        [self attemptPendingCapture:self.pendingCapture fullScan:NO];
      });
}
|
|
1288
|
+
|
|
1289
|
+
/// Blocks the calling thread until the uploader reports no pending segment
/// uploads, or until `timeout` seconds elapse. On the main thread the run
/// loop is pumped in 50 ms slices (so callbacks can still fire); background
/// threads simply sleep between polls.
- (void)waitForPendingSegmentUploadsWithTimeout:(NSTimeInterval)timeout {
  if (!self.internalSegmentUploader) {
    return;
  }

  NSTimeInterval deadline = CACurrentMediaTime() + MAX(0.0, timeout);
  BOOL onMainThread = [NSThread isMainThread];
  while (self.internalSegmentUploader.pendingUploads > 0 &&
         CACurrentMediaTime() < deadline) {
    if (onMainThread) {
      [[NSRunLoop currentRunLoop]
            runMode:NSDefaultRunLoopMode
         beforeDate:[NSDate dateWithTimeIntervalSinceNow:0.05]];
    } else {
      [NSThread sleepForTimeInterval:0.05];
    }
  }

  if (self.internalSegmentUploader.pendingUploads > 0) {
    RJLogWarning(
        @"CaptureEngine: Pending segment uploads did not finish before timeout"
        @" (%ld remaining)",
        (long)self.internalSegmentUploader.pendingUploads);
  }
}
|
|
1313
|
+
|
|
1314
|
+
/// Schedules a "defensive" frame capture `delay` seconds from now, coalescing
/// with any already-scheduled one: a request that would fire no earlier than
/// the pending one is dropped. A generation counter invalidates superseded
/// dispatch blocks.
/// @param delay  Seconds from now; clamped to >= 0.
/// @param reason Optional label used only for debug logging.
- (void)requestDefensiveCaptureAfterDelay:(NSTimeInterval)delay
                                   reason:(NSString *)reason {
  if (!self.internalIsRecording || self.isShuttingDown) {
    return;
  }

  NSTimeInterval now = CACurrentMediaTime();
  NSTimeInterval target = now + MAX(0.0, delay);
  // Coalesce: keep the earlier of the two captures (10 ms tolerance).
  if (self.pendingDefensiveCaptureTime > 0 &&
      target >= self.pendingDefensiveCaptureTime - 0.01) {
    return;
  }

  self.pendingDefensiveCaptureTime = target;
  // Bumping the generation invalidates any previously dispatched block.
  NSInteger generation = ++self.pendingDefensiveCaptureGeneration;

  if (reason.length > 0) {
    RJLogDebug(@"CaptureEngine: scheduling defensive capture (%@)", reason);
  }

  dispatch_after(dispatch_time(DISPATCH_TIME_NOW,
                               (int64_t)((target - now) * NSEC_PER_SEC)),
                 dispatch_get_main_queue(), ^{
                   if (!self.internalIsRecording || self.isShuttingDown) {
                     return;
                   }
                   // A newer request superseded this block.
                   if (self.pendingDefensiveCaptureGeneration != generation) {
                     return;
                   }
                   self.pendingDefensiveCaptureTime = 0;
                   // Reset the throttle so captureVideoFrame is not skipped.
                   self.lastIntentTime = CACurrentMediaTime();
                   [self captureVideoFrame];
                 });
}
|
|
1348
|
+
|
|
1349
|
+
// Produces one session frame for `pending` and hands it to the encoder.
//
// Synchronous stage: validates the pending capture and window, renders the
// window into a native-resolution pixel buffer (or, when rendering is
// skipped/fails, reuses the last captured buffer — preferring the last
// "safe" buffer, i.e. one with no camera/webview/video surfaces, when the
// current scan shows none), and refreshes the cached buffers/signatures.
// Asynchronous stage (on encodingQueue): downscale, privacy-mask, append
// to the video encoder. Also periodically serializes the view hierarchy
// into hierarchySnapshots.
//
// @param pending      The capture request; ignored unless it is still
//                     self.pendingCapture.
// @param window       Window to render. Invalid/empty bounds abort the frame.
// @param shouldRender YES to attempt a fresh render; NO to reuse a cache.
// @param reason       Heuristics reason, used only for debug logging.
// @param now          Caller-supplied CACurrentMediaTime()-based timestamp.
- (void)emitFrameForPendingCapture:(RJCapturePendingCapture *)pending
                            window:(UIWindow *)window
                      shouldRender:(BOOL)shouldRender
                            reason:(RJCaptureHeuristicsReason)reason
                               now:(NSTimeInterval)now {
  // Stale request: a newer pendingCapture superseded this one.
  if (!pending || pending != self.pendingCapture)
    return;
  if (!window || CGRectIsEmpty(window.bounds) ||
      window.bounds.size.width <= 0 || window.bounds.size.height <= 0) {
    self.pendingCapture = nil;
    self.captureInProgress = NO;
    return;
  }

  NSString *reasonLabel = [RJCaptureHeuristics stringForReason:reason];
  if (reasonLabel.length > 0) {
    RJLogDebug(@"CaptureEngine: %@", reasonLabel);
  }

  RJ_TIME_START;

  self.pendingCapture = nil;

  self.framesSinceSessionStart++;
  self.captureInProgress = YES;

  RJViewHierarchyScanResult *scanForFrame =
      pending.scanResult ?: [[RJViewHierarchyScanResult alloc] init];
  RJViewHierarchyScanResult *maskScanResult = scanForFrame;
  NSString *currentSignature = pending.layoutSignature;
  // "Blocked" surfaces are ones the frame may not be allowed to show
  // as-is (camera previews, webviews, video players).
  BOOL hasBlockedSurface = (scanForFrame.cameraFrames.count > 0 ||
                            scanForFrame.webViewFrames.count > 0 ||
                            scanForFrame.videoFrames.count > 0);

  // Layout change drives whether the view hierarchy gets re-serialized
  // further below.
  BOOL layoutChanged =
      (currentSignature.length == 0 ||
       ![currentSignature isEqualToString:self.lastSerializedSignature]);
  self.lastSerializedSignature = currentSignature;

  // ===== CAPTURE LAYOUTS =====
  RJCaptureLayout targetLayout = [self currentCaptureLayoutForWindow:window];
  CGFloat targetScale = targetLayout.unifiedScale;

  // Native layout renders at full screen resolution; the downscale to
  // targetLayout happens later on the encoding queue.
  RJCaptureLayout nativeLayout = targetLayout;
  CGFloat screenScale =
      window.screen ? window.screen.scale : [UIScreen mainScreen].scale;
  size_t nativeW = (size_t)(window.bounds.size.width * screenScale);
  size_t nativeH = (size_t)(window.bounds.size.height * screenScale);
  // Round down to even dimensions (video encoders generally require this).
  nativeW = (nativeW / 2) * 2;
  nativeH = (nativeH / 2) * 2;
  nativeLayout.pixelWidth = nativeW;
  nativeLayout.pixelHeight = nativeH;
  nativeLayout.unifiedScale = screenScale;

  // (Re)create the downscale pool when the target dimensions change.
  if (!_pixelBufferPool || _poolWidth != targetLayout.pixelWidth ||
      _poolHeight != targetLayout.pixelHeight) {
    [self createPixelBufferPoolWithWidth:targetLayout.pixelWidth
                                  height:targetLayout.pixelHeight];
  }

  __weak typeof(self) weakSelf = self;

  @autoreleasepool {
    CVPixelBufferRef nativePixelBuffer = NULL;
    BOOL didRender = NO;
    BOOL usingCachedBuffer = NO;
    RJViewHierarchyScanResult *cachedMaskResult = nil;

    if (shouldRender) {
      RJ_TIME_START_NAMED(screenshot);
      // Returns a +1-retained buffer (or NULL); ownership transfers to
      // this scope and ultimately to the encoding block below.
      nativePixelBuffer = [self capturePixelBufferFromWindow:window
                                                  withLayout:nativeLayout
                                                  scanResult:scanForFrame];
      RJ_TIME_END_NAMED(screenshot, RJPerfMetricScreenshot);
      didRender = (nativePixelBuffer != NULL);
    }

    // Fallback: reuse the previous frame when no fresh render is available.
    if (!nativePixelBuffer && self.lastCapturedPixelBuffer) {
      if (!hasBlockedSurface && self.lastCapturedHadBlockedSurface &&
          self.lastSafePixelBuffer) {
        // The screen no longer shows a blocked surface but the last
        // captured frame did — reuse the older "safe" frame instead.
        nativePixelBuffer = CVPixelBufferRetain(self.lastSafePixelBuffer);
        usingCachedBuffer = YES;
        cachedMaskResult = self.lastSafeMaskScanResult;
      } else {
        nativePixelBuffer = CVPixelBufferRetain(self.lastCapturedPixelBuffer);
        usingCachedBuffer = YES;
        cachedMaskResult = self.lastMaskScanResult;
      }
      if (shouldRender) {
        // We tried to render and failed; note the reuse for diagnostics.
        RJLogDebug(
            @"CaptureEngine: %@",
            [RJCaptureHeuristics
                stringForReason:RJCaptureHeuristicsReasonRenderFailedReuse]);
        reason = RJCaptureHeuristicsReasonRenderFailedReuse;
      }
    }

    // A cached buffer must be masked with the scan that matched it, not
    // the current scan.
    if (usingCachedBuffer && cachedMaskResult) {
      maskScanResult = cachedMaskResult;
    }

    if (!nativePixelBuffer) {
      self.captureInProgress = NO;
      return;
    }

    // Fresh render: refresh the caches (retain/release pairs keep exactly
    // one cached reference per slot).
    if (didRender) {
      if (self.lastCapturedPixelBuffer) {
        CVPixelBufferRelease(self.lastCapturedPixelBuffer);
      }
      self.lastCapturedPixelBuffer = CVPixelBufferRetain(nativePixelBuffer);
      [self.captureHeuristics recordRenderedSignature:currentSignature
                                               atTime:now];
      self.lastMaskScanResult = scanForFrame;
      self.lastCapturedHadBlockedSurface = hasBlockedSurface;
      if (!hasBlockedSurface) {
        if (self.lastSafePixelBuffer) {
          CVPixelBufferRelease(self.lastSafePixelBuffer);
        }
        self.lastSafePixelBuffer = CVPixelBufferRetain(nativePixelBuffer);
        self.lastSafeMaskScanResult = scanForFrame;
      }
    }

    RJ_TIME_END(RJPerfMetricFrame);
    RJ_PERF_DUMP_IF_NEEDED();

    NSTimeInterval timestamp = pending.timestamp;

    // Hand the +1 reference on nativePixelBuffer to this block; every path
    // inside releases it exactly once.
    dispatch_async(self.encodingQueue, ^{
      @autoreleasepool {
        __strong typeof(weakSelf) strongSelf = weakSelf;
        if (!strongSelf || strongSelf.isShuttingDown) {
          CVPixelBufferRelease(nativePixelBuffer);
          return;
        }

        CVPixelBufferRef scaledBuffer = NULL;
        if (nativePixelBuffer) {
          RJ_TIME_START_NAMED(downscale);
          RJDownscaleQuality quality = RJDownscaleQualityBalanced;
          if (strongSelf.internalPerformanceLevel == RJPerformanceLevelNormal &&
              strongSelf.captureScale >= 0.5) {
            quality = RJDownscaleQualityHigh;
          }
          scaledBuffer =
              [RJPixelBufferDownscaler downscale:nativePixelBuffer
                                             toW:targetLayout.pixelWidth
                                             toH:targetLayout.pixelHeight
                                       usingPool:strongSelf.pixelBufferPool
                                         quality:quality];
          RJ_TIME_END_NAMED(downscale, RJPerfMetricDownscale);
        }

        CVPixelBufferRelease(nativePixelBuffer);

        if (!scaledBuffer) {
          RJLogWarning(@"CaptureEngine: Downscale failed");
          return;
        }

        // Mask privacy-sensitive regions on the downscaled buffer before
        // it reaches the encoder.
        if (strongSelf.privacyMask) {
          CGFloat safeScale =
              (isfinite(targetScale) && targetScale > 0.0) ? targetScale : 1.0;
          [strongSelf.privacyMask applyToPixelBuffer:scaledBuffer
                                      withScanResult:maskScanResult
                                               scale:safeScale];
        }

        RJ_TIME_START_NAMED(encode);
        [strongSelf.internalVideoEncoder appendPixelBuffer:scaledBuffer
                                                 timestamp:timestamp];
        RJ_TIME_END_NAMED(encode, RJPerfMetricEncode);

        CVPixelBufferRelease(scaledBuffer);
      }
    });

    self.captureInProgress = NO;

    // ===== Periodic view-hierarchy serialization =====
    self.framesSinceHierarchy++;
    // Skip serialization while a scroll is active (hierarchy is in flux).
    BOOL shouldSerialize = !scanForFrame.scrollActive;

    // Serialize on the first frame, when the layout changed and enough
    // frames have passed, or unconditionally every 30 frames.
    if (shouldSerialize &&
        (self.framesSinceHierarchy == 1 ||
         (layoutChanged &&
          self.framesSinceHierarchy >= self.hierarchyCaptureInterval) ||
         self.framesSinceHierarchy >= 30)) {

      NSString *screenName = self.currentScreenName;

      RJ_TIME_START_NAMED(serialize);
      NSDictionary *hierarchy = nil;
      @try {
        if (self.internalViewSerializer) {
          hierarchy =
              [self.internalViewSerializer serializeWindow:window
                                            withScanResult:scanForFrame];
        }
      } @catch (NSException *exception) {
        RJLogWarning(@"CaptureEngine: View serialization failed: %@",
                     exception);
      }
      RJ_TIME_END_NAMED(serialize, RJPerfMetricViewSerialize);

      if (hierarchy && hierarchy.count > 0) {
        if (screenName) {
          // Tag the snapshot with the current screen when known.
          NSMutableDictionary *enriched = [hierarchy mutableCopy];
          enriched[@"screenName"] = screenName;
          [self.hierarchySnapshots addObject:enriched];
        } else {
          [self.hierarchySnapshots addObject:hierarchy];
        }
      }

      self.framesSinceHierarchy = 0;
    }
  }
}
|
|
1568
|
+
|
|
1569
|
+
/// Logs a capture-heuristics decision together with the remaining time
/// budget of the pending capture it applies to. Silent when there is no
/// decision or the reason has no printable label.
- (void)logCaptureDecision:(RJCaptureHeuristicsDecision *)decision
                   pending:(RJCapturePendingCapture *)pending {
  if (!decision) {
    return;
  }

  NSString *label = [RJCaptureHeuristics stringForReason:decision.reason];
  if (label.length == 0) {
    return;
  }

  // Milliseconds until the pending capture's deadline; 0 when there is no
  // pending capture to measure against.
  NSTimeInterval budgetMs = 0;
  if (pending) {
    budgetMs = (pending.deadline - CACurrentMediaTime()) * 1000.0;
  }
  RJLogDebug(@"CaptureEngine: %@ (deadline=%.0fms)", label, budgetMs);
}
|
|
1582
|
+
|
|
1583
|
+
// Renders `window` into a freshly obtained BGRA pixel buffer via
// -drawViewHierarchyInRect:afterScreenUpdates:NO.
//
// Returns an owned (+1 retained) CVPixelBufferRef that the caller must
// release, or NULL when the window is unusable, the app is backgrounded,
// or rendering fails. `scanResult` is not referenced in this method
// (masking happens later on the downscaled buffer).
- (CVPixelBufferRef)capturePixelBufferFromWindow:(UIWindow *)window
                                      withLayout:(RJCaptureLayout)layout
                                      scanResult:(RJViewHierarchyScanResult *)
                                                     scanResult {
  if (!window)
    return NULL;
  if (window.isHidden || window.alpha <= 0.01) {
    return NULL;
  }

  // Optimize window check (A2)
  CGRect winBounds = window.bounds;
  // Reject NaN/inf geometry before doing any arithmetic with it.
  if (!isfinite(winBounds.origin.x) || !isfinite(winBounds.origin.y) ||
      !isfinite(winBounds.size.width) || !isfinite(winBounds.size.height)) {
    return NULL;
  }
  if (CGRectIsEmpty(winBounds) || winBounds.size.width <= 0 ||
      winBounds.size.height <= 0) {
    return NULL;
  }

  // CRITICAL: Skip during background
  if (self.isInBackground)
    return NULL;

  // Start Immediately - Removed Warmup Delay logic that caused missing startup
  // frames

  size_t width = layout.pixelWidth;
  size_t height = layout.pixelHeight;

  if (width < 2 || height < 2)
    return NULL;

  CGFloat contextScale = layout.unifiedScale;
  CGSize sizePoints = window.bounds.size;
  if (!isfinite(sizePoints.width) || !isfinite(sizePoints.height) ||
      sizePoints.width <= 0 || sizePoints.height <= 0) {
    return NULL;
  }

  // Re-verify context scale based on pixel dimensions to be exact
  if (sizePoints.width > 0) {
    contextScale = (CGFloat)width / sizePoints.width;
  }
  if (!isfinite(contextScale) || contextScale <= 0) {
    return NULL;
  }

  // ===== PIXEL BUFFER ALLOCATION (NATIVE) =====
  RJ_TIME_START_NAMED(buffer);
  // Use NATIVE pool
  CVPixelBufferRef pixelBuffer =
      [self createNativePixelBufferFromPoolWithWidth:width height:height];
  RJ_TIME_END_NAMED(buffer, RJPerfMetricBufferAlloc);

  if (!pixelBuffer) {
    RJLogWarning(@"CaptureEngine: Failed to obtain pixel buffer");
    return NULL;
  }

  // Lock while CoreGraphics draws directly into the buffer's backing
  // memory; every exit path below must unlock (and release on failure).
  CVPixelBufferLockBaseAddress(pixelBuffer, 0);
  void *baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer);
  size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);

  // Optimization #10: Use cached color space
  if (!self.commonColorSpace) {
    self.commonColorSpace = CGColorSpaceCreateDeviceRGB();
  }
  CGColorSpaceRef colorSpace = self.commonColorSpace;

  // BGRA, alpha skipped — draws straight into the pixel buffer with no
  // intermediate image.
  CGContextRef context = CGBitmapContextCreate(
      baseAddress, width, height, 8, bytesPerRow, colorSpace,
      kCGImageAlphaNoneSkipFirst | kCGBitmapByteOrder32Little);

  if (!context) {
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    CVPixelBufferRelease(pixelBuffer);
    return NULL;
  }

  if (self.internalPerformanceLevel == RJPerformanceLevelMinimal) {
    // Minimal mode trades quality for speed
    CGContextSetInterpolationQuality(context, kCGInterpolationNone);
    CGContextSetShouldAntialias(context, false);
    CGContextSetAllowsAntialiasing(context, false);
  } else {
    CGContextSetInterpolationQuality(context, kCGInterpolationDefault);
    CGContextSetShouldAntialias(context, true);
    CGContextSetAllowsAntialiasing(context, true);
  }

  // Set up context transform (flip for UIKit coordinates)
  CGContextScaleCTM(context, contextScale, -contextScale);
  CGContextTranslateCTM(context, 0, -sizePoints.height);

  // Optimization #9: Fast Memset Clear (White = 0xFF)
  // Much faster than CGContextFillRect
  memset(baseAddress, 0xFF, bytesPerRow * height);

  UIGraphicsPushContext(context);

  // ===== RENDERING: ALWAYS USE drawViewHierarchyInRect =====
  RJ_TIME_START_NAMED(render);
  BOOL didDraw = NO;
  @try {
    // afterScreenUpdates:NO avoids forcing a synchronous layout pass.
    didDraw = [window drawViewHierarchyInRect:window.bounds
                           afterScreenUpdates:NO];
  } @catch (NSException *exception) {
    RJLogWarning(@"CaptureEngine: drawViewHierarchy failed: %@", exception);
    didDraw = NO;
  }
  RJ_TIME_END_NAMED(render, RJPerfMetricRender);

  UIGraphicsPopContext();
  CGContextRelease(context);
  CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);

  if (!didDraw) {
    CVPixelBufferRelease(pixelBuffer);
    return NULL;
  }

  // Recalculate effective scale so consumers (PrivacyMask) know the real
  // mapping Used by caller to pass to applyToPixelBuffer Note: we don't need to
  // return it, caller has it.

  return pixelBuffer;
}
|
|
1712
|
+
|
|
1713
|
+
/// Serializes any accumulated view-hierarchy snapshots to JSON, hands them
/// to the segment uploader, and clears the local buffer on success.
///
/// When uploads are disabled the snapshots are discarded (so the buffer
/// cannot grow unbounded). On serialization failure the snapshots are kept
/// so a later call can retry. NSJSONSerialization exceptions (thrown for
/// non-JSON-encodable objects) are caught and logged rather than allowed
/// to crash the host app.
- (void)uploadCurrentHierarchySnapshots {
  if (!self.internalSegmentUploader || self.hierarchySnapshots.count == 0)
    return;

  if (!self.uploadsEnabled) {
    [self.hierarchySnapshots removeAllObjects];
    return;
  }

  @try {
    NSError *error = nil;
    NSData *jsonData =
        [NSJSONSerialization dataWithJSONObject:self.hierarchySnapshots
                                        options:0
                                          error:&error];
    // Cocoa convention: the nil return value is the failure signal; the
    // error pointer may legitimately be non-nil even on success, so do not
    // treat it as the signal (the original check did, and could drop a
    // perfectly good payload).
    if (!jsonData) {
      RJLogWarning(@"CaptureEngine: Failed to serialize hierarchy: %@",
                   error);
      return;
    }

    NSTimeInterval timestamp = [self currentTimestamp];
    [self.internalSegmentUploader uploadHierarchy:jsonData
                                        sessionId:self.internalSessionId
                                        timestamp:timestamp
                                       completion:nil];

    [self.hierarchySnapshots removeAllObjects];
  } @catch (NSException *exception) {
    RJLogWarning(@"CaptureEngine: Upload hierarchy failed: %@", exception);
  }
}
|
|
1747
|
+
|
|
1748
|
+
/// Current wall-clock time in milliseconds since the Unix epoch, used for
/// all session timestamps.
///
/// CACurrentMediaTime was deliberately abandoned here: it drifts relative
/// to wall time across background periods, and the ~1ms NSDate overhead is
/// negligible at a ~1fps capture rate.
- (NSTimeInterval)currentTimestamp {
  NSTimeInterval secondsSinceEpoch = NSDate.date.timeIntervalSince1970;
  return secondsSinceEpoch * 1000.0;
}
|
|
1754
|
+
|
|
1755
|
+
#pragma mark - RJVideoEncoderDelegate
|
|
1756
|
+
|
|
1757
|
+
// RJVideoEncoderDelegate callback: a video segment file is complete.
// Uploads the segment (and any pending hierarchy snapshots), or deletes it
// when uploads are disabled. All work hops onto encodingQueue because the
// callback arrives on the encoder's own queue and hierarchySnapshots is
// only touched from encodingQueue.
//
// @param segmentURL Local file URL of the finished segment.
// @param sessionId  Session the segment belongs to; nil aborts the upload.
// @param startTime  Segment start timestamp (milliseconds — see the /1000.0
//                   in the log below).
// @param endTime    Segment end timestamp (same unit as startTime).
// @param frameCount Number of frames encoded into the segment.
- (void)videoEncoderDidFinishSegment:(NSURL *)segmentURL
                           sessionId:(NSString *)sessionId
                           startTime:(NSTimeInterval)startTime
                             endTime:(NSTimeInterval)endTime
                          frameCount:(NSInteger)frameCount {

  // Ensure we are on our own encoding queue to protect hierarchySnapshots
  // and maintain thread safety (callback comes from VideoEncoder queue)
  dispatch_async(self.encodingQueue, ^{
    RJLogDebug(@"CaptureEngine: videoEncoderDidFinishSegment: %@ (%ld frames, "
               @"%.1fs), sessionId=%@",
               segmentURL.lastPathComponent, (long)frameCount,
               (endTime - startTime) / 1000.0, sessionId);

    if (!self.uploadsEnabled) {
      // Uploads are off: delete the file and drop snapshots so nothing
      // accumulates on disk or in memory.
      if (segmentURL) {
        [[NSFileManager defaultManager] removeItemAtURL:segmentURL error:nil];
      }
      [self.hierarchySnapshots removeAllObjects];
      RJLogInfo(@"CaptureEngine: Segment upload skipped (uploads disabled)");
      return;
    }

    if (self.internalSegmentUploader && sessionId) {
      RJLogDebug(@"CaptureEngine: Calling uploadVideoSegment");
      [self.internalSegmentUploader
          uploadVideoSegment:segmentURL
                   sessionId:sessionId
                   startTime:startTime
                     endTime:endTime
                  frameCount:frameCount
                  completion:^(BOOL success, NSError *error) {
                    if (!success) {
                      RJLogWarning(@"CaptureEngine: Segment upload FAILED: %@",
                                   error);
                    } else {
                      RJLogInfo(@"CaptureEngine: Segment upload SUCCESS: %@",
                                segmentURL.lastPathComponent);
                    }
                  }];
    } else if (!sessionId) {
      RJLogWarning(@"CaptureEngine: Cannot upload segment (sessionId is nil)");
    } else {
      RJLogWarning(
          @"CaptureEngine: Cannot upload segment (segmentUploader is nil)");
    }

    // Flush hierarchy snapshots alongside the segment they accompany.
    [self uploadCurrentHierarchySnapshots];

    // NUCLEAR FIX: Do NOT call startSegmentWithSize here!
    // The encoder's appendFrame method will auto-start a segment with the
    // correct PIXEL dimensions when the next frame is captured.
    if (self.internalIsRecording && !self.isShuttingDown) {
      RJLogDebug(
          @"CaptureEngine: Segment finished, auto-start new on next frame");
    }
  });
}
|
|
1815
|
+
|
|
1816
|
+
/// RJVideoEncoderDelegate callback: the encoder hit an unrecoverable
/// error. Logs it and, if a recording is active, stops the session so the
/// engine does not keep feeding a dead encoder.
- (void)videoEncoderDidFailWithError:(NSError *)error {
  RJLogError(@"Video encoder failed: %@", error);

  if (!self.internalIsRecording) {
    return;
  }

  RJLogWarning(@"Stopping recording due to encoder failure");
  [self stopSession];
}
|
|
1824
|
+
|
|
1825
|
+
#pragma mark - App Lifecycle
|
|
1826
|
+
|
|
1827
|
+
/// Pauses video capture, finishing the in-flight segment asynchronously on
/// the encoding queue (non-blocking variant).
- (void)pauseVideoCapture {
  [self pauseVideoCaptureWithSynchronousFinish:NO];
}
|
|
1830
|
+
|
|
1831
|
+
/// Pauses video capture and blocks until the in-flight segment is finished
/// (e.g. for app termination paths where async completion would be lost).
- (void)pauseVideoCaptureSync {
  [self pauseVideoCaptureWithSynchronousFinish:YES];
}
|
|
1834
|
+
|
|
1835
|
+
// Core pause implementation: tears down the display link, stops frame
// intake, finishes the encoder's current segment, and flushes pending
// hierarchy snapshots.
//
// @param synchronous YES blocks until the segment is finished on
//                    encodingQueue; NO finishes asynchronously.
- (void)pauseVideoCaptureWithSynchronousFinish:(BOOL)synchronous {
  RJLogDebug(@"CaptureEngine: pauseVideoCapture (isRecording=%d, sync=%d)",
             self.internalIsRecording, synchronous);

  if (!self.internalIsRecording) {
    RJLogDebug(@"CaptureEngine: pauseVideoCapture - NOT recording");
    return;
  }

  RJLogInfo(@"CaptureEngine: Pausing video capture (sync=%d)", synchronous);

  // Reset capture-in-progress flag immediately to prevent stuck state
  self.captureInProgress = NO;

  // Invalidate timer synchronously if in sync mode
  if (synchronous) {
    [self teardownDisplayLink];
  } else {
    dispatch_async(dispatch_get_main_queue(), ^{
      [self teardownDisplayLink];
    });
  }

  if (self.internalVideoEncoder) {
    self.internalIsRecording = NO; // prevent any new frames from being enqueued

    if (synchronous) {
      void (^finishSync)(void) = ^{
        RJLogDebug(@"CaptureEngine: Calling finishSegmentSync");
        [self.internalVideoEncoder finishSegmentSync];
      };
      // dispatch_sync onto the queue we are already on would deadlock, so
      // detect encodingQueue via its queue-specific key and call inline.
      if (dispatch_get_specific(kRJEncodingQueueKey)) {
        finishSync();
      } else {
        dispatch_sync(self.encodingQueue, finishSync);
      }
    } else {
      dispatch_async(self.encodingQueue, ^{
        RJLogDebug(@"CaptureEngine: Calling finishSegment (async)");
        [self.internalVideoEncoder finishSegment];
      });
    }
  } else {
    RJLogWarning(@"CaptureEngine: videoEncoder is nil, cannot finish segment");
  }

  // Flush any hierarchy snapshots captured since the last segment upload.
  if (self.hierarchySnapshots.count > 0) {
    RJLogDebug(@"CaptureEngine: Uploading %lu pending hierarchy snapshots",
               (unsigned long)self.hierarchySnapshots.count);
    [self uploadCurrentHierarchySnapshots];
  }
}
|
|
1887
|
+
|
|
1888
|
+
// Resumes video capture after a pause/background period. Requires an
// active session; re-arms the display link on the main queue and resets
// all per-frame capture state so the first resumed frame renders fresh
// (cached "safe" pixel buffer is released, signatures invalidated).
- (void)resumeVideoCapture {
  RJLogDebug(
      @"CaptureEngine: resumeVideoCapture (isRecording=%d, sessionId=%@)",
      self.internalIsRecording, self.internalSessionId);

  if (self.internalSessionId == nil) {
    RJLogDebug(@"CaptureEngine: resumeVideoCapture - NO active session");
    return;
  }

  // Set recording back to YES to allow captureVideoFrame to proceed
  self.internalIsRecording = YES;

  RJLogInfo(@"CaptureEngine: Resuming video capture");

  // Reset capture state to ensure clean resumption
  // These flags may have been left in an inconsistent state when going to
  // background
  self.captureInProgress = NO;
  self.lastIntentTime = 0; // Allow immediate capture on resume

  self.internalPerformanceLevel =
      RJPerformanceLevelNormal; // Reset perf level on resume

  // Invalidate everything cached from before the pause so stale frames or
  // layout signatures cannot leak into the resumed session.
  self.pendingCapture = nil;
  self.pendingCaptureGeneration = 0;
  [self.captureHeuristics reset];
  self.lastSerializedSignature = nil;
  self.lastMaskScanResult = nil;
  self.lastSafeMaskScanResult = nil;
  self.lastCapturedHadBlockedSurface = NO;
  if (self.lastSafePixelBuffer) {
    CVPixelBufferRelease(self.lastSafePixelBuffer);
    self.lastSafePixelBuffer = NULL;
  }

  // Display link setup must happen on the main queue.
  dispatch_async(dispatch_get_main_queue(), ^{
    UIWindow *window = self.windowProvider ? self.windowProvider() : nil;
    if (window && self.internalVideoEncoder) {
      RJLogInfo(@"CaptureEngine: Resuming capture...");

      // Use the optimized Display Link
      [self setupDisplayLink];

    } else {
      RJLogWarning(@"[RJ-CAPTURE] Cannot resume - window=%@ encoder=%@",
                   window ? @"exists" : @"nil",
                   self.internalVideoEncoder ? @"exists" : @"nil");
    }
  });
}
|
|
1939
|
+
|
|
1940
|
+
#pragma mark - Event Notifications (Metadata Enrichment)
|
|
1941
|
+
|
|
1942
|
+
/// Records a navigation to `screenName`. When the screen actually changed,
/// invalidates the cached layout signature (so the next frame is treated
/// as a layout change and re-serialized) and schedules a defensive capture.
- (void)notifyNavigationToScreen:(NSString *)screenName {
  if (!self.internalIsRecording) {
    return;
  }

  // Same screen as before: nothing to refresh.
  if ([screenName isEqualToString:self.currentScreenName]) {
    return;
  }

  NSTimeInterval now = CACurrentMediaTime();
  self.currentScreenName = screenName;
  RJLogDebug(@"Navigation to screen: %@ (forcing layout refresh)",
             screenName);

  // Force layout change detection on next frame
  [self.captureHeuristics invalidateSignature];
  [self.captureHeuristics recordNavigationEventAtTime:now];
  self.lastSerializedSignature = nil;
  [self requestDefensiveCaptureAfterDelay:kRJDefensiveCaptureDelayNavigation
                                   reason:@"navigation"];
}
|
|
1961
|
+
|
|
1962
|
+
/// Returns YES when `gestureType` should be treated as a map interaction:
/// a map was seen on screen recently (within kRJMapPresenceWindowSeconds)
/// and the gesture is a pan/zoom-style manipulation.
- (BOOL)shouldTreatGestureAsMap:(NSString *)gestureType
                            now:(NSTimeInterval)now {
  if (gestureType.length == 0) {
    return NO;
  }

  // Only meaningful while a map presence was recorded recently.
  BOOL mapRecentlyPresent =
      (self.lastMapPresenceTime > 0 &&
       (now - self.lastMapPresenceTime) <= kRJMapPresenceWindowSeconds);
  if (!mapRecentlyPresent) {
    return NO;
  }

  NSString *normalized = [gestureType lowercaseString];
  NSArray<NSString *> *manipulationPrefixes = @[
    @"scroll", @"pan", @"pinch", @"zoom", @"rotate", @"swipe", @"drag"
  ];
  for (NSString *prefix in manipulationPrefixes) {
    if ([normalized hasPrefix:prefix]) {
      return YES;
    }
  }
  return NO;
}
|
|
1978
|
+
|
|
1979
|
+
/// Records a user gesture for the capture heuristics and schedules an
/// appropriate defensive capture. Scrolls feed the touch-cadence
/// heuristic; map gestures get their own dedicated handling; everything
/// else counts as a discrete interaction.
- (void)notifyGesture:(NSString *)gestureType {
  if (!self.internalIsRecording) {
    return;
  }
  RJLogDebug(@"Gesture: %@", gestureType);

  NSTimeInterval now = CACurrentMediaTime();
  BOOL scrollGesture =
      (gestureType.length > 0 && [gestureType hasPrefix:@"scroll"]);
  BOOL mapGesture = [self shouldTreatGestureAsMap:gestureType now:now];

  if (scrollGesture) {
    [self.captureHeuristics recordTouchEventAtTime:now];
  } else {
    [self.captureHeuristics recordInteractionEventAtTime:now];
  }

  // Map gestures short-circuit the generic handling below with their own
  // defensive-capture delay.
  if (mapGesture) {
    [self.captureHeuristics recordMapInteractionAtTime:now];
    [self requestDefensiveCaptureAfterDelay:kRJDefensiveCaptureDelayMap
                                     reason:@"map"];
    return;
  }

  if (scrollGesture) {
    [self requestDefensiveCaptureAfterDelay:kRJDefensiveCaptureDelayScroll
                                     reason:@"scroll"];
    return;
  }

  [self requestDefensiveCaptureAfterDelay:kRJDefensiveCaptureDelayInteraction
                                   reason:@"interaction"];
}
|
|
2007
|
+
|
|
2008
|
+
/// Called when React Native commits a new view tree. The native layout may
/// not have changed observably, so the cached signature is invalidated and
/// a defensive capture is scheduled to pick up the new content.
- (void)notifyReactNativeCommit {
  if (!self.internalIsRecording) {
    return;
  }

  NSTimeInterval now = CACurrentMediaTime();
  [self.captureHeuristics invalidateSignature];
  [self.captureHeuristics recordInteractionEventAtTime:now];
  [self requestDefensiveCaptureAfterDelay:kRJDefensiveCaptureDelayInteraction
                                   reason:@"rn_commit"];
}
|
|
2018
|
+
|
|
2019
|
+
/// Marks the UI as ready so frame capture may proceed.
- (void)notifyUIReady {
  RJLogInfo(@"CaptureEngine: UI is ready for capture");
  self.uiReadyForCapture = YES;
}
|
|
2023
|
+
|
|
2024
|
+
#pragma mark - RJPerformanceManagerDelegate
|
|
2025
|
+
|
|
2026
|
+
// RJPerformanceManagerDelegate: the device performance level changed.
// Stores the new level and degrades (or restores) the encoder's capture
// scale accordingly; the scale is only ever lowered, never raised above
// the configured captureScale.
- (void)performanceManagerDidChangeLevel:(RJPerformanceLevel)level {
  self.internalPerformanceLevel = level;

  // Initialize defensively: an NS_ENUM is a plain NSInteger at runtime, so
  // an out-of-range value would otherwise leave `levelName` uninitialized
  // and pass a garbage pointer to the log format below (undefined
  // behavior). The switch stays default-free to keep -Wswitch exhaustive.
  NSString *levelName = @"Unknown";
  switch (level) {
  case RJPerformanceLevelNormal:
    levelName = @"Normal";
    break;
  case RJPerformanceLevelReduced:
    levelName = @"Reduced (50% frames, 35% scale)";
    break;
  case RJPerformanceLevelMinimal:
    levelName = @"Minimal (25% frames, 25% scale)";
    break;
  case RJPerformanceLevelPaused:
    levelName = @"Paused (stopped)";
    break;
  }

  RJLogInfo(@"CaptureEngine: Performance level changed to %@", levelName);

  if (self.internalVideoEncoder) {
    switch (level) {
    case RJPerformanceLevelReduced:
      self.internalVideoEncoder.captureScale = MIN(self.captureScale, 0.25);
      break;
    case RJPerformanceLevelMinimal:
      self.internalVideoEncoder.captureScale = MIN(self.captureScale, 0.15);
      break;
    default:
      // Normal/Paused (and any future level): restore the configured scale.
      self.internalVideoEncoder.captureScale = self.captureScale;
      break;
    }
  }
}
|
|
2061
|
+
|
|
2062
|
+
@end
|