@rejourneyco/react-native 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (152) hide show
  1. package/android/build.gradle.kts +135 -0
  2. package/android/consumer-rules.pro +10 -0
  3. package/android/proguard-rules.pro +1 -0
  4. package/android/src/main/AndroidManifest.xml +15 -0
  5. package/android/src/main/java/com/rejourney/RejourneyModuleImpl.kt +2981 -0
  6. package/android/src/main/java/com/rejourney/capture/ANRHandler.kt +206 -0
  7. package/android/src/main/java/com/rejourney/capture/ActivityTracker.kt +98 -0
  8. package/android/src/main/java/com/rejourney/capture/CaptureEngine.kt +1553 -0
  9. package/android/src/main/java/com/rejourney/capture/CaptureHeuristics.kt +375 -0
  10. package/android/src/main/java/com/rejourney/capture/CrashHandler.kt +153 -0
  11. package/android/src/main/java/com/rejourney/capture/MotionEvent.kt +215 -0
  12. package/android/src/main/java/com/rejourney/capture/SegmentUploader.kt +512 -0
  13. package/android/src/main/java/com/rejourney/capture/VideoEncoder.kt +773 -0
  14. package/android/src/main/java/com/rejourney/capture/ViewHierarchyScanner.kt +633 -0
  15. package/android/src/main/java/com/rejourney/capture/ViewSerializer.kt +286 -0
  16. package/android/src/main/java/com/rejourney/core/Constants.kt +117 -0
  17. package/android/src/main/java/com/rejourney/core/Logger.kt +93 -0
  18. package/android/src/main/java/com/rejourney/core/Types.kt +124 -0
  19. package/android/src/main/java/com/rejourney/lifecycle/SessionLifecycleService.kt +162 -0
  20. package/android/src/main/java/com/rejourney/network/DeviceAuthManager.kt +747 -0
  21. package/android/src/main/java/com/rejourney/network/HttpClientProvider.kt +16 -0
  22. package/android/src/main/java/com/rejourney/network/NetworkMonitor.kt +272 -0
  23. package/android/src/main/java/com/rejourney/network/UploadManager.kt +1363 -0
  24. package/android/src/main/java/com/rejourney/network/UploadWorker.kt +492 -0
  25. package/android/src/main/java/com/rejourney/privacy/PrivacyMask.kt +645 -0
  26. package/android/src/main/java/com/rejourney/touch/GestureClassifier.kt +233 -0
  27. package/android/src/main/java/com/rejourney/touch/KeyboardTracker.kt +158 -0
  28. package/android/src/main/java/com/rejourney/touch/TextInputTracker.kt +181 -0
  29. package/android/src/main/java/com/rejourney/touch/TouchInterceptor.kt +591 -0
  30. package/android/src/main/java/com/rejourney/utils/EventBuffer.kt +284 -0
  31. package/android/src/main/java/com/rejourney/utils/OEMDetector.kt +154 -0
  32. package/android/src/main/java/com/rejourney/utils/PerfTiming.kt +235 -0
  33. package/android/src/main/java/com/rejourney/utils/Telemetry.kt +297 -0
  34. package/android/src/main/java/com/rejourney/utils/WindowUtils.kt +84 -0
  35. package/android/src/newarch/java/com/rejourney/RejourneyModule.kt +187 -0
  36. package/android/src/newarch/java/com/rejourney/RejourneyPackage.kt +40 -0
  37. package/android/src/oldarch/java/com/rejourney/RejourneyModule.kt +218 -0
  38. package/android/src/oldarch/java/com/rejourney/RejourneyPackage.kt +23 -0
  39. package/ios/Capture/RJANRHandler.h +42 -0
  40. package/ios/Capture/RJANRHandler.m +328 -0
  41. package/ios/Capture/RJCaptureEngine.h +275 -0
  42. package/ios/Capture/RJCaptureEngine.m +2062 -0
  43. package/ios/Capture/RJCaptureHeuristics.h +80 -0
  44. package/ios/Capture/RJCaptureHeuristics.m +903 -0
  45. package/ios/Capture/RJCrashHandler.h +46 -0
  46. package/ios/Capture/RJCrashHandler.m +313 -0
  47. package/ios/Capture/RJMotionEvent.h +183 -0
  48. package/ios/Capture/RJMotionEvent.m +183 -0
  49. package/ios/Capture/RJPerformanceManager.h +100 -0
  50. package/ios/Capture/RJPerformanceManager.m +373 -0
  51. package/ios/Capture/RJPixelBufferDownscaler.h +42 -0
  52. package/ios/Capture/RJPixelBufferDownscaler.m +85 -0
  53. package/ios/Capture/RJSegmentUploader.h +146 -0
  54. package/ios/Capture/RJSegmentUploader.m +778 -0
  55. package/ios/Capture/RJVideoEncoder.h +247 -0
  56. package/ios/Capture/RJVideoEncoder.m +1036 -0
  57. package/ios/Capture/RJViewControllerTracker.h +73 -0
  58. package/ios/Capture/RJViewControllerTracker.m +508 -0
  59. package/ios/Capture/RJViewHierarchyScanner.h +215 -0
  60. package/ios/Capture/RJViewHierarchyScanner.m +1464 -0
  61. package/ios/Capture/RJViewSerializer.h +119 -0
  62. package/ios/Capture/RJViewSerializer.m +498 -0
  63. package/ios/Core/RJConstants.h +124 -0
  64. package/ios/Core/RJConstants.m +88 -0
  65. package/ios/Core/RJLifecycleManager.h +85 -0
  66. package/ios/Core/RJLifecycleManager.m +308 -0
  67. package/ios/Core/RJLogger.h +61 -0
  68. package/ios/Core/RJLogger.m +211 -0
  69. package/ios/Core/RJTypes.h +176 -0
  70. package/ios/Core/RJTypes.m +66 -0
  71. package/ios/Core/Rejourney.h +64 -0
  72. package/ios/Core/Rejourney.mm +2495 -0
  73. package/ios/Network/RJDeviceAuthManager.h +94 -0
  74. package/ios/Network/RJDeviceAuthManager.m +967 -0
  75. package/ios/Network/RJNetworkMonitor.h +68 -0
  76. package/ios/Network/RJNetworkMonitor.m +267 -0
  77. package/ios/Network/RJRetryManager.h +73 -0
  78. package/ios/Network/RJRetryManager.m +325 -0
  79. package/ios/Network/RJUploadManager.h +267 -0
  80. package/ios/Network/RJUploadManager.m +2296 -0
  81. package/ios/Privacy/RJPrivacyMask.h +163 -0
  82. package/ios/Privacy/RJPrivacyMask.m +922 -0
  83. package/ios/Rejourney.h +63 -0
  84. package/ios/Touch/RJGestureClassifier.h +130 -0
  85. package/ios/Touch/RJGestureClassifier.m +333 -0
  86. package/ios/Touch/RJTouchInterceptor.h +169 -0
  87. package/ios/Touch/RJTouchInterceptor.m +772 -0
  88. package/ios/Utils/RJEventBuffer.h +112 -0
  89. package/ios/Utils/RJEventBuffer.m +358 -0
  90. package/ios/Utils/RJGzipUtils.h +33 -0
  91. package/ios/Utils/RJGzipUtils.m +89 -0
  92. package/ios/Utils/RJKeychainManager.h +48 -0
  93. package/ios/Utils/RJKeychainManager.m +111 -0
  94. package/ios/Utils/RJPerfTiming.h +209 -0
  95. package/ios/Utils/RJPerfTiming.m +264 -0
  96. package/ios/Utils/RJTelemetry.h +92 -0
  97. package/ios/Utils/RJTelemetry.m +320 -0
  98. package/ios/Utils/RJWindowUtils.h +66 -0
  99. package/ios/Utils/RJWindowUtils.m +133 -0
  100. package/lib/commonjs/NativeRejourney.js +40 -0
  101. package/lib/commonjs/components/Mask.js +79 -0
  102. package/lib/commonjs/index.js +1381 -0
  103. package/lib/commonjs/sdk/autoTracking.js +1259 -0
  104. package/lib/commonjs/sdk/constants.js +151 -0
  105. package/lib/commonjs/sdk/errorTracking.js +199 -0
  106. package/lib/commonjs/sdk/index.js +50 -0
  107. package/lib/commonjs/sdk/metricsTracking.js +204 -0
  108. package/lib/commonjs/sdk/navigation.js +151 -0
  109. package/lib/commonjs/sdk/networkInterceptor.js +412 -0
  110. package/lib/commonjs/sdk/utils.js +363 -0
  111. package/lib/commonjs/types/expo-router.d.js +2 -0
  112. package/lib/commonjs/types/index.js +2 -0
  113. package/lib/module/NativeRejourney.js +38 -0
  114. package/lib/module/components/Mask.js +72 -0
  115. package/lib/module/index.js +1284 -0
  116. package/lib/module/sdk/autoTracking.js +1233 -0
  117. package/lib/module/sdk/constants.js +145 -0
  118. package/lib/module/sdk/errorTracking.js +189 -0
  119. package/lib/module/sdk/index.js +12 -0
  120. package/lib/module/sdk/metricsTracking.js +187 -0
  121. package/lib/module/sdk/navigation.js +143 -0
  122. package/lib/module/sdk/networkInterceptor.js +401 -0
  123. package/lib/module/sdk/utils.js +342 -0
  124. package/lib/module/types/expo-router.d.js +2 -0
  125. package/lib/module/types/index.js +2 -0
  126. package/lib/typescript/NativeRejourney.d.ts +147 -0
  127. package/lib/typescript/components/Mask.d.ts +39 -0
  128. package/lib/typescript/index.d.ts +117 -0
  129. package/lib/typescript/sdk/autoTracking.d.ts +204 -0
  130. package/lib/typescript/sdk/constants.d.ts +120 -0
  131. package/lib/typescript/sdk/errorTracking.d.ts +32 -0
  132. package/lib/typescript/sdk/index.d.ts +9 -0
  133. package/lib/typescript/sdk/metricsTracking.d.ts +58 -0
  134. package/lib/typescript/sdk/navigation.d.ts +33 -0
  135. package/lib/typescript/sdk/networkInterceptor.d.ts +47 -0
  136. package/lib/typescript/sdk/utils.d.ts +148 -0
  137. package/lib/typescript/types/index.d.ts +624 -0
  138. package/package.json +102 -0
  139. package/rejourney.podspec +21 -0
  140. package/src/NativeRejourney.ts +165 -0
  141. package/src/components/Mask.tsx +80 -0
  142. package/src/index.ts +1459 -0
  143. package/src/sdk/autoTracking.ts +1373 -0
  144. package/src/sdk/constants.ts +134 -0
  145. package/src/sdk/errorTracking.ts +231 -0
  146. package/src/sdk/index.ts +11 -0
  147. package/src/sdk/metricsTracking.ts +232 -0
  148. package/src/sdk/navigation.ts +157 -0
  149. package/src/sdk/networkInterceptor.ts +440 -0
  150. package/src/sdk/utils.ts +369 -0
  151. package/src/types/expo-router.d.ts +7 -0
  152. package/src/types/index.ts +739 -0
@@ -0,0 +1,1036 @@
1
+ //
2
+ // RJVideoEncoder.m
3
+ // Rejourney
4
+ //
5
+ // H.264 video segment encoder implementation using AVAssetWriter.
6
+ //
7
+ // Licensed under the Apache License, Version 2.0 (the "License");
8
+ // you may not use this file except in compliance with the License.
9
+ // You may obtain a copy of the License at
10
+ //
11
+ // http://www.apache.org/licenses/LICENSE-2.0
12
+ //
13
+ // Unless required by applicable law or agreed to in writing, software
14
+ // distributed under the License is distributed on an "AS IS" BASIS,
15
+ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16
+ // See the License for the specific language governing permissions and
17
+ // limitations under the License.
18
+ //
19
+ // Copyright (c) 2026 Rejourney
20
+ //
21
+
22
+ #import "RJVideoEncoder.h"
23
+ #import "../Core/RJConstants.h"
24
+ #import "../Core/RJLogger.h"
25
+ #import "../Utils/RJPerfTiming.h"
26
+ #import <QuartzCore/CABase.h>
27
+ #import <UIKit/UIKit.h>
28
+
29
@interface RJVideoEncoder ()

// Writer for the MP4 segment currently being recorded; nil when no
// segment is open.
@property(nonatomic, strong, nullable) AVAssetWriter *assetWriter;

// Video track input attached to `assetWriter`.
@property(nonatomic, strong, nullable) AVAssetWriterInput *videoInput;

// Adaptor used to append CVPixelBuffers to `videoInput` with explicit
// presentation times.
@property(nonatomic, strong, nullable)
    AVAssetWriterInputPixelBufferAdaptor *adaptor;

// File URL of the segment currently being written (under tmp/rj_segments).
@property(nonatomic, strong, nullable) NSURL *currentSegmentURL;

// Number of frames appended to the current segment so far.
@property(nonatomic, assign) NSInteger frameCount;

// Wall-clock time the segment was started, in milliseconds since the epoch
// (set in startSegmentWithSize:).
@property(nonatomic, assign) NSTimeInterval segmentStartTime;

// Timestamp (ms) of the first frame appended to the segment; frame
// presentation times are computed relative to this value.
@property(nonatomic, assign) NSTimeInterval segmentFirstFrameTimestamp;

// Timestamp (ms) of the most recently appended frame.
@property(nonatomic, assign) NSTimeInterval lastFrameTimestamp;

// Pixel dimensions (even-aligned for H.264) of the current segment.
@property(nonatomic, assign) CGSize currentFrameSize;

// Serial queue on which all encoding work runs (targeted at utility QoS).
@property(nonatomic, strong) dispatch_queue_t encodingQueue;

// Backing storage for the public sessionId accessors.
@property(nonatomic, copy, nullable) NSString *internalSessionId;

// Reused BGRA pixel buffer pool; released in dealloc.
@property(nonatomic, assign, nullable) CVPixelBufferPoolRef pixelBufferPool;

@end
57
+
58
+ @implementation RJVideoEncoder
59
+
60
+ #pragma mark - Initialization
61
+
62
- (instancetype)init {
  if ((self = [super init])) {
    // Encoding defaults: 1.5 Mbps target, 60-frame segments, 15 fps, and
    // the unified capture scale from RJConstants.
    _targetBitrate = 1500000; // 1.5 Mbps
    _framesPerSegment = 60;
    _fps = 15;
    _captureScale = RJDefaultCaptureScale; // Unified scale from RJConstants

    // Per-segment bookkeeping starts zeroed.
    _frameCount = 0;
    _segmentStartTime = 0;
    _segmentFirstFrameTimestamp = 0;
    _lastFrameTimestamp = 0;

    // All encoding work is funneled through one serial queue, retargeted
    // at the utility QoS class so it never competes with UI work.
    _encodingQueue = dispatch_queue_create("com.rejourney.videoencoder",
                                           DISPATCH_QUEUE_SERIAL);
    dispatch_set_target_queue(
        _encodingQueue, dispatch_get_global_queue(QOS_CLASS_UTILITY, 0));

    // Front-load expensive one-time initialization so the first real
    // frame does not pay for it:
    // - pixel buffer pool / hardware encoder spin-up
    [self prewarmPixelBufferPool];
    // - VideoToolbox / AVAssetWriter binary loading
    [self prewarmEncoderAsync];
  }
  return self;
}
87
+
88
// Best-effort prewarm: merely instantiating an AVAssetWriter forces the
// VideoToolbox/codec machinery to load, so the first real segment starts
// without a cold-start spike. Runs async on the encoding queue; failures
// are logged and otherwise ignored.
- (void)prewarmEncoderAsync {
  dispatch_async(_encodingQueue, ^{
    @try {
      RJLogDebug(@"VideoEncoder: Pre-warming AVAssetWriter libraries...");
      NSURL *tempURL =
          [NSURL fileURLWithPath:
                     [NSTemporaryDirectory()
                         stringByAppendingPathComponent:@"rj_prewarm.mp4"]];
      // FIX: remove any stale prewarm file from a previous run first —
      // AVAssetWriter will not write over an existing file.
      [[NSFileManager defaultManager] removeItemAtURL:tempURL error:nil];

      NSError *error = nil;
      AVAssetWriter *dummyWriter =
          [AVAssetWriter assetWriterWithURL:tempURL
                                   fileType:AVFileTypeMPEG4
                                      error:&error];
      if (dummyWriter) {
        RJLogDebug(@"VideoEncoder: AVAssetWriter libraries loaded");
      } else {
        // FIX: previously a nil writer was silently ignored; surface it.
        RJLogWarning(@"VideoEncoder: Pre-warm writer creation failed: %@",
                     error);
      }
    } @catch (NSException *e) {
      // Prewarming is purely an optimization; never let it crash startup.
      RJLogWarning(@"VideoEncoder: Failed to pre-warm libraries: %@", e);
    }
  });
}
110
+
111
// Asynchronously opens a segment writer for `size` so the first real frame
// encodes without setup latency. No-op if a segment is already recording.
- (void)prepareEncoderWithSize:(CGSize)size {
  // Hop onto the serial encoding queue so preparation never blocks the
  // caller (typically the main thread).
  dispatch_async(_encodingQueue, ^{
    if (self.isRecording) {
      RJLogDebug(@"VideoEncoder: Already recording, skipping prepare");
      return;
    }

    RJLogDebug(@"VideoEncoder: Preparing encoder with size: %@",
               NSStringFromCGSize(size));
    // startSegmentWithSize: performs the full writer/input/file setup, so
    // we simply reuse it rather than duplicating that logic here.
    [self startSegmentWithSize:size];
    RJLogDebug(@"VideoEncoder: Encoder prepared and ready for first frame");
  });
}
126
+
127
// Releases non-ARC resources: cancels any in-flight segment and frees the
// CoreVideo pixel buffer pool (a CF type not managed by ARC).
- (void)dealloc {
  [self cleanup];

  if (_pixelBufferPool != NULL) {
    CVPixelBufferPoolRelease(_pixelBufferPool);
    _pixelBufferPool = NULL;
  }
}
134
+
135
+ #pragma mark - Public Properties
136
+
137
// YES while a segment writer exists and is actively writing.
- (BOOL)isRecording {
  AVAssetWriter *writer = self.assetWriter;
  if (writer == nil) {
    return NO;
  }
  return writer.status == AVAssetWriterStatusWriting;
}
141
+
142
// Number of frames appended to the segment currently being written.
- (NSInteger)currentFrameCount {
  return self.frameCount;
}
145
+
146
// Public read accessor backed by the internal (copied) storage.
- (NSString *)sessionId {
  return self.internalSessionId;
}
149
+
150
+ #pragma mark - Lifecycle
151
+
152
// Public write accessor; `internalSessionId` is declared `copy`, so the
// incoming string is defensively snapshotted by the property setter.
- (void)setSessionId:(NSString *)sessionId {
  self.internalSessionId = sessionId;
}
155
+
156
// Closes any in-progress segment, then creates and starts a new MP4 segment
// writer sized for `size` (already in pixels). Resets all per-segment
// counters, builds the output file under tmp/rj_segments, configures H.264
// encoding settings, and starts the writer session at time zero.
// Returns NO (after notifying the delegate where an NSError exists) if the
// writer cannot be created, the input cannot be added, or writing fails to
// start.
- (BOOL)startSegmentWithSize:(CGSize)size {
  if (self.isRecording) {
    [self finishSegment];
  }

  // Fresh per-segment state; segmentStartTime is wall-clock milliseconds.
  self.frameCount = 0;
  self.lastFrameTimestamp = 0;
  self.segmentFirstFrameTimestamp = 0;
  self.segmentStartTime = [[NSDate date] timeIntervalSince1970] * 1000;

  // Size is already in pixels (from actual captured frame), use directly
  CGSize scaledSize = CGSizeMake(floor(size.width), floor(size.height));

  // Ensure even dimensions (required for H.264)
  scaledSize.width = ((NSInteger)scaledSize.width / 2) * 2;
  scaledSize.height = ((NSInteger)scaledSize.height / 2) * 2;

  // Clamp degenerate sizes rather than failing outright.
  if (scaledSize.width < 100 || scaledSize.height < 100) {
    RJLogWarning(@"Video encoder: Frame size too small, using minimum 100x100");
    scaledSize = CGSizeMake(100, 100);
  }

  self.currentFrameSize = scaledSize;

  // Segment filename encodes the session id and start time (ms) so files
  // are unique and attributable: seg_<session>_<startMs>.mp4.
  NSString *sessionPrefix = self.internalSessionId ?: @"unknown";
  NSString *filename =
      [NSString stringWithFormat:@"seg_%@_%lld.mp4", sessionPrefix,
                                 (long long)self.segmentStartTime];

  NSURL *tempDir = [[NSURL fileURLWithPath:NSTemporaryDirectory()]
      URLByAppendingPathComponent:@"rj_segments"
                      isDirectory:YES];
  [[NSFileManager defaultManager] createDirectoryAtURL:tempDir
                           withIntermediateDirectories:YES
                                            attributes:nil
                                                 error:nil];

  self.currentSegmentURL = [tempDir URLByAppendingPathComponent:filename];

  // AVAssetWriter refuses to write over an existing file, so clear any
  // leftover with the same name first.
  [[NSFileManager defaultManager] removeItemAtURL:self.currentSegmentURL
                                            error:nil];

  NSError *error = nil;
  self.assetWriter = [[AVAssetWriter alloc] initWithURL:self.currentSegmentURL
                                               fileType:AVFileTypeMPEG4
                                                  error:&error];

  // NOTE(review): Cocoa convention is to check the returned object for nil
  // rather than the error pointer — confirm initWithURL: cannot succeed
  // while leaving a stale non-nil error here.
  if (error) {
    RJLogError(@"Failed to create AVAssetWriter: %@", error);
    [self notifyError:error];
    return NO;
  }

  // Bitrate scales with pixel area relative to 720p (see bitrateForSize:).
  NSInteger effectiveBitrate = [self bitrateForSize:scaledSize];
  NSDictionary *videoSettings = @{
    AVVideoCodecKey : AVVideoCodecTypeH264,
    AVVideoWidthKey : @(scaledSize.width),
    AVVideoHeightKey : @(scaledSize.height),
    AVVideoCompressionPropertiesKey : @{
      AVVideoAverageBitRateKey : @(effectiveBitrate),
      // Baseline + CAVLC reduces CPU without changing capture scale/bitrate.
      // Tradeoff: slightly larger files, but similar visual quality at same
      // bitrate.
      AVVideoProfileLevelKey : AVVideoProfileLevelH264BaselineAutoLevel,
      AVVideoH264EntropyModeKey : AVVideoH264EntropyModeCAVLC,
      AVVideoMaxKeyFrameIntervalKey :
          @(self.fps * 10), // 10s keyframes - fewer keyframes = smaller files
      AVVideoAllowFrameReorderingKey : @NO,
      AVVideoExpectedSourceFrameRateKey : @(self.fps),
    }
  };

  self.videoInput =
      [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo
                                     outputSettings:videoSettings];
  // Frames arrive live from the capture pipeline, not from a file.
  self.videoInput.expectsMediaDataInRealTime = YES;

  // BGRA source buffers matching the segment dimensions (see
  // createPixelBufferFromImage:, which produces 32BGRA buffers).
  NSDictionary *sourcePixelBufferAttributes = @{
    (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA),
    (id)kCVPixelBufferWidthKey : @(scaledSize.width),
    (id)kCVPixelBufferHeightKey : @(scaledSize.height),
  };

  self.adaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc]
       initWithAssetWriterInput:self.videoInput
    sourcePixelBufferAttributes:sourcePixelBufferAttributes];

  if (![self.assetWriter canAddInput:self.videoInput]) {
    RJLogError(@"Cannot add video input to asset writer");
    [self cleanup];
    return NO;
  }

  [self.assetWriter addInput:self.videoInput];

  if (![self.assetWriter startWriting]) {
    RJLogError(@"Failed to start writing: %@", self.assetWriter.error);
    [self notifyError:self.assetWriter.error];
    [self cleanup];
    return NO;
  }

  // Presentation times appended later are relative to the first frame, so
  // the session starts at time zero.
  [self.assetWriter startSessionAtSourceTime:kCMTimeZero];

  RJLogDebug(@"Video encoder: Started segment at %.0f, size=%@",
             self.segmentStartTime, NSStringFromCGSize(scaledSize));

  return YES;
}
265
+
266
// Converts a captured UIImage to a BGRA pixel buffer and appends it to the
// current segment (opening one lazily, sized from this first frame, if
// needed). `timestamp` is in milliseconds. Returns NO when the frame is
// dropped for any reason (nil image, writer not ready, conversion failure).
- (BOOL)appendFrame:(UIImage *)frame timestamp:(NSTimeInterval)timestamp {
  // A nil image can never be encoded.
  if (frame == nil) {
    RJLogWarning(@"Video encoder: Cannot append nil frame");
    return NO;
  }

  // Lazily open a segment using the actual captured frame size, converted
  // from points to pixels via the image scale.
  if (!self.isRecording) {
    CGSize pixelSize = CGSizeMake(frame.size.width * frame.scale,
                                  frame.size.height * frame.scale);
    BOOL started = [self startSegmentWithSize:pixelSize];
    if (!started) {
      RJLogWarning(@"Video encoder: Failed to start segment with frame size %@",
                   NSStringFromCGSize(pixelSize));
      return NO;
    }
  }

  // Drop the frame rather than stall when the writer input is busy.
  if (!self.videoInput.readyForMoreMediaData) {
    RJLogDebug(@"Video encoder: Input not ready for more data, skipping frame");
    return NO;
  }

  // UIImage -> 32BGRA CVPixelBuffer, timed for perf telemetry.
  RJ_TIME_START_NAMED(pixelBuffer);
  CVPixelBufferRef buffer = [self createPixelBufferFromImage:frame];
  RJ_TIME_END_NAMED(pixelBuffer, RJPerfMetricPixelBuffer);
  if (buffer == NULL) {
    RJLogWarning(@"Video encoder: Failed to create pixel buffer");
    return NO;
  }

  BOOL appended = [self appendPixelBuffer:buffer timestamp:timestamp];
  CVPixelBufferRelease(buffer);
  return appended;
}
302
+
303
// Appends a ready-made pixel buffer to the current segment. `timestamp` is
// in milliseconds; the buffer's presentation time is computed relative to
// the segment's first frame. Lazily opens a segment sized from the buffer
// if none is recording, and rotates to a new segment once
// `framesPerSegment` frames have been written. Returns NO if the frame was
// dropped (nil buffer, segment start failure, input busy, append failure).
- (BOOL)appendPixelBuffer:(CVPixelBufferRef)pixelBuffer
                timestamp:(NSTimeInterval)timestamp {
  if (!pixelBuffer) {
    RJLogWarning(@"Video encoder: Cannot append nil pixel buffer");
    return NO;
  }

  // On first frame, initialize segment with actual buffer size if needed
  if (!self.isRecording) {
    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);
    CGSize frameSize = CGSizeMake(width, height);
    if (![self startSegmentWithSize:frameSize]) {
      RJLogWarning(
          @"Video encoder: Failed to start segment with buffer size %@",
          NSStringFromCGSize(frameSize));
      return NO;
    }
  }

  // Drop rather than block when the writer input cannot take more data.
  if (!self.videoInput.readyForMoreMediaData) {
    RJLogDebug(
        @"Video encoder: Input not ready for more data, skipping buffer");
    return NO;
  }

  // The first appended frame anchors the segment's presentation timeline.
  if (self.frameCount == 0) {
    self.segmentFirstFrameTimestamp = timestamp;
  }

  // Milliseconds-since-first-frame converted to seconds, expressed with a
  // 1000 Hz timescale (millisecond precision).
  NSTimeInterval presentationSeconds =
      (timestamp - self.segmentFirstFrameTimestamp) / 1000.0;

  CMTime frameTime = CMTimeMakeWithSeconds(presentationSeconds, 1000);

  RJ_TIME_START_NAMED(encodeAppend);
  BOOL success = [self.adaptor appendPixelBuffer:pixelBuffer
                            withPresentationTime:frameTime];
  RJ_TIME_END_NAMED(encodeAppend, RJPerfMetricEncodeAppend);

  if (success) {
    self.frameCount++;
    self.lastFrameTimestamp = timestamp;

    RJLogDebug(@"Video encoder: Appended frame %ld at time %.3fs",
               (long)self.frameCount, CMTimeGetSeconds(frameTime));

    // Rotate: close this segment and immediately open the next one.
    if (self.frameCount >= self.framesPerSegment) {
      RJLogInfo(@"Video encoder: Segment full (%ld frames), rotating",
                (long)self.frameCount);
      [self finishSegmentAndContinue];
    }
  } else {
    RJLogWarning(@"Video encoder: Failed to append frame, status=%ld",
                 (long)self.assetWriter.status);
  }

  return success;
}
362
+
363
// Finalizes the current segment asynchronously without starting a new one.
- (void)finishSegment {
  [self finishSegmentWithContinuation:NO synchronous:NO];
}
366
+
367
// Finalizes the current segment asynchronously, then rotates into a fresh
// segment of the same size (used when a segment reaches framesPerSegment).
- (void)finishSegmentAndContinue {
  [self finishSegmentWithContinuation:YES synchronous:NO];
}
370
+
371
// Finalizes the current segment and blocks until done (bounded by a
// timeout) — used on app termination/backgrounding.
- (void)finishSegmentSync {
  [self finishSegmentWithContinuation:NO synchronous:YES];
}
374
+
375
// Core segment finalization. Marks the input finished, asks the writer to
// finish asynchronously, and — on success — reports the completed file to
// the delegate with its time range and frame count. Empty or session-less
// segments are canceled instead of finished.
//
// @param shouldContinue When YES (async mode only), a new segment of the
//        same size is started after the current one completes.
// @param synchronous When YES, blocks the calling thread on a semaphore
//        (5s timeout) until the writer's completion handler has run; the
//        delegate is then invoked inline on the completion handler's
//        thread instead of via the encoding queue.
- (void)finishSegmentWithContinuation:(BOOL)shouldContinue
                          synchronous:(BOOL)synchronous {
  // Nothing to do if no writer exists or it is not actively writing.
  if (!self.assetWriter) {
    RJLogInfo(@"[RJ-ENCODER] finishSegment called but no assetWriter - nothing to "
              @"finish");
    return;
  }

  if (self.assetWriter.status != AVAssetWriterStatusWriting) {
    RJLogInfo(@"[RJ-ENCODER] finishSegment called but assetWriter status=%ld (not "
              @"writing)",
              (long)self.assetWriter.status);
    return;
  }

  RJLogInfo(
      @"[RJ-ENCODER] Finishing segment with %ld frames, sessionId=%@, sync=%d",
      (long)self.frameCount, self.internalSessionId, synchronous);

  // Snapshot all per-segment state now: the completion handler runs later,
  // after these properties may have been reset for the next segment.
  NSTimeInterval endTime = self.lastFrameTimestamp > 0
                               ? self.lastFrameTimestamp
                               : [[NSDate date] timeIntervalSince1970] * 1000;
  NSTimeInterval startTime = self.segmentFirstFrameTimestamp > 0
                                 ? self.segmentFirstFrameTimestamp
                                 : self.segmentStartTime;
  NSInteger count = self.frameCount;
  NSURL *url = self.currentSegmentURL;
  CGSize frameSize = self.currentFrameSize;
  // Capture sessionId before async to avoid nil race condition
  NSString *sessionId = [self.internalSessionId copy];

  // A frame-less segment produces an unusable file; discard it.
  if (count == 0) {
    RJLogInfo(@"[RJ-ENCODER] No frames in segment, canceling");
    [self cancelSegment];
    return;
  }

  // Without a session id the segment cannot be attributed; discard it.
  if (!sessionId) {
    RJLogInfo(@"[RJ-ENCODER] No sessionId, canceling segment");
    [self cancelSegment];
    return;
  }

  [self.videoInput markAsFinished];

  // For synchronous finishing (termination/background), use semaphore to block
  dispatch_semaphore_t semaphore =
      synchronous ? dispatch_semaphore_create(0) : nil;

  // Weak/strong dance: the writer's completion handler must not extend the
  // encoder's lifetime via a strong capture cycle.
  __weak typeof(self) weakSelf = self;
  [self.assetWriter finishWritingWithCompletionHandler:^{
    __strong typeof(weakSelf) strongSelf = weakSelf;

    // Completion handler runs on background queue - process immediately for
    // sync mode
    if (strongSelf.assetWriter.status == AVAssetWriterStatusFailed) {
      RJLogInfo(@"[RJ-ENCODER] Segment FAILED: %@", strongSelf.assetWriter.error);
      // NOTE(review): in synchronous mode the failure is logged but the
      // delegate is never notified — confirm this is intentional.
      if (!synchronous) {
        dispatch_async(dispatch_get_main_queue(), ^{
          [strongSelf notifyError:strongSelf.assetWriter.error];
        });
      }
    } else {
      NSDictionary *attrs =
          [[NSFileManager defaultManager] attributesOfItemAtPath:url.path
                                                           error:nil];
      unsigned long long fileSize = [attrs fileSize];

      RJLogInfo(@"[RJ-ENCODER] Segment COMPLETE - %ld frames, %.1f KB, %.1fs, "
                @"sessionId=%@, sync=%d",
                (long)count, fileSize / 1024.0, (endTime - startTime) / 1000.0,
                sessionId, synchronous);
      RJLogInfo(@"[RJ-ENCODER] Calling delegate videoEncoderDidFinishSegment "
                @"with url=%@",
                url.path);

      // For synchronous mode, call delegate immediately on current thread
      // For async mode, dispatch to main queue
      // NOTE(review): videoEncoderDidFinishSegment: is called without a
      // respondsToSelector: check (unlike notifyError:) — presumably a
      // required protocol method; confirm against the header.
      if (synchronous) {
        [strongSelf.delegate videoEncoderDidFinishSegment:url
                                                sessionId:sessionId
                                                startTime:startTime
                                                  endTime:endTime
                                               frameCount:count];
      } else {
        dispatch_async(strongSelf.encodingQueue, ^{
          [strongSelf.delegate videoEncoderDidFinishSegment:url
                                                  sessionId:sessionId
                                                  startTime:startTime
                                                    endTime:endTime
                                                 frameCount:count];
        });
      }
    }

    // Clean up writer state
    strongSelf.assetWriter = nil;
    strongSelf.videoInput = nil;
    strongSelf.adaptor = nil;

    // Rotate into the next segment only in async mode, and only if a
    // session is still active.
    if (!synchronous && shouldContinue && strongSelf.internalSessionId) {
      dispatch_async(strongSelf.encodingQueue, ^{
        [strongSelf startSegmentWithSize:frameSize];
      });
    }

    // Signal completion for synchronous mode
    if (semaphore) {
      dispatch_semaphore_signal(semaphore);
    }
  }];

  // Wait for completion in synchronous mode (with timeout)
  // Safe because the completion handler runs on AVFoundation's own internal
  // queue, not the thread blocked here.
  if (semaphore) {
    dispatch_time_t timeout =
        dispatch_time(DISPATCH_TIME_NOW, 5.0 * NSEC_PER_SEC);
    long result = dispatch_semaphore_wait(semaphore, timeout);
    if (result != 0) {
      RJLogInfo(@"[RJ-ENCODER] WARNING: Synchronous segment finish timed out after "
                @"5s");
    } else {
      RJLogInfo(@"[RJ-ENCODER] Synchronous segment finish completed");
    }
  }
}
500
+
501
// Aborts any in-flight write, deletes the partial segment file, and resets
// all per-segment state so a fresh segment can be started.
- (void)cancelSegment {
  AVAssetWriter *writer = self.assetWriter;
  if (writer) {
    [writer cancelWriting];
  }

  // A canceled segment's partial file is useless — remove it.
  NSURL *segmentURL = self.currentSegmentURL;
  if (segmentURL) {
    [[NSFileManager defaultManager] removeItemAtURL:segmentURL error:nil];
  }

  // Drop writer objects and zero the per-segment counters.
  self.assetWriter = nil;
  self.videoInput = nil;
  self.adaptor = nil;
  self.currentSegmentURL = nil;
  self.frameCount = 0;
  self.lastFrameTimestamp = 0;
  self.segmentFirstFrameTimestamp = 0;

  RJLogDebug(@"Video encoder: Segment canceled");
}
521
+
522
// Tears down only the in-progress segment. Deliberately does NOT touch the
// rj_segments directory: completed segments there may still be uploading,
// and RJSegmentUploader owns deleting files after successful upload (and
// reaping orphans via cleanupOrphanedSegments()).
- (void)cleanup {
  [self cancelSegment];
}
534
+
535
+ #pragma mark - Private Methods
536
+
537
// Scales the configured target bitrate by pixel area relative to a 720p
// reference frame, clamped to [200 kbps, 8 Mbps]. Falls back to 1.5 Mbps
// when no target bitrate is configured.
- (NSInteger)bitrateForSize:(CGSize)size {
  NSInteger base = (self.targetBitrate > 0) ? self.targetBitrate : 1500000;

  // Ratio of this frame's pixel count to 1280x720.
  const CGFloat kReferencePixels = 1280.0 * 720.0;
  CGFloat areaRatio = (size.width * size.height) / kReferencePixels;

  NSInteger scaled = (NSInteger)lrint(base * areaRatio);

  // Clamp to sane bounds: floor 200 kbps, ceiling 8 Mbps.
  const NSInteger kMinBitrate = 200000;
  const NSInteger kMaxBitrate = 8000000;
  if (scaled < kMinBitrate) {
    return kMinBitrate;
  }
  if (scaled > kMaxBitrate) {
    return kMaxBitrate;
  }
  return scaled;
}
554
+
555
// Renders `image` into a 32BGRA CVPixelBuffer matching currentFrameSize.
// Buffers come from a reused pool when possible; the caller owns the
// returned buffer (+1) and must CVPixelBufferRelease it. Returns NULL when
// the image has no CGImage, its dimensions diverge from the segment size
// by more than 2px (transient keyboard/rotation states), or any CoreVideo/
// CoreGraphics step fails.
//
// FIX: the pool was previously created once with the first segment's
// dimensions and never refreshed, so after a size change (e.g. rotation)
// it kept vending stale-sized buffers. We now detect a dimension mismatch
// on a pooled buffer and rebuild the pool for the current size.
- (CVPixelBufferRef)createPixelBufferFromImage:(UIImage *)image {
  CGImageRef cgImage = image.CGImage;
  if (!cgImage) {
    return NULL;
  }

  size_t width = (size_t)self.currentFrameSize.width;
  size_t height = (size_t)self.currentFrameSize.height;

  // Validate incoming image dimensions match expected size. During
  // keyboard/rotation transitions, image size may temporarily differ from
  // currentFrameSize, causing CGBitmapContextCreate bytesPerRow mismatch.
  size_t imageWidth = CGImageGetWidth(cgImage);
  size_t imageHeight = CGImageGetHeight(cgImage);

  // Allow small variance (1-2 pixels) due to rounding, but reject major
  // mismatches.
  if (labs((long)imageWidth - (long)width) > 2 ||
      labs((long)imageHeight - (long)height) > 2) {
    RJLogDebug(@"Video encoder: Skipping frame - size mismatch (got %zux%zu, "
               @"expected %zux%zu)",
               imageWidth, imageHeight, width, height);
    return NULL;
  }

  CVPixelBufferRef pixelBuffer = NULL;

  // Try the existing pool first; if it hands back a buffer whose
  // dimensions no longer match the segment, the pool is stale — drop it
  // and rebuild below.
  if (self.pixelBufferPool) {
    CVReturn poolStatus = CVPixelBufferPoolCreatePixelBuffer(
        kCFAllocatorDefault, self.pixelBufferPool, &pixelBuffer);
    if (poolStatus != kCVReturnSuccess) {
      pixelBuffer = NULL;
    } else if (pixelBuffer &&
               (CVPixelBufferGetWidth(pixelBuffer) != width ||
                CVPixelBufferGetHeight(pixelBuffer) != height)) {
      RJLogDebug(@"Video encoder: Pixel buffer pool stale, rebuilding for "
                 @"%zux%zu",
                 width, height);
      CVPixelBufferRelease(pixelBuffer);
      pixelBuffer = NULL;
      CVPixelBufferPoolRelease(self.pixelBufferPool);
      self.pixelBufferPool = NULL;
    }
  }

  // (Re)create the pool sized for the current segment if needed.
  if (!self.pixelBufferPool) {
    NSDictionary *poolAttributes = @{
      (id)kCVPixelBufferPoolMinimumBufferCountKey : @(3),
    };

    NSDictionary *pixelBufferAttributes = @{
      (id)kCVPixelBufferWidthKey : @(width),
      (id)kCVPixelBufferHeightKey : @(height),
      (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA),
      (id)kCVPixelBufferCGImageCompatibilityKey : @YES,
      (id)kCVPixelBufferCGBitmapContextCompatibilityKey : @YES,
      (id)kCVPixelBufferIOSurfacePropertiesKey : @{},
    };

    CVReturn poolStatus = CVPixelBufferPoolCreate(
        kCFAllocatorDefault, (__bridge CFDictionaryRef)poolAttributes,
        (__bridge CFDictionaryRef)pixelBufferAttributes, &_pixelBufferPool);

    if (poolStatus != kCVReturnSuccess) {
      RJLogWarning(@"Video encoder: Failed to create pixel buffer pool: %d",
                   poolStatus);
      self.pixelBufferPool = NULL;
    } else {
      RJLogDebug(@"Video encoder: Created pixel buffer pool (%zux%zu)", width,
                 height);
    }
  }

  // Allocate a buffer if the stale-pool path above didn't already yield one.
  if (!pixelBuffer) {
    CVReturn status;
    if (self.pixelBufferPool) {
      status = CVPixelBufferPoolCreatePixelBuffer(
          kCFAllocatorDefault, self.pixelBufferPool, &pixelBuffer);
    } else {
      // Pool unavailable — fall back to a one-off allocation.
      NSDictionary *options = @{
        (id)kCVPixelBufferCGImageCompatibilityKey : @YES,
        (id)kCVPixelBufferCGBitmapContextCompatibilityKey : @YES,
      };
      status = CVPixelBufferCreate(
          kCFAllocatorDefault, width, height, kCVPixelFormatType_32BGRA,
          (__bridge CFDictionaryRef)options, &pixelBuffer);
    }

    if (status != kCVReturnSuccess || !pixelBuffer) {
      RJLogWarning(@"Video encoder: Failed to get pixel buffer: %d", status);
      return NULL;
    }
  }

  CVPixelBufferLockBaseAddress(pixelBuffer, 0);

  void *baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer);
  size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);

  // Device RGB color space is cached for the process lifetime — never
  // released by design.
  static CGColorSpaceRef colorSpace = NULL;
  if (!colorSpace) {
    colorSpace = CGColorSpaceCreateDeviceRGB();
  }

  // Validate bytesPerRow is sufficient for the target width. The
  // "CGBitmapContextCreate: invalid data bytes/row" error occurs when
  // bytesPerRow < width * 4 (4 bytes per pixel for BGRA).
  size_t requiredBytesPerRow = width * 4;
  if (bytesPerRow < requiredBytesPerRow) {
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    CVPixelBufferRelease(pixelBuffer);
    RJLogWarning(@"Video encoder: bytesPerRow mismatch (%zu < %zu required) - "
                 @"skipping frame",
                 bytesPerRow, requiredBytesPerRow);
    return NULL;
  }

  CGContextRef context = CGBitmapContextCreate(
      baseAddress, width, height, 8, bytesPerRow, colorSpace,
      kCGImageAlphaNoneSkipFirst | kCGBitmapByteOrder32Little);

  if (!context) {
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    CVPixelBufferRelease(pixelBuffer);
    RJLogWarning(@"Video encoder: Failed to create bitmap context");
    return NULL;
  }

  // Use fastest interpolation for pixel buffer drawing
  CGContextSetInterpolationQuality(context, kCGInterpolationNone);
  CGContextDrawImage(context, CGRectMake(0, 0, width, height), cgImage);
  CGContextRelease(context);

  CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);

  return pixelBuffer;
}
675
+
676
/// Forwards an encoding failure to the delegate.
///
/// The failure callback is optional on the delegate protocol, so the
/// delegate is messaged only when it actually implements the selector.
- (void)notifyError:(NSError *)error {
  if (![self.delegate respondsToSelector:@selector
                      (videoEncoderDidFailWithError:)]) {
    return;
  }
  [self.delegate videoEncoderDidFailWithError:error];
}
682
+
683
/// Pre-warms the VideoToolbox H.264 hardware encoder on this encoder's
/// serial encoding queue by writing a single dummy frame through a
/// throwaway AVAssetWriter, then deleting the temp file.
///
/// Despite the name, this does not touch `pixelBufferPool`; it front-loads
/// hardware encoder initialization so the first real frame encode does not
/// stall (~1.5s observed per the original author's comment below).
/// NOTE(review): the dispatch_async block retains self until the warmup
/// completes — presumably acceptable for a short-lived warmup; confirm.
- (void)prewarmPixelBufferPool {
  // Pre-warm the VideoToolbox H.264 encoder by creating a minimal AVAssetWriter
  // and encoding a single dummy frame. This eliminates the ~1.5s spike on first
  // real frame encode by front-loading the hardware encoder initialization.
  dispatch_async(self.encodingQueue, ^{
    @autoreleasepool {
      NSTimeInterval startTime = CACurrentMediaTime();

      // Use a small size for fast prewarm (H.264 requires even dimensions)
      CGSize warmupSize = CGSizeMake(100, 100);

      // Create temp file for dummy segment; clear any stale leftover first.
      NSURL *tempDir = [NSURL fileURLWithPath:NSTemporaryDirectory()];
      NSURL *warmupURL =
          [tempDir URLByAppendingPathComponent:@"rj_encoder_warmup.mp4"];
      [[NSFileManager defaultManager] removeItemAtURL:warmupURL error:nil];

      NSError *error = nil;
      AVAssetWriter *warmupWriter =
          [[AVAssetWriter alloc] initWithURL:warmupURL
                                    fileType:AVFileTypeMPEG4
                                       error:&error];
      if (error || !warmupWriter) {
        RJLogWarning(@"Video encoder: Prewarm failed to create writer: %@",
                     error);
        return;
      }

      // Minimal baseline-profile settings: low bitrate, no B-frames, so the
      // encoder spins up quickly.
      NSDictionary *videoSettings = @{
        AVVideoCodecKey : AVVideoCodecTypeH264,
        AVVideoWidthKey : @(warmupSize.width),
        AVVideoHeightKey : @(warmupSize.height),
        AVVideoCompressionPropertiesKey : @{
          AVVideoAverageBitRateKey : @(100000),
          AVVideoProfileLevelKey : AVVideoProfileLevelH264BaselineAutoLevel,
          AVVideoAllowFrameReorderingKey : @NO,
        }
      };

      AVAssetWriterInput *warmupInput =
          [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo
                                         outputSettings:videoSettings];
      warmupInput.expectsMediaDataInRealTime = YES;

      NSDictionary *bufferAttrs = @{
        (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA),
        (id)kCVPixelBufferWidthKey : @(warmupSize.width),
        (id)kCVPixelBufferHeightKey : @(warmupSize.height),
      };

      AVAssetWriterInputPixelBufferAdaptor *warmupAdaptor =
          [[AVAssetWriterInputPixelBufferAdaptor alloc]
               initWithAssetWriterInput:warmupInput
            sourcePixelBufferAttributes:bufferAttrs];

      if (![warmupWriter canAddInput:warmupInput]) {
        RJLogWarning(@"Video encoder: Prewarm cannot add input");
        return;
      }

      [warmupWriter addInput:warmupInput];

      if (![warmupWriter startWriting]) {
        RJLogWarning(@"Video encoder: Prewarm failed to start writing");
        return;
      }

      [warmupWriter startSessionAtSourceTime:kCMTimeZero];

      // Create and encode a single dummy frame to trigger H.264 encoder init
      CVPixelBufferRef dummyBuffer = NULL;
      NSDictionary *pixelBufferOpts = @{
        (id)kCVPixelBufferCGImageCompatibilityKey : @YES,
        (id)kCVPixelBufferCGBitmapContextCompatibilityKey : @YES,
        (id)kCVPixelBufferIOSurfacePropertiesKey : @{},
      };

      CVReturn cvStatus = CVPixelBufferCreate(
          kCFAllocatorDefault, (size_t)warmupSize.width,
          (size_t)warmupSize.height, kCVPixelFormatType_32BGRA,
          (__bridge CFDictionaryRef)pixelBufferOpts, &dummyBuffer);

      if (cvStatus == kCVReturnSuccess && dummyBuffer) {
        // Fill with black (optional, just ensures valid data)
        CVPixelBufferLockBaseAddress(dummyBuffer, 0);
        void *baseAddr = CVPixelBufferGetBaseAddress(dummyBuffer);
        size_t dataSize = CVPixelBufferGetDataSize(dummyBuffer);
        memset(baseAddr, 0, dataSize);
        CVPixelBufferUnlockBaseAddress(dummyBuffer, 0);

        // Append the dummy frame - THIS is what triggers encoder init
        if (warmupInput.readyForMoreMediaData) {
          [warmupAdaptor appendPixelBuffer:dummyBuffer
                      withPresentationTime:kCMTimeZero];
        }

        CVPixelBufferRelease(dummyBuffer);
      }

      // Finish writing (use semaphore to wait synchronously); the wait is
      // bounded at 2s so the encoding queue can never hang here.
      [warmupInput markAsFinished];
      dispatch_semaphore_t sem = dispatch_semaphore_create(0);
      [warmupWriter finishWritingWithCompletionHandler:^{
        dispatch_semaphore_signal(sem);
      }];
      dispatch_semaphore_wait(
          sem, dispatch_time(DISPATCH_TIME_NOW, 2 * NSEC_PER_SEC));

      // Cleanup — the warmup output is never needed.
      [[NSFileManager defaultManager] removeItemAtURL:warmupURL error:nil];

      NSTimeInterval elapsed = (CACurrentMediaTime() - startTime) * 1000;
      RJLogInfo(@"Video encoder: H.264 encoder pre-warmed in %.1fms", elapsed);
    }
  });
}
799
+
800
+ #pragma mark - Class-level Encoder Prewarm
801
+
802
static BOOL sEncoderPrewarmed = NO;
static dispatch_once_t sPrewarmOnceToken;

/// Pre-warms the hardware H.264 encoder once per process, off the main thread.
///
/// Creates a throwaway AVAssetWriter, encodes a single black 100x100 frame
/// and finalizes the file, which forces VideoToolbox to initialize the
/// hardware encoder up front instead of stalling the first real frame encode.
/// The temp output file is deleted afterwards. Safe to call repeatedly from
/// any thread; only the first call does work.
+ (void)prewarmEncoderAsync {
  dispatch_once(&sPrewarmOnceToken, ^{
    // dispatch_once already guarantees this block runs at most once per
    // process, so no re-entry guard is needed here (the previous
    // `if (sEncoderPrewarmed) return;` could never fire). The flag is kept
    // purely as an observable "prewarm was kicked off" marker.
    sEncoderPrewarmed = YES;

    // Run prewarm on a low-priority background queue so it never competes
    // with user-visible work.
    dispatch_async(dispatch_get_global_queue(QOS_CLASS_UTILITY, 0), ^{
      @autoreleasepool {
        NSTimeInterval startTime = CACurrentMediaTime();

        // Use a small size for fast prewarm (H.264 requires even dimensions)
        CGSize warmupSize = CGSizeMake(100, 100);

        // Create temp file for dummy segment; clear any stale leftover first.
        NSURL *tempDir = [NSURL fileURLWithPath:NSTemporaryDirectory()];
        NSURL *warmupURL =
            [tempDir URLByAppendingPathComponent:@"rj_encoder_prewarm.mp4"];
        [[NSFileManager defaultManager] removeItemAtURL:warmupURL error:nil];

        NSError *error = nil;
        AVAssetWriter *warmupWriter =
            [[AVAssetWriter alloc] initWithURL:warmupURL
                                      fileType:AVFileTypeMPEG4
                                         error:&error];
        if (error || !warmupWriter) {
          RJLogWarning(@"Video encoder: Class prewarm failed: %@", error);
          return;
        }

        // Minimal baseline-profile settings: low bitrate, no B-frames, so
        // the encoder spins up quickly.
        NSDictionary *videoSettings = @{
          AVVideoCodecKey : AVVideoCodecTypeH264,
          AVVideoWidthKey : @(warmupSize.width),
          AVVideoHeightKey : @(warmupSize.height),
          AVVideoCompressionPropertiesKey : @{
            AVVideoAverageBitRateKey : @(100000),
            AVVideoProfileLevelKey : AVVideoProfileLevelH264BaselineAutoLevel,
            AVVideoAllowFrameReorderingKey : @NO,
          }
        };

        AVAssetWriterInput *warmupInput =
            [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo
                                           outputSettings:videoSettings];
        warmupInput.expectsMediaDataInRealTime = YES;

        NSDictionary *bufferAttrs = @{
          (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA),
          (id)kCVPixelBufferWidthKey : @(warmupSize.width),
          (id)kCVPixelBufferHeightKey : @(warmupSize.height),
        };

        AVAssetWriterInputPixelBufferAdaptor *warmupAdaptor =
            [[AVAssetWriterInputPixelBufferAdaptor alloc]
                 initWithAssetWriterInput:warmupInput
              sourcePixelBufferAttributes:bufferAttrs];

        if (![warmupWriter canAddInput:warmupInput]) {
          RJLogWarning(@"Video encoder: Class prewarm cannot add input");
          return;
        }

        [warmupWriter addInput:warmupInput];

        if (![warmupWriter startWriting]) {
          RJLogWarning(@"Video encoder: Class prewarm failed to start");
          return;
        }

        [warmupWriter startSessionAtSourceTime:kCMTimeZero];

        // Create and encode a single dummy frame to trigger H.264 encoder init
        CVPixelBufferRef dummyBuffer = NULL;
        NSDictionary *pixelBufferOpts = @{
          (id)kCVPixelBufferCGImageCompatibilityKey : @YES,
          (id)kCVPixelBufferCGBitmapContextCompatibilityKey : @YES,
          (id)kCVPixelBufferIOSurfacePropertiesKey : @{},
        };

        CVReturn cvStatus = CVPixelBufferCreate(
            kCFAllocatorDefault, (size_t)warmupSize.width,
            (size_t)warmupSize.height, kCVPixelFormatType_32BGRA,
            (__bridge CFDictionaryRef)pixelBufferOpts, &dummyBuffer);

        if (cvStatus == kCVReturnSuccess && dummyBuffer) {
          // Fill with black so the encoder receives valid, deterministic data.
          CVPixelBufferLockBaseAddress(dummyBuffer, 0);
          void *baseAddr = CVPixelBufferGetBaseAddress(dummyBuffer);
          size_t dataSize = CVPixelBufferGetDataSize(dummyBuffer);
          memset(baseAddr, 0, dataSize);
          CVPixelBufferUnlockBaseAddress(dummyBuffer, 0);

          // Appending the frame is what actually spins up the encoder.
          if (warmupInput.readyForMoreMediaData) {
            [warmupAdaptor appendPixelBuffer:dummyBuffer
                        withPresentationTime:kCMTimeZero];
          }

          CVPixelBufferRelease(dummyBuffer);
        }

        // Finish writing; bounded semaphore wait so this queue cannot hang
        // and the file removal below happens after the writer is done.
        [warmupInput markAsFinished];
        dispatch_semaphore_t sem = dispatch_semaphore_create(0);
        [warmupWriter finishWritingWithCompletionHandler:^{
          dispatch_semaphore_signal(sem);
        }];
        dispatch_semaphore_wait(
            sem, dispatch_time(DISPATCH_TIME_NOW, 2 * NSEC_PER_SEC));

        [[NSFileManager defaultManager] removeItemAtURL:warmupURL error:nil];

        NSTimeInterval elapsed = (CACurrentMediaTime() - startTime) * 1000;
        RJLogInfo(@"Video encoder: H.264 class prewarm completed in %.1fms",
                  elapsed);
      }
    });
  });
}
921
+
922
+ #pragma mark - Crash Recovery
923
+
924
+ static NSString *const kRJPendingSegmentMetadataFile =
925
+ @"rj_pending_segment.json";
926
+
927
/// Absolute path of the JSON file used to persist crash-recovery segment
/// metadata, located in the app's Caches directory.
+ (NSString *)pendingSegmentMetadataPath {
  NSString *cachesDirectory =
      [NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask,
                                           YES) firstObject];
  return [cachesDirectory
      stringByAppendingPathComponent:kRJPendingSegmentMetadataFile];
}
934
+
935
/// Loads crash-recovery segment metadata persisted by a previous run's
/// `-emergencyFlushSync`, or nil if none exists or the file is unreadable.
///
/// @return The metadata dictionary, or nil when the file is missing,
///         unreadable, or does not contain a top-level JSON object.
+ (nullable NSDictionary *)pendingCrashSegmentMetadata {
  NSString *path = [self pendingSegmentMetadataPath];

  // -dataWithContentsOfFile: returns nil when the file does not exist, so a
  // separate fileExistsAtPath: pre-check is redundant (and TOCTOU-racy).
  NSData *data = [NSData dataWithContentsOfFile:path];
  if (!data) {
    return nil;
  }

  // Guard against a corrupted file whose top-level JSON value is not an
  // object; the declared return type promises a dictionary.
  id object = [NSJSONSerialization JSONObjectWithData:data options:0 error:nil];
  if (![object isKindOfClass:[NSDictionary class]]) {
    return nil;
  }
  return object;
}
947
+
948
/// Deletes the persisted crash-recovery metadata file, if present.
/// Removal errors (including "file not found") are intentionally ignored.
+ (void)clearPendingCrashSegmentMetadata {
  NSFileManager *fileManager = [NSFileManager defaultManager];
  [fileManager removeItemAtPath:[self pendingSegmentMetadataPath] error:nil];
}
952
+
953
/// Best-effort synchronous flush of the in-progress segment when a crash has
/// been detected. Attempts to finalize the active AVAssetWriter within a
/// 500ms budget, then persists segment metadata to disk so the next launch
/// can recover/upload the segment via +pendingCrashSegmentMetadata.
///
/// @return YES only when the writer reached AVAssetWriterStatusCompleted
///         within the timeout; NO otherwise (including the no-op cases).
///
/// NOTE(review): this is called from a crash path, so every step is
/// defensive and bounded; statement order is deliberate — do not reorder.
- (BOOL)emergencyFlushSync {
  RJLogInfo(@"[RJ-VIDEO-ENCODER] ⚠️ Emergency flush called (crash detected)");

  // Nothing to do if no segment is currently being written.
  if (!self.assetWriter) {
    RJLogInfo(@"[RJ-VIDEO-ENCODER] No active asset writer, nothing to flush");
    return NO;
  }

  // finishWriting is only valid from the Writing state.
  if (self.assetWriter.status != AVAssetWriterStatusWriting) {
    RJLogInfo(@"[RJ-VIDEO-ENCODER] Asset writer not in writing state (status=%ld)",
              (long)self.assetWriter.status);
    return NO;
  }

  // Snapshot segment state up front, before the writer is torn down, so the
  // metadata written below reflects the segment as it was at crash time.
  NSURL *segmentURL = self.currentSegmentURL;
  NSTimeInterval startTime = self.segmentFirstFrameTimestamp > 0
                                 ? self.segmentFirstFrameTimestamp
                                 : self.segmentStartTime;
  NSTimeInterval endTime = self.lastFrameTimestamp > 0
                               ? self.lastFrameTimestamp
                               : [[NSDate date] timeIntervalSince1970] * 1000;
  NSInteger frameCount = self.frameCount;
  NSString *sessionId = self.internalSessionId;

  // An empty segment carries no recoverable video — skip entirely.
  if (frameCount == 0) {
    RJLogInfo(@"[RJ-VIDEO-ENCODER] No frames in segment, skipping emergency flush");
    return NO;
  }

  RJLogInfo(@"[RJ-VIDEO-ENCODER] Emergency flush: %ld frames, url=%@",
            (long)frameCount, segmentURL.path);

  // markAsFinished can throw if the writer is in an unexpected state; on a
  // crash path we swallow that and still attempt finishWriting below.
  @try {
    [self.videoInput markAsFinished];
  } @catch (NSException *e) {
    RJLogInfo(@"[RJ-VIDEO-ENCODER] Exception marking input finished: %@", e);
  }

  dispatch_semaphore_t semaphore = dispatch_semaphore_create(0);
  __block BOOL finishSuccess = NO;

  [self.assetWriter finishWritingWithCompletionHandler:^{
    finishSuccess = (self.assetWriter.status == AVAssetWriterStatusCompleted);
    dispatch_semaphore_signal(semaphore);
  }];

  // Bounded wait: the process is about to die, so cap finalization at 500ms
  // rather than risk hanging the crash handler.
  dispatch_time_t timeout =
      dispatch_time(DISPATCH_TIME_NOW, (int64_t)(500 * NSEC_PER_MSEC));
  long result = dispatch_semaphore_wait(semaphore, timeout);

  if (result != 0) {
    RJLogInfo(@"[RJ-VIDEO-ENCODER] Emergency flush timed out");
  }

  // Persist metadata regardless of finalization outcome; "finalized" tells
  // the next launch whether the MP4 was cleanly completed.
  // NOTE(review): on timeout, finishSuccess may still be flipped by the
  // completion handler concurrently — presumably acceptable here; confirm.
  NSDictionary *metadata = @{
    @"segmentPath" : segmentURL.path ?: @"",
    @"sessionId" : sessionId ?: @"",
    @"startTime" : @(startTime),
    @"endTime" : @(endTime),
    @"frameCount" : @(frameCount),
    @"timestamp" : @([[NSDate date] timeIntervalSince1970] * 1000),
    @"finalized" : @(finishSuccess),
  };

  NSData *metadataData = [NSJSONSerialization dataWithJSONObject:metadata
                                                         options:0
                                                           error:nil];
  if (metadataData) {
    NSString *metadataPath = [RJVideoEncoder pendingSegmentMetadataPath];
    [metadataData writeToFile:metadataPath atomically:YES];
    RJLogInfo(@"[RJ-VIDEO-ENCODER] Saved pending segment metadata to %@",
              metadataPath);
  }

  // Drop writer objects so no further appends can be attempted.
  self.assetWriter = nil;
  self.videoInput = nil;
  self.adaptor = nil;

  RJLogInfo(@"[RJ-VIDEO-ENCODER] Emergency flush completed (success=%d)",
            finishSuccess);
  return finishSuccess;
}
1035
+
1036
+ @end