@rejourneyco/react-native 1.0.3 → 1.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/android/src/main/java/com/rejourney/network/DeviceAuthManager.kt +11 -1
- package/ios/Capture/RJCaptureEngine.m +9 -61
- package/lib/commonjs/index.js +2 -1
- package/lib/commonjs/sdk/constants.js +1 -1
- package/lib/module/index.js +2 -1
- package/lib/module/sdk/constants.js +1 -1
- package/lib/typescript/sdk/constants.d.ts +1 -1
- package/package.json +1 -1
- package/src/index.ts +2 -1
- package/src/sdk/constants.ts +1 -1
package/android/src/main/java/com/rejourney/network/DeviceAuthManager.kt
CHANGED

@@ -556,7 +556,14 @@ class DeviceAuthManager private constructor(private val context: Context) {
         keyStore.load(null)

         val privateKey = keyStore.getKey(KEYSTORE_ALIAS, null) as? PrivateKey
-            ?:
+            ?: run {
+                // Critical error: Key missing but we have a credential ID.
+                // This implies a corrupted state (e.g. app backup restored prefs but not keystore).
+                // We MUST clear credentials to allow re-registration.
+                Logger.error("CRITICAL: Private key not found for existing credential. Clearing credentials.")
+                clearCredentials()
+                throw Exception("Private key not found - credentials cleared")
+            }

         val challengeBytes = Base64.decode(challenge, Base64.DEFAULT)

@@ -569,6 +576,9 @@ class DeviceAuthManager private constructor(private val context: Context) {
             Base64.encodeToString(signatureBytes, Base64.NO_WRAP)
         } catch (e: Exception) {
             Logger.error("Failed to sign challenge", e)
+            if (e.message?.contains("Private key not found") == true) {
+                clearCredentials()
+            }
             null
         }
     }
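For readers skimming the Android change: the two hunks above make a missing Keystore entry self-healing instead of a permanent signing failure. Below is a minimal, self-contained Kotlin sketch of that recovery pattern, not the SDK's actual class. KEYSTORE_ALIAS, clearCredentials(), and the "Private key not found" message are taken from the diff; the alias value, the SHA256withECDSA algorithm, the log tag, and the callback-shaped clearCredentials are illustrative assumptions.

import android.util.Base64
import android.util.Log
import java.security.KeyStore
import java.security.PrivateKey
import java.security.Signature

// Sketch only: illustrates the recovery flow added in the hunks above.
private const val KEYSTORE_ALIAS = "rejourney_device_key" // assumed alias value
private const val TAG = "DeviceAuthSketch"

class DeviceAuthSketch(private val clearCredentials: () -> Unit) {

    /** Signs a Base64 challenge; on a corrupted keystore state, clears credentials and returns null. */
    fun signChallenge(challenge: String): String? {
        return try {
            val keyStore = KeyStore.getInstance("AndroidKeyStore").apply { load(null) }

            val privateKey = keyStore.getKey(KEYSTORE_ALIAS, null) as? PrivateKey
                ?: run {
                    // Key missing although a credential ID exists (e.g. a backup restored
                    // SharedPreferences but not the hardware keystore): clear credentials
                    // so the device can re-register, then surface the failure.
                    Log.e(TAG, "Private key not found for existing credential - clearing credentials")
                    clearCredentials()
                    throw Exception("Private key not found - credentials cleared")
                }

            val signatureBytes = Signature.getInstance("SHA256withECDSA").run { // assumed algorithm
                initSign(privateKey)
                update(Base64.decode(challenge, Base64.DEFAULT))
                sign()
            }
            Base64.encodeToString(signatureBytes, Base64.NO_WRAP)
        } catch (e: Exception) {
            // Mirrors the second hunk: the missing-key case must never leave stale credentials behind.
            if (e.message?.contains("Private key not found") == true) {
                clearCredentials()
            }
            null
        }
    }
}

The design choice the diff encodes is that a failed signature is always paired with cleared credentials when the key is gone, so the caller's next registration attempt starts from a clean slate.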
package/ios/Capture/RJCaptureEngine.m
CHANGED

@@ -203,23 +203,19 @@ typedef struct {
     screenSize = CGSizeMake(390, 844);
   }

-  // Always use main screen scale to avoid UIWindow casting header issues
-  // and ensure consistent native resolution (@2x or @3x)
   CGFloat screenScale = [UIScreen mainScreen].scale;
   if (!isfinite(screenScale) || screenScale <= 0) {
     screenScale = 1.0;
   }

-  CGFloat scaleToUse = self.captureScale;
+  CGFloat scaleToUse = self.captureScale;
   if (!isfinite(scaleToUse) || scaleToUse <= 0) {
     scaleToUse = RJDefaultCaptureScale;
   }

-  // define targetFPS and targetBitrate
   NSInteger targetFPS = self.videoFPS;
   NSInteger targetBitrate = self.videoBitrate;

-  // Performance Level Overrides - only reduce slightly for reduced performance
   if (self.internalPerformanceLevel >= RJPerformanceLevelReduced) {
     scaleToUse = MIN(scaleToUse, 0.25);
   }
@@ -228,24 +224,16 @@ typedef struct {
     scaleToUse = MIN(scaleToUse, 0.15);
   }

-  // Removed aggressive warmup/scroll downscaling multipliers (0.3x) which
-  // caused fuzziness

-  // Clamp
   scaleToUse = MIN(MAX(scaleToUse, 0.05), 1.0);

-  // Calculate dimensions
-  // Use native pixel dimensions * scaleToUse
   size_t width = (size_t)(screenSize.width * screenScale * scaleToUse);
   size_t height = (size_t)(screenSize.height * screenScale * scaleToUse);

-  // H.264 Alignment (mod 2)
   width = (width / 2) * 2;
   height = (height / 2) * 2;

-
-  CGFloat maxDimension =
-      1920.0; // Increased from 800.0 to support high-res capture
+  CGFloat maxDimension = 1920.0;
   if (width > maxDimension || height > maxDimension) {
     CGFloat ratio = MIN(maxDimension / width, maxDimension / height);
     width = (size_t)(width * ratio);
@@ -482,8 +470,7 @@ typedef struct {
 }

 - (void)appWillEnterBackground:(NSNotification *)notification {
-
-  // Avoid rendering during app switch transitions.
+
   self.isInBackground = YES;
   RJLogDebug(@"CaptureEngine: App will enter background - suspending capture");
 }
@@ -499,14 +486,8 @@ typedef struct {
 - (void)appDidBecomeActive:(NSNotification *)notification {
   self.isInBackground = NO;

-  // DEFENSIVE FIX: Warmup period
-  // When returning from background, the view hierarchy and layout may not be
-  // stable immediately. This can cause privacy masks to be drawn in the wrong
-  // position relative to the content (race condition). We impose a short
-  // "warmup" period where we skip capture to allow AutoLayout to settle.
   self.isWarmingUp = YES;

-  // Clear stale caches to force fresh scan
   if (self.lastCapturedPixelBuffer) {
     CVPixelBufferRelease(self.lastCapturedPixelBuffer);
     self.lastCapturedPixelBuffer = NULL;
@@ -600,11 +581,9 @@ typedef struct {

   [self createPixelBufferPoolWithWidth:width height:height];

-  // Also pre-warm native pool
   CGFloat screenScale = [UIScreen mainScreen].scale;
   size_t nativeW = (size_t)(width / layout.unifiedScale * screenScale);
   size_t nativeH = (size_t)(height / layout.unifiedScale * screenScale);
-  // Align
   nativeW = (nativeW / 2) * 2;
   nativeH = (nativeH / 2) * 2;

@@ -618,7 +597,7 @@ typedef struct {
 - (void)createPixelBufferPoolWithWidth:(size_t)width height:(size_t)height {
   if (_pixelBufferPool) {
     if (_poolWidth == width && _poolHeight == height) {
-      return;
+      return;
     }
     CVPixelBufferPoolRelease(_pixelBufferPool);
     _pixelBufferPool = NULL;
@@ -648,20 +627,14 @@ typedef struct {
 }

 - (void)prewarmRenderServer {
-
-  // server. CRITICAL: We must use the EXACT same pipeline (PixelPool ->
-  // BitmapContext) as the real capture to ensure the specific internal
-  // toggles for CGBitmapContext and CVPixelBuffer interactions are warmed up.
-  // Using UIGraphicsBeginImageContext here is useless because we don't use it
-  // anymore.
+

   dispatch_async(dispatch_get_main_queue(), ^{
     @try {
       RJLogDebug(@"CaptureEngine: Pre-warming Render Server (Direct-Buffer "
                  @"Path)...");

-
-      // Using 100x100 is fine, it will trigger pool creation if needed
+
       CVPixelBufferRef pixelBuffer =
           [self createNativePixelBufferFromPoolWithWidth:100 height:100];
       if (pixelBuffer) {
@@ -676,8 +649,7 @@ typedef struct {
       CGColorSpaceRelease(colorSpace);

       if (context) {
-
-        // We need a valid view. Pushing context is what UIKit needs.
+
         UIGraphicsPushContext(context);

         UIWindow *window = self.windowProvider ? self.windowProvider() : nil;
@@ -685,14 +657,10 @@ typedef struct {
          window = [[UIApplication sharedApplication] windows].firstObject;

        if (window) {
-          // Force the heavy lifting
          [window drawViewHierarchyInRect:CGRectMake(0, 0, 100, 100)
                       afterScreenUpdates:NO];

-
-          // This is the most critical fix for the 650ms first-frame spike.
-          // We calculate what the first frame size WILL be and initialize
-          // the AVAssetWriter now.
+
          RJCaptureLayout layout =
              [self currentCaptureLayoutForWindow:window];
          CGSize expectedSize =
@@ -808,7 +776,6 @@ typedef struct {
       UIWindowScene *windowScene = (UIWindowScene *)scene;
       if (windowScene.activationState !=
           (NSInteger)0) { // Check active state if possible, or just skip
-        // simplified check to avoid enum mismatch issues if any
       }

       for (UIWindow *window in windowScene.windows) {
@@ -832,7 +799,6 @@ typedef struct {
     [windowsToScan addObject:primaryWindow];
   }

-  // Fallback if empty
   if (windowsToScan.count == 0 && primaryWindow) {
     [windowsToScan addObject:primaryWindow];
   }
@@ -910,7 +876,7 @@ typedef struct {
         @"CaptureEngine: Finishing segment synchronously (session stop)");
     [self.internalVideoEncoder finishSegmentSync];
     self.internalVideoEncoder =
-        nil;
+        nil;
   };
   if (dispatch_get_specific(kRJEncodingQueueKey)) {
     finishSync();
@@ -978,7 +944,6 @@ typedef struct {
     self.lastSafePixelBuffer = NULL;
   }

-  // Pre-warm view scanner class caches
   if (!self.didPrewarmScanner) {
     [self.viewScanner prewarmClassCaches];
     self.didPrewarmScanner = YES;
@@ -999,21 +964,18 @@ typedef struct {

   __weak typeof(self) weakSelf = self;

-  // OPTIMIZATION: Capture first frame after 300ms delay
   dispatch_after(
       dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.3 * NSEC_PER_SEC)),
       dispatch_get_main_queue(), ^{
         __strong typeof(weakSelf) strongSelf = weakSelf;
         if (strongSelf && strongSelf.internalIsRecording &&
             !strongSelf.isShuttingDown) {
-          // Auto-enable capture readiness if JS hasn't called notifyUIReady yet
           strongSelf.uiReadyForCapture = YES;
           RJLogDebug(@"Capturing initial frame after session start");
           [strongSelf captureVideoFrame];
         }
       });

-  // Use CADisplayLink for frame-synchronized capture
   [self setupDisplayLink];

   RJLogInfo(@"Video capture started: %ld FPS, %ld frames/segment "
@@ -1026,23 +988,18 @@ typedef struct {

   __weak typeof(self) weakSelf = self;

-  // CADisplayLink synchronized with display refresh
   _displayLink =
       [CADisplayLink displayLinkWithTarget:self
                                   selector:@selector(displayLinkCallback:)];

-  // Set preferred frame rate (iOS 15+)
   if (@available(iOS 15.0, *)) {
-    // Hard cap at target FPS to avoid 60Hz tick capability
     _displayLink.preferredFrameRateRange =
         CAFrameRateRangeMake(self.videoFPS, self.videoFPS, self.videoFPS);
   } else {
-    // For older iOS, use frameInterval
     NSInteger interval = (NSInteger)(60.0 / self.videoFPS);
     _displayLink.frameInterval = MAX(1, interval);
   }

-  // Add to RunLoop in CommonModes to capture during scroll
   [_displayLink addToRunLoop:[NSRunLoop mainRunLoop]
                      forMode:NSRunLoopCommonModes];

@@ -1109,14 +1066,11 @@ typedef struct {
   NSTimeInterval now = CACurrentMediaTime();
   NSTimeInterval interval = 1.0 / (CGFloat)self.videoFPS;

-  // Manual throttle check
   if (now - self.lastIntentTime < interval)
     return;

   self.lastIntentTime = now;

-  // Move capture to the next run loop idle cycle to avoid
-  // blocking the VSYNC callback with heavy work.
   self.runLoopCapturePending = YES;
   [self setupRunLoopObserver];
 }
@@ -1403,8 +1357,6 @@ typedef struct {
     }
     self.pendingDefensiveCaptureTime = 0;
     self.lastIntentTime = CACurrentMediaTime();
-    // Defensive capture triggered by heuristics (e.g.
-    // navigation) is High importance
     [self captureVideoFrameWithImportance:RJCaptureImportanceHigh
                                    reason:reason];
   });
@@ -1449,7 +1401,6 @@ typedef struct {
       ![currentSignature isEqualToString:self.lastSerializedSignature]);
   self.lastSerializedSignature = currentSignature;

-  // ===== CAPTURE LAYOUTS =====
   RJCaptureLayout targetLayout = [self currentCaptureLayoutForWindow:window];
   CGFloat targetScale = targetLayout.unifiedScale;

@@ -1693,9 +1644,7 @@ typedef struct {
     return NULL;
   }

-  // ===== PIXEL BUFFER ALLOCATION (NATIVE) =====
   RJ_TIME_START_NAMED(buffer);
-  // Use NATIVE pool
   CVPixelBufferRef pixelBuffer =
       [self createNativePixelBufferFromPoolWithWidth:width height:height];
   RJ_TIME_END_NAMED(buffer, RJPerfMetricBufferAlloc);
@@ -1709,7 +1658,6 @@ typedef struct {
   void *baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer);
   size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);

-  // Optimization #10: Use cached color space
   if (!self.commonColorSpace) {
     self.commonColorSpace = CGColorSpaceCreateDeviceRGB();
   }
package/lib/commonjs/index.js
CHANGED
@@ -571,6 +571,7 @@ const Rejourney = {
     const ignoreUrls = [apiUrl, '/api/ingest/presign', '/api/ingest/batch/complete', '/api/ingest/session/end', ...(_storedConfig?.networkIgnoreUrls || [])];
     getNetworkInterceptor().initNetworkInterceptor(request => {
       getAutoTracking().trackAPIRequest(request.success || false, request.statusCode, request.duration || 0, request.responseBodySize || 0);
+      Rejourney.logNetworkRequest(request);
     }, {
       ignoreUrls,
       captureSizes: _storedConfig?.networkCaptureSizes !== false
@@ -721,7 +722,7 @@ const Rejourney = {
         eventCount: 0,
         videoSegmentCount: 0,
         storageSize: 0,
-        sdkVersion:
+        sdkVersion: _constants.SDK_VERSION,
         isComplete: false
       },
       events: []
package/lib/commonjs/sdk/constants.js
CHANGED

@@ -8,7 +8,7 @@ exports.UPLOAD_SETTINGS = exports.STORAGE_SETTINGS = exports.SDK_VERSION = expor
  * Rejourney SDK Constants
  */

-const SDK_VERSION = exports.SDK_VERSION = '1.0.
+const SDK_VERSION = exports.SDK_VERSION = '1.0.3';

 /** Default configuration values */
 const DEFAULT_CONFIG = exports.DEFAULT_CONFIG = {
package/lib/module/index.js
CHANGED
@@ -463,6 +463,7 @@ const Rejourney = {
     const ignoreUrls = [apiUrl, '/api/ingest/presign', '/api/ingest/batch/complete', '/api/ingest/session/end', ...(_storedConfig?.networkIgnoreUrls || [])];
     getNetworkInterceptor().initNetworkInterceptor(request => {
       getAutoTracking().trackAPIRequest(request.success || false, request.statusCode, request.duration || 0, request.responseBodySize || 0);
+      Rejourney.logNetworkRequest(request);
     }, {
       ignoreUrls,
       captureSizes: _storedConfig?.networkCaptureSizes !== false
@@ -613,7 +614,7 @@ const Rejourney = {
         eventCount: 0,
         videoSegmentCount: 0,
         storageSize: 0,
-        sdkVersion:
+        sdkVersion: SDK_VERSION,
         isComplete: false
       },
       events: []
package/package.json
CHANGED
package/src/index.ts
CHANGED
@@ -533,6 +533,7 @@ const Rejourney: RejourneyAPI = {
           request.duration || 0,
           request.responseBodySize || 0
         );
+        Rejourney.logNetworkRequest(request);
       },
       {
         ignoreUrls,
@@ -712,7 +713,7 @@ const Rejourney: RejourneyAPI = {
         eventCount: 0,
         videoSegmentCount: 0,
         storageSize: 0,
-        sdkVersion:
+        sdkVersion: SDK_VERSION,
         isComplete: false,
       },
       events: [],