@rejourneyco/react-native 1.0.1 → 1.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/android/src/main/java/com/rejourney/RejourneyModuleImpl.kt +72 -391
- package/android/src/main/java/com/rejourney/capture/CaptureEngine.kt +11 -113
- package/android/src/main/java/com/rejourney/capture/SegmentUploader.kt +1 -15
- package/android/src/main/java/com/rejourney/capture/VideoEncoder.kt +1 -61
- package/android/src/main/java/com/rejourney/capture/ViewHierarchyScanner.kt +3 -1
- package/android/src/main/java/com/rejourney/lifecycle/SessionLifecycleService.kt +1 -22
- package/android/src/main/java/com/rejourney/network/DeviceAuthManager.kt +3 -26
- package/android/src/main/java/com/rejourney/network/NetworkMonitor.kt +0 -2
- package/android/src/main/java/com/rejourney/network/UploadManager.kt +7 -93
- package/android/src/main/java/com/rejourney/network/UploadWorker.kt +5 -41
- package/android/src/main/java/com/rejourney/privacy/PrivacyMask.kt +2 -58
- package/android/src/main/java/com/rejourney/touch/TouchInterceptor.kt +4 -4
- package/android/src/main/java/com/rejourney/utils/EventBuffer.kt +36 -7
- package/android/src/newarch/java/com/rejourney/RejourneyModule.kt +7 -0
- package/android/src/oldarch/java/com/rejourney/RejourneyModule.kt +9 -0
- package/ios/Capture/RJCaptureEngine.m +3 -34
- package/ios/Capture/RJVideoEncoder.m +0 -26
- package/ios/Capture/RJViewHierarchyScanner.m +68 -51
- package/ios/Core/RJLifecycleManager.m +0 -14
- package/ios/Core/Rejourney.mm +53 -129
- package/ios/Network/RJDeviceAuthManager.m +0 -2
- package/ios/Network/RJUploadManager.h +8 -0
- package/ios/Network/RJUploadManager.m +45 -0
- package/ios/Privacy/RJPrivacyMask.m +5 -31
- package/ios/Rejourney.h +0 -14
- package/ios/Touch/RJTouchInterceptor.m +21 -15
- package/ios/Utils/RJEventBuffer.m +57 -69
- package/ios/Utils/RJPerfTiming.m +0 -5
- package/ios/Utils/RJWindowUtils.m +87 -87
- package/lib/commonjs/components/Mask.js +1 -6
- package/lib/commonjs/index.js +46 -117
- package/lib/commonjs/sdk/autoTracking.js +39 -313
- package/lib/commonjs/sdk/constants.js +2 -13
- package/lib/commonjs/sdk/errorTracking.js +1 -29
- package/lib/commonjs/sdk/metricsTracking.js +3 -24
- package/lib/commonjs/sdk/navigation.js +3 -42
- package/lib/commonjs/sdk/networkInterceptor.js +7 -60
- package/lib/commonjs/sdk/utils.js +73 -19
- package/lib/module/components/Mask.js +1 -6
- package/lib/module/index.js +45 -121
- package/lib/module/sdk/autoTracking.js +39 -314
- package/lib/module/sdk/constants.js +2 -13
- package/lib/module/sdk/errorTracking.js +1 -29
- package/lib/module/sdk/index.js +0 -2
- package/lib/module/sdk/metricsTracking.js +3 -24
- package/lib/module/sdk/navigation.js +3 -42
- package/lib/module/sdk/networkInterceptor.js +7 -60
- package/lib/module/sdk/utils.js +73 -19
- package/lib/typescript/NativeRejourney.d.ts +1 -0
- package/lib/typescript/sdk/autoTracking.d.ts +4 -4
- package/lib/typescript/sdk/utils.d.ts +31 -1
- package/lib/typescript/types/index.d.ts +0 -1
- package/package.json +17 -11
- package/src/NativeRejourney.ts +2 -0
- package/src/components/Mask.tsx +0 -3
- package/src/index.ts +43 -92
- package/src/sdk/autoTracking.ts +51 -284
- package/src/sdk/constants.ts +13 -13
- package/src/sdk/errorTracking.ts +1 -17
- package/src/sdk/index.ts +0 -2
- package/src/sdk/metricsTracking.ts +5 -33
- package/src/sdk/navigation.ts +8 -29
- package/src/sdk/networkInterceptor.ts +9 -42
- package/src/sdk/utils.ts +76 -19
- package/src/types/index.ts +0 -29
package/ios/Capture/RJCaptureEngine.m (+3 -34)

@@ -1155,9 +1155,6 @@ typedef struct {
   NSTimeInterval now = CACurrentMediaTime();

   if (self.pendingCapture) {
-    // If we have a pending capture, force it to resolve now.
-    // If the new request is High importance, we effectively "upgrade" the
-    // current cycle by ensuring it runs immediately.
     self.pendingCapture.deadline = now;
     [self attemptPendingCapture:self.pendingCapture fullScan:NO];
   }
@@ -1173,8 +1170,6 @@
     grace = MIN(grace, 0.3);
   }

-  // High importance requests should have a shorter grace, forcing faster
-  // resolution
   if (isCritical) {
     grace = MIN(grace, 0.1);
   }
@@ -1731,7 +1726,6 @@
   }

   if (self.internalPerformanceLevel == RJPerformanceLevelMinimal) {
-    // Minimal mode trades quality for speed
     CGContextSetInterpolationQuality(context, kCGInterpolationNone);
     CGContextSetShouldAntialias(context, false);
     CGContextSetAllowsAntialiasing(context, false);
@@ -1741,17 +1735,13 @@
     CGContextSetAllowsAntialiasing(context, true);
   }

-  // Set up context transform (flip for UIKit coordinates)
   CGContextScaleCTM(context, contextScale, -contextScale);
   CGContextTranslateCTM(context, 0, -sizePoints.height);

-  // Optimization #9: Fast Memset Clear (White = 0xFF)
-  // Much faster than CGContextFillRect
   memset(baseAddress, 0xFF, bytesPerRow * height);

   UIGraphicsPushContext(context);

-  // ===== RENDERING: ALWAYS USE drawViewHierarchyInRect =====
   RJ_TIME_START_NAMED(render);
   BOOL didDraw = NO;
   @try {
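The comments removed above documented two details of the frame-render setup: the context is flipped so UIKit's top-left origin maps onto the Core Graphics bitmap, and the BGRA buffer is cleared to white with a single memset (0xFF in every byte) rather than a CGContextFillRect pass. A minimal sketch of that setup, using illustrative names (pixelBuffer, contextScale, sizePoints) rather than the package's exact variables:

#import <UIKit/UIKit.h>
#import <CoreVideo/CoreVideo.h>
#include <string.h>

// Sketch: wrap a locked BGRA pixel buffer in a flipped, white-cleared context.
static CGContextRef RJMakeFlippedContext(CVPixelBufferRef pixelBuffer,
                                         CGFloat contextScale,
                                         CGSize sizePoints) {
  CVPixelBufferLockBaseAddress(pixelBuffer, 0);
  void *baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer);
  size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
  size_t height = CVPixelBufferGetHeight(pixelBuffer);

  CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
  CGContextRef context = CGBitmapContextCreate(
      baseAddress, CVPixelBufferGetWidth(pixelBuffer), height, 8, bytesPerRow,
      colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
  CGColorSpaceRelease(colorSpace);
  if (!context) {
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    return NULL;
  }

  // Flip: Core Graphics uses a bottom-left origin, UIKit draws from the top-left.
  CGContextScaleCTM(context, contextScale, -contextScale);
  CGContextTranslateCTM(context, 0, -sizePoints.height);

  // White clear: every byte of a BGRA pixel set to 0xFF is opaque white,
  // so one memset replaces a full-rect fill.
  memset(baseAddress, 0xFF, bytesPerRow * height);
  return context; // caller draws the hierarchy, then releases and unlocks
}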
@@ -1772,10 +1762,6 @@
     return NULL;
   }

-  // Recalculate effective scale so consumers (PrivacyMask) know the real
-  // mapping Used by caller to pass to applyToPixelBuffer Note: we don't need to
-  // return it, caller has it.
-
   return pixelBuffer;
 }

@@ -1815,9 +1801,6 @@
 }

 - (NSTimeInterval)currentTimestamp {
-  // Always use wall clock time for session timestamps
-  // CACurrentMediaTime optimization removed - it causes drift after
-  // background periods The ~1ms overhead is acceptable for 1fps capture
   return [[NSDate date] timeIntervalSince1970] * 1000.0;
 }

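The removed note on currentTimestamp records why wall-clock time is used for session timestamps: CACurrentMediaTime() is monotonic host time that does not advance while the device sleeps, so offsets based on it drift after background periods, and the comment judged the small NSDate overhead acceptable at a 1 fps capture rate. A tiny illustrative comparison of the two clocks:

#import <Foundation/Foundation.h>
#import <QuartzCore/QuartzCore.h>

// Wall clock in milliseconds: keeps advancing across backgrounding and sleep.
static NSTimeInterval RJWallClockMs(void) {
  return [[NSDate date] timeIntervalSince1970] * 1000.0;
}

// Monotonic host time: cheaper, but it does not tick while the device sleeps,
// so timestamps derived from a pre-background offset drift afterwards.
static NSTimeInterval RJMediaTimeSec(void) {
  return CACurrentMediaTime();
}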
@@ -1829,8 +1812,6 @@
                endTime:(NSTimeInterval)endTime
             frameCount:(NSInteger)frameCount {

-  // Ensure we are on our own encoding queue to protect hierarchySnapshots
-  // and maintain thread safety (callback comes from VideoEncoder queue)
   dispatch_async(self.encodingQueue, ^{
     RJLogDebug(@"CaptureEngine: videoEncoderDidFinishSegment: %@ (%ld frames, "
                @"%.1fs), sessionId=%@",
@@ -1872,9 +1853,6 @@

   [self uploadCurrentHierarchySnapshots];

-  // NUCLEAR FIX: Do NOT call startSegmentWithSize here!
-  // The encoder's appendFrame method will auto-start a segment with the
-  // correct PIXEL dimensions when the next frame is captured.
   if (self.internalIsRecording && !self.isShuttingDown) {
     RJLogDebug(
         @"CaptureEngine: Segment finished, auto-start new on next frame");
@@ -1912,10 +1890,8 @@

   RJLogInfo(@"CaptureEngine: Pausing video capture (sync=%d)", synchronous);

-  // Reset capture-in-progress flag immediately to prevent stuck state
   self.captureInProgress = NO;

-  // Invalidate timer synchronously if in sync mode
   if (synchronous) {
     [self teardownDisplayLink];
   } else {
@@ -1925,7 +1901,7 @@
   }

   if (self.internalVideoEncoder) {
-      self.internalIsRecording = NO;
+    self.internalIsRecording = NO;

     if (synchronous) {
       void (^finishSync)(void) = ^{
@@ -1964,19 +1940,15 @@
     return;
   }

-  // Set recording back to YES to allow captureVideoFrame to proceed
   self.internalIsRecording = YES;

   RJLogInfo(@"CaptureEngine: Resuming video capture");

-  // Reset capture state to ensure clean resumption
-  // These flags may have been left in an inconsistent state when going to
-  // background
   self.captureInProgress = NO;
-    self.lastIntentTime = 0;
+  self.lastIntentTime = 0;

   self.internalPerformanceLevel =
-        RJPerformanceLevelNormal;
+      RJPerformanceLevelNormal;

   self.pendingCapture = nil;
   self.pendingCaptureGeneration = 0;
@@ -1995,7 +1967,6 @@
   if (window && self.internalVideoEncoder) {
     RJLogInfo(@"CaptureEngine: Resuming capture...");

-    // Use the optimized Display Link
     [self setupDisplayLink];

   } else {
@@ -2012,14 +1983,12 @@
   if (!self.internalIsRecording)
     return;

-  // Force update if screen changed
   if (![screenName isEqualToString:self.currentScreenName]) {
     NSTimeInterval now = CACurrentMediaTime();
     self.currentScreenName = screenName;
     RJLogDebug(@"Navigation to screen: %@ (forcing layout refresh)",
                screenName);

-    // Force layout change detection on next frame
     [self.captureHeuristics invalidateSignature];
     [self.captureHeuristics recordNavigationEventAtTime:now];
     self.lastSerializedSignature = nil;
package/ios/Capture/RJVideoEncoder.m (+0 -26)

@@ -520,16 +520,7 @@
 }

 - (void)cleanup {
-  // Only cancel the current in-progress segment, don't delete the entire temp
-  // directory. Completed segments may still be uploading and must not be
-  // deleted here. The RJSegmentUploader handles file cleanup after successful
-  // upload.
   [self cancelSegment];
-
-  // NOTE: Do NOT delete rj_segments directory here!
-  // Other segments may be in the middle of uploading.
-  // Old orphaned segments are cleaned up by
-  // RJSegmentUploader.cleanupOrphanedSegments()
 }

 #pragma mark - Private Methods
@@ -561,14 +552,9 @@
   size_t width = (size_t)self.currentFrameSize.width;
   size_t height = (size_t)self.currentFrameSize.height;

-  // CRITICAL FIX: Validate incoming image dimensions match expected size
-  // During keyboard/rotation transitions, image size may temporarily differ
-  // from currentFrameSize, causing CGBitmapContextCreate bytesPerRow mismatch
   size_t imageWidth = CGImageGetWidth(cgImage);
   size_t imageHeight = CGImageGetHeight(cgImage);

-  // Allow small variance (1-2 pixels) due to rounding, but reject major
-  // mismatches
   if (labs((long)imageWidth - (long)width) > 2 ||
       labs((long)imageHeight - (long)height) > 2) {
     RJLogDebug(@"Video encoder: Skipping frame - size mismatch (got %zux%zu, "
@@ -639,9 +625,6 @@
     colorSpace = CGColorSpaceCreateDeviceRGB();
   }

-  // CRITICAL: Validate bytesPerRow is sufficient for the target width
-  // Error "CGBitmapContextCreate: invalid data bytes/row" occurs when
-  // bytesPerRow < width * 4 (4 bytes per pixel for BGRA)
   size_t requiredBytesPerRow = width * 4;
   if (bytesPerRow < requiredBytesPerRow) {
     CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
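The two guards whose comments were removed in this and the previous hunk protect the encoder from transient geometry: frames whose CGImage dimensions drift from the expected size during keyboard or rotation transitions are skipped, and a pixel buffer whose stride is below width * 4 is rejected, since CGBitmapContextCreate fails with "invalid data bytes/row" for BGRA data. A sketch of both checks, with illustrative names:

#import <CoreGraphics/CoreGraphics.h>
#import <CoreVideo/CoreVideo.h>
#include <stdlib.h>

// Sketch: is this frame safe to wrap in a BGRA bitmap context?
static BOOL RJFrameIsUsable(CGImageRef cgImage, CVPixelBufferRef pixelBuffer,
                            size_t expectedWidth, size_t expectedHeight) {
  size_t imageWidth = CGImageGetWidth(cgImage);
  size_t imageHeight = CGImageGetHeight(cgImage);

  // Allow a pixel or two of rounding slack, but skip frames captured
  // mid-transition whose size no longer matches the encoder's.
  if (labs((long)imageWidth - (long)expectedWidth) > 2 ||
      labs((long)imageHeight - (long)expectedHeight) > 2) {
    return NO;
  }

  // BGRA is 4 bytes per pixel; the stride may include padding, but it can
  // never be smaller than width * 4.
  size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
  return bytesPerRow >= expectedWidth * 4;
}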
@@ -663,7 +646,6 @@
     return NULL;
   }

-  // Use fastest interpolation for pixel buffer drawing
   CGContextSetInterpolationQuality(context, kCGInterpolationNone);
   CGContextDrawImage(context, CGRectMake(0, 0, width, height), cgImage);
   CGContextRelease(context);
@@ -681,14 +663,10 @@
 }

 - (void)prewarmPixelBufferPool {
-  // Pre-warm the VideoToolbox H.264 encoder by creating a minimal AVAssetWriter
-  // and encoding a single dummy frame. This eliminates the ~1.5s spike on first
-  // real frame encode by front-loading the hardware encoder initialization.
   dispatch_async(self.encodingQueue, ^{
     @autoreleasepool {
       NSTimeInterval startTime = CACurrentMediaTime();

-      // Use a small size for fast prewarm (H.264 requires even dimensions)
       CGSize warmupSize = CGSizeMake(100, 100);

       // Create temp file for dummy segment
@@ -808,15 +786,12 @@ static dispatch_once_t sPrewarmOnceToken;
     return;
   sEncoderPrewarmed = YES;

-  // Run prewarm on a low-priority background queue
   dispatch_async(dispatch_get_global_queue(QOS_CLASS_UTILITY, 0), ^{
     @autoreleasepool {
       NSTimeInterval startTime = CACurrentMediaTime();

-      // Use a small size for fast prewarm (H.264 requires even dimensions)
       CGSize warmupSize = CGSizeMake(100, 100);

-      // Create temp file for dummy segment
       NSURL *tempDir = [NSURL fileURLWithPath:NSTemporaryDirectory()];
       NSURL *warmupURL =
           [tempDir URLByAppendingPathComponent:@"rj_encoder_prewarm.mp4"];
@@ -873,7 +848,6 @@ static dispatch_once_t sPrewarmOnceToken;

       [warmupWriter startSessionAtSourceTime:kCMTimeZero];

-      // Create and encode a single dummy frame to trigger H.264 encoder init
       CVPixelBufferRef dummyBuffer = NULL;
       NSDictionary *pixelBufferOpts = @{
         (id)kCVPixelBufferCGImageCompatibilityKey : @YES,
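The removed comments describe the prewarm trick these hunks touch: on a low-priority queue, create a throwaway AVAssetWriter at a tiny even size and append one dummy frame so the hardware H.264 encoder is initialized before the first real capture, avoiding the roughly 1.5 s first-frame spike the comment mentions. A condensed sketch of that idea; the warmup size and file name come from the diff, everything else is an assumption rather than the package's exact implementation:

#import <AVFoundation/AVFoundation.h>

// Sketch: one-shot H.264 encoder warmup on a utility queue.
static void RJPrewarmH264EncoderOnce(void) {
  static dispatch_once_t onceToken;
  dispatch_once(&onceToken, ^{
    dispatch_async(dispatch_get_global_queue(QOS_CLASS_UTILITY, 0), ^{
      @autoreleasepool {
        CGSize warmupSize = CGSizeMake(100, 100); // H.264 wants even dimensions
        NSURL *url = [[NSURL fileURLWithPath:NSTemporaryDirectory()]
            URLByAppendingPathComponent:@"rj_encoder_prewarm.mp4"];
        [[NSFileManager defaultManager] removeItemAtURL:url error:nil];

        NSError *error = nil;
        AVAssetWriter *writer = [AVAssetWriter assetWriterWithURL:url
                                                         fileType:AVFileTypeMPEG4
                                                            error:&error];
        if (!writer) return;

        AVAssetWriterInput *input = [AVAssetWriterInput
            assetWriterInputWithMediaType:AVMediaTypeVideo
                           outputSettings:@{
                             AVVideoCodecKey : AVVideoCodecTypeH264,
                             AVVideoWidthKey : @(warmupSize.width),
                             AVVideoHeightKey : @(warmupSize.height)
                           }];
        AVAssetWriterInputPixelBufferAdaptor *adaptor =
            [AVAssetWriterInputPixelBufferAdaptor
                assetWriterInputPixelBufferAdaptorWithAssetWriterInput:input
                                           sourcePixelBufferAttributes:@{
                                             (id)kCVPixelBufferPixelFormatTypeKey :
                                                 @(kCVPixelFormatType_32BGRA)
                                           }];
        [writer addInput:input];
        [writer startWriting];
        [writer startSessionAtSourceTime:kCMTimeZero];

        // One dummy frame is enough to force VideoToolbox to spin up.
        CVPixelBufferRef dummy = NULL;
        CVPixelBufferPoolCreatePixelBuffer(NULL, adaptor.pixelBufferPool, &dummy);
        if (dummy && input.readyForMoreMediaData) {
          [adaptor appendPixelBuffer:dummy withPresentationTime:kCMTimeZero];
        }
        if (dummy) {
          CVPixelBufferRelease(dummy);
        }
        [input markAsFinished];
        [writer finishWritingWithCompletionHandler:^{
          [[NSFileManager defaultManager] removeItemAtURL:url error:nil];
        }];
      }
    });
  });
}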
package/ios/Capture/RJViewHierarchyScanner.m (+68 -51)

@@ -152,8 +152,10 @@ static inline uint64_t fnv1a_u64(uint64_t h, const void *data, size_t len) {
 @property(nonatomic, strong) NSMutableArray<NSValue *> *mutableVideoFrames;
 @property(nonatomic, strong) NSMutableArray<NSValue *> *mutableMapViewFrames;
 @property(nonatomic, strong) NSMutableArray<NSValue *> *mutableMapViewPointers;
-@property(nonatomic, strong)
-    NSMutableArray<NSValue *> *mutableScrollViewPointers;
+@property(nonatomic, strong)
+    NSMutableArray<NSValue *> *mutableScrollViewPointers;
+@property(nonatomic, strong)
+    NSMutableArray<NSValue *> *mutableAnimatedViewPointers;

 @property(nonatomic, strong) NSMapTable<Class, NSString *> *classNameCache;

@@ -251,11 +253,14 @@ static inline uint64_t fnv1a_u64(uint64_t h, const void *data, size_t len) {
   // because map tiles load asynchronously and layout signature doesn't
   // capture them
   _mapViewClasses = [NSSet setWithArray:@[
-    @"MKMapView",
-    @"AIRMap",
-    @"AIRMapView",
-    @"RNMMapView",
-    @"GMSMapView",
+    @"MKMapView",     // Apple Maps
+    @"AIRMap",        // react-native-maps (iOS)
+    @"AIRMapView",    // react-native-maps alternate
+    @"RNMMapView",    // react-native-maps newer versions
+    @"GMSMapView",    // Google Maps SDK
+    @"MGLMapView",    // Mapbox GL Native (< v10)
+    @"RCTMGLMapView", // React Native Mapbox wrapper
+    @"MapboxMapView", // Mapbox Maps SDK (v10+)
   ]];

   _layoutSignatureHash = 14695981039346656037ULL;
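The scanner seeds _layoutSignatureHash with 14695981039346656037ULL, the FNV-1a 64-bit offset basis, and the hunk headers show a fnv1a_u64 helper that folds bytes into the running hash. A self-contained sketch of that hashing scheme; the mixInt/mixFrame helpers are assumptions modeled on the [self mixInt:...] calls visible later in the diff, not the package's exact code:

#import <UIKit/UIKit.h>
#include <math.h>

// FNV-1a over a byte buffer: XOR each byte in, then multiply by the FNV prime.
static inline uint64_t fnv1a_u64(uint64_t h, const void *data, size_t len) {
  const uint8_t *bytes = (const uint8_t *)data;
  for (size_t i = 0; i < len; i++) {
    h ^= bytes[i];
    h *= 1099511628211ULL; // FNV 64-bit prime
  }
  return h;
}

// The running signature starts from the FNV-1a 64-bit offset basis.
static const uint64_t kFNVOffsetBasis = 14695981039346656037ULL;

static uint64_t mixInt(uint64_t h, int32_t value) {
  return fnv1a_u64(h, &value, sizeof(value));
}

// Usage sketch: fold a view's rounded frame into the layout signature.
static uint64_t mixFrame(uint64_t h, CGRect frame) {
  h = mixInt(h, (int32_t)lrint(frame.origin.x));
  h = mixInt(h, (int32_t)lrint(frame.origin.y));
  h = mixInt(h, (int32_t)lrint(frame.size.width));
  h = mixInt(h, (int32_t)lrint(frame.size.height));
  return h;
}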
@@ -331,7 +336,8 @@ static inline uint64_t fnv1a_u64(uint64_t h, const void *data, size_t len) {
   BOOL needsPrivacyFallback =
       (self.config.detectTextInputs &&
        self.mutableTextInputFrames.count == 0) ||
-      (self.config.detectCameraViews && self.mutableCameraFrames.count == 0) ||
+      (self.config.detectCameraViews &&
+       self.mutableCameraFrames.count == 0) ||
       (self.config.detectWebViews && self.mutableWebViewFrames.count == 0) ||
       (self.config.detectVideoLayers && self.mutableVideoFrames.count == 0);
   if (needsPrivacyFallback && (hitViewLimit || self.didBailOutEarly)) {
@@ -358,9 +364,7 @@ static inline uint64_t fnv1a_u64(uint64_t h, const void *data, size_t len) {
   result.hasAnyAnimations = self.scanHasAnimations;
   CGFloat screenArea = window.bounds.size.width * window.bounds.size.height;
   result.animationAreaRatio =
-      (screenArea > 0)
-          ? MIN(self.scanAnimatedArea / screenArea, 1.0)
-          : 0.0;
+      (screenArea > 0) ? MIN(self.scanAnimatedArea / screenArea, 1.0) : 0.0;
   result.didBailOutEarly = self.didBailOutEarly;

   if (self.layoutSignatureHash != 14695981039346656037ULL) {
@@ -498,9 +502,9 @@ static inline uint64_t fnv1a_u64(uint64_t h, const void *data, size_t len) {
        self.mutableTextInputFrames.count == 0) ||
       (self.config.detectCameraViews &&
        self.mutableCameraFrames.count == 0) ||
-      (self.config.detectWebViews &&
-          self.mutableWebViewFrames.count == 0) ||
-      (self.config.detectVideoLayers && self.mutableVideoFrames.count == 0);
+      (self.config.detectWebViews &&
+       self.mutableWebViewFrames.count == 0) ||
+      (self.config.detectVideoLayers && self.mutableVideoFrames.count == 0);
   if (needsPrivacyFallback && hitViewLimit) {
     [self scanSensitiveViewsOnlyInWindow:window];
   }
@@ -511,7 +515,8 @@ static inline uint64_t fnv1a_u64(uint64_t h, const void *data, size_t len) {
   BOOL needsPrivacyFallback =
       (self.config.detectTextInputs &&
        self.mutableTextInputFrames.count == 0) ||
-      (self.config.detectCameraViews && self.mutableCameraFrames.count == 0) ||
+      (self.config.detectCameraViews &&
+       self.mutableCameraFrames.count == 0) ||
       (self.config.detectWebViews && self.mutableWebViewFrames.count == 0) ||
       (self.config.detectVideoLayers && self.mutableVideoFrames.count == 0);
   if (needsPrivacyFallback && self.didBailOutEarly) {
@@ -542,9 +547,7 @@ static inline uint64_t fnv1a_u64(uint64_t h, const void *data, size_t len) {
   CGFloat screenArea =
       primaryWindow.bounds.size.width * primaryWindow.bounds.size.height;
   result.animationAreaRatio =
-      (screenArea > 0)
-          ? MIN(self.scanAnimatedArea / screenArea, 1.0)
-          : 0.0;
+      (screenArea > 0) ? MIN(self.scanAnimatedArea / screenArea, 1.0) : 0.0;
   result.didBailOutEarly = self.didBailOutEarly;

   if (self.layoutSignatureHash != 14695981039346656037ULL) {
@@ -679,8 +682,10 @@ static inline uint64_t fnv1a_u64(uint64_t h, const void *data, size_t len) {

   @try {
     BOOL isWebView = self.config.detectWebViews && [self isWebView:view];
-    BOOL isCamera = self.config.detectCameraViews && [self isCameraPreview:view];
-    BOOL isVideo = self.config.detectVideoLayers && [self isVideoLayerView:view];
+    BOOL isCamera =
+        self.config.detectCameraViews && [self isCameraPreview:view];
+    BOOL isVideo =
+        self.config.detectVideoLayers && [self isVideoLayerView:view];
     BOOL isBlockedSurface = isWebView || isCamera || isVideo;

     [self checkSensitiveView:view inWindow:window];
@@ -762,10 +767,11 @@ static inline uint64_t fnv1a_u64(uint64_t h, const void *data, size_t len) {
     UIEdgeInsets inset = ((UIScrollView *)view).contentInset;
     [self mixInt:(int32_t)lrintf(isfinite(inset.top) ? inset.top * 100 : 0)];
     [self mixInt:(int32_t)lrintf(isfinite(inset.bottom) ? inset.bottom * 100
-                                                        : 0)];
-    [self mixInt:(int32_t)lrintf(isfinite(inset.left) ? inset.left * 100 : 0)];
+                                                         : 0)];
+    [self
+        mixInt:(int32_t)lrintf(isfinite(inset.left) ? inset.left * 100 : 0)];
     [self mixInt:(int32_t)lrintf(isfinite(inset.right) ? inset.right * 100
-                                                       : 0)];
+                                                        : 0)];
   }

   // 5. Mix Text Content (avoid input content; use length only)
@@ -840,7 +846,8 @@ static inline uint64_t fnv1a_u64(uint64_t h, const void *data, size_t len) {
     }
   }

-- (void)appendBlockedSurfaceInfoToSignature:(UIView *)view depth:(NSInteger)depth {
+- (void)appendBlockedSurfaceInfoToSignature:(UIView *)view
+                                      depth:(NSInteger)depth {
   if (!view) {
     return;
   }
@@ -998,18 +1005,18 @@ static inline uint64_t fnv1a_u64(uint64_t h, const void *data, size_t len) {
   }

 #ifdef DEBUG
-  RJLogDebug(@"ViewHierarchyScanner: Found %@ at (%.0f,%.0f,%.0f,%.0f) - "
-             @"view.window=%@ targetWindow=%@",
-             isTextInput
-                 ? @"TextInput"
-                 : (isCamera
-                        ? @"Camera"
-                        : (isWebView
-                               ? @"WebView"
-                               : (isVideo ? @"Video" : @"MaskedView"))),
-             sanitizedFrame.origin.x, sanitizedFrame.origin.y,
-             sanitizedFrame.size.width, sanitizedFrame.size.height,
-             NSStringFromClass([view.window class]), NSStringFromClass([targetWindow class]));
+    RJLogDebug(
+        @"ViewHierarchyScanner: Found %@ at (%.0f,%.0f,%.0f,%.0f) - "
+        @"view.window=%@ targetWindow=%@",
+        isTextInput
+            ? @"TextInput"
+            : (isCamera ? @"Camera"
+                        : (isWebView ? @"WebView"
+                                     : (isVideo ? @"Video" : @"MaskedView"))),
+        sanitizedFrame.origin.x, sanitizedFrame.origin.y,
+        sanitizedFrame.size.width, sanitizedFrame.size.height,
+        NSStringFromClass([view.window class]),
+        NSStringFromClass([targetWindow class]));
 #endif
   }
 }
@@ -1212,7 +1219,7 @@ static inline uint64_t fnv1a_u64(uint64_t h, const void *data, size_t len) {

   @try {
     if ([view respondsToSelector:@selector(isLoading)]) {
-      BOOL (*loadingMsg)(id, SEL) = (BOOL (*)(id, SEL))objc_msgSend;
+      BOOL (*loadingMsg)(id, SEL) = (BOOL(*)(id, SEL))objc_msgSend;
       return loadingMsg(view, @selector(isLoading));
     }
     id loadingValue = [view valueForKey:@"loading"];
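The reformatted line shows how the scanner asks an arbitrary view whether it is loading without linking its class: check respondsToSelector:, then call through objc_msgSend cast to the matching function type so the BOOL return is read correctly. A standalone sketch of that pattern:

#import <Foundation/Foundation.h>
#import <objc/message.h>

// Sketch: read a BOOL property from an object of an unknown class without a
// compile-time dependency on it. objc_msgSend must be cast to the exact
// function signature before it is called directly.
static BOOL RJIsLoading(id view) {
  if ([view respondsToSelector:@selector(isLoading)]) {
    BOOL (*loadingMsg)(id, SEL) = (BOOL (*)(id, SEL))objc_msgSend;
    return loadingMsg(view, @selector(isLoading));
  }
  return NO;
}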
@@ -1314,8 +1321,9 @@ static inline uint64_t fnv1a_u64(uint64_t h, const void *data, size_t len) {

   BOOL tracking = scrollView.isTracking || scrollView.isDragging ||
                   scrollView.isDecelerating;
-  BOOL offsetMoved =
-      (fabs(offset.x - state.contentOffset.x) > kRJScrollEpsilon || fabs(offset.y - state.contentOffset.y) > kRJScrollEpsilon);
+  BOOL offsetMoved =
+      (fabs(offset.x - state.contentOffset.x) > kRJScrollEpsilon ||
+       fabs(offset.y - state.contentOffset.y) > kRJScrollEpsilon);
   BOOL zoomMoved = fabs(zoomScale - state.zoomScale) > kRJZoomEpsilon;
   if (tracking || offsetMoved || zoomMoved) {
     self.scanScrollActive = YES;
@@ -1331,8 +1339,8 @@ static inline uint64_t fnv1a_u64(uint64_t h, const void *data, size_t len) {
   }

   if ([self isRefreshActiveForScrollView:scrollView
-                                   offset:offset
-                                    inset:inset]) {
+                                  offset:offset
+                                   inset:inset]) {
     self.scanRefreshActive = YES;
   }

@@ -1351,16 +1359,16 @@ static inline uint64_t fnv1a_u64(uint64_t h, const void *data, size_t len) {
   }
   CGFloat topLimit = -inset.top - kRJScrollEpsilon;
   CGFloat bottomLimit = scrollView.contentSize.height -
-                        scrollView.bounds.size.height +
-                        inset.bottom + kRJScrollEpsilon;
+                        scrollView.bounds.size.height + inset.bottom +
+                        kRJScrollEpsilon;
   if (offset.y < topLimit || offset.y > bottomLimit) {
     return YES;
   }

   CGFloat leftLimit = -inset.left - kRJScrollEpsilon;
   CGFloat rightLimit = scrollView.contentSize.width -
-                       scrollView.bounds.size.width +
-                       inset.right + kRJScrollEpsilon;
+                       scrollView.bounds.size.width + inset.right +
+                       kRJScrollEpsilon;
   return (offset.x < leftLimit || offset.x > rightLimit);
 }

@@ -1376,8 +1384,8 @@ static inline uint64_t fnv1a_u64(uint64_t h, const void *data, size_t len) {
     return YES;
   }

-  CGFloat triggerOffset =
-       -scrollView.adjustedContentInset.top - kRJScrollEpsilon;
+  CGFloat triggerOffset =
+      -scrollView.adjustedContentInset.top - kRJScrollEpsilon;
   if (offset.y < triggerOffset) {
     return YES;
   }
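These wrapping-only changes sit in the scroll heuristics, whose logic is worth spelling out: a scroll view is rubber-banding when its content offset falls outside the range allowed by contentSize and contentInset, and a pull-to-refresh is likely engaged once the offset is pulled above the adjusted top inset. A sketch with an assumed epsilon in place of the package's kRJScrollEpsilon:

#import <UIKit/UIKit.h>

static const CGFloat kEpsilon = 0.5; // assumed value, stands in for kRJScrollEpsilon

// Sketch: is the scroll view over-scrolled (rubber-banding) on either axis?
static BOOL RJIsOverscrolled(UIScrollView *scrollView) {
  CGPoint offset = scrollView.contentOffset;
  UIEdgeInsets inset = scrollView.contentInset;

  CGFloat topLimit = -inset.top - kEpsilon;
  CGFloat bottomLimit = scrollView.contentSize.height -
                        scrollView.bounds.size.height + inset.bottom + kEpsilon;
  if (offset.y < topLimit || offset.y > bottomLimit) {
    return YES;
  }

  CGFloat leftLimit = -inset.left - kEpsilon;
  CGFloat rightLimit = scrollView.contentSize.width -
                       scrollView.bounds.size.width + inset.right + kEpsilon;
  return (offset.x < leftLimit || offset.x > rightLimit);
}

// Sketch: a refresh control is probably engaged once the offset is pulled
// past the adjusted top inset.
static BOOL RJLooksLikeRefresh(UIScrollView *scrollView) {
  CGFloat triggerOffset = -scrollView.adjustedContentInset.top - kEpsilon;
  return scrollView.contentOffset.y < triggerOffset;
}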
@@ -1448,14 +1456,23 @@ static inline uint64_t fnv1a_u64(uint64_t h, const void *data, size_t len) {
     NSNumber *heading = [view valueForKeyPath:@"camera.heading"];
     NSNumber *pitch = [view valueForKeyPath:@"camera.pitch"];

+    // Mapbox uses zoomLevel instead of span
+    NSNumber *zoomLevel = nil;
+    @try {
+      zoomLevel = [view valueForKey:@"zoomLevel"];
+    } @catch (NSException *e) {
+    }
+
     double altitudeValue = altitude ? altitude.doubleValue : 0;
     double headingValue = heading ? heading.doubleValue : 0;
     double pitchValue = pitch ? pitch.doubleValue : 0;
+    double zoomValue = zoomLevel ? zoomLevel.doubleValue : 0;

-    return [NSString
-        stringWithFormat:@"%.5f:%.5f:%.5f:%.5f:%.1f:%.1f:%.1f",
-                         center.latitude, center.longitude, span.latitudeDelta,
-                         span.longitudeDelta, altitudeValue, headingValue, pitchValue];
+    return [NSString
+        stringWithFormat:@"%.5f:%.5f:%.5f:%.5f:%.1f:%.1f:%.1f:%.2f",
+                         center.latitude, center.longitude, span.latitudeDelta,
+                         span.longitudeDelta, altitudeValue, headingValue,
+                         pitchValue, zoomValue];
   } @catch (NSException *exception) {
     return @"";
   }
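The added lines probe Mapbox-style views for a zoomLevel through key-value coding inside @try, so SDKs that do not define the key contribute a zero instead of raising NSUndefinedKeyException. A compact sketch of that defensive read, with an illustrative helper name:

#import <Foundation/Foundation.h>

// Sketch: read an optional numeric key from a third-party view via KVC,
// swallowing NSUndefinedKeyException when the class doesn't define it.
static double RJOptionalDouble(id view, NSString *key) {
  @try {
    id value = [view valueForKey:key];
    return [value isKindOfClass:[NSNumber class]] ? [value doubleValue] : 0;
  } @catch (NSException *e) {
    return 0;
  }
}

// Usage sketch: fold the value into a camera signature string.
// double zoom = RJOptionalDouble(mapView, @"zoomLevel");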
package/ios/Core/RJLifecycleManager.m (+0 -14)

@@ -121,7 +121,6 @@
                  name:UIApplicationDidBecomeActiveNotification
                object:nil];

-  // Text change notifications
   [center addObserver:self
              selector:@selector(textDidChange:)
                  name:UITextFieldTextDidChangeNotification
@@ -220,8 +219,6 @@
   RJLogInfo(@"[RJ-LIFECYCLE] appDidEnterBackground (isRecording=%@)",
             self.isRecording ? @"YES" : @"NO");

-  // Always track background entry time, even if not recording
-  // This allows us to detect timeout when session was ended while in background
   self.inBackground = YES;
   self.backgroundEntryTime = [[NSDate date] timeIntervalSince1970];

@@ -257,13 +254,10 @@
     RJLogInfo(@"[RJ-LIFECYCLE] appDidBecomeActive - was NOT in background");
   }

-  // Reset background tracking state
   self.inBackground = NO;
   self.backgroundEntryTime = 0;

-  // Handle the case where we weren't recording (session already ended)
   if (!self.isRecording) {
-    // If we were in background long enough, signal that a new session should start
     if (wasInBackground && backgroundDurationSec >= self.backgroundTimeoutThreshold) {
       RJLogInfo(@"[RJ-LIFECYCLE] Was not recording, background >= %.0fs - signaling for new session start",
                 self.backgroundTimeoutThreshold);
@@ -274,14 +268,10 @@
     return;
   }

-  // We ARE recording - handle background return
   if (wasInBackground) {
     NSTimeInterval bgDurationMs = backgroundDurationSec * 1000;

     if (backgroundDurationSec >= self.backgroundTimeoutThreshold) {
-      // TIMEOUT CASE: End old session, start new one
-      // Add this background duration to accumulated time BEFORE signaling timeout
-      // so the old session gets the correct total background time
       self.accumulatedBackgroundTimeMs += bgDurationMs;
       RJLogInfo(@"[RJ-LIFECYCLE] TIMEOUT: Added %.0fms, total background=%.0fms - signaling session restart",
                 bgDurationMs, self.accumulatedBackgroundTimeMs);
@@ -289,17 +279,13 @@
       if ([self.delegate respondsToSelector:@selector(lifecycleManagerSessionDidTimeout:)]) {
         [self.delegate lifecycleManagerSessionDidTimeout:backgroundDurationSec];
       }
-      // Note: The delegate's handleSessionTimeout will read totalBackgroundTimeMs
-      // and then call resetBackgroundTime for the new session
     } else {
-      // SHORT BACKGROUND: Just accumulate and resume
       self.accumulatedBackgroundTimeMs += bgDurationMs;
       RJLogInfo(@"[RJ-LIFECYCLE] Short background: Added %.0fms, total=%.0fms - resuming session",
                 bgDurationMs, self.accumulatedBackgroundTimeMs);
     }
   }

-  // Call didBecomeActive for normal resume handling (video capture, etc.)
   if ([self.delegate respondsToSelector:@selector(lifecycleManagerDidBecomeActive)]) {
     [self.delegate lifecycleManagerDidBecomeActive];
   }
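The comments removed from the lifecycle manager spell out the background accounting: each return to the foreground adds the elapsed background time to a running total, and if a single stretch reaches the timeout threshold the old session is ended, carrying the full total, before a new one starts. A minimal sketch of that bookkeeping with assumed names, not the package's exact API:

#import <Foundation/Foundation.h>

// Sketch of the foreground-return bookkeeping described by the removed comments.
@interface RJBackgroundClock : NSObject
@property(nonatomic) NSTimeInterval backgroundEntryTime;        // seconds since 1970
@property(nonatomic) NSTimeInterval accumulatedBackgroundTimeMs;
@property(nonatomic) NSTimeInterval timeoutThreshold;           // seconds
@end

@implementation RJBackgroundClock

- (void)didEnterBackground {
  self.backgroundEntryTime = [[NSDate date] timeIntervalSince1970];
}

// Returns YES when the session should be restarted (the background stretch
// exceeded the timeout). Either way, the elapsed time is added to the running
// total first, so an ended session reports the correct total background time.
- (BOOL)didBecomeActive {
  NSTimeInterval durationSec =
      [[NSDate date] timeIntervalSince1970] - self.backgroundEntryTime;
  self.accumulatedBackgroundTimeMs += durationSec * 1000.0;
  self.backgroundEntryTime = 0;
  return durationSec >= self.timeoutThreshold;
}

@end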