@trustchex/react-native-sdk 1.355.1 → 1.357.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -9
- package/TrustchexSDK.podspec +5 -4
- package/android/build.gradle +6 -4
- package/android/src/main/AndroidManifest.xml +1 -1
- package/android/src/main/java/com/trustchex/reactnativesdk/TrustchexSDKPackage.kt +45 -25
- package/android/src/main/java/com/trustchex/reactnativesdk/camera/TrustchexCameraManager.kt +168 -0
- package/android/src/main/java/com/trustchex/reactnativesdk/camera/TrustchexCameraView.kt +871 -0
- package/android/src/main/java/com/trustchex/reactnativesdk/mlkit/MLKitModule.kt +245 -0
- package/android/src/main/java/com/trustchex/reactnativesdk/mrz/MRZValidationModule.kt +785 -0
- package/android/src/main/java/com/trustchex/reactnativesdk/mrz/MRZValidator.kt +419 -0
- package/android/src/main/java/com/trustchex/reactnativesdk/opencv/OpenCVModule.kt +818 -0
- package/ios/Camera/TrustchexCameraManager.m +37 -0
- package/ios/Camera/TrustchexCameraManager.swift +125 -0
- package/ios/Camera/TrustchexCameraView.swift +1176 -0
- package/ios/MLKit/MLKitModule.m +23 -0
- package/ios/MLKit/MLKitModule.swift +250 -0
- package/ios/MRZValidation.m +39 -0
- package/ios/MRZValidation.swift +802 -0
- package/ios/MRZValidator.swift +466 -0
- package/ios/OpenCV/OpenCVModule.h +4 -0
- package/ios/OpenCV/OpenCVModule.mm +810 -0
- package/lib/module/Screens/Dynamic/IdentityDocumentEIDScanningScreen.js +2 -3
- package/lib/module/Screens/Dynamic/IdentityDocumentScanningScreen.js +1 -2
- package/lib/module/Screens/Dynamic/LivenessDetectionScreen.js +418 -193
- package/lib/module/Screens/Static/OTPVerificationScreen.js +11 -11
- package/lib/module/Screens/Static/QrCodeScanningScreen.js +5 -1
- package/lib/module/Screens/Static/ResultScreen.js +25 -2
- package/lib/module/Screens/Static/VerificationSessionCheckScreen.js +25 -7
- package/lib/module/Shared/Components/DebugNavigationPanel.js +234 -24
- package/lib/module/Shared/Components/EIDScanner.js +99 -9
- package/lib/module/Shared/Components/FaceCamera.js +170 -179
- package/lib/module/Shared/Components/IdentityDocumentCamera.js +2151 -771
- package/lib/module/Shared/Components/QrCodeScannerCamera.js +109 -107
- package/lib/module/Shared/Components/TrustchexCamera.js +122 -0
- package/lib/module/Shared/EIDReader/tlv/tlv.helpers.js +91 -0
- package/lib/module/Shared/EIDReader/tlv/tlv.utils.js +2 -124
- package/lib/module/Shared/EIDReader/tlv/tlvInputStream.js +4 -4
- package/lib/module/Shared/EIDReader/tlv/tlvOutputState.js +4 -4
- package/lib/module/Shared/EIDReader/tlv/tlvOutputStream.js +4 -4
- package/lib/module/Shared/Libs/analytics.utils.js +2 -2
- package/lib/module/Shared/Libs/debug.utils.js +132 -0
- package/lib/module/Shared/Libs/deeplink.utils.js +6 -5
- package/lib/module/Shared/Libs/demo.utils.js +13 -3
- package/lib/module/Shared/Libs/mrz.utils.js +1 -175
- package/lib/module/Shared/Libs/native-device-info.utils.js +12 -6
- package/lib/module/Shared/Libs/tts.utils.js +40 -6
- package/lib/module/Shared/Services/AnalyticsService.js +9 -8
- package/lib/module/Shared/Types/mrzFields.js +1 -0
- package/lib/module/Translation/Resources/en.js +87 -88
- package/lib/module/Translation/Resources/tr.js +84 -85
- package/lib/module/Trustchex.js +9 -2
- package/lib/module/index.js +1 -0
- package/lib/module/version.js +1 -1
- package/lib/typescript/src/Screens/Dynamic/IdentityDocumentEIDScanningScreen.d.ts.map +1 -1
- package/lib/typescript/src/Screens/Dynamic/IdentityDocumentScanningScreen.d.ts.map +1 -1
- package/lib/typescript/src/Screens/Dynamic/LivenessDetectionScreen.d.ts.map +1 -1
- package/lib/typescript/src/Screens/Static/OTPVerificationScreen.d.ts.map +1 -1
- package/lib/typescript/src/Screens/Static/QrCodeScanningScreen.d.ts.map +1 -1
- package/lib/typescript/src/Screens/Static/ResultScreen.d.ts.map +1 -1
- package/lib/typescript/src/Screens/Static/VerificationSessionCheckScreen.d.ts.map +1 -1
- package/lib/typescript/src/Shared/Components/DebugNavigationPanel.d.ts.map +1 -1
- package/lib/typescript/src/Shared/Components/EIDScanner.d.ts +2 -2
- package/lib/typescript/src/Shared/Components/EIDScanner.d.ts.map +1 -1
- package/lib/typescript/src/Shared/Components/FaceCamera.d.ts +18 -4
- package/lib/typescript/src/Shared/Components/FaceCamera.d.ts.map +1 -1
- package/lib/typescript/src/Shared/Components/IdentityDocumentCamera.d.ts +3 -4
- package/lib/typescript/src/Shared/Components/IdentityDocumentCamera.d.ts.map +1 -1
- package/lib/typescript/src/Shared/Components/QrCodeScannerCamera.d.ts +2 -1
- package/lib/typescript/src/Shared/Components/QrCodeScannerCamera.d.ts.map +1 -1
- package/lib/typescript/src/Shared/Components/TrustchexCamera.d.ts +124 -0
- package/lib/typescript/src/Shared/Components/TrustchexCamera.d.ts.map +1 -0
- package/lib/typescript/src/Shared/EIDReader/tlv/tlv.helpers.d.ts +11 -0
- package/lib/typescript/src/Shared/EIDReader/tlv/tlv.helpers.d.ts.map +1 -0
- package/lib/typescript/src/Shared/EIDReader/tlv/tlv.utils.d.ts +2 -39
- package/lib/typescript/src/Shared/EIDReader/tlv/tlv.utils.d.ts.map +1 -1
- package/lib/typescript/src/Shared/Libs/analytics.utils.d.ts.map +1 -1
- package/lib/typescript/src/Shared/Libs/debug.utils.d.ts +42 -0
- package/lib/typescript/src/Shared/Libs/debug.utils.d.ts.map +1 -0
- package/lib/typescript/src/Shared/Libs/deeplink.utils.d.ts.map +1 -1
- package/lib/typescript/src/Shared/Libs/demo.utils.d.ts.map +1 -1
- package/lib/typescript/src/Shared/Libs/http-client.d.ts.map +1 -1
- package/lib/typescript/src/Shared/Libs/mrz.utils.d.ts +0 -4
- package/lib/typescript/src/Shared/Libs/mrz.utils.d.ts.map +1 -1
- package/lib/typescript/src/Shared/Libs/native-device-info.utils.d.ts.map +1 -1
- package/lib/typescript/src/Shared/Libs/tts.utils.d.ts +4 -3
- package/lib/typescript/src/Shared/Libs/tts.utils.d.ts.map +1 -1
- package/lib/typescript/src/Shared/Services/AnalyticsService.d.ts.map +1 -1
- package/lib/typescript/src/Shared/Types/identificationInfo.d.ts +2 -2
- package/lib/typescript/src/Shared/Types/identificationInfo.d.ts.map +1 -1
- package/lib/typescript/src/Shared/Types/mrzFields.d.ts +11 -0
- package/lib/typescript/src/Shared/Types/mrzFields.d.ts.map +1 -0
- package/lib/typescript/src/Translation/Resources/en.d.ts +4 -5
- package/lib/typescript/src/Translation/Resources/en.d.ts.map +1 -1
- package/lib/typescript/src/Translation/Resources/tr.d.ts +4 -5
- package/lib/typescript/src/Translation/Resources/tr.d.ts.map +1 -1
- package/lib/typescript/src/Trustchex.d.ts +2 -0
- package/lib/typescript/src/Trustchex.d.ts.map +1 -1
- package/lib/typescript/src/index.d.ts +1 -0
- package/lib/typescript/src/index.d.ts.map +1 -1
- package/lib/typescript/src/version.d.ts +1 -1
- package/package.json +4 -35
- package/src/Screens/Dynamic/ContractAcceptanceScreen.tsx +1 -1
- package/src/Screens/Dynamic/IdentityDocumentEIDScanningScreen.tsx +7 -5
- package/src/Screens/Dynamic/IdentityDocumentScanningScreen.tsx +2 -3
- package/src/Screens/Dynamic/LivenessDetectionScreen.tsx +498 -216
- package/src/Screens/Static/OTPVerificationScreen.tsx +37 -31
- package/src/Screens/Static/QrCodeScanningScreen.tsx +8 -1
- package/src/Screens/Static/ResultScreen.tsx +136 -88
- package/src/Screens/Static/VerificationSessionCheckScreen.tsx +46 -13
- package/src/Shared/Components/DebugNavigationPanel.tsx +290 -34
- package/src/Shared/Components/EIDScanner.tsx +94 -16
- package/src/Shared/Components/FaceCamera.tsx +236 -203
- package/src/Shared/Components/IdentityDocumentCamera.tsx +3073 -1030
- package/src/Shared/Components/QrCodeScannerCamera.tsx +133 -127
- package/src/Shared/Components/TrustchexCamera.tsx +289 -0
- package/src/Shared/Config/camera-enhancement.config.ts +2 -2
- package/src/Shared/EIDReader/tlv/tlv.helpers.ts +96 -0
- package/src/Shared/EIDReader/tlv/tlv.utils.ts +2 -125
- package/src/Shared/EIDReader/tlv/tlvInputStream.ts +4 -4
- package/src/Shared/EIDReader/tlv/tlvOutputState.ts +4 -4
- package/src/Shared/EIDReader/tlv/tlvOutputStream.ts +4 -4
- package/src/Shared/Libs/analytics.utils.ts +48 -20
- package/src/Shared/Libs/debug.utils.ts +149 -0
- package/src/Shared/Libs/deeplink.utils.ts +7 -5
- package/src/Shared/Libs/demo.utils.ts +4 -0
- package/src/Shared/Libs/http-client.ts +12 -8
- package/src/Shared/Libs/mrz.utils.ts +1 -163
- package/src/Shared/Libs/native-device-info.utils.ts +12 -6
- package/src/Shared/Libs/tts.utils.ts +48 -6
- package/src/Shared/Services/AnalyticsService.ts +69 -24
- package/src/Shared/Types/identificationInfo.ts +2 -2
- package/src/Shared/Types/mrzFields.ts +29 -0
- package/src/Translation/Resources/en.ts +90 -100
- package/src/Translation/Resources/tr.ts +89 -97
- package/src/Translation/index.ts +1 -1
- package/src/Trustchex.tsx +21 -4
- package/src/index.tsx +14 -0
- package/src/version.ts +1 -1
- package/android/src/main/java/com/trustchex/reactnativesdk/visioncameraplugins/barcodescanner/BarcodeScannerFrameProcessorPlugin.kt +0 -301
- package/android/src/main/java/com/trustchex/reactnativesdk/visioncameraplugins/cropper/BitmapUtils.kt +0 -205
- package/android/src/main/java/com/trustchex/reactnativesdk/visioncameraplugins/cropper/CropperPlugin.kt +0 -72
- package/android/src/main/java/com/trustchex/reactnativesdk/visioncameraplugins/cropper/FrameMetadata.kt +0 -4
- package/android/src/main/java/com/trustchex/reactnativesdk/visioncameraplugins/facedetector/FaceDetectorFrameProcessorPlugin.kt +0 -303
- package/android/src/main/java/com/trustchex/reactnativesdk/visioncameraplugins/textrecognition/TextRecognitionFrameProcessorPlugin.kt +0 -115
- package/ios/VisionCameraPlugins/BarcodeScanner/BarcodeScannerFrameProcessorPlugin-Bridging-Header.h +0 -9
- package/ios/VisionCameraPlugins/BarcodeScanner/BarcodeScannerFrameProcessorPlugin.mm +0 -22
- package/ios/VisionCameraPlugins/BarcodeScanner/BarcodeScannerFrameProcessorPlugin.swift +0 -188
- package/ios/VisionCameraPlugins/Cropper/Cropper-Bridging-Header.h +0 -13
- package/ios/VisionCameraPlugins/Cropper/Cropper.h +0 -20
- package/ios/VisionCameraPlugins/Cropper/Cropper.mm +0 -22
- package/ios/VisionCameraPlugins/Cropper/Cropper.swift +0 -145
- package/ios/VisionCameraPlugins/Cropper/CropperUtils.swift +0 -49
- package/ios/VisionCameraPlugins/FaceDetector/FaceDetectorFrameProcessorPlugin-Bridging-Header.h +0 -4
- package/ios/VisionCameraPlugins/FaceDetector/FaceDetectorFrameProcessorPlugin.mm +0 -22
- package/ios/VisionCameraPlugins/FaceDetector/FaceDetectorFrameProcessorPlugin.swift +0 -320
- package/ios/VisionCameraPlugins/TextRecognition/TextRecognitionFrameProcessorPlugin-Bridging-Header.h +0 -4
- package/ios/VisionCameraPlugins/TextRecognition/TextRecognitionFrameProcessorPlugin.mm +0 -27
- package/ios/VisionCameraPlugins/TextRecognition/TextRecognitionFrameProcessorPlugin.swift +0 -144
- package/lib/module/Shared/Libs/camera.utils.js +0 -308
- package/lib/module/Shared/Libs/frame-enhancement.utils.js +0 -133
- package/lib/module/Shared/Libs/opencv.utils.js +0 -21
- package/lib/module/Shared/Libs/worklet.utils.js +0 -68
- package/lib/module/Shared/VisionCameraPlugins/BarcodeScanner/hooks/useBarcodeScanner.js +0 -46
- package/lib/module/Shared/VisionCameraPlugins/BarcodeScanner/hooks/useCameraPermissions.js +0 -35
- package/lib/module/Shared/VisionCameraPlugins/BarcodeScanner/index.js +0 -19
- package/lib/module/Shared/VisionCameraPlugins/BarcodeScanner/scanCodes.js +0 -26
- package/lib/module/Shared/VisionCameraPlugins/BarcodeScanner/types.js +0 -3
- package/lib/module/Shared/VisionCameraPlugins/BarcodeScanner/utils/convert.js +0 -197
- package/lib/module/Shared/VisionCameraPlugins/BarcodeScanner/utils/geometry.js +0 -101
- package/lib/module/Shared/VisionCameraPlugins/BarcodeScanner/utils/highlights.js +0 -60
- package/lib/module/Shared/VisionCameraPlugins/Cropper/index.js +0 -47
- package/lib/module/Shared/VisionCameraPlugins/FaceDetector/Camera.js +0 -42
- package/lib/module/Shared/VisionCameraPlugins/FaceDetector/detectFaces.js +0 -35
- package/lib/module/Shared/VisionCameraPlugins/FaceDetector/index.js +0 -4
- package/lib/module/Shared/VisionCameraPlugins/FaceDetector/types.js +0 -3
- package/lib/module/Shared/VisionCameraPlugins/TextRecognition/Camera.js +0 -56
- package/lib/module/Shared/VisionCameraPlugins/TextRecognition/PhotoRecognizer.js +0 -20
- package/lib/module/Shared/VisionCameraPlugins/TextRecognition/RemoveLanguageModel.js +0 -9
- package/lib/module/Shared/VisionCameraPlugins/TextRecognition/index.js +0 -6
- package/lib/module/Shared/VisionCameraPlugins/TextRecognition/scanText.js +0 -20
- package/lib/module/Shared/VisionCameraPlugins/TextRecognition/translateText.js +0 -19
- package/lib/module/Shared/VisionCameraPlugins/TextRecognition/types.js +0 -3
- package/lib/typescript/src/Shared/Libs/camera.utils.d.ts +0 -87
- package/lib/typescript/src/Shared/Libs/camera.utils.d.ts.map +0 -1
- package/lib/typescript/src/Shared/Libs/frame-enhancement.utils.d.ts +0 -25
- package/lib/typescript/src/Shared/Libs/frame-enhancement.utils.d.ts.map +0 -1
- package/lib/typescript/src/Shared/Libs/opencv.utils.d.ts +0 -3
- package/lib/typescript/src/Shared/Libs/opencv.utils.d.ts.map +0 -1
- package/lib/typescript/src/Shared/Libs/worklet.utils.d.ts +0 -9
- package/lib/typescript/src/Shared/Libs/worklet.utils.d.ts.map +0 -1
- package/lib/typescript/src/Shared/VisionCameraPlugins/BarcodeScanner/hooks/useBarcodeScanner.d.ts +0 -13
- package/lib/typescript/src/Shared/VisionCameraPlugins/BarcodeScanner/hooks/useBarcodeScanner.d.ts.map +0 -1
- package/lib/typescript/src/Shared/VisionCameraPlugins/BarcodeScanner/hooks/useCameraPermissions.d.ts +0 -6
- package/lib/typescript/src/Shared/VisionCameraPlugins/BarcodeScanner/hooks/useCameraPermissions.d.ts.map +0 -1
- package/lib/typescript/src/Shared/VisionCameraPlugins/BarcodeScanner/index.d.ts +0 -12
- package/lib/typescript/src/Shared/VisionCameraPlugins/BarcodeScanner/index.d.ts.map +0 -1
- package/lib/typescript/src/Shared/VisionCameraPlugins/BarcodeScanner/scanCodes.d.ts +0 -3
- package/lib/typescript/src/Shared/VisionCameraPlugins/BarcodeScanner/scanCodes.d.ts.map +0 -1
- package/lib/typescript/src/Shared/VisionCameraPlugins/BarcodeScanner/types.d.ts +0 -52
- package/lib/typescript/src/Shared/VisionCameraPlugins/BarcodeScanner/types.d.ts.map +0 -1
- package/lib/typescript/src/Shared/VisionCameraPlugins/BarcodeScanner/utils/convert.d.ts +0 -62
- package/lib/typescript/src/Shared/VisionCameraPlugins/BarcodeScanner/utils/convert.d.ts.map +0 -1
- package/lib/typescript/src/Shared/VisionCameraPlugins/BarcodeScanner/utils/geometry.d.ts +0 -34
- package/lib/typescript/src/Shared/VisionCameraPlugins/BarcodeScanner/utils/geometry.d.ts.map +0 -1
- package/lib/typescript/src/Shared/VisionCameraPlugins/BarcodeScanner/utils/highlights.d.ts +0 -32
- package/lib/typescript/src/Shared/VisionCameraPlugins/BarcodeScanner/utils/highlights.d.ts.map +0 -1
- package/lib/typescript/src/Shared/VisionCameraPlugins/Cropper/index.d.ts +0 -23
- package/lib/typescript/src/Shared/VisionCameraPlugins/Cropper/index.d.ts.map +0 -1
- package/lib/typescript/src/Shared/VisionCameraPlugins/FaceDetector/Camera.d.ts +0 -9
- package/lib/typescript/src/Shared/VisionCameraPlugins/FaceDetector/Camera.d.ts.map +0 -1
- package/lib/typescript/src/Shared/VisionCameraPlugins/FaceDetector/detectFaces.d.ts +0 -3
- package/lib/typescript/src/Shared/VisionCameraPlugins/FaceDetector/detectFaces.d.ts.map +0 -1
- package/lib/typescript/src/Shared/VisionCameraPlugins/FaceDetector/index.d.ts +0 -3
- package/lib/typescript/src/Shared/VisionCameraPlugins/FaceDetector/index.d.ts.map +0 -1
- package/lib/typescript/src/Shared/VisionCameraPlugins/FaceDetector/types.d.ts +0 -79
- package/lib/typescript/src/Shared/VisionCameraPlugins/FaceDetector/types.d.ts.map +0 -1
- package/lib/typescript/src/Shared/VisionCameraPlugins/TextRecognition/Camera.d.ts +0 -6
- package/lib/typescript/src/Shared/VisionCameraPlugins/TextRecognition/Camera.d.ts.map +0 -1
- package/lib/typescript/src/Shared/VisionCameraPlugins/TextRecognition/PhotoRecognizer.d.ts +0 -3
- package/lib/typescript/src/Shared/VisionCameraPlugins/TextRecognition/PhotoRecognizer.d.ts.map +0 -1
- package/lib/typescript/src/Shared/VisionCameraPlugins/TextRecognition/RemoveLanguageModel.d.ts +0 -3
- package/lib/typescript/src/Shared/VisionCameraPlugins/TextRecognition/RemoveLanguageModel.d.ts.map +0 -1
- package/lib/typescript/src/Shared/VisionCameraPlugins/TextRecognition/index.d.ts +0 -5
- package/lib/typescript/src/Shared/VisionCameraPlugins/TextRecognition/index.d.ts.map +0 -1
- package/lib/typescript/src/Shared/VisionCameraPlugins/TextRecognition/scanText.d.ts +0 -3
- package/lib/typescript/src/Shared/VisionCameraPlugins/TextRecognition/scanText.d.ts.map +0 -1
- package/lib/typescript/src/Shared/VisionCameraPlugins/TextRecognition/translateText.d.ts +0 -3
- package/lib/typescript/src/Shared/VisionCameraPlugins/TextRecognition/translateText.d.ts.map +0 -1
- package/lib/typescript/src/Shared/VisionCameraPlugins/TextRecognition/types.d.ts +0 -67
- package/lib/typescript/src/Shared/VisionCameraPlugins/TextRecognition/types.d.ts.map +0 -1
- package/src/Shared/Libs/camera.utils.ts +0 -345
- package/src/Shared/Libs/frame-enhancement.utils.ts +0 -217
- package/src/Shared/Libs/opencv.utils.ts +0 -40
- package/src/Shared/Libs/worklet.utils.ts +0 -72
- package/src/Shared/VisionCameraPlugins/BarcodeScanner/hooks/useBarcodeScanner.ts +0 -79
- package/src/Shared/VisionCameraPlugins/BarcodeScanner/hooks/useCameraPermissions.ts +0 -46
- package/src/Shared/VisionCameraPlugins/BarcodeScanner/index.ts +0 -60
- package/src/Shared/VisionCameraPlugins/BarcodeScanner/scanCodes.ts +0 -32
- package/src/Shared/VisionCameraPlugins/BarcodeScanner/types.ts +0 -82
- package/src/Shared/VisionCameraPlugins/BarcodeScanner/utils/convert.ts +0 -195
- package/src/Shared/VisionCameraPlugins/BarcodeScanner/utils/geometry.ts +0 -135
- package/src/Shared/VisionCameraPlugins/BarcodeScanner/utils/highlights.ts +0 -84
- package/src/Shared/VisionCameraPlugins/Cropper/index.ts +0 -78
- package/src/Shared/VisionCameraPlugins/FaceDetector/Camera.tsx +0 -63
- package/src/Shared/VisionCameraPlugins/FaceDetector/detectFaces.ts +0 -44
- package/src/Shared/VisionCameraPlugins/FaceDetector/index.ts +0 -3
- package/src/Shared/VisionCameraPlugins/FaceDetector/types.ts +0 -99
- package/src/Shared/VisionCameraPlugins/TextRecognition/Camera.tsx +0 -76
- package/src/Shared/VisionCameraPlugins/TextRecognition/PhotoRecognizer.ts +0 -18
- package/src/Shared/VisionCameraPlugins/TextRecognition/RemoveLanguageModel.ts +0 -7
- package/src/Shared/VisionCameraPlugins/TextRecognition/index.ts +0 -7
- package/src/Shared/VisionCameraPlugins/TextRecognition/scanText.ts +0 -27
- package/src/Shared/VisionCameraPlugins/TextRecognition/translateText.ts +0 -21
- package/src/Shared/VisionCameraPlugins/TextRecognition/types.ts +0 -141
|
@@ -0,0 +1,810 @@
|
|
|
1
|
+
#import "OpenCVModule.h"
|
|
2
|
+
#import <React/RCTLog.h>
|
|
3
|
+
|
|
4
|
+
#ifdef __cplusplus
|
|
5
|
+
#undef NO
|
|
6
|
+
#undef YES
|
|
7
|
+
#import <opencv2/opencv.hpp>
|
|
8
|
+
#import <opencv2/imgcodecs/ios.h>
|
|
9
|
+
#define NO __objc_no
|
|
10
|
+
#define YES __objc_yes
|
|
11
|
+
#endif
|
|
12
|
+
|
|
13
|
+
@implementation OpenCVModule
|
|
14
|
+
|
|
15
|
+
RCT_EXPORT_MODULE();
|
|
16
|
+
|
|
17
|
+
/// React Native asks this to decide whether the module must be set up on the
/// main thread. Everything in this module is off-screen image processing with
/// no UIKit state, so background initialization is fine.
+ (BOOL)requiresMainQueueSetup {
    return NO;
}
|
|
20
|
+
|
|
21
|
+
// Helper: decode a base64-encoded string into a UIImage.
// Returns nil for a nil/empty string or undecodable data.
- (UIImage *)base64ToImage:(NSString *)base64String {
    // Messaging nil returns 0, so this also covers a nil argument.
    if (base64String.length == 0) {
        return nil;
    }
    NSData *decoded =
        [[NSData alloc] initWithBase64EncodedString:base64String
                                            options:NSDataBase64DecodingIgnoreUnknownCharacters];
    return decoded ? [UIImage imageWithData:decoded] : nil;
}
|
|
28
|
+
|
|
29
|
+
// Helper: encode a UIImage as a base64 string of its JPEG representation
// (compression quality 0.8). Returns nil when the image is nil or cannot
// be JPEG-encoded.
- (NSString *)imageToBase64:(UIImage *)image {
    if (image == nil) {
        return nil;
    }
    NSData *jpegData = UIImageJPEGRepresentation(image, 0.8);
    return jpegData ? [jpegData base64EncodedStringWithOptions:0] : nil;
}
|
|
36
|
+
|
|
37
|
+
// Helper: Convert UIImage to cv::Mat (3-channel RGB).
// Returns an empty Mat when the image is nil, has no CGImage backing
// (e.g. a CIImage-based UIImage), has zero dimensions, or the bitmap
// context cannot be created.
- (cv::Mat)imageToMat:(UIImage *)image {
    if (!image) return cv::Mat();

    CGImageRef imageRef = image.CGImage;
    if (!imageRef) return cv::Mat();

    size_t width = CGImageGetWidth(imageRef);
    size_t height = CGImageGetHeight(imageRef);
    if (width == 0 || height == 0) return cv::Mat();

    cv::Mat mat(static_cast<int>(height), static_cast<int>(width), CV_8UC4);

    // Always render through a device-RGB color space. The image's own
    // color space (CGImageGetColorSpace) can be nil or non-RGB (e.g. a
    // grayscale JPEG), in which case CGBitmapContextCreate rejects the
    // RGBA pixel format and this method would silently fail.
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(mat.data, width, height, 8, mat.step[0],
                                                 colorSpace,
                                                 kCGImageAlphaPremultipliedLast | kCGBitmapByteOrderDefault);
    CGColorSpaceRelease(colorSpace);
    if (!context) return cv::Mat();

    CGContextDrawImage(context, CGRectMake(0, 0, width, height), imageRef);
    CGContextRelease(context);

    // Drop the alpha channel; downstream OpenCV code expects RGB.
    cv::Mat result;
    cv::cvtColor(mat, result, cv::COLOR_RGBA2RGB);
    return result;
}
|
|
59
|
+
|
|
60
|
+
// Helper: Convert cv::Mat to UIImage.
// Accepts 1-channel (grayscale), 3-channel (RGB), or 4-channel mats.
// Returns nil for an empty mat or if any CoreGraphics object fails to
// be created (the original code passed NULL straight into
// imageWithCGImage:, producing a bogus image object).
- (UIImage *)matToImage:(cv::Mat)mat {
    if (mat.empty()) return nil;

    // Normalize to RGBA so a single CGImage pixel layout covers all inputs.
    cv::Mat rgba;
    if (mat.channels() == 1) {
        cv::cvtColor(mat, rgba, cv::COLOR_GRAY2RGBA);
    } else if (mat.channels() == 3) {
        cv::cvtColor(mat, rgba, cv::COLOR_RGB2RGBA);
    } else {
        // NOTE(review): assumed to already be 4-channel RGBA; other channel
        // counts would render garbled. clone() also guarantees a continuous
        // buffer, so the length computation below is valid.
        rgba = mat.clone();
    }

    // cvtColor/clone both yield continuous data, so elemSize() * total()
    // is exactly step[0] * rows bytes — the layout CGImageCreate expects.
    NSData *data = [NSData dataWithBytes:rgba.data length:rgba.elemSize() * rgba.total()];

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);
    if (!provider) {
        CGColorSpaceRelease(colorSpace);
        return nil;
    }

    CGImageRef imageRef = CGImageCreate(rgba.cols, rgba.rows, 8, 32, rgba.step[0],
                                        colorSpace,
                                        kCGImageAlphaPremultipliedLast | kCGBitmapByteOrderDefault,
                                        provider, NULL, false, kCGRenderingIntentDefault);

    UIImage *image = imageRef ? [UIImage imageWithCGImage:imageRef] : nil;

    // CG*Release functions are documented NULL-safe, unlike CFRelease.
    CGImageRelease(imageRef);
    CGDataProviderRelease(provider);
    CGColorSpaceRelease(colorSpace);

    return image;
}
|
|
90
|
+
|
|
91
|
+
// Crops face regions out of a base64-encoded frame for hologram analysis.
// `faceBounds` is an array of {x, y, width, height} dictionaries in pixel
// coordinates of the decoded image. Each crop is padded by 25% per side,
// clamped to the image, resized to 240x320 (matching Android), and the
// promise resolves with an array of base64 JPEG strings ordered
// left-to-right. Undecodable input resolves to an empty array; only an
// ObjC exception rejects (code "CROP_ERROR").
// NOTE(review): `imageWidth`/`imageHeight` are accepted but never read —
// bounds are applied directly to the decoded mat. Confirm against the JS
// caller that bounds are already in decoded-image coordinates.
RCT_EXPORT_METHOD(cropFaceImages:(NSString *)base64Image
                  faceBounds:(NSArray *)faceBounds
                  imageWidth:(NSInteger)imageWidth
                  imageHeight:(NSInteger)imageHeight
                  resolver:(RCTPromiseResolveBlock)resolve
                  rejecter:(RCTPromiseRejectBlock)reject) {
    @try {
        UIImage *image = [self base64ToImage:base64Image];
        if (!image) {
            resolve(@[]);
            return;
        }

        cv::Mat mat = [self imageToMat:image];
        if (mat.empty()) {
            resolve(@[]);
            return;
        }

        // Sort face bounds by x position (left to right) to match Android
        NSArray *sortedBounds = [faceBounds sortedArrayUsingComparator:^NSComparisonResult(NSDictionary *a, NSDictionary *b) {
            int xA = [a[@"x"] intValue];
            int xB = [b[@"x"] intValue];
            return xA < xB ? NSOrderedAscending : (xA > xB ? NSOrderedDescending : NSOrderedSame);
        }];

        NSMutableArray *croppedFaces = [NSMutableArray array];

        for (NSDictionary *bounds in sortedBounds) {
            int x = [bounds[@"x"] intValue];
            int y = [bounds[@"y"] intValue];
            int width = [bounds[@"width"] intValue];
            int height = [bounds[@"height"] intValue];

            // Add 25% padding (matching Android)
            int padX = (int)(width * 0.25);
            int padY = (int)(height * 0.25);
            x = MAX(0, x - padX);
            y = MAX(0, y - padY);
            // Clamp the padded rect to the image. width/height go negative
            // when the rect lies entirely outside the image; the `> 0`
            // check below rejects those.
            width = MIN(mat.cols - x, width + 2 * padX);
            height = MIN(mat.rows - y, height + 2 * padY);

            if (x >= 0 && y >= 0 && x + width <= mat.cols && y + height <= mat.rows && width > 0 && height > 0) {
                cv::Rect roi(x, y, width, height);
                // clone() detaches the crop from the full-frame buffer so it
                // survives independently of `mat`.
                cv::Mat cropped = mat(roi).clone();

                // Resize to 240x320 to match Android and reduce memory for hologram detection
                cv::Mat resized;
                cv::resize(cropped, resized, cv::Size(240, 320));
                cropped.release();

                // Add face without blur check
                UIImage *croppedImage = [self matToImage:resized];
                if (croppedImage) {
                    NSString *base64 = [self imageToBase64:croppedImage];
                    if (base64) {
                        [croppedFaces addObject:base64];
                    }
                }

                resized.release();
            }
        }

        resolve(croppedFaces);
    } @catch (NSException *exception) {
        reject(@"CROP_ERROR", exception.reason, nil);
    }
}
|
|
160
|
+
|
|
161
|
+
// Compares two base64-encoded images for visual similarity. Both images
// are binarized (grayscale → 5x5 Gaussian blur → Otsu threshold, same
// pipeline as Android) and the number of differing pixels in their
// absolute difference is compared against `threshold`. Resolves @YES when
// the difference is below the threshold, @NO when the images differ or
// either input cannot be decoded; rejects with "SIMILARITY_ERROR" on an
// ObjC exception.
RCT_EXPORT_METHOD(areImagesSimilar:(NSString *)base64Image1
                  base64Image2:(NSString *)base64Image2
                  threshold:(NSInteger)threshold
                  resolver:(RCTPromiseResolveBlock)resolve
                  rejecter:(RCTPromiseRejectBlock)reject) {
    @try {
        UIImage *firstImage = [self base64ToImage:base64Image1];
        UIImage *secondImage = [self base64ToImage:base64Image2];
        if (firstImage == nil || secondImage == nil) {
            resolve(@NO);
            return;
        }

        cv::Mat firstMat = [self imageToMat:firstImage];
        cv::Mat secondMat = [self imageToMat:secondImage];
        if (firstMat.empty() || secondMat.empty()) {
            resolve(@NO);
            return;
        }

        // Pixel-wise comparison needs equal dimensions; rescale the
        // second image onto the first one's size when they differ.
        if (firstMat.size() != secondMat.size()) {
            cv::resize(secondMat, secondMat, firstMat.size());
        }

        // Binarize both images identically before diffing.
        cv::Mat binaryA, binaryB;
        [self applyThreshold:firstMat output:binaryA];
        [self applyThreshold:secondMat output:binaryB];

        // Count pixels where the two binary images disagree.
        cv::Mat difference;
        cv::absdiff(binaryA, binaryB, difference);
        int differingPixels = cv::countNonZero(difference);

        resolve(@(differingPixels < threshold));
    } @catch (NSException *exception) {
        reject(@"SIMILARITY_ERROR", exception.reason, nil);
    }
}
|
|
206
|
+
|
|
207
|
+
// Helper: Apply grayscale, Gaussian blur, and Otsu threshold
// Produces a binary image in `dst`:
//   1. convert to grayscale (multi-channel input is treated as RGB),
//   2. 5x5 Gaussian blur to suppress noise before thresholding,
//   3. Otsu binary threshold (the explicit 0 threshold is ignored;
//      Otsu chooses the split automatically, output values are 0/255).
// `src` is taken by value, but cv::Mat copies are shallow headers, so no
// pixel data is duplicated by the call itself.
- (void)applyThreshold:(cv::Mat)src output:(cv::Mat&)dst {
    cv::Mat gray;
    if (src.channels() > 1) {
        cv::cvtColor(src, gray, cv::COLOR_RGB2GRAY);
    } else {
        // Clone so the in-place blur below cannot modify the caller's buffer.
        gray = src.clone();
    }
    cv::GaussianBlur(gray, gray, cv::Size(5, 5), 0);
    cv::threshold(gray, dst, 0, 255, cv::THRESH_BINARY + cv::THRESH_OTSU);
}
|
|
218
|
+
|
|
219
|
+
RCT_EXPORT_METHOD(detectHologram:(NSArray *)base64Images
|
|
220
|
+
threshold:(NSInteger)threshold
|
|
221
|
+
resolver:(RCTPromiseResolveBlock)resolve
|
|
222
|
+
rejecter:(RCTPromiseRejectBlock)reject) {
|
|
223
|
+
// Run on background queue to avoid blocking the main/JS thread
|
|
224
|
+
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
|
|
225
|
+
@try {
|
|
226
|
+
NSLog(@"[OpenCV] detectHologram called with %lu images, threshold: %ld", (unsigned long)base64Images.count, (long)threshold);
|
|
227
|
+
|
|
228
|
+
if (base64Images.count < 2) {
|
|
229
|
+
NSLog(@"[OpenCV] Not enough images for hologram detection");
|
|
230
|
+
resolve(nil);
|
|
231
|
+
return;
|
|
232
|
+
}
|
|
233
|
+
|
|
234
|
+
// Decode all images to Mat - use std::vector for better memory management
|
|
235
|
+
std::vector<cv::Mat> mats;
|
|
236
|
+
for (NSString *base64 in base64Images) {
|
|
237
|
+
@autoreleasepool {
|
|
238
|
+
UIImage *image = [self base64ToImage:base64];
|
|
239
|
+
if (image) {
|
|
240
|
+
cv::Mat mat = [self imageToMat:image];
|
|
241
|
+
if (!mat.empty()) {
|
|
242
|
+
mats.push_back(mat.clone());
|
|
243
|
+
}
|
|
244
|
+
}
|
|
245
|
+
}
|
|
246
|
+
}
|
|
247
|
+
|
|
248
|
+
NSLog(@"[OpenCV] Decoded %lu valid images", (unsigned long)mats.size());
|
|
249
|
+
|
|
250
|
+
if (mats.size() < 2) {
|
|
251
|
+
NSLog(@"[OpenCV] Not enough valid decoded images");
|
|
252
|
+
resolve(nil);
|
|
253
|
+
return;
|
|
254
|
+
}
|
|
255
|
+
|
|
256
|
+
NSLog(@"[OpenCV] Processing hologram detection...");
|
|
257
|
+
|
|
258
|
+
// Multi-range HSV filtering for holographic colors (matching Android)
|
|
259
|
+
// Range 1: Cyan-green holographic reflections
|
|
260
|
+
cv::Scalar lowerBound1(35, 80, 80);
|
|
261
|
+
cv::Scalar upperBound1(85, 255, 255);
|
|
262
|
+
// Range 2: Blue-violet holographic reflections
|
|
263
|
+
cv::Scalar lowerBound2(100, 80, 80);
|
|
264
|
+
cv::Scalar upperBound2(160, 255, 255);
|
|
265
|
+
|
|
266
|
+
std::vector<cv::Mat> diffs;
|
|
267
|
+
std::vector<cv::Mat> brightestImages;
|
|
268
|
+
|
|
269
|
+
const int HOLOGRAM_NON_ZERO_THRESHOLD = 600;
|
|
270
|
+
|
|
271
|
+
for (size_t i = 0; i < mats.size() - 1; i++) {
|
|
272
|
+
cv::Mat diff;
|
|
273
|
+
cv::absdiff(mats[i], mats[i + 1], diff);
|
|
274
|
+
|
|
275
|
+
cv::Mat hsv;
|
|
276
|
+
cv::cvtColor(diff, hsv, cv::COLOR_RGB2HSV);
|
|
277
|
+
|
|
278
|
+
// Apply multi-range HSV filtering
|
|
279
|
+
cv::Mat mask1, mask2, mask;
|
|
280
|
+
cv::inRange(hsv, lowerBound1, upperBound1, mask1);
|
|
281
|
+
cv::inRange(hsv, lowerBound2, upperBound2, mask2);
|
|
282
|
+
cv::bitwise_or(mask1, mask2, mask);
|
|
283
|
+
|
|
284
|
+
int maskNonZero = cv::countNonZero(mask);
|
|
285
|
+
NSLog(@"[OpenCV] Image pair %zu→%zu: mask non-zero=%d (threshold=%d)", i, i+1, maskNonZero, HOLOGRAM_NON_ZERO_THRESHOLD);
|
|
286
|
+
|
|
287
|
+
if (maskNonZero > HOLOGRAM_NON_ZERO_THRESHOLD) {
|
|
288
|
+
diffs.push_back(mask.clone());
|
|
289
|
+
brightestImages.push_back(mats[i].clone());
|
|
290
|
+
brightestImages.push_back(mats[i + 1].clone());
|
|
291
|
+
NSLog(@"[OpenCV] ✓ Mask added to diffs (significant variation)");
|
|
292
|
+
|
|
293
|
+
// Early termination: if first pair already has very strong signal, skip rest
|
|
294
|
+
if (i == 0 && maskNonZero > HOLOGRAM_NON_ZERO_THRESHOLD * 4) {
|
|
295
|
+
NSLog(@"[OpenCV] ⚡ Strong signal in first pair, skipping remaining pairs");
|
|
296
|
+
break;
|
|
297
|
+
}
|
|
298
|
+
} else {
|
|
299
|
+
NSLog(@"[OpenCV] ✗ Mask rejected (insufficient variation)");
|
|
300
|
+
}
|
|
301
|
+
}
|
|
302
|
+
|
|
303
|
+
// Release original mats early
|
|
304
|
+
for (auto& m : mats) { m.release(); }
|
|
305
|
+
mats.clear();
|
|
306
|
+
NSLog(@"[OpenCV] Found %lu significant differences", (unsigned long)diffs.size());
|
|
307
|
+
|
|
308
|
+
if (diffs.empty()) {
|
|
309
|
+
NSLog(@"[OpenCV] No significant hologram features detected");
|
|
310
|
+
for (auto& m : brightestImages) { m.release(); }
|
|
311
|
+
resolve(nil);
|
|
312
|
+
return;
|
|
313
|
+
}
|
|
314
|
+
|
|
315
|
+
// Combine all difference masks using bitwise_or (matching Android)
|
|
316
|
+
cv::Mat hologramMask = diffs[0].clone();
|
|
317
|
+
NSLog(@"[OpenCV] Combining difference masks...");
|
|
318
|
+
|
|
319
|
+
for (size_t i = 1; i < diffs.size(); i++) {
|
|
320
|
+
cv::bitwise_or(hologramMask, diffs[i], hologramMask);
|
|
321
|
+
}
|
|
322
|
+
|
|
323
|
+
for (auto& d : diffs) { d.release(); }
|
|
324
|
+
diffs.clear();
|
|
325
|
+
|
|
326
|
+
// Apply morphological operations to clean up (matching Android)
|
|
327
|
+
NSLog(@"[OpenCV] Applying morphological operations...");
|
|
328
|
+
cv::Mat kernel = cv::getStructuringElement(cv::MORPH_ELLIPSE, cv::Size(3, 3));
|
|
329
|
+
cv::morphologyEx(hologramMask, hologramMask, cv::MORPH_CLOSE, kernel);
|
|
330
|
+
cv::morphologyEx(hologramMask, hologramMask, cv::MORPH_OPEN, kernel);
|
|
331
|
+
|
|
332
|
+
// Check if significant hologram detected
|
|
333
|
+
int nonZeroCount = cv::countNonZero(hologramMask);
|
|
334
|
+
NSLog(@"[OpenCV] Hologram mask non-zero count: %d, threshold: %ld", nonZeroCount, (long)threshold);
|
|
335
|
+
|
|
336
|
+
if (nonZeroCount < threshold) {
|
|
337
|
+
NSLog(@"[OpenCV] Hologram mask below threshold");
|
|
338
|
+
hologramMask.release();
|
|
339
|
+
for (auto& m : brightestImages) { m.release(); }
|
|
340
|
+
resolve(nil);
|
|
341
|
+
return;
|
|
342
|
+
}
|
|
343
|
+
|
|
344
|
+
// Find best image based on saturation score in mask regions (matching Android)
|
|
345
|
+
cv::Mat bestImage = brightestImages[0].clone();
|
|
346
|
+
double maxScore = 0.0;
|
|
347
|
+
|
|
348
|
+
for (const auto& img : brightestImages) {
|
|
349
|
+
cv::Mat maskedRegion;
|
|
350
|
+
img.copyTo(maskedRegion, hologramMask);
|
|
351
|
+
|
|
352
|
+
cv::Mat hsvImg;
|
|
353
|
+
cv::cvtColor(maskedRegion, hsvImg, cv::COLOR_RGB2HSV);
|
|
354
|
+
|
|
355
|
+
std::vector<cv::Mat> channels;
|
|
356
|
+
cv::split(hsvImg, channels);
|
|
357
|
+
|
|
358
|
+
cv::Scalar satMean = cv::mean(channels[1], hologramMask);
|
|
359
|
+
double score = satMean[0];
|
|
360
|
+
|
|
361
|
+
maskedRegion.release();
|
|
362
|
+
hsvImg.release();
|
|
363
|
+
for (auto& c : channels) { c.release(); }
|
|
364
|
+
|
|
365
|
+
if (score > maxScore) {
|
|
366
|
+
maxScore = score;
|
|
367
|
+
bestImage.release();
|
|
368
|
+
bestImage = img.clone();
|
|
369
|
+
}
|
|
370
|
+
}
|
|
371
|
+
|
|
372
|
+
for (auto& m : brightestImages) { m.release(); }
|
|
373
|
+
brightestImages.clear();
|
|
374
|
+
|
|
375
|
+
NSLog(@"[OpenCV] Selected best hologram image with score: %f", maxScore);
|
|
376
|
+
|
|
377
|
+
// Convert results to base64
|
|
378
|
+
NSLog(@"[OpenCV] Converting results to base64...");
|
|
379
|
+
@autoreleasepool {
|
|
380
|
+
UIImage *hologramImage = [self matToImage:bestImage];
|
|
381
|
+
UIImage *maskImage = [self matToImage:hologramMask];
|
|
382
|
+
|
|
383
|
+
bestImage.release();
|
|
384
|
+
hologramMask.release();
|
|
385
|
+
|
|
386
|
+
if (hologramImage && maskImage) {
|
|
387
|
+
NSString *hologramBase64 = [self imageToBase64:hologramImage];
|
|
388
|
+
NSString *maskBase64 = [self imageToBase64:maskImage];
|
|
389
|
+
|
|
390
|
+
if (hologramBase64 && maskBase64) {
|
|
391
|
+
NSLog(@"[OpenCV] Hologram detection successful");
|
|
392
|
+
resolve(@{
|
|
393
|
+
@"hologramImage": hologramBase64,
|
|
394
|
+
@"hologramMask": maskBase64
|
|
395
|
+
});
|
|
396
|
+
return;
|
|
397
|
+
}
|
|
398
|
+
}
|
|
399
|
+
}
|
|
400
|
+
|
|
401
|
+
NSLog(@"[OpenCV] Failed to convert hologram results to base64");
|
|
402
|
+
resolve(nil);
|
|
403
|
+
} @catch (NSException *exception) {
|
|
404
|
+
NSLog(@"[OpenCV] Exception in detectHologram: %@", exception.reason);
|
|
405
|
+
reject(@"HOLOGRAM_ERROR", exception.reason, nil);
|
|
406
|
+
}
|
|
407
|
+
}); // end dispatch_async
|
|
408
|
+
}
|
|
409
|
+
|
|
410
|
+
RCT_EXPORT_METHOD(getAverageBrightness:(NSString *)base64Image
                  resolver:(RCTPromiseResolveBlock)resolve
                  rejecter:(RCTPromiseRejectBlock)reject) {
    @try {
        // Decode the base64 payload; an unreadable image reports 0.0 brightness.
        UIImage *decodedImage = [self base64ToImage:base64Image];
        if (!decodedImage) {
            resolve(@(0.0));
            return;
        }

        cv::Mat sourceMat = [self imageToMat:decodedImage];
        if (sourceMat.empty()) {
            resolve(@(0.0));
            return;
        }

        // Average brightness is the mean pixel value of the grayscale image.
        cv::Mat grayMat;
        cv::cvtColor(sourceMat, grayMat, cv::COLOR_RGB2GRAY);
        cv::Scalar grayMean = cv::mean(grayMat);

        resolve(@(grayMean[0]));
    } @catch (NSException *exception) {
        // Best-effort API: never reject, just report zero brightness on failure.
        resolve(@(0.0));
    }
}
|
|
435
|
+
|
|
436
|
+
// Checks whether the mean brightness inside the circle inscribed in the
// given rectangle exceeds `threshold`. Resolves @YES/@NO; on any failure
// (undecodable image, empty mat, exception) resolves @NO — never rejects.
RCT_EXPORT_METHOD(isCircularRegionBright:(NSString *)base64Image
                  minX:(NSInteger)minX
                  minY:(NSInteger)minY
                  width:(NSInteger)width
                  height:(NSInteger)height
                  threshold:(double)threshold
                  resolver:(RCTPromiseResolveBlock)resolve
                  rejecter:(RCTPromiseRejectBlock)reject) {
    @try {
        UIImage *image = [self base64ToImage:base64Image];
        if (!image) {
            resolve(@NO);
            return;
        }

        cv::Mat mat = [self imageToMat:image];
        if (mat.empty()) {
            resolve(@NO);
            return;
        }

        cv::Mat gray;
        cv::cvtColor(mat, gray, cv::COLOR_RGB2GRAY);

        // Guard against degenerate regions (zero/negative size) up front,
        // mirroring the explicit bounds validation in isRectangularRegionBright.
        int radius = (int)(std::min(width, height) / 2);
        if (radius <= 0) {
            resolve(@NO);
            return;
        }

        int centerX = (int)(minX + width / 2);
        int centerY = (int)(minY + height / 2);

        // Create circular mask. cv::circle clips the disc to the mask, so a
        // center outside the image simply leaves (part of) the mask empty.
        cv::Mat mask = cv::Mat::zeros(gray.rows, gray.cols, CV_8U);
        cv::circle(mask, cv::Point(centerX, centerY), radius, cv::Scalar(255), -1);

        // If the circle is entirely outside the frame there is nothing to
        // sample; report "not bright" instead of averaging an empty mask.
        if (cv::countNonZero(mask) == 0) {
            resolve(@NO);
            return;
        }

        cv::Scalar mean = cv::mean(gray, mask);
        BOOL isBright = mean[0] > threshold;

        resolve(@(isBright));
    } @catch (NSException *exception) {
        // Best-effort check: any failure is treated as "not bright".
        resolve(@NO);
    }
}
|
|
475
|
+
|
|
476
|
+
// Checks whether the mean brightness of the given rectangle (clamped to the
// image) exceeds `threshold`. Resolves @YES/@NO; failures resolve @NO.
RCT_EXPORT_METHOD(isRectangularRegionBright:(NSString *)base64Image
                  minX:(NSInteger)minX
                  minY:(NSInteger)minY
                  width:(NSInteger)width
                  height:(NSInteger)height
                  threshold:(double)threshold
                  resolver:(RCTPromiseResolveBlock)resolve
                  rejecter:(RCTPromiseRejectBlock)reject) {
    @try {
        // Decode input; an unreadable image is reported as "not bright".
        UIImage *decodedImage = [self base64ToImage:base64Image];
        if (!decodedImage) {
            resolve(@NO);
            return;
        }

        cv::Mat sourceMat = [self imageToMat:decodedImage];
        if (sourceMat.empty()) {
            resolve(@NO);
            return;
        }

        cv::Mat grayMat;
        cv::cvtColor(sourceMat, grayMat, cv::COLOR_RGB2GRAY);

        // Clamp the requested rectangle to the image bounds.
        int clampedX = std::max(0, (int)minX);
        int clampedY = std::max(0, (int)minY);
        int clampedW = std::min((int)width, grayMat.cols - clampedX);
        int clampedH = std::min((int)height, grayMat.rows - clampedY);

        // A region with no area (or entirely outside the image) is not bright.
        if (clampedW <= 0 || clampedH <= 0) {
            resolve(@NO);
            return;
        }

        // Mean brightness over the ROI decides the result.
        cv::Mat regionView = grayMat(cv::Rect(clampedX, clampedY, clampedW, clampedH));
        cv::Scalar regionMean = cv::mean(regionView);

        resolve(@(regionMean[0] > threshold));
    } @catch (NSException *exception) {
        resolve(@NO);
    }
}
|
|
524
|
+
|
|
525
|
+
// Whole-image blur check using the variance-of-Laplacian focus measure.
// Resolves @YES when the image is blurry (variance < threshold), @NO
// otherwise; any failure resolves @NO.
RCT_EXPORT_METHOD(checkBlurry:(NSString *)base64Image
                  threshold:(double)threshold
                  resolver:(RCTPromiseResolveBlock)resolve
                  rejecter:(RCTPromiseRejectBlock)reject) {
    @try {
        UIImage *decodedImage = [self base64ToImage:base64Image];
        if (!decodedImage) {
            resolve(@NO);
            return;
        }

        cv::Mat sourceMat = [self imageToMat:decodedImage];
        if (sourceMat.empty()) {
            resolve(@NO);
            return;
        }

        cv::Mat grayMat;
        cv::cvtColor(sourceMat, grayMat, cv::COLOR_RGB2GRAY);

        // Sharp images have strong edge response; a low Laplacian variance
        // therefore indicates blur.
        cv::Mat edgeResponse;
        cv::Laplacian(grayMat, edgeResponse, CV_64F);

        cv::Scalar responseMean, responseStdDev;
        cv::meanStdDev(edgeResponse, responseMean, responseStdDev);
        double responseVariance = responseStdDev[0] * responseStdDev[0];

        resolve(@(responseVariance < threshold));
    } @catch (NSException *exception) {
        resolve(@NO);
    }
}
|
|
560
|
+
|
|
561
|
+
// Blur check restricted to a sub-region given as fractions of the image:
// (centerXPercent, centerYPercent) is the region center, widthPercent /
// heightPercent its size. Uses the variance-of-Laplacian focus measure.
// Resolves @YES when the region is blurry (variance < threshold), @NO
// otherwise; any failure resolves @NO — never rejects.
RCT_EXPORT_METHOD(checkBlurryInRegion:(NSString *)base64Image
                  centerXPercent:(double)centerXPercent
                  centerYPercent:(double)centerYPercent
                  widthPercent:(double)widthPercent
                  heightPercent:(double)heightPercent
                  threshold:(double)threshold
                  resolver:(RCTPromiseResolveBlock)resolve
                  rejecter:(RCTPromiseRejectBlock)reject) {
    @try {
        UIImage *image = [self base64ToImage:base64Image];
        if (!image) {
            resolve(@NO);
            return;
        }

        cv::Mat mat = [self imageToMat:image];
        if (mat.empty()) {
            resolve(@NO);
            return;
        }

        int imgWidth = mat.cols;
        int imgHeight = mat.rows;

        // Region size, clamped to the image. Without the upper clamp a
        // widthPercent/heightPercent > 1.0 yields an ROI larger than the
        // image, and mat(roi) below throws a C++ cv::Exception — which the
        // Objective-C @catch cannot intercept, crashing the app.
        int regionWidth = MAX(1, MIN(imgWidth, (int)(imgWidth * widthPercent)));
        int regionHeight = MAX(1, MIN(imgHeight, (int)(imgHeight * heightPercent)));
        int regionX = MAX(0, MIN(imgWidth - regionWidth, (int)(imgWidth * centerXPercent - regionWidth / 2)));
        int regionY = MAX(0, MIN(imgHeight - regionHeight, (int)(imgHeight * centerYPercent - regionHeight / 2)));

        // Extract region of interest and measure sharpness there only.
        cv::Rect roi(regionX, regionY, regionWidth, regionHeight);
        cv::Mat regionMat = mat(roi);

        cv::Mat gray;
        cv::cvtColor(regionMat, gray, cv::COLOR_RGB2GRAY);

        // Variance of the Laplacian: low variance = few edges = blurry.
        cv::Mat laplacian;
        cv::Laplacian(gray, laplacian, CV_64F);

        cv::Scalar mean, stdDev;
        cv::meanStdDev(laplacian, mean, stdDev);
        double variance = stdDev[0] * stdDev[0];

        BOOL isBlurry = variance < threshold;
        resolve(@(isBlurry));
    } @catch (NSException *exception) {
        resolve(@NO);
    }
}
|
|
613
|
+
|
|
614
|
+
// Estimates the bounding box of an identity card from ML Kit detections
// (text blocks + faces) rather than from pixel data: it normalizes every
// detected rectangle, drops spatial outliers via a median-distance filter,
// pads the union of the survivors, and returns the padded rectangle with its
// four corners. Resolves nil (never rejects) whenever a plausible card cannot
// be derived: no elements, everything filtered out, region too small, or
// implausible aspect ratio.
//
// NOTE(review): base64Image is accepted but never decoded in this method —
// presumably kept for API symmetry with the other exported methods; confirm
// with the JS callers before removing.
RCT_EXPORT_METHOD(detectCardBounds:(NSString *)base64Image
                  textBlocks:(NSArray *)textBlocks
                  faces:(NSArray *)faces
                  imageWidth:(NSInteger)imageWidth
                  imageHeight:(NSInteger)imageHeight
                  resolver:(RCTPromiseResolveBlock)resolve
                  rejecter:(RCTPromiseRejectBlock)reject) {
    @try {
        // Collect all element bounds for clustering. Each entry is normalized
        // to @{left, top, right, bottom} regardless of its source.
        NSMutableArray *allElements = [NSMutableArray array];

        // Process text blocks: the rectangle lives under the "blockFrame" key.
        // (Fast-enumerating a nil array is a no-op, so nil input is safe.)
        for (NSDictionary *block in textBlocks) {
            NSDictionary *frame = block[@"blockFrame"];
            if (!frame) continue;

            int x = [frame[@"x"] intValue];
            int y = [frame[@"y"] intValue];
            int width = [frame[@"width"] intValue];
            int height = [frame[@"height"] intValue];

            [allElements addObject:@{
                @"left": @(x),
                @"top": @(y),
                @"right": @(x + width),
                @"bottom": @(y + height)
            }];
        }

        // Process faces: same normalization, but the rectangle key is "bounds".
        for (NSDictionary *face in faces) {
            NSDictionary *bounds = face[@"bounds"];
            if (!bounds) continue;

            int x = [bounds[@"x"] intValue];
            int y = [bounds[@"y"] intValue];
            int width = [bounds[@"width"] intValue];
            int height = [bounds[@"height"] intValue];

            [allElements addObject:@{
                @"left": @(x),
                @"top": @(y),
                @"right": @(x + width),
                @"bottom": @(y + height)
            }];
        }

        // Nothing detected at all -> no card.
        if (allElements.count == 0) {
            NSLog(@"[OpenCVModule] No elements detected for card bounds");
            resolve(nil);
            return;
        }

        // Calculate the centroid of all elements (mean of element centers).
        int centerX = 0;
        int centerY = 0;
        for (NSDictionary *rect in allElements) {
            centerX += ([rect[@"left"] intValue] + [rect[@"right"] intValue]) / 2;
            centerY += ([rect[@"top"] intValue] + [rect[@"bottom"] intValue]) / 2;
        }
        centerX /= (int)allElements.count;
        centerY /= (int)allElements.count;

        // Calculate distances from centroid, one per element, in the same
        // order as allElements so they can be paired by index below.
        // NOTE(review): dx*dx + dy*dy uses int arithmetic — fine for
        // camera-frame pixel coordinates, but would overflow for coordinates
        // beyond ~46k px.
        NSMutableArray *distances = [NSMutableArray array];
        for (NSDictionary *rect in allElements) {
            int elemCenterX = ([rect[@"left"] intValue] + [rect[@"right"] intValue]) / 2;
            int elemCenterY = ([rect[@"top"] intValue] + [rect[@"bottom"] intValue]) / 2;
            int dx = elemCenterX - centerX;
            int dy = elemCenterY - centerY;
            double distance = sqrt(dx * dx + dy * dy);
            [distances addObject:@(distance)];
        }

        // Calculate median distance (upper median for even counts).
        NSArray *sortedDistances = [distances sortedArrayUsingSelector:@selector(compare:)];
        double medianDistance = [sortedDistances[sortedDistances.count / 2] doubleValue];

        // Filter out elements that are more than 2x the median distance from center.
        // The median makes the cutoff robust against the very outliers being removed.
        double threshold = medianDistance * 2.0;
        NSMutableArray *filteredElements = [NSMutableArray array];
        for (NSUInteger i = 0; i < allElements.count; i++) {
            if ([distances[i] doubleValue] <= threshold) {
                [filteredElements addObject:allElements[i]];
            }
        }

        NSLog(@"[OpenCVModule] Filtered %lu outlier elements (%lu -> %lu)",
              (unsigned long)(allElements.count - filteredElements.count),
              (unsigned long)allElements.count,
              (unsigned long)filteredElements.count);

        if (filteredElements.count == 0) {
            NSLog(@"[OpenCVModule] No elements after filtering outliers");
            resolve(nil);
            return;
        }

        // Now calculate bounds from filtered elements: the axis-aligned union
        // of all surviving rectangles. Accumulators start at the opposite
        // extreme so the first element always wins.
        int minX = (int)imageWidth;
        int minY = (int)imageHeight;
        int maxX = 0;
        int maxY = 0;

        for (NSDictionary *rect in filteredElements) {
            minX = MIN(minX, [rect[@"left"] intValue]);
            minY = MIN(minY, [rect[@"top"] intValue]);
            maxX = MAX(maxX, [rect[@"right"] intValue]);
            maxY = MAX(maxY, [rect[@"bottom"] intValue]);
        }

        int elementCount = (int)filteredElements.count;

        NSLog(@"[OpenCVModule] Detected elements: %d, bounds: (%d, %d) to (%d, %d)", elementCount, minX, minY, maxX, maxY);

        // Calculate raw bounding box from elements
        int elementsWidth = maxX - minX;
        int elementsHeight = maxY - minY;

        NSLog(@"[OpenCVModule] Elements size: %dx%d, frame: %ldx%ld", elementsWidth, elementsHeight, (long)imageWidth, (long)imageHeight);

        // Validate minimum size (elements should occupy at least 5% of frame)
        int minArea = (int)(imageWidth * imageHeight * 0.05);
        if (elementsWidth * elementsHeight < minArea) {
            NSLog(@"[OpenCVModule] Elements too small: %d < %d", elementsWidth * elementsHeight, minArea);
            resolve(nil);
            return;
        }

        // Add generous padding (15% on all sides) to ensure full card is captured,
        // since text/face detections sit inside the physical card edges.
        int paddingX = (int)(elementsWidth * 0.15);
        int paddingY = (int)(elementsHeight * 0.15);

        // Calculate card bounds with padding, clamped to image boundaries
        int cardX = MAX(0, minX - paddingX);
        int cardY = MAX(0, minY - paddingY);
        int cardRight = MIN((int)imageWidth, maxX + paddingX);
        int cardBottom = MIN((int)imageHeight, maxY + paddingY);
        int cardWidth = cardRight - cardX;
        int cardHeight = cardBottom - cardY;

        NSLog(@"[OpenCVModule] Card bounds: (%d, %d) %dx%d", cardX, cardY, cardWidth, cardHeight);

        // Validate aspect ratio is reasonable for a document (very lenient: 1.0 - 2.5).
        // The MAX(..., 1.0) guards against division by a zero-height box.
        double aspectRatio = (double)cardWidth / MAX((double)cardHeight, 1.0);
        NSLog(@"[OpenCVModule] Card aspect ratio: %f", aspectRatio);

        if (aspectRatio < 1.0 || aspectRatio > 2.5) {
            NSLog(@"[OpenCVModule] Aspect ratio out of range: %f", aspectRatio);
            resolve(nil);
            return;
        }

        // Create corner points (rectangular bounds), clockwise from top-left.
        NSMutableArray *corners = [NSMutableArray array];

        // Top-left
        [corners addObject:@{
            @"x": @(cardX),
            @"y": @(cardY)
        }];

        // Top-right
        [corners addObject:@{
            @"x": @(cardX + cardWidth),
            @"y": @(cardY)
        }];

        // Bottom-right
        [corners addObject:@{
            @"x": @(cardX + cardWidth),
            @"y": @(cardY + cardHeight)
        }];

        // Bottom-left
        [corners addObject:@{
            @"x": @(cardX),
            @"y": @(cardY + cardHeight)
        }];

        // Result shape consumed by the JS side: rect + corners + fixed angle.
        NSDictionary *result = @{
            @"x": @(cardX),
            @"y": @(cardY),
            @"width": @(cardWidth),
            @"height": @(cardHeight),
            @"corners": corners,
            @"angle": @(0.0) // Rectangular alignment
        };

        resolve(result);
    } @catch (NSException *exception) {
        // Best-effort detection: swallow the exception and report "no card".
        resolve(nil);
    }
}
|
|
808
|
+
|
|
809
|
+
@end
|
|
810
|
+
|