@trustchex/react-native-sdk 1.355.1 → 1.357.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -9
- package/TrustchexSDK.podspec +5 -4
- package/android/build.gradle +6 -4
- package/android/src/main/AndroidManifest.xml +1 -1
- package/android/src/main/java/com/trustchex/reactnativesdk/TrustchexSDKPackage.kt +45 -25
- package/android/src/main/java/com/trustchex/reactnativesdk/camera/TrustchexCameraManager.kt +168 -0
- package/android/src/main/java/com/trustchex/reactnativesdk/camera/TrustchexCameraView.kt +871 -0
- package/android/src/main/java/com/trustchex/reactnativesdk/mlkit/MLKitModule.kt +245 -0
- package/android/src/main/java/com/trustchex/reactnativesdk/mrz/MRZValidationModule.kt +785 -0
- package/android/src/main/java/com/trustchex/reactnativesdk/mrz/MRZValidator.kt +419 -0
- package/android/src/main/java/com/trustchex/reactnativesdk/opencv/OpenCVModule.kt +818 -0
- package/ios/Camera/TrustchexCameraManager.m +37 -0
- package/ios/Camera/TrustchexCameraManager.swift +125 -0
- package/ios/Camera/TrustchexCameraView.swift +1176 -0
- package/ios/MLKit/MLKitModule.m +23 -0
- package/ios/MLKit/MLKitModule.swift +250 -0
- package/ios/MRZValidation.m +39 -0
- package/ios/MRZValidation.swift +802 -0
- package/ios/MRZValidator.swift +466 -0
- package/ios/OpenCV/OpenCVModule.h +4 -0
- package/ios/OpenCV/OpenCVModule.mm +810 -0
- package/lib/module/Screens/Dynamic/IdentityDocumentEIDScanningScreen.js +2 -3
- package/lib/module/Screens/Dynamic/IdentityDocumentScanningScreen.js +1 -2
- package/lib/module/Screens/Dynamic/LivenessDetectionScreen.js +418 -193
- package/lib/module/Screens/Static/OTPVerificationScreen.js +11 -11
- package/lib/module/Screens/Static/QrCodeScanningScreen.js +5 -1
- package/lib/module/Screens/Static/ResultScreen.js +25 -2
- package/lib/module/Screens/Static/VerificationSessionCheckScreen.js +25 -7
- package/lib/module/Shared/Components/DebugNavigationPanel.js +234 -24
- package/lib/module/Shared/Components/EIDScanner.js +99 -9
- package/lib/module/Shared/Components/FaceCamera.js +170 -179
- package/lib/module/Shared/Components/IdentityDocumentCamera.js +2151 -771
- package/lib/module/Shared/Components/QrCodeScannerCamera.js +109 -107
- package/lib/module/Shared/Components/TrustchexCamera.js +122 -0
- package/lib/module/Shared/EIDReader/tlv/tlv.helpers.js +91 -0
- package/lib/module/Shared/EIDReader/tlv/tlv.utils.js +2 -124
- package/lib/module/Shared/EIDReader/tlv/tlvInputStream.js +4 -4
- package/lib/module/Shared/EIDReader/tlv/tlvOutputState.js +4 -4
- package/lib/module/Shared/EIDReader/tlv/tlvOutputStream.js +4 -4
- package/lib/module/Shared/Libs/analytics.utils.js +2 -2
- package/lib/module/Shared/Libs/debug.utils.js +132 -0
- package/lib/module/Shared/Libs/deeplink.utils.js +6 -5
- package/lib/module/Shared/Libs/demo.utils.js +13 -3
- package/lib/module/Shared/Libs/mrz.utils.js +1 -175
- package/lib/module/Shared/Libs/native-device-info.utils.js +12 -6
- package/lib/module/Shared/Libs/tts.utils.js +40 -6
- package/lib/module/Shared/Services/AnalyticsService.js +9 -8
- package/lib/module/Shared/Types/mrzFields.js +1 -0
- package/lib/module/Translation/Resources/en.js +87 -88
- package/lib/module/Translation/Resources/tr.js +84 -85
- package/lib/module/Trustchex.js +9 -2
- package/lib/module/index.js +1 -0
- package/lib/module/version.js +1 -1
- package/lib/typescript/src/Screens/Dynamic/IdentityDocumentEIDScanningScreen.d.ts.map +1 -1
- package/lib/typescript/src/Screens/Dynamic/IdentityDocumentScanningScreen.d.ts.map +1 -1
- package/lib/typescript/src/Screens/Dynamic/LivenessDetectionScreen.d.ts.map +1 -1
- package/lib/typescript/src/Screens/Static/OTPVerificationScreen.d.ts.map +1 -1
- package/lib/typescript/src/Screens/Static/QrCodeScanningScreen.d.ts.map +1 -1
- package/lib/typescript/src/Screens/Static/ResultScreen.d.ts.map +1 -1
- package/lib/typescript/src/Screens/Static/VerificationSessionCheckScreen.d.ts.map +1 -1
- package/lib/typescript/src/Shared/Components/DebugNavigationPanel.d.ts.map +1 -1
- package/lib/typescript/src/Shared/Components/EIDScanner.d.ts +2 -2
- package/lib/typescript/src/Shared/Components/EIDScanner.d.ts.map +1 -1
- package/lib/typescript/src/Shared/Components/FaceCamera.d.ts +18 -4
- package/lib/typescript/src/Shared/Components/FaceCamera.d.ts.map +1 -1
- package/lib/typescript/src/Shared/Components/IdentityDocumentCamera.d.ts +3 -4
- package/lib/typescript/src/Shared/Components/IdentityDocumentCamera.d.ts.map +1 -1
- package/lib/typescript/src/Shared/Components/QrCodeScannerCamera.d.ts +2 -1
- package/lib/typescript/src/Shared/Components/QrCodeScannerCamera.d.ts.map +1 -1
- package/lib/typescript/src/Shared/Components/TrustchexCamera.d.ts +124 -0
- package/lib/typescript/src/Shared/Components/TrustchexCamera.d.ts.map +1 -0
- package/lib/typescript/src/Shared/EIDReader/tlv/tlv.helpers.d.ts +11 -0
- package/lib/typescript/src/Shared/EIDReader/tlv/tlv.helpers.d.ts.map +1 -0
- package/lib/typescript/src/Shared/EIDReader/tlv/tlv.utils.d.ts +2 -39
- package/lib/typescript/src/Shared/EIDReader/tlv/tlv.utils.d.ts.map +1 -1
- package/lib/typescript/src/Shared/Libs/analytics.utils.d.ts.map +1 -1
- package/lib/typescript/src/Shared/Libs/debug.utils.d.ts +42 -0
- package/lib/typescript/src/Shared/Libs/debug.utils.d.ts.map +1 -0
- package/lib/typescript/src/Shared/Libs/deeplink.utils.d.ts.map +1 -1
- package/lib/typescript/src/Shared/Libs/demo.utils.d.ts.map +1 -1
- package/lib/typescript/src/Shared/Libs/http-client.d.ts.map +1 -1
- package/lib/typescript/src/Shared/Libs/mrz.utils.d.ts +0 -4
- package/lib/typescript/src/Shared/Libs/mrz.utils.d.ts.map +1 -1
- package/lib/typescript/src/Shared/Libs/native-device-info.utils.d.ts.map +1 -1
- package/lib/typescript/src/Shared/Libs/tts.utils.d.ts +4 -3
- package/lib/typescript/src/Shared/Libs/tts.utils.d.ts.map +1 -1
- package/lib/typescript/src/Shared/Services/AnalyticsService.d.ts.map +1 -1
- package/lib/typescript/src/Shared/Types/identificationInfo.d.ts +2 -2
- package/lib/typescript/src/Shared/Types/identificationInfo.d.ts.map +1 -1
- package/lib/typescript/src/Shared/Types/mrzFields.d.ts +11 -0
- package/lib/typescript/src/Shared/Types/mrzFields.d.ts.map +1 -0
- package/lib/typescript/src/Translation/Resources/en.d.ts +4 -5
- package/lib/typescript/src/Translation/Resources/en.d.ts.map +1 -1
- package/lib/typescript/src/Translation/Resources/tr.d.ts +4 -5
- package/lib/typescript/src/Translation/Resources/tr.d.ts.map +1 -1
- package/lib/typescript/src/Trustchex.d.ts +2 -0
- package/lib/typescript/src/Trustchex.d.ts.map +1 -1
- package/lib/typescript/src/index.d.ts +1 -0
- package/lib/typescript/src/index.d.ts.map +1 -1
- package/lib/typescript/src/version.d.ts +1 -1
- package/package.json +4 -35
- package/src/Screens/Dynamic/ContractAcceptanceScreen.tsx +1 -1
- package/src/Screens/Dynamic/IdentityDocumentEIDScanningScreen.tsx +7 -5
- package/src/Screens/Dynamic/IdentityDocumentScanningScreen.tsx +2 -3
- package/src/Screens/Dynamic/LivenessDetectionScreen.tsx +498 -216
- package/src/Screens/Static/OTPVerificationScreen.tsx +37 -31
- package/src/Screens/Static/QrCodeScanningScreen.tsx +8 -1
- package/src/Screens/Static/ResultScreen.tsx +136 -88
- package/src/Screens/Static/VerificationSessionCheckScreen.tsx +46 -13
- package/src/Shared/Components/DebugNavigationPanel.tsx +290 -34
- package/src/Shared/Components/EIDScanner.tsx +94 -16
- package/src/Shared/Components/FaceCamera.tsx +236 -203
- package/src/Shared/Components/IdentityDocumentCamera.tsx +3073 -1030
- package/src/Shared/Components/QrCodeScannerCamera.tsx +133 -127
- package/src/Shared/Components/TrustchexCamera.tsx +289 -0
- package/src/Shared/Config/camera-enhancement.config.ts +2 -2
- package/src/Shared/EIDReader/tlv/tlv.helpers.ts +96 -0
- package/src/Shared/EIDReader/tlv/tlv.utils.ts +2 -125
- package/src/Shared/EIDReader/tlv/tlvInputStream.ts +4 -4
- package/src/Shared/EIDReader/tlv/tlvOutputState.ts +4 -4
- package/src/Shared/EIDReader/tlv/tlvOutputStream.ts +4 -4
- package/src/Shared/Libs/analytics.utils.ts +48 -20
- package/src/Shared/Libs/debug.utils.ts +149 -0
- package/src/Shared/Libs/deeplink.utils.ts +7 -5
- package/src/Shared/Libs/demo.utils.ts +4 -0
- package/src/Shared/Libs/http-client.ts +12 -8
- package/src/Shared/Libs/mrz.utils.ts +1 -163
- package/src/Shared/Libs/native-device-info.utils.ts +12 -6
- package/src/Shared/Libs/tts.utils.ts +48 -6
- package/src/Shared/Services/AnalyticsService.ts +69 -24
- package/src/Shared/Types/identificationInfo.ts +2 -2
- package/src/Shared/Types/mrzFields.ts +29 -0
- package/src/Translation/Resources/en.ts +90 -100
- package/src/Translation/Resources/tr.ts +89 -97
- package/src/Translation/index.ts +1 -1
- package/src/Trustchex.tsx +21 -4
- package/src/index.tsx +14 -0
- package/src/version.ts +1 -1
- package/android/src/main/java/com/trustchex/reactnativesdk/visioncameraplugins/barcodescanner/BarcodeScannerFrameProcessorPlugin.kt +0 -301
- package/android/src/main/java/com/trustchex/reactnativesdk/visioncameraplugins/cropper/BitmapUtils.kt +0 -205
- package/android/src/main/java/com/trustchex/reactnativesdk/visioncameraplugins/cropper/CropperPlugin.kt +0 -72
- package/android/src/main/java/com/trustchex/reactnativesdk/visioncameraplugins/cropper/FrameMetadata.kt +0 -4
- package/android/src/main/java/com/trustchex/reactnativesdk/visioncameraplugins/facedetector/FaceDetectorFrameProcessorPlugin.kt +0 -303
- package/android/src/main/java/com/trustchex/reactnativesdk/visioncameraplugins/textrecognition/TextRecognitionFrameProcessorPlugin.kt +0 -115
- package/ios/VisionCameraPlugins/BarcodeScanner/BarcodeScannerFrameProcessorPlugin-Bridging-Header.h +0 -9
- package/ios/VisionCameraPlugins/BarcodeScanner/BarcodeScannerFrameProcessorPlugin.mm +0 -22
- package/ios/VisionCameraPlugins/BarcodeScanner/BarcodeScannerFrameProcessorPlugin.swift +0 -188
- package/ios/VisionCameraPlugins/Cropper/Cropper-Bridging-Header.h +0 -13
- package/ios/VisionCameraPlugins/Cropper/Cropper.h +0 -20
- package/ios/VisionCameraPlugins/Cropper/Cropper.mm +0 -22
- package/ios/VisionCameraPlugins/Cropper/Cropper.swift +0 -145
- package/ios/VisionCameraPlugins/Cropper/CropperUtils.swift +0 -49
- package/ios/VisionCameraPlugins/FaceDetector/FaceDetectorFrameProcessorPlugin-Bridging-Header.h +0 -4
- package/ios/VisionCameraPlugins/FaceDetector/FaceDetectorFrameProcessorPlugin.mm +0 -22
- package/ios/VisionCameraPlugins/FaceDetector/FaceDetectorFrameProcessorPlugin.swift +0 -320
- package/ios/VisionCameraPlugins/TextRecognition/TextRecognitionFrameProcessorPlugin-Bridging-Header.h +0 -4
- package/ios/VisionCameraPlugins/TextRecognition/TextRecognitionFrameProcessorPlugin.mm +0 -27
- package/ios/VisionCameraPlugins/TextRecognition/TextRecognitionFrameProcessorPlugin.swift +0 -144
- package/lib/module/Shared/Libs/camera.utils.js +0 -308
- package/lib/module/Shared/Libs/frame-enhancement.utils.js +0 -133
- package/lib/module/Shared/Libs/opencv.utils.js +0 -21
- package/lib/module/Shared/Libs/worklet.utils.js +0 -68
- package/lib/module/Shared/VisionCameraPlugins/BarcodeScanner/hooks/useBarcodeScanner.js +0 -46
- package/lib/module/Shared/VisionCameraPlugins/BarcodeScanner/hooks/useCameraPermissions.js +0 -35
- package/lib/module/Shared/VisionCameraPlugins/BarcodeScanner/index.js +0 -19
- package/lib/module/Shared/VisionCameraPlugins/BarcodeScanner/scanCodes.js +0 -26
- package/lib/module/Shared/VisionCameraPlugins/BarcodeScanner/types.js +0 -3
- package/lib/module/Shared/VisionCameraPlugins/BarcodeScanner/utils/convert.js +0 -197
- package/lib/module/Shared/VisionCameraPlugins/BarcodeScanner/utils/geometry.js +0 -101
- package/lib/module/Shared/VisionCameraPlugins/BarcodeScanner/utils/highlights.js +0 -60
- package/lib/module/Shared/VisionCameraPlugins/Cropper/index.js +0 -47
- package/lib/module/Shared/VisionCameraPlugins/FaceDetector/Camera.js +0 -42
- package/lib/module/Shared/VisionCameraPlugins/FaceDetector/detectFaces.js +0 -35
- package/lib/module/Shared/VisionCameraPlugins/FaceDetector/index.js +0 -4
- package/lib/module/Shared/VisionCameraPlugins/FaceDetector/types.js +0 -3
- package/lib/module/Shared/VisionCameraPlugins/TextRecognition/Camera.js +0 -56
- package/lib/module/Shared/VisionCameraPlugins/TextRecognition/PhotoRecognizer.js +0 -20
- package/lib/module/Shared/VisionCameraPlugins/TextRecognition/RemoveLanguageModel.js +0 -9
- package/lib/module/Shared/VisionCameraPlugins/TextRecognition/index.js +0 -6
- package/lib/module/Shared/VisionCameraPlugins/TextRecognition/scanText.js +0 -20
- package/lib/module/Shared/VisionCameraPlugins/TextRecognition/translateText.js +0 -19
- package/lib/module/Shared/VisionCameraPlugins/TextRecognition/types.js +0 -3
- package/lib/typescript/src/Shared/Libs/camera.utils.d.ts +0 -87
- package/lib/typescript/src/Shared/Libs/camera.utils.d.ts.map +0 -1
- package/lib/typescript/src/Shared/Libs/frame-enhancement.utils.d.ts +0 -25
- package/lib/typescript/src/Shared/Libs/frame-enhancement.utils.d.ts.map +0 -1
- package/lib/typescript/src/Shared/Libs/opencv.utils.d.ts +0 -3
- package/lib/typescript/src/Shared/Libs/opencv.utils.d.ts.map +0 -1
- package/lib/typescript/src/Shared/Libs/worklet.utils.d.ts +0 -9
- package/lib/typescript/src/Shared/Libs/worklet.utils.d.ts.map +0 -1
- package/lib/typescript/src/Shared/VisionCameraPlugins/BarcodeScanner/hooks/useBarcodeScanner.d.ts +0 -13
- package/lib/typescript/src/Shared/VisionCameraPlugins/BarcodeScanner/hooks/useBarcodeScanner.d.ts.map +0 -1
- package/lib/typescript/src/Shared/VisionCameraPlugins/BarcodeScanner/hooks/useCameraPermissions.d.ts +0 -6
- package/lib/typescript/src/Shared/VisionCameraPlugins/BarcodeScanner/hooks/useCameraPermissions.d.ts.map +0 -1
- package/lib/typescript/src/Shared/VisionCameraPlugins/BarcodeScanner/index.d.ts +0 -12
- package/lib/typescript/src/Shared/VisionCameraPlugins/BarcodeScanner/index.d.ts.map +0 -1
- package/lib/typescript/src/Shared/VisionCameraPlugins/BarcodeScanner/scanCodes.d.ts +0 -3
- package/lib/typescript/src/Shared/VisionCameraPlugins/BarcodeScanner/scanCodes.d.ts.map +0 -1
- package/lib/typescript/src/Shared/VisionCameraPlugins/BarcodeScanner/types.d.ts +0 -52
- package/lib/typescript/src/Shared/VisionCameraPlugins/BarcodeScanner/types.d.ts.map +0 -1
- package/lib/typescript/src/Shared/VisionCameraPlugins/BarcodeScanner/utils/convert.d.ts +0 -62
- package/lib/typescript/src/Shared/VisionCameraPlugins/BarcodeScanner/utils/convert.d.ts.map +0 -1
- package/lib/typescript/src/Shared/VisionCameraPlugins/BarcodeScanner/utils/geometry.d.ts +0 -34
- package/lib/typescript/src/Shared/VisionCameraPlugins/BarcodeScanner/utils/geometry.d.ts.map +0 -1
- package/lib/typescript/src/Shared/VisionCameraPlugins/BarcodeScanner/utils/highlights.d.ts +0 -32
- package/lib/typescript/src/Shared/VisionCameraPlugins/BarcodeScanner/utils/highlights.d.ts.map +0 -1
- package/lib/typescript/src/Shared/VisionCameraPlugins/Cropper/index.d.ts +0 -23
- package/lib/typescript/src/Shared/VisionCameraPlugins/Cropper/index.d.ts.map +0 -1
- package/lib/typescript/src/Shared/VisionCameraPlugins/FaceDetector/Camera.d.ts +0 -9
- package/lib/typescript/src/Shared/VisionCameraPlugins/FaceDetector/Camera.d.ts.map +0 -1
- package/lib/typescript/src/Shared/VisionCameraPlugins/FaceDetector/detectFaces.d.ts +0 -3
- package/lib/typescript/src/Shared/VisionCameraPlugins/FaceDetector/detectFaces.d.ts.map +0 -1
- package/lib/typescript/src/Shared/VisionCameraPlugins/FaceDetector/index.d.ts +0 -3
- package/lib/typescript/src/Shared/VisionCameraPlugins/FaceDetector/index.d.ts.map +0 -1
- package/lib/typescript/src/Shared/VisionCameraPlugins/FaceDetector/types.d.ts +0 -79
- package/lib/typescript/src/Shared/VisionCameraPlugins/FaceDetector/types.d.ts.map +0 -1
- package/lib/typescript/src/Shared/VisionCameraPlugins/TextRecognition/Camera.d.ts +0 -6
- package/lib/typescript/src/Shared/VisionCameraPlugins/TextRecognition/Camera.d.ts.map +0 -1
- package/lib/typescript/src/Shared/VisionCameraPlugins/TextRecognition/PhotoRecognizer.d.ts +0 -3
- package/lib/typescript/src/Shared/VisionCameraPlugins/TextRecognition/PhotoRecognizer.d.ts.map +0 -1
- package/lib/typescript/src/Shared/VisionCameraPlugins/TextRecognition/RemoveLanguageModel.d.ts +0 -3
- package/lib/typescript/src/Shared/VisionCameraPlugins/TextRecognition/RemoveLanguageModel.d.ts.map +0 -1
- package/lib/typescript/src/Shared/VisionCameraPlugins/TextRecognition/index.d.ts +0 -5
- package/lib/typescript/src/Shared/VisionCameraPlugins/TextRecognition/index.d.ts.map +0 -1
- package/lib/typescript/src/Shared/VisionCameraPlugins/TextRecognition/scanText.d.ts +0 -3
- package/lib/typescript/src/Shared/VisionCameraPlugins/TextRecognition/scanText.d.ts.map +0 -1
- package/lib/typescript/src/Shared/VisionCameraPlugins/TextRecognition/translateText.d.ts +0 -3
- package/lib/typescript/src/Shared/VisionCameraPlugins/TextRecognition/translateText.d.ts.map +0 -1
- package/lib/typescript/src/Shared/VisionCameraPlugins/TextRecognition/types.d.ts +0 -67
- package/lib/typescript/src/Shared/VisionCameraPlugins/TextRecognition/types.d.ts.map +0 -1
- package/src/Shared/Libs/camera.utils.ts +0 -345
- package/src/Shared/Libs/frame-enhancement.utils.ts +0 -217
- package/src/Shared/Libs/opencv.utils.ts +0 -40
- package/src/Shared/Libs/worklet.utils.ts +0 -72
- package/src/Shared/VisionCameraPlugins/BarcodeScanner/hooks/useBarcodeScanner.ts +0 -79
- package/src/Shared/VisionCameraPlugins/BarcodeScanner/hooks/useCameraPermissions.ts +0 -46
- package/src/Shared/VisionCameraPlugins/BarcodeScanner/index.ts +0 -60
- package/src/Shared/VisionCameraPlugins/BarcodeScanner/scanCodes.ts +0 -32
- package/src/Shared/VisionCameraPlugins/BarcodeScanner/types.ts +0 -82
- package/src/Shared/VisionCameraPlugins/BarcodeScanner/utils/convert.ts +0 -195
- package/src/Shared/VisionCameraPlugins/BarcodeScanner/utils/geometry.ts +0 -135
- package/src/Shared/VisionCameraPlugins/BarcodeScanner/utils/highlights.ts +0 -84
- package/src/Shared/VisionCameraPlugins/Cropper/index.ts +0 -78
- package/src/Shared/VisionCameraPlugins/FaceDetector/Camera.tsx +0 -63
- package/src/Shared/VisionCameraPlugins/FaceDetector/detectFaces.ts +0 -44
- package/src/Shared/VisionCameraPlugins/FaceDetector/index.ts +0 -3
- package/src/Shared/VisionCameraPlugins/FaceDetector/types.ts +0 -99
- package/src/Shared/VisionCameraPlugins/TextRecognition/Camera.tsx +0 -76
- package/src/Shared/VisionCameraPlugins/TextRecognition/PhotoRecognizer.ts +0 -18
- package/src/Shared/VisionCameraPlugins/TextRecognition/RemoveLanguageModel.ts +0 -7
- package/src/Shared/VisionCameraPlugins/TextRecognition/index.ts +0 -7
- package/src/Shared/VisionCameraPlugins/TextRecognition/scanText.ts +0 -27
- package/src/Shared/VisionCameraPlugins/TextRecognition/translateText.ts +0 -21
- package/src/Shared/VisionCameraPlugins/TextRecognition/types.ts +0 -141
|
@@ -0,0 +1,1176 @@
|
|
|
1
|
+
import Foundation
|
|
2
|
+
import React
|
|
3
|
+
import AVFoundation
|
|
4
|
+
import UIKit
|
|
5
|
+
import MLKitFaceDetection
|
|
6
|
+
import MLKitTextRecognition
|
|
7
|
+
import MLKitVision
|
|
8
|
+
|
|
9
|
+
class TrustchexCameraView: UIView {
|
|
10
|
+
|
|
11
|
+
// MARK: - Properties
|
|
12
|
+
private var captureSession: AVCaptureSession?
|
|
13
|
+
private var previewLayer: AVCaptureVideoPreviewLayer?
|
|
14
|
+
private var videoOutput: AVCaptureVideoDataOutput?
|
|
15
|
+
private var movieFileOutput: AVCaptureMovieFileOutput?
|
|
16
|
+
private var metadataOutput: AVCaptureMetadataOutput?
|
|
17
|
+
private var currentCamera: AVCaptureDevice?
|
|
18
|
+
private var backCameras: [AVCaptureDevice] = []
|
|
19
|
+
private var lastDetectedBarcodes: [[String: Any]] = []
|
|
20
|
+
private var barcodeDetectionQueue = DispatchQueue(label: "com.trustchex.camera.barcodeQueue")
|
|
21
|
+
|
|
22
|
+
private let sessionQueue = DispatchQueue(label: "com.trustchex.camera.sessionQueue")
|
|
23
|
+
private let videoQueue = DispatchQueue(label: "com.trustchex.camera.videoQueue")
|
|
24
|
+
private let processingQueue = DispatchQueue(label: "com.trustchex.camera.processing", qos: .userInitiated)
|
|
25
|
+
|
|
26
|
+
@objc var cameraType: String = "back" {
|
|
27
|
+
didSet {
|
|
28
|
+
if cameraType != oldValue {
|
|
29
|
+
switchCameraType(cameraType)
|
|
30
|
+
}
|
|
31
|
+
}
|
|
32
|
+
}
|
|
33
|
+
@objc var torchEnabled: Bool = false {
|
|
34
|
+
didSet {
|
|
35
|
+
if torchEnabled != oldValue {
|
|
36
|
+
updateTorchEnabled(torchEnabled)
|
|
37
|
+
}
|
|
38
|
+
}
|
|
39
|
+
}
|
|
40
|
+
@objc var enableFrameProcessing: Bool = false {
|
|
41
|
+
didSet {
|
|
42
|
+
_frameProcessingEnabled = enableFrameProcessing
|
|
43
|
+
}
|
|
44
|
+
}
|
|
45
|
+
@objc var enableFaceDetection: Bool = false
|
|
46
|
+
@objc var enableTextRecognition: Bool = false
|
|
47
|
+
@objc var enableBarcodeScanning: Bool = false
|
|
48
|
+
@objc var enableMrzValidation: Bool = false
|
|
49
|
+
@objc var includeBase64: Bool = false
|
|
50
|
+
@objc var targetFps: NSNumber = 6 {
|
|
51
|
+
didSet {
|
|
52
|
+
_targetFps = max(1, min(30, targetFps.int32Value))
|
|
53
|
+
}
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
private var _cameraType: String = "back"
|
|
57
|
+
private var _torchEnabled = false
|
|
58
|
+
private var _frameProcessingEnabled = false
|
|
59
|
+
private var _targetFps: Int32 = 6
|
|
60
|
+
private var lastFrameTime: TimeInterval = 0
|
|
61
|
+
private var isProcessing = false
|
|
62
|
+
private var isRecording = false
|
|
63
|
+
private var isCancelledRecording = false
|
|
64
|
+
private var currentRecordingURL: URL?
|
|
65
|
+
private var finishFallbackWorkItem: DispatchWorkItem?
|
|
66
|
+
private var finishFallbackAttempts = 0
|
|
67
|
+
private let ciContext = CIContext()
|
|
68
|
+
private var minFrameInterval: TimeInterval {
|
|
69
|
+
return 1.0 / Double(_targetFps)
|
|
70
|
+
}
|
|
71
|
+
|
|
72
|
+
// ML Kit detectors
|
|
73
|
+
private lazy var faceDetector: FaceDetector = {
|
|
74
|
+
let options = FaceDetectorOptions()
|
|
75
|
+
options.performanceMode = .accurate
|
|
76
|
+
options.landmarkMode = .none
|
|
77
|
+
options.classificationMode = .all
|
|
78
|
+
options.minFaceSize = 0.1 // 10% - small enough to detect faces reliably
|
|
79
|
+
return FaceDetector.faceDetector(options: options)
|
|
80
|
+
}()
|
|
81
|
+
|
|
82
|
+
private lazy var textRecognizer: TextRecognizer = {
|
|
83
|
+
let options = TextRecognizerOptions()
|
|
84
|
+
return TextRecognizer.textRecognizer(options: options)
|
|
85
|
+
}()
|
|
86
|
+
|
|
87
|
+
// Event callbacks
|
|
88
|
+
@objc var onFrameAvailable: RCTDirectEventBlock?
|
|
89
|
+
@objc var onCameraReady: RCTDirectEventBlock?
|
|
90
|
+
@objc var onCameraError: RCTDirectEventBlock?
|
|
91
|
+
@objc var onRecordingFinished: RCTDirectEventBlock?
|
|
92
|
+
@objc var onRecordingError: RCTDirectEventBlock?
|
|
93
|
+
|
|
94
|
+
// MARK: - Initialization
|
|
95
|
+
override init(frame: CGRect) {
|
|
96
|
+
super.init(frame: frame)
|
|
97
|
+
setupCamera()
|
|
98
|
+
}
|
|
99
|
+
|
|
100
|
+
required init?(coder: NSCoder) {
|
|
101
|
+
fatalError("init(coder:) has not been implemented")
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
// MARK: - Setup
|
|
105
|
+
/// Kicks off capture-session configuration asynchronously on the dedicated
/// session queue so that view initialization never blocks the main thread.
private func setupCamera() {
    sessionQueue.async { [weak self] in
        guard let self = self else { return }
        self.configureCaptureSession()
    }
}
|
|
110
|
+
|
|
111
|
+
/// Builds the AVCaptureSession: selects the best camera for the configured
/// position, wires up video/audio inputs plus the video-data, movie-file and
/// metadata (barcode) outputs, then starts the session and emits the
/// camera-ready event with the device's exposure-bias range.
///
/// Must run on `sessionQueue`; preview-layer setup and device configuration
/// are hopped back to the main thread.
private func configureCaptureSession() {
    let session = AVCaptureSession()
    session.beginConfiguration()

    let cameraPosition: AVCaptureDevice.Position = (_cameraType == "front") ? .front : .back

    // Both cameras target Full HD (front = liveness/face detection,
    // back = sharp document capture); older hardware falls back to
    // 720p and then the generic `.high` preset.
    applyBestSessionPreset(to: session, position: cameraPosition)

    guard let camera = selectBestCamera(for: cameraPosition),
          let videoInput = try? AVCaptureDeviceInput(device: camera) else {
        session.commitConfiguration()
        sendErrorEvent(error: "Failed to access camera input")
        return
    }

    if session.canAddInput(videoInput) {
        session.addInput(videoInput)
    }
    currentCamera = camera

    // Audio input is best-effort: video recording still works without it.
    if let audioDevice = AVCaptureDevice.default(for: .audio),
       let audioInput = try? AVCaptureDeviceInput(device: audioDevice) {
        if session.canAddInput(audioInput) {
            session.addInput(audioInput)
        }
    }

    // Video data output feeds the frame processor (ML Kit / JS callbacks).
    let videoOutput = AVCaptureVideoDataOutput()
    videoOutput.setSampleBufferDelegate(self, queue: videoQueue)
    videoOutput.alwaysDiscardsLateVideoFrames = true
    videoOutput.videoSettings = [
        kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
    ]
    if session.canAddOutput(videoOutput) {
        session.addOutput(videoOutput)
    }
    self.videoOutput = videoOutput

    // Movie file output for video recording.
    let movieOutput = AVCaptureMovieFileOutput()
    if session.canAddOutput(movieOutput) {
        session.addOutput(movieOutput)
        if let connection = movieOutput.connection(with: .video),
           connection.isVideoOrientationSupported {
            connection.videoOrientation = .portrait
        }
    }
    self.movieFileOutput = movieOutput

    // Metadata output for native barcode scanning (much faster than MLKit).
    let metadataOutput = AVCaptureMetadataOutput()
    if session.canAddOutput(metadataOutput) {
        session.addOutput(metadataOutput)
        metadataOutput.setMetadataObjectsDelegate(self, queue: barcodeDetectionQueue)

        // `availableMetadataObjectTypes` is only populated after the output
        // has been added to the session, so the filter must happen here.
        let wantedTypes: [AVMetadataObject.ObjectType] = [
            .pdf417, .qr, .code128, .code39, .ean13, .ean8, .aztec, .dataMatrix
        ]
        let supportedTypes = metadataOutput.availableMetadataObjectTypes
        let typesToEnable = wantedTypes.filter { supportedTypes.contains($0) }

        metadataOutput.metadataObjectTypes = typesToEnable
        print("[TrustchexCamera] Native barcode scanner enabled with types: \(typesToEnable)")
    }
    self.metadataOutput = metadataOutput

    // Portrait orientation; mirror the front camera so the delivered frames
    // match what the user sees in the preview.
    if let connection = videoOutput.connection(with: .video) {
        if connection.isVideoOrientationSupported {
            connection.videoOrientation = .portrait
        }
        if connection.isVideoMirroringSupported {
            connection.isVideoMirrored = (_cameraType == "front")
        }
    }

    session.commitConfiguration()
    self.captureSession = session

    // Preview layer and device configuration must happen on the main thread;
    // starting the session goes back to the session queue.
    DispatchQueue.main.async { [weak self] in
        guard let self = self else { return }
        self.setupPreviewLayer(session: session)
        self.configureCameraSettings(camera)

        self.sessionQueue.async { [weak self] in
            self?.captureSession?.startRunning()

            let minExposure = Double(camera.minExposureTargetBias)
            let maxExposure = Double(camera.maxExposureTargetBias)
            DispatchQueue.main.async { [weak self] in
                self?.sendReadyEvent(minExposure: minExposure, maxExposure: maxExposure)
            }
        }
    }
}

/// Applies the highest-quality session preset the session supports
/// (1080p → 720p → `.high`), logging the choice with a camera-position label.
/// Replaces the previously duplicated front/back branches.
private func applyBestSessionPreset(to session: AVCaptureSession,
                                    position: AVCaptureDevice.Position) {
    let label = (position == .front) ? "Front" : "Back"
    if session.canSetSessionPreset(.hd1920x1080) {
        session.sessionPreset = .hd1920x1080
        print("[TrustchexCamera] \(label) camera: Using Full HD 1920x1080")
    } else if session.canSetSessionPreset(.hd1280x720) {
        session.sessionPreset = .hd1280x720
        print("[TrustchexCamera] \(label) camera: Fallback to HD 1280x720")
    } else {
        session.sessionPreset = .high
        print("[TrustchexCamera] \(label) camera: Using high preset")
    }
}
|
|
246
|
+
|
|
247
|
+
// MARK: - Camera Selection
|
|
248
|
+
// MARK: - Camera Selection

/// Chooses the capture device for the requested position.
///
/// Front: the built-in wide-angle selfie camera.
/// Back: virtual multi-lens devices are preferred because wide-angle-only
/// cameras can struggle to autofocus at the close distances used for
/// document scanning. Priority: Dual > Triple > Wide Angle.
private func selectBestCamera(for position: AVCaptureDevice.Position) -> AVCaptureDevice? {
    guard position != .front else {
        return AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front)
    }

    // Dual camera (iPhone 7 Plus and later with dual cameras).
    if let device = AVCaptureDevice.default(.builtInDualCamera, for: .video, position: .back) {
        print("[TrustchexCamera] Selected Dual Camera for document scanning (better close-range focus)")
        return device
    }

    // Triple camera (iPhone 11 Pro and later).
    if let device = AVCaptureDevice.default(.builtInTripleCamera, for: .video, position: .back) {
        print("[TrustchexCamera] Selected Triple Camera for document scanning (better close-range focus)")
        return device
    }

    // Last resort: plain wide-angle back camera.
    print("[TrustchexCamera] Selected Wide Angle camera for document scanning")
    return AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back)
}
|
|
273
|
+
|
|
274
|
+
|
|
275
|
+
|
|
276
|
+
/// Creates an aspect-fill preview layer for `session` and installs it beneath
/// all other sublayers so React-managed subviews stay on top.
/// Must be called on the main thread.
private func setupPreviewLayer(session: AVCaptureSession) {
    let previewLayer = AVCaptureVideoPreviewLayer(session: session)
    previewLayer.videoGravity = .resizeAspectFill

    // Mirror only the front camera, and turn off automatic mirroring so the
    // preview stays consistent with the frames handed to the processor.
    if let connection = previewLayer.connection, connection.isVideoMirroringSupported {
        connection.automaticallyAdjustsVideoMirroring = false
        connection.isVideoMirrored = (_cameraType == "front")
    }

    previewLayer.frame = bounds
    layer.insertSublayer(previewLayer, at: 0)
    self.previewLayer = previewLayer
}
|
|
292
|
+
|
|
293
|
+
/// Applies focus/exposure/HDR policy to `camera` while holding its
/// configuration lock. Front cameras get continuous autofocus (faces at
/// arm's length); back cameras get a near-range focus lock for document
/// scanning. A failure to acquire the lock is logged and swallowed
/// (best-effort configuration).
private func configureCameraSettings(_ camera: AVCaptureDevice) {
    do {
        try camera.lockForConfiguration()

        // Configure focus based on camera position:
        //   front — continuous autofocus for liveness/face detection at arm's length
        //   back  — close-range focus lock for document scanning
        let isFrontCamera = camera.position == .front

        if isFrontCamera {
            // Front camera: continuous autofocus for face detection.
            if camera.isFocusModeSupported(.continuousAutoFocus) {
                camera.focusMode = .continuousAutoFocus

                // Smooth AF avoids visible focus "pumping" while tracking a face.
                if camera.isSmoothAutoFocusSupported {
                    camera.isSmoothAutoFocusEnabled = true
                }
            } else if camera.isFocusModeSupported(.autoFocus) {
                camera.focusMode = .autoFocus
            }
        } else {
            // Back camera: bias focus toward close range for document scanning.
            if #available(iOS 15.0, *) {
                if camera.isLockingFocusWithCustomLensPositionSupported {
                    // Lock the lens near its close-focus end of travel.
                    // NOTE(review): lensPosition is a unitless 0–1 value, not a
                    // distance; 0.9 looks empirically tuned — confirm with QA.
                    camera.setFocusModeLocked(lensPosition: 0.9, completionHandler: nil)
                } else {
                    // Fallback to continuous autofocus when lens locking is unsupported.
                    if camera.isFocusModeSupported(.continuousAutoFocus) {
                        camera.focusMode = .continuousAutoFocus
                    }
                }
            } else {
                // iOS 14 and below: use continuous autofocus.
                if camera.isFocusModeSupported(.continuousAutoFocus) {
                    camera.focusMode = .continuousAutoFocus

                    if camera.isSmoothAutoFocusSupported {
                        camera.isSmoothAutoFocusEnabled = true
                    }
                } else if camera.isFocusModeSupported(.autoFocus) {
                    camera.focusMode = .autoFocus
                }
            }
        }

        // React to subject-area changes so AVFoundation re-evaluates
        // focus/exposure when the framed content moves.
        camera.isSubjectAreaChangeMonitoringEnabled = true

        // Prefer continuous auto exposure; fall back to one-shot auto exposure.
        if camera.isExposureModeSupported(.continuousAutoExposure) {
            camera.exposureMode = .continuousAutoExposure
        } else if camera.isExposureModeSupported(.autoExpose) {
            camera.exposureMode = .autoExpose
        }

        // Disable video HDR. Only touched when currently enabled, which
        // implies the active format supports HDR (setting it otherwise throws
        // an Obj-C exception).
        if camera.isVideoHDREnabled {
            camera.automaticallyAdjustsVideoHDREnabled = false
            camera.isVideoHDREnabled = false
        }

        camera.unlockForConfiguration()
    } catch {
        print("Failed to configure camera: \(error)")
    }
}
|
|
361
|
+
|
|
362
|
+
// MARK: - Layout
|
|
363
|
+
/// Keeps the capture preview glued to the view's bounds on every layout pass.
override func layoutSubviews() {
    super.layoutSubviews()
    if let preview = previewLayer {
        preview.frame = bounds
    }
}
|
|
367
|
+
|
|
368
|
+
// MARK: - Public Methods
|
|
369
|
+
/// Records the requested camera type ("front"/"back") and performs the
/// actual device switch asynchronously on the session queue.
private func switchCameraType(_ type: String) {
    _cameraType = type
    sessionQueue.async { [weak self] in
        self?.switchCamera(to: type)
    }
}
|
|
375
|
+
|
|
376
|
+
/// Stores the torch preference and applies it to the current camera on the
/// session queue.
///
/// - Parameter enabled: `true` to turn the torch on, `false` to turn it off.
private func updateTorchEnabled(_ enabled: Bool) {
    _torchEnabled = enabled
    sessionQueue.async { [weak self] in
        guard let camera = self?.currentCamera else { return }

        // Fix: setting `torchMode` on a device without a torch (e.g. the front
        // camera) raises an Objective-C NSException that Swift's do/catch
        // cannot intercept — the original set `.off` unconditionally. Bail out
        // early on torchless devices instead.
        guard camera.hasTorch else { return }

        do {
            try camera.lockForConfiguration()
            // Re-read the stored flag so the most recent request wins when
            // several updates were queued back-to-back (matches the original's
            // latest-wins semantics).
            camera.torchMode = (self?.torchEnabled ?? false) ? .on : .off
            camera.unlockForConfiguration()
        } catch {
            print("Failed to set torch: \(error)")
        }
    }
}
|
|
394
|
+
|
|
395
|
+
// MARK: Imperative prop setters invoked by the view manager (React Native bridge).
// Each forwards to a stored property; the property observers (declared above,
// outside this excerpt — presumably didSet hooks, confirm) perform the work.

/// Switches the active camera; setting `cameraType` triggers the switch on
/// the session queue.
@objc(changeCameraType:)
func setCameraType(_ type: String) {
    cameraType = type
}

/// Turns the torch on/off via the `torchEnabled` property.
@objc(changeTorchEnabled:)
func setTorchEnabled(_ enabled: Bool) {
    torchEnabled = enabled
}

/// Master switch for per-frame ML processing.
@objc(changeEnableFrameProcessing:)
func setEnableFrameProcessing(_ enabled: Bool) {
    enableFrameProcessing = enabled
}

/// Enables/disables ML Kit face detection on captured frames.
func setEnableFaceDetection(_ enabled: Bool) {
    enableFaceDetection = enabled
}

/// Enables/disables ML Kit text recognition on captured frames.
func setEnableTextRecognition(_ enabled: Bool) {
    enableTextRecognition = enabled
}

/// Enables/disables barcode scanning (served by the native
/// AVCaptureMetadataOutput delegate, not ML Kit).
func setEnableBarcodeScanning(_ enabled: Bool) {
    enableBarcodeScanning = enabled
}

/// Enables/disables MRZ validation of recognized text.
func setEnableMrzValidation(_ enabled: Bool) {
    enableMrzValidation = enabled
}

/// When true, each frame event carries a base64 JPEG of the frame
/// (expensive — only enable when JS actually consumes the image).
func setIncludeBase64(_ enabled: Bool) {
    includeBase64 = enabled
}

/// Sets the target frame-processing rate in frames per second.
@objc(changeTargetFps:)
func changeTargetFps(_ fps: NSNumber) {
    targetFps = fps
}
|
|
435
|
+
|
|
436
|
+
/// Points focus and exposure at the given point of interest on the current
/// camera. Runs on the session queue.
/// NOTE: the coordinate space of (x, y) is whatever the JS caller supplies —
/// presumably normalized device coordinates; confirm against the JS side.
@objc func setFocusPoint(_ x: NSNumber, _ y: NSNumber) {
    sessionQueue.async { [weak self] in
        guard let camera = self?.currentCamera else { return }

        let pointOfInterest = CGPoint(x: x.doubleValue, y: y.doubleValue)

        do {
            try camera.lockForConfiguration()
            defer { camera.unlockForConfiguration() }

            if camera.isFocusPointOfInterestSupported {
                camera.focusPointOfInterest = pointOfInterest
                // Setting the focus mode after the point triggers a refocus;
                // prefer continuous AF, then one-shot AF.
                let preferredModes: [AVCaptureDevice.FocusMode] = [.continuousAutoFocus, .autoFocus]
                if let mode = preferredModes.first(where: camera.isFocusModeSupported) {
                    camera.focusMode = mode
                }
            }

            if camera.isExposurePointOfInterestSupported {
                camera.exposurePointOfInterest = pointOfInterest
            }
        } catch {
            print("Failed to set focus point: \(error)")
        }
    }
}
|
|
464
|
+
|
|
465
|
+
/// Applies an exposure-bias offset (EV) to the current camera on the
/// session queue.
///
/// - Parameter offset: Desired exposure target bias; values outside the
///   device's supported range are clamped.
@objc func setExposureOffset(_ offset: NSNumber) {
    sessionQueue.async { [weak self] in
        guard let camera = self?.currentCamera else { return }

        // Fix: setExposureTargetBias(_:) raises an Objective-C NSException
        // (not a Swift error) when the value lies outside
        // minExposureTargetBias...maxExposureTargetBias, so clamp the
        // JS-supplied value defensively.
        let bias = min(max(offset.floatValue, camera.minExposureTargetBias),
                       camera.maxExposureTargetBias)

        do {
            try camera.lockForConfiguration()
            camera.setExposureTargetBias(bias, completionHandler: nil)
            camera.unlockForConfiguration()
        } catch {
            print("Failed to set exposure: \(error)")
        }
    }
}
|
|
480
|
+
|
|
481
|
+
/// Starts a movie-file recording into a fresh temporary file, on the session
/// queue. Silently aborts when the session is not running, the movie output
/// is missing, or there is no active video connection. Any previous in-flight
/// recording is stopped and briefly waited on before the new one begins;
/// `onRecordingError` fires if the old one never finishes.
@objc func startRecording() {
    sessionQueue.async { [weak self] in
        guard let self = self else { return }
        guard let session = self.captureSession, session.isRunning else { return }
        guard let movieOutput = self.movieFileOutput else { return }

        // Check actual AVFoundation recording state, not just our own flag.
        if movieOutput.isRecording {
            movieOutput.stopRecording()
        }

        // Wait (max 50 × 20ms ≈ 1s) for any in-progress recording to finish
        // finalizing. NOTE(review): this blocks sessionQueue with
        // Thread.sleep; tolerable only because all camera work is funneled
        // through this same serial queue — confirm nothing latency-critical
        // shares it.
        var waitAttempts = 0
        while movieOutput.isRecording && waitAttempts < 50 {
            Thread.sleep(forTimeInterval: 0.02)
            waitAttempts += 1
        }

        // Still recording after the grace period — surface an error to JS.
        if movieOutput.isRecording {
            DispatchQueue.main.async { [weak self] in
                self?.onRecordingError?(["error": "Previous recording still in progress"])
            }
            return
        }

        // Verify a video connection exists before handing AVFoundation a URL.
        guard movieOutput.connection(with: .video) != nil else { return }

        // Create a unique temporary file URL for this recording.
        let tempDir = NSTemporaryDirectory()
        let fileName = "recording_\(Date().timeIntervalSince1970).mp4"
        let fileURL = URL(fileURLWithPath: tempDir).appendingPathComponent(fileName)

        // Remove existing file if any (AVFoundation will not overwrite).
        if FileManager.default.fileExists(atPath: fileURL.path) {
            try? FileManager.default.removeItem(at: fileURL)
        }

        // Reset cancellation/fallback bookkeeping before starting, so stale
        // state from a previous recording cannot leak into this one.
        self.isCancelledRecording = false
        self.finishFallbackWorkItem?.cancel()
        self.finishFallbackWorkItem = nil
        self.finishFallbackAttempts = 0
        self.currentRecordingURL = fileURL
        self.isRecording = true
        movieOutput.startRecording(to: fileURL, recordingDelegate: self)
    }
}
|
|
528
|
+
|
|
529
|
+
/// Stops the active recording (if any) on the session queue, then arms the
/// fallback timer so JS always receives a finish callback eventually.
@objc func stopRecording() {
    sessionQueue.async { [weak self] in
        guard let self = self, let output = self.movieFileOutput else { return }

        // Stop only when a recording is in flight, per either our own flag
        // or AVFoundation's state.
        if self.isRecording || output.isRecording {
            output.stopRecording()
        }

        // Safety net in case the AVFoundation delegate never fires.
        self.scheduleFinishFallbackCheck()
    }
}
|
|
543
|
+
|
|
544
|
+
/// Cancels the active recording: marks it as cancelled so the delegate (and
/// the fallback check) discard the file instead of reporting success.
@objc func cancelRecording() {
    sessionQueue.async { [weak self] in
        guard let self = self, let output = self.movieFileOutput else { return }

        // Nothing to cancel unless a recording is in flight per our flag or
        // AVFoundation's own state.
        if !(self.isRecording || output.isRecording) { return }

        print("[TrustchexCameraView] *** cancelRecording: marking as cancelled ***")
        self.isCancelledRecording = true
        output.stopRecording()
        // Deliberately leave `isRecording` untouched here; the recording
        // delegate resets it, which avoids racing a concurrent startRecording.
    }
}
|
|
559
|
+
|
|
560
|
+
/// Deletes a previously produced recording file, if it exists, on the
/// session queue. Failures are logged and otherwise ignored.
@objc func deleteRecording(_ filePath: String) {
    sessionQueue.async {
        guard FileManager.default.fileExists(atPath: filePath) else { return }
        let fileURL = URL(fileURLWithPath: filePath)
        do {
            try FileManager.default.removeItem(at: fileURL)
            print("[TrustchexCameraView] Successfully deleted recording at: \(filePath)")
        } catch {
            print("[TrustchexCameraView] Failed to delete recording: \(error.localizedDescription)")
        }
    }
}
|
|
573
|
+
|
|
574
|
+
/// Schedules a 0.5-second safety-net check that fires `onRecordingFinished`
/// (or deletes a cancelled file) in case the AVFoundation recording delegate
/// never calls back. While the output is still finalizing it re-arms itself,
/// up to 6 attempts, then force-completes. The delegate cancels this work
/// item when it does fire (see fileOutput(_:didFinishRecordingTo:...)).
private func scheduleFinishFallbackCheck() {
    finishFallbackWorkItem?.cancel()

    let workItem = DispatchWorkItem { [weak self] in
        guard let self = self else { return }

        // Execute the check on the sessionQueue, which owns all recording state.
        self.sessionQueue.async {
            guard let movieOutput = self.movieFileOutput else { return }
            guard let outputURL = self.currentRecordingURL else { return }

            if movieOutput.isRecording {
                self.finishFallbackAttempts += 1
                if self.finishFallbackAttempts < 6 {
                    // Still finalizing — re-arm for another 0.5s.
                    self.scheduleFinishFallbackCheck()
                } else {
                    // Force completion after max attempts so JS is never left hanging.
                    self.finishFallbackAttempts = 0
                    if !self.isCancelledRecording {
                        DispatchQueue.main.async {
                            self.onRecordingFinished?(["path": outputURL.path])
                        }
                    }
                    self.isRecording = false
                    self.isCancelledRecording = false
                    self.currentRecordingURL = nil
                }
                return
            }

            self.finishFallbackAttempts = 0

            if self.isCancelledRecording {
                // Cancelled: discard the partial file, emit no callback.
                try? FileManager.default.removeItem(at: outputURL)
            } else {
                DispatchQueue.main.async { [weak self] in
                    self?.onRecordingFinished?(["path": outputURL.path])
                }
            }

            self.isRecording = false
            self.isCancelledRecording = false
            self.currentRecordingURL = nil
        }
    }

    finishFallbackWorkItem = workItem
    DispatchQueue.main.asyncAfter(deadline: .now() + 0.5, execute: workItem)
    print("[TrustchexCameraView] *** Fallback check scheduled for 0.5s from now ***")
}
|
|
624
|
+
|
|
625
|
+
/// Swaps the session's video input to the best camera at the requested
/// position ("front" → front, anything else → back), re-applies portrait
/// orientation on the output connections (they are recreated on input swap),
/// reconfigures focus/exposure, and updates preview mirroring.
/// Must run on `sessionQueue`.
private func switchCamera(to type: String) {
    guard let session = captureSession else { return }

    let position: AVCaptureDevice.Position = (type == "front") ? .front : .back

    // Select best camera for document scanning; silently bail out when the
    // device or its input cannot be created.
    guard let newCamera = selectBestCamera(for: position),
          let newInput = try? AVCaptureDeviceInput(device: newCamera) else {
        return
    }

    session.beginConfiguration()

    // Remove only the video input, preserve audio input.
    for input in session.inputs {
        if let deviceInput = input as? AVCaptureDeviceInput,
           deviceInput.device.hasMediaType(.video) {
            session.removeInput(deviceInput)
        }
    }

    // Add new video input.
    // NOTE(review): if canAddInput returns false the session is left with no
    // video input at all (the old one was already removed) — confirm this
    // failure mode is acceptable or should report an error.
    if session.canAddInput(newInput) {
        session.addInput(newInput)
    }

    // Re-apply portrait orientation on connections after the input swap.
    if let connection = videoOutput?.connection(with: .video) {
        connection.videoOrientation = .portrait
        if connection.isVideoStabilizationSupported {
            connection.preferredVideoStabilizationMode = .standard
        }
    }
    if let connection = movieFileOutput?.connection(with: .video) {
        connection.videoOrientation = .portrait
    }

    session.commitConfiguration()

    currentCamera = newCamera
    configureCameraSettings(newCamera)

    // Update mirroring on the preview layer (UI work → main queue).
    DispatchQueue.main.async { [weak self] in
        if let connection = self?.previewLayer?.connection {
            if connection.isVideoMirroringSupported {
                connection.isVideoMirrored = (type == "front")
            }
        }
    }
}
|
|
676
|
+
|
|
677
|
+
// MARK: - Events
|
|
678
|
+
/// Forwards a processed frame payload to JS through the `onFrameAvailable`
/// callback, wrapped under the "frame" key.
private func sendFrameEvent(frame: [String: Any]) {
    guard let emit = onFrameAvailable else { return }
    emit(["frame": frame])
}
|
|
681
|
+
|
|
682
|
+
/// Notifies JS that the camera is configured, reporting the device's
/// exposure-bias range so UI controls can be bounded correctly.
private func sendReadyEvent(minExposure: Double, maxExposure: Double) {
    let payload: [String: Any] = [
        "minExposureOffset": minExposure,
        "maxExposureOffset": maxExposure
    ]
    onCameraReady?(payload)
}
|
|
688
|
+
|
|
689
|
+
/// Reports a camera error message to JS through the `onCameraError` callback.
private func sendErrorEvent(error: String) {
    guard let emit = onCameraError else { return }
    emit(["error": error])
}
|
|
692
|
+
|
|
693
|
+
// MARK: - Cleanup
|
|
694
|
+
deinit {
    // Best-effort teardown of the capture pipeline when the view is released.
    // NOTE(review): stopRunning() is synchronous and can block briefly —
    // confirm deinit never happens on a latency-sensitive thread.
    captureSession?.stopRunning()
}
|
|
697
|
+
}
|
|
698
|
+
|
|
699
|
+
// MARK: - AVCaptureVideoDataOutputSampleBufferDelegate
|
|
700
|
+
extension TrustchexCameraView: AVCaptureVideoDataOutputSampleBufferDelegate {
|
|
701
|
+
|
|
702
|
+
/// Per-frame entry point on the serial `videoQueue`. Applies the detection
/// gate, the re-entrancy guard, and FPS throttling, then hands the buffer to
/// `processingQueue` so capture is never blocked by ML work.
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    autoreleasepool {
        // Skip everything unless frame processing plus at least one detector is on.
        let detectionActive = _frameProcessingEnabled
            && (enableFaceDetection || enableTextRecognition || enableBarcodeScanning)
        guard detectionActive else { return }

        // Re-entrancy guard, checked on the serial videoQueue.
        guard !isProcessing else { return }

        // Throttle to the configured minimum frame interval.
        let now = CACurrentMediaTime()
        guard now - lastFrameTime >= minFrameInterval else { return }
        lastFrameTime = now

        // Claim the in-flight slot (on videoQueue), then hop to the
        // background processing queue immediately.
        isProcessing = true
        processingQueue.async { [weak self] in
            guard let self = self else { return }
            self.processSampleBuffer(sampleBuffer)
        }
    }
}
|
|
727
|
+
|
|
728
|
+
/// Runs the enabled ML Kit detectors over one captured frame and emits a
/// single `onFrameAvailable` payload to JS, with all geometry normalized to
/// portrait space (width < height).
///
/// Runs on `processingQueue` and deliberately blocks it with semaphores
/// (2s timeout each) while the asynchronous ML Kit callbacks complete; the
/// pixel buffer stays locked for the whole duration. `resetProcessingState()`
/// is guaranteed by `defer`, re-opening frame intake on `videoQueue`.
private func processSampleBuffer(_ sampleBuffer: CMSampleBuffer) {
    guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
        resetProcessingState()
        return
    }

    let pixelWidth = CVPixelBufferGetWidth(pixelBuffer)
    let pixelHeight = CVPixelBufferGetHeight(pixelBuffer)

    // Report portrait dimensions (width < height) regardless of buffer layout.
    let portraitWidth = min(pixelWidth, pixelHeight)
    let portraitHeight = max(pixelWidth, pixelHeight)

    CVPixelBufferLockBaseAddress(pixelBuffer, .readOnly)
    // Ensure we unlock at the end of function and clear the in-flight flag.
    defer {
        CVPixelBufferUnlockBaseAddress(pixelBuffer, .readOnly)
        resetProcessingState()
    }

    let isBufferLandscape = pixelWidth > pixelHeight

    // Log VisionImage metadata to understand coordinate space.
    NSLog("[VisionImage] Buffer: \(pixelWidth)x\(pixelHeight) | Orientation: \(isBufferLandscape ? "landscape→portrait (.right)" : "portrait (.up)") | Will report as: \(portraitWidth)x\(portraitHeight)")

    // Create oriented CIImage for ML Kit processing (rotate landscape buffers
    // 90° clockwise into portrait).
    let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
    let orientedImage = isBufferLandscape ? ciImage.oriented(.right) : ciImage

    // Render the oriented CIImage to a CGImage so ML Kit sees the corrected
    // orientation directly.
    guard let cgImage = self.ciContext.createCGImage(orientedImage, from: orientedImage.extent) else {
        // NOTE(review): this call is redundant — the defer above already
        // resets processing state; harmless because the flag write is idempotent.
        resetProcessingState()
        return
    }
    let visionImage = VisionImage(image: UIImage(cgImage: cgImage))
    visionImage.orientation = .up // Already oriented correctly

    // Generate a JPEG base64 only when JS explicitly asked for the image.
    // NOTE: intentionally NOT auto-generated for face detection — the encode
    // is expensive and causes frame drops.
    var base64Image: String? = nil
    if includeBase64 {
        let uiImage = UIImage(cgImage: cgImage)
        if let data = uiImage.jpegData(compressionQuality: 0.75) {
            base64Image = data.base64EncodedString()
        }
    }

    // Synchronous fan-out: each detector signals its semaphore from its own
    // callback queue; the waits below sequence the writes to the result
    // arrays before we read them.
    let semaphore = DispatchSemaphore(value: 0)
    var facesArray: [[String: Any]] = []
    var textBlocksArray: [[String: Any]] = []
    var barcodesArray: [[String: Any]] = []

    if enableFaceDetection {
        // NOTE(review): this closure captures `self` strongly (via
        // self._cameraType); lifetime is bounded by the 2s semaphore timeout.
        faceDetector.process(visionImage) { faces, error in
            if let faces = faces {
                for face in faces {
                    var faceMap: [String: Any] = [:]
                    let isFront = self._cameraType == "front"
                    // Un-mirror the x coordinate for the front camera so JS
                    // sees coordinates matching the mirrored preview.
                    let faceX = isFront ? CGFloat(portraitWidth) - face.frame.origin.x - face.frame.width : face.frame.origin.x

                    faceMap["bounds"] = [
                        "x": Int(faceX),
                        "y": Int(face.frame.origin.y),
                        "width": Int(face.frame.width),
                        "height": Int(face.frame.height)
                    ]
                    faceMap["yawAngle"] = face.hasHeadEulerAngleY ? Double(face.headEulerAngleY) : 0.0
                    faceMap["pitchAngle"] = face.hasHeadEulerAngleX ? Double(face.headEulerAngleX) : 0.0
                    faceMap["rollAngle"] = face.hasHeadEulerAngleZ ? Double(face.headEulerAngleZ) : 0.0
                    if face.hasTrackingID {
                        faceMap["trackingId"] = face.trackingID
                    }
                    // Only include probability fields when available (matching Android behavior).
                    if face.hasSmilingProbability {
                        faceMap["smilingProbability"] = Double(face.smilingProbability)
                    }
                    if face.hasLeftEyeOpenProbability {
                        faceMap["leftEyeOpenProbability"] = Double(face.leftEyeOpenProbability)
                    }
                    if face.hasRightEyeOpenProbability {
                        faceMap["rightEyeOpenProbability"] = Double(face.rightEyeOpenProbability)
                    }
                    facesArray.append(faceMap)
                }
            }
            semaphore.signal()
        }
    } else {
        semaphore.signal()
    }

    // Text recognition.
    let textSemaphore = DispatchSemaphore(value: 0)
    var resultText = ""
    if enableTextRecognition {
        textRecognizer.process(visionImage) { text, error in
            if let text = text {
                resultText = text.text
                NSLog("[Text Recognition] Found \(text.blocks.count) blocks | Buffer: \(pixelWidth)x\(pixelHeight) | Portrait: \(portraitWidth)x\(portraitHeight) | Landscape: \(isBufferLandscape)")
                for block in text.blocks {
                    var blockMap: [String: Any] = ["text": block.text]
                    let bb = block.frame

                    // Even though the image handed to ML Kit was rotated to
                    // portrait, block coordinates may still arrive in landscape
                    // space; rotate them to portrait so they match face coords.
                    let blockX: Int
                    let blockY: Int
                    let blockWidth: Int
                    let blockHeight: Int

                    if isBufferLandscape {
                        // Rotate 90° clockwise: landscape (W×H) → portrait (H×W):
                        //   new_x = old_y
                        //   new_y = landscape_width - old_x - width
                        //   new_width = old_height, new_height = old_width
                        blockX = Int(bb.origin.y)
                        blockY = pixelWidth - Int(bb.origin.x) - Int(bb.width)
                        blockWidth = Int(bb.height)
                        blockHeight = Int(bb.width)
                        NSLog("[Text Block] '\(block.text.prefix(8))...' | ROTATED: landscape(\(Int(bb.origin.x)),\(Int(bb.origin.y)),\(Int(bb.width)),\(Int(bb.height))) → portrait(x:\(blockX) y:\(blockY) w:\(blockWidth) h:\(blockHeight))")
                    } else {
                        // Already portrait, use directly.
                        blockX = Int(bb.origin.x)
                        blockY = Int(bb.origin.y)
                        blockWidth = Int(bb.width)
                        blockHeight = Int(bb.height)
                        NSLog("[Text Block] '\(block.text.prefix(8))...' | DIRECT: x:\(blockX) y:\(blockY) w:\(blockWidth) h:\(blockHeight)")
                    }

                    blockMap["blockFrame"] = [
                        "x": blockX,
                        "y": blockY,
                        "width": blockWidth,
                        "height": blockHeight,
                        "boundingCenterX": blockX + blockWidth / 2,
                        "boundingCenterY": blockY + blockHeight / 2
                    ]
                    textBlocksArray.append(blockMap)
                }
            }
            textSemaphore.signal()
        }
    } else {
        textSemaphore.signal()
    }

    // Barcode scanning: reuse the results already captured by the native
    // AVCaptureMetadataOutput delegate (much faster than ML Kit barcodes).
    // NOTE(review): barcodeSemaphore is signaled but never waited on below —
    // effectively dead bookkeeping; could be removed.
    let barcodeSemaphore = DispatchSemaphore(value: 0)
    if enableBarcodeScanning {
        barcodesArray = lastDetectedBarcodes
        barcodeSemaphore.signal()
    } else {
        barcodeSemaphore.signal()
    }

    // Wait for the async vision tasks (with timeout to prevent a hang).
    _ = semaphore.wait(timeout: .now() + 2.0)
    _ = textSemaphore.wait(timeout: .now() + 2.0)

    // MRZ validation (if enabled and text was recognized).
    var mrzResultDict: [String: Any]? = nil
    if enableMrzValidation && enableTextRecognition && !resultText.isEmpty {
        let mrzResult = MRZValidator().validateWithCorrections(resultText)
        mrzResultDict = mrzResult.toDictionary()
    }

    // Brightness restricted to the document-scanning frame area
    // (36% top/bottom, 5% side margins) — see computeBrightness.
    let brightness = computeBrightness(from: pixelBuffer, width: portraitWidth, height: portraitHeight)

    let currentTime = CACurrentMediaTime() * 1000 // Convert to milliseconds to match Android

    var frameData: [String: Any] = [
        "width": portraitWidth,
        "height": portraitHeight,
        "orientation": 0, // Already corrected to portrait
        "timestamp": currentTime,
        "faces": facesArray,
        "brightness": brightness
    ]

    if let base64 = base64Image {
        frameData["base64Image"] = base64
    }

    // Only include text/barcode data when the corresponding feature is on.
    if enableTextRecognition {
        frameData["resultText"] = resultText
        frameData["textBlocks"] = textBlocksArray
    }
    if enableBarcodeScanning {
        frameData["barcodes"] = barcodesArray
    }
    if let mrzDict = mrzResultDict {
        frameData["mrzResult"] = mrzDict
    }

    DispatchQueue.main.async { [weak self] in
        self?.onFrameAvailable?(["frame": frameData])
    }
}
|
|
937
|
+
|
|
938
|
+
/// Clears the in-flight frame flag on `videoQueue` — the queue that owns it —
/// so `captureOutput` can admit the next frame.
private func resetProcessingState() {
    videoQueue.async { [weak self] in
        self?.isProcessing = false
    }
}
|
|
943
|
+
|
|
944
|
+
/// Estimates average luma inside the document-scanning frame by sampling 100
/// points of the pixel buffer's plane 0.
///
/// - Parameters:
///   - pixelBuffer: Already-locked buffer; plane 0 is treated as the Y (luma)
///     plane — assumes a planar YUV pixel format, TODO confirm the session's
///     video settings guarantee this.
///   - width: Portrait-space frame width as reported to JS.
///   - height: Portrait-space frame height as reported to JS.
/// - Returns: Mean sample value in 0–255; 128.0 when the plane is unavailable.
private func computeBrightness(from pixelBuffer: CVPixelBuffer, width: Int, height: Int) -> Double {
    guard let yPlane = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0) else { return 128.0 }
    let pixelWidth = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0)
    let pixelHeight = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0)
    let yBytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0)

    // Scanning-frame area (mirrors IdentityDocumentCamera.tsx styles):
    // top: 36%, left: 5%, right: 5%, bottom: 36%.
    let scanTopPercent = 0.36
    let scanBottomPercent = 0.36
    let scanLeftPercent = 0.05
    let scanRightPercent = 0.05

    // Convert percentages to pixel coordinates in the portrait frame.
    let scanTop = Int(Double(height) * scanTopPercent)
    let scanBottom = Int(Double(height) * (1.0 - scanBottomPercent))
    let scanLeft = Int(Double(width) * scanLeftPercent)
    let scanRight = Int(Double(width) * (1.0 - scanRightPercent))

    // Map portrait coordinates into buffer coordinates. The buffer is assumed
    // to be landscape, rotated 90° from the portrait frame:
    //   buffer_x = height - portrait_y
    //   buffer_y = portrait_x
    // NOTE(review): if the buffer were already portrait this mapping would be
    // wrong — confirm the session always delivers landscape buffers here.
    let bufferScanTop = scanLeft // portrait_x -> buffer_y
    let bufferScanBottom = scanRight // portrait_x -> buffer_y
    let bufferScanLeft = height - scanBottom // height - portrait_y -> buffer_x
    let bufferScanRight = height - scanTop // height - portrait_y -> buffer_x

    let ptr = yPlane.assumingMemoryBound(to: UInt8.self)
    var sum: UInt64 = 0
    let sampleCount = 100 // Sample 100 points within scanning frame

    for i in 0..<sampleCount {
        // Deterministic quasi-random spread via strides 17 and 23
        // (coprime with 100, so positions don't cluster).
        let relativeX = (i * 17) % sampleCount
        let relativeY = (i * 23) % sampleCount

        let x = bufferScanLeft + (relativeX * (bufferScanRight - bufferScanLeft)) / max(1, sampleCount - 1)
        let y = bufferScanTop + (relativeY * (bufferScanBottom - bufferScanTop)) / max(1, sampleCount - 1)

        // Clamp to buffer bounds.
        let clampedX = min(max(0, x), pixelWidth - 1)
        let clampedY = min(max(0, y), pixelHeight - 1)

        sum += UInt64(ptr[clampedY * yBytesPerRow + clampedX])
    }

    return Double(sum) / Double(sampleCount)
}
|
|
996
|
+
}
|
|
997
|
+
|
|
998
|
+
// MARK: - AVCaptureFileOutputRecordingDelegate
|
|
999
|
+
extension TrustchexCameraView: AVCaptureFileOutputRecordingDelegate {
|
|
1000
|
+
/// AVFoundation callback: the movie output has begun writing to `fileURL`.
/// No state change needed — `startRecording()` already set `isRecording`
/// before handing off to AVFoundation.
func fileOutput(_ output: AVCaptureFileOutput, didStartRecordingTo fileURL: URL, from connections: [AVCaptureConnection]) {
    // Recording started successfully
}
|
|
1003
|
+
|
|
1004
|
+
/// AVFoundation recording-finished callback. Cancels the fallback safety net,
/// resets recording state, then either discards a cancelled file or reports
/// success/failure to JS on the main queue.
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {

    // Cancel the fallback since the delegate actually fired.
    finishFallbackWorkItem?.cancel()
    finishFallbackWorkItem = nil
    finishFallbackAttempts = 0

    // Snapshot the cancellation flag (set by cancelRecording()) before reset.
    let wasCancelled = isCancelledRecording

    // Reset recording state up-front so a new startRecording can proceed.
    isRecording = false
    isCancelledRecording = false
    currentRecordingURL = nil

    // If recording was cancelled, clean up the temp file and don't send any callback.
    if wasCancelled {
        print("[TrustchexCameraView] *** Recording was cancelled, cleaning up temp file ***")
        try? FileManager.default.removeItem(at: outputFileURL)
        return
    }

    // An error can still mean a usable file: AVFoundation reports early stops
    // as an error with AVErrorRecordingSuccessfullyFinishedKey == true.
    var recordingSucceeded = (error == nil)
    if let error = error as NSError? {
        let successfullyFinished = (error.userInfo[AVErrorRecordingSuccessfullyFinishedKey as String] as? Bool) == true
        if successfullyFinished {
            recordingSucceeded = true
        }
    }

    // Fire exactly one JS callback on the main queue.
    if recordingSucceeded {
        DispatchQueue.main.async { [weak self] in
            self?.onRecordingFinished?(["path": outputFileURL.path])
        }
    } else {
        DispatchQueue.main.async { [weak self] in
            self?.onRecordingError?(["error": error?.localizedDescription ?? "Unknown recording error"])
        }
    }
}
|
|
1046
|
+
}
|
|
1047
|
+
|
|
1048
|
+
// MARK: - AVCaptureMetadataOutputObjectsDelegate (Native Barcode Scanner)
extension TrustchexCameraView: AVCaptureMetadataOutputObjectsDelegate {

    /// Receives machine-readable-code metadata from the capture session, converts
    /// each detection into an MLKit-compatible dictionary (format number, raw/display
    /// value, bounding box and corner points in portrait pixel coordinates) and
    /// publishes the batch via `lastDetectedBarcodes`.
    ///
    /// - Parameters:
    ///   - output: The metadata output that produced the objects.
    ///   - metadataObjects: Detected metadata; only machine-readable codes with a
    ///     decodable `stringValue` are processed, everything else is skipped.
    ///   - connection: The capture connection the metadata arrived on.
    ///
    /// NOTE(review): this delegate fires on the metadata output's callback queue and
    /// writes `lastDetectedBarcodes` without explicit synchronization — confirm its
    /// readers tolerate cross-queue access.
    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        // When scanning is disabled, clear any stale detections and do no work.
        guard enableBarcodeScanning else {
            lastDetectedBarcodes = []
            return
        }

        var barcodes: [[String: Any]] = []

        // Get the current capture session preset to determine frame dimensions.
        // Only the 1080p and 720p presets are mapped explicitly; anything else
        // (or a missing session) falls back to the 1080x1920 portrait assumption.
        let frameWidth: Int
        let frameHeight: Int
        if let preset = captureSession?.sessionPreset {
            switch preset {
            case .hd1920x1080:
                frameWidth = 1080 // Portrait mode
                frameHeight = 1920
            case .hd1280x720:
                frameWidth = 720 // Portrait mode
                frameHeight = 1280
            default:
                frameWidth = 1080
                frameHeight = 1920
            }
        } else {
            frameWidth = 1080
            frameHeight = 1920
        }

        for metadata in metadataObjects {
            guard let barcodeObject = metadata as? AVMetadataMachineReadableCodeObject,
                  let stringValue = barcodeObject.stringValue else {
                continue
            }

            // Map AVMetadataObject type to format number (matching MLKit format numbers)
            let format: Int
            let formatName: String
            switch barcodeObject.type {
            case .pdf417:
                format = 2048 // BarcodeFormat.PDF417
                formatName = "PDF417"
            case .qr:
                format = 256 // BarcodeFormat.qrCode
                formatName = "QR_CODE"
            case .code128:
                format = 128 // BarcodeFormat.code128
                formatName = "CODE_128"
            case .code39:
                format = 16 // BarcodeFormat.code39
                formatName = "CODE_39"
            case .ean13:
                format = 32 // BarcodeFormat.EAN13
                formatName = "EAN_13"
            case .ean8:
                format = 64 // BarcodeFormat.EAN8
                formatName = "EAN_8"
            case .aztec:
                format = 4096 // BarcodeFormat.aztec
                formatName = "AZTEC"
            case .dataMatrix:
                format = 512 // BarcodeFormat.dataMatrix
                formatName = "DATA_MATRIX"
            default:
                format = 0
                formatName = "UNKNOWN"
            }

            let valuePreview = String(stringValue.prefix(50))
            // FIX: the payload is untrusted scanner input — never interpolate it into
            // NSLog's format string (a literal "%" in a scanned code would be parsed
            // as a format specifier). Pass it through "%@" arguments instead; the
            // logged text is unchanged for well-formed payloads.
            NSLog("[Native Barcode] ✓ FAST DETECTION: %@ - %@", formatName, valuePreview)

            var barcodeMap: [String: Any] = [
                "rawValue": stringValue,
                "displayValue": stringValue,
                "format": format
            ]

            // CRITICAL: AVFoundation metadata coordinates are in a LANDSCAPE-RIGHT coordinate system
            // even when video orientation is portrait. This is because the sensor data is landscape.
            // For portrait video (1080x1920), metadata bounds are in (0-1) range where:
            // - X axis goes LEFT TO RIGHT (corresponds to portrait Y axis, top to bottom)
            // - Y axis goes TOP TO BOTTOM (corresponds to portrait X axis, left to right)
            // We need to rotate these coordinates 90° counter-clockwise to get portrait coordinates

            let bounds = barcodeObject.bounds

            // Transform from landscape-right (metadata) to portrait (video frame) coordinates
            // Landscape-right to Portrait transformation:
            // portrait_x = (1 - landscape_y) * frameWidth
            // portrait_y = landscape_x * frameHeight
            // portrait_width = landscape_height * frameWidth
            // portrait_height = landscape_width * frameHeight

            let portraitLeft = Int((1.0 - bounds.maxY) * CGFloat(frameWidth))
            let portraitTop = Int(bounds.minX * CGFloat(frameHeight))
            let portraitRight = Int((1.0 - bounds.minY) * CGFloat(frameWidth))
            let portraitBottom = Int(bounds.maxX * CGFloat(frameHeight))

            NSLog("[Native Barcode] Normalized bounds: (%.3f, %.3f) to (%.3f, %.3f)", bounds.minX, bounds.minY, bounds.maxX, bounds.maxY)
            // Numeric-only interpolation: cannot inject "%" so the format string is safe here.
            NSLog("[Native Barcode] Frame: \(frameWidth)x\(frameHeight), Portrait coords: (\(portraitLeft), \(portraitTop)) to (\(portraitRight), \(portraitBottom))")

            barcodeMap["boundingBox"] = [
                "left": portraitLeft,
                "top": portraitTop,
                "right": portraitRight,
                "bottom": portraitBottom
            ]

            barcodeMap["cornerPoints"] = barcodeObject.corners.map { point in
                // Transform each corner point from landscape-right to portrait
                let portraitX = Int((1.0 - point.y) * CGFloat(frameWidth))
                let portraitY = Int(point.x * CGFloat(frameHeight))
                return ["x": portraitX, "y": portraitY]
            }

            barcodes.append(barcodeMap)
        }

        lastDetectedBarcodes = barcodes
    }
}
|
|
1170
|
+
|
|
1171
|
+
// MARK: - Helper Extensions
extension Comparable {
    /// Returns this value restricted to the closed range `limits`:
    /// values below the range collapse to its lower bound, values above
    /// it collapse to its upper bound, and in-range values pass through.
    func clamped(to limits: ClosedRange<Self>) -> Self {
        if self < limits.lowerBound {
            return limits.lowerBound
        }
        if self > limits.upperBound {
            return limits.upperBound
        }
        return self
    }
}
|