omnipay-reactnative-sdk 1.2.2-beta.9 → 1.2.3-beta.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +58 -48
- package/android/build.gradle +4 -7
- package/android/src/main/AndroidManifest.xml +0 -5
- package/android/src/main/java/com/omniretail/omnipay/FaceVerificationFrameProcessor.kt +111 -0
- package/android/src/main/java/com/omniretail/omnipay/OmnipayActivityPackage.java +6 -4
- package/ios/FaceVerificationFrameProcessor.swift +138 -0
- package/ios/FaceVerificationFrameProcessorPlugin.m +4 -0
- package/ios/OmnipayReactnativeSdk.m +5 -0
- package/ios/OmnipayReactnativeSdk.swift +10 -0
- package/ios/omnipay_reactnative_sdk.h +6 -0
- package/lib/commonjs/components/Button.js +68 -0
- package/lib/commonjs/components/Button.js.map +1 -0
- package/lib/commonjs/components/OmnipayProvider.js +7 -23
- package/lib/commonjs/components/OmnipayProvider.js.map +1 -1
- package/lib/commonjs/components/biometrics/FaceVerification.js +294 -270
- package/lib/commonjs/components/biometrics/FaceVerification.js.map +1 -1
- package/lib/commonjs/components/biometrics/useFaceVerification.js +85 -0
- package/lib/commonjs/components/biometrics/useFaceVerification.js.map +1 -0
- package/lib/commonjs/components/biometrics/useFaceVerificationFlow.js +157 -0
- package/lib/commonjs/components/biometrics/useFaceVerificationFlow.js.map +1 -0
- package/lib/commonjs/index.js +0 -33
- package/lib/commonjs/index.js.map +1 -1
- package/lib/module/components/Button.js +61 -0
- package/lib/module/components/Button.js.map +1 -0
- package/lib/module/components/OmnipayProvider.js +7 -23
- package/lib/module/components/OmnipayProvider.js.map +1 -1
- package/lib/module/components/biometrics/FaceVerification.js +294 -271
- package/lib/module/components/biometrics/FaceVerification.js.map +1 -1
- package/lib/module/components/biometrics/useFaceVerification.js +78 -0
- package/lib/module/components/biometrics/useFaceVerification.js.map +1 -0
- package/lib/module/components/biometrics/useFaceVerificationFlow.js +150 -0
- package/lib/module/components/biometrics/useFaceVerificationFlow.js.map +1 -0
- package/lib/module/index.js +0 -6
- package/lib/module/index.js.map +1 -1
- package/lib/typescript/components/Button.d.ts +17 -0
- package/lib/typescript/components/Button.d.ts.map +1 -0
- package/lib/typescript/components/OmnipayProvider.d.ts.map +1 -1
- package/lib/typescript/components/biometrics/FaceVerification.d.ts +1 -3
- package/lib/typescript/components/biometrics/FaceVerification.d.ts.map +1 -1
- package/lib/typescript/components/biometrics/useFaceVerification.d.ts +38 -0
- package/lib/typescript/components/biometrics/useFaceVerification.d.ts.map +1 -0
- package/lib/typescript/components/biometrics/useFaceVerificationFlow.d.ts +29 -0
- package/lib/typescript/components/biometrics/useFaceVerificationFlow.d.ts.map +1 -0
- package/lib/typescript/index.d.ts +0 -2
- package/lib/typescript/index.d.ts.map +1 -1
- package/omnipay_reactnative_sdk.podspec +46 -0
- package/package.json +5 -8
- package/src/components/Button.tsx +86 -0
- package/src/components/OmnipayProvider.tsx +7 -24
- package/src/components/biometrics/FaceVerification.tsx +315 -309
- package/src/components/biometrics/useFaceVerification.ts +120 -0
- package/src/components/biometrics/useFaceVerificationFlow.ts +224 -0
- package/src/index.tsx +0 -7
- package/android/src/main/java/com/omniretail/omnipay/OmnipayLivenessCameraView.java +0 -153
- package/android/src/main/java/com/omniretail/omnipay/OmnipayLivenessCameraViewManager.java +0 -49
- package/android/src/main/java/com/omniretail/omnipay/OmnipayLivenessModule.java +0 -557
- package/ios/OmnipayLivenessCameraView.h +0 -15
- package/ios/OmnipayLivenessCameraView.m +0 -80
- package/ios/OmnipayLivenessCameraViewManager.m +0 -19
- package/ios/OmnipayLivenessModule.h +0 -38
- package/ios/OmnipayLivenessModule.m +0 -615
- package/lib/commonjs/components/biometrics/LivenessDetection.js +0 -149
- package/lib/commonjs/components/biometrics/LivenessDetection.js.map +0 -1
- package/lib/commonjs/components/biometrics/OmnipayLivenessCameraView.js +0 -15
- package/lib/commonjs/components/biometrics/OmnipayLivenessCameraView.js.map +0 -1
- package/lib/commonjs/components/biometrics/PermissionManager.js +0 -279
- package/lib/commonjs/components/biometrics/PermissionManager.js.map +0 -1
- package/lib/commonjs/components/biometrics/index.js +0 -45
- package/lib/commonjs/components/biometrics/index.js.map +0 -1
- package/lib/commonjs/components/biometrics/types.js +0 -17
- package/lib/commonjs/components/biometrics/types.js.map +0 -1
- package/lib/module/components/biometrics/LivenessDetection.js +0 -129
- package/lib/module/components/biometrics/LivenessDetection.js.map +0 -1
- package/lib/module/components/biometrics/OmnipayLivenessCameraView.js +0 -7
- package/lib/module/components/biometrics/OmnipayLivenessCameraView.js.map +0 -1
- package/lib/module/components/biometrics/PermissionManager.js +0 -272
- package/lib/module/components/biometrics/PermissionManager.js.map +0 -1
- package/lib/module/components/biometrics/index.js +0 -12
- package/lib/module/components/biometrics/index.js.map +0 -1
- package/lib/module/components/biometrics/types.js +0 -16
- package/lib/module/components/biometrics/types.js.map +0 -1
- package/lib/typescript/components/biometrics/LivenessDetection.d.ts +0 -33
- package/lib/typescript/components/biometrics/LivenessDetection.d.ts.map +0 -1
- package/lib/typescript/components/biometrics/OmnipayLivenessCameraView.d.ts +0 -18
- package/lib/typescript/components/biometrics/OmnipayLivenessCameraView.d.ts.map +0 -1
- package/lib/typescript/components/biometrics/PermissionManager.d.ts +0 -58
- package/lib/typescript/components/biometrics/PermissionManager.d.ts.map +0 -1
- package/lib/typescript/components/biometrics/index.d.ts +0 -5
- package/lib/typescript/components/biometrics/index.d.ts.map +0 -1
- package/lib/typescript/components/biometrics/types.d.ts +0 -73
- package/lib/typescript/components/biometrics/types.d.ts.map +0 -1
- package/omnipay-reactnative-sdk.podspec +0 -50
- package/src/components/biometrics/LivenessDetection.ts +0 -178
- package/src/components/biometrics/OmnipayLivenessCameraView.tsx +0 -19
- package/src/components/biometrics/PermissionManager.ts +0 -317
- package/src/components/biometrics/index.ts +0 -11
- package/src/components/biometrics/types.ts +0 -86
package/README.md
CHANGED
|
@@ -1,17 +1,6 @@
|
|
|
1
1
|
# omnipay-reactnative-sdk
|
|
2
2
|
|
|
3
|
-
Omnipay react native sdk
|
|
4
|
-
|
|
5
|
-
## Features
|
|
6
|
-
|
|
7
|
-
- **Payment Processing** - Complete payment flow integration
|
|
8
|
-
- **User Registration & BVN Verification** - User onboarding flows
|
|
9
|
-
- **🆕 Liveness Detection** - Real-time face verification using ML Kit (Android) and Vision Framework (iOS)
|
|
10
|
-
- Smile detection
|
|
11
|
-
- Blink detection
|
|
12
|
-
- Head turn challenges
|
|
13
|
-
- Native camera integration
|
|
14
|
-
- Cross-platform support
|
|
3
|
+
Omnipay react native sdk
|
|
15
4
|
|
|
16
5
|
## Installation
|
|
17
6
|
|
|
@@ -25,13 +14,41 @@ yarn add omnipay-reactnative-sdk
|
|
|
25
14
|
yarn add react-native-select-contact react-native-webview react-native-share
|
|
26
15
|
```
|
|
27
16
|
|
|
28
|
-
|
|
17
|
+
The SDK also includes **built-in face verification** capabilities with **zero consumer setup required** - all native dependencies auto-link automatically.
|
|
18
|
+
|
|
19
|
+
**Important:** After installing dependencies:
|
|
20
|
+
|
|
21
|
+
```sh
|
|
22
|
+
# Install dependencies
|
|
23
|
+
yarn install
|
|
24
|
+
|
|
25
|
+
# Auto-link native dependencies and install pods (iOS)
|
|
26
|
+
cd ios && pod install && cd ..
|
|
27
|
+
```
|
|
28
|
+
|
|
29
|
+
**That's it!** Face verification works automatically when needed by the SDK.
|
|
30
|
+
|
|
31
|
+
### Android Permissions
|
|
29
32
|
|
|
30
33
|
Make sure your manifest files includes permission to read contacts
|
|
31
34
|
|
|
32
35
|
```sh
|
|
33
36
|
<uses-permission android:name="android.permission.READ_CONTACTS" />
|
|
37
|
+
```
|
|
38
|
+
|
|
39
|
+
For SDK face verification features, also add camera permission:
|
|
34
40
|
|
|
41
|
+
```sh
|
|
42
|
+
<uses-permission android:name="android.permission.CAMERA" />
|
|
43
|
+
```
|
|
44
|
+
|
|
45
|
+
### iOS Permissions
|
|
46
|
+
|
|
47
|
+
For SDK face verification features on iOS, add camera permission to your `Info.plist`:
|
|
48
|
+
|
|
49
|
+
```xml
|
|
50
|
+
<key>NSCameraUsageDescription</key>
|
|
51
|
+
<string>This app needs access to camera for face verification</string>
|
|
35
52
|
```
|
|
36
53
|
|
|
37
54
|
Also add this for Android 11+ support below the application tag in your AndroidManifest.xml file
|
|
@@ -112,6 +129,34 @@ initiateWallet({
|
|
|
112
129
|
});
|
|
113
130
|
```
|
|
114
131
|
|
|
132
|
+
## Face Verification
|
|
133
|
+
|
|
134
|
+
The SDK includes built-in **real-time face verification** capabilities for enhanced security during certain operations. This feature is **automatically triggered when needed** and requires **no additional implementation** from your side.
|
|
135
|
+
|
|
136
|
+
### How It Works (Internal SDK Feature)
|
|
137
|
+
|
|
138
|
+
The face verification system uses native ML libraries to perform real-time analysis:
|
|
139
|
+
|
|
140
|
+
1. **Face Detection**: Automatically detects faces in the camera frame
|
|
141
|
+
2. **Verification Steps**: Guides users through verification actions:
|
|
142
|
+
- **Position Face**: Center your face in the frame
|
|
143
|
+
- **Smile**: Show a genuine smile
|
|
144
|
+
- **Blink**: Blink both eyes
|
|
145
|
+
- **Turn Left**: Slowly turn head to the left
|
|
146
|
+
- **Turn Right**: Slowly turn head to the right
|
|
147
|
+
3. **Real-time Feedback**: Provides instant visual feedback and progress indicators
|
|
148
|
+
4. **Security**: Uses multiple biometric markers to prevent spoofing
|
|
149
|
+
|
|
150
|
+
### Technical Implementation
|
|
151
|
+
|
|
152
|
+
- **iOS**: Uses built-in Vision Framework for face landmark detection
|
|
153
|
+
- **Android**: Uses Google MLKit Face Detection API
|
|
154
|
+
- **Performance**: Runs at 30-60 FPS with native frame processors
|
|
155
|
+
- **Privacy**: All processing happens on-device, no data is sent to servers
|
|
156
|
+
- **Dependencies**: All required libraries are included automatically
|
|
157
|
+
|
|
158
|
+
**Note:** Face verification requires camera permissions, which the SDK handles automatically.
|
|
159
|
+
|
|
115
160
|
### Properties
|
|
116
161
|
|
|
117
162
|
#### OmnipayProvider Props
|
|
@@ -192,38 +237,3 @@ import { Omnipay } from 'omnipay-reactnative-sdk';
|
|
|
192
237
|
| phoneNumber | String | phone number of the customer |
|
|
193
238
|
| publicKey | String | public key of the company on omnipay |
|
|
194
239
|
| view | String | the view to render on the sdk |
|
|
195
|
-
|
|
196
|
-
## Liveness Detection
|
|
197
|
-
|
|
198
|
-
The SDK now includes advanced liveness detection capabilities for secure face verification.
|
|
199
|
-
|
|
200
|
-
### Quick Start
|
|
201
|
-
|
|
202
|
-
```typescript
|
|
203
|
-
import React from 'react';
|
|
204
|
-
import { OmnipayProvider } from 'omnipay-reactnative-sdk';
|
|
205
|
-
|
|
206
|
-
function MyApp() {
|
|
207
|
-
return (
|
|
208
|
-
<OmnipayProvider publicKey="your-public-key" env="dev" color="#007AFF">
|
|
209
|
-
<YourAppContent />
|
|
210
|
-
</OmnipayProvider>
|
|
211
|
-
);
|
|
212
|
-
}
|
|
213
|
-
```
|
|
214
|
-
|
|
215
|
-
Face verification with liveness detection is **automatically integrated** and will appear as part of the SDK flow when needed. No additional setup or modal management required!
|
|
216
|
-
|
|
217
|
-
### Features
|
|
218
|
-
|
|
219
|
-
- **Real-time face detection** using ML Kit (Android) and Vision Framework (iOS)
|
|
220
|
-
- **Multiple challenge types**: Smile, Blink, Head Turn Left/Right
|
|
221
|
-
- **Configurable timeouts** and challenge sequences
|
|
222
|
-
- **Native camera integration** with optimal performance
|
|
223
|
-
- **Automatic permission handling** - no additional setup required
|
|
224
|
-
- **Event-driven callbacks** for complete control
|
|
225
|
-
- **Base64 screenshot capture** after successful verification
|
|
226
|
-
|
|
227
|
-
### Documentation
|
|
228
|
-
|
|
229
|
-
For complete liveness detection documentation, see [LIVENESS_DETECTION.md](./LIVENESS_DETECTION.md)
|
package/android/build.gradle
CHANGED
|
@@ -49,12 +49,9 @@ repositories {
|
|
|
49
49
|
dependencies {
|
|
50
50
|
implementation "com.facebook.react:react-native:${safeExtGet('reactNativeVersion', '+')}"
|
|
51
51
|
|
|
52
|
-
//
|
|
53
|
-
implementation 'com.google.mlkit:face-detection:16.1.
|
|
52
|
+
// MLKit Face Detection for Android
|
|
53
|
+
implementation 'com.google.mlkit:face-detection:16.1.5'
|
|
54
54
|
|
|
55
|
-
//
|
|
56
|
-
implementation
|
|
57
|
-
implementation "androidx.camera:camera-camera2:1.3.1"
|
|
58
|
-
implementation "androidx.camera:camera-lifecycle:1.3.1"
|
|
59
|
-
implementation "androidx.camera:camera-view:1.3.1"
|
|
55
|
+
// Vision Camera for frame processor plugins
|
|
56
|
+
implementation project(':react-native-vision-camera')
|
|
60
57
|
}
|
|
@@ -1,11 +1,6 @@
|
|
|
1
1
|
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
|
|
2
2
|
package="com.omniretail.omnipay">
|
|
3
3
|
|
|
4
|
-
<!-- Camera permissions for liveness detection -->
|
|
5
|
-
<uses-permission android:name="android.permission.CAMERA" />
|
|
6
|
-
<uses-feature android:name="android.hardware.camera" android:required="false" />
|
|
7
|
-
<uses-feature android:name="android.hardware.camera.front" android:required="false" />
|
|
8
|
-
|
|
9
4
|
<application>
|
|
10
5
|
<service
|
|
11
6
|
android:name=".OmnipayActivityModule"
|
|
@@ -0,0 +1,111 @@
|
|
|
1
|
+
package com.omniretail.omnipay
|
|
2
|
+
|
|
3
|
+
import android.util.Log
|
|
4
|
+
import com.google.mlkit.vision.common.InputImage
|
|
5
|
+
import com.google.mlkit.vision.face.FaceDetection
|
|
6
|
+
import com.google.mlkit.vision.face.FaceDetectorOptions
|
|
7
|
+
import com.google.mlkit.vision.face.Face
|
|
8
|
+
import com.mrousavy.camera.core.FrameInvalidError
|
|
9
|
+
import com.mrousavy.camera.frameprocessor.Frame
|
|
10
|
+
import com.mrousavy.camera.frameprocessor.FrameProcessorPlugin
|
|
11
|
+
import com.mrousavy.camera.frameprocessor.VisionCameraProxy
|
|
12
|
+
|
|
13
|
+
class FaceVerificationFrameProcessor(proxy: VisionCameraProxy, options: Map<String, Any>?) : FrameProcessorPlugin() {
|
|
14
|
+
|
|
15
|
+
private val faceDetector = FaceDetection.getClient(
|
|
16
|
+
FaceDetectorOptions.Builder()
|
|
17
|
+
.setPerformanceMode(FaceDetectorOptions.PERFORMANCE_MODE_FAST)
|
|
18
|
+
.setLandmarkMode(FaceDetectorOptions.LANDMARK_MODE_ALL)
|
|
19
|
+
.setClassificationMode(FaceDetectorOptions.CLASSIFICATION_MODE_ALL)
|
|
20
|
+
.setMinFaceSize(0.15f)
|
|
21
|
+
.enableTracking()
|
|
22
|
+
.build()
|
|
23
|
+
)
|
|
24
|
+
|
|
25
|
+
override fun callback(frame: Frame, arguments: Map<String, Any>?): Any {
|
|
26
|
+
return try {
|
|
27
|
+
val results = mutableMapOf<String, Any>()
|
|
28
|
+
|
|
29
|
+
// Convert frame to InputImage
|
|
30
|
+
val image = InputImage.fromMediaImage(frame.image, frame.imageRotationDegrees)
|
|
31
|
+
|
|
32
|
+
// Process face detection synchronously
|
|
33
|
+
val task = faceDetector.process(image)
|
|
34
|
+
|
|
35
|
+
// Wait for result (this blocks the thread but that's OK for frame processors)
|
|
36
|
+
val faces = try {
|
|
37
|
+
// For simplicity, we'll use a blocking approach
|
|
38
|
+
// In production, you might want to handle this differently
|
|
39
|
+
while (!task.isComplete && !task.isCanceled) {
|
|
40
|
+
Thread.sleep(1)
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
if (task.isSuccessful) {
|
|
44
|
+
task.result
|
|
45
|
+
} else {
|
|
46
|
+
emptyList()
|
|
47
|
+
}
|
|
48
|
+
} catch (e: Exception) {
|
|
49
|
+
Log.e("FaceVerification", "Face detection failed: ${e.message}")
|
|
50
|
+
results["error"] = e.message ?: "Face detection failed"
|
|
51
|
+
return results
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
if (faces.isEmpty()) {
|
|
55
|
+
results["faceDetected"] = false
|
|
56
|
+
return results
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
// Process first detected face
|
|
60
|
+
val face = faces[0]
|
|
61
|
+
results["faceDetected"] = true
|
|
62
|
+
|
|
63
|
+
// Face bounding box
|
|
64
|
+
results["boundingBox"] = mapOf(
|
|
65
|
+
"left" to face.boundingBox.left,
|
|
66
|
+
"top" to face.boundingBox.top,
|
|
67
|
+
"right" to face.boundingBox.right,
|
|
68
|
+
"bottom" to face.boundingBox.bottom
|
|
69
|
+
)
|
|
70
|
+
|
|
71
|
+
// Smile detection
|
|
72
|
+
face.smilingProbability?.let { smilingProbability ->
|
|
73
|
+
results["isSmiling"] = smilingProbability > 0.7f
|
|
74
|
+
results["smileProbability"] = smilingProbability
|
|
75
|
+
}
|
|
76
|
+
|
|
77
|
+
// Eye detection (for blinking)
|
|
78
|
+
val leftEyeOpenProbability = face.leftEyeOpenProbability
|
|
79
|
+
val rightEyeOpenProbability = face.rightEyeOpenProbability
|
|
80
|
+
|
|
81
|
+
if (leftEyeOpenProbability != null && rightEyeOpenProbability != null) {
|
|
82
|
+
val leftEyeClosed = leftEyeOpenProbability < 0.3f
|
|
83
|
+
val rightEyeClosed = rightEyeOpenProbability < 0.3f
|
|
84
|
+
|
|
85
|
+
results["leftEyeClosed"] = leftEyeClosed
|
|
86
|
+
results["rightEyeClosed"] = rightEyeClosed
|
|
87
|
+
results["isBlinking"] = leftEyeClosed && rightEyeClosed
|
|
88
|
+
results["leftEyeOpenProbability"] = leftEyeOpenProbability
|
|
89
|
+
results["rightEyeOpenProbability"] = rightEyeOpenProbability
|
|
90
|
+
}
|
|
91
|
+
|
|
92
|
+
// Head pose detection
|
|
93
|
+
results["headPose"] = mapOf(
|
|
94
|
+
"yaw" to face.headEulerAngleY, // Left-right movement
|
|
95
|
+
"pitch" to face.headEulerAngleX, // Up-down movement
|
|
96
|
+
"roll" to face.headEulerAngleZ // Tilt movement
|
|
97
|
+
)
|
|
98
|
+
|
|
99
|
+
// Face tracking ID (useful for consistency across frames)
|
|
100
|
+
face.trackingId?.let { trackingId ->
|
|
101
|
+
results["trackingId"] = trackingId
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
results
|
|
105
|
+
|
|
106
|
+
} catch (e: Exception) {
|
|
107
|
+
Log.e("FaceVerification", "Unexpected error: ${e.message}")
|
|
108
|
+
mapOf("error" to (e.message ?: "Unknown error occurred"))
|
|
109
|
+
}
|
|
110
|
+
}
|
|
111
|
+
}
|
|
@@ -9,22 +9,24 @@ import com.facebook.react.bridge.JavaScriptModule;
|
|
|
9
9
|
import com.facebook.react.bridge.NativeModule;
|
|
10
10
|
import com.facebook.react.bridge.ReactApplicationContext;
|
|
11
11
|
import com.facebook.react.uimanager.ViewManager;
|
|
12
|
+
import com.mrousavy.camera.frameprocessor.FrameProcessorPluginRegistry;
|
|
12
13
|
|
|
13
14
|
public class OmnipayActivityPackage implements ReactPackage {
|
|
14
15
|
|
|
16
|
+
static {
|
|
17
|
+
FrameProcessorPluginRegistry.addFrameProcessorPlugin("detectFaces", FaceVerificationFrameProcessor::new);
|
|
18
|
+
}
|
|
19
|
+
|
|
15
20
|
@Override
|
|
16
21
|
public List<NativeModule> createNativeModules(ReactApplicationContext reactContext) {
|
|
17
22
|
List<NativeModule> modules = new ArrayList<>();
|
|
18
23
|
modules.add(new OmnipayActivityModule(reactContext));
|
|
19
|
-
modules.add(new OmnipayLivenessModule(reactContext));
|
|
20
24
|
return modules;
|
|
21
25
|
}
|
|
22
26
|
|
|
23
27
|
@Override
|
|
24
28
|
public List<ViewManager> createViewManagers(ReactApplicationContext reactContext) {
|
|
25
|
-
|
|
26
|
-
viewManagers.add(new OmnipayLivenessCameraViewManager(reactContext));
|
|
27
|
-
return viewManagers;
|
|
29
|
+
return Collections.emptyList();
|
|
28
30
|
}
|
|
29
31
|
|
|
30
32
|
// Deprecated RN 0.47
|
|
@@ -0,0 +1,138 @@
|
|
|
1
|
+
import VisionCamera
|
|
2
|
+
import Vision
|
|
3
|
+
import AVFoundation
|
|
4
|
+
|
|
5
|
+
@objc(FaceVerificationFrameProcessor)
|
|
6
|
+
public class FaceVerificationFrameProcessor: FrameProcessorPlugin {
|
|
7
|
+
|
|
8
|
+
public override init(proxy: VisionCameraProxyHolder, options: [AnyHashable : Any]! = [:]) {
|
|
9
|
+
super.init(proxy: proxy, options: options)
|
|
10
|
+
}
|
|
11
|
+
|
|
12
|
+
public override func callback(_ frame: Frame, withArguments arguments: [AnyHashable : Any]?) -> Any {
|
|
13
|
+
let buffer = frame.buffer
|
|
14
|
+
guard let imageBuffer = CMSampleBufferGetImageBuffer(buffer) else {
|
|
15
|
+
return ["error": "Failed to get image buffer"]
|
|
16
|
+
}
|
|
17
|
+
|
|
18
|
+
return performFaceDetection(on: imageBuffer)
|
|
19
|
+
}
|
|
20
|
+
|
|
21
|
+
private func performFaceDetection(on imageBuffer: CVImageBuffer) -> [String: Any] {
|
|
22
|
+
let request = VNDetectFaceLandmarksRequest()
|
|
23
|
+
let handler = VNImageRequestHandler(cvPixelBuffer: imageBuffer, options: [:])
|
|
24
|
+
|
|
25
|
+
do {
|
|
26
|
+
try handler.perform([request])
|
|
27
|
+
|
|
28
|
+
guard let observations = request.results as? [VNFaceObservation],
|
|
29
|
+
let face = observations.first else {
|
|
30
|
+
return ["faceDetected": false]
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
var results: [String: Any] = [
|
|
34
|
+
"faceDetected": true,
|
|
35
|
+
"boundingBox": [
|
|
36
|
+
"x": face.boundingBox.origin.x,
|
|
37
|
+
"y": face.boundingBox.origin.y,
|
|
38
|
+
"width": face.boundingBox.size.width,
|
|
39
|
+
"height": face.boundingBox.size.height
|
|
40
|
+
]
|
|
41
|
+
]
|
|
42
|
+
|
|
43
|
+
// Analyze facial features
|
|
44
|
+
if let landmarks = face.landmarks {
|
|
45
|
+
results = analyzeFacialFeatures(landmarks: landmarks, results: results)
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
// Calculate head pose
|
|
49
|
+
if let pose = calculateHeadPose(face: face) {
|
|
50
|
+
results["headPose"] = pose
|
|
51
|
+
}
|
|
52
|
+
|
|
53
|
+
return results
|
|
54
|
+
|
|
55
|
+
} catch {
|
|
56
|
+
return ["error": error.localizedDescription]
|
|
57
|
+
}
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
private func analyzeFacialFeatures(landmarks: VNFaceLandmarks2D, results: [String: Any]) -> [String: Any] {
|
|
61
|
+
var updatedResults = results
|
|
62
|
+
|
|
63
|
+
// Detect smile
|
|
64
|
+
if let mouth = landmarks.outerLips {
|
|
65
|
+
let isSmiling = detectSmile(mouthPoints: mouth.normalizedPoints)
|
|
66
|
+
updatedResults["isSmiling"] = isSmiling
|
|
67
|
+
}
|
|
68
|
+
|
|
69
|
+
// Detect blinks
|
|
70
|
+
if let leftEye = landmarks.leftEye, let rightEye = landmarks.rightEye {
|
|
71
|
+
let leftEyeClosed = detectEyeClosure(eyePoints: leftEye.normalizedPoints)
|
|
72
|
+
let rightEyeClosed = detectEyeClosure(eyePoints: rightEye.normalizedPoints)
|
|
73
|
+
|
|
74
|
+
updatedResults["leftEyeClosed"] = leftEyeClosed
|
|
75
|
+
updatedResults["rightEyeClosed"] = rightEyeClosed
|
|
76
|
+
updatedResults["isBlinking"] = leftEyeClosed && rightEyeClosed
|
|
77
|
+
}
|
|
78
|
+
|
|
79
|
+
return updatedResults
|
|
80
|
+
}
|
|
81
|
+
|
|
82
|
+
private func detectSmile(mouthPoints: [CGPoint]) -> Bool {
|
|
83
|
+
guard mouthPoints.count >= 6 else { return false }
|
|
84
|
+
|
|
85
|
+
// Calculate mouth corner heights vs center
|
|
86
|
+
let leftCorner = mouthPoints[0]
|
|
87
|
+
let rightCorner = mouthPoints[3]
|
|
88
|
+
let topCenter = mouthPoints[1]
|
|
89
|
+
let bottomCenter = mouthPoints[4]
|
|
90
|
+
|
|
91
|
+
let cornerHeight = (leftCorner.y + rightCorner.y) / 2
|
|
92
|
+
let centerHeight = (topCenter.y + bottomCenter.y) / 2
|
|
93
|
+
|
|
94
|
+
// Smile detection: corners higher than center
|
|
95
|
+
return cornerHeight < centerHeight - 0.01
|
|
96
|
+
}
|
|
97
|
+
|
|
98
|
+
private func detectEyeClosure(eyePoints: [CGPoint]) -> Bool {
|
|
99
|
+
guard eyePoints.count >= 6 else { return false }
|
|
100
|
+
|
|
101
|
+
// Calculate eye aspect ratio
|
|
102
|
+
let topPoints = Array(eyePoints[1...2])
|
|
103
|
+
let bottomPoints = Array(eyePoints[4...5])
|
|
104
|
+
let leftPoint = eyePoints[0]
|
|
105
|
+
let rightPoint = eyePoints[3]
|
|
106
|
+
|
|
107
|
+
let verticalDist1 = distance(topPoints[0], bottomPoints[0])
|
|
108
|
+
let verticalDist2 = distance(topPoints[1], bottomPoints[1])
|
|
109
|
+
let horizontalDist = distance(leftPoint, rightPoint)
|
|
110
|
+
|
|
111
|
+
let eyeAspectRatio = (verticalDist1 + verticalDist2) / (2.0 * horizontalDist)
|
|
112
|
+
|
|
113
|
+
// Eye is closed if aspect ratio is below threshold
|
|
114
|
+
return eyeAspectRatio < 0.2
|
|
115
|
+
}
|
|
116
|
+
|
|
117
|
+
private func calculateHeadPose(face: VNFaceObservation) -> [String: Double]? {
|
|
118
|
+
guard let yaw = face.yaw, let pitch = face.pitch, let roll = face.roll else {
|
|
119
|
+
return nil
|
|
120
|
+
}
|
|
121
|
+
|
|
122
|
+
let yawDegrees = Double(truncating: yaw) * 180.0 / Double.pi
|
|
123
|
+
let pitchDegrees = Double(truncating: pitch) * 180.0 / Double.pi
|
|
124
|
+
let rollDegrees = Double(truncating: roll) * 180.0 / Double.pi
|
|
125
|
+
|
|
126
|
+
return [
|
|
127
|
+
"yaw": yawDegrees, // Left-right head movement
|
|
128
|
+
"pitch": pitchDegrees, // Up-down head movement
|
|
129
|
+
"roll": rollDegrees // Head tilt
|
|
130
|
+
]
|
|
131
|
+
}
|
|
132
|
+
|
|
133
|
+
private func distance(_ point1: CGPoint, _ point2: CGPoint) -> Double {
|
|
134
|
+
let dx = point1.x - point2.x
|
|
135
|
+
let dy = point1.y - point2.y
|
|
136
|
+
return sqrt(Double(dx * dx + dy * dy))
|
|
137
|
+
}
|
|
138
|
+
}
|
|
@@ -0,0 +1,68 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
Object.defineProperty(exports, "__esModule", {
|
|
4
|
+
value: true
|
|
5
|
+
});
|
|
6
|
+
exports.default = void 0;
|
|
7
|
+
var _react = _interopRequireDefault(require("react"));
|
|
8
|
+
var _reactNative = require("react-native");
|
|
9
|
+
function _interopRequireDefault(e) { return e && e.__esModule ? e : { default: e }; }
|
|
10
|
+
const Button = ({
|
|
11
|
+
title,
|
|
12
|
+
onPress,
|
|
13
|
+
backgroundColor = '#007AFF',
|
|
14
|
+
borderColor,
|
|
15
|
+
textColor = 'white',
|
|
16
|
+
disabled = false,
|
|
17
|
+
loading = false,
|
|
18
|
+
style,
|
|
19
|
+
textStyle,
|
|
20
|
+
activeOpacity = 0.8
|
|
21
|
+
}) => {
|
|
22
|
+
const buttonStyle = {
|
|
23
|
+
...styles.button,
|
|
24
|
+
backgroundColor: disabled ? '#cccccc' : backgroundColor,
|
|
25
|
+
borderColor: disabled ? '#cccccc' : borderColor || backgroundColor,
|
|
26
|
+
...style
|
|
27
|
+
};
|
|
28
|
+
const finalTextStyle = {
|
|
29
|
+
...styles.buttonText,
|
|
30
|
+
color: disabled ? '#666666' : textColor,
|
|
31
|
+
...textStyle
|
|
32
|
+
};
|
|
33
|
+
return /*#__PURE__*/_react.default.createElement(_reactNative.TouchableOpacity, {
|
|
34
|
+
style: buttonStyle,
|
|
35
|
+
onPress: onPress,
|
|
36
|
+
disabled: disabled || loading,
|
|
37
|
+
activeOpacity: activeOpacity,
|
|
38
|
+
accessibilityRole: "button",
|
|
39
|
+
accessibilityLabel: title,
|
|
40
|
+
accessibilityState: {
|
|
41
|
+
disabled: disabled || loading
|
|
42
|
+
}
|
|
43
|
+
}, loading ? /*#__PURE__*/_react.default.createElement(_reactNative.ActivityIndicator, {
|
|
44
|
+
color: textColor,
|
|
45
|
+
size: "small"
|
|
46
|
+
}) : /*#__PURE__*/_react.default.createElement(_reactNative.Text, {
|
|
47
|
+
style: finalTextStyle
|
|
48
|
+
}, title));
|
|
49
|
+
};
|
|
50
|
+
const styles = _reactNative.StyleSheet.create({
|
|
51
|
+
button: {
|
|
52
|
+
borderRadius: 6,
|
|
53
|
+
paddingHorizontal: 12,
|
|
54
|
+
paddingVertical: 14,
|
|
55
|
+
borderWidth: 1,
|
|
56
|
+
alignItems: 'center',
|
|
57
|
+
justifyContent: 'center',
|
|
58
|
+
minHeight: 48
|
|
59
|
+
},
|
|
60
|
+
buttonText: {
|
|
61
|
+
color: 'white',
|
|
62
|
+
fontSize: 16,
|
|
63
|
+
fontWeight: '600',
|
|
64
|
+
paddingHorizontal: 30
|
|
65
|
+
}
|
|
66
|
+
});
|
|
67
|
+
var _default = exports.default = Button;
|
|
68
|
+
//# sourceMappingURL=Button.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"names":["_react","_interopRequireDefault","require","_reactNative","e","__esModule","default","Button","title","onPress","backgroundColor","borderColor","textColor","disabled","loading","style","textStyle","activeOpacity","buttonStyle","styles","button","finalTextStyle","buttonText","color","createElement","TouchableOpacity","accessibilityRole","accessibilityLabel","accessibilityState","ActivityIndicator","size","Text","StyleSheet","create","borderRadius","paddingHorizontal","paddingVertical","borderWidth","alignItems","justifyContent","minHeight","fontSize","fontWeight","_default","exports"],"sourceRoot":"../../../src","sources":["components/Button.tsx"],"mappings":";;;;;;AAAA,IAAAA,MAAA,GAAAC,sBAAA,CAAAC,OAAA;AACA,IAAAC,YAAA,GAAAD,OAAA;AAOsB,SAAAD,uBAAAG,CAAA,WAAAA,CAAA,IAAAA,CAAA,CAAAC,UAAA,GAAAD,CAAA,KAAAE,OAAA,EAAAF,CAAA;AAetB,MAAMG,MAA6B,GAAGA,CAAC;EACrCC,KAAK;EACLC,OAAO;EACPC,eAAe,GAAG,SAAS;EAC3BC,WAAW;EACXC,SAAS,GAAG,OAAO;EACnBC,QAAQ,GAAG,KAAK;EAChBC,OAAO,GAAG,KAAK;EACfC,KAAK;EACLC,SAAS;EACTC,aAAa,GAAG;AAClB,CAAC,KAAK;EACJ,MAAMC,WAAsB,GAAG;IAC7B,GAAGC,MAAM,CAACC,MAAM;IAChBV,eAAe,EAAEG,QAAQ,GAAG,SAAS,GAAGH,eAAe;IACvDC,WAAW,EAAEE,QAAQ,GAAG,SAAS,GAAGF,WAAW,IAAID,eAAe;IAClE,GAAGK;EACL,CAAC;EAED,MAAMM,cAAyB,GAAG;IAChC,GAAGF,MAAM,CAACG,UAAU;IACpBC,KAAK,EAAEV,QAAQ,GAAG,SAAS,GAAGD,SAAS;IACvC,GAAGI;EACL,CAAC;EAED,oBACEhB,MAAA,CAAAM,OAAA,CAAAkB,aAAA,CAACrB,YAAA,CAAAsB,gBAAgB;IACfV,KAAK,EAAEG,WAAY;IACnBT,OAAO,EAAEA,OAAQ;IACjBI,QAAQ,EAAEA,QAAQ,IAAIC,OAAQ;IAC9BG,aAAa,EAAEA,aAAc;IAC7BS,iBAAiB,EAAC,QAAQ;IAC1BC,kBAAkB,EAAEnB,KAAM;IAC1BoB,kBAAkB,EAAE;MAAEf,QAAQ,EAAEA,QAAQ,IAAIC;IAAQ;EAAE,GAErDA,OAAO,gBACNd,MAAA,CAAAM,OAAA,CAAAkB,aAAA,CAACrB,YAAA,CAAA0B,iBAAiB;IAACN,KAAK,EAAEX,SAAU;IAACkB,IAAI,EAAC;EAAO,CAAE,CAAC,gBAEpD9B,MAAA,CAAAM,OAAA,CAAAkB,aAAA,CAACrB,YAAA,CAAA4B,IAAI;IAAChB,KAAK,EAAEM;EAAe,GAAEb,KAAY,CAE5B,CAAC;AAEvB,CAAC;AAED,MAAMW,MAAM,GAAGa,uBAAU,CAACC,MAAM,CAAC;EAC/Bb,MAAM,EAAE;IACNc,YAAY,EAAE,CAAC;IACfC,iBAAiB,EAAE,EAAE;IACrBC,eAAe,EAAE,EAAE;IACnBC,WAAW,EAAE,CAAC;IACdC,UAAU,EAAE,QAAQ;IACpBC,cAAc,EAAE,QAAQ;IACxBC,SAAS,EAAE;EACb,CAAC;EACDlB,UAAU,EAAE;IACVC,KAAK,EAAE,OAAO;IACdkB,QAAQ,EAAE,EAAE;IACZC,UAAU,EAAE,KAAK;IACjBP,iBAAiB,EAAE;EACrB;AACF,CAAC,CAAC;AAAC,IAAAQ,QAAA,GAAAC,OAAA,CAAAtC,OAAA,GAEYC,MAAM","ignoreList":[]}
|
|
@@ -10,6 +10,7 @@ var _reactNativeWebview = _interopRequireDefault(require("react-native-webview")
|
|
|
10
10
|
var _functions = require("../functions");
|
|
11
11
|
var _reactNativeShare = _interopRequireDefault(require("react-native-share"));
|
|
12
12
|
var _FaceVerification = _interopRequireDefault(require("./biometrics/FaceVerification"));
|
|
13
|
+
var _Button = _interopRequireDefault(require("./Button"));
|
|
13
14
|
function _interopRequireDefault(e) { return e && e.__esModule ? e : { default: e }; }
|
|
14
15
|
function _interopRequireWildcard(e, t) { if ("function" == typeof WeakMap) var r = new WeakMap(), n = new WeakMap(); return (_interopRequireWildcard = function (e, t) { if (!t && e && e.__esModule) return e; var o, i, f = { __proto__: null, default: e }; if (null === e || "object" != typeof e && "function" != typeof e) return f; if (o = t ? n : r) { if (o.has(e)) return o.get(e); o.set(e, f); } for (const t in e) "default" !== t && {}.hasOwnProperty.call(e, t) && ((i = (o = Object.defineProperty) && Object.getOwnPropertyDescriptor(e, t)) && (i.get || i.set) ? o(f, t, i) : f[t] = e[t]); return f; })(e, t); }
|
|
15
16
|
let defaultValue = {
|
|
@@ -38,7 +39,7 @@ const OmnipayProvider = ({
|
|
|
38
39
|
(0, _react.useEffect)(() => {
|
|
39
40
|
setTimeout(() => {
|
|
40
41
|
setShowFaceVerification(true);
|
|
41
|
-
},
|
|
42
|
+
}, 1000);
|
|
42
43
|
}, []);
|
|
43
44
|
(0, _react.useEffect)(() => {
|
|
44
45
|
visibilityRef.current = isVisible;
|
|
@@ -234,16 +235,12 @@ const OmnipayProvider = ({
|
|
|
234
235
|
style: styles.errorContainer
|
|
235
236
|
}, /*#__PURE__*/_react.default.createElement(_reactNative.Text, {
|
|
236
237
|
style: styles.errorSubtitle
|
|
237
|
-
}, "Unable to open your wallet. Please try again"), /*#__PURE__*/_react.default.createElement(
|
|
238
|
-
|
|
238
|
+
}, "Unable to open your wallet. Please try again"), /*#__PURE__*/_react.default.createElement(_Button.default, {
|
|
239
|
+
title: "Retry",
|
|
239
240
|
onPress: reloadWebview,
|
|
240
|
-
|
|
241
|
-
|
|
242
|
-
|
|
243
|
-
}]
|
|
244
|
-
}, /*#__PURE__*/_react.default.createElement(_react.default.Fragment, null, /*#__PURE__*/_react.default.createElement(_reactNative.Text, {
|
|
245
|
-
style: styles.buttonText
|
|
246
|
-
}, "Retry")))))
|
|
241
|
+
backgroundColor: color,
|
|
242
|
+
borderColor: color
|
|
243
|
+
})))
|
|
247
244
|
})), webviewStatus === 'loading' && showWebview && /*#__PURE__*/_react.default.createElement(_reactNative.TouchableWithoutFeedback, null, /*#__PURE__*/_react.default.createElement(_reactNative.View, {
|
|
248
245
|
style: styles.webviewLoader
|
|
249
246
|
}, /*#__PURE__*/_react.default.createElement(_reactNative.ActivityIndicator, {
|
|
@@ -363,19 +360,6 @@ const styles = _reactNative.StyleSheet.create({
|
|
|
363
360
|
retryButton: {
|
|
364
361
|
minWidth: 160,
|
|
365
362
|
marginHorizontal: 'auto'
|
|
366
|
-
},
|
|
367
|
-
button: {
|
|
368
|
-
borderRadius: 6,
|
|
369
|
-
paddingHorizontal: 12,
|
|
370
|
-
paddingVertical: 14,
|
|
371
|
-
borderWidth: 1,
|
|
372
|
-
alignItems: 'center',
|
|
373
|
-
justifyContent: 'center'
|
|
374
|
-
},
|
|
375
|
-
buttonText: {
|
|
376
|
-
color: 'white',
|
|
377
|
-
fontSize: 16,
|
|
378
|
-
paddingHorizontal: 30
|
|
379
363
|
}
|
|
380
364
|
});
|
|
381
365
|
//# sourceMappingURL=OmnipayProvider.js.map
|