omnipay-reactnative-sdk 1.2.1 → 1.2.2-beta.2
This diff shows the changes between publicly available package versions as published to their respective public registries, and is provided for informational purposes only.
- package/android/build.gradle +13 -0
- package/android/src/main/AndroidManifest.xml +5 -0
- package/android/src/main/java/com/omniretail/omnipay/LivenessCameraViewManager.java +116 -0
- package/android/src/main/java/com/omniretail/omnipay/LivenessDetectionModule.java +588 -0
- package/android/src/main/java/com/omniretail/omnipay/OmnipayActivityPackage.java +4 -1
- package/ios/LivenessCameraView.h +22 -0
- package/ios/LivenessCameraView.m +135 -0
- package/ios/LivenessCameraViewManager.h +12 -0
- package/ios/LivenessCameraViewManager.m +24 -0
- package/ios/LivenessDetectionModule.h +46 -0
- package/ios/LivenessDetectionModule.m +603 -0
- package/lib/commonjs/components/OmnipayProvider.js +10 -1
- package/lib/commonjs/components/OmnipayProvider.js.map +1 -1
- package/lib/commonjs/components/biometrics/FaceVerification.js +439 -0
- package/lib/commonjs/components/biometrics/FaceVerification.js.map +1 -0
- package/lib/commonjs/components/biometrics/LivenessCameraView.js +43 -0
- package/lib/commonjs/components/biometrics/LivenessCameraView.js.map +1 -0
- package/lib/commonjs/components/biometrics/LivenessDetection.js +252 -0
- package/lib/commonjs/components/biometrics/LivenessDetection.js.map +1 -0
- package/lib/commonjs/index.js +28 -0
- package/lib/commonjs/index.js.map +1 -1
- package/lib/module/components/OmnipayProvider.js +10 -1
- package/lib/module/components/OmnipayProvider.js.map +1 -1
- package/lib/module/components/biometrics/FaceVerification.js +429 -0
- package/lib/module/components/biometrics/FaceVerification.js.map +1 -0
- package/lib/module/components/biometrics/LivenessCameraView.js +38 -0
- package/lib/module/components/biometrics/LivenessCameraView.js.map +1 -0
- package/lib/module/components/biometrics/LivenessDetection.js +244 -0
- package/lib/module/components/biometrics/LivenessDetection.js.map +1 -0
- package/lib/module/index.js +5 -0
- package/lib/module/index.js.map +1 -1
- package/lib/typescript/components/OmnipayProvider.d.ts.map +1 -1
- package/lib/typescript/components/biometrics/FaceVerification.d.ts +12 -0
- package/lib/typescript/components/biometrics/FaceVerification.d.ts.map +1 -0
- package/lib/typescript/components/biometrics/LivenessCameraView.d.ts +22 -0
- package/lib/typescript/components/biometrics/LivenessCameraView.d.ts.map +1 -0
- package/lib/typescript/components/biometrics/LivenessDetection.d.ts +73 -0
- package/lib/typescript/components/biometrics/LivenessDetection.d.ts.map +1 -0
- package/lib/typescript/index.d.ts +3 -0
- package/lib/typescript/index.d.ts.map +1 -1
- package/omnipay-reactnative-sdk.podspec +47 -0
- package/package.json +3 -2
- package/src/components/OmnipayProvider.tsx +12 -0
- package/src/components/biometrics/FaceVerification.tsx +484 -0
- package/src/components/biometrics/LivenessCameraView.tsx +61 -0
- package/src/components/biometrics/LivenessDetection.ts +305 -0
- package/src/index.tsx +18 -0
package/android/src/main/java/com/omniretail/omnipay/LivenessDetectionModule.java
@@ -0,0 +1,588 @@
+package com.omniretail.omnipay;
+
+import android.app.Activity;
+import android.content.Context;
+import android.content.pm.PackageManager;
+import android.graphics.Bitmap;
+import android.graphics.BitmapFactory;
+import android.graphics.ImageFormat;
+import android.graphics.Matrix;
+import android.graphics.Rect;
+import android.graphics.YuvImage;
+import android.media.Image;
+import android.util.Base64;
+import android.util.Log;
+import android.view.View;
+import android.Manifest;
+
+import androidx.annotation.NonNull;
+import androidx.camera.core.CameraSelector;
+import androidx.camera.core.ImageAnalysis;
+import androidx.camera.core.ImageCapture;
+import androidx.camera.core.ImageCaptureException;
+import androidx.camera.core.ImageProxy;
+import androidx.camera.core.Preview;
+import androidx.camera.lifecycle.ProcessCameraProvider;
+import androidx.camera.view.PreviewView;
+import androidx.core.content.ContextCompat;
+import androidx.lifecycle.LifecycleOwner;
+
+import com.facebook.react.bridge.Arguments;
+import com.facebook.react.bridge.Promise;
+import com.facebook.react.bridge.ReactApplicationContext;
+import com.facebook.react.bridge.ReactContext;
+import com.facebook.react.bridge.ReactContextBaseJavaModule;
+import com.facebook.react.bridge.ReactMethod;
+import com.facebook.react.bridge.ReadableArray;
+import com.facebook.react.bridge.WritableMap;
+import com.facebook.react.modules.core.DeviceEventManagerModule;
+import com.google.common.util.concurrent.ListenableFuture;
+import com.google.mlkit.vision.common.InputImage;
+import com.google.mlkit.vision.face.Face;
+import com.google.mlkit.vision.face.FaceDetection;
+import com.google.mlkit.vision.face.FaceDetector;
+import com.google.mlkit.vision.face.FaceDetectorOptions;
+
+import java.io.ByteArrayOutputStream;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Queue;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.HashMap;
+import java.util.Map;
+
+public class LivenessDetectionModule extends ReactContextBaseJavaModule {
+  private static final String MODULE_NAME = "LivenessDetection";
+  private static final String TAG = "LivenessDetection";
+
+  // Events
+  private static final String EVENT_CHALLENGE_START = "onChallengeStart";
+  private static final String EVENT_CHALLENGE_SUCCESS = "onChallengeSuccess";
+  private static final String EVENT_CHALLENGE_FAILURE = "onChallengeFailure";
+  private static final String EVENT_ALL_CHALLENGES_COMPLETE = "onAllChallengesComplete";
+  private static final String EVENT_SCREENSHOT_CAPTURED = "onScreenshotCaptured";
+
+  // Challenge types
+  public enum ChallengeType {
+    SMILE("smile"),
+    BLINK("blink"),
+    TURN_LEFT("turn_left"),
+    TURN_RIGHT("turn_right");
+
+    private final String value;
+    ChallengeType(String value) { this.value = value; }
+    public String getValue() { return value; }
+  }
+
+  // Detection state
+  private boolean isDetectionActive = false;
+  private Queue<ChallengeType> pendingChallenges;
+  private ChallengeType currentChallenge;
+  private long challengeStartTime;
+  private static final long CHALLENGE_TIMEOUT = 10000; // 10 seconds
+
+  // Camera components
+  private ProcessCameraProvider cameraProvider;
+  private Preview preview;
+  private ImageAnalysis imageAnalysis;
+  private ImageCapture imageCapture;
+  private PreviewView previewView;
+  private ExecutorService cameraExecutor;
+
+  // ML Kit face detector
+  private FaceDetector faceDetector;
+
+  // Challenge validation
+  private boolean lastSmileState = false;
+  private boolean lastLeftEyeOpen = true;
+  private boolean lastRightEyeOpen = true;
+  private float lastHeadYaw = 0f;
+
+  // Detection thresholds
+  private static final float SMILE_THRESHOLD = 0.8f;
+  private static final float EYE_OPEN_THRESHOLD = 0.6f;
+  private static final float HEAD_YAW_THRESHOLD = 15f; // degrees
+
+  // Permission request code
+  private static final int CAMERA_PERMISSION_REQUEST_CODE = 1001;
+
+  // Permission promise
+  private Promise mPermissionPromise;
+
+  private ReactApplicationContext reactContext;
+
+  public LivenessDetectionModule(ReactApplicationContext reactContext) {
+    super(reactContext);
+    this.reactContext = reactContext;
+    this.cameraExecutor = Executors.newSingleThreadExecutor();
+
+    // Initialize ML Kit face detector
+    FaceDetectorOptions options = new FaceDetectorOptions.Builder()
+        .setPerformanceMode(FaceDetectorOptions.PERFORMANCE_MODE_FAST)
+        .setLandmarkMode(FaceDetectorOptions.LANDMARK_MODE_ALL)
+        .setClassificationMode(FaceDetectorOptions.CLASSIFICATION_MODE_ALL)
+        .build();
+
+    this.faceDetector = FaceDetection.getClient(options);
+  }
+
+  @Override
+  public String getName() {
+    return MODULE_NAME;
+  }
+
+  @Override
+  public Map<String, Object> getConstants() {
+    final Map<String, Object> constants = new HashMap<>();
+    constants.put("CHALLENGE_SMILE", ChallengeType.SMILE.getValue());
+    constants.put("CHALLENGE_BLINK", ChallengeType.BLINK.getValue());
+    constants.put("CHALLENGE_TURN_LEFT", ChallengeType.TURN_LEFT.getValue());
+    constants.put("CHALLENGE_TURN_RIGHT", ChallengeType.TURN_RIGHT.getValue());
+    return constants;
+  }
+
+  @ReactMethod
+  public void startLivenessDetection(ReadableArray challenges, Promise promise) {
+    if (isDetectionActive) {
+      promise.reject("ALREADY_ACTIVE", "Liveness detection is already active");
+      return;
+    }
+
+    // Check camera permission first
+    if (!hasCameraPermission()) {
+      promise.reject("CAMERA_PERMISSION_DENIED", "Camera permission is required for liveness detection");
+      return;
+    }
+
+    try {
+      // Parse challenges
+      pendingChallenges = new LinkedList<>();
+      for (int i = 0; i < challenges.size(); i++) {
+        String challengeStr = challenges.getString(i);
+        ChallengeType challenge = getChallengeFromString(challengeStr);
+        if (challenge != null) {
+          pendingChallenges.offer(challenge);
+        }
+      }
+
+      if (pendingChallenges.isEmpty()) {
+        promise.reject("INVALID_CHALLENGES", "No valid challenges provided");
+        return;
+      }
+
+      isDetectionActive = true;
+      startCamera();
+      startNextChallenge();
+      promise.resolve("Detection started");
+
+    } catch (Exception e) {
+      promise.reject("START_ERROR", "Failed to start liveness detection: " + e.getMessage());
+    }
+  }
+
+  @ReactMethod
+  public void checkCameraPermission(Promise promise) {
+    promise.resolve(hasCameraPermission());
+  }
+
+  @ReactMethod
+  public void requestCameraPermission(Promise promise) {
+    if (hasCameraPermission()) {
+      promise.resolve(true);
+      return;
+    }
+
+    Activity currentActivity = getCurrentActivity();
+    if (currentActivity == null) {
+      promise.reject("NO_ACTIVITY", "No current activity available");
+      return;
+    }
+
+    // Store promise for permission result
+    mPermissionPromise = promise;
+
+    // Request camera permission
+    currentActivity.requestPermissions(
+        new String[]{Manifest.permission.CAMERA},
+        CAMERA_PERMISSION_REQUEST_CODE
+    );
+  }
+
+  @ReactMethod
+  public void stopLivenessDetection() {
+    isDetectionActive = false;
+    currentChallenge = null;
+    if (pendingChallenges != null) {
+      pendingChallenges.clear();
+    }
+    stopCamera();
+  }
+
+  @ReactMethod
+  public void getCameraView(Promise promise) {
+    try {
+      Activity currentActivity = getCurrentActivity();
+      if (currentActivity == null) {
+        promise.reject("NO_ACTIVITY", "No current activity");
+        return;
+      }
+
+      if (previewView == null) {
+        previewView = new PreviewView(currentActivity);
+      }
+
+      promise.resolve("Camera view created");
+    } catch (Exception e) {
+      promise.reject("CAMERA_VIEW_ERROR", "Failed to create camera view: " + e.getMessage());
+    }
+  }
+
+  public PreviewView getPreviewView() {
+    return previewView;
+  }
+
+  private void startCamera() {
+    Activity currentActivity = getCurrentActivity();
+    if (currentActivity == null) return;
+
+    ListenableFuture<ProcessCameraProvider> cameraProviderFuture =
+        ProcessCameraProvider.getInstance(currentActivity);
+
+    cameraProviderFuture.addListener(() -> {
+      try {
+        cameraProvider = cameraProviderFuture.get();
+        bindCameraUseCases();
+      } catch (ExecutionException | InterruptedException e) {
+        Log.e(TAG, "Camera initialization failed", e);
+      }
+    }, ContextCompat.getMainExecutor(currentActivity));
+  }
+
+  private void bindCameraUseCases() {
+    Activity currentActivity = getCurrentActivity();
+    if (currentActivity == null || cameraProvider == null) return;
+
+    // Preview use case
+    preview = new Preview.Builder().build();
+    if (previewView != null) {
+      preview.setSurfaceProvider(previewView.getSurfaceProvider());
+    }
+
+    // Image capture use case for final screenshot
+    imageCapture = new ImageCapture.Builder().build();
+
+    // Image analysis use case for face detection
+    imageAnalysis = new ImageAnalysis.Builder()
+        .setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
+        .build();
+
+    imageAnalysis.setAnalyzer(cameraExecutor, this::analyzeImage);
+
+    // Camera selector (front-facing camera)
+    CameraSelector cameraSelector = CameraSelector.DEFAULT_FRONT_CAMERA;
+
+    try {
+      // Unbind all use cases before rebinding
+      cameraProvider.unbindAll();
+
+      // Bind use cases to camera
+      cameraProvider.bindToLifecycle(
+          (LifecycleOwner) currentActivity,
+          cameraSelector,
+          preview,
+          imageAnalysis,
+          imageCapture
+      );
+
+    } catch (Exception e) {
+      Log.e(TAG, "Camera binding failed", e);
+    }
+  }
+
+  private void analyzeImage(@NonNull ImageProxy imageProxy) {
+    if (!isDetectionActive || currentChallenge == null) {
+      imageProxy.close();
+      return;
+    }
+
+    @SuppressWarnings("UnsafeOptInUsageError")
+    Image mediaImage = imageProxy.getImage();
+    if (mediaImage != null) {
+      InputImage image = InputImage.fromMediaImage(
+          mediaImage,
+          imageProxy.getImageInfo().getRotationDegrees()
+      );
+
+      faceDetector.process(image)
+          .addOnSuccessListener(faces -> {
+            processFaceDetection(faces);
+            imageProxy.close();
+          })
+          .addOnFailureListener(e -> {
+            Log.e(TAG, "Face detection failed", e);
+            imageProxy.close();
+          });
+    } else {
+      imageProxy.close();
+    }
+  }
+
+  private void processFaceDetection(List<Face> faces) {
+    if (faces.isEmpty()) {
+      // No face detected
+      return;
+    }
+
+    Face face = faces.get(0); // Use first detected face
+
+    // Check challenge timeout
+    if (System.currentTimeMillis() - challengeStartTime > CHALLENGE_TIMEOUT) {
+      emitChallengeFailure("Challenge timeout");
+      startNextChallenge();
+      return;
+    }
+
+    // Validate current challenge
+    boolean challengeSuccess = false;
+
+    switch (currentChallenge) {
+      case SMILE:
+        challengeSuccess = validateSmileChallenge(face);
+        break;
+      case BLINK:
+        challengeSuccess = validateBlinkChallenge(face);
+        break;
+      case TURN_LEFT:
+        challengeSuccess = validateTurnLeftChallenge(face);
+        break;
+      case TURN_RIGHT:
+        challengeSuccess = validateTurnRightChallenge(face);
+        break;
+    }
+
+    if (challengeSuccess) {
+      emitChallengeSuccess();
+      startNextChallenge();
+    }
+  }
+
+  private boolean validateSmileChallenge(Face face) {
+    float smilingProbability = face.getSmilingProbability() != null ?
+        face.getSmilingProbability() : 0f;
+
+    boolean isSmiling = smilingProbability > SMILE_THRESHOLD;
+
+    // Detect transition from not smiling to smiling
+    if (isSmiling && !lastSmileState) {
+      lastSmileState = true;
+      return true;
+    }
+
+    lastSmileState = isSmiling;
+    return false;
+  }
+
+  private boolean validateBlinkChallenge(Face face) {
+    float leftEyeOpen = face.getLeftEyeOpenProbability() != null ?
+        face.getLeftEyeOpenProbability() : 1f;
+    float rightEyeOpen = face.getRightEyeOpenProbability() != null ?
+        face.getRightEyeOpenProbability() : 1f;
+
+    boolean leftClosed = leftEyeOpen < (1f - EYE_OPEN_THRESHOLD);
+    boolean rightClosed = rightEyeOpen < (1f - EYE_OPEN_THRESHOLD);
+    boolean bothClosed = leftClosed && rightClosed;
+
+    // Detect blink: transition from open to closed and back to open
+    if (lastLeftEyeOpen && lastRightEyeOpen && bothClosed) {
+      // Eyes just closed
+      lastLeftEyeOpen = false;
+      lastRightEyeOpen = false;
+    } else if (!lastLeftEyeOpen && !lastRightEyeOpen && !bothClosed) {
+      // Eyes opened after being closed - blink detected!
+      lastLeftEyeOpen = true;
+      lastRightEyeOpen = true;
+      return true;
+    }
+
+    return false;
+  }
+
+  private boolean validateTurnLeftChallenge(Face face) {
+    float headYaw = face.getHeadEulerAngleY();
+
+    // Positive yaw = head turned left (user's left)
+    if (headYaw > HEAD_YAW_THRESHOLD && Math.abs(lastHeadYaw) < HEAD_YAW_THRESHOLD) {
+      lastHeadYaw = headYaw;
+      return true;
+    }
+
+    lastHeadYaw = headYaw;
+    return false;
+  }
+
+  private boolean validateTurnRightChallenge(Face face) {
+    float headYaw = face.getHeadEulerAngleY();
+
+    // Negative yaw = head turned right (user's right)
+    if (headYaw < -HEAD_YAW_THRESHOLD && Math.abs(lastHeadYaw) < HEAD_YAW_THRESHOLD) {
+      lastHeadYaw = headYaw;
+      return true;
+    }
+
+    lastHeadYaw = headYaw;
+    return false;
+  }
+
+  private void startNextChallenge() {
+    if (pendingChallenges == null || pendingChallenges.isEmpty()) {
+      // All challenges completed - take final screenshot
+      captureScreenshot();
+      return;
+    }
+
+    currentChallenge = pendingChallenges.poll();
+    challengeStartTime = System.currentTimeMillis();
+
+    // Reset challenge-specific state
+    resetChallengeState();
+
+    emitChallengeStart();
+  }
+
+  private void resetChallengeState() {
+    lastSmileState = false;
+    lastLeftEyeOpen = true;
+    lastRightEyeOpen = true;
+    lastHeadYaw = 0f;
+  }
+
+  private void captureScreenshot() {
+    if (imageCapture == null) {
+      emitAllChallengesComplete(null);
+      return;
+    }
+
+    ImageCapture.OutputFileOptions outputFileOptions =
+        new ImageCapture.OutputFileOptions.Builder(
+            new java.io.File(reactContext.getCacheDir(), "liveness_screenshot.jpg")
+        ).build();
+
+    imageCapture.takePicture(
+        outputFileOptions,
+        ContextCompat.getMainExecutor(reactContext),
+        new ImageCapture.OnImageSavedCallback() {
+          @Override
+          public void onImageSaved(@NonNull ImageCapture.OutputFileResults output) {
+            try {
+              // Read the saved image and convert to base64
+              java.io.File imageFile = new java.io.File(reactContext.getCacheDir(), "liveness_screenshot.jpg");
+              byte[] imageBytes = java.nio.file.Files.readAllBytes(imageFile.toPath());
+              String base64Image = Base64.encodeToString(imageBytes, Base64.DEFAULT);
+
+              emitScreenshotCaptured(base64Image);
+              emitAllChallengesComplete(base64Image);
+
+              // Clean up
+              imageFile.delete();
+              stopLivenessDetection();
+
+            } catch (Exception e) {
+              Log.e(TAG, "Failed to process screenshot", e);
+              emitAllChallengesComplete(null);
+            }
+          }
+
+          @Override
+          public void onError(@NonNull ImageCaptureException exception) {
+            Log.e(TAG, "Screenshot capture failed", exception);
+            emitAllChallengesComplete(null);
+          }
+        }
+    );
+  }
+
+  private void stopCamera() {
+    if (cameraProvider != null) {
+      cameraProvider.unbindAll();
+    }
+  }
+
+  private ChallengeType getChallengeFromString(String challengeStr) {
+    for (ChallengeType challenge : ChallengeType.values()) {
+      if (challenge.getValue().equals(challengeStr)) {
+        return challenge;
+      }
+    }
+    return null;
+  }
+
+  // Event emission methods
+  private void emitChallengeStart() {
+    WritableMap params = Arguments.createMap();
+    params.putString("challenge", currentChallenge.getValue());
+    sendEvent(EVENT_CHALLENGE_START, params);
+  }
+
+  private void emitChallengeSuccess() {
+    WritableMap params = Arguments.createMap();
+    params.putString("challenge", currentChallenge.getValue());
+    sendEvent(EVENT_CHALLENGE_SUCCESS, params);
+  }
+
+  private void emitChallengeFailure(String reason) {
+    WritableMap params = Arguments.createMap();
+    params.putString("challenge", currentChallenge != null ? currentChallenge.getValue() : "unknown");
+    params.putString("reason", reason);
+    sendEvent(EVENT_CHALLENGE_FAILURE, params);
+  }
+
+  private void emitAllChallengesComplete(String screenshot) {
+    WritableMap params = Arguments.createMap();
+    params.putBoolean("success", true);
+    if (screenshot != null) {
+      params.putString("screenshot", screenshot);
+    }
+    sendEvent(EVENT_ALL_CHALLENGES_COMPLETE, params);
+  }
+
+  private void emitScreenshotCaptured(String screenshot) {
+    WritableMap params = Arguments.createMap();
+    params.putString("screenshot", screenshot);
+    sendEvent(EVENT_SCREENSHOT_CAPTURED, params);
+  }
+
+  private void sendEvent(String eventName, WritableMap params) {
+    reactContext
+        .getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter.class)
+        .emit(eventName, params);
+  }
+
+  // Permission helper methods
+  private boolean hasCameraPermission() {
+    return reactContext.checkSelfPermission(Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED;
+  }
+
+  // Handle permission request results
+  public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
+    if (requestCode == CAMERA_PERMISSION_REQUEST_CODE) {
+      if (mPermissionPromise != null) {
+        boolean granted = grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED;
+        mPermissionPromise.resolve(granted);
+        mPermissionPromise = null;
+      }
+    }
+  }
+
+  @Override
+  public void onCatalystInstanceDestroy() {
+    super.onCatalystInstanceDestroy();
+    if (cameraExecutor != null) {
+      cameraExecutor.shutdown();
+    }
+    stopLivenessDetection();
+  }
+}
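For orientation, the Java module above is what the new JavaScript APIs in this release sit on top of: it registers under the name "LivenessDetection", exports the four challenge constants, exposes start/stop and camera-permission methods, and reports progress through the five DeviceEventEmitter events defined at the top of the file. The sketch below shows one way the raw native interface could be driven from TypeScript; it talks to NativeModules directly and is illustrative only, since the package's own LivenessDetection.ts wrapper (added in this release but not shown in this excerpt) is presumably the intended entry point.

// Sketch only (not from the package): exercises the raw native module added above.
import { NativeModules, NativeEventEmitter } from 'react-native';

const { LivenessDetection } = NativeModules;
const emitter = new NativeEventEmitter(LivenessDetection);

export async function runLivenessSketch(): Promise<void> {
  // Method names, event names, and challenge strings match the Java module above.
  const hasPermission =
    (await LivenessDetection.checkCameraPermission()) ||
    (await LivenessDetection.requestCameraPermission());
  if (!hasPermission) return;

  const subscriptions = [
    emitter.addListener('onChallengeStart', (e: { challenge: string }) =>
      console.log('challenge started:', e.challenge)
    ),
    emitter.addListener('onChallengeSuccess', (e: { challenge: string }) =>
      console.log('challenge passed:', e.challenge)
    ),
    emitter.addListener('onChallengeFailure', (e: { challenge: string; reason: string }) =>
      console.log('challenge failed:', e.challenge, e.reason)
    ),
    emitter.addListener(
      'onAllChallengesComplete',
      (e: { success: boolean; screenshot?: string }) => {
        // screenshot is a base64-encoded JPEG when the final capture succeeded
        console.log('liveness complete:', e.success, Boolean(e.screenshot));
        subscriptions.forEach((s) => s.remove());
      }
    ),
  ];

  // Challenge values come from the ChallengeType enum / exported constants.
  await LivenessDetection.startLivenessDetection(['smile', 'blink', 'turn_left']);
}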
package/android/src/main/java/com/omniretail/omnipay/OmnipayActivityPackage.java
@@ -16,12 +16,15 @@ public class OmnipayActivityPackage implements ReactPackage {
   public List<NativeModule> createNativeModules(ReactApplicationContext reactContext) {
     List<NativeModule> modules = new ArrayList<>();
     modules.add(new OmnipayActivityModule(reactContext));
+    modules.add(new LivenessDetectionModule(reactContext));
     return modules;
   }
 
   @Override
   public List<ViewManager> createViewManagers(ReactApplicationContext reactContext) {
-
+    List<ViewManager> viewManagers = new ArrayList<>();
+    viewManagers.add(new LivenessCameraViewManager(reactContext));
+    return viewManagers;
   }
 
   // Deprecated RN 0.47
package/ios/LivenessCameraView.h
@@ -0,0 +1,22 @@
+//
+// LivenessCameraView.h
+// omnipay-reactnative-sdk
+//
+// Created by React Native Auto-generated
+//
+
+#import <UIKit/UIKit.h>
+#import <React/RCTComponent.h>
+#import <AVFoundation/AVFoundation.h>
+
+@class RCTBridge;
+
+@interface LivenessCameraView : UIView
+
+@property (nonatomic, strong) NSString *scaleType;
+@property (nonatomic, copy) RCTDirectEventBlock onCameraReady;
+@property (nonatomic, copy) RCTDirectEventBlock onCameraError;
+
+- (instancetype)initWithBridge:(RCTBridge *)bridge;
+
+@end