omnipay-reactnative-sdk 1.2.2-beta.9 → 1.2.3-beta.0
This diff shows the contents of publicly released package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
- package/README.md +43 -93
- package/android/build.gradle +0 -9
- package/android/src/main/AndroidManifest.xml +0 -5
- package/android/src/main/java/com/omniretail/omnipay/OmnipayActivityPackage.java +1 -4
- package/lib/commonjs/components/OmnipayProvider.js +1 -1
- package/lib/commonjs/components/biometrics/FaceVerification.js +235 -275
- package/lib/commonjs/components/biometrics/FaceVerification.js.map +1 -1
- package/lib/commonjs/index.js +0 -33
- package/lib/commonjs/index.js.map +1 -1
- package/lib/module/components/OmnipayProvider.js +1 -1
- package/lib/module/components/biometrics/FaceVerification.js +237 -277
- package/lib/module/components/biometrics/FaceVerification.js.map +1 -1
- package/lib/module/index.js +0 -6
- package/lib/module/index.js.map +1 -1
- package/lib/typescript/components/biometrics/FaceVerification.d.ts +1 -3
- package/lib/typescript/components/biometrics/FaceVerification.d.ts.map +1 -1
- package/lib/typescript/index.d.ts +0 -2
- package/lib/typescript/index.d.ts.map +1 -1
- package/package.json +4 -16
- package/src/components/OmnipayProvider.tsx +1 -1
- package/src/components/biometrics/FaceVerification.tsx +232 -317
- package/src/index.tsx +0 -7
- package/android/src/main/java/com/omniretail/omnipay/OmnipayLivenessCameraView.java +0 -153
- package/android/src/main/java/com/omniretail/omnipay/OmnipayLivenessCameraViewManager.java +0 -49
- package/android/src/main/java/com/omniretail/omnipay/OmnipayLivenessModule.java +0 -557
- package/ios/OmnipayLivenessCameraView.h +0 -15
- package/ios/OmnipayLivenessCameraView.m +0 -80
- package/ios/OmnipayLivenessCameraViewManager.m +0 -19
- package/ios/OmnipayLivenessModule.h +0 -38
- package/ios/OmnipayLivenessModule.m +0 -615
- package/lib/commonjs/components/biometrics/LivenessDetection.js +0 -149
- package/lib/commonjs/components/biometrics/LivenessDetection.js.map +0 -1
- package/lib/commonjs/components/biometrics/OmnipayLivenessCameraView.js +0 -15
- package/lib/commonjs/components/biometrics/OmnipayLivenessCameraView.js.map +0 -1
- package/lib/commonjs/components/biometrics/PermissionManager.js +0 -279
- package/lib/commonjs/components/biometrics/PermissionManager.js.map +0 -1
- package/lib/commonjs/components/biometrics/index.js +0 -45
- package/lib/commonjs/components/biometrics/index.js.map +0 -1
- package/lib/commonjs/components/biometrics/types.js +0 -17
- package/lib/commonjs/components/biometrics/types.js.map +0 -1
- package/lib/module/components/biometrics/LivenessDetection.js +0 -129
- package/lib/module/components/biometrics/LivenessDetection.js.map +0 -1
- package/lib/module/components/biometrics/OmnipayLivenessCameraView.js +0 -7
- package/lib/module/components/biometrics/OmnipayLivenessCameraView.js.map +0 -1
- package/lib/module/components/biometrics/PermissionManager.js +0 -272
- package/lib/module/components/biometrics/PermissionManager.js.map +0 -1
- package/lib/module/components/biometrics/index.js +0 -12
- package/lib/module/components/biometrics/index.js.map +0 -1
- package/lib/module/components/biometrics/types.js +0 -16
- package/lib/module/components/biometrics/types.js.map +0 -1
- package/lib/typescript/components/biometrics/LivenessDetection.d.ts +0 -33
- package/lib/typescript/components/biometrics/LivenessDetection.d.ts.map +0 -1
- package/lib/typescript/components/biometrics/OmnipayLivenessCameraView.d.ts +0 -18
- package/lib/typescript/components/biometrics/OmnipayLivenessCameraView.d.ts.map +0 -1
- package/lib/typescript/components/biometrics/PermissionManager.d.ts +0 -58
- package/lib/typescript/components/biometrics/PermissionManager.d.ts.map +0 -1
- package/lib/typescript/components/biometrics/index.d.ts +0 -5
- package/lib/typescript/components/biometrics/index.d.ts.map +0 -1
- package/lib/typescript/components/biometrics/types.d.ts +0 -73
- package/lib/typescript/components/biometrics/types.d.ts.map +0 -1
- package/omnipay-reactnative-sdk.podspec +0 -50
- package/src/components/biometrics/LivenessDetection.ts +0 -178
- package/src/components/biometrics/OmnipayLivenessCameraView.tsx +0 -19
- package/src/components/biometrics/PermissionManager.ts +0 -317
- package/src/components/biometrics/index.ts +0 -11
- package/src/components/biometrics/types.ts +0 -86
--- a/package/android/src/main/java/com/omniretail/omnipay/OmnipayLivenessModule.java
+++ /dev/null
@@ -1,557 +0,0 @@
-package com.omniretail.omnipay;
-
-import android.app.Activity;
-import android.content.Context;
-import android.graphics.Bitmap;
-import android.graphics.Matrix;
-import android.graphics.Rect;
-import android.util.Base64;
-import android.util.Log;
-import android.util.Size;
-
-import androidx.annotation.NonNull;
-import androidx.camera.core.CameraSelector;
-import androidx.camera.core.ImageAnalysis;
-import androidx.camera.core.ImageCapture;
-import androidx.camera.core.ImageCaptureException;
-import androidx.camera.core.ImageProxy;
-import androidx.camera.core.Preview;
-import androidx.camera.lifecycle.ProcessCameraProvider;
-import androidx.camera.view.PreviewView;
-import androidx.core.content.ContextCompat;
-import androidx.lifecycle.LifecycleOwner;
-
-import com.facebook.react.bridge.Arguments;
-import com.facebook.react.bridge.Promise;
-import com.facebook.react.bridge.ReactApplicationContext;
-import com.facebook.react.bridge.ReactContextBaseJavaModule;
-import com.facebook.react.bridge.ReactMethod;
-import com.facebook.react.bridge.ReadableMap;
-import com.facebook.react.bridge.WritableArray;
-import com.facebook.react.bridge.WritableMap;
-import com.facebook.react.modules.core.DeviceEventManagerModule;
-import com.google.android.gms.tasks.OnFailureListener;
-import com.google.android.gms.tasks.OnSuccessListener;
-import com.google.common.util.concurrent.ListenableFuture;
-import com.google.mlkit.vision.common.InputImage;
-import com.google.mlkit.vision.face.Face;
-import com.google.mlkit.vision.face.FaceDetection;
-import com.google.mlkit.vision.face.FaceDetector;
-import com.google.mlkit.vision.face.FaceDetectorOptions;
-
-import java.io.ByteArrayOutputStream;
-import java.util.List;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-
-public class OmnipayLivenessModule extends ReactContextBaseJavaModule {
-  private static final String TAG = "OmnipayLiveness";
-
-  // Challenge constants
-  private static final String CHALLENGE_SMILE = "smile";
-  private static final String CHALLENGE_BLINK = "blink";
-  private static final String CHALLENGE_TURN_LEFT = "turnLeft";
-  private static final String CHALLENGE_TURN_RIGHT = "turnRight";
-
-  // Detection thresholds
-  private static final float SMILE_THRESHOLD = 0.8f;
-  private static final float HEAD_TURN_THRESHOLD = 15.0f;
-  private static final int BLINK_FRAMES_THRESHOLD = 3;
-
-  private ReactApplicationContext reactContext;
-  private FaceDetector faceDetector;
-  private ExecutorService cameraExecutor;
-  private ProcessCameraProvider cameraProvider;
-  private ImageCapture imageCapture;
-  private PreviewView previewView;
-
-  // Detection state
-  private boolean isDetectionRunning = false;
-  private String currentChallenge = null;
-  private WritableArray challenges;
-  private int currentChallengeIndex = 0;
-  private long challengeStartTime = 0;
-  private int challengeTimeoutMs = 10000; // 10 seconds default
-
-  // Blink detection state
-  private boolean previousEyeOpenState = true;
-  private int blinkCounter = 0;
-  private int eyesClosedFrames = 0;
-
-  // Challenge completion tracking
-  private boolean challengeCompleted = false;
-
-  public OmnipayLivenessModule(ReactApplicationContext reactContext) {
-    super(reactContext);
-    this.reactContext = reactContext;
-    initializeFaceDetector();
-    this.cameraExecutor = Executors.newSingleThreadExecutor();
-  }
-
-  @Override
-  public String getName() {
-    return "OmnipayLivenessModule";
-  }
-
-  private void initializeFaceDetector() {
-    FaceDetectorOptions options = new FaceDetectorOptions.Builder()
-        .setPerformanceMode(FaceDetectorOptions.PERFORMANCE_MODE_FAST)
-        .setLandmarkMode(FaceDetectorOptions.LANDMARK_MODE_ALL)
-        .setClassificationMode(FaceDetectorOptions.CLASSIFICATION_MODE_ALL)
-        .setMinFaceSize(0.3f)
-        .enableTracking()
-        .build();
-
-    faceDetector = FaceDetection.getClient(options);
-  }
-
-  @ReactMethod
-  public void isSupported(Promise promise) {
-    try {
-      // Check if ML Kit face detection is available
-      boolean hasMLKit = true;
-      try {
-        // Try to create ML Kit face detector to verify it's available
-        FaceDetectorOptions options = new FaceDetectorOptions.Builder()
-            .setPerformanceMode(FaceDetectorOptions.PERFORMANCE_MODE_FAST)
-            .setLandmarkMode(FaceDetectorOptions.LANDMARK_MODE_ALL)
-            .setClassificationMode(FaceDetectorOptions.CLASSIFICATION_MODE_ALL)
-            .build();
-        FaceDetection.getClient(options);
-      } catch (Exception e) {
-        Log.e(TAG, "ML Kit not available", e);
-        hasMLKit = false;
-      }
-
-      // Check if device has camera capability (independent of permission)
-      boolean hasCamera = false;
-      try {
-        android.content.Context context = getReactApplicationContext();
-        android.content.pm.PackageManager pm = context.getPackageManager();
-        hasCamera = pm.hasSystemFeature(android.content.pm.PackageManager.FEATURE_CAMERA_ANY) ||
-            pm.hasSystemFeature(android.content.pm.PackageManager.FEATURE_CAMERA_FRONT);
-      } catch (Exception e) {
-        Log.e(TAG, "Error checking camera features", e);
-        hasCamera = false;
-      }
-
-      boolean isSupported = hasMLKit && hasCamera;
-
-      Log.i(TAG, "Liveness Detection Support Check:");
-      Log.i(TAG, " ML Kit Available: " + hasMLKit);
-      Log.i(TAG, " Camera Available: " + hasCamera);
-      Log.i(TAG, " Overall Support: " + isSupported);
-
-      promise.resolve(isSupported);
-    } catch (Exception e) {
-      Log.e(TAG, "Error checking support", e);
-      promise.resolve(false);
-    }
-  }
-
-  @ReactMethod
-  public void startLivenessDetection(ReadableMap config, Promise promise) {
-    try {
-      if (isDetectionRunning) {
-        promise.reject("DETECTION_RUNNING", "Detection is already running");
-        return;
-      }
-
-      // Parse configuration
-      parseConfig(config);
-
-      Activity activity = getCurrentActivity();
-      if (activity == null) {
-        promise.reject("NO_ACTIVITY", "No current activity available");
-        return;
-      }
-
-      // Initialize camera
-      initializeCamera(activity, promise);
-
-    } catch (Exception e) {
-      Log.e(TAG, "Error starting liveness detection", e);
-      promise.reject("START_ERROR", e.getMessage());
-    }
-  }
-
-  @ReactMethod
-  public void stopDetection(Promise promise) {
-    try {
-      stopLivenessDetection();
-      promise.resolve(null);
-    } catch (Exception e) {
-      Log.e(TAG, "Error stopping detection", e);
-      promise.reject("STOP_ERROR", e.getMessage());
-    }
-  }
-
-  private void parseConfig(ReadableMap config) {
-    // Parse challenges
-    if (config.hasKey("challenges")) {
-      challenges = config.getArray("challenges");
-    } else {
-      // Default challenges
-      challenges = Arguments.createArray();
-      challenges.pushString(CHALLENGE_SMILE);
-      challenges.pushString(CHALLENGE_BLINK);
-      challenges.pushString(CHALLENGE_TURN_LEFT);
-      challenges.pushString(CHALLENGE_TURN_RIGHT);
-    }
-
-    // Parse timeout
-    if (config.hasKey("challengeTimeout")) {
-      challengeTimeoutMs = config.getInt("challengeTimeout") * 1000;
-    }
-
-    currentChallengeIndex = 0;
-  }
-
-  private void initializeCamera(Activity activity, Promise promise) {
-    ListenableFuture<ProcessCameraProvider> cameraProviderFuture =
-        ProcessCameraProvider.getInstance(activity);
-
-    cameraProviderFuture.addListener(() -> {
-      try {
-        cameraProvider = cameraProviderFuture.get();
-        startCamera(activity, promise);
-      } catch (ExecutionException | InterruptedException e) {
-        Log.e(TAG, "Error getting camera provider", e);
-        promise.reject("CAMERA_ERROR", "Failed to initialize camera");
-      }
-    }, ContextCompat.getMainExecutor(activity));
-  }
-
-  private void startCamera(Activity activity, Promise promise) {
-    // Preview use case
-    Preview preview = new Preview.Builder().build();
-
-    // Image capture use case
-    imageCapture = new ImageCapture.Builder().build();
-
-    // Image analysis use case for face detection
-    ImageAnalysis imageAnalysis = new ImageAnalysis.Builder()
-        .setTargetResolution(new Size(640, 480))
-        .setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
-        .build();
-
-    imageAnalysis.setAnalyzer(cameraExecutor, this::analyzeImage);
-
-    // Select front camera
-    CameraSelector cameraSelector = CameraSelector.DEFAULT_FRONT_CAMERA;
-
-    try {
-      // Unbind use cases before rebinding
-      cameraProvider.unbindAll();
-
-      // Bind use cases to camera
-      cameraProvider.bindToLifecycle(
-          (LifecycleOwner) activity,
-          cameraSelector,
-          preview,
-          imageCapture,
-          imageAnalysis
-      );
-
-      isDetectionRunning = true;
-      startNextChallenge();
-      promise.resolve(null);
-
-    } catch (Exception e) {
-      Log.e(TAG, "Error starting camera", e);
-      promise.reject("CAMERA_START_ERROR", "Failed to start camera");
-    }
-  }
-
-  private void analyzeImage(ImageProxy imageProxy) {
-    if (!isDetectionRunning || currentChallenge == null) {
-      imageProxy.close();
-      return;
-    }
-
-    try {
-      InputImage image = InputImage.fromMediaImage(
-          imageProxy.getImage(),
-          imageProxy.getImageInfo().getRotationDegrees()
-      );
-
-      faceDetector.process(image)
-          .addOnSuccessListener(faces -> {
-            processFaces(faces);
-            imageProxy.close();
-          })
-          .addOnFailureListener(e -> {
-            Log.e(TAG, "Face detection failed", e);
-            imageProxy.close();
-          });
-
-    } catch (Exception e) {
-      Log.e(TAG, "Error analyzing image", e);
-      imageProxy.close();
-    }
-  }
-
-  private void processFaces(List<Face> faces) {
-    if (faces.isEmpty()) {
-      // No face detected
-      return;
-    }
-
-    Face face = faces.get(0); // Use the first detected face
-
-    // Check if challenge timeout exceeded
-    if (System.currentTimeMillis() - challengeStartTime > challengeTimeoutMs) {
-      onChallengeFailure("Challenge timeout");
-      return;
-    }
-
-    if (challengeCompleted) {
-      return;
-    }
-
-    switch (currentChallenge) {
-      case CHALLENGE_SMILE:
-        checkSmile(face);
-        break;
-      case CHALLENGE_BLINK:
-        checkBlink(face);
-        break;
-      case CHALLENGE_TURN_LEFT:
-        checkHeadTurn(face, -HEAD_TURN_THRESHOLD);
-        break;
-      case CHALLENGE_TURN_RIGHT:
-        checkHeadTurn(face, HEAD_TURN_THRESHOLD);
-        break;
-    }
-  }
-
-  private void checkSmile(Face face) {
-    float smilingProbability = face.getSmilingProbability() != null ?
-        face.getSmilingProbability() : 0f;
-
-    if (smilingProbability > SMILE_THRESHOLD) {
-      onChallengeSuccess();
-    }
-  }
-
-  private void checkBlink(Face face) {
-    float leftEyeOpenProbability = face.getLeftEyeOpenProbability() != null ?
-        face.getLeftEyeOpenProbability() : 1f;
-    float rightEyeOpenProbability = face.getRightEyeOpenProbability() != null ?
-        face.getRightEyeOpenProbability() : 1f;
-
-    boolean eyesOpen = leftEyeOpenProbability > 0.5f && rightEyeOpenProbability > 0.5f;
-
-    if (!eyesOpen) {
-      eyesClosedFrames++;
-    } else {
-      if (eyesClosedFrames >= BLINK_FRAMES_THRESHOLD && !previousEyeOpenState) {
-        blinkCounter++;
-        if (blinkCounter >= 1) { // Single blink required
-          onChallengeSuccess();
-        }
-      }
-      eyesClosedFrames = 0;
-    }
-
-    previousEyeOpenState = eyesOpen;
-  }
-
-  private void checkHeadTurn(Face face, float targetYaw) {
-    float headEulerAngleY = face.getHeadEulerAngleY();
-
-    if (targetYaw < 0) { // Turn left
-      if (headEulerAngleY < targetYaw) {
-        onChallengeSuccess();
-      }
-    } else { // Turn right
-      if (headEulerAngleY > targetYaw) {
-        onChallengeSuccess();
-      }
-    }
-  }
-
-  private void onChallengeSuccess() {
-    if (challengeCompleted) return;
-
-    challengeCompleted = true;
-    long duration = System.currentTimeMillis() - challengeStartTime;
-
-    // Emit challenge success event
-    WritableMap params = Arguments.createMap();
-    params.putString("challenge", currentChallenge);
-    WritableMap result = Arguments.createMap();
-    result.putString("challenge", currentChallenge);
-    result.putBoolean("success", true);
-    result.putDouble("duration", duration);
-    result.putDouble("confidence", 0.9); // Mock confidence for now
-    params.putMap("result", result);
-
-    sendEvent("onChallengeSuccess", params);
-
-    // Move to next challenge or complete
-    currentChallengeIndex++;
-    if (currentChallengeIndex < challenges.size()) {
-      // Delay before next challenge
-      cameraExecutor.execute(() -> {
-        try {
-          Thread.sleep(1000); // 1 second delay
-          startNextChallenge();
-        } catch (InterruptedException e) {
-          Thread.currentThread().interrupt();
-        }
-      });
-    } else {
-      // All challenges completed
-      onAllChallengesComplete();
-    }
-  }
-
-  private void onChallengeFailure(String reason) {
-    if (challengeCompleted) return;
-
-    challengeCompleted = true;
-
-    WritableMap params = Arguments.createMap();
-    params.putString("challenge", currentChallenge);
-    params.putString("reason", reason);
-
-    sendEvent("onChallengeFailure", params);
-
-    // Stop detection
-    stopLivenessDetection();
-
-    WritableMap failureParams = Arguments.createMap();
-    failureParams.putString("reason", reason);
-    sendEvent("onDetectionFailed", failureParams);
-  }
-
-  private void startNextChallenge() {
-    if (currentChallengeIndex >= challenges.size()) {
-      return;
-    }
-
-    currentChallenge = challenges.getString(currentChallengeIndex);
-    challengeStartTime = System.currentTimeMillis();
-    challengeCompleted = false;
-
-    // Reset blink detection state
-    if (CHALLENGE_BLINK.equals(currentChallenge)) {
-      blinkCounter = 0;
-      eyesClosedFrames = 0;
-      previousEyeOpenState = true;
-    }
-
-    // Emit challenge start event
-    WritableMap params = Arguments.createMap();
-    params.putString("challenge", currentChallenge);
-    sendEvent("onChallengeStart", params);
-  }
-
-  private void onAllChallengesComplete() {
-    sendEvent("onAllChallengesComplete", null);
-
-    // Capture final screenshot
-    captureScreenshot();
-  }
-
-  private void captureScreenshot() {
-    if (imageCapture == null) {
-      sendDetectionResult(false, null, "Failed to capture screenshot");
-      return;
-    }
-
-    imageCapture.takePicture(
-        ContextCompat.getMainExecutor(reactContext),
-        new ImageCapture.OnImageCapturedCallback() {
-          @Override
-          public void onCaptureSuccess(@NonNull ImageProxy image) {
-            // Convert to bitmap and encode as base64
-            String base64Image = convertImageToBase64(image);
-
-            WritableMap params = Arguments.createMap();
-            params.putString("screenshot", base64Image);
-            sendEvent("onScreenshotCaptured", params);
-
-            sendDetectionResult(true, base64Image, null);
-            image.close();
-          }
-
-          @Override
-          public void onError(@NonNull ImageCaptureException exception) {
-            Log.e(TAG, "Screenshot capture failed", exception);
-            sendDetectionResult(false, null, "Screenshot capture failed");
-          }
-        }
-    );
-  }
-
-  private String convertImageToBase64(ImageProxy image) {
-    // This is a simplified conversion - in real implementation,
-    // you'd need to properly convert ImageProxy to Bitmap
-    try {
-      // Mock base64 string for now
-      return "data:image/jpeg;base64,/9j/4AAQSkZJRgABAQAAAQ...";
-    } catch (Exception e) {
-      Log.e(TAG, "Error converting image to base64", e);
-      return "";
-    }
-  }
-
-  private void sendDetectionResult(boolean success, String screenshot, String failureReason) {
-    stopLivenessDetection();
-
-    WritableMap result = Arguments.createMap();
-    result.putBoolean("success", success);
-    if (screenshot != null) {
-      result.putString("screenshot", screenshot);
-    }
-    if (failureReason != null) {
-      result.putString("failureReason", failureReason);
-    }
-
-    // Create mock challenge results
-    WritableArray challengeResults = Arguments.createArray();
-    for (int i = 0; i < currentChallengeIndex; i++) {
-      WritableMap challengeResult = Arguments.createMap();
-      challengeResult.putString("challenge", challenges.getString(i));
-      challengeResult.putBoolean("success", true);
-      challengeResult.putDouble("duration", 2000); // Mock duration
-      challengeResult.putDouble("confidence", 0.9);
-      challengeResults.pushMap(challengeResult);
-    }
-    result.putArray("challengeResults", challengeResults);
-    result.putDouble("totalDuration", System.currentTimeMillis() - challengeStartTime);
-
-    sendEvent("onDetectionComplete", result);
-  }
-
-  private void stopLivenessDetection() {
-    isDetectionRunning = false;
-    currentChallenge = null;
-    currentChallengeIndex = 0;
-    challengeCompleted = false;
-
-    if (cameraProvider != null) {
-      cameraProvider.unbindAll();
-    }
-  }
-
-  private void sendEvent(String eventName, WritableMap params) {
-    reactContext
-        .getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter.class)
-        .emit(eventName, params);
-  }
-
-  // Event listener management for React Native
-  @ReactMethod
-  public void addListener(String eventName) {
-    // Required for RN built in Event Emitter Calls
-  }
-
-  @ReactMethod
-  public void removeListeners(Integer count) {
-    // Required for RN built in Event Emitter Calls
-  }
-}
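For context on what this removal takes away from JavaScript callers, the TypeScript sketch below shows one way the deleted `OmnipayLivenessModule` could have been driven over the React Native bridge. It is reconstructed from the `@ReactMethod` signatures, the `parseConfig` keys, and the `sendEvent` names in the deleted Java above; the helper name `runLivenessFlow` and the exact event payload shapes are illustrative assumptions, not a documented SDK API.

```tsx
// Sketch only: driving the removed native module from JS, inferred from the deleted Java.
import { NativeModules, NativeEventEmitter } from 'react-native';

const { OmnipayLivenessModule } = NativeModules; // name matches getName() above
const emitter = new NativeEventEmitter(OmnipayLivenessModule);

export async function runLivenessFlow(): Promise<void> {
  // isSupported() resolves true only when ML Kit and a camera are available.
  const supported: boolean = await OmnipayLivenessModule.isSupported();
  if (!supported) return;

  // Event names correspond to the sendEvent(...) calls in the removed module.
  const subs = [
    emitter.addListener('onChallengeStart', (e) => console.log('start', e.challenge)),
    emitter.addListener('onChallengeSuccess', (e) => console.log('passed', e.result)),
    emitter.addListener('onChallengeFailure', (e) => console.warn('failed', e.reason)),
    emitter.addListener('onDetectionComplete', (result) => {
      console.log('done', result.success, result.challengeResults);
      subs.forEach((s) => s.remove());
    }),
  ];

  // Config keys mirror parseConfig(): "challenges" and "challengeTimeout" (seconds).
  await OmnipayLivenessModule.startLivenessDetection({
    challenges: ['smile', 'blink', 'turnLeft', 'turnRight'],
    challengeTimeout: 10,
  });
}
```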
--- a/package/ios/OmnipayLivenessCameraView.h
+++ /dev/null
@@ -1,15 +0,0 @@
-#import <UIKit/UIKit.h>
-#import <AVFoundation/AVFoundation.h>
-#import <React/RCTComponent.h>
-
-@interface OmnipayLivenessCameraView : UIView
-
-@property (nonatomic, strong) AVCaptureVideoPreviewLayer *previewLayer;
-@property (nonatomic, copy) RCTBubblingEventBlock onCameraReady;
-@property (nonatomic, copy) RCTBubblingEventBlock onCameraError;
-
-- (void)setupCameraPreview:(AVCaptureSession *)captureSession;
-- (void)startPreview;
-- (void)stopPreview;
-
-@end
--- a/package/ios/OmnipayLivenessCameraView.m
+++ /dev/null
@@ -1,80 +0,0 @@
-#import "OmnipayLivenessCameraView.h"
-#import <React/RCTLog.h>
-
-@implementation OmnipayLivenessCameraView
-
-- (instancetype)initWithFrame:(CGRect)frame {
-  self = [super initWithFrame:frame];
-  if (self) {
-    [self setupView];
-  }
-  return self;
-}
-
-- (void)setupView {
-  self.backgroundColor = [UIColor blackColor];
-  self.clipsToBounds = YES;
-}
-
-- (void)setupCameraPreview:(AVCaptureSession *)captureSession {
-  if (!captureSession) {
-    RCTLogError(@"Cannot setup camera preview: capture session is nil");
-    if (self.onCameraError) {
-      self.onCameraError(@{@"error": @"Capture session is nil"});
-    }
-    return;
-  }
-
-  // Remove existing preview layer
-  if (self.previewLayer) {
-    [self.previewLayer removeFromSuperlayer];
-  }
-
-  // Create new preview layer
-  self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:captureSession];
-  self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
-  self.previewLayer.frame = self.bounds;
-
-  [self.layer addSublayer:self.previewLayer];
-
-  // Notify that camera is ready
-  if (self.onCameraReady) {
-    self.onCameraReady(@{@"ready": @YES});
-  }
-}
-
-- (void)layoutSubviews {
-  [super layoutSubviews];
-
-  // Update preview layer frame when view bounds change
-  if (self.previewLayer) {
-    self.previewLayer.frame = self.bounds;
-  }
-}
-
-- (void)startPreview {
-  if (self.previewLayer && self.previewLayer.session) {
-    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
-      if (![self.previewLayer.session isRunning]) {
-        [self.previewLayer.session startRunning];
-      }
-    });
-  }
-}
-
-- (void)stopPreview {
-  if (self.previewLayer && self.previewLayer.session) {
-    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
-      if ([self.previewLayer.session isRunning]) {
-        [self.previewLayer.session stopRunning];
-      }
-    });
-  }
-}
-
-- (void)removeFromSuperview {
-  [self stopPreview];
-  [super removeFromSuperview];
-}
-
-@end
--- a/package/ios/OmnipayLivenessCameraViewManager.m
+++ /dev/null
@@ -1,19 +0,0 @@
-#import <React/RCTViewManager.h>
-#import "OmnipayLivenessCameraView.h"
-
-@interface OmnipayLivenessCameraViewManager : RCTViewManager
-@end
-
-@implementation OmnipayLivenessCameraViewManager
-
-RCT_EXPORT_MODULE(OmnipayLivenessCameraView)
-
-- (UIView *)view {
-  return [[OmnipayLivenessCameraView alloc] init];
-}
-
-// Export event props
-RCT_EXPORT_VIEW_PROPERTY(onCameraReady, RCTBubblingEventBlock)
-RCT_EXPORT_VIEW_PROPERTY(onCameraError, RCTBubblingEventBlock)
-
-@end
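The deleted manager exported a native view named `OmnipayLivenessCameraView` with `onCameraReady` and `onCameraError` event props. The TypeScript sketch below shows one plausible way the removed `OmnipayLivenessCameraView.tsx` wrapper could have mounted it via `requireNativeComponent`; the prop typings and the `LivenessCameraPreview` component name are assumptions for illustration, since the wrapper's source is not shown in this diff.

```tsx
// Sketch only: mounting the removed native view, inferred from the deleted view manager.
import React from 'react';
import { requireNativeComponent, StyleSheet, ViewProps } from 'react-native';

type LivenessCameraProps = ViewProps & {
  // Payload shapes follow the @{...} dictionaries emitted by the removed Objective-C view.
  onCameraReady?: (event: { nativeEvent: { ready: boolean } }) => void;
  onCameraError?: (event: { nativeEvent: { error: string } }) => void;
};

const NativeLivenessCamera =
  requireNativeComponent<LivenessCameraProps>('OmnipayLivenessCameraView');

export function LivenessCameraPreview() {
  return (
    <NativeLivenessCamera
      style={StyleSheet.absoluteFill}
      onCameraReady={() => console.log('camera ready')}
      onCameraError={(e) => console.warn(e.nativeEvent.error)}
    />
  );
}
```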