omnipay-reactnative-sdk 1.2.2-beta.8 → 1.2.3-beta.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (66)
  1. package/README.md +43 -93
  2. package/android/build.gradle +0 -9
  3. package/android/src/main/AndroidManifest.xml +0 -5
  4. package/android/src/main/java/com/omniretail/omnipay/OmnipayActivityPackage.java +1 -4
  5. package/lib/commonjs/components/OmnipayProvider.js +1 -1
  6. package/lib/commonjs/components/biometrics/FaceVerification.js +235 -275
  7. package/lib/commonjs/components/biometrics/FaceVerification.js.map +1 -1
  8. package/lib/commonjs/index.js +0 -33
  9. package/lib/commonjs/index.js.map +1 -1
  10. package/lib/module/components/OmnipayProvider.js +1 -1
  11. package/lib/module/components/biometrics/FaceVerification.js +237 -277
  12. package/lib/module/components/biometrics/FaceVerification.js.map +1 -1
  13. package/lib/module/index.js +0 -6
  14. package/lib/module/index.js.map +1 -1
  15. package/lib/typescript/components/biometrics/FaceVerification.d.ts +1 -3
  16. package/lib/typescript/components/biometrics/FaceVerification.d.ts.map +1 -1
  17. package/lib/typescript/index.d.ts +0 -2
  18. package/lib/typescript/index.d.ts.map +1 -1
  19. package/package.json +4 -2
  20. package/src/components/OmnipayProvider.tsx +1 -1
  21. package/src/components/biometrics/FaceVerification.tsx +232 -317
  22. package/src/index.tsx +0 -7
  23. package/android/src/main/java/com/omniretail/omnipay/OmnipayLivenessCameraView.java +0 -153
  24. package/android/src/main/java/com/omniretail/omnipay/OmnipayLivenessCameraViewManager.java +0 -49
  25. package/android/src/main/java/com/omniretail/omnipay/OmnipayLivenessModule.java +0 -557
  26. package/ios/OmnipayLivenessCameraView.h +0 -15
  27. package/ios/OmnipayLivenessCameraView.m +0 -80
  28. package/ios/OmnipayLivenessCameraViewManager.m +0 -19
  29. package/ios/OmnipayLivenessModule.h +0 -38
  30. package/ios/OmnipayLivenessModule.m +0 -615
  31. package/lib/commonjs/components/biometrics/LivenessDetection.js +0 -149
  32. package/lib/commonjs/components/biometrics/LivenessDetection.js.map +0 -1
  33. package/lib/commonjs/components/biometrics/OmnipayLivenessCameraView.js +0 -15
  34. package/lib/commonjs/components/biometrics/OmnipayLivenessCameraView.js.map +0 -1
  35. package/lib/commonjs/components/biometrics/PermissionManager.js +0 -279
  36. package/lib/commonjs/components/biometrics/PermissionManager.js.map +0 -1
  37. package/lib/commonjs/components/biometrics/index.js +0 -45
  38. package/lib/commonjs/components/biometrics/index.js.map +0 -1
  39. package/lib/commonjs/components/biometrics/types.js +0 -17
  40. package/lib/commonjs/components/biometrics/types.js.map +0 -1
  41. package/lib/module/components/biometrics/LivenessDetection.js +0 -129
  42. package/lib/module/components/biometrics/LivenessDetection.js.map +0 -1
  43. package/lib/module/components/biometrics/OmnipayLivenessCameraView.js +0 -7
  44. package/lib/module/components/biometrics/OmnipayLivenessCameraView.js.map +0 -1
  45. package/lib/module/components/biometrics/PermissionManager.js +0 -272
  46. package/lib/module/components/biometrics/PermissionManager.js.map +0 -1
  47. package/lib/module/components/biometrics/index.js +0 -12
  48. package/lib/module/components/biometrics/index.js.map +0 -1
  49. package/lib/module/components/biometrics/types.js +0 -16
  50. package/lib/module/components/biometrics/types.js.map +0 -1
  51. package/lib/typescript/components/biometrics/LivenessDetection.d.ts +0 -33
  52. package/lib/typescript/components/biometrics/LivenessDetection.d.ts.map +0 -1
  53. package/lib/typescript/components/biometrics/OmnipayLivenessCameraView.d.ts +0 -18
  54. package/lib/typescript/components/biometrics/OmnipayLivenessCameraView.d.ts.map +0 -1
  55. package/lib/typescript/components/biometrics/PermissionManager.d.ts +0 -58
  56. package/lib/typescript/components/biometrics/PermissionManager.d.ts.map +0 -1
  57. package/lib/typescript/components/biometrics/index.d.ts +0 -5
  58. package/lib/typescript/components/biometrics/index.d.ts.map +0 -1
  59. package/lib/typescript/components/biometrics/types.d.ts +0 -73
  60. package/lib/typescript/components/biometrics/types.d.ts.map +0 -1
  61. package/omnipay-reactnative-sdk.podspec +0 -50
  62. package/src/components/biometrics/LivenessDetection.ts +0 -178
  63. package/src/components/biometrics/OmnipayLivenessCameraView.tsx +0 -19
  64. package/src/components/biometrics/PermissionManager.ts +0 -317
  65. package/src/components/biometrics/index.ts +0 -11
  66. package/src/components/biometrics/types.ts +0 -86
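Taken together, the file list above shows this release removing the native liveness-detection layer entirely (the Android OmnipayLiveness* Java classes, the iOS OmnipayLivenessModule/CameraView files, the podspec, and the whole src/components/biometrics subtree apart from FaceVerification, which is rewritten). For consumers that most likely means the biometrics exports disappear from the package root. The sketch below is a hedged illustration only: every import name is inferred from the deleted file paths and the shrunken src/index.tsx, and none of it is confirmed API in this diff.

// Hypothetical consumer impact (TypeScript) -- names inferred, not verified.
//
// On 1.2.2-beta.8 an app may have imported the liveness helpers directly:
//   import { LivenessDetection, PermissionManager } from 'omnipay-reactnative-sdk';
//
// On 1.2.3-beta.0 those modules no longer ship, so such an app would either pin
// the older version or fall back to the components that remain in the package:
import { OmnipayProvider, FaceVerification } from 'omnipay-reactnative-sdk';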
package/ios/OmnipayLivenessModule.h
@@ -1,38 +0,0 @@
- #import <React/RCTBridgeModule.h>
- #import <React/RCTEventEmitter.h>
- #import <Vision/Vision.h>
- #import <AVFoundation/AVFoundation.h>
- #import <UIKit/UIKit.h>
-
- @interface OmnipayLivenessModule : RCTEventEmitter <RCTBridgeModule, AVCaptureVideoDataOutputSampleBufferDelegate>
-
- @property (nonatomic, strong) AVCaptureSession *captureSession;
- @property (nonatomic, strong) AVCaptureVideoPreviewLayer *previewLayer;
- @property (nonatomic, strong) AVCaptureVideoDataOutput *videoDataOutput;
- @property (nonatomic, strong) AVCapturePhotoOutput *photoOutput;
- @property (nonatomic, strong) dispatch_queue_t videoDataOutputQueue;
-
- @property (nonatomic, strong) VNDetectFaceRectanglesRequest *faceDetectionRequest;
- @property (nonatomic, strong) VNDetectFaceLandmarksRequest *faceLandmarksRequest;
-
- // Detection state
- @property (nonatomic, assign) BOOL isDetectionRunning;
- @property (nonatomic, strong) NSString *currentChallenge;
- @property (nonatomic, strong) NSArray *challenges;
- @property (nonatomic, assign) NSInteger currentChallengeIndex;
- @property (nonatomic, assign) NSTimeInterval challengeStartTime;
- @property (nonatomic, assign) NSTimeInterval challengeTimeoutSeconds;
-
- // Blink detection state
- @property (nonatomic, assign) BOOL previousEyeOpenState;
- @property (nonatomic, assign) NSInteger blinkCounter;
- @property (nonatomic, assign) NSInteger eyesClosedFrames;
-
- // Challenge completion tracking
- @property (nonatomic, assign) BOOL challengeCompleted;
-
- // Detection thresholds
- @property (nonatomic, assign) CGFloat headTurnThreshold;
- @property (nonatomic, assign) NSInteger blinkFramesThreshold;
-
- @end
package/ios/OmnipayLivenessModule.m
@@ -1,615 +0,0 @@
- #import "OmnipayLivenessModule.h"
- #import <React/RCTLog.h>
- #import <React/RCTUtils.h>
-
- // Challenge constants
- static NSString *const CHALLENGE_SMILE = @"smile";
- static NSString *const CHALLENGE_BLINK = @"blink";
- static NSString *const CHALLENGE_TURN_LEFT = @"turnLeft";
- static NSString *const CHALLENGE_TURN_RIGHT = @"turnRight";
-
- // Detection thresholds
- static const CGFloat HEAD_TURN_THRESHOLD = 15.0;
- static const NSInteger BLINK_FRAMES_THRESHOLD = 3;
-
- @implementation OmnipayLivenessModule
-
- RCT_EXPORT_MODULE(OmnipayLivenessModule)
-
- - (instancetype)init {
-   self = [super init];
-   if (self) {
-     [self initializeDetection];
-   }
-   return self;
- }
-
- - (void)initializeDetection {
-   self.headTurnThreshold = HEAD_TURN_THRESHOLD;
-   self.blinkFramesThreshold = BLINK_FRAMES_THRESHOLD;
-   self.challengeTimeoutSeconds = 10.0; // Default timeout
-
-   // Initialize video data output queue
-   self.videoDataOutputQueue = dispatch_queue_create("videoDataOutputQueue", DISPATCH_QUEUE_SERIAL);
-
-   // Initialize Vision requests
-   self.faceDetectionRequest = [[VNDetectFaceRectanglesRequest alloc] initWithCompletionHandler:^(VNRequest *request, NSError *error) {
-     if (error) {
-       RCTLogError(@"Face detection error: %@", error.localizedDescription);
-       return;
-     }
-     [self processFaceDetectionResults:request.results];
-   }];
-
-   self.faceLandmarksRequest = [[VNDetectFaceLandmarksRequest alloc] initWithCompletionHandler:^(VNRequest *request, NSError *error) {
-     if (error) {
-       RCTLogError(@"Face landmarks error: %@", error.localizedDescription);
-       return;
-     }
-     [self processFaceLandmarksResults:request.results];
-   }];
- }
-
- + (BOOL)requiresMainQueueSetup {
-   return NO;
- }
-
- - (NSArray<NSString *> *)supportedEvents {
-   return @[
-     @"onChallengeStart",
-     @"onChallengeSuccess",
-     @"onChallengeFailure",
-     @"onAllChallengesComplete",
-     @"onScreenshotCaptured",
-     @"onDetectionFailed",
-     @"onDetectionComplete"
-   ];
- }
-
- RCT_EXPORT_METHOD(isSupported:(RCTPromiseResolveBlock)resolve
-                   rejecter:(RCTPromiseRejectBlock)reject) {
-
-   RCTLogInfo(@"🔍 Checking iOS liveness detection support...");
-
-   // Check if Vision framework is available
-   BOOL hasVisionFramework = [VNDetectFaceRectanglesRequest class] != nil;
-   RCTLogInfo(@" Vision Framework Available: %@", hasVisionFramework ? @"YES" : @"NO");
-
-   // Check if device has camera capability (independent of permission)
-   BOOL hasCamera = NO;
-
-   // Method 1: Check for camera devices
-   NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
-   hasCamera = videoDevices.count > 0;
-   RCTLogInfo(@" Video devices found: %lu", (unsigned long)videoDevices.count);
-
-   // Method 2: Check specifically for front camera (most reliable for face detection)
-   if (!hasCamera) {
-     AVCaptureDevice *frontCamera = nil;
-     if (@available(iOS 10.0, *)) {
-       frontCamera = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInWideAngleCamera
-                                                         mediaType:AVMediaTypeVideo
-                                                          position:AVCaptureDevicePositionFront];
-     } else {
-       // Fallback for older iOS versions
-       NSArray *devices = [AVCaptureDevice devices];
-       for (AVCaptureDevice *device in devices) {
-         if ([device hasMediaType:AVMediaTypeVideo] && device.position == AVCaptureDevicePositionFront) {
-           frontCamera = device;
-           break;
-         }
-       }
-     }
-     hasCamera = frontCamera != nil;
-     RCTLogInfo(@" Front camera available: %@", frontCamera ? @"YES" : @"NO");
-   }
-
-   // Method 3: Check camera authorization status (but don't require it for support check)
-   AVAuthorizationStatus cameraStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
-   NSString *statusString = @"Unknown";
-   switch (cameraStatus) {
-     case AVAuthorizationStatusNotDetermined:
-       statusString = @"Not Determined";
-       break;
-     case AVAuthorizationStatusRestricted:
-       statusString = @"Restricted";
-       break;
-     case AVAuthorizationStatusDenied:
-       statusString = @"Denied";
-       break;
-     case AVAuthorizationStatusAuthorized:
-       statusString = @"Authorized";
-       break;
-   }
-   RCTLogInfo(@" Camera Permission Status: %@", statusString);
-
-   // Device supports liveness detection if it has Vision framework and camera hardware
-   // Permission status doesn't affect hardware capability
-   BOOL isSupported = hasVisionFramework && hasCamera;
-
-   RCTLogInfo(@"📋 Final Support Decision:");
-   RCTLogInfo(@" Vision Framework: %@", hasVisionFramework ? @"✅ YES" : @"❌ NO");
-   RCTLogInfo(@" Camera Hardware: %@", hasCamera ? @"✅ YES" : @"❌ NO");
-   RCTLogInfo(@" Overall Support: %@", isSupported ? @"✅ SUPPORTED" : @"❌ NOT SUPPORTED");
-
-   resolve(@(isSupported));
- }
-
- RCT_EXPORT_METHOD(startLivenessDetection:(NSDictionary *)config
-                   resolver:(RCTPromiseResolveBlock)resolve
-                   rejecter:(RCTPromiseRejectBlock)reject) {
-
-   if (self.isDetectionRunning) {
-     reject(@"DETECTION_RUNNING", @"Detection is already running", nil);
-     return;
-   }
-
-   // Parse configuration
-   [self parseConfig:config];
-
-   // Request camera permission
-   [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
-     dispatch_async(dispatch_get_main_queue(), ^{
-       if (!granted) {
-         reject(@"CAMERA_PERMISSION", @"Camera permission denied", nil);
-         return;
-       }
-
-       NSError *error;
-       if ([self setupCameraSession:&error]) {
-         [self startCameraSession];
-         self.isDetectionRunning = YES;
-         [self startNextChallenge];
-         resolve(nil);
-       } else {
-         reject(@"CAMERA_SETUP_ERROR", error.localizedDescription ?: @"Failed to setup camera", error);
-       }
-     });
-   }];
- }
-
- RCT_EXPORT_METHOD(stopDetection:(RCTPromiseResolveBlock)resolve
-                   rejecter:(RCTPromiseRejectBlock)reject) {
-   [self stopLivenessDetection];
-   resolve(nil);
- }
-
- - (void)parseConfig:(NSDictionary *)config {
-   // Parse challenges
-   NSArray *challengesArray = config[@"challenges"];
-   if (challengesArray && challengesArray.count > 0) {
-     self.challenges = challengesArray;
-   } else {
-     // Default challenges
-     self.challenges = @[CHALLENGE_SMILE, CHALLENGE_BLINK, CHALLENGE_TURN_LEFT, CHALLENGE_TURN_RIGHT];
-   }
-
-   // Parse timeout
-   NSNumber *timeout = config[@"challengeTimeout"];
-   if (timeout) {
-     self.challengeTimeoutSeconds = timeout.doubleValue;
-   }
-
-   self.currentChallengeIndex = 0;
- }
-
- - (BOOL)setupCameraSession:(NSError **)error {
-   // Create capture session
-   self.captureSession = [[AVCaptureSession alloc] init];
-   self.captureSession.sessionPreset = AVCaptureSessionPresetMedium;
-
-   // Get front camera
-   AVCaptureDevice *frontCamera = nil;
-   NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
-   for (AVCaptureDevice *device in devices) {
-     if (device.position == AVCaptureDevicePositionFront) {
-       frontCamera = device;
-       break;
-     }
-   }
-
-   if (!frontCamera) {
-     if (error) {
-       *error = [NSError errorWithDomain:@"OmnipayLiveness"
-                                    code:1001
-                                userInfo:@{NSLocalizedDescriptionKey: @"Front camera not available"}];
-     }
-     return NO;
-   }
-
-   // Create device input
-   NSError *inputError;
-   AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:frontCamera error:&inputError];
-   if (!deviceInput) {
-     if (error) *error = inputError;
-     return NO;
-   }
-
-   if (![self.captureSession canAddInput:deviceInput]) {
-     if (error) {
-       *error = [NSError errorWithDomain:@"OmnipayLiveness"
-                                    code:1002
-                                userInfo:@{NSLocalizedDescriptionKey: @"Cannot add device input"}];
-     }
-     return NO;
-   }
-   [self.captureSession addInput:deviceInput];
-
-   // Create video data output
-   self.videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
-   self.videoDataOutput.videoSettings = @{(NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA)};
-   [self.videoDataOutput setSampleBufferDelegate:self queue:self.videoDataOutputQueue];
-
-   if (![self.captureSession canAddOutput:self.videoDataOutput]) {
-     if (error) {
-       *error = [NSError errorWithDomain:@"OmnipayLiveness"
-                                    code:1003
-                                userInfo:@{NSLocalizedDescriptionKey: @"Cannot add video data output"}];
-     }
-     return NO;
-   }
-   [self.captureSession addOutput:self.videoDataOutput];
-
-   // Create photo output for final screenshot
-   self.photoOutput = [[AVCapturePhotoOutput alloc] init];
-   if ([self.captureSession canAddOutput:self.photoOutput]) {
-     [self.captureSession addOutput:self.photoOutput];
-   }
-
-   return YES;
- }
-
- - (void)startCameraSession {
-   dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
-     [self.captureSession startRunning];
-   });
- }
-
- - (void)startNextChallenge {
-   if (self.currentChallengeIndex >= self.challenges.count) {
-     return;
-   }
-
-   self.currentChallenge = self.challenges[self.currentChallengeIndex];
-   self.challengeStartTime = [[NSDate date] timeIntervalSince1970];
-   self.challengeCompleted = NO;
-
-   // Reset blink detection state
-   if ([self.currentChallenge isEqualToString:CHALLENGE_BLINK]) {
-     self.blinkCounter = 0;
-     self.eyesClosedFrames = 0;
-     self.previousEyeOpenState = YES;
-   }
-
-   // Emit challenge start event
-   [self sendEventWithName:@"onChallengeStart" body:@{@"challenge": self.currentChallenge}];
- }
-
- #pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate
-
- - (void)captureOutput:(AVCaptureOutput *)output
-   didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
-          fromConnection:(AVCaptureConnection *)connection {
-
-   if (!self.isDetectionRunning || !self.currentChallenge) {
-     return;
-   }
-
-   // Check timeout
-   NSTimeInterval currentTime = [[NSDate date] timeIntervalSince1970];
-   if (currentTime - self.challengeStartTime > self.challengeTimeoutSeconds) {
-     dispatch_async(dispatch_get_main_queue(), ^{
-       [self onChallengeFailure:@"Challenge timeout"];
-     });
-     return;
-   }
-
-   if (self.challengeCompleted) {
-     return;
-   }
-
-   // Convert sample buffer to CIImage
-   CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
-   if (!imageBuffer) return;
-
-   CIImage *ciImage = [CIImage imageWithCVImageBuffer:imageBuffer];
-
-   // Create Vision image request handler
-   VNImageRequestHandler *imageRequestHandler = [[VNImageRequestHandler alloc] initWithCIImage:ciImage options:@{}];
-
-   NSError *error;
-   if ([self.currentChallenge isEqualToString:CHALLENGE_SMILE] ||
-       [self.currentChallenge isEqualToString:CHALLENGE_TURN_LEFT] ||
-       [self.currentChallenge isEqualToString:CHALLENGE_TURN_RIGHT]) {
-
-     // Use face landmarks for smile and head pose detection
-     [imageRequestHandler performRequests:@[self.faceLandmarksRequest] error:&error];
-   } else {
-     // Use basic face detection for blink detection
-     [imageRequestHandler performRequests:@[self.faceDetectionRequest, self.faceLandmarksRequest] error:&error];
-   }
-
-   if (error) {
-     RCTLogError(@"Vision request error: %@", error.localizedDescription);
-   }
- }
-
- - (void)processFaceDetectionResults:(NSArray<VNFaceObservation *> *)results {
-   if (results.count == 0) {
-     return; // No face detected
-   }
-
-   VNFaceObservation *face = results.firstObject;
-   [self processFaceObservation:face];
- }
-
- - (void)processFaceLandmarksResults:(NSArray<VNFaceObservation *> *)results {
-   if (results.count == 0) {
-     return; // No face detected
-   }
-
-   VNFaceObservation *face = results.firstObject;
-   [self processFaceObservation:face];
- }
-
- - (void)processFaceObservation:(VNFaceObservation *)face {
-   if ([self.currentChallenge isEqualToString:CHALLENGE_SMILE]) {
-     [self checkSmile:face];
-   } else if ([self.currentChallenge isEqualToString:CHALLENGE_BLINK]) {
-     [self checkBlink:face];
-   } else if ([self.currentChallenge isEqualToString:CHALLENGE_TURN_LEFT]) {
-     [self checkHeadTurn:face targetYaw:-self.headTurnThreshold];
-   } else if ([self.currentChallenge isEqualToString:CHALLENGE_TURN_RIGHT]) {
-     [self checkHeadTurn:face targetYaw:self.headTurnThreshold];
-   }
- }
-
- - (void)checkSmile:(VNFaceObservation *)face {
-   if (!face.landmarks) return;
-
-   // Simplified smile detection based on mouth landmarks
-   VNFaceLandmarkRegion2D *outerLips = face.landmarks.outerLips;
-   if (outerLips && outerLips.pointCount > 0) {
-     // Basic smile detection - check if mouth corners are raised
-     // This is a simplified implementation
-     CGPoint *points = (CGPoint *)outerLips.normalizedPoints;
-
-     // Get mouth corner points (approximate indices)
-     if (outerLips.pointCount >= 12) {
-       CGPoint leftCorner = points[0];
-       CGPoint rightCorner = points[6];
-       CGPoint topLip = points[3];
-       CGPoint bottomLip = points[9];
-
-       // Simple smile detection: corners higher than center
-       CGFloat mouthHeight = topLip.y - bottomLip.y;
-       CGFloat cornerElevation = (leftCorner.y + rightCorner.y) / 2.0 - bottomLip.y;
-
-       if (cornerElevation > mouthHeight * 0.3) { // Smile threshold
-         dispatch_async(dispatch_get_main_queue(), ^{
-           [self onChallengeSuccess];
-         });
-       }
-     }
-   }
- }
-
- - (void)checkBlink:(VNFaceObservation *)face {
-   if (!face.landmarks) return;
-
-   // Check eye landmarks for blink detection
-   VNFaceLandmarkRegion2D *leftEye = face.landmarks.leftEye;
-   VNFaceLandmarkRegion2D *rightEye = face.landmarks.rightEye;
-
-   if (leftEye && rightEye && leftEye.pointCount > 0 && rightEye.pointCount > 0) {
-     // Simplified blink detection based on eye aspect ratio
-     BOOL eyesOpen = [self areEyesOpen:leftEye rightEye:rightEye];
-
-     if (!eyesOpen) {
-       self.eyesClosedFrames++;
-     } else {
-       if (self.eyesClosedFrames >= self.blinkFramesThreshold && !self.previousEyeOpenState) {
-         self.blinkCounter++;
-         if (self.blinkCounter >= 1) { // Single blink required
-           dispatch_async(dispatch_get_main_queue(), ^{
-             [self onChallengeSuccess];
-           });
-         }
-       }
-       self.eyesClosedFrames = 0;
-     }
-
-     self.previousEyeOpenState = eyesOpen;
-   }
- }
-
- - (BOOL)areEyesOpen:(VNFaceLandmarkRegion2D *)leftEye rightEye:(VNFaceLandmarkRegion2D *)rightEye {
-   // Calculate eye aspect ratio for both eyes
-   CGFloat leftEAR = [self calculateEyeAspectRatio:leftEye];
-   CGFloat rightEAR = [self calculateEyeAspectRatio:rightEye];
-
-   // Average eye aspect ratio
-   CGFloat averageEAR = (leftEAR + rightEAR) / 2.0;
-
-   // Threshold for eye open/closed state
-   return averageEAR > 0.2; // Adjust threshold as needed
- }
-
- - (CGFloat)calculateEyeAspectRatio:(VNFaceLandmarkRegion2D *)eye {
-   if (eye.pointCount < 6) return 1.0; // Default to open
-
-   CGPoint *points = (CGPoint *)eye.normalizedPoints;
-
-   // Simplified eye aspect ratio calculation
-   // Get approximate top, bottom, left, right points
-   CGFloat minY = points[0].y, maxY = points[0].y;
-   CGFloat minX = points[0].x, maxX = points[0].x;
-
-   for (int i = 1; i < eye.pointCount; i++) {
-     if (points[i].y < minY) minY = points[i].y;
-     if (points[i].y > maxY) maxY = points[i].y;
-     if (points[i].x < minX) minX = points[i].x;
-     if (points[i].x > maxX) maxX = points[i].x;
-   }
-
-   CGFloat height = maxY - minY;
-   CGFloat width = maxX - minX;
-
-   return width > 0 ? height / width : 1.0;
- }
-
- - (void)checkHeadTurn:(VNFaceObservation *)face targetYaw:(CGFloat)targetYaw {
-   // Vision framework provides yaw directly
-   CGFloat yaw = face.yaw ? face.yaw.floatValue * 180.0 / M_PI : 0.0; // Convert to degrees
-
-   if (targetYaw < 0) { // Turn left
-     if (yaw < targetYaw) {
-       dispatch_async(dispatch_get_main_queue(), ^{
-         [self onChallengeSuccess];
-       });
-     }
-   } else { // Turn right
-     if (yaw > targetYaw) {
-       dispatch_async(dispatch_get_main_queue(), ^{
-         [self onChallengeSuccess];
-       });
-     }
-   }
- }
-
- - (void)onChallengeSuccess {
-   if (self.challengeCompleted) return;
-
-   self.challengeCompleted = YES;
-   NSTimeInterval duration = ([[NSDate date] timeIntervalSince1970] - self.challengeStartTime) * 1000; // Convert to ms
-
-   // Create challenge result
-   NSDictionary *result = @{
-     @"challenge": self.currentChallenge,
-     @"success": @YES,
-     @"duration": @(duration),
-     @"confidence": @(0.9) // Mock confidence
-   };
-
-   // Emit challenge success event
-   [self sendEventWithName:@"onChallengeSuccess" body:@{
-     @"challenge": self.currentChallenge,
-     @"result": result
-   }];
-
-   // Move to next challenge or complete
-   self.currentChallengeIndex++;
-   if (self.currentChallengeIndex < self.challenges.count) {
-     // Delay before next challenge
-     dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(1.0 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
-       [self startNextChallenge];
-     });
-   } else {
-     // All challenges completed
-     [self onAllChallengesComplete];
-   }
- }
-
- - (void)onChallengeFailure:(NSString *)reason {
-   if (self.challengeCompleted) return;
-
-   self.challengeCompleted = YES;
-
-   [self sendEventWithName:@"onChallengeFailure" body:@{
-     @"challenge": self.currentChallenge,
-     @"reason": reason
-   }];
-
-   // Stop detection
-   [self stopLivenessDetection];
-
-   [self sendEventWithName:@"onDetectionFailed" body:@{@"reason": reason}];
- }
-
- - (void)onAllChallengesComplete {
-   [self sendEventWithName:@"onAllChallengesComplete" body:nil];
-
-   // Capture final screenshot
-   [self captureScreenshot];
- }
-
- - (void)captureScreenshot {
-   if (!self.photoOutput) {
-     [self sendDetectionResult:NO screenshot:nil failureReason:@"Failed to capture screenshot"];
-     return;
-   }
-
-   AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings];
-
-   [self.photoOutput capturePhotoWithSettings:settings delegate:(id<AVCapturePhotoCaptureDelegate>)self];
- }
-
- #pragma mark - AVCapturePhotoCaptureDelegate
-
- - (void)captureOutput:(AVCapturePhotoOutput *)output
- didFinishProcessingPhoto:(AVCapturePhoto *)photo
-                   error:(NSError *)error API_AVAILABLE(ios(11.0)) {
-
-   if (error) {
-     RCTLogError(@"Photo capture error: %@", error.localizedDescription);
-     [self sendDetectionResult:NO screenshot:nil failureReason:@"Screenshot capture failed"];
-     return;
-   }
-
-   NSData *imageData = [photo fileDataRepresentation];
-   if (imageData) {
-     NSString *base64String = [imageData base64EncodedStringWithOptions:0];
-     NSString *base64Image = [NSString stringWithFormat:@"data:image/jpeg;base64,%@", base64String];
-
-     [self sendEventWithName:@"onScreenshotCaptured" body:@{@"screenshot": base64Image}];
-     [self sendDetectionResult:YES screenshot:base64Image failureReason:nil];
-   } else {
-     [self sendDetectionResult:NO screenshot:nil failureReason:@"Failed to process screenshot"];
-   }
- }
-
- - (void)sendDetectionResult:(BOOL)success screenshot:(NSString *)screenshot failureReason:(NSString *)failureReason {
-   [self stopLivenessDetection];
-
-   NSMutableDictionary *result = [NSMutableDictionary dictionary];
-   result[@"success"] = @(success);
-
-   if (screenshot) {
-     result[@"screenshot"] = screenshot;
-   }
-
-   if (failureReason) {
-     result[@"failureReason"] = failureReason;
-   }
-
-   // Create mock challenge results
-   NSMutableArray *challengeResults = [NSMutableArray array];
-   for (NSInteger i = 0; i < self.currentChallengeIndex; i++) {
-     NSDictionary *challengeResult = @{
-       @"challenge": self.challenges[i],
-       @"success": @YES,
-       @"duration": @(2000), // Mock duration
-       @"confidence": @(0.9)
-     };
-     [challengeResults addObject:challengeResult];
-   }
-   result[@"challengeResults"] = challengeResults;
-   result[@"totalDuration"] = @(([[NSDate date] timeIntervalSince1970] - self.challengeStartTime) * 1000);
-
-   [self sendEventWithName:@"onDetectionComplete" body:result];
- }
-
- - (void)stopLivenessDetection {
-   self.isDetectionRunning = NO;
-   self.currentChallenge = nil;
-   self.currentChallengeIndex = 0;
-   self.challengeCompleted = NO;
-
-   if (self.captureSession && self.captureSession.isRunning) {
-     dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
-       [self.captureSession stopRunning];
-     });
-   }
- }
-
- @end
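
For reference, the removed OmnipayLivenessModule was an RCTEventEmitter exposing three promise-returning methods (isSupported, startLivenessDetection, stopDetection) plus the events listed in supportedEvents. The following is a minimal TypeScript sketch of how JavaScript code could have driven it, based only on the Objective-C surface above; the SDK's own (also removed) LivenessDetection.ts wrapper may have looked different.

import { NativeModules, NativeEventEmitter } from 'react-native';

// Module and event names are taken from RCT_EXPORT_MODULE / supportedEvents
// in the deleted implementation above; payload shapes mirror the bodies it emitted.
const { OmnipayLivenessModule } = NativeModules;
const emitter = new NativeEventEmitter(OmnipayLivenessModule);

async function runLivenessSketch(): Promise<void> {
  // isSupported resolves to a boolean (Vision framework + camera hardware present).
  if (!(await OmnipayLivenessModule.isSupported())) return;

  const subs = [
    emitter.addListener('onChallengeStart', (e: any) => console.log('challenge:', e.challenge)),
    emitter.addListener('onDetectionFailed', (e: any) => console.warn('failed:', e.reason)),
    emitter.addListener('onDetectionComplete', (result: any) => {
      // result carries success, an optional base64 data-URI screenshot,
      // challengeResults and totalDuration, as assembled in sendDetectionResult.
      console.log('done:', result.success);
      subs.forEach((s) => s.remove());
    }),
  ];

  // Config keys mirror parseConfig: an optional challenges array and a timeout in seconds.
  await OmnipayLivenessModule.startLivenessDetection({
    challenges: ['smile', 'blink', 'turnLeft', 'turnRight'],
    challengeTimeout: 10,
  });
}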