omnipay-reactnative-sdk 1.2.2-beta.4 → 1.2.2-beta.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (135)
  1. package/README.md +93 -43
  2. package/android/build.gradle +16 -15
  3. package/android/src/main/AndroidManifest.xml +1 -1
  4. package/android/src/main/java/com/omniretail/omnipay/OmnipayActivityPackage.java +2 -2
  5. package/android/src/main/java/com/omniretail/omnipay/OmnipayLivenessCameraView.java +153 -0
  6. package/android/src/main/java/com/omniretail/omnipay/OmnipayLivenessCameraViewManager.java +49 -0
  7. package/android/src/main/java/com/omniretail/omnipay/OmnipayLivenessModule.java +557 -0
  8. package/ios/OmnipayLivenessCameraView.h +15 -0
  9. package/ios/OmnipayLivenessCameraView.m +80 -0
  10. package/ios/OmnipayLivenessCameraViewManager.m +19 -0
  11. package/ios/OmnipayLivenessModule.h +38 -0
  12. package/ios/OmnipayLivenessModule.m +574 -0
  13. package/lib/commonjs/components/OmnipayProvider.js +2 -66
  14. package/lib/commonjs/components/OmnipayProvider.js.map +1 -1
  15. package/lib/commonjs/components/OmnipayView.js.map +1 -1
  16. package/lib/commonjs/components/biometrics/FaceVerification.js +252 -345
  17. package/lib/commonjs/components/biometrics/FaceVerification.js.map +1 -1
  18. package/lib/commonjs/components/biometrics/LivenessDetection.js +90 -198
  19. package/lib/commonjs/components/biometrics/LivenessDetection.js.map +1 -1
  20. package/lib/commonjs/components/biometrics/OmnipayLivenessCameraView.js +15 -0
  21. package/lib/commonjs/components/biometrics/OmnipayLivenessCameraView.js.map +1 -0
  22. package/lib/commonjs/components/biometrics/PermissionManager.js +279 -0
  23. package/lib/commonjs/components/biometrics/PermissionManager.js.map +1 -0
  24. package/lib/commonjs/components/biometrics/index.js +45 -0
  25. package/lib/commonjs/components/biometrics/index.js.map +1 -0
  26. package/lib/commonjs/components/biometrics/types.js +17 -0
  27. package/lib/commonjs/components/biometrics/types.js.map +1 -0
  28. package/lib/commonjs/components/views/BvnVerification.js.map +1 -1
  29. package/lib/commonjs/components/views/PaylaterAgreement.js.map +1 -1
  30. package/lib/commonjs/components/views/Registration.js.map +1 -1
  31. package/lib/commonjs/index.js +23 -18
  32. package/lib/commonjs/index.js.map +1 -1
  33. package/lib/module/components/OmnipayProvider.js +3 -67
  34. package/lib/module/components/OmnipayProvider.js.map +1 -1
  35. package/lib/module/components/OmnipayView.js.map +1 -1
  36. package/lib/module/components/biometrics/FaceVerification.js +254 -346
  37. package/lib/module/components/biometrics/FaceVerification.js.map +1 -1
  38. package/lib/module/components/biometrics/LivenessDetection.js +75 -197
  39. package/lib/module/components/biometrics/LivenessDetection.js.map +1 -1
  40. package/lib/module/components/biometrics/OmnipayLivenessCameraView.js +7 -0
  41. package/lib/module/components/biometrics/OmnipayLivenessCameraView.js.map +1 -0
  42. package/lib/module/components/biometrics/PermissionManager.js +272 -0
  43. package/lib/module/components/biometrics/PermissionManager.js.map +1 -0
  44. package/lib/module/components/biometrics/index.js +12 -0
  45. package/lib/module/components/biometrics/index.js.map +1 -0
  46. package/lib/module/components/biometrics/types.js +16 -0
  47. package/lib/module/components/biometrics/types.js.map +1 -0
  48. package/lib/module/components/views/BvnVerification.js.map +1 -1
  49. package/lib/module/components/views/PaylaterAgreement.js.map +1 -1
  50. package/lib/module/components/views/Registration.js.map +1 -1
  51. package/lib/module/index.js +5 -4
  52. package/lib/module/index.js.map +1 -1
  53. package/lib/typescript/{src/components → components}/OmnipayProvider.d.ts +1 -1
  54. package/lib/typescript/components/OmnipayProvider.d.ts.map +1 -0
  55. package/lib/typescript/{src/components → components}/OmnipayView.d.ts +21 -20
  56. package/lib/typescript/components/OmnipayView.d.ts.map +1 -0
  57. package/lib/typescript/components/biometrics/FaceVerification.d.ts +11 -0
  58. package/lib/typescript/components/biometrics/FaceVerification.d.ts.map +1 -0
  59. package/lib/typescript/components/biometrics/LivenessDetection.d.ts +33 -0
  60. package/lib/typescript/components/biometrics/LivenessDetection.d.ts.map +1 -0
  61. package/lib/typescript/components/biometrics/OmnipayLivenessCameraView.d.ts +18 -0
  62. package/lib/typescript/components/biometrics/OmnipayLivenessCameraView.d.ts.map +1 -0
  63. package/lib/typescript/components/biometrics/PermissionManager.d.ts +58 -0
  64. package/lib/typescript/components/biometrics/PermissionManager.d.ts.map +1 -0
  65. package/lib/typescript/components/biometrics/index.d.ts +5 -0
  66. package/lib/typescript/components/biometrics/index.d.ts.map +1 -0
  67. package/lib/typescript/components/biometrics/types.d.ts +73 -0
  68. package/lib/typescript/components/biometrics/types.d.ts.map +1 -0
  69. package/lib/typescript/{src/components → components}/views/BvnVerification.d.ts +2 -1
  70. package/lib/typescript/components/views/BvnVerification.d.ts.map +1 -0
  71. package/lib/typescript/{src/components → components}/views/PaylaterAgreement.d.ts +2 -1
  72. package/lib/typescript/components/views/PaylaterAgreement.d.ts.map +1 -0
  73. package/lib/typescript/{src/components → components}/views/Registration.d.ts +2 -1
  74. package/lib/typescript/components/views/Registration.d.ts.map +1 -0
  75. package/lib/typescript/functions.d.ts.map +1 -0
  76. package/lib/typescript/hooks/useOmnipay.d.ts +28 -0
  77. package/lib/typescript/hooks/useOmnipay.d.ts.map +1 -0
  78. package/lib/typescript/index.d.ts +7 -0
  79. package/lib/typescript/index.d.ts.map +1 -0
  80. package/lib/typescript/lib/colors.d.ts.map +1 -0
  81. package/lib/typescript/lib/config.d.ts.map +1 -0
  82. package/omnipay-reactnative-sdk.podspec +32 -29
  83. package/package.json +15 -10
  84. package/src/components/OmnipayProvider.tsx +3 -106
  85. package/src/components/OmnipayView.tsx +1 -1
  86. package/src/components/biometrics/FaceVerification.tsx +291 -368
  87. package/src/components/biometrics/LivenessDetection.ts +113 -250
  88. package/src/components/biometrics/OmnipayLivenessCameraView.tsx +19 -0
  89. package/src/components/biometrics/PermissionManager.ts +317 -0
  90. package/src/components/biometrics/index.ts +11 -0
  91. package/src/components/biometrics/types.ts +86 -0
  92. package/src/components/views/BvnVerification.tsx +1 -1
  93. package/src/components/views/PaylaterAgreement.tsx +1 -1
  94. package/src/components/views/Registration.tsx +1 -1
  95. package/src/index.tsx +4 -15
  96. package/android/src/main/java/com/omniretail/omnipay/LivenessCameraViewManager.java +0 -116
  97. package/android/src/main/java/com/omniretail/omnipay/LivenessDetectionModule.java +0 -588
  98. package/ios/LivenessCameraView.h +0 -22
  99. package/ios/LivenessCameraView.m +0 -135
  100. package/ios/LivenessCameraViewManager.h +0 -12
  101. package/ios/LivenessCameraViewManager.m +0 -24
  102. package/ios/LivenessDetectionModule.h +0 -46
  103. package/ios/LivenessDetectionModule.m +0 -603
  104. package/lib/commonjs/components/biometrics/LivenessCameraView.js +0 -45
  105. package/lib/commonjs/components/biometrics/LivenessCameraView.js.map +0 -1
  106. package/lib/module/components/biometrics/LivenessCameraView.js +0 -39
  107. package/lib/module/components/biometrics/LivenessCameraView.js.map +0 -1
  108. package/lib/typescript/demo/src/App.d.ts +0 -3
  109. package/lib/typescript/demo/src/App.d.ts.map +0 -1
  110. package/lib/typescript/demo/src/Body.d.ts +0 -3
  111. package/lib/typescript/demo/src/Body.d.ts.map +0 -1
  112. package/lib/typescript/demo/src/NotificationsExample.d.ts +0 -4
  113. package/lib/typescript/demo/src/NotificationsExample.d.ts.map +0 -1
  114. package/lib/typescript/src/components/OmnipayProvider.d.ts.map +0 -1
  115. package/lib/typescript/src/components/OmnipayView.d.ts.map +0 -1
  116. package/lib/typescript/src/components/biometrics/FaceVerification.d.ts +0 -12
  117. package/lib/typescript/src/components/biometrics/FaceVerification.d.ts.map +0 -1
  118. package/lib/typescript/src/components/biometrics/LivenessCameraView.d.ts +0 -22
  119. package/lib/typescript/src/components/biometrics/LivenessCameraView.d.ts.map +0 -1
  120. package/lib/typescript/src/components/biometrics/LivenessDetection.d.ts +0 -73
  121. package/lib/typescript/src/components/biometrics/LivenessDetection.d.ts.map +0 -1
  122. package/lib/typescript/src/components/views/BvnVerification.d.ts.map +0 -1
  123. package/lib/typescript/src/components/views/PaylaterAgreement.d.ts.map +0 -1
  124. package/lib/typescript/src/components/views/Registration.d.ts.map +0 -1
  125. package/lib/typescript/src/functions.d.ts.map +0 -1
  126. package/lib/typescript/src/hooks/useOmnipay.d.ts +0 -28
  127. package/lib/typescript/src/hooks/useOmnipay.d.ts.map +0 -1
  128. package/lib/typescript/src/index.d.ts +0 -8
  129. package/lib/typescript/src/index.d.ts.map +0 -1
  130. package/lib/typescript/src/lib/colors.d.ts.map +0 -1
  131. package/lib/typescript/src/lib/config.d.ts.map +0 -1
  132. package/src/components/biometrics/LivenessCameraView.tsx +0 -61
  133. /package/lib/typescript/{src/functions.d.ts → functions.d.ts} +0 -0
  134. /package/lib/typescript/{src/lib → lib}/colors.d.ts +0 -0
  135. /package/lib/typescript/{src/lib → lib}/config.d.ts +0 -0
@@ -0,0 +1,38 @@
1
+ #import <React/RCTBridgeModule.h>
2
+ #import <React/RCTEventEmitter.h>
3
+ #import <Vision/Vision.h>
4
+ #import <AVFoundation/AVFoundation.h>
5
+ #import <UIKit/UIKit.h>
6
+
7
+ @interface OmnipayLivenessModule : RCTEventEmitter <RCTBridgeModule, AVCaptureVideoDataOutputSampleBufferDelegate>
8
+
9
+ @property (nonatomic, strong) AVCaptureSession *captureSession;
10
+ @property (nonatomic, strong) AVCaptureVideoPreviewLayer *previewLayer;
11
+ @property (nonatomic, strong) AVCaptureVideoDataOutput *videoDataOutput;
12
+ @property (nonatomic, strong) AVCapturePhotoOutput *photoOutput;
13
+ @property (nonatomic, strong) dispatch_queue_t videoDataOutputQueue;
14
+
15
+ @property (nonatomic, strong) VNDetectFaceRectanglesRequest *faceDetectionRequest;
16
+ @property (nonatomic, strong) VNDetectFaceLandmarksRequest *faceLandmarksRequest;
17
+
18
+ // Detection state
19
+ @property (nonatomic, assign) BOOL isDetectionRunning;
20
+ @property (nonatomic, strong) NSString *currentChallenge;
21
+ @property (nonatomic, strong) NSArray *challenges;
22
+ @property (nonatomic, assign) NSInteger currentChallengeIndex;
23
+ @property (nonatomic, assign) NSTimeInterval challengeStartTime;
24
+ @property (nonatomic, assign) NSTimeInterval challengeTimeoutSeconds;
25
+
26
+ // Blink detection state
27
+ @property (nonatomic, assign) BOOL previousEyeOpenState;
28
+ @property (nonatomic, assign) NSInteger blinkCounter;
29
+ @property (nonatomic, assign) NSInteger eyesClosedFrames;
30
+
31
+ // Challenge completion tracking
32
+ @property (nonatomic, assign) BOOL challengeCompleted;
33
+
34
+ // Detection thresholds
35
+ @property (nonatomic, assign) CGFloat headTurnThreshold;
36
+ @property (nonatomic, assign) NSInteger blinkFramesThreshold;
37
+
38
+ @end
@@ -0,0 +1,574 @@
1
+ #import "OmnipayLivenessModule.h"
2
+ #import <React/RCTLog.h>
3
+ #import <React/RCTUtils.h>
4
+
5
+ // Challenge constants
6
+ static NSString *const CHALLENGE_SMILE = @"smile";
7
+ static NSString *const CHALLENGE_BLINK = @"blink";
8
+ static NSString *const CHALLENGE_TURN_LEFT = @"turnLeft";
9
+ static NSString *const CHALLENGE_TURN_RIGHT = @"turnRight";
10
+
11
+ // Detection thresholds
12
+ static const CGFloat HEAD_TURN_THRESHOLD = 15.0;
13
+ static const NSInteger BLINK_FRAMES_THRESHOLD = 3;
14
+
15
+ @implementation OmnipayLivenessModule
16
+
17
+ RCT_EXPORT_MODULE(OmnipayLivenessModule)
18
+
19
+ - (instancetype)init {
20
+ self = [super init];
21
+ if (self) {
22
+ [self initializeDetection];
23
+ }
24
+ return self;
25
+ }
26
+
27
+ - (void)initializeDetection {
28
+ self.headTurnThreshold = HEAD_TURN_THRESHOLD;
29
+ self.blinkFramesThreshold = BLINK_FRAMES_THRESHOLD;
30
+ self.challengeTimeoutSeconds = 10.0; // Default timeout
31
+
32
+ // Initialize video data output queue
33
+ self.videoDataOutputQueue = dispatch_queue_create("videoDataOutputQueue", DISPATCH_QUEUE_SERIAL);
34
+
35
+ // Initialize Vision requests
36
+ self.faceDetectionRequest = [[VNDetectFaceRectanglesRequest alloc] initWithCompletionHandler:^(VNRequest *request, NSError *error) {
37
+ if (error) {
38
+ RCTLogError(@"Face detection error: %@", error.localizedDescription);
39
+ return;
40
+ }
41
+ [self processFaceDetectionResults:request.results];
42
+ }];
43
+
44
+ self.faceLandmarksRequest = [[VNDetectFaceLandmarksRequest alloc] initWithCompletionHandler:^(VNRequest *request, NSError *error) {
45
+ if (error) {
46
+ RCTLogError(@"Face landmarks error: %@", error.localizedDescription);
47
+ return;
48
+ }
49
+ [self processFaceLandmarksResults:request.results];
50
+ }];
51
+ }
52
+
53
+ + (BOOL)requiresMainQueueSetup {
54
+ return NO;
55
+ }
56
+
57
+ - (NSArray<NSString *> *)supportedEvents {
58
+ return @[
59
+ @"onChallengeStart",
60
+ @"onChallengeSuccess",
61
+ @"onChallengeFailure",
62
+ @"onAllChallengesComplete",
63
+ @"onScreenshotCaptured",
64
+ @"onDetectionFailed",
65
+ @"onDetectionComplete"
66
+ ];
67
+ }
68
+
69
+ RCT_EXPORT_METHOD(isSupported:(RCTPromiseResolveBlock)resolve
70
+ rejecter:(RCTPromiseRejectBlock)reject) {
71
+ // Check if Vision framework is available and device has camera capability
72
+ BOOL hasVisionFramework = [VNDetectFaceRectanglesRequest class] != nil;
73
+
74
+ // Check if device has camera (independent of permission status)
75
+ BOOL hasCamera = NO;
76
+ NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
77
+ hasCamera = devices.count > 0;
78
+
79
+ // Additional check for front camera specifically (for selfie mode)
80
+ if (!hasCamera) {
81
+ AVCaptureDevice *frontCamera = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInWideAngleCamera
82
+ mediaType:AVMediaTypeVideo
83
+ position:AVCaptureDevicePositionFront];
84
+ hasCamera = frontCamera != nil;
85
+ }
86
+
87
+ BOOL isSupported = hasVisionFramework && hasCamera;
88
+
89
+ RCTLogInfo(@"Liveness Detection Support Check:");
90
+ RCTLogInfo(@" Vision Framework: %@", hasVisionFramework ? @"YES" : @"NO");
91
+ RCTLogInfo(@" Camera Available: %@", hasCamera ? @"YES" : @"NO");
92
+ RCTLogInfo(@" Overall Support: %@", isSupported ? @"YES" : @"NO");
93
+
94
+ resolve(@(isSupported));
95
+ }
96
+
97
+ RCT_EXPORT_METHOD(startLivenessDetection:(NSDictionary *)config
98
+ resolver:(RCTPromiseResolveBlock)resolve
99
+ rejecter:(RCTPromiseRejectBlock)reject) {
100
+
101
+ if (self.isDetectionRunning) {
102
+ reject(@"DETECTION_RUNNING", @"Detection is already running", nil);
103
+ return;
104
+ }
105
+
106
+ // Parse configuration
107
+ [self parseConfig:config];
108
+
109
+ // Request camera permission
110
+ [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
111
+ dispatch_async(dispatch_get_main_queue(), ^{
112
+ if (!granted) {
113
+ reject(@"CAMERA_PERMISSION", @"Camera permission denied", nil);
114
+ return;
115
+ }
116
+
117
+ NSError *error;
118
+ if ([self setupCameraSession:&error]) {
119
+ [self startCameraSession];
120
+ self.isDetectionRunning = YES;
121
+ [self startNextChallenge];
122
+ resolve(nil);
123
+ } else {
124
+ reject(@"CAMERA_SETUP_ERROR", error.localizedDescription ?: @"Failed to setup camera", error);
125
+ }
126
+ });
127
+ }];
128
+ }
129
+
130
+ RCT_EXPORT_METHOD(stopDetection:(RCTPromiseResolveBlock)resolve
131
+ rejecter:(RCTPromiseRejectBlock)reject) {
132
+ [self stopLivenessDetection];
133
+ resolve(nil);
134
+ }
135
+
136
+ - (void)parseConfig:(NSDictionary *)config {
137
+ // Parse challenges
138
+ NSArray *challengesArray = config[@"challenges"];
139
+ if (challengesArray && challengesArray.count > 0) {
140
+ self.challenges = challengesArray;
141
+ } else {
142
+ // Default challenges
143
+ self.challenges = @[CHALLENGE_SMILE, CHALLENGE_BLINK, CHALLENGE_TURN_LEFT, CHALLENGE_TURN_RIGHT];
144
+ }
145
+
146
+ // Parse timeout
147
+ NSNumber *timeout = config[@"challengeTimeout"];
148
+ if (timeout) {
149
+ self.challengeTimeoutSeconds = timeout.doubleValue;
150
+ }
151
+
152
+ self.currentChallengeIndex = 0;
153
+ }
154
+
155
+ - (BOOL)setupCameraSession:(NSError **)error {
156
+ // Create capture session
157
+ self.captureSession = [[AVCaptureSession alloc] init];
158
+ self.captureSession.sessionPreset = AVCaptureSessionPresetMedium;
159
+
160
+ // Get front camera
161
+ AVCaptureDevice *frontCamera = nil;
162
+ NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
163
+ for (AVCaptureDevice *device in devices) {
164
+ if (device.position == AVCaptureDevicePositionFront) {
165
+ frontCamera = device;
166
+ break;
167
+ }
168
+ }
169
+
170
+ if (!frontCamera) {
171
+ if (error) {
172
+ *error = [NSError errorWithDomain:@"OmnipayLiveness"
173
+ code:1001
174
+ userInfo:@{NSLocalizedDescriptionKey: @"Front camera not available"}];
175
+ }
176
+ return NO;
177
+ }
178
+
179
+ // Create device input
180
+ NSError *inputError;
181
+ AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:frontCamera error:&inputError];
182
+ if (!deviceInput) {
183
+ if (error) *error = inputError;
184
+ return NO;
185
+ }
186
+
187
+ if (![self.captureSession canAddInput:deviceInput]) {
188
+ if (error) {
189
+ *error = [NSError errorWithDomain:@"OmnipayLiveness"
190
+ code:1002
191
+ userInfo:@{NSLocalizedDescriptionKey: @"Cannot add device input"}];
192
+ }
193
+ return NO;
194
+ }
195
+ [self.captureSession addInput:deviceInput];
196
+
197
+ // Create video data output
198
+ self.videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
199
+ self.videoDataOutput.videoSettings = @{(NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA)};
200
+ [self.videoDataOutput setSampleBufferDelegate:self queue:self.videoDataOutputQueue];
201
+
202
+ if (![self.captureSession canAddOutput:self.videoDataOutput]) {
203
+ if (error) {
204
+ *error = [NSError errorWithDomain:@"OmnipayLiveness"
205
+ code:1003
206
+ userInfo:@{NSLocalizedDescriptionKey: @"Cannot add video data output"}];
207
+ }
208
+ return NO;
209
+ }
210
+ [self.captureSession addOutput:self.videoDataOutput];
211
+
212
+ // Create photo output for final screenshot
213
+ self.photoOutput = [[AVCapturePhotoOutput alloc] init];
214
+ if ([self.captureSession canAddOutput:self.photoOutput]) {
215
+ [self.captureSession addOutput:self.photoOutput];
216
+ }
217
+
218
+ return YES;
219
+ }
220
+
221
+ - (void)startCameraSession {
222
+ dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
223
+ [self.captureSession startRunning];
224
+ });
225
+ }
226
+
227
+ - (void)startNextChallenge {
228
+ if (self.currentChallengeIndex >= self.challenges.count) {
229
+ return;
230
+ }
231
+
232
+ self.currentChallenge = self.challenges[self.currentChallengeIndex];
233
+ self.challengeStartTime = [[NSDate date] timeIntervalSince1970];
234
+ self.challengeCompleted = NO;
235
+
236
+ // Reset blink detection state
237
+ if ([self.currentChallenge isEqualToString:CHALLENGE_BLINK]) {
238
+ self.blinkCounter = 0;
239
+ self.eyesClosedFrames = 0;
240
+ self.previousEyeOpenState = YES;
241
+ }
242
+
243
+ // Emit challenge start event
244
+ [self sendEventWithName:@"onChallengeStart" body:@{@"challenge": self.currentChallenge}];
245
+ }
246
+
247
+ #pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate
248
+
249
+ - (void)captureOutput:(AVCaptureOutput *)output
250
+ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
251
+ fromConnection:(AVCaptureConnection *)connection {
252
+
253
+ if (!self.isDetectionRunning || !self.currentChallenge) {
254
+ return;
255
+ }
256
+
257
+ // Check timeout
258
+ NSTimeInterval currentTime = [[NSDate date] timeIntervalSince1970];
259
+ if (currentTime - self.challengeStartTime > self.challengeTimeoutSeconds) {
260
+ dispatch_async(dispatch_get_main_queue(), ^{
261
+ [self onChallengeFailure:@"Challenge timeout"];
262
+ });
263
+ return;
264
+ }
265
+
266
+ if (self.challengeCompleted) {
267
+ return;
268
+ }
269
+
270
+ // Convert sample buffer to CIImage
271
+ CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
272
+ if (!imageBuffer) return;
273
+
274
+ CIImage *ciImage = [CIImage imageWithCVImageBuffer:imageBuffer];
275
+
276
+ // Create Vision image request handler
277
+ VNImageRequestHandler *imageRequestHandler = [[VNImageRequestHandler alloc] initWithCIImage:ciImage options:@{}];
278
+
279
+ NSError *error;
280
+ if ([self.currentChallenge isEqualToString:CHALLENGE_SMILE] ||
281
+ [self.currentChallenge isEqualToString:CHALLENGE_TURN_LEFT] ||
282
+ [self.currentChallenge isEqualToString:CHALLENGE_TURN_RIGHT]) {
283
+
284
+ // Use face landmarks for smile and head pose detection
285
+ [imageRequestHandler performRequests:@[self.faceLandmarksRequest] error:&error];
286
+ } else {
287
+ // Use basic face detection for blink detection
288
+ [imageRequestHandler performRequests:@[self.faceDetectionRequest, self.faceLandmarksRequest] error:&error];
289
+ }
290
+
291
+ if (error) {
292
+ RCTLogError(@"Vision request error: %@", error.localizedDescription);
293
+ }
294
+ }
295
+
296
+ - (void)processFaceDetectionResults:(NSArray<VNFaceObservation *> *)results {
297
+ if (results.count == 0) {
298
+ return; // No face detected
299
+ }
300
+
301
+ VNFaceObservation *face = results.firstObject;
302
+ [self processFaceObservation:face];
303
+ }
304
+
305
+ - (void)processFaceLandmarksResults:(NSArray<VNFaceObservation *> *)results {
306
+ if (results.count == 0) {
307
+ return; // No face detected
308
+ }
309
+
310
+ VNFaceObservation *face = results.firstObject;
311
+ [self processFaceObservation:face];
312
+ }
313
+
314
+ - (void)processFaceObservation:(VNFaceObservation *)face {
315
+ if ([self.currentChallenge isEqualToString:CHALLENGE_SMILE]) {
316
+ [self checkSmile:face];
317
+ } else if ([self.currentChallenge isEqualToString:CHALLENGE_BLINK]) {
318
+ [self checkBlink:face];
319
+ } else if ([self.currentChallenge isEqualToString:CHALLENGE_TURN_LEFT]) {
320
+ [self checkHeadTurn:face targetYaw:-self.headTurnThreshold];
321
+ } else if ([self.currentChallenge isEqualToString:CHALLENGE_TURN_RIGHT]) {
322
+ [self checkHeadTurn:face targetYaw:self.headTurnThreshold];
323
+ }
324
+ }
325
+
326
+ - (void)checkSmile:(VNFaceObservation *)face {
327
+ if (!face.landmarks) return;
328
+
329
+ // Simplified smile detection based on mouth landmarks
330
+ VNFaceLandmarkRegion2D *outerLips = face.landmarks.outerLips;
331
+ if (outerLips && outerLips.pointCount > 0) {
332
+ // Basic smile detection - check if mouth corners are raised
333
+ // This is a simplified implementation
334
+ CGPoint *points = (CGPoint *)outerLips.normalizedPoints;
335
+
336
+ // Get mouth corner points (approximate indices)
337
+ if (outerLips.pointCount >= 12) {
338
+ CGPoint leftCorner = points[0];
339
+ CGPoint rightCorner = points[6];
340
+ CGPoint topLip = points[3];
341
+ CGPoint bottomLip = points[9];
342
+
343
+ // Simple smile detection: corners higher than center
344
+ CGFloat mouthHeight = topLip.y - bottomLip.y;
345
+ CGFloat cornerElevation = (leftCorner.y + rightCorner.y) / 2.0 - bottomLip.y;
346
+
347
+ if (cornerElevation > mouthHeight * 0.3) { // Smile threshold
348
+ dispatch_async(dispatch_get_main_queue(), ^{
349
+ [self onChallengeSuccess];
350
+ });
351
+ }
352
+ }
353
+ }
354
+ }
355
+
356
+ - (void)checkBlink:(VNFaceObservation *)face {
357
+ if (!face.landmarks) return;
358
+
359
+ // Check eye landmarks for blink detection
360
+ VNFaceLandmarkRegion2D *leftEye = face.landmarks.leftEye;
361
+ VNFaceLandmarkRegion2D *rightEye = face.landmarks.rightEye;
362
+
363
+ if (leftEye && rightEye && leftEye.pointCount > 0 && rightEye.pointCount > 0) {
364
+ // Simplified blink detection based on eye aspect ratio
365
+ BOOL eyesOpen = [self areEyesOpen:leftEye rightEye:rightEye];
366
+
367
+ if (!eyesOpen) {
368
+ self.eyesClosedFrames++;
369
+ } else {
370
+ if (self.eyesClosedFrames >= self.blinkFramesThreshold && !self.previousEyeOpenState) {
371
+ self.blinkCounter++;
372
+ if (self.blinkCounter >= 1) { // Single blink required
373
+ dispatch_async(dispatch_get_main_queue(), ^{
374
+ [self onChallengeSuccess];
375
+ });
376
+ }
377
+ }
378
+ self.eyesClosedFrames = 0;
379
+ }
380
+
381
+ self.previousEyeOpenState = eyesOpen;
382
+ }
383
+ }
384
+
385
+ - (BOOL)areEyesOpen:(VNFaceLandmarkRegion2D *)leftEye rightEye:(VNFaceLandmarkRegion2D *)rightEye {
386
+ // Calculate eye aspect ratio for both eyes
387
+ CGFloat leftEAR = [self calculateEyeAspectRatio:leftEye];
388
+ CGFloat rightEAR = [self calculateEyeAspectRatio:rightEye];
389
+
390
+ // Average eye aspect ratio
391
+ CGFloat averageEAR = (leftEAR + rightEAR) / 2.0;
392
+
393
+ // Threshold for eye open/closed state
394
+ return averageEAR > 0.2; // Adjust threshold as needed
395
+ }
396
+
397
+ - (CGFloat)calculateEyeAspectRatio:(VNFaceLandmarkRegion2D *)eye {
398
+ if (eye.pointCount < 6) return 1.0; // Default to open
399
+
400
+ CGPoint *points = (CGPoint *)eye.normalizedPoints;
401
+
402
+ // Simplified eye aspect ratio calculation
403
+ // Get approximate top, bottom, left, right points
404
+ CGFloat minY = points[0].y, maxY = points[0].y;
405
+ CGFloat minX = points[0].x, maxX = points[0].x;
406
+
407
+ for (int i = 1; i < eye.pointCount; i++) {
408
+ if (points[i].y < minY) minY = points[i].y;
409
+ if (points[i].y > maxY) maxY = points[i].y;
410
+ if (points[i].x < minX) minX = points[i].x;
411
+ if (points[i].x > maxX) maxX = points[i].x;
412
+ }
413
+
414
+ CGFloat height = maxY - minY;
415
+ CGFloat width = maxX - minX;
416
+
417
+ return width > 0 ? height / width : 1.0;
418
+ }
419
+
420
+ - (void)checkHeadTurn:(VNFaceObservation *)face targetYaw:(CGFloat)targetYaw {
421
+ // Vision framework provides yaw directly
422
+ CGFloat yaw = face.yaw ? face.yaw.floatValue * 180.0 / M_PI : 0.0; // Convert to degrees
423
+
424
+ if (targetYaw < 0) { // Turn left
425
+ if (yaw < targetYaw) {
426
+ dispatch_async(dispatch_get_main_queue(), ^{
427
+ [self onChallengeSuccess];
428
+ });
429
+ }
430
+ } else { // Turn right
431
+ if (yaw > targetYaw) {
432
+ dispatch_async(dispatch_get_main_queue(), ^{
433
+ [self onChallengeSuccess];
434
+ });
435
+ }
436
+ }
437
+ }
438
+
439
+ - (void)onChallengeSuccess {
440
+ if (self.challengeCompleted) return;
441
+
442
+ self.challengeCompleted = YES;
443
+ NSTimeInterval duration = ([[NSDate date] timeIntervalSince1970] - self.challengeStartTime) * 1000; // Convert to ms
444
+
445
+ // Create challenge result
446
+ NSDictionary *result = @{
447
+ @"challenge": self.currentChallenge,
448
+ @"success": @YES,
449
+ @"duration": @(duration),
450
+ @"confidence": @(0.9) // Mock confidence
451
+ };
452
+
453
+ // Emit challenge success event
454
+ [self sendEventWithName:@"onChallengeSuccess" body:@{
455
+ @"challenge": self.currentChallenge,
456
+ @"result": result
457
+ }];
458
+
459
+ // Move to next challenge or complete
460
+ self.currentChallengeIndex++;
461
+ if (self.currentChallengeIndex < self.challenges.count) {
462
+ // Delay before next challenge
463
+ dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(1.0 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
464
+ [self startNextChallenge];
465
+ });
466
+ } else {
467
+ // All challenges completed
468
+ [self onAllChallengesComplete];
469
+ }
470
+ }
471
+
472
+ - (void)onChallengeFailure:(NSString *)reason {
473
+ if (self.challengeCompleted) return;
474
+
475
+ self.challengeCompleted = YES;
476
+
477
+ [self sendEventWithName:@"onChallengeFailure" body:@{
478
+ @"challenge": self.currentChallenge,
479
+ @"reason": reason
480
+ }];
481
+
482
+ // Stop detection
483
+ [self stopLivenessDetection];
484
+
485
+ [self sendEventWithName:@"onDetectionFailed" body:@{@"reason": reason}];
486
+ }
487
+
488
+ - (void)onAllChallengesComplete {
489
+ [self sendEventWithName:@"onAllChallengesComplete" body:nil];
490
+
491
+ // Capture final screenshot
492
+ [self captureScreenshot];
493
+ }
494
+
495
+ - (void)captureScreenshot {
496
+ if (!self.photoOutput) {
497
+ [self sendDetectionResult:NO screenshot:nil failureReason:@"Failed to capture screenshot"];
498
+ return;
499
+ }
500
+
501
+ AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings];
502
+
503
+ [self.photoOutput capturePhotoWithSettings:settings delegate:(id<AVCapturePhotoCaptureDelegate>)self];
504
+ }
505
+
506
+ #pragma mark - AVCapturePhotoCaptureDelegate
507
+
508
+ - (void)captureOutput:(AVCapturePhotoOutput *)output
509
+ didFinishProcessingPhoto:(AVCapturePhoto *)photo
510
+ error:(NSError *)error API_AVAILABLE(ios(11.0)) {
511
+
512
+ if (error) {
513
+ RCTLogError(@"Photo capture error: %@", error.localizedDescription);
514
+ [self sendDetectionResult:NO screenshot:nil failureReason:@"Screenshot capture failed"];
515
+ return;
516
+ }
517
+
518
+ NSData *imageData = [photo fileDataRepresentation];
519
+ if (imageData) {
520
+ NSString *base64String = [imageData base64EncodedStringWithOptions:0];
521
+ NSString *base64Image = [NSString stringWithFormat:@"data:image/jpeg;base64,%@", base64String];
522
+
523
+ [self sendEventWithName:@"onScreenshotCaptured" body:@{@"screenshot": base64Image}];
524
+ [self sendDetectionResult:YES screenshot:base64Image failureReason:nil];
525
+ } else {
526
+ [self sendDetectionResult:NO screenshot:nil failureReason:@"Failed to process screenshot"];
527
+ }
528
+ }
529
+
530
+ - (void)sendDetectionResult:(BOOL)success screenshot:(NSString *)screenshot failureReason:(NSString *)failureReason {
531
+ [self stopLivenessDetection];
532
+
533
+ NSMutableDictionary *result = [NSMutableDictionary dictionary];
534
+ result[@"success"] = @(success);
535
+
536
+ if (screenshot) {
537
+ result[@"screenshot"] = screenshot;
538
+ }
539
+
540
+ if (failureReason) {
541
+ result[@"failureReason"] = failureReason;
542
+ }
543
+
544
+ // Create mock challenge results
545
+ NSMutableArray *challengeResults = [NSMutableArray array];
546
+ for (NSInteger i = 0; i < self.currentChallengeIndex; i++) {
547
+ NSDictionary *challengeResult = @{
548
+ @"challenge": self.challenges[i],
549
+ @"success": @YES,
550
+ @"duration": @(2000), // Mock duration
551
+ @"confidence": @(0.9)
552
+ };
553
+ [challengeResults addObject:challengeResult];
554
+ }
555
+ result[@"challengeResults"] = challengeResults;
556
+ result[@"totalDuration"] = @(([[NSDate date] timeIntervalSince1970] - self.challengeStartTime) * 1000);
557
+
558
+ [self sendEventWithName:@"onDetectionComplete" body:result];
559
+ }
560
+
561
+ - (void)stopLivenessDetection {
562
+ self.isDetectionRunning = NO;
563
+ self.currentChallenge = nil;
564
+ self.currentChallengeIndex = 0;
565
+ self.challengeCompleted = NO;
566
+
567
+ if (self.captureSession && self.captureSession.isRunning) {
568
+ dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
569
+ [self.captureSession stopRunning];
570
+ });
571
+ }
572
+ }
573
+
574
+ @end