omnipay-reactnative-sdk 1.2.2-beta.3 → 1.2.2-beta.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (135)
  1. package/README.md +93 -43
  2. package/android/build.gradle +16 -15
  3. package/android/src/main/AndroidManifest.xml +1 -1
  4. package/android/src/main/java/com/omniretail/omnipay/OmnipayActivityPackage.java +2 -2
  5. package/android/src/main/java/com/omniretail/omnipay/OmnipayLivenessCameraView.java +153 -0
  6. package/android/src/main/java/com/omniretail/omnipay/OmnipayLivenessCameraViewManager.java +49 -0
  7. package/android/src/main/java/com/omniretail/omnipay/OmnipayLivenessModule.java +524 -0
  8. package/ios/OmnipayLivenessCameraView.h +15 -0
  9. package/ios/OmnipayLivenessCameraView.m +80 -0
  10. package/ios/OmnipayLivenessCameraViewManager.m +19 -0
  11. package/ios/OmnipayLivenessModule.h +38 -0
  12. package/ios/OmnipayLivenessModule.m +554 -0
  13. package/lib/commonjs/components/OmnipayProvider.js +2 -66
  14. package/lib/commonjs/components/OmnipayProvider.js.map +1 -1
  15. package/lib/commonjs/components/OmnipayView.js.map +1 -1
  16. package/lib/commonjs/components/biometrics/FaceVerification.js +252 -345
  17. package/lib/commonjs/components/biometrics/FaceVerification.js.map +1 -1
  18. package/lib/commonjs/components/biometrics/LivenessDetection.js +90 -198
  19. package/lib/commonjs/components/biometrics/LivenessDetection.js.map +1 -1
  20. package/lib/commonjs/components/biometrics/OmnipayLivenessCameraView.js +15 -0
  21. package/lib/commonjs/components/biometrics/OmnipayLivenessCameraView.js.map +1 -0
  22. package/lib/commonjs/components/biometrics/PermissionManager.js +279 -0
  23. package/lib/commonjs/components/biometrics/PermissionManager.js.map +1 -0
  24. package/lib/commonjs/components/biometrics/index.js +45 -0
  25. package/lib/commonjs/components/biometrics/index.js.map +1 -0
  26. package/lib/commonjs/components/biometrics/types.js +17 -0
  27. package/lib/commonjs/components/biometrics/types.js.map +1 -0
  28. package/lib/commonjs/components/views/BvnVerification.js.map +1 -1
  29. package/lib/commonjs/components/views/PaylaterAgreement.js.map +1 -1
  30. package/lib/commonjs/components/views/Registration.js.map +1 -1
  31. package/lib/commonjs/index.js +23 -18
  32. package/lib/commonjs/index.js.map +1 -1
  33. package/lib/module/components/OmnipayProvider.js +3 -67
  34. package/lib/module/components/OmnipayProvider.js.map +1 -1
  35. package/lib/module/components/OmnipayView.js.map +1 -1
  36. package/lib/module/components/biometrics/FaceVerification.js +254 -346
  37. package/lib/module/components/biometrics/FaceVerification.js.map +1 -1
  38. package/lib/module/components/biometrics/LivenessDetection.js +75 -197
  39. package/lib/module/components/biometrics/LivenessDetection.js.map +1 -1
  40. package/lib/module/components/biometrics/OmnipayLivenessCameraView.js +7 -0
  41. package/lib/module/components/biometrics/OmnipayLivenessCameraView.js.map +1 -0
  42. package/lib/module/components/biometrics/PermissionManager.js +272 -0
  43. package/lib/module/components/biometrics/PermissionManager.js.map +1 -0
  44. package/lib/module/components/biometrics/index.js +12 -0
  45. package/lib/module/components/biometrics/index.js.map +1 -0
  46. package/lib/module/components/biometrics/types.js +16 -0
  47. package/lib/module/components/biometrics/types.js.map +1 -0
  48. package/lib/module/components/views/BvnVerification.js.map +1 -1
  49. package/lib/module/components/views/PaylaterAgreement.js.map +1 -1
  50. package/lib/module/components/views/Registration.js.map +1 -1
  51. package/lib/module/index.js +5 -4
  52. package/lib/module/index.js.map +1 -1
  53. package/lib/typescript/{src/components → components}/OmnipayProvider.d.ts +1 -1
  54. package/lib/typescript/components/OmnipayProvider.d.ts.map +1 -0
  55. package/lib/typescript/{src/components → components}/OmnipayView.d.ts +21 -20
  56. package/lib/typescript/components/OmnipayView.d.ts.map +1 -0
  57. package/lib/typescript/components/biometrics/FaceVerification.d.ts +11 -0
  58. package/lib/typescript/components/biometrics/FaceVerification.d.ts.map +1 -0
  59. package/lib/typescript/components/biometrics/LivenessDetection.d.ts +33 -0
  60. package/lib/typescript/components/biometrics/LivenessDetection.d.ts.map +1 -0
  61. package/lib/typescript/components/biometrics/OmnipayLivenessCameraView.d.ts +18 -0
  62. package/lib/typescript/components/biometrics/OmnipayLivenessCameraView.d.ts.map +1 -0
  63. package/lib/typescript/components/biometrics/PermissionManager.d.ts +58 -0
  64. package/lib/typescript/components/biometrics/PermissionManager.d.ts.map +1 -0
  65. package/lib/typescript/components/biometrics/index.d.ts +5 -0
  66. package/lib/typescript/components/biometrics/index.d.ts.map +1 -0
  67. package/lib/typescript/components/biometrics/types.d.ts +73 -0
  68. package/lib/typescript/components/biometrics/types.d.ts.map +1 -0
  69. package/lib/typescript/{src/components → components}/views/BvnVerification.d.ts +2 -1
  70. package/lib/typescript/components/views/BvnVerification.d.ts.map +1 -0
  71. package/lib/typescript/{src/components → components}/views/PaylaterAgreement.d.ts +2 -1
  72. package/lib/typescript/components/views/PaylaterAgreement.d.ts.map +1 -0
  73. package/lib/typescript/{src/components → components}/views/Registration.d.ts +2 -1
  74. package/lib/typescript/components/views/Registration.d.ts.map +1 -0
  75. package/lib/typescript/functions.d.ts.map +1 -0
  76. package/lib/typescript/hooks/useOmnipay.d.ts +28 -0
  77. package/lib/typescript/hooks/useOmnipay.d.ts.map +1 -0
  78. package/lib/typescript/index.d.ts +7 -0
  79. package/lib/typescript/index.d.ts.map +1 -0
  80. package/lib/typescript/lib/colors.d.ts.map +1 -0
  81. package/lib/typescript/lib/config.d.ts.map +1 -0
  82. package/omnipay-reactnative-sdk.podspec +32 -29
  83. package/package.json +16 -11
  84. package/src/components/OmnipayProvider.tsx +3 -106
  85. package/src/components/OmnipayView.tsx +1 -1
  86. package/src/components/biometrics/FaceVerification.tsx +291 -368
  87. package/src/components/biometrics/LivenessDetection.ts +113 -250
  88. package/src/components/biometrics/OmnipayLivenessCameraView.tsx +19 -0
  89. package/src/components/biometrics/PermissionManager.ts +317 -0
  90. package/src/components/biometrics/index.ts +11 -0
  91. package/src/components/biometrics/types.ts +86 -0
  92. package/src/components/views/BvnVerification.tsx +1 -1
  93. package/src/components/views/PaylaterAgreement.tsx +1 -1
  94. package/src/components/views/Registration.tsx +1 -1
  95. package/src/index.tsx +4 -15
  96. package/android/src/main/java/com/omniretail/omnipay/LivenessCameraViewManager.java +0 -116
  97. package/android/src/main/java/com/omniretail/omnipay/LivenessDetectionModule.java +0 -588
  98. package/ios/LivenessCameraView.h +0 -22
  99. package/ios/LivenessCameraView.m +0 -135
  100. package/ios/LivenessCameraViewManager.h +0 -12
  101. package/ios/LivenessCameraViewManager.m +0 -24
  102. package/ios/LivenessDetectionModule.h +0 -46
  103. package/ios/LivenessDetectionModule.m +0 -603
  104. package/lib/commonjs/components/biometrics/LivenessCameraView.js +0 -45
  105. package/lib/commonjs/components/biometrics/LivenessCameraView.js.map +0 -1
  106. package/lib/module/components/biometrics/LivenessCameraView.js +0 -39
  107. package/lib/module/components/biometrics/LivenessCameraView.js.map +0 -1
  108. package/lib/typescript/demo/src/App.d.ts +0 -3
  109. package/lib/typescript/demo/src/App.d.ts.map +0 -1
  110. package/lib/typescript/demo/src/Body.d.ts +0 -3
  111. package/lib/typescript/demo/src/Body.d.ts.map +0 -1
  112. package/lib/typescript/demo/src/NotificationsExample.d.ts +0 -4
  113. package/lib/typescript/demo/src/NotificationsExample.d.ts.map +0 -1
  114. package/lib/typescript/src/components/OmnipayProvider.d.ts.map +0 -1
  115. package/lib/typescript/src/components/OmnipayView.d.ts.map +0 -1
  116. package/lib/typescript/src/components/biometrics/FaceVerification.d.ts +0 -12
  117. package/lib/typescript/src/components/biometrics/FaceVerification.d.ts.map +0 -1
  118. package/lib/typescript/src/components/biometrics/LivenessCameraView.d.ts +0 -22
  119. package/lib/typescript/src/components/biometrics/LivenessCameraView.d.ts.map +0 -1
  120. package/lib/typescript/src/components/biometrics/LivenessDetection.d.ts +0 -73
  121. package/lib/typescript/src/components/biometrics/LivenessDetection.d.ts.map +0 -1
  122. package/lib/typescript/src/components/views/BvnVerification.d.ts.map +0 -1
  123. package/lib/typescript/src/components/views/PaylaterAgreement.d.ts.map +0 -1
  124. package/lib/typescript/src/components/views/Registration.d.ts.map +0 -1
  125. package/lib/typescript/src/functions.d.ts.map +0 -1
  126. package/lib/typescript/src/hooks/useOmnipay.d.ts +0 -28
  127. package/lib/typescript/src/hooks/useOmnipay.d.ts.map +0 -1
  128. package/lib/typescript/src/index.d.ts +0 -8
  129. package/lib/typescript/src/index.d.ts.map +0 -1
  130. package/lib/typescript/src/lib/colors.d.ts.map +0 -1
  131. package/lib/typescript/src/lib/config.d.ts.map +0 -1
  132. package/src/components/biometrics/LivenessCameraView.tsx +0 -61
  133. /package/lib/typescript/{src/functions.d.ts → functions.d.ts} +0 -0
  134. /package/lib/typescript/{src/lib → lib}/colors.d.ts +0 -0
  135. /package/lib/typescript/{src/lib → lib}/config.d.ts +0 -0
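This release effectively renames the biometrics stack: the LivenessDetectionModule / LivenessCameraView sources (Android, iOS and JS) are removed in favour of OmnipayLivenessModule / OmnipayLivenessCameraView, a JS-side PermissionManager and typed biometrics barrel are added, and the TypeScript declarations move from lib/typescript/src/** to lib/typescript/**. A minimal consumer-side sketch of reaching the renamed module follows; the registration name comes from RCT_EXPORT_MODULE(...) in the iOS file shown below, while the wrapper itself and the use of React Native's stock PermissionsAndroid call are assumptions (the SDK's own PermissionManager is not shown in this excerpt).

// Hypothetical helper; only OmnipayLivenessModule.isSupported() is taken from the diff below.
import { NativeModules, PermissionsAndroid, Platform } from 'react-native';

const { OmnipayLivenessModule } = NativeModules;

export async function canRunLiveness(): Promise<boolean> {
  if (Platform.OS === 'android') {
    // Stock React Native permission request; the SDK's new PermissionManager
    // (files 22, 42 and 89 above) presumably wraps something along these lines.
    const status = await PermissionsAndroid.request(PermissionsAndroid.PERMISSIONS.CAMERA);
    if (status !== PermissionsAndroid.RESULTS.GRANTED) {
      return false;
    }
  }
  // isSupported is exported by the native module in this diff.
  return OmnipayLivenessModule ? OmnipayLivenessModule.isSupported() : false;
}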
package/ios/OmnipayLivenessModule.m
@@ -0,0 +1,554 @@
+ #import "OmnipayLivenessModule.h"
+ #import <React/RCTLog.h>
+ #import <React/RCTUtils.h>
+
+ // Challenge constants
+ static NSString *const CHALLENGE_SMILE = @"smile";
+ static NSString *const CHALLENGE_BLINK = @"blink";
+ static NSString *const CHALLENGE_TURN_LEFT = @"turnLeft";
+ static NSString *const CHALLENGE_TURN_RIGHT = @"turnRight";
+
+ // Detection thresholds
+ static const CGFloat HEAD_TURN_THRESHOLD = 15.0;
+ static const NSInteger BLINK_FRAMES_THRESHOLD = 3;
+
+ @implementation OmnipayLivenessModule
+
+ RCT_EXPORT_MODULE(OmnipayLivenessModule)
+
+ - (instancetype)init {
+   self = [super init];
+   if (self) {
+     [self initializeDetection];
+   }
+   return self;
+ }
+
+ - (void)initializeDetection {
+   self.headTurnThreshold = HEAD_TURN_THRESHOLD;
+   self.blinkFramesThreshold = BLINK_FRAMES_THRESHOLD;
+   self.challengeTimeoutSeconds = 10.0; // Default timeout
+
+   // Initialize video data output queue
+   self.videoDataOutputQueue = dispatch_queue_create("videoDataOutputQueue", DISPATCH_QUEUE_SERIAL);
+
+   // Initialize Vision requests
+   self.faceDetectionRequest = [[VNDetectFaceRectanglesRequest alloc] initWithCompletionHandler:^(VNRequest *request, NSError *error) {
+     if (error) {
+       RCTLogError(@"Face detection error: %@", error.localizedDescription);
+       return;
+     }
+     [self processFaceDetectionResults:request.results];
+   }];
+
+   self.faceLandmarksRequest = [[VNDetectFaceLandmarksRequest alloc] initWithCompletionHandler:^(VNRequest *request, NSError *error) {
+     if (error) {
+       RCTLogError(@"Face landmarks error: %@", error.localizedDescription);
+       return;
+     }
+     [self processFaceLandmarksResults:request.results];
+   }];
+ }
+
+ + (BOOL)requiresMainQueueSetup {
+   return NO;
+ }
+
+ - (NSArray<NSString *> *)supportedEvents {
+   return @[
+     @"onChallengeStart",
+     @"onChallengeSuccess",
+     @"onChallengeFailure",
+     @"onAllChallengesComplete",
+     @"onScreenshotCaptured",
+     @"onDetectionFailed",
+     @"onDetectionComplete"
+   ];
+ }
+
+ RCT_EXPORT_METHOD(isSupported:(RCTPromiseResolveBlock)resolve
+                   rejecter:(RCTPromiseRejectBlock)reject) {
+   // Check if Vision framework is available and camera is available
+   BOOL isSupported = [VNDetectFaceRectanglesRequest class] != nil &&
+                      [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo] != nil;
+   resolve(@(isSupported));
+ }
+
+ RCT_EXPORT_METHOD(startLivenessDetection:(NSDictionary *)config
+                   resolver:(RCTPromiseResolveBlock)resolve
+                   rejecter:(RCTPromiseRejectBlock)reject) {
+
+   if (self.isDetectionRunning) {
+     reject(@"DETECTION_RUNNING", @"Detection is already running", nil);
+     return;
+   }
+
+   // Parse configuration
+   [self parseConfig:config];
+
+   // Request camera permission
+   [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
+     dispatch_async(dispatch_get_main_queue(), ^{
+       if (!granted) {
+         reject(@"CAMERA_PERMISSION", @"Camera permission denied", nil);
+         return;
+       }
+
+       NSError *error;
+       if ([self setupCameraSession:&error]) {
+         [self startCameraSession];
+         self.isDetectionRunning = YES;
+         [self startNextChallenge];
+         resolve(nil);
+       } else {
+         reject(@"CAMERA_SETUP_ERROR", error.localizedDescription ?: @"Failed to setup camera", error);
+       }
+     });
+   }];
+ }
+
+ RCT_EXPORT_METHOD(stopDetection:(RCTPromiseResolveBlock)resolve
+                   rejecter:(RCTPromiseRejectBlock)reject) {
+   [self stopLivenessDetection];
+   resolve(nil);
+ }
+
+ - (void)parseConfig:(NSDictionary *)config {
+   // Parse challenges
+   NSArray *challengesArray = config[@"challenges"];
+   if (challengesArray && challengesArray.count > 0) {
+     self.challenges = challengesArray;
+   } else {
+     // Default challenges
+     self.challenges = @[CHALLENGE_SMILE, CHALLENGE_BLINK, CHALLENGE_TURN_LEFT, CHALLENGE_TURN_RIGHT];
+   }
+
+   // Parse timeout
+   NSNumber *timeout = config[@"challengeTimeout"];
+   if (timeout) {
+     self.challengeTimeoutSeconds = timeout.doubleValue;
+   }
+
+   self.currentChallengeIndex = 0;
+ }
+
+ - (BOOL)setupCameraSession:(NSError **)error {
+   // Create capture session
+   self.captureSession = [[AVCaptureSession alloc] init];
+   self.captureSession.sessionPreset = AVCaptureSessionPresetMedium;
+
+   // Get front camera
+   AVCaptureDevice *frontCamera = nil;
+   NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
+   for (AVCaptureDevice *device in devices) {
+     if (device.position == AVCaptureDevicePositionFront) {
+       frontCamera = device;
+       break;
+     }
+   }
+
+   if (!frontCamera) {
+     if (error) {
+       *error = [NSError errorWithDomain:@"OmnipayLiveness"
+                                    code:1001
+                                userInfo:@{NSLocalizedDescriptionKey: @"Front camera not available"}];
+     }
+     return NO;
+   }
+
+   // Create device input
+   NSError *inputError;
+   AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:frontCamera error:&inputError];
+   if (!deviceInput) {
+     if (error) *error = inputError;
+     return NO;
+   }
+
+   if (![self.captureSession canAddInput:deviceInput]) {
+     if (error) {
+       *error = [NSError errorWithDomain:@"OmnipayLiveness"
+                                    code:1002
+                                userInfo:@{NSLocalizedDescriptionKey: @"Cannot add device input"}];
+     }
+     return NO;
+   }
+   [self.captureSession addInput:deviceInput];
+
+   // Create video data output
+   self.videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
+   self.videoDataOutput.videoSettings = @{(NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA)};
+   [self.videoDataOutput setSampleBufferDelegate:self queue:self.videoDataOutputQueue];
+
+   if (![self.captureSession canAddOutput:self.videoDataOutput]) {
+     if (error) {
+       *error = [NSError errorWithDomain:@"OmnipayLiveness"
+                                    code:1003
+                                userInfo:@{NSLocalizedDescriptionKey: @"Cannot add video data output"}];
+     }
+     return NO;
+   }
+   [self.captureSession addOutput:self.videoDataOutput];
+
+   // Create photo output for final screenshot
+   self.photoOutput = [[AVCapturePhotoOutput alloc] init];
+   if ([self.captureSession canAddOutput:self.photoOutput]) {
+     [self.captureSession addOutput:self.photoOutput];
+   }
+
+   return YES;
+ }
+
+ - (void)startCameraSession {
+   dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
+     [self.captureSession startRunning];
+   });
+ }
+
+ - (void)startNextChallenge {
+   if (self.currentChallengeIndex >= self.challenges.count) {
+     return;
+   }
+
+   self.currentChallenge = self.challenges[self.currentChallengeIndex];
+   self.challengeStartTime = [[NSDate date] timeIntervalSince1970];
+   self.challengeCompleted = NO;
+
+   // Reset blink detection state
+   if ([self.currentChallenge isEqualToString:CHALLENGE_BLINK]) {
+     self.blinkCounter = 0;
+     self.eyesClosedFrames = 0;
+     self.previousEyeOpenState = YES;
+   }
+
+   // Emit challenge start event
+   [self sendEventWithName:@"onChallengeStart" body:@{@"challenge": self.currentChallenge}];
+ }
+
+ #pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate
+
+ - (void)captureOutput:(AVCaptureOutput *)output
+ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
+        fromConnection:(AVCaptureConnection *)connection {
+
+   if (!self.isDetectionRunning || !self.currentChallenge) {
+     return;
+   }
+
+   // Check timeout
+   NSTimeInterval currentTime = [[NSDate date] timeIntervalSince1970];
+   if (currentTime - self.challengeStartTime > self.challengeTimeoutSeconds) {
+     dispatch_async(dispatch_get_main_queue(), ^{
+       [self onChallengeFailure:@"Challenge timeout"];
+     });
+     return;
+   }
+
+   if (self.challengeCompleted) {
+     return;
+   }
+
+   // Convert sample buffer to CIImage
+   CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+   if (!imageBuffer) return;
+
+   CIImage *ciImage = [CIImage imageWithCVImageBuffer:imageBuffer];
+
+   // Create Vision image request handler
+   VNImageRequestHandler *imageRequestHandler = [[VNImageRequestHandler alloc] initWithCIImage:ciImage options:@{}];
+
+   NSError *error;
+   if ([self.currentChallenge isEqualToString:CHALLENGE_SMILE] ||
+       [self.currentChallenge isEqualToString:CHALLENGE_TURN_LEFT] ||
+       [self.currentChallenge isEqualToString:CHALLENGE_TURN_RIGHT]) {
+
+     // Use face landmarks for smile and head pose detection
+     [imageRequestHandler performRequests:@[self.faceLandmarksRequest] error:&error];
+   } else {
+     // Use basic face detection for blink detection
+     [imageRequestHandler performRequests:@[self.faceDetectionRequest, self.faceLandmarksRequest] error:&error];
+   }
+
+   if (error) {
+     RCTLogError(@"Vision request error: %@", error.localizedDescription);
+   }
+ }
+
+ - (void)processFaceDetectionResults:(NSArray<VNFaceObservation *> *)results {
+   if (results.count == 0) {
+     return; // No face detected
+   }
+
+   VNFaceObservation *face = results.firstObject;
+   [self processFaceObservation:face];
+ }
+
+ - (void)processFaceLandmarksResults:(NSArray<VNFaceObservation *> *)results {
+   if (results.count == 0) {
+     return; // No face detected
+   }
+
+   VNFaceObservation *face = results.firstObject;
+   [self processFaceObservation:face];
+ }
+
+ - (void)processFaceObservation:(VNFaceObservation *)face {
+   if ([self.currentChallenge isEqualToString:CHALLENGE_SMILE]) {
+     [self checkSmile:face];
+   } else if ([self.currentChallenge isEqualToString:CHALLENGE_BLINK]) {
+     [self checkBlink:face];
+   } else if ([self.currentChallenge isEqualToString:CHALLENGE_TURN_LEFT]) {
+     [self checkHeadTurn:face targetYaw:-self.headTurnThreshold];
+   } else if ([self.currentChallenge isEqualToString:CHALLENGE_TURN_RIGHT]) {
+     [self checkHeadTurn:face targetYaw:self.headTurnThreshold];
+   }
+ }
+
+ - (void)checkSmile:(VNFaceObservation *)face {
+   if (!face.landmarks) return;
+
+   // Simplified smile detection based on mouth landmarks
+   VNFaceLandmarkRegion2D *outerLips = face.landmarks.outerLips;
+   if (outerLips && outerLips.pointCount > 0) {
+     // Basic smile detection - check if mouth corners are raised
+     // This is a simplified implementation
+     CGPoint *points = (CGPoint *)outerLips.normalizedPoints;
+
+     // Get mouth corner points (approximate indices)
+     if (outerLips.pointCount >= 12) {
+       CGPoint leftCorner = points[0];
+       CGPoint rightCorner = points[6];
+       CGPoint topLip = points[3];
+       CGPoint bottomLip = points[9];
+
+       // Simple smile detection: corners higher than center
+       CGFloat mouthHeight = topLip.y - bottomLip.y;
+       CGFloat cornerElevation = (leftCorner.y + rightCorner.y) / 2.0 - bottomLip.y;
+
+       if (cornerElevation > mouthHeight * 0.3) { // Smile threshold
+         dispatch_async(dispatch_get_main_queue(), ^{
+           [self onChallengeSuccess];
+         });
+       }
+     }
+   }
+ }
+
+ - (void)checkBlink:(VNFaceObservation *)face {
+   if (!face.landmarks) return;
+
+   // Check eye landmarks for blink detection
+   VNFaceLandmarkRegion2D *leftEye = face.landmarks.leftEye;
+   VNFaceLandmarkRegion2D *rightEye = face.landmarks.rightEye;
+
+   if (leftEye && rightEye && leftEye.pointCount > 0 && rightEye.pointCount > 0) {
+     // Simplified blink detection based on eye aspect ratio
+     BOOL eyesOpen = [self areEyesOpen:leftEye rightEye:rightEye];
+
+     if (!eyesOpen) {
+       self.eyesClosedFrames++;
+     } else {
+       if (self.eyesClosedFrames >= self.blinkFramesThreshold && !self.previousEyeOpenState) {
+         self.blinkCounter++;
+         if (self.blinkCounter >= 1) { // Single blink required
+           dispatch_async(dispatch_get_main_queue(), ^{
+             [self onChallengeSuccess];
+           });
+         }
+       }
+       self.eyesClosedFrames = 0;
+     }
+
+     self.previousEyeOpenState = eyesOpen;
+   }
+ }
+
+ - (BOOL)areEyesOpen:(VNFaceLandmarkRegion2D *)leftEye rightEye:(VNFaceLandmarkRegion2D *)rightEye {
+   // Calculate eye aspect ratio for both eyes
+   CGFloat leftEAR = [self calculateEyeAspectRatio:leftEye];
+   CGFloat rightEAR = [self calculateEyeAspectRatio:rightEye];
+
+   // Average eye aspect ratio
+   CGFloat averageEAR = (leftEAR + rightEAR) / 2.0;
+
+   // Threshold for eye open/closed state
+   return averageEAR > 0.2; // Adjust threshold as needed
+ }
+
+ - (CGFloat)calculateEyeAspectRatio:(VNFaceLandmarkRegion2D *)eye {
+   if (eye.pointCount < 6) return 1.0; // Default to open
+
+   CGPoint *points = (CGPoint *)eye.normalizedPoints;
+
+   // Simplified eye aspect ratio calculation
+   // Get approximate top, bottom, left, right points
+   CGFloat minY = points[0].y, maxY = points[0].y;
+   CGFloat minX = points[0].x, maxX = points[0].x;
+
+   for (int i = 1; i < eye.pointCount; i++) {
+     if (points[i].y < minY) minY = points[i].y;
+     if (points[i].y > maxY) maxY = points[i].y;
+     if (points[i].x < minX) minX = points[i].x;
+     if (points[i].x > maxX) maxX = points[i].x;
+   }
+
+   CGFloat height = maxY - minY;
+   CGFloat width = maxX - minX;
+
+   return width > 0 ? height / width : 1.0;
+ }
+
+ - (void)checkHeadTurn:(VNFaceObservation *)face targetYaw:(CGFloat)targetYaw {
+   // Vision framework provides yaw directly
+   CGFloat yaw = face.yaw ? face.yaw.floatValue * 180.0 / M_PI : 0.0; // Convert to degrees
+
+   if (targetYaw < 0) { // Turn left
+     if (yaw < targetYaw) {
+       dispatch_async(dispatch_get_main_queue(), ^{
+         [self onChallengeSuccess];
+       });
+     }
+   } else { // Turn right
+     if (yaw > targetYaw) {
+       dispatch_async(dispatch_get_main_queue(), ^{
+         [self onChallengeSuccess];
+       });
+     }
+   }
+ }
+
+ - (void)onChallengeSuccess {
+   if (self.challengeCompleted) return;
+
+   self.challengeCompleted = YES;
+   NSTimeInterval duration = ([[NSDate date] timeIntervalSince1970] - self.challengeStartTime) * 1000; // Convert to ms
+
+   // Create challenge result
+   NSDictionary *result = @{
+     @"challenge": self.currentChallenge,
+     @"success": @YES,
+     @"duration": @(duration),
+     @"confidence": @(0.9) // Mock confidence
+   };
+
+   // Emit challenge success event
+   [self sendEventWithName:@"onChallengeSuccess" body:@{
+     @"challenge": self.currentChallenge,
+     @"result": result
+   }];
+
+   // Move to next challenge or complete
+   self.currentChallengeIndex++;
+   if (self.currentChallengeIndex < self.challenges.count) {
+     // Delay before next challenge
+     dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(1.0 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
+       [self startNextChallenge];
+     });
+   } else {
+     // All challenges completed
+     [self onAllChallengesComplete];
+   }
+ }
+
+ - (void)onChallengeFailure:(NSString *)reason {
+   if (self.challengeCompleted) return;
+
+   self.challengeCompleted = YES;
+
+   [self sendEventWithName:@"onChallengeFailure" body:@{
+     @"challenge": self.currentChallenge,
+     @"reason": reason
+   }];
+
+   // Stop detection
+   [self stopLivenessDetection];
+
+   [self sendEventWithName:@"onDetectionFailed" body:@{@"reason": reason}];
+ }
+
+ - (void)onAllChallengesComplete {
+   [self sendEventWithName:@"onAllChallengesComplete" body:nil];
+
+   // Capture final screenshot
+   [self captureScreenshot];
+ }
+
+ - (void)captureScreenshot {
+   if (!self.photoOutput) {
+     [self sendDetectionResult:NO screenshot:nil failureReason:@"Failed to capture screenshot"];
+     return;
+   }
+
+   AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings];
+
+   [self.photoOutput capturePhotoWithSettings:settings delegate:(id<AVCapturePhotoCaptureDelegate>)self];
+ }
+
+ #pragma mark - AVCapturePhotoCaptureDelegate
+
+ - (void)captureOutput:(AVCapturePhotoOutput *)output
+ didFinishProcessingPhoto:(AVCapturePhoto *)photo
+                  error:(NSError *)error API_AVAILABLE(ios(11.0)) {
+
+   if (error) {
+     RCTLogError(@"Photo capture error: %@", error.localizedDescription);
+     [self sendDetectionResult:NO screenshot:nil failureReason:@"Screenshot capture failed"];
+     return;
+   }
+
+   NSData *imageData = [photo fileDataRepresentation];
+   if (imageData) {
+     NSString *base64String = [imageData base64EncodedStringWithOptions:0];
+     NSString *base64Image = [NSString stringWithFormat:@"data:image/jpeg;base64,%@", base64String];
+
+     [self sendEventWithName:@"onScreenshotCaptured" body:@{@"screenshot": base64Image}];
+     [self sendDetectionResult:YES screenshot:base64Image failureReason:nil];
+   } else {
+     [self sendDetectionResult:NO screenshot:nil failureReason:@"Failed to process screenshot"];
+   }
+ }
+
+ - (void)sendDetectionResult:(BOOL)success screenshot:(NSString *)screenshot failureReason:(NSString *)failureReason {
+   [self stopLivenessDetection];
+
+   NSMutableDictionary *result = [NSMutableDictionary dictionary];
+   result[@"success"] = @(success);
+
+   if (screenshot) {
+     result[@"screenshot"] = screenshot;
+   }
+
+   if (failureReason) {
+     result[@"failureReason"] = failureReason;
+   }
+
+   // Create mock challenge results
+   NSMutableArray *challengeResults = [NSMutableArray array];
+   for (NSInteger i = 0; i < self.currentChallengeIndex; i++) {
+     NSDictionary *challengeResult = @{
+       @"challenge": self.challenges[i],
+       @"success": @YES,
+       @"duration": @(2000), // Mock duration
+       @"confidence": @(0.9)
+     };
+     [challengeResults addObject:challengeResult];
+   }
+   result[@"challengeResults"] = challengeResults;
+   result[@"totalDuration"] = @(([[NSDate date] timeIntervalSince1970] - self.challengeStartTime) * 1000);
+
+   [self sendEventWithName:@"onDetectionComplete" body:result];
+ }
+
+ - (void)stopLivenessDetection {
+   self.isDetectionRunning = NO;
+   self.currentChallenge = nil;
+   self.currentChallengeIndex = 0;
+   self.challengeCompleted = NO;
+
+   if (self.captureSession && self.captureSession.isRunning) {
+     dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
+       [self.captureSession stopRunning];
+     });
+   }
+ }
+
+ @end
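A minimal sketch of driving the module above from JavaScript. The method names (isSupported, startLivenessDetection, stopDetection), the event names, and the config keys (challenges, challengeTimeout in seconds) come from the Objective-C file above; the payload handling and this helper's shape are assumptions, not the SDK's documented API.

import { NativeEventEmitter, NativeModules } from 'react-native';

const { OmnipayLivenessModule } = NativeModules;
const livenessEvents = new NativeEventEmitter(OmnipayLivenessModule);

export function runLivenessChallenges(): Promise<void> {
  const subscriptions = [
    livenessEvents.addListener('onChallengeStart', (e) => console.log('challenge', e.challenge)),
    livenessEvents.addListener('onChallengeSuccess', (e) => console.log('passed', e.challenge)),
    livenessEvents.addListener('onChallengeFailure', (e) => console.warn('failed', e.reason)),
    livenessEvents.addListener('onDetectionComplete', (result) => {
      // result.success, result.screenshot (base64 data URI), result.challengeResults
      subscriptions.forEach((s) => s.remove());
    }),
  ];

  // Config keys mirror parseConfig: above; the timeout is interpreted as seconds.
  return OmnipayLivenessModule.startLivenessDetection({
    challenges: ['smile', 'blink', 'turnLeft', 'turnRight'],
    challengeTimeout: 10,
  });
}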
package/lib/commonjs/components/OmnipayProvider.js
@@ -9,11 +9,9 @@ var _reactNative = require("react-native");
  var _reactNativeWebview = _interopRequireDefault(require("react-native-webview"));
  var _functions = require("../functions");
  var _reactNativeShare = _interopRequireDefault(require("react-native-share"));
- var _asyncStorage = _interopRequireDefault(require("@react-native-async-storage/async-storage"));
  var _FaceVerification = _interopRequireDefault(require("./biometrics/FaceVerification"));
  function _interopRequireDefault(e) { return e && e.__esModule ? e : { default: e }; }
  function _interopRequireWildcard(e, t) { if ("function" == typeof WeakMap) var r = new WeakMap(), n = new WeakMap(); return (_interopRequireWildcard = function (e, t) { if (!t && e && e.__esModule) return e; var o, i, f = { __proto__: null, default: e }; if (null === e || "object" != typeof e && "function" != typeof e) return f; if (o = t ? n : r) { if (o.has(e)) return o.get(e); o.set(e, f); } for (const t in e) "default" !== t && {}.hasOwnProperty.call(e, t) && ((i = (o = Object.defineProperty) && Object.getOwnPropertyDescriptor(e, t)) && (i.get || i.set) ? o(f, t, i) : f[t] = e[t]); return f; })(e, t); }
- const OmnipayActivity = _reactNative.NativeModules.OmnipayActivity || {};
  let defaultValue = {
  initiateBills: () => null,
  initiateWallet: () => null
@@ -36,74 +34,15 @@ const OmnipayProvider = ({
  const isValidEnv = ['prod', 'dev'].includes(env);
  const isValidColor = color.length > 2;
  const onCloseRef = (0, _react.useRef)(undefined);
- const [canUsePos, setCanUsePos] = (0, _react.useState)(false);
  const [showFaceVerification, setShowFaceVerification] = (0, _react.useState)(false);
  (0, _react.useEffect)(() => {
- checkPaymentApp();
  setTimeout(() => {
  setShowFaceVerification(true);
- }, 5000);
+ }, 4000);
  }, []);
  (0, _react.useEffect)(() => {
  visibilityRef.current = isVisible;
  }, [isVisible]);
- (0, _react.useEffect)(() => {
- if (canUsePos) {
- const eventEmitter = new _reactNative.NativeEventEmitter(OmnipayActivity);
- eventEmitter.addListener('OmnipayEvent', event => {
- console.log('native event', event);
- });
- }
- }, [canUsePos]);
- async function checkPaymentApp() {
- try {
- if (_reactNative.Platform.OS === 'android') {
- const isInstalled = await OmnipayActivity.isPackageInstalled('com.horizonpay.sample');
- if (isInstalled) {
- setCanUsePos(true);
- }
- }
- } catch (error) {}
- }
- async function startPosTransaction({
- amount,
- purchaseType,
- print,
- rrn,
- stan,
- terminalId
- }) {
- try {
- if (_reactNative.Platform.OS === 'android') {
- let result = '';
- if (purchaseType === 'KEY EXCHANGE') {
- const isKeyExchanged = await _asyncStorage.default.getItem('isKeyExchanged');
- if (!isKeyExchanged) {
- result = await OmnipayActivity.initiateHorizonTransaction(amount, purchaseType, color, print, rrn, stan, terminalId);
- if (terminalId && result && result.toLowerCase().includes('-message-success')) {
- await _asyncStorage.default.setItem('isKeyExchanged', terminalId);
- }
- postMessage({
- dataKey: 'onPosKeyExchanged',
- dataValue: result
- });
- }
- } else {
- result = await OmnipayActivity.initiateHorizonTransaction(amount, purchaseType, color, print, rrn, stan, terminalId);
- postMessage({
- dataKey: 'onPosTransactionSuccess',
- dataValue: result
- });
- }
- }
- } catch (error) {
- console.log(error);
- postMessage({
- dataKey: 'onPosTransactionFailure',
- dataValue: ''
- });
- }
- }
  function getWebviewStyle() {
  if (!showWebview) {
  return {
@@ -166,9 +105,6 @@ const OmnipayProvider = ({
  if (dataKey === 'shareReceipt') {
  shareReceipt(dataValue);
  }
- if (dataKey === 'startPosTransaction') {
- startPosTransaction(JSON.parse(dataValue));
- }
  }
  } catch (error) {}
  }
@@ -229,7 +165,7 @@ const OmnipayProvider = ({
  const isValidUserRef = !!userRef && !!userRef.trim();
  const usesNativeShare = true;
  if (isPhoneNumberValid || isValidCustomerRef || isValidUserRef) {
- const webUrl = `${webHost}?theme=${color}&view=wallet&publicKey=${publicKey}&phoneNumber=${phoneNumber}&usesPaylater=${usesPaylater}&usesPromo=${usesPromo}&usesAirtimeData=${usesAirtimeData}&usesTransfer=${usesTransfer}&usesBills=${usesBills}&usesPos=${usesPos}&customerRef=${customerRef}&userRef=${userRef}&promoBalanceOffset=${promoBalanceOffset}&deviceId=${deviceId}&deviceName=${deviceName}&hideWalletTransfer=${hideWalletTransfer}&bvnRequired=${isBvnValidationRequired}&usesNativeShare=${usesNativeShare}&isPosEnabled=${canUsePos}&walletTab=${walletTab}&sessionId=${sessionId}&kycStatus=${kycStatus || ''}&launchPage=${launchPage}`;
+ const webUrl = `${webHost}?theme=${color}&view=wallet&publicKey=${publicKey}&phoneNumber=${phoneNumber}&usesPaylater=${usesPaylater}&usesPromo=${usesPromo}&usesAirtimeData=${usesAirtimeData}&usesTransfer=${usesTransfer}&usesBills=${usesBills}&usesPos=${usesPos}&customerRef=${customerRef}&userRef=${userRef}&promoBalanceOffset=${promoBalanceOffset}&deviceId=${deviceId}&deviceName=${deviceName}&hideWalletTransfer=${hideWalletTransfer}&bvnRequired=${isBvnValidationRequired}&usesNativeShare=${usesNativeShare}&walletTab=${walletTab}&sessionId=${sessionId}&kycStatus=${kycStatus || ''}&launchPage=${launchPage}`;
  setWebviewUrl(webUrl);
  setIsVisible(true);
  onCloseRef.current = onClose;
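The provider build drops the POS/AsyncStorage path entirely: the Horizon key-exchange logic, the startPosTransaction message handler and the isPosEnabled query parameter are removed, and the face-verification overlay now appears after 4 seconds instead of 5. For reference, a hypothetical helper that assembles the trimmed wallet URL from the parameters visible in the new webUrl line; the SDK itself builds the string inline and does not URL-encode values, so the encoding here is an added safeguard, not the SDK's behaviour.

type WalletParams = Record<string, string | number | boolean>;

// Builds `${webHost}?key=value&...` from the query parameters listed above.
export function buildWalletUrl(webHost: string, params: WalletParams): string {
  const query = Object.entries(params)
    .map(([key, value]) => `${key}=${encodeURIComponent(String(value))}`)
    .join('&');
  return `${webHost}?${query}`;
}

// e.g. buildWalletUrl(webHost, { theme: color, view: 'wallet', publicKey, phoneNumber, usesPos, walletTab, sessionId });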