omnipay-reactnative-sdk 1.2.2-beta.4 → 1.2.2-beta.6

This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (135)
  1. package/README.md +93 -43
  2. package/android/build.gradle +16 -15
  3. package/android/src/main/AndroidManifest.xml +1 -1
  4. package/android/src/main/java/com/omniretail/omnipay/OmnipayActivityPackage.java +2 -2
  5. package/android/src/main/java/com/omniretail/omnipay/OmnipayLivenessCameraView.java +153 -0
  6. package/android/src/main/java/com/omniretail/omnipay/OmnipayLivenessCameraViewManager.java +49 -0
  7. package/android/src/main/java/com/omniretail/omnipay/OmnipayLivenessModule.java +524 -0
  8. package/ios/OmnipayLivenessCameraView.h +15 -0
  9. package/ios/OmnipayLivenessCameraView.m +80 -0
  10. package/ios/OmnipayLivenessCameraViewManager.m +19 -0
  11. package/ios/OmnipayLivenessModule.h +38 -0
  12. package/ios/OmnipayLivenessModule.m +554 -0
  13. package/lib/commonjs/components/OmnipayProvider.js +2 -66
  14. package/lib/commonjs/components/OmnipayProvider.js.map +1 -1
  15. package/lib/commonjs/components/OmnipayView.js.map +1 -1
  16. package/lib/commonjs/components/biometrics/FaceVerification.js +252 -345
  17. package/lib/commonjs/components/biometrics/FaceVerification.js.map +1 -1
  18. package/lib/commonjs/components/biometrics/LivenessDetection.js +90 -198
  19. package/lib/commonjs/components/biometrics/LivenessDetection.js.map +1 -1
  20. package/lib/commonjs/components/biometrics/OmnipayLivenessCameraView.js +15 -0
  21. package/lib/commonjs/components/biometrics/OmnipayLivenessCameraView.js.map +1 -0
  22. package/lib/commonjs/components/biometrics/PermissionManager.js +279 -0
  23. package/lib/commonjs/components/biometrics/PermissionManager.js.map +1 -0
  24. package/lib/commonjs/components/biometrics/index.js +45 -0
  25. package/lib/commonjs/components/biometrics/index.js.map +1 -0
  26. package/lib/commonjs/components/biometrics/types.js +17 -0
  27. package/lib/commonjs/components/biometrics/types.js.map +1 -0
  28. package/lib/commonjs/components/views/BvnVerification.js.map +1 -1
  29. package/lib/commonjs/components/views/PaylaterAgreement.js.map +1 -1
  30. package/lib/commonjs/components/views/Registration.js.map +1 -1
  31. package/lib/commonjs/index.js +23 -18
  32. package/lib/commonjs/index.js.map +1 -1
  33. package/lib/module/components/OmnipayProvider.js +3 -67
  34. package/lib/module/components/OmnipayProvider.js.map +1 -1
  35. package/lib/module/components/OmnipayView.js.map +1 -1
  36. package/lib/module/components/biometrics/FaceVerification.js +254 -346
  37. package/lib/module/components/biometrics/FaceVerification.js.map +1 -1
  38. package/lib/module/components/biometrics/LivenessDetection.js +75 -197
  39. package/lib/module/components/biometrics/LivenessDetection.js.map +1 -1
  40. package/lib/module/components/biometrics/OmnipayLivenessCameraView.js +7 -0
  41. package/lib/module/components/biometrics/OmnipayLivenessCameraView.js.map +1 -0
  42. package/lib/module/components/biometrics/PermissionManager.js +272 -0
  43. package/lib/module/components/biometrics/PermissionManager.js.map +1 -0
  44. package/lib/module/components/biometrics/index.js +12 -0
  45. package/lib/module/components/biometrics/index.js.map +1 -0
  46. package/lib/module/components/biometrics/types.js +16 -0
  47. package/lib/module/components/biometrics/types.js.map +1 -0
  48. package/lib/module/components/views/BvnVerification.js.map +1 -1
  49. package/lib/module/components/views/PaylaterAgreement.js.map +1 -1
  50. package/lib/module/components/views/Registration.js.map +1 -1
  51. package/lib/module/index.js +5 -4
  52. package/lib/module/index.js.map +1 -1
  53. package/lib/typescript/{src/components → components}/OmnipayProvider.d.ts +1 -1
  54. package/lib/typescript/components/OmnipayProvider.d.ts.map +1 -0
  55. package/lib/typescript/{src/components → components}/OmnipayView.d.ts +21 -20
  56. package/lib/typescript/components/OmnipayView.d.ts.map +1 -0
  57. package/lib/typescript/components/biometrics/FaceVerification.d.ts +11 -0
  58. package/lib/typescript/components/biometrics/FaceVerification.d.ts.map +1 -0
  59. package/lib/typescript/components/biometrics/LivenessDetection.d.ts +33 -0
  60. package/lib/typescript/components/biometrics/LivenessDetection.d.ts.map +1 -0
  61. package/lib/typescript/components/biometrics/OmnipayLivenessCameraView.d.ts +18 -0
  62. package/lib/typescript/components/biometrics/OmnipayLivenessCameraView.d.ts.map +1 -0
  63. package/lib/typescript/components/biometrics/PermissionManager.d.ts +58 -0
  64. package/lib/typescript/components/biometrics/PermissionManager.d.ts.map +1 -0
  65. package/lib/typescript/components/biometrics/index.d.ts +5 -0
  66. package/lib/typescript/components/biometrics/index.d.ts.map +1 -0
  67. package/lib/typescript/components/biometrics/types.d.ts +73 -0
  68. package/lib/typescript/components/biometrics/types.d.ts.map +1 -0
  69. package/lib/typescript/{src/components → components}/views/BvnVerification.d.ts +2 -1
  70. package/lib/typescript/components/views/BvnVerification.d.ts.map +1 -0
  71. package/lib/typescript/{src/components → components}/views/PaylaterAgreement.d.ts +2 -1
  72. package/lib/typescript/components/views/PaylaterAgreement.d.ts.map +1 -0
  73. package/lib/typescript/{src/components → components}/views/Registration.d.ts +2 -1
  74. package/lib/typescript/components/views/Registration.d.ts.map +1 -0
  75. package/lib/typescript/functions.d.ts.map +1 -0
  76. package/lib/typescript/hooks/useOmnipay.d.ts +28 -0
  77. package/lib/typescript/hooks/useOmnipay.d.ts.map +1 -0
  78. package/lib/typescript/index.d.ts +7 -0
  79. package/lib/typescript/index.d.ts.map +1 -0
  80. package/lib/typescript/lib/colors.d.ts.map +1 -0
  81. package/lib/typescript/lib/config.d.ts.map +1 -0
  82. package/omnipay-reactnative-sdk.podspec +32 -29
  83. package/package.json +15 -10
  84. package/src/components/OmnipayProvider.tsx +3 -106
  85. package/src/components/OmnipayView.tsx +1 -1
  86. package/src/components/biometrics/FaceVerification.tsx +291 -368
  87. package/src/components/biometrics/LivenessDetection.ts +113 -250
  88. package/src/components/biometrics/OmnipayLivenessCameraView.tsx +19 -0
  89. package/src/components/biometrics/PermissionManager.ts +317 -0
  90. package/src/components/biometrics/index.ts +11 -0
  91. package/src/components/biometrics/types.ts +86 -0
  92. package/src/components/views/BvnVerification.tsx +1 -1
  93. package/src/components/views/PaylaterAgreement.tsx +1 -1
  94. package/src/components/views/Registration.tsx +1 -1
  95. package/src/index.tsx +4 -15
  96. package/android/src/main/java/com/omniretail/omnipay/LivenessCameraViewManager.java +0 -116
  97. package/android/src/main/java/com/omniretail/omnipay/LivenessDetectionModule.java +0 -588
  98. package/ios/LivenessCameraView.h +0 -22
  99. package/ios/LivenessCameraView.m +0 -135
  100. package/ios/LivenessCameraViewManager.h +0 -12
  101. package/ios/LivenessCameraViewManager.m +0 -24
  102. package/ios/LivenessDetectionModule.h +0 -46
  103. package/ios/LivenessDetectionModule.m +0 -603
  104. package/lib/commonjs/components/biometrics/LivenessCameraView.js +0 -45
  105. package/lib/commonjs/components/biometrics/LivenessCameraView.js.map +0 -1
  106. package/lib/module/components/biometrics/LivenessCameraView.js +0 -39
  107. package/lib/module/components/biometrics/LivenessCameraView.js.map +0 -1
  108. package/lib/typescript/demo/src/App.d.ts +0 -3
  109. package/lib/typescript/demo/src/App.d.ts.map +0 -1
  110. package/lib/typescript/demo/src/Body.d.ts +0 -3
  111. package/lib/typescript/demo/src/Body.d.ts.map +0 -1
  112. package/lib/typescript/demo/src/NotificationsExample.d.ts +0 -4
  113. package/lib/typescript/demo/src/NotificationsExample.d.ts.map +0 -1
  114. package/lib/typescript/src/components/OmnipayProvider.d.ts.map +0 -1
  115. package/lib/typescript/src/components/OmnipayView.d.ts.map +0 -1
  116. package/lib/typescript/src/components/biometrics/FaceVerification.d.ts +0 -12
  117. package/lib/typescript/src/components/biometrics/FaceVerification.d.ts.map +0 -1
  118. package/lib/typescript/src/components/biometrics/LivenessCameraView.d.ts +0 -22
  119. package/lib/typescript/src/components/biometrics/LivenessCameraView.d.ts.map +0 -1
  120. package/lib/typescript/src/components/biometrics/LivenessDetection.d.ts +0 -73
  121. package/lib/typescript/src/components/biometrics/LivenessDetection.d.ts.map +0 -1
  122. package/lib/typescript/src/components/views/BvnVerification.d.ts.map +0 -1
  123. package/lib/typescript/src/components/views/PaylaterAgreement.d.ts.map +0 -1
  124. package/lib/typescript/src/components/views/Registration.d.ts.map +0 -1
  125. package/lib/typescript/src/functions.d.ts.map +0 -1
  126. package/lib/typescript/src/hooks/useOmnipay.d.ts +0 -28
  127. package/lib/typescript/src/hooks/useOmnipay.d.ts.map +0 -1
  128. package/lib/typescript/src/index.d.ts +0 -8
  129. package/lib/typescript/src/index.d.ts.map +0 -1
  130. package/lib/typescript/src/lib/colors.d.ts.map +0 -1
  131. package/lib/typescript/src/lib/config.d.ts.map +0 -1
  132. package/src/components/biometrics/LivenessCameraView.tsx +0 -61
  133. /package/lib/typescript/{src/functions.d.ts → functions.d.ts} +0 -0
  134. /package/lib/typescript/{src/lib → lib}/colors.d.ts +0 -0
  135. /package/lib/typescript/{src/lib → lib}/config.d.ts +0 -0
package/ios/LivenessDetectionModule.m
@@ -1,603 +0,0 @@
- //
- // LivenessDetectionModule.m
- // omnipay-reactnative-sdk
- //
- // Created by React Native Auto-generated
- //
-
- #import "LivenessDetectionModule.h"
- #import <React/RCTLog.h>
- #import <React/RCTUtils.h>
-
- // Constants
- static NSString * const CHALLENGE_SMILE = @"smile";
- static NSString * const CHALLENGE_BLINK = @"blink";
- static NSString * const CHALLENGE_TURN_LEFT = @"turn_left";
- static NSString * const CHALLENGE_TURN_RIGHT = @"turn_right";
-
- // Events
- static NSString * const EVENT_CHALLENGE_START = @"onChallengeStart";
- static NSString * const EVENT_CHALLENGE_SUCCESS = @"onChallengeSuccess";
- static NSString * const EVENT_CHALLENGE_FAILURE = @"onChallengeFailure";
- static NSString * const EVENT_ALL_CHALLENGES_COMPLETE = @"onAllChallengesComplete";
- static NSString * const EVENT_SCREENSHOT_CAPTURED = @"onScreenshotCaptured";
-
- // Detection thresholds
- static const CGFloat SMILE_THRESHOLD = 0.8;
- static const CGFloat EYE_OPEN_THRESHOLD = 0.6;
- static const CGFloat HEAD_YAW_THRESHOLD = 15.0; // degrees
- static const NSTimeInterval DEFAULT_CHALLENGE_TIMEOUT = 10.0; // seconds
-
- @implementation LivenessDetectionModule
-
- #pragma mark - RCTBridgeModule
-
- RCT_EXPORT_MODULE();
-
- + (BOOL)requiresMainQueueSetup {
-   return YES;
- }
-
- - (NSArray<NSString *> *)supportedEvents {
-   return @[EVENT_CHALLENGE_START,
-            EVENT_CHALLENGE_SUCCESS,
-            EVENT_CHALLENGE_FAILURE,
-            EVENT_ALL_CHALLENGES_COMPLETE,
-            EVENT_SCREENSHOT_CAPTURED];
- }
-
- - (NSDictionary *)constantsToExport {
-   return @{
-     @"CHALLENGE_SMILE": CHALLENGE_SMILE,
-     @"CHALLENGE_BLINK": CHALLENGE_BLINK,
-     @"CHALLENGE_TURN_LEFT": CHALLENGE_TURN_LEFT,
-     @"CHALLENGE_TURN_RIGHT": CHALLENGE_TURN_RIGHT
-   };
- }
-
- #pragma mark - Initialization
-
- - (instancetype)init {
-   self = [super init];
-   if (self) {
-     _smileThreshold = SMILE_THRESHOLD;
-     _eyeOpenThreshold = EYE_OPEN_THRESHOLD;
-     _headYawThreshold = HEAD_YAW_THRESHOLD;
-     _challengeTimeout = DEFAULT_CHALLENGE_TIMEOUT;
-     _isDetectionActive = NO;
-     _pendingChallenges = [[NSMutableArray alloc] init];
-
-     // Initialize challenge state
-     [self resetChallengeState];
-
-     // Create video data output queue
-     _videoDataOutputQueue = dispatch_queue_create("com.omnipay.livenessdetection.videoqueue", DISPATCH_QUEUE_SERIAL);
-   }
-   return self;
- }
-
- - (void)dealloc {
-   [self stopLivenessDetection];
- }
-
- #pragma mark - React Native Methods
-
- RCT_EXPORT_METHOD(startLivenessDetection:(NSArray<NSString *> *)challenges
-                   resolver:(RCTPromiseResolveBlock)resolve
-                   rejecter:(RCTPromiseRejectBlock)reject) {
-
-   if (self.isDetectionActive) {
-     reject(@"ALREADY_ACTIVE", @"Liveness detection is already active", nil);
-     return;
-   }
-
-   // Check camera permission first
-   AVAuthorizationStatus authStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
-   if (authStatus != AVAuthorizationStatusAuthorized) {
-     reject(@"CAMERA_PERMISSION_DENIED", @"Camera permission is required for liveness detection", nil);
-     return;
-   }
-
-   // Validate challenges
-   if (!challenges || challenges.count == 0) {
-     reject(@"INVALID_CHALLENGES", @"No valid challenges provided", nil);
-     return;
-   }
-
-   NSArray *validChallenges = @[CHALLENGE_SMILE, CHALLENGE_BLINK, CHALLENGE_TURN_LEFT, CHALLENGE_TURN_RIGHT];
-   for (NSString *challenge in challenges) {
-     if (![validChallenges containsObject:challenge]) {
-       reject(@"INVALID_CHALLENGE", [NSString stringWithFormat:@"Invalid challenge: %@", challenge], nil);
-       return;
-     }
-   }
-
-   dispatch_async(dispatch_get_main_queue(), ^{
-     @try {
-       [self.pendingChallenges removeAllObjects];
-       [self.pendingChallenges addObjectsFromArray:challenges];
-
-       self.isDetectionActive = YES;
-       [self startCamera];
-       [self startNextChallenge];
-
-       resolve(@"Detection started");
-     } @catch (NSException *exception) {
-       reject(@"START_ERROR", [NSString stringWithFormat:@"Failed to start liveness detection: %@", exception.reason], nil);
-     }
-   });
- }
-
- RCT_EXPORT_METHOD(stopLivenessDetection) {
-   dispatch_async(dispatch_get_main_queue(), ^{
-     [self stopDetection];
-   });
- }
-
- RCT_EXPORT_METHOD(getCameraView:(RCTPromiseResolveBlock)resolve
-                   rejecter:(RCTPromiseRejectBlock)reject) {
-   dispatch_async(dispatch_get_main_queue(), ^{
-     @try {
-       if (!self.previewLayer) {
-         [self setupCamera];
-       }
-       resolve(@"Camera view created");
-     } @catch (NSException *exception) {
-       reject(@"CAMERA_VIEW_ERROR", [NSString stringWithFormat:@"Failed to create camera view: %@", exception.reason], nil);
-     }
-   });
- }
-
- RCT_EXPORT_METHOD(checkCameraPermission:(RCTPromiseResolveBlock)resolve
-                   rejecter:(RCTPromiseRejectBlock)reject) {
-   AVAuthorizationStatus authStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
-   BOOL hasPermission = (authStatus == AVAuthorizationStatusAuthorized);
-   resolve(@(hasPermission));
- }
-
- RCT_EXPORT_METHOD(requestCameraPermission:(RCTPromiseResolveBlock)resolve
-                   rejecter:(RCTPromiseRejectBlock)reject) {
-   AVAuthorizationStatus authStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
-
-   if (authStatus == AVAuthorizationStatusAuthorized) {
-     resolve(@YES);
-     return;
-   }
-
-   [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
-     dispatch_async(dispatch_get_main_queue(), ^{
-       resolve(@(granted));
-     });
-   }];
- }
-
- #pragma mark - Public Methods
-
- - (AVCaptureVideoPreviewLayer *)getPreviewLayer {
-   if (!self.previewLayer) {
-     [self setupCamera];
-   }
-   return self.previewLayer;
- }
-
- - (void)startCamera {
-   if (!self.captureSession) {
-     [self setupCamera];
-   }
-
-   if (!self.captureSession.isRunning) {
-     dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
-       [self.captureSession startRunning];
-     });
-   }
- }
-
- - (void)stopCamera {
-   if (self.captureSession && self.captureSession.isRunning) {
-     dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
-       [self.captureSession stopRunning];
-     });
-   }
- }
-
- #pragma mark - Camera Setup
-
- - (void)setupCamera {
-   self.captureSession = [[AVCaptureSession alloc] init];
-   self.captureSession.sessionPreset = AVCaptureSessionPresetHigh;
-
-   // Get front camera
-   self.captureDevice = [self getFrontCamera];
-   if (!self.captureDevice) {
-     RCTLogError(@"No front camera available");
-     return;
-   }
-
-   NSError *error = nil;
-
-   // Create input
-   AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:self.captureDevice error:&error];
-   if (error || !input) {
-     RCTLogError(@"Failed to create camera input: %@", error.localizedDescription);
-     return;
-   }
-
-   if ([self.captureSession canAddInput:input]) {
-     [self.captureSession addInput:input];
-   }
-
-   // Create video data output for face detection
-   self.videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
-   self.videoDataOutput.videoSettings = @{(NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA)};
-   [self.videoDataOutput setSampleBufferDelegate:self queue:self.videoDataOutputQueue];
-
-   if ([self.captureSession canAddOutput:self.videoDataOutput]) {
-     [self.captureSession addOutput:self.videoDataOutput];
-   }
-
-   // Create photo output for final screenshot
-   self.photoOutput = [[AVCapturePhotoOutput alloc] init];
-   if ([self.captureSession canAddOutput:self.photoOutput]) {
-     [self.captureSession addOutput:self.photoOutput];
-   }
-
-   // Create preview layer
-   self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.captureSession];
-   self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
- }
-
- - (AVCaptureDevice *)getFrontCamera {
-   NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
-   for (AVCaptureDevice *device in devices) {
-     if (device.position == AVCaptureDevicePositionFront) {
-       return device;
-     }
-   }
-   return nil;
- }
-
- #pragma mark - Challenge Logic
-
- - (void)startNextChallenge {
-   if (self.pendingChallenges.count == 0) {
-     // All challenges completed - take final screenshot
-     [self captureScreenshot];
-     return;
-   }
-
-   self.currentChallenge = [self.pendingChallenges firstObject];
-   [self.pendingChallenges removeObjectAtIndex:0];
-   self.challengeStartTime = [[NSDate date] timeIntervalSince1970];
-
-   // Reset challenge-specific state
-   [self resetChallengeState];
-
-   [self emitChallengeStart];
- }
-
- - (void)resetChallengeState {
-   self.lastSmileState = NO;
-   self.lastLeftEyeOpen = YES;
-   self.lastRightEyeOpen = YES;
-   self.lastHeadYaw = 0.0;
- }
-
- - (void)stopDetection {
-   self.isDetectionActive = NO;
-   self.currentChallenge = nil;
-   [self.pendingChallenges removeAllObjects];
-   [self stopCamera];
- }
-
- #pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate
-
- - (void)captureOutput:(AVCaptureOutput *)output
-   didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
-          fromConnection:(AVCaptureConnection *)connection {
-
-   if (!self.isDetectionActive || !self.currentChallenge) {
-     return;
-   }
-
-   // Check challenge timeout
-   NSTimeInterval currentTime = [[NSDate date] timeIntervalSince1970];
-   if (currentTime - self.challengeStartTime > self.challengeTimeout) {
-     dispatch_async(dispatch_get_main_queue(), ^{
-       [self emitChallengeFailure:@"Challenge timeout"];
-       [self startNextChallenge];
-     });
-     return;
-   }
-
-   CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
-   if (!imageBuffer) return;
-
-   // Create Vision request
-   VNDetectFaceLandmarksRequest *request = [[VNDetectFaceLandmarksRequest alloc] initWithCompletionHandler:^(VNRequest *request, NSError *error) {
-     if (error) {
-       RCTLogError(@"Face detection error: %@", error.localizedDescription);
-       return;
-     }
-
-     [self processFaceDetectionResults:request.results];
-   }];
-
-   // Configure request for better performance
-   request.revision = VNDetectFaceLandmarksRequestRevision3;
-
-   // Create image request handler
-   VNImageRequestHandler *handler = [[VNImageRequestHandler alloc] initWithCVPixelBuffer:imageBuffer options:@{}];
-
-   NSError *error = nil;
-   [handler performRequests:@[request] error:&error];
-
-   if (error) {
-     RCTLogError(@"Vision request error: %@", error.localizedDescription);
-   }
- }
-
- #pragma mark - Face Detection Processing
-
- - (void)processFaceDetectionResults:(NSArray<VNFaceObservation *> *)faceObservations {
-   if (faceObservations.count == 0) {
-     // No face detected
-     return;
-   }
-
-   VNFaceObservation *face = faceObservations.firstObject;
-
-   // Validate current challenge
-   BOOL challengeSuccess = NO;
-
-   if ([self.currentChallenge isEqualToString:CHALLENGE_SMILE]) {
-     challengeSuccess = [self validateSmileChallenge:face];
-   } else if ([self.currentChallenge isEqualToString:CHALLENGE_BLINK]) {
-     challengeSuccess = [self validateBlinkChallenge:face];
-   } else if ([self.currentChallenge isEqualToString:CHALLENGE_TURN_LEFT]) {
-     challengeSuccess = [self validateTurnLeftChallenge:face];
-   } else if ([self.currentChallenge isEqualToString:CHALLENGE_TURN_RIGHT]) {
-     challengeSuccess = [self validateTurnRightChallenge:face];
-   }
-
-   if (challengeSuccess) {
-     dispatch_async(dispatch_get_main_queue(), ^{
-       [self emitChallengeSuccess];
-       [self startNextChallenge];
-     });
-   }
- }
-
- - (BOOL)validateSmileChallenge:(VNFaceObservation *)face {
-   // Vision Framework doesn't directly provide smile detection
-   // We'll use landmark analysis to detect smile
-   if (!face.landmarks || !face.landmarks.outerLips) {
-     return NO;
-   }
-
-   VNFaceLandmarkRegion2D *outerLips = face.landmarks.outerLips;
-
-   // Simple smile detection based on lip curvature
-   // This is a simplified implementation - you might want to use a more sophisticated algorithm
-   CGFloat lipWidth = [self calculateLipWidth:outerLips];
-   CGFloat lipHeight = [self calculateLipHeight:outerLips];
-
-   CGFloat smileRatio = lipWidth / lipHeight;
-   BOOL isSmiling = smileRatio > self.smileThreshold;
-
-   // Detect transition from not smiling to smiling
-   if (isSmiling && !self.lastSmileState) {
-     self.lastSmileState = YES;
-     return YES;
-   }
-
-   self.lastSmileState = isSmiling;
-   return NO;
- }
-
- - (BOOL)validateBlinkChallenge:(VNFaceObservation *)face {
-   // Vision Framework doesn't provide direct eye open/close detection
-   // We'll use eye landmark analysis
-   if (!face.landmarks || !face.landmarks.leftEye || !face.landmarks.rightEye) {
-     return NO;
-   }
-
-   CGFloat leftEyeOpenness = [self calculateEyeOpenness:face.landmarks.leftEye];
-   CGFloat rightEyeOpenness = [self calculateEyeOpenness:face.landmarks.rightEye];
-
-   BOOL leftClosed = leftEyeOpenness < (1.0 - self.eyeOpenThreshold);
-   BOOL rightClosed = rightEyeOpenness < (1.0 - self.eyeOpenThreshold);
-   BOOL bothClosed = leftClosed && rightClosed;
-
-   // Detect blink: transition from open to closed and back to open
-   if (self.lastLeftEyeOpen && self.lastRightEyeOpen && bothClosed) {
-     // Eyes just closed
-     self.lastLeftEyeOpen = NO;
-     self.lastRightEyeOpen = NO;
-   } else if (!self.lastLeftEyeOpen && !self.lastRightEyeOpen && !bothClosed) {
-     // Eyes opened after being closed - blink detected!
-     self.lastLeftEyeOpen = YES;
-     self.lastRightEyeOpen = YES;
-     return YES;
-   }
-
-   return NO;
- }
-
- - (BOOL)validateTurnLeftChallenge:(VNFaceObservation *)face {
-   CGFloat headYaw = [self calculateHeadYaw:face];
-
-   // Positive yaw = head turned left (user's left)
-   if (headYaw > self.headYawThreshold && fabsf(self.lastHeadYaw) < self.headYawThreshold) {
-     self.lastHeadYaw = headYaw;
-     return YES;
-   }
-
-   self.lastHeadYaw = headYaw;
-   return NO;
- }
-
- - (BOOL)validateTurnRightChallenge:(VNFaceObservation *)face {
-   CGFloat headYaw = [self calculateHeadYaw:face];
-
-   // Negative yaw = head turned right (user's right)
-   if (headYaw < -self.headYawThreshold && fabsf(self.lastHeadYaw) < self.headYawThreshold) {
-     self.lastHeadYaw = headYaw;
-     return YES;
-   }
-
-   self.lastHeadYaw = headYaw;
-   return NO;
- }
-
- #pragma mark - Landmark Analysis Helpers
-
- - (CGFloat)calculateLipWidth:(VNFaceLandmarkRegion2D *)outerLips {
-   if (outerLips.pointCount < 2) return 0.0;
-
-   // Get leftmost and rightmost points
-   CGPoint *points = (CGPoint *)outerLips.normalizedPoints;
-   CGFloat minX = points[0].x, maxX = points[0].x;
-
-   for (int i = 1; i < outerLips.pointCount; i++) {
-     if (points[i].x < minX) minX = points[i].x;
-     if (points[i].x > maxX) maxX = points[i].x;
-   }
-
-   return maxX - minX;
- }
-
- - (CGFloat)calculateLipHeight:(VNFaceLandmarkRegion2D *)outerLips {
-   if (outerLips.pointCount < 2) return 0.0;
-
-   // Get topmost and bottommost points
-   CGPoint *points = (CGPoint *)outerLips.normalizedPoints;
-   CGFloat minY = points[0].y, maxY = points[0].y;
-
-   for (int i = 1; i < outerLips.pointCount; i++) {
-     if (points[i].y < minY) minY = points[i].y;
-     if (points[i].y > maxY) maxY = points[i].y;
-   }
-
-   return maxY - minY;
- }
-
- - (CGFloat)calculateEyeOpenness:(VNFaceLandmarkRegion2D *)eye {
-   if (eye.pointCount < 6) return 1.0; // Assume open if can't calculate
-
-   // Calculate eye aspect ratio (height/width)
-   CGPoint *points = (CGPoint *)eye.normalizedPoints;
-
-   // Find eye width (leftmost to rightmost)
-   CGFloat minX = points[0].x, maxX = points[0].x;
-   CGFloat minY = points[0].y, maxY = points[0].y;
-
-   for (int i = 1; i < eye.pointCount; i++) {
-     if (points[i].x < minX) minX = points[i].x;
-     if (points[i].x > maxX) maxX = points[i].x;
-     if (points[i].y < minY) minY = points[i].y;
-     if (points[i].y > maxY) maxY = points[i].y;
-   }
-
-   CGFloat width = maxX - minX;
-   CGFloat height = maxY - minY;
-
-   return (width > 0) ? height / width : 1.0;
- }
-
- - (CGFloat)calculateHeadYaw:(VNFaceObservation *)face {
-   // Use face observation's yaw if available
-   if (@available(iOS 14.0, *)) {
-     if (face.yaw) {
-       return [face.yaw floatValue] * 180.0 / M_PI; // Convert to degrees
-     }
-   }
-
-   // Fallback: estimate yaw from nose position relative to face center
-   if (face.landmarks && face.landmarks.nose) {
-     VNFaceLandmarkRegion2D *nose = face.landmarks.nose;
-     if (nose.pointCount > 0) {
-       CGPoint *nosePoints = (CGPoint *)nose.normalizedPoints;
-       CGPoint noseCenter = nosePoints[0];
-
-       // Calculate nose position relative to face center
-       CGFloat faceCenterX = face.boundingBox.origin.x + face.boundingBox.size.width / 2.0;
-       CGFloat noseOffsetX = noseCenter.x - faceCenterX;
-
-       // Convert to approximate yaw angle (simplified calculation)
-       return noseOffsetX * 90.0; // Scale factor for degrees
-     }
-   }
-
-   return 0.0;
- }
-
- #pragma mark - Screenshot Capture
-
- - (void)captureScreenshot {
-   if (!self.photoOutput) {
-     [self emitAllChallengesComplete:nil];
-     return;
-   }
-
-   AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings];
-   settings.flashMode = AVCaptureFlashModeOff;
-
-   [self.photoOutput capturePhotoWithSettings:settings delegate:self];
- }
-
- #pragma mark - AVCapturePhotoCaptureDelegate
-
- - (void)captureOutput:(AVCapturePhotoOutput *)output
-   didFinishProcessingPhoto:(AVCapturePhoto *)photo
-                      error:(NSError *)error {
-
-   if (error) {
-     RCTLogError(@"Photo capture error: %@", error.localizedDescription);
-     [self emitAllChallengesComplete:nil];
-     return;
-   }
-
-   NSData *imageData = [photo fileDataRepresentation];
-   if (imageData) {
-     NSString *base64String = [imageData base64EncodedStringWithOptions:0];
-
-     [self emitScreenshotCaptured:base64String];
-     [self emitAllChallengesComplete:base64String];
-   } else {
-     [self emitAllChallengesComplete:nil];
-   }
-
-   // Clean up
-   [self stopDetection];
- }
-
- #pragma mark - Event Emission
-
- - (void)emitChallengeStart {
-   [self sendEventWithName:EVENT_CHALLENGE_START body:@{@"challenge": self.currentChallenge}];
- }
-
- - (void)emitChallengeSuccess {
-   [self sendEventWithName:EVENT_CHALLENGE_SUCCESS body:@{@"challenge": self.currentChallenge}];
- }
-
- - (void)emitChallengeFailure:(NSString *)reason {
-   [self sendEventWithName:EVENT_CHALLENGE_FAILURE body:@{
-     @"challenge": self.currentChallenge ?: @"unknown",
-     @"reason": reason
-   }];
- }
-
- - (void)emitAllChallengesComplete:(NSString *)screenshot {
-   NSMutableDictionary *body = [@{@"success": @YES} mutableCopy];
-   if (screenshot) {
-     body[@"screenshot"] = screenshot;
-   }
-   [self sendEventWithName:EVENT_ALL_CHALLENGES_COMPLETE body:body];
- }
-
- - (void)emitScreenshotCaptured:(NSString *)screenshot {
-   [self sendEventWithName:EVENT_SCREENSHOT_CAPTURED body:@{@"screenshot": screenshot}];
- }
-
- @end
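
For reference, the deleted module above exposed a promise-based API (`startLivenessDetection`, `stopLivenessDetection`, `checkCameraPermission`, `requestCameraPermission`) plus `CHALLENGE_*` constants and `EVENT_*` event names. The TypeScript sketch below reconstructs how that surface was reachable from JavaScript; it is inferred from the `RCT_EXPORT_METHOD` and event-name declarations in the file, and the listener wiring is ordinary `NativeEventEmitter` usage, not an API this package documents.

    // Sketch only: JS-side usage of the removed LivenessDetectionModule,
    // inferred from the exported methods and event names shown above.
    import { NativeModules, NativeEventEmitter } from 'react-native';

    const { LivenessDetectionModule } = NativeModules;
    const emitter = new NativeEventEmitter(LivenessDetectionModule);

    async function runLivenessFlow(): Promise<void> {
      // Both permission methods resolve to a boolean.
      const granted =
        (await LivenessDetectionModule.checkCameraPermission()) ||
        (await LivenessDetectionModule.requestCameraPermission());
      if (!granted) return;

      const sub = emitter.addListener('onAllChallengesComplete', (event) => {
        // event.screenshot is a base64-encoded image when capture succeeded.
        console.log('liveness complete:', event.success);
        sub.remove();
      });

      // Challenge ids match the CHALLENGE_* constants ('smile', 'blink',
      // 'turn_left', 'turn_right'); the promise rejects on invalid input.
      await LivenessDetectionModule.startLivenessDetection(['smile', 'blink']);
    }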
package/lib/commonjs/components/biometrics/LivenessCameraView.js
@@ -1,45 +0,0 @@
- "use strict";
-
- Object.defineProperty(exports, "__esModule", {
-   value: true
- });
- exports.default = void 0;
- var _react = _interopRequireDefault(require("react"));
- var _reactNative = require("react-native");
- function _interopRequireDefault(e) { return e && e.__esModule ? e : { default: e }; }
- // Props for the native camera view
-
- // Native component interface
-
- // Import native component based on platform
- const NativeLivenessCameraView = _reactNative.Platform.select({
-   android: (0, _reactNative.requireNativeComponent)('LivenessCameraView'),
-   ios: (0, _reactNative.requireNativeComponent)('LivenessCameraView')
- });
-
- /**
-  * LivenessCameraView - Native camera component for liveness detection
-  *
-  * This component provides a camera preview that integrates with the
-  * liveness detection native module for real-time face analysis.
-  */
- const LivenessCameraView = ({
-   style,
-   scaleType = 'fillCenter',
-   onCameraReady,
-   onCameraError
- }) => {
-   // Platform check
-   if (!NativeLivenessCameraView) {
-     console.warn('LivenessCameraView is not available on this platform');
-     return null;
-   }
-   return /*#__PURE__*/_react.default.createElement(NativeLivenessCameraView, {
-     style: style,
-     scaleType: scaleType,
-     onCameraReady: onCameraReady,
-     onCameraError: onCameraError
-   });
- };
- var _default = exports.default = LivenessCameraView;
- //# sourceMappingURL=LivenessCameraView.js.map
package/lib/commonjs/components/biometrics/LivenessCameraView.js.map
@@ -1 +0,0 @@
- {"version":3,"names":["_react","_interopRequireDefault","require","_reactNative","e","__esModule","default","NativeLivenessCameraView","Platform","select","android","requireNativeComponent","ios","LivenessCameraView","style","scaleType","onCameraReady","onCameraError","console","warn","createElement","_default","exports"],"sourceRoot":"../../../../src","sources":["components/biometrics/LivenessCameraView.tsx"],"mappings":";;;;;;AAAA,IAAAA,MAAA,GAAAC,sBAAA,CAAAC,OAAA;AACA,IAAAC,YAAA,GAAAD,OAAA;AAA2E,SAAAD,uBAAAG,CAAA,WAAAA,CAAA,IAAAA,CAAA,CAAAC,UAAA,GAAAD,CAAA,KAAAE,OAAA,EAAAF,CAAA;AAE3E;;AAcA;;AAKA;AACA,MAAMG,wBAAwB,GAAGC,qBAAQ,CAACC,MAAM,CAAC;EAC/CC,OAAO,EACL,IAAAC,mCAAsB,EAAgC,oBAAoB,CAAC;EAC7EC,GAAG,EAAE,IAAAD,mCAAsB,EACzB,oBACF;AACF,CAAC,CAAC;;AAEF;AACA;AACA;AACA;AACA;AACA;AACA,MAAME,kBAAqD,GAAGA,CAAC;EAC7DC,KAAK;EACLC,SAAS,GAAG,YAAY;EACxBC,aAAa;EACbC;AACF,CAAC,KAAK;EACJ;EACA,IAAI,CAACV,wBAAwB,EAAE;IAC7BW,OAAO,CAACC,IAAI,CAAC,sDAAsD,CAAC;IACpE,OAAO,IAAI;EACb;EAEA,oBACEnB,MAAA,CAAAM,OAAA,CAAAc,aAAA,CAACb,wBAAwB;IACvBO,KAAK,EAAEA,KAAM;IACbC,SAAS,EAAEA,SAAU;IACrBC,aAAa,EAAEA,aAAc;IAC7BC,aAAa,EAAEA;EAAc,CAC9B,CAAC;AAEN,CAAC;AAAC,IAAAI,QAAA,GAAAC,OAAA,CAAAhB,OAAA,GAEaO,kBAAkB","ignoreList":[]}
package/lib/module/components/biometrics/LivenessCameraView.js
@@ -1,39 +0,0 @@
- import React from 'react';
- import { requireNativeComponent, Platform } from 'react-native';
-
- // Props for the native camera view
-
- // Native component interface
-
- // Import native component based on platform
- const NativeLivenessCameraView = Platform.select({
-   android: requireNativeComponent('LivenessCameraView'),
-   ios: requireNativeComponent('LivenessCameraView')
- });
-
- /**
-  * LivenessCameraView - Native camera component for liveness detection
-  *
-  * This component provides a camera preview that integrates with the
-  * liveness detection native module for real-time face analysis.
-  */
- const LivenessCameraView = ({
-   style,
-   scaleType = 'fillCenter',
-   onCameraReady,
-   onCameraError
- }) => {
-   // Platform check
-   if (!NativeLivenessCameraView) {
-     console.warn('LivenessCameraView is not available on this platform');
-     return null;
-   }
-   return /*#__PURE__*/React.createElement(NativeLivenessCameraView, {
-     style: style,
-     scaleType: scaleType,
-     onCameraReady: onCameraReady,
-     onCameraError: onCameraError
-   });
- };
- export default LivenessCameraView;
- //# sourceMappingURL=LivenessCameraView.js.map
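
The component removed in the two builds above took only four props (`style`, `scaleType`, `onCameraReady`, `onCameraError`). Below is a hedged TSX sketch of how it was rendered; the prop names come from the destructuring above, while the screen wrapper and full-screen style are invented for illustration.

    // Sketch only: rendering the removed LivenessCameraView with the four
    // props visible in its implementation above. The wrapper is illustrative.
    import React from 'react';
    import { StyleSheet } from 'react-native';
    import LivenessCameraView from './LivenessCameraView'; // pre-removal path

    export function LivenessScreen(): React.JSX.Element {
      return (
        <LivenessCameraView
          style={StyleSheet.absoluteFill}
          scaleType="fillCenter" // the component's default
          onCameraReady={() => console.log('camera ready')}
          onCameraError={(error: unknown) => console.warn('camera error', error)}
        />
      );
    }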
package/lib/module/components/biometrics/LivenessCameraView.js.map
@@ -1 +0,0 @@
- {"version":3,"names":["React","requireNativeComponent","Platform","NativeLivenessCameraView","select","android","ios","LivenessCameraView","style","scaleType","onCameraReady","onCameraError","console","warn","createElement"],"sourceRoot":"../../../../src","sources":["components/biometrics/LivenessCameraView.tsx"],"mappings":"AAAA,OAAOA,KAAK,MAAM,OAAO;AACzB,SAASC,sBAAsB,EAAEC,QAAQ,QAAmB,cAAc;;AAE1E;;AAcA;;AAKA;AACA,MAAMC,wBAAwB,GAAGD,QAAQ,CAACE,MAAM,CAAC;EAC/CC,OAAO,EACLJ,sBAAsB,CAAgC,oBAAoB,CAAC;EAC7EK,GAAG,EAAEL,sBAAsB,CACzB,oBACF;AACF,CAAC,CAAC;;AAEF;AACA;AACA;AACA;AACA;AACA;AACA,MAAMM,kBAAqD,GAAGA,CAAC;EAC7DC,KAAK;EACLC,SAAS,GAAG,YAAY;EACxBC,aAAa;EACbC;AACF,CAAC,KAAK;EACJ;EACA,IAAI,CAACR,wBAAwB,EAAE;IAC7BS,OAAO,CAACC,IAAI,CAAC,sDAAsD,CAAC;IACpE,OAAO,IAAI;EACb;EAEA,oBACEb,KAAA,CAAAc,aAAA,CAACX,wBAAwB;IACvBK,KAAK,EAAEA,KAAM;IACbC,SAAS,EAAEA,SAAU;IACrBC,aAAa,EAAEA,aAAc;IAC7BC,aAAa,EAAEA;EAAc,CAC9B,CAAC;AAEN,CAAC;AAED,eAAeJ,kBAAkB","ignoreList":[]}
package/lib/typescript/demo/src/App.d.ts
@@ -1,3 +0,0 @@
- import * as React from 'react';
- export default function App(): React.JSX.Element;
- //# sourceMappingURL=App.d.ts.map
package/lib/typescript/demo/src/App.d.ts.map
@@ -1 +0,0 @@
- {"version":3,"file":"App.d.ts","sourceRoot":"","sources":["../../../../demo/src/App.tsx"],"names":[],"mappings":"AAAA,OAAO,KAAK,KAAK,MAAM,OAAO,CAAC;AAK/B,MAAM,CAAC,OAAO,UAAU,GAAG,sBAoB1B"}