react-native-biometric-verifier 0.0.38 → 0.0.40
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "react-native-biometric-verifier",
-  "version": "0.0.38",
+  "version": "0.0.40",
   "description": "A React Native module for biometric verification with face recognition and QR code scanning",
   "main": "src/index.js",
   "scripts": {
@@ -14,7 +14,7 @@
     "qr-code"
   ],
   "author": "PRAFULDAS M M",
-  "license": "JESCON TECHNOLOGIES PVT LTD",
+  "license": "JESCON TECHNOLOGIES PVT LTD THRISSUR, KERALA, INDIA",
   "peerDependencies": {
     "react": "^16.8.0 || ^17.0.0 || ^18.0.0",
     "react-native": ">=0.60.0",
@@ -24,7 +24,7 @@ const CaptureImageWithoutEdit = React.memo(
     showCodeScanner = false,
     isLoading = false,
     frameProcessorFps = 1,
-    livenessLevel = 0,
+    livenessLevel = 0, // 0 = anti-spoof only, 1 = anti-spoof + blinking
   }) => {
     const cameraRef = useRef(null);
    const [cameraDevice, setCameraDevice] = useState(null);
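For context on the annotated livenessLevel prop above, here is a minimal usage sketch. The prop names are taken from the hunks in this diff; the import path, the surrounding screen component, and the exact onCapture payload shape are assumptions for illustration, not part of the published API.

// Usage sketch (assumption: CaptureImageWithoutEdit is exported by the package;
// the actual import path may differ).
import React from 'react';
import { CaptureImageWithoutEdit } from 'react-native-biometric-verifier';

const VerifyScreen = () => (
  <CaptureImageWithoutEdit
    showCodeScanner={false}
    isLoading={false}
    frameProcessorFps={1}
    livenessLevel={1} // 0 = anti-spoof only, 1 = anti-spoof + blinking
    onCapture={(photoData, faceRect) => {
      // photoData is { uri, filename, filetype } per the capture hunk below
      console.log('Captured face photo:', photoData.uri, faceRect);
    }}
  />
);

export default VerifyScreen;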
@@ -42,7 +42,6 @@ const CaptureImageWithoutEdit = React.memo(
     const [antiSpoofConfidence, setAntiSpoofConfidence] = useState(0);
     const [isFaceCentered, setIsFaceCentered] = useState(false);
     const [hasSingleFace, setHasSingleFace] = useState(false);
-    const [recognitionImageBase64, setRecognitionImageBase64] = useState(null);

     const captured = useRef(false);
     const isMounted = useRef(true);

@@ -61,7 +60,6 @@ const CaptureImageWithoutEdit = React.memo(
       setAntiSpoofConfidence(0);
       setIsFaceCentered(false);
       setHasSingleFace(false);
-      setRecognitionImageBase64(null);
     }, []);

     const codeScanner = useCodeScanner({
@@ -78,13 +76,38 @@ const CaptureImageWithoutEdit = React.memo(
     });

     const onStableFaceDetected = useCallback(
-      async (faceRect
+      async (faceRect) => {
         if (!isMounted.current) return;
         if (captured.current) return;
+
         captured.current = true;
         setFaces([faceRect]);
+
         try {
-
+          if (!cameraRef.current) {
+            throw new Error('Camera ref not available');
+          }
+
+          const photo = await cameraRef.current.takePhoto({
+            flash: 'off',
+            qualityPrioritization: 'quality',
+            enableShutterSound: false,
+            skipMetadata: true,
+          });
+
+          if (!photo || !photo.path) {
+            throw new Error('Failed to capture photo - no path returned');
+          }
+
+          const photopath = `file://${photo.path}`;
+          const fileName = photopath.substr(photopath.lastIndexOf('/') + 1);
+          const photoData = {
+            uri: photopath,
+            filename: fileName,
+            filetype: 'image/jpeg',
+          };
+
+          onCapture(photoData, faceRect);
         } catch (e) {
           console.error('Capture error:', e);
           captured.current = false;
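The new capture path above hands the consumer a { uri, filename, filetype } object instead of a base64 recognition image. A minimal sketch of how such an object could be posted as multipart form data follows; the endpoint, the 'photo' field name, and the fetch-based transport are illustrative assumptions, not part of this package.

// Sketch only: uploading the photoData produced by onCapture.
// ASSUMPTIONS: '/upload' endpoint and 'photo' field name are placeholders.
async function uploadCapturedPhoto(photoData, endpoint) {
  const form = new FormData();
  // React Native's FormData accepts { uri, name, type } file descriptors.
  form.append('photo', {
    uri: photoData.uri,        // e.g. 'file:///.../photo.jpg'
    name: photoData.filename,  // derived from the file path in the hunk above
    type: photoData.filetype,  // 'image/jpeg'
  });

  const res = await fetch(endpoint, { method: 'POST', body: form });
  if (!res.ok) {
    throw new Error(`Upload failed with status ${res.status}`);
  }
  return res.json();
}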
@@ -107,7 +130,6 @@ const CaptureImageWithoutEdit = React.memo(
           setAntiSpoofConfidence(antiSpoofState.confidence || 0);
           setIsFaceCentered(antiSpoofState.isFaceCentered || false);
           setHasSingleFace(antiSpoofState.hasSingleFace || false);
-          setRecognitionImageBase64(antiSpoofState.recognitionImageBase64 || null);
         }

         if (count === 1) {

@@ -141,11 +163,6 @@ const CaptureImageWithoutEdit = React.memo(
     const onAntiSpoofUpdate = useCallback((result) => {
       if (!isMounted.current) return;
       try {
-        // Update recognition image when available
-        if (result?.recognitionImageBase64) {
-          setRecognitionImageBase64(result.recognitionImageBase64);
-        }
-
         // Animate live indicator when face becomes live
         if (result?.isLive && !isFaceLive) {
           Animated.spring(liveIndicatorAnim, {
@@ -109,7 +109,6 @@ export const useFaceDetectionFrameProcessor = ({
       lastResult: null,
       isLive: false,
       confidence: 0,
-      recognitionImageBase64: null, // Added to store base64 image
     },

     // Face centering

@@ -148,7 +147,6 @@ export const useFaceDetectionFrameProcessor = ({
     state.antiSpoof.lastResult = null;
     state.antiSpoof.isLive = false;
     state.antiSpoof.confidence = 0;
-    state.antiSpoof.recognitionImageBase64 = null; // Reset base64 image
     state.flags.hasSingleFace = false;
     state.centering.centeredFrames = 0;
     state.flags.isFaceCentered = false;

@@ -302,7 +300,6 @@ export const useFaceDetectionFrameProcessor = ({
         consecutiveLiveFrames: 0,
         isFaceCentered: false,
         hasSingleFace: false,
-        recognitionImageBase64: null,
       });
       return;
     }

@@ -317,7 +314,6 @@ export const useFaceDetectionFrameProcessor = ({
         consecutiveLiveFrames: 0,
         isFaceCentered: false,
         hasSingleFace: false,
-        recognitionImageBase64: null,
       });
       return;
     }
@@ -364,28 +360,19 @@ export const useFaceDetectionFrameProcessor = ({
       if (state.flags.isFaceCentered) {
         try {
           antiSpoofResult = faceAntiSpoofFrameProcessor?.(frame);
-
           if (antiSpoofResult != null) {
             state.antiSpoof.lastResult = antiSpoofResult;

             const isLive = antiSpoofResult.isLive === true;
             const confidence = antiSpoofResult.combinedScore || antiSpoofResult.neuralNetworkScore || 0;
-            const recognitionImageBase64 = antiSpoofResult.recognitionImageBase64 || null;

-            // Store the base64 image when face is live
             if (isLive && confidence > ANTI_SPOOF_CONFIDENCE_THRESHOLD) {
               state.antiSpoof.consecutiveLiveFrames = Math.min(
                 REQUIRED_CONSECUTIVE_LIVE_FRAMES,
                 state.antiSpoof.consecutiveLiveFrames + 1
               );
-              // Store the base64 image when we have a live face
-              if (recognitionImageBase64) {
-                state.antiSpoof.recognitionImageBase64 = recognitionImageBase64;
-              }
             } else {
               state.antiSpoof.consecutiveLiveFrames = Math.max(0, state.antiSpoof.consecutiveLiveFrames - 1);
-              // Clear base64 image if face is not live
-              state.antiSpoof.recognitionImageBase64 = null;
             }
             state.antiSpoof.isLive = state.antiSpoof.consecutiveLiveFrames >= REQUIRED_CONSECUTIVE_LIVE_FRAMES;
             state.antiSpoof.confidence = confidence;
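The hunk above debounces liveness with a frame counter: confident live frames increment consecutiveLiveFrames up to REQUIRED_CONSECUTIVE_LIVE_FRAMES, other frames decrement it, and the face only counts as live once the counter saturates. A standalone sketch of that counter follows; the threshold values are illustrative placeholders, since the package's actual constants are not shown in this diff.

// Minimal sketch of the consecutive-live-frames debounce used above.
// ASSUMPTION: the two threshold values are placeholders, not the package's constants.
const ANTI_SPOOF_CONFIDENCE_THRESHOLD = 0.7;
const REQUIRED_CONSECUTIVE_LIVE_FRAMES = 5;

function updateLiveness(state, isLive, confidence) {
  if (isLive && confidence > ANTI_SPOOF_CONFIDENCE_THRESHOLD) {
    // Count up, capped at the required streak length.
    state.consecutiveLiveFrames = Math.min(
      REQUIRED_CONSECUTIVE_LIVE_FRAMES,
      state.consecutiveLiveFrames + 1
    );
  } else {
    // Decay instead of resetting, so one noisy frame does not restart the streak.
    state.consecutiveLiveFrames = Math.max(0, state.consecutiveLiveFrames - 1);
  }
  state.isLive = state.consecutiveLiveFrames >= REQUIRED_CONSECUTIVE_LIVE_FRAMES;
  state.confidence = confidence;
  return state;
}

// Example: five confident frames in a row flip isLive to true.
let s = { consecutiveLiveFrames: 0, isLive: false, confidence: 0 };
for (let i = 0; i < 5; i++) s = updateLiveness(s, true, 0.9);
console.log(s.isLive); // true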
@@ -398,19 +385,16 @@ export const useFaceDetectionFrameProcessor = ({
               rawResult: antiSpoofResult,
               consecutiveLiveFrames: state.antiSpoof.consecutiveLiveFrames,
               isFaceCentered: state.flags.isFaceCentered,
-              recognitionImageBase64: state.antiSpoof.recognitionImageBase64,
             });
           }
         }
       } catch (antiSpoofError) {
         // Silent error handling
-        console.log('Anti-spoof error:', antiSpoofError);
       }
     } else {
       // Reset anti-spoof if face not centered
       state.antiSpoof.consecutiveLiveFrames = 0;
       state.antiSpoof.isLive = false;
-      state.antiSpoof.recognitionImageBase64 = null;
     }

     // Liveness logic - optimized

@@ -477,7 +461,6 @@ export const useFaceDetectionFrameProcessor = ({
       consecutiveLiveFrames: state.antiSpoof.consecutiveLiveFrames,
       isFaceCentered: state.flags.isFaceCentered,
       hasSingleFace: true,
-      recognitionImageBase64: state.antiSpoof.recognitionImageBase64,
     });
   }

@@ -496,13 +479,9 @@ export const useFaceDetectionFrameProcessor = ({

     if (shouldCapture) {
       state.flags.captured = true;
-      // Include the base64 image in the stable face detection callback
       runOnStable(
         { x, y, width, height },
-        {
-          ...state.antiSpoof.lastResult,
-          recognitionImageBase64: state.antiSpoof.recognitionImageBase64
-        }
+        state.antiSpoof.lastResult
       );
     }
   } else {

@@ -513,7 +492,6 @@ export const useFaceDetectionFrameProcessor = ({
     state.flags.hasSingleFace = false;
     state.centering.centeredFrames = 0;
     state.flags.isFaceCentered = false;
-    state.antiSpoof.recognitionImageBase64 = null;

     runOnFaces(detected.length, 0, state.liveness.step, false, {
       isLive: false,

@@ -521,12 +499,10 @@ export const useFaceDetectionFrameProcessor = ({
       consecutiveLiveFrames: 0,
       isFaceCentered: false,
       hasSingleFace: false,
-      recognitionImageBase64: null,
     });
   }
 } catch (err) {
   // Error boundary - ensure frame is released
-  console.log('Frame processor error:', err);
 } finally {
   frame.release?.();
 }

@@ -551,7 +527,6 @@ export const useFaceDetectionFrameProcessor = ({
     state.antiSpoof.lastResult = null;
     state.antiSpoof.isLive = false;
     state.antiSpoof.confidence = 0;
-    state.antiSpoof.recognitionImageBase64 = null; // Reset base64 image
     state.flags.hasSingleFace = false;
     state.centering.centeredFrames = 0;
     state.flags.isFaceCentered = false;

@@ -586,7 +561,6 @@ export const useFaceDetectionFrameProcessor = ({
       lastResult: null,
       isLive: false,
       confidence: 0,
-      recognitionImageBase64: null, // Reset base64 image
     },
     centering: {
       centeredFrames: 0,

@@ -638,7 +612,6 @@ export const useFaceDetectionFrameProcessor = ({
       lastResult: sharedState.value.antiSpoof.lastResult,
       hasSingleFace: sharedState.value.flags.hasSingleFace,
       isFaceCentered: sharedState.value.flags.isFaceCentered,
-      recognitionImageBase64: sharedState.value.antiSpoof.recognitionImageBase64,
     },
   };
 };
package/src/index.js
CHANGED
@@ -216,7 +216,7 @@ const BiometricModal = React.memo(

     // Face scan upload
     const uploadFaceScan = useCallback(
-      async (
+      async (selfie) => {
         if (!validateApiUrl()) return;
         const currentData = dataRef.current;

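The next hunk moves the image conversion and network call inside InteractionManager.runAfterInteractions, so the heavy work starts only after in-flight animations and touch handling settle. A minimal, package-independent sketch of that pattern follows; the doWork callback is illustrative and stands in for the conversion + upload done in the hunk below.

// Sketch of deferring async work until interactions/animations settle.
import { InteractionManager } from 'react-native';

function runAfterAnimations(doWork) {
  InteractionManager.runAfterInteractions(async () => {
    try {
      await doWork();
    } catch (err) {
      // Errors must be handled inside the deferred task; the scheduler will not surface them.
      console.error('Deferred work failed:', err);
    }
  });
}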
@@ -231,66 +231,81 @@ const BiometricModal = React.memo(
         animationState: Global.AnimationStates.processing,
       });

-
-
-      return;
-    }
+      InteractionManager.runAfterInteractions(async () => {
+        let base64;

-
-
-
-
+        try {
+          updateState({
+            loadingType: Global.LoadingTypes.imageProcessing,
+          });

-
-
-
-
-
-
-
-
-
-
-
-
-
-
+          base64 = await convertImageToBase64(selfie?.uri);
+        } catch (err) {
+          console.error("Image conversion failed:", err);
+          handleProcessError("Image conversion failed.", err);
+          return;
+        }
+
+        if (!base64) {
+          handleProcessError("Failed to process image.");
+          return;
+        }
+
+        try {
+          const body = { image: base64 };
+          const header = { faceid: currentData };
+          const buttonapi = `${apiurl}python/recognize`;

           updateState({
-
-            animationState: Global.AnimationStates.success,
-            isLoading: false,
-            loadingType: Global.LoadingTypes.none,
+            loadingType: Global.LoadingTypes.networkRequest,
           });

-
+          const response = await networkServiceCall(
+            "POST",
+            buttonapi,
+            header,
+            body
+          );

-          if (
-
-          } else {
-            safeCallback(responseRef.current);
+          if (response?.httpstatus === 200) {
+            responseRef.current = response;

-
-
-
+            updateState({
+              employeeData: response.data?.data || null,
+              animationState: Global.AnimationStates.success,
+              isLoading: false,
+              loadingType: Global.LoadingTypes.none,
+            });

-
-
-
+            notifyMessage("Identity verified successfully!", "success");
+
+            if (qrscan) {
+              setTimeout(() => startQRCodeScan(), 1200);
+            } else {
+              safeCallback(responseRef.current);
+
+              if (resetTimeoutRef.current) {
+                clearTimeout(resetTimeoutRef.current);
+              }
+
+              resetTimeoutRef.current = setTimeout(() => {
+                resetState();
+              }, 1200);
+            }
+          } else {
+            handleProcessError(
+              response?.data?.message ||
+                "Face not recognized. Please try again."
+            );
           }
-          }
+        } catch (error) {
+          console.error("Network request failed:", error);
           handleProcessError(
-
-
+            "Connection error. Please check your network.",
+            error
           );
         }
-        }
-        console.error("Network request failed:", error);
-        handleProcessError(
-          "Connection error. Please check your network.",
-          error
-        );
-      }
+      });
     },
     [
       convertImageToBase64,