react-native-biometric-verifier 0.0.41 → 0.0.43
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +2 -3
- package/src/components/Loader.js +96 -84
- package/src/components/StepIndicator.js +34 -32
- package/src/hooks/useCountdown.js +34 -34
- package/src/hooks/useFaceDetectionFrameProcessor.js +20 -42
- package/src/index.js +157 -152
- package/src/utils/Global.js +22 -2
package/package.json
CHANGED
@@ -1,10 +1,10 @@
 {
   "name": "react-native-biometric-verifier",
-  "version": "0.0.41",
+  "version": "0.0.43",
   "description": "A React Native module for biometric verification with face recognition and QR code scanning",
   "main": "src/index.js",
   "private": false,
-  "license": "
+  "license": "JESCON TECHNOLOGIES PVT LMT ,THRISSUR,KERALA",
   "author": "PRAFUL DAS M M",
   "scripts": {
     "test": "echo \"Error: no test specified\" && exit 1"
@@ -34,7 +34,6 @@
     "react-native-vector-icons": "^9.0.0",
     "react-native-geolocation-service": "^5.0.0",
     "react-native-image-resizer": "^1.0.0",
-    "@react-navigation/native": "^6.0.0",
     "prop-types": "^15.8.0",
     "react-native-fs": "^2.20.0"
   },
package/src/components/Loader.js
CHANGED
@@ -5,23 +5,30 @@ import {
   Modal,
   Animated,
   Easing,
-  Text
+  Text,
+  Image,
+  Dimensions,
 } from "react-native";
-import FastImage from 'react-native-fast-image';
-import { normalize } from "react-native-elements";
 import { getLoaderGif } from "../utils/getLoaderGif";
 
+const { width, height } = Dimensions.get("window");
+
+// Helper: convert percentage of screen width to px
+const wp = (percent) => (width * percent) / 100;
+
 export default function Loader({
   state,
-  overlayColor =
-  loaderColor =
-  size =
-  gifSource = {
-
+  overlayColor = "rgba(0,0,0,0.4)",
+  loaderColor = "lightblue",
+  size = 12, // % of screen width
+  gifSource = {
+    uri: "http://emr.amalaims.org:9393/file/getCommonFile/image/Face.gif",
+  },
+  message = "",
   messageStyle = {},
-  animationType =
+  animationType = "fade",
   hasBackground = true,
-  borderRadius =
+  borderRadius = 4, // %
   shadow = true,
   imageurl,
 }) {
@@ -30,30 +37,29 @@ export default function Loader({
   const [fade] = useState(new Animated.Value(0));
   const [imageSource, setImageSource] = useState(gifSource);
 
-  const error = getLoaderGif(
+  const error = getLoaderGif(
+    state.animationState,
+    state.currentStep,
+    "http://emr.amalaims.org:9393/",
+    imageurl
+  );
 
-  // Reset imageSource whenever gifSource prop changes
   useEffect(() => {
     setImageSource(gifSource);
   }, [gifSource]);
 
   const handleImageError = () => {
-
-    setImageSource(error);
-    } catch (err) {
-      console.error("Loader image error:", err);
-    }
+    setImageSource(error);
   };
 
-  // Rotation, pulse, and fade-in animations
   useEffect(() => {
-    if (!gifSource) {
+    if (!gifSource) {
       Animated.loop(
         Animated.timing(rotation, {
           toValue: 1,
           duration: 1500,
           easing: Easing.linear,
-          useNativeDriver: true
+          useNativeDriver: true,
         })
       ).start();
     }
@@ -63,80 +69,90 @@ export default function Loader({
         Animated.timing(pulse, {
           toValue: 1.1,
           duration: 800,
-          useNativeDriver: true
+          useNativeDriver: true,
         }),
         Animated.timing(pulse, {
           toValue: 1,
           duration: 800,
-          useNativeDriver: true
-        })
+          useNativeDriver: true,
+        }),
       ])
     ).start();
 
     Animated.timing(fade, {
       toValue: 1,
       duration: 300,
-      useNativeDriver: true
+      useNativeDriver: true,
     }).start();
   }, []);
 
   const spin = rotation.interpolate({
     inputRange: [0, 1],
-    outputRange: [
+    outputRange: ["0deg", "360deg"],
   });
 
+  const loaderSize = wp(size);
+  const borderSize = loaderSize * 0.12;
+
   const loaderContent = gifSource ? (
-    <
-      style={[
+    <Image
+      style={[
+        styles.icon,
+        { width: loaderSize, height: loaderSize },
+      ]}
       source={imageSource}
-      resizeMode={FastImage.resizeMode.contain}
       onError={handleImageError}
     />
   ) : (
-    <Animated.View
-
-
-        borderColor: loaderColor,
-        transform: [{ rotate: spin }, { scale: pulse }],
-        width: normalize(size),
-        height: normalize(size),
-        borderWidth: normalize(size / 10)
-      }
-    ]}>
-      <View style={[
-        styles.innerCircle,
+    <Animated.View
+      style={[
+        styles.defaultLoader,
        {
-
-
-
-
-
+          width: loaderSize,
+          height: loaderSize,
+          borderWidth: borderSize,
+          borderColor: loaderColor,
+          transform: [{ rotate: spin }, { scale: pulse }],
+        },
+      ]}
+    >
+      <View
+        style={[
+          styles.innerCircle,
+          {
+            width: loaderSize / 2,
+            height: loaderSize / 2,
+            backgroundColor: loaderColor,
+          },
+        ]}
+      />
     </Animated.View>
   );
 
   return (
     <Modal
       animationType={animationType}
-      transparent
+      transparent
       visible={state.isLoading}
-      onRequestClose={() => {
+      onRequestClose={() => {}}
     >
-      <Animated.View
-
-
-        backgroundColor: overlayColor,
-
-
-
-
-
-
-
-
-
-
-
-
+      <Animated.View
+        style={[
+          styles.modalContainer,
+          { backgroundColor: overlayColor, opacity: fade },
+        ]}
+      >
+        <Animated.View
+          style={[
+            styles.loaderContainer,
+            {
+              backgroundColor: hasBackground ? "white" : "transparent",
+              borderRadius: wp(borderRadius),
+              transform: [{ scale: pulse }],
+            },
+            shadow && styles.shadowStyle,
+          ]}
+        >
          {loaderContent}
          {message ? (
            <Text style={[styles.messageText, messageStyle]}>
@@ -152,40 +168,36 @@ export default function Loader({
 const styles = StyleSheet.create({
   modalContainer: {
     flex: 1,
-    justifyContent:
-    alignItems:
+    justifyContent: "center",
+    alignItems: "center",
   },
   loaderContainer: {
-    padding:
-    justifyContent: 'center',
-    alignItems: 'center',
-  },
-  icon_style: {
+    padding: wp(5),
     justifyContent: "center",
-    alignItems: "center"
+    alignItems: "center",
+  },
+  icon: {
+    resizeMode: "contain",
   },
   defaultLoader: {
-    borderRadius:
-    justifyContent:
-    alignItems:
+    borderRadius: 1000,
+    justifyContent: "center",
+    alignItems: "center",
   },
   innerCircle: {
-    borderRadius:
+    borderRadius: 1000,
   },
   messageText: {
-    marginTop:
-    fontSize:
-    color:
-    textAlign:
+    marginTop: wp(3),
+    fontSize: wp(3.5),
+    color: "#555",
+    textAlign: "center",
   },
   shadowStyle: {
     shadowColor: "#000",
-    shadowOffset: {
-      width: 0,
-      height: 2,
-    },
+    shadowOffset: { width: 0, height: 2 },
     shadowOpacity: 0.25,
     shadowRadius: 3.84,
     elevation: 5,
-  }
+  },
 });
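Note on the Loader changes above: 0.0.43 drops react-native-fast-image and react-native-elements' normalize() in favour of a plain Image plus a local wp() percentage helper, so size and borderRadius are now interpreted as percentages of the screen width. A minimal usage sketch, assuming the component is imported directly from its source path and that the state shape mirrors what index.js passes; the prop values are illustrative, not package defaults:

// Sketch only - prop values are examples, not defaults enforced by the package.
import React from "react";
import Loader from "react-native-biometric-verifier/src/components/Loader";
import { Global } from "react-native-biometric-verifier/src/utils/Global";

export default function LoaderPreview() {
  return (
    <Loader
      state={{
        isLoading: true,
        animationState: Global.AnimationStates.processing,
        currentStep: "Identity Verification",
      }}
      size={12}          // now a % of screen width, previously a normalize() unit
      borderRadius={4}   // also a percentage since 0.0.43
      message="Verifying..."
    />
  );
}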
package/src/components/StepIndicator.js
CHANGED

@@ -7,45 +7,17 @@ import { Global } from '../utils/Global';
 const StepIndicator = ({ currentStep, qrscan }) => {
   return (
     <View style={styles.statusContainer}>
-      {/* Identity Step */}
-      <View style={styles.statusItem}>
-        <Icon
-          name="face"
-          size={20}
-          color={
-            currentStep === "Identity Verification" ||
-            currentStep === "Location Verification" ||
-            currentStep === "Complete"
-              ? Global.AppTheme.primary
-              : Global.AppTheme.light
-          }
-          style={styles.statusIcon}
-        />
-        <Text
-          style={[
-            styles.statusText,
-            (currentStep === "Identity Verification" ||
-              currentStep === "Location Verification" ||
-              currentStep === "Complete") && styles.statusTextActive,
-          ]}
-        >
-          Identity
-        </Text>
-      </View>
-
-      {/* Show Location only if qrscan = true */}
       {qrscan && (
         <>
-          <View style={styles.statusSeparator} />
           <View style={styles.statusItem}>
             <Icon
               name="location-on"
               size={20}
               color={
                 currentStep === "Location Verification" ||
-
-                  ? Global.AppTheme.
-                  : Global.AppTheme.
+                currentStep === "Complete"
+                  ? Global.AppTheme.light
+                  : Global.AppTheme.primary
               }
               style={styles.statusIcon}
             />
@@ -56,11 +28,41 @@ const StepIndicator = ({ currentStep, qrscan }) => {
               currentStep === "Complete") && styles.statusTextActive,
             ]}
           >
-
+            QR
           </Text>
         </View>
+        <View style={styles.statusSeparator} />
         </>
       )}
+
+      {/* Identity Step */}
+      <View style={styles.statusItem}>
+        <Icon
+          name="face"
+          size={20}
+          color={
+            currentStep === "Identity Verification" ||
+            currentStep === "Location Verification" ||
+            currentStep === "Complete"
+              ? Global.AppTheme.light
+              : Global.AppTheme.primary
+          }
+          style={styles.statusIcon}
+        />
+        <Text
+          style={[
+            styles.statusText,
+            (currentStep === "Identity Verification" ||
+              currentStep === "Location Verification" ||
+              currentStep === "Complete") && styles.statusTextActive,
+          ]}
+        >
+          ID
+        </Text>
+      </View>
+
+      {/* Show Location only if qrscan = true */}
+
     </View>
   );
 };
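The StepIndicator rewrite above swaps the order of the two steps (QR first, then ID) and inverts the active/inactive theme colours; its props are unchanged. A usage sketch, assuming the component is imported from its source path; the step strings are the ones index.js passes in this release:

import React from "react";
import StepIndicator from "react-native-biometric-verifier/src/components/StepIndicator";

// Sketch only: "Location Verification", "Identity Verification" and "Complete"
// are the currentStep values used by index.js in 0.0.43.
export const VerificationSteps = ({ step }) => (
  <StepIndicator currentStep={step} qrscan={true} />
);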
package/src/hooks/useCountdown.js
CHANGED

@@ -1,42 +1,44 @@
 import { useRef, useState, useEffect, useCallback } from 'react';
-import { Global } from '../utils/Global';
 
 /**
  * Custom hook for a countdown timer with pause/resume functionality.
  *
+ * @param {number} duration - Countdown duration in seconds
  * @param {Function} onExpire - Callback fired when countdown reaches zero.
- * @returns {Object}
+ * @returns {Object}
  */
-export const useCountdown = (onExpire) => {
-  const [countdown, setCountdown] = useState(
+export const useCountdown = (duration, onExpire) => {
+  const [countdown, setCountdown] = useState(duration);
   const timerRef = useRef(null);
-  const countdownRef = useRef(
+  const countdownRef = useRef(duration);
   const isPausedRef = useRef(false);
   const onExpireRef = useRef(onExpire);
 
-  //
+  // Keep onExpire updated
   useEffect(() => {
     onExpireRef.current = onExpire;
   }, [onExpire]);
 
+  // Update duration dynamically if it changes
+  useEffect(() => {
+    countdownRef.current = duration;
+    setCountdown(duration);
+  }, [duration]);
+
   // Start or restart the countdown
   const startCountdown = useCallback((onExpireCallback) => {
     try {
-
-
-      setCountdown(Global.CountdownDuration);
+      countdownRef.current = duration;
+      setCountdown(duration);
       isPausedRef.current = false;
 
-      // Clear any existing timer
       if (timerRef.current) {
         clearInterval(timerRef.current);
         timerRef.current = null;
       }
 
-      // Use provided callback or stored one
       const expireCallback = onExpireCallback || onExpireRef.current;
 
-      // Start new timer
       timerRef.current = setInterval(() => {
         if (isPausedRef.current) return;
 
@@ -45,10 +47,7 @@ export const useCountdown = (onExpire) => {
         if (countdownRef.current <= 0) {
           clearInterval(timerRef.current);
           timerRef.current = null;
-
-          if (typeof expireCallback === 'function') {
-            expireCallback();
-          }
+          expireCallback?.();
         } else {
           setCountdown(countdownRef.current);
         }
@@ -56,23 +55,20 @@ export const useCountdown = (onExpire) => {
     } catch (error) {
       console.error('Error in startCountdown:', error);
     }
-  }, []);
+  }, [duration]);
 
-  // Pause the countdown
   const pauseCountdown = useCallback(() => {
     isPausedRef.current = true;
   }, []);
 
-  // Resume the countdown
   const resumeCountdown = useCallback(() => {
     isPausedRef.current = false;
   }, []);
 
-  // Reset countdown to initial duration
   const resetCountdown = useCallback(() => {
     try {
-      countdownRef.current =
-      setCountdown(
+      countdownRef.current = duration;
+      setCountdown(duration);
       isPausedRef.current = false;
 
       if (timerRef.current) {
@@ -82,15 +78,19 @@ export const useCountdown = (onExpire) => {
     } catch (error) {
       console.error('Error in resetCountdown:', error);
     }
-  }, []);
+  }, [duration]);
 
-
-
+  const getCurrentCountdown = useCallback(
+    () => countdownRef.current,
+    []
+  );
 
-
-
+  const isPaused = useCallback(
+    () => isPausedRef.current,
+    []
+  );
 
-  //
+  // Cleanup
   useEffect(() => {
     return () => {
       if (timerRef.current) {
@@ -100,13 +100,13 @@ export const useCountdown = (onExpire) => {
     };
   }, []);
 
-  return {
-    countdown,
-    startCountdown,
-    resetCountdown,
-    pauseCountdown,
+  return {
+    countdown,
+    startCountdown,
+    resetCountdown,
+    pauseCountdown,
     resumeCountdown,
     getCurrentCountdown,
-    isPaused
+    isPaused,
   };
 };
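With the change above, useCountdown no longer reads Global.CountdownDuration; the duration is now the hook's first argument and all of its callbacks re-key on it. A sketch of the new call signature, where the 90-second value and the logging callback are examples only:

import { useCountdown } from "react-native-biometric-verifier/src/hooks/useCountdown";

export function useVerificationTimer() {
  // duration first, onExpire second (new in this release)
  const timer = useCountdown(90, () => console.log("countdown expired"));

  // startCountdown still accepts an optional per-call expiry callback and
  // falls back to the one given to the hook.
  return timer; // { countdown, startCountdown, resetCountdown, pauseCountdown, resumeCountdown, getCurrentCountdown, isPaused }
}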
package/src/hooks/useFaceDetectionFrameProcessor.js
CHANGED

@@ -7,29 +7,7 @@ import {
   initializeFaceAntiSpoof,
   isFaceAntiSpoofAvailable,
 } from 'react-native-vision-camera-spoof-detector';
-
-// Optimized constants - tuned for performance
-const FACE_STABILITY_THRESHOLD = 3;
-const FACE_MOVEMENT_THRESHOLD = 15;
-const FRAME_PROCESSOR_MIN_INTERVAL_MS = 500;
-const MIN_FACE_SIZE = 0.2;
-
-// Blink detection
-const BLINK_THRESHOLD = 0.3;
-const REQUIRED_BLINKS = 3;
-
-// Anti-spoofing
-const ANTI_SPOOF_CONFIDENCE_THRESHOLD = 0.7;
-const REQUIRED_CONSECUTIVE_LIVE_FRAMES = 3;
-
-// Face centering
-const FACE_CENTER_THRESHOLD_X = 0.2;
-const FACE_CENTER_THRESHOLD_Y = 0.15;
-const MIN_FACE_CENTERED_FRAMES = 2;
-
-// Performance optimization constants
-const MAX_FRAME_PROCESSING_TIME_MS = 500;
-const BATCH_UPDATE_THRESHOLD = 3;
+import { Global } from 'react-native-biometric-verifier/src/utils/Global';
 
 export const useFaceDetectionFrameProcessor = ({
   onStableFaceDetected = () => { },
@@ -46,7 +24,7 @@ export const useFaceDetectionFrameProcessor = ({
   landmarkMode: 'none',
   contourMode: 'none',
   classificationMode: livenessLevel === 1 ? 'all' : 'none',
-  minFaceSize: MIN_FACE_SIZE,
+  minFaceSize: Global.MIN_FACE_SIZE,
 });
 
 const isMounted = useRef(true);
@@ -230,8 +208,8 @@ export const useFaceDetectionFrameProcessor = ({
     const frameCenterY = frameHeight / 2;
 
     return (
-      Math.abs(faceCenterX - frameCenterX) <= frameWidth * FACE_CENTER_THRESHOLD_X &&
-      Math.abs(faceCenterY - frameCenterY) <= frameHeight * FACE_CENTER_THRESHOLD_Y
+      Math.abs(faceCenterX - frameCenterX) <= frameWidth * Global.FACE_CENTER_THRESHOLD_X &&
+      Math.abs(faceCenterY - frameCenterY) <= frameHeight * Global.FACE_CENTER_THRESHOLD_Y
     );
   });
 
@@ -243,7 +221,7 @@ export const useFaceDetectionFrameProcessor = ({
       state.flags.captured ||
       isLoading ||
       !state.flags.isActive ||
-      (now - state.lastProcessedTime <
+      (now - state.lastProcessedTime < Global.FACE_MOVEMENT_THRESHOLD)
     );
   });
 
@@ -265,7 +243,7 @@ export const useFaceDetectionFrameProcessor = ({
     }
 
     // Performance guard - don't process if taking too long
-    if (processingStart - frameProcessingStartTime.current < MAX_FRAME_PROCESSING_TIME_MS) {
+    if (processingStart - frameProcessingStartTime.current < Global.MAX_FRAME_PROCESSING_TIME_MS) {
       frame.release?.();
       return;
     }
@@ -348,13 +326,13 @@ export const useFaceDetectionFrameProcessor = ({
 
     if (centered) {
       state.centering.centeredFrames = Math.min(
-        MIN_FACE_CENTERED_FRAMES,
+        Global.MIN_FACE_CENTERED_FRAMES,
         state.centering.centeredFrames + 1
       );
     } else {
       state.centering.centeredFrames = 0;
     }
-    state.flags.isFaceCentered = state.centering.centeredFrames >= MIN_FACE_CENTERED_FRAMES;
+    state.flags.isFaceCentered = state.centering.centeredFrames >= Global.MIN_FACE_CENTERED_FRAMES;
 
     // Anti-spoof detection only when face is centered and single
     if (state.flags.isFaceCentered) {
@@ -366,19 +344,19 @@ export const useFaceDetectionFrameProcessor = ({
       const isLive = antiSpoofResult.isLive === true;
       const confidence = antiSpoofResult.combinedScore || antiSpoofResult.neuralNetworkScore || 0;
 
-      if (isLive && confidence > ANTI_SPOOF_CONFIDENCE_THRESHOLD) {
+      if (isLive && confidence > Global.ANTI_SPOOF_CONFIDENCE_THRESHOLD) {
         state.antiSpoof.consecutiveLiveFrames = Math.min(
-          REQUIRED_CONSECUTIVE_LIVE_FRAMES,
+          Global.REQUIRED_CONSECUTIVE_LIVE_FRAMES,
           state.antiSpoof.consecutiveLiveFrames + 1
         );
       } else {
         state.antiSpoof.consecutiveLiveFrames = Math.max(0, state.antiSpoof.consecutiveLiveFrames - 1);
       }
-      state.antiSpoof.isLive = state.antiSpoof.consecutiveLiveFrames >= REQUIRED_CONSECUTIVE_LIVE_FRAMES;
+      state.antiSpoof.isLive = state.antiSpoof.consecutiveLiveFrames >= Global.REQUIRED_CONSECUTIVE_LIVE_FRAMES;
       state.antiSpoof.confidence = confidence;
 
       // Batch anti-spoof updates
-      if (state.performance.batchCounter % BATCH_UPDATE_THRESHOLD === 0) {
+      if (state.performance.batchCounter % Global.BATCH_UPDATE_THRESHOLD === 0) {
         runOnAntiSpoof({
           isLive: state.antiSpoof.isLive,
           confidence: state.antiSpoof.confidence,
@@ -410,7 +388,7 @@ export const useFaceDetectionFrameProcessor = ({
     else if (newLivenessStep === 1) {
       const leftEye = face.leftEyeOpenProbability ?? 1;
       const rightEye = face.rightEyeOpenProbability ?? 1;
-      const eyesClosed = leftEye < BLINK_THRESHOLD && rightEye < BLINK_THRESHOLD;
+      const eyesClosed = leftEye < Global.BLINK_THRESHOLD && rightEye < Global.BLINK_THRESHOLD;
 
       if (eyesClosed && !newEyeClosed) {
         newBlinkCount++;
@@ -420,7 +398,7 @@ export const useFaceDetectionFrameProcessor = ({
       newEyeClosed = false;
     }
 
-    if (newBlinkCount >= REQUIRED_BLINKS) {
+    if (newBlinkCount >= Global.REQUIRED_BLINKS) {
       newLivenessStep = 2;
       runOnLiveness(newLivenessStep);
     }
@@ -434,7 +412,7 @@ export const useFaceDetectionFrameProcessor = ({
     } else {
       const dx = Math.abs(x - state.faceTracking.lastX);
       const dy = Math.abs(y - state.faceTracking.lastY);
-      newStableCount = (dx < FACE_MOVEMENT_THRESHOLD && dy < FACE_MOVEMENT_THRESHOLD)
+      newStableCount = (dx < Global.FACE_MOVEMENT_THRESHOLD && dy < Global.FACE_MOVEMENT_THRESHOLD)
         ? state.faceTracking.stableCount + 1
         : 1;
     }
@@ -451,10 +429,10 @@ export const useFaceDetectionFrameProcessor = ({
     state.flags.eyeClosed = newEyeClosed;
     state.performance.batchCounter++;
 
-    const progress = Math.min(100, (newStableCount / FACE_STABILITY_THRESHOLD) * 100);
+    const progress = Math.min(100, (newStableCount / Global.FACE_STABILITY_THRESHOLD) * 100);
 
     // Batch face updates
-    if (state.performance.batchCounter % BATCH_UPDATE_THRESHOLD === 0) {
+    if (state.performance.batchCounter % Global.BATCH_UPDATE_THRESHOLD === 0) {
       runOnFaces(1, progress, newLivenessStep, state.flags.isFaceCentered, {
         isLive: state.antiSpoof.isLive,
         confidence: state.antiSpoof.confidence,
@@ -466,14 +444,14 @@ export const useFaceDetectionFrameProcessor = ({
 
     // Capture condition - optimized
     const shouldCapture = !state.flags.captured && (
-      newStableCount >= FACE_STABILITY_THRESHOLD &&
+      newStableCount >= Global.FACE_STABILITY_THRESHOLD &&
       state.antiSpoof.isLive &&
-      state.antiSpoof.consecutiveLiveFrames >= REQUIRED_CONSECUTIVE_LIVE_FRAMES &&
+      state.antiSpoof.consecutiveLiveFrames >= Global.REQUIRED_CONSECUTIVE_LIVE_FRAMES &&
       state.flags.isFaceCentered &&
       (localState.livenessLevel === 0 || (
         localState.livenessLevel === 1 &&
         newLivenessStep === 2 &&
-        newBlinkCount >= REQUIRED_BLINKS
+        newBlinkCount >= Global.REQUIRED_BLINKS
       ))
     );
 
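All of the detection thresholds used by the frame processor above now resolve through static fields on Global rather than module-local constants. One practical consequence, stated as an assumption rather than a documented API: because they are plain static class fields read at call time, an app could adjust them before the camera screen mounts, for example:

// Assumption: overriding the statics works because the frame processor reads
// them on every frame; this is inferred from the diff, not a documented API.
import { Global } from "react-native-biometric-verifier/src/utils/Global";

Global.REQUIRED_BLINKS = 2;                    // default 3
Global.ANTI_SPOOF_CONFIDENCE_THRESHOLD = 0.8;  // default 0.7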
package/src/index.js
CHANGED
@@ -16,8 +16,6 @@ import {
   Animated,
 } from "react-native";
 import Icon from "react-native-vector-icons/MaterialIcons";
-import { useNavigation } from "@react-navigation/native";
-
 // Custom hooks
 import { useCountdown } from "./hooks/useCountdown";
 import { useGeolocation } from "./hooks/useGeolocation";
@@ -40,11 +38,9 @@ import CaptureImageWithoutEdit from "./components/CaptureImageWithoutEdit";
 import StepIndicator from "./components/StepIndicator";
 
 const BiometricModal = React.memo(
-  ({ data, qrscan = false, callback, apiurl, onclose, frameProcessorFps, livenessLevel, fileurl, imageurl }) => {
-    const navigation = useNavigation();
-
+  ({ data, depkey, qrscan = false, callback, apiurl, onclose, frameProcessorFps, livenessLevel, fileurl, imageurl, navigation, MaxDistanceMeters = 50, duration = 100 }) => {
     // Custom hooks
-    const { countdown, startCountdown, resetCountdown, pauseCountdown, resumeCountdown } = useCountdown();
+    const { countdown, startCountdown, resetCountdown, pauseCountdown, resumeCountdown } = useCountdown(duration);
     const { requestLocationPermission, getCurrentLocation } = useGeolocation();
     const { convertImageToBase64 } = useImageProcessing();
     const { notification, fadeAnim, slideAnim, notifyMessage, clearNotification } = useNotifyMessage();
@@ -52,13 +48,14 @@ const BiometricModal = React.memo(
 
     // State
     const [modalVisible, setModalVisible] = useState(false);
-    const [cameraType, setCameraType] = useState("
+    const [cameraType, setCameraType] = useState("back"); // Start with back camera for QR scan
     const [state, setState] = useState({
       isLoading: false,
       loadingType: Global.LoadingTypes.none,
       currentStep: "Start",
       employeeData: null,
-      animationState: Global.AnimationStates.
+      animationState: Global.AnimationStates.qrScan, // Start with QR scan animation
+      qrData: null, // Store QR data for later use in face recognition
     });
 
     // Refs
@@ -67,7 +64,6 @@ const BiometricModal = React.memo(
     const responseRef = useRef(null);
     const processedRef = useRef(false);
     const resetTimeoutRef = useRef(null);
-
     // Animation values
     const iconScaleAnim = useRef(new Animated.Value(1)).current;
     const iconOpacityAnim = useRef(new Animated.Value(0)).current;
@@ -156,7 +152,8 @@ const BiometricModal = React.memo(
         loadingType: Global.LoadingTypes.none,
         currentStep: "Start",
         employeeData: null,
-        animationState: Global.AnimationStates.
+        animationState: Global.AnimationStates.qrScan,
+        qrData: null,
       });
 
       setModalVisible(false);
@@ -214,112 +211,7 @@ const BiometricModal = React.memo(
       return true;
     }, [apiurl, handleProcessError]);
 
-    //
-    const uploadFaceScan = useCallback(
-      async (selfie) => {
-        if (!validateApiUrl()) return;
-        const currentData = dataRef.current;
-
-        if (!currentData) {
-          handleProcessError("Employee data not found.");
-          return;
-        }
-
-        updateState({
-          isLoading: true,
-          loadingType: Global.LoadingTypes.faceRecognition,
-          animationState: Global.AnimationStates.processing,
-        });
-
-        InteractionManager.runAfterInteractions(async () => {
-          let base64;
-
-          try {
-            updateState({
-              loadingType: Global.LoadingTypes.imageProcessing,
-            });
-
-            base64 = await convertImageToBase64(selfie?.uri);
-          } catch (err) {
-            console.error("Image conversion failed:", err);
-            handleProcessError("Image conversion failed.", err);
-            return;
-          }
-
-          if (!base64) {
-            handleProcessError("Failed to process image.");
-            return;
-          }
-
-          try {
-            const body = { image: base64 };
-            const header = { faceid: currentData };
-            const buttonapi = `${apiurl}python/recognize`;
-
-            updateState({
-              loadingType: Global.LoadingTypes.networkRequest,
-            });
-
-            const response = await networkServiceCall(
-              "POST",
-              buttonapi,
-              header,
-              body
-            );
-
-            if (response?.httpstatus === 200) {
-              responseRef.current = response;
-
-              updateState({
-                employeeData: response.data?.data || null,
-                animationState: Global.AnimationStates.success,
-                isLoading: false,
-                loadingType: Global.LoadingTypes.none,
-              });
-
-              notifyMessage("Identity verified successfully!", "success");
-
-              if (qrscan) {
-                setTimeout(() => startQRCodeScan(), 1200);
-              } else {
-                safeCallback(responseRef.current);
-
-                if (resetTimeoutRef.current) {
-                  clearTimeout(resetTimeoutRef.current);
-                }
-
-                resetTimeoutRef.current = setTimeout(() => {
-                  resetState();
-                }, 1200);
-              }
-            } else {
-              handleProcessError(
-                response?.data?.message ||
-                  "Face not recognized. Please try again."
-              );
-            }
-          } catch (error) {
-            console.error("Network request failed:", error);
-            handleProcessError(
-              "Connection error. Please check your network.",
-              error
-            );
-          }
-        });
-      },
-      [
-        convertImageToBase64,
-        notifyMessage,
-        qrscan,
-        resetState,
-        updateState,
-        validateApiUrl,
-        safeCallback,
-        handleProcessError
-      ]
-    );
-
-    // QR code processing
+    // QR code processing - FIRST STEP
     const handleQRScanned = useCallback(
       async (qrCodeData) => {
         if (!validateApiUrl()) return;
@@ -356,14 +248,12 @@ const BiometricModal = React.memo(
 
         const location = await getCurrentLocation();
 
-        const [latStr, lngStr] = qrString.split(",");
+        const [latStr, lngStr, qrkey] = qrString.split(",");
         const lat = parseFloat(latStr);
         const lng = parseFloat(lngStr);
         const validCoords = !isNaN(lat) && !isNaN(lng);
-        const validDev =
-
-
-        if (validCoords && validDev) {
+        const validDev = !isNaN(location?.latitude) && !isNaN(location?.longitude);
+        if (validCoords && validDev && qrkey === depkey) {
           updateState({
             loadingType: Global.LoadingTypes.calculateDistance,
           });
@@ -374,8 +264,7 @@ const BiometricModal = React.memo(
             location.latitude,
             location.longitude
           );
-
-          if (distance <= Global.MaxDistanceMeters) {
+          if (distance <= MaxDistanceMeters) {
             const locationDetails = {
               qrLocation: {
                 latitude: lat,
@@ -391,27 +280,24 @@ const BiometricModal = React.memo(
               verifiedAt: new Date().toISOString(),
             };
 
+            // Store location details and QR data for face recognition
             responseRef.current = {
-              ...(responseRef.current || {}), // existing faceData
               location: locationDetails,
             };
 
-
-
-            notifyMessage("Location verified successfully!", "success");
-
+            // Store QR data in state for face recognition step
             updateState({
+              qrData: qrString,
               animationState: Global.AnimationStates.success,
               isLoading: false,
               loadingType: Global.LoadingTypes.none,
             });
 
-
-            clearTimeout(resetTimeoutRef.current);
-            }
+            notifyMessage("Location verified successfully!", "success");
 
-
-
+            // Start face recognition after a brief delay
+            setTimeout(() => {
+              startFaceRecognition();
             }, 1200);
           }
           else {
@@ -434,49 +320,165 @@ const BiometricModal = React.memo(
         getCurrentLocation,
         notifyMessage,
         requestLocationPermission,
+        updateState,
+        validateApiUrl,
+        handleProcessError
+      ]
+    );
+
+    // Face scan upload - SECOND STEP
+    const uploadFaceScan = useCallback(
+      async (selfie) => {
+        if (!validateApiUrl()) return;
+
+        // Check if QR scan was completed successfully
+        if (!state.qrData) {
+          handleProcessError("Please complete QR scan first.");
+          return;
+        }
+
+        const currentData = dataRef.current;
+
+        if (!currentData) {
+          handleProcessError("Employee data not found.");
+          return;
+        }
+
+        updateState({
+          isLoading: true,
+          loadingType: Global.LoadingTypes.faceRecognition,
+          animationState: Global.AnimationStates.processing,
+        });
+
+        InteractionManager.runAfterInteractions(async () => {
+          let base64;
+
+          try {
+            updateState({
+              loadingType: Global.LoadingTypes.imageProcessing,
+            });
+
+            base64 = await convertImageToBase64(selfie?.uri);
+          } catch (err) {
+            console.error("Image conversion failed:", err);
+            handleProcessError("Image conversion failed.", err);
+            return;
+          }
+
+          if (!base64) {
+            handleProcessError("Failed to process image.");
+            return;
+          }
+
+          try {
+            const body = { image: base64 };
+            const header = { faceid: currentData };
+            const buttonapi = `${apiurl}python/recognize`;
+
+            updateState({
+              loadingType: Global.LoadingTypes.networkRequest,
+            });
+
+            const response = await networkServiceCall(
+              "POST",
+              buttonapi,
+              header,
+              body
+            );
+
+            if (response?.httpstatus === 200) {
+              // Combine face recognition response with QR location data
+              responseRef.current = {
+                ...responseRef.current, // Contains location data from QR scan
+                ...response.data?.data || {},
+                faceRecognition: response.data?.data || null,
+              };
+
+              updateState({
+                employeeData: response.data?.data || null,
+                animationState: Global.AnimationStates.success,
+                isLoading: false,
+                loadingType: Global.LoadingTypes.none,
+              });
+
+              notifyMessage("Identity verified successfully!", "success");
+
+              // Call the callback with combined data
+              safeCallback(responseRef.current);
+
+              if (resetTimeoutRef.current) {
+                clearTimeout(resetTimeoutRef.current);
+              }
+
+              resetTimeoutRef.current = setTimeout(() => {
+                resetState();
+              }, 1200);
+            } else {
+              handleProcessError(
+                response?.data?.message ||
+                  "Face not recognized. Please try again."
+              );
+            }
+          } catch (error) {
+            console.error("Network request failed:", error);
+            handleProcessError(
+              "Connection error. Please check your network.",
+              error
+            );
+          }
+        });
+      },
+      [
+        convertImageToBase64,
+        notifyMessage,
         resetState,
         updateState,
         validateApiUrl,
         safeCallback,
-        handleProcessError
+        handleProcessError,
+        state.qrData
       ]
     );
 
     // Image capture handler
     const handleImageCapture = useCallback(
       async (capturedData) => {
-        if (state.currentStep === "
-          uploadFaceScan(capturedData);
-        } else if (state.currentStep === "Location Verification") {
+        if (state.currentStep === "Location Verification") {
           handleQRScanned(capturedData);
+        } else if (state.currentStep === "Identity Verification") {
+          uploadFaceScan(capturedData);
         }
       },
       [state.currentStep, uploadFaceScan, handleQRScanned]
     );
 
-    // Start
-    const
+    // Start QR code scan - FIRST STEP
+    const handleStartQRScan = useCallback(() => {
       updateState({
-        currentStep: "
-        animationState: Global.AnimationStates.
+        currentStep: "Location Verification",
+        animationState: Global.AnimationStates.qrScan,
       });
-      setCameraType("
+      setCameraType("back");
     }, [updateState]);
 
-    // Start
-    const
+    // Start face recognition - SECOND STEP
+    const startFaceRecognition = useCallback(() => {
       updateState({
-        currentStep: "
-        animationState: Global.AnimationStates.
+        currentStep: "Identity Verification",
+        animationState: Global.AnimationStates.faceScan,
       });
-      setCameraType("
+      setCameraType("front");
     }, [updateState]);
 
     // Start the verification process
     const startProcess = useCallback(() => {
       startCountdown(handleCountdownFinish);
-
-
+      if (qrscan) {
+        handleStartQRScan();
+      } else {
+        startFaceRecognition();
+      }
+    }, [handleCountdownFinish, handleStartQRScan, startCountdown, startFaceRecognition]);
 
     // Open modal when data is received
     useEffect(() => {
@@ -485,7 +487,7 @@ const BiometricModal = React.memo(
         setModalVisible(true);
         startProcess();
       }
-    }, [data, modalVisible, startProcess]);
+    }, [data, modalVisible, startProcess, qrscan]);
 
     // Determine if camera should be shown
     const shouldShowCamera =
@@ -539,7 +541,10 @@ const BiometricModal = React.memo(
           </View>
 
           <View style={styles.topContainerstep}>
-            <StepIndicator
+            <StepIndicator
+              currentStep={state.currentStep}
+              qrscan={qrscan}
+            />
           </View>
 
           {state.employeeData && (
@@ -558,7 +563,7 @@ const BiometricModal = React.memo(
 
           <View style={styles.timerContainer}>
             <CountdownTimer
-              duration={
+              duration={duration}
               currentTime={countdown}
             />
           </View>
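Taken together, the index.js changes above remove the dependency on @react-navigation/native's useNavigation and make the flow QR-first: navigation, the QR key (depkey), the distance limit, and the countdown duration are now ordinary props. A usage sketch, assuming the component is the package's default export (check the actual export against src/index.js); the prop values are illustrative:

import React from "react";
import BiometricVerifier from "react-native-biometric-verifier";

export function AttendanceScreen({ navigation }) {
  return (
    <BiometricVerifier
      data={"<face-id>"}            // placeholder; supply the enrolled face id
      qrscan={true}
      depkey="DEPT-01"              // must equal the third comma-separated field of the scanned QR string
      MaxDistanceMeters={50}        // prop default 50 in 0.0.43 (previously Global.MaxDistanceMeters = 100)
      duration={100}                // countdown seconds, prop default 100
      apiurl="https://example.com/" // placeholder API base URL
      navigation={navigation}       // now passed in instead of useNavigation()
      callback={(result) => console.log(result)}
    />
  );
}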
package/src/utils/Global.js
CHANGED
@@ -42,7 +42,27 @@ export class Global {
     format: 'JPEG', // 'PNG' or 'JPEG'
     quality: 85, // 0–100
   };
+  // Optimized constants - tuned for performance
+  static FACE_STABILITY_THRESHOLD = 3;
+  static FACE_MOVEMENT_THRESHOLD = 15;
+  static FRAME_PROCESSOR_MIN_INTERVAL_MS = 500;
+  static MIN_FACE_SIZE = 0.2;
+
+  // Blink detection
+  static BLINK_THRESHOLD = 0.3;
+  static REQUIRED_BLINKS = 3;
+
+  // Anti-spoofing
+  static ANTI_SPOOF_CONFIDENCE_THRESHOLD = 0.7;
+  static REQUIRED_CONSECUTIVE_LIVE_FRAMES = 3;
+
+  // Face centering
+  static FACE_CENTER_THRESHOLD_X = 0.2;
+  static FACE_CENTER_THRESHOLD_Y = 0.15;
+  static MIN_FACE_CENTERED_FRAMES = 2;
+
+  // Performance optimization constants
+  static MAX_FRAME_PROCESSING_TIME_MS = 500;
+  static BATCH_UPDATE_THRESHOLD = 3;
 
-  static CountdownDuration = 100; // seconds
-  static MaxDistanceMeters = 100; // Max allowed distance for QR verification
 }