@aws-amplify/ui-react-liveness 3.0.15 → 3.0.16
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/esm/components/FaceLivenessDetector/FaceLivenessDetectorCore.mjs +1 -1
- package/dist/esm/components/FaceLivenessDetector/LivenessCheck/LivenessCameraModule.mjs +1 -1
- package/dist/esm/components/FaceLivenessDetector/LivenessCheck/LivenessCheck.mjs +1 -1
- package/dist/esm/components/FaceLivenessDetector/service/machine/{index.mjs → machine.mjs} +24 -29
- package/dist/esm/components/FaceLivenessDetector/service/types/liveness.mjs +0 -1
- package/dist/esm/components/FaceLivenessDetector/service/utils/constants.mjs +10 -2
- package/dist/esm/components/FaceLivenessDetector/service/utils/getFaceMatchStateInLivenessOval.mjs +59 -0
- package/dist/esm/components/FaceLivenessDetector/service/utils/liveness.mjs +22 -74
- package/dist/esm/components/FaceLivenessDetector/shared/DefaultStartScreenComponents.mjs +1 -1
- package/dist/esm/components/FaceLivenessDetector/shared/FaceLivenessErrorModal.mjs +1 -1
- package/dist/esm/components/FaceLivenessDetector/shared/Hint.mjs +5 -8
- package/dist/esm/version.mjs +1 -1
- package/dist/index.js +110 -110
- package/dist/types/components/FaceLivenessDetector/service/machine/index.d.ts +1 -5
- package/dist/types/components/FaceLivenessDetector/service/machine/machine.d.ts +5 -0
- package/dist/types/components/FaceLivenessDetector/service/types/liveness.d.ts +0 -1
- package/dist/types/components/FaceLivenessDetector/service/types/machine.d.ts +0 -1
- package/dist/types/components/FaceLivenessDetector/service/utils/constants.d.ts +6 -0
- package/dist/types/components/FaceLivenessDetector/service/utils/getFaceMatchStateInLivenessOval.d.ts +17 -0
- package/dist/types/components/FaceLivenessDetector/service/utils/index.d.ts +1 -0
- package/dist/types/components/FaceLivenessDetector/service/utils/liveness.d.ts +1 -8
- package/dist/types/version.d.ts +1 -1
- package/package.json +1 -1
package/dist/esm/components/FaceLivenessDetector/FaceLivenessDetectorCore.mjs
CHANGED
@@ -1,6 +1,6 @@
 import * as React from 'react';
 import { useInterpret } from '@xstate/react';
-import { livenessMachine } from './service/machine/index.mjs';
+import { livenessMachine } from './service/machine/machine.mjs';
 import './service/types/liveness.mjs';
 import '@tensorflow/tfjs-core';
 import '@tensorflow-models/face-detection';
package/dist/esm/components/FaceLivenessDetector/LivenessCheck/LivenessCameraModule.mjs
CHANGED
@@ -2,7 +2,7 @@ import React__default, { useRef, useState } from 'react';
 import { classNames } from '@aws-amplify/ui';
 import { Loader, View, Flex, Text, Label, SelectField, Button } from '@aws-amplify/ui-react';
 import { useColorMode } from '@aws-amplify/ui-react/internal';
-import '../service/machine/index.mjs';
+import '../service/machine/machine.mjs';
 import { FaceMatchState } from '../service/types/liveness.mjs';
 import '@tensorflow/tfjs-core';
 import '@tensorflow-models/face-detection';
package/dist/esm/components/FaceLivenessDetector/LivenessCheck/LivenessCheck.mjs
CHANGED
@@ -1,6 +1,6 @@
 import * as React from 'react';
 import { Flex, Text, Button, View } from '@aws-amplify/ui-react';
-import '../service/machine/index.mjs';
+import '../service/machine/machine.mjs';
 import '../service/types/liveness.mjs';
 import { LivenessErrorState } from '../service/types/error.mjs';
 import '@tensorflow/tfjs-core';
package/dist/esm/components/FaceLivenessDetector/service/machine/{index.mjs → machine.mjs}
CHANGED
@@ -1,9 +1,10 @@
 import { nanoid } from 'nanoid';
 import { createMachine, assign, spawn, actions } from 'xstate';
-import { drawStaticOval, getBoundingBox, getColorsSequencesFromSessionInformation, isCameraDeviceVirtual, getFaceMatchState, isFaceDistanceBelowThreshold, estimateIllumination, getOvalDetailsFromSessionInformation, generateBboxFromLandmarks, drawLivenessOvalInCanvas, getOvalBoundingBox, getIntersectionOverUnion,
+import { drawStaticOval, getBoundingBox, getColorsSequencesFromSessionInformation, isCameraDeviceVirtual, getFaceMatchState, isFaceDistanceBelowThreshold, estimateIllumination, getOvalDetailsFromSessionInformation, generateBboxFromLandmarks, drawLivenessOvalInCanvas, getOvalBoundingBox, getIntersectionOverUnion, getStaticLivenessOvalDetails } from '../utils/liveness.mjs';
 import { FaceMatchState } from '../types/liveness.mjs';
 import { LivenessErrorState } from '../types/error.mjs';
 import { BlazeFaceFaceDetection } from '../utils/blazefaceFaceDetection.mjs';
+import { getFaceMatchStateInLivenessOval } from '../utils/getFaceMatchStateInLivenessOval.mjs';
 import { LivenessStreamProvider } from '../utils/streamProvider.mjs';
 import { FreshnessColorDisplay } from '../utils/freshnessColorDisplay.mjs';
 import { isServerSesssionInformationEvent, isDisconnectionEvent, isValidationExceptionEvent, isInternalServerExceptionEvent, isThrottlingExceptionEvent, isServiceQuotaExceededExceptionEvent, isInvalidSignatureRegionException } from '../utils/eventUtils.mjs';
@@ -12,7 +13,6 @@ import { WS_CLOSURE_CODE } from '../utils/constants.mjs';
 
 const CAMERA_ID_KEY = 'AmplifyLivenessCameraId';
 const DEFAULT_FACE_FIT_TIMEOUT = 7000;
-const MIN_FACE_MATCH_TIME = 1000;
 let responseStream;
 const responseStreamActor = async (callback) => {
     try {
@@ -102,7 +102,6 @@ const livenessMachine = createMachine({
             currentDetectedFace: undefined,
             startFace: undefined,
             endFace: undefined,
-            initialFaceMatchTime: undefined,
         },
         freshnessColorAssociatedParams: {
             freshnessColorEl: undefined,
@@ -287,6 +286,8 @@ const livenessMachine = createMachine({
                 100: { target: 'checkRecordingStarted' },
             },
         },
+        // Evaluates face match and moves to checkMatch
+        // which continually checks for match until either timeout or face match
         ovalMatching: {
             entry: 'cancelOvalDrawingTimeout',
             invoke: {
@@ -297,29 +298,32 @@ const livenessMachine = createMachine({
             },
         },
     },
+    // If `hasFaceMatchedInOval` is true, then move to `delayBeforeFlash`, which pauses
+    // for one second to show "Hold still" text before moving to `flashFreshnessColors`.
+    // If not, move back to ovalMatching and re-evaluate match state
     checkMatch: {
         after: {
             0: {
-                target: 'flashFreshnessColors',
-                cond: 'hasFaceMatchedInOvalWithMinTime',
+                target: 'delayBeforeFlash',
+                cond: 'hasFaceMatchedInOval',
                 actions: [
+                    'setFaceMatchTimeAndStartFace',
                     'updateEndFaceMatch',
                     'setupFlashFreshnessColors',
                     'cancelOvalMatchTimeout',
                     'cancelOvalDrawingTimeout',
                 ],
             },
-            0.1: {
-                target: 'ovalMatching',
-                cond: 'hasFaceMatchedInOval',
-                actions: 'setFaceMatchTimeAndStartFace',
-            },
             1: {
                 target: 'ovalMatching',
-                cond: 'hasNotFaceMatchedInOval',
             },
         },
     },
+    delayBeforeFlash: {
+        after: {
+            1000: 'flashFreshnessColors',
+        },
+    },
     flashFreshnessColors: {
         invoke: {
             src: 'flashColors',
@@ -584,10 +588,6 @@ const livenessMachine = createMachine({
             startFace: context.faceMatchAssociatedParams.startFace === undefined
                 ? context.faceMatchAssociatedParams.currentDetectedFace
                 : context.faceMatchAssociatedParams.startFace,
-            initialFaceMatchTime: context.faceMatchAssociatedParams.initialFaceMatchTime ===
-                undefined
-                ? Date.now()
-                : context.faceMatchAssociatedParams.initialFaceMatchTime,
         };
     },
 }),
@@ -759,21 +759,10 @@ const livenessMachine = createMachine({
     },
     guards: {
         shouldTimeoutOnFailedAttempts: (context) => context.failedAttempts >= context.maxFailedAttempts,
-        hasFaceMatchedInOvalWithMinTime: (context) => {
-            const { faceMatchState, initialFaceMatchTime } = context.faceMatchAssociatedParams;
-            const timeSinceInitialFaceMatch = Date.now() - initialFaceMatchTime;
-            const hasMatched = faceMatchState === FaceMatchState.MATCHED &&
-                timeSinceInitialFaceMatch >= MIN_FACE_MATCH_TIME;
-            return hasMatched;
-        },
         hasFaceMatchedInOval: (context) => {
            return (context.faceMatchAssociatedParams.faceMatchState ===
                FaceMatchState.MATCHED);
        },
-        hasNotFaceMatchedInOval: (context) => {
-            return (context.faceMatchAssociatedParams.faceMatchState !==
-                FaceMatchState.MATCHED);
-        },
        hasSingleFace: (context) => {
            return (context.faceMatchAssociatedParams.faceMatchState ===
                FaceMatchState.FACE_IDENTIFIED);
@@ -990,7 +979,7 @@ const livenessMachine = createMachine({
         videoWidth: videoEl.width,
     });
     // renormalize initial face
-    const renormalizedFace = generateBboxFromLandmarks(initialFace, ovalDetails);
+    const renormalizedFace = generateBboxFromLandmarks(initialFace, ovalDetails, videoEl.videoHeight);
     initialFace.top = renormalizedFace.top;
     initialFace.left = renormalizedFace.left;
     initialFace.height = renormalizedFace.bottom - renormalizedFace.top;
@@ -1019,7 +1008,7 @@ const livenessMachine = createMachine({
     let faceMatchPercentage = 0;
     let detectedFace;
     let illuminationState;
-    const initialFaceBoundingBox = generateBboxFromLandmarks(initialFace, ovalDetails);
+    const initialFaceBoundingBox = generateBboxFromLandmarks(initialFace, ovalDetails, videoEl.videoHeight);
     const { ovalBoundingBox } = getOvalBoundingBox(ovalDetails);
     const initialFaceIntersection = getIntersectionOverUnion(initialFaceBoundingBox, ovalBoundingBox);
     switch (detectedFaces.length) {
@@ -1032,7 +1021,13 @@ const livenessMachine = createMachine({
         case 1: {
             //exactly one face detected, match face with oval;
             detectedFace = detectedFaces[0];
-            const { faceMatchState: faceMatchStateInLivenessOval, faceMatchPercentage: faceMatchPercentageInLivenessOval, } = getFaceMatchStateInLivenessOval(
+            const { faceMatchState: faceMatchStateInLivenessOval, faceMatchPercentage: faceMatchPercentageInLivenessOval, } = getFaceMatchStateInLivenessOval({
+                face: detectedFace,
+                ovalDetails: ovalDetails,
+                initialFaceIntersection,
+                sessionInformation: serverSessionInformation,
+                frameHeight: videoEl.videoHeight,
+            });
             faceMatchState = faceMatchStateInLivenessOval;
             faceMatchPercentage = faceMatchPercentageInLivenessOval;
             break;
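Condensed: this release drops the 1-second minimum-match guard (`hasFaceMatchedInOvalWithMinTime`, backed by the removed `MIN_FACE_MATCH_TIME` and `initialFaceMatchTime`) and models the pause as an explicit `delayBeforeFlash` state instead. A minimal runnable sketch of just that loop, in the same xstate v4 style as the compiled output; the state and guard names come from the diff, while the context stub and final state are illustrative:

```js
import { createMachine } from 'xstate';

// Reduced sketch of the 3.0.16 polling loop: checkMatch advances to
// delayBeforeFlash as soon as the face matches, otherwise it falls back
// to ovalMatching after 1ms and re-evaluates on the next detection pass.
const sketch = createMachine({
  id: 'ovalMatchingSketch',
  initial: 'ovalMatching',
  context: { faceMatchState: 'MATCHED' }, // stub value for illustration
  states: {
    ovalMatching: { after: { 0: 'checkMatch' } },
    checkMatch: {
      after: {
        0: { target: 'delayBeforeFlash', cond: 'hasFaceMatchedInOval' },
        1: { target: 'ovalMatching' },
      },
    },
    // pauses for one second ("Hold still") before flashing colors
    delayBeforeFlash: { after: { 1000: 'flashFreshnessColors' } },
    flashFreshnessColors: { type: 'final' },
  },
}, {
  guards: {
    hasFaceMatchedInOval: (ctx) => ctx.faceMatchState === 'MATCHED',
  },
});
```

The user-visible effect appears to be the same one-second hold, but expressed as a state transition rather than a timestamp comparison inside a guard.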
package/dist/esm/components/FaceLivenessDetector/service/types/liveness.mjs
CHANGED
@@ -14,7 +14,6 @@ var FaceMatchState;
 (function (FaceMatchState) {
     FaceMatchState["MATCHED"] = "MATCHED";
     FaceMatchState["TOO_FAR"] = "TOO FAR";
-    FaceMatchState["TOO_CLOSE"] = "TOO CLOSE";
     FaceMatchState["CANT_IDENTIFY"] = "CANNOT IDENTIFY";
     FaceMatchState["FACE_IDENTIFIED"] = "ONE FACE IDENTIFIED";
     FaceMatchState["TOO_MANY"] = "TOO MANY FACES";
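Note for integrators: `FaceMatchState.TOO_CLOSE` is removed outright, and the rewritten matcher (`getFaceMatchStateInLivenessOval`, added below) reports a closely fitting face as `MATCHED` instead. A hedged migration sketch for app code that branched on the removed member; `handleMatched` and `handleTooFar` are hypothetical callbacks:

```js
// Sketch: fold a former TOO_CLOSE branch into the remaining states.
switch (faceMatchState) {
  case FaceMatchState.MATCHED: // faces that used to report TOO_CLOSE may now land here
    handleMatched();
    break;
  case FaceMatchState.TOO_FAR:
  default:
    handleTooFar();
    break;
}
```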
package/dist/esm/components/FaceLivenessDetector/service/utils/constants.mjs
CHANGED
@@ -1,11 +1,19 @@
 // Face distance is calculated as pupilDistance / ovalWidth.
 // The further away you are from the camera the distance between your pupils will decrease, thus lowering the threshold values.
-// These
+// These FACE_DISTANCE_THRESHOLD values are determined by the science team and should only be changed with their approval.
 // We want to ensure at the start of a check that the user's pupilDistance/ovalWidth is below FACE_DISTANCE_THRESHOLD to ensure that they are starting
 // a certain distance away from the camera.
 const FACE_DISTANCE_THRESHOLD = 0.32;
 const REDUCED_THRESHOLD = 0.4;
 const REDUCED_THRESHOLD_MOBILE = 0.37;
+// Constants from science team to determine ocular distance (space between eyes)
+const PUPIL_DISTANCE_WEIGHT = 2.0;
+const FACE_HEIGHT_WEIGHT = 1.8;
+// Constants from science team to find face match percentage
+const FACE_MATCH_RANGE_MIN = 0;
+const FACE_MATCH_RANGE_MAX = 1;
+const FACE_MATCH_WEIGHT_MIN = 0.25;
+const FACE_MATCH_WEIGHT_MAX = 0.75;
 const WS_CLOSURE_CODE = {
     SUCCESS_CODE: 1000,
     DEFAULT_ERROR_CODE: 4000,
@@ -15,4 +23,4 @@ const WS_CLOSURE_CODE = {
     USER_ERROR_DURING_CONNECTION: 4007,
 };
 
-export { FACE_DISTANCE_THRESHOLD, REDUCED_THRESHOLD, REDUCED_THRESHOLD_MOBILE, WS_CLOSURE_CODE };
+export { FACE_DISTANCE_THRESHOLD, FACE_HEIGHT_WEIGHT, FACE_MATCH_RANGE_MAX, FACE_MATCH_RANGE_MIN, FACE_MATCH_WEIGHT_MAX, FACE_MATCH_WEIGHT_MIN, PUPIL_DISTANCE_WEIGHT, REDUCED_THRESHOLD, REDUCED_THRESHOLD_MOBILE, WS_CLOSURE_CODE };
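The four new `FACE_MATCH_*` constants are the named pieces of the match-percentage formula documented in `getFaceMatchStateInLivenessOval` (added below): `p = max(min(1, 0.75 * (si - s0) / (st - s0) + 0.25), 0)`, where `si` is the current face/oval intersection-over-union, `s0` the initial IoU, and `st` the oval IoU threshold. A small sketch with made-up IoU values:

```js
// p clamps to [FACE_MATCH_RANGE_MIN, FACE_MATCH_RANGE_MAX] and is reported as a percentage.
const faceMatchPercentage = (si, s0, st) =>
  Math.max(Math.min(1, (0.75 * (si - s0)) / (st - s0) + 0.25), 0) * 100;

// Halfway from the starting IoU (0.2) to the threshold (0.8):
console.log(faceMatchPercentage(0.5, 0.2, 0.8)); // 62.5
```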
package/dist/esm/components/FaceLivenessDetector/service/utils/getFaceMatchStateInLivenessOval.mjs
ADDED
@@ -0,0 +1,59 @@
+import { FaceMatchState } from '../types/liveness.mjs';
+import { generateBboxFromLandmarks, getOvalBoundingBox, getIntersectionOverUnion } from './liveness.mjs';
+import { FACE_MATCH_RANGE_MAX, FACE_MATCH_WEIGHT_MAX, FACE_MATCH_WEIGHT_MIN, FACE_MATCH_RANGE_MIN } from './constants.mjs';
+
+/**
+ * Returns the state of the provided face with respect to the provided liveness oval.
+ */
+function getFaceMatchStateInLivenessOval({ face, ovalDetails, initialFaceIntersection, sessionInformation, frameHeight, }) {
+    let faceMatchState;
+    const challengeConfig = sessionInformation?.Challenge?.FaceMovementAndLightChallenge
+        ?.ChallengeConfig;
+    if (!challengeConfig ||
+        !challengeConfig.OvalIouThreshold ||
+        !challengeConfig.OvalIouHeightThreshold ||
+        !challengeConfig.OvalIouWidthThreshold ||
+        !challengeConfig.FaceIouHeightThreshold ||
+        !challengeConfig.FaceIouWidthThreshold) {
+        throw new Error('Challenge information not returned from session information.');
+    }
+    const { OvalIouThreshold, FaceIouHeightThreshold, FaceIouWidthThreshold } = challengeConfig;
+    const faceBoundingBox = generateBboxFromLandmarks(face, ovalDetails, frameHeight);
+    const minFaceX = faceBoundingBox.left;
+    const maxFaceX = faceBoundingBox.right;
+    const minFaceY = faceBoundingBox.top;
+    const maxFaceY = faceBoundingBox.bottom;
+    const { ovalBoundingBox, minOvalX, minOvalY, maxOvalX, maxOvalY } = getOvalBoundingBox(ovalDetails);
+    const intersection = getIntersectionOverUnion(faceBoundingBox, ovalBoundingBox);
+    const intersectionThreshold = OvalIouThreshold;
+    const faceDetectionWidthThreshold = ovalDetails.width * FaceIouWidthThreshold;
+    const faceDetectionHeightThreshold = ovalDetails.height * FaceIouHeightThreshold;
+    /** From Science
+     * p=max(min(1,0.75∗(si−s0)/(st−s0)+0.25)),0)
+     */
+    const faceMatchPercentage = Math.max(Math.min(FACE_MATCH_RANGE_MAX, (FACE_MATCH_WEIGHT_MAX * (intersection - initialFaceIntersection)) /
+        (intersectionThreshold - initialFaceIntersection) +
+        FACE_MATCH_WEIGHT_MIN), FACE_MATCH_RANGE_MIN) * 100;
+    const isFaceOutsideOvalToTheLeft = minOvalX > minFaceX && maxOvalX > maxFaceX;
+    const isFaceOutsideOvalToTheRight = minFaceX > minOvalX && maxFaceX > maxOvalX;
+    const isFaceMatched = intersection > intersectionThreshold;
+    const isFaceMatchedClosely = minOvalY - minFaceY > faceDetectionHeightThreshold ||
+        maxFaceY - maxOvalY > faceDetectionHeightThreshold ||
+        (minOvalX - minFaceX > faceDetectionWidthThreshold &&
+            maxFaceX - maxOvalX > faceDetectionWidthThreshold);
+    if (isFaceMatched) {
+        faceMatchState = FaceMatchState.MATCHED;
+    }
+    else if (isFaceOutsideOvalToTheLeft || isFaceOutsideOvalToTheRight) {
+        faceMatchState = FaceMatchState.OFF_CENTER;
+    }
+    else if (isFaceMatchedClosely) {
+        faceMatchState = FaceMatchState.MATCHED;
+    }
+    else {
+        faceMatchState = FaceMatchState.TOO_FAR;
+    }
+    return { faceMatchState, faceMatchPercentage };
+}
+
+export { getFaceMatchStateInLivenessOval };
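The new module replaces the positional four-argument version previously exported from `liveness.mjs` (removed below) with a single options object plus a new required `frameHeight`. A usage sketch mirroring the call site in machine.mjs; every argument value is an illustrative stand-in for data the state machine supplies at runtime, and `sessionInformation` must carry the Rekognition `ChallengeConfig` thresholds or the function throws:

```js
import { getFaceMatchStateInLivenessOval } from './getFaceMatchStateInLivenessOval.mjs';

// All values below are stand-ins (detector output, oval geometry, server session info).
const { faceMatchState, faceMatchPercentage } = getFaceMatchStateInLivenessOval({
  face: detectedFace,                           // landmarks from the face detector
  ovalDetails,                                  // oval geometry for this challenge
  initialFaceIntersection,                      // IoU of the starting face vs. the oval
  sessionInformation: serverSessionInformation, // must include ChallengeConfig thresholds
  frameHeight: videoEl.videoHeight,             // new in 3.0.16: clamps the face bbox
});
```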
package/dist/esm/components/FaceLivenessDetector/service/utils/liveness.mjs
CHANGED
@@ -1,6 +1,6 @@
-import {
+import { IlluminationState, FaceMatchState } from '../types/liveness.mjs';
 import { LivenessErrorState } from '../types/error.mjs';
-import { FACE_DISTANCE_THRESHOLD, REDUCED_THRESHOLD_MOBILE, REDUCED_THRESHOLD } from './constants.mjs';
+import { PUPIL_DISTANCE_WEIGHT, FACE_HEIGHT_WEIGHT, FACE_DISTANCE_THRESHOLD, REDUCED_THRESHOLD_MOBILE, REDUCED_THRESHOLD } from './constants.mjs';
 
 /**
  * Returns the random number between min and max
@@ -180,87 +180,33 @@ function getPupilDistanceAndFaceHeight(face) {
     const faceHeight = Math.sqrt((eyeCenter[0] - mouth[0]) ** 2 + (eyeCenter[1] - mouth[1]) ** 2);
     return { pupilDistance, faceHeight };
 }
-function generateBboxFromLandmarks(face, oval) {
-    const { leftEye, rightEye, nose, leftEar, rightEar
+function generateBboxFromLandmarks(face, oval, frameHeight) {
+    const { leftEye, rightEye, nose, leftEar, rightEar } = face;
     const { height: ovalHeight, centerY } = oval;
     const ovalTop = centerY - ovalHeight / 2;
     const eyeCenter = [];
     eyeCenter[0] = (leftEye[0] + rightEye[0]) / 2;
     eyeCenter[1] = (leftEye[1] + rightEye[1]) / 2;
     const { pupilDistance: pd, faceHeight: fh } = getPupilDistanceAndFaceHeight(face);
-    const
-
-    const oh = 1.618 * ow;
-    let cx;
+    const ocularWidth = (PUPIL_DISTANCE_WEIGHT * pd + FACE_HEIGHT_WEIGHT * fh) / 2;
+    let centerFaceX, centerFaceY;
     if (eyeCenter[1] <= (ovalTop + ovalHeight) / 2) {
-
+        centerFaceX = (eyeCenter[0] + nose[0]) / 2;
+        centerFaceY = (eyeCenter[1] + nose[1]) / 2;
     }
     else {
-
+        // when face tilts down
+        centerFaceX = eyeCenter[0];
+        centerFaceY = eyeCenter[1];
     }
-    const
-    const
-    const
-    const
+    const faceWidth = ocularWidth;
+    const faceHeight = 1.68 * faceWidth;
+    const top = Math.max(centerFaceY - faceHeight / 2, 0);
+    const bottom = Math.min(centerFaceY + faceHeight / 2, frameHeight);
+    const left = Math.min(centerFaceX - ocularWidth / 2, rightEar[0]);
+    const right = Math.max(centerFaceX + ocularWidth / 2, leftEar[0]);
     return { bottom, left, right, top };
 }
-/**
- * Returns the state of the provided face with respect to the provided liveness oval.
- */
-// eslint-disable-next-line max-params
-function getFaceMatchStateInLivenessOval(face, ovalDetails, initialFaceIntersection, sessionInformation) {
-    let faceMatchState;
-    const challengeConfig = sessionInformation?.Challenge?.FaceMovementAndLightChallenge
-        ?.ChallengeConfig;
-    if (!challengeConfig ||
-        !challengeConfig.OvalIouThreshold ||
-        !challengeConfig.OvalIouHeightThreshold ||
-        !challengeConfig.OvalIouWidthThreshold ||
-        !challengeConfig.FaceIouHeightThreshold ||
-        !challengeConfig.FaceIouWidthThreshold) {
-        throw new Error('Challenge information not returned from session information.');
-    }
-    const { OvalIouThreshold, OvalIouHeightThreshold, OvalIouWidthThreshold, FaceIouHeightThreshold, FaceIouWidthThreshold, } = challengeConfig;
-    const faceBoundingBox = generateBboxFromLandmarks(face, ovalDetails);
-    const minFaceX = faceBoundingBox.left;
-    const maxFaceX = faceBoundingBox.right;
-    const minFaceY = faceBoundingBox.top;
-    const maxFaceY = faceBoundingBox.bottom;
-    const { ovalBoundingBox, minOvalX, minOvalY, maxOvalX, maxOvalY } = getOvalBoundingBox(ovalDetails);
-    const intersection = getIntersectionOverUnion(faceBoundingBox, ovalBoundingBox);
-    const intersectionThreshold = OvalIouThreshold;
-    const ovalMatchWidthThreshold = ovalDetails.width * OvalIouWidthThreshold;
-    const ovalMatchHeightThreshold = ovalDetails.height * OvalIouHeightThreshold;
-    const faceDetectionWidthThreshold = ovalDetails.width * FaceIouWidthThreshold;
-    const faceDetectionHeightThreshold = ovalDetails.height * FaceIouHeightThreshold;
-    /** From Science
-     * p=max(min(1,0.75∗(si−s0)/(st−s0)+0.25)),0)
-     */
-    const faceMatchPercentage = Math.max(Math.min(1, (0.75 * (intersection - initialFaceIntersection)) /
-        (intersectionThreshold - initialFaceIntersection) +
-        0.25), 0) * 100;
-    const faceIsOutsideOvalToTheLeft = minOvalX > minFaceX && maxOvalX > maxFaceX;
-    const faceIsOutsideOvalToTheRight = minFaceX > minOvalX && maxFaceX > maxOvalX;
-    if (intersection > intersectionThreshold &&
-        Math.abs(minOvalX - minFaceX) < ovalMatchWidthThreshold &&
-        Math.abs(maxOvalX - maxFaceX) < ovalMatchWidthThreshold &&
-        Math.abs(maxOvalY - maxFaceY) < ovalMatchHeightThreshold) {
-        faceMatchState = FaceMatchState.MATCHED;
-    }
-    else if (faceIsOutsideOvalToTheLeft || faceIsOutsideOvalToTheRight) {
-        faceMatchState = FaceMatchState.OFF_CENTER;
-    }
-    else if (minOvalY - minFaceY > faceDetectionHeightThreshold ||
-        maxFaceY - maxOvalY > faceDetectionHeightThreshold ||
-        (minOvalX - minFaceX > faceDetectionWidthThreshold &&
-            maxFaceX - maxOvalX > faceDetectionWidthThreshold)) {
-        faceMatchState = FaceMatchState.TOO_CLOSE;
-    }
-    else {
-        faceMatchState = FaceMatchState.TOO_FAR;
-    }
-    return { faceMatchState, faceMatchPercentage };
-}
 /**
  * Returns the illumination state in the provided video frame.
  */
@@ -436,8 +382,10 @@ async function isFaceDistanceBelowThreshold({ faceDetector, videoEl, ovalDetails
     detectedFace = detectedFaces[0];
     const { width } = ovalDetails;
     const { pupilDistance, faceHeight } = getPupilDistanceAndFaceHeight(detectedFace);
-    const
-
+    const calibratedPupilDistance = (PUPIL_DISTANCE_WEIGHT * pupilDistance +
+        FACE_HEIGHT_WEIGHT * faceHeight) /
+        2 /
+        PUPIL_DISTANCE_WEIGHT;
     if (width) {
         isDistanceBelowThreshold =
             calibratedPupilDistance / width <
@@ -469,4 +417,4 @@ function getBoundingBox({ deviceHeight, deviceWidth, height, width, top, left, }
     };
 }
 
-export { clearOvalCanvas, drawLivenessOvalInCanvas, drawStaticOval, estimateIllumination, fillOverlayCanvasFractional, generateBboxFromLandmarks, getBoundingBox, getColorsSequencesFromSessionInformation, getFaceMatchState,
+export { clearOvalCanvas, drawLivenessOvalInCanvas, drawStaticOval, estimateIllumination, fillOverlayCanvasFractional, generateBboxFromLandmarks, getBoundingBox, getColorsSequencesFromSessionInformation, getFaceMatchState, getIntersectionOverUnion, getOvalBoundingBox, getOvalDetailsFromSessionInformation, getRGBArrayFromColorString, getStaticLivenessOvalDetails, isCameraDeviceVirtual, isClientFreshnessColorSequence, isFaceDistanceBelowThreshold };
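With the shipped weights, the `calibratedPupilDistance` expression above simplifies to `0.5 * pupilDistance + 0.45 * faceHeight`, since `(2.0 * pd + 1.8 * fh) / 2 / 2.0` reduces that way. A self-contained sketch of the distance gate with made-up pixel measurements:

```js
const PUPIL_DISTANCE_WEIGHT = 2.0;
const FACE_HEIGHT_WEIGHT = 1.8;
const FACE_DISTANCE_THRESHOLD = 0.32;

// Same arithmetic as isFaceDistanceBelowThreshold, isolated for illustration.
const calibratedPupilDistance = (pd, fh) =>
  (PUPIL_DISTANCE_WEIGHT * pd + FACE_HEIGHT_WEIGHT * fh) / 2 / PUPIL_DISTANCE_WEIGHT;

// Hypothetical measurements: 60px between pupils, 80px eye-to-mouth, 320px oval width.
const ratio = calibratedPupilDistance(60, 80) / 320; // 66 / 320 ≈ 0.206
console.log(ratio < FACE_DISTANCE_THRESHOLD); // true: far enough away to start
```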
package/dist/esm/components/FaceLivenessDetector/shared/DefaultStartScreenComponents.mjs
CHANGED
@@ -2,7 +2,7 @@ import React__default from 'react';
 import { ComponentClassName } from '@aws-amplify/ui';
 import { View, Flex } from '@aws-amplify/ui-react';
 import { CancelButton } from './CancelButton.mjs';
-import '../service/machine/index.mjs';
+import '../service/machine/machine.mjs';
 import '../service/types/liveness.mjs';
 import '@tensorflow/tfjs-core';
 import '@tensorflow-models/face-detection';
package/dist/esm/components/FaceLivenessDetector/shared/FaceLivenessErrorModal.mjs
CHANGED
@@ -1,7 +1,7 @@
 import React__default from 'react';
 import { Flex, Text, Button } from '@aws-amplify/ui-react';
 import { AlertIcon } from '@aws-amplify/ui-react/internal';
-import '../service/machine/index.mjs';
+import '../service/machine/machine.mjs';
 import '../service/types/liveness.mjs';
 import { LivenessErrorState } from '../service/types/error.mjs';
 import '@tensorflow/tfjs-core';
package/dist/esm/components/FaceLivenessDetector/shared/Hint.mjs
CHANGED
@@ -1,6 +1,6 @@
 import * as React from 'react';
 import { VisuallyHidden, View } from '@aws-amplify/ui-react';
-import '../service/machine/index.mjs';
+import '../service/machine/machine.mjs';
 import { FaceMatchState, IlluminationState } from '../service/types/liveness.mjs';
 import '@tensorflow/tfjs-core';
 import '@tensorflow-models/face-detection';
@@ -52,7 +52,6 @@ const Hint = ({ hintDisplayText }) => {
     [FaceMatchState.CANT_IDENTIFY]: hintDisplayText.hintCanNotIdentifyText,
     [FaceMatchState.FACE_IDENTIFIED]: hintDisplayText.hintTooFarText,
     [FaceMatchState.TOO_MANY]: hintDisplayText.hintTooManyFacesText,
-    [FaceMatchState.TOO_CLOSE]: hintDisplayText.hintTooCloseText,
     [FaceMatchState.TOO_FAR]: hintDisplayText.hintTooFarText,
     [FaceMatchState.MATCHED]: hintDisplayText.hintHoldFaceForFreshnessText,
     [FaceMatchState.OFF_CENTER]: hintDisplayText.hintFaceOffCenterText,
@@ -98,13 +97,11 @@ const Hint = ({ hintDisplayText }) => {
         return React.createElement(DefaultToast, { text: hintDisplayText.hintHoldFaceForFreshnessText });
     }
     if (isRecording && !isFlashingFreshness) {
-        // During face matching, we want to only show the
-        // TOO_FAR texts.
-        // the TOO_CLOSE text, but for FACE_IDENTIFED, CANT_IDENTIFY, TOO_MANY
+        // During face matching, we want to only show the
+        // TOO_FAR texts. For FACE_IDENTIFIED, CANT_IDENTIFY, TOO_MANY
         // we are defaulting to the TOO_FAR text (for now).
         let resultHintString = FaceMatchStateStringMap[FaceMatchState.TOO_FAR];
-        if (faceMatchState === FaceMatchState.TOO_CLOSE ||
-            faceMatchState === FaceMatchState.MATCHED) {
+        if (faceMatchState === FaceMatchState.MATCHED) {
             resultHintString = FaceMatchStateStringMap[faceMatchState];
         }
         // If the face is outside the oval set the aria-label to a string about centering face in oval
@@ -118,7 +115,7 @@ const Hint = ({ hintDisplayText }) => {
         faceMatchPercentage > 50) {
         a11yHintString = hintDisplayText.hintMatchIndicatorText;
     }
-    return (React.createElement(Toast, { size: "large", variation:
+    return (React.createElement(Toast, { size: "large", variation: 'primary' },
         React.createElement(VisuallyHidden, { "aria-live": "assertive" }, a11yHintString),
         React.createElement(View, { "aria-label": a11yHintString }, resultHintString)));
 }
package/dist/esm/version.mjs
CHANGED
package/dist/index.js
CHANGED
@@ -75,7 +75,6 @@ var FaceMatchState;
 (function (FaceMatchState) {
     FaceMatchState["MATCHED"] = "MATCHED";
     FaceMatchState["TOO_FAR"] = "TOO FAR";
-    FaceMatchState["TOO_CLOSE"] = "TOO CLOSE";
     FaceMatchState["CANT_IDENTIFY"] = "CANNOT IDENTIFY";
     FaceMatchState["FACE_IDENTIFIED"] = "ONE FACE IDENTIFIED";
     FaceMatchState["TOO_MANY"] = "TOO MANY FACES";
@@ -99,12 +98,20 @@ const LivenessErrorState = {
 
 // Face distance is calculated as pupilDistance / ovalWidth.
 // The further away you are from the camera the distance between your pupils will decrease, thus lowering the threshold values.
-// These
+// These FACE_DISTANCE_THRESHOLD values are determined by the science team and should only be changed with their approval.
 // We want to ensure at the start of a check that the user's pupilDistance/ovalWidth is below FACE_DISTANCE_THRESHOLD to ensure that they are starting
 // a certain distance away from the camera.
 const FACE_DISTANCE_THRESHOLD = 0.32;
 const REDUCED_THRESHOLD = 0.4;
 const REDUCED_THRESHOLD_MOBILE = 0.37;
+// Constants from science team to determine ocular distance (space between eyes)
+const PUPIL_DISTANCE_WEIGHT = 2.0;
+const FACE_HEIGHT_WEIGHT = 1.8;
+// Constants from science team to find face match percentage
+const FACE_MATCH_RANGE_MIN = 0;
+const FACE_MATCH_RANGE_MAX = 1;
+const FACE_MATCH_WEIGHT_MIN = 0.25;
+const FACE_MATCH_WEIGHT_MAX = 0.75;
 const WS_CLOSURE_CODE = {
     SUCCESS_CODE: 1000,
     DEFAULT_ERROR_CODE: 4000,
@@ -292,87 +299,33 @@ function getPupilDistanceAndFaceHeight(face) {
     const faceHeight = Math.sqrt((eyeCenter[0] - mouth[0]) ** 2 + (eyeCenter[1] - mouth[1]) ** 2);
     return { pupilDistance, faceHeight };
 }
-function generateBboxFromLandmarks(face, oval) {
-    const { leftEye, rightEye, nose, leftEar, rightEar
+function generateBboxFromLandmarks(face, oval, frameHeight) {
+    const { leftEye, rightEye, nose, leftEar, rightEar } = face;
     const { height: ovalHeight, centerY } = oval;
     const ovalTop = centerY - ovalHeight / 2;
     const eyeCenter = [];
     eyeCenter[0] = (leftEye[0] + rightEye[0]) / 2;
     eyeCenter[1] = (leftEye[1] + rightEye[1]) / 2;
     const { pupilDistance: pd, faceHeight: fh } = getPupilDistanceAndFaceHeight(face);
-    const
-
-    const oh = 1.618 * ow;
-    let cx;
+    const ocularWidth = (PUPIL_DISTANCE_WEIGHT * pd + FACE_HEIGHT_WEIGHT * fh) / 2;
+    let centerFaceX, centerFaceY;
     if (eyeCenter[1] <= (ovalTop + ovalHeight) / 2) {
-
+        centerFaceX = (eyeCenter[0] + nose[0]) / 2;
+        centerFaceY = (eyeCenter[1] + nose[1]) / 2;
     }
     else {
-
+        // when face tilts down
+        centerFaceX = eyeCenter[0];
+        centerFaceY = eyeCenter[1];
     }
-    const
-    const
-    const
-    const
+    const faceWidth = ocularWidth;
+    const faceHeight = 1.68 * faceWidth;
+    const top = Math.max(centerFaceY - faceHeight / 2, 0);
+    const bottom = Math.min(centerFaceY + faceHeight / 2, frameHeight);
+    const left = Math.min(centerFaceX - ocularWidth / 2, rightEar[0]);
+    const right = Math.max(centerFaceX + ocularWidth / 2, leftEar[0]);
     return { bottom, left, right, top };
 }
-/**
- * Returns the state of the provided face with respect to the provided liveness oval.
- */
-// eslint-disable-next-line max-params
-function getFaceMatchStateInLivenessOval(face, ovalDetails, initialFaceIntersection, sessionInformation) {
-    let faceMatchState;
-    const challengeConfig = sessionInformation?.Challenge?.FaceMovementAndLightChallenge
-        ?.ChallengeConfig;
-    if (!challengeConfig ||
-        !challengeConfig.OvalIouThreshold ||
-        !challengeConfig.OvalIouHeightThreshold ||
-        !challengeConfig.OvalIouWidthThreshold ||
-        !challengeConfig.FaceIouHeightThreshold ||
-        !challengeConfig.FaceIouWidthThreshold) {
-        throw new Error('Challenge information not returned from session information.');
-    }
-    const { OvalIouThreshold, OvalIouHeightThreshold, OvalIouWidthThreshold, FaceIouHeightThreshold, FaceIouWidthThreshold, } = challengeConfig;
-    const faceBoundingBox = generateBboxFromLandmarks(face, ovalDetails);
-    const minFaceX = faceBoundingBox.left;
-    const maxFaceX = faceBoundingBox.right;
-    const minFaceY = faceBoundingBox.top;
-    const maxFaceY = faceBoundingBox.bottom;
-    const { ovalBoundingBox, minOvalX, minOvalY, maxOvalX, maxOvalY } = getOvalBoundingBox(ovalDetails);
-    const intersection = getIntersectionOverUnion(faceBoundingBox, ovalBoundingBox);
-    const intersectionThreshold = OvalIouThreshold;
-    const ovalMatchWidthThreshold = ovalDetails.width * OvalIouWidthThreshold;
-    const ovalMatchHeightThreshold = ovalDetails.height * OvalIouHeightThreshold;
-    const faceDetectionWidthThreshold = ovalDetails.width * FaceIouWidthThreshold;
-    const faceDetectionHeightThreshold = ovalDetails.height * FaceIouHeightThreshold;
-    /** From Science
-     * p=max(min(1,0.75∗(si−s0)/(st−s0)+0.25)),0)
-     */
-    const faceMatchPercentage = Math.max(Math.min(1, (0.75 * (intersection - initialFaceIntersection)) /
-        (intersectionThreshold - initialFaceIntersection) +
-        0.25), 0) * 100;
-    const faceIsOutsideOvalToTheLeft = minOvalX > minFaceX && maxOvalX > maxFaceX;
-    const faceIsOutsideOvalToTheRight = minFaceX > minOvalX && maxFaceX > maxOvalX;
-    if (intersection > intersectionThreshold &&
-        Math.abs(minOvalX - minFaceX) < ovalMatchWidthThreshold &&
-        Math.abs(maxOvalX - maxFaceX) < ovalMatchWidthThreshold &&
-        Math.abs(maxOvalY - maxFaceY) < ovalMatchHeightThreshold) {
-        faceMatchState = FaceMatchState.MATCHED;
-    }
-    else if (faceIsOutsideOvalToTheLeft || faceIsOutsideOvalToTheRight) {
-        faceMatchState = FaceMatchState.OFF_CENTER;
-    }
-    else if (minOvalY - minFaceY > faceDetectionHeightThreshold ||
-        maxFaceY - maxOvalY > faceDetectionHeightThreshold ||
-        (minOvalX - minFaceX > faceDetectionWidthThreshold &&
-            maxFaceX - maxOvalX > faceDetectionWidthThreshold)) {
-        faceMatchState = FaceMatchState.TOO_CLOSE;
-    }
-    else {
-        faceMatchState = FaceMatchState.TOO_FAR;
-    }
-    return { faceMatchState, faceMatchPercentage };
-}
 /**
  * Returns the illumination state in the provided video frame.
  */
@@ -548,8 +501,10 @@ async function isFaceDistanceBelowThreshold({ faceDetector, videoEl, ovalDetails
     detectedFace = detectedFaces[0];
     const { width } = ovalDetails;
     const { pupilDistance, faceHeight } = getPupilDistanceAndFaceHeight(detectedFace);
-    const
-
+    const calibratedPupilDistance = (PUPIL_DISTANCE_WEIGHT * pupilDistance +
+        FACE_HEIGHT_WEIGHT * faceHeight) /
+        2 /
+        PUPIL_DISTANCE_WEIGHT;
     if (width) {
         isDistanceBelowThreshold =
             calibratedPupilDistance / width <
@@ -782,7 +737,61 @@ class VideoRecorder {
     }
 }
 
-const VERSION = '3.0.15';
+/**
+ * Returns the state of the provided face with respect to the provided liveness oval.
+ */
+function getFaceMatchStateInLivenessOval({ face, ovalDetails, initialFaceIntersection, sessionInformation, frameHeight, }) {
+    let faceMatchState;
+    const challengeConfig = sessionInformation?.Challenge?.FaceMovementAndLightChallenge
+        ?.ChallengeConfig;
+    if (!challengeConfig ||
+        !challengeConfig.OvalIouThreshold ||
+        !challengeConfig.OvalIouHeightThreshold ||
+        !challengeConfig.OvalIouWidthThreshold ||
+        !challengeConfig.FaceIouHeightThreshold ||
+        !challengeConfig.FaceIouWidthThreshold) {
+        throw new Error('Challenge information not returned from session information.');
+    }
+    const { OvalIouThreshold, FaceIouHeightThreshold, FaceIouWidthThreshold } = challengeConfig;
+    const faceBoundingBox = generateBboxFromLandmarks(face, ovalDetails, frameHeight);
+    const minFaceX = faceBoundingBox.left;
+    const maxFaceX = faceBoundingBox.right;
+    const minFaceY = faceBoundingBox.top;
+    const maxFaceY = faceBoundingBox.bottom;
+    const { ovalBoundingBox, minOvalX, minOvalY, maxOvalX, maxOvalY } = getOvalBoundingBox(ovalDetails);
+    const intersection = getIntersectionOverUnion(faceBoundingBox, ovalBoundingBox);
+    const intersectionThreshold = OvalIouThreshold;
+    const faceDetectionWidthThreshold = ovalDetails.width * FaceIouWidthThreshold;
+    const faceDetectionHeightThreshold = ovalDetails.height * FaceIouHeightThreshold;
+    /** From Science
+     * p=max(min(1,0.75∗(si−s0)/(st−s0)+0.25)),0)
+     */
+    const faceMatchPercentage = Math.max(Math.min(FACE_MATCH_RANGE_MAX, (FACE_MATCH_WEIGHT_MAX * (intersection - initialFaceIntersection)) /
+        (intersectionThreshold - initialFaceIntersection) +
+        FACE_MATCH_WEIGHT_MIN), FACE_MATCH_RANGE_MIN) * 100;
+    const isFaceOutsideOvalToTheLeft = minOvalX > minFaceX && maxOvalX > maxFaceX;
+    const isFaceOutsideOvalToTheRight = minFaceX > minOvalX && maxFaceX > maxOvalX;
+    const isFaceMatched = intersection > intersectionThreshold;
+    const isFaceMatchedClosely = minOvalY - minFaceY > faceDetectionHeightThreshold ||
+        maxFaceY - maxOvalY > faceDetectionHeightThreshold ||
+        (minOvalX - minFaceX > faceDetectionWidthThreshold &&
+            maxFaceX - maxOvalX > faceDetectionWidthThreshold);
+    if (isFaceMatched) {
+        faceMatchState = FaceMatchState.MATCHED;
+    }
+    else if (isFaceOutsideOvalToTheLeft || isFaceOutsideOvalToTheRight) {
+        faceMatchState = FaceMatchState.OFF_CENTER;
+    }
+    else if (isFaceMatchedClosely) {
+        faceMatchState = FaceMatchState.MATCHED;
+    }
+    else {
+        faceMatchState = FaceMatchState.TOO_FAR;
+    }
+    return { faceMatchState, faceMatchPercentage };
+}
+
+const VERSION = '3.0.16';
 
 const BASE_USER_AGENT = `ui-react-liveness/${VERSION}`;
 const getLivenessUserAgent = () => {
@@ -1340,7 +1349,6 @@ const STATIC_VIDEO_CONSTRAINTS = {
 
 const CAMERA_ID_KEY = 'AmplifyLivenessCameraId';
 const DEFAULT_FACE_FIT_TIMEOUT = 7000;
-const MIN_FACE_MATCH_TIME = 1000;
 let responseStream;
 const responseStreamActor = async (callback) => {
     try {
@@ -1430,7 +1438,6 @@ const livenessMachine = xstate.createMachine({
             currentDetectedFace: undefined,
             startFace: undefined,
             endFace: undefined,
-            initialFaceMatchTime: undefined,
         },
         freshnessColorAssociatedParams: {
             freshnessColorEl: undefined,
@@ -1615,6 +1622,8 @@ const livenessMachine = xstate.createMachine({
                 100: { target: 'checkRecordingStarted' },
             },
         },
+        // Evaluates face match and moves to checkMatch
+        // which continually checks for match until either timeout or face match
         ovalMatching: {
             entry: 'cancelOvalDrawingTimeout',
             invoke: {
@@ -1625,29 +1634,32 @@ const livenessMachine = xstate.createMachine({
             },
         },
     },
+    // If `hasFaceMatchedInOval` is true, then move to `delayBeforeFlash`, which pauses
+    // for one second to show "Hold still" text before moving to `flashFreshnessColors`.
+    // If not, move back to ovalMatching and re-evaluate match state
    checkMatch: {
        after: {
            0: {
-                target: 'flashFreshnessColors',
-                cond: 'hasFaceMatchedInOvalWithMinTime',
+                target: 'delayBeforeFlash',
+                cond: 'hasFaceMatchedInOval',
                actions: [
+                    'setFaceMatchTimeAndStartFace',
                    'updateEndFaceMatch',
                    'setupFlashFreshnessColors',
                    'cancelOvalMatchTimeout',
                    'cancelOvalDrawingTimeout',
                ],
            },
-            0.1: {
-                target: 'ovalMatching',
-                cond: 'hasFaceMatchedInOval',
-                actions: 'setFaceMatchTimeAndStartFace',
-            },
            1: {
                target: 'ovalMatching',
-                cond: 'hasNotFaceMatchedInOval',
            },
        },
    },
+    delayBeforeFlash: {
+        after: {
+            1000: 'flashFreshnessColors',
+        },
+    },
    flashFreshnessColors: {
        invoke: {
            src: 'flashColors',
@@ -1912,10 +1924,6 @@ const livenessMachine = xstate.createMachine({
             startFace: context.faceMatchAssociatedParams.startFace === undefined
                 ? context.faceMatchAssociatedParams.currentDetectedFace
                 : context.faceMatchAssociatedParams.startFace,
-            initialFaceMatchTime: context.faceMatchAssociatedParams.initialFaceMatchTime ===
-                undefined
-                ? Date.now()
-                : context.faceMatchAssociatedParams.initialFaceMatchTime,
         };
     },
 }),
@@ -2087,21 +2095,10 @@ const livenessMachine = xstate.createMachine({
     },
     guards: {
         shouldTimeoutOnFailedAttempts: (context) => context.failedAttempts >= context.maxFailedAttempts,
-        hasFaceMatchedInOvalWithMinTime: (context) => {
-            const { faceMatchState, initialFaceMatchTime } = context.faceMatchAssociatedParams;
-            const timeSinceInitialFaceMatch = Date.now() - initialFaceMatchTime;
-            const hasMatched = faceMatchState === FaceMatchState.MATCHED &&
-                timeSinceInitialFaceMatch >= MIN_FACE_MATCH_TIME;
-            return hasMatched;
-        },
        hasFaceMatchedInOval: (context) => {
            return (context.faceMatchAssociatedParams.faceMatchState ===
                FaceMatchState.MATCHED);
        },
-        hasNotFaceMatchedInOval: (context) => {
-            return (context.faceMatchAssociatedParams.faceMatchState !==
-                FaceMatchState.MATCHED);
-        },
        hasSingleFace: (context) => {
            return (context.faceMatchAssociatedParams.faceMatchState ===
                FaceMatchState.FACE_IDENTIFIED);
@@ -2318,7 +2315,7 @@ const livenessMachine = xstate.createMachine({
         videoWidth: videoEl.width,
     });
     // renormalize initial face
-    const renormalizedFace = generateBboxFromLandmarks(initialFace, ovalDetails);
+    const renormalizedFace = generateBboxFromLandmarks(initialFace, ovalDetails, videoEl.videoHeight);
     initialFace.top = renormalizedFace.top;
     initialFace.left = renormalizedFace.left;
     initialFace.height = renormalizedFace.bottom - renormalizedFace.top;
@@ -2347,7 +2344,7 @@ const livenessMachine = xstate.createMachine({
     let faceMatchPercentage = 0;
     let detectedFace;
     let illuminationState;
-    const initialFaceBoundingBox = generateBboxFromLandmarks(initialFace, ovalDetails);
+    const initialFaceBoundingBox = generateBboxFromLandmarks(initialFace, ovalDetails, videoEl.videoHeight);
     const { ovalBoundingBox } = getOvalBoundingBox(ovalDetails);
     const initialFaceIntersection = getIntersectionOverUnion(initialFaceBoundingBox, ovalBoundingBox);
     switch (detectedFaces.length) {
@@ -2360,7 +2357,13 @@ const livenessMachine = xstate.createMachine({
         case 1: {
             //exactly one face detected, match face with oval;
             detectedFace = detectedFaces[0];
-            const { faceMatchState: faceMatchStateInLivenessOval, faceMatchPercentage: faceMatchPercentageInLivenessOval, } = getFaceMatchStateInLivenessOval(
+            const { faceMatchState: faceMatchStateInLivenessOval, faceMatchPercentage: faceMatchPercentageInLivenessOval, } = getFaceMatchStateInLivenessOval({
+                face: detectedFace,
+                ovalDetails: ovalDetails,
+                initialFaceIntersection,
+                sessionInformation: serverSessionInformation,
+                frameHeight: videoEl.videoHeight,
+            });
             faceMatchState = faceMatchStateInLivenessOval;
             faceMatchPercentage = faceMatchPercentageInLivenessOval;
             break;
@@ -2615,7 +2618,6 @@ const Hint = ({ hintDisplayText }) => {
     [FaceMatchState.CANT_IDENTIFY]: hintDisplayText.hintCanNotIdentifyText,
     [FaceMatchState.FACE_IDENTIFIED]: hintDisplayText.hintTooFarText,
     [FaceMatchState.TOO_MANY]: hintDisplayText.hintTooManyFacesText,
-    [FaceMatchState.TOO_CLOSE]: hintDisplayText.hintTooCloseText,
     [FaceMatchState.TOO_FAR]: hintDisplayText.hintTooFarText,
     [FaceMatchState.MATCHED]: hintDisplayText.hintHoldFaceForFreshnessText,
     [FaceMatchState.OFF_CENTER]: hintDisplayText.hintFaceOffCenterText,
@@ -2661,13 +2663,11 @@ const Hint = ({ hintDisplayText }) => {
         return React__namespace.createElement(DefaultToast, { text: hintDisplayText.hintHoldFaceForFreshnessText });
     }
     if (isRecording && !isFlashingFreshness) {
-        // During face matching, we want to only show the
-        // TOO_FAR texts.
-        // the TOO_CLOSE text, but for FACE_IDENTIFED, CANT_IDENTIFY, TOO_MANY
+        // During face matching, we want to only show the
+        // TOO_FAR texts. For FACE_IDENTIFIED, CANT_IDENTIFY, TOO_MANY
         // we are defaulting to the TOO_FAR text (for now).
         let resultHintString = FaceMatchStateStringMap[FaceMatchState.TOO_FAR];
-        if (faceMatchState === FaceMatchState.TOO_CLOSE ||
-            faceMatchState === FaceMatchState.MATCHED) {
+        if (faceMatchState === FaceMatchState.MATCHED) {
             resultHintString = FaceMatchStateStringMap[faceMatchState];
         }
         // If the face is outside the oval set the aria-label to a string about centering face in oval
@@ -2681,7 +2681,7 @@ const Hint = ({ hintDisplayText }) => {
         faceMatchPercentage > 50) {
         a11yHintString = hintDisplayText.hintMatchIndicatorText;
     }
-    return (React__namespace.createElement(Toast, { size: "large", variation:
+    return (React__namespace.createElement(Toast, { size: "large", variation: 'primary' },
         React__namespace.createElement(uiReact.VisuallyHidden, { "aria-live": "assertive" }, a11yHintString),
         React__namespace.createElement(uiReact.View, { "aria-label": a11yHintString }, resultHintString)));
 }
package/dist/types/components/FaceLivenessDetector/service/machine/index.d.ts
CHANGED
@@ -1,5 +1 @@
-import { LivenessContext, LivenessEvent } from '../types';
-export declare const livenessMachine: import("xstate").StateMachine<LivenessContext, any, LivenessEvent, {
-    value: any;
-    context: LivenessContext;
-}, import("xstate").BaseActionObject, import("xstate").ServiceMap, import("xstate").ResolveTypegenMeta<import("xstate").TypegenDisabled, LivenessEvent, import("xstate").BaseActionObject, import("xstate").ServiceMap>>;
+export { livenessMachine } from './machine';
package/dist/types/components/FaceLivenessDetector/service/machine/machine.d.ts
ADDED
@@ -0,0 +1,5 @@
+import { LivenessContext, LivenessEvent } from '../types';
+export declare const livenessMachine: import("xstate").StateMachine<LivenessContext, any, LivenessEvent, {
+    value: any;
+    context: LivenessContext;
+}, import("xstate").BaseActionObject, import("xstate").ServiceMap, import("xstate").ResolveTypegenMeta<import("xstate").TypegenDisabled, LivenessEvent, import("xstate").BaseActionObject, import("xstate").ServiceMap>>;
package/dist/types/components/FaceLivenessDetector/service/types/liveness.d.ts
CHANGED
@@ -98,7 +98,6 @@ export declare enum IlluminationState {
 export declare enum FaceMatchState {
     MATCHED = "MATCHED",
     TOO_FAR = "TOO FAR",
-    TOO_CLOSE = "TOO CLOSE",
     CANT_IDENTIFY = "CANNOT IDENTIFY",
     FACE_IDENTIFIED = "ONE FACE IDENTIFIED",
     TOO_MANY = "TOO MANY FACES",
package/dist/types/components/FaceLivenessDetector/service/utils/constants.d.ts
CHANGED
@@ -1,6 +1,12 @@
 export declare const FACE_DISTANCE_THRESHOLD = 0.32;
 export declare const REDUCED_THRESHOLD = 0.4;
 export declare const REDUCED_THRESHOLD_MOBILE = 0.37;
+export declare const PUPIL_DISTANCE_WEIGHT = 2;
+export declare const FACE_HEIGHT_WEIGHT = 1.8;
+export declare const FACE_MATCH_RANGE_MIN = 0;
+export declare const FACE_MATCH_RANGE_MAX = 1;
+export declare const FACE_MATCH_WEIGHT_MIN = 0.25;
+export declare const FACE_MATCH_WEIGHT_MAX = 0.75;
 export declare const WS_CLOSURE_CODE: {
     SUCCESS_CODE: number;
     DEFAULT_ERROR_CODE: number;
package/dist/types/components/FaceLivenessDetector/service/utils/getFaceMatchStateInLivenessOval.d.ts
ADDED
@@ -0,0 +1,17 @@
+import { LivenessOvalDetails, Face, FaceMatchState } from '../types';
+import { SessionInformation } from '@aws-sdk/client-rekognitionstreaming';
+interface MatchStateInOvalParams {
+    face: Face;
+    ovalDetails: LivenessOvalDetails;
+    initialFaceIntersection: number;
+    sessionInformation: SessionInformation;
+    frameHeight: number;
+}
+/**
+ * Returns the state of the provided face with respect to the provided liveness oval.
+ */
+export declare function getFaceMatchStateInLivenessOval({ face, ovalDetails, initialFaceIntersection, sessionInformation, frameHeight, }: MatchStateInOvalParams): {
+    faceMatchState: FaceMatchState;
+    faceMatchPercentage: number;
+};
+export {};
package/dist/types/components/FaceLivenessDetector/service/utils/liveness.d.ts
CHANGED
@@ -50,14 +50,7 @@ export declare function drawStaticOval(canvasEl: HTMLCanvasElement, videoEl: HTM
 export declare function clearOvalCanvas({ canvas, }: {
     canvas: HTMLCanvasElement;
 }): void;
-export declare function generateBboxFromLandmarks(face: Face, oval: LivenessOvalDetails): BoundingBox;
-/**
- * Returns the state of the provided face with respect to the provided liveness oval.
- */
-export declare function getFaceMatchStateInLivenessOval(face: Face, ovalDetails: LivenessOvalDetails, initialFaceIntersection: number, sessionInformation: SessionInformation): {
-    faceMatchState: FaceMatchState;
-    faceMatchPercentage: number;
-};
+export declare function generateBboxFromLandmarks(face: Face, oval: LivenessOvalDetails, frameHeight: number): BoundingBox;
 /**
  * Returns the illumination state in the provided video frame.
  */
package/dist/types/version.d.ts
CHANGED
@@ -1 +1 @@
-export declare const VERSION = "3.0.15";
+export declare const VERSION = "3.0.16";