@aws-amplify/ui-react-liveness 2.0.11 → 3.0.0
This diff reflects the changes between publicly released versions of the package as they appear in the supported public registries, and is provided for informational purposes only.
- package/dist/esm/components/FaceLivenessDetector/FaceLivenessDetector.mjs +17 -1
- package/dist/esm/components/FaceLivenessDetector/FaceLivenessDetectorCore.mjs +42 -1
- package/dist/esm/components/FaceLivenessDetector/LivenessCheck/LivenessCameraModule.mjs +199 -1
- package/dist/esm/components/FaceLivenessDetector/LivenessCheck/LivenessCheck.mjs +97 -1
- package/dist/esm/components/FaceLivenessDetector/displayText.mjs +50 -1
- package/dist/esm/components/FaceLivenessDetector/hooks/useLivenessActor.mjs +13 -1
- package/dist/esm/components/FaceLivenessDetector/hooks/useLivenessSelector.mjs +12 -1
- package/dist/esm/components/FaceLivenessDetector/hooks/useMediaStreamInVideo.mjs +38 -1
- package/dist/esm/components/FaceLivenessDetector/providers/FaceLivenessDetectorProvider.mjs +15 -1
- package/dist/esm/components/FaceLivenessDetector/service/machine/index.mjs +1130 -1
- package/dist/esm/components/FaceLivenessDetector/service/types/error.mjs +16 -1
- package/dist/esm/components/FaceLivenessDetector/service/types/faceDetection.mjs +15 -1
- package/dist/esm/components/FaceLivenessDetector/service/types/liveness.mjs +23 -1
- package/dist/esm/components/FaceLivenessDetector/service/utils/CustomWebSocketFetchHandler.mjs +200 -1
- package/dist/esm/components/FaceLivenessDetector/service/utils/blazefaceFaceDetection.mjs +102 -1
- package/dist/esm/components/FaceLivenessDetector/service/utils/constants.mjs +18 -1
- package/dist/esm/components/FaceLivenessDetector/service/utils/eventUtils.mjs +30 -1
- package/dist/esm/components/FaceLivenessDetector/service/utils/freshnessColorDisplay.mjs +131 -1
- package/dist/esm/components/FaceLivenessDetector/service/utils/liveness.mjs +462 -1
- package/dist/esm/components/FaceLivenessDetector/service/utils/streamProvider.mjs +144 -1
- package/dist/esm/components/FaceLivenessDetector/service/utils/support.mjs +14 -1
- package/dist/esm/components/FaceLivenessDetector/service/utils/videoRecorder.mjs +98 -1
- package/dist/esm/components/FaceLivenessDetector/shared/CancelButton.mjs +24 -1
- package/dist/esm/components/FaceLivenessDetector/shared/DefaultStartScreenComponents.mjs +41 -1
- package/dist/esm/components/FaceLivenessDetector/shared/FaceLivenessErrorModal.mjs +88 -1
- package/dist/esm/components/FaceLivenessDetector/shared/Hint.mjs +114 -1
- package/dist/esm/components/FaceLivenessDetector/shared/LandscapeErrorModal.mjs +30 -1
- package/dist/esm/components/FaceLivenessDetector/shared/LivenessIconWithPopover.mjs +37 -1
- package/dist/esm/components/FaceLivenessDetector/shared/MatchIndicator.mjs +24 -1
- package/dist/esm/components/FaceLivenessDetector/shared/Overlay.mjs +9 -1
- package/dist/esm/components/FaceLivenessDetector/shared/RecordingIcon.mjs +13 -1
- package/dist/esm/components/FaceLivenessDetector/shared/Toast.mjs +12 -1
- package/dist/esm/components/FaceLivenessDetector/types/classNames.mjs +54 -1
- package/dist/esm/components/FaceLivenessDetector/utils/device.mjs +24 -1
- package/dist/esm/components/FaceLivenessDetector/utils/getDisplayText.mjs +78 -1
- package/dist/esm/components/FaceLivenessDetector/utils/helpers.mjs +14 -0
- package/dist/esm/components/FaceLivenessDetector/utils/platform.mjs +8 -1
- package/dist/esm/index.mjs +2 -1
- package/dist/esm/version.mjs +3 -1
- package/dist/index.js +3208 -1
- package/dist/styles.css +343 -680
- package/dist/types/components/FaceLivenessDetector/FaceLivenessDetector.d.ts +1 -1
- package/dist/types/components/FaceLivenessDetector/FaceLivenessDetectorCore.d.ts +1 -3
- package/dist/types/components/FaceLivenessDetector/LivenessCheck/LivenessCameraModule.d.ts +7 -3
- package/dist/types/components/FaceLivenessDetector/LivenessCheck/LivenessCheck.d.ts +5 -3
- package/dist/types/components/FaceLivenessDetector/displayText.d.ts +3 -10
- package/dist/types/components/FaceLivenessDetector/service/machine/index.d.ts +1 -1
- package/dist/types/components/FaceLivenessDetector/service/types/faceDetection.d.ts +2 -0
- package/dist/types/components/FaceLivenessDetector/service/types/liveness.d.ts +1 -1
- package/dist/types/components/FaceLivenessDetector/service/types/machine.d.ts +3 -1
- package/dist/types/components/FaceLivenessDetector/service/utils/blazefaceFaceDetection.d.ts +4 -3
- package/dist/types/components/FaceLivenessDetector/service/utils/liveness.d.ts +5 -2
- package/dist/types/components/FaceLivenessDetector/shared/DefaultStartScreenComponents.d.ts +9 -15
- package/dist/types/components/FaceLivenessDetector/shared/Overlay.d.ts +2 -5
- package/dist/types/components/FaceLivenessDetector/shared/Toast.d.ts +1 -0
- package/dist/types/components/FaceLivenessDetector/types/classNames.d.ts +3 -0
- package/dist/types/version.d.ts +1 -1
- package/package.json +16 -37
- package/dist/esm/components/FaceLivenessDetector/StartLiveness/StartLiveness.mjs +0 -1
- package/dist/esm/components/FaceLivenessDetector/StartLiveness/helpers.mjs +0 -1
- package/dist/esm/components/FaceLivenessDetector/shared/GoodFitIllustration.mjs +0 -1
- package/dist/esm/components/FaceLivenessDetector/shared/StartScreenFigure.mjs +0 -1
- package/dist/esm/components/FaceLivenessDetector/shared/TooFarIllustration.mjs +0 -1
- package/dist/types/components/FaceLivenessDetector/StartLiveness/StartLiveness.d.ts +0 -9
- package/dist/types/components/FaceLivenessDetector/StartLiveness/index.d.ts +0 -1
- /package/dist/types/components/FaceLivenessDetector/{StartLiveness → utils}/helpers.d.ts +0 -0
package/dist/esm/components/FaceLivenessDetector/service/utils/liveness.mjs
@@ -1 +1,462 @@
-import{
+import { FaceMatchState, IlluminationState } from '../types/liveness.mjs';
+import { LivenessErrorState } from '../types/error.mjs';
+import { FACE_DISTANCE_THRESHOLD, REDUCED_THRESHOLD_MOBILE, REDUCED_THRESHOLD } from './constants.mjs';
+
+/* eslint-disable */
+/**
+ * Returns the random number between min and max
+ * seeded with the provided random seed.
+ */
+function getScaledValueFromRandomSeed(randomSeed, min, max) {
+    return randomSeed * (max - min) + min;
+}
+/**
+ * Returns the bounding box details from an oval
+ */
+function getOvalBoundingBox(ovalDetails) {
+    const minOvalX = ovalDetails.flippedCenterX - ovalDetails.width / 2;
+    const maxOvalX = ovalDetails.flippedCenterX + ovalDetails.width / 2;
+    const minOvalY = ovalDetails.centerY - ovalDetails.height / 2;
+    const maxOvalY = ovalDetails.centerY + ovalDetails.height / 2;
+    const ovalBoundingBox = {
+        left: minOvalX,
+        top: minOvalY,
+        right: maxOvalX,
+        bottom: maxOvalY,
+    };
+    return { ovalBoundingBox, minOvalX, maxOvalX, minOvalY, maxOvalY };
+}
+/**
+ * Returns the ratio of intersection and union of two bounding boxes.
+ */
+function getIntersectionOverUnion(box1, box2) {
+    const xA = Math.max(box1.left, box2.left);
+    const yA = Math.max(box1.top, box2.top);
+    const xB = Math.min(box1.right, box2.right);
+    const yB = Math.min(box1.bottom, box2.bottom);
+    const intersectionArea = Math.abs(Math.max(0, xB - xA) * Math.max(0, yB - yA));
+    if (intersectionArea === 0) {
+        return 0;
+    }
+    const boxAArea = Math.abs((box1.right - box1.left) * (box1.bottom - box1.top));
+    const boxBArea = Math.abs((box2.right - box2.left) * (box2.bottom - box2.top));
+    return intersectionArea / (boxAArea + boxBArea - intersectionArea);
+}
+/**
+ * Returns the details of a randomly generated liveness oval
+ * from SDK
+ */
+function getOvalDetailsFromSessionInformation({ sessionInformation, videoWidth, }) {
+    const ovalParameters = sessionInformation?.Challenge?.FaceMovementAndLightChallenge
+        ?.OvalParameters;
+    if (!ovalParameters ||
+        !ovalParameters.CenterX ||
+        !ovalParameters.CenterY ||
+        !ovalParameters.Width ||
+        !ovalParameters.Height) {
+        throw new Error('Oval parameters not returned from session information.');
+    }
+    // We need to include a flippedCenterX for visualizing the oval on a flipped camera view
+    // The camera view we show the customer is flipped to making moving left and right more obvious
+    // The video stream sent to the liveness service is not flipped
+    return {
+        flippedCenterX: videoWidth - ovalParameters.CenterX,
+        centerX: ovalParameters.CenterX,
+        centerY: ovalParameters.CenterY,
+        width: ovalParameters.Width,
+        height: ovalParameters.Height,
+    };
+}
+/**
+ * Returns the details of a statically generated liveness oval based on the video dimensions
+ */
+function getStaticLivenessOvalDetails({ width, height, widthSeed = 1.0, centerXSeed = 0.5, centerYSeed = 0.5, ratioMultiplier = 0.8, }) {
+    const videoHeight = height;
+    let videoWidth = width;
+    const ovalRatio = widthSeed * ratioMultiplier;
+    const minOvalCenterX = Math.floor((7 * width) / 16);
+    const maxOvalCenterX = Math.floor((9 * width) / 16);
+    const minOvalCenterY = Math.floor((7 * height) / 16);
+    const maxOvalCenterY = Math.floor((9 * height) / 16);
+    const centerX = getScaledValueFromRandomSeed(centerXSeed, minOvalCenterX, maxOvalCenterX);
+    const centerY = getScaledValueFromRandomSeed(centerYSeed, minOvalCenterY, maxOvalCenterY);
+    if (width >= height) {
+        videoWidth = (3 / 4) * videoHeight;
+    }
+    const ovalWidth = ovalRatio * videoWidth;
+    const ovalHeight = 1.618 * ovalWidth;
+    return {
+        flippedCenterX: Math.floor(videoWidth - centerX),
+        centerX: Math.floor(centerX),
+        centerY: Math.floor(centerY),
+        width: Math.floor(ovalWidth),
+        height: Math.floor(ovalHeight),
+    };
+}
+function drawStaticOval(canvasEl, videoEl, videoMediaStream) {
+    const { width, height } = videoMediaStream.getTracks()[0].getSettings();
+    // Get width/height of video element so we can compute scaleFactor
+    // and set canvas width/height.
+    const { width: videoScaledWidth, height: videoScaledHeight } = videoEl.getBoundingClientRect();
+    canvasEl.width = Math.ceil(videoScaledWidth);
+    canvasEl.height = Math.ceil(videoScaledHeight);
+    const ovalDetails = getStaticLivenessOvalDetails({
+        width: width,
+        height: height,
+        ratioMultiplier: 0.5,
+    });
+    ovalDetails.flippedCenterX = width - ovalDetails.centerX;
+    // Compute scaleFactor which is how much our video element is scaled
+    // vs the intrinsic video resolution
+    const scaleFactor = videoScaledWidth / videoEl.videoWidth;
+    // Draw oval in canvas using ovalDetails and scaleFactor
+    drawLivenessOvalInCanvas({
+        canvas: canvasEl,
+        oval: ovalDetails,
+        scaleFactor,
+        videoEl: videoEl,
+        isStartScreen: true,
+    });
+}
+/**
+ * Draws the provided liveness oval on the canvas.
+ */
+function drawLivenessOvalInCanvas({ canvas, oval, scaleFactor, videoEl, isStartScreen, }) {
+    const { flippedCenterX, centerY, width, height } = oval;
+    const { width: canvasWidth, height: canvasHeight } = canvas.getBoundingClientRect();
+    const ctx = canvas.getContext('2d');
+    if (ctx) {
+        ctx.restore();
+        ctx.clearRect(0, 0, canvasWidth, canvasHeight);
+        // fill the canvas with a transparent rectangle
+        ctx.fillStyle = isStartScreen
+            ? getComputedStyle(canvas).getPropertyValue('--amplify-colors-background-primary')
+            : '#fff';
+        ctx.fillRect(0, 0, canvasWidth, canvasHeight);
+        // On mobile our canvas is the width/height of the full screen.
+        // We need to calculate horizontal and vertical translation to reposition
+        // our canvas drawing so the oval is still placed relative to the dimensions
+        // of the video element.
+        const baseDims = { width: videoEl.videoWidth, height: videoEl.videoHeight };
+        const translate = {
+            x: (canvasWidth - baseDims.width * scaleFactor) / 2,
+            y: (canvasHeight - baseDims.height * scaleFactor) / 2,
+        };
+        // Set the transform to scale
+        ctx.setTransform(scaleFactor, 0, 0, scaleFactor, translate.x, translate.y);
+        // draw the oval path
+        ctx.beginPath();
+        ctx.ellipse(flippedCenterX, centerY, width / 2, height / 2, 0, 0, 2 * Math.PI);
+        // add stroke to the oval path
+        ctx.strokeStyle = getComputedStyle(canvas).getPropertyValue('--amplify-colors-border-secondary');
+        ctx.lineWidth = 3;
+        ctx.stroke();
+        ctx.save();
+        ctx.clip();
+        // Restore default canvas transform matrix
+        ctx.setTransform(1, 0, 0, 1, 0, 0);
+        // clear the oval content from the rectangle
+        ctx.clearRect(0, 0, canvasWidth, canvasHeight);
+    }
+    else {
+        throw new Error('Cannot find Canvas.');
+    }
+}
+/**
+ * Returns the state of the provided face with respect to the provided liveness oval.
+ */
+function getFaceMatchStateInLivenessOval(face, ovalDetails, initialFaceIntersection, sessionInformation) {
+    let faceMatchState;
+    const challengeConfig = sessionInformation?.Challenge?.FaceMovementAndLightChallenge
+        ?.ChallengeConfig;
+    if (!challengeConfig ||
+        !challengeConfig.OvalIouThreshold ||
+        !challengeConfig.OvalIouHeightThreshold ||
+        !challengeConfig.OvalIouWidthThreshold ||
+        !challengeConfig.FaceIouHeightThreshold ||
+        !challengeConfig.FaceIouWidthThreshold) {
+        throw new Error('Challenge information not returned from session information.');
+    }
+    const { OvalIouThreshold, OvalIouHeightThreshold, OvalIouWidthThreshold, FaceIouHeightThreshold, FaceIouWidthThreshold, } = challengeConfig;
+    const faceBoundingBox = generateBboxFromLandmarks(face, ovalDetails);
+    const minFaceX = faceBoundingBox.left;
+    const maxFaceX = faceBoundingBox.right;
+    const minFaceY = faceBoundingBox.top;
+    const maxFaceY = faceBoundingBox.bottom;
+    const { ovalBoundingBox, minOvalX, minOvalY, maxOvalX, maxOvalY } = getOvalBoundingBox(ovalDetails);
+    const intersection = getIntersectionOverUnion(faceBoundingBox, ovalBoundingBox);
+    const intersectionThreshold = OvalIouThreshold;
+    const ovalMatchWidthThreshold = ovalDetails.width * OvalIouWidthThreshold;
+    const ovalMatchHeightThreshold = ovalDetails.height * OvalIouHeightThreshold;
+    const faceDetectionWidthThreshold = ovalDetails.width * FaceIouWidthThreshold;
+    const faceDetectionHeightThreshold = ovalDetails.height * FaceIouHeightThreshold;
+    /** From Science
+     * p=max(min(1,0.75∗(si−s0)/(st−s0)+0.25)),0)
+     */
+    const faceMatchPercentage = Math.max(Math.min(1, (0.75 * (intersection - initialFaceIntersection)) /
+        (intersectionThreshold - initialFaceIntersection) +
+        0.25), 0) * 100;
+    if (intersection > intersectionThreshold &&
+        Math.abs(minOvalX - minFaceX) < ovalMatchWidthThreshold &&
+        Math.abs(maxOvalX - maxFaceX) < ovalMatchWidthThreshold &&
+        Math.abs(maxOvalY - maxFaceY) < ovalMatchHeightThreshold) {
+        faceMatchState = FaceMatchState.MATCHED;
+    }
+    else if (minOvalY - minFaceY > faceDetectionHeightThreshold ||
+        maxFaceY - maxOvalY > faceDetectionHeightThreshold ||
+        (minOvalX - minFaceX > faceDetectionWidthThreshold &&
+            maxFaceX - maxOvalX > faceDetectionWidthThreshold)) {
+        faceMatchState = FaceMatchState.TOO_CLOSE;
+    }
+    else {
+        faceMatchState = FaceMatchState.TOO_FAR;
+    }
+    return { faceMatchState, faceMatchPercentage };
+}
+function getPupilDistanceAndFaceHeight(face) {
+    const { leftEye, rightEye, mouth } = face;
+    const eyeCenter = [];
+    eyeCenter[0] = (leftEye[0] + rightEye[0]) / 2;
+    eyeCenter[1] = (leftEye[1] + rightEye[1]) / 2;
+    const pupilDistance = Math.sqrt((leftEye[0] - rightEye[0]) ** 2 + (leftEye[1] - rightEye[1]) ** 2);
+    const faceHeight = Math.sqrt((eyeCenter[0] - mouth[0]) ** 2 + (eyeCenter[1] - mouth[1]) ** 2);
+    return { pupilDistance, faceHeight };
+}
+function generateBboxFromLandmarks(face, oval) {
+    const { leftEye, rightEye, nose, leftEar, rightEar, top: faceTop, height: faceHeight, } = face;
+    const { height: ovalHeight, centerY } = oval;
+    const ovalTop = centerY - ovalHeight / 2;
+    const eyeCenter = [];
+    eyeCenter[0] = (leftEye[0] + rightEye[0]) / 2;
+    eyeCenter[1] = (leftEye[1] + rightEye[1]) / 2;
+    const { pupilDistance: pd, faceHeight: fh } = getPupilDistanceAndFaceHeight(face);
+    const alpha = 2.0, gamma = 1.8;
+    const ow = (alpha * pd + gamma * fh) / 2;
+    const oh = 1.618 * ow;
+    let cx;
+    if (eyeCenter[1] <= (ovalTop + ovalHeight) / 2) {
+        cx = (eyeCenter[0] + nose[0]) / 2;
+    }
+    else {
+        cx = eyeCenter[0];
+    }
+    const faceBottom = faceTop + faceHeight;
+    const top = faceBottom - oh;
+    const left = Math.min(cx - ow / 2, rightEar[0]);
+    const right = Math.max(cx + ow / 2, leftEar[0]);
+    return {
+        left: left,
+        top: top,
+        right: right,
+        bottom: faceBottom,
+    };
+}
+/**
+ * Returns the illumination state in the provided video frame.
+ */
+function estimateIllumination(videoEl) {
+    const canvasEl = document.createElement('canvas');
+    canvasEl.width = videoEl.videoWidth;
+    canvasEl.height = videoEl.videoHeight;
+    const ctx = canvasEl.getContext('2d');
+    if (ctx) {
+        ctx.drawImage(videoEl, 0, 0, canvasEl.width, canvasEl.height);
+        const frame = ctx.getImageData(0, 0, canvasEl.width, canvasEl.height).data;
+        // histogram
+        const MAX_SCALE = 8;
+        const hist = new Array(MAX_SCALE).fill(0);
+        for (let i = 0; i < frame.length; i++) {
+            const luma = Math.round(frame[i++] * 0.2126 + frame[i++] * 0.7152 + frame[i++] * 0.0722);
+            hist[luma % 32]++;
+        }
+        let ind = -1, maxCount = 0;
+        for (let i = 0; i < MAX_SCALE; i++) {
+            if (hist[i] > maxCount) {
+                maxCount = hist[i];
+                ind = i;
+            }
+        }
+        canvasEl.remove();
+        if (ind === 0)
+            return IlluminationState.DARK;
+        if (ind === MAX_SCALE)
+            return IlluminationState.BRIGHT;
+        return IlluminationState.NORMAL;
+    }
+    else {
+        throw new Error('Cannot find Video Element.');
+    }
+}
+/**
+ * Checks if the provided media device is a virtual camera.
+ * @param device
+ */
+function isCameraDeviceVirtual(device) {
+    return device.label.toLowerCase().includes('virtual');
+}
+const INITIAL_ALPHA = 0.9;
+const SECONDARY_ALPHA = 0.75;
+function fillFractionalContext({ ctx, prevColor, nextColor, fraction, }) {
+    const canvasWidth = ctx.canvas.width;
+    const canvasHeight = ctx.canvas.height;
+    ctx.fillStyle = nextColor;
+    ctx.fillRect(0, 0, canvasWidth, canvasHeight * fraction);
+    if (fraction !== 1) {
+        ctx.fillStyle = prevColor;
+        ctx.fillRect(0, canvasHeight * fraction, canvasWidth, canvasHeight * (1 - fraction));
+    }
+}
+function fillOverlayCanvasFractional({ overlayCanvas, prevColor, nextColor, videoEl, ovalDetails, heightFraction, scaleFactor, }) {
+    const { x: videoX, y: videoY } = videoEl.getBoundingClientRect();
+    const { flippedCenterX, centerY, width, height } = ovalDetails;
+    const updatedCenterX = flippedCenterX * scaleFactor + videoX;
+    const updatedCenterY = centerY * scaleFactor + videoY;
+    const canvasWidth = overlayCanvas.width;
+    const canvasHeight = overlayCanvas.height;
+    const ctx = overlayCanvas.getContext('2d');
+    if (ctx) {
+        // Because the canvas is set to to 100% we need to manually set the height for the canvas to use pixel values
+        ctx.canvas.width = window.innerWidth;
+        ctx.canvas.height = window.innerHeight;
+        ctx.clearRect(0, 0, canvasWidth, canvasHeight);
+        // fill the complete canvas
+        fillFractionalContext({
+            ctx,
+            prevColor,
+            nextColor,
+            fraction: heightFraction,
+        });
+        // save the current state
+        ctx.save();
+        // draw the rectangle path and fill it
+        ctx.beginPath();
+        ctx.rect(0, 0, canvasWidth, canvasHeight);
+        ctx.clip();
+        ctx.clearRect(0, 0, canvasWidth, canvasHeight);
+        ctx.globalAlpha = INITIAL_ALPHA;
+        fillFractionalContext({
+            ctx,
+            prevColor,
+            nextColor,
+            fraction: heightFraction,
+        });
+        // draw the oval path and fill it
+        ctx.beginPath();
+        ctx.ellipse(updatedCenterX, updatedCenterY, (width * scaleFactor) / 2, (height * scaleFactor) / 2, 0, 0, 2 * Math.PI);
+        // add stroke to the oval path
+        ctx.strokeStyle = 'white';
+        ctx.lineWidth = 8;
+        ctx.stroke();
+        ctx.clip();
+        ctx.clearRect(0, 0, canvasWidth, canvasHeight);
+        ctx.globalAlpha = SECONDARY_ALPHA;
+        fillFractionalContext({
+            ctx,
+            prevColor,
+            nextColor,
+            fraction: heightFraction,
+        });
+        // restore the state
+        ctx.restore();
+    }
+    else {
+        throw new Error('Cannot find Overlay Canvas.');
+    }
+}
+const isClientFreshnessColorSequence = (obj) => !!obj;
+function getColorsSequencesFromSessionInformation(sessionInformation) {
+    const colorSequenceFromSessionInfo = sessionInformation.Challenge.FaceMovementAndLightChallenge
+        .ColorSequences || [];
+    const colorSequences = colorSequenceFromSessionInfo.map(({ FreshnessColor, DownscrollDuration: downscrollDuration, FlatDisplayDuration: flatDisplayDuration, }) => {
+        const colorArray = FreshnessColor.RGB;
+        const color = `rgb(${colorArray[0]},${colorArray[1]},${colorArray[2]})`;
+        return typeof color !== 'undefined' &&
+            typeof downscrollDuration !== 'undefined' &&
+            typeof flatDisplayDuration !== 'undefined'
+            ? {
+                color,
+                downscrollDuration,
+                flatDisplayDuration,
+            }
+            : undefined;
+    });
+    return colorSequences.filter(isClientFreshnessColorSequence);
+}
+function getRGBArrayFromColorString(colorStr) {
+    return colorStr
+        .slice(colorStr.indexOf('(') + 1, colorStr.indexOf(')'))
+        .split(',')
+        .map((str) => parseInt(str));
+}
+async function getFaceMatchState(faceDetector, videoEl) {
+    const detectedFaces = await faceDetector.detectFaces(videoEl);
+    let faceMatchState;
+    switch (detectedFaces.length) {
+        case 0: {
+            //no face detected;
+            faceMatchState = FaceMatchState.CANT_IDENTIFY;
+            break;
+        }
+        case 1: {
+            //exactly one face detected, match face with oval;
+            faceMatchState = FaceMatchState.FACE_IDENTIFIED;
+            break;
+        }
+        default: {
+            //more than one face detected ;
+            faceMatchState = FaceMatchState.TOO_MANY;
+            break;
+        }
+    }
+    return faceMatchState;
+}
+async function isFaceDistanceBelowThreshold({ faceDetector, videoEl, ovalDetails, reduceThreshold = false, isMobile = false, }) {
+    const detectedFaces = await faceDetector.detectFaces(videoEl);
+    let detectedFace;
+    let isDistanceBelowThreshold = false;
+    let error;
+    switch (detectedFaces.length) {
+        case 0: {
+            //no face detected;
+            error = LivenessErrorState.FACE_DISTANCE_ERROR;
+            break;
+        }
+        case 1: {
+            //exactly one face detected, match face with oval;
+            detectedFace = detectedFaces[0];
+            const width = ovalDetails.width;
+            const { pupilDistance, faceHeight } = getPupilDistanceAndFaceHeight(detectedFace);
+            const alpha = 2.0, gamma = 1.8;
+            const calibratedPupilDistance = (alpha * pupilDistance + gamma * faceHeight) / 2 / alpha;
+            if (width) {
+                isDistanceBelowThreshold =
+                    calibratedPupilDistance / width <
+                        (!reduceThreshold
+                            ? FACE_DISTANCE_THRESHOLD
+                            : isMobile
+                                ? REDUCED_THRESHOLD_MOBILE
+                                : REDUCED_THRESHOLD);
+                if (!isDistanceBelowThreshold) {
+                    error = LivenessErrorState.FACE_DISTANCE_ERROR;
+                }
+            }
+            break;
+        }
+        default: {
+            //more than one face detected
+            error = LivenessErrorState.MULTIPLE_FACES_ERROR;
+            break;
+        }
+    }
+    return { isDistanceBelowThreshold, error };
+}
+function getBoundingBox({ deviceHeight, deviceWidth, height, width, top, left, }) {
+    return {
+        Height: height / deviceHeight,
+        Width: width / deviceWidth,
+        Top: top / deviceHeight,
+        Left: left / deviceWidth,
+    };
+}
+
+export { drawLivenessOvalInCanvas, drawStaticOval, estimateIllumination, fillOverlayCanvasFractional, generateBboxFromLandmarks, getBoundingBox, getColorsSequencesFromSessionInformation, getFaceMatchState, getFaceMatchStateInLivenessOval, getIntersectionOverUnion, getOvalBoundingBox, getOvalDetailsFromSessionInformation, getRGBArrayFromColorString, getStaticLivenessOvalDetails, isCameraDeviceVirtual, isClientFreshnessColorSequence, isFaceDistanceBelowThreshold };
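
The liveness.mjs hunk above carries the face-in-oval matching math: getFaceMatchStateInLivenessOval builds a face bounding box from landmarks, compares it to the oval's bounding box with getIntersectionOverUnion, and converts the overlap into a match percentage. As a quick illustration of that IoU helper, here is a minimal sketch; the box values are made up and the import path assumes the compiled ESM layout shown in this diff.

// Minimal sketch: exercising the IoU helper from the hunk above with made-up boxes.
import { getIntersectionOverUnion } from './liveness.mjs';

// Two 2x2 boxes offset by one unit in each direction.
const boxA = { left: 0, top: 0, right: 2, bottom: 2 };
const boxB = { left: 1, top: 1, right: 3, bottom: 3 };

// Intersection is 1x1 = 1, union is 4 + 4 - 1 = 7, so IoU = 1/7.
console.log(getIntersectionOverUnion(boxA, boxB)); // 0.14285714285714285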

package/dist/esm/components/FaceLivenessDetector/service/utils/streamProvider.mjs
@@ -1 +1,144 @@
-import
+import { getAmplifyUserAgent } from '@aws-amplify/core/internals/utils';
+import { fetchAuthSession } from 'aws-amplify/auth';
+import { RekognitionStreamingClient, StartFaceLivenessSessionCommand } from '@aws-sdk/client-rekognitionstreaming';
+import { VideoRecorder } from './videoRecorder.mjs';
+import { getLivenessUserAgent } from '../../utils/platform.mjs';
+import { CustomWebSocketFetchHandler } from './CustomWebSocketFetchHandler.mjs';
+
+const TIME_SLICE = 1000;
+function isBlob(obj) {
+    return obj.arrayBuffer !== undefined;
+}
+function isClientSessionInformationEvent(obj) {
+    return obj.Challenge !== undefined;
+}
+function isEndStreamWithCodeEvent(obj) {
+    return obj.code !== undefined;
+}
+class LivenessStreamProvider {
+    constructor({ sessionId, region, stream, videoEl, credentialProvider, endpointOverride, }) {
+        this.sessionId = sessionId;
+        this.region = region;
+        this._stream = stream;
+        this.videoEl = videoEl;
+        this.videoRecorder = new VideoRecorder(stream);
+        this.credentialProvider = credentialProvider;
+        this.endpointOverride = endpointOverride;
+        this.initPromise = this.init();
+    }
+    async getResponseStream() {
+        await this.initPromise;
+        return this.responseStream;
+    }
+    startRecordingLivenessVideo() {
+        this.videoRecorder.start(TIME_SLICE);
+    }
+    sendClientInfo(clientInfo) {
+        this.videoRecorder.dispatch(new MessageEvent('clientSesssionInfo', {
+            data: { clientInfo },
+        }));
+    }
+    async stopVideo() {
+        await this.videoRecorder.stop();
+    }
+    dispatchStopVideoEvent() {
+        this.videoRecorder.dispatch(new Event('stopVideo'));
+    }
+    async endStreamWithCode(code) {
+        if (this.videoRecorder.getState() === 'recording') {
+            await this.stopVideo();
+        }
+        this.videoRecorder.dispatch(new MessageEvent('endStreamWithCode', {
+            data: { code: code },
+        }));
+        return;
+    }
+    async init() {
+        const credentials = this.credentialProvider ?? (await fetchAuthSession()).credentials;
+        if (!credentials) {
+            throw new Error('No credentials');
+        }
+        const clientconfig = {
+            credentials,
+            region: this.region,
+            customUserAgent: `${getAmplifyUserAgent()} ${getLivenessUserAgent()}`,
+            requestHandler: new CustomWebSocketFetchHandler({
+                connectionTimeout: 10000,
+            }),
+        };
+        if (this.endpointOverride) {
+            const override = this.endpointOverride;
+            clientconfig.endpointProvider = () => {
+                const url = new URL(override);
+                return { url };
+            };
+        }
+        this._client = new RekognitionStreamingClient(clientconfig);
+        this.responseStream = await this.startLivenessVideoConnection();
+    }
+    // Creates a generator from a stream of video chunks and livenessActionDocuments and yields VideoEvent and ClientEvents
+    getAsyncGeneratorFromReadableStream(stream) {
+        // eslint-disable-next-line @typescript-eslint/no-this-alias
+        const current = this;
+        this._reader = stream.getReader();
+        return async function* () {
+            while (true) {
+                // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
+                const { done, value } = await current._reader.read();
+                if (done) {
+                    return;
+                }
+                // Video chunks blobs should be sent as video events
+                if (value === 'stopVideo') {
+                    // sending an empty video chunk signals that we have ended sending video
+                    yield {
+                        VideoEvent: {
+                            VideoChunk: [],
+                            TimestampMillis: Date.now(),
+                        },
+                    };
+                }
+                else if (isBlob(value)) {
+                    const buffer = await value.arrayBuffer();
+                    const chunk = new Uint8Array(buffer);
+                    if (chunk.length > 0) {
+                        yield {
+                            VideoEvent: {
+                                VideoChunk: chunk,
+                                TimestampMillis: Date.now(),
+                            },
+                        };
+                    }
+                }
+                else if (isClientSessionInformationEvent(value)) {
+                    yield {
+                        ClientSessionInformationEvent: {
+                            Challenge: value.Challenge,
+                        },
+                    };
+                }
+                else if (isEndStreamWithCodeEvent(value)) {
+                    yield {
+                        VideoEvent: {
+                            VideoChunk: [],
+                            TimestampMillis: { closeCode: value.code },
+                        },
+                    };
+                }
+            }
+        };
+    }
+    async startLivenessVideoConnection() {
+        const livenessRequestGenerator = this.getAsyncGeneratorFromReadableStream(this.videoRecorder.videoStream)();
+        const response = await this._client.send(new StartFaceLivenessSessionCommand({
+            ChallengeVersions: 'FaceMovementAndLightChallenge_1.0.0',
+            SessionId: this.sessionId,
+            LivenessRequestStream: livenessRequestGenerator,
+            VideoWidth: this.videoEl.videoWidth.toString(),
+            VideoHeight: this.videoEl.videoHeight.toString(),
+        }));
+        return response.LivenessResponseStream;
+    }
+}
+
+export { LivenessStreamProvider, TIME_SLICE };
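
In this hunk, streamProvider.mjs now pulls credentials from aws-amplify/auth's fetchAuthSession unless a credentialProvider is supplied, and streams recorded video chunks to the service through StartFaceLivenessSessionCommand over the custom WebSocket handler. A hedged sketch of driving the class directly follows; the session id, region, and element lookup are placeholders, Amplify Auth is assumed to be configured so fetchAuthSession can supply credentials, and in practice the FaceLivenessDetector component manages all of this internally.

// Hedged sketch, not from the package docs: wiring up LivenessStreamProvider by hand.
import { LivenessStreamProvider } from './streamProvider.mjs';

const stream = await navigator.mediaDevices.getUserMedia({ video: true });
const videoEl = document.querySelector('video');

const provider = new LivenessStreamProvider({
  sessionId: 'example-session-id', // placeholder: a real id comes from CreateFaceLivenessSession
  region: 'us-east-1',             // placeholder region
  stream,
  videoEl,
});

// init() starts in the constructor; getResponseStream() resolves once the
// StartFaceLivenessSession connection is open.
const responseStream = await provider.getResponseStream();
provider.startRecordingLivenessVideo();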

package/dist/esm/components/FaceLivenessDetector/service/utils/support.mjs
@@ -1 +1,14 @@
-
+/**
+ * Checks whether WebAssembly is supported in the current environment.
+ */
+function isWebAssemblySupported() {
+    try {
+        return (!!window.WebAssembly &&
+            (!!window.WebAssembly.compile || !!window.WebAssembly.compileStreaming));
+    }
+    catch (e) {
+        return false;
+    }
+}
+
+export { isWebAssemblySupported };
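
support.mjs is a small addition: a guard for environments that lack WebAssembly. A minimal sketch of using it as a preflight check follows; the call site and handling are illustrative, not part of the package.

// Minimal sketch: a hypothetical preflight check using the new helper.
import { isWebAssemblySupported } from './support.mjs';

if (!isWebAssemblySupported()) {
  // Illustrative handling only: bail out before attempting the liveness check.
  console.warn('WebAssembly is unavailable; the liveness check cannot run in this environment.');
}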