@aws-amplify/ui-react-liveness 3.3.8 → 3.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +15 -15
- package/dist/esm/components/FaceLivenessDetector/FaceLivenessDetectorCore.mjs +4 -2
- package/dist/esm/components/FaceLivenessDetector/LivenessCheck/CameraSelector.mjs +13 -0
- package/dist/esm/components/FaceLivenessDetector/LivenessCheck/LivenessCameraModule.mjs +50 -28
- package/dist/esm/components/FaceLivenessDetector/LivenessCheck/LivenessCheck.mjs +5 -4
- package/dist/esm/components/FaceLivenessDetector/service/machine/machine.mjs +247 -314
- package/dist/esm/components/FaceLivenessDetector/service/utils/ColorSequenceDisplay/ColorSequenceDisplay.mjs +140 -0
- package/dist/esm/components/FaceLivenessDetector/service/utils/StreamRecorder/StreamRecorder.mjs +171 -0
- package/dist/esm/components/FaceLivenessDetector/service/utils/TelemetryReporter/TelemetryReporter.mjs +27 -0
- package/dist/esm/components/FaceLivenessDetector/service/utils/constants.mjs +30 -7
- package/dist/esm/components/FaceLivenessDetector/service/utils/createRequestStreamGenerator/createRequestStreamGenerator.mjs +32 -0
- package/dist/esm/components/FaceLivenessDetector/service/utils/createRequestStreamGenerator/utils.mjs +148 -0
- package/dist/esm/components/FaceLivenessDetector/service/utils/createStreamingClient/Signer.mjs +2 -3
- package/dist/esm/components/FaceLivenessDetector/service/utils/createStreamingClient/createStreamingClient.mjs +36 -6
- package/dist/esm/components/FaceLivenessDetector/service/utils/createStreamingClient/resolveCredentials.mjs +7 -6
- package/dist/esm/components/FaceLivenessDetector/service/utils/getFaceMatchStateInLivenessOval.mjs +9 -5
- package/dist/esm/components/FaceLivenessDetector/service/utils/liveness.mjs +19 -34
- package/dist/esm/components/FaceLivenessDetector/service/utils/{eventUtils.mjs → responseStreamEvent.mjs} +2 -2
- package/dist/esm/components/FaceLivenessDetector/service/utils/sessionInformation.mjs +45 -0
- package/dist/esm/components/FaceLivenessDetector/shared/DefaultStartScreenComponents.mjs +3 -2
- package/dist/esm/components/FaceLivenessDetector/shared/FaceLivenessErrorModal.mjs +4 -2
- package/dist/esm/components/FaceLivenessDetector/shared/Hint.mjs +4 -7
- package/dist/esm/components/FaceLivenessDetector/types/classNames.mjs +3 -0
- package/dist/esm/components/FaceLivenessDetector/utils/device.mjs +12 -12
- package/dist/esm/index.mjs +12 -0
- package/dist/esm/version.mjs +1 -1
- package/dist/index.js +956 -775
- package/dist/styles.css +17 -2
- package/dist/types/components/FaceLivenessDetector/FaceLivenessDetector.d.ts +3 -3
- package/dist/types/components/FaceLivenessDetector/FaceLivenessDetectorCore.d.ts +3 -3
- package/dist/types/components/FaceLivenessDetector/LivenessCheck/CameraSelector.d.ts +8 -0
- package/dist/types/components/FaceLivenessDetector/LivenessCheck/LivenessCameraModule.d.ts +3 -2
- package/dist/types/components/FaceLivenessDetector/LivenessCheck/LivenessCheck.d.ts +2 -2
- package/dist/types/components/FaceLivenessDetector/displayText.d.ts +1 -1
- package/dist/types/components/FaceLivenessDetector/hooks/useLivenessSelector.d.ts +1 -1
- package/dist/types/components/FaceLivenessDetector/index.d.ts +6 -3
- package/dist/types/components/FaceLivenessDetector/providers/FaceLivenessDetectorProvider.d.ts +1 -1
- package/dist/types/components/FaceLivenessDetector/service/machine/machine.d.ts +1 -1
- package/dist/types/components/FaceLivenessDetector/service/types/credentials.d.ts +1 -1
- package/dist/types/components/FaceLivenessDetector/service/types/liveness.d.ts +2 -2
- package/dist/types/components/FaceLivenessDetector/service/types/machine.d.ts +40 -27
- package/dist/types/components/FaceLivenessDetector/service/utils/ColorSequenceDisplay/ColorSequenceDisplay.d.ts +55 -0
- package/dist/types/components/FaceLivenessDetector/service/utils/ColorSequenceDisplay/index.d.ts +2 -0
- package/dist/types/components/FaceLivenessDetector/service/utils/StreamRecorder/StreamRecorder.d.ts +15 -0
- package/dist/types/components/FaceLivenessDetector/service/utils/StreamRecorder/index.d.ts +1 -0
- package/dist/types/components/FaceLivenessDetector/service/utils/TelemetryReporter/TelemetryReporter.d.ts +8 -0
- package/dist/types/components/FaceLivenessDetector/service/utils/TelemetryReporter/index.d.ts +2 -0
- package/dist/types/components/FaceLivenessDetector/service/utils/blazefaceFaceDetection.d.ts +2 -1
- package/dist/types/components/FaceLivenessDetector/service/utils/constants.d.ts +27 -3
- package/dist/types/components/FaceLivenessDetector/service/utils/createRequestStreamGenerator/createRequestStreamGenerator.d.ts +15 -0
- package/dist/types/components/FaceLivenessDetector/service/utils/createRequestStreamGenerator/index.d.ts +2 -0
- package/dist/types/components/FaceLivenessDetector/service/utils/createRequestStreamGenerator/utils.d.ts +30 -0
- package/dist/types/components/FaceLivenessDetector/service/utils/createStreamingClient/CustomWebSocketFetchHandler.d.ts +3 -2
- package/dist/types/components/FaceLivenessDetector/service/utils/createStreamingClient/Signer.d.ts +1 -2
- package/dist/types/components/FaceLivenessDetector/service/utils/createStreamingClient/createStreamingClient.d.ts +28 -6
- package/dist/types/components/FaceLivenessDetector/service/utils/createStreamingClient/index.d.ts +1 -0
- package/dist/types/components/FaceLivenessDetector/service/utils/createStreamingClient/resolveCredentials.d.ts +1 -1
- package/dist/types/components/FaceLivenessDetector/service/utils/createStreamingClient/types.d.ts +1 -1
- package/dist/types/components/FaceLivenessDetector/service/utils/getFaceMatchStateInLivenessOval.d.ts +4 -4
- package/dist/types/components/FaceLivenessDetector/service/utils/index.d.ts +7 -4
- package/dist/types/components/FaceLivenessDetector/service/utils/liveness.d.ts +16 -26
- package/dist/types/components/FaceLivenessDetector/service/utils/{eventUtils.d.ts → responseStreamEvent.d.ts} +2 -2
- package/dist/types/components/FaceLivenessDetector/service/utils/sessionInformation.d.ts +7 -0
- package/dist/types/components/FaceLivenessDetector/service/utils/types.d.ts +21 -0
- package/dist/types/components/FaceLivenessDetector/shared/DefaultStartScreenComponents.d.ts +1 -1
- package/dist/types/components/FaceLivenessDetector/shared/FaceLivenessErrorModal.d.ts +2 -2
- package/dist/types/components/FaceLivenessDetector/shared/Hint.d.ts +1 -1
- package/dist/types/components/FaceLivenessDetector/shared/Overlay.d.ts +1 -1
- package/dist/types/components/FaceLivenessDetector/shared/Toast.d.ts +1 -1
- package/dist/types/components/FaceLivenessDetector/types/classNames.d.ts +3 -0
- package/dist/types/components/FaceLivenessDetector/utils/device.d.ts +1 -0
- package/dist/types/components/FaceLivenessDetector/utils/getDisplayText.d.ts +1 -1
- package/dist/types/index.d.ts +2 -1
- package/dist/types/version.d.ts +1 -1
- package/package.json +10 -10
- package/dist/esm/components/FaceLivenessDetector/service/utils/freshnessColorDisplay.mjs +0 -131
- package/dist/esm/components/FaceLivenessDetector/service/utils/streamProvider.mjs +0 -126
- package/dist/esm/components/FaceLivenessDetector/service/utils/videoRecorder.mjs +0 -108
- package/dist/types/components/FaceLivenessDetector/service/types/service.d.ts +0 -5
- package/dist/types/components/FaceLivenessDetector/service/utils/freshnessColorDisplay.d.ts +0 -21
- package/dist/types/components/FaceLivenessDetector/service/utils/streamProvider.d.ts +0 -42
- package/dist/types/components/FaceLivenessDetector/service/utils/videoRecorder.d.ts +0 -27
package/dist/index.js
CHANGED
@@ -5,13 +5,15 @@ Object.defineProperty(exports, '__esModule', { value: true });
  var React = require('react');
  var auth = require('aws-amplify/auth');
  var react = require('@xstate/react');
- var
+ var uuid = require('uuid');
  var xstate = require('xstate');
  var tfjsCore = require('@tensorflow/tfjs-core');
  var faceDetection = require('@tensorflow-models/face-detection');
  var tfjsBackendWasm = require('@tensorflow/tfjs-backend-wasm');
  require('@tensorflow/tfjs-backend-cpu');
  var utils = require('@aws-amplify/core/internals/utils');
+ var tslib = require('tslib');
+ var ui = require('@aws-amplify/ui');
  var clientRekognitionstreaming = require('@aws-sdk/client-rekognitionstreaming');
  var utilFormatUrl = require('@aws-sdk/util-format-url');
  var eventstreamSerdeBrowser = require('@smithy/eventstream-serde-browser');
@@ -19,7 +21,6 @@ var fetchHttpHandler = require('@smithy/fetch-http-handler');
  var protocolHttp = require('@smithy/protocol-http');
  var signatureV4 = require('@smithy/signature-v4');
  var uiReact = require('@aws-amplify/ui-react');
- var ui = require('@aws-amplify/ui');
  var internal = require('@aws-amplify/ui-react/internal');

  function _interopNamespace(e) {
@@ -95,12 +96,6 @@ const LivenessErrorState = {

  // Face distance is calculated as pupilDistance / ovalWidth.
  // The further away you are from the camera the distance between your pupils will decrease, thus lowering the threshold values.
- // These FACE_DISTANCE_THRESHOLD values are determined by the science team and should only be changed with their approval.
- // We want to ensure at the start of a check that the user's pupilDistance/ovalWidth is below FACE_DISTANCE_THRESHOLD to ensure that they are starting
- // a certain distance away from the camera.
- const FACE_DISTANCE_THRESHOLD = 0.32;
- const REDUCED_THRESHOLD = 0.4;
- const REDUCED_THRESHOLD_MOBILE = 0.37;
  // Constants from science team to determine ocular distance (space between eyes)
  const PUPIL_DISTANCE_WEIGHT = 2.0;
  const FACE_HEIGHT_WEIGHT = 1.8;
@@ -109,6 +104,7 @@ const FACE_MATCH_RANGE_MIN = 0;
  const FACE_MATCH_RANGE_MAX = 1;
  const FACE_MATCH_WEIGHT_MIN = 0.25;
  const FACE_MATCH_WEIGHT_MAX = 0.75;
+ const OVAL_HEIGHT_WIDTH_RATIO = 1.618;
  const WS_CLOSURE_CODE = {
      SUCCESS_CODE: 1000,
      DEFAULT_ERROR_CODE: 4000,
@@ -117,6 +113,34 @@ const WS_CLOSURE_CODE = {
      RUNTIME_ERROR: 4005,
      USER_ERROR_DURING_CONNECTION: 4007,
  };
+ // number in milliseconds to record into each video chunk.
+ // see https://developer.mozilla.org/en-US/docs/Web/API/MediaRecorder/start#timeslice
+ const TIME_SLICE = 1000;
+ // in MS, the rate at which colors are rendered/checked
+ const TICK_RATE = 10;
+ /**
+  * The number of seconds before the presigned URL expires.
+  * Used to override aws sdk default value of 60
+  */
+ const REQUEST_EXPIRY = 299;
+ /**
+  * The maximum time in milliseconds that the connection phase of a request
+  * may take before the connection attempt is abandoned.
+  */
+ const CONNECTION_TIMEOUT = 10000;
+ const FACE_MOVEMENT_AND_LIGHT_CHALLENGE = {
+     type: 'FaceMovementAndLightChallenge',
+     version: '1.0.0',
+ };
+ const FACE_MOVEMENT_CHALLENGE = {
+     type: 'FaceMovementChallenge',
+     version: '1.0.0',
+ };
+ const SUPPORTED_CHALLENGES = [
+     FACE_MOVEMENT_AND_LIGHT_CHALLENGE,
+     FACE_MOVEMENT_CHALLENGE,
+ ];
+ const queryParameterString = SUPPORTED_CHALLENGES.map((challenge) => `${challenge.type}_${challenge.version}`).join(',');

  /**
   * Returns the random number between min and max
@@ -161,9 +185,8 @@ function getIntersectionOverUnion(box1, box2) {
   * Returns the details of a randomly generated liveness oval
   * from SDK
   */
- function getOvalDetailsFromSessionInformation({
- const ovalParameters =
- ?.OvalParameters;
+ function getOvalDetailsFromSessionInformation({ parsedSessionInformation, videoWidth, }) {
+     const ovalParameters = parsedSessionInformation.Challenge.OvalParameters;
      if (!ovalParameters ||
          !ovalParameters.CenterX ||
          !ovalParameters.CenterY ||
@@ -185,7 +208,7 @@ function getOvalDetailsFromSessionInformation({ sessionInformation, videoWidth,
  /**
   * Returns the details of a statically generated liveness oval based on the video dimensions
   */
- function getStaticLivenessOvalDetails({ width, height, widthSeed = 1.0, centerXSeed = 0.5, centerYSeed = 0.5, ratioMultiplier = 0.8, }) {
+ function getStaticLivenessOvalDetails({ width, height, widthSeed = 1.0, centerXSeed = 0.5, centerYSeed = 0.5, ratioMultiplier = 0.8, ovalHeightWidthRatio = OVAL_HEIGHT_WIDTH_RATIO, }) {
      const videoHeight = height;
      let videoWidth = width;
      const ovalRatio = widthSeed * ratioMultiplier;
@@ -199,7 +222,7 @@ function getStaticLivenessOvalDetails({ width, height, widthSeed = 1.0, centerXS
          videoWidth = (3 / 4) * videoHeight;
      }
      const ovalWidth = ovalRatio * videoWidth;
-     const ovalHeight =
+     const ovalHeight = ovalHeightWidthRatio * ovalWidth;
      return {
          flippedCenterX: Math.floor(videoWidth - centerX),
          centerX: Math.floor(centerX),
@@ -262,7 +285,7 @@ function drawStaticOval(canvasEl, videoEl, videoMediaStream) {
      const ovalDetails = getStaticLivenessOvalDetails({
          width: width,
          height: height,
-         ratioMultiplier: 0.
+         ratioMultiplier: 0.3,
      });
      ovalDetails.flippedCenterX = width - ovalDetails.centerX;
      // Compute scaleFactor which is how much our video element is scaled
@@ -296,7 +319,7 @@ function getPupilDistanceAndFaceHeight(face) {
      const faceHeight = Math.sqrt((eyeCenter[0] - mouth[0]) ** 2 + (eyeCenter[1] - mouth[1]) ** 2);
      return { pupilDistance, faceHeight };
  }
- function generateBboxFromLandmarks(face, oval, frameHeight) {
+ function generateBboxFromLandmarks({ ovalHeightWidthRatio = OVAL_HEIGHT_WIDTH_RATIO, face, oval, frameHeight, }) {
      const { leftEye, rightEye, nose, leftEar, rightEar } = face;
      const { height: ovalHeight, centerY } = oval;
      const ovalTop = centerY - ovalHeight / 2;
@@ -316,7 +339,7 @@ function generateBboxFromLandmarks(face, oval, frameHeight) {
          centerFaceY = eyeCenter[1];
      }
      const faceWidth = ocularWidth;
-     const faceHeight =
+     const faceHeight = ovalHeightWidthRatio * faceWidth;
      const top = Math.max(centerFaceY - faceHeight / 2, 0);
      const bottom = Math.min(centerFaceY + faceHeight / 2, frameHeight);
      const left = Math.min(centerFaceX - ocularWidth / 2, rightEar[0]);
@@ -435,11 +458,11 @@ function fillOverlayCanvasFractional({ overlayCanvas, prevColor, nextColor, vide
          throw new Error('Cannot find Overlay Canvas.');
      }
  }
- const
- function getColorsSequencesFromSessionInformation(
- const
+ const isColorSequence = (obj) => !!obj;
+ function getColorsSequencesFromSessionInformation(parsedSessionInformation) {
+     const colorSequenceFromServerChallenge = parsedSessionInformation.Challenge
          .ColorSequences ?? [];
-     const colorSequences =
+     const colorSequences = colorSequenceFromServerChallenge.map(({ FreshnessColor, DownscrollDuration: downscrollDuration, FlatDisplayDuration: flatDisplayDuration, }) => {
          const colorArray = FreshnessColor.RGB;
          const color = `rgb(${colorArray[0]},${colorArray[1]},${colorArray[2]})`;
          return typeof color !== 'undefined' &&
@@ -452,13 +475,7 @@ function getColorsSequencesFromSessionInformation(sessionInformation) {
              }
              : undefined;
      });
-     return colorSequences.filter(
- }
- function getRGBArrayFromColorString(colorStr) {
-     return colorStr
-         .slice(colorStr.indexOf('(') + 1, colorStr.indexOf(')'))
-         .split(',')
-         .map((str) => parseInt(str));
+     return colorSequences.filter(isColorSequence);
  }
  async function getFaceMatchState(faceDetector, videoEl) {
      const detectedFaces = await faceDetector.detectFaces(videoEl);
@@ -482,7 +499,9 @@ async function getFaceMatchState(faceDetector, videoEl) {
      }
      return faceMatchState;
  }
- async function isFaceDistanceBelowThreshold({ faceDetector, videoEl, ovalDetails, reduceThreshold = false,
+ async function isFaceDistanceBelowThreshold({ parsedSessionInformation, faceDetector, videoEl, ovalDetails, reduceThreshold = false, }) {
+     const challengeConfig = parsedSessionInformation.Challenge.ChallengeConfig;
+     const { FaceDistanceThresholdMin, FaceDistanceThreshold } = challengeConfig;
      const detectedFaces = await faceDetector.detectFaces(videoEl);
      let detectedFace;
      let isDistanceBelowThreshold = false;
@@ -506,10 +525,8 @@ async function isFaceDistanceBelowThreshold({ faceDetector, videoEl, ovalDetails
          isDistanceBelowThreshold =
              calibratedPupilDistance / width <
                  (!reduceThreshold
- ?
- :
-                     ? REDUCED_THRESHOLD_MOBILE
-                     : REDUCED_THRESHOLD);
+                     ? FaceDistanceThresholdMin
+                     : FaceDistanceThreshold);
          if (!isDistanceBelowThreshold) {
              error = LivenessErrorState.FACE_DISTANCE_ERROR;
          }
@@ -524,14 +541,6 @@ async function isFaceDistanceBelowThreshold({ faceDetector, videoEl, ovalDetails
      }
      return { isDistanceBelowThreshold, error };
  }
- function getBoundingBox({ deviceHeight, deviceWidth, height, width, top, left, }) {
-     return {
-         Height: height / deviceHeight,
-         Width: width / deviceWidth,
-         Top: top / deviceHeight,
-         Left: left / deviceWidth,
-     };
- }

  /**
   * Checks whether WebAssembly is supported in the current environment.
@@ -638,152 +647,12 @@ class BlazeFaceFaceDetection extends FaceDetection {
      }
  }

- function isNewerIpad() {
-     // iPads on iOS13+ return as if a desktop Mac
-     // so check for maxTouchPoints also.
-     return (/Macintosh/i.test(navigator.userAgent) &&
-         !!navigator.maxTouchPoints &&
-         navigator.maxTouchPoints > 1);
- }
- function isMobileScreen() {
-     const isMobileDevice =
-     // Test Android/iPhone/iPad
-     /Android|iPhone|iPad/i.test(navigator.userAgent) || isNewerIpad();
-     return isMobileDevice;
- }
- /**
-  * Use window.matchMedia to direct landscape orientation
-  * screen.orientation is not supported in Safari so we will use
-  * media query detection to listen for changes instead.
-  * @returns MediaQueryList object
-  */
- function getLandscapeMediaQuery() {
-     return window.matchMedia('(orientation: landscape)');
- }
- // minor version 146+ is confirmed to have the fix https://issues.chromium.org/issues/343199623#comment34
- function isAndroidChromeWithBrokenH264() {
-     const groups = /Chrome\/125\.[0-9]+\.[0-9]+\.([0-9]+)/i.exec(navigator.userAgent);
-     if (!groups) {
-         return false;
-     }
-     const minorVersion = groups[1];
-     return (/Android/i.test(navigator.userAgent) &&
-         /Chrome\/125/i.test(navigator.userAgent) &&
-         parseInt(minorVersion) < 146);
- }
-
- // Equivalent to 2 Kbps - needed for maintaining video quality at 60 FPS
- const BITS_PER_SECOND = 2000000;
- // Only to be used with Chrome for the Android Chrome H264 Bug - https://issues.chromium.org/issues/343199623
- const ALTERNATE_CHROME_MIME_TYPE = 'video/x-matroska;codecs=vp8';
- /**
-  * Helper wrapper class over the native MediaRecorder.
-  */
- class VideoRecorder {
-     constructor(stream) {
-         if (typeof MediaRecorder === 'undefined') {
-             throw Error('MediaRecorder is not supported by this browser');
-         }
-         this._stream = stream;
-         this._chunks = [];
-         this._recorder = new MediaRecorder(stream, {
-             bitsPerSecond: BITS_PER_SECOND,
-             mimeType: isAndroidChromeWithBrokenH264()
-                 ? ALTERNATE_CHROME_MIME_TYPE
-                 : undefined,
-         });
-         this._setupCallbacks();
-     }
-     getState() {
-         return this._recorder.state;
-     }
-     start(timeSlice) {
-         this.clearRecordedData();
-         this.recordingStartApiTimestamp = Date.now();
-         this._recorder.start(timeSlice);
-     }
-     async stop() {
-         if (this.getState() === 'recording') {
-             this._recorder.stop();
-         }
-         return this._recorderStopped;
-     }
-     pause() {
-         this._recorder.pause();
-     }
-     clearRecordedData() {
-         this._chunks = [];
-     }
-     dispatch(event) {
-         this._recorder.dispatchEvent(event);
-     }
-     getVideoChunkSize() {
-         return this._chunks.length;
-     }
-     _setupCallbacks() {
-         // Creates a Readablestream of video chunks. Waits to receive a clientSessionInfo event before pushing
-         // a livenessActionDocument to the ReadableStream and finally closing the ReadableStream
-         this.videoStream = new ReadableStream({
-             start: (controller) => {
-                 if (!this._recorder) {
-                     return;
-                 }
-                 this._recorder.ondataavailable = (e) => {
-                     if (e.data && e.data.size > 0) {
-                         if (this._chunks.length === 0) {
-                             this.firstChunkTimestamp = Date.now();
-                         }
-                         this._chunks.push(e.data);
-                         controller.enqueue(e.data);
-                     }
-                 };
-                 this._recorder.addEventListener('clientSesssionInfo', (e) => {
-                     // eslint-disable-next-line @typescript-eslint/no-unsafe-argument, @typescript-eslint/no-unsafe-member-access
-                     controller.enqueue(e.data.clientInfo);
-                 });
-                 this._recorder.addEventListener('stopVideo', () => {
-                     controller.enqueue('stopVideo');
-                 });
-                 this._recorder.addEventListener('endStream', () => {
-                     controller.close();
-                 });
-                 this._recorder.addEventListener('endStreamWithCode', (e) => {
-                     // eslint-disable-next-line @typescript-eslint/no-unsafe-argument, @typescript-eslint/no-unsafe-member-access
-                     controller.enqueue({
-                         type: 'endStreamWithCode',
-                         // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
-                         code: e.data.code,
-                     });
-                 });
-             },
-         });
-         this.recorderStarted = new Promise((resolve) => {
-             this._recorder.onstart = () => {
-                 this.recorderStartTimestamp = Date.now();
-                 resolve();
-             };
-         });
-         this._recorderStopped = new Promise((resolve) => {
-             this._recorder.onstop = () => {
-                 this.recorderEndTimestamp = Date.now();
-                 resolve();
-             };
-         });
-         this._recorder.onerror = () => {
-             if (this.getState() !== 'stopped') {
-                 this.stop();
-             }
-         };
-     }
- }
-
  /**
   * Returns the state of the provided face with respect to the provided liveness oval.
   */
- function getFaceMatchStateInLivenessOval({ face, ovalDetails, initialFaceIntersection,
+ function getFaceMatchStateInLivenessOval({ face, ovalDetails, initialFaceIntersection, parsedSessionInformation, frameHeight, }) {
      let faceMatchState;
-     const challengeConfig =
- ?.ChallengeConfig;
+     const challengeConfig = parsedSessionInformation.Challenge.ChallengeConfig;
      if (!challengeConfig ||
          !challengeConfig.OvalIouThreshold ||
          !challengeConfig.OvalIouHeightThreshold ||
@@ -792,8 +661,13 @@ function getFaceMatchStateInLivenessOval({ face, ovalDetails, initialFaceInterse
          !challengeConfig.FaceIouWidthThreshold) {
          throw new Error('Challenge information not returned from session information.');
      }
-     const { OvalIouThreshold, FaceIouHeightThreshold, FaceIouWidthThreshold } = challengeConfig;
-     const faceBoundingBox = generateBboxFromLandmarks(
+     const { OvalIouThreshold, FaceIouHeightThreshold, FaceIouWidthThreshold, OvalHeightWidthRatio, } = challengeConfig;
+     const faceBoundingBox = generateBboxFromLandmarks({
+         ovalHeightWidthRatio: OvalHeightWidthRatio,
+         face,
+         oval: ovalDetails,
+         frameHeight,
+     });
      const minFaceX = faceBoundingBox.left;
      const maxFaceX = faceBoundingBox.right;
      const minFaceY = faceBoundingBox.top;
@@ -831,7 +705,357 @@ function getFaceMatchStateInLivenessOval({ face, ovalDetails, initialFaceInterse
      return { faceMatchState, faceMatchPercentage };
  }

-
+ var _ColorSequenceDisplay_instances, _ColorSequenceDisplay_sequence, _ColorSequenceDisplay_previousSequence, _ColorSequenceDisplay_colorStage, _ColorSequenceDisplay_sequenceIndex, _ColorSequenceDisplay_colorSequences, _ColorSequenceDisplay_isFirstTick, _ColorSequenceDisplay_lastColorStageChangeTimestamp, _ColorSequenceDisplay_isFlatStage, _ColorSequenceDisplay_isScrollingStage, _ColorSequenceDisplay_startColorSequence, _ColorSequenceDisplay_handleSequenceChange;
+ var ColorStageType;
+ (function (ColorStageType) {
+     ColorStageType[ColorStageType["Scrolling"] = 0] = "Scrolling";
+     ColorStageType[ColorStageType["Flat"] = 1] = "Flat";
+ })(ColorStageType || (ColorStageType = {}));
+ class ColorSequenceDisplay {
+     /**
+      * Iterates over provided color sequences and executes sequence event callbacks
+      *
+      * @param {ColorSequences} colorSequences array of color sequences to iterate over
+      */
+     constructor(colorSequences) {
+         _ColorSequenceDisplay_instances.add(this);
+         /**
+          * the current color sequence used for flat display and the prev color when scrolling
+          */
+         _ColorSequenceDisplay_sequence.set(this, void 0);
+         /**
+          * previous color sequence, during flat display curr === prev and during scroll it is the prev indexed color
+          */
+         _ColorSequenceDisplay_previousSequence.set(this, void 0);
+         /**
+          * current ColorStage, initialize to 'FLAT'
+          */
+         _ColorSequenceDisplay_colorStage.set(this, ColorStageType.Flat);
+         /**
+          * current color sequence index (black flat, red scrolling, etc)
+          */
+         _ColorSequenceDisplay_sequenceIndex.set(this, 0);
+         _ColorSequenceDisplay_colorSequences.set(this, void 0);
+         _ColorSequenceDisplay_isFirstTick.set(this, true);
+         _ColorSequenceDisplay_lastColorStageChangeTimestamp.set(this, 0);
+         tslib.__classPrivateFieldSet(this, _ColorSequenceDisplay_colorSequences, colorSequences, "f");
+         tslib.__classPrivateFieldSet(this, _ColorSequenceDisplay_sequence, colorSequences[0], "f");
+         tslib.__classPrivateFieldSet(this, _ColorSequenceDisplay_previousSequence, colorSequences[0], "f");
+     }
+     /**
+      * Start sequence iteration and execute event callbacks
+      *
+      * @async
+      * @param {StartSequencesParams} params Sequence event handlers
+      * @returns {Promise<boolean>} Resolves to true when complete
+      */
+     async startSequences(params) {
+         return new Promise((resolve) => {
+             setTimeout(() => {
+                 tslib.__classPrivateFieldGet(this, _ColorSequenceDisplay_instances, "m", _ColorSequenceDisplay_startColorSequence).call(this, { ...params, resolve });
+             }, Math.min(TICK_RATE));
+         });
+     }
+ }
+ _ColorSequenceDisplay_sequence = new WeakMap(), _ColorSequenceDisplay_previousSequence = new WeakMap(), _ColorSequenceDisplay_colorStage = new WeakMap(), _ColorSequenceDisplay_sequenceIndex = new WeakMap(), _ColorSequenceDisplay_colorSequences = new WeakMap(), _ColorSequenceDisplay_isFirstTick = new WeakMap(), _ColorSequenceDisplay_lastColorStageChangeTimestamp = new WeakMap(), _ColorSequenceDisplay_instances = new WeakSet(), _ColorSequenceDisplay_isFlatStage = function _ColorSequenceDisplay_isFlatStage() {
+     return tslib.__classPrivateFieldGet(this, _ColorSequenceDisplay_colorStage, "f") === ColorStageType.Flat;
+ }, _ColorSequenceDisplay_isScrollingStage = function _ColorSequenceDisplay_isScrollingStage() {
+     return tslib.__classPrivateFieldGet(this, _ColorSequenceDisplay_colorStage, "f") === ColorStageType.Scrolling;
+ }, _ColorSequenceDisplay_startColorSequence = function _ColorSequenceDisplay_startColorSequence({ onSequenceChange, onSequenceColorChange, onSequenceStart, onSequencesComplete, resolve, }) {
+     if (ui.isFunction(onSequenceStart)) {
+         onSequenceStart();
+     }
+     const sequenceStartTime = Date.now();
+     let timeSinceLastColorStageChange = sequenceStartTime - tslib.__classPrivateFieldGet(this, _ColorSequenceDisplay_lastColorStageChangeTimestamp, "f");
+     // Send a colorStart time only for the first tick of the first color
+     if (tslib.__classPrivateFieldGet(this, _ColorSequenceDisplay_isFirstTick, "f")) {
+         tslib.__classPrivateFieldSet(this, _ColorSequenceDisplay_lastColorStageChangeTimestamp, Date.now(), "f");
+         tslib.__classPrivateFieldSet(this, _ColorSequenceDisplay_isFirstTick, false, "f");
+         // initial sequence change
+         if (ui.isFunction(onSequenceChange)) {
+             onSequenceChange({
+                 prevSequenceColor: tslib.__classPrivateFieldGet(this, _ColorSequenceDisplay_previousSequence, "f").color,
+                 sequenceColor: tslib.__classPrivateFieldGet(this, _ColorSequenceDisplay_sequence, "f").color,
+                 sequenceIndex: tslib.__classPrivateFieldGet(this, _ColorSequenceDisplay_sequenceIndex, "f"),
+                 sequenceStartTime,
+             });
+         }
+     }
+     // Every 10 ms tick we will check if the threshold for flat or scrolling, if so we will try to go to the next stage
+     if ((tslib.__classPrivateFieldGet(this, _ColorSequenceDisplay_instances, "m", _ColorSequenceDisplay_isFlatStage).call(this) &&
+         timeSinceLastColorStageChange >= tslib.__classPrivateFieldGet(this, _ColorSequenceDisplay_sequence, "f").flatDisplayDuration) ||
+         (tslib.__classPrivateFieldGet(this, _ColorSequenceDisplay_instances, "m", _ColorSequenceDisplay_isScrollingStage).call(this) &&
+             timeSinceLastColorStageChange >= tslib.__classPrivateFieldGet(this, _ColorSequenceDisplay_sequence, "f").downscrollDuration)) {
+         tslib.__classPrivateFieldGet(this, _ColorSequenceDisplay_instances, "m", _ColorSequenceDisplay_handleSequenceChange).call(this, { sequenceStartTime, onSequenceChange });
+         timeSinceLastColorStageChange = 0;
+     }
+     const hasRemainingSequences = tslib.__classPrivateFieldGet(this, _ColorSequenceDisplay_sequenceIndex, "f") < tslib.__classPrivateFieldGet(this, _ColorSequenceDisplay_colorSequences, "f").length;
+     // Every 10 ms tick we will update the colors displayed
+     if (hasRemainingSequences) {
+         const heightFraction = timeSinceLastColorStageChange /
+             (tslib.__classPrivateFieldGet(this, _ColorSequenceDisplay_instances, "m", _ColorSequenceDisplay_isScrollingStage).call(this)
+                 ? tslib.__classPrivateFieldGet(this, _ColorSequenceDisplay_sequence, "f").downscrollDuration
+                 : tslib.__classPrivateFieldGet(this, _ColorSequenceDisplay_sequence, "f").flatDisplayDuration);
+         if (ui.isFunction(onSequenceColorChange)) {
+             onSequenceColorChange({
+                 sequenceColor: tslib.__classPrivateFieldGet(this, _ColorSequenceDisplay_sequence, "f").color,
+                 heightFraction,
+                 prevSequenceColor: tslib.__classPrivateFieldGet(this, _ColorSequenceDisplay_previousSequence, "f").color,
+             });
+         }
+         resolve(false);
+     }
+     else {
+         if (ui.isFunction(onSequencesComplete)) {
+             onSequencesComplete();
+         }
+         resolve(true);
+     }
+ }, _ColorSequenceDisplay_handleSequenceChange = function _ColorSequenceDisplay_handleSequenceChange({ sequenceStartTime, onSequenceChange, }) {
+     tslib.__classPrivateFieldSet(this, _ColorSequenceDisplay_previousSequence, tslib.__classPrivateFieldGet(this, _ColorSequenceDisplay_sequence, "f"), "f");
+     if (tslib.__classPrivateFieldGet(this, _ColorSequenceDisplay_instances, "m", _ColorSequenceDisplay_isFlatStage).call(this)) {
+         tslib.__classPrivateFieldSet(this, _ColorSequenceDisplay_sequenceIndex, tslib.__classPrivateFieldGet(this, _ColorSequenceDisplay_sequenceIndex, "f") + 1, "f");
+         tslib.__classPrivateFieldSet(this, _ColorSequenceDisplay_colorStage, ColorStageType.Scrolling, "f");
+     }
+     else if (tslib.__classPrivateFieldGet(this, _ColorSequenceDisplay_instances, "m", _ColorSequenceDisplay_isScrollingStage).call(this)) {
+         const nextColorSequence = tslib.__classPrivateFieldGet(this, _ColorSequenceDisplay_colorSequences, "f")[tslib.__classPrivateFieldGet(this, _ColorSequenceDisplay_sequenceIndex, "f")];
+         if (nextColorSequence.flatDisplayDuration > 0) {
+             tslib.__classPrivateFieldSet(this, _ColorSequenceDisplay_colorStage, ColorStageType.Flat, "f");
+         }
+         else {
+             tslib.__classPrivateFieldSet(this, _ColorSequenceDisplay_sequenceIndex, tslib.__classPrivateFieldGet(this, _ColorSequenceDisplay_sequenceIndex, "f") + 1, "f");
+         }
+     }
+     tslib.__classPrivateFieldSet(this, _ColorSequenceDisplay_sequence, tslib.__classPrivateFieldGet(this, _ColorSequenceDisplay_colorSequences, "f")[tslib.__classPrivateFieldGet(this, _ColorSequenceDisplay_sequenceIndex, "f")], "f");
+     tslib.__classPrivateFieldSet(this, _ColorSequenceDisplay_lastColorStageChangeTimestamp, Date.now(), "f");
+     if (tslib.__classPrivateFieldGet(this, _ColorSequenceDisplay_sequence, "f")) {
+         if (ui.isFunction(onSequenceChange)) {
+             onSequenceChange({
+                 prevSequenceColor: tslib.__classPrivateFieldGet(this, _ColorSequenceDisplay_previousSequence, "f").color,
+                 sequenceColor: tslib.__classPrivateFieldGet(this, _ColorSequenceDisplay_sequence, "f").color,
+                 sequenceIndex: tslib.__classPrivateFieldGet(this, _ColorSequenceDisplay_sequenceIndex, "f"),
+                 sequenceStartTime: sequenceStartTime,
+             });
+         }
+     }
+ };
+
+ const isFaceMovementAndLightChallenge = (value) => {
+     return (value?.Challenge?.Name ===
+         'FaceMovementAndLightChallenge');
+ };
+ const isFaceMovementChallenge = (value) => {
+     return (value?.Challenge?.Name ===
+         'FaceMovementChallenge');
+ };
+ const isFaceMovementAndLightServerChallenge = (value) => {
+     return !!value
+         ?.FaceMovementAndLightChallenge;
+ };
+ const isFaceMovementServerChallenge = (value) => {
+     return !!value
+         ?.FaceMovementChallenge;
+ };
+ const createSessionInfoFromServerSessionInformation = (serverSessionInformation) => {
+     let challenge;
+     if (isFaceMovementAndLightServerChallenge(serverSessionInformation.Challenge)) {
+         challenge = {
+             ...serverSessionInformation.Challenge.FaceMovementAndLightChallenge,
+             Name: FACE_MOVEMENT_AND_LIGHT_CHALLENGE.type,
+         };
+     }
+     else if (isFaceMovementServerChallenge(serverSessionInformation.Challenge)) {
+         challenge = {
+             ...serverSessionInformation.Challenge.FaceMovementChallenge,
+             Name: FACE_MOVEMENT_CHALLENGE.type,
+         };
+     }
+     else {
+         throw new Error('Unsupported challenge type returned from session information.');
+     }
+     if (!challenge.ChallengeConfig ||
+         !challenge.ChallengeConfig.FaceDistanceThreshold ||
+         !challenge.ChallengeConfig.FaceDistanceThresholdMin ||
+         !challenge.ChallengeConfig.OvalHeightWidthRatio) {
+         throw new Error('Challenge config not returned from session information.');
+     }
+     return { ...serverSessionInformation, Challenge: challenge };
+ };
+
+ const createVideoEvent = async (result) => {
+     const { data, type } = result;
+     return {
+         VideoChunk: new Uint8Array(
+         // server expects an empty chunk on 'stopStream' event
+         type === 'streamVideo' ? await data.arrayBuffer() : []),
+         // @ts-expect-error for 'closeCode' event, `data` is an object which is
+         // unexpected by `VideoEvent` but is expected by the streaming service
+         TimestampMillis: type === 'closeCode' ? data : Date.now(),
+     };
+ };
+ const getTrackDimensions = (stream) => {
+     const { height: trackHeight, width: trackWidth } = stream
+         .getVideoTracks()[0]
+         .getSettings();
+     if (ui.isUndefined(trackHeight) || ui.isUndefined(trackWidth)) {
+         throw new Error(`Invalid Track Dimensions. height: ${trackHeight}, width: ${trackWidth} `);
+     }
+     return { trackHeight, trackWidth };
+ };
+ function getBoundingBox({ trackHeight, trackWidth, height, width, top, left, }) {
+     return {
+         Height: height / trackHeight,
+         Width: width / trackWidth,
+         Top: top / trackHeight,
+         Left: left / trackWidth,
+     };
+ }
+ const getFlippedInitialFaceLeft = (trackWidth, faceLeft, faceWidth) => trackWidth - faceLeft - faceWidth;
+ const getInitialFaceBoundingBox = (params) => {
+     const { trackWidth, left, width } = params;
+     return getBoundingBox({
+         ...params,
+         left: getFlippedInitialFaceLeft(trackWidth, left, width),
+     });
+ };
+ const getTargetFaceBoundingBox = (params) => {
+     const { height, width, centerX, centerY } = params;
+     return getBoundingBox({
+         ...params,
+         top: centerY - height / 2,
+         left: centerX - width / 2,
+     });
+ };
+ function createClientSessionInformationEvent({ parsedSessionInformation, clientChallenge, }) {
+     if (isFaceMovementChallenge(parsedSessionInformation)) {
+         return {
+             Challenge: {
+                 FaceMovementChallenge: clientChallenge,
+             },
+         };
+     }
+     if (isFaceMovementAndLightChallenge(parsedSessionInformation)) {
+         return {
+             Challenge: {
+                 FaceMovementAndLightChallenge: clientChallenge,
+             },
+         };
+     }
+     throw new Error('Unable to create ClientSessionInformationEvent');
+ }
+ function createSessionEndEvent({ parsedSessionInformation, challengeId, faceMatchAssociatedParams, ovalAssociatedParams, recordingEndedTimestamp, trackHeight, trackWidth, }) {
+     const { initialFace, ovalDetails } = ovalAssociatedParams;
+     const { startFace, endFace } = faceMatchAssociatedParams;
+     const initialFaceBoundingBox = getInitialFaceBoundingBox({
+         trackHeight,
+         trackWidth,
+         ...initialFace,
+     });
+     const targetFaceBoundingBox = getTargetFaceBoundingBox({
+         trackHeight,
+         trackWidth,
+         ...ovalDetails,
+     });
+     const clientChallenge = {
+         ChallengeId: challengeId,
+         InitialFace: {
+             InitialFaceDetectedTimestamp: initialFace.timestampMs,
+             BoundingBox: initialFaceBoundingBox,
+         },
+         TargetFace: {
+             FaceDetectedInTargetPositionStartTimestamp: startFace.timestampMs,
+             FaceDetectedInTargetPositionEndTimestamp: endFace.timestampMs,
+             BoundingBox: targetFaceBoundingBox,
+         },
+         VideoEndTimestamp: recordingEndedTimestamp,
+     };
+     return createClientSessionInformationEvent({
+         parsedSessionInformation,
+         clientChallenge,
+     });
+ }
+ function createSessionStartEvent({ parsedSessionInformation, challengeId, ovalAssociatedParams, recordingStartedTimestamp, trackHeight, trackWidth, }) {
+     const { initialFace } = ovalAssociatedParams;
+     const initialFaceBoundingBox = getInitialFaceBoundingBox({
+         trackHeight,
+         trackWidth,
+         ...initialFace,
+     });
+     const clientChallenge = {
+         ChallengeId: challengeId,
+         VideoStartTimestamp: recordingStartedTimestamp,
+         InitialFace: {
+             InitialFaceDetectedTimestamp: initialFace.timestampMs,
+             BoundingBox: initialFaceBoundingBox,
+         },
+     };
+     return createClientSessionInformationEvent({
+         parsedSessionInformation,
+         clientChallenge,
+     });
+ }
+ /**
+  * Translates provided sequence color string to an RGB array
+  *
+  * @param {SequenceColorValue} color
+  * @returns {number[]}
+  */
+ const colorToRgb = (color) => {
+     return color
+         .slice(color.indexOf('(') + 1, color.indexOf(')'))
+         .split(',')
+         .map((str) => parseInt(str));
+ };
+ function createColorDisplayEvent({ challengeId, sequenceStartTime, sequenceIndex, sequenceColor, prevSequenceColor, }) {
+     const CurrentColor = { RGB: colorToRgb(sequenceColor) };
+     const PreviousColor = {
+         RGB: colorToRgb(prevSequenceColor),
+     };
+     return {
+         Challenge: {
+             FaceMovementAndLightChallenge: {
+                 ChallengeId: challengeId,
+                 ColorDisplayed: {
+                     CurrentColor,
+                     PreviousColor,
+                     SequenceNumber: sequenceIndex,
+                     CurrentColorStartTimestamp: sequenceStartTime,
+                 },
+             },
+         },
+     };
+ }
+
+ /**
+  * Creates an async generator that reads over the provided stream and yielding stream results
+  *
+  * @param {VideoStream} stream target video stream
+  * @returns {GetRequestStream} async request stream generator
+  */
+ function createRequestStreamGenerator(stream) {
+     const reader = stream.getReader();
+     return {
+         getRequestStream: async function* () {
+             while (true) {
+                 const { done, value } = await reader.read();
+                 if (done) {
+                     return;
+                 }
+                 if (value.type === 'sessionInfo') {
+                     yield { ClientSessionInformationEvent: value.data };
+                 }
+                 else {
+                     // Unless value.type is closeCode we never want to send a 0 size video event as it signals end of stream
+                     if (value.type === 'streamVideo' && value.data.size < 1)
+                         continue;
+                     yield { VideoEvent: await createVideoEvent(value) };
+                 }
+             }
+         },
+     };
+ }
+
+ const VERSION = '3.4.0';

  const BASE_USER_AGENT = `ui-react-liveness/${VERSION}`;
  const getLivenessUserAgent = () => {
@@ -1049,7 +1273,7 @@ class CustomWebSocketFetchHandler {
      }
  }

- const
+ const isValidCredentialsProvider = (credentialsProvider) => typeof credentialsProvider === 'function';
  // the return interface of `fetchAuthSession` includes `credentials` as
  // optional, but `credentials` is always returned. If `fetchAuthSession`
  // is called for an unauthenticated end user, values of `accessKeyId`
@@ -1064,13 +1288,14 @@ const isCredentials = (credentials) => !!(credentials?.accessKeyId && credential
   * @returns {Promise<AwsCredentials | AwsCredentialProvider>} `credentials` object or valid `credentialsProvider` callback
   */
  async function resolveCredentials(credentialsProvider) {
-     const
-
-
-     }
-     if (credentialsProvider && !hasCredentialsProvider) {
+     const hasValidCredentialsProvider = isValidCredentialsProvider(credentialsProvider);
+     // provided `credentialsProvider` is not valid
+     if (credentialsProvider && !hasValidCredentialsProvider) {
          throw new Error('Invalid credentialsProvider');
      }
+     if (hasValidCredentialsProvider) {
+         return credentialsProvider;
+     }
      try {
          const result = (await auth.fetchAuthSession()).credentials;
          if (isCredentials(result)) {
@@ -1084,8 +1309,6 @@ async function resolveCredentials(credentialsProvider) {
      }
  }

- // override aws sdk default value of 60
- const REQUEST_EXPIRY = 299;
  class Signer extends signatureV4.SignatureV4 {
      presign(request, options) {
          return super.presign(request, {
@@ -1099,12 +1322,36 @@ class Signer extends signatureV4.SignatureV4 {
      }
  }

- const
+ const DEFAULT_ATTEMPT_COUNT_TIMEOUT = 300000; // 5 minutes / 300000 ms
+ // Telemetry data is for internal use only and should not be depended upon or used by the customer
+ class TelemetryReporter {
+     static getAttemptCountAndUpdateTimestamp() {
+         const timeSinceLastAttempt = Date.now() - TelemetryReporter.timestamp;
+         if (timeSinceLastAttempt > DEFAULT_ATTEMPT_COUNT_TIMEOUT) {
+             TelemetryReporter.attemptCount = 1;
+         }
+         else {
+             TelemetryReporter.attemptCount += 1;
+         }
+         TelemetryReporter.timestamp = Date.now();
+         return TelemetryReporter.attemptCount;
+     }
+ }
+ TelemetryReporter.attemptCount = 0;
+ TelemetryReporter.timestamp = Date.now();
+ const createTelemetryReporterMiddleware = (attemptCount, preCheckViewEnabled) => (next) => async (args) => {
+     args.request.query['attempt-count'] =
+         attemptCount.toString();
+     args.request.query['precheck-view-enabled'] =
+         preCheckViewEnabled ? '1' : '0';
+     const result = await next(args);
+     return result;
+ };
+
  const CUSTOM_USER_AGENT = `${utils.getAmplifyUserAgent()} ${getLivenessUserAgent()}`;
- async function
-     const credentials = await resolveCredentials(credentialsProvider);
+ async function getStreamingClient({ credentialsProvider, endpointOverride, region, systemClockOffset, }) {
      const clientconfig = {
-         credentials,
+         credentials: await resolveCredentials(credentialsProvider),
          customUserAgent: CUSTOM_USER_AGENT,
          region,
          requestHandler: new CustomWebSocketFetchHandler({
@@ -1113,262 +1360,208 @@ async function createStreamingClient({ credentialsProvider, endpointOverride, re
          signerConstructor: Signer,
          systemClockOffset,
      };
-     if (endpointOverride) {
+     if (ui.isString(endpointOverride)) {
          clientconfig.endpointProvider = () => ({ url: new URL(endpointOverride) });
      }
      return new clientRekognitionstreaming.RekognitionStreamingClient(clientconfig);
  }
-
-
-
-
-
-
-
+ const createCommandInput = ({ requestStream, sessionId, videoWidth, videoHeight, }) => ({
+     ChallengeVersions: queryParameterString,
+     SessionId: sessionId,
+     LivenessRequestStream: requestStream,
+     VideoWidth: videoWidth,
+     VideoHeight: videoHeight,
+ });
+ /**
+  * Initializes an instance of the Rekognition streaming client, returns `getResponseStream`
+  *
+  * @async
+  * @param clientConfig configuration fpr the client
+  * @returns {Promise<{ getResponseStream: GetReponseStream }>}
+  */
+ async function createStreamingClient(clientConfig) {
+     const client = await getStreamingClient(clientConfig);
+     client.middlewareStack.add(createTelemetryReporterMiddleware(clientConfig.attemptCount, clientConfig.preCheckViewEnabled), {
+         step: 'build',
+         name: 'telemetryMiddleware',
+         tags: ['liveness', 'amplify-ui'],
+     });
+     return {
+         async getResponseStream(input) {
+             const command = new clientRekognitionstreaming.StartFaceLivenessSessionCommand(createCommandInput(input));
+             const { LivenessResponseStream } = await client.send(command);
+             return LivenessResponseStream;
+         },
+     };
  }
-
-
+
+ var _StreamRecorder_instances, _StreamRecorder_chunks, _StreamRecorder_recorder, _StreamRecorder_initialRecorder, _StreamRecorder_recordingStarted, _StreamRecorder_firstChunkTimestamp, _StreamRecorder_recorderEndTimestamp, _StreamRecorder_recorderStartTimestamp, _StreamRecorder_recordingStartTimestamp, _StreamRecorder_recorderStopped, _StreamRecorder_videoStream, _StreamRecorder_eventListeners, _StreamRecorder_clearRecordedChunks, _StreamRecorder_createReadableStream, _StreamRecorder_attachHandlers, _StreamRecorder_setupCallbacks, _StreamRecorder_cleanUpEventListeners;
+ class StreamRecorder {
+     constructor(stream) {
+         _StreamRecorder_instances.add(this);
+         _StreamRecorder_chunks.set(this, void 0);
+         _StreamRecorder_recorder.set(this, void 0);
+         _StreamRecorder_initialRecorder.set(this, void 0);
+         _StreamRecorder_recordingStarted.set(this, false);
+         _StreamRecorder_firstChunkTimestamp.set(this, void 0);
+         _StreamRecorder_recorderEndTimestamp.set(this, void 0);
+         _StreamRecorder_recorderStartTimestamp.set(this, void 0);
+         _StreamRecorder_recordingStartTimestamp.set(this, void 0);
+         _StreamRecorder_recorderStopped.set(this, void 0);
+         _StreamRecorder_videoStream.set(this, void 0);
+         _StreamRecorder_eventListeners.set(this, void 0);
+         if (typeof MediaRecorder === 'undefined') {
+             throw Error('MediaRecorder is not supported by this browser');
+         }
+         tslib.__classPrivateFieldSet(this, _StreamRecorder_chunks, [], "f");
+         tslib.__classPrivateFieldSet(this, _StreamRecorder_recorder, new MediaRecorder(stream, { bitsPerSecond: 1000000 }), "f");
+         tslib.__classPrivateFieldSet(this, _StreamRecorder_initialRecorder, tslib.__classPrivateFieldGet(this, _StreamRecorder_recorder, "f"), "f");
+         tslib.__classPrivateFieldSet(this, _StreamRecorder_videoStream, tslib.__classPrivateFieldGet(this, _StreamRecorder_instances, "m", _StreamRecorder_createReadableStream).call(this), "f");
+         tslib.__classPrivateFieldSet(this, _StreamRecorder_eventListeners, {}, "f");
+     }
+     getVideoStream() {
+         return tslib.__classPrivateFieldGet(this, _StreamRecorder_videoStream, "f");
+     }
+     setNewVideoStream(stream) {
+         tslib.__classPrivateFieldGet(this, _StreamRecorder_instances, "m", _StreamRecorder_cleanUpEventListeners).call(this);
+         tslib.__classPrivateFieldSet(this, _StreamRecorder_recorder, new MediaRecorder(stream, { bitsPerSecond: 1000000 }), "f");
+         tslib.__classPrivateFieldGet(this, _StreamRecorder_instances, "m", _StreamRecorder_attachHandlers).call(this, tslib.__classPrivateFieldGet(this, _StreamRecorder_recorder, "f"));
+     }
+     dispatchStreamEvent(event) {
+         const { type } = event;
+         const data = type === 'streamStop' ? undefined : event.data;
+         tslib.__classPrivateFieldGet(this, _StreamRecorder_recorder, "f").dispatchEvent(new MessageEvent(type, { data }));
+     }
+     getRecordingStartTimestamp() {
+         if (ui.isUndefined(tslib.__classPrivateFieldGet(this, _StreamRecorder_recorderStartTimestamp, "f")) ||
+             ui.isUndefined(tslib.__classPrivateFieldGet(this, _StreamRecorder_recordingStartTimestamp, "f"))) {
+             throw new Error('Recording has not started');
+         }
+         /**
+          * This calculation is provided by Science team after doing analysis
+          * of unreliable .onstart() (this.#recorderStartTimestamp) timestamp that is
+          * returned from mediaRecorder.
+          */
+         return Math.round(0.73 * (tslib.__classPrivateFieldGet(this, _StreamRecorder_recorderStartTimestamp, "f") - tslib.__classPrivateFieldGet(this, _StreamRecorder_recordingStartTimestamp, "f")) +
+             tslib.__classPrivateFieldGet(this, _StreamRecorder_recordingStartTimestamp, "f"));
+     }
+     getRecordingEndedTimestamp() {
+         if (ui.isUndefined(tslib.__classPrivateFieldGet(this, _StreamRecorder_recorderEndTimestamp, "f"))) {
+             throw new Error('Recording has not ended');
+         }
+         return tslib.__classPrivateFieldGet(this, _StreamRecorder_recorderEndTimestamp, "f");
+     }
+     startRecording() {
+         tslib.__classPrivateFieldGet(this, _StreamRecorder_instances, "m", _StreamRecorder_clearRecordedChunks).call(this);
+         tslib.__classPrivateFieldSet(this, _StreamRecorder_recordingStartTimestamp, Date.now(), "f");
+         tslib.__classPrivateFieldGet(this, _StreamRecorder_recorder, "f").start(TIME_SLICE);
+     }
+     isRecording() {
+         return tslib.__classPrivateFieldGet(this, _StreamRecorder_recorder, "f").state === 'recording';
+     }
+     getChunksLength() {
+         return tslib.__classPrivateFieldGet(this, _StreamRecorder_chunks, "f").length;
+     }
+     hasRecordingStarted() {
+         return tslib.__classPrivateFieldGet(this, _StreamRecorder_recordingStarted, "f") && tslib.__classPrivateFieldGet(this, _StreamRecorder_firstChunkTimestamp, "f") !== undefined;
+     }
+     async stopRecording() {
+         if (this.isRecording()) {
+             tslib.__classPrivateFieldGet(this, _StreamRecorder_recorder, "f").stop();
+         }
+         return tslib.__classPrivateFieldGet(this, _StreamRecorder_recorderStopped, "f");
+     }
  }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-         return this.responseStream;
-     }
-     startRecordingLivenessVideo() {
-         this.videoRecorder.start(TIME_SLICE);
-     }
-     sendClientInfo(clientInfo) {
-         this.videoRecorder.dispatch(new MessageEvent('clientSesssionInfo', { data: { clientInfo } }));
-     }
-     async stopVideo() {
-         await this.videoRecorder.stop();
-     }
-     dispatchStopVideoEvent() {
-         this.videoRecorder.dispatch(new Event('stopVideo'));
-     }
-     async endStreamWithCode(code) {
-         if (this.videoRecorder.getState() === 'recording') {
-             await this.stopVideo();
-         }
-         this.videoRecorder.dispatch(new MessageEvent('endStreamWithCode', { data: { code } }));
-         return;
-     }
-     async init() {
-         this._client = await createStreamingClient({
-             credentialsProvider: this.credentialProvider,
-             endpointOverride: this.endpointOverride,
-             region: this.region,
-             systemClockOffset: this.systemClockOffset,
-         });
-         this.responseStream = await this.startLivenessVideoConnection();
-     }
-     // Creates a generator from a stream of video chunks and livenessActionDocuments and yields VideoEvent and ClientEvents
-     getAsyncGeneratorFromReadableStream(stream) {
-         // eslint-disable-next-line @typescript-eslint/no-this-alias
-         const current = this;
-         this._reader = stream.getReader();
-         return async function* () {
-             while (true) {
-                 const { done, value } = (await current._reader.read());
-                 if (done) {
-                     return;
-                 }
-                 // Video chunks blobs should be sent as video events
-                 if (value === 'stopVideo') {
-                     // sending an empty video chunk signals that we have ended sending video
-                     yield {
-                         VideoEvent: {
-                             VideoChunk: new Uint8Array([]),
-                             TimestampMillis: Date.now(),
-                         },
-                     };
-                 }
-                 else if (isBlob(value)) {
-                     const buffer = await value.arrayBuffer();
-                     const chunk = new Uint8Array(buffer);
-                     if (chunk.length > 0) {
-                         yield {
-                             VideoEvent: {
-                                 VideoChunk: chunk,
-                                 TimestampMillis: Date.now(),
-                             },
-                         };
-                     }
-                 }
-                 else if (isClientSessionInformationEvent(value)) {
|
|
1210
|
-
yield {
|
|
1211
|
-
ClientSessionInformationEvent: {
|
|
1212
|
-
Challenge: value.Challenge,
|
|
1213
|
-
},
|
|
1214
|
-
};
|
|
1215
|
-
}
|
|
1216
|
-
else if (isEndStreamWithCodeEvent(value)) {
|
|
1217
|
-
yield {
|
|
1218
|
-
VideoEvent: {
|
|
1219
|
-
VideoChunk: new Uint8Array([]),
|
|
1220
|
-
// this is a custom type that does not match LivenessRequestStream.
|
|
1221
|
-
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
|
|
1222
|
-
TimestampMillis: { closeCode: value.code },
|
|
1223
|
-
},
|
|
1224
|
-
};
|
|
1475
|
+
_StreamRecorder_chunks = new WeakMap(), _StreamRecorder_recorder = new WeakMap(), _StreamRecorder_initialRecorder = new WeakMap(), _StreamRecorder_recordingStarted = new WeakMap(), _StreamRecorder_firstChunkTimestamp = new WeakMap(), _StreamRecorder_recorderEndTimestamp = new WeakMap(), _StreamRecorder_recorderStartTimestamp = new WeakMap(), _StreamRecorder_recordingStartTimestamp = new WeakMap(), _StreamRecorder_recorderStopped = new WeakMap(), _StreamRecorder_videoStream = new WeakMap(), _StreamRecorder_eventListeners = new WeakMap(), _StreamRecorder_instances = new WeakSet(), _StreamRecorder_clearRecordedChunks = function _StreamRecorder_clearRecordedChunks() {
|
|
1476
|
+
tslib.__classPrivateFieldSet(this, _StreamRecorder_chunks, [], "f");
|
|
1477
|
+
}, _StreamRecorder_createReadableStream = function _StreamRecorder_createReadableStream() {
|
|
1478
|
+
return new ReadableStream({
|
|
1479
|
+
start: (controller) => {
|
|
1480
|
+
tslib.__classPrivateFieldGet(this, _StreamRecorder_instances, "m", _StreamRecorder_attachHandlers).call(this, tslib.__classPrivateFieldGet(this, _StreamRecorder_recorder, "f"), controller);
|
|
1481
|
+
},
|
|
1482
|
+
});
|
|
1483
|
+
}, _StreamRecorder_attachHandlers = function _StreamRecorder_attachHandlers(recorder, controller) {
|
|
1484
|
+
const onDataAvailableHandler = controller
|
|
1485
|
+
? ({ data }) => {
|
|
1486
|
+
if (data && data.size > 0) {
|
|
1487
|
+
if (tslib.__classPrivateFieldGet(this, _StreamRecorder_chunks, "f").length === 0) {
|
|
1488
|
+
tslib.__classPrivateFieldSet(this, _StreamRecorder_firstChunkTimestamp, Date.now(), "f");
|
|
1225
1489
|
}
|
|
1490
|
+
tslib.__classPrivateFieldGet(this, _StreamRecorder_chunks, "f").push(data);
|
|
1491
|
+
controller.enqueue({ type: 'streamVideo', data });
|
|
1226
1492
|
}
|
|
1493
|
+
}
|
|
1494
|
+
: ({ data }) => {
|
|
1495
|
+
tslib.__classPrivateFieldGet(this, _StreamRecorder_initialRecorder, "f").dispatchEvent(new MessageEvent('dataavailable', { data }));
|
|
1227
1496
|
};
|
|
1228
|
-
|
|
1229
|
-
|
|
1230
|
-
|
|
1231
|
-
|
|
1232
|
-
|
|
1233
|
-
ChallengeVersions: 'FaceMovementAndLightChallenge_1.0.0',
|
|
1234
|
-
SessionId: this.sessionId,
|
|
1235
|
-
LivenessRequestStream: livenessRequestGenerator,
|
|
1236
|
-
VideoWidth: (mediaSettings.width ?? this.videoEl.width).toString(),
|
|
1237
|
-
VideoHeight: (mediaSettings.height ?? this.videoEl.height).toString(),
|
|
1238
|
-
}));
|
|
1239
|
-
return response.LivenessResponseStream;
|
|
1240
|
-
}
|
|
1241
|
-
}
|
|
1242
|
-
|
|
1243
|
-
const TICK_RATE = 10; // ms -- the rate at which we will render/check colors
|
|
1244
|
-
var COLOR_STAGE;
|
|
1245
|
-
(function (COLOR_STAGE) {
|
|
1246
|
-
COLOR_STAGE["SCROLLING"] = "SCROLLING";
|
|
1247
|
-
COLOR_STAGE["FLAT"] = "FLAT";
|
|
1248
|
-
})(COLOR_STAGE || (COLOR_STAGE = {}));
|
|
1249
|
-
class FreshnessColorDisplay {
|
|
1250
|
-
constructor(context, freshnessColorsSequence) {
|
|
1251
|
-
this.context = context;
|
|
1252
|
-
this.freshnessColorsSequence = freshnessColorsSequence;
|
|
1253
|
-
this.isFirstTick = true;
|
|
1254
|
-
}
|
|
1255
|
-
async displayColorTick() {
|
|
1256
|
-
return new Promise((resolve, reject) => {
|
|
1257
|
-
setTimeout(() => {
|
|
1258
|
-
this.displayNextColorTick(resolve, reject);
|
|
1259
|
-
}, Math.min(TICK_RATE));
|
|
1260
|
-
});
|
|
1261
|
-
}
|
|
1262
|
-
init() {
|
|
1263
|
-
this.stageIndex = 0;
|
|
1264
|
-
this.currColorIndex = 0;
|
|
1265
|
-
this.currColorSequence = this.freshnessColorsSequence[0];
|
|
1266
|
-
this.prevColorSequence = this.freshnessColorsSequence[0];
|
|
1267
|
-
this.stage = COLOR_STAGE.FLAT;
|
|
1268
|
-
this.timeLastFlatOrScrollChange = Date.now();
|
|
1269
|
-
this.timeLastFaceMatchChecked = Date.now();
|
|
1270
|
-
}
|
|
1271
|
-
displayNextColorTick(resolve, _) {
|
|
1272
|
-
const { freshnessColorEl } = this.context.freshnessColorAssociatedParams;
|
|
1273
|
-
const { ovalDetails, scaleFactor } = this.context.ovalAssociatedParams;
|
|
1274
|
-
const { videoEl } = this.context.videoAssociatedParams;
|
|
1275
|
-
const tickStartTime = Date.now();
|
|
1276
|
-
// Send a colorStart time only for the first tick of the first color
|
|
1277
|
-
if (this.isFirstTick) {
|
|
1278
|
-
this.init();
|
|
1279
|
-
this.isFirstTick = false;
|
|
1280
|
-
this.sendColorStartTime({
|
|
1281
|
-
tickStartTime: tickStartTime,
|
|
1282
|
-
currColor: this.currColorSequence.color,
|
|
1283
|
-
prevColor: this.currColorSequence.color,
|
|
1284
|
-
currColorIndex: this.stageIndex,
|
|
1285
|
-
});
|
|
1497
|
+
recorder.ondataavailable = onDataAvailableHandler;
|
|
1498
|
+
const onSessionInfoHandler = controller
|
|
1499
|
+
? (e) => {
|
|
1500
|
+
const { data } = e;
|
|
1501
|
+
controller.enqueue({ type: 'sessionInfo', data });
|
|
1286
1502
|
}
|
|
1287
|
-
|
|
1288
|
-
|
|
1289
|
-
|
|
1290
|
-
|
|
1291
|
-
|
|
1292
|
-
|
|
1293
|
-
|
|
1294
|
-
|
|
1295
|
-
this.incrementStageIndex(tickStartTime);
|
|
1296
|
-
timeSinceLastColorChange = 0;
|
|
1297
|
-
}
|
|
1298
|
-
// Every 10 ms tick we will update the colors displayed
|
|
1299
|
-
if (this.currColorIndex < this.freshnessColorsSequence.length) {
|
|
1300
|
-
const heightFraction = timeSinceLastColorChange /
|
|
1301
|
-
(this.stage === COLOR_STAGE.SCROLLING
|
|
1302
|
-
? this.currColorSequence.downscrollDuration
|
|
1303
|
-
: this.currColorSequence.flatDisplayDuration);
|
|
1304
|
-
fillOverlayCanvasFractional({
|
|
1305
|
-
overlayCanvas: freshnessColorEl,
|
|
1306
|
-
prevColor: this.prevColorSequence.color,
|
|
1307
|
-
nextColor: this.currColorSequence.color,
|
|
1308
|
-
videoEl: videoEl,
|
|
1309
|
-
ovalDetails: ovalDetails,
|
|
1310
|
-
heightFraction,
|
|
1311
|
-
scaleFactor: scaleFactor,
|
|
1312
|
-
});
|
|
1313
|
-
resolve(false);
|
|
1503
|
+
: (e) => {
|
|
1504
|
+
const { data } = e;
|
|
1505
|
+
tslib.__classPrivateFieldGet(this, _StreamRecorder_initialRecorder, "f").dispatchEvent(new MessageEvent('sessionInfo', { data }));
|
|
1506
|
+
};
|
|
1507
|
+
recorder.addEventListener('sessionInfo', onSessionInfoHandler);
|
|
1508
|
+
const onStreamStopHandler = controller
|
|
1509
|
+
? () => {
|
|
1510
|
+
controller.enqueue({ type: 'streamStop' });
|
|
1314
1511
|
}
|
|
1315
|
-
|
|
1316
|
-
|
|
1317
|
-
|
|
1318
|
-
|
|
1319
|
-
|
|
1320
|
-
|
|
1321
|
-
|
|
1322
|
-
|
|
1323
|
-
// SCROLL - prev = 1, curr = 2
|
|
1324
|
-
// SCROLL - prev = 2, curr = 3
|
|
1325
|
-
incrementStageIndex(tickStartTime) {
|
|
1326
|
-
this.stageIndex += 1;
|
|
1327
|
-
this.prevColorSequence = this.freshnessColorsSequence[this.currColorIndex];
|
|
1328
|
-
if (this.stage === COLOR_STAGE.FLAT) {
|
|
1329
|
-
this.currColorIndex += 1;
|
|
1330
|
-
this.stage = COLOR_STAGE.SCROLLING;
|
|
1331
|
-
}
|
|
1332
|
-
else if (this.stage === COLOR_STAGE.SCROLLING) {
|
|
1333
|
-
const nextFlatColor = this.freshnessColorsSequence[this.currColorIndex];
|
|
1334
|
-
if (nextFlatColor.flatDisplayDuration > 0) {
|
|
1335
|
-
this.stage = COLOR_STAGE.FLAT;
|
|
1336
|
-
}
|
|
1337
|
-
else {
|
|
1338
|
-
this.stage = COLOR_STAGE.SCROLLING;
|
|
1339
|
-
this.currColorIndex += 1;
|
|
1340
|
-
}
|
|
1512
|
+
: () => {
|
|
1513
|
+
tslib.__classPrivateFieldGet(this, _StreamRecorder_initialRecorder, "f").dispatchEvent(new MessageEvent('streamStop'));
|
|
1514
|
+
};
|
|
1515
|
+
recorder.addEventListener('streamStop', onStreamStopHandler);
|
|
1516
|
+
const onCloseCodeHandler = controller
|
|
1517
|
+
? (e) => {
|
|
1518
|
+
const { data } = e;
|
|
1519
|
+
controller.enqueue({ type: 'closeCode', data });
|
|
1341
1520
|
}
|
|
1342
|
-
|
|
1343
|
-
|
|
1344
|
-
|
|
1345
|
-
|
|
1346
|
-
|
|
1347
|
-
|
|
1348
|
-
|
|
1349
|
-
|
|
1350
|
-
});
|
|
1521
|
+
: (e) => {
|
|
1522
|
+
const { data } = e;
|
|
1523
|
+
tslib.__classPrivateFieldGet(this, _StreamRecorder_initialRecorder, "f").dispatchEvent(new MessageEvent('closeCode', { data }));
|
|
1524
|
+
};
|
|
1525
|
+
recorder.addEventListener('closeCode', onCloseCodeHandler);
|
|
1526
|
+
const onEndStreamHandler = controller
|
|
1527
|
+
? () => {
|
|
1528
|
+
controller.close();
|
|
1351
1529
|
}
|
|
1352
|
-
|
|
1353
|
-
|
|
1354
|
-
|
|
1355
|
-
|
|
1356
|
-
|
|
1357
|
-
|
|
1358
|
-
|
|
1359
|
-
|
|
1360
|
-
|
|
1361
|
-
|
|
1362
|
-
|
|
1363
|
-
|
|
1364
|
-
|
|
1365
|
-
|
|
1366
|
-
|
|
1367
|
-
|
|
1368
|
-
}
|
|
1369
|
-
|
|
1530
|
+
: () => {
|
|
1531
|
+
tslib.__classPrivateFieldGet(this, _StreamRecorder_initialRecorder, "f").dispatchEvent(new MessageEvent('endStream'));
|
|
1532
|
+
};
|
|
1533
|
+
recorder.addEventListener('endStream', onEndStreamHandler);
|
|
1534
|
+
tslib.__classPrivateFieldGet(this, _StreamRecorder_instances, "m", _StreamRecorder_setupCallbacks).call(this);
|
|
1535
|
+
tslib.__classPrivateFieldSet(this, _StreamRecorder_eventListeners, {
|
|
1536
|
+
endStream: onEndStreamHandler,
|
|
1537
|
+
closeCode: onCloseCodeHandler,
|
|
1538
|
+
streamStop: onStreamStopHandler,
|
|
1539
|
+
sessionInfo: onSessionInfoHandler,
|
|
1540
|
+
dataavailable: onDataAvailableHandler,
|
|
1541
|
+
}, "f");
|
|
1542
|
+
}, _StreamRecorder_setupCallbacks = function _StreamRecorder_setupCallbacks() {
|
|
1543
|
+
tslib.__classPrivateFieldGet(this, _StreamRecorder_recorder, "f").onstart = () => {
|
|
1544
|
+
tslib.__classPrivateFieldSet(this, _StreamRecorder_recordingStarted, true, "f");
|
|
1545
|
+
tslib.__classPrivateFieldSet(this, _StreamRecorder_recorderStartTimestamp, Date.now(), "f");
|
|
1546
|
+
};
|
|
1547
|
+
tslib.__classPrivateFieldSet(this, _StreamRecorder_recorderStopped, new Promise((resolve) => {
|
|
1548
|
+
tslib.__classPrivateFieldGet(this, _StreamRecorder_recorder, "f").onstop = () => {
|
|
1549
|
+
tslib.__classPrivateFieldSet(this, _StreamRecorder_recorderEndTimestamp, Date.now(), "f");
|
|
1550
|
+
resolve();
|
|
1551
|
+
};
|
|
1552
|
+
}), "f");
|
|
1553
|
+
tslib.__classPrivateFieldGet(this, _StreamRecorder_recorder, "f").onerror = () => {
|
|
1554
|
+
this.stopRecording();
|
|
1555
|
+
};
|
|
1556
|
+
}, _StreamRecorder_cleanUpEventListeners = function _StreamRecorder_cleanUpEventListeners() {
|
|
1557
|
+
const eventNames = Object.keys(tslib.__classPrivateFieldGet(this, _StreamRecorder_eventListeners, "f"));
|
|
1558
|
+
eventNames.forEach((name) => {
|
|
1559
|
+
tslib.__classPrivateFieldGet(this, _StreamRecorder_recorder, "f").removeEventListener(name, tslib.__classPrivateFieldGet(this, _StreamRecorder_eventListeners, "f")[name]);
|
|
1560
|
+
});
|
|
1561
|
+
tslib.__classPrivateFieldSet(this, _StreamRecorder_eventListeners, {}, "f");
|
|
1562
|
+
};
|
|
1370
1563
|
|
|
1371
|
-
const
|
|
1564
|
+
const isServerSessionInformationEvent = (value) => {
|
|
1372
1565
|
return !!value
|
|
1373
1566
|
?.ServerSessionInformationEvent;
|
|
1374
1567
|
};
|
|
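For orientation: the `StreamRecorder` added above replaces the old `videoRecorder`-based provider by bridging a `MediaRecorder` into a `ReadableStream` of typed events (`streamVideo`, `sessionInfo`, `streamStop`, `closeCode`, `endStream`), so recorded chunks and client events travel through a single queue. A minimal sketch of that bridge pattern, independent of the Amplify internals (the 1000 ms timeslice is an assumption; the real `TIME_SLICE` lives in `constants.mjs`):

```js
// Sketch: expose MediaRecorder output as a ReadableStream of typed events.
// Assumes `stream` is a MediaStream (e.g. from getUserMedia).
function recorderToReadableStream(stream, timeSlice = 1000) {
  const recorder = new MediaRecorder(stream, { bitsPerSecond: 1000000 });
  const readable = new ReadableStream({
    start(controller) {
      // Every recorded chunk is enqueued as a 'streamVideo' event.
      recorder.ondataavailable = ({ data }) => {
        if (data && data.size > 0) {
          controller.enqueue({ type: 'streamVideo', data });
        }
      };
      // Closing the queue ends downstream `for await` consumers.
      recorder.onstop = () => controller.close();
    },
  });
  recorder.start(timeSlice);
  return { recorder, readable };
}
```

Note also `getRecordingStartTimestamp()` above: it interpolates between the `start()` call time and the (unreliable) `onstart` time. For example, with `recordingStartTimestamp = 1000` and `recorderStartTimestamp = 1100`, it reports `Math.round(0.73 * 100 + 1000) = 1073`.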
@@ -1421,11 +1614,11 @@ const responseStreamActor = async (callback) => {
  try {
  const stream = await responseStream;
  for await (const event of stream) {
- if (
+ if (isServerSessionInformationEvent(event)) {
  callback({
  type: 'SET_SESSION_INFO',
  data: {
-
+ serverSessionInformation: event.ServerSessionInformationEvent.SessionInformation,
  },
  });
  }
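The hunk above swaps an inline check for the new `isServerSessionInformationEvent` type guard when draining the response stream. The consumption pattern, reduced to its essentials (event shape taken from the guard and callback above):

```js
// Sketch: drain an async-iterable response stream, reacting to one event type.
const isServerSessionInformationEvent = (value) =>
  !!value?.ServerSessionInformationEvent;

async function drainResponseStream(responseStream, callback) {
  for await (const event of responseStream) {
    if (isServerSessionInformationEvent(event)) {
      callback({
        type: 'SET_SESSION_INFO',
        data: {
          serverSessionInformation:
            event.ServerSessionInformationEvent.SessionInformation,
        },
      });
    }
  }
}
```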
@@ -1477,21 +1670,20 @@ const responseStreamActor = async (callback) => {
  }
  }
  };
- function getLastSelectedCameraId() {
- return localStorage.getItem(CAMERA_ID_KEY);
- }
  function setLastSelectedCameraId(deviceId) {
  localStorage.setItem(CAMERA_ID_KEY, deviceId);
  }
  const livenessMachine = xstate.createMachine({
  id: 'livenessMachine',
- initial: '
+ initial: 'initCamera',
  predictableActionArguments: true,
  context: {
- challengeId:
+ challengeId: uuid.v4(),
+ errorMessage: undefined,
  maxFailedAttempts: 0,
  failedAttempts: 0,
  componentProps: undefined,
+ parsedSessionInformation: undefined,
  serverSessionInformation: undefined,
  videoAssociatedParams: {
  videoConstraints: STATIC_VIDEO_CONSTRAINTS,
@@ -1516,8 +1708,8 @@ const livenessMachine = xstate.createMachine({
  freshnessColorEl: undefined,
  freshnessColors: [],
  freshnessColorsComplete: false,
- freshnessColorDisplay: undefined,
  },
+ colorSequenceDisplay: undefined,
  errorState: undefined,
  livenessStreamProvider: undefined,
  responseStreamActorRef: undefined,
@@ -1532,71 +1724,82 @@ const livenessMachine = xstate.createMachine({
  target: 'retryableTimeout',
  actions: 'updateErrorStateForTimeout',
  },
- SET_SESSION_INFO: {
-
-
- },
- DISCONNECT_EVENT: {
- internal: true,
- actions: 'updateShouldDisconnect',
- },
- SET_DOM_AND_CAMERA_DETAILS: {
- actions: 'setDOMAndCameraDetails',
- },
+ SET_SESSION_INFO: { internal: true, actions: 'updateSessionInfo' },
+ DISCONNECT_EVENT: { internal: true, actions: 'updateShouldDisconnect' },
+ SET_DOM_AND_CAMERA_DETAILS: { actions: 'setDOMAndCameraDetails' },
  UPDATE_DEVICE_AND_STREAM: {
  actions: 'updateDeviceAndStream',
+ target: 'start',
  },
- SERVER_ERROR: {
- target: 'error',
- actions: 'updateErrorStateForServer',
- },
+ SERVER_ERROR: { target: 'error', actions: 'updateErrorStateForServer' },
  CONNECTION_TIMEOUT: {
  target: 'error',
  actions: 'updateErrorStateForConnectionTimeout',
  },
- RUNTIME_ERROR: {
- target: 'error',
- actions: 'updateErrorStateForRuntime',
- },
+ RUNTIME_ERROR: { target: 'error', actions: 'updateErrorStateForRuntime' },
  MOBILE_LANDSCAPE_WARNING: {
  target: 'mobileLandscapeWarning',
  actions: 'updateErrorStateForServer',
  },
  },
  states: {
-
-
-
-
-
- target: '
- actions: 'updateVideoMediaStream',
+ initCamera: {
+ initial: 'cameraCheck',
+ on: {
+ SET_DOM_AND_CAMERA_DETAILS: {
+ actions: 'setDOMAndCameraDetails',
+ target: '#livenessMachine.initWebsocket',
  },
-
-
+ },
+ states: {
+ cameraCheck: {
+ entry: 'resetErrorState',
+ invoke: {
+ src: 'checkVirtualCameraAndGetStream',
+ onDone: {
+ target: 'waitForDOMAndCameraDetails',
+ actions: 'updateVideoMediaStream',
+ },
+ onError: { target: '#livenessMachine.permissionDenied' },
+ },
  },
+ waitForDOMAndCameraDetails: {},
  },
  },
-
-
-
-
-
+ initWebsocket: {
+ entry: () => { },
+ initial: 'initializeLivenessStream',
+ states: {
+ initializeLivenessStream: {
+ invoke: {
+ src: 'openLivenessStreamConnection',
+ onDone: {
+ target: 'waitForSessionInfo',
+ actions: [
+ 'updateLivenessStreamProvider',
+ 'spawnResponseStreamActor',
+ ],
+ },
+ },
+ },
+ waitForSessionInfo: {
+ entry: () => { },
+ after: {
+ 0: {
+ target: '#livenessMachine.start',
+ cond: 'hasParsedSessionInfo',
+ },
+ 100: { target: 'waitForSessionInfo' },
+ },
  },
- 10: { target: 'waitForDOMAndCameraDetails' },
  },
  },
  start: {
- entry: ['
+ entry: ['initializeFaceDetector', () => { }],
  always: [
- {
- target: 'detectFaceBeforeStart',
- cond: 'shouldSkipStartScreen',
- },
+ { target: 'detectFaceBeforeStart', cond: 'shouldSkipStartScreen' },
  ],
- on: {
- BEGIN: 'detectFaceBeforeStart',
- },
+ on: { BEGIN: 'detectFaceBeforeStart' },
  },
  detectFaceBeforeStart: {
  invoke: {
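The restructured machine above front-loads setup into two compound states: `initCamera` (camera check, then wait for DOM details) and `initWebsocket` (open the stream, then poll until the session information has been parsed). The `waitForSessionInfo` polling uses XState v4's delayed-transition idiom, where re-targeting the same state restarts its `after` timers. A self-contained sketch of just that idiom:

```js
// Sketch: XState v4 polling, re-enter a state every 100 ms until a guard passes.
const { createMachine, interpret, assign } = require('xstate');

const pollingMachine = createMachine({
  id: 'poller',
  predictableActionArguments: true,
  initial: 'waitForSessionInfo',
  context: { parsedSessionInformation: undefined },
  on: {
    SET_SESSION_INFO: {
      actions: assign({ parsedSessionInformation: (_, event) => event.data }),
    },
  },
  states: {
    waitForSessionInfo: {
      after: {
        // checked immediately on every (re-)entry
        0: { target: 'ready', cond: (ctx) => ctx.parsedSessionInformation !== undefined },
        // otherwise loop back to the same state, which restarts both timers
        100: { target: 'waitForSessionInfo' },
      },
    },
    ready: { type: 'final' },
  },
});

const service = interpret(pollingMachine).start();
service.send({ type: 'SET_SESSION_INFO', data: { Challenge: {} } });
```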
@@ -1628,38 +1831,12 @@ const livenessMachine = xstate.createMachine({
  checkFaceDistanceBeforeRecording: {
  after: {
  0: {
- target: '
+ target: 'recording',
  cond: 'hasEnoughFaceDistanceBeforeRecording',
  },
  100: { target: 'detectFaceDistanceBeforeRecording' },
  },
  },
- initializeLivenessStream: {
- invoke: {
- src: 'openLivenessStreamConnection',
- onDone: {
- target: 'notRecording',
- actions: [
- 'updateLivenessStreamProvider',
- 'spawnResponseStreamActor',
- ],
- },
- },
- },
- notRecording: {
- initial: 'waitForSessionInfo',
- states: {
- waitForSessionInfo: {
- after: {
- 0: {
- target: '#livenessMachine.recording',
- cond: 'hasServerSessionInfo',
- },
- 100: { target: 'waitForSessionInfo' },
- },
- },
- },
- },
  recording: {
  entry: [
  'clearErrorState',
@@ -1686,10 +1863,7 @@ const livenessMachine = xstate.createMachine({
  },
  checkFaceDetected: {
  after: {
- 0: {
- target: 'cancelOvalDrawingTimeout',
- cond: 'hasSingleFace',
- },
+ 0: { target: 'cancelOvalDrawingTimeout', cond: 'hasSingleFace' },
  100: { target: 'ovalDrawing' },
  },
  },
@@ -1698,18 +1872,15 @@ const livenessMachine = xstate.createMachine({
  'cancelOvalDrawingTimeout',
  'sendTimeoutAfterRecordingDelay',
  ],
- after: {
- 0: {
- target: 'checkRecordingStarted',
- },
- },
+ after: { 0: { target: 'checkRecordingStarted' } },
  },
  checkRecordingStarted: {
+ entry: () => { },
  after: {
  0: {
  target: 'ovalMatching',
  cond: 'hasRecordingStarted',
- actions: '
+ actions: 'updateRecordingStartTimestamp',
  },
  100: { target: 'checkRecordingStarted' },
  },
@@ -1730,36 +1901,41 @@ const livenessMachine = xstate.createMachine({
  // for one second to show "Hold still" text before moving to `flashFreshnessColors`.
  // If not, move back to ovalMatching and re-evaluate match state
  checkMatch: {
+ entry: () => { },
  after: {
  0: {
- target: '
+ target: 'handleChallenge',
  cond: 'hasFaceMatchedInOval',
  actions: [
  'setFaceMatchTimeAndStartFace',
  'updateEndFaceMatch',
- '
+ 'setColorDisplay',
  'cancelOvalMatchTimeout',
  'cancelOvalDrawingTimeout',
  ],
  },
- 1: {
- target: 'ovalMatching',
- },
+ 1: { target: 'ovalMatching' },
  },
  },
+ handleChallenge: {
+ entry: () => { },
+ always: [
+ {
+ target: 'delayBeforeFlash',
+ cond: 'isFaceMovementAndLightChallenge',
+ },
+ { target: 'success', cond: 'isFaceMovementChallenge' },
+ ],
+ },
  delayBeforeFlash: {
-
-
- },
+ entry: () => { },
+ after: { 1000: 'flashFreshnessColors' },
  },
  flashFreshnessColors: {
  invoke: {
  src: 'flashColors',
  onDone: [
- {
- target: 'success',
- cond: 'hasFreshnessColorShown',
- },
+ { target: 'success', cond: 'hasFreshnessColorShown' },
  {
  target: 'flashFreshnessColors',
  actions: 'updateFreshnessDetails',
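The new `handleChallenge` state above is where the 3.4.0 multi-challenge support becomes visible: transient `always` transitions test the guards in order, so a face-movement-and-light session proceeds to the color flash while a face-movement-only session skips straight to `success`. Schematically (the literal challenge `Name` strings below are placeholders; the real values sit behind `FACE_MOVEMENT_CHALLENGE.type` and `FACE_MOVEMENT_AND_LIGHT_CHALLENGE.type` in `constants.mjs` and are not shown in this diff):

```js
// Sketch: eventless ("always") transitions take the first transition whose guard passes.
const handleChallengeState = {
  always: [
    { target: 'delayBeforeFlash', cond: 'isFaceMovementAndLightChallenge' },
    { target: 'success', cond: 'isFaceMovementChallenge' },
  ],
};

const guards = {
  // Challenge names are assumed stand-ins for the constants used in the bundle.
  isFaceMovementAndLightChallenge: (context) =>
    context.parsedSessionInformation?.Challenge?.Name === 'FaceMovementAndLightChallenge',
  isFaceMovementChallenge: (context) =>
    context.parsedSessionInformation?.Challenge?.Name === 'FaceMovementChallenge',
};
```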
@@ -1767,10 +1943,7 @@ const livenessMachine = xstate.createMachine({
  ],
  },
  },
- success: {
- entry: 'stopRecording',
- type: 'final',
- },
+ success: { entry: 'stopRecording', type: 'final' },
  },
  onDone: 'uploading',
  },
@@ -1789,11 +1962,9 @@ const livenessMachine = xstate.createMachine({
  },
  },
  waitForDisconnectEvent: {
+ entry: () => { },
  after: {
- 0: {
- target: 'getLivenessResult',
- cond: 'getShouldDisconnect',
- },
+ 0: { cond: 'getShouldDisconnect', target: 'getLivenessResult' },
  100: { target: 'waitForDisconnectEvent' },
  },
  },
@@ -1812,16 +1983,13 @@ const livenessMachine = xstate.createMachine({
  retryableTimeout: {
  entry: 'updateFailedAttempts',
  always: [
- {
-
- cond: 'shouldTimeoutOnFailedAttempts',
- },
- { target: 'notRecording' },
+ { target: 'timeout', cond: 'shouldTimeoutOnFailedAttempts' },
+ { target: 'start' },
  ],
  },
  permissionDenied: {
  entry: 'callUserPermissionDeniedCallback',
- on: { RETRY_CAMERA_CHECK: '
+ on: { RETRY_CAMERA_CHECK: 'initCamera' },
  },
  mobileLandscapeWarning: {
  entry: 'callMobileLandscapeWarningCallback',
@@ -1842,7 +2010,7 @@ const livenessMachine = xstate.createMachine({
  },
  userCancel: {
  entry: ['cleanUpResources', 'callUserCancelCallback', 'resetContext'],
- always: { target: '
+ always: { target: 'initCamera' },
  },
  },
  }, {
@@ -1892,6 +2060,7 @@ const livenessMachine = xstate.createMachine({
  updateDeviceAndStream: xstate.assign({
  videoAssociatedParams: (context, event) => {
  setLastSelectedCameraId(event.data?.newDeviceId);
+ context.livenessStreamProvider?.setNewVideoStream(event.data?.newStream);
  return {
  ...context.videoAssociatedParams,
  selectedDeviceId: event.data
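`updateDeviceAndStream` above now also hands the newly selected stream to the recorder, which is what makes camera switching work mid-session: `setNewVideoStream` (defined earlier) removes the old listeners and attaches a fresh `MediaRecorder` while consumers keep reading the same `ReadableStream`. A sketch of the calling side (the `switchCamera` helper name is hypothetical):

```js
// Sketch: swap capture devices without tearing down the streaming pipeline.
// `streamRecorder` stands in for context.livenessStreamProvider (a StreamRecorder).
async function switchCamera(streamRecorder, newDeviceId) {
  const newStream = await navigator.mediaDevices.getUserMedia({
    video: { deviceId: { exact: newDeviceId } },
    audio: false,
  });
  // Detaches old handlers and points a new MediaRecorder at the same event queue.
  streamRecorder.setNewVideoStream(newStream);
  return newStream;
}
```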
@@ -1901,62 +2070,32 @@ const livenessMachine = xstate.createMachine({
  };
  },
  }),
-
- const { canvasEl, videoEl, videoMediaStream } = context.videoAssociatedParams;
- drawStaticOval(canvasEl, videoEl, videoMediaStream);
- },
- updateRecordingStartTimestampMs: xstate.assign({
+ updateRecordingStartTimestamp: xstate.assign({
  videoAssociatedParams: (context) => {
- const { challengeId,
- const { recordingStartApiTimestamp, recorderStartTimestamp } = livenessStreamProvider.videoRecorder;
+ const { challengeId, ovalAssociatedParams, videoAssociatedParams, livenessStreamProvider, parsedSessionInformation, } = context;
  const { videoMediaStream } = videoAssociatedParams;
- const
-
-
-
-
-
-
-
-
- .getTracks()[0]
- .getSettings();
- const flippedInitialFaceLeft = width - initialFace.left - initialFace.width;
- context.livenessStreamProvider.sendClientInfo({
- Challenge: {
- FaceMovementAndLightChallenge: {
- ChallengeId: challengeId,
- VideoStartTimestamp: timestamp,
- InitialFace: {
- InitialFaceDetectedTimestamp: initialFace.timestampMs,
- BoundingBox: getBoundingBox({
- deviceHeight: height,
- deviceWidth: width,
- height: initialFace.height,
- width: initialFace.width,
- top: initialFace.top,
- left: flippedInitialFaceLeft,
- }),
- },
- },
- },
+ const recordingStartedTimestamp = livenessStreamProvider.getRecordingStartTimestamp();
+ context.livenessStreamProvider.dispatchStreamEvent({
+ type: 'sessionInfo',
+ data: createSessionStartEvent({
+ parsedSessionInformation: parsedSessionInformation,
+ ...getTrackDimensions(videoMediaStream),
+ challengeId: challengeId,
+ ovalAssociatedParams: ovalAssociatedParams,
+ recordingStartedTimestamp,
+ }),
  });
  return {
  ...context.videoAssociatedParams,
-
+ recordingStartedTimestamp,
  };
  },
  }),
  startRecording: xstate.assign({
  videoAssociatedParams: (context) => {
- if (
-
-
- if (context.livenessStreamProvider.videoRecorder &&
- context.livenessStreamProvider.videoRecorder.getState() !==
- 'recording') {
- context.livenessStreamProvider.startRecordingLivenessVideo();
+ if (context.livenessStreamProvider &&
+ !context.livenessStreamProvider.isRecording()) {
+ context.livenessStreamProvider.startRecording();
  }
  return { ...context.videoAssociatedParams };
  },
@@ -2036,8 +2175,9 @@ const livenessMachine = xstate.createMachine({
  }),
  clearErrorState: xstate.assign({ errorState: (_) => undefined }),
  updateSessionInfo: xstate.assign({
-
-
+ parsedSessionInformation: (_, event) => {
+ const { serverSessionInformation } = event.data;
+ return createSessionInfoFromServerSessionInformation(serverSessionInformation);
  },
  }),
  updateShouldDisconnect: xstate.assign({ shouldDisconnect: () => true }),
@@ -2050,47 +2190,26 @@ const livenessMachine = xstate.createMachine({
  };
  },
  }),
-
-
- const { serverSessionInformation } = context;
- const freshnessColors = getColorsSequencesFromSessionInformation(serverSessionInformation);
- const freshnessColorDisplay = new FreshnessColorDisplay(context, freshnessColors);
- return {
- ...context.freshnessColorAssociatedParams,
- freshnessColorDisplay,
- };
- },
+ setColorDisplay: xstate.assign({
+ colorSequenceDisplay: ({ parsedSessionInformation }) => new ColorSequenceDisplay(getColorsSequencesFromSessionInformation(parsedSessionInformation)),
  }),
  // timeouts
  sendTimeoutAfterOvalDrawingDelay: xstate.actions.send({
  type: 'RUNTIME_ERROR',
- data: {
-
- },
- }, {
- delay: 5000,
- id: 'ovalDrawingTimeout',
- }),
+ data: { message: 'Client failed to draw oval.' },
+ }, { delay: 5000, id: 'ovalDrawingTimeout' }),
  cancelOvalDrawingTimeout: xstate.actions.cancel('ovalDrawingTimeout'),
  sendTimeoutAfterRecordingDelay: xstate.actions.send({
  type: 'RUNTIME_ERROR',
- data: {
-
- },
- }, {
- delay: 5000,
- id: 'recordingTimeout',
- }),
+ data: { message: 'Client failed to start recording.' },
+ }, { delay: 5000, id: 'recordingTimeout' }),
  cancelRecordingTimeout: xstate.actions.cancel('recordingTimeout'),
  sendTimeoutAfterOvalMatchDelay: xstate.actions.send({
  type: 'TIMEOUT',
- data: {
- message: 'Client timed out waiting for face to match oval.',
- },
+ data: { message: 'Client timed out waiting for face to match oval.' },
  }, {
  delay: (context) => {
- return (context.
- ?.FaceMovementAndLightChallenge?.ChallengeConfig
+ return (context.parsedSessionInformation.Challenge.ChallengeConfig
  ?.OvalFitTimeout ?? DEFAULT_FACE_FIT_TIMEOUT);
  },
  id: 'ovalMatchTimeout',
@@ -2143,21 +2262,26 @@ const livenessMachine = xstate.createMachine({
  if (freshnessColorEl) {
  freshnessColorEl.style.display = 'none';
  }
- let
+ let closeCode = WS_CLOSURE_CODE.DEFAULT_ERROR_CODE;
  if (context.errorState === LivenessErrorState.TIMEOUT) {
-
+ closeCode = WS_CLOSURE_CODE.FACE_FIT_TIMEOUT;
  }
  else if (context.errorState === LivenessErrorState.RUNTIME_ERROR) {
-
+ closeCode = WS_CLOSURE_CODE.RUNTIME_ERROR;
  }
  else if (context.errorState === LivenessErrorState.FACE_DISTANCE_ERROR ||
  context.errorState === LivenessErrorState.MULTIPLE_FACES_ERROR) {
-
+ closeCode = WS_CLOSURE_CODE.USER_ERROR_DURING_CONNECTION;
  }
  else if (context.errorState === undefined) {
-
+ closeCode = WS_CLOSURE_CODE.USER_CANCEL;
  }
- context.livenessStreamProvider?.
+ context.livenessStreamProvider?.stopRecording().then(() => {
+ context.livenessStreamProvider?.dispatchStreamEvent({
+ type: 'closeCode',
+ data: { closeCode },
+ });
+ });
  },
  freezeStream: (context) => {
  const { videoMediaStream, videoEl } = context.videoAssociatedParams;
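Teardown above now runs through the recorder: stop recording first, then dispatch a `closeCode` event derived from the machine's error state (the old code called `endStreamWithCode` on the provider instead). The selection logic reduces to a lookup; the numeric `WS_CLOSURE_CODE` values are defined in `constants.mjs` and are not visible in this hunk:

```js
// Sketch: map the machine's error state to a WebSocket closure code.
// WS_CLOSURE_CODE is passed in because its numeric values are not shown in this diff.
function closeCodeForErrorState(errorState, WS_CLOSURE_CODE) {
  if (errorState === 'TIMEOUT') return WS_CLOSURE_CODE.FACE_FIT_TIMEOUT;
  if (errorState === 'RUNTIME_ERROR') return WS_CLOSURE_CODE.RUNTIME_ERROR;
  if (errorState === 'FACE_DISTANCE_ERROR' || errorState === 'MULTIPLE_FACES_ERROR') {
    return WS_CLOSURE_CODE.USER_ERROR_DURING_CONNECTION;
  }
  if (errorState === undefined) return WS_CLOSURE_CODE.USER_CANCEL;
  return WS_CLOSURE_CODE.DEFAULT_ERROR_CODE;
}
```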
@@ -2173,15 +2297,13 @@ const livenessMachine = xstate.createMachine({
  videoEl.pause();
  },
  resetContext: xstate.assign({
- challengeId:
+ challengeId: uuid.v4(),
  maxFailedAttempts: 0,
  failedAttempts: 0,
  componentProps: (context) => context.componentProps,
-
+ parsedSessionInformation: (_) => undefined,
  videoAssociatedParams: (_) => {
- return {
- videoConstraints: STATIC_VIDEO_CONSTRAINTS,
- };
+ return { videoConstraints: STATIC_VIDEO_CONSTRAINTS };
  },
  ovalAssociatedParams: (_) => undefined,
  errorState: (_) => undefined,
@@ -2212,21 +2334,30 @@ const livenessMachine = xstate.createMachine({
  hasNotEnoughFaceDistanceBeforeRecording: (context) => {
  return !context.isFaceFarEnoughBeforeRecording;
  },
- hasFreshnessColorShown: (context) =>
-
-
+ hasFreshnessColorShown: (context) => {
+ return context.freshnessColorAssociatedParams.freshnessColorsComplete;
+ },
+ hasParsedSessionInfo: (context) => {
+ return context.parsedSessionInformation !== undefined;
  },
  hasDOMAndCameraDetails: (context) => {
  return (context.videoAssociatedParams.videoEl !== undefined &&
  context.videoAssociatedParams.canvasEl !== undefined &&
  context.freshnessColorAssociatedParams.freshnessColorEl !== undefined);
  },
+ isFaceMovementChallenge: (context) => {
+ return (context.parsedSessionInformation?.Challenge?.Name ===
+ FACE_MOVEMENT_CHALLENGE.type);
+ },
+ isFaceMovementAndLightChallenge: (context) => {
+ return (context.parsedSessionInformation?.Challenge?.Name ===
+ FACE_MOVEMENT_AND_LIGHT_CHALLENGE.type);
+ },
  getShouldDisconnect: (context) => {
  return !!context.shouldDisconnect;
  },
  hasRecordingStarted: (context) => {
- return
- undefined);
+ return context.livenessStreamProvider.hasRecordingStarted();
  },
  shouldSkipStartScreen: (context) => {
  return !!context.componentProps?.disableStartScreen;
@@ -2236,12 +2367,8 @@ const livenessMachine = xstate.createMachine({
  async checkVirtualCameraAndGetStream(context) {
  const { videoConstraints } = context.videoAssociatedParams;
  // Get initial stream to enumerate devices with non-empty labels
- const existingDeviceId = getLastSelectedCameraId();
  const initialStream = await navigator.mediaDevices.getUserMedia({
- video: {
- ...videoConstraints,
- ...(existingDeviceId ? { deviceId: existingDeviceId } : {}),
- },
+ video: { ...videoConstraints },
  audio: false,
  });
  const devices = await navigator.mediaDevices.enumerateDevices();
@@ -2270,10 +2397,7 @@ const livenessMachine = xstate.createMachine({
  let realVideoDeviceStream = initialStream;
  if (!isInitialStreamFromRealDevice) {
  realVideoDeviceStream = await navigator.mediaDevices.getUserMedia({
- video: {
- ...videoConstraints,
- deviceId: { exact: deviceId },
- },
+ video: { ...videoConstraints, deviceId: { exact: deviceId } },
  audio: false,
  });
  }
@@ -2286,18 +2410,25 @@ const livenessMachine = xstate.createMachine({
  },
  // eslint-disable-next-line @typescript-eslint/require-await
  async openLivenessStreamConnection(context) {
- const { config } = context.componentProps;
+ const { config, disableStartScreen } = context.componentProps;
  const { credentialProvider, endpointOverride, systemClockOffset } = config;
- const
-
+ const { videoHeight, videoWidth } = context.videoAssociatedParams.videoEl;
+ const livenessStreamProvider = new StreamRecorder(context.videoAssociatedParams.videoMediaStream);
+ const requestStream = createRequestStreamGenerator(livenessStreamProvider.getVideoStream()).getRequestStream();
+ const { getResponseStream } = await createStreamingClient({
+ credentialsProvider: credentialProvider,
+ endpointOverride,
  region: context.componentProps.region,
+ attemptCount: TelemetryReporter.getAttemptCountAndUpdateTimestamp(),
+ preCheckViewEnabled: !disableStartScreen,
  systemClockOffset,
- stream: context.videoAssociatedParams.videoMediaStream,
- videoEl: context.videoAssociatedParams.videoEl,
- credentialProvider,
- endpointOverride,
  });
- responseStream =
+ responseStream = getResponseStream({
+ requestStream,
+ sessionId: context.componentProps.sessionId,
+ videoHeight: videoHeight.toString(),
+ videoWidth: videoWidth.toString(),
+ });
  return { livenessStreamProvider };
  },
  async detectFace(context) {
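The rewritten `openLivenessStreamConnection` above shows the new data path end to end: a `StreamRecorder` wraps the camera stream, `createRequestStreamGenerator` turns its readable stream into the websocket request stream, and the streaming client hands back `getResponseStream`, which is called with the session id and video dimensions. Compressed into one function under stated assumptions (names as they appear in the bundle above, assumed in scope; signatures simplified; the `attemptCount` and `preCheckViewEnabled` telemetry fields are omitted):

```js
// Sketch of the new connection wiring, with simplified parameters.
async function openConnection({ videoMediaStream, videoEl, config, region, sessionId }) {
  const recorder = new StreamRecorder(videoMediaStream);
  const requestStream =
    createRequestStreamGenerator(recorder.getVideoStream()).getRequestStream();
  const { getResponseStream } = await createStreamingClient({
    credentialsProvider: config.credentialProvider,
    endpointOverride: config.endpointOverride,
    systemClockOffset: config.systemClockOffset,
    region,
  });
  const responseStream = getResponseStream({
    requestStream,
    sessionId,
    videoWidth: videoEl.videoWidth.toString(),
    videoHeight: videoEl.videoHeight.toString(),
  });
  return { recorder, responseStream };
}
```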
@@ -2316,54 +2447,34 @@ const livenessMachine = xstate.createMachine({
  return { faceMatchState };
  },
  async detectFaceDistance(context) {
- const { isFaceFarEnoughBeforeRecording: faceDistanceCheckBeforeRecording, } = context;
- const { videoEl, videoMediaStream
+ const { parsedSessionInformation, isFaceFarEnoughBeforeRecording: faceDistanceCheckBeforeRecording, } = context;
+ const { videoEl, videoMediaStream } = context.videoAssociatedParams;
  const { faceDetector } = context.ovalAssociatedParams;
  const { width, height } = videoMediaStream
  .getTracks()[0]
  .getSettings();
+ const challengeConfig = parsedSessionInformation.Challenge.ChallengeConfig;
  const ovalDetails = getStaticLivenessOvalDetails({
  width: width,
  height: height,
+ ovalHeightWidthRatio: challengeConfig.OvalHeightWidthRatio,
  });
  const { isDistanceBelowThreshold: isFaceFarEnoughBeforeRecording } = await isFaceDistanceBelowThreshold({
+ parsedSessionInformation: parsedSessionInformation,
  faceDetector: faceDetector,
  videoEl: videoEl,
  ovalDetails,
- reduceThreshold: faceDistanceCheckBeforeRecording,
- isMobile,
+ reduceThreshold: faceDistanceCheckBeforeRecording, // if this is the second face distance check reduce the threshold
  });
  return { isFaceFarEnoughBeforeRecording };
  },
- async detectFaceDistanceWhileLoading(context) {
- const { isFaceFarEnoughBeforeRecording: faceDistanceCheckBeforeRecording, } = context;
- const { videoEl, videoMediaStream, isMobile } = context.videoAssociatedParams;
- const { faceDetector } = context.ovalAssociatedParams;
- const { width, height } = videoMediaStream
- .getTracks()[0]
- .getSettings();
- const ovalDetails = getStaticLivenessOvalDetails({
- width: width,
- height: height,
- });
- const { isDistanceBelowThreshold: isFaceFarEnoughBeforeRecording, error, } = await isFaceDistanceBelowThreshold({
- faceDetector: faceDetector,
- isMobile,
- ovalDetails,
- videoEl: videoEl,
- // if this is the second face distance check reduce the threshold
- reduceThreshold: faceDistanceCheckBeforeRecording,
- });
- return { isFaceFarEnoughBeforeRecording, error };
- },
  async detectInitialFaceAndDrawOval(context) {
- const {
+ const { parsedSessionInformation } = context;
  const { videoEl, canvasEl, isMobile } = context.videoAssociatedParams;
  const { faceDetector } = context.ovalAssociatedParams;
  // initialize models
  try {
  await faceDetector.modelLoadingPromise;
- await livenessStreamProvider.videoRecorder.recorderStarted;
  }
  catch (err) {
  // eslint-disable-next-line no-console
@@ -2412,11 +2523,17 @@ const livenessMachine = xstate.createMachine({
  const scaleFactor = videoScaledWidth / videoEl.videoWidth;
  // generate oval details from initialFace and video dimensions
  const ovalDetails = getOvalDetailsFromSessionInformation({
-
+ parsedSessionInformation: parsedSessionInformation,
  videoWidth: videoEl.width,
  });
+ const challengeConfig = parsedSessionInformation.Challenge.ChallengeConfig;
  // renormalize initial face
- const renormalizedFace = generateBboxFromLandmarks(
+ const renormalizedFace = generateBboxFromLandmarks({
+ ovalHeightWidthRatio: challengeConfig.OvalHeightWidthRatio,
+ face: initialFace,
+ oval: ovalDetails,
+ frameHeight: videoEl.videoHeight,
+ });
  initialFace.top = renormalizedFace.top;
  initialFace.left = renormalizedFace.left;
  initialFace.height = renormalizedFace.bottom - renormalizedFace.top;
@@ -2428,15 +2545,10 @@ const livenessMachine = xstate.createMachine({
  scaleFactor,
  videoEl: videoEl,
  });
- return {
- faceMatchState,
- ovalDetails,
- scaleFactor,
- initialFace,
- };
+ return { faceMatchState, ovalDetails, scaleFactor, initialFace };
  },
  async detectFaceAndMatchOval(context) {
- const {
+ const { parsedSessionInformation } = context;
  const { videoEl } = context.videoAssociatedParams;
  const { faceDetector, ovalDetails, initialFace } = context.ovalAssociatedParams;
  // detect face
@@ -2445,7 +2557,13 @@ const livenessMachine = xstate.createMachine({
  let faceMatchPercentage = 0;
  let detectedFace;
  let illuminationState;
- const
+ const challengeConfig = parsedSessionInformation.Challenge.ChallengeConfig;
+ const initialFaceBoundingBox = generateBboxFromLandmarks({
+ ovalHeightWidthRatio: challengeConfig.OvalHeightWidthRatio,
+ face: initialFace,
+ oval: ovalDetails,
+ frameHeight: videoEl.videoHeight,
+ });
  const { ovalBoundingBox } = getOvalBoundingBox(ovalDetails);
  const initialFaceIntersection = getIntersectionOverUnion(initialFaceBoundingBox, ovalBoundingBox);
  switch (detectedFaces.length) {
@@ -2462,7 +2580,7 @@ const livenessMachine = xstate.createMachine({
  face: detectedFace,
  ovalDetails: ovalDetails,
  initialFaceIntersection,
-
+ parsedSessionInformation: parsedSessionInformation,
  frameHeight: videoEl.videoHeight,
  });
  faceMatchState = faceMatchStateInLivenessOval;
@@ -2482,60 +2600,63 @@ const livenessMachine = xstate.createMachine({
  detectedFace,
  };
  },
- async flashColors(
- const { freshnessColorsComplete,
+ async flashColors({ challengeId, colorSequenceDisplay, freshnessColorAssociatedParams, livenessStreamProvider, ovalAssociatedParams, videoAssociatedParams, }) {
+ const { freshnessColorsComplete, freshnessColorEl } = freshnessColorAssociatedParams;
  if (freshnessColorsComplete) {
  return;
  }
- const
+ const { ovalDetails, scaleFactor } = ovalAssociatedParams;
+ const { videoEl } = videoAssociatedParams;
+ const completed = await colorSequenceDisplay.startSequences({
+ onSequenceColorChange: ({ sequenceColor, prevSequenceColor, heightFraction, }) => {
+ fillOverlayCanvasFractional({
+ heightFraction,
+ overlayCanvas: freshnessColorEl,
+ ovalDetails: ovalDetails,
+ nextColor: sequenceColor,
+ prevColor: prevSequenceColor,
+ scaleFactor: scaleFactor,
+ videoEl: videoEl,
+ });
+ },
+ onSequenceStart: () => {
+ freshnessColorEl.style.display = 'block';
+ },
+ onSequencesComplete: () => {
+ freshnessColorEl.style.display = 'none';
+ },
+ onSequenceChange: (params) => {
+ livenessStreamProvider.dispatchStreamEvent({
+ type: 'sessionInfo',
+ data: createColorDisplayEvent({
+ ...params,
+ challengeId: challengeId,
+ }),
+ });
+ },
+ });
  return { freshnessColorsComplete: completed };
  },
  async stopVideo(context) {
- const { challengeId, livenessStreamProvider } = context;
- const { videoMediaStream } =
-
-
-
- .getTracks()[0]
- .getSettings();
- const flippedInitialFaceLeft = width - initialFace.left - initialFace.width;
- await livenessStreamProvider.stopVideo();
- const livenessActionDocument = {
- Challenge: {
- FaceMovementAndLightChallenge: {
- ChallengeId: challengeId,
- InitialFace: {
- InitialFaceDetectedTimestamp: initialFace.timestampMs,
- BoundingBox: getBoundingBox({
- deviceHeight: height,
- deviceWidth: width,
- height: initialFace.height,
- width: initialFace.width,
- top: initialFace.top,
- left: flippedInitialFaceLeft,
- }),
- },
- TargetFace: {
- FaceDetectedInTargetPositionStartTimestamp: startFace.timestampMs,
- FaceDetectedInTargetPositionEndTimestamp: endFace.timestampMs,
- BoundingBox: getBoundingBox({
- deviceHeight: height,
- deviceWidth: width,
- height: ovalDetails.height,
- width: ovalDetails.width,
- top: ovalDetails.centerY - ovalDetails.height / 2,
- left: ovalDetails.centerX - ovalDetails.width / 2,
- }),
- },
- VideoEndTimestamp: livenessStreamProvider.videoRecorder.recorderEndTimestamp,
- },
- },
- };
- if (livenessStreamProvider.videoRecorder.getVideoChunkSize() === 0) {
+ const { challengeId, parsedSessionInformation, faceMatchAssociatedParams, ovalAssociatedParams, livenessStreamProvider, videoAssociatedParams, } = context;
+ const { videoMediaStream } = videoAssociatedParams;
+ // if not awaited, `getRecordingEndTimestamp` will throw
+ await livenessStreamProvider.stopRecording();
+ if (livenessStreamProvider.getChunksLength() === 0) {
  throw new Error('Video chunks not recorded successfully.');
  }
- livenessStreamProvider.
-
+ livenessStreamProvider.dispatchStreamEvent({
+ type: 'sessionInfo',
+ data: createSessionEndEvent({
+ ...getTrackDimensions(videoMediaStream),
+ parsedSessionInformation: parsedSessionInformation,
+ challengeId: challengeId,
+ faceMatchAssociatedParams: faceMatchAssociatedParams,
+ ovalAssociatedParams: ovalAssociatedParams,
+ recordingEndedTimestamp: livenessStreamProvider.getRecordingEndedTimestamp(),
+ }),
+ });
+ livenessStreamProvider.dispatchStreamEvent({ type: 'streamStop' });
  },
  async getLiveness(context) {
  const { onAnalysisComplete } = context.componentProps;
@@ -2612,6 +2733,8 @@ var LivenessClassNames;
  LivenessClassNames["CameraModule"] = "amplify-liveness-camera-module";
  LivenessClassNames["CancelContainer"] = "amplify-liveness-cancel-container";
  LivenessClassNames["CancelButton"] = "amplify-liveness-cancel-button";
+ LivenessClassNames["CenteredLoader"] = "amplify-liveness-centered-loader";
+ LivenessClassNames["ConnectingLoader"] = "amplify-liveness-connecting-loader";
  LivenessClassNames["CountdownContainer"] = "amplify-liveness-countdown-container";
  LivenessClassNames["DescriptionBullet"] = "amplify-liveness-description-bullet";
  LivenessClassNames["DescriptionBulletIndex"] = "amplify-liveness-description-bullet__index";
@@ -2651,6 +2774,7 @@ var LivenessClassNames;
  LivenessClassNames["Toast"] = "amplify-liveness-toast";
  LivenessClassNames["ToastContainer"] = "amplify-liveness-toast__container";
  LivenessClassNames["ToastMessage"] = "amplify-liveness-toast__message";
+ LivenessClassNames["UserFacingVideo"] = "amplify-liveness-video--user-facing";
  LivenessClassNames["Video"] = "amplify-liveness-video";
  LivenessClassNames["VideoAnchor"] = "amplify-liveness-video-anchor";
  })(LivenessClassNames || (LivenessClassNames = {}));
@@ -2708,7 +2832,6 @@ const Hint = ({ hintDisplayText }) => {
  state.matches('detectFaceDistanceBeforeRecording');
  const isStartView = state.matches('start') || state.matches('userCancel');
  const isRecording = state.matches('recording');
- const isNotRecording = state.matches('notRecording');
  const isUploading = state.matches('uploading');
  const isCheckSuccessful = state.matches('checkSucceeded');
  const isCheckFailed = state.matches('checkFailed');
@@ -2748,9 +2871,6 @@ const Hint = ({ hintDisplayText }) => {
  isFaceFarEnoughBeforeRecordingState === false) {
  return React__namespace.createElement(DefaultToast, { text: hintDisplayText.hintTooCloseText });
  }
- if (isNotRecording) {
- return (React__namespace.createElement(ToastWithLoader, { displayText: hintDisplayText.hintConnectingText }));
- }
  if (isUploading) {
  return (React__namespace.createElement(React__namespace.Fragment, null,
  React__namespace.createElement(uiReact.VisuallyHidden, { "aria-live": "assertive" }, hintDisplayText.hintCheckCompleteText),
@@ -2821,6 +2941,40 @@ const RecordingIcon = ({ children }) => {
         React__namespace["default"].createElement(uiReact.Text, { as: "span", fontWeight: "bold" }, children)));
 };
 
+function isNewerIpad() {
+    // iPads on iOS13+ return as if a desktop Mac,
+    // so check for maxTouchPoints also.
+    return (/Macintosh/i.test(navigator.userAgent) &&
+        !!navigator.maxTouchPoints &&
+        navigator.maxTouchPoints > 1);
+}
+function isMobileScreen() {
+    const isMobileDevice =
+    // Test Android/iPhone/iPad
+    /Android|iPhone|iPad/i.test(navigator.userAgent) || isNewerIpad();
+    return isMobileDevice;
+}
+async function isDeviceUserFacing(deviceId) {
+    const devices = await navigator.mediaDevices?.enumerateDevices();
+    // Find the video input device with the matching deviceId
+    const videoDevice = devices?.find((device) => device.deviceId === deviceId && device.kind === 'videoinput');
+    if (videoDevice) {
+        // Check if the device label contains the word "back"
+        return !videoDevice.label.toLowerCase().includes('back');
+    }
+    // If the device is not found or not a video input device, assume user-facing
+    return true;
+}
+/**
+ * Use window.matchMedia to detect landscape orientation.
+ * screen.orientation is not supported in Safari so we will use
+ * media query detection to listen for changes instead.
+ * @returns MediaQueryList object
+ */
+function getLandscapeMediaQuery() {
+    return window.matchMedia('(orientation: landscape)');
+}
+
 const defaultErrorDisplayText = {
     errorLabelText: 'Error',
     connectionTimeoutHeaderText: 'Connection time out',
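These new helpers are plain browser utilities with no Amplify dependencies, so their behavior is easy to illustrate standalone. The wiring below is illustrative, not package code:

// Illustrative only; the detector calls these helpers internally.
async function describeCamera(deviceId) {
  return {
    mobile: isMobileScreen(), // UA sniff, including iPadOS-reported-as-Mac
    userFacing: await isDeviceUserFacing(deviceId), // false only for "back"-labeled cameras
  };
}

// Safari lacks screen.orientation change events, hence the media-query listener:
getLandscapeMediaQuery().addEventListener('change', (e) => {
  console.log(e.matches ? 'landscape' : 'portrait');
});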
@@ -2999,13 +3153,21 @@ const DefaultCancelButton = ({ cancelLivenessCheckText, }) => {
         React__namespace["default"].createElement(CancelButton, { ariaLabel: cancelLivenessCheckText })));
 };
 
+const CameraSelector = (props) => {
+    const { onSelect: onCameraChange, devices: selectableDevices, deviceId: selectedDeviceId, } = props;
+    return (React__namespace["default"].createElement(uiReact.Flex, { className: LivenessClassNames.StartScreenCameraSelect },
+        React__namespace["default"].createElement(uiReact.View, { className: LivenessClassNames.StartScreenCameraSelectContainer },
+            React__namespace["default"].createElement(uiReact.Label, { htmlFor: "amplify-liveness-camera-select", className: `${LivenessClassNames.StartScreenCameraSelect}__label` }, "Camera:"),
+            React__namespace["default"].createElement(uiReact.SelectField, { id: "amplify-liveness-camera-select", testId: "amplify-liveness-camera-select", label: "Camera", labelHidden: true, value: selectedDeviceId, onChange: onCameraChange }, selectableDevices.map((device) => (React__namespace["default"].createElement("option", { value: device.deviceId, key: device.deviceId }, device.label)))))));
+};
+
+const selectChallengeType = createLivenessSelector((state) => state.context.parsedSessionInformation?.Challenge?.Name);
 const selectVideoConstraints = createLivenessSelector((state) => state.context.videoAssociatedParams?.videoConstraints);
 const selectVideoStream = createLivenessSelector((state) => state.context.videoAssociatedParams?.videoMediaStream);
 const selectFaceMatchPercentage = createLivenessSelector((state) => state.context.faceMatchAssociatedParams?.faceMatchPercentage);
 const selectFaceMatchState = createLivenessSelector((state) => state.context.faceMatchAssociatedParams?.faceMatchState);
 const selectSelectedDeviceId = createLivenessSelector((state) => state.context.videoAssociatedParams?.selectedDeviceId);
 const selectSelectableDevices = createLivenessSelector((state) => state.context.videoAssociatedParams?.selectableDevices);
-const centeredLoader = (React__namespace["default"].createElement(uiReact.Loader, { size: "large", className: LivenessClassNames.Loader, "data-testid": "centered-loader" }));
 const showMatchIndicatorStates = [
     FaceMatchState.TOO_FAR,
     FaceMatchState.CANT_IDENTIFY,
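The extracted CameraSelector is purely presentational: the device list and change handler arrive as props rather than being read from the machine. A plain-React stand-in with the same contract (Amplify UI primitives swapped for native elements; all names here are illustrative):

import * as React from 'react';

// Stand-in for the internal CameraSelector; same props, plain markup.
function CameraPicker({ devices, deviceId, onSelect }) {
  return (
    <label>
      {'Camera: '}
      <select value={deviceId} onChange={onSelect}>
        {devices.map((d) => (
          <option key={d.deviceId} value={d.deviceId}>
            {d.label}
          </option>
        ))}
      </select>
    </label>
  );
}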
@@ -3023,6 +3185,7 @@ const LivenessCameraModule = (props) => {
     const { cancelLivenessCheckText, recordingIndicatorText } = streamDisplayText;
     const { ErrorView = FaceLivenessErrorModal, PhotosensitiveWarning = DefaultPhotosensitiveWarning, } = customComponents ?? {};
     const [state, send] = useLivenessActor();
+    const isFaceMovementChallenge = useLivenessSelector(selectChallengeType) === FACE_MOVEMENT_CHALLENGE.type;
     const videoStream = useLivenessSelector(selectVideoStream);
     const videoConstraints = useLivenessSelector(selectVideoConstraints);
     const selectedDeviceId = useLivenessSelector(selectSelectedDeviceId);
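`selectChallengeType` reads the challenge name negotiated in the session information, and the comparison against `FACE_MOVEMENT_CHALLENGE.type` gates several branches below (photosensitivity warning, mobile camera selection). A minimal sketch of the pattern; the constant's shape is an assumption inferred from this diff:

// Assumed shape, inferred from usage in this diff (defined in constants.mjs):
const FACE_MOVEMENT_CHALLENGE = { type: 'FaceMovementChallenge' };

// A liveness selector is just a pure function of machine state:
const selectChallengeType = (state) =>
  state.context.parsedSessionInformation?.Challenge?.Name;

const isFaceMovementChallenge = (state) =>
  selectChallengeType(state) === FACE_MOVEMENT_CHALLENGE.type;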
@@ -3035,8 +3198,14 @@ const LivenessCameraModule = (props) => {
     const canvasRef = React.useRef(null);
     const freshnessColorRef = React.useRef(null);
     const [isCameraReady, setIsCameraReady] = React.useState(false);
-    const isCheckingCamera = state.matches('cameraCheck');
-    const isWaitingForCamera = state.matches('waitForDOMAndCameraDetails');
+    const [isMetadataLoaded, setIsMetadataLoaded] = React.useState(false);
+    const [isCameraUserFacing, setIsCameraUserFacing] = React.useState(true);
+    const isInitCamera = state.matches('initCamera');
+    const isInitWebsocket = state.matches('initWebsocket');
+    const isCheckingCamera = state.matches({ initCamera: 'cameraCheck' });
+    const isWaitingForCamera = state.matches({
+        initCamera: 'waitForDOMAndCameraDetails',
+    });
     const isStartView = state.matches('start') || state.matches('userCancel');
     const isDetectFaceBeforeStart = state.matches('detectFaceBeforeStart');
     const isRecording = state.matches('recording');
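The machine refactor nests the camera setup states under an `initCamera` parent, so matching switches from string keys to object keys. A minimal XState sketch of the same pattern (a hypothetical machine, not the package's):

import { createMachine, interpret } from 'xstate';

// Hypothetical machine shaped like the refactor: camera setup states
// now live under an `initCamera` parent state.
const machine = createMachine({
  id: 'liveness',
  initial: 'initCamera',
  states: {
    initCamera: {
      initial: 'cameraCheck',
      states: { cameraCheck: {}, waitForDOMAndCameraDetails: {} },
    },
    initWebsocket: {},
    recording: {},
  },
});

const service = interpret(machine).start();
service.state.matches('initCamera'); // true for any child of initCamera
service.state.matches({ initCamera: 'cameraCheck' }); // true for the specific child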
@@ -3050,18 +3219,29 @@ const LivenessCameraModule = (props) => {
     const [mediaWidth, setMediaWidth] = React.useState(videoWidth);
     const [mediaHeight, setMediaHeight] = React.useState(videoHeight);
     const [aspectRatio, setAspectRatio] = React.useState(() => videoWidth && videoHeight ? videoWidth / videoHeight : 0);
+    // On mobile, only allow camera selection for the face movement ("no light") challenge
+    const hasMultipleDevices = !!selectableDevices?.length && selectableDevices.length > 1;
+    const allowDeviceSelection = isStartView &&
+        hasMultipleDevices &&
+        (!isMobileScreen || isFaceMovementChallenge);
     React__namespace["default"].useEffect(() => {
-        if (canvasRef?.current && videoRef?.current && videoStream && isStartView) {
-            drawStaticOval(canvasRef.current, videoRef.current, videoStream);
+        async function checkCameraFacing() {
+            const isUserFacing = await isDeviceUserFacing(selectedDeviceId);
+            setIsCameraUserFacing(isUserFacing);
         }
-    }, [videoRef, videoStream, isStartView]);
+        checkCameraFacing();
+    }, [selectedDeviceId]);
     React__namespace["default"].useEffect(() => {
+        const shouldDrawOval = canvasRef?.current &&
+            videoRef?.current &&
+            videoStream &&
+            isStartView &&
+            isMetadataLoaded;
+        if (shouldDrawOval) {
+            drawStaticOval(canvasRef.current, videoRef.current, videoStream);
+        }
         const updateColorModeHandler = (e) => {
-            if (e.matches &&
-                canvasRef?.current &&
-                videoRef?.current &&
-                videoStream &&
-                isStartView) {
+            if (e.matches && shouldDrawOval) {
                 drawStaticOval(canvasRef.current, videoRef.current, videoStream);
             }
         };
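Gating the oval draw on `isMetadataLoaded` matters because a video element reports zero `videoWidth`/`videoHeight` until its `loadedmetadata` event fires. A standalone illustration of the same guard (plain DOM, no Amplify internals):

// Plain-DOM illustration; `stream` is an assumed active MediaStream.
function attachPreview(videoEl, stream) {
  videoEl.srcObject = stream;
  videoEl.addEventListener('loadedmetadata', () => {
    // Before this event, videoWidth/videoHeight read as 0, so any
    // overlay (like the static oval) would be sized against nothing.
    console.log(videoEl.videoWidth, videoEl.videoHeight);
  });
}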
@@ -3073,7 +3253,7 @@ const LivenessCameraModule = (props) => {
             darkModePreference.removeEventListener('change', updateColorModeHandler);
             lightModePreference.addEventListener('change', updateColorModeHandler);
         };
-    }, [videoRef, videoStream, colorMode, isStartView]);
+    }, [videoRef, videoStream, colorMode, isStartView, isMetadataLoaded]);
     React__namespace["default"].useLayoutEffect(() => {
         if (isCameraReady) {
             send({
@@ -3104,6 +3284,9 @@ const LivenessCameraModule = (props) => {
     const handleMediaPlay = () => {
         setIsCameraReady(true);
     };
+    const handleLoadedMetadata = () => {
+        setIsMetadataLoaded(true);
+    };
     const beginLivenessCheck = React__namespace["default"].useCallback(() => {
         send({
             type: 'BEGIN',
@@ -3112,6 +3295,7 @@ const LivenessCameraModule = (props) => {
     const onCameraChange = React__namespace["default"].useCallback((e) => {
         const newDeviceId = e.target.value;
         const changeCamera = async () => {
+            setIsMetadataLoaded(false);
             const newStream = await navigator.mediaDevices.getUserMedia({
                 video: {
                     ...videoConstraints,
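Resetting `isMetadataLoaded` before requesting the new stream keeps the oval from being drawn against stale dimensions. A standalone sketch of switching cameras with getUserMedia; `currentStream` is an assumed existing stream, and `deviceId: { exact }` is the standard MediaTrackConstraints form:

// Illustrative camera switch, not package code.
async function switchCamera(currentStream, newDeviceId) {
  // Release the old device before acquiring the new one.
  currentStream.getTracks().forEach((track) => track.stop());
  return navigator.mediaDevices.getUserMedia({
    video: { deviceId: { exact: newDeviceId } },
    audio: false,
  });
}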
@@ -3128,16 +3312,19 @@ const LivenessCameraModule = (props) => {
     }, [videoConstraints, send]);
     if (isCheckingCamera) {
         return (React__namespace["default"].createElement(uiReact.Flex, { justifyContent: 'center', className: LivenessClassNames.StartScreenCameraWaiting },
-            React__namespace["default"].createElement(uiReact.Loader, { size: "large", className: LivenessClassNames.
+            React__namespace["default"].createElement(uiReact.Loader, { size: "large", className: LivenessClassNames.CenteredLoader, "data-testid": "centered-loader", position: "unset" }),
             React__namespace["default"].createElement(uiReact.Text, { fontSize: "large", fontWeight: "bold", "data-testid": "waiting-camera-permission", className: `${LivenessClassNames.StartScreenCameraWaiting}__text` }, cameraDisplayText.waitingCameraPermissionText)));
     }
+    const shouldShowCenteredLoader = isInitCamera || isInitWebsocket;
     // We don't show full screen camera on the pre check screen (isStartView/isWaitingForCamera)
-    const shouldShowFullScreenCamera = isMobileScreen && !isStartView && !isWaitingForCamera;
+    const shouldShowFullScreenCamera = isMobileScreen && !isStartView && !shouldShowCenteredLoader;
     return (React__namespace["default"].createElement(React__namespace["default"].Fragment, null,
-        photoSensitivityWarning,
+        !isFaceMovementChallenge && photoSensitivityWarning,
+        shouldShowCenteredLoader && (React__namespace["default"].createElement(uiReact.Flex, { className: LivenessClassNames.ConnectingLoader },
+            React__namespace["default"].createElement(uiReact.Loader, { size: "large", className: LivenessClassNames.Loader, "data-testid": "centered-loader" }),
+            React__namespace["default"].createElement(uiReact.Text, { className: LivenessClassNames.LandscapeErrorModalHeader }, hintDisplayText.hintConnectingText))),
         React__namespace["default"].createElement(uiReact.Flex, { className: ui.classNames(LivenessClassNames.CameraModule, shouldShowFullScreenCamera &&
             `${LivenessClassNames.CameraModule}--mobile`), "data-testid": testId, gap: "zero" },
-            !isCameraReady && centeredLoader,
             React__namespace["default"].createElement(Overlay, { horizontal: "center", vertical: isRecording && !isFlashingFreshness ? 'start' : 'space-between', className: LivenessClassNames.InstructionOverlay },
                 isRecording && (React__namespace["default"].createElement(DefaultRecordingIcon, { recordingIndicatorText: recordingIndicatorText })),
                 !isStartView && !isWaitingForCamera && !isCheckSucceeded && (React__namespace["default"].createElement(DefaultCancelButton, { cancelLivenessCheckText: cancelLivenessCheckText })),
@@ -3156,17 +3343,11 @@ const LivenessCameraModule = (props) => {
             React__namespace["default"].createElement(uiReact.View, { className: LivenessClassNames.VideoAnchor, style: {
                     aspectRatio: `${aspectRatio}`,
                 } },
-                React__namespace["default"].createElement("video", { ref: videoRef, muted: true, autoPlay: true, playsInline: true, width: mediaWidth, height: mediaHeight, onCanPlay: handleMediaPlay, "data-testid": "video", className: ui.classNames(LivenessClassNames.Video, isRecordingStopped && LivenessClassNames.FadeOut), "aria-label": cameraDisplayText.a11yVideoLabelText }),
+                React__namespace["default"].createElement("video", { ref: videoRef, muted: true, autoPlay: true, playsInline: true, width: mediaWidth, height: mediaHeight, onCanPlay: handleMediaPlay, onLoadedMetadata: handleLoadedMetadata, "data-testid": "video", className: ui.classNames(LivenessClassNames.Video, isCameraUserFacing && LivenessClassNames.UserFacingVideo, isRecordingStopped && LivenessClassNames.FadeOut), "aria-label": cameraDisplayText.a11yVideoLabelText }),
                 React__namespace["default"].createElement(uiReact.Flex, { className: ui.classNames(LivenessClassNames.OvalCanvas, shouldShowFullScreenCamera &&
                     `${LivenessClassNames.OvalCanvas}--mobile`, isRecordingStopped && LivenessClassNames.FadeOut) },
                     React__namespace["default"].createElement(uiReact.View, { as: "canvas", ref: canvasRef })),
-
-                !isMobileScreen &&
-                    selectableDevices &&
-                    selectableDevices.length > 1 && (React__namespace["default"].createElement(uiReact.Flex, { className: LivenessClassNames.StartScreenCameraSelect },
-                    React__namespace["default"].createElement(uiReact.View, { className: LivenessClassNames.StartScreenCameraSelectContainer },
-                        React__namespace["default"].createElement(uiReact.Label, { htmlFor: "amplify-liveness-camera-select", className: `${LivenessClassNames.StartScreenCameraSelect}__label` }, "Camera:"),
-                        React__namespace["default"].createElement(uiReact.SelectField, { id: "amplify-liveness-camera-select", label: "Camera", labelHidden: true, value: selectedDeviceId, onChange: onCameraChange }, selectableDevices?.map((device) => (React__namespace["default"].createElement("option", { value: device.deviceId, key: device.deviceId }, device.label))))))))),
+                allowDeviceSelection ? (React__namespace["default"].createElement(CameraSelector, { onSelect: onCameraChange, devices: selectableDevices, deviceId: selectedDeviceId })) : null)),
         isStartView && (React__namespace["default"].createElement(uiReact.Flex, { justifyContent: "center" },
             React__namespace["default"].createElement(uiReact.Button, { variation: "primary", type: "button", onClick: beginLivenessCheck }, instructionDisplayText.startScreenBeginCheckText)))));
 };
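The selector's options ultimately come from the browser's device list that populates `selectableDevices`. For reference, the standard MediaDevices call that yields such a list (not package code):

// Labels are empty until the user grants camera permission, which is
// why enumeration is only useful after a stream has been acquired.
async function listVideoInputs() {
  const devices = await navigator.mediaDevices.enumerateDevices();
  return devices.filter((d) => d.kind === 'videoinput');
}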