@aws-amplify/ui-react-liveness 3.0.13 → 3.0.15
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/esm/components/FaceLivenessDetector/FaceLivenessDetectorCore.mjs +1 -5
- package/dist/esm/components/FaceLivenessDetector/LivenessCheck/LivenessCameraModule.mjs +4 -15
- package/dist/esm/components/FaceLivenessDetector/LivenessCheck/LivenessCheck.mjs +1 -5
- package/dist/esm/components/FaceLivenessDetector/service/machine/index.mjs +144 -158
- package/dist/esm/components/FaceLivenessDetector/service/utils/{CustomWebSocketFetchHandler.mjs → createStreamingClient/CustomWebSocketFetchHandler.mjs} +4 -5
- package/dist/esm/components/FaceLivenessDetector/service/utils/createStreamingClient/Signer.mjs +18 -0
- package/dist/esm/components/FaceLivenessDetector/service/utils/createStreamingClient/createStreamingClient.mjs +27 -0
- package/dist/esm/components/FaceLivenessDetector/service/utils/createStreamingClient/resolveCredentials.mjs +38 -0
- package/dist/esm/components/FaceLivenessDetector/service/utils/liveness.mjs +62 -67
- package/dist/esm/components/FaceLivenessDetector/service/utils/streamProvider.mjs +14 -35
- package/dist/esm/components/FaceLivenessDetector/service/utils/videoRecorder.mjs +1 -2
- package/dist/esm/components/FaceLivenessDetector/shared/DefaultStartScreenComponents.mjs +1 -5
- package/dist/esm/components/FaceLivenessDetector/shared/FaceLivenessErrorModal.mjs +1 -5
- package/dist/esm/components/FaceLivenessDetector/shared/Hint.mjs +1 -5
- package/dist/esm/components/FaceLivenessDetector/types/classNames.mjs +0 -5
- package/dist/esm/version.mjs +1 -1
- package/dist/index.js +294 -275
- package/dist/styles.css +1 -1
- package/dist/types/components/FaceLivenessDetector/hooks/useLivenessActor.d.ts +1 -1
- package/dist/types/components/FaceLivenessDetector/service/machine/index.d.ts +3 -4
- package/dist/types/components/FaceLivenessDetector/service/types/credentials.d.ts +9 -10
- package/dist/types/components/FaceLivenessDetector/service/types/machine.d.ts +17 -18
- package/dist/types/components/FaceLivenessDetector/service/utils/createStreamingClient/Signer.d.ts +6 -0
- package/dist/types/components/FaceLivenessDetector/service/utils/createStreamingClient/createStreamingClient.d.ts +9 -0
- package/dist/types/components/FaceLivenessDetector/service/utils/createStreamingClient/index.d.ts +1 -0
- package/dist/types/components/FaceLivenessDetector/service/utils/createStreamingClient/resolveCredentials.d.ts +10 -0
- package/dist/types/components/FaceLivenessDetector/service/utils/liveness.d.ts +7 -9
- package/dist/types/components/FaceLivenessDetector/service/utils/streamProvider.d.ts +3 -10
- package/dist/types/components/FaceLivenessDetector/service/utils/videoRecorder.d.ts +1 -7
- package/dist/types/components/FaceLivenessDetector/shared/index.d.ts +0 -3
- package/dist/types/components/FaceLivenessDetector/types/classNames.d.ts +0 -5
- package/dist/types/version.d.ts +1 -1
- package/package.json +5 -4
- package/dist/types/components/FaceLivenessDetector/shared/GoodFitIllustration.d.ts +0 -7
- package/dist/types/components/FaceLivenessDetector/shared/StartScreenFigure.d.ts +0 -8
- package/dist/types/components/FaceLivenessDetector/shared/TooFarIllustration.d.ts +0 -7
- package/dist/types/components/FaceLivenessDetector/service/utils/{CustomWebSocketFetchHandler.d.ts → createStreamingClient/CustomWebSocketFetchHandler.d.ts} +0 -0
package/dist/index.js
CHANGED
@@ -5,6 +5,7 @@ Object.defineProperty(exports, '__esModule', { value: true });
 var React = require('react');
 var auth = require('aws-amplify/auth');
 var react = require('@xstate/react');
+var nanoid = require('nanoid');
 var xstate = require('xstate');
 var tfjsCore = require('@tensorflow/tfjs-core');
 var faceDetection = require('@tensorflow-models/face-detection');
@@ -16,7 +17,7 @@ var utilFormatUrl = require('@aws-sdk/util-format-url');
 var eventstreamSerdeBrowser = require('@smithy/eventstream-serde-browser');
 var fetchHttpHandler = require('@smithy/fetch-http-handler');
 var protocolHttp = require('@smithy/protocol-http');
-var
+var signatureV4 = require('@smithy/signature-v4');
 var uiReact = require('@aws-amplify/ui-react');
 var ui = require('@aws-amplify/ui');
 var internal = require('@aws-amplify/ui-react/internal');
@@ -113,7 +114,6 @@ const WS_CLOSURE_CODE = {
     USER_ERROR_DURING_CONNECTION: 4007,
 };
 
-/* eslint-disable */
 /**
  * Returns the random number between min and max
  * seeded with the provided random seed.
@@ -204,31 +204,6 @@ function getStaticLivenessOvalDetails({ width, height, widthSeed = 1.0, centerXS
         height: Math.floor(ovalHeight),
     };
 }
-function drawStaticOval(canvasEl, videoEl, videoMediaStream) {
-    const { width, height } = videoMediaStream.getTracks()[0].getSettings();
-    // Get width/height of video element so we can compute scaleFactor
-    // and set canvas width/height.
-    const { width: videoScaledWidth, height: videoScaledHeight } = videoEl.getBoundingClientRect();
-    canvasEl.width = Math.ceil(videoScaledWidth);
-    canvasEl.height = Math.ceil(videoScaledHeight);
-    const ovalDetails = getStaticLivenessOvalDetails({
-        width: width,
-        height: height,
-        ratioMultiplier: 0.5,
-    });
-    ovalDetails.flippedCenterX = width - ovalDetails.centerX;
-    // Compute scaleFactor which is how much our video element is scaled
-    // vs the intrinsic video resolution
-    const scaleFactor = videoScaledWidth / videoEl.videoWidth;
-    // Draw oval in canvas using ovalDetails and scaleFactor
-    drawLivenessOvalInCanvas({
-        canvas: canvasEl,
-        oval: ovalDetails,
-        scaleFactor,
-        videoEl: videoEl,
-        isStartScreen: true,
-    });
-}
 /**
  * Draws the provided liveness oval on the canvas.
  */
@@ -273,6 +248,31 @@ function drawLivenessOvalInCanvas({ canvas, oval, scaleFactor, videoEl, isStartS
         throw new Error('Cannot find Canvas.');
     }
 }
+function drawStaticOval(canvasEl, videoEl, videoMediaStream) {
+    const { width, height } = videoMediaStream.getTracks()[0].getSettings();
+    // Get width/height of video element so we can compute scaleFactor
+    // and set canvas width/height.
+    const { width: videoScaledWidth, height: videoScaledHeight } = videoEl.getBoundingClientRect();
+    canvasEl.width = Math.ceil(videoScaledWidth);
+    canvasEl.height = Math.ceil(videoScaledHeight);
+    const ovalDetails = getStaticLivenessOvalDetails({
+        width: width,
+        height: height,
+        ratioMultiplier: 0.5,
+    });
+    ovalDetails.flippedCenterX = width - ovalDetails.centerX;
+    // Compute scaleFactor which is how much our video element is scaled
+    // vs the intrinsic video resolution
+    const scaleFactor = videoScaledWidth / videoEl.videoWidth;
+    // Draw oval in canvas using ovalDetails and scaleFactor
+    drawLivenessOvalInCanvas({
+        canvas: canvasEl,
+        oval: ovalDetails,
+        scaleFactor,
+        videoEl: videoEl,
+        isStartScreen: true,
+    });
+}
 function clearOvalCanvas({ canvas, }) {
     const ctx = canvas.getContext('2d');
     if (ctx) {
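The relocated `drawStaticOval` maps oval coordinates computed against the camera's intrinsic resolution onto the element's rendered size. A minimal sketch of that scale-factor arithmetic, with illustrative numbers that are not taken from the package:

```js
// Assumed example: a 640px-wide camera stream rendered in a 480px-wide <video>.
const intrinsicWidth = 640; // videoEl.videoWidth
const renderedWidth = 480;  // videoEl.getBoundingClientRect().width
const scaleFactor = renderedWidth / intrinsicWidth; // 0.75

// An oval centered at x = 320 in intrinsic pixels is drawn at x = 240 on the canvas.
console.log(320 * scaleFactor); // 240
```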
@@ -283,9 +283,43 @@ function clearOvalCanvas({ canvas, }) {
         throw new Error('Cannot find Canvas.');
     }
 }
+function getPupilDistanceAndFaceHeight(face) {
+    const { leftEye, rightEye, mouth } = face;
+    const eyeCenter = [];
+    eyeCenter[0] = (leftEye[0] + rightEye[0]) / 2;
+    eyeCenter[1] = (leftEye[1] + rightEye[1]) / 2;
+    const pupilDistance = Math.sqrt((leftEye[0] - rightEye[0]) ** 2 + (leftEye[1] - rightEye[1]) ** 2);
+    const faceHeight = Math.sqrt((eyeCenter[0] - mouth[0]) ** 2 + (eyeCenter[1] - mouth[1]) ** 2);
+    return { pupilDistance, faceHeight };
+}
+function generateBboxFromLandmarks(face, oval) {
+    const { leftEye, rightEye, nose, leftEar, rightEar, top: faceTop, height: faceHeight, } = face;
+    const { height: ovalHeight, centerY } = oval;
+    const ovalTop = centerY - ovalHeight / 2;
+    const eyeCenter = [];
+    eyeCenter[0] = (leftEye[0] + rightEye[0]) / 2;
+    eyeCenter[1] = (leftEye[1] + rightEye[1]) / 2;
+    const { pupilDistance: pd, faceHeight: fh } = getPupilDistanceAndFaceHeight(face);
+    const alpha = 2.0, gamma = 1.8;
+    const ow = (alpha * pd + gamma * fh) / 2;
+    const oh = 1.618 * ow;
+    let cx;
+    if (eyeCenter[1] <= (ovalTop + ovalHeight) / 2) {
+        cx = (eyeCenter[0] + nose[0]) / 2;
+    }
+    else {
+        cx = eyeCenter[0];
+    }
+    const bottom = faceTop + faceHeight;
+    const top = bottom - oh;
+    const left = Math.min(cx - ow / 2, rightEar[0]);
+    const right = Math.max(cx + ow / 2, leftEar[0]);
+    return { bottom, left, right, top };
+}
 /**
  * Returns the state of the provided face with respect to the provided liveness oval.
  */
+// eslint-disable-next-line max-params
 function getFaceMatchStateInLivenessOval(face, ovalDetails, initialFaceIntersection, sessionInformation) {
     let faceMatchState;
     const challengeConfig = sessionInformation?.Challenge?.FaceMovementAndLightChallenge
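The hoisted `generateBboxFromLandmarks` sizes the box from two landmark distances and fixes its aspect with the golden ratio (`oh = 1.618 * ow`). A worked example with hypothetical landmark distances, not taken from a real frame:

```js
const pd = 60; // pupil distance, pixels
const fh = 70; // eye-center-to-mouth height, pixels
const alpha = 2.0, gamma = 1.8;
const ow = (alpha * pd + gamma * fh) / 2; // (120 + 126) / 2 = 123 — box width
const oh = 1.618 * ow;                    // ≈ 199.01 — box height via the golden ratio
console.log({ ow, oh });
```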
@@ -312,7 +346,7 @@ function getFaceMatchStateInLivenessOval(face, ovalDetails, initialFaceIntersect
     const faceDetectionWidthThreshold = ovalDetails.width * FaceIouWidthThreshold;
     const faceDetectionHeightThreshold = ovalDetails.height * FaceIouHeightThreshold;
     /** From Science
-     * p=max(min(1,0.75∗(si
+     * p=max(min(1,0.75∗(si−s0)/(st−s0)+0.25)),0)
      */
     const faceMatchPercentage = Math.max(Math.min(1, (0.75 * (intersection - initialFaceIntersection)) /
         (intersectionThreshold - initialFaceIntersection) +
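The repaired comment now states the full confidence formula the code implements. Transcribed directly, with `si` the current face/oval intersection, `s0` the initial intersection, and `st` the match threshold (sample values are illustrative only):

```js
const faceMatchPercent = (si, s0, st) =>
  Math.max(Math.min(1, (0.75 * (si - s0)) / (st - s0) + 0.25), 0);

console.log(faceMatchPercent(0.5, 0.2, 0.8)); // 0.625 — halfway between start and threshold
```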
@@ -339,44 +373,6 @@ function getFaceMatchStateInLivenessOval(face, ovalDetails, initialFaceIntersect
     }
     return { faceMatchState, faceMatchPercentage };
 }
-function getPupilDistanceAndFaceHeight(face) {
-    const { leftEye, rightEye, mouth } = face;
-    const eyeCenter = [];
-    eyeCenter[0] = (leftEye[0] + rightEye[0]) / 2;
-    eyeCenter[1] = (leftEye[1] + rightEye[1]) / 2;
-    const pupilDistance = Math.sqrt((leftEye[0] - rightEye[0]) ** 2 + (leftEye[1] - rightEye[1]) ** 2);
-    const faceHeight = Math.sqrt((eyeCenter[0] - mouth[0]) ** 2 + (eyeCenter[1] - mouth[1]) ** 2);
-    return { pupilDistance, faceHeight };
-}
-function generateBboxFromLandmarks(face, oval) {
-    const { leftEye, rightEye, nose, leftEar, rightEar, top: faceTop, height: faceHeight, } = face;
-    const { height: ovalHeight, centerY } = oval;
-    const ovalTop = centerY - ovalHeight / 2;
-    const eyeCenter = [];
-    eyeCenter[0] = (leftEye[0] + rightEye[0]) / 2;
-    eyeCenter[1] = (leftEye[1] + rightEye[1]) / 2;
-    const { pupilDistance: pd, faceHeight: fh } = getPupilDistanceAndFaceHeight(face);
-    const alpha = 2.0, gamma = 1.8;
-    const ow = (alpha * pd + gamma * fh) / 2;
-    const oh = 1.618 * ow;
-    let cx;
-    if (eyeCenter[1] <= (ovalTop + ovalHeight) / 2) {
-        cx = (eyeCenter[0] + nose[0]) / 2;
-    }
-    else {
-        cx = eyeCenter[0];
-    }
-    const faceBottom = faceTop + faceHeight;
-    const top = faceBottom - oh;
-    const left = Math.min(cx - ow / 2, rightEar[0]);
-    const right = Math.max(cx + ow / 2, leftEar[0]);
-    return {
-        left: left,
-        top: top,
-        right: right,
-        bottom: faceBottom,
-    };
-}
 /**
  * Returns the illumination state in the provided video frame.
  */
@@ -492,7 +488,7 @@ function fillOverlayCanvasFractional({ overlayCanvas, prevColor, nextColor, vide
 const isClientFreshnessColorSequence = (obj) => !!obj;
 function getColorsSequencesFromSessionInformation(sessionInformation) {
     const colorSequenceFromSessionInfo = sessionInformation.Challenge.FaceMovementAndLightChallenge
-        .ColorSequences;
+        .ColorSequences ?? [];
     const colorSequences = colorSequenceFromSessionInfo.map(({ FreshnessColor, DownscrollDuration: downscrollDuration, FlatDisplayDuration: flatDisplayDuration, }) => {
         const colorArray = FreshnessColor.RGB;
         const color = `rgb(${colorArray[0]},${colorArray[1]},${colorArray[2]})`;
@@ -550,7 +546,7 @@ async function isFaceDistanceBelowThreshold({ faceDetector, videoEl, ovalDetails
         case 1: {
             //exactly one face detected, match face with oval;
             detectedFace = detectedFaces[0];
-            const width = ovalDetails.width;
+            const { width } = ovalDetails;
             const { pupilDistance, faceHeight } = getPupilDistanceAndFaceHeight(detectedFace);
             const alpha = 2.0, gamma = 1.8;
             const calibratedPupilDistance = (alpha * pupilDistance + gamma * faceHeight) / 2 / alpha;
@@ -694,12 +690,11 @@ class BlazeFaceFaceDetection extends FaceDetection {
  * Helper wrapper class over the native MediaRecorder.
  */
 class VideoRecorder {
-    constructor(stream, options) {
+    constructor(stream) {
         if (typeof MediaRecorder === 'undefined') {
             throw Error('MediaRecorder is not supported by this browser');
         }
         this._stream = stream;
-        this._options = options;
         this._chunks = [];
         this._recorder = new MediaRecorder(stream, { bitsPerSecond: 1000000 });
         this._setupCallbacks();
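`VideoRecorder` no longer accepts an options argument; the bitrate is fixed inside the constructor. A minimal sketch of the new call site (the stream acquisition is shown only for context and assumes a browser with MediaRecorder support):

```js
const stream = await navigator.mediaDevices.getUserMedia({ video: true });
const recorder = new VideoRecorder(stream); // previously: new VideoRecorder(stream, options)
```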
@@ -787,7 +782,7 @@ class VideoRecorder {
     }
 }
 
-const VERSION = '3.0.13';
+const VERSION = '3.0.15';
 
 const BASE_USER_AGENT = `ui-react-liveness/${VERSION}`;
 const getLivenessUserAgent = () => {
@@ -813,10 +808,10 @@ const getIterator = (stream) => {
         return stream;
     }
     if (isReadableStream(stream)) {
-        //If stream is a ReadableStream, transfer the ReadableStream to async iterable.
+        // If stream is a ReadableStream, transfer the ReadableStream to async iterable.
         return eventstreamSerdeBrowser.readableStreamtoIterable(stream);
     }
-    //For other types, just wrap them with an async iterable.
+    // For other types, just wrap them with an async iterable.
     return {
         [Symbol.asyncIterator]: async function* () {
             yield stream;
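The comment-only edits above sit in `getIterator`, whose fallback wraps any single value in a one-shot async iterable so callers can always `for await` over the body. The same pattern in isolation (a standalone sketch, not package code):

```js
const asAsyncIterable = (value) => ({
  [Symbol.asyncIterator]: async function* () {
    yield value; // emit the single value, then complete
  },
});

for await (const chunk of asAsyncIterable('payload')) {
  console.log(chunk); // 'payload'
}
```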
@@ -875,8 +870,7 @@ class CustomWebSocketFetchHandler {
         }
         this.sockets[url].push(socket);
         socket.binaryType = 'arraybuffer';
-        const { connectionTimeout = DEFAULT_WS_CONNECTION_TIMEOUT_MS } = await this
-            .configPromise;
+        const { connectionTimeout = DEFAULT_WS_CONNECTION_TIMEOUT_MS } = await this.configPromise;
         await this.waitForReady(socket, connectionTimeout);
         const { body } = request;
         const bodyStream = getIterator(body);
@@ -987,6 +981,75 @@ class CustomWebSocketFetchHandler {
     }
 }
 
+const isCredentialsProvider = (credentialsProvider) => typeof credentialsProvider === 'function';
+// the return interface of `fetchAuthSession` includes `credentials` as
+// optional, but `credentials` is always returned. If `fetchAuthSession`
+// is called for an unauthenticated end user, values of `accessKeyId`
+// and `secretAccessKey` are `undefined`
+const isCredentials = (credentials) => !!(credentials?.accessKeyId && credentials?.secretAccessKey);
+/**
+ * Resolves the `credentials` param to be passed to `RekognitionStreamingClient` which accepts either:
+ * - a `credentials` object
+ * - a `credentialsProvider` callback
+ *
+ * @param credentialsProvider optional `credentialsProvider` callback
+ * @returns {Promise<AwsCredentials | AwsCredentialProvider>} `credentials` object or valid `credentialsProvider` callback
+ */
+async function resolveCredentials(credentialsProvider) {
+    const hasCredentialsProvider = isCredentialsProvider(credentialsProvider);
+    if (hasCredentialsProvider) {
+        return credentialsProvider;
+    }
+    if (credentialsProvider && !hasCredentialsProvider) {
+        throw new Error('Invalid credentialsProvider');
+    }
+    try {
+        const result = (await auth.fetchAuthSession()).credentials;
+        if (isCredentials(result)) {
+            return result;
+        }
+        throw new Error('Missing credentials');
+    }
+    catch (e) {
+        const { message } = e;
+        throw new Error(`Invalid credentials: ${message}`);
+    }
+}
+
+// override aws sdk default value of 60
+const REQUEST_EXPIRY = 299;
+class Signer extends signatureV4.SignatureV4 {
+    presign(request, options) {
+        return super.presign(request, {
+            ...options,
+            expiresIn: REQUEST_EXPIRY,
+            // `headers` that should not be signed. Liveness WebSocket
+            // request omits `headers` except for required `host` header. Signature
+            // could be a mismatch if other `headers` are signed
+            unsignableHeaders: new Set(Object.keys(request.headers).filter((header) => header !== 'host')),
+        });
+    }
+}
+
+const CONNECTION_TIMEOUT = 10000;
+const CUSTOM_USER_AGENT = `${utils.getAmplifyUserAgent()} ${getLivenessUserAgent()}`;
+async function createStreamingClient({ credentialsProvider, endpointOverride, region, }) {
+    const credentials = await resolveCredentials(credentialsProvider);
+    const clientconfig = {
+        credentials,
+        customUserAgent: CUSTOM_USER_AGENT,
+        region,
+        requestHandler: new CustomWebSocketFetchHandler({
+            connectionTimeout: CONNECTION_TIMEOUT,
+        }),
+        signerConstructor: Signer,
+    };
+    if (endpointOverride) {
+        clientconfig.endpointProvider = () => ({ url: new URL(endpointOverride) });
+    }
+    return new clientRekognitionstreaming.RekognitionStreamingClient(clientconfig);
+}
+
 const TIME_SLICE = 1000;
 function isBlob(obj) {
     return obj.arrayBuffer !== undefined;
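The new `createStreamingClient` owns credential resolution, presigning, and the WebSocket handler. A hypothetical call site (the region and credential values are placeholders, not defaults from the package):

```js
const client = await createStreamingClient({
  region: 'us-east-1',
  // Optional: omit this to let resolveCredentials fall back to fetchAuthSession().
  credentialsProvider: async () => ({
    accessKeyId: '<ACCESS_KEY_ID>',
    secretAccessKey: '<SECRET_ACCESS_KEY>',
    sessionToken: '<SESSION_TOKEN>',
  }),
});
```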
@@ -1016,9 +1079,7 @@ class LivenessStreamProvider {
         this.videoRecorder.start(TIME_SLICE);
     }
     sendClientInfo(clientInfo) {
-        this.videoRecorder.dispatch(new MessageEvent('clientSesssionInfo', {
-            data: { clientInfo },
-        }));
+        this.videoRecorder.dispatch(new MessageEvent('clientSesssionInfo', { data: { clientInfo } }));
     }
     async stopVideo() {
         await this.videoRecorder.stop();
@@ -1030,32 +1091,15 @@ class LivenessStreamProvider {
         if (this.videoRecorder.getState() === 'recording') {
             await this.stopVideo();
         }
-        this.videoRecorder.dispatch(new MessageEvent('endStreamWithCode', {
-            data: { code: code },
-        }));
+        this.videoRecorder.dispatch(new MessageEvent('endStreamWithCode', { data: { code } }));
         return;
     }
     async init() {
-
-
-
-        }
-        const clientconfig = {
-            credentials,
+        this._client = await createStreamingClient({
+            credentialsProvider: this.credentialProvider,
+            endpointOverride: this.endpointOverride,
             region: this.region,
-
-            requestHandler: new CustomWebSocketFetchHandler({
-                connectionTimeout: 10000,
-            }),
-        };
-        if (this.endpointOverride) {
-            const override = this.endpointOverride;
-            clientconfig.endpointProvider = () => {
-                const url = new URL(override);
-                return { url };
-            };
-        }
-        this._client = new clientRekognitionstreaming.RekognitionStreamingClient(clientconfig);
+        });
         this.responseStream = await this.startLivenessVideoConnection();
     }
     // Creates a generator from a stream of video chunks and livenessActionDocuments and yields VideoEvent and ClientEvents
@@ -1065,8 +1109,7 @@ class LivenessStreamProvider {
         this._reader = stream.getReader();
         return async function* () {
             while (true) {
-
-                const { done, value } = await current._reader.read();
+                const { done, value } = (await current._reader.read());
                 if (done) {
                     return;
                 }
@@ -1075,7 +1118,7 @@ class LivenessStreamProvider {
                 // sending an empty video chunk signals that we have ended sending video
                 yield {
                     VideoEvent: {
-                        VideoChunk: [],
+                        VideoChunk: new Uint8Array([]),
                         TimestampMillis: Date.now(),
                     },
                 };
@@ -1102,7 +1145,9 @@ class LivenessStreamProvider {
             else if (isEndStreamWithCodeEvent(value)) {
                 yield {
                     VideoEvent: {
-                        VideoChunk: [],
+                        VideoChunk: new Uint8Array([]),
+                        // this is a custom type that does not match LivenessRequestStream.
+                        // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
                         TimestampMillis: { closeCode: value.code },
                     },
                 };
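Both yields now send `new Uint8Array([])` instead of a bare `[]`, so the end-of-video sentinel carries the bytes type the event-stream serializer expects. A sketch of how a consumer might recognize the sentinel (illustrative, not package code):

```js
const isEndOfVideo = (event) =>
  event.VideoEvent?.VideoChunk instanceof Uint8Array &&
  event.VideoEvent.VideoChunk.length === 0;
```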
@@ -1293,11 +1338,64 @@ const STATIC_VIDEO_CONSTRAINTS = {
     facingMode: 'user',
 };
 
-
-const MIN_FACE_MATCH_TIME = 1000;
+const CAMERA_ID_KEY = 'AmplifyLivenessCameraId';
 const DEFAULT_FACE_FIT_TIMEOUT = 7000;
+const MIN_FACE_MATCH_TIME = 1000;
 let responseStream;
-const
+const responseStreamActor = async (callback) => {
+    try {
+        const stream = await responseStream;
+        for await (const event of stream) {
+            if (isServerSesssionInformationEvent(event)) {
+                callback({
+                    type: 'SET_SESSION_INFO',
+                    data: {
+                        sessionInfo: event.ServerSessionInformationEvent.SessionInformation,
+                    },
+                });
+            }
+            else if (isDisconnectionEvent(event)) {
+                callback({ type: 'DISCONNECT_EVENT' });
+            }
+            else if (isValidationExceptionEvent(event)) {
+                callback({
+                    type: 'SERVER_ERROR',
+                    data: { error: { ...event.ValidationException } },
+                });
+            }
+            else if (isInternalServerExceptionEvent(event)) {
+                callback({
+                    type: 'SERVER_ERROR',
+                    data: { error: { ...event.InternalServerException } },
+                });
+            }
+            else if (isThrottlingExceptionEvent(event)) {
+                callback({
+                    type: 'SERVER_ERROR',
+                    data: { error: { ...event.ThrottlingException } },
+                });
+            }
+            else if (isServiceQuotaExceededExceptionEvent(event)) {
+                callback({
+                    type: 'SERVER_ERROR',
+                    data: { error: { ...event.ServiceQuotaExceededException } },
+                });
+            }
+        }
+    }
+    catch (error) {
+        let returnedError = error;
+        if (isInvalidSignatureRegionException(error)) {
+            returnedError = new Error('Invalid region in FaceLivenessDetector or credentials are scoped to the wrong region.');
+        }
+        if (returnedError instanceof Error) {
+            callback({
+                type: 'SERVER_ERROR',
+                data: { error: returnedError },
+            });
+        }
+    }
+};
 function getLastSelectedCameraId() {
     return localStorage.getItem(CAMERA_ID_KEY);
 }
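`responseStreamActor`, now hoisted above the machine definition, follows xstate's callback-service shape: it receives `callback` and pushes events back into the machine. The same shape in miniature (hypothetical event name, not package code):

```js
// A callback service emits events via `callback` and may return a cleanup function.
const tickActor = (callback) => {
  const id = setInterval(() => callback({ type: 'TICK' }), 1000);
  return () => clearInterval(id); // runs when the actor is stopped
};
```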
@@ -1382,12 +1480,12 @@ const livenessMachine = xstate.createMachine({
     },
     states: {
         cameraCheck: {
-            entry:
+            entry: 'resetErrorState',
             invoke: {
                 src: 'checkVirtualCameraAndGetStream',
                 onDone: {
                     target: 'waitForDOMAndCameraDetails',
-                    actions:
+                    actions: 'updateVideoMediaStream',
                 },
                 onError: {
                     target: 'permissionDenied',
@@ -1420,7 +1518,7 @@ const livenessMachine = xstate.createMachine({
                 src: 'detectFace',
                 onDone: {
                     target: 'checkFaceDetectedBeforeStart',
-                    actions:
+                    actions: 'updateFaceMatchBeforeStartDetails',
                 },
             },
         },
@@ -1438,7 +1536,7 @@ const livenessMachine = xstate.createMachine({
                 src: 'detectFaceDistance',
                 onDone: {
                     target: 'checkFaceDistanceBeforeRecording',
-                    actions:
+                    actions: 'updateFaceDistanceBeforeRecording',
                 },
             },
         },
@@ -1482,7 +1580,7 @@ const livenessMachine = xstate.createMachine({
             initial: 'ovalDrawing',
             states: {
                 ovalDrawing: {
-                    entry:
+                    entry: 'sendTimeoutAfterOvalDrawingDelay',
                     invoke: {
                         src: 'detectInitialFaceAndDrawOval',
                         onDone: {
@@ -1512,13 +1610,13 @@ const livenessMachine = xstate.createMachine({
                         0: {
                             target: 'ovalMatching',
                             cond: 'hasRecordingStarted',
-                            actions:
+                            actions: 'updateRecordingStartTimestampMs',
                         },
                         100: { target: 'checkRecordingStarted' },
                     },
                 },
                 ovalMatching: {
-                    entry:
+                    entry: 'cancelOvalDrawingTimeout',
                     invoke: {
                         src: 'detectFaceAndMatchOval',
                         onDone: {
@@ -1566,7 +1664,7 @@ const livenessMachine = xstate.createMachine({
                 },
             },
             success: {
-                entry:
+                entry: 'stopRecording',
                 type: 'final',
             },
         },
@@ -1619,13 +1717,11 @@ const livenessMachine = xstate.createMachine({
         },
         permissionDenied: {
             entry: 'callUserPermissionDeniedCallback',
-            on: {
-                RETRY_CAMERA_CHECK: 'cameraCheck',
-            },
+            on: { RETRY_CAMERA_CHECK: 'cameraCheck' },
         },
         mobileLandscapeWarning: {
             entry: 'callMobileLandscapeWarningCallback',
-            always:
+            always: { target: 'error' },
         },
         timeout: {
             entry: ['cleanUpResources', 'callUserTimeoutCallback', 'freezeStream'],
@@ -1642,7 +1738,7 @@ const livenessMachine = xstate.createMachine({
         },
         userCancel: {
             entry: ['cleanUpResources', 'callUserCancelCallback', 'resetContext'],
-            always:
+            always: { target: 'cameraCheck' },
         },
     },
 }, {
@@ -1651,16 +1747,17 @@ const livenessMachine = xstate.createMachine({
     responseStreamActorRef: () => xstate.spawn(responseStreamActor),
     }),
     updateFailedAttempts: xstate.assign({
-        failedAttempts: (context) => {
-            return context.failedAttempts + 1;
-        },
+        failedAttempts: (context) => context.failedAttempts + 1,
     }),
     updateVideoMediaStream: xstate.assign({
         videoAssociatedParams: (context, event) => ({
             ...context.videoAssociatedParams,
-            videoMediaStream: event.data
-
-
+            videoMediaStream: event.data
+                ?.stream,
+            selectedDeviceId: event.data
+                ?.selectedDeviceId,
+            selectableDevices: event.data
+                ?.selectableDevices,
         }),
     }),
     initializeFaceDetector: xstate.assign({
@@ -1669,29 +1766,23 @@ const livenessMachine = xstate.createMachine({
             const { faceModelUrl, binaryPath } = componentProps.config;
             const faceDetector = new BlazeFaceFaceDetection(binaryPath, faceModelUrl);
             faceDetector.triggerModelLoading();
-            return {
-                ...context.ovalAssociatedParams,
-                faceDetector,
-            };
+            return { ...context.ovalAssociatedParams, faceDetector };
         },
     }),
     updateLivenessStreamProvider: xstate.assign({
-        livenessStreamProvider: (context, event) => {
-            return event.data?.livenessStreamProvider;
-        },
+        livenessStreamProvider: (context, event) => event.data?.livenessStreamProvider,
     }),
     setDOMAndCameraDetails: xstate.assign({
-        videoAssociatedParams: (context, event) => {
-
-
-
-
-
-            };
-        },
+        videoAssociatedParams: (context, event) => ({
+            ...context.videoAssociatedParams,
+            videoEl: event.data?.videoEl,
+            canvasEl: event.data?.canvasEl,
+            isMobile: event.data?.isMobile,
+        }),
         freshnessColorAssociatedParams: (context, event) => ({
             ...context.freshnessColorAssociatedParams,
-            freshnessColorEl: event.data
+            freshnessColorEl: event.data
+                ?.freshnessColorEl,
         }),
     }),
     updateDeviceAndStream: xstate.assign({
@@ -1699,8 +1790,10 @@ const livenessMachine = xstate.createMachine({
             setLastSelectedCameraId(event.data?.newDeviceId);
             return {
                 ...context.videoAssociatedParams,
-                selectedDeviceId: event.data
-
+                selectedDeviceId: event.data
+                    ?.newDeviceId,
+                videoMediaStream: event.data
+                    ?.newStream,
             };
         },
     }),
@@ -1761,50 +1854,49 @@ const livenessMachine = xstate.createMachine({
             'recording') {
             context.livenessStreamProvider.startRecordingLivenessVideo();
         }
-        return {
-            ...context.videoAssociatedParams,
-        };
+        return { ...context.videoAssociatedParams };
         },
     }),
-    stopRecording: (
+    stopRecording: () => { },
     updateFaceMatchBeforeStartDetails: xstate.assign({
-        faceMatchStateBeforeStart: (_, event) => {
-            return event.data.faceMatchState;
-        },
+        faceMatchStateBeforeStart: (_, event) => event.data.faceMatchState,
    }),
     updateFaceDistanceBeforeRecording: xstate.assign({
-        isFaceFarEnoughBeforeRecording: (_, event) => {
-            return event.data.isFaceFarEnoughBeforeRecording;
-        },
+        isFaceFarEnoughBeforeRecording: (_, event) => !!event.data.isFaceFarEnoughBeforeRecording,
     }),
     updateFaceDistanceWhileLoading: xstate.assign({
-        isFaceFarEnoughBeforeRecording: (_, event) =>
-
-        },
-        errorState: (_, event) => {
-            return event.data?.error;
-        },
+        isFaceFarEnoughBeforeRecording: (_, event) => !!event.data.isFaceFarEnoughBeforeRecording,
+        errorState: (_, event) => event.data?.error,
     }),
     updateOvalAndFaceDetailsPostDraw: xstate.assign({
         ovalAssociatedParams: (context, event) => ({
             ...context.ovalAssociatedParams,
-            initialFace: event.data
-
-
+            initialFace: event.data
+                .initialFace,
+            ovalDetails: event.data
+                .ovalDetails,
+            scaleFactor: event.data
+                .scaleFactor,
         }),
         faceMatchAssociatedParams: (context, event) => ({
             ...context.faceMatchAssociatedParams,
-            faceMatchState: event.data
-
+            faceMatchState: event.data
+                .faceMatchState,
+            illuminationState: event.data
+                .illuminationState,
         }),
     }),
     updateFaceDetailsPostMatch: xstate.assign({
         faceMatchAssociatedParams: (context, event) => ({
             ...context.faceMatchAssociatedParams,
-            faceMatchState: event.data
-
-
-
+            faceMatchState: event.data
+                .faceMatchState,
+            faceMatchPercentage: event.data
+                .faceMatchPercentage,
+            illuminationState: event.data
+                .illuminationState,
+            currentDetectedFace: event.data
+                .detectedFace,
         }),
     }),
     updateEndFaceMatch: xstate.assign({
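The action rewrites above collapse block-bodied assigners into expression bodies; both forms are equivalent to xstate. An illustrative before/after using the file's own `xstate` import:

```js
const before = xstate.assign({
  failedAttempts: (context) => {
    return context.failedAttempts + 1;
  },
});
const after = xstate.assign({
  failedAttempts: (context) => context.failedAttempts + 1,
});
```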
@@ -1827,40 +1919,30 @@ const livenessMachine = xstate.createMachine({
             };
         },
     }),
-    resetErrorState: xstate.assign({
-        errorState: (_) => undefined,
-    }),
+    resetErrorState: xstate.assign({ errorState: (_) => undefined }),
     updateErrorStateForTimeout: xstate.assign({
-        errorState: (_, event) => {
-            return event.data?.errorState || LivenessErrorState.TIMEOUT;
-        },
+        errorState: (_, event) => event.data?.errorState || LivenessErrorState.TIMEOUT,
     }),
     updateErrorStateForRuntime: xstate.assign({
-        errorState: (_, event) =>
-
-        },
+        errorState: (_, event) => event.data?.errorState ||
+            LivenessErrorState.RUNTIME_ERROR,
     }),
     updateErrorStateForServer: xstate.assign({
         errorState: (_) => LivenessErrorState.SERVER_ERROR,
     }),
-    clearErrorState: xstate.assign({
-        errorState: (_) => undefined,
-    }),
+    clearErrorState: xstate.assign({ errorState: (_) => undefined }),
     updateSessionInfo: xstate.assign({
-        serverSessionInformation: (
+        serverSessionInformation: (_, event) => {
             return event.data.sessionInfo;
         },
     }),
-    updateShouldDisconnect: xstate.assign({
-        shouldDisconnect: (context) => {
-            return true;
-        },
-    }),
+    updateShouldDisconnect: xstate.assign({ shouldDisconnect: () => true }),
     updateFreshnessDetails: xstate.assign({
         freshnessColorAssociatedParams: (context, event) => {
             return {
                 ...context.freshnessColorAssociatedParams,
-                freshnessColorsComplete: event.data
+                freshnessColorsComplete: event.data
+                    .freshnessColorsComplete,
             };
         },
     }),
@@ -1885,7 +1967,7 @@ const livenessMachine = xstate.createMachine({
         delay: (context) => {
             return (context.serverSessionInformation?.Challenge
                 ?.FaceMovementAndLightChallenge?.ChallengeConfig
-                ?.OvalFitTimeout
+                ?.OvalFitTimeout ?? DEFAULT_FACE_FIT_TIMEOUT);
         },
         id: 'ovalMatchTimeout',
     }),
@@ -1927,14 +2009,12 @@ const livenessMachine = xstate.createMachine({
         },
     }),
     callMobileLandscapeWarningCallback: xstate.assign({
-        errorState: (
-            return LivenessErrorState.MOBILE_LANDSCAPE_ERROR;
-        },
+        errorState: () => LivenessErrorState.MOBILE_LANDSCAPE_ERROR,
     }),
-    callUserCancelCallback:
+    callUserCancelCallback: (context) => {
         context.componentProps.onUserCancel?.();
     },
-    callUserTimeoutCallback:
+    callUserTimeoutCallback: (context) => {
         const error = new Error('Client Timeout');
         error.name = context.errorState;
         const livenessError = {
@@ -1943,14 +2023,14 @@ const livenessMachine = xstate.createMachine({
         };
         context.componentProps.onError?.(livenessError);
     },
-    callErrorCallback:
+    callErrorCallback: (context, event) => {
         const livenessError = {
             state: context.errorState,
             error: event.data?.error || event.data,
         };
         context.componentProps.onError?.(livenessError);
     },
-    cleanUpResources:
+    cleanUpResources: (context) => {
         const { freshnessColorEl } = context.freshnessColorAssociatedParams;
         if (freshnessColorEl) {
             freshnessColorEl.style.display = 'none';
@@ -1969,9 +2049,9 @@ const livenessMachine = xstate.createMachine({
         else if (context.errorState === undefined) {
             closureCode = WS_CLOSURE_CODE.USER_CANCEL;
         }
-
+        context.livenessStreamProvider?.endStreamWithCode(closureCode);
     },
-    freezeStream:
+    freezeStream: (context) => {
         const { videoMediaStream, videoEl } = context.videoAssociatedParams;
         context.isRecordingStopped = true;
         videoEl?.pause();
@@ -1979,7 +2059,7 @@ const livenessMachine = xstate.createMachine({
             track.stop();
         });
     },
-    pauseVideoStream:
+    pauseVideoStream: (context) => {
         const { videoEl } = context.videoAssociatedParams;
         context.isRecordingStopped = true;
         videoEl.pause();
@@ -2035,7 +2115,6 @@ const livenessMachine = xstate.createMachine({
     hasNotEnoughFaceDistanceBeforeRecording: (context) => {
         return !context.isFaceFarEnoughBeforeRecording;
     },
-    hasLivenessCheckSucceeded: (_, __, meta) => meta.state.event.data.isLive,
     hasFreshnessColorShown: (context) => context.freshnessColorAssociatedParams.freshnessColorsComplete,
     hasServerSessionInfo: (context) => {
         return context.serverSessionInformation !== undefined;
@@ -2088,14 +2167,15 @@ const livenessMachine = xstate.createMachine({
         // If the initial stream is of real camera, use it otherwise use the first real camera
         const initialStreamDeviceId = tracksWithMoreThan15Fps[0].getSettings().deviceId;
         const isInitialStreamFromRealDevice = realVideoDevices.some((device) => device.deviceId === initialStreamDeviceId);
-
+        const deviceId = isInitialStreamFromRealDevice
+            ? initialStreamDeviceId
+            : realVideoDevices[0].deviceId;
         let realVideoDeviceStream = initialStream;
         if (!isInitialStreamFromRealDevice) {
-            deviceId = realVideoDevices[0].deviceId;
             realVideoDeviceStream = await navigator.mediaDevices.getUserMedia({
                 video: {
                     ...videoConstraints,
-                    deviceId: { exact:
+                    deviceId: { exact: deviceId },
                 },
                 audio: false,
             });
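Choosing the device up front lets `deviceId` become a `const` instead of being reassigned inside the branch. The selection logic in isolation (hypothetical helper, not part of the package):

```js
const pickDeviceId = (isInitialStreamFromRealDevice, initialStreamDeviceId, realVideoDevices) =>
  isInitialStreamFromRealDevice ? initialStreamDeviceId : realVideoDevices[0].deviceId;
```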
@@ -2107,6 +2187,7 @@ const livenessMachine = xstate.createMachine({
             selectableDevices: realVideoDevices,
         };
     },
+    // eslint-disable-next-line @typescript-eslint/require-await
     async openLivenessStreamConnection(context) {
         const { config } = context.componentProps;
         const { credentialProvider, endpointOverride } = config;
@@ -2129,6 +2210,7 @@ const livenessMachine = xstate.createMachine({
         await faceDetector.modelLoadingPromise;
     }
     catch (err) {
+        // eslint-disable-next-line no-console
         console.log({ err });
     }
     // detect face
@@ -2168,10 +2250,11 @@ const livenessMachine = xstate.createMachine({
         });
         const { isDistanceBelowThreshold: isFaceFarEnoughBeforeRecording, error, } = await isFaceDistanceBelowThreshold({
             faceDetector: faceDetector,
-
+            isMobile,
             ovalDetails,
+            videoEl: videoEl,
+            // if this is the second face distance check reduce the threshold
             reduceThreshold: faceDistanceCheckBeforeRecording,
-            isMobile,
         });
         return { isFaceFarEnoughBeforeRecording, error };
     },
@@ -2185,6 +2268,7 @@ const livenessMachine = xstate.createMachine({
         await livenessStreamProvider.videoRecorder.recorderStarted;
     }
     catch (err) {
+        // eslint-disable-next-line no-console
         console.log({ err });
     }
     // detect face
@@ -2200,6 +2284,7 @@ const livenessMachine = xstate.createMachine({
             break;
         }
         case 1: {
+            //exactly one face detected;
             faceMatchState = FaceMatchState.FACE_IDENTIFIED;
             initialFace = detectedFaces[0];
             break;
@@ -2346,7 +2431,7 @@ const livenessMachine = xstate.createMachine({
             throw new Error('Video chunks not recorded successfully.');
         }
         livenessStreamProvider.sendClientInfo(livenessActionDocument);
-
+        livenessStreamProvider.dispatchStopVideoEvent();
     },
     async getLiveness(context) {
         const { onAnalysisComplete } = context.componentProps;
@@ -2355,60 +2440,6 @@ const livenessMachine = xstate.createMachine({
     },
     },
 });
-const responseStreamActor = async (callback) => {
-    try {
-        const stream = await responseStream;
-        for await (const event of stream) {
-            if (isServerSesssionInformationEvent(event)) {
-                callback({
-                    type: 'SET_SESSION_INFO',
-                    data: {
-                        sessionInfo: event.ServerSessionInformationEvent.SessionInformation,
-                    },
-                });
-            }
-            else if (isDisconnectionEvent(event)) {
-                callback({ type: 'DISCONNECT_EVENT' });
-            }
-            else if (isValidationExceptionEvent(event)) {
-                callback({
-                    type: 'SERVER_ERROR',
-                    data: { error: { ...event.ValidationException } },
-                });
-            }
-            else if (isInternalServerExceptionEvent(event)) {
-                callback({
-                    type: 'SERVER_ERROR',
-                    data: { error: { ...event.InternalServerException } },
-                });
-            }
-            else if (isThrottlingExceptionEvent(event)) {
-                callback({
-                    type: 'SERVER_ERROR',
-                    data: { error: { ...event.ThrottlingException } },
-                });
-            }
-            else if (isServiceQuotaExceededExceptionEvent(event)) {
-                callback({
-                    type: 'SERVER_ERROR',
-                    data: { error: { ...event.ServiceQuotaExceededException } },
-                });
-            }
-        }
-    }
-    catch (error) {
-        let returnedError = error;
-        if (isInvalidSignatureRegionException(error)) {
-            returnedError = new Error('Invalid region in FaceLivenessDetector or credentials are scoped to the wrong region.');
-        }
-        if (returnedError instanceof Error) {
-            callback({
-                type: 'SERVER_ERROR',
-                data: { error: returnedError },
-            });
-        }
-    }
-};
 
 const FaceLivenessDetectorContext = React__default["default"].createContext(null);
 function FaceLivenessDetectorProvider({ children, ...props }) {
@@ -2488,11 +2519,6 @@ var LivenessClassNames;
     LivenessClassNames["FreshnessCanvas"] = "amplify-liveness-freshness-canvas";
     LivenessClassNames["InstructionList"] = "amplify-liveness-instruction-list";
     LivenessClassNames["InstructionOverlay"] = "amplify-liveness-instruction-overlay";
-    LivenessClassNames["Figure"] = "amplify-liveness-figure";
-    LivenessClassNames["FigureCaption"] = "amplify-liveness-figure__caption";
-    LivenessClassNames["FigureIcon"] = "amplify-liveness-figure__icon";
-    LivenessClassNames["FigureImage"] = "amplify-liveness-figure__image";
-    LivenessClassNames["Figures"] = "amplify-liveness-figures";
     LivenessClassNames["Hint"] = "amplify-liveness-hint";
     LivenessClassNames["HintText"] = "amplify-liveness-hint__text";
     LivenessClassNames["LandscapeErrorModal"] = "amplify-liveness-landscape-error-modal";
@@ -2918,22 +2944,15 @@ const LivenessCameraModule = (props) => {
     const [mediaHeight, setMediaHeight] = React.useState(videoHeight);
     const [aspectRatio, setAspectRatio] = React.useState(() => videoWidth && videoHeight ? videoWidth / videoHeight : 0);
     React__default["default"].useEffect(() => {
-        if (canvasRef &&
-            videoRef &&
-            canvasRef.current &&
-            videoRef.current &&
-            videoStream &&
-            isStartView) {
+        if (canvasRef?.current && videoRef?.current && videoStream && isStartView) {
             drawStaticOval(canvasRef.current, videoRef.current, videoStream);
         }
     }, [canvasRef, videoRef, videoStream, colorMode, isStartView]);
     React__default["default"].useEffect(() => {
         const updateColorModeHandler = (e) => {
             if (e.matches &&
-                canvasRef &&
-                videoRef &&
-                canvasRef.current &&
-                videoRef.current &&
+                canvasRef?.current &&
+                videoRef?.current &&
                 videoStream &&
                 isStartView) {
                 drawStaticOval(canvasRef.current, videoRef.current, videoStream);