@aws-amplify/ui-react-liveness 3.0.12 → 3.0.14

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (23)
  1. package/dist/esm/components/FaceLivenessDetector/LivenessCheck/LivenessCameraModule.mjs +3 -10
  2. package/dist/esm/components/FaceLivenessDetector/service/machine/index.mjs +144 -158
  3. package/dist/esm/components/FaceLivenessDetector/service/utils/CustomWebSocketFetchHandler.mjs +3 -4
  4. package/dist/esm/components/FaceLivenessDetector/service/utils/liveness.mjs +62 -67
  5. package/dist/esm/components/FaceLivenessDetector/service/utils/streamProvider.mjs +6 -5
  6. package/dist/esm/components/FaceLivenessDetector/service/utils/videoRecorder.mjs +1 -2
  7. package/dist/esm/components/FaceLivenessDetector/types/classNames.mjs +0 -5
  8. package/dist/esm/version.mjs +1 -1
  9. package/dist/index.js +218 -250
  10. package/dist/styles.css +6 -2
  11. package/dist/types/components/FaceLivenessDetector/hooks/useLivenessActor.d.ts +1 -1
  12. package/dist/types/components/FaceLivenessDetector/service/machine/index.d.ts +3 -4
  13. package/dist/types/components/FaceLivenessDetector/service/types/machine.d.ts +17 -18
  14. package/dist/types/components/FaceLivenessDetector/service/utils/liveness.d.ts +7 -9
  15. package/dist/types/components/FaceLivenessDetector/service/utils/streamProvider.d.ts +3 -10
  16. package/dist/types/components/FaceLivenessDetector/service/utils/videoRecorder.d.ts +1 -7
  17. package/dist/types/components/FaceLivenessDetector/shared/index.d.ts +0 -3
  18. package/dist/types/components/FaceLivenessDetector/types/classNames.d.ts +0 -5
  19. package/dist/types/version.d.ts +1 -1
  20. package/package.json +3 -3
  21. package/dist/types/components/FaceLivenessDetector/shared/GoodFitIllustration.d.ts +0 -7
  22. package/dist/types/components/FaceLivenessDetector/shared/StartScreenFigure.d.ts +0 -8
  23. package/dist/types/components/FaceLivenessDetector/shared/TooFarIllustration.d.ts +0 -7
package/dist/index.js CHANGED
@@ -5,6 +5,7 @@ Object.defineProperty(exports, '__esModule', { value: true });
  var React = require('react');
  var auth = require('aws-amplify/auth');
  var react = require('@xstate/react');
+ var nanoid = require('nanoid');
  var xstate = require('xstate');
  var tfjsCore = require('@tensorflow/tfjs-core');
  var faceDetection = require('@tensorflow-models/face-detection');
@@ -16,7 +17,6 @@ var utilFormatUrl = require('@aws-sdk/util-format-url');
  var eventstreamSerdeBrowser = require('@smithy/eventstream-serde-browser');
  var fetchHttpHandler = require('@smithy/fetch-http-handler');
  var protocolHttp = require('@smithy/protocol-http');
- var nanoid = require('nanoid');
  var uiReact = require('@aws-amplify/ui-react');
  var ui = require('@aws-amplify/ui');
  var internal = require('@aws-amplify/ui-react/internal');
@@ -113,7 +113,6 @@ const WS_CLOSURE_CODE = {
  USER_ERROR_DURING_CONNECTION: 4007,
  };

- /* eslint-disable */
  /**
  * Returns the random number between min and max
  * seeded with the provided random seed.
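Note: the comment above describes a helper that maps a random seed onto a [min, max) range. A minimal sketch of that mapping, assuming the seed is already a float in [0, 1); the function name here is illustrative, not the package's actual export:

// Sketch only: linearly maps a seed in [0, 1) onto [min, max).
function seededValueInRange(seed, min, max) {
  return seed * (max - min) + min;
}
// seededValueInRange(0.5, 100, 200) === 150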
@@ -204,31 +203,6 @@ function getStaticLivenessOvalDetails({ width, height, widthSeed = 1.0, centerXS
  height: Math.floor(ovalHeight),
  };
  }
- function drawStaticOval(canvasEl, videoEl, videoMediaStream) {
- const { width, height } = videoMediaStream.getTracks()[0].getSettings();
- // Get width/height of video element so we can compute scaleFactor
- // and set canvas width/height.
- const { width: videoScaledWidth, height: videoScaledHeight } = videoEl.getBoundingClientRect();
- canvasEl.width = Math.ceil(videoScaledWidth);
- canvasEl.height = Math.ceil(videoScaledHeight);
- const ovalDetails = getStaticLivenessOvalDetails({
- width: width,
- height: height,
- ratioMultiplier: 0.5,
- });
- ovalDetails.flippedCenterX = width - ovalDetails.centerX;
- // Compute scaleFactor which is how much our video element is scaled
- // vs the intrinsic video resolution
- const scaleFactor = videoScaledWidth / videoEl.videoWidth;
- // Draw oval in canvas using ovalDetails and scaleFactor
- drawLivenessOvalInCanvas({
- canvas: canvasEl,
- oval: ovalDetails,
- scaleFactor,
- videoEl: videoEl,
- isStartScreen: true,
- });
- }
  /**
  * Draws the provided liveness oval on the canvas.
  */
@@ -273,6 +247,31 @@ function drawLivenessOvalInCanvas({ canvas, oval, scaleFactor, videoEl, isStartS
  throw new Error('Cannot find Canvas.');
  }
  }
+ function drawStaticOval(canvasEl, videoEl, videoMediaStream) {
+ const { width, height } = videoMediaStream.getTracks()[0].getSettings();
+ // Get width/height of video element so we can compute scaleFactor
+ // and set canvas width/height.
+ const { width: videoScaledWidth, height: videoScaledHeight } = videoEl.getBoundingClientRect();
+ canvasEl.width = Math.ceil(videoScaledWidth);
+ canvasEl.height = Math.ceil(videoScaledHeight);
+ const ovalDetails = getStaticLivenessOvalDetails({
+ width: width,
+ height: height,
+ ratioMultiplier: 0.5,
+ });
+ ovalDetails.flippedCenterX = width - ovalDetails.centerX;
+ // Compute scaleFactor which is how much our video element is scaled
+ // vs the intrinsic video resolution
+ const scaleFactor = videoScaledWidth / videoEl.videoWidth;
+ // Draw oval in canvas using ovalDetails and scaleFactor
+ drawLivenessOvalInCanvas({
+ canvas: canvasEl,
+ oval: ovalDetails,
+ scaleFactor,
+ videoEl: videoEl,
+ isStartScreen: true,
+ });
+ }
  function clearOvalCanvas({ canvas, }) {
  const ctx = canvas.getContext('2d');
  if (ctx) {
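Note: the two hunks above only move drawStaticOval below drawLivenessOvalInCanvas, so the callee is defined before its caller. The scaleFactor it computes is just how much the rendered video element is scaled relative to the camera's intrinsic resolution; a sketch with illustrative numbers:

// Sketch: displayed width over intrinsic width.
const { width: displayedWidth } = videoEl.getBoundingClientRect();
const scaleFactor = displayedWidth / videoEl.videoWidth; // e.g. 480 / 640 = 0.75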
@@ -283,9 +282,43 @@ function clearOvalCanvas({ canvas, }) {
  throw new Error('Cannot find Canvas.');
  }
  }
+ function getPupilDistanceAndFaceHeight(face) {
+ const { leftEye, rightEye, mouth } = face;
+ const eyeCenter = [];
+ eyeCenter[0] = (leftEye[0] + rightEye[0]) / 2;
+ eyeCenter[1] = (leftEye[1] + rightEye[1]) / 2;
+ const pupilDistance = Math.sqrt((leftEye[0] - rightEye[0]) ** 2 + (leftEye[1] - rightEye[1]) ** 2);
+ const faceHeight = Math.sqrt((eyeCenter[0] - mouth[0]) ** 2 + (eyeCenter[1] - mouth[1]) ** 2);
+ return { pupilDistance, faceHeight };
+ }
+ function generateBboxFromLandmarks(face, oval) {
+ const { leftEye, rightEye, nose, leftEar, rightEar, top: faceTop, height: faceHeight, } = face;
+ const { height: ovalHeight, centerY } = oval;
+ const ovalTop = centerY - ovalHeight / 2;
+ const eyeCenter = [];
+ eyeCenter[0] = (leftEye[0] + rightEye[0]) / 2;
+ eyeCenter[1] = (leftEye[1] + rightEye[1]) / 2;
+ const { pupilDistance: pd, faceHeight: fh } = getPupilDistanceAndFaceHeight(face);
+ const alpha = 2.0, gamma = 1.8;
+ const ow = (alpha * pd + gamma * fh) / 2;
+ const oh = 1.618 * ow;
+ let cx;
+ if (eyeCenter[1] <= (ovalTop + ovalHeight) / 2) {
+ cx = (eyeCenter[0] + nose[0]) / 2;
+ }
+ else {
+ cx = eyeCenter[0];
+ }
+ const bottom = faceTop + faceHeight;
+ const top = bottom - oh;
+ const left = Math.min(cx - ow / 2, rightEar[0]);
+ const right = Math.max(cx + ow / 2, leftEar[0]);
+ return { bottom, left, right, top };
+ }
  /**
  * Returns the state of the provided face with respect to the provided liveness oval.
  */
+ // eslint-disable-next-line max-params
  function getFaceMatchStateInLivenessOval(face, ovalDetails, initialFaceIntersection, sessionInformation) {
  let faceMatchState;
  const challengeConfig = sessionInformation?.Challenge?.FaceMovementAndLightChallenge
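Note: the bounding-box heuristic above estimates face width from two landmark distances and applies a golden-ratio height. A worked example with made-up landmark distances (all numbers illustrative):

// Illustrative numbers only: pupils 60px apart, eye-center-to-mouth 55px.
// pd = 60, fh = 55
// ow = (2.0 * 60 + 1.8 * 55) / 2 = 109.5   // estimated face width
// oh = 1.618 * 109.5 ≈ 177.2               // golden-ratio face height
// The box bottom is anchored at the chin (faceTop + faceHeight) and the
// sides are widened to at least the ear landmarks via Math.min / Math.max.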
@@ -312,7 +345,7 @@ function getFaceMatchStateInLivenessOval(face, ovalDetails, initialFaceIntersect
  const faceDetectionWidthThreshold = ovalDetails.width * FaceIouWidthThreshold;
  const faceDetectionHeightThreshold = ovalDetails.height * FaceIouHeightThreshold;
  /** From Science
- * p=max(min(1,0.75∗(si​−s0)/(st​−s0)+0.25)),0)
+ * p=max(min(1,0.75∗(si−s0)/(st−s0)+0.25)),0)
  */
  const faceMatchPercentage = Math.max(Math.min(1, (0.75 * (intersection - initialFaceIntersection)) /
  (intersectionThreshold - initialFaceIntersection) +
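Note: the comment's parentheses are slightly unbalanced; what the code below it actually computes is a clamped linear ramp from the initial face/oval intersection s0 toward the match threshold st:

// Equivalent sketch: p ramps from 0.25 at s = s0 up to 1 at s = st,
// clamped to [0, 1].
const p = Math.max(Math.min(1, 0.75 * (si - s0) / (st - s0) + 0.25), 0);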
@@ -339,44 +372,6 @@ function getFaceMatchStateInLivenessOval(face, ovalDetails, initialFaceIntersect
  }
  return { faceMatchState, faceMatchPercentage };
  }
- function getPupilDistanceAndFaceHeight(face) {
- const { leftEye, rightEye, mouth } = face;
- const eyeCenter = [];
- eyeCenter[0] = (leftEye[0] + rightEye[0]) / 2;
- eyeCenter[1] = (leftEye[1] + rightEye[1]) / 2;
- const pupilDistance = Math.sqrt((leftEye[0] - rightEye[0]) ** 2 + (leftEye[1] - rightEye[1]) ** 2);
- const faceHeight = Math.sqrt((eyeCenter[0] - mouth[0]) ** 2 + (eyeCenter[1] - mouth[1]) ** 2);
- return { pupilDistance, faceHeight };
- }
- function generateBboxFromLandmarks(face, oval) {
- const { leftEye, rightEye, nose, leftEar, rightEar, top: faceTop, height: faceHeight, } = face;
- const { height: ovalHeight, centerY } = oval;
- const ovalTop = centerY - ovalHeight / 2;
- const eyeCenter = [];
- eyeCenter[0] = (leftEye[0] + rightEye[0]) / 2;
- eyeCenter[1] = (leftEye[1] + rightEye[1]) / 2;
- const { pupilDistance: pd, faceHeight: fh } = getPupilDistanceAndFaceHeight(face);
- const alpha = 2.0, gamma = 1.8;
- const ow = (alpha * pd + gamma * fh) / 2;
- const oh = 1.618 * ow;
- let cx;
- if (eyeCenter[1] <= (ovalTop + ovalHeight) / 2) {
- cx = (eyeCenter[0] + nose[0]) / 2;
- }
- else {
- cx = eyeCenter[0];
- }
- const faceBottom = faceTop + faceHeight;
- const top = faceBottom - oh;
- const left = Math.min(cx - ow / 2, rightEar[0]);
- const right = Math.max(cx + ow / 2, leftEar[0]);
- return {
- left: left,
- top: top,
- right: right,
- bottom: faceBottom,
- };
- }
  /**
  * Returns the illumination state in the provided video frame.
  */
@@ -492,7 +487,7 @@ function fillOverlayCanvasFractional({ overlayCanvas, prevColor, nextColor, vide
  const isClientFreshnessColorSequence = (obj) => !!obj;
  function getColorsSequencesFromSessionInformation(sessionInformation) {
  const colorSequenceFromSessionInfo = sessionInformation.Challenge.FaceMovementAndLightChallenge
- .ColorSequences || [];
+ .ColorSequences ?? [];
  const colorSequences = colorSequenceFromSessionInfo.map(({ FreshnessColor, DownscrollDuration: downscrollDuration, FlatDisplayDuration: flatDisplayDuration, }) => {
  const colorArray = FreshnessColor.RGB;
  const color = `rgb(${colorArray[0]},${colorArray[1]},${colorArray[2]})`;
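Note: this release swaps || for ?? in several fallbacks. For this array property the two behave identically, but nullish coalescing is the stricter choice because it only falls back on null or undefined:

// Sketch of the difference between the two operators:
const a = 0 || 42;         // 42: || also discards falsy values like 0 and ''
const b = 0 ?? 42;         // 0: ?? keeps any non-nullish value
const c = undefined ?? 42; // 42: both operators fall back on undefined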
@@ -550,7 +545,7 @@ async function isFaceDistanceBelowThreshold({ faceDetector, videoEl, ovalDetails
  case 1: {
  //exactly one face detected, match face with oval;
  detectedFace = detectedFaces[0];
- const width = ovalDetails.width;
+ const { width } = ovalDetails;
  const { pupilDistance, faceHeight } = getPupilDistanceAndFaceHeight(detectedFace);
  const alpha = 2.0, gamma = 1.8;
  const calibratedPupilDistance = (alpha * pupilDistance + gamma * faceHeight) / 2 / alpha;
@@ -694,12 +689,11 @@ class BlazeFaceFaceDetection extends FaceDetection {
  * Helper wrapper class over the native MediaRecorder.
  */
  class VideoRecorder {
- constructor(stream, options = {}) {
+ constructor(stream) {
  if (typeof MediaRecorder === 'undefined') {
  throw Error('MediaRecorder is not supported by this browser');
  }
  this._stream = stream;
- this._options = options;
  this._chunks = [];
  this._recorder = new MediaRecorder(stream, { bitsPerSecond: 1000000 });
  this._setupCallbacks();
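Note: VideoRecorder wraps the browser's native MediaRecorder; the unused options parameter is dropped since the recorder is always constructed with a fixed bitrate. A minimal sketch of the underlying pattern (variable names illustrative):

// Collect recorded chunks and assemble them into a Blob on stop.
const recorder = new MediaRecorder(stream, { bitsPerSecond: 1000000 });
const chunks = [];
recorder.ondataavailable = (e) => {
  if (e.data.size > 0) chunks.push(e.data);
};
recorder.onstop = () => {
  const video = new Blob(chunks); // the assembled recording
};
recorder.start();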
@@ -787,7 +781,7 @@ class VideoRecorder {
  }
  }

- const VERSION = '3.0.12';
+ const VERSION = '3.0.14';

  const BASE_USER_AGENT = `ui-react-liveness/${VERSION}`;
  const getLivenessUserAgent = () => {
@@ -813,10 +807,10 @@ const getIterator = (stream) => {
  return stream;
  }
  if (isReadableStream(stream)) {
- //If stream is a ReadableStream, transfer the ReadableStream to async iterable.
+ // If stream is a ReadableStream, transfer the ReadableStream to async iterable.
  return eventstreamSerdeBrowser.readableStreamtoIterable(stream);
  }
- //For other types, just wrap them with an async iterable.
+ // For other types, just wrap them with an async iterable.
  return {
  [Symbol.asyncIterator]: async function* () {
  yield stream;
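Note: getIterator normalizes three input shapes into one async-iterable interface. Roughly what the Smithy helper does for the ReadableStream case, sketched by hand:

// Sketch: adapt a ReadableStream into an async iterable.
async function* iterate(readable) {
  const reader = readable.getReader();
  try {
    while (true) {
      const { done, value } = await reader.read();
      if (done) return;
      yield value;
    }
  } finally {
    reader.releaseLock();
  }
}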
@@ -875,8 +869,7 @@ class CustomWebSocketFetchHandler {
  }
  this.sockets[url].push(socket);
  socket.binaryType = 'arraybuffer';
- const { connectionTimeout = DEFAULT_WS_CONNECTION_TIMEOUT_MS } = await this
- .configPromise;
+ const { connectionTimeout = DEFAULT_WS_CONNECTION_TIMEOUT_MS } = await this.configPromise;
  await this.waitForReady(socket, connectionTimeout);
  const { body } = request;
  const bodyStream = getIterator(body);
@@ -1065,8 +1058,7 @@ class LivenessStreamProvider {
  this._reader = stream.getReader();
  return async function* () {
  while (true) {
- // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
- const { done, value } = await current._reader.read();
+ const { done, value } = (await current._reader.read());
  if (done) {
  return;
  }
@@ -1075,7 +1067,7 @@ class LivenessStreamProvider {
  // sending an empty video chunk signals that we have ended sending video
  yield {
  VideoEvent: {
- VideoChunk: [],
+ VideoChunk: new Uint8Array([]),
  TimestampMillis: Date.now(),
  },
  };
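Note: the end-of-video sentinel changes from a plain array to a zero-length Uint8Array, matching the binary payload type the event-stream marshaller expects. A sketch of why the distinction matters:

// A plain [] is an Array, not a binary payload; a Uint8Array of
// length 0 is still valid binary and still signals 'no more video'.
const typed = new Uint8Array([]);
typed instanceof Uint8Array; // true
typed.byteLength;            // 0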
@@ -1102,7 +1094,9 @@ class LivenessStreamProvider {
  else if (isEndStreamWithCodeEvent(value)) {
  yield {
  VideoEvent: {
- VideoChunk: [],
+ VideoChunk: new Uint8Array([]),
+ // this is a custom type that does not match LivenessRequestStream.
+ // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
  TimestampMillis: { closeCode: value.code },
  },
  };
@@ -1293,11 +1287,64 @@ const STATIC_VIDEO_CONSTRAINTS = {
  facingMode: 'user',
  };

- /* eslint-disable */
- const MIN_FACE_MATCH_TIME = 1000;
+ const CAMERA_ID_KEY = 'AmplifyLivenessCameraId';
  const DEFAULT_FACE_FIT_TIMEOUT = 7000;
+ const MIN_FACE_MATCH_TIME = 1000;
  let responseStream;
- const CAMERA_ID_KEY = 'AmplifyLivenessCameraId';
+ const responseStreamActor = async (callback) => {
+ try {
+ const stream = await responseStream;
+ for await (const event of stream) {
+ if (isServerSesssionInformationEvent(event)) {
+ callback({
+ type: 'SET_SESSION_INFO',
+ data: {
+ sessionInfo: event.ServerSessionInformationEvent.SessionInformation,
+ },
+ });
+ }
+ else if (isDisconnectionEvent(event)) {
+ callback({ type: 'DISCONNECT_EVENT' });
+ }
+ else if (isValidationExceptionEvent(event)) {
+ callback({
+ type: 'SERVER_ERROR',
+ data: { error: { ...event.ValidationException } },
+ });
+ }
+ else if (isInternalServerExceptionEvent(event)) {
+ callback({
+ type: 'SERVER_ERROR',
+ data: { error: { ...event.InternalServerException } },
+ });
+ }
+ else if (isThrottlingExceptionEvent(event)) {
+ callback({
+ type: 'SERVER_ERROR',
+ data: { error: { ...event.ThrottlingException } },
+ });
+ }
+ else if (isServiceQuotaExceededExceptionEvent(event)) {
+ callback({
+ type: 'SERVER_ERROR',
+ data: { error: { ...event.ServiceQuotaExceededException } },
+ });
+ }
+ }
+ }
+ catch (error) {
+ let returnedError = error;
+ if (isInvalidSignatureRegionException(error)) {
+ returnedError = new Error('Invalid region in FaceLivenessDetector or credentials are scoped to the wrong region.');
+ }
+ if (returnedError instanceof Error) {
+ callback({
+ type: 'SERVER_ERROR',
+ data: { error: returnedError },
+ });
+ }
+ }
+ };
  function getLastSelectedCameraId() {
  return localStorage.getItem(CAMERA_ID_KEY);
  }
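Note: responseStreamActor is an XState callback service: it awaits the response stream, translates each server event into a machine event, and reports stream failures as SERVER_ERROR. A minimal sketch of the callback-service pattern it uses (event name and interval are illustrative):

// An XState v4 callback service: `callback` sends events back to the
// spawning machine; the returned function is the cleanup on stop.
const tickActor = (callback) => {
  const id = setInterval(() => callback({ type: 'TICK' }), 1000);
  return () => clearInterval(id);
};
// spawned the same way as in the machine below: xstate.spawn(tickActor)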
@@ -1382,12 +1429,12 @@ const livenessMachine = xstate.createMachine({
  },
  states: {
  cameraCheck: {
- entry: ['resetErrorState'],
+ entry: 'resetErrorState',
  invoke: {
  src: 'checkVirtualCameraAndGetStream',
  onDone: {
  target: 'waitForDOMAndCameraDetails',
- actions: ['updateVideoMediaStream'],
+ actions: 'updateVideoMediaStream',
  },
  onError: {
  target: 'permissionDenied',
@@ -1420,7 +1467,7 @@
  src: 'detectFace',
  onDone: {
  target: 'checkFaceDetectedBeforeStart',
- actions: ['updateFaceMatchBeforeStartDetails'],
+ actions: 'updateFaceMatchBeforeStartDetails',
  },
  },
  },
@@ -1438,7 +1485,7 @@
  src: 'detectFaceDistance',
  onDone: {
  target: 'checkFaceDistanceBeforeRecording',
- actions: ['updateFaceDistanceBeforeRecording'],
+ actions: 'updateFaceDistanceBeforeRecording',
  },
  },
  },
@@ -1482,7 +1529,7 @@
  initial: 'ovalDrawing',
  states: {
  ovalDrawing: {
- entry: ['sendTimeoutAfterOvalDrawingDelay'],
+ entry: 'sendTimeoutAfterOvalDrawingDelay',
  invoke: {
  src: 'detectInitialFaceAndDrawOval',
  onDone: {
@@ -1512,13 +1559,13 @@
  0: {
  target: 'ovalMatching',
  cond: 'hasRecordingStarted',
- actions: ['updateRecordingStartTimestampMs'],
+ actions: 'updateRecordingStartTimestampMs',
  },
  100: { target: 'checkRecordingStarted' },
  },
  },
  ovalMatching: {
- entry: ['cancelOvalDrawingTimeout'],
+ entry: 'cancelOvalDrawingTimeout',
  invoke: {
  src: 'detectFaceAndMatchOval',
  onDone: {
@@ -1566,7 +1613,7 @@
  },
  },
  success: {
- entry: ['stopRecording'],
+ entry: 'stopRecording',
  type: 'final',
  },
  },
@@ -1619,13 +1666,11 @@
  },
  permissionDenied: {
  entry: 'callUserPermissionDeniedCallback',
- on: {
- RETRY_CAMERA_CHECK: 'cameraCheck',
- },
+ on: { RETRY_CAMERA_CHECK: 'cameraCheck' },
  },
  mobileLandscapeWarning: {
  entry: 'callMobileLandscapeWarningCallback',
- always: [{ target: 'error' }],
+ always: { target: 'error' },
  },
  timeout: {
  entry: ['cleanUpResources', 'callUserTimeoutCallback', 'freezeStream'],
@@ -1642,7 +1687,7 @@
  },
  userCancel: {
  entry: ['cleanUpResources', 'callUserCancelCallback', 'resetContext'],
- always: [{ target: 'cameraCheck' }],
+ always: { target: 'cameraCheck' },
  },
  },
  }, {
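Note: all of the machine-config hunks above are de-sugaring. XState accepts a single action, transition, or target in place of a one-element array, so unwrapping the brackets changes nothing at runtime. A sketch of the equivalence:

// Equivalent state definitions in XState v4:
const verbose = { entry: ['resetErrorState'], always: [{ target: 'error' }] };
const concise = { entry: 'resetErrorState', always: { target: 'error' } };
// Both run one entry action and take one eventless transition.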
@@ -1651,16 +1696,17 @@
  responseStreamActorRef: () => xstate.spawn(responseStreamActor),
  }),
  updateFailedAttempts: xstate.assign({
- failedAttempts: (context) => {
- return context.failedAttempts + 1;
- },
+ failedAttempts: (context) => context.failedAttempts + 1,
  }),
  updateVideoMediaStream: xstate.assign({
  videoAssociatedParams: (context, event) => ({
  ...context.videoAssociatedParams,
- videoMediaStream: event.data?.stream,
- selectedDeviceId: event.data?.selectedDeviceId,
- selectableDevices: event.data?.selectableDevices,
+ videoMediaStream: event.data
+ ?.stream,
+ selectedDeviceId: event.data
+ ?.selectedDeviceId,
+ selectableDevices: event.data
+ ?.selectableDevices,
  }),
  }),
  initializeFaceDetector: xstate.assign({
@@ -1669,29 +1715,23 @@
  const { faceModelUrl, binaryPath } = componentProps.config;
  const faceDetector = new BlazeFaceFaceDetection(binaryPath, faceModelUrl);
  faceDetector.triggerModelLoading();
- return {
- ...context.ovalAssociatedParams,
- faceDetector,
- };
+ return { ...context.ovalAssociatedParams, faceDetector };
  },
  }),
  updateLivenessStreamProvider: xstate.assign({
- livenessStreamProvider: (context, event) => {
- return event.data?.livenessStreamProvider;
- },
+ livenessStreamProvider: (context, event) => event.data?.livenessStreamProvider,
  }),
  setDOMAndCameraDetails: xstate.assign({
- videoAssociatedParams: (context, event) => {
- return {
- ...context.videoAssociatedParams,
- videoEl: event.data?.videoEl,
- canvasEl: event.data?.canvasEl,
- isMobile: event.data?.isMobile,
- };
- },
+ videoAssociatedParams: (context, event) => ({
+ ...context.videoAssociatedParams,
+ videoEl: event.data?.videoEl,
+ canvasEl: event.data?.canvasEl,
+ isMobile: event.data?.isMobile,
+ }),
  freshnessColorAssociatedParams: (context, event) => ({
  ...context.freshnessColorAssociatedParams,
- freshnessColorEl: event.data?.freshnessColorEl,
+ freshnessColorEl: event.data
+ ?.freshnessColorEl,
  }),
  }),
  updateDeviceAndStream: xstate.assign({
@@ -1699,8 +1739,10 @@
  setLastSelectedCameraId(event.data?.newDeviceId);
  return {
  ...context.videoAssociatedParams,
- selectedDeviceId: event.data?.newDeviceId,
- videoMediaStream: event.data?.newStream,
+ selectedDeviceId: event.data
+ ?.newDeviceId,
+ videoMediaStream: event.data
+ ?.newStream,
  };
  },
  }),
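Note: these assign hunks collapse block-bodied updaters into expression bodies; the odd mid-expression line breaks in the new output (event.data / ?.stream) are presumably just how the compiler wraps the long type assertions added in the TypeScript source. A sketch of the assign pattern itself:

// xstate.assign maps per-key updaters of (context, event) onto context.
const increment = xstate.assign({
  count: (context, event) => context.count + event.data,
});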
@@ -1761,50 +1803,49 @@
  'recording') {
  context.livenessStreamProvider.startRecordingLivenessVideo();
  }
- return {
- ...context.videoAssociatedParams,
- };
+ return { ...context.videoAssociatedParams };
  },
  }),
- stopRecording: (context) => { },
+ stopRecording: () => { },
  updateFaceMatchBeforeStartDetails: xstate.assign({
- faceMatchStateBeforeStart: (_, event) => {
- return event.data.faceMatchState;
- },
+ faceMatchStateBeforeStart: (_, event) => event.data.faceMatchState,
  }),
  updateFaceDistanceBeforeRecording: xstate.assign({
- isFaceFarEnoughBeforeRecording: (_, event) => {
- return event.data.isFaceFarEnoughBeforeRecording;
- },
+ isFaceFarEnoughBeforeRecording: (_, event) => !!event.data.isFaceFarEnoughBeforeRecording,
  }),
  updateFaceDistanceWhileLoading: xstate.assign({
- isFaceFarEnoughBeforeRecording: (_, event) => {
- return event.data.isFaceFarEnoughBeforeRecording;
- },
- errorState: (_, event) => {
- return event.data?.error;
- },
+ isFaceFarEnoughBeforeRecording: (_, event) => !!event.data.isFaceFarEnoughBeforeRecording,
+ errorState: (_, event) => event.data?.error,
  }),
  updateOvalAndFaceDetailsPostDraw: xstate.assign({
  ovalAssociatedParams: (context, event) => ({
  ...context.ovalAssociatedParams,
- initialFace: event.data.initialFace,
- ovalDetails: event.data.ovalDetails,
- scaleFactor: event.data.scaleFactor,
+ initialFace: event.data
+ .initialFace,
+ ovalDetails: event.data
+ .ovalDetails,
+ scaleFactor: event.data
+ .scaleFactor,
  }),
  faceMatchAssociatedParams: (context, event) => ({
  ...context.faceMatchAssociatedParams,
- faceMatchState: event.data.faceMatchState,
- illuminationState: event.data.illuminationState,
+ faceMatchState: event.data
+ .faceMatchState,
+ illuminationState: event.data
+ .illuminationState,
  }),
  }),
  updateFaceDetailsPostMatch: xstate.assign({
  faceMatchAssociatedParams: (context, event) => ({
  ...context.faceMatchAssociatedParams,
- faceMatchState: event.data.faceMatchState,
- faceMatchPercentage: event.data.faceMatchPercentage,
- illuminationState: event.data.illuminationState,
- currentDetectedFace: event.data.detectedFace,
+ faceMatchState: event.data
+ .faceMatchState,
+ faceMatchPercentage: event.data
+ .faceMatchPercentage,
+ illuminationState: event.data
+ .illuminationState,
+ currentDetectedFace: event.data
+ .detectedFace,
  }),
  }),
  updateEndFaceMatch: xstate.assign({
@@ -1827,40 +1868,30 @@
  };
  },
  }),
- resetErrorState: xstate.assign({
- errorState: (_) => undefined,
- }),
+ resetErrorState: xstate.assign({ errorState: (_) => undefined }),
  updateErrorStateForTimeout: xstate.assign({
- errorState: (_, event) => {
- return event.data?.errorState || LivenessErrorState.TIMEOUT;
- },
+ errorState: (_, event) => event.data?.errorState || LivenessErrorState.TIMEOUT,
  }),
  updateErrorStateForRuntime: xstate.assign({
- errorState: (_, event) => {
- return event.data?.errorState || LivenessErrorState.RUNTIME_ERROR;
- },
+ errorState: (_, event) => event.data?.errorState ||
+ LivenessErrorState.RUNTIME_ERROR,
  }),
  updateErrorStateForServer: xstate.assign({
  errorState: (_) => LivenessErrorState.SERVER_ERROR,
  }),
- clearErrorState: xstate.assign({
- errorState: (_) => undefined,
- }),
+ clearErrorState: xstate.assign({ errorState: (_) => undefined }),
  updateSessionInfo: xstate.assign({
- serverSessionInformation: (context, event) => {
+ serverSessionInformation: (_, event) => {
  return event.data.sessionInfo;
  },
  }),
- updateShouldDisconnect: xstate.assign({
- shouldDisconnect: (context) => {
- return true;
- },
- }),
+ updateShouldDisconnect: xstate.assign({ shouldDisconnect: () => true }),
  updateFreshnessDetails: xstate.assign({
  freshnessColorAssociatedParams: (context, event) => {
  return {
  ...context.freshnessColorAssociatedParams,
- freshnessColorsComplete: event.data.freshnessColorsComplete,
+ freshnessColorsComplete: event.data
+ .freshnessColorsComplete,
  };
  },
  }),
@@ -1885,7 +1916,7 @@
  delay: (context) => {
  return (context.serverSessionInformation?.Challenge
  ?.FaceMovementAndLightChallenge?.ChallengeConfig
- ?.OvalFitTimeout || DEFAULT_FACE_FIT_TIMEOUT);
+ ?.OvalFitTimeout ?? DEFAULT_FACE_FIT_TIMEOUT);
  },
  id: 'ovalMatchTimeout',
  }),
@@ -1927,14 +1958,12 @@
  },
  }),
  callMobileLandscapeWarningCallback: xstate.assign({
- errorState: (context) => {
- return LivenessErrorState.MOBILE_LANDSCAPE_ERROR;
- },
+ errorState: () => LivenessErrorState.MOBILE_LANDSCAPE_ERROR,
  }),
- callUserCancelCallback: async (context) => {
+ callUserCancelCallback: (context) => {
  context.componentProps.onUserCancel?.();
  },
- callUserTimeoutCallback: async (context) => {
+ callUserTimeoutCallback: (context) => {
  const error = new Error('Client Timeout');
  error.name = context.errorState;
  const livenessError = {
@@ -1943,14 +1972,14 @@
  };
  context.componentProps.onError?.(livenessError);
  },
- callErrorCallback: async (context, event) => {
+ callErrorCallback: (context, event) => {
  const livenessError = {
  state: context.errorState,
  error: event.data?.error || event.data,
  };
  context.componentProps.onError?.(livenessError);
  },
- cleanUpResources: async (context) => {
+ cleanUpResources: (context) => {
  const { freshnessColorEl } = context.freshnessColorAssociatedParams;
  if (freshnessColorEl) {
  freshnessColorEl.style.display = 'none';
@@ -1969,9 +1998,9 @@
  else if (context.errorState === undefined) {
  closureCode = WS_CLOSURE_CODE.USER_CANCEL;
  }
- await context.livenessStreamProvider?.endStreamWithCode(closureCode);
+ context.livenessStreamProvider?.endStreamWithCode(closureCode);
  },
- freezeStream: async (context) => {
+ freezeStream: (context) => {
  const { videoMediaStream, videoEl } = context.videoAssociatedParams;
  context.isRecordingStopped = true;
  videoEl?.pause();
@@ -1979,7 +2008,7 @@
  track.stop();
  });
  },
- pauseVideoStream: async (context) => {
+ pauseVideoStream: (context) => {
  const { videoEl } = context.videoAssociatedParams;
  context.isRecordingStopped = true;
  videoEl.pause();
@@ -2035,7 +2064,6 @@
  hasNotEnoughFaceDistanceBeforeRecording: (context) => {
  return !context.isFaceFarEnoughBeforeRecording;
  },
- hasLivenessCheckSucceeded: (_, __, meta) => meta.state.event.data.isLive,
  hasFreshnessColorShown: (context) => context.freshnessColorAssociatedParams.freshnessColorsComplete,
  hasServerSessionInfo: (context) => {
  return context.serverSessionInformation !== undefined;
@@ -2088,14 +2116,15 @@
  // If the initial stream is of real camera, use it otherwise use the first real camera
  const initialStreamDeviceId = tracksWithMoreThan15Fps[0].getSettings().deviceId;
  const isInitialStreamFromRealDevice = realVideoDevices.some((device) => device.deviceId === initialStreamDeviceId);
- let deviceId = initialStreamDeviceId;
+ const deviceId = isInitialStreamFromRealDevice
+ ? initialStreamDeviceId
+ : realVideoDevices[0].deviceId;
  let realVideoDeviceStream = initialStream;
  if (!isInitialStreamFromRealDevice) {
- deviceId = realVideoDevices[0].deviceId;
  realVideoDeviceStream = await navigator.mediaDevices.getUserMedia({
  video: {
  ...videoConstraints,
- deviceId: { exact: realVideoDevices[0].deviceId },
+ deviceId: { exact: deviceId },
  },
  audio: false,
  });
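Note: checkVirtualCameraAndGetStream prefers a physical camera over virtual ones; the mutable let becomes a const ternary, and the getUserMedia call reuses the same deviceId. A sketch of the selection idea, to run inside an async function (the virtual-device filter here is illustrative):

// Enumerate inputs, prefer a non-virtual camera, request it explicitly.
const devices = await navigator.mediaDevices.enumerateDevices();
const cameras = devices.filter((d) => d.kind === 'videoinput');
const real = cameras.find((d) => !/virtual/i.test(d.label)) ?? cameras[0];
const stream = await navigator.mediaDevices.getUserMedia({
  video: { deviceId: { exact: real.deviceId } },
  audio: false,
});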
@@ -2107,6 +2136,7 @@
  selectableDevices: realVideoDevices,
  };
  },
+ // eslint-disable-next-line @typescript-eslint/require-await
  async openLivenessStreamConnection(context) {
  const { config } = context.componentProps;
  const { credentialProvider, endpointOverride } = config;
@@ -2129,6 +2159,7 @@
  await faceDetector.modelLoadingPromise;
  }
  catch (err) {
+ // eslint-disable-next-line no-console
  console.log({ err });
  }
  // detect face
@@ -2168,10 +2199,11 @@
  });
  const { isDistanceBelowThreshold: isFaceFarEnoughBeforeRecording, error, } = await isFaceDistanceBelowThreshold({
  faceDetector: faceDetector,
- videoEl: videoEl,
+ isMobile,
  ovalDetails,
+ videoEl: videoEl,
+ // if this is the second face distance check reduce the threshold
  reduceThreshold: faceDistanceCheckBeforeRecording,
- isMobile,
  });
  return { isFaceFarEnoughBeforeRecording, error };
  },
@@ -2185,6 +2217,7 @@
  await livenessStreamProvider.videoRecorder.recorderStarted;
  }
  catch (err) {
+ // eslint-disable-next-line no-console
  console.log({ err });
  }
  // detect face
@@ -2200,6 +2233,7 @@
  break;
  }
  case 1: {
+ //exactly one face detected;
  faceMatchState = FaceMatchState.FACE_IDENTIFIED;
  initialFace = detectedFaces[0];
  break;
@@ -2346,7 +2380,7 @@
  throw new Error('Video chunks not recorded successfully.');
  }
  livenessStreamProvider.sendClientInfo(livenessActionDocument);
- await livenessStreamProvider.dispatchStopVideoEvent();
+ livenessStreamProvider.dispatchStopVideoEvent();
  },
  async getLiveness(context) {
  const { onAnalysisComplete } = context.componentProps;
@@ -2355,60 +2389,6 @@
  },
  },
  });
- const responseStreamActor = async (callback) => {
- try {
- const stream = await responseStream;
- for await (const event of stream) {
- if (isServerSesssionInformationEvent(event)) {
- callback({
- type: 'SET_SESSION_INFO',
- data: {
- sessionInfo: event.ServerSessionInformationEvent.SessionInformation,
- },
- });
- }
- else if (isDisconnectionEvent(event)) {
- callback({ type: 'DISCONNECT_EVENT' });
- }
- else if (isValidationExceptionEvent(event)) {
- callback({
- type: 'SERVER_ERROR',
- data: { error: { ...event.ValidationException } },
- });
- }
- else if (isInternalServerExceptionEvent(event)) {
- callback({
- type: 'SERVER_ERROR',
- data: { error: { ...event.InternalServerException } },
- });
- }
- else if (isThrottlingExceptionEvent(event)) {
- callback({
- type: 'SERVER_ERROR',
- data: { error: { ...event.ThrottlingException } },
- });
- }
- else if (isServiceQuotaExceededExceptionEvent(event)) {
- callback({
- type: 'SERVER_ERROR',
- data: { error: { ...event.ServiceQuotaExceededException } },
- });
- }
- }
- }
- catch (error) {
- let returnedError = error;
- if (isInvalidSignatureRegionException(error)) {
- returnedError = new Error('Invalid region in FaceLivenessDetector or credentials are scoped to the wrong region.');
- }
- if (returnedError instanceof Error) {
- callback({
- type: 'SERVER_ERROR',
- data: { error: returnedError },
- });
- }
- }
- };

  const FaceLivenessDetectorContext = React__default["default"].createContext(null);
  function FaceLivenessDetectorProvider({ children, ...props }) {
@@ -2488,11 +2468,6 @@ var LivenessClassNames;
  LivenessClassNames["FreshnessCanvas"] = "amplify-liveness-freshness-canvas";
  LivenessClassNames["InstructionList"] = "amplify-liveness-instruction-list";
  LivenessClassNames["InstructionOverlay"] = "amplify-liveness-instruction-overlay";
- LivenessClassNames["Figure"] = "amplify-liveness-figure";
- LivenessClassNames["FigureCaption"] = "amplify-liveness-figure__caption";
- LivenessClassNames["FigureIcon"] = "amplify-liveness-figure__icon";
- LivenessClassNames["FigureImage"] = "amplify-liveness-figure__image";
- LivenessClassNames["Figures"] = "amplify-liveness-figures";
  LivenessClassNames["Hint"] = "amplify-liveness-hint";
  LivenessClassNames["HintText"] = "amplify-liveness-hint__text";
  LivenessClassNames["LandscapeErrorModal"] = "amplify-liveness-landscape-error-modal";
@@ -2918,22 +2893,15 @@ const LivenessCameraModule = (props) => {
  const [mediaHeight, setMediaHeight] = React.useState(videoHeight);
  const [aspectRatio, setAspectRatio] = React.useState(() => videoWidth && videoHeight ? videoWidth / videoHeight : 0);
  React__default["default"].useEffect(() => {
- if (canvasRef &&
- videoRef &&
- canvasRef.current &&
- videoRef.current &&
- videoStream &&
- isStartView) {
+ if (canvasRef?.current && videoRef?.current && videoStream && isStartView) {
  drawStaticOval(canvasRef.current, videoRef.current, videoStream);
  }
  }, [canvasRef, videoRef, videoStream, colorMode, isStartView]);
  React__default["default"].useEffect(() => {
  const updateColorModeHandler = (e) => {
  if (e.matches &&
- canvasRef &&
- videoRef &&
- canvasRef.current &&
- videoRef.current &&
+ canvasRef?.current &&
+ videoRef?.current &&
  videoStream &&
  isStartView) {
  drawStaticOval(canvasRef.current, videoRef.current, videoStream);
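Note: the React effects now use optional chaining on the refs; for a ref object that may be null or have a null .current, the two forms are equivalent truth tests:

// Equivalent checks on a ref object:
const before = canvasRef && canvasRef.current; // 3.0.12 style
const after = canvasRef?.current;              // 3.0.14 style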