@aws-amplify/ui-react-liveness 3.0.15 → 3.0.17

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35)
  1. package/dist/esm/components/FaceLivenessDetector/FaceLivenessDetectorCore.mjs +1 -1
  2. package/dist/esm/components/FaceLivenessDetector/LivenessCheck/LivenessCameraModule.mjs +1 -1
  3. package/dist/esm/components/FaceLivenessDetector/LivenessCheck/LivenessCheck.mjs +1 -1
  4. package/dist/esm/components/FaceLivenessDetector/displayText.mjs +2 -0
  5. package/dist/esm/components/FaceLivenessDetector/service/machine/{index.mjs → machine.mjs} +43 -35
  6. package/dist/esm/components/FaceLivenessDetector/service/types/error.mjs +1 -0
  7. package/dist/esm/components/FaceLivenessDetector/service/types/liveness.mjs +0 -1
  8. package/dist/esm/components/FaceLivenessDetector/service/utils/constants.mjs +10 -2
  9. package/dist/esm/components/FaceLivenessDetector/service/utils/createStreamingClient/CustomWebSocketFetchHandler.mjs +3 -6
  10. package/dist/esm/components/FaceLivenessDetector/service/utils/eventUtils.mjs +7 -1
  11. package/dist/esm/components/FaceLivenessDetector/service/utils/getFaceMatchStateInLivenessOval.mjs +59 -0
  12. package/dist/esm/components/FaceLivenessDetector/service/utils/liveness.mjs +22 -74
  13. package/dist/esm/components/FaceLivenessDetector/shared/DefaultStartScreenComponents.mjs +1 -1
  14. package/dist/esm/components/FaceLivenessDetector/shared/FaceLivenessErrorModal.mjs +6 -2
  15. package/dist/esm/components/FaceLivenessDetector/shared/Hint.mjs +5 -8
  16. package/dist/esm/components/FaceLivenessDetector/utils/getDisplayText.mjs +3 -1
  17. package/dist/esm/version.mjs +1 -1
  18. package/dist/index.js +145 -122
  19. package/dist/styles.css +1 -1
  20. package/dist/types/components/FaceLivenessDetector/displayText.d.ts +2 -0
  21. package/dist/types/components/FaceLivenessDetector/service/machine/index.d.ts +1 -5
  22. package/dist/types/components/FaceLivenessDetector/service/machine/machine.d.ts +5 -0
  23. package/dist/types/components/FaceLivenessDetector/service/types/error.d.ts +1 -0
  24. package/dist/types/components/FaceLivenessDetector/service/types/liveness.d.ts +0 -1
  25. package/dist/types/components/FaceLivenessDetector/service/types/machine.d.ts +2 -3
  26. package/dist/types/components/FaceLivenessDetector/service/utils/constants.d.ts +6 -0
  27. package/dist/types/components/FaceLivenessDetector/service/utils/createStreamingClient/CustomWebSocketFetchHandler.d.ts +1 -0
  28. package/dist/types/components/FaceLivenessDetector/service/utils/eventUtils.d.ts +1 -0
  29. package/dist/types/components/FaceLivenessDetector/service/utils/getFaceMatchStateInLivenessOval.d.ts +17 -0
  30. package/dist/types/components/FaceLivenessDetector/service/utils/index.d.ts +1 -0
  31. package/dist/types/components/FaceLivenessDetector/service/utils/liveness.d.ts +2 -8
  32. package/dist/types/components/FaceLivenessDetector/shared/FaceLivenessErrorModal.d.ts +2 -0
  33. package/dist/types/components/FaceLivenessDetector/shared/Hint.d.ts +1 -1
  34. package/dist/types/version.d.ts +1 -1
  35. package/package.json +4 -4
package/dist/index.js CHANGED
@@ -75,7 +75,6 @@ var FaceMatchState;
75
75
  (function (FaceMatchState) {
76
76
  FaceMatchState["MATCHED"] = "MATCHED";
77
77
  FaceMatchState["TOO_FAR"] = "TOO FAR";
78
- FaceMatchState["TOO_CLOSE"] = "TOO CLOSE";
79
78
  FaceMatchState["CANT_IDENTIFY"] = "CANNOT IDENTIFY";
80
79
  FaceMatchState["FACE_IDENTIFIED"] = "ONE FACE IDENTIFIED";
81
80
  FaceMatchState["TOO_MANY"] = "TOO MANY FACES";
@@ -86,6 +85,7 @@ var FaceMatchState;
86
85
  * The liveness error states
87
86
  */
88
87
  const LivenessErrorState = {
88
+ CONNECTION_TIMEOUT: 'CONNECTION_TIMEOUT',
89
89
  TIMEOUT: 'TIMEOUT',
90
90
  RUNTIME_ERROR: 'RUNTIME_ERROR',
91
91
  FRESHNESS_TIMEOUT: 'FRESHNESS_TIMEOUT',
@@ -99,12 +99,20 @@ const LivenessErrorState = {
99
99
 
100
100
  // Face distance is calculated as pupilDistance / ovalWidth.
101
101
  // The further away you are from the camera the distance between your pupils will decrease, thus lowering the threshold values.
102
- // These FACE_DISTNACE_THRESHOLD values are determined by the science team and should only be changed with their approval.
102
+ // These FACE_DISTANCE_THRESHOLD values are determined by the science team and should only be changed with their approval.
103
103
  // We want to ensure at the start of a check that the user's pupilDistance/ovalWidth is below FACE_DISTANCE_THRESHOLD to ensure that they are starting
104
104
  // a certain distance away from the camera.
105
105
  const FACE_DISTANCE_THRESHOLD = 0.32;
106
106
  const REDUCED_THRESHOLD = 0.4;
107
107
  const REDUCED_THRESHOLD_MOBILE = 0.37;
108
+ // Constants from science team to determine ocular distance (space between eyes)
109
+ const PUPIL_DISTANCE_WEIGHT = 2.0;
110
+ const FACE_HEIGHT_WEIGHT = 1.8;
111
+ // Constants from science team to find face match percentage
112
+ const FACE_MATCH_RANGE_MIN = 0;
113
+ const FACE_MATCH_RANGE_MAX = 1;
114
+ const FACE_MATCH_WEIGHT_MIN = 0.25;
115
+ const FACE_MATCH_WEIGHT_MAX = 0.75;
108
116
  const WS_CLOSURE_CODE = {
109
117
  SUCCESS_CODE: 1000,
110
118
  DEFAULT_ERROR_CODE: 4000,
@@ -292,87 +300,33 @@ function getPupilDistanceAndFaceHeight(face) {
292
300
  const faceHeight = Math.sqrt((eyeCenter[0] - mouth[0]) ** 2 + (eyeCenter[1] - mouth[1]) ** 2);
293
301
  return { pupilDistance, faceHeight };
294
302
  }
295
- function generateBboxFromLandmarks(face, oval) {
296
- const { leftEye, rightEye, nose, leftEar, rightEar, top: faceTop, height: faceHeight, } = face;
303
+ function generateBboxFromLandmarks(face, oval, frameHeight) {
304
+ const { leftEye, rightEye, nose, leftEar, rightEar } = face;
297
305
  const { height: ovalHeight, centerY } = oval;
298
306
  const ovalTop = centerY - ovalHeight / 2;
299
307
  const eyeCenter = [];
300
308
  eyeCenter[0] = (leftEye[0] + rightEye[0]) / 2;
301
309
  eyeCenter[1] = (leftEye[1] + rightEye[1]) / 2;
302
310
  const { pupilDistance: pd, faceHeight: fh } = getPupilDistanceAndFaceHeight(face);
303
- const alpha = 2.0, gamma = 1.8;
304
- const ow = (alpha * pd + gamma * fh) / 2;
305
- const oh = 1.618 * ow;
306
- let cx;
311
+ const ocularWidth = (PUPIL_DISTANCE_WEIGHT * pd + FACE_HEIGHT_WEIGHT * fh) / 2;
312
+ let centerFaceX, centerFaceY;
307
313
  if (eyeCenter[1] <= (ovalTop + ovalHeight) / 2) {
308
- cx = (eyeCenter[0] + nose[0]) / 2;
314
+ centerFaceX = (eyeCenter[0] + nose[0]) / 2;
315
+ centerFaceY = (eyeCenter[1] + nose[1]) / 2;
309
316
  }
310
317
  else {
311
- cx = eyeCenter[0];
312
- }
313
- const bottom = faceTop + faceHeight;
314
- const top = bottom - oh;
315
- const left = Math.min(cx - ow / 2, rightEar[0]);
316
- const right = Math.max(cx + ow / 2, leftEar[0]);
318
+ // when face tilts down
319
+ centerFaceX = eyeCenter[0];
320
+ centerFaceY = eyeCenter[1];
321
+ }
322
+ const faceWidth = ocularWidth;
323
+ const faceHeight = 1.68 * faceWidth;
324
+ const top = Math.max(centerFaceY - faceHeight / 2, 0);
325
+ const bottom = Math.min(centerFaceY + faceHeight / 2, frameHeight);
326
+ const left = Math.min(centerFaceX - ocularWidth / 2, rightEar[0]);
327
+ const right = Math.max(centerFaceX + ocularWidth / 2, leftEar[0]);
317
328
  return { bottom, left, right, top };
318
329
  }
319
- /**
320
- * Returns the state of the provided face with respect to the provided liveness oval.
321
- */
322
- // eslint-disable-next-line max-params
323
- function getFaceMatchStateInLivenessOval(face, ovalDetails, initialFaceIntersection, sessionInformation) {
324
- let faceMatchState;
325
- const challengeConfig = sessionInformation?.Challenge?.FaceMovementAndLightChallenge
326
- ?.ChallengeConfig;
327
- if (!challengeConfig ||
328
- !challengeConfig.OvalIouThreshold ||
329
- !challengeConfig.OvalIouHeightThreshold ||
330
- !challengeConfig.OvalIouWidthThreshold ||
331
- !challengeConfig.FaceIouHeightThreshold ||
332
- !challengeConfig.FaceIouWidthThreshold) {
333
- throw new Error('Challenge information not returned from session information.');
334
- }
335
- const { OvalIouThreshold, OvalIouHeightThreshold, OvalIouWidthThreshold, FaceIouHeightThreshold, FaceIouWidthThreshold, } = challengeConfig;
336
- const faceBoundingBox = generateBboxFromLandmarks(face, ovalDetails);
337
- const minFaceX = faceBoundingBox.left;
338
- const maxFaceX = faceBoundingBox.right;
339
- const minFaceY = faceBoundingBox.top;
340
- const maxFaceY = faceBoundingBox.bottom;
341
- const { ovalBoundingBox, minOvalX, minOvalY, maxOvalX, maxOvalY } = getOvalBoundingBox(ovalDetails);
342
- const intersection = getIntersectionOverUnion(faceBoundingBox, ovalBoundingBox);
343
- const intersectionThreshold = OvalIouThreshold;
344
- const ovalMatchWidthThreshold = ovalDetails.width * OvalIouWidthThreshold;
345
- const ovalMatchHeightThreshold = ovalDetails.height * OvalIouHeightThreshold;
346
- const faceDetectionWidthThreshold = ovalDetails.width * FaceIouWidthThreshold;
347
- const faceDetectionHeightThreshold = ovalDetails.height * FaceIouHeightThreshold;
348
- /** From Science
349
- * p=max(min(1,0.75∗(si−s0)/(st−s0)+0.25)),0)
350
- */
351
- const faceMatchPercentage = Math.max(Math.min(1, (0.75 * (intersection - initialFaceIntersection)) /
352
- (intersectionThreshold - initialFaceIntersection) +
353
- 0.25), 0) * 100;
354
- const faceIsOutsideOvalToTheLeft = minOvalX > minFaceX && maxOvalX > maxFaceX;
355
- const faceIsOutsideOvalToTheRight = minFaceX > minOvalX && maxFaceX > maxOvalX;
356
- if (intersection > intersectionThreshold &&
357
- Math.abs(minOvalX - minFaceX) < ovalMatchWidthThreshold &&
358
- Math.abs(maxOvalX - maxFaceX) < ovalMatchWidthThreshold &&
359
- Math.abs(maxOvalY - maxFaceY) < ovalMatchHeightThreshold) {
360
- faceMatchState = FaceMatchState.MATCHED;
361
- }
362
- else if (faceIsOutsideOvalToTheLeft || faceIsOutsideOvalToTheRight) {
363
- faceMatchState = FaceMatchState.OFF_CENTER;
364
- }
365
- else if (minOvalY - minFaceY > faceDetectionHeightThreshold ||
366
- maxFaceY - maxOvalY > faceDetectionHeightThreshold ||
367
- (minOvalX - minFaceX > faceDetectionWidthThreshold &&
368
- maxFaceX - maxOvalX > faceDetectionWidthThreshold)) {
369
- faceMatchState = FaceMatchState.TOO_CLOSE;
370
- }
371
- else {
372
- faceMatchState = FaceMatchState.TOO_FAR;
373
- }
374
- return { faceMatchState, faceMatchPercentage };
375
- }
376
330
  /**
377
331
  * Returns the illumination state in the provided video frame.
378
332
  */
@@ -548,8 +502,10 @@ async function isFaceDistanceBelowThreshold({ faceDetector, videoEl, ovalDetails
548
502
  detectedFace = detectedFaces[0];
549
503
  const { width } = ovalDetails;
550
504
  const { pupilDistance, faceHeight } = getPupilDistanceAndFaceHeight(detectedFace);
551
- const alpha = 2.0, gamma = 1.8;
552
- const calibratedPupilDistance = (alpha * pupilDistance + gamma * faceHeight) / 2 / alpha;
505
+ const calibratedPupilDistance = (PUPIL_DISTANCE_WEIGHT * pupilDistance +
506
+ FACE_HEIGHT_WEIGHT * faceHeight) /
507
+ 2 /
508
+ PUPIL_DISTANCE_WEIGHT;
553
509
  if (width) {
554
510
  isDistanceBelowThreshold =
555
511
  calibratedPupilDistance / width <
@@ -782,7 +738,61 @@ class VideoRecorder {
782
738
  }
783
739
  }
784
740
 
785
- const VERSION = '3.0.15';
741
+ /**
742
+ * Returns the state of the provided face with respect to the provided liveness oval.
743
+ */
744
+ function getFaceMatchStateInLivenessOval({ face, ovalDetails, initialFaceIntersection, sessionInformation, frameHeight, }) {
745
+ let faceMatchState;
746
+ const challengeConfig = sessionInformation?.Challenge?.FaceMovementAndLightChallenge
747
+ ?.ChallengeConfig;
748
+ if (!challengeConfig ||
749
+ !challengeConfig.OvalIouThreshold ||
750
+ !challengeConfig.OvalIouHeightThreshold ||
751
+ !challengeConfig.OvalIouWidthThreshold ||
752
+ !challengeConfig.FaceIouHeightThreshold ||
753
+ !challengeConfig.FaceIouWidthThreshold) {
754
+ throw new Error('Challenge information not returned from session information.');
755
+ }
756
+ const { OvalIouThreshold, FaceIouHeightThreshold, FaceIouWidthThreshold } = challengeConfig;
757
+ const faceBoundingBox = generateBboxFromLandmarks(face, ovalDetails, frameHeight);
758
+ const minFaceX = faceBoundingBox.left;
759
+ const maxFaceX = faceBoundingBox.right;
760
+ const minFaceY = faceBoundingBox.top;
761
+ const maxFaceY = faceBoundingBox.bottom;
762
+ const { ovalBoundingBox, minOvalX, minOvalY, maxOvalX, maxOvalY } = getOvalBoundingBox(ovalDetails);
763
+ const intersection = getIntersectionOverUnion(faceBoundingBox, ovalBoundingBox);
764
+ const intersectionThreshold = OvalIouThreshold;
765
+ const faceDetectionWidthThreshold = ovalDetails.width * FaceIouWidthThreshold;
766
+ const faceDetectionHeightThreshold = ovalDetails.height * FaceIouHeightThreshold;
767
+ /** From Science
768
+ * p=max(min(1,0.75∗(si−s0)/(st−s0)+0.25)),0)
769
+ */
770
+ const faceMatchPercentage = Math.max(Math.min(FACE_MATCH_RANGE_MAX, (FACE_MATCH_WEIGHT_MAX * (intersection - initialFaceIntersection)) /
771
+ (intersectionThreshold - initialFaceIntersection) +
772
+ FACE_MATCH_WEIGHT_MIN), FACE_MATCH_RANGE_MIN) * 100;
773
+ const isFaceOutsideOvalToTheLeft = minOvalX > minFaceX && maxOvalX > maxFaceX;
774
+ const isFaceOutsideOvalToTheRight = minFaceX > minOvalX && maxFaceX > maxOvalX;
775
+ const isFaceMatched = intersection > intersectionThreshold;
776
+ const isFaceMatchedClosely = minOvalY - minFaceY > faceDetectionHeightThreshold ||
777
+ maxFaceY - maxOvalY > faceDetectionHeightThreshold ||
778
+ (minOvalX - minFaceX > faceDetectionWidthThreshold &&
779
+ maxFaceX - maxOvalX > faceDetectionWidthThreshold);
780
+ if (isFaceMatched) {
781
+ faceMatchState = FaceMatchState.MATCHED;
782
+ }
783
+ else if (isFaceOutsideOvalToTheLeft || isFaceOutsideOvalToTheRight) {
784
+ faceMatchState = FaceMatchState.OFF_CENTER;
785
+ }
786
+ else if (isFaceMatchedClosely) {
787
+ faceMatchState = FaceMatchState.MATCHED;
788
+ }
789
+ else {
790
+ faceMatchState = FaceMatchState.TOO_FAR;
791
+ }
792
+ return { faceMatchState, faceMatchPercentage };
793
+ }
794
+
795
+ const VERSION = '3.0.17';
786
796
 
787
797
  const BASE_USER_AGENT = `ui-react-liveness/${VERSION}`;
788
798
  const getLivenessUserAgent = () => {
@@ -794,6 +804,7 @@ const getLivenessUserAgent = () => {
794
804
  * Because of this the file is not fully typed at this time but we should eventually work on fully typing this file.
795
805
  */
796
806
  const DEFAULT_WS_CONNECTION_TIMEOUT_MS = 2000;
807
+ const WEBSOCKET_CONNECTION_TIMEOUT_MESSAGE = 'Websocket connection timeout';
797
808
  const isWebSocketRequest = (request) => request.protocol === 'ws:' || request.protocol === 'wss:';
798
809
  const isReadableStream = (payload) => typeof ReadableStream === 'function' && payload instanceof ReadableStream;
799
810
  /**
@@ -893,11 +904,7 @@ class CustomWebSocketFetchHandler {
893
904
  return new Promise((resolve, reject) => {
894
905
  const timeout = setTimeout(() => {
895
906
  this.removeNotUsableSockets(socket.url);
896
- reject({
897
- $metadata: {
898
- httpStatusCode: 500,
899
- },
900
- });
907
+ reject(new Error(WEBSOCKET_CONNECTION_TIMEOUT_MESSAGE));
901
908
  }, connectionTimeout);
902
909
  socket.onopen = () => {
903
910
  clearTimeout(timeout);
@@ -1300,6 +1307,10 @@ const isServerSesssionInformationEvent = (value) => {
1300
1307
  return !!value
1301
1308
  ?.ServerSessionInformationEvent;
1302
1309
  };
1310
+ const isConnectionTimeoutError = (error) => {
1311
+ const { message } = error;
1312
+ return message.includes(WEBSOCKET_CONNECTION_TIMEOUT_MESSAGE);
1313
+ };
1303
1314
  const isDisconnectionEvent = (value) => {
1304
1315
  return !!value
1305
1316
  ?.DisconnectionEvent;
@@ -1340,7 +1351,6 @@ const STATIC_VIDEO_CONSTRAINTS = {
1340
1351
 
1341
1352
  const CAMERA_ID_KEY = 'AmplifyLivenessCameraId';
1342
1353
  const DEFAULT_FACE_FIT_TIMEOUT = 7000;
1343
- const MIN_FACE_MATCH_TIME = 1000;
1344
1354
  let responseStream;
1345
1355
  const responseStreamActor = async (callback) => {
1346
1356
  try {
@@ -1384,14 +1394,20 @@ const responseStreamActor = async (callback) => {
1384
1394
  }
1385
1395
  }
1386
1396
  catch (error) {
1387
- let returnedError = error;
1388
1397
  if (isInvalidSignatureRegionException(error)) {
1389
- returnedError = new Error('Invalid region in FaceLivenessDetector or credentials are scoped to the wrong region.');
1390
- }
1391
- if (returnedError instanceof Error) {
1392
1398
  callback({
1393
1399
  type: 'SERVER_ERROR',
1394
- data: { error: returnedError },
1400
+ data: {
1401
+ error: new Error('Invalid region in FaceLivenessDetector or credentials are scoped to the wrong region.'),
1402
+ },
1403
+ });
1404
+ }
1405
+ else if (error instanceof Error) {
1406
+ callback({
1407
+ type: isConnectionTimeoutError(error)
1408
+ ? 'CONNECTION_TIMEOUT'
1409
+ : 'SERVER_ERROR',
1410
+ data: { error },
1395
1411
  });
1396
1412
  }
1397
1413
  }
@@ -1430,7 +1446,6 @@ const livenessMachine = xstate.createMachine({
1430
1446
  currentDetectedFace: undefined,
1431
1447
  startFace: undefined,
1432
1448
  endFace: undefined,
1433
- initialFaceMatchTime: undefined,
1434
1449
  },
1435
1450
  freshnessColorAssociatedParams: {
1436
1451
  freshnessColorEl: undefined,
@@ -1470,6 +1485,10 @@ const livenessMachine = xstate.createMachine({
1470
1485
  target: 'error',
1471
1486
  actions: 'updateErrorStateForServer',
1472
1487
  },
1488
+ CONNECTION_TIMEOUT: {
1489
+ target: 'error',
1490
+ actions: 'updateErrorStateForConnectionTimeout',
1491
+ },
1473
1492
  RUNTIME_ERROR: {
1474
1493
  target: 'error',
1475
1494
  },
@@ -1615,6 +1634,8 @@ const livenessMachine = xstate.createMachine({
1615
1634
  100: { target: 'checkRecordingStarted' },
1616
1635
  },
1617
1636
  },
1637
+ // Evaluates face match and moves to checkMatch
1638
+ // which continually checks for match until either timeout or face match
1618
1639
  ovalMatching: {
1619
1640
  entry: 'cancelOvalDrawingTimeout',
1620
1641
  invoke: {
@@ -1625,29 +1646,32 @@ const livenessMachine = xstate.createMachine({
1625
1646
  },
1626
1647
  },
1627
1648
  },
1649
+ // If `hasFaceMatchedInOval` is true, then move to `delayBeforeFlash`, which pauses
1650
+ // for one second to show "Hold still" text before moving to `flashFreshnessColors`.
1651
+ // If not, move back to ovalMatching and re-evaluate match state
1628
1652
  checkMatch: {
1629
1653
  after: {
1630
1654
  0: {
1631
- target: 'flashFreshnessColors',
1632
- cond: 'hasFaceMatchedInOvalWithMinTime',
1655
+ target: 'delayBeforeFlash',
1656
+ cond: 'hasFaceMatchedInOval',
1633
1657
  actions: [
1658
+ 'setFaceMatchTimeAndStartFace',
1634
1659
  'updateEndFaceMatch',
1635
1660
  'setupFlashFreshnessColors',
1636
1661
  'cancelOvalMatchTimeout',
1637
1662
  'cancelOvalDrawingTimeout',
1638
1663
  ],
1639
1664
  },
1640
- 0.1: {
1641
- target: 'ovalMatching',
1642
- cond: 'hasFaceMatchedInOval',
1643
- actions: 'setFaceMatchTimeAndStartFace',
1644
- },
1645
1665
  1: {
1646
1666
  target: 'ovalMatching',
1647
- cond: 'hasNotFaceMatchedInOval',
1648
1667
  },
1649
1668
  },
1650
1669
  },
1670
+ delayBeforeFlash: {
1671
+ after: {
1672
+ 1000: 'flashFreshnessColors',
1673
+ },
1674
+ },
1651
1675
  flashFreshnessColors: {
1652
1676
  invoke: {
1653
1677
  src: 'flashColors',
@@ -1912,14 +1936,13 @@ const livenessMachine = xstate.createMachine({
1912
1936
  startFace: context.faceMatchAssociatedParams.startFace === undefined
1913
1937
  ? context.faceMatchAssociatedParams.currentDetectedFace
1914
1938
  : context.faceMatchAssociatedParams.startFace,
1915
- initialFaceMatchTime: context.faceMatchAssociatedParams.initialFaceMatchTime ===
1916
- undefined
1917
- ? Date.now()
1918
- : context.faceMatchAssociatedParams.initialFaceMatchTime,
1919
1939
  };
1920
1940
  },
1921
1941
  }),
1922
1942
  resetErrorState: xstate.assign({ errorState: (_) => undefined }),
1943
+ updateErrorStateForConnectionTimeout: xstate.assign({
1944
+ errorState: (_) => LivenessErrorState.CONNECTION_TIMEOUT,
1945
+ }),
1923
1946
  updateErrorStateForTimeout: xstate.assign({
1924
1947
  errorState: (_, event) => event.data?.errorState || LivenessErrorState.TIMEOUT,
1925
1948
  }),
@@ -2087,21 +2110,10 @@ const livenessMachine = xstate.createMachine({
2087
2110
  },
2088
2111
  guards: {
2089
2112
  shouldTimeoutOnFailedAttempts: (context) => context.failedAttempts >= context.maxFailedAttempts,
2090
- hasFaceMatchedInOvalWithMinTime: (context) => {
2091
- const { faceMatchState, initialFaceMatchTime } = context.faceMatchAssociatedParams;
2092
- const timeSinceInitialFaceMatch = Date.now() - initialFaceMatchTime;
2093
- const hasMatched = faceMatchState === FaceMatchState.MATCHED &&
2094
- timeSinceInitialFaceMatch >= MIN_FACE_MATCH_TIME;
2095
- return hasMatched;
2096
- },
2097
2113
  hasFaceMatchedInOval: (context) => {
2098
2114
  return (context.faceMatchAssociatedParams.faceMatchState ===
2099
2115
  FaceMatchState.MATCHED);
2100
2116
  },
2101
- hasNotFaceMatchedInOval: (context) => {
2102
- return (context.faceMatchAssociatedParams.faceMatchState !==
2103
- FaceMatchState.MATCHED);
2104
- },
2105
2117
  hasSingleFace: (context) => {
2106
2118
  return (context.faceMatchAssociatedParams.faceMatchState ===
2107
2119
  FaceMatchState.FACE_IDENTIFIED);
@@ -2318,7 +2330,7 @@ const livenessMachine = xstate.createMachine({
2318
2330
  videoWidth: videoEl.width,
2319
2331
  });
2320
2332
  // renormalize initial face
2321
- const renormalizedFace = generateBboxFromLandmarks(initialFace, ovalDetails);
2333
+ const renormalizedFace = generateBboxFromLandmarks(initialFace, ovalDetails, videoEl.videoHeight);
2322
2334
  initialFace.top = renormalizedFace.top;
2323
2335
  initialFace.left = renormalizedFace.left;
2324
2336
  initialFace.height = renormalizedFace.bottom - renormalizedFace.top;
@@ -2347,7 +2359,7 @@ const livenessMachine = xstate.createMachine({
2347
2359
  let faceMatchPercentage = 0;
2348
2360
  let detectedFace;
2349
2361
  let illuminationState;
2350
- const initialFaceBoundingBox = generateBboxFromLandmarks(initialFace, ovalDetails);
2362
+ const initialFaceBoundingBox = generateBboxFromLandmarks(initialFace, ovalDetails, videoEl.videoHeight);
2351
2363
  const { ovalBoundingBox } = getOvalBoundingBox(ovalDetails);
2352
2364
  const initialFaceIntersection = getIntersectionOverUnion(initialFaceBoundingBox, ovalBoundingBox);
2353
2365
  switch (detectedFaces.length) {
@@ -2360,7 +2372,13 @@ const livenessMachine = xstate.createMachine({
2360
2372
  case 1: {
2361
2373
  //exactly one face detected, match face with oval;
2362
2374
  detectedFace = detectedFaces[0];
2363
- const { faceMatchState: faceMatchStateInLivenessOval, faceMatchPercentage: faceMatchPercentageInLivenessOval, } = getFaceMatchStateInLivenessOval(detectedFace, ovalDetails, initialFaceIntersection, serverSessionInformation);
2375
+ const { faceMatchState: faceMatchStateInLivenessOval, faceMatchPercentage: faceMatchPercentageInLivenessOval, } = getFaceMatchStateInLivenessOval({
2376
+ face: detectedFace,
2377
+ ovalDetails: ovalDetails,
2378
+ initialFaceIntersection,
2379
+ sessionInformation: serverSessionInformation,
2380
+ frameHeight: videoEl.videoHeight,
2381
+ });
2364
2382
  faceMatchState = faceMatchStateInLivenessOval;
2365
2383
  faceMatchPercentage = faceMatchPercentageInLivenessOval;
2366
2384
  break;
@@ -2615,7 +2633,6 @@ const Hint = ({ hintDisplayText }) => {
2615
2633
  [FaceMatchState.CANT_IDENTIFY]: hintDisplayText.hintCanNotIdentifyText,
2616
2634
  [FaceMatchState.FACE_IDENTIFIED]: hintDisplayText.hintTooFarText,
2617
2635
  [FaceMatchState.TOO_MANY]: hintDisplayText.hintTooManyFacesText,
2618
- [FaceMatchState.TOO_CLOSE]: hintDisplayText.hintTooCloseText,
2619
2636
  [FaceMatchState.TOO_FAR]: hintDisplayText.hintTooFarText,
2620
2637
  [FaceMatchState.MATCHED]: hintDisplayText.hintHoldFaceForFreshnessText,
2621
2638
  [FaceMatchState.OFF_CENTER]: hintDisplayText.hintFaceOffCenterText,
@@ -2661,13 +2678,11 @@ const Hint = ({ hintDisplayText }) => {
2661
2678
  return React__namespace.createElement(DefaultToast, { text: hintDisplayText.hintHoldFaceForFreshnessText });
2662
2679
  }
2663
2680
  if (isRecording && !isFlashingFreshness) {
2664
- // During face matching, we want to only show the TOO_CLOSE or
2665
- // TOO_FAR texts. If FaceMatchState matches TOO_CLOSE, we'll show
2666
- // the TOO_CLOSE text, but for FACE_IDENTIFED, CANT_IDENTIFY, TOO_MANY
2681
+ // During face matching, we want to only show the
2682
+ // TOO_FAR texts. For FACE_IDENTIFIED, CANT_IDENTIFY, TOO_MANY
2667
2683
  // we are defaulting to the TOO_FAR text (for now).
2668
2684
  let resultHintString = FaceMatchStateStringMap[FaceMatchState.TOO_FAR];
2669
- if (faceMatchState === FaceMatchState.TOO_CLOSE ||
2670
- faceMatchState === FaceMatchState.MATCHED) {
2685
+ if (faceMatchState === FaceMatchState.MATCHED) {
2671
2686
  resultHintString = FaceMatchStateStringMap[faceMatchState];
2672
2687
  }
2673
2688
  // If the face is outside the oval set the aria-label to a string about centering face in oval
@@ -2681,7 +2696,7 @@ const Hint = ({ hintDisplayText }) => {
2681
2696
  faceMatchPercentage > 50) {
2682
2697
  a11yHintString = hintDisplayText.hintMatchIndicatorText;
2683
2698
  }
2684
- return (React__namespace.createElement(Toast, { size: "large", variation: faceMatchState === FaceMatchState.TOO_CLOSE ? 'error' : 'primary' },
2699
+ return (React__namespace.createElement(Toast, { size: "large", variation: 'primary' },
2685
2700
  React__namespace.createElement(uiReact.VisuallyHidden, { "aria-live": "assertive" }, a11yHintString),
2686
2701
  React__namespace.createElement(uiReact.View, { "aria-label": a11yHintString }, resultHintString)));
2687
2702
  }
@@ -2722,6 +2737,8 @@ const RecordingIcon = ({ children }) => {
2722
2737
 
2723
2738
  const defaultErrorDisplayText = {
2724
2739
  errorLabelText: 'Error',
2740
+ connectionTimeoutHeaderText: 'Connection time out',
2741
+ connectionTimeoutMessageText: 'Connection has timed out.',
2725
2742
  timeoutHeaderText: 'Time out',
2726
2743
  timeoutMessageText: "Face didn't fit inside oval in time limit. Try again and completely fill the oval with face in it.",
2727
2744
  faceDistanceHeaderText: 'Forward movement detected',
@@ -2782,10 +2799,14 @@ const defaultLivenessDisplayText = {
2782
2799
 
2783
2800
  const renderToastErrorModal = (props) => {
2784
2801
  const { error: errorState, displayText } = props;
2785
- const { errorLabelText, timeoutHeaderText, timeoutMessageText, faceDistanceHeaderText, faceDistanceMessageText, multipleFacesHeaderText, multipleFacesMessageText, clientHeaderText, clientMessageText, serverHeaderText, serverMessageText, } = displayText;
2802
+ const { connectionTimeoutHeaderText, connectionTimeoutMessageText, errorLabelText, timeoutHeaderText, timeoutMessageText, faceDistanceHeaderText, faceDistanceMessageText, multipleFacesHeaderText, multipleFacesMessageText, clientHeaderText, clientMessageText, serverHeaderText, serverMessageText, } = displayText;
2786
2803
  let heading;
2787
2804
  let message;
2788
2805
  switch (errorState) {
2806
+ case LivenessErrorState.CONNECTION_TIMEOUT:
2807
+ heading = connectionTimeoutHeaderText;
2808
+ message = connectionTimeoutMessageText;
2809
+ break;
2789
2810
  case LivenessErrorState.TIMEOUT:
2790
2811
  heading = timeoutHeaderText;
2791
2812
  message = timeoutMessageText;
@@ -3191,7 +3212,7 @@ function getDisplayText(overrideDisplayText) {
3191
3212
  ...defaultLivenessDisplayText,
3192
3213
  ...overrideDisplayText,
3193
3214
  };
3194
- const { a11yVideoLabelText, cameraMinSpecificationsHeadingText, cameraMinSpecificationsMessageText, cameraNotFoundHeadingText, cameraNotFoundMessageText, cancelLivenessCheckText, clientHeaderText, clientMessageText, errorLabelText, hintCanNotIdentifyText, hintCenterFaceText, hintCenterFaceInstructionText, hintFaceOffCenterText, hintConnectingText, hintFaceDetectedText, hintHoldFaceForFreshnessText, hintIlluminationNormalText, hintIlluminationTooBrightText, hintIlluminationTooDarkText, hintMoveFaceFrontOfCameraText, hintTooManyFacesText, hintTooCloseText, hintTooFarText, hintVerifyingText, hintCheckCompleteText, hintMatchIndicatorText, faceDistanceHeaderText, faceDistanceMessageText, goodFitCaptionText, goodFitAltText, landscapeHeaderText, landscapeMessageText, multipleFacesHeaderText, multipleFacesMessageText, photosensitivityWarningBodyText, photosensitivityWarningHeadingText, photosensitivityWarningInfoText, photosensitivityWarningLabelText, photosensitivyWarningBodyText, photosensitivyWarningHeadingText, photosensitivyWarningInfoText, photosensitivyWarningLabelText, portraitMessageText, retryCameraPermissionsText, recordingIndicatorText, serverHeaderText, serverMessageText, startScreenBeginCheckText, timeoutHeaderText, timeoutMessageText, tooFarCaptionText, tooFarAltText, tryAgainText, waitingCameraPermissionText, } = displayText;
3215
+ const { a11yVideoLabelText, cameraMinSpecificationsHeadingText, cameraMinSpecificationsMessageText, cameraNotFoundHeadingText, cameraNotFoundMessageText, cancelLivenessCheckText, connectionTimeoutHeaderText, connectionTimeoutMessageText, clientHeaderText, clientMessageText, errorLabelText, hintCanNotIdentifyText, hintCenterFaceText, hintCenterFaceInstructionText, hintFaceOffCenterText, hintConnectingText, hintFaceDetectedText, hintHoldFaceForFreshnessText, hintIlluminationNormalText, hintIlluminationTooBrightText, hintIlluminationTooDarkText, hintMoveFaceFrontOfCameraText, hintTooManyFacesText, hintTooCloseText, hintTooFarText, hintVerifyingText, hintCheckCompleteText, hintMatchIndicatorText, faceDistanceHeaderText, faceDistanceMessageText, goodFitCaptionText, goodFitAltText, landscapeHeaderText, landscapeMessageText, multipleFacesHeaderText, multipleFacesMessageText, photosensitivityWarningBodyText, photosensitivityWarningHeadingText, photosensitivityWarningInfoText, photosensitivityWarningLabelText, photosensitivyWarningBodyText, photosensitivyWarningHeadingText, photosensitivyWarningInfoText, photosensitivyWarningLabelText, portraitMessageText, retryCameraPermissionsText, recordingIndicatorText, serverHeaderText, serverMessageText, startScreenBeginCheckText, timeoutHeaderText, timeoutMessageText, tooFarCaptionText, tooFarAltText, tryAgainText, waitingCameraPermissionText, } = displayText;
3195
3216
  const hintDisplayText = {
3196
3217
  hintMoveFaceFrontOfCameraText,
3197
3218
  hintTooManyFacesText,
@@ -3240,6 +3261,8 @@ function getDisplayText(overrideDisplayText) {
3240
3261
  recordingIndicatorText,
3241
3262
  };
3242
3263
  const errorDisplayText = {
3264
+ connectionTimeoutHeaderText,
3265
+ connectionTimeoutMessageText,
3243
3266
  errorLabelText,
3244
3267
  timeoutHeaderText,
3245
3268
  timeoutMessageText,
package/dist/styles.css CHANGED
@@ -718,7 +718,7 @@
718
718
  --amplify-components-link-focus-color: var(--amplify-colors-font-focus);
719
719
  --amplify-components-link-hover-color: var(--amplify-colors-font-hover);
720
720
  --amplify-components-link-visited-color: var(--amplify-colors-font-interactive);
721
- --amplify-components-liveness-camera-module-background-color: var(--amplify-colors-black);
721
+ --amplify-components-liveness-camera-module-background-color: var(--amplify-colors-background-primary);
722
722
  --amplify-components-loader-width: var(--amplify-font-sizes-medium);
723
723
  --amplify-components-loader-height: var(--amplify-font-sizes-medium);
724
724
  --amplify-components-loader-font-size: var(--amplify-font-sizes-xs);
@@ -60,6 +60,8 @@ export type StreamDisplayText = {
60
60
  };
61
61
  export declare const defaultErrorDisplayText: {
62
62
  errorLabelText: string;
63
+ connectionTimeoutHeaderText: string;
64
+ connectionTimeoutMessageText: string;
63
65
  timeoutHeaderText: string;
64
66
  timeoutMessageText: string;
65
67
  faceDistanceHeaderText: string;
@@ -1,5 +1 @@
1
- import { LivenessContext, LivenessEvent } from '../types';
2
- export declare const livenessMachine: import("xstate").StateMachine<LivenessContext, any, LivenessEvent, {
3
- value: any;
4
- context: LivenessContext;
5
- }, import("xstate").BaseActionObject, import("xstate").ServiceMap, import("xstate").ResolveTypegenMeta<import("xstate").TypegenDisabled, LivenessEvent, import("xstate").BaseActionObject, import("xstate").ServiceMap>>;
1
+ export { livenessMachine } from './machine';
@@ -0,0 +1,5 @@
1
+ import { LivenessContext, LivenessEvent } from '../types';
2
+ export declare const livenessMachine: import("xstate").StateMachine<LivenessContext, any, LivenessEvent, {
3
+ value: any;
4
+ context: LivenessContext;
5
+ }, import("xstate").BaseActionObject, import("xstate").ServiceMap, import("xstate").ResolveTypegenMeta<import("xstate").TypegenDisabled, LivenessEvent, import("xstate").BaseActionObject, import("xstate").ServiceMap>>;
@@ -2,6 +2,7 @@
2
2
  * The liveness error states
3
3
  */
4
4
  export declare const LivenessErrorState: {
5
+ readonly CONNECTION_TIMEOUT: "CONNECTION_TIMEOUT";
5
6
  readonly TIMEOUT: "TIMEOUT";
6
7
  readonly RUNTIME_ERROR: "RUNTIME_ERROR";
7
8
  readonly FRESHNESS_TIMEOUT: "FRESHNESS_TIMEOUT";
@@ -98,7 +98,6 @@ export declare enum IlluminationState {
98
98
  export declare enum FaceMatchState {
99
99
  MATCHED = "MATCHED",
100
100
  TOO_FAR = "TOO FAR",
101
- TOO_CLOSE = "TOO CLOSE",
102
101
  CANT_IDENTIFY = "CANNOT IDENTIFY",
103
102
  FACE_IDENTIFIED = "ONE FACE IDENTIFIED",
104
103
  TOO_MANY = "TOO MANY FACES",
@@ -11,7 +11,6 @@ export interface FaceMatchAssociatedParams {
11
11
  currentDetectedFace?: Face;
12
12
  startFace?: Face;
13
13
  endFace?: Face;
14
- initialFaceMatchTime?: number;
15
14
  }
16
15
  export interface FreshnessColorAssociatedParams {
17
16
  freshnessColorEl?: HTMLCanvasElement;
@@ -54,7 +53,7 @@ export interface LivenessContext {
54
53
  shouldDisconnect?: boolean;
55
54
  videoAssociatedParams?: VideoAssociatedParams;
56
55
  }
57
- export type LivenessEventTypes = 'BEGIN' | 'START_RECORDING' | 'TIMEOUT' | 'ERROR' | 'CANCEL' | 'SET_SESSION_INFO' | 'DISCONNECT_EVENT' | 'SET_DOM_AND_CAMERA_DETAILS' | 'UPDATE_DEVICE_AND_STREAM' | 'SERVER_ERROR' | 'RUNTIME_ERROR' | 'RETRY_CAMERA_CHECK' | 'MOBILE_LANDSCAPE_WARNING';
56
+ export type LivenessEventTypes = 'BEGIN' | 'CONNECTION_TIMEOUT' | 'START_RECORDING' | 'TIMEOUT' | 'ERROR' | 'CANCEL' | 'SET_SESSION_INFO' | 'DISCONNECT_EVENT' | 'SET_DOM_AND_CAMERA_DETAILS' | 'UPDATE_DEVICE_AND_STREAM' | 'SERVER_ERROR' | 'RUNTIME_ERROR' | 'RETRY_CAMERA_CHECK' | 'MOBILE_LANDSCAPE_WARNING';
58
57
  export type LivenessEventData = Record<PropertyKey, any>;
59
58
  export interface LivenessEvent {
60
59
  type: LivenessEventTypes;
@@ -67,7 +66,7 @@ export interface StreamActorCallback {
67
66
  type: 'DISCONNECT_EVENT';
68
67
  }): void;
69
68
  (params: {
70
- type: 'SERVER_ERROR';
69
+ type: 'SERVER_ERROR' | 'CONNECTION_TIMEOUT';
71
70
  data: {
72
71
  error: Error;
73
72
  };
@@ -1,6 +1,12 @@
1
1
  export declare const FACE_DISTANCE_THRESHOLD = 0.32;
2
2
  export declare const REDUCED_THRESHOLD = 0.4;
3
3
  export declare const REDUCED_THRESHOLD_MOBILE = 0.37;
4
+ export declare const PUPIL_DISTANCE_WEIGHT = 2;
5
+ export declare const FACE_HEIGHT_WEIGHT = 1.8;
6
+ export declare const FACE_MATCH_RANGE_MIN = 0;
7
+ export declare const FACE_MATCH_RANGE_MAX = 1;
8
+ export declare const FACE_MATCH_WEIGHT_MIN = 0.25;
9
+ export declare const FACE_MATCH_WEIGHT_MAX = 0.75;
4
10
  export declare const WS_CLOSURE_CODE: {
5
11
  SUCCESS_CODE: number;
6
12
  DEFAULT_ERROR_CODE: number;
@@ -1,5 +1,6 @@
1
1
  import { HttpRequest, HttpResponse } from '@smithy/protocol-http';
2
2
  import { Provider, RequestHandler, RequestHandlerMetadata } from '@smithy/types';
3
+ export declare const WEBSOCKET_CONNECTION_TIMEOUT_MESSAGE = "Websocket connection timeout";
3
4
  export interface WebSocketFetchHandlerOptions {
4
5
  /**
5
6
  * The maximum time in milliseconds that the connection phase of a request
@@ -1,5 +1,6 @@
1
1
  import { LivenessResponseStream } from '@aws-sdk/client-rekognitionstreaming';
2
2
  export declare const isServerSesssionInformationEvent: (value: unknown) => value is LivenessResponseStream.ServerSessionInformationEventMember;
3
+ export declare const isConnectionTimeoutError: (error: unknown) => error is Error;
3
4
  export declare const isDisconnectionEvent: (value: unknown) => value is LivenessResponseStream.DisconnectionEventMember;
4
5
  export declare const isValidationExceptionEvent: (value: unknown) => value is LivenessResponseStream.ValidationExceptionMember;
5
6
  export declare const isInternalServerExceptionEvent: (value: unknown) => value is LivenessResponseStream.InternalServerExceptionMember;
@@ -0,0 +1,17 @@
1
+ import { LivenessOvalDetails, Face, FaceMatchState } from '../types';
2
+ import { SessionInformation } from '@aws-sdk/client-rekognitionstreaming';
3
+ interface MatchStateInOvalParams {
4
+ face: Face;
5
+ ovalDetails: LivenessOvalDetails;
6
+ initialFaceIntersection: number;
7
+ sessionInformation: SessionInformation;
8
+ frameHeight: number;
9
+ }
10
+ /**
11
+ * Returns the state of the provided face with respect to the provided liveness oval.
12
+ */
13
+ export declare function getFaceMatchStateInLivenessOval({ face, ovalDetails, initialFaceIntersection, sessionInformation, frameHeight, }: MatchStateInOvalParams): {
14
+ faceMatchState: FaceMatchState;
15
+ faceMatchPercentage: number;
16
+ };
17
+ export {};
@@ -2,5 +2,6 @@ export * from './blazefaceFaceDetection';
2
2
  export * from './videoRecorder';
3
3
  export * from './support';
4
4
  export * from './liveness';
5
+ export * from './getFaceMatchStateInLivenessOval';
5
6
  export * from './streamProvider';
6
7
  export * from './freshnessColorDisplay';