@aws-amplify/ui-react-liveness 3.0.14 → 3.0.16

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. package/dist/esm/components/FaceLivenessDetector/FaceLivenessDetectorCore.mjs +2 -6
  2. package/dist/esm/components/FaceLivenessDetector/LivenessCheck/LivenessCameraModule.mjs +2 -6
  3. package/dist/esm/components/FaceLivenessDetector/LivenessCheck/LivenessCheck.mjs +2 -6
  4. package/dist/esm/components/FaceLivenessDetector/service/machine/{index.mjs → machine.mjs} +24 -29
  5. package/dist/esm/components/FaceLivenessDetector/service/types/liveness.mjs +0 -1
  6. package/dist/esm/components/FaceLivenessDetector/service/utils/constants.mjs +10 -2
  7. package/dist/esm/components/FaceLivenessDetector/service/utils/{CustomWebSocketFetchHandler.mjs → createStreamingClient/CustomWebSocketFetchHandler.mjs} +1 -1
  8. package/dist/esm/components/FaceLivenessDetector/service/utils/createStreamingClient/Signer.mjs +18 -0
  9. package/dist/esm/components/FaceLivenessDetector/service/utils/createStreamingClient/createStreamingClient.mjs +27 -0
  10. package/dist/esm/components/FaceLivenessDetector/service/utils/createStreamingClient/resolveCredentials.mjs +38 -0
  11. package/dist/esm/components/FaceLivenessDetector/service/utils/getFaceMatchStateInLivenessOval.mjs +59 -0
  12. package/dist/esm/components/FaceLivenessDetector/service/utils/liveness.mjs +22 -74
  13. package/dist/esm/components/FaceLivenessDetector/service/utils/streamProvider.mjs +8 -30
  14. package/dist/esm/components/FaceLivenessDetector/shared/DefaultStartScreenComponents.mjs +2 -6
  15. package/dist/esm/components/FaceLivenessDetector/shared/FaceLivenessErrorModal.mjs +2 -6
  16. package/dist/esm/components/FaceLivenessDetector/shared/Hint.mjs +6 -13
  17. package/dist/esm/version.mjs +1 -1
  18. package/dist/index.js +186 -135
  19. package/dist/types/components/FaceLivenessDetector/service/machine/index.d.ts +1 -5
  20. package/dist/types/components/FaceLivenessDetector/service/machine/machine.d.ts +5 -0
  21. package/dist/types/components/FaceLivenessDetector/service/types/credentials.d.ts +9 -10
  22. package/dist/types/components/FaceLivenessDetector/service/types/liveness.d.ts +0 -1
  23. package/dist/types/components/FaceLivenessDetector/service/types/machine.d.ts +0 -1
  24. package/dist/types/components/FaceLivenessDetector/service/utils/constants.d.ts +6 -0
  25. package/dist/types/components/FaceLivenessDetector/service/utils/createStreamingClient/Signer.d.ts +6 -0
  26. package/dist/types/components/FaceLivenessDetector/service/utils/createStreamingClient/createStreamingClient.d.ts +9 -0
  27. package/dist/types/components/FaceLivenessDetector/service/utils/createStreamingClient/index.d.ts +1 -0
  28. package/dist/types/components/FaceLivenessDetector/service/utils/createStreamingClient/resolveCredentials.d.ts +10 -0
  29. package/dist/types/components/FaceLivenessDetector/service/utils/getFaceMatchStateInLivenessOval.d.ts +17 -0
  30. package/dist/types/components/FaceLivenessDetector/service/utils/index.d.ts +1 -0
  31. package/dist/types/components/FaceLivenessDetector/service/utils/liveness.d.ts +1 -8
  32. package/dist/types/version.d.ts +1 -1
  33. package/package.json +3 -2
  34. package/dist/types/components/FaceLivenessDetector/service/utils/{CustomWebSocketFetchHandler.d.ts → createStreamingClient/CustomWebSocketFetchHandler.d.ts} +0 -0
package/dist/esm/components/FaceLivenessDetector/shared/Hint.mjs CHANGED
@@ -1,18 +1,14 @@
  import * as React from 'react';
  import { VisuallyHidden, View } from '@aws-amplify/ui-react';
- import '../service/machine/index.mjs';
+ import '../service/machine/machine.mjs';
  import { FaceMatchState, IlluminationState } from '../service/types/liveness.mjs';
  import '@tensorflow/tfjs-core';
  import '@tensorflow-models/face-detection';
  import '@tensorflow/tfjs-backend-wasm';
  import '@tensorflow/tfjs-backend-cpu';
  import '@aws-amplify/core/internals/utils';
- import 'aws-amplify/auth';
  import '@aws-sdk/client-rekognitionstreaming';
- import '@aws-sdk/util-format-url';
- import '@smithy/eventstream-serde-browser';
- import '@smithy/fetch-http-handler';
- import '@smithy/protocol-http';
+ import '../service/utils/createStreamingClient/createStreamingClient.mjs';
  import '../service/utils/freshnessColorDisplay.mjs';
  import { useLivenessActor } from '../hooks/useLivenessActor.mjs';
  import { createLivenessSelector, useLivenessSelector } from '../hooks/useLivenessSelector.mjs';
@@ -56,7 +52,6 @@ const Hint = ({ hintDisplayText }) => {
  [FaceMatchState.CANT_IDENTIFY]: hintDisplayText.hintCanNotIdentifyText,
  [FaceMatchState.FACE_IDENTIFIED]: hintDisplayText.hintTooFarText,
  [FaceMatchState.TOO_MANY]: hintDisplayText.hintTooManyFacesText,
- [FaceMatchState.TOO_CLOSE]: hintDisplayText.hintTooCloseText,
  [FaceMatchState.TOO_FAR]: hintDisplayText.hintTooFarText,
  [FaceMatchState.MATCHED]: hintDisplayText.hintHoldFaceForFreshnessText,
  [FaceMatchState.OFF_CENTER]: hintDisplayText.hintFaceOffCenterText,
@@ -102,13 +97,11 @@ const Hint = ({ hintDisplayText }) => {
  return React.createElement(DefaultToast, { text: hintDisplayText.hintHoldFaceForFreshnessText });
  }
  if (isRecording && !isFlashingFreshness) {
- // During face matching, we want to only show the TOO_CLOSE or
- // TOO_FAR texts. If FaceMatchState matches TOO_CLOSE, we'll show
- // the TOO_CLOSE text, but for FACE_IDENTIFED, CANT_IDENTIFY, TOO_MANY
+ // During face matching, we want to only show the
+ // TOO_FAR texts. For FACE_IDENTIFIED, CANT_IDENTIFY, TOO_MANY
  // we are defaulting to the TOO_FAR text (for now).
  let resultHintString = FaceMatchStateStringMap[FaceMatchState.TOO_FAR];
- if (faceMatchState === FaceMatchState.TOO_CLOSE ||
- faceMatchState === FaceMatchState.MATCHED) {
+ if (faceMatchState === FaceMatchState.MATCHED) {
  resultHintString = FaceMatchStateStringMap[faceMatchState];
  }
  // If the face is outside the oval set the aria-label to a string about centering face in oval
@@ -122,7 +115,7 @@ const Hint = ({ hintDisplayText }) => {
  faceMatchPercentage > 50) {
  a11yHintString = hintDisplayText.hintMatchIndicatorText;
  }
- return (React.createElement(Toast, { size: "large", variation: faceMatchState === FaceMatchState.TOO_CLOSE ? 'error' : 'primary' },
+ return (React.createElement(Toast, { size: "large", variation: 'primary' },
  React.createElement(VisuallyHidden, { "aria-live": "assertive" }, a11yHintString),
  React.createElement(View, { "aria-label": a11yHintString }, resultHintString)));
  }
package/dist/esm/version.mjs CHANGED
@@ -1,3 +1,3 @@
- const VERSION = '3.0.14';
+ const VERSION = '3.0.16';
 
  export { VERSION };
package/dist/index.js CHANGED
@@ -17,6 +17,7 @@ var utilFormatUrl = require('@aws-sdk/util-format-url');
  var eventstreamSerdeBrowser = require('@smithy/eventstream-serde-browser');
  var fetchHttpHandler = require('@smithy/fetch-http-handler');
  var protocolHttp = require('@smithy/protocol-http');
+ var signatureV4 = require('@smithy/signature-v4');
  var uiReact = require('@aws-amplify/ui-react');
  var ui = require('@aws-amplify/ui');
  var internal = require('@aws-amplify/ui-react/internal');
@@ -74,7 +75,6 @@ var FaceMatchState;
  (function (FaceMatchState) {
  FaceMatchState["MATCHED"] = "MATCHED";
  FaceMatchState["TOO_FAR"] = "TOO FAR";
- FaceMatchState["TOO_CLOSE"] = "TOO CLOSE";
  FaceMatchState["CANT_IDENTIFY"] = "CANNOT IDENTIFY";
  FaceMatchState["FACE_IDENTIFIED"] = "ONE FACE IDENTIFIED";
  FaceMatchState["TOO_MANY"] = "TOO MANY FACES";
@@ -98,12 +98,20 @@ const LivenessErrorState = {
 
  // Face distance is calculated as pupilDistance / ovalWidth.
  // The further away you are from the camera the distance between your pupils will decrease, thus lowering the threshold values.
- // These FACE_DISTNACE_THRESHOLD values are determined by the science team and should only be changed with their approval.
+ // These FACE_DISTANCE_THRESHOLD values are determined by the science team and should only be changed with their approval.
  // We want to ensure at the start of a check that the user's pupilDistance/ovalWidth is below FACE_DISTANCE_THRESHOLD to ensure that they are starting
  // a certain distance away from the camera.
  const FACE_DISTANCE_THRESHOLD = 0.32;
  const REDUCED_THRESHOLD = 0.4;
  const REDUCED_THRESHOLD_MOBILE = 0.37;
+ // Constants from science team to determine ocular distance (space between eyes)
+ const PUPIL_DISTANCE_WEIGHT = 2.0;
+ const FACE_HEIGHT_WEIGHT = 1.8;
+ // Constants from science team to find face match percentage
+ const FACE_MATCH_RANGE_MIN = 0;
+ const FACE_MATCH_RANGE_MAX = 1;
+ const FACE_MATCH_WEIGHT_MIN = 0.25;
+ const FACE_MATCH_WEIGHT_MAX = 0.75;
  const WS_CLOSURE_CODE = {
  SUCCESS_CODE: 1000,
  DEFAULT_ERROR_CODE: 4000,
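
The added weighting constants give names to the inline `alpha`/`gamma` literals that 3.0.14 hard-coded in two places (see the `generateBboxFromLandmarks` and `isFaceDistanceBelowThreshold` hunks below). A minimal TypeScript sketch of the ocular-width calculation they drive; the constants and formula are taken from the diff, while the sample inputs are hypothetical:

    // Ocular width as a weighted average of pupil distance and face height.
    const PUPIL_DISTANCE_WEIGHT = 2.0;
    const FACE_HEIGHT_WEIGHT = 1.8;

    function ocularWidth(pupilDistance: number, faceHeight: number): number {
      return (PUPIL_DISTANCE_WEIGHT * pupilDistance + FACE_HEIGHT_WEIGHT * faceHeight) / 2;
    }

    // e.g. ocularWidth(80, 120) === (2.0 * 80 + 1.8 * 120) / 2 === 188 (hypothetical pixel values)
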
@@ -291,87 +299,33 @@ function getPupilDistanceAndFaceHeight(face) {
  const faceHeight = Math.sqrt((eyeCenter[0] - mouth[0]) ** 2 + (eyeCenter[1] - mouth[1]) ** 2);
  return { pupilDistance, faceHeight };
  }
- function generateBboxFromLandmarks(face, oval) {
- const { leftEye, rightEye, nose, leftEar, rightEar, top: faceTop, height: faceHeight, } = face;
+ function generateBboxFromLandmarks(face, oval, frameHeight) {
+ const { leftEye, rightEye, nose, leftEar, rightEar } = face;
  const { height: ovalHeight, centerY } = oval;
  const ovalTop = centerY - ovalHeight / 2;
  const eyeCenter = [];
  eyeCenter[0] = (leftEye[0] + rightEye[0]) / 2;
  eyeCenter[1] = (leftEye[1] + rightEye[1]) / 2;
  const { pupilDistance: pd, faceHeight: fh } = getPupilDistanceAndFaceHeight(face);
- const alpha = 2.0, gamma = 1.8;
- const ow = (alpha * pd + gamma * fh) / 2;
- const oh = 1.618 * ow;
- let cx;
+ const ocularWidth = (PUPIL_DISTANCE_WEIGHT * pd + FACE_HEIGHT_WEIGHT * fh) / 2;
+ let centerFaceX, centerFaceY;
  if (eyeCenter[1] <= (ovalTop + ovalHeight) / 2) {
- cx = (eyeCenter[0] + nose[0]) / 2;
+ centerFaceX = (eyeCenter[0] + nose[0]) / 2;
+ centerFaceY = (eyeCenter[1] + nose[1]) / 2;
  }
  else {
- cx = eyeCenter[0];
- }
- const bottom = faceTop + faceHeight;
- const top = bottom - oh;
- const left = Math.min(cx - ow / 2, rightEar[0]);
- const right = Math.max(cx + ow / 2, leftEar[0]);
+ // when face tilts down
+ centerFaceX = eyeCenter[0];
+ centerFaceY = eyeCenter[1];
+ }
+ const faceWidth = ocularWidth;
+ const faceHeight = 1.68 * faceWidth;
+ const top = Math.max(centerFaceY - faceHeight / 2, 0);
+ const bottom = Math.min(centerFaceY + faceHeight / 2, frameHeight);
+ const left = Math.min(centerFaceX - ocularWidth / 2, rightEar[0]);
+ const right = Math.max(centerFaceX + ocularWidth / 2, leftEar[0]);
  return { bottom, left, right, top };
  }
- /**
- * Returns the state of the provided face with respect to the provided liveness oval.
- */
- // eslint-disable-next-line max-params
- function getFaceMatchStateInLivenessOval(face, ovalDetails, initialFaceIntersection, sessionInformation) {
- let faceMatchState;
- const challengeConfig = sessionInformation?.Challenge?.FaceMovementAndLightChallenge
- ?.ChallengeConfig;
- if (!challengeConfig ||
- !challengeConfig.OvalIouThreshold ||
- !challengeConfig.OvalIouHeightThreshold ||
- !challengeConfig.OvalIouWidthThreshold ||
- !challengeConfig.FaceIouHeightThreshold ||
- !challengeConfig.FaceIouWidthThreshold) {
- throw new Error('Challenge information not returned from session information.');
- }
- const { OvalIouThreshold, OvalIouHeightThreshold, OvalIouWidthThreshold, FaceIouHeightThreshold, FaceIouWidthThreshold, } = challengeConfig;
- const faceBoundingBox = generateBboxFromLandmarks(face, ovalDetails);
- const minFaceX = faceBoundingBox.left;
- const maxFaceX = faceBoundingBox.right;
- const minFaceY = faceBoundingBox.top;
- const maxFaceY = faceBoundingBox.bottom;
- const { ovalBoundingBox, minOvalX, minOvalY, maxOvalX, maxOvalY } = getOvalBoundingBox(ovalDetails);
- const intersection = getIntersectionOverUnion(faceBoundingBox, ovalBoundingBox);
- const intersectionThreshold = OvalIouThreshold;
- const ovalMatchWidthThreshold = ovalDetails.width * OvalIouWidthThreshold;
- const ovalMatchHeightThreshold = ovalDetails.height * OvalIouHeightThreshold;
- const faceDetectionWidthThreshold = ovalDetails.width * FaceIouWidthThreshold;
- const faceDetectionHeightThreshold = ovalDetails.height * FaceIouHeightThreshold;
- /** From Science
- * p=max(min(1,0.75∗(si−s0)/(st−s0)+0.25)),0)
- */
- const faceMatchPercentage = Math.max(Math.min(1, (0.75 * (intersection - initialFaceIntersection)) /
- (intersectionThreshold - initialFaceIntersection) +
- 0.25), 0) * 100;
- const faceIsOutsideOvalToTheLeft = minOvalX > minFaceX && maxOvalX > maxFaceX;
- const faceIsOutsideOvalToTheRight = minFaceX > minOvalX && maxFaceX > maxOvalX;
- if (intersection > intersectionThreshold &&
- Math.abs(minOvalX - minFaceX) < ovalMatchWidthThreshold &&
- Math.abs(maxOvalX - maxFaceX) < ovalMatchWidthThreshold &&
- Math.abs(maxOvalY - maxFaceY) < ovalMatchHeightThreshold) {
- faceMatchState = FaceMatchState.MATCHED;
- }
- else if (faceIsOutsideOvalToTheLeft || faceIsOutsideOvalToTheRight) {
- faceMatchState = FaceMatchState.OFF_CENTER;
- }
- else if (minOvalY - minFaceY > faceDetectionHeightThreshold ||
- maxFaceY - maxOvalY > faceDetectionHeightThreshold ||
- (minOvalX - minFaceX > faceDetectionWidthThreshold &&
- maxFaceX - maxOvalX > faceDetectionWidthThreshold)) {
- faceMatchState = FaceMatchState.TOO_CLOSE;
- }
- else {
- faceMatchState = FaceMatchState.TOO_FAR;
- }
- return { faceMatchState, faceMatchPercentage };
- }
  /**
  * Returns the illumination state in the provided video frame.
  */
@@ -547,8 +501,10 @@ async function isFaceDistanceBelowThreshold({ faceDetector, videoEl, ovalDetails
  detectedFace = detectedFaces[0];
  const { width } = ovalDetails;
  const { pupilDistance, faceHeight } = getPupilDistanceAndFaceHeight(detectedFace);
- const alpha = 2.0, gamma = 1.8;
- const calibratedPupilDistance = (alpha * pupilDistance + gamma * faceHeight) / 2 / alpha;
+ const calibratedPupilDistance = (PUPIL_DISTANCE_WEIGHT * pupilDistance +
+ FACE_HEIGHT_WEIGHT * faceHeight) /
+ 2 /
+ PUPIL_DISTANCE_WEIGHT;
  if (width) {
  isDistanceBelowThreshold =
  calibratedPupilDistance / width <
@@ -781,7 +737,61 @@ class VideoRecorder {
  }
  }
 
- const VERSION = '3.0.14';
+ /**
+ * Returns the state of the provided face with respect to the provided liveness oval.
+ */
+ function getFaceMatchStateInLivenessOval({ face, ovalDetails, initialFaceIntersection, sessionInformation, frameHeight, }) {
+ let faceMatchState;
+ const challengeConfig = sessionInformation?.Challenge?.FaceMovementAndLightChallenge
+ ?.ChallengeConfig;
+ if (!challengeConfig ||
+ !challengeConfig.OvalIouThreshold ||
+ !challengeConfig.OvalIouHeightThreshold ||
+ !challengeConfig.OvalIouWidthThreshold ||
+ !challengeConfig.FaceIouHeightThreshold ||
+ !challengeConfig.FaceIouWidthThreshold) {
+ throw new Error('Challenge information not returned from session information.');
+ }
+ const { OvalIouThreshold, FaceIouHeightThreshold, FaceIouWidthThreshold } = challengeConfig;
+ const faceBoundingBox = generateBboxFromLandmarks(face, ovalDetails, frameHeight);
+ const minFaceX = faceBoundingBox.left;
+ const maxFaceX = faceBoundingBox.right;
+ const minFaceY = faceBoundingBox.top;
+ const maxFaceY = faceBoundingBox.bottom;
+ const { ovalBoundingBox, minOvalX, minOvalY, maxOvalX, maxOvalY } = getOvalBoundingBox(ovalDetails);
+ const intersection = getIntersectionOverUnion(faceBoundingBox, ovalBoundingBox);
+ const intersectionThreshold = OvalIouThreshold;
+ const faceDetectionWidthThreshold = ovalDetails.width * FaceIouWidthThreshold;
+ const faceDetectionHeightThreshold = ovalDetails.height * FaceIouHeightThreshold;
+ /** From Science
+ * p=max(min(1,0.75∗(si−s0)/(st−s0)+0.25)),0)
+ */
+ const faceMatchPercentage = Math.max(Math.min(FACE_MATCH_RANGE_MAX, (FACE_MATCH_WEIGHT_MAX * (intersection - initialFaceIntersection)) /
+ (intersectionThreshold - initialFaceIntersection) +
+ FACE_MATCH_WEIGHT_MIN), FACE_MATCH_RANGE_MIN) * 100;
+ const isFaceOutsideOvalToTheLeft = minOvalX > minFaceX && maxOvalX > maxFaceX;
+ const isFaceOutsideOvalToTheRight = minFaceX > minOvalX && maxFaceX > maxOvalX;
+ const isFaceMatched = intersection > intersectionThreshold;
+ const isFaceMatchedClosely = minOvalY - minFaceY > faceDetectionHeightThreshold ||
+ maxFaceY - maxOvalY > faceDetectionHeightThreshold ||
+ (minOvalX - minFaceX > faceDetectionWidthThreshold &&
+ maxFaceX - maxOvalX > faceDetectionWidthThreshold);
+ if (isFaceMatched) {
+ faceMatchState = FaceMatchState.MATCHED;
+ }
+ else if (isFaceOutsideOvalToTheLeft || isFaceOutsideOvalToTheRight) {
+ faceMatchState = FaceMatchState.OFF_CENTER;
+ }
+ else if (isFaceMatchedClosely) {
+ faceMatchState = FaceMatchState.MATCHED;
+ }
+ else {
+ faceMatchState = FaceMatchState.TOO_FAR;
+ }
+ return { faceMatchState, faceMatchPercentage };
+ }
+
+ const VERSION = '3.0.16';
 
  const BASE_USER_AGENT = `ui-react-liveness/${VERSION}`;
  const getLivenessUserAgent = () => {
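
The "From Science" comment's formula, p = max(min(1, 0.75·(si − s0)/(st − s0) + 0.25), 0), now reads from the named constants instead of bare literals. A minimal TypeScript sketch of the same percentage calculation, where si is the current face/oval intersection-over-union, s0 the intersection at the start of the check, and st the `OvalIouThreshold`; the sample numbers are hypothetical:

    const FACE_MATCH_RANGE_MIN = 0;
    const FACE_MATCH_RANGE_MAX = 1;
    const FACE_MATCH_WEIGHT_MIN = 0.25;
    const FACE_MATCH_WEIGHT_MAX = 0.75;

    function faceMatchPercentage(
      intersection: number,            // si: current face/oval IoU
      initialFaceIntersection: number, // s0: IoU when the check began
      intersectionThreshold: number    // st: OvalIouThreshold from session information
    ): number {
      const p =
        (FACE_MATCH_WEIGHT_MAX * (intersection - initialFaceIntersection)) /
          (intersectionThreshold - initialFaceIntersection) +
        FACE_MATCH_WEIGHT_MIN;
      // Clamp to [0, 1], then express as a percentage.
      return Math.max(Math.min(FACE_MATCH_RANGE_MAX, p), FACE_MATCH_RANGE_MIN) * 100;
    }

    // e.g. faceMatchPercentage(0.5, 0.2, 0.6) === (0.75 * 0.3 / 0.4 + 0.25) * 100 === 81.25
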
@@ -980,6 +990,75 @@ class CustomWebSocketFetchHandler {
  }
  }
 
+ const isCredentialsProvider = (credentialsProvider) => typeof credentialsProvider === 'function';
+ // the return interface of `fetchAuthSession` includes `credentials` as
+ // optional, but `credentials` is always returned. If `fetchAuthSession`
+ // is called for an unauthenticated end user, values of `accessKeyId`
+ // and `secretAccessKey` are `undefined`
+ const isCredentials = (credentials) => !!(credentials?.accessKeyId && credentials?.secretAccessKey);
+ /**
+ * Resolves the `credentials` param to be passed to `RekognitionStreamingClient` which accepts either:
+ * - a `credentials` object
+ * - a `credentialsProvider` callback
+ *
+ * @param credentialsProvider optional `credentialsProvider` callback
+ * @returns {Promise<AwsCredentials | AwsCredentialProvider>} `credentials` object or valid `credentialsProvider` callback
+ */
+ async function resolveCredentials(credentialsProvider) {
+ const hasCredentialsProvider = isCredentialsProvider(credentialsProvider);
+ if (hasCredentialsProvider) {
+ return credentialsProvider;
+ }
+ if (credentialsProvider && !hasCredentialsProvider) {
+ throw new Error('Invalid credentialsProvider');
+ }
+ try {
+ const result = (await auth.fetchAuthSession()).credentials;
+ if (isCredentials(result)) {
+ return result;
+ }
+ throw new Error('Missing credentials');
+ }
+ catch (e) {
+ const { message } = e;
+ throw new Error(`Invalid credentials: ${message}`);
+ }
+ }
+
+ // override aws sdk default value of 60
+ const REQUEST_EXPIRY = 299;
+ class Signer extends signatureV4.SignatureV4 {
+ presign(request, options) {
+ return super.presign(request, {
+ ...options,
+ expiresIn: REQUEST_EXPIRY,
+ // `headers` that should not be signed. Liveness WebSocket
+ // request omits `headers` except for required `host` header. Signature
+ // could be a mismatch if other `headers` are signed
+ unsignableHeaders: new Set(Object.keys(request.headers).filter((header) => header !== 'host')),
+ });
+ }
+ }
+
+ const CONNECTION_TIMEOUT = 10000;
+ const CUSTOM_USER_AGENT = `${utils.getAmplifyUserAgent()} ${getLivenessUserAgent()}`;
+ async function createStreamingClient({ credentialsProvider, endpointOverride, region, }) {
+ const credentials = await resolveCredentials(credentialsProvider);
+ const clientconfig = {
+ credentials,
+ customUserAgent: CUSTOM_USER_AGENT,
+ region,
+ requestHandler: new CustomWebSocketFetchHandler({
+ connectionTimeout: CONNECTION_TIMEOUT,
+ }),
+ signerConstructor: Signer,
+ };
+ if (endpointOverride) {
+ clientconfig.endpointProvider = () => ({ url: new URL(endpointOverride) });
+ }
+ return new clientRekognitionstreaming.RekognitionStreamingClient(clientconfig);
+ }
+
  const TIME_SLICE = 1000;
  function isBlob(obj) {
  return obj.arrayBuffer !== undefined;
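
Taken together, `resolveCredentials` accepts either a provider callback or nothing (falling back to Amplify's `fetchAuthSession`), and `createStreamingClient` wires the result to the WebSocket handler and presigner. A hedged TypeScript sketch of a caller-supplied provider; the credentials endpoint is hypothetical, while `createStreamingClient` and its config shape are as defined in the hunk above:

    // A hypothetical credentialsProvider: any async source of AWS credentials works.
    const credentialsProvider = async () => {
      const response = await fetch('https://example.com/aws-credentials'); // hypothetical endpoint
      const { accessKeyId, secretAccessKey, sessionToken, expiration } = await response.json();
      return { accessKeyId, secretAccessKey, sessionToken, expiration: new Date(expiration) };
    };

    // The helper validates the provider, sets the custom user agent and
    // connection timeout, and installs Signer as the signerConstructor.
    const client = await createStreamingClient({
      credentialsProvider,
      region: 'us-east-1',
    });
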
@@ -1009,9 +1088,7 @@ class LivenessStreamProvider {
  this.videoRecorder.start(TIME_SLICE);
  }
  sendClientInfo(clientInfo) {
- this.videoRecorder.dispatch(new MessageEvent('clientSesssionInfo', {
- data: { clientInfo },
- }));
+ this.videoRecorder.dispatch(new MessageEvent('clientSesssionInfo', { data: { clientInfo } }));
  }
  async stopVideo() {
  await this.videoRecorder.stop();
@@ -1023,32 +1100,15 @@
  if (this.videoRecorder.getState() === 'recording') {
  await this.stopVideo();
  }
- this.videoRecorder.dispatch(new MessageEvent('endStreamWithCode', {
- data: { code: code },
- }));
+ this.videoRecorder.dispatch(new MessageEvent('endStreamWithCode', { data: { code } }));
  return;
  }
  async init() {
- const credentials = this.credentialProvider ?? (await auth.fetchAuthSession()).credentials;
- if (!credentials) {
- throw new Error('No credentials');
- }
- const clientconfig = {
- credentials,
+ this._client = await createStreamingClient({
+ credentialsProvider: this.credentialProvider,
+ endpointOverride: this.endpointOverride,
  region: this.region,
- customUserAgent: `${utils.getAmplifyUserAgent()} ${getLivenessUserAgent()}`,
- requestHandler: new CustomWebSocketFetchHandler({
- connectionTimeout: 10000,
- }),
- };
- if (this.endpointOverride) {
- const override = this.endpointOverride;
- clientconfig.endpointProvider = () => {
- const url = new URL(override);
- return { url };
- };
- }
- this._client = new clientRekognitionstreaming.RekognitionStreamingClient(clientconfig);
+ });
  this.responseStream = await this.startLivenessVideoConnection();
  }
  // Creates a generator from a stream of video chunks and livenessActionDocuments and yields VideoEvent and ClientEvents
@@ -1289,7 +1349,6 @@ const STATIC_VIDEO_CONSTRAINTS = {
 
  const CAMERA_ID_KEY = 'AmplifyLivenessCameraId';
  const DEFAULT_FACE_FIT_TIMEOUT = 7000;
- const MIN_FACE_MATCH_TIME = 1000;
  let responseStream;
  const responseStreamActor = async (callback) => {
  try {
@@ -1379,7 +1438,6 @@ const livenessMachine = xstate.createMachine({
  currentDetectedFace: undefined,
  startFace: undefined,
  endFace: undefined,
- initialFaceMatchTime: undefined,
  },
  freshnessColorAssociatedParams: {
  freshnessColorEl: undefined,
@@ -1564,6 +1622,8 @@
  100: { target: 'checkRecordingStarted' },
  },
  },
+ // Evaluates face match and moves to checkMatch
+ // which continually checks for match until either timeout or face match
  ovalMatching: {
  entry: 'cancelOvalDrawingTimeout',
  invoke: {
@@ -1574,29 +1634,32 @@
  },
  },
  },
+ // If `hasFaceMatchedInOval` is true, then move to `delayBeforeFlash`, which pauses
+ // for one second to show "Hold still" text before moving to `flashFreshnessColors`.
+ // If not, move back to ovalMatching and re-evaluate match state
  checkMatch: {
  after: {
  0: {
- target: 'flashFreshnessColors',
- cond: 'hasFaceMatchedInOvalWithMinTime',
+ target: 'delayBeforeFlash',
+ cond: 'hasFaceMatchedInOval',
  actions: [
+ 'setFaceMatchTimeAndStartFace',
  'updateEndFaceMatch',
  'setupFlashFreshnessColors',
  'cancelOvalMatchTimeout',
  'cancelOvalDrawingTimeout',
  ],
  },
- 0.1: {
- target: 'ovalMatching',
- cond: 'hasFaceMatchedInOval',
- actions: 'setFaceMatchTimeAndStartFace',
- },
  1: {
  target: 'ovalMatching',
- cond: 'hasNotFaceMatchedInOval',
  },
  },
  },
+ delayBeforeFlash: {
+ after: {
+ 1000: 'flashFreshnessColors',
+ },
+ },
  flashFreshnessColors: {
  invoke: {
  src: 'flashColors',
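
The new `delayBeforeFlash` state holds for one second before the freshness flash, using xstate's delayed (`after`) transitions. A minimal standalone TypeScript sketch of that pattern (state names borrowed from the diff; the machine itself is illustrative):

    import { createMachine } from 'xstate';

    const delaySketch = createMachine({
      id: 'delaySketch',
      initial: 'delayBeforeFlash',
      states: {
        delayBeforeFlash: {
          // After 1000ms, transition automatically to the flash state.
          after: { 1000: 'flashFreshnessColors' },
        },
        flashFreshnessColors: { type: 'final' },
      },
    });
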
@@ -1861,10 +1924,6 @@
  startFace: context.faceMatchAssociatedParams.startFace === undefined
  ? context.faceMatchAssociatedParams.currentDetectedFace
  : context.faceMatchAssociatedParams.startFace,
- initialFaceMatchTime: context.faceMatchAssociatedParams.initialFaceMatchTime ===
- undefined
- ? Date.now()
- : context.faceMatchAssociatedParams.initialFaceMatchTime,
  };
  },
  }),
@@ -2036,21 +2095,10 @@
  },
  guards: {
  shouldTimeoutOnFailedAttempts: (context) => context.failedAttempts >= context.maxFailedAttempts,
- hasFaceMatchedInOvalWithMinTime: (context) => {
- const { faceMatchState, initialFaceMatchTime } = context.faceMatchAssociatedParams;
- const timeSinceInitialFaceMatch = Date.now() - initialFaceMatchTime;
- const hasMatched = faceMatchState === FaceMatchState.MATCHED &&
- timeSinceInitialFaceMatch >= MIN_FACE_MATCH_TIME;
- return hasMatched;
- },
  hasFaceMatchedInOval: (context) => {
  return (context.faceMatchAssociatedParams.faceMatchState ===
  FaceMatchState.MATCHED);
  },
- hasNotFaceMatchedInOval: (context) => {
- return (context.faceMatchAssociatedParams.faceMatchState !==
- FaceMatchState.MATCHED);
- },
  hasSingleFace: (context) => {
  return (context.faceMatchAssociatedParams.faceMatchState ===
  FaceMatchState.FACE_IDENTIFIED);
@@ -2267,7 +2315,7 @@
  videoWidth: videoEl.width,
  });
  // renormalize initial face
- const renormalizedFace = generateBboxFromLandmarks(initialFace, ovalDetails);
+ const renormalizedFace = generateBboxFromLandmarks(initialFace, ovalDetails, videoEl.videoHeight);
  initialFace.top = renormalizedFace.top;
  initialFace.left = renormalizedFace.left;
  initialFace.height = renormalizedFace.bottom - renormalizedFace.top;
@@ -2296,7 +2344,7 @@
  let faceMatchPercentage = 0;
  let detectedFace;
  let illuminationState;
- const initialFaceBoundingBox = generateBboxFromLandmarks(initialFace, ovalDetails);
+ const initialFaceBoundingBox = generateBboxFromLandmarks(initialFace, ovalDetails, videoEl.videoHeight);
  const { ovalBoundingBox } = getOvalBoundingBox(ovalDetails);
  const initialFaceIntersection = getIntersectionOverUnion(initialFaceBoundingBox, ovalBoundingBox);
  switch (detectedFaces.length) {
@@ -2309,7 +2357,13 @@
  case 1: {
  //exactly one face detected, match face with oval;
  detectedFace = detectedFaces[0];
- const { faceMatchState: faceMatchStateInLivenessOval, faceMatchPercentage: faceMatchPercentageInLivenessOval, } = getFaceMatchStateInLivenessOval(detectedFace, ovalDetails, initialFaceIntersection, serverSessionInformation);
+ const { faceMatchState: faceMatchStateInLivenessOval, faceMatchPercentage: faceMatchPercentageInLivenessOval, } = getFaceMatchStateInLivenessOval({
+ face: detectedFace,
+ ovalDetails: ovalDetails,
+ initialFaceIntersection,
+ sessionInformation: serverSessionInformation,
+ frameHeight: videoEl.videoHeight,
+ });
  faceMatchState = faceMatchStateInLivenessOval;
  faceMatchPercentage = faceMatchPercentageInLivenessOval;
  break;
@@ -2564,7 +2618,6 @@ const Hint = ({ hintDisplayText }) => {
  [FaceMatchState.CANT_IDENTIFY]: hintDisplayText.hintCanNotIdentifyText,
  [FaceMatchState.FACE_IDENTIFIED]: hintDisplayText.hintTooFarText,
  [FaceMatchState.TOO_MANY]: hintDisplayText.hintTooManyFacesText,
- [FaceMatchState.TOO_CLOSE]: hintDisplayText.hintTooCloseText,
  [FaceMatchState.TOO_FAR]: hintDisplayText.hintTooFarText,
  [FaceMatchState.MATCHED]: hintDisplayText.hintHoldFaceForFreshnessText,
  [FaceMatchState.OFF_CENTER]: hintDisplayText.hintFaceOffCenterText,
@@ -2610,13 +2663,11 @@ const Hint = ({ hintDisplayText }) => {
  return React__namespace.createElement(DefaultToast, { text: hintDisplayText.hintHoldFaceForFreshnessText });
  }
  if (isRecording && !isFlashingFreshness) {
- // During face matching, we want to only show the TOO_CLOSE or
- // TOO_FAR texts. If FaceMatchState matches TOO_CLOSE, we'll show
- // the TOO_CLOSE text, but for FACE_IDENTIFED, CANT_IDENTIFY, TOO_MANY
+ // During face matching, we want to only show the
+ // TOO_FAR texts. For FACE_IDENTIFIED, CANT_IDENTIFY, TOO_MANY
  // we are defaulting to the TOO_FAR text (for now).
  let resultHintString = FaceMatchStateStringMap[FaceMatchState.TOO_FAR];
- if (faceMatchState === FaceMatchState.TOO_CLOSE ||
- faceMatchState === FaceMatchState.MATCHED) {
+ if (faceMatchState === FaceMatchState.MATCHED) {
  resultHintString = FaceMatchStateStringMap[faceMatchState];
  }
  // If the face is outside the oval set the aria-label to a string about centering face in oval
@@ -2630,7 +2681,7 @@ const Hint = ({ hintDisplayText }) => {
  faceMatchPercentage > 50) {
  a11yHintString = hintDisplayText.hintMatchIndicatorText;
  }
- return (React__namespace.createElement(Toast, { size: "large", variation: faceMatchState === FaceMatchState.TOO_CLOSE ? 'error' : 'primary' },
+ return (React__namespace.createElement(Toast, { size: "large", variation: 'primary' },
  React__namespace.createElement(uiReact.VisuallyHidden, { "aria-live": "assertive" }, a11yHintString),
  React__namespace.createElement(uiReact.View, { "aria-label": a11yHintString }, resultHintString)));
  }
package/dist/types/components/FaceLivenessDetector/service/machine/index.d.ts CHANGED
@@ -1,5 +1 @@
- import { LivenessContext, LivenessEvent } from '../types';
- export declare const livenessMachine: import("xstate").StateMachine<LivenessContext, any, LivenessEvent, {
- value: any;
- context: LivenessContext;
- }, import("xstate").BaseActionObject, import("xstate").ServiceMap, import("xstate").ResolveTypegenMeta<import("xstate").TypegenDisabled, LivenessEvent, import("xstate").BaseActionObject, import("xstate").ServiceMap>>;
+ export { livenessMachine } from './machine';
package/dist/types/components/FaceLivenessDetector/service/machine/machine.d.ts ADDED
@@ -0,0 +1,5 @@
+ import { LivenessContext, LivenessEvent } from '../types';
+ export declare const livenessMachine: import("xstate").StateMachine<LivenessContext, any, LivenessEvent, {
+ value: any;
+ context: LivenessContext;
+ }, import("xstate").BaseActionObject, import("xstate").ServiceMap, import("xstate").ResolveTypegenMeta<import("xstate").TypegenDisabled, LivenessEvent, import("xstate").BaseActionObject, import("xstate").ServiceMap>>;
package/dist/types/components/FaceLivenessDetector/service/types/credentials.d.ts CHANGED
@@ -1,16 +1,15 @@
+ import { CredentialsAndIdentityId } from 'aws-amplify/auth';
+ export type AwsCredentials = CredentialsAndIdentityId['credentials'];
+ export interface IdentityProvider<IdentityT extends AwsCredentials> {
+ (identityProperties?: Record<string, any>): Promise<IdentityT>;
+ }
  /**
- * These types are copied over / adapted from the aws-sdk/types package as they do not semantic versioning and we do not want these changing unexpectedly.
- * When Amplify Auth exports these types this file should be removed and the type definitions should come from the Amplify auth package
+ * @deprecated `AwsTemporaryCredentials` has been replaced with `AwsCredentials`.
+ *
+ * The `AwsTemporaryCredentials` type may be removed in a future major version of _@aws-amplify/ui-react-liveness_.
  */
- export interface AwsCredentials {
- readonly accessKeyId: string;
- readonly secretAccessKey: string;
- }
  export interface AwsTemporaryCredentials extends AwsCredentials {
  readonly sessionToken?: string;
  readonly expiration?: Date;
  }
- export interface IdentityProvider<IdentityT extends AwsTemporaryCredentials> {
- (identityProperties?: Record<string, any>): Promise<IdentityT>;
- }
- export type AwsCredentialProvider = IdentityProvider<AwsTemporaryCredentials>;
+ export type AwsCredentialProvider = IdentityProvider<AwsCredentials>;
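
Since `AwsCredentials` is now derived from `aws-amplify/auth`'s `CredentialsAndIdentityId`, a credential provider is simply an async function resolving to that shape. A typing sketch; the root import path is an assumption and the values are placeholders:

    import type { AwsCredentialProvider } from '@aws-amplify/ui-react-liveness'; // assumed export path

    const provider: AwsCredentialProvider = async () => ({
      accessKeyId: 'AKIA-EXAMPLE',       // placeholder
      secretAccessKey: 'example-secret', // placeholder
      sessionToken: 'example-token',     // placeholder
    });
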
package/dist/types/components/FaceLivenessDetector/service/types/liveness.d.ts CHANGED
@@ -98,7 +98,6 @@ export declare enum IlluminationState {
  export declare enum FaceMatchState {
  MATCHED = "MATCHED",
  TOO_FAR = "TOO FAR",
- TOO_CLOSE = "TOO CLOSE",
  CANT_IDENTIFY = "CANNOT IDENTIFY",
  FACE_IDENTIFIED = "ONE FACE IDENTIFIED",
  TOO_MANY = "TOO MANY FACES",
package/dist/types/components/FaceLivenessDetector/service/types/machine.d.ts CHANGED
@@ -11,7 +11,6 @@ export interface FaceMatchAssociatedParams {
  currentDetectedFace?: Face;
  startFace?: Face;
  endFace?: Face;
- initialFaceMatchTime?: number;
  }
  export interface FreshnessColorAssociatedParams {
  freshnessColorEl?: HTMLCanvasElement;
package/dist/types/components/FaceLivenessDetector/service/utils/constants.d.ts CHANGED
@@ -1,6 +1,12 @@
  export declare const FACE_DISTANCE_THRESHOLD = 0.32;
  export declare const REDUCED_THRESHOLD = 0.4;
  export declare const REDUCED_THRESHOLD_MOBILE = 0.37;
+ export declare const PUPIL_DISTANCE_WEIGHT = 2;
+ export declare const FACE_HEIGHT_WEIGHT = 1.8;
+ export declare const FACE_MATCH_RANGE_MIN = 0;
+ export declare const FACE_MATCH_RANGE_MAX = 1;
+ export declare const FACE_MATCH_WEIGHT_MIN = 0.25;
+ export declare const FACE_MATCH_WEIGHT_MAX = 0.75;
  export declare const WS_CLOSURE_CODE: {
  SUCCESS_CODE: number;
  DEFAULT_ERROR_CODE: number;
package/dist/types/components/FaceLivenessDetector/service/utils/createStreamingClient/Signer.d.ts ADDED
@@ -0,0 +1,6 @@
+ import { SignatureV4 } from '@smithy/signature-v4';
+ import { HttpRequest as HttpRequest, RequestPresigningArguments } from '@smithy/types';
+ export declare const REQUEST_EXPIRY = 299;
+ export declare class Signer extends SignatureV4 {
+ presign(request: HttpRequest, options?: Omit<RequestPresigningArguments, 'expiresIn'>): Promise<HttpRequest>;
+ }
package/dist/types/components/FaceLivenessDetector/service/utils/createStreamingClient/createStreamingClient.d.ts ADDED
@@ -0,0 +1,9 @@
+ import { RekognitionStreamingClient } from '@aws-sdk/client-rekognitionstreaming';
+ import { AwsCredentialProvider } from '../../types';
+ interface CreateClientConfig {
+ credentialsProvider?: AwsCredentialProvider;
+ endpointOverride?: string;
+ region: string;
+ }
+ export declare function createStreamingClient({ credentialsProvider, endpointOverride, region, }: CreateClientConfig): Promise<RekognitionStreamingClient>;
+ export {};
package/dist/types/components/FaceLivenessDetector/service/utils/createStreamingClient/index.d.ts ADDED
@@ -0,0 +1 @@
+ export { createStreamingClient } from './createStreamingClient';