@aws-amplify/ui-react-liveness 3.0.14 → 3.0.16

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. package/dist/esm/components/FaceLivenessDetector/FaceLivenessDetectorCore.mjs +2 -6
  2. package/dist/esm/components/FaceLivenessDetector/LivenessCheck/LivenessCameraModule.mjs +2 -6
  3. package/dist/esm/components/FaceLivenessDetector/LivenessCheck/LivenessCheck.mjs +2 -6
  4. package/dist/esm/components/FaceLivenessDetector/service/machine/{index.mjs → machine.mjs} +24 -29
  5. package/dist/esm/components/FaceLivenessDetector/service/types/liveness.mjs +0 -1
  6. package/dist/esm/components/FaceLivenessDetector/service/utils/constants.mjs +10 -2
  7. package/dist/esm/components/FaceLivenessDetector/service/utils/{CustomWebSocketFetchHandler.mjs → createStreamingClient/CustomWebSocketFetchHandler.mjs} +1 -1
  8. package/dist/esm/components/FaceLivenessDetector/service/utils/createStreamingClient/Signer.mjs +18 -0
  9. package/dist/esm/components/FaceLivenessDetector/service/utils/createStreamingClient/createStreamingClient.mjs +27 -0
  10. package/dist/esm/components/FaceLivenessDetector/service/utils/createStreamingClient/resolveCredentials.mjs +38 -0
  11. package/dist/esm/components/FaceLivenessDetector/service/utils/getFaceMatchStateInLivenessOval.mjs +59 -0
  12. package/dist/esm/components/FaceLivenessDetector/service/utils/liveness.mjs +22 -74
  13. package/dist/esm/components/FaceLivenessDetector/service/utils/streamProvider.mjs +8 -30
  14. package/dist/esm/components/FaceLivenessDetector/shared/DefaultStartScreenComponents.mjs +2 -6
  15. package/dist/esm/components/FaceLivenessDetector/shared/FaceLivenessErrorModal.mjs +2 -6
  16. package/dist/esm/components/FaceLivenessDetector/shared/Hint.mjs +6 -13
  17. package/dist/esm/version.mjs +1 -1
  18. package/dist/index.js +186 -135
  19. package/dist/types/components/FaceLivenessDetector/service/machine/index.d.ts +1 -5
  20. package/dist/types/components/FaceLivenessDetector/service/machine/machine.d.ts +5 -0
  21. package/dist/types/components/FaceLivenessDetector/service/types/credentials.d.ts +9 -10
  22. package/dist/types/components/FaceLivenessDetector/service/types/liveness.d.ts +0 -1
  23. package/dist/types/components/FaceLivenessDetector/service/types/machine.d.ts +0 -1
  24. package/dist/types/components/FaceLivenessDetector/service/utils/constants.d.ts +6 -0
  25. package/dist/types/components/FaceLivenessDetector/service/utils/createStreamingClient/Signer.d.ts +6 -0
  26. package/dist/types/components/FaceLivenessDetector/service/utils/createStreamingClient/createStreamingClient.d.ts +9 -0
  27. package/dist/types/components/FaceLivenessDetector/service/utils/createStreamingClient/index.d.ts +1 -0
  28. package/dist/types/components/FaceLivenessDetector/service/utils/createStreamingClient/resolveCredentials.d.ts +10 -0
  29. package/dist/types/components/FaceLivenessDetector/service/utils/getFaceMatchStateInLivenessOval.d.ts +17 -0
  30. package/dist/types/components/FaceLivenessDetector/service/utils/index.d.ts +1 -0
  31. package/dist/types/components/FaceLivenessDetector/service/utils/liveness.d.ts +1 -8
  32. package/dist/types/version.d.ts +1 -1
  33. package/package.json +3 -2
  34. package/dist/types/components/FaceLivenessDetector/service/utils/{CustomWebSocketFetchHandler.d.ts → createStreamingClient/CustomWebSocketFetchHandler.d.ts} +0 -0
package/dist/esm/components/FaceLivenessDetector/FaceLivenessDetectorCore.mjs
@@ -1,18 +1,14 @@
  import * as React from 'react';
  import { useInterpret } from '@xstate/react';
- import { livenessMachine } from './service/machine/index.mjs';
+ import { livenessMachine } from './service/machine/machine.mjs';
  import './service/types/liveness.mjs';
  import '@tensorflow/tfjs-core';
  import '@tensorflow-models/face-detection';
  import '@tensorflow/tfjs-backend-wasm';
  import '@tensorflow/tfjs-backend-cpu';
  import '@aws-amplify/core/internals/utils';
- import 'aws-amplify/auth';
  import '@aws-sdk/client-rekognitionstreaming';
- import '@aws-sdk/util-format-url';
- import '@smithy/eventstream-serde-browser';
- import '@smithy/fetch-http-handler';
- import '@smithy/protocol-http';
+ import './service/utils/createStreamingClient/createStreamingClient.mjs';
  import './service/utils/freshnessColorDisplay.mjs';
  import { View, Flex } from '@aws-amplify/ui-react';
  import { FaceLivenessDetectorProvider } from './providers/FaceLivenessDetectorProvider.mjs';
package/dist/esm/components/FaceLivenessDetector/LivenessCheck/LivenessCameraModule.mjs
@@ -2,7 +2,7 @@ import React__default, { useRef, useState } from 'react';
  import { classNames } from '@aws-amplify/ui';
  import { Loader, View, Flex, Text, Label, SelectField, Button } from '@aws-amplify/ui-react';
  import { useColorMode } from '@aws-amplify/ui-react/internal';
- import '../service/machine/index.mjs';
+ import '../service/machine/machine.mjs';
  import { FaceMatchState } from '../service/types/liveness.mjs';
  import '@tensorflow/tfjs-core';
  import '@tensorflow-models/face-detection';
@@ -10,12 +10,8 @@ import '@tensorflow/tfjs-backend-wasm';
  import '@tensorflow/tfjs-backend-cpu';
  import '@aws-amplify/core/internals/utils';
  import { drawStaticOval, clearOvalCanvas } from '../service/utils/liveness.mjs';
- import 'aws-amplify/auth';
  import '@aws-sdk/client-rekognitionstreaming';
- import '@aws-sdk/util-format-url';
- import '@smithy/eventstream-serde-browser';
- import '@smithy/fetch-http-handler';
- import '@smithy/protocol-http';
+ import '../service/utils/createStreamingClient/createStreamingClient.mjs';
  import '../service/utils/freshnessColorDisplay.mjs';
  import { useLivenessActor } from '../hooks/useLivenessActor.mjs';
  import { useLivenessSelector, createLivenessSelector } from '../hooks/useLivenessSelector.mjs';
package/dist/esm/components/FaceLivenessDetector/LivenessCheck/LivenessCheck.mjs
@@ -1,6 +1,6 @@
  import * as React from 'react';
  import { Flex, Text, Button, View } from '@aws-amplify/ui-react';
- import '../service/machine/index.mjs';
+ import '../service/machine/machine.mjs';
  import '../service/types/liveness.mjs';
  import { LivenessErrorState } from '../service/types/error.mjs';
  import '@tensorflow/tfjs-core';
@@ -8,12 +8,8 @@ import '@tensorflow-models/face-detection';
  import '@tensorflow/tfjs-backend-wasm';
  import '@tensorflow/tfjs-backend-cpu';
  import '@aws-amplify/core/internals/utils';
- import 'aws-amplify/auth';
  import '@aws-sdk/client-rekognitionstreaming';
- import '@aws-sdk/util-format-url';
- import '@smithy/eventstream-serde-browser';
- import '@smithy/fetch-http-handler';
- import '@smithy/protocol-http';
+ import '../service/utils/createStreamingClient/createStreamingClient.mjs';
  import '../service/utils/freshnessColorDisplay.mjs';
  import { LivenessCameraModule } from './LivenessCameraModule.mjs';
  import { useLivenessActor } from '../hooks/useLivenessActor.mjs';
package/dist/esm/components/FaceLivenessDetector/service/machine/{index.mjs → machine.mjs}
@@ -1,9 +1,10 @@
  import { nanoid } from 'nanoid';
  import { createMachine, assign, spawn, actions } from 'xstate';
- import { drawStaticOval, getBoundingBox, getColorsSequencesFromSessionInformation, isCameraDeviceVirtual, getFaceMatchState, isFaceDistanceBelowThreshold, estimateIllumination, getOvalDetailsFromSessionInformation, generateBboxFromLandmarks, drawLivenessOvalInCanvas, getOvalBoundingBox, getIntersectionOverUnion, getFaceMatchStateInLivenessOval, getStaticLivenessOvalDetails } from '../utils/liveness.mjs';
+ import { drawStaticOval, getBoundingBox, getColorsSequencesFromSessionInformation, isCameraDeviceVirtual, getFaceMatchState, isFaceDistanceBelowThreshold, estimateIllumination, getOvalDetailsFromSessionInformation, generateBboxFromLandmarks, drawLivenessOvalInCanvas, getOvalBoundingBox, getIntersectionOverUnion, getStaticLivenessOvalDetails } from '../utils/liveness.mjs';
  import { FaceMatchState } from '../types/liveness.mjs';
  import { LivenessErrorState } from '../types/error.mjs';
  import { BlazeFaceFaceDetection } from '../utils/blazefaceFaceDetection.mjs';
+ import { getFaceMatchStateInLivenessOval } from '../utils/getFaceMatchStateInLivenessOval.mjs';
  import { LivenessStreamProvider } from '../utils/streamProvider.mjs';
  import { FreshnessColorDisplay } from '../utils/freshnessColorDisplay.mjs';
  import { isServerSesssionInformationEvent, isDisconnectionEvent, isValidationExceptionEvent, isInternalServerExceptionEvent, isThrottlingExceptionEvent, isServiceQuotaExceededExceptionEvent, isInvalidSignatureRegionException } from '../utils/eventUtils.mjs';
@@ -12,7 +13,6 @@ import { WS_CLOSURE_CODE } from '../utils/constants.mjs';

  const CAMERA_ID_KEY = 'AmplifyLivenessCameraId';
  const DEFAULT_FACE_FIT_TIMEOUT = 7000;
- const MIN_FACE_MATCH_TIME = 1000;
  let responseStream;
  const responseStreamActor = async (callback) => {
  try {
@@ -102,7 +102,6 @@ const livenessMachine = createMachine({
  currentDetectedFace: undefined,
  startFace: undefined,
  endFace: undefined,
- initialFaceMatchTime: undefined,
  },
  freshnessColorAssociatedParams: {
  freshnessColorEl: undefined,
@@ -287,6 +286,8 @@ const livenessMachine = createMachine({
  100: { target: 'checkRecordingStarted' },
  },
  },
+ // Evaluates face match and moves to checkMatch
+ // which continually checks for match until either timeout or face match
  ovalMatching: {
  entry: 'cancelOvalDrawingTimeout',
  invoke: {
@@ -297,29 +298,32 @@
  },
  },
  },
+ // If `hasFaceMatchedInOval` is true, then move to `delayBeforeFlash`, which pauses
+ // for one second to show "Hold still" text before moving to `flashFreshnessColors`.
+ // If not, move back to ovalMatching and re-evaluate match state
  checkMatch: {
  after: {
  0: {
- target: 'flashFreshnessColors',
- cond: 'hasFaceMatchedInOvalWithMinTime',
+ target: 'delayBeforeFlash',
+ cond: 'hasFaceMatchedInOval',
  actions: [
+ 'setFaceMatchTimeAndStartFace',
  'updateEndFaceMatch',
  'setupFlashFreshnessColors',
  'cancelOvalMatchTimeout',
  'cancelOvalDrawingTimeout',
  ],
  },
- 0.1: {
- target: 'ovalMatching',
- cond: 'hasFaceMatchedInOval',
- actions: 'setFaceMatchTimeAndStartFace',
- },
  1: {
  target: 'ovalMatching',
- cond: 'hasNotFaceMatchedInOval',
  },
  },
  },
+ delayBeforeFlash: {
+ after: {
+ 1000: 'flashFreshnessColors',
+ },
+ },
  flashFreshnessColors: {
  invoke: {
  src: 'flashColors',
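
Note: the `checkMatch` rewrite above drops the old minimum-match timer (`hasFaceMatchedInOvalWithMinTime` and `MIN_FACE_MATCH_TIME`) and instead routes a successful match through a fixed one-second `delayBeforeFlash` state. A minimal runnable sketch of the same transition pattern, in xstate v4 style; the machine shape and context below are illustrative assumptions, not this package's full machine:

import { createMachine, interpret } from 'xstate';

// Illustrative reduction of the diffed states: `checkMatch` advances to
// `delayBeforeFlash` when the guard passes, otherwise it falls back to
// `ovalMatching` one tick later and the loop repeats.
const sketch = createMachine(
  {
    id: 'ovalMatchSketch',
    initial: 'checkMatch',
    context: { faceMatchState: 'MATCHED' }, // assumed context shape
    states: {
      ovalMatching: { after: { 0: 'checkMatch' } },
      checkMatch: {
        after: {
          0: { target: 'delayBeforeFlash', cond: 'hasFaceMatchedInOval' },
          1: { target: 'ovalMatching' },
        },
      },
      // pauses for one second ("Hold still") before flashing colors
      delayBeforeFlash: { after: { 1000: 'flashFreshnessColors' } },
      flashFreshnessColors: { type: 'final' },
    },
  },
  {
    guards: {
      hasFaceMatchedInOval: (ctx) => ctx.faceMatchState === 'MATCHED',
    },
  }
);

interpret(sketch)
  .onTransition((state) => console.log(state.value))
  .start(); // checkMatch → delayBeforeFlash → flashFreshnessColors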
@@ -584,10 +588,6 @@ const livenessMachine = createMachine({
  startFace: context.faceMatchAssociatedParams.startFace === undefined
  ? context.faceMatchAssociatedParams.currentDetectedFace
  : context.faceMatchAssociatedParams.startFace,
- initialFaceMatchTime: context.faceMatchAssociatedParams.initialFaceMatchTime ===
- undefined
- ? Date.now()
- : context.faceMatchAssociatedParams.initialFaceMatchTime,
  };
  },
  }),
@@ -759,21 +759,10 @@ const livenessMachine = createMachine({
  },
  guards: {
  shouldTimeoutOnFailedAttempts: (context) => context.failedAttempts >= context.maxFailedAttempts,
- hasFaceMatchedInOvalWithMinTime: (context) => {
- const { faceMatchState, initialFaceMatchTime } = context.faceMatchAssociatedParams;
- const timeSinceInitialFaceMatch = Date.now() - initialFaceMatchTime;
- const hasMatched = faceMatchState === FaceMatchState.MATCHED &&
- timeSinceInitialFaceMatch >= MIN_FACE_MATCH_TIME;
- return hasMatched;
- },
  hasFaceMatchedInOval: (context) => {
  return (context.faceMatchAssociatedParams.faceMatchState ===
  FaceMatchState.MATCHED);
  },
- hasNotFaceMatchedInOval: (context) => {
- return (context.faceMatchAssociatedParams.faceMatchState !==
- FaceMatchState.MATCHED);
- },
  hasSingleFace: (context) => {
  return (context.faceMatchAssociatedParams.faceMatchState ===
  FaceMatchState.FACE_IDENTIFIED);
@@ -990,7 +979,7 @@ const livenessMachine = createMachine({
  videoWidth: videoEl.width,
  });
  // renormalize initial face
- const renormalizedFace = generateBboxFromLandmarks(initialFace, ovalDetails);
+ const renormalizedFace = generateBboxFromLandmarks(initialFace, ovalDetails, videoEl.videoHeight);
  initialFace.top = renormalizedFace.top;
  initialFace.left = renormalizedFace.left;
  initialFace.height = renormalizedFace.bottom - renormalizedFace.top;
@@ -1019,7 +1008,7 @@ const livenessMachine = createMachine({
  let faceMatchPercentage = 0;
  let detectedFace;
  let illuminationState;
- const initialFaceBoundingBox = generateBboxFromLandmarks(initialFace, ovalDetails);
+ const initialFaceBoundingBox = generateBboxFromLandmarks(initialFace, ovalDetails, videoEl.videoHeight);
  const { ovalBoundingBox } = getOvalBoundingBox(ovalDetails);
  const initialFaceIntersection = getIntersectionOverUnion(initialFaceBoundingBox, ovalBoundingBox);
  switch (detectedFaces.length) {
@@ -1032,7 +1021,13 @@ const livenessMachine = createMachine({
  case 1: {
  //exactly one face detected, match face with oval;
  detectedFace = detectedFaces[0];
- const { faceMatchState: faceMatchStateInLivenessOval, faceMatchPercentage: faceMatchPercentageInLivenessOval, } = getFaceMatchStateInLivenessOval(detectedFace, ovalDetails, initialFaceIntersection, serverSessionInformation);
+ const { faceMatchState: faceMatchStateInLivenessOval, faceMatchPercentage: faceMatchPercentageInLivenessOval, } = getFaceMatchStateInLivenessOval({
+ face: detectedFace,
+ ovalDetails: ovalDetails,
+ initialFaceIntersection,
+ sessionInformation: serverSessionInformation,
+ frameHeight: videoEl.videoHeight,
+ });
  faceMatchState = faceMatchStateInLivenessOval;
  faceMatchPercentage = faceMatchPercentageInLivenessOval;
  break;
package/dist/esm/components/FaceLivenessDetector/service/types/liveness.mjs
@@ -14,7 +14,6 @@ var FaceMatchState;
  (function (FaceMatchState) {
  FaceMatchState["MATCHED"] = "MATCHED";
  FaceMatchState["TOO_FAR"] = "TOO FAR";
- FaceMatchState["TOO_CLOSE"] = "TOO CLOSE";
  FaceMatchState["CANT_IDENTIFY"] = "CANNOT IDENTIFY";
  FaceMatchState["FACE_IDENTIFIED"] = "ONE FACE IDENTIFIED";
  FaceMatchState["TOO_MANY"] = "TOO MANY FACES";
package/dist/esm/components/FaceLivenessDetector/service/utils/constants.mjs
@@ -1,11 +1,19 @@
  // Face distance is calculated as pupilDistance / ovalWidth.
  // The further away you are from the camera the distance between your pupils will decrease, thus lowering the threshold values.
- // These FACE_DISTNACE_THRESHOLD values are determined by the science team and should only be changed with their approval.
+ // These FACE_DISTANCE_THRESHOLD values are determined by the science team and should only be changed with their approval.
  // We want to ensure at the start of a check that the user's pupilDistance/ovalWidth is below FACE_DISTANCE_THRESHOLD to ensure that they are starting
  // a certain distance away from the camera.
  const FACE_DISTANCE_THRESHOLD = 0.32;
  const REDUCED_THRESHOLD = 0.4;
  const REDUCED_THRESHOLD_MOBILE = 0.37;
+ // Constants from science team to determine ocular distance (space between eyes)
+ const PUPIL_DISTANCE_WEIGHT = 2.0;
+ const FACE_HEIGHT_WEIGHT = 1.8;
+ // Constants from science team to find face match percentage
+ const FACE_MATCH_RANGE_MIN = 0;
+ const FACE_MATCH_RANGE_MAX = 1;
+ const FACE_MATCH_WEIGHT_MIN = 0.25;
+ const FACE_MATCH_WEIGHT_MAX = 0.75;
  const WS_CLOSURE_CODE = {
  SUCCESS_CODE: 1000,
  DEFAULT_ERROR_CODE: 4000,
@@ -15,4 +23,4 @@ const WS_CLOSURE_CODE = {
  USER_ERROR_DURING_CONNECTION: 4007,
  };

- export { FACE_DISTANCE_THRESHOLD, REDUCED_THRESHOLD, REDUCED_THRESHOLD_MOBILE, WS_CLOSURE_CODE };
+ export { FACE_DISTANCE_THRESHOLD, FACE_HEIGHT_WEIGHT, FACE_MATCH_RANGE_MAX, FACE_MATCH_RANGE_MIN, FACE_MATCH_WEIGHT_MAX, FACE_MATCH_WEIGHT_MIN, PUPIL_DISTANCE_WEIGHT, REDUCED_THRESHOLD, REDUCED_THRESHOLD_MOBILE, WS_CLOSURE_CODE };
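
Note: the four FACE_MATCH_* constants replace the inline 0.75/0.25/1/0 literals in the match-percentage formula p = max(min(1, 0.75·(si − s0)/(st − s0) + 0.25), 0) used by getFaceMatchStateInLivenessOval below. A runnable restatement; the helper name and sample values are illustrative:

// si = current face/oval IoU, s0 = initial IoU, st = the server's
// OvalIouThreshold; all three are values in [0, 1].
const FACE_MATCH_RANGE_MIN = 0;
const FACE_MATCH_RANGE_MAX = 1;
const FACE_MATCH_WEIGHT_MIN = 0.25;
const FACE_MATCH_WEIGHT_MAX = 0.75;

function faceMatchPercentage(intersection, initialFaceIntersection, intersectionThreshold) {
  const scaled =
    (FACE_MATCH_WEIGHT_MAX * (intersection - initialFaceIntersection)) /
      (intersectionThreshold - initialFaceIntersection) +
    FACE_MATCH_WEIGHT_MIN;
  // clamp to [0, 1] before converting to a percentage
  return Math.max(Math.min(FACE_MATCH_RANGE_MAX, scaled), FACE_MATCH_RANGE_MIN) * 100;
}

console.log(faceMatchPercentage(0.5, 0.1, 0.7)); // 75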
package/dist/esm/components/FaceLivenessDetector/service/utils/{CustomWebSocketFetchHandler.mjs → createStreamingClient/CustomWebSocketFetchHandler.mjs}
@@ -2,7 +2,7 @@ import { formatUrl } from '@aws-sdk/util-format-url';
  import { readableStreamtoIterable, iterableToReadableStream } from '@smithy/eventstream-serde-browser';
  import { FetchHttpHandler } from '@smithy/fetch-http-handler';
  import { HttpResponse } from '@smithy/protocol-http';
- import { WS_CLOSURE_CODE } from './constants.mjs';
+ import { WS_CLOSURE_CODE } from '../constants.mjs';

  /**
  * Note: This file was copied from https://github.com/aws/aws-sdk-js-v3/blob/main/packages/middleware-websocket/src/websocket-fetch-handler.ts#L176
package/dist/esm/components/FaceLivenessDetector/service/utils/createStreamingClient/Signer.mjs
@@ -0,0 +1,18 @@
+ import { SignatureV4 } from '@smithy/signature-v4';
+
+ // override aws sdk default value of 60
+ const REQUEST_EXPIRY = 299;
+ class Signer extends SignatureV4 {
+ presign(request, options) {
+ return super.presign(request, {
+ ...options,
+ expiresIn: REQUEST_EXPIRY,
+ // `headers` that should not be signed. Liveness WebSocket
+ // request omits `headers` except for required `host` header. Signature
+ // could be a mismatch if other `headers` are signed
+ unsignableHeaders: new Set(Object.keys(request.headers).filter((header) => header !== 'host')),
+ });
+ }
+ }
+
+ export { REQUEST_EXPIRY, Signer };
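
Note: in isolation, this subclass pins every presigned Liveness WebSocket URL to a 299-second expiry and signs only the `host` header, regardless of the options the caller passes. A hedged sketch of calling it directly; the constructor options are the standard @smithy/signature-v4 ones, and the credentials, hostname, and path below are placeholders rather than values taken from this package:

import { Sha256 } from '@aws-crypto/sha256-js';
import { HttpRequest } from '@smithy/protocol-http';
import { Signer } from './Signer.mjs';

const signer = new Signer({
  credentials: { accessKeyId: 'AKID', secretAccessKey: 'SECRET' }, // placeholder
  region: 'us-east-1',
  service: 'rekognition',
  sha256: Sha256,
});

const request = new HttpRequest({
  protocol: 'wss:',
  hostname: 'streaming-rekognition.us-east-1.amazonaws.com', // placeholder endpoint
  path: '/start-face-liveness-session-websocket', // placeholder path
  headers: { host: 'streaming-rekognition.us-east-1.amazonaws.com' },
});

// Signed with expiresIn: 299 and every header except `host` left unsigned,
// even though the caller asked for 60 seconds.
const presigned = await signer.presign(request, { expiresIn: 60 });
console.log(presigned.query);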
package/dist/esm/components/FaceLivenessDetector/service/utils/createStreamingClient/createStreamingClient.mjs
@@ -0,0 +1,27 @@
+ import { RekognitionStreamingClient } from '@aws-sdk/client-rekognitionstreaming';
+ import { getAmplifyUserAgent } from '@aws-amplify/core/internals/utils';
+ import { getLivenessUserAgent } from '../../../utils/platform.mjs';
+ import { CustomWebSocketFetchHandler } from './CustomWebSocketFetchHandler.mjs';
+ import { resolveCredentials } from './resolveCredentials.mjs';
+ import { Signer } from './Signer.mjs';
+
+ const CONNECTION_TIMEOUT = 10000;
+ const CUSTOM_USER_AGENT = `${getAmplifyUserAgent()} ${getLivenessUserAgent()}`;
+ async function createStreamingClient({ credentialsProvider, endpointOverride, region, }) {
+ const credentials = await resolveCredentials(credentialsProvider);
+ const clientconfig = {
+ credentials,
+ customUserAgent: CUSTOM_USER_AGENT,
+ region,
+ requestHandler: new CustomWebSocketFetchHandler({
+ connectionTimeout: CONNECTION_TIMEOUT,
+ }),
+ signerConstructor: Signer,
+ };
+ if (endpointOverride) {
+ clientconfig.endpointProvider = () => ({ url: new URL(endpointOverride) });
+ }
+ return new RekognitionStreamingClient(clientconfig);
+ }
+
+ export { createStreamingClient };
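
Note: usage is a single awaited factory call. A hedged sketch; the region and endpoint are placeholders:

import { createStreamingClient } from './createStreamingClient.mjs';

// Omitting credentialsProvider makes resolveCredentials fall back to
// fetchAuthSession(), so Amplify must be configured in that case.
const client = await createStreamingClient({
  region: 'us-east-1', // placeholder
  // endpointOverride: 'wss://developer-endpoint.example.com', // optional, e.g. for testing
});

// The return value is a regular RekognitionStreamingClient; streamProvider.mjs
// sends it a StartFaceLivenessSessionCommand over the custom WebSocket handler.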
package/dist/esm/components/FaceLivenessDetector/service/utils/createStreamingClient/resolveCredentials.mjs
@@ -0,0 +1,38 @@
+ import { fetchAuthSession } from 'aws-amplify/auth';
+
+ const isCredentialsProvider = (credentialsProvider) => typeof credentialsProvider === 'function';
+ // the return interface of `fetchAuthSession` includes `credentials` as
+ // optional, but `credentials` is always returned. If `fetchAuthSession`
+ // is called for an unauthenticated end user, values of `accessKeyId`
+ // and `secretAccessKey` are `undefined`
+ const isCredentials = (credentials) => !!(credentials?.accessKeyId && credentials?.secretAccessKey);
+ /**
+ * Resolves the `credentials` param to be passed to `RekognitionStreamingClient` which accepts either:
+ * - a `credentials` object
+ * - a `credentialsProvider` callback
+ *
+ * @param credentialsProvider optional `credentialsProvider` callback
+ * @returns {Promise<AwsCredentials | AwsCredentialProvider>} `credentials` object or valid `credentialsProvider` callback
+ */
+ async function resolveCredentials(credentialsProvider) {
+ const hasCredentialsProvider = isCredentialsProvider(credentialsProvider);
+ if (hasCredentialsProvider) {
+ return credentialsProvider;
+ }
+ if (credentialsProvider && !hasCredentialsProvider) {
+ throw new Error('Invalid credentialsProvider');
+ }
+ try {
+ const result = (await fetchAuthSession()).credentials;
+ if (isCredentials(result)) {
+ return result;
+ }
+ throw new Error('Missing credentials');
+ }
+ catch (e) {
+ const { message } = e;
+ throw new Error(`Invalid credentials: ${message}`);
+ }
+ }
+
+ export { resolveCredentials };
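
Note: the three resolution paths, as a short sketch (the inline credential values are placeholders):

import { resolveCredentials } from './resolveCredentials.mjs';

// 1. A function is treated as a credentials provider and returned unchanged.
const provider = async () => ({
  accessKeyId: 'AKID', // placeholder values
  secretAccessKey: 'SECRET',
});
const passedThrough = await resolveCredentials(provider); // === provider

// 2. Any truthy non-function argument is rejected.
// await resolveCredentials({ accessKeyId: 'AKID' }); // throws 'Invalid credentialsProvider'

// 3. With no argument, credentials come from fetchAuthSession(); this requires
//    Amplify to be configured, and an unauthenticated session (undefined
//    accessKeyId/secretAccessKey) surfaces as
//    'Invalid credentials: Missing credentials'.
const amplifyCredentials = await resolveCredentials();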
package/dist/esm/components/FaceLivenessDetector/service/utils/getFaceMatchStateInLivenessOval.mjs
@@ -0,0 +1,59 @@
+ import { FaceMatchState } from '../types/liveness.mjs';
+ import { generateBboxFromLandmarks, getOvalBoundingBox, getIntersectionOverUnion } from './liveness.mjs';
+ import { FACE_MATCH_RANGE_MAX, FACE_MATCH_WEIGHT_MAX, FACE_MATCH_WEIGHT_MIN, FACE_MATCH_RANGE_MIN } from './constants.mjs';
+
+ /**
+ * Returns the state of the provided face with respect to the provided liveness oval.
+ */
+ function getFaceMatchStateInLivenessOval({ face, ovalDetails, initialFaceIntersection, sessionInformation, frameHeight, }) {
+ let faceMatchState;
+ const challengeConfig = sessionInformation?.Challenge?.FaceMovementAndLightChallenge
+ ?.ChallengeConfig;
+ if (!challengeConfig ||
+ !challengeConfig.OvalIouThreshold ||
+ !challengeConfig.OvalIouHeightThreshold ||
+ !challengeConfig.OvalIouWidthThreshold ||
+ !challengeConfig.FaceIouHeightThreshold ||
+ !challengeConfig.FaceIouWidthThreshold) {
+ throw new Error('Challenge information not returned from session information.');
+ }
+ const { OvalIouThreshold, FaceIouHeightThreshold, FaceIouWidthThreshold } = challengeConfig;
+ const faceBoundingBox = generateBboxFromLandmarks(face, ovalDetails, frameHeight);
+ const minFaceX = faceBoundingBox.left;
+ const maxFaceX = faceBoundingBox.right;
+ const minFaceY = faceBoundingBox.top;
+ const maxFaceY = faceBoundingBox.bottom;
+ const { ovalBoundingBox, minOvalX, minOvalY, maxOvalX, maxOvalY } = getOvalBoundingBox(ovalDetails);
+ const intersection = getIntersectionOverUnion(faceBoundingBox, ovalBoundingBox);
+ const intersectionThreshold = OvalIouThreshold;
+ const faceDetectionWidthThreshold = ovalDetails.width * FaceIouWidthThreshold;
+ const faceDetectionHeightThreshold = ovalDetails.height * FaceIouHeightThreshold;
+ /** From Science
+ * p=max(min(1,0.75∗(si−s0)/(st−s0)+0.25)),0)
+ */
+ const faceMatchPercentage = Math.max(Math.min(FACE_MATCH_RANGE_MAX, (FACE_MATCH_WEIGHT_MAX * (intersection - initialFaceIntersection)) /
+ (intersectionThreshold - initialFaceIntersection) +
+ FACE_MATCH_WEIGHT_MIN), FACE_MATCH_RANGE_MIN) * 100;
+ const isFaceOutsideOvalToTheLeft = minOvalX > minFaceX && maxOvalX > maxFaceX;
+ const isFaceOutsideOvalToTheRight = minFaceX > minOvalX && maxFaceX > maxOvalX;
+ const isFaceMatched = intersection > intersectionThreshold;
+ const isFaceMatchedClosely = minOvalY - minFaceY > faceDetectionHeightThreshold ||
+ maxFaceY - maxOvalY > faceDetectionHeightThreshold ||
+ (minOvalX - minFaceX > faceDetectionWidthThreshold &&
+ maxFaceX - maxOvalX > faceDetectionWidthThreshold);
+ if (isFaceMatched) {
+ faceMatchState = FaceMatchState.MATCHED;
+ }
+ else if (isFaceOutsideOvalToTheLeft || isFaceOutsideOvalToTheRight) {
+ faceMatchState = FaceMatchState.OFF_CENTER;
+ }
+ else if (isFaceMatchedClosely) {
+ faceMatchState = FaceMatchState.MATCHED;
+ }
+ else {
+ faceMatchState = FaceMatchState.TOO_FAR;
+ }
+ return { faceMatchState, faceMatchPercentage };
+ }
+
+ export { getFaceMatchStateInLivenessOval };
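
Note: the extracted helper now takes a single options object and threads `frameHeight` through to `generateBboxFromLandmarks`. A hedged call sketch; every value below is made up for illustration, and real inputs come from the face detector, the computed oval, and the server session information:

import { getFaceMatchStateInLivenessOval } from './getFaceMatchStateInLivenessOval.mjs';

// Landmark coordinates are in pixels; shapes follow the fields the
// function body and generateBboxFromLandmarks actually read.
const face = {
  leftEye: [230, 190],
  rightEye: [170, 190],
  mouth: [200, 260],
  nose: [200, 220],
  leftEar: [250, 200],
  rightEar: [150, 200],
};
const ovalDetails = { centerX: 200, centerY: 240, width: 180, height: 290 };
const sessionInformation = {
  Challenge: {
    FaceMovementAndLightChallenge: {
      ChallengeConfig: {
        OvalIouThreshold: 0.7,
        OvalIouHeightThreshold: 0.25,
        OvalIouWidthThreshold: 0.25,
        FaceIouHeightThreshold: 0.15,
        FaceIouWidthThreshold: 0.15,
      },
    },
  },
};

const { faceMatchState, faceMatchPercentage } = getFaceMatchStateInLivenessOval({
  face,
  ovalDetails,
  initialFaceIntersection: 0.3, // IoU of the starting face with the oval
  sessionInformation,
  frameHeight: 480, // new parameter: clamps the generated bounding box
});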
package/dist/esm/components/FaceLivenessDetector/service/utils/liveness.mjs
@@ -1,6 +1,6 @@
- import { FaceMatchState, IlluminationState } from '../types/liveness.mjs';
+ import { IlluminationState, FaceMatchState } from '../types/liveness.mjs';
  import { LivenessErrorState } from '../types/error.mjs';
- import { FACE_DISTANCE_THRESHOLD, REDUCED_THRESHOLD_MOBILE, REDUCED_THRESHOLD } from './constants.mjs';
+ import { PUPIL_DISTANCE_WEIGHT, FACE_HEIGHT_WEIGHT, FACE_DISTANCE_THRESHOLD, REDUCED_THRESHOLD_MOBILE, REDUCED_THRESHOLD } from './constants.mjs';

  /**
  * Returns the random number between min and max
@@ -180,87 +180,33 @@ function getPupilDistanceAndFaceHeight(face) {
  const faceHeight = Math.sqrt((eyeCenter[0] - mouth[0]) ** 2 + (eyeCenter[1] - mouth[1]) ** 2);
  return { pupilDistance, faceHeight };
  }
- function generateBboxFromLandmarks(face, oval) {
- const { leftEye, rightEye, nose, leftEar, rightEar, top: faceTop, height: faceHeight, } = face;
+ function generateBboxFromLandmarks(face, oval, frameHeight) {
+ const { leftEye, rightEye, nose, leftEar, rightEar } = face;
  const { height: ovalHeight, centerY } = oval;
  const ovalTop = centerY - ovalHeight / 2;
  const eyeCenter = [];
  eyeCenter[0] = (leftEye[0] + rightEye[0]) / 2;
  eyeCenter[1] = (leftEye[1] + rightEye[1]) / 2;
  const { pupilDistance: pd, faceHeight: fh } = getPupilDistanceAndFaceHeight(face);
- const alpha = 2.0, gamma = 1.8;
- const ow = (alpha * pd + gamma * fh) / 2;
- const oh = 1.618 * ow;
- let cx;
+ const ocularWidth = (PUPIL_DISTANCE_WEIGHT * pd + FACE_HEIGHT_WEIGHT * fh) / 2;
+ let centerFaceX, centerFaceY;
  if (eyeCenter[1] <= (ovalTop + ovalHeight) / 2) {
- cx = (eyeCenter[0] + nose[0]) / 2;
+ centerFaceX = (eyeCenter[0] + nose[0]) / 2;
+ centerFaceY = (eyeCenter[1] + nose[1]) / 2;
  }
  else {
- cx = eyeCenter[0];
+ // when face tilts down
+ centerFaceX = eyeCenter[0];
+ centerFaceY = eyeCenter[1];
  }
- const bottom = faceTop + faceHeight;
- const top = bottom - oh;
- const left = Math.min(cx - ow / 2, rightEar[0]);
- const right = Math.max(cx + ow / 2, leftEar[0]);
+ const faceWidth = ocularWidth;
+ const faceHeight = 1.68 * faceWidth;
+ const top = Math.max(centerFaceY - faceHeight / 2, 0);
+ const bottom = Math.min(centerFaceY + faceHeight / 2, frameHeight);
+ const left = Math.min(centerFaceX - ocularWidth / 2, rightEar[0]);
+ const right = Math.max(centerFaceX + ocularWidth / 2, leftEar[0]);
  return { bottom, left, right, top };
  }
- /**
- * Returns the state of the provided face with respect to the provided liveness oval.
- */
- // eslint-disable-next-line max-params
- function getFaceMatchStateInLivenessOval(face, ovalDetails, initialFaceIntersection, sessionInformation) {
- let faceMatchState;
- const challengeConfig = sessionInformation?.Challenge?.FaceMovementAndLightChallenge
- ?.ChallengeConfig;
- if (!challengeConfig ||
- !challengeConfig.OvalIouThreshold ||
- !challengeConfig.OvalIouHeightThreshold ||
- !challengeConfig.OvalIouWidthThreshold ||
- !challengeConfig.FaceIouHeightThreshold ||
- !challengeConfig.FaceIouWidthThreshold) {
- throw new Error('Challenge information not returned from session information.');
- }
- const { OvalIouThreshold, OvalIouHeightThreshold, OvalIouWidthThreshold, FaceIouHeightThreshold, FaceIouWidthThreshold, } = challengeConfig;
- const faceBoundingBox = generateBboxFromLandmarks(face, ovalDetails);
- const minFaceX = faceBoundingBox.left;
- const maxFaceX = faceBoundingBox.right;
- const minFaceY = faceBoundingBox.top;
- const maxFaceY = faceBoundingBox.bottom;
- const { ovalBoundingBox, minOvalX, minOvalY, maxOvalX, maxOvalY } = getOvalBoundingBox(ovalDetails);
- const intersection = getIntersectionOverUnion(faceBoundingBox, ovalBoundingBox);
- const intersectionThreshold = OvalIouThreshold;
- const ovalMatchWidthThreshold = ovalDetails.width * OvalIouWidthThreshold;
- const ovalMatchHeightThreshold = ovalDetails.height * OvalIouHeightThreshold;
- const faceDetectionWidthThreshold = ovalDetails.width * FaceIouWidthThreshold;
- const faceDetectionHeightThreshold = ovalDetails.height * FaceIouHeightThreshold;
- /** From Science
- * p=max(min(1,0.75∗(si−s0)/(st−s0)+0.25)),0)
- */
- const faceMatchPercentage = Math.max(Math.min(1, (0.75 * (intersection - initialFaceIntersection)) /
- (intersectionThreshold - initialFaceIntersection) +
- 0.25), 0) * 100;
- const faceIsOutsideOvalToTheLeft = minOvalX > minFaceX && maxOvalX > maxFaceX;
- const faceIsOutsideOvalToTheRight = minFaceX > minOvalX && maxFaceX > maxOvalX;
- if (intersection > intersectionThreshold &&
- Math.abs(minOvalX - minFaceX) < ovalMatchWidthThreshold &&
- Math.abs(maxOvalX - maxFaceX) < ovalMatchWidthThreshold &&
- Math.abs(maxOvalY - maxFaceY) < ovalMatchHeightThreshold) {
- faceMatchState = FaceMatchState.MATCHED;
- }
- else if (faceIsOutsideOvalToTheLeft || faceIsOutsideOvalToTheRight) {
- faceMatchState = FaceMatchState.OFF_CENTER;
- }
- else if (minOvalY - minFaceY > faceDetectionHeightThreshold ||
- maxFaceY - maxOvalY > faceDetectionHeightThreshold ||
- (minOvalX - minFaceX > faceDetectionWidthThreshold &&
- maxFaceX - maxOvalX > faceDetectionWidthThreshold)) {
- faceMatchState = FaceMatchState.TOO_CLOSE;
- }
- else {
- faceMatchState = FaceMatchState.TOO_FAR;
- }
- return { faceMatchState, faceMatchPercentage };
- }
  /**
  * Returns the illumination state in the provided video frame.
  */
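
Note: the rewritten `generateBboxFromLandmarks` centers the box on the landmarks instead of the detector's top/height, and the new `frameHeight` argument keeps the box inside the frame. A worked numeric restatement of the top/bottom computation; all pixel values are illustrative:

// Restates the geometry from the rewritten function.
const PUPIL_DISTANCE_WEIGHT = 2.0;
const FACE_HEIGHT_WEIGHT = 1.8;

const pd = 60; // pupil distance, px (illustrative)
const fh = 70; // eye-center-to-mouth height, px (illustrative)
const centerFaceY = 450; // face center near the bottom edge
const frameHeight = 480;

const ocularWidth = (PUPIL_DISTANCE_WEIGHT * pd + FACE_HEIGHT_WEIGHT * fh) / 2; // 123
const faceHeight = 1.68 * ocularWidth; // ≈ 206.6
const top = Math.max(centerFaceY - faceHeight / 2, 0); // ≈ 346.7
const bottom = Math.min(centerFaceY + faceHeight / 2, frameHeight); // clamped to 480

console.log({ top, bottom }); // previously, bottom could exceed the frame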
@@ -436,8 +382,10 @@ async function isFaceDistanceBelowThreshold({ faceDetector, videoEl, ovalDetails
  detectedFace = detectedFaces[0];
  const { width } = ovalDetails;
  const { pupilDistance, faceHeight } = getPupilDistanceAndFaceHeight(detectedFace);
- const alpha = 2.0, gamma = 1.8;
- const calibratedPupilDistance = (alpha * pupilDistance + gamma * faceHeight) / 2 / alpha;
+ const calibratedPupilDistance = (PUPIL_DISTANCE_WEIGHT * pupilDistance +
+ FACE_HEIGHT_WEIGHT * faceHeight) /
+ 2 /
+ PUPIL_DISTANCE_WEIGHT;
  if (width) {
  isDistanceBelowThreshold =
  calibratedPupilDistance / width <
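
Note: the distance check is numerically unchanged; the inline `alpha`/`gamma` literals were simply replaced by the named constants. A worked instance (pixel values are illustrative):

const PUPIL_DISTANCE_WEIGHT = 2.0; // formerly inline `alpha`
const FACE_HEIGHT_WEIGHT = 1.8;    // formerly inline `gamma`
const FACE_DISTANCE_THRESHOLD = 0.32;

const pupilDistance = 50; // px, illustrative
const faceHeight = 60;    // px, illustrative
const ovalWidth = 480;    // px, illustrative

const calibratedPupilDistance =
  (PUPIL_DISTANCE_WEIGHT * pupilDistance + FACE_HEIGHT_WEIGHT * faceHeight) /
  2 /
  PUPIL_DISTANCE_WEIGHT; // (100 + 108) / 2 / 2 = 52

// Below-threshold means the user starts far enough away from the camera.
const isDistanceBelowThreshold =
  calibratedPupilDistance / ovalWidth < FACE_DISTANCE_THRESHOLD; // 52/480 ≈ 0.108 → true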
@@ -469,4 +417,4 @@ function getBoundingBox({ deviceHeight, deviceWidth, height, width, top, left, }
  };
  }

- export { clearOvalCanvas, drawLivenessOvalInCanvas, drawStaticOval, estimateIllumination, fillOverlayCanvasFractional, generateBboxFromLandmarks, getBoundingBox, getColorsSequencesFromSessionInformation, getFaceMatchState, getFaceMatchStateInLivenessOval, getIntersectionOverUnion, getOvalBoundingBox, getOvalDetailsFromSessionInformation, getRGBArrayFromColorString, getStaticLivenessOvalDetails, isCameraDeviceVirtual, isClientFreshnessColorSequence, isFaceDistanceBelowThreshold };
+ export { clearOvalCanvas, drawLivenessOvalInCanvas, drawStaticOval, estimateIllumination, fillOverlayCanvasFractional, generateBboxFromLandmarks, getBoundingBox, getColorsSequencesFromSessionInformation, getFaceMatchState, getIntersectionOverUnion, getOvalBoundingBox, getOvalDetailsFromSessionInformation, getRGBArrayFromColorString, getStaticLivenessOvalDetails, isCameraDeviceVirtual, isClientFreshnessColorSequence, isFaceDistanceBelowThreshold };
package/dist/esm/components/FaceLivenessDetector/service/utils/streamProvider.mjs
@@ -1,9 +1,6 @@
- import { getAmplifyUserAgent } from '@aws-amplify/core/internals/utils';
- import { fetchAuthSession } from 'aws-amplify/auth';
- import { RekognitionStreamingClient, StartFaceLivenessSessionCommand } from '@aws-sdk/client-rekognitionstreaming';
+ import { StartFaceLivenessSessionCommand } from '@aws-sdk/client-rekognitionstreaming';
  import { VideoRecorder } from './videoRecorder.mjs';
- import { getLivenessUserAgent } from '../../utils/platform.mjs';
- import { CustomWebSocketFetchHandler } from './CustomWebSocketFetchHandler.mjs';
+ import { createStreamingClient } from './createStreamingClient/createStreamingClient.mjs';

  const TIME_SLICE = 1000;
  function isBlob(obj) {
@@ -34,9 +31,7 @@ class LivenessStreamProvider {
  this.videoRecorder.start(TIME_SLICE);
  }
  sendClientInfo(clientInfo) {
- this.videoRecorder.dispatch(new MessageEvent('clientSesssionInfo', {
- data: { clientInfo },
- }));
+ this.videoRecorder.dispatch(new MessageEvent('clientSesssionInfo', { data: { clientInfo } }));
  }
  async stopVideo() {
  await this.videoRecorder.stop();
@@ -48,32 +43,15 @@ class LivenessStreamProvider {
  if (this.videoRecorder.getState() === 'recording') {
  await this.stopVideo();
  }
- this.videoRecorder.dispatch(new MessageEvent('endStreamWithCode', {
- data: { code: code },
- }));
+ this.videoRecorder.dispatch(new MessageEvent('endStreamWithCode', { data: { code } }));
  return;
  }
  async init() {
- const credentials = this.credentialProvider ?? (await fetchAuthSession()).credentials;
- if (!credentials) {
- throw new Error('No credentials');
- }
- const clientconfig = {
- credentials,
+ this._client = await createStreamingClient({
+ credentialsProvider: this.credentialProvider,
+ endpointOverride: this.endpointOverride,
  region: this.region,
- customUserAgent: `${getAmplifyUserAgent()} ${getLivenessUserAgent()}`,
- requestHandler: new CustomWebSocketFetchHandler({
- connectionTimeout: 10000,
- }),
- };
- if (this.endpointOverride) {
- const override = this.endpointOverride;
- clientconfig.endpointProvider = () => {
- const url = new URL(override);
- return { url };
- };
- }
- this._client = new RekognitionStreamingClient(clientconfig);
+ });
  this.responseStream = await this.startLivenessVideoConnection();
  }
  // Creates a generator from a stream of video chunks and livenessActionDocuments and yields VideoEvent and ClientEvents
package/dist/esm/components/FaceLivenessDetector/shared/DefaultStartScreenComponents.mjs
@@ -2,19 +2,15 @@ import React__default from 'react';
  import { ComponentClassName } from '@aws-amplify/ui';
  import { View, Flex } from '@aws-amplify/ui-react';
  import { CancelButton } from './CancelButton.mjs';
- import '../service/machine/index.mjs';
+ import '../service/machine/machine.mjs';
  import '../service/types/liveness.mjs';
  import '@tensorflow/tfjs-core';
  import '@tensorflow-models/face-detection';
  import '@tensorflow/tfjs-backend-wasm';
  import '@tensorflow/tfjs-backend-cpu';
  import '@aws-amplify/core/internals/utils';
- import 'aws-amplify/auth';
  import '@aws-sdk/client-rekognitionstreaming';
- import '@aws-sdk/util-format-url';
- import '@smithy/eventstream-serde-browser';
- import '@smithy/fetch-http-handler';
- import '@smithy/protocol-http';
+ import '../service/utils/createStreamingClient/createStreamingClient.mjs';
  import '../service/utils/freshnessColorDisplay.mjs';
  import '@xstate/react';
  import '../providers/FaceLivenessDetectorProvider.mjs';
package/dist/esm/components/FaceLivenessDetector/shared/FaceLivenessErrorModal.mjs
@@ -1,7 +1,7 @@
  import React__default from 'react';
  import { Flex, Text, Button } from '@aws-amplify/ui-react';
  import { AlertIcon } from '@aws-amplify/ui-react/internal';
- import '../service/machine/index.mjs';
+ import '../service/machine/machine.mjs';
  import '../service/types/liveness.mjs';
  import { LivenessErrorState } from '../service/types/error.mjs';
  import '@tensorflow/tfjs-core';
@@ -9,12 +9,8 @@ import '@tensorflow-models/face-detection';
  import '@tensorflow/tfjs-backend-wasm';
  import '@tensorflow/tfjs-backend-cpu';
  import '@aws-amplify/core/internals/utils';
- import 'aws-amplify/auth';
  import '@aws-sdk/client-rekognitionstreaming';
- import '@aws-sdk/util-format-url';
- import '@smithy/eventstream-serde-browser';
- import '@smithy/fetch-http-handler';
- import '@smithy/protocol-http';
+ import '../service/utils/createStreamingClient/createStreamingClient.mjs';
  import '../service/utils/freshnessColorDisplay.mjs';
  import { Toast } from './Toast.mjs';
  import { Overlay } from './Overlay.mjs';