@aws-amplify/ui-react-liveness 2.0.11 → 3.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/esm/components/FaceLivenessDetector/FaceLivenessDetector.mjs +17 -1
- package/dist/esm/components/FaceLivenessDetector/FaceLivenessDetectorCore.mjs +42 -1
- package/dist/esm/components/FaceLivenessDetector/LivenessCheck/LivenessCameraModule.mjs +199 -1
- package/dist/esm/components/FaceLivenessDetector/LivenessCheck/LivenessCheck.mjs +97 -1
- package/dist/esm/components/FaceLivenessDetector/displayText.mjs +50 -1
- package/dist/esm/components/FaceLivenessDetector/hooks/useLivenessActor.mjs +13 -1
- package/dist/esm/components/FaceLivenessDetector/hooks/useLivenessSelector.mjs +12 -1
- package/dist/esm/components/FaceLivenessDetector/hooks/useMediaStreamInVideo.mjs +38 -1
- package/dist/esm/components/FaceLivenessDetector/providers/FaceLivenessDetectorProvider.mjs +15 -1
- package/dist/esm/components/FaceLivenessDetector/service/machine/index.mjs +1130 -1
- package/dist/esm/components/FaceLivenessDetector/service/types/error.mjs +16 -1
- package/dist/esm/components/FaceLivenessDetector/service/types/faceDetection.mjs +15 -1
- package/dist/esm/components/FaceLivenessDetector/service/types/liveness.mjs +23 -1
- package/dist/esm/components/FaceLivenessDetector/service/utils/CustomWebSocketFetchHandler.mjs +200 -1
- package/dist/esm/components/FaceLivenessDetector/service/utils/blazefaceFaceDetection.mjs +102 -1
- package/dist/esm/components/FaceLivenessDetector/service/utils/constants.mjs +18 -1
- package/dist/esm/components/FaceLivenessDetector/service/utils/eventUtils.mjs +30 -1
- package/dist/esm/components/FaceLivenessDetector/service/utils/freshnessColorDisplay.mjs +131 -1
- package/dist/esm/components/FaceLivenessDetector/service/utils/liveness.mjs +462 -1
- package/dist/esm/components/FaceLivenessDetector/service/utils/streamProvider.mjs +144 -1
- package/dist/esm/components/FaceLivenessDetector/service/utils/support.mjs +14 -1
- package/dist/esm/components/FaceLivenessDetector/service/utils/videoRecorder.mjs +98 -1
- package/dist/esm/components/FaceLivenessDetector/shared/CancelButton.mjs +24 -1
- package/dist/esm/components/FaceLivenessDetector/shared/DefaultStartScreenComponents.mjs +41 -1
- package/dist/esm/components/FaceLivenessDetector/shared/FaceLivenessErrorModal.mjs +88 -1
- package/dist/esm/components/FaceLivenessDetector/shared/Hint.mjs +114 -1
- package/dist/esm/components/FaceLivenessDetector/shared/LandscapeErrorModal.mjs +30 -1
- package/dist/esm/components/FaceLivenessDetector/shared/LivenessIconWithPopover.mjs +37 -1
- package/dist/esm/components/FaceLivenessDetector/shared/MatchIndicator.mjs +24 -1
- package/dist/esm/components/FaceLivenessDetector/shared/Overlay.mjs +9 -1
- package/dist/esm/components/FaceLivenessDetector/shared/RecordingIcon.mjs +13 -1
- package/dist/esm/components/FaceLivenessDetector/shared/Toast.mjs +12 -1
- package/dist/esm/components/FaceLivenessDetector/types/classNames.mjs +54 -1
- package/dist/esm/components/FaceLivenessDetector/utils/device.mjs +24 -1
- package/dist/esm/components/FaceLivenessDetector/utils/getDisplayText.mjs +78 -1
- package/dist/esm/components/FaceLivenessDetector/utils/helpers.mjs +14 -0
- package/dist/esm/components/FaceLivenessDetector/utils/platform.mjs +8 -1
- package/dist/esm/index.mjs +2 -1
- package/dist/esm/version.mjs +3 -1
- package/dist/index.js +3208 -1
- package/dist/styles.css +343 -680
- package/dist/types/components/FaceLivenessDetector/FaceLivenessDetector.d.ts +1 -1
- package/dist/types/components/FaceLivenessDetector/FaceLivenessDetectorCore.d.ts +1 -3
- package/dist/types/components/FaceLivenessDetector/LivenessCheck/LivenessCameraModule.d.ts +7 -3
- package/dist/types/components/FaceLivenessDetector/LivenessCheck/LivenessCheck.d.ts +5 -3
- package/dist/types/components/FaceLivenessDetector/displayText.d.ts +3 -10
- package/dist/types/components/FaceLivenessDetector/service/machine/index.d.ts +1 -1
- package/dist/types/components/FaceLivenessDetector/service/types/faceDetection.d.ts +2 -0
- package/dist/types/components/FaceLivenessDetector/service/types/liveness.d.ts +1 -1
- package/dist/types/components/FaceLivenessDetector/service/types/machine.d.ts +3 -1
- package/dist/types/components/FaceLivenessDetector/service/utils/blazefaceFaceDetection.d.ts +4 -3
- package/dist/types/components/FaceLivenessDetector/service/utils/liveness.d.ts +5 -2
- package/dist/types/components/FaceLivenessDetector/shared/DefaultStartScreenComponents.d.ts +9 -15
- package/dist/types/components/FaceLivenessDetector/shared/Overlay.d.ts +2 -5
- package/dist/types/components/FaceLivenessDetector/shared/Toast.d.ts +1 -0
- package/dist/types/components/FaceLivenessDetector/types/classNames.d.ts +3 -0
- package/dist/types/version.d.ts +1 -1
- package/package.json +16 -37
- package/dist/esm/components/FaceLivenessDetector/StartLiveness/StartLiveness.mjs +0 -1
- package/dist/esm/components/FaceLivenessDetector/StartLiveness/helpers.mjs +0 -1
- package/dist/esm/components/FaceLivenessDetector/shared/GoodFitIllustration.mjs +0 -1
- package/dist/esm/components/FaceLivenessDetector/shared/StartScreenFigure.mjs +0 -1
- package/dist/esm/components/FaceLivenessDetector/shared/TooFarIllustration.mjs +0 -1
- package/dist/types/components/FaceLivenessDetector/StartLiveness/StartLiveness.d.ts +0 -9
- package/dist/types/components/FaceLivenessDetector/StartLiveness/index.d.ts +0 -1
- /package/dist/types/components/FaceLivenessDetector/{StartLiveness → utils}/helpers.d.ts +0 -0
|
@@ -1 +1,17 @@
|
|
|
1
|
-
import
|
|
1
|
+
import * as React from 'react';
|
|
2
|
+
import { fetchAuthSession } from 'aws-amplify/auth';
|
|
3
|
+
import FaceLivenessDetectorCore from './FaceLivenessDetectorCore.mjs';
|
|
4
|
+
|
|
5
|
+
const credentialProvider = async () => {
|
|
6
|
+
const { credentials } = await fetchAuthSession();
|
|
7
|
+
if (!credentials) {
|
|
8
|
+
throw new Error('No credentials provided');
|
|
9
|
+
}
|
|
10
|
+
return credentials;
|
|
11
|
+
};
|
|
12
|
+
function FaceLivenessDetector(props) {
|
|
13
|
+
const { config, ...rest } = props;
|
|
14
|
+
return (React.createElement(FaceLivenessDetectorCore, { ...rest, config: { credentialProvider, ...config } }));
|
|
15
|
+
}
|
|
16
|
+
|
|
17
|
+
export { FaceLivenessDetector as default };
|
|
@@ -1 +1,42 @@
|
|
|
1
|
-
import*
|
|
1
|
+
import * as React from 'react';
|
|
2
|
+
import { useInterpret } from '@xstate/react';
|
|
3
|
+
import { livenessMachine } from './service/machine/index.mjs';
|
|
4
|
+
import './service/types/liveness.mjs';
|
|
5
|
+
import '@tensorflow/tfjs-core';
|
|
6
|
+
import '@tensorflow-models/face-detection';
|
|
7
|
+
import '@tensorflow/tfjs-backend-wasm';
|
|
8
|
+
import '@tensorflow/tfjs-backend-cpu';
|
|
9
|
+
import '@aws-amplify/core/internals/utils';
|
|
10
|
+
import 'aws-amplify/auth';
|
|
11
|
+
import '@aws-sdk/client-rekognitionstreaming';
|
|
12
|
+
import '@aws-sdk/util-format-url';
|
|
13
|
+
import '@smithy/eventstream-serde-browser';
|
|
14
|
+
import '@smithy/fetch-http-handler';
|
|
15
|
+
import '@smithy/protocol-http';
|
|
16
|
+
import './service/utils/freshnessColorDisplay.mjs';
|
|
17
|
+
import { View, Flex } from '@aws-amplify/ui-react';
|
|
18
|
+
import { FaceLivenessDetectorProvider } from './providers/FaceLivenessDetectorProvider.mjs';
|
|
19
|
+
import { LivenessCheck } from './LivenessCheck/LivenessCheck.mjs';
|
|
20
|
+
import { getDisplayText } from './utils/getDisplayText.mjs';
|
|
21
|
+
|
|
22
|
+
const DETECTOR_CLASS_NAME = 'liveness-detector';
|
|
23
|
+
function FaceLivenessDetectorCore(props) {
|
|
24
|
+
const { components, config, displayText } = props;
|
|
25
|
+
const currElementRef = React.useRef(null);
|
|
26
|
+
const { hintDisplayText, cameraDisplayText, instructionDisplayText, streamDisplayText, errorDisplayText, } = getDisplayText(displayText);
|
|
27
|
+
const service = useInterpret(livenessMachine, {
|
|
28
|
+
devTools: process.env.NODE_ENV === 'development',
|
|
29
|
+
context: {
|
|
30
|
+
componentProps: {
|
|
31
|
+
...props,
|
|
32
|
+
config: config ?? {},
|
|
33
|
+
},
|
|
34
|
+
},
|
|
35
|
+
});
|
|
36
|
+
return (React.createElement(View, { className: DETECTOR_CLASS_NAME, testId: DETECTOR_CLASS_NAME },
|
|
37
|
+
React.createElement(FaceLivenessDetectorProvider, { componentProps: props, service: service },
|
|
38
|
+
React.createElement(Flex, { direction: "column", ref: currElementRef },
|
|
39
|
+
React.createElement(LivenessCheck, { instructionDisplayText: instructionDisplayText, hintDisplayText: hintDisplayText, cameraDisplayText: cameraDisplayText, streamDisplayText: streamDisplayText, errorDisplayText: errorDisplayText, components: components })))));
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
export { FaceLivenessDetectorCore as default };
|
|
@@ -1 +1,199 @@
|
|
|
1
|
-
import
|
|
1
|
+
import React__default, { useRef, useState } from 'react';
|
|
2
|
+
import { classNames } from '@aws-amplify/ui';
|
|
3
|
+
import { Loader, View, Flex, Text, Label, SelectField, Button } from '@aws-amplify/ui-react';
|
|
4
|
+
import { useColorMode } from '@aws-amplify/ui-react/internal';
|
|
5
|
+
import '../service/machine/index.mjs';
|
|
6
|
+
import { FaceMatchState } from '../service/types/liveness.mjs';
|
|
7
|
+
import '@tensorflow/tfjs-core';
|
|
8
|
+
import '@tensorflow-models/face-detection';
|
|
9
|
+
import '@tensorflow/tfjs-backend-wasm';
|
|
10
|
+
import '@tensorflow/tfjs-backend-cpu';
|
|
11
|
+
import '@aws-amplify/core/internals/utils';
|
|
12
|
+
import { drawStaticOval } from '../service/utils/liveness.mjs';
|
|
13
|
+
import 'aws-amplify/auth';
|
|
14
|
+
import '@aws-sdk/client-rekognitionstreaming';
|
|
15
|
+
import '@aws-sdk/util-format-url';
|
|
16
|
+
import '@smithy/eventstream-serde-browser';
|
|
17
|
+
import '@smithy/fetch-http-handler';
|
|
18
|
+
import '@smithy/protocol-http';
|
|
19
|
+
import '../service/utils/freshnessColorDisplay.mjs';
|
|
20
|
+
import { useLivenessActor } from '../hooks/useLivenessActor.mjs';
|
|
21
|
+
import { useLivenessSelector, createLivenessSelector } from '../hooks/useLivenessSelector.mjs';
|
|
22
|
+
import { useMediaStreamInVideo } from '../hooks/useMediaStreamInVideo.mjs';
|
|
23
|
+
import { LivenessClassNames } from '../types/classNames.mjs';
|
|
24
|
+
import { selectErrorState, Hint } from '../shared/Hint.mjs';
|
|
25
|
+
import { MatchIndicator } from '../shared/MatchIndicator.mjs';
|
|
26
|
+
import { Overlay } from '../shared/Overlay.mjs';
|
|
27
|
+
import { renderErrorModal, FaceLivenessErrorModal } from '../shared/FaceLivenessErrorModal.mjs';
|
|
28
|
+
import { DefaultPhotosensitiveWarning, DefaultRecordingIcon, DefaultCancelButton } from '../shared/DefaultStartScreenComponents.mjs';
|
|
29
|
+
|
|
30
|
+
const selectVideoConstraints = createLivenessSelector((state) => state.context.videoAssociatedParams?.videoConstraints);
|
|
31
|
+
const selectVideoStream = createLivenessSelector((state) => state.context.videoAssociatedParams?.videoMediaStream);
|
|
32
|
+
const selectFaceMatchPercentage = createLivenessSelector((state) => state.context.faceMatchAssociatedParams?.faceMatchPercentage);
|
|
33
|
+
const selectFaceMatchState = createLivenessSelector((state) => state.context.faceMatchAssociatedParams?.faceMatchState);
|
|
34
|
+
const selectSelectedDeviceId = createLivenessSelector((state) => state.context.videoAssociatedParams?.selectedDeviceId);
|
|
35
|
+
const selectSelectableDevices = createLivenessSelector((state) => state.context.videoAssociatedParams?.selectableDevices);
|
|
36
|
+
const centeredLoader = (React__default.createElement(Loader, { size: "large", className: LivenessClassNames.Loader, "data-testid": "centered-loader" }));
|
|
37
|
+
const showMatchIndicatorStates = [
|
|
38
|
+
FaceMatchState.TOO_FAR,
|
|
39
|
+
FaceMatchState.CANT_IDENTIFY,
|
|
40
|
+
FaceMatchState.FACE_IDENTIFIED,
|
|
41
|
+
FaceMatchState.MATCHED,
|
|
42
|
+
];
|
|
43
|
+
/**
|
|
44
|
+
* For now we want to memoize the HOC for MatchIndicator because to optimize renders
|
|
45
|
+
* The LivenessCameraModule still needs to be optimized for re-renders and at that time
|
|
46
|
+
* we should be able to remove this memoization
|
|
47
|
+
*/
|
|
48
|
+
const MemoizedMatchIndicator = React__default.memo(MatchIndicator);
|
|
49
|
+
const LivenessCameraModule = (props) => {
|
|
50
|
+
const { isMobileScreen, isRecordingStopped, instructionDisplayText, streamDisplayText, hintDisplayText, errorDisplayText, cameraDisplayText, components: customComponents, testId, } = props;
|
|
51
|
+
const { cancelLivenessCheckText, recordingIndicatorText } = streamDisplayText;
|
|
52
|
+
const { ErrorView = FaceLivenessErrorModal } = customComponents ?? {};
|
|
53
|
+
const [state, send] = useLivenessActor();
|
|
54
|
+
const videoStream = useLivenessSelector(selectVideoStream);
|
|
55
|
+
const videoConstraints = useLivenessSelector(selectVideoConstraints);
|
|
56
|
+
const selectedDeviceId = useLivenessSelector(selectSelectedDeviceId);
|
|
57
|
+
const selectableDevices = useLivenessSelector(selectSelectableDevices);
|
|
58
|
+
const faceMatchPercentage = useLivenessSelector(selectFaceMatchPercentage);
|
|
59
|
+
const faceMatchState = useLivenessSelector(selectFaceMatchState);
|
|
60
|
+
const errorState = useLivenessSelector(selectErrorState);
|
|
61
|
+
const colorMode = useColorMode();
|
|
62
|
+
const { videoRef, videoWidth, videoHeight } = useMediaStreamInVideo(videoStream);
|
|
63
|
+
const canvasRef = useRef(null);
|
|
64
|
+
const freshnessColorRef = useRef(null);
|
|
65
|
+
const [isCameraReady, setIsCameraReady] = useState(false);
|
|
66
|
+
const isCheckingCamera = state.matches('cameraCheck');
|
|
67
|
+
const isWaitingForCamera = state.matches('waitForDOMAndCameraDetails');
|
|
68
|
+
const isStartView = state.matches('start') || state.matches('userCancel');
|
|
69
|
+
const isRecording = state.matches('recording');
|
|
70
|
+
const isCheckSucceeded = state.matches('checkSucceeded');
|
|
71
|
+
const isFlashingFreshness = state.matches({
|
|
72
|
+
recording: 'flashFreshnessColors',
|
|
73
|
+
});
|
|
74
|
+
// Android/Firefox and iOS flip the values of width/height returned from
|
|
75
|
+
// getUserMedia, so we'll reset these in useLayoutEffect with the videoRef
|
|
76
|
+
// element's intrinsic videoWidth and videoHeight attributes
|
|
77
|
+
const [mediaWidth, setMediaWidth] = useState(videoWidth);
|
|
78
|
+
const [mediaHeight, setMediaHeight] = useState(videoHeight);
|
|
79
|
+
const [aspectRatio, setAspectRatio] = useState(() => videoWidth && videoHeight ? videoWidth / videoHeight : 0);
|
|
80
|
+
React__default.useEffect(() => {
|
|
81
|
+
if (canvasRef &&
|
|
82
|
+
videoRef &&
|
|
83
|
+
canvasRef.current &&
|
|
84
|
+
videoRef.current &&
|
|
85
|
+
videoStream &&
|
|
86
|
+
isStartView) {
|
|
87
|
+
drawStaticOval(canvasRef.current, videoRef.current, videoStream);
|
|
88
|
+
}
|
|
89
|
+
}, [canvasRef, videoRef, videoStream, colorMode, isStartView]);
|
|
90
|
+
React__default.useEffect(() => {
|
|
91
|
+
const updateColorModeHandler = (e) => {
|
|
92
|
+
if (e.matches &&
|
|
93
|
+
canvasRef &&
|
|
94
|
+
videoRef &&
|
|
95
|
+
canvasRef.current &&
|
|
96
|
+
videoRef.current &&
|
|
97
|
+
videoStream &&
|
|
98
|
+
isStartView) {
|
|
99
|
+
drawStaticOval(canvasRef.current, videoRef.current, videoStream);
|
|
100
|
+
}
|
|
101
|
+
};
|
|
102
|
+
const darkModePreference = window.matchMedia('(prefers-color-scheme: dark)');
|
|
103
|
+
const lightModePreference = window.matchMedia('(prefers-color-scheme: light)');
|
|
104
|
+
darkModePreference.addEventListener('change', updateColorModeHandler);
|
|
105
|
+
lightModePreference.addEventListener('change', updateColorModeHandler);
|
|
106
|
+
return () => {
|
|
107
|
+
darkModePreference.removeEventListener('change', updateColorModeHandler);
|
|
108
|
+
lightModePreference.addEventListener('change', updateColorModeHandler);
|
|
109
|
+
};
|
|
110
|
+
}, [canvasRef, videoRef, videoStream, isStartView]);
|
|
111
|
+
React__default.useLayoutEffect(() => {
|
|
112
|
+
if (isCameraReady) {
|
|
113
|
+
send({
|
|
114
|
+
type: 'SET_DOM_AND_CAMERA_DETAILS',
|
|
115
|
+
data: {
|
|
116
|
+
videoEl: videoRef.current,
|
|
117
|
+
canvasEl: canvasRef.current,
|
|
118
|
+
freshnessColorEl: freshnessColorRef.current,
|
|
119
|
+
isMobile: isMobileScreen,
|
|
120
|
+
},
|
|
121
|
+
});
|
|
122
|
+
}
|
|
123
|
+
if (videoRef.current) {
|
|
124
|
+
setMediaWidth(videoRef.current.videoWidth);
|
|
125
|
+
setMediaHeight(videoRef.current.videoHeight);
|
|
126
|
+
setAspectRatio(videoRef.current.videoWidth / videoRef.current.videoHeight);
|
|
127
|
+
}
|
|
128
|
+
}, [send, videoRef, isCameraReady, isMobileScreen]);
|
|
129
|
+
const photoSensitivtyWarning = React__default.useMemo(() => {
|
|
130
|
+
return (React__default.createElement(View, { style: { visibility: isStartView ? 'visible' : 'hidden' } },
|
|
131
|
+
React__default.createElement(DefaultPhotosensitiveWarning, { headingText: instructionDisplayText.photosensitivyWarningHeadingText, bodyText: instructionDisplayText.photosensitivyWarningBodyText, infoText: instructionDisplayText.photosensitivyWarningInfoText })));
|
|
132
|
+
}, [instructionDisplayText, isStartView]);
|
|
133
|
+
const handleMediaPlay = () => {
|
|
134
|
+
setIsCameraReady(true);
|
|
135
|
+
};
|
|
136
|
+
const beginLivenessCheck = React__default.useCallback(() => {
|
|
137
|
+
send({
|
|
138
|
+
type: 'BEGIN',
|
|
139
|
+
});
|
|
140
|
+
}, [send]);
|
|
141
|
+
const onCameraChange = React__default.useCallback((e) => {
|
|
142
|
+
const newDeviceId = e.target.value;
|
|
143
|
+
const changeCamera = async () => {
|
|
144
|
+
const newStream = await navigator.mediaDevices.getUserMedia({
|
|
145
|
+
video: {
|
|
146
|
+
...videoConstraints,
|
|
147
|
+
deviceId: { exact: newDeviceId },
|
|
148
|
+
},
|
|
149
|
+
audio: false,
|
|
150
|
+
});
|
|
151
|
+
send({
|
|
152
|
+
type: 'UPDATE_DEVICE_AND_STREAM',
|
|
153
|
+
data: { newDeviceId, newStream },
|
|
154
|
+
});
|
|
155
|
+
};
|
|
156
|
+
changeCamera();
|
|
157
|
+
}, [videoConstraints, send]);
|
|
158
|
+
if (isCheckingCamera) {
|
|
159
|
+
return (React__default.createElement(Flex, { justifyContent: 'center', className: LivenessClassNames.StartScreenCameraWaiting },
|
|
160
|
+
React__default.createElement(Loader, { size: "large", className: LivenessClassNames.Loader, "data-testid": "centered-loader", position: "unset" }),
|
|
161
|
+
React__default.createElement(Text, { fontSize: "large", fontWeight: "bold", "data-testid": "waiting-camera-permission", className: `${LivenessClassNames.StartScreenCameraWaiting}__text` }, cameraDisplayText.waitingCameraPermissionText)));
|
|
162
|
+
}
|
|
163
|
+
const isRecordingOnMobile = isMobileScreen && !isStartView && !isWaitingForCamera && isRecording;
|
|
164
|
+
return (React__default.createElement(React__default.Fragment, null,
|
|
165
|
+
photoSensitivtyWarning,
|
|
166
|
+
React__default.createElement(Flex, { className: classNames(LivenessClassNames.CameraModule, isRecordingOnMobile && `${LivenessClassNames.CameraModule}--mobile`), "data-testid": testId, gap: "zero" },
|
|
167
|
+
!isCameraReady && centeredLoader,
|
|
168
|
+
React__default.createElement(View, { as: "canvas", ref: freshnessColorRef, className: LivenessClassNames.FreshnessCanvas, hidden: true }),
|
|
169
|
+
React__default.createElement(View, { className: LivenessClassNames.VideoAnchor, style: {
|
|
170
|
+
aspectRatio: `${aspectRatio}`,
|
|
171
|
+
} },
|
|
172
|
+
React__default.createElement("video", { ref: videoRef, muted: true, autoPlay: true, playsInline: true, width: mediaWidth, height: mediaHeight, onCanPlay: handleMediaPlay, "data-testid": "video", className: LivenessClassNames.Video }),
|
|
173
|
+
React__default.createElement(Flex, { className: classNames(LivenessClassNames.OvalCanvas, isRecordingOnMobile && `${LivenessClassNames.OvalCanvas}--mobile`, isRecordingStopped && LivenessClassNames.FadeOut) },
|
|
174
|
+
React__default.createElement(View, { as: "canvas", ref: canvasRef })),
|
|
175
|
+
isRecording && (React__default.createElement(DefaultRecordingIcon, { recordingIndicatorText: recordingIndicatorText })),
|
|
176
|
+
!isStartView && !isWaitingForCamera && !isCheckSucceeded && (React__default.createElement(DefaultCancelButton, { cancelLivenessCheckText: cancelLivenessCheckText })),
|
|
177
|
+
React__default.createElement(Overlay, { horizontal: "center", vertical: isRecording && !isFlashingFreshness ? 'start' : 'space-between', className: LivenessClassNames.InstructionOverlay },
|
|
178
|
+
React__default.createElement(Hint, { hintDisplayText: hintDisplayText }),
|
|
179
|
+
errorState && (React__default.createElement(ErrorView, { onRetry: () => {
|
|
180
|
+
send({ type: 'CANCEL' });
|
|
181
|
+
} }, renderErrorModal({
|
|
182
|
+
errorState,
|
|
183
|
+
overrideErrorDisplayText: errorDisplayText,
|
|
184
|
+
}))),
|
|
185
|
+
isRecording &&
|
|
186
|
+
!isFlashingFreshness &&
|
|
187
|
+
showMatchIndicatorStates.includes(faceMatchState) ? (React__default.createElement(MemoizedMatchIndicator, { percentage: Math.ceil(faceMatchPercentage) })) : null),
|
|
188
|
+
isStartView &&
|
|
189
|
+
!isMobileScreen &&
|
|
190
|
+
selectableDevices &&
|
|
191
|
+
selectableDevices.length > 1 && (React__default.createElement(Flex, { className: LivenessClassNames.StartScreenCameraSelect },
|
|
192
|
+
React__default.createElement(View, { className: LivenessClassNames.StartScreenCameraSelectContainer },
|
|
193
|
+
React__default.createElement(Label, { htmlFor: "amplify-liveness-camera-select", className: `${LivenessClassNames.StartScreenCameraSelect}__label` }, "Camera:"),
|
|
194
|
+
React__default.createElement(SelectField, { id: "amplify-liveness-camera-select", label: "Camera", labelHidden: true, value: selectedDeviceId, onChange: onCameraChange }, selectableDevices?.map((device) => (React__default.createElement("option", { value: device.deviceId, key: device.deviceId }, device.label))))))))),
|
|
195
|
+
isStartView && (React__default.createElement(Flex, { justifyContent: "center" },
|
|
196
|
+
React__default.createElement(Button, { variation: "primary", type: "button", onClick: beginLivenessCheck }, instructionDisplayText.startScreenBeginCheckText)))));
|
|
197
|
+
};
|
|
198
|
+
|
|
199
|
+
export { LivenessCameraModule, selectFaceMatchPercentage, selectFaceMatchState, selectSelectableDevices, selectSelectedDeviceId, selectVideoConstraints, selectVideoStream };
|
|
@@ -1 +1,97 @@
|
|
|
1
|
-
import*as
|
|
1
|
+
import * as React from 'react';
|
|
2
|
+
import { Flex, Text, Button, View } from '@aws-amplify/ui-react';
|
|
3
|
+
import '../service/machine/index.mjs';
|
|
4
|
+
import '../service/types/liveness.mjs';
|
|
5
|
+
import { LivenessErrorState } from '../service/types/error.mjs';
|
|
6
|
+
import '@tensorflow/tfjs-core';
|
|
7
|
+
import '@tensorflow-models/face-detection';
|
|
8
|
+
import '@tensorflow/tfjs-backend-wasm';
|
|
9
|
+
import '@tensorflow/tfjs-backend-cpu';
|
|
10
|
+
import '@aws-amplify/core/internals/utils';
|
|
11
|
+
import 'aws-amplify/auth';
|
|
12
|
+
import '@aws-sdk/client-rekognitionstreaming';
|
|
13
|
+
import '@aws-sdk/util-format-url';
|
|
14
|
+
import '@smithy/eventstream-serde-browser';
|
|
15
|
+
import '@smithy/fetch-http-handler';
|
|
16
|
+
import '@smithy/protocol-http';
|
|
17
|
+
import '../service/utils/freshnessColorDisplay.mjs';
|
|
18
|
+
import { LivenessCameraModule } from './LivenessCameraModule.mjs';
|
|
19
|
+
import { useLivenessActor } from '../hooks/useLivenessActor.mjs';
|
|
20
|
+
import { useLivenessSelector, createLivenessSelector } from '../hooks/useLivenessSelector.mjs';
|
|
21
|
+
import '@aws-amplify/ui';
|
|
22
|
+
import { isMobileScreen, getLandscapeMediaQuery } from '../utils/device.mjs';
|
|
23
|
+
import { CancelButton } from '../shared/CancelButton.mjs';
|
|
24
|
+
import { defaultErrorDisplayText } from '../displayText.mjs';
|
|
25
|
+
import { LandscapeErrorModal } from '../shared/LandscapeErrorModal.mjs';
|
|
26
|
+
import { selectErrorState } from '../shared/Hint.mjs';
|
|
27
|
+
import '../types/classNames.mjs';
|
|
28
|
+
|
|
29
|
+
const CHECK_CLASS_NAME = 'liveness-detector-check';
|
|
30
|
+
const CAMERA_ERROR_TEXT_WIDTH = 420;
|
|
31
|
+
const selectIsRecordingStopped = createLivenessSelector((state) => state.context.isRecordingStopped);
|
|
32
|
+
const LivenessCheck = ({ instructionDisplayText, hintDisplayText, cameraDisplayText, streamDisplayText, errorDisplayText, components, }) => {
|
|
33
|
+
const [state, send] = useLivenessActor();
|
|
34
|
+
const errorState = useLivenessSelector(selectErrorState);
|
|
35
|
+
const isRecordingStopped = useLivenessSelector(selectIsRecordingStopped);
|
|
36
|
+
const isPermissionDenied = state.matches('permissionDenied');
|
|
37
|
+
const isMobile = isMobileScreen();
|
|
38
|
+
const recheckCameraPermissions = () => {
|
|
39
|
+
send({ type: 'RETRY_CAMERA_CHECK' });
|
|
40
|
+
};
|
|
41
|
+
const { cameraMinSpecificationsHeadingText, cameraMinSpecificationsMessageText, cameraNotFoundHeadingText, cameraNotFoundMessageText, retryCameraPermissionsText, } = cameraDisplayText;
|
|
42
|
+
const { cancelLivenessCheckText } = streamDisplayText;
|
|
43
|
+
React.useLayoutEffect(() => {
|
|
44
|
+
if (isMobile) {
|
|
45
|
+
const sendLandscapeWarning = (isLandscapeMatched) => {
|
|
46
|
+
if (isLandscapeMatched) {
|
|
47
|
+
send({ type: 'MOBILE_LANDSCAPE_WARNING' });
|
|
48
|
+
}
|
|
49
|
+
};
|
|
50
|
+
// Get orientation: landscape media query
|
|
51
|
+
const landscapeMediaQuery = getLandscapeMediaQuery();
|
|
52
|
+
// Send warning based on initial orientation
|
|
53
|
+
sendLandscapeWarning(landscapeMediaQuery.matches);
|
|
54
|
+
// Listen for future orientation changes and send warning
|
|
55
|
+
landscapeMediaQuery.addEventListener('change', (e) => {
|
|
56
|
+
sendLandscapeWarning(e.matches);
|
|
57
|
+
});
|
|
58
|
+
// Remove matchMedia event listener
|
|
59
|
+
return () => {
|
|
60
|
+
landscapeMediaQuery.removeEventListener('change', (e) => sendLandscapeWarning(e.matches));
|
|
61
|
+
};
|
|
62
|
+
}
|
|
63
|
+
}, [isMobile, send]);
|
|
64
|
+
const renderCheck = () => {
|
|
65
|
+
if (errorState === LivenessErrorState.MOBILE_LANDSCAPE_ERROR) {
|
|
66
|
+
const displayText = {
|
|
67
|
+
...defaultErrorDisplayText,
|
|
68
|
+
...errorDisplayText,
|
|
69
|
+
};
|
|
70
|
+
const { landscapeHeaderText, portraitMessageText, landscapeMessageText, tryAgainText, } = displayText;
|
|
71
|
+
return (React.createElement(Flex, { backgroundColor: "background.primary", direction: "column", textAlign: "center", alignItems: "center", justifyContent: "center", position: "absolute", width: "100%" },
|
|
72
|
+
React.createElement(LandscapeErrorModal, { header: landscapeHeaderText, portraitMessage: portraitMessageText, landscapeMessage: landscapeMessageText, tryAgainText: tryAgainText, onRetry: () => {
|
|
73
|
+
send({
|
|
74
|
+
type: 'CANCEL',
|
|
75
|
+
});
|
|
76
|
+
} })));
|
|
77
|
+
}
|
|
78
|
+
else if (isPermissionDenied) {
|
|
79
|
+
return (React.createElement(Flex, { backgroundColor: "background.primary", direction: "column", textAlign: "center", alignItems: "center", justifyContent: "center", width: "100%", height: 480 },
|
|
80
|
+
React.createElement(Text, { fontSize: "large", fontWeight: "bold" }, errorState === LivenessErrorState.CAMERA_FRAMERATE_ERROR
|
|
81
|
+
? cameraMinSpecificationsHeadingText
|
|
82
|
+
: cameraNotFoundHeadingText),
|
|
83
|
+
React.createElement(Text, { maxWidth: CAMERA_ERROR_TEXT_WIDTH }, errorState === LivenessErrorState.CAMERA_FRAMERATE_ERROR
|
|
84
|
+
? cameraMinSpecificationsMessageText
|
|
85
|
+
: cameraNotFoundMessageText),
|
|
86
|
+
React.createElement(Button, { variation: "primary", type: "button", onClick: recheckCameraPermissions }, retryCameraPermissionsText),
|
|
87
|
+
React.createElement(View, { position: "absolute", top: "medium", right: "medium" },
|
|
88
|
+
React.createElement(CancelButton, { ariaLabel: cancelLivenessCheckText }))));
|
|
89
|
+
}
|
|
90
|
+
else {
|
|
91
|
+
return (React.createElement(LivenessCameraModule, { isMobileScreen: isMobile, isRecordingStopped: isRecordingStopped, instructionDisplayText: instructionDisplayText, streamDisplayText: streamDisplayText, hintDisplayText: hintDisplayText, errorDisplayText: errorDisplayText, cameraDisplayText: cameraDisplayText, components: components }));
|
|
92
|
+
}
|
|
93
|
+
};
|
|
94
|
+
return (React.createElement(Flex, { direction: "column", position: "relative", testId: CHECK_CLASS_NAME, className: CHECK_CLASS_NAME, gap: "xl" }, renderCheck()));
|
|
95
|
+
};
|
|
96
|
+
|
|
97
|
+
export { LivenessCheck, selectIsRecordingStopped };
|
|
@@ -1 +1,50 @@
|
|
|
1
|
-
const
|
|
1
|
+
const defaultErrorDisplayText = {
|
|
2
|
+
timeoutHeaderText: 'Time out',
|
|
3
|
+
timeoutMessageText: "Face didn't fit inside oval in time limit. Try again and completely fill the oval with face in it.",
|
|
4
|
+
faceDistanceHeaderText: 'Forward movement detected',
|
|
5
|
+
faceDistanceMessageText: 'Avoid moving closer when connecting.',
|
|
6
|
+
multipleFacesHeaderText: 'Multiple faces detected',
|
|
7
|
+
multipleFacesMessageText: 'Ensure only one face is present in front of the camera when connecting.',
|
|
8
|
+
clientHeaderText: 'Client error',
|
|
9
|
+
clientMessageText: 'Check failed due to client issue',
|
|
10
|
+
serverHeaderText: 'Server issue',
|
|
11
|
+
serverMessageText: 'Cannot complete check due to server issue',
|
|
12
|
+
landscapeHeaderText: 'Landscape orientation not supported',
|
|
13
|
+
landscapeMessageText: 'Rotate your device to portrait (vertical) orientation.',
|
|
14
|
+
portraitMessageText: 'Ensure your device remains in portrait (vertical) orientation for the check’s duration.',
|
|
15
|
+
tryAgainText: 'Try again',
|
|
16
|
+
};
|
|
17
|
+
const defaultLivenessDisplayText = {
|
|
18
|
+
hintCenterFaceText: 'Center your face',
|
|
19
|
+
startScreenBeginCheckText: 'Start video check',
|
|
20
|
+
photosensitivyWarningHeadingText: 'Photosensitivity warning',
|
|
21
|
+
photosensitivyWarningBodyText: 'This check flashes different colors. Use caution if you are photosensitive.',
|
|
22
|
+
photosensitivyWarningInfoText: 'Some people may experience may experience epileptic seizures when exposed to colored lights. Use caution if you, or anyone in your family, have an epileptic condition.',
|
|
23
|
+
goodFitCaptionText: 'Good fit',
|
|
24
|
+
goodFitAltText: "Ilustration of a person's face, perfectly fitting inside of an oval.",
|
|
25
|
+
tooFarCaptionText: 'Too far',
|
|
26
|
+
tooFarAltText: "Illustration of a person's face inside of an oval; there is a gap between the perimeter of the face and the boundaries of the oval.",
|
|
27
|
+
cameraMinSpecificationsHeadingText: 'Camera does not meet minimum specifications',
|
|
28
|
+
cameraMinSpecificationsMessageText: 'Camera must support at least 320*240 resolution and 15 frames per second.',
|
|
29
|
+
cameraNotFoundHeadingText: 'Camera is not accessible.',
|
|
30
|
+
cameraNotFoundMessageText: 'Check that a camera is connected and there is not another application using the camera. You may have to go into settings to grant camera permissions and close out all instances of your browser and retry.',
|
|
31
|
+
retryCameraPermissionsText: 'Retry',
|
|
32
|
+
waitingCameraPermissionText: 'Waiting for you to allow camera permission.',
|
|
33
|
+
cancelLivenessCheckText: 'Cancel Liveness check',
|
|
34
|
+
recordingIndicatorText: 'Rec',
|
|
35
|
+
hintMoveFaceFrontOfCameraText: 'Move face in front of camera',
|
|
36
|
+
hintTooManyFacesText: 'Ensure only one face is in front of camera',
|
|
37
|
+
hintFaceDetectedText: 'Face detected',
|
|
38
|
+
hintCanNotIdentifyText: 'Move face in front of camera',
|
|
39
|
+
hintTooCloseText: 'Move back',
|
|
40
|
+
hintTooFarText: 'Move closer',
|
|
41
|
+
hintConnectingText: 'Connecting...',
|
|
42
|
+
hintVerifyingText: 'Verifying...',
|
|
43
|
+
hintIlluminationTooBrightText: 'Move to dimmer area',
|
|
44
|
+
hintIlluminationTooDarkText: 'Move to brighter area',
|
|
45
|
+
hintIlluminationNormalText: 'Lighting conditions normal',
|
|
46
|
+
hintHoldFaceForFreshnessText: 'Hold still',
|
|
47
|
+
...defaultErrorDisplayText,
|
|
48
|
+
};
|
|
49
|
+
|
|
50
|
+
export { defaultErrorDisplayText, defaultLivenessDisplayText };
|
|
@@ -1 +1,13 @@
|
|
|
1
|
-
import{useActor
|
|
1
|
+
import { useActor } from '@xstate/react';
|
|
2
|
+
import { useFaceLivenessDetector } from '../providers/FaceLivenessDetectorProvider.mjs';
|
|
3
|
+
|
|
4
|
+
// TODO: Add type annotations. Currently typing the actors returned from Xstate is difficult
|
|
5
|
+
// because the interpreter and state can not be used to form a type.
|
|
6
|
+
// eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
|
|
7
|
+
/**
 * Subscribes to the shared liveness state machine.
 * Reads the xstate `service` from the FaceLivenessDetector context and
 * returns the `[state, send]` pair produced by `useActor`.
 */
function useLivenessActor() {
  const { service } = useFaceLivenessDetector();
  return useActor(service);
}
|
|
12
|
+
|
|
13
|
+
export { useLivenessActor };
|
|
@@ -1 +1,12 @@
|
|
|
1
|
-
import{useSelector
|
|
1
|
+
import { useSelector } from '@xstate/react';
|
|
2
|
+
import { useFaceLivenessDetector } from '../providers/FaceLivenessDetectorProvider.mjs';
|
|
3
|
+
|
|
4
|
+
/**
 * Identity helper that gives liveness selectors a single creation point
 * (useful as a typing/refactoring seam). Returns the selector unchanged.
 */
function createLivenessSelector(selector) {
  return selector;
}
|
|
7
|
+
/**
 * Reads a derived slice of the liveness state machine's state.
 * Re-renders the calling component only when the selected value changes
 * (per `useSelector` semantics).
 */
function useLivenessSelector(selector) {
  const detector = useFaceLivenessDetector();
  return useSelector(detector.service, selector);
}
|
|
11
|
+
|
|
12
|
+
export { createLivenessSelector, useLivenessSelector };
|
|
@@ -1 +1,38 @@
|
|
|
1
|
-
import{useRef
|
|
1
|
+
import { useRef, useState, useEffect } from 'react';
|
|
2
|
+
import { isObject } from '@aws-amplify/ui';
|
|
3
|
+
import { STATIC_VIDEO_CONSTRAINTS } from '../utils/helpers.mjs';
|
|
4
|
+
|
|
5
|
+
/**
 * Attaches a MediaStream to a <video> element ref and tracks the stream's
 * actual dimensions, initialized from the ideal static constraints.
 * On cleanup (or stream change) every track is detached and stopped.
 */
function useMediaStreamInVideo(stream) {
  const idealHeight = STATIC_VIDEO_CONSTRAINTS.height.ideal;
  const idealWidth = STATIC_VIDEO_CONSTRAINTS.width.ideal;
  const videoRef = useRef(null);
  const [videoHeight, setVideoHeight] = useState(idealHeight);
  const [videoWidth, setVideoWidth] = useState(idealWidth);
  useEffect(() => {
    if (stream) {
      // Wire the stream into the video element once the ref is populated.
      if (isObject(videoRef.current)) {
        videoRef.current.srcObject = stream;
      }
      // Report the dimensions the camera actually granted, which may
      // differ from the ideal constraints requested.
      const settings = stream.getTracks()[0].getSettings();
      setVideoHeight(settings.height);
      setVideoWidth(settings.width);
    }
    return () => {
      // Stop every track so the camera is fully released.
      if (stream) {
        for (const track of stream.getTracks()) {
          stream.removeTrack(track);
          track.stop();
        }
      }
    };
  }, [stream]);
  return {
    videoRef,
    videoHeight,
    videoWidth,
  };
}
|
|
37
|
+
|
|
38
|
+
export { useMediaStreamInVideo };
|
|
@@ -1 +1,15 @@
|
|
|
1
|
-
import
|
|
1
|
+
import React__default from 'react';
|
|
2
|
+
|
|
3
|
+
// Context carrying the FaceLivenessDetector props (including the state
// machine service) down to nested liveness hooks; null until a Provider mounts.
const FaceLivenessDetectorContext = React__default.createContext(null);
|
|
4
|
+
/**
 * Provider component: exposes all received props (minus `children`) through
 * FaceLivenessDetectorContext so descendant hooks can read them.
 */
function FaceLivenessDetectorProvider({ children, ...props }) {
  return React__default.createElement(
    FaceLivenessDetectorContext.Provider,
    { value: props },
    children
  );
}
|
|
7
|
+
/**
 * Reads the FaceLivenessDetector context value.
 * Throws when called outside of a FaceLivenessDetectorProvider, since the
 * context defaults to null.
 */
function useFaceLivenessDetector() {
  const context = React__default.useContext(FaceLivenessDetectorContext);
  if (context === null) {
    throw new Error('useFaceLivenessDetector must be used within a FaceLivenessDetectorProvider');
  }
  return context;
}
|
|
14
|
+
|
|
15
|
+
export { FaceLivenessDetectorProvider, useFaceLivenessDetector };
|