@aws-amplify/ui-react-liveness 3.3.9 → 3.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/esm/components/FaceLivenessDetector/FaceLivenessDetectorCore.mjs +4 -2
- package/dist/esm/components/FaceLivenessDetector/LivenessCheck/CameraSelector.mjs +13 -0
- package/dist/esm/components/FaceLivenessDetector/LivenessCheck/LivenessCameraModule.mjs +50 -28
- package/dist/esm/components/FaceLivenessDetector/LivenessCheck/LivenessCheck.mjs +5 -4
- package/dist/esm/components/FaceLivenessDetector/service/machine/machine.mjs +247 -314
- package/dist/esm/components/FaceLivenessDetector/service/utils/ColorSequenceDisplay/ColorSequenceDisplay.mjs +140 -0
- package/dist/esm/components/FaceLivenessDetector/service/utils/StreamRecorder/StreamRecorder.mjs +171 -0
- package/dist/esm/components/FaceLivenessDetector/service/utils/TelemetryReporter/TelemetryReporter.mjs +27 -0
- package/dist/esm/components/FaceLivenessDetector/service/utils/constants.mjs +30 -7
- package/dist/esm/components/FaceLivenessDetector/service/utils/createRequestStreamGenerator/createRequestStreamGenerator.mjs +32 -0
- package/dist/esm/components/FaceLivenessDetector/service/utils/createRequestStreamGenerator/utils.mjs +148 -0
- package/dist/esm/components/FaceLivenessDetector/service/utils/createStreamingClient/Signer.mjs +2 -3
- package/dist/esm/components/FaceLivenessDetector/service/utils/createStreamingClient/createStreamingClient.mjs +36 -6
- package/dist/esm/components/FaceLivenessDetector/service/utils/createStreamingClient/resolveCredentials.mjs +7 -6
- package/dist/esm/components/FaceLivenessDetector/service/utils/getFaceMatchStateInLivenessOval.mjs +9 -5
- package/dist/esm/components/FaceLivenessDetector/service/utils/liveness.mjs +19 -34
- package/dist/esm/components/FaceLivenessDetector/service/utils/{eventUtils.mjs → responseStreamEvent.mjs} +2 -2
- package/dist/esm/components/FaceLivenessDetector/service/utils/sessionInformation.mjs +45 -0
- package/dist/esm/components/FaceLivenessDetector/shared/DefaultStartScreenComponents.mjs +3 -2
- package/dist/esm/components/FaceLivenessDetector/shared/FaceLivenessErrorModal.mjs +4 -2
- package/dist/esm/components/FaceLivenessDetector/shared/Hint.mjs +4 -7
- package/dist/esm/components/FaceLivenessDetector/types/classNames.mjs +3 -0
- package/dist/esm/components/FaceLivenessDetector/utils/device.mjs +12 -12
- package/dist/esm/index.mjs +12 -0
- package/dist/esm/version.mjs +1 -1
- package/dist/index.js +956 -775
- package/dist/styles.css +17 -2
- package/dist/types/components/FaceLivenessDetector/LivenessCheck/CameraSelector.d.ts +8 -0
- package/dist/types/components/FaceLivenessDetector/LivenessCheck/LivenessCameraModule.d.ts +1 -0
- package/dist/types/components/FaceLivenessDetector/index.d.ts +1 -0
- package/dist/types/components/FaceLivenessDetector/service/types/machine.d.ts +37 -24
- package/dist/types/components/FaceLivenessDetector/service/utils/ColorSequenceDisplay/ColorSequenceDisplay.d.ts +55 -0
- package/dist/types/components/FaceLivenessDetector/service/utils/ColorSequenceDisplay/index.d.ts +2 -0
- package/dist/types/components/FaceLivenessDetector/service/utils/StreamRecorder/StreamRecorder.d.ts +15 -0
- package/dist/types/components/FaceLivenessDetector/service/utils/StreamRecorder/index.d.ts +1 -0
- package/dist/types/components/FaceLivenessDetector/service/utils/TelemetryReporter/TelemetryReporter.d.ts +8 -0
- package/dist/types/components/FaceLivenessDetector/service/utils/TelemetryReporter/index.d.ts +2 -0
- package/dist/types/components/FaceLivenessDetector/service/utils/constants.d.ts +27 -3
- package/dist/types/components/FaceLivenessDetector/service/utils/createRequestStreamGenerator/createRequestStreamGenerator.d.ts +15 -0
- package/dist/types/components/FaceLivenessDetector/service/utils/createRequestStreamGenerator/index.d.ts +2 -0
- package/dist/types/components/FaceLivenessDetector/service/utils/createRequestStreamGenerator/utils.d.ts +30 -0
- package/dist/types/components/FaceLivenessDetector/service/utils/createStreamingClient/Signer.d.ts +0 -1
- package/dist/types/components/FaceLivenessDetector/service/utils/createStreamingClient/createStreamingClient.d.ts +27 -5
- package/dist/types/components/FaceLivenessDetector/service/utils/createStreamingClient/index.d.ts +1 -0
- package/dist/types/components/FaceLivenessDetector/service/utils/getFaceMatchStateInLivenessOval.d.ts +3 -4
- package/dist/types/components/FaceLivenessDetector/service/utils/index.d.ts +7 -4
- package/dist/types/components/FaceLivenessDetector/service/utils/liveness.d.ts +15 -26
- package/dist/types/components/FaceLivenessDetector/service/utils/{eventUtils.d.ts → responseStreamEvent.d.ts} +1 -1
- package/dist/types/components/FaceLivenessDetector/service/utils/sessionInformation.d.ts +7 -0
- package/dist/types/components/FaceLivenessDetector/service/utils/types.d.ts +21 -0
- package/dist/types/components/FaceLivenessDetector/types/classNames.d.ts +3 -0
- package/dist/types/components/FaceLivenessDetector/utils/device.d.ts +1 -0
- package/dist/types/version.d.ts +1 -1
- package/package.json +8 -8
- package/dist/esm/components/FaceLivenessDetector/service/utils/freshnessColorDisplay.mjs +0 -131
- package/dist/esm/components/FaceLivenessDetector/service/utils/streamProvider.mjs +0 -126
- package/dist/esm/components/FaceLivenessDetector/service/utils/videoRecorder.mjs +0 -108
- package/dist/types/components/FaceLivenessDetector/service/types/service.d.ts +0 -5
- package/dist/types/components/FaceLivenessDetector/service/utils/freshnessColorDisplay.d.ts +0 -21
- package/dist/types/components/FaceLivenessDetector/service/utils/streamProvider.d.ts +0 -42
- package/dist/types/components/FaceLivenessDetector/service/utils/videoRecorder.d.ts +0 -27
package/dist/styles.css
CHANGED
@@ -4250,11 +4250,26 @@ html[dir=rtl] .amplify-field-group__inner-start {
   z-index: 1;
 }
 
-.amplify-liveness-loader {
+.amplify-liveness-loader .amplify-liveness-centered-loader {
+  transform: translate(-50%, -50%);
+}
+
+.amplify-liveness-centered-loader {
   position: absolute;
   left: 50%;
   top: 50%;
-  transform: translate(-50%, -50%);
+}
+
+.amplify-liveness-connecting-loader {
+  display: flex;
+  position: absolute;
+  flex-direction: column;
+  justify-content: center;
+  align-items: center;
+  z-index: 3;
+  width: 100%;
+  height: 100%;
+  background-color: var(--amplify-colors-background-primary);
 }
 
 .amplify-liveness-oval-canvas {
package/dist/types/components/FaceLivenessDetector/LivenessCheck/CameraSelector.d.ts
ADDED
@@ -0,0 +1,8 @@
+import React from 'react';
+interface CameraSelectorProps {
+    deviceId?: string;
+    onSelect: (e: React.ChangeEvent<HTMLSelectElement>) => void;
+    devices: MediaDeviceInfo[];
+}
+export declare const CameraSelector: (props: CameraSelectorProps) => JSX.Element;
+export {};
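For orientation, a minimal sketch of rendering the new CameraSelector against the props declared above. This is illustrative only: the diff does not show CameraSelector being re-exported from the package root, so the import is assumed to resolve to the module added above, and the state handling is a placeholder.

import React from 'react';
// Assumed path: the component added in LivenessCheck/CameraSelector above.
import { CameraSelector } from './CameraSelector';

function CameraPicker({ devices }: { devices: MediaDeviceInfo[] }) {
  const [deviceId, setDeviceId] = React.useState<string | undefined>(undefined);
  return (
    <CameraSelector
      deviceId={deviceId}
      devices={devices}
      onSelect={(e) => setDeviceId(e.target.value)}
    />
  );
}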
package/dist/types/components/FaceLivenessDetector/LivenessCheck/LivenessCameraModule.d.ts
CHANGED
@@ -2,6 +2,7 @@ import React from 'react';
 import { FaceMatchState } from '../service';
 import type { InstructionDisplayText, ErrorDisplayText, HintDisplayText, StreamDisplayText, CameraDisplayText } from '../displayText';
 import type { FaceLivenessDetectorComponents } from '../shared/DefaultStartScreenComponents';
+export declare const selectChallengeType: import("../hooks").LivenessSelectorFn<string | undefined>;
 export declare const selectVideoConstraints: import("../hooks").LivenessSelectorFn<MediaTrackConstraints | undefined>;
 export declare const selectVideoStream: import("../hooks").LivenessSelectorFn<MediaStream | undefined>;
 export declare const selectFaceMatchPercentage: import("../hooks").LivenessSelectorFn<number | undefined>;
package/dist/types/components/FaceLivenessDetector/index.d.ts
CHANGED
@@ -3,3 +3,4 @@ export { default as FaceLivenessDetector } from './FaceLivenessDetector';
 export type { FaceLivenessDetectorCoreProps } from './FaceLivenessDetectorCore';
 export { default as FaceLivenessDetectorCore } from './FaceLivenessDetectorCore';
 export type { AwsCredentialProvider, AwsTemporaryCredentials, AwsCredentials, ErrorState, } from './service';
+export { FACE_MOVEMENT_CHALLENGE, FACE_MOVEMENT_AND_LIGHT_CHALLENGE, SUPPORTED_CHALLENGES, } from './service';
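A hedged sketch of importing the newly exported challenge constants follows. It assumes the package root re-exports these names (the dist/esm/index.mjs entry in the file list above gains 12 lines, which suggests so, but the root export itself is not shown verbatim in this diff).

import {
  FACE_MOVEMENT_CHALLENGE,
  FACE_MOVEMENT_AND_LIGHT_CHALLENGE,
  SUPPORTED_CHALLENGES,
} from '@aws-amplify/ui-react-liveness';

// Per the constants.d.ts hunk later in this diff, each constant is a
// ChallengeType of shape { type: string; version: string }.
const faceMovementOnlySupported = SUPPORTED_CHALLENGES.some(
  (challenge) => challenge.type === FACE_MOVEMENT_CHALLENGE.type
);
console.log(faceMovementOnlySupported, FACE_MOVEMENT_AND_LIGHT_CHALLENGE.version);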
package/dist/types/components/FaceLivenessDetector/service/types/machine.d.ts
CHANGED
@@ -1,9 +1,21 @@
 import type { ActorRef, Interpreter, State } from 'xstate';
-import type {
-import type {
+import type { FaceMovementServerChallenge, FaceMovementAndLightServerChallenge, InternalServerException, ServiceQuotaExceededException, SessionInformation, ValidationException, ThrottlingException } from '@aws-sdk/client-rekognitionstreaming';
+import type { FACE_MOVEMENT_CHALLENGE, FACE_MOVEMENT_AND_LIGHT_CHALLENGE, StreamRecorder, ColorSequenceDisplay } from '../utils';
 import type { ErrorState } from './error';
-import type { VideoRecorder, LivenessStreamProvider, FreshnessColorDisplay } from '../utils';
 import type { Face, FaceDetection } from './faceDetection';
+import type { FaceLivenessDetectorCoreProps, FaceMatchState, LivenessOvalDetails, IlluminationState } from './liveness';
+interface Challenge {
+    Name: string;
+}
+export interface FaceMovementAndLightChallenge extends Challenge, FaceMovementAndLightServerChallenge {
+    Name: (typeof FACE_MOVEMENT_AND_LIGHT_CHALLENGE)['type'];
+}
+export interface FaceMovementChallenge extends Challenge, FaceMovementServerChallenge {
+    Name: (typeof FACE_MOVEMENT_CHALLENGE)['type'];
+}
+export interface ParsedSessionInformation {
+    Challenge: FaceMovementChallenge | FaceMovementAndLightChallenge | undefined;
+}
 export interface FaceMatchAssociatedParams {
     illuminationState?: IlluminationState;
     faceMatchState?: FaceMatchState;
@@ -16,7 +28,6 @@ export interface FreshnessColorAssociatedParams {
     freshnessColorEl?: HTMLCanvasElement;
     freshnessColors?: string[];
     freshnessColorsComplete?: boolean;
-    freshnessColorDisplay?: FreshnessColorDisplay;
 }
 export interface OvalAssociatedParams {
     faceDetector?: FaceDetection;
@@ -29,30 +40,31 @@ export interface VideoAssociatedParams {
     videoEl?: HTMLVideoElement;
     canvasEl?: HTMLCanvasElement;
     videoMediaStream?: MediaStream;
-
-    recordingStartTimestampMs?: number;
+    recordingStartTimestamp?: number;
     isMobile?: boolean;
     selectedDeviceId?: string;
     selectableDevices?: MediaDeviceInfo[];
 }
 export interface LivenessContext {
-    challengeId
-
-
-    errorMessage
-
-
-
-
-
-
-
-
-
-
-
-
-
+    challengeId: string | undefined;
+    colorSequenceDisplay: ColorSequenceDisplay | undefined;
+    componentProps: FaceLivenessDetectorCoreProps | undefined;
+    errorMessage: string | undefined;
+    errorState: ErrorState | undefined;
+    faceMatchAssociatedParams: FaceMatchAssociatedParams | undefined;
+    faceMatchStateBeforeStart: FaceMatchState | undefined;
+    failedAttempts: number | undefined;
+    freshnessColorAssociatedParams: FreshnessColorAssociatedParams | undefined;
+    isFaceFarEnoughBeforeRecording: boolean | undefined;
+    isRecordingStopped: boolean | undefined;
+    livenessStreamProvider: StreamRecorder | undefined;
+    maxFailedAttempts: number | undefined;
+    ovalAssociatedParams: OvalAssociatedParams | undefined;
+    parsedSessionInformation: ParsedSessionInformation | undefined;
+    responseStreamActorRef: ActorRef<any> | undefined;
+    serverSessionInformation: SessionInformation | undefined;
+    shouldDisconnect: boolean | undefined;
+    videoAssociatedParams: VideoAssociatedParams | undefined;
 }
 export type LivenessEventTypes = 'BEGIN' | 'CONNECTION_TIMEOUT' | 'START_RECORDING' | 'TIMEOUT' | 'ERROR' | 'CANCEL' | 'SET_SESSION_INFO' | 'DISCONNECT_EVENT' | 'SET_DOM_AND_CAMERA_DETAILS' | 'UPDATE_DEVICE_AND_STREAM' | 'SERVER_ERROR' | 'RUNTIME_ERROR' | 'RETRY_CAMERA_CHECK' | 'MOBILE_LANDSCAPE_WARNING';
 export type LivenessEventData = Record<PropertyKey, any>;
@@ -99,7 +111,8 @@ export interface StreamActorCallback {
     (params: {
         type: 'SET_SESSION_INFO';
         data: {
-
+            serverSessionInformation: SessionInformation | undefined;
        };
     }): void;
 }
+export {};
package/dist/types/components/FaceLivenessDetector/service/utils/ColorSequenceDisplay/ColorSequenceDisplay.d.ts
ADDED
@@ -0,0 +1,55 @@
+interface SequenceColors {
+    sequenceColor: SequenceColorValue;
+    prevSequenceColor: SequenceColorValue;
+}
+export interface SequenceChangeParams extends SequenceColors {
+    sequenceIndex: number;
+    sequenceStartTime: number;
+}
+export interface SequenceColorChangeParams extends SequenceColors {
+    heightFraction: number;
+}
+export type SequenceColorValue = `rgb(${string},${string},${string})`;
+export interface ColorSequence {
+    color: SequenceColorValue;
+    downscrollDuration: number;
+    flatDisplayDuration: number;
+}
+type OnSequenceChange = (params: SequenceChangeParams) => void;
+export interface StartSequencesParams {
+    /**
+     * called on sequence change
+     */
+    onSequenceChange?: OnSequenceChange;
+    /**
+     * called on sequence color change
+     */
+    onSequenceColorChange?: (params: SequenceColorChangeParams) => void;
+    /**
+     * called on sequence start
+     */
+    onSequenceStart?: () => void;
+    /**
+     * called on all sequences complete
+     */
+    onSequencesComplete?: () => void;
+}
+export type ColorSequences = ColorSequence[];
+export declare class ColorSequenceDisplay {
+    #private;
+    /**
+     * Iterates over provided color sequences and executes sequence event callbacks
+     *
+     * @param {ColorSequences} colorSequences array of color sequences to iterate over
+     */
+    constructor(colorSequences: ColorSequences);
+    /**
+     * Start sequence iteration and execute event callbacks
+     *
+     * @async
+     * @param {StartSequencesParams} params Sequence event handlers
+     * @returns {Promise<boolean>} Resolves to true when complete
+     */
+    startSequences(params?: StartSequencesParams): Promise<boolean>;
+}
+export {};
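To make the new ColorSequenceDisplay surface easier to read, here is a minimal consumer-side sketch built only from the declarations above. ColorSequenceDisplay is an internal utility (exported from service/utils, not from the package root), and the color values, durations, and callback bodies below are placeholders.

import { ColorSequenceDisplay, type ColorSequences } from './ColorSequenceDisplay';

// Placeholder sequences; in the component the sequences come from the parsed
// session information (see getColorsSequencesFromSessionInformation in the
// liveness.d.ts hunk later in this diff).
const sequences: ColorSequences = [
  { color: 'rgb(255,255,255)', downscrollDuration: 0, flatDisplayDuration: 100 },
  { color: 'rgb(255,0,0)', downscrollDuration: 300, flatDisplayDuration: 100 },
];

const display = new ColorSequenceDisplay(sequences);

// startSequences resolves once every sequence has been displayed.
const completed = await display.startSequences({
  onSequenceStart: () => console.log('sequence started'),
  onSequenceColorChange: ({ sequenceColor, heightFraction }) =>
    console.log(sequenceColor, heightFraction),
  onSequencesComplete: () => console.log('all sequences complete'),
});
console.log('completed:', completed);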
package/dist/types/components/FaceLivenessDetector/service/utils/StreamRecorder/StreamRecorder.d.ts
ADDED
@@ -0,0 +1,15 @@
+import type { VideoStream, StreamResult, StreamResultType } from '../types';
+export declare class StreamRecorder {
+    #private;
+    constructor(stream: MediaStream);
+    getVideoStream(): VideoStream;
+    setNewVideoStream(stream: MediaStream): void;
+    dispatchStreamEvent<T extends StreamResultType>(event: T extends 'streamStop' ? Pick<StreamResult<T>, 'type'> : StreamResult<T>): void;
+    getRecordingStartTimestamp(): number;
+    getRecordingEndedTimestamp(): number;
+    startRecording(): void;
+    isRecording(): boolean;
+    getChunksLength(): number;
+    hasRecordingStarted(): boolean;
+    stopRecording(): Promise<void>;
+}
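A short sketch of how the StreamRecorder declarations above fit together. This is inferred purely from the typings (StreamRecorder is an internal utility); getUserMedia is the standard browser API, not part of this package, and the explicit type argument on dispatchStreamEvent is only there to satisfy the conditional parameter type.

import { StreamRecorder } from './StreamRecorder';

async function recordOnce(): Promise<void> {
  const mediaStream = await navigator.mediaDevices.getUserMedia({ video: true });

  const recorder = new StreamRecorder(mediaStream);
  recorder.startRecording();

  // Recorded chunks and client events surface through this VideoStream
  // (a ReadableStream<StreamResult>, per the types.d.ts hunk later in the diff).
  const videoStream = recorder.getVideoStream();
  console.log('recording started:', recorder.hasRecordingStarted(), videoStream);

  // Signal the end of the stream, then stop the underlying recorder.
  recorder.dispatchStreamEvent<'streamStop'>({ type: 'streamStop' });
  await recorder.stopRecording();

  console.log(recorder.getRecordingStartTimestamp(), recorder.getRecordingEndedTimestamp());
}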
package/dist/types/components/FaceLivenessDetector/service/utils/StreamRecorder/index.d.ts
ADDED
@@ -0,0 +1 @@
+export { StreamRecorder } from './StreamRecorder';
package/dist/types/components/FaceLivenessDetector/service/utils/TelemetryReporter/TelemetryReporter.d.ts
ADDED
@@ -0,0 +1,8 @@
+import type { StartFaceLivenessSessionCommandInput, StartFaceLivenessSessionCommandOutput } from '@aws-sdk/client-rekognitionstreaming';
+import type { BuildHandler, BuildHandlerArguments, BuildHandlerOutput } from '@smithy/types';
+export declare class TelemetryReporter {
+    static attemptCount: number;
+    static timestamp: number;
+    static getAttemptCountAndUpdateTimestamp(): number;
+}
+export declare const createTelemetryReporterMiddleware: (attemptCount: number, preCheckViewEnabled: boolean) => (next: BuildHandler<StartFaceLivenessSessionCommandInput, StartFaceLivenessSessionCommandOutput>) => (args: BuildHandlerArguments<StartFaceLivenessSessionCommandInput>) => Promise<BuildHandlerOutput<StartFaceLivenessSessionCommandOutput>>;
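A hedged sketch of attaching the middleware factory declared above to an AWS SDK v3 client. The RekognitionStreamingClient construction and the { step: 'build' } option follow the standard Smithy middleware pattern implied by the BuildHandler typing; they are assumptions for illustration, not usage taken from this package.

import { RekognitionStreamingClient } from '@aws-sdk/client-rekognitionstreaming';
import { createTelemetryReporterMiddleware, TelemetryReporter } from './TelemetryReporter';

const client = new RekognitionStreamingClient({ region: 'us-east-1' });

// attemptCount is tracked statically on TelemetryReporter per the declaration above.
client.middlewareStack.add(
  createTelemetryReporterMiddleware(
    TelemetryReporter.getAttemptCountAndUpdateTimestamp(),
    /* preCheckViewEnabled */ true
  ),
  { step: 'build' }
);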
package/dist/types/components/FaceLivenessDetector/service/utils/constants.d.ts
CHANGED
@@ -1,12 +1,10 @@
-export declare const FACE_DISTANCE_THRESHOLD = 0.32;
-export declare const REDUCED_THRESHOLD = 0.4;
-export declare const REDUCED_THRESHOLD_MOBILE = 0.37;
 export declare const PUPIL_DISTANCE_WEIGHT = 2;
 export declare const FACE_HEIGHT_WEIGHT = 1.8;
 export declare const FACE_MATCH_RANGE_MIN = 0;
 export declare const FACE_MATCH_RANGE_MAX = 1;
 export declare const FACE_MATCH_WEIGHT_MIN = 0.25;
 export declare const FACE_MATCH_WEIGHT_MAX = 0.75;
+export declare const OVAL_HEIGHT_WIDTH_RATIO = 1.618;
 export declare const WS_CLOSURE_CODE: {
     SUCCESS_CODE: number;
     DEFAULT_ERROR_CODE: number;
@@ -15,3 +13,29 @@ export declare const WS_CLOSURE_CODE: {
     RUNTIME_ERROR: number;
     USER_ERROR_DURING_CONNECTION: number;
 };
+export declare const TIME_SLICE = 1000;
+export declare const TICK_RATE = 10;
+/**
+ * The number of seconds before the presigned URL expires.
+ * Used to override aws sdk default value of 60
+ */
+export declare const REQUEST_EXPIRY = 299;
+/**
+ * The maximum time in milliseconds that the connection phase of a request
+ * may take before the connection attempt is abandoned.
+ */
+export declare const CONNECTION_TIMEOUT = 10000;
+/**
+ * Indicates connection success
+ */
+export declare const SUCCESS_STATUS_CODE = 200;
+interface ChallengeType {
+    type: string;
+    version: string;
+}
+export declare const FACE_MOVEMENT_AND_LIGHT_CHALLENGE: ChallengeType;
+export declare const FACE_MOVEMENT_CHALLENGE: ChallengeType;
+export declare const SUPPORTED_CHALLENGES: ChallengeType[];
+export declare const queryParameterString: string;
+export declare const DEFAULT_WS_CONNECTION_TIMEOUT_MS = 2000;
+export {};
package/dist/types/components/FaceLivenessDetector/service/utils/createRequestStreamGenerator/createRequestStreamGenerator.d.ts
ADDED
@@ -0,0 +1,15 @@
+import type { LivenessRequestStream } from '@aws-sdk/client-rekognitionstreaming';
+import type { VideoStream } from '../types';
+interface RequestStream extends AsyncGenerator<LivenessRequestStream> {
+}
+type GetRequestStream = () => RequestStream;
+/**
+ * Creates an async generator that reads over the provided stream and yielding stream results
+ *
+ * @param {VideoStream} stream target video stream
+ * @returns {GetRequestStream} async request stream generator
+ */
+export declare function createRequestStreamGenerator(stream: VideoStream): {
+    getRequestStream: GetRequestStream;
+};
+export {};
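To show where createRequestStreamGenerator sits in the new streaming path, a small sketch based only on the declared types: it takes the VideoStream produced by a StreamRecorder and returns a factory for the async request-stream generator that the streaming client consumes. The media stream here is a placeholder.

import { StreamRecorder } from '../StreamRecorder';
import { createRequestStreamGenerator } from './createRequestStreamGenerator';

declare const mediaStream: MediaStream; // obtained elsewhere, e.g. via getUserMedia

const recorder = new StreamRecorder(mediaStream);
const { getRequestStream } = createRequestStreamGenerator(recorder.getVideoStream());

// An AsyncGenerator<LivenessRequestStream>, to be passed to getResponseStream
// (see the createStreamingClient hunk below).
const requestStream = getRequestStream();
console.log(typeof requestStream.next);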
package/dist/types/components/FaceLivenessDetector/service/utils/createRequestStreamGenerator/utils.d.ts
ADDED
@@ -0,0 +1,30 @@
+import type { ClientSessionInformationEvent, VideoEvent } from '@aws-sdk/client-rekognitionstreaming';
+import type { LivenessContext, ParsedSessionInformation } from '../../types';
+import type { SequenceChangeParams } from '../ColorSequenceDisplay';
+import type { StreamResult } from '../types';
+interface TrackDimensions {
+    trackHeight: number;
+    trackWidth: number;
+}
+export declare const createVideoEvent: (result: Exclude<StreamResult, StreamResult<'sessionInfo'>>) => Promise<VideoEvent>;
+export declare const getTrackDimensions: (stream: MediaStream) => TrackDimensions;
+interface CreateSessionEndEventParams extends TrackDimensions {
+    parsedSessionInformation: ParsedSessionInformation;
+    challengeId: NonNullable<LivenessContext['challengeId']>;
+    faceMatchAssociatedParams: NonNullable<LivenessContext['faceMatchAssociatedParams']>;
+    ovalAssociatedParams: NonNullable<LivenessContext['ovalAssociatedParams']>;
+    recordingEndedTimestamp: number;
+}
+export declare function createSessionEndEvent({ parsedSessionInformation, challengeId, faceMatchAssociatedParams, ovalAssociatedParams, recordingEndedTimestamp, trackHeight, trackWidth, }: CreateSessionEndEventParams): ClientSessionInformationEvent;
+interface CreateSessionStartEventParams extends TrackDimensions {
+    parsedSessionInformation: ParsedSessionInformation;
+    challengeId: NonNullable<LivenessContext['challengeId']>;
+    ovalAssociatedParams: NonNullable<LivenessContext['ovalAssociatedParams']>;
+    recordingStartedTimestamp: number;
+}
+export declare function createSessionStartEvent({ parsedSessionInformation, challengeId, ovalAssociatedParams, recordingStartedTimestamp, trackHeight, trackWidth, }: CreateSessionStartEventParams): ClientSessionInformationEvent;
+interface CreateColorDisplayEventParams extends SequenceChangeParams {
+    challengeId: string;
+}
+export declare function createColorDisplayEvent({ challengeId, sequenceStartTime, sequenceIndex, sequenceColor, prevSequenceColor, }: CreateColorDisplayEventParams): ClientSessionInformationEvent;
+export {};
package/dist/types/components/FaceLivenessDetector/service/utils/createStreamingClient/Signer.d.ts
CHANGED
@@ -1,6 +1,5 @@
 import { SignatureV4 } from '@smithy/signature-v4';
 import type { HttpRequest as HttpRequest, RequestPresigningArguments } from './types';
-export declare const REQUEST_EXPIRY = 299;
 export declare class Signer extends SignatureV4 {
     presign(request: HttpRequest, options?: Omit<RequestPresigningArguments, 'expiresIn'>): Promise<HttpRequest>;
 }
package/dist/types/components/FaceLivenessDetector/service/utils/createStreamingClient/createStreamingClient.d.ts
CHANGED
@@ -1,10 +1,32 @@
-import {
+import type { LivenessRequestStream, LivenessResponseStream, StartFaceLivenessSessionCommandInput } from '@aws-sdk/client-rekognitionstreaming';
 import type { AwsCredentialProvider } from '../../types';
-interface
-
-
+export interface RequestStream extends AsyncGenerator<LivenessRequestStream> {
+}
+export interface ResponseStream extends AsyncIterable<LivenessResponseStream> {
+}
+export interface CreateClientConfig {
+    credentialsProvider: AwsCredentialProvider | undefined;
+    endpointOverride: string | undefined;
     region: string;
+    attemptCount: number;
+    preCheckViewEnabled: boolean;
     systemClockOffset?: number;
 }
-
+interface GetResponseStreamInput {
+    requestStream: RequestStream;
+    sessionId: StartFaceLivenessSessionCommandInput['SessionId'];
+    videoHeight: StartFaceLivenessSessionCommandInput['VideoHeight'];
+    videoWidth: StartFaceLivenessSessionCommandInput['VideoWidth'];
+}
+type GetReponseStream = (input: GetResponseStreamInput) => Promise<ResponseStream>;
+/**
+ * Initializes an instance of the Rekognition streaming client, returns `getResponseStream`
+ *
+ * @async
+ * @param clientConfig configuration fpr the client
+ * @returns {Promise<{ getResponseStream: GetReponseStream }>}
+ */
+export declare function createStreamingClient(clientConfig: CreateClientConfig): Promise<{
+    getResponseStream: GetReponseStream;
+}>;
 export {};
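A sketch of the createStreamingClient flow using only the declarations above; the region, session id, and video dimensions are placeholders, credentialsProvider is left undefined (the declared type allows it), and the request stream is assumed to come from createRequestStreamGenerator as sketched earlier.

import { createStreamingClient, type RequestStream } from './createStreamingClient';

declare const requestStream: RequestStream; // from createRequestStreamGenerator().getRequestStream()

const { getResponseStream } = await createStreamingClient({
  credentialsProvider: undefined,
  endpointOverride: undefined,
  region: 'us-east-1',
  attemptCount: 1,
  preCheckViewEnabled: true,
});

const responseStream = await getResponseStream({
  requestStream,
  sessionId: 'placeholder-session-id',
  videoWidth: '640',
  videoHeight: '480',
});

// ResponseStream is an AsyncIterable<LivenessResponseStream>.
for await (const event of responseStream) {
  console.log(event);
}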
package/dist/types/components/FaceLivenessDetector/service/utils/getFaceMatchStateInLivenessOval.d.ts
CHANGED
@@ -1,17 +1,16 @@
-import type { LivenessOvalDetails,
+import type { Face, LivenessOvalDetails, ParsedSessionInformation } from '../types';
 import { FaceMatchState } from '../types';
-import type { SessionInformation } from '@aws-sdk/client-rekognitionstreaming';
 interface MatchStateInOvalParams {
     face: Face;
     ovalDetails: LivenessOvalDetails;
     initialFaceIntersection: number;
-
+    parsedSessionInformation: ParsedSessionInformation;
     frameHeight: number;
 }
 /**
  * Returns the state of the provided face with respect to the provided liveness oval.
  */
-export declare function getFaceMatchStateInLivenessOval({ face, ovalDetails, initialFaceIntersection,
+export declare function getFaceMatchStateInLivenessOval({ face, ovalDetails, initialFaceIntersection, parsedSessionInformation, frameHeight, }: MatchStateInOvalParams): {
     faceMatchState: FaceMatchState;
     faceMatchPercentage: number;
 };
package/dist/types/components/FaceLivenessDetector/service/utils/index.d.ts
CHANGED
@@ -1,7 +1,10 @@
 export * from './blazefaceFaceDetection';
-export * from './
+export * from './getFaceMatchStateInLivenessOval';
 export * from './support';
 export * from './liveness';
-export
-export
-export
+export { ColorSequenceDisplay } from './ColorSequenceDisplay';
+export { FACE_MOVEMENT_CHALLENGE, FACE_MOVEMENT_AND_LIGHT_CHALLENGE, SUPPORTED_CHALLENGES, } from './constants';
+export { createRequestStreamGenerator, createSessionStartEvent, createSessionEndEvent, createColorDisplayEvent, getTrackDimensions, } from './createRequestStreamGenerator';
+export { createStreamingClient } from './createStreamingClient';
+export { isFaceMovementAndLightChallenge, isFaceMovementChallenge, createSessionInfoFromServerSessionInformation, } from './sessionInformation';
+export { StreamRecorder } from './StreamRecorder';
package/dist/types/components/FaceLivenessDetector/service/utils/liveness.d.ts
CHANGED
@@ -1,8 +1,6 @@
-import type { LivenessOvalDetails, Face, BoundingBox, ErrorState } from '../types';
+import type { LivenessOvalDetails, Face, BoundingBox, ErrorState, ParsedSessionInformation, FaceDetection } from '../types';
 import { IlluminationState, FaceMatchState } from '../types';
-import type {
-import type { ClientFreshnessColorSequence } from '../types/service';
-import type { SessionInformation } from '@aws-sdk/client-rekognitionstreaming';
+import type { ColorSequence } from './ColorSequenceDisplay';
 interface OvalBoundingBox {
     ovalBoundingBox: BoundingBox;
     minOvalX: number;
@@ -22,20 +20,21 @@ export declare function getIntersectionOverUnion(box1: BoundingBox, box2: Boundi
  * Returns the details of a randomly generated liveness oval
  * from SDK
  */
-export declare function getOvalDetailsFromSessionInformation({
-
+export declare function getOvalDetailsFromSessionInformation({ parsedSessionInformation, videoWidth, }: {
+    parsedSessionInformation: ParsedSessionInformation;
     videoWidth: number;
 }): LivenessOvalDetails;
 /**
  * Returns the details of a statically generated liveness oval based on the video dimensions
 */
-export declare function getStaticLivenessOvalDetails({ width, height, widthSeed, centerXSeed, centerYSeed, ratioMultiplier, }: {
+export declare function getStaticLivenessOvalDetails({ width, height, widthSeed, centerXSeed, centerYSeed, ratioMultiplier, ovalHeightWidthRatio, }: {
     width: number;
     height: number;
     widthSeed?: number;
     centerXSeed?: number;
     centerYSeed?: number;
     ratioMultiplier?: number;
+    ovalHeightWidthRatio?: number;
 }): LivenessOvalDetails;
 /**
  * Draws the provided liveness oval on the canvas.
@@ -51,7 +50,12 @@ export declare function drawStaticOval(canvasEl: HTMLCanvasElement, videoEl: HTM
 export declare function clearOvalCanvas({ canvas, }: {
     canvas: HTMLCanvasElement;
 }): void;
-export declare function generateBboxFromLandmarks(
+export declare function generateBboxFromLandmarks({ ovalHeightWidthRatio, face, oval, frameHeight, }: {
+    ovalHeightWidthRatio?: number;
+    face: Face;
+    oval: LivenessOvalDetails;
+    frameHeight: number;
+}): BoundingBox;
 /**
  * Returns the illumination state in the provided video frame.
  */
@@ -83,31 +87,16 @@ interface FillOverlayCanvasFractionalInput {
     scaleFactor: number;
 }
 export declare function fillOverlayCanvasFractional({ overlayCanvas, prevColor, nextColor, videoEl, ovalDetails, heightFraction, scaleFactor, }: FillOverlayCanvasFractionalInput): void;
-export declare
-export declare function getColorsSequencesFromSessionInformation(sessionInformation: SessionInformation): ClientFreshnessColorSequence[];
-export declare function getRGBArrayFromColorString(colorStr: string): number[];
+export declare function getColorsSequencesFromSessionInformation(parsedSessionInformation: ParsedSessionInformation): ColorSequence[];
 export declare function getFaceMatchState(faceDetector: FaceDetection, videoEl: HTMLVideoElement): Promise<FaceMatchState>;
-export declare function isFaceDistanceBelowThreshold({ faceDetector, videoEl, ovalDetails, reduceThreshold,
+export declare function isFaceDistanceBelowThreshold({ parsedSessionInformation, faceDetector, videoEl, ovalDetails, reduceThreshold, }: {
+    parsedSessionInformation: ParsedSessionInformation;
     faceDetector: FaceDetection;
     videoEl: HTMLVideoElement;
     ovalDetails: LivenessOvalDetails;
     reduceThreshold?: boolean;
-    isMobile?: boolean;
 }): Promise<{
     isDistanceBelowThreshold: boolean;
     error?: ErrorState;
 }>;
-export declare function getBoundingBox({ deviceHeight, deviceWidth, height, width, top, left, }: {
-    deviceHeight: number;
-    deviceWidth: number;
-    height: number;
-    width: number;
-    top: number;
-    left: number;
-}): {
-    Height: number;
-    Width: number;
-    Top: number;
-    Left: number;
-};
 export {};
package/dist/esm/components/FaceLivenessDetector/service/utils/{eventUtils.d.ts → responseStreamEvent.d.ts}
RENAMED
@@ -1,5 +1,5 @@
 import type { LivenessResponseStream } from '@aws-sdk/client-rekognitionstreaming';
-export declare const
+export declare const isServerSessionInformationEvent: (value: unknown) => value is LivenessResponseStream.ServerSessionInformationEventMember;
 export declare const isConnectionTimeoutError: (error: unknown) => error is Error;
 export declare const isDisconnectionEvent: (value: unknown) => value is LivenessResponseStream.DisconnectionEventMember;
 export declare const isValidationExceptionEvent: (value: unknown) => value is LivenessResponseStream.ValidationExceptionMember;
package/dist/types/components/FaceLivenessDetector/service/utils/sessionInformation.d.ts
ADDED
@@ -0,0 +1,7 @@
+import type { FaceMovementServerChallenge, FaceMovementAndLightServerChallenge, SessionInformation as ServerSessionInformation } from '@aws-sdk/client-rekognitionstreaming';
+import type { FaceMovementAndLightChallenge, FaceMovementChallenge, ParsedSessionInformation } from '../types';
+export declare const isFaceMovementAndLightChallenge: (value: unknown) => value is FaceMovementAndLightChallenge;
+export declare const isFaceMovementChallenge: (value: unknown) => value is FaceMovementChallenge;
+export declare const isFaceMovementAndLightServerChallenge: (value: unknown) => value is FaceMovementAndLightServerChallenge;
+export declare const isFaceMovementServerChallenge: (value: unknown) => value is FaceMovementServerChallenge;
+export declare const createSessionInfoFromServerSessionInformation: (serverSessionInformation: ServerSessionInformation) => ParsedSessionInformation;
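A brief sketch of the new parsing helpers, again based only on the declarations above: the raw SessionInformation from a server event is converted into a ParsedSessionInformation, and the type guards narrow the challenge variant. The server value below is a placeholder.

import type { SessionInformation } from '@aws-sdk/client-rekognitionstreaming';
import {
  createSessionInfoFromServerSessionInformation,
  isFaceMovementAndLightChallenge,
  isFaceMovementChallenge,
} from './sessionInformation';

declare const serverSessionInformation: SessionInformation; // from a ServerSessionInformationEvent

const parsed = createSessionInfoFromServerSessionInformation(serverSessionInformation);

if (isFaceMovementAndLightChallenge(parsed.Challenge)) {
  console.log('face movement and light challenge', parsed.Challenge.Name);
} else if (isFaceMovementChallenge(parsed.Challenge)) {
  console.log('face movement challenge', parsed.Challenge.Name);
}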
package/dist/types/components/FaceLivenessDetector/service/utils/types.d.ts
ADDED
@@ -0,0 +1,21 @@
+import type { ClientSessionInformationEvent, LivenessRequestStream } from '@aws-sdk/client-rekognitionstreaming';
+export interface RequestStream extends AsyncGenerator<LivenessRequestStream> {
+}
+export interface VideoStream extends ReadableStream<StreamResult> {
+}
+export type StreamResultType = 'closeCode' | 'sessionInfo' | 'streamStop' | 'streamVideo';
+export type StreamResult<T extends StreamResultType = StreamResultType> = T extends 'closeCode' ? {
+    type: T;
+    data: {
+        closeCode: number;
+    };
+} : T extends 'streamVideo' ? {
+    type: T;
+    data: Blob;
+} : T extends 'sessionInfo' ? {
+    type: T;
+    data: ClientSessionInformationEvent;
+} : T extends 'streamStop' ? {
+    type: T;
+    data?: never;
+} : never;
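Because StreamResult is a discriminated union keyed on type, consumers can narrow it with a plain switch; a minimal sketch with placeholder handlers:

import type { StreamResult } from './types';

function handleStreamResult(result: StreamResult): void {
  switch (result.type) {
    case 'streamVideo':
      console.log('video chunk size:', result.data.size); // data: Blob
      break;
    case 'sessionInfo':
      console.log('client session info event:', result.data); // data: ClientSessionInformationEvent
      break;
    case 'closeCode':
      console.log('close code:', result.data.closeCode);
      break;
    case 'streamStop':
      console.log('stream stopped'); // data is never set for this variant
      break;
  }
}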
package/dist/types/components/FaceLivenessDetector/types/classNames.d.ts
CHANGED
@@ -2,6 +2,8 @@ export declare enum LivenessClassNames {
     CameraModule = "amplify-liveness-camera-module",
     CancelContainer = "amplify-liveness-cancel-container",
     CancelButton = "amplify-liveness-cancel-button",
+    CenteredLoader = "amplify-liveness-centered-loader",
+    ConnectingLoader = "amplify-liveness-connecting-loader",
     CountdownContainer = "amplify-liveness-countdown-container",
     DescriptionBullet = "amplify-liveness-description-bullet",
     DescriptionBulletIndex = "amplify-liveness-description-bullet__index",
@@ -41,6 +43,7 @@ export declare enum LivenessClassNames {
     Toast = "amplify-liveness-toast",
     ToastContainer = "amplify-liveness-toast__container",
     ToastMessage = "amplify-liveness-toast__message",
+    UserFacingVideo = "amplify-liveness-video--user-facing",
     Video = "amplify-liveness-video",
     VideoAnchor = "amplify-liveness-video-anchor"
 }
package/dist/types/version.d.ts
CHANGED
@@ -1 +1 @@
-export declare const VERSION = "3.3.9";
+export declare const VERSION = "3.4.0";
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@aws-amplify/ui-react-liveness",
-  "version": "3.3.9",
+  "version": "3.4.0",
   "main": "dist/index.js",
   "module": "dist/esm/index.mjs",
   "exports": {
@@ -50,13 +50,13 @@
   "dependencies": {
     "@aws-amplify/ui": "6.10.3",
     "@aws-amplify/ui-react": "6.11.2",
-    "@aws-sdk/client-rekognitionstreaming": "3.
+    "@aws-sdk/client-rekognitionstreaming": "3.828.0",
     "@aws-sdk/util-format-url": "3.609.0",
-    "@smithy/eventstream-serde-browser": "^
-    "@smithy/fetch-http-handler": "^
+    "@smithy/eventstream-serde-browser": "^4.0.4",
+    "@smithy/fetch-http-handler": "^5.0.4",
     "@smithy/protocol-http": "^3.0.3",
-    "@smithy/signature-v4": "
-    "@smithy/types": "^
+    "@smithy/signature-v4": "5.1.2",
+    "@smithy/types": "^4.3.1",
     "@mediapipe/face_detection": "~0.4.0",
     "@tensorflow-models/face-detection": "1.0.2",
     "@tensorflow/tfjs-backend-cpu": "4.11.0",
@@ -64,8 +64,8 @@
     "@tensorflow/tfjs-converter": "4.11.0",
     "@tensorflow/tfjs-core": "4.11.0",
     "@xstate/react": "^3.2.2",
-    "nanoid": "3.3.8",
     "tslib": "^2.5.2",
+    "uuid": "^11.1.0",
     "xstate": "^4.33.6"
   },
   "devDependencies": {
@@ -85,4 +85,4 @@
       "limit": "225 kB"
     }
   ]
-}
+}