@aws-amplify/ui-react-liveness 3.0.13 → 3.0.15

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. package/dist/esm/components/FaceLivenessDetector/FaceLivenessDetectorCore.mjs +1 -5
  2. package/dist/esm/components/FaceLivenessDetector/LivenessCheck/LivenessCameraModule.mjs +4 -15
  3. package/dist/esm/components/FaceLivenessDetector/LivenessCheck/LivenessCheck.mjs +1 -5
  4. package/dist/esm/components/FaceLivenessDetector/service/machine/index.mjs +144 -158
  5. package/dist/esm/components/FaceLivenessDetector/service/utils/{CustomWebSocketFetchHandler.mjs → createStreamingClient/CustomWebSocketFetchHandler.mjs} +4 -5
  6. package/dist/esm/components/FaceLivenessDetector/service/utils/createStreamingClient/Signer.mjs +18 -0
  7. package/dist/esm/components/FaceLivenessDetector/service/utils/createStreamingClient/createStreamingClient.mjs +27 -0
  8. package/dist/esm/components/FaceLivenessDetector/service/utils/createStreamingClient/resolveCredentials.mjs +38 -0
  9. package/dist/esm/components/FaceLivenessDetector/service/utils/liveness.mjs +62 -67
  10. package/dist/esm/components/FaceLivenessDetector/service/utils/streamProvider.mjs +14 -35
  11. package/dist/esm/components/FaceLivenessDetector/service/utils/videoRecorder.mjs +1 -2
  12. package/dist/esm/components/FaceLivenessDetector/shared/DefaultStartScreenComponents.mjs +1 -5
  13. package/dist/esm/components/FaceLivenessDetector/shared/FaceLivenessErrorModal.mjs +1 -5
  14. package/dist/esm/components/FaceLivenessDetector/shared/Hint.mjs +1 -5
  15. package/dist/esm/components/FaceLivenessDetector/types/classNames.mjs +0 -5
  16. package/dist/esm/version.mjs +1 -1
  17. package/dist/index.js +294 -275
  18. package/dist/styles.css +1 -1
  19. package/dist/types/components/FaceLivenessDetector/hooks/useLivenessActor.d.ts +1 -1
  20. package/dist/types/components/FaceLivenessDetector/service/machine/index.d.ts +3 -4
  21. package/dist/types/components/FaceLivenessDetector/service/types/credentials.d.ts +9 -10
  22. package/dist/types/components/FaceLivenessDetector/service/types/machine.d.ts +17 -18
  23. package/dist/types/components/FaceLivenessDetector/service/utils/createStreamingClient/Signer.d.ts +6 -0
  24. package/dist/types/components/FaceLivenessDetector/service/utils/createStreamingClient/createStreamingClient.d.ts +9 -0
  25. package/dist/types/components/FaceLivenessDetector/service/utils/createStreamingClient/index.d.ts +1 -0
  26. package/dist/types/components/FaceLivenessDetector/service/utils/createStreamingClient/resolveCredentials.d.ts +10 -0
  27. package/dist/types/components/FaceLivenessDetector/service/utils/liveness.d.ts +7 -9
  28. package/dist/types/components/FaceLivenessDetector/service/utils/streamProvider.d.ts +3 -10
  29. package/dist/types/components/FaceLivenessDetector/service/utils/videoRecorder.d.ts +1 -7
  30. package/dist/types/components/FaceLivenessDetector/shared/index.d.ts +0 -3
  31. package/dist/types/components/FaceLivenessDetector/types/classNames.d.ts +0 -5
  32. package/dist/types/version.d.ts +1 -1
  33. package/package.json +5 -4
  34. package/dist/types/components/FaceLivenessDetector/shared/GoodFitIllustration.d.ts +0 -7
  35. package/dist/types/components/FaceLivenessDetector/shared/StartScreenFigure.d.ts +0 -8
  36. package/dist/types/components/FaceLivenessDetector/shared/TooFarIllustration.d.ts +0 -7
  37. /package/dist/types/components/FaceLivenessDetector/service/utils/{CustomWebSocketFetchHandler.d.ts → createStreamingClient/CustomWebSocketFetchHandler.d.ts} +0 -0
@@ -0,0 +1,18 @@
1
+ import { SignatureV4 } from '@smithy/signature-v4';
2
+
3
+ // override aws sdk default value of 60
4
+ const REQUEST_EXPIRY = 299;
5
+ class Signer extends SignatureV4 {
6
+ presign(request, options) {
7
+ return super.presign(request, {
8
+ ...options,
9
+ expiresIn: REQUEST_EXPIRY,
10
+ // `headers` that should not be signed. Liveness WebSocket
11
+ // request omits `headers` except for required `host` header. Signature
12
+ // could be a mismatch if other `headers` are signed
13
+ unsignableHeaders: new Set(Object.keys(request.headers).filter((header) => header !== 'host')),
14
+ });
15
+ }
16
+ }
17
+
18
+ export { REQUEST_EXPIRY, Signer };
@@ -0,0 +1,27 @@
1
+ import { RekognitionStreamingClient } from '@aws-sdk/client-rekognitionstreaming';
2
+ import { getAmplifyUserAgent } from '@aws-amplify/core/internals/utils';
3
+ import { getLivenessUserAgent } from '../../../utils/platform.mjs';
4
+ import { CustomWebSocketFetchHandler } from './CustomWebSocketFetchHandler.mjs';
5
+ import { resolveCredentials } from './resolveCredentials.mjs';
6
+ import { Signer } from './Signer.mjs';
7
+
8
+ const CONNECTION_TIMEOUT = 10000;
9
+ const CUSTOM_USER_AGENT = `${getAmplifyUserAgent()} ${getLivenessUserAgent()}`;
10
+ async function createStreamingClient({ credentialsProvider, endpointOverride, region, }) {
11
+ const credentials = await resolveCredentials(credentialsProvider);
12
+ const clientconfig = {
13
+ credentials,
14
+ customUserAgent: CUSTOM_USER_AGENT,
15
+ region,
16
+ requestHandler: new CustomWebSocketFetchHandler({
17
+ connectionTimeout: CONNECTION_TIMEOUT,
18
+ }),
19
+ signerConstructor: Signer,
20
+ };
21
+ if (endpointOverride) {
22
+ clientconfig.endpointProvider = () => ({ url: new URL(endpointOverride) });
23
+ }
24
+ return new RekognitionStreamingClient(clientconfig);
25
+ }
26
+
27
+ export { createStreamingClient };
@@ -0,0 +1,38 @@
1
+ import { fetchAuthSession } from 'aws-amplify/auth';
2
+
3
+ const isCredentialsProvider = (credentialsProvider) => typeof credentialsProvider === 'function';
4
+ // the return interface of `fetchAuthSession` includes `credentials` as
5
+ // optional, but `credentials` is always returned. If `fetchAuthSession`
6
+ // is called for an unauthenticated end user, values of `accessKeyId`
7
+ // and `secretAccessKey` are `undefined`
8
+ const isCredentials = (credentials) => !!(credentials?.accessKeyId && credentials?.secretAccessKey);
9
+ /**
10
+ * Resolves the `credentials` param to be passed to `RekognitionStreamingClient` which accepts either:
11
+ * - a `credentials` object
12
+ * - a `credentialsProvider` callback
13
+ *
14
+ * @param credentialsProvider optional `credentialsProvider` callback
15
+ * @returns {Promise<AwsCredentials | AwsCredentialProvider>} `credentials` object or valid `credentialsProvider` callback
16
+ */
17
+ async function resolveCredentials(credentialsProvider) {
18
+ const hasCredentialsProvider = isCredentialsProvider(credentialsProvider);
19
+ if (hasCredentialsProvider) {
20
+ return credentialsProvider;
21
+ }
22
+ if (credentialsProvider && !hasCredentialsProvider) {
23
+ throw new Error('Invalid credentialsProvider');
24
+ }
25
+ try {
26
+ const result = (await fetchAuthSession()).credentials;
27
+ if (isCredentials(result)) {
28
+ return result;
29
+ }
30
+ throw new Error('Missing credentials');
31
+ }
32
+ catch (e) {
33
+ const { message } = e;
34
+ throw new Error(`Invalid credentials: ${message}`);
35
+ }
36
+ }
37
+
38
+ export { resolveCredentials };
@@ -2,7 +2,6 @@ import { FaceMatchState, IlluminationState } from '../types/liveness.mjs';
2
2
  import { LivenessErrorState } from '../types/error.mjs';
3
3
  import { FACE_DISTANCE_THRESHOLD, REDUCED_THRESHOLD_MOBILE, REDUCED_THRESHOLD } from './constants.mjs';
4
4
 
5
- /* eslint-disable */
6
5
  /**
7
6
  * Returns the random number between min and max
8
7
  * seeded with the provided random seed.
@@ -93,31 +92,6 @@ function getStaticLivenessOvalDetails({ width, height, widthSeed = 1.0, centerXS
93
92
  height: Math.floor(ovalHeight),
94
93
  };
95
94
  }
96
- function drawStaticOval(canvasEl, videoEl, videoMediaStream) {
97
- const { width, height } = videoMediaStream.getTracks()[0].getSettings();
98
- // Get width/height of video element so we can compute scaleFactor
99
- // and set canvas width/height.
100
- const { width: videoScaledWidth, height: videoScaledHeight } = videoEl.getBoundingClientRect();
101
- canvasEl.width = Math.ceil(videoScaledWidth);
102
- canvasEl.height = Math.ceil(videoScaledHeight);
103
- const ovalDetails = getStaticLivenessOvalDetails({
104
- width: width,
105
- height: height,
106
- ratioMultiplier: 0.5,
107
- });
108
- ovalDetails.flippedCenterX = width - ovalDetails.centerX;
109
- // Compute scaleFactor which is how much our video element is scaled
110
- // vs the intrinsic video resolution
111
- const scaleFactor = videoScaledWidth / videoEl.videoWidth;
112
- // Draw oval in canvas using ovalDetails and scaleFactor
113
- drawLivenessOvalInCanvas({
114
- canvas: canvasEl,
115
- oval: ovalDetails,
116
- scaleFactor,
117
- videoEl: videoEl,
118
- isStartScreen: true,
119
- });
120
- }
121
95
  /**
122
96
  * Draws the provided liveness oval on the canvas.
123
97
  */
@@ -162,6 +136,31 @@ function drawLivenessOvalInCanvas({ canvas, oval, scaleFactor, videoEl, isStartS
162
136
  throw new Error('Cannot find Canvas.');
163
137
  }
164
138
  }
139
+ function drawStaticOval(canvasEl, videoEl, videoMediaStream) {
140
+ const { width, height } = videoMediaStream.getTracks()[0].getSettings();
141
+ // Get width/height of video element so we can compute scaleFactor
142
+ // and set canvas width/height.
143
+ const { width: videoScaledWidth, height: videoScaledHeight } = videoEl.getBoundingClientRect();
144
+ canvasEl.width = Math.ceil(videoScaledWidth);
145
+ canvasEl.height = Math.ceil(videoScaledHeight);
146
+ const ovalDetails = getStaticLivenessOvalDetails({
147
+ width: width,
148
+ height: height,
149
+ ratioMultiplier: 0.5,
150
+ });
151
+ ovalDetails.flippedCenterX = width - ovalDetails.centerX;
152
+ // Compute scaleFactor which is how much our video element is scaled
153
+ // vs the intrinsic video resolution
154
+ const scaleFactor = videoScaledWidth / videoEl.videoWidth;
155
+ // Draw oval in canvas using ovalDetails and scaleFactor
156
+ drawLivenessOvalInCanvas({
157
+ canvas: canvasEl,
158
+ oval: ovalDetails,
159
+ scaleFactor,
160
+ videoEl: videoEl,
161
+ isStartScreen: true,
162
+ });
163
+ }
165
164
  function clearOvalCanvas({ canvas, }) {
166
165
  const ctx = canvas.getContext('2d');
167
166
  if (ctx) {
@@ -172,9 +171,43 @@ function clearOvalCanvas({ canvas, }) {
172
171
  throw new Error('Cannot find Canvas.');
173
172
  }
174
173
  }
174
+ function getPupilDistanceAndFaceHeight(face) {
175
+ const { leftEye, rightEye, mouth } = face;
176
+ const eyeCenter = [];
177
+ eyeCenter[0] = (leftEye[0] + rightEye[0]) / 2;
178
+ eyeCenter[1] = (leftEye[1] + rightEye[1]) / 2;
179
+ const pupilDistance = Math.sqrt((leftEye[0] - rightEye[0]) ** 2 + (leftEye[1] - rightEye[1]) ** 2);
180
+ const faceHeight = Math.sqrt((eyeCenter[0] - mouth[0]) ** 2 + (eyeCenter[1] - mouth[1]) ** 2);
181
+ return { pupilDistance, faceHeight };
182
+ }
183
+ function generateBboxFromLandmarks(face, oval) {
184
+ const { leftEye, rightEye, nose, leftEar, rightEar, top: faceTop, height: faceHeight, } = face;
185
+ const { height: ovalHeight, centerY } = oval;
186
+ const ovalTop = centerY - ovalHeight / 2;
187
+ const eyeCenter = [];
188
+ eyeCenter[0] = (leftEye[0] + rightEye[0]) / 2;
189
+ eyeCenter[1] = (leftEye[1] + rightEye[1]) / 2;
190
+ const { pupilDistance: pd, faceHeight: fh } = getPupilDistanceAndFaceHeight(face);
191
+ const alpha = 2.0, gamma = 1.8;
192
+ const ow = (alpha * pd + gamma * fh) / 2;
193
+ const oh = 1.618 * ow;
194
+ let cx;
195
+ if (eyeCenter[1] <= (ovalTop + ovalHeight) / 2) {
196
+ cx = (eyeCenter[0] + nose[0]) / 2;
197
+ }
198
+ else {
199
+ cx = eyeCenter[0];
200
+ }
201
+ const bottom = faceTop + faceHeight;
202
+ const top = bottom - oh;
203
+ const left = Math.min(cx - ow / 2, rightEar[0]);
204
+ const right = Math.max(cx + ow / 2, leftEar[0]);
205
+ return { bottom, left, right, top };
206
+ }
175
207
  /**
176
208
  * Returns the state of the provided face with respect to the provided liveness oval.
177
209
  */
210
+ // eslint-disable-next-line max-params
178
211
  function getFaceMatchStateInLivenessOval(face, ovalDetails, initialFaceIntersection, sessionInformation) {
179
212
  let faceMatchState;
180
213
  const challengeConfig = sessionInformation?.Challenge?.FaceMovementAndLightChallenge
@@ -201,7 +234,7 @@ function getFaceMatchStateInLivenessOval(face, ovalDetails, initialFaceIntersect
201
234
  const faceDetectionWidthThreshold = ovalDetails.width * FaceIouWidthThreshold;
202
235
  const faceDetectionHeightThreshold = ovalDetails.height * FaceIouHeightThreshold;
203
236
  /** From Science
204
- * p=max(min(1,0.75∗(si​−s0)/(st​−s0)+0.25)),0)
237
+ * p=max(min(1,0.75∗(sis0)/(sts0)+0.25)),0)
205
238
  */
206
239
  const faceMatchPercentage = Math.max(Math.min(1, (0.75 * (intersection - initialFaceIntersection)) /
207
240
  (intersectionThreshold - initialFaceIntersection) +
@@ -228,44 +261,6 @@ function getFaceMatchStateInLivenessOval(face, ovalDetails, initialFaceIntersect
228
261
  }
229
262
  return { faceMatchState, faceMatchPercentage };
230
263
  }
231
- function getPupilDistanceAndFaceHeight(face) {
232
- const { leftEye, rightEye, mouth } = face;
233
- const eyeCenter = [];
234
- eyeCenter[0] = (leftEye[0] + rightEye[0]) / 2;
235
- eyeCenter[1] = (leftEye[1] + rightEye[1]) / 2;
236
- const pupilDistance = Math.sqrt((leftEye[0] - rightEye[0]) ** 2 + (leftEye[1] - rightEye[1]) ** 2);
237
- const faceHeight = Math.sqrt((eyeCenter[0] - mouth[0]) ** 2 + (eyeCenter[1] - mouth[1]) ** 2);
238
- return { pupilDistance, faceHeight };
239
- }
240
- function generateBboxFromLandmarks(face, oval) {
241
- const { leftEye, rightEye, nose, leftEar, rightEar, top: faceTop, height: faceHeight, } = face;
242
- const { height: ovalHeight, centerY } = oval;
243
- const ovalTop = centerY - ovalHeight / 2;
244
- const eyeCenter = [];
245
- eyeCenter[0] = (leftEye[0] + rightEye[0]) / 2;
246
- eyeCenter[1] = (leftEye[1] + rightEye[1]) / 2;
247
- const { pupilDistance: pd, faceHeight: fh } = getPupilDistanceAndFaceHeight(face);
248
- const alpha = 2.0, gamma = 1.8;
249
- const ow = (alpha * pd + gamma * fh) / 2;
250
- const oh = 1.618 * ow;
251
- let cx;
252
- if (eyeCenter[1] <= (ovalTop + ovalHeight) / 2) {
253
- cx = (eyeCenter[0] + nose[0]) / 2;
254
- }
255
- else {
256
- cx = eyeCenter[0];
257
- }
258
- const faceBottom = faceTop + faceHeight;
259
- const top = faceBottom - oh;
260
- const left = Math.min(cx - ow / 2, rightEar[0]);
261
- const right = Math.max(cx + ow / 2, leftEar[0]);
262
- return {
263
- left: left,
264
- top: top,
265
- right: right,
266
- bottom: faceBottom,
267
- };
268
- }
269
264
  /**
270
265
  * Returns the illumination state in the provided video frame.
271
266
  */
@@ -381,7 +376,7 @@ function fillOverlayCanvasFractional({ overlayCanvas, prevColor, nextColor, vide
381
376
  const isClientFreshnessColorSequence = (obj) => !!obj;
382
377
  function getColorsSequencesFromSessionInformation(sessionInformation) {
383
378
  const colorSequenceFromSessionInfo = sessionInformation.Challenge.FaceMovementAndLightChallenge
384
- .ColorSequences || [];
379
+ .ColorSequences ?? [];
385
380
  const colorSequences = colorSequenceFromSessionInfo.map(({ FreshnessColor, DownscrollDuration: downscrollDuration, FlatDisplayDuration: flatDisplayDuration, }) => {
386
381
  const colorArray = FreshnessColor.RGB;
387
382
  const color = `rgb(${colorArray[0]},${colorArray[1]},${colorArray[2]})`;
@@ -439,7 +434,7 @@ async function isFaceDistanceBelowThreshold({ faceDetector, videoEl, ovalDetails
439
434
  case 1: {
440
435
  //exactly one face detected, match face with oval;
441
436
  detectedFace = detectedFaces[0];
442
- const width = ovalDetails.width;
437
+ const { width } = ovalDetails;
443
438
  const { pupilDistance, faceHeight } = getPupilDistanceAndFaceHeight(detectedFace);
444
439
  const alpha = 2.0, gamma = 1.8;
445
440
  const calibratedPupilDistance = (alpha * pupilDistance + gamma * faceHeight) / 2 / alpha;
@@ -1,9 +1,6 @@
1
- import { getAmplifyUserAgent } from '@aws-amplify/core/internals/utils';
2
- import { fetchAuthSession } from 'aws-amplify/auth';
3
- import { RekognitionStreamingClient, StartFaceLivenessSessionCommand } from '@aws-sdk/client-rekognitionstreaming';
1
+ import { StartFaceLivenessSessionCommand } from '@aws-sdk/client-rekognitionstreaming';
4
2
  import { VideoRecorder } from './videoRecorder.mjs';
5
- import { getLivenessUserAgent } from '../../utils/platform.mjs';
6
- import { CustomWebSocketFetchHandler } from './CustomWebSocketFetchHandler.mjs';
3
+ import { createStreamingClient } from './createStreamingClient/createStreamingClient.mjs';
7
4
 
8
5
  const TIME_SLICE = 1000;
9
6
  function isBlob(obj) {
@@ -34,9 +31,7 @@ class LivenessStreamProvider {
34
31
  this.videoRecorder.start(TIME_SLICE);
35
32
  }
36
33
  sendClientInfo(clientInfo) {
37
- this.videoRecorder.dispatch(new MessageEvent('clientSesssionInfo', {
38
- data: { clientInfo },
39
- }));
34
+ this.videoRecorder.dispatch(new MessageEvent('clientSesssionInfo', { data: { clientInfo } }));
40
35
  }
41
36
  async stopVideo() {
42
37
  await this.videoRecorder.stop();
@@ -48,32 +43,15 @@ class LivenessStreamProvider {
48
43
  if (this.videoRecorder.getState() === 'recording') {
49
44
  await this.stopVideo();
50
45
  }
51
- this.videoRecorder.dispatch(new MessageEvent('endStreamWithCode', {
52
- data: { code: code },
53
- }));
46
+ this.videoRecorder.dispatch(new MessageEvent('endStreamWithCode', { data: { code } }));
54
47
  return;
55
48
  }
56
49
  async init() {
57
- const credentials = this.credentialProvider ?? (await fetchAuthSession()).credentials;
58
- if (!credentials) {
59
- throw new Error('No credentials');
60
- }
61
- const clientconfig = {
62
- credentials,
50
+ this._client = await createStreamingClient({
51
+ credentialsProvider: this.credentialProvider,
52
+ endpointOverride: this.endpointOverride,
63
53
  region: this.region,
64
- customUserAgent: `${getAmplifyUserAgent()} ${getLivenessUserAgent()}`,
65
- requestHandler: new CustomWebSocketFetchHandler({
66
- connectionTimeout: 10000,
67
- }),
68
- };
69
- if (this.endpointOverride) {
70
- const override = this.endpointOverride;
71
- clientconfig.endpointProvider = () => {
72
- const url = new URL(override);
73
- return { url };
74
- };
75
- }
76
- this._client = new RekognitionStreamingClient(clientconfig);
54
+ });
77
55
  this.responseStream = await this.startLivenessVideoConnection();
78
56
  }
79
57
  // Creates a generator from a stream of video chunks and livenessActionDocuments and yields VideoEvent and ClientEvents
@@ -83,8 +61,7 @@ class LivenessStreamProvider {
83
61
  this._reader = stream.getReader();
84
62
  return async function* () {
85
63
  while (true) {
86
- // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
87
- const { done, value } = await current._reader.read();
64
+ const { done, value } = (await current._reader.read());
88
65
  if (done) {
89
66
  return;
90
67
  }
@@ -93,7 +70,7 @@ class LivenessStreamProvider {
93
70
  // sending an empty video chunk signals that we have ended sending video
94
71
  yield {
95
72
  VideoEvent: {
96
- VideoChunk: [],
73
+ VideoChunk: new Uint8Array([]),
97
74
  TimestampMillis: Date.now(),
98
75
  },
99
76
  };
@@ -120,7 +97,9 @@ class LivenessStreamProvider {
120
97
  else if (isEndStreamWithCodeEvent(value)) {
121
98
  yield {
122
99
  VideoEvent: {
123
- VideoChunk: [],
100
+ VideoChunk: new Uint8Array([]),
101
+ // this is a custom type that does not match LivenessRequestStream.
102
+ // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
124
103
  TimestampMillis: { closeCode: value.code },
125
104
  },
126
105
  };
@@ -141,4 +120,4 @@ class LivenessStreamProvider {
141
120
  }
142
121
  }
143
122
 
144
- export { LivenessStreamProvider, TIME_SLICE };
123
+ export { LivenessStreamProvider };
@@ -2,12 +2,11 @@
2
2
  * Helper wrapper class over the native MediaRecorder.
3
3
  */
4
4
  class VideoRecorder {
5
- constructor(stream, options = {}) {
5
+ constructor(stream) {
6
6
  if (typeof MediaRecorder === 'undefined') {
7
7
  throw Error('MediaRecorder is not supported by this browser');
8
8
  }
9
9
  this._stream = stream;
10
- this._options = options;
11
10
  this._chunks = [];
12
11
  this._recorder = new MediaRecorder(stream, { bitsPerSecond: 1000000 });
13
12
  this._setupCallbacks();
@@ -9,12 +9,8 @@ import '@tensorflow-models/face-detection';
9
9
  import '@tensorflow/tfjs-backend-wasm';
10
10
  import '@tensorflow/tfjs-backend-cpu';
11
11
  import '@aws-amplify/core/internals/utils';
12
- import 'aws-amplify/auth';
13
12
  import '@aws-sdk/client-rekognitionstreaming';
14
- import '@aws-sdk/util-format-url';
15
- import '@smithy/eventstream-serde-browser';
16
- import '@smithy/fetch-http-handler';
17
- import '@smithy/protocol-http';
13
+ import '../service/utils/createStreamingClient/createStreamingClient.mjs';
18
14
  import '../service/utils/freshnessColorDisplay.mjs';
19
15
  import '@xstate/react';
20
16
  import '../providers/FaceLivenessDetectorProvider.mjs';
@@ -9,12 +9,8 @@ import '@tensorflow-models/face-detection';
9
9
  import '@tensorflow/tfjs-backend-wasm';
10
10
  import '@tensorflow/tfjs-backend-cpu';
11
11
  import '@aws-amplify/core/internals/utils';
12
- import 'aws-amplify/auth';
13
12
  import '@aws-sdk/client-rekognitionstreaming';
14
- import '@aws-sdk/util-format-url';
15
- import '@smithy/eventstream-serde-browser';
16
- import '@smithy/fetch-http-handler';
17
- import '@smithy/protocol-http';
13
+ import '../service/utils/createStreamingClient/createStreamingClient.mjs';
18
14
  import '../service/utils/freshnessColorDisplay.mjs';
19
15
  import { Toast } from './Toast.mjs';
20
16
  import { Overlay } from './Overlay.mjs';
@@ -7,12 +7,8 @@ import '@tensorflow-models/face-detection';
7
7
  import '@tensorflow/tfjs-backend-wasm';
8
8
  import '@tensorflow/tfjs-backend-cpu';
9
9
  import '@aws-amplify/core/internals/utils';
10
- import 'aws-amplify/auth';
11
10
  import '@aws-sdk/client-rekognitionstreaming';
12
- import '@aws-sdk/util-format-url';
13
- import '@smithy/eventstream-serde-browser';
14
- import '@smithy/fetch-http-handler';
15
- import '@smithy/protocol-http';
11
+ import '../service/utils/createStreamingClient/createStreamingClient.mjs';
16
12
  import '../service/utils/freshnessColorDisplay.mjs';
17
13
  import { useLivenessActor } from '../hooks/useLivenessActor.mjs';
18
14
  import { createLivenessSelector, useLivenessSelector } from '../hooks/useLivenessSelector.mjs';
@@ -14,11 +14,6 @@ var LivenessClassNames;
14
14
  LivenessClassNames["FreshnessCanvas"] = "amplify-liveness-freshness-canvas";
15
15
  LivenessClassNames["InstructionList"] = "amplify-liveness-instruction-list";
16
16
  LivenessClassNames["InstructionOverlay"] = "amplify-liveness-instruction-overlay";
17
- LivenessClassNames["Figure"] = "amplify-liveness-figure";
18
- LivenessClassNames["FigureCaption"] = "amplify-liveness-figure__caption";
19
- LivenessClassNames["FigureIcon"] = "amplify-liveness-figure__icon";
20
- LivenessClassNames["FigureImage"] = "amplify-liveness-figure__image";
21
- LivenessClassNames["Figures"] = "amplify-liveness-figures";
22
17
  LivenessClassNames["Hint"] = "amplify-liveness-hint";
23
18
  LivenessClassNames["HintText"] = "amplify-liveness-hint__text";
24
19
  LivenessClassNames["LandscapeErrorModal"] = "amplify-liveness-landscape-error-modal";
@@ -1,3 +1,3 @@
1
- const VERSION = '3.0.13';
1
+ const VERSION = '3.0.15';
2
2
 
3
3
  export { VERSION };