@aws-amplify/ui-react-liveness 3.0.12 → 3.0.14

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (23)
  1. package/dist/esm/components/FaceLivenessDetector/LivenessCheck/LivenessCameraModule.mjs +3 -10
  2. package/dist/esm/components/FaceLivenessDetector/service/machine/index.mjs +144 -158
  3. package/dist/esm/components/FaceLivenessDetector/service/utils/CustomWebSocketFetchHandler.mjs +3 -4
  4. package/dist/esm/components/FaceLivenessDetector/service/utils/liveness.mjs +62 -67
  5. package/dist/esm/components/FaceLivenessDetector/service/utils/streamProvider.mjs +6 -5
  6. package/dist/esm/components/FaceLivenessDetector/service/utils/videoRecorder.mjs +1 -2
  7. package/dist/esm/components/FaceLivenessDetector/types/classNames.mjs +0 -5
  8. package/dist/esm/version.mjs +1 -1
  9. package/dist/index.js +218 -250
  10. package/dist/styles.css +6 -2
  11. package/dist/types/components/FaceLivenessDetector/hooks/useLivenessActor.d.ts +1 -1
  12. package/dist/types/components/FaceLivenessDetector/service/machine/index.d.ts +3 -4
  13. package/dist/types/components/FaceLivenessDetector/service/types/machine.d.ts +17 -18
  14. package/dist/types/components/FaceLivenessDetector/service/utils/liveness.d.ts +7 -9
  15. package/dist/types/components/FaceLivenessDetector/service/utils/streamProvider.d.ts +3 -10
  16. package/dist/types/components/FaceLivenessDetector/service/utils/videoRecorder.d.ts +1 -7
  17. package/dist/types/components/FaceLivenessDetector/shared/index.d.ts +0 -3
  18. package/dist/types/components/FaceLivenessDetector/types/classNames.d.ts +0 -5
  19. package/dist/types/version.d.ts +1 -1
  20. package/package.json +3 -3
  21. package/dist/types/components/FaceLivenessDetector/shared/GoodFitIllustration.d.ts +0 -7
  22. package/dist/types/components/FaceLivenessDetector/shared/StartScreenFigure.d.ts +0 -8
  23. package/dist/types/components/FaceLivenessDetector/shared/TooFarIllustration.d.ts +0 -7
@@ -2,7 +2,6 @@ import { FaceMatchState, IlluminationState } from '../types/liveness.mjs';
  import { LivenessErrorState } from '../types/error.mjs';
  import { FACE_DISTANCE_THRESHOLD, REDUCED_THRESHOLD_MOBILE, REDUCED_THRESHOLD } from './constants.mjs';
 
- /* eslint-disable */
  /**
  * Returns the random number between min and max
  * seeded with the provided random seed.
@@ -93,31 +92,6 @@ function getStaticLivenessOvalDetails({ width, height, widthSeed = 1.0, centerXS
  height: Math.floor(ovalHeight),
  };
  }
- function drawStaticOval(canvasEl, videoEl, videoMediaStream) {
- const { width, height } = videoMediaStream.getTracks()[0].getSettings();
- // Get width/height of video element so we can compute scaleFactor
- // and set canvas width/height.
- const { width: videoScaledWidth, height: videoScaledHeight } = videoEl.getBoundingClientRect();
- canvasEl.width = Math.ceil(videoScaledWidth);
- canvasEl.height = Math.ceil(videoScaledHeight);
- const ovalDetails = getStaticLivenessOvalDetails({
- width: width,
- height: height,
- ratioMultiplier: 0.5,
- });
- ovalDetails.flippedCenterX = width - ovalDetails.centerX;
- // Compute scaleFactor which is how much our video element is scaled
- // vs the intrinsic video resolution
- const scaleFactor = videoScaledWidth / videoEl.videoWidth;
- // Draw oval in canvas using ovalDetails and scaleFactor
- drawLivenessOvalInCanvas({
- canvas: canvasEl,
- oval: ovalDetails,
- scaleFactor,
- videoEl: videoEl,
- isStartScreen: true,
- });
- }
  /**
  * Draws the provided liveness oval on the canvas.
  */
@@ -162,6 +136,31 @@ function drawLivenessOvalInCanvas({ canvas, oval, scaleFactor, videoEl, isStartS
  throw new Error('Cannot find Canvas.');
  }
  }
+ function drawStaticOval(canvasEl, videoEl, videoMediaStream) {
+ const { width, height } = videoMediaStream.getTracks()[0].getSettings();
+ // Get width/height of video element so we can compute scaleFactor
+ // and set canvas width/height.
+ const { width: videoScaledWidth, height: videoScaledHeight } = videoEl.getBoundingClientRect();
+ canvasEl.width = Math.ceil(videoScaledWidth);
+ canvasEl.height = Math.ceil(videoScaledHeight);
+ const ovalDetails = getStaticLivenessOvalDetails({
+ width: width,
+ height: height,
+ ratioMultiplier: 0.5,
+ });
+ ovalDetails.flippedCenterX = width - ovalDetails.centerX;
+ // Compute scaleFactor which is how much our video element is scaled
+ // vs the intrinsic video resolution
+ const scaleFactor = videoScaledWidth / videoEl.videoWidth;
+ // Draw oval in canvas using ovalDetails and scaleFactor
+ drawLivenessOvalInCanvas({
+ canvas: canvasEl,
+ oval: ovalDetails,
+ scaleFactor,
+ videoEl: videoEl,
+ isStartScreen: true,
+ });
+ }
  function clearOvalCanvas({ canvas, }) {
  const ctx = canvas.getContext('2d');
  if (ctx) {
@@ -172,9 +171,43 @@ function clearOvalCanvas({ canvas, }) {
  throw new Error('Cannot find Canvas.');
  }
  }
+ function getPupilDistanceAndFaceHeight(face) {
+ const { leftEye, rightEye, mouth } = face;
+ const eyeCenter = [];
+ eyeCenter[0] = (leftEye[0] + rightEye[0]) / 2;
+ eyeCenter[1] = (leftEye[1] + rightEye[1]) / 2;
+ const pupilDistance = Math.sqrt((leftEye[0] - rightEye[0]) ** 2 + (leftEye[1] - rightEye[1]) ** 2);
+ const faceHeight = Math.sqrt((eyeCenter[0] - mouth[0]) ** 2 + (eyeCenter[1] - mouth[1]) ** 2);
+ return { pupilDistance, faceHeight };
+ }
+ function generateBboxFromLandmarks(face, oval) {
+ const { leftEye, rightEye, nose, leftEar, rightEar, top: faceTop, height: faceHeight, } = face;
+ const { height: ovalHeight, centerY } = oval;
+ const ovalTop = centerY - ovalHeight / 2;
+ const eyeCenter = [];
+ eyeCenter[0] = (leftEye[0] + rightEye[0]) / 2;
+ eyeCenter[1] = (leftEye[1] + rightEye[1]) / 2;
+ const { pupilDistance: pd, faceHeight: fh } = getPupilDistanceAndFaceHeight(face);
+ const alpha = 2.0, gamma = 1.8;
+ const ow = (alpha * pd + gamma * fh) / 2;
+ const oh = 1.618 * ow;
+ let cx;
+ if (eyeCenter[1] <= (ovalTop + ovalHeight) / 2) {
+ cx = (eyeCenter[0] + nose[0]) / 2;
+ }
+ else {
+ cx = eyeCenter[0];
+ }
+ const bottom = faceTop + faceHeight;
+ const top = bottom - oh;
+ const left = Math.min(cx - ow / 2, rightEar[0]);
+ const right = Math.max(cx + ow / 2, leftEar[0]);
+ return { bottom, left, right, top };
+ }
  /**
  * Returns the state of the provided face with respect to the provided liveness oval.
  */
+ // eslint-disable-next-line max-params
  function getFaceMatchStateInLivenessOval(face, ovalDetails, initialFaceIntersection, sessionInformation) {
  let faceMatchState;
  const challengeConfig = sessionInformation?.Challenge?.FaceMovementAndLightChallenge
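These two helpers were moved above their first call site (the matching removal hunk appears further down). As a minimal sketch of what the landmark math computes, with invented pixel coordinates and an assumed landmark layout:

```ts
// Sketch of the landmark math above with invented coordinates.
// Every value here is an assumption for illustration only.
type Landmark = [number, number];
const leftEye: Landmark = [220, 180];
const rightEye: Landmark = [300, 180];
const mouth: Landmark = [260, 260];

// Pupil distance: Euclidean distance between the eyes.
const pd = Math.hypot(leftEye[0] - rightEye[0], leftEye[1] - rightEye[1]); // 80

// "Face height": eye midpoint to mouth.
const eyeCenter: Landmark = [(leftEye[0] + rightEye[0]) / 2, (leftEye[1] + rightEye[1]) / 2]; // [260, 180]
const fh = Math.hypot(eyeCenter[0] - mouth[0], eyeCenter[1] - mouth[1]); // 80

// Synthetic bbox dimensions: ow = (2.0*pd + 1.8*fh) / 2, oh = 1.618*ow (golden ratio).
const ow = (2.0 * pd + 1.8 * fh) / 2; // 152
const oh = 1.618 * ow; // ~245.9
```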
@@ -201,7 +234,7 @@ function getFaceMatchStateInLivenessOval(face, ovalDetails, initialFaceIntersect
  const faceDetectionWidthThreshold = ovalDetails.width * FaceIouWidthThreshold;
  const faceDetectionHeightThreshold = ovalDetails.height * FaceIouHeightThreshold;
  /** From Science
- * p=max(min(1,0.75∗(si​−s0)/(st​−s0)+0.25)),0)
+ * p=max(min(1,0.75∗(si−s0)/(st−s0)+0.25)),0)
  */
  const faceMatchPercentage = Math.max(Math.min(1, (0.75 * (intersection - initialFaceIntersection)) /
  (intersectionThreshold - initialFaceIntersection) +
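The comment cleanup (stray invisible characters removed) describes the clamped match percentage p = max(min(1, 0.75 * (si - s0)/(st - s0) + 0.25), 0), where si is the current face/oval intersection, s0 the initial intersection, and st the configured threshold. This matches the `Math.max(Math.min(...))` expression right below it. A worked example with invented intersection values:

```ts
// Worked example of the match-percentage clamp, with invented values.
const s0 = 0.1; // initial face/oval intersection
const st = 0.7; // intersection threshold from the challenge config
const si = 0.5; // current intersection
const p = Math.max(Math.min(1, 0.75 * ((si - s0) / (st - s0)) + 0.25), 0);
// 0.75 * (0.4 / 0.6) + 0.25 = 0.75, i.e. a 75% match
```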
@@ -228,44 +261,6 @@ function getFaceMatchStateInLivenessOval(face, ovalDetails, initialFaceIntersect
  }
  return { faceMatchState, faceMatchPercentage };
  }
- function getPupilDistanceAndFaceHeight(face) {
- const { leftEye, rightEye, mouth } = face;
- const eyeCenter = [];
- eyeCenter[0] = (leftEye[0] + rightEye[0]) / 2;
- eyeCenter[1] = (leftEye[1] + rightEye[1]) / 2;
- const pupilDistance = Math.sqrt((leftEye[0] - rightEye[0]) ** 2 + (leftEye[1] - rightEye[1]) ** 2);
- const faceHeight = Math.sqrt((eyeCenter[0] - mouth[0]) ** 2 + (eyeCenter[1] - mouth[1]) ** 2);
- return { pupilDistance, faceHeight };
- }
- function generateBboxFromLandmarks(face, oval) {
- const { leftEye, rightEye, nose, leftEar, rightEar, top: faceTop, height: faceHeight, } = face;
- const { height: ovalHeight, centerY } = oval;
- const ovalTop = centerY - ovalHeight / 2;
- const eyeCenter = [];
- eyeCenter[0] = (leftEye[0] + rightEye[0]) / 2;
- eyeCenter[1] = (leftEye[1] + rightEye[1]) / 2;
- const { pupilDistance: pd, faceHeight: fh } = getPupilDistanceAndFaceHeight(face);
- const alpha = 2.0, gamma = 1.8;
- const ow = (alpha * pd + gamma * fh) / 2;
- const oh = 1.618 * ow;
- let cx;
- if (eyeCenter[1] <= (ovalTop + ovalHeight) / 2) {
- cx = (eyeCenter[0] + nose[0]) / 2;
- }
- else {
- cx = eyeCenter[0];
- }
- const faceBottom = faceTop + faceHeight;
- const top = faceBottom - oh;
- const left = Math.min(cx - ow / 2, rightEar[0]);
- const right = Math.max(cx + ow / 2, leftEar[0]);
- return {
- left: left,
- top: top,
- right: right,
- bottom: faceBottom,
- };
- }
  /**
  * Returns the illumination state in the provided video frame.
  */
@@ -381,7 +376,7 @@ function fillOverlayCanvasFractional({ overlayCanvas, prevColor, nextColor, vide
  const isClientFreshnessColorSequence = (obj) => !!obj;
  function getColorsSequencesFromSessionInformation(sessionInformation) {
  const colorSequenceFromSessionInfo = sessionInformation.Challenge.FaceMovementAndLightChallenge
- .ColorSequences || [];
+ .ColorSequences ?? [];
  const colorSequences = colorSequenceFromSessionInfo.map(({ FreshnessColor, DownscrollDuration: downscrollDuration, FlatDisplayDuration: flatDisplayDuration, }) => {
  const colorArray = FreshnessColor.RGB;
  const color = `rgb(${colorArray[0]},${colorArray[1]},${colorArray[2]})`;
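The `||` to `??` change narrows the fallback: `||` substitutes the empty array for any falsy value, while `??` does so only for `null` or `undefined`. Since `ColorSequences` is an array or absent (and an array is never falsy), both operators behave identically here; `??` simply states the "only when absent" intent precisely. The general difference:

```ts
// `||` treats every falsy value as missing; `??` only null/undefined.
const withOr = 0 || 10;      // 10 (0 is falsy, so || discards it)
const withNullish = 0 ?? 10; // 0  (?? keeps any non-nullish value)
```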
@@ -439,7 +434,7 @@ async function isFaceDistanceBelowThreshold({ faceDetector, videoEl, ovalDetails
  case 1: {
  //exactly one face detected, match face with oval;
  detectedFace = detectedFaces[0];
- const width = ovalDetails.width;
+ const { width } = ovalDetails;
  const { pupilDistance, faceHeight } = getPupilDistanceAndFaceHeight(detectedFace);
  const alpha = 2.0, gamma = 1.8;
  const calibratedPupilDistance = (alpha * pupilDistance + gamma * faceHeight) / 2 / alpha;
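The destructuring tweak is cosmetic; the substance of this block is the calibrated pupil distance it feeds into the distance check. A sketch with invented numbers (the comparison shape and threshold value are assumptions for illustration, not the package's constants):

```ts
// Face-distance check, roughly as in the hunk above, with invented numbers.
const alpha = 2.0, gamma = 1.8;
const pupilDistance = 80;
const faceHeight = 80;
const ovalWidth = 480;
// A pupil distance "calibrated" by face height.
const calibratedPupilDistance =
  (alpha * pupilDistance + gamma * faceHeight) / 2 / alpha; // 76
// The face counts as far enough away while this ratio stays under a
// threshold (FACE_DISTANCE_THRESHOLD et al. are imported at the top of the file).
const exampleThreshold = 0.32; // invented value for illustration
const isBelowThreshold = calibratedPupilDistance / ovalWidth < exampleThreshold; // 0.158... < 0.32, so true
```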
@@ -83,8 +83,7 @@ class LivenessStreamProvider {
  this._reader = stream.getReader();
  return async function* () {
  while (true) {
- // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
- const { done, value } = await current._reader.read();
+ const { done, value } = (await current._reader.read());
  if (done) {
  return;
  }
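The lint suppression is gone because the awaited result is now typed at the call site. In isolation, the underlying pattern, driving an async generator off a `ReadableStream` reader, looks like this (a standalone sketch, not the package's exact types):

```ts
// Minimal sketch: adapt a ReadableStream into an async iterable,
// mirroring the reader loop in the hunk above.
async function* streamToAsyncIterable<T>(stream: ReadableStream<T>): AsyncGenerator<T> {
  const reader = stream.getReader();
  try {
    while (true) {
      const { done, value } = await reader.read();
      if (done) return; // stream closed; stop iterating
      yield value as T;
    }
  } finally {
    reader.releaseLock(); // let others read the stream afterwards
  }
}
```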
@@ -93,7 +92,7 @@ class LivenessStreamProvider {
  // sending an empty video chunk signals that we have ended sending video
  yield {
  VideoEvent: {
- VideoChunk: [],
+ VideoChunk: new Uint8Array([]),
  TimestampMillis: Date.now(),
  },
  };
@@ -120,7 +119,9 @@ class LivenessStreamProvider {
  else if (isEndStreamWithCodeEvent(value)) {
  yield {
  VideoEvent: {
- VideoChunk: [],
+ VideoChunk: new Uint8Array([]),
+ // this is a custom type that does not match LivenessRequestStream.
+ // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
  TimestampMillis: { closeCode: value.code },
  },
  };
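Both sentinel events now carry `new Uint8Array([])` rather than a plain `[]`. `VideoChunk` is a binary blob on the wire, so an (empty) byte view is the type the serializer expects; a plain array merely happened to pass through. A sketch of the end-of-video event shape (the wrapper interface here is an assumption for illustration):

```ts
// Sketch of the end-of-video sentinel, shaped like the yields above.
interface VideoEventLike {
  VideoEvent: {
    VideoChunk: Uint8Array; // blob fields expect a byte view, not number[]
    TimestampMillis: number;
  };
}

const endOfVideo: VideoEventLike = {
  VideoEvent: {
    VideoChunk: new Uint8Array([]), // empty chunk signals "no more video"
    TimestampMillis: Date.now(),
  },
};
```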
@@ -141,4 +142,4 @@ class LivenessStreamProvider {
  }
  }
 
- export { LivenessStreamProvider, TIME_SLICE };
+ export { LivenessStreamProvider };
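`TIME_SLICE` is no longer re-exported, so only `LivenessStreamProvider` remains importable from this module. Anyone deep-importing the constant from `dist` (unsupported, but it happens) now breaks at build time:

```ts
// Before (3.0.12) this deep import resolved both names:
// import { LivenessStreamProvider, TIME_SLICE } from '@aws-amplify/ui-react-liveness/dist/esm/components/FaceLivenessDetector/service/utils/streamProvider.mjs';

// After (3.0.14) only the provider is exported; TIME_SLICE fails to resolve.
import { LivenessStreamProvider } from '@aws-amplify/ui-react-liveness/dist/esm/components/FaceLivenessDetector/service/utils/streamProvider.mjs';
```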
@@ -2,12 +2,11 @@
  * Helper wrapper class over the native MediaRecorder.
  */
  class VideoRecorder {
- constructor(stream, options = {}) {
+ constructor(stream) {
  if (typeof MediaRecorder === 'undefined') {
  throw Error('MediaRecorder is not supported by this browser');
  }
  this._stream = stream;
- this._options = options;
  this._chunks = [];
  this._recorder = new MediaRecorder(stream, { bitsPerSecond: 1000000 });
  this._setupCallbacks();
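With the unused `options` parameter dropped, the recording bitrate stays fixed at 1 Mbps inside the class rather than appearing caller-configurable. A minimal standalone sketch of the same `MediaRecorder` setup (the camera acquisition is added here for completeness and is not part of the class):

```ts
// Minimal MediaRecorder setup mirroring the simplified constructor above.
async function recordCamera(): Promise<MediaRecorder> {
  if (typeof MediaRecorder === 'undefined') {
    throw Error('MediaRecorder is not supported by this browser');
  }
  const stream = await navigator.mediaDevices.getUserMedia({ video: true });
  // Bitrate is hard-coded, as in the constructor above.
  const recorder = new MediaRecorder(stream, { bitsPerSecond: 1_000_000 });
  recorder.start();
  return recorder;
}
```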
@@ -14,11 +14,6 @@ var LivenessClassNames;
  LivenessClassNames["FreshnessCanvas"] = "amplify-liveness-freshness-canvas";
  LivenessClassNames["InstructionList"] = "amplify-liveness-instruction-list";
  LivenessClassNames["InstructionOverlay"] = "amplify-liveness-instruction-overlay";
- LivenessClassNames["Figure"] = "amplify-liveness-figure";
- LivenessClassNames["FigureCaption"] = "amplify-liveness-figure__caption";
- LivenessClassNames["FigureIcon"] = "amplify-liveness-figure__icon";
- LivenessClassNames["FigureImage"] = "amplify-liveness-figure__image";
- LivenessClassNames["Figures"] = "amplify-liveness-figures";
  LivenessClassNames["Hint"] = "amplify-liveness-hint";
  LivenessClassNames["HintText"] = "amplify-liveness-hint__text";
  LivenessClassNames["LandscapeErrorModal"] = "amplify-liveness-landscape-error-modal";
@@ -1,3 +1,3 @@
- const VERSION = '3.0.12';
+ const VERSION = '3.0.14';
 
  export { VERSION };