@aws-amplify/ui-react-liveness 2.0.11 → 3.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (66)
  1. package/dist/esm/components/FaceLivenessDetector/FaceLivenessDetector.mjs +17 -1
  2. package/dist/esm/components/FaceLivenessDetector/FaceLivenessDetectorCore.mjs +42 -1
  3. package/dist/esm/components/FaceLivenessDetector/LivenessCheck/LivenessCameraModule.mjs +199 -1
  4. package/dist/esm/components/FaceLivenessDetector/LivenessCheck/LivenessCheck.mjs +97 -1
  5. package/dist/esm/components/FaceLivenessDetector/displayText.mjs +50 -1
  6. package/dist/esm/components/FaceLivenessDetector/hooks/useLivenessActor.mjs +13 -1
  7. package/dist/esm/components/FaceLivenessDetector/hooks/useLivenessSelector.mjs +12 -1
  8. package/dist/esm/components/FaceLivenessDetector/hooks/useMediaStreamInVideo.mjs +38 -1
  9. package/dist/esm/components/FaceLivenessDetector/providers/FaceLivenessDetectorProvider.mjs +15 -1
  10. package/dist/esm/components/FaceLivenessDetector/service/machine/index.mjs +1130 -1
  11. package/dist/esm/components/FaceLivenessDetector/service/types/error.mjs +16 -1
  12. package/dist/esm/components/FaceLivenessDetector/service/types/faceDetection.mjs +15 -1
  13. package/dist/esm/components/FaceLivenessDetector/service/types/liveness.mjs +23 -1
  14. package/dist/esm/components/FaceLivenessDetector/service/utils/CustomWebSocketFetchHandler.mjs +200 -1
  15. package/dist/esm/components/FaceLivenessDetector/service/utils/blazefaceFaceDetection.mjs +102 -1
  16. package/dist/esm/components/FaceLivenessDetector/service/utils/constants.mjs +18 -1
  17. package/dist/esm/components/FaceLivenessDetector/service/utils/eventUtils.mjs +30 -1
  18. package/dist/esm/components/FaceLivenessDetector/service/utils/freshnessColorDisplay.mjs +131 -1
  19. package/dist/esm/components/FaceLivenessDetector/service/utils/liveness.mjs +462 -1
  20. package/dist/esm/components/FaceLivenessDetector/service/utils/streamProvider.mjs +144 -1
  21. package/dist/esm/components/FaceLivenessDetector/service/utils/support.mjs +14 -1
  22. package/dist/esm/components/FaceLivenessDetector/service/utils/videoRecorder.mjs +98 -1
  23. package/dist/esm/components/FaceLivenessDetector/shared/CancelButton.mjs +24 -1
  24. package/dist/esm/components/FaceLivenessDetector/shared/DefaultStartScreenComponents.mjs +41 -1
  25. package/dist/esm/components/FaceLivenessDetector/shared/FaceLivenessErrorModal.mjs +88 -1
  26. package/dist/esm/components/FaceLivenessDetector/shared/Hint.mjs +114 -1
  27. package/dist/esm/components/FaceLivenessDetector/shared/LandscapeErrorModal.mjs +30 -1
  28. package/dist/esm/components/FaceLivenessDetector/shared/LivenessIconWithPopover.mjs +37 -1
  29. package/dist/esm/components/FaceLivenessDetector/shared/MatchIndicator.mjs +24 -1
  30. package/dist/esm/components/FaceLivenessDetector/shared/Overlay.mjs +9 -1
  31. package/dist/esm/components/FaceLivenessDetector/shared/RecordingIcon.mjs +13 -1
  32. package/dist/esm/components/FaceLivenessDetector/shared/Toast.mjs +12 -1
  33. package/dist/esm/components/FaceLivenessDetector/types/classNames.mjs +54 -1
  34. package/dist/esm/components/FaceLivenessDetector/utils/device.mjs +24 -1
  35. package/dist/esm/components/FaceLivenessDetector/utils/getDisplayText.mjs +78 -1
  36. package/dist/esm/components/FaceLivenessDetector/utils/helpers.mjs +14 -0
  37. package/dist/esm/components/FaceLivenessDetector/utils/platform.mjs +8 -1
  38. package/dist/esm/index.mjs +2 -1
  39. package/dist/esm/version.mjs +3 -1
  40. package/dist/index.js +3208 -1
  41. package/dist/styles.css +343 -680
  42. package/dist/types/components/FaceLivenessDetector/FaceLivenessDetector.d.ts +1 -1
  43. package/dist/types/components/FaceLivenessDetector/FaceLivenessDetectorCore.d.ts +1 -3
  44. package/dist/types/components/FaceLivenessDetector/LivenessCheck/LivenessCameraModule.d.ts +7 -3
  45. package/dist/types/components/FaceLivenessDetector/LivenessCheck/LivenessCheck.d.ts +5 -3
  46. package/dist/types/components/FaceLivenessDetector/displayText.d.ts +3 -10
  47. package/dist/types/components/FaceLivenessDetector/service/machine/index.d.ts +1 -1
  48. package/dist/types/components/FaceLivenessDetector/service/types/faceDetection.d.ts +2 -0
  49. package/dist/types/components/FaceLivenessDetector/service/types/liveness.d.ts +1 -1
  50. package/dist/types/components/FaceLivenessDetector/service/types/machine.d.ts +3 -1
  51. package/dist/types/components/FaceLivenessDetector/service/utils/blazefaceFaceDetection.d.ts +4 -3
  52. package/dist/types/components/FaceLivenessDetector/service/utils/liveness.d.ts +5 -2
  53. package/dist/types/components/FaceLivenessDetector/shared/DefaultStartScreenComponents.d.ts +9 -15
  54. package/dist/types/components/FaceLivenessDetector/shared/Overlay.d.ts +2 -5
  55. package/dist/types/components/FaceLivenessDetector/shared/Toast.d.ts +1 -0
  56. package/dist/types/components/FaceLivenessDetector/types/classNames.d.ts +3 -0
  57. package/dist/types/version.d.ts +1 -1
  58. package/package.json +16 -37
  59. package/dist/esm/components/FaceLivenessDetector/StartLiveness/StartLiveness.mjs +0 -1
  60. package/dist/esm/components/FaceLivenessDetector/StartLiveness/helpers.mjs +0 -1
  61. package/dist/esm/components/FaceLivenessDetector/shared/GoodFitIllustration.mjs +0 -1
  62. package/dist/esm/components/FaceLivenessDetector/shared/StartScreenFigure.mjs +0 -1
  63. package/dist/esm/components/FaceLivenessDetector/shared/TooFarIllustration.mjs +0 -1
  64. package/dist/types/components/FaceLivenessDetector/StartLiveness/StartLiveness.d.ts +0 -9
  65. package/dist/types/components/FaceLivenessDetector/StartLiveness/index.d.ts +0 -1
  66. /package/dist/types/components/FaceLivenessDetector/{StartLiveness → utils}/helpers.d.ts +0 -0
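
The most consequential change visible in this list is the move to the aws-amplify v6 module APIs: in the streamProvider.mjs hunk below, credentials now come from fetchAuthSession() in 'aws-amplify/auth' rather than the v5-internal Credentials.get() from '@aws-amplify/core', and the standalone StartLiveness screen files at the end of the list are removed. A minimal consumer-side sketch of an upgraded app follows; sessionId, region, and onAnalysisComplete are the documented FaceLivenessDetector props, while the config shape, region value, and backend call are illustrative assumptions to verify against the 3.x README.

import React from 'react';
import { Amplify } from 'aws-amplify'; // 3.x of this package pairs with aws-amplify v6
import { FaceLivenessDetector } from '@aws-amplify/ui-react-liveness';
import '@aws-amplify/ui-react/styles.css';

Amplify.configure({ /* your Amplify config */ });

export default function App({ sessionId }) {
  return (
    <FaceLivenessDetector
      sessionId={sessionId}
      region="us-east-1"
      onAnalysisComplete={async () => {
        // Ask your backend for the verdict (GetFaceLivenessSessionResults).
      }}
    />
  );
}
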
package/dist/esm/components/FaceLivenessDetector/service/utils/liveness.mjs
@@ -1 +1,462 @@
- import{__awaiter as t}from"tslib";import{FaceMatchState as e,IlluminationState as o}from"../types/liveness.mjs";import{LivenessErrorState as n}from"../types/error.mjs";import{FACE_DISTANCE_THRESHOLD as i,REDUCED_THRESHOLD_MOBILE as r,REDUCED_THRESHOLD as a}from"./constants.mjs";function h(t,e,o){return t*(o-e)+e}function l(t){const e=t.flippedCenterX-t.width/2,o=t.flippedCenterX+t.width/2,n=t.centerY-t.height/2,i=t.centerY+t.height/2;return{ovalBoundingBox:{left:e,top:n,right:o,bottom:i},minOvalX:e,maxOvalX:o,minOvalY:n,maxOvalY:i}}function c(t,e){const o=Math.max(t.left,e.left),n=Math.max(t.top,e.top),i=Math.min(t.right,e.right),r=Math.min(t.bottom,e.bottom),a=Math.abs(Math.max(0,i-o)*Math.max(0,r-n));if(0===a)return 0;return a/(Math.abs((t.right-t.left)*(t.bottom-t.top))+Math.abs((e.right-e.left)*(e.bottom-e.top))-a)}function s({sessionInformation:t,videoWidth:e}){var o,n;const i=null===(n=null===(o=null==t?void 0:t.Challenge)||void 0===o?void 0:o.FaceMovementAndLightChallenge)||void 0===n?void 0:n.OvalParameters;if(!(i&&i.CenterX&&i.CenterY&&i.Width&&i.Height))throw new Error("Oval parameters not returned from session information.");return{flippedCenterX:e-i.CenterX,centerX:i.CenterX,centerY:i.CenterY,width:i.Width,height:i.Height}}function d({width:t,height:e,widthSeed:o=1,centerXSeed:n=.5,centerYSeed:i=.5}){const r=e;let a=t;const l=.8*o,c=Math.floor(7*t/16),s=Math.floor(9*t/16),d=Math.floor(7*e/16),f=Math.floor(9*e/16),g=h(n,c,s),u=h(i,d,f);t>=e&&(a=3/4*r);const v=l*a,p=1.618*v;return{flippedCenterX:Math.floor(a-g),centerX:Math.floor(g),centerY:Math.floor(u),width:Math.floor(v),height:Math.floor(p)}}function f({canvas:t,oval:e,scaleFactor:o,videoEl:n}){const{flippedCenterX:i,centerY:r,width:a,height:h}=e,{width:l,height:c}=t.getBoundingClientRect(),s=t.getContext("2d");if(!s)throw new Error("Cannot find Canvas.");{s.clearRect(0,0,l,c),s.fillStyle="rgba(255, 255, 255, 1.0)",s.fillRect(0,0,l,c);const t={width:n.videoWidth,height:n.videoHeight},e={x:(l-t.width*o)/2,y:(c-t.height*o)/2};s.setTransform(o,0,0,o,e.x,e.y),s.beginPath(),s.ellipse(i,r,a/2,h/2,0,0,2*Math.PI),s.strokeStyle="#AEB3B7",s.lineWidth=3,s.stroke(),s.clip(),s.setTransform(1,0,0,1,0,0),s.clearRect(0,0,l,c)}}function g(t,o,n,i){var r,a;let h;const s=null===(a=null===(r=null==i?void 0:i.Challenge)||void 0===r?void 0:r.FaceMovementAndLightChallenge)||void 0===a?void 0:a.ChallengeConfig;if(!(s&&s.OvalIouThreshold&&s.OvalIouHeightThreshold&&s.OvalIouWidthThreshold&&s.FaceIouHeightThreshold&&s.FaceIouWidthThreshold))throw new Error("Challenge information not returned from session information.");const{OvalIouThreshold:d,OvalIouHeightThreshold:f,OvalIouWidthThreshold:g,FaceIouHeightThreshold:u,FaceIouWidthThreshold:p}=s,m=v(t,o),C=m.left,w=m.right,M=m.top,E=m.bottom,{ovalBoundingBox:O,minOvalX:x,minOvalY:b,maxOvalX:T,maxOvalY:I}=l(o),R=c(m,O),D=d,y=o.width*g,F=o.height*f,A=o.width*p,H=o.height*u,X=100*Math.max(Math.min(1,.75*(R-n)/(D-n)+.25),0);return h=R>D&&Math.abs(x-C)<y&&Math.abs(T-w)<y&&Math.abs(I-E)<F?e.MATCHED:b-M>H||E-I>H||x-C>A&&w-T>A?e.TOO_CLOSE:e.TOO_FAR,{faceMatchState:h,faceMatchPercentage:X}}function u(t){const{leftEye:e,rightEye:o,mouth:n}=t,i=[];i[0]=(e[0]+o[0])/2,i[1]=(e[1]+o[1])/2;return{pupilDistance:Math.sqrt(Math.pow(e[0]-o[0],2)+Math.pow(e[1]-o[1],2)),faceHeight:Math.sqrt(Math.pow(i[0]-n[0],2)+Math.pow(i[1]-n[1],2))}}function v(t,e){const{leftEye:o,rightEye:n,nose:i}=t,{height:r,centerY:a}=e,h=a-r/2,l=[];l[0]=(o[0]+n[0])/2,l[1]=(o[1]+n[1])/2;const{pupilDistance:c,faceHeight:s}=u(t),d=(2*c+1.8*s)/2,f=1.618*d;let g,v;l[1]<=(h+r)/2?(g=(l[0]+i[0])/2,v=(l[1]+i[1])/2):(g=l[0],v=l[1]);const p=g-d/2,m=v-f/2;return{left:p,top:m,right:p+d,bottom:m+f}}function p(t){const e=document.createElement("canvas");e.width=t.videoWidth,e.height=t.videoHeight;const n=e.getContext("2d");if(n){n.drawImage(t,0,0,e.width,e.height);const i=n.getImageData(0,0,e.width,e.height).data,r=8,a=new Array(r).fill(0);for(let t=0;t<i.length;t++){a[Math.round(.2126*i[t++]+.7152*i[t++]+.0722*i[t++])%32]++}let h=-1,l=0;for(let t=0;t<r;t++)a[t]>l&&(l=a[t],h=t);return e.remove(),0===h?o.DARK:h===r?o.BRIGHT:o.NORMAL}throw new Error("Cannot find Video Element.")}function m(t){return t.label.toLowerCase().includes("virtual")}function C({ctx:t,prevColor:e,nextColor:o,fraction:n}){const i=t.canvas.width,r=t.canvas.height;t.fillStyle=o,t.fillRect(0,0,i,r*n),1!==n&&(t.fillStyle=e,t.fillRect(0,r*n,i,r*(1-n)))}function w({overlayCanvas:t,prevColor:e,nextColor:o,videoEl:n,ovalDetails:i,heightFraction:r,scaleFactor:a}){const{x:h,y:l}=n.getBoundingClientRect(),{flippedCenterX:c,centerY:s,width:d,height:f}=i,g=c*a+h,u=s*a+l,v=t.width,p=t.height,m=t.getContext("2d");if(!m)throw new Error("Cannot find Overlay Canvas.");m.canvas.width=window.innerWidth,m.canvas.height=window.innerHeight,m.clearRect(0,0,v,p),C({ctx:m,prevColor:e,nextColor:o,fraction:r}),m.save(),m.beginPath(),m.rect(0,0,v,p),m.clip(),m.clearRect(0,0,v,p),m.globalAlpha=.9,C({ctx:m,prevColor:e,nextColor:o,fraction:r}),m.beginPath(),m.ellipse(g,u,d*a/2,f*a/2,0,0,2*Math.PI),m.strokeStyle="white",m.lineWidth=8,m.stroke(),m.clip(),m.clearRect(0,0,v,p),m.globalAlpha=.75,C({ctx:m,prevColor:e,nextColor:o,fraction:r}),m.restore()}const M=t=>!!t;function E(t){return(t.Challenge.FaceMovementAndLightChallenge.ColorSequences||[]).map((({FreshnessColor:t,DownscrollDuration:e,FlatDisplayDuration:o})=>{const n=t.RGB,i=`rgb(${n[0]},${n[1]},${n[2]})`;return void 0!==i&&void 0!==e&&void 0!==o?{color:i,downscrollDuration:e,flatDisplayDuration:o}:void 0})).filter(M)}function O(t){return t.slice(t.indexOf("(")+1,t.indexOf(")")).split(",").map((t=>parseInt(t)))}function x(o,n){return t(this,void 0,void 0,(function*(){let t;switch((yield o.detectFaces(n)).length){case 0:t=e.CANT_IDENTIFY;break;case 1:t=e.FACE_IDENTIFIED;break;default:t=e.TOO_MANY}return t}))}function b({faceDetector:e,videoEl:o,ovalDetails:h,reduceThreshold:l=!1,isMobile:c=!1}){return t(this,void 0,void 0,(function*(){const t=yield e.detectFaces(o);let s,d,f=!1;switch(t.length){case 0:d=n.FACE_DISTANCE_ERROR;break;case 1:{s=t[0];const e=h.width,{pupilDistance:o,faceHeight:g}=u(s),v=2;e&&(f=(v*o+1.8*g)/2/v/e<(l?c?r:a:i),f||(d=n.FACE_DISTANCE_ERROR));break}default:d=n.MULTIPLE_FACES_ERROR}return{isDistanceBelowThreshold:f,error:d}}))}function T({deviceHeight:t,deviceWidth:e,height:o,width:n,top:i,left:r}){return{Height:o/t,Width:n/e,Top:i/t,Left:r/e}}export{f as drawLivenessOvalInCanvas,p as estimateIllumination,w as fillOverlayCanvasFractional,v as generateBboxFromLandmarks,T as getBoundingBox,E as getColorsSequencesFromSessionInformation,x as getFaceMatchState,g as getFaceMatchStateInLivenessOval,c as getIntersectionOverUnion,l as getOvalBoundingBox,s as getOvalDetailsFromSessionInformation,O as getRGBArrayFromColorString,d as getStaticLivenessOvalDetails,m as isCameraDeviceVirtual,M as isClientFreshnessColorSequence,b as isFaceDistanceBelowThreshold};
+ import { FaceMatchState, IlluminationState } from '../types/liveness.mjs';
+ import { LivenessErrorState } from '../types/error.mjs';
+ import { FACE_DISTANCE_THRESHOLD, REDUCED_THRESHOLD_MOBILE, REDUCED_THRESHOLD } from './constants.mjs';
+
+ /* eslint-disable */
+ /**
+  * Returns the random number between min and max
+  * seeded with the provided random seed.
+  */
+ function getScaledValueFromRandomSeed(randomSeed, min, max) {
+     return randomSeed * (max - min) + min;
+ }
+ /**
+  * Returns the bounding box details from an oval
+  */
+ function getOvalBoundingBox(ovalDetails) {
+     const minOvalX = ovalDetails.flippedCenterX - ovalDetails.width / 2;
+     const maxOvalX = ovalDetails.flippedCenterX + ovalDetails.width / 2;
+     const minOvalY = ovalDetails.centerY - ovalDetails.height / 2;
+     const maxOvalY = ovalDetails.centerY + ovalDetails.height / 2;
+     const ovalBoundingBox = {
+         left: minOvalX,
+         top: minOvalY,
+         right: maxOvalX,
+         bottom: maxOvalY,
+     };
+     return { ovalBoundingBox, minOvalX, maxOvalX, minOvalY, maxOvalY };
+ }
+ /**
+  * Returns the ratio of intersection and union of two bounding boxes.
+  */
+ function getIntersectionOverUnion(box1, box2) {
+     const xA = Math.max(box1.left, box2.left);
+     const yA = Math.max(box1.top, box2.top);
+     const xB = Math.min(box1.right, box2.right);
+     const yB = Math.min(box1.bottom, box2.bottom);
+     const intersectionArea = Math.abs(Math.max(0, xB - xA) * Math.max(0, yB - yA));
+     if (intersectionArea === 0) {
+         return 0;
+     }
+     const boxAArea = Math.abs((box1.right - box1.left) * (box1.bottom - box1.top));
+     const boxBArea = Math.abs((box2.right - box2.left) * (box2.bottom - box2.top));
+     return intersectionArea / (boxAArea + boxBArea - intersectionArea);
+ }
+ /**
+  * Returns the details of a randomly generated liveness oval
+  * from SDK
+  */
+ function getOvalDetailsFromSessionInformation({ sessionInformation, videoWidth, }) {
+     const ovalParameters = sessionInformation?.Challenge?.FaceMovementAndLightChallenge
+         ?.OvalParameters;
+     if (!ovalParameters ||
+         !ovalParameters.CenterX ||
+         !ovalParameters.CenterY ||
+         !ovalParameters.Width ||
+         !ovalParameters.Height) {
+         throw new Error('Oval parameters not returned from session information.');
+     }
+     // We need to include a flippedCenterX for visualizing the oval on a flipped camera view
+     // The camera view we show the customer is flipped to make moving left and right more obvious
+     // The video stream sent to the liveness service is not flipped
+     return {
+         flippedCenterX: videoWidth - ovalParameters.CenterX,
+         centerX: ovalParameters.CenterX,
+         centerY: ovalParameters.CenterY,
+         width: ovalParameters.Width,
+         height: ovalParameters.Height,
+     };
+ }
+ /**
+  * Returns the details of a statically generated liveness oval based on the video dimensions
+  */
+ function getStaticLivenessOvalDetails({ width, height, widthSeed = 1.0, centerXSeed = 0.5, centerYSeed = 0.5, ratioMultiplier = 0.8, }) {
+     const videoHeight = height;
+     let videoWidth = width;
+     const ovalRatio = widthSeed * ratioMultiplier;
+     const minOvalCenterX = Math.floor((7 * width) / 16);
+     const maxOvalCenterX = Math.floor((9 * width) / 16);
+     const minOvalCenterY = Math.floor((7 * height) / 16);
+     const maxOvalCenterY = Math.floor((9 * height) / 16);
+     const centerX = getScaledValueFromRandomSeed(centerXSeed, minOvalCenterX, maxOvalCenterX);
+     const centerY = getScaledValueFromRandomSeed(centerYSeed, minOvalCenterY, maxOvalCenterY);
+     if (width >= height) {
+         videoWidth = (3 / 4) * videoHeight;
+     }
+     const ovalWidth = ovalRatio * videoWidth;
+     const ovalHeight = 1.618 * ovalWidth;
+     return {
+         flippedCenterX: Math.floor(videoWidth - centerX),
+         centerX: Math.floor(centerX),
+         centerY: Math.floor(centerY),
+         width: Math.floor(ovalWidth),
+         height: Math.floor(ovalHeight),
+     };
+ }
+ function drawStaticOval(canvasEl, videoEl, videoMediaStream) {
+     const { width, height } = videoMediaStream.getTracks()[0].getSettings();
+     // Get width/height of video element so we can compute scaleFactor
+     // and set canvas width/height.
+     const { width: videoScaledWidth, height: videoScaledHeight } = videoEl.getBoundingClientRect();
+     canvasEl.width = Math.ceil(videoScaledWidth);
+     canvasEl.height = Math.ceil(videoScaledHeight);
+     const ovalDetails = getStaticLivenessOvalDetails({
+         width: width,
+         height: height,
+         ratioMultiplier: 0.5,
+     });
+     ovalDetails.flippedCenterX = width - ovalDetails.centerX;
+     // Compute scaleFactor which is how much our video element is scaled
+     // vs the intrinsic video resolution
+     const scaleFactor = videoScaledWidth / videoEl.videoWidth;
+     // Draw oval in canvas using ovalDetails and scaleFactor
+     drawLivenessOvalInCanvas({
+         canvas: canvasEl,
+         oval: ovalDetails,
+         scaleFactor,
+         videoEl: videoEl,
+         isStartScreen: true,
+     });
+ }
+ /**
+  * Draws the provided liveness oval on the canvas.
+  */
+ function drawLivenessOvalInCanvas({ canvas, oval, scaleFactor, videoEl, isStartScreen, }) {
+     const { flippedCenterX, centerY, width, height } = oval;
+     const { width: canvasWidth, height: canvasHeight } = canvas.getBoundingClientRect();
+     const ctx = canvas.getContext('2d');
+     if (ctx) {
+         ctx.restore();
+         ctx.clearRect(0, 0, canvasWidth, canvasHeight);
+         // fill the canvas with a transparent rectangle
+         ctx.fillStyle = isStartScreen
+             ? getComputedStyle(canvas).getPropertyValue('--amplify-colors-background-primary')
+             : '#fff';
+         ctx.fillRect(0, 0, canvasWidth, canvasHeight);
+         // On mobile our canvas is the width/height of the full screen.
+         // We need to calculate horizontal and vertical translation to reposition
+         // our canvas drawing so the oval is still placed relative to the dimensions
+         // of the video element.
+         const baseDims = { width: videoEl.videoWidth, height: videoEl.videoHeight };
+         const translate = {
+             x: (canvasWidth - baseDims.width * scaleFactor) / 2,
+             y: (canvasHeight - baseDims.height * scaleFactor) / 2,
+         };
+         // Set the transform to scale
+         ctx.setTransform(scaleFactor, 0, 0, scaleFactor, translate.x, translate.y);
+         // draw the oval path
+         ctx.beginPath();
+         ctx.ellipse(flippedCenterX, centerY, width / 2, height / 2, 0, 0, 2 * Math.PI);
+         // add stroke to the oval path
+         ctx.strokeStyle = getComputedStyle(canvas).getPropertyValue('--amplify-colors-border-secondary');
+         ctx.lineWidth = 3;
+         ctx.stroke();
+         ctx.save();
+         ctx.clip();
+         // Restore default canvas transform matrix
+         ctx.setTransform(1, 0, 0, 1, 0, 0);
+         // clear the oval content from the rectangle
+         ctx.clearRect(0, 0, canvasWidth, canvasHeight);
+     }
+     else {
+         throw new Error('Cannot find Canvas.');
+     }
+ }
+ /**
+  * Returns the state of the provided face with respect to the provided liveness oval.
+  */
+ function getFaceMatchStateInLivenessOval(face, ovalDetails, initialFaceIntersection, sessionInformation) {
+     let faceMatchState;
+     const challengeConfig = sessionInformation?.Challenge?.FaceMovementAndLightChallenge
+         ?.ChallengeConfig;
+     if (!challengeConfig ||
+         !challengeConfig.OvalIouThreshold ||
+         !challengeConfig.OvalIouHeightThreshold ||
+         !challengeConfig.OvalIouWidthThreshold ||
+         !challengeConfig.FaceIouHeightThreshold ||
+         !challengeConfig.FaceIouWidthThreshold) {
+         throw new Error('Challenge information not returned from session information.');
+     }
+     const { OvalIouThreshold, OvalIouHeightThreshold, OvalIouWidthThreshold, FaceIouHeightThreshold, FaceIouWidthThreshold, } = challengeConfig;
+     const faceBoundingBox = generateBboxFromLandmarks(face, ovalDetails);
+     const minFaceX = faceBoundingBox.left;
+     const maxFaceX = faceBoundingBox.right;
+     const minFaceY = faceBoundingBox.top;
+     const maxFaceY = faceBoundingBox.bottom;
+     const { ovalBoundingBox, minOvalX, minOvalY, maxOvalX, maxOvalY } = getOvalBoundingBox(ovalDetails);
+     const intersection = getIntersectionOverUnion(faceBoundingBox, ovalBoundingBox);
+     const intersectionThreshold = OvalIouThreshold;
+     const ovalMatchWidthThreshold = ovalDetails.width * OvalIouWidthThreshold;
+     const ovalMatchHeightThreshold = ovalDetails.height * OvalIouHeightThreshold;
+     const faceDetectionWidthThreshold = ovalDetails.width * FaceIouWidthThreshold;
+     const faceDetectionHeightThreshold = ovalDetails.height * FaceIouHeightThreshold;
+     /** From Science
+      * p = max(min(1, 0.75 * (s_i - s_0) / (s_t - s_0) + 0.25), 0)
+      */
+     const faceMatchPercentage = Math.max(Math.min(1, (0.75 * (intersection - initialFaceIntersection)) /
+         (intersectionThreshold - initialFaceIntersection) +
+         0.25), 0) * 100;
+     if (intersection > intersectionThreshold &&
+         Math.abs(minOvalX - minFaceX) < ovalMatchWidthThreshold &&
+         Math.abs(maxOvalX - maxFaceX) < ovalMatchWidthThreshold &&
+         Math.abs(maxOvalY - maxFaceY) < ovalMatchHeightThreshold) {
+         faceMatchState = FaceMatchState.MATCHED;
+     }
+     else if (minOvalY - minFaceY > faceDetectionHeightThreshold ||
+         maxFaceY - maxOvalY > faceDetectionHeightThreshold ||
+         (minOvalX - minFaceX > faceDetectionWidthThreshold &&
+             maxFaceX - maxOvalX > faceDetectionWidthThreshold)) {
+         faceMatchState = FaceMatchState.TOO_CLOSE;
+     }
+     else {
+         faceMatchState = FaceMatchState.TOO_FAR;
+     }
+     return { faceMatchState, faceMatchPercentage };
+ }
+ function getPupilDistanceAndFaceHeight(face) {
+     const { leftEye, rightEye, mouth } = face;
+     const eyeCenter = [];
+     eyeCenter[0] = (leftEye[0] + rightEye[0]) / 2;
+     eyeCenter[1] = (leftEye[1] + rightEye[1]) / 2;
+     const pupilDistance = Math.sqrt((leftEye[0] - rightEye[0]) ** 2 + (leftEye[1] - rightEye[1]) ** 2);
+     const faceHeight = Math.sqrt((eyeCenter[0] - mouth[0]) ** 2 + (eyeCenter[1] - mouth[1]) ** 2);
+     return { pupilDistance, faceHeight };
+ }
+ function generateBboxFromLandmarks(face, oval) {
+     const { leftEye, rightEye, nose, leftEar, rightEar, top: faceTop, height: faceHeight, } = face;
+     const { height: ovalHeight, centerY } = oval;
+     const ovalTop = centerY - ovalHeight / 2;
+     const eyeCenter = [];
+     eyeCenter[0] = (leftEye[0] + rightEye[0]) / 2;
+     eyeCenter[1] = (leftEye[1] + rightEye[1]) / 2;
+     const { pupilDistance: pd, faceHeight: fh } = getPupilDistanceAndFaceHeight(face);
+     const alpha = 2.0, gamma = 1.8;
+     const ow = (alpha * pd + gamma * fh) / 2;
+     const oh = 1.618 * ow;
+     let cx;
+     if (eyeCenter[1] <= (ovalTop + ovalHeight) / 2) {
+         cx = (eyeCenter[0] + nose[0]) / 2;
+     }
+     else {
+         cx = eyeCenter[0];
+     }
+     const faceBottom = faceTop + faceHeight;
+     const top = faceBottom - oh;
+     const left = Math.min(cx - ow / 2, rightEar[0]);
+     const right = Math.max(cx + ow / 2, leftEar[0]);
+     return {
+         left: left,
+         top: top,
+         right: right,
+         bottom: faceBottom,
+     };
+ }
+ /**
+  * Returns the illumination state in the provided video frame.
+  */
+ function estimateIllumination(videoEl) {
+     const canvasEl = document.createElement('canvas');
+     canvasEl.width = videoEl.videoWidth;
+     canvasEl.height = videoEl.videoHeight;
+     const ctx = canvasEl.getContext('2d');
+     if (ctx) {
+         ctx.drawImage(videoEl, 0, 0, canvasEl.width, canvasEl.height);
+         const frame = ctx.getImageData(0, 0, canvasEl.width, canvasEl.height).data;
+         // histogram
+         const MAX_SCALE = 8;
+         const hist = new Array(MAX_SCALE).fill(0);
+         for (let i = 0; i < frame.length; i++) {
+             const luma = Math.round(frame[i++] * 0.2126 + frame[i++] * 0.7152 + frame[i++] * 0.0722);
+             hist[luma % 32]++;
+         }
+         let ind = -1, maxCount = 0;
+         for (let i = 0; i < MAX_SCALE; i++) {
+             if (hist[i] > maxCount) {
+                 maxCount = hist[i];
+                 ind = i;
+             }
+         }
+         canvasEl.remove();
+         if (ind === 0)
+             return IlluminationState.DARK;
+         if (ind === MAX_SCALE)
+             return IlluminationState.BRIGHT;
+         return IlluminationState.NORMAL;
+     }
+     else {
+         throw new Error('Cannot find Video Element.');
+     }
+ }
+ /**
+  * Checks if the provided media device is a virtual camera.
+  * @param device
+  */
+ function isCameraDeviceVirtual(device) {
+     return device.label.toLowerCase().includes('virtual');
+ }
+ const INITIAL_ALPHA = 0.9;
+ const SECONDARY_ALPHA = 0.75;
+ function fillFractionalContext({ ctx, prevColor, nextColor, fraction, }) {
+     const canvasWidth = ctx.canvas.width;
+     const canvasHeight = ctx.canvas.height;
+     ctx.fillStyle = nextColor;
+     ctx.fillRect(0, 0, canvasWidth, canvasHeight * fraction);
+     if (fraction !== 1) {
+         ctx.fillStyle = prevColor;
+         ctx.fillRect(0, canvasHeight * fraction, canvasWidth, canvasHeight * (1 - fraction));
+     }
+ }
+ function fillOverlayCanvasFractional({ overlayCanvas, prevColor, nextColor, videoEl, ovalDetails, heightFraction, scaleFactor, }) {
+     const { x: videoX, y: videoY } = videoEl.getBoundingClientRect();
+     const { flippedCenterX, centerY, width, height } = ovalDetails;
+     const updatedCenterX = flippedCenterX * scaleFactor + videoX;
+     const updatedCenterY = centerY * scaleFactor + videoY;
+     const canvasWidth = overlayCanvas.width;
+     const canvasHeight = overlayCanvas.height;
+     const ctx = overlayCanvas.getContext('2d');
+     if (ctx) {
+         // Because the canvas is set to 100% we need to manually set the height for the canvas to use pixel values
+         ctx.canvas.width = window.innerWidth;
+         ctx.canvas.height = window.innerHeight;
+         ctx.clearRect(0, 0, canvasWidth, canvasHeight);
+         // fill the complete canvas
+         fillFractionalContext({
+             ctx,
+             prevColor,
+             nextColor,
+             fraction: heightFraction,
+         });
+         // save the current state
+         ctx.save();
+         // draw the rectangle path and fill it
+         ctx.beginPath();
+         ctx.rect(0, 0, canvasWidth, canvasHeight);
+         ctx.clip();
+         ctx.clearRect(0, 0, canvasWidth, canvasHeight);
+         ctx.globalAlpha = INITIAL_ALPHA;
+         fillFractionalContext({
+             ctx,
+             prevColor,
+             nextColor,
+             fraction: heightFraction,
+         });
+         // draw the oval path and fill it
+         ctx.beginPath();
+         ctx.ellipse(updatedCenterX, updatedCenterY, (width * scaleFactor) / 2, (height * scaleFactor) / 2, 0, 0, 2 * Math.PI);
+         // add stroke to the oval path
+         ctx.strokeStyle = 'white';
+         ctx.lineWidth = 8;
+         ctx.stroke();
+         ctx.clip();
+         ctx.clearRect(0, 0, canvasWidth, canvasHeight);
+         ctx.globalAlpha = SECONDARY_ALPHA;
+         fillFractionalContext({
+             ctx,
+             prevColor,
+             nextColor,
+             fraction: heightFraction,
+         });
+         // restore the state
+         ctx.restore();
+     }
+     else {
+         throw new Error('Cannot find Overlay Canvas.');
+     }
+ }
+ const isClientFreshnessColorSequence = (obj) => !!obj;
+ function getColorsSequencesFromSessionInformation(sessionInformation) {
+     const colorSequenceFromSessionInfo = sessionInformation.Challenge.FaceMovementAndLightChallenge
+         .ColorSequences || [];
+     const colorSequences = colorSequenceFromSessionInfo.map(({ FreshnessColor, DownscrollDuration: downscrollDuration, FlatDisplayDuration: flatDisplayDuration, }) => {
+         const colorArray = FreshnessColor.RGB;
+         const color = `rgb(${colorArray[0]},${colorArray[1]},${colorArray[2]})`;
+         return typeof color !== 'undefined' &&
+             typeof downscrollDuration !== 'undefined' &&
+             typeof flatDisplayDuration !== 'undefined'
+             ? {
+                 color,
+                 downscrollDuration,
+                 flatDisplayDuration,
+             }
+             : undefined;
+     });
+     return colorSequences.filter(isClientFreshnessColorSequence);
+ }
+ function getRGBArrayFromColorString(colorStr) {
+     return colorStr
+         .slice(colorStr.indexOf('(') + 1, colorStr.indexOf(')'))
+         .split(',')
+         .map((str) => parseInt(str));
+ }
+ async function getFaceMatchState(faceDetector, videoEl) {
+     const detectedFaces = await faceDetector.detectFaces(videoEl);
+     let faceMatchState;
+     switch (detectedFaces.length) {
+         case 0: {
+             // no face detected
+             faceMatchState = FaceMatchState.CANT_IDENTIFY;
+             break;
+         }
+         case 1: {
+             // exactly one face detected, match face with oval
+             faceMatchState = FaceMatchState.FACE_IDENTIFIED;
+             break;
+         }
+         default: {
+             // more than one face detected
+             faceMatchState = FaceMatchState.TOO_MANY;
+             break;
+         }
+     }
+     return faceMatchState;
+ }
+ async function isFaceDistanceBelowThreshold({ faceDetector, videoEl, ovalDetails, reduceThreshold = false, isMobile = false, }) {
+     const detectedFaces = await faceDetector.detectFaces(videoEl);
+     let detectedFace;
+     let isDistanceBelowThreshold = false;
+     let error;
+     switch (detectedFaces.length) {
+         case 0: {
+             // no face detected
+             error = LivenessErrorState.FACE_DISTANCE_ERROR;
+             break;
+         }
+         case 1: {
+             // exactly one face detected, match face with oval
+             detectedFace = detectedFaces[0];
+             const width = ovalDetails.width;
+             const { pupilDistance, faceHeight } = getPupilDistanceAndFaceHeight(detectedFace);
+             const alpha = 2.0, gamma = 1.8;
+             const calibratedPupilDistance = (alpha * pupilDistance + gamma * faceHeight) / 2 / alpha;
+             if (width) {
+                 isDistanceBelowThreshold =
+                     calibratedPupilDistance / width <
+                         (!reduceThreshold
+                             ? FACE_DISTANCE_THRESHOLD
+                             : isMobile
+                                 ? REDUCED_THRESHOLD_MOBILE
+                                 : REDUCED_THRESHOLD);
+                 if (!isDistanceBelowThreshold) {
+                     error = LivenessErrorState.FACE_DISTANCE_ERROR;
+                 }
+             }
+             break;
+         }
+         default: {
+             // more than one face detected
+             error = LivenessErrorState.MULTIPLE_FACES_ERROR;
+             break;
+         }
+     }
+     return { isDistanceBelowThreshold, error };
+ }
+ function getBoundingBox({ deviceHeight, deviceWidth, height, width, top, left, }) {
+     return {
+         Height: height / deviceHeight,
+         Width: width / deviceWidth,
+         Top: top / deviceHeight,
+         Left: left / deviceWidth,
+     };
+ }
+
+ export { drawLivenessOvalInCanvas, drawStaticOval, estimateIllumination, fillOverlayCanvasFractional, generateBboxFromLandmarks, getBoundingBox, getColorsSequencesFromSessionInformation, getFaceMatchState, getFaceMatchStateInLivenessOval, getIntersectionOverUnion, getOvalBoundingBox, getOvalDetailsFromSessionInformation, getRGBArrayFromColorString, getStaticLivenessOvalDetails, isCameraDeviceVirtual, isClientFreshnessColorSequence, isFaceDistanceBelowThreshold };
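
The face-in-oval decision above reduces to a plain bounding-box intersection-over-union. A standalone sketch with made-up coordinates; the IoU function is copied from the hunk so the snippet runs without the package:

function getIntersectionOverUnion(box1, box2) {
    const xA = Math.max(box1.left, box2.left);
    const yA = Math.max(box1.top, box2.top);
    const xB = Math.min(box1.right, box2.right);
    const yB = Math.min(box1.bottom, box2.bottom);
    const intersectionArea = Math.abs(Math.max(0, xB - xA) * Math.max(0, yB - yA));
    if (intersectionArea === 0) {
        return 0;
    }
    const boxAArea = Math.abs((box1.right - box1.left) * (box1.bottom - box1.top));
    const boxBArea = Math.abs((box2.right - box2.left) * (box2.bottom - box2.top));
    return intersectionArea / (boxAArea + boxBArea - intersectionArea);
}

// Two offset 100x160 boxes: intersection = 80 * 150 = 12000,
// union = 16000 + 16000 - 12000 = 20000, so IoU = 12000 / 20000 = 0.6.
const faceBox = { left: 10, top: 10, right: 110, bottom: 170 };
const ovalBox = { left: 30, top: 0, right: 130, bottom: 160 };
console.log(getIntersectionOverUnion(faceBox, ovalBox)); // 0.6

getFaceMatchStateInLivenessOval then turns the raw IoU s_i into the on-screen percentage via p = max(min(1, 0.75 * (s_i - s_0) / (s_t - s_0) + 0.25), 0), where s_0 is the intersection measured when the check starts and s_t is the OvalIouThreshold from the session's ChallengeConfig.
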
package/dist/esm/components/FaceLivenessDetector/service/utils/streamProvider.mjs
@@ -1 +1,144 @@
- import{__awaiter as e,__asyncGenerator as i,__await as t}from"tslib";import{Credentials as o,getAmplifyUserAgent as n}from"@aws-amplify/core";import{RekognitionStreamingClient as r,StartFaceLivenessSessionCommand as s}from"@aws-sdk/client-rekognitionstreaming";import{VideoRecorder as d}from"./videoRecorder.mjs";import{getLivenessUserAgent as a}from"../../utils/platform.mjs";import{CustomWebSocketFetchHandler as l}from"./CustomWebSocketFetchHandler.mjs";const h=1e3;function c(e){return void 0!==e.Challenge}function v(e){return void 0!==e.code}class m{constructor({sessionId:e,region:i,stream:t,videoEl:o,credentialProvider:n,endpointOverride:r}){this.sessionId=e,this.region=i,this._stream=t,this.videoEl=o,this.videoRecorder=new d(t),this.credentialProvider=n,this.endpointOverride=r,this.initPromise=this.init()}getResponseStream(){return e(this,void 0,void 0,(function*(){return yield this.initPromise,this.responseStream}))}startRecordingLivenessVideo(){this.videoRecorder.start(1e3)}sendClientInfo(e){this.videoRecorder.dispatch(new MessageEvent("clientSesssionInfo",{data:{clientInfo:e}}))}stopVideo(){return e(this,void 0,void 0,(function*(){yield this.videoRecorder.stop()}))}dispatchStopVideoEvent(){this.videoRecorder.dispatch(new Event("stopVideo"))}endStreamWithCode(i){return e(this,void 0,void 0,(function*(){"recording"===this.videoRecorder.getState()&&(yield this.stopVideo()),this.videoRecorder.dispatch(new MessageEvent("endStreamWithCode",{data:{code:i}}))}))}init(){var i;return e(this,void 0,void 0,(function*(){const e=null!==(i=this.credentialProvider)&&void 0!==i?i:yield o.get();if(!e)throw new Error("No credentials");const t={credentials:e,region:this.region,customUserAgent:`${n()} ${a()}`,requestHandler:new l({connectionTimeout:1e4})};if(this.endpointOverride){const e=this.endpointOverride;t.endpointProvider=()=>({url:new URL(e)})}this._client=new r(t),this.responseStream=yield this.startLivenessVideoConnection()}))}getAsyncGeneratorFromReadableStream(e){const o=this;return this._reader=e.getReader(),function(){return i(this,arguments,(function*(){for(;;){const{done:e,value:i}=yield t(o._reader.read());if(e)return yield t(void 0);if("stopVideo"===i)yield yield t({VideoEvent:{VideoChunk:[],TimestampMillis:Date.now()}});else if(void 0!==i.arrayBuffer){const e=yield t(i.arrayBuffer()),o=new Uint8Array(e);o.length>0&&(yield yield t({VideoEvent:{VideoChunk:o,TimestampMillis:Date.now()}}))}else c(i)?yield yield t({ClientSessionInformationEvent:{Challenge:i.Challenge}}):v(i)&&(yield yield t({VideoEvent:{VideoChunk:[],TimestampMillis:{closeCode:i.code}}}))}}))}}startLivenessVideoConnection(){return e(this,void 0,void 0,(function*(){const e=this.getAsyncGeneratorFromReadableStream(this.videoRecorder.videoStream)();return(yield this._client.send(new s({ChallengeVersions:"FaceMovementAndLightChallenge_1.0.0",SessionId:this.sessionId,LivenessRequestStream:e,VideoWidth:this.videoEl.videoWidth.toString(),VideoHeight:this.videoEl.videoHeight.toString()}))).LivenessResponseStream}))}}export{m as LivenessStreamProvider,h as TIME_SLICE};
+ import { getAmplifyUserAgent } from '@aws-amplify/core/internals/utils';
+ import { fetchAuthSession } from 'aws-amplify/auth';
+ import { RekognitionStreamingClient, StartFaceLivenessSessionCommand } from '@aws-sdk/client-rekognitionstreaming';
+ import { VideoRecorder } from './videoRecorder.mjs';
+ import { getLivenessUserAgent } from '../../utils/platform.mjs';
+ import { CustomWebSocketFetchHandler } from './CustomWebSocketFetchHandler.mjs';
+
+ const TIME_SLICE = 1000;
+ function isBlob(obj) {
+     return obj.arrayBuffer !== undefined;
+ }
+ function isClientSessionInformationEvent(obj) {
+     return obj.Challenge !== undefined;
+ }
+ function isEndStreamWithCodeEvent(obj) {
+     return obj.code !== undefined;
+ }
+ class LivenessStreamProvider {
+     constructor({ sessionId, region, stream, videoEl, credentialProvider, endpointOverride, }) {
+         this.sessionId = sessionId;
+         this.region = region;
+         this._stream = stream;
+         this.videoEl = videoEl;
+         this.videoRecorder = new VideoRecorder(stream);
+         this.credentialProvider = credentialProvider;
+         this.endpointOverride = endpointOverride;
+         this.initPromise = this.init();
+     }
+     async getResponseStream() {
+         await this.initPromise;
+         return this.responseStream;
+     }
+     startRecordingLivenessVideo() {
+         this.videoRecorder.start(TIME_SLICE);
+     }
+     sendClientInfo(clientInfo) {
+         this.videoRecorder.dispatch(new MessageEvent('clientSesssionInfo', {
+             data: { clientInfo },
+         }));
+     }
+     async stopVideo() {
+         await this.videoRecorder.stop();
+     }
+     dispatchStopVideoEvent() {
+         this.videoRecorder.dispatch(new Event('stopVideo'));
+     }
+     async endStreamWithCode(code) {
+         if (this.videoRecorder.getState() === 'recording') {
+             await this.stopVideo();
+         }
+         this.videoRecorder.dispatch(new MessageEvent('endStreamWithCode', {
+             data: { code: code },
+         }));
+         return;
+     }
+     async init() {
+         const credentials = this.credentialProvider ?? (await fetchAuthSession()).credentials;
+         if (!credentials) {
+             throw new Error('No credentials');
+         }
+         const clientconfig = {
+             credentials,
+             region: this.region,
+             customUserAgent: `${getAmplifyUserAgent()} ${getLivenessUserAgent()}`,
+             requestHandler: new CustomWebSocketFetchHandler({
+                 connectionTimeout: 10000,
+             }),
+         };
+         if (this.endpointOverride) {
+             const override = this.endpointOverride;
+             clientconfig.endpointProvider = () => {
+                 const url = new URL(override);
+                 return { url };
+             };
+         }
+         this._client = new RekognitionStreamingClient(clientconfig);
+         this.responseStream = await this.startLivenessVideoConnection();
+     }
+     // Creates a generator from a stream of video chunks and livenessActionDocuments and yields VideoEvent and ClientEvents
+     getAsyncGeneratorFromReadableStream(stream) {
+         // eslint-disable-next-line @typescript-eslint/no-this-alias
+         const current = this;
+         this._reader = stream.getReader();
+         return async function* () {
+             while (true) {
+                 // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
+                 const { done, value } = await current._reader.read();
+                 if (done) {
+                     return;
+                 }
+                 // Video chunk blobs should be sent as video events
+                 if (value === 'stopVideo') {
+                     // sending an empty video chunk signals that we have ended sending video
+                     yield {
+                         VideoEvent: {
+                             VideoChunk: [],
+                             TimestampMillis: Date.now(),
+                         },
+                     };
+                 }
+                 else if (isBlob(value)) {
+                     const buffer = await value.arrayBuffer();
+                     const chunk = new Uint8Array(buffer);
+                     if (chunk.length > 0) {
+                         yield {
+                             VideoEvent: {
+                                 VideoChunk: chunk,
+                                 TimestampMillis: Date.now(),
+                             },
+                         };
+                     }
+                 }
+                 else if (isClientSessionInformationEvent(value)) {
+                     yield {
+                         ClientSessionInformationEvent: {
+                             Challenge: value.Challenge,
+                         },
+                     };
+                 }
+                 else if (isEndStreamWithCodeEvent(value)) {
+                     yield {
+                         VideoEvent: {
+                             VideoChunk: [],
+                             TimestampMillis: { closeCode: value.code },
+                         },
+                     };
+                 }
+             }
+         };
+     }
+     async startLivenessVideoConnection() {
+         const livenessRequestGenerator = this.getAsyncGeneratorFromReadableStream(this.videoRecorder.videoStream)();
+         const response = await this._client.send(new StartFaceLivenessSessionCommand({
+             ChallengeVersions: 'FaceMovementAndLightChallenge_1.0.0',
+             SessionId: this.sessionId,
+             LivenessRequestStream: livenessRequestGenerator,
+             VideoWidth: this.videoEl.videoWidth.toString(),
+             VideoHeight: this.videoEl.videoHeight.toString(),
+         }));
+         return response.LivenessResponseStream;
+     }
+ }
+
+ export { LivenessStreamProvider, TIME_SLICE };
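
getAsyncGeneratorFromReadableStream is the bridge between the recorder's ReadableStream and the SDK's event stream: it pulls values with a reader and re-emits each one as a VideoEvent or ClientSessionInformationEvent. A runnable sketch of the same reader-to-generator pattern, with a mock stream standing in for videoRecorder.videoStream:

const mockStream = new ReadableStream({
    start(controller) {
        controller.enqueue(new Blob([new Uint8Array([1, 2, 3])])); // a video chunk
        controller.enqueue('stopVideo'); // the end-of-video sentinel
        controller.close();
    },
});

async function* toVideoEvents(stream) {
    const reader = stream.getReader();
    while (true) {
        const { done, value } = await reader.read();
        if (done) return;
        if (value === 'stopVideo') {
            // an empty chunk signals that video has ended
            yield { VideoEvent: { VideoChunk: [], TimestampMillis: Date.now() } };
        } else {
            const chunk = new Uint8Array(await value.arrayBuffer());
            if (chunk.length > 0) {
                yield { VideoEvent: { VideoChunk: chunk, TimestampMillis: Date.now() } };
            }
        }
    }
}

for await (const event of toVideoEvents(mockStream)) {
    console.log(event.VideoEvent.VideoChunk.length); // 3, then 0
}
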
package/dist/esm/components/FaceLivenessDetector/service/utils/support.mjs
@@ -1 +1,14 @@
- function e(){try{return!(!window.WebAssembly||!window.WebAssembly.compile&&!window.WebAssembly.compileStreaming)}catch(e){return!1}}export{e as isWebAssemblySupported};
+ /**
+  * Checks whether WebAssembly is supported in the current environment.
+  */
+ function isWebAssemblySupported() {
+     try {
+         return (!!window.WebAssembly &&
+             (!!window.WebAssembly.compile || !!window.WebAssembly.compileStreaming));
+     }
+     catch (e) {
+         return false;
+     }
+ }
+
+ export { isWebAssemblySupported };
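
Unminified, the intent of the check is readable: WebAssembly must exist and expose either compile or compileStreaming. A usage sketch; the call site is illustrative, not taken from the package:

import { isWebAssemblySupported } from './support.mjs';

if (!isWebAssemblySupported()) {
    // Skip mounting the camera module and show an unsupported-browser message;
    // the face detector's WASM backend cannot initialize in this environment.
    console.warn('WebAssembly is unavailable; the liveness check cannot run.');
}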