@aws-amplify/ui-react-liveness 2.0.11 → 3.0.0

This diff shows the contents of publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
Files changed (66)
  1. package/dist/esm/components/FaceLivenessDetector/FaceLivenessDetector.mjs +17 -1
  2. package/dist/esm/components/FaceLivenessDetector/FaceLivenessDetectorCore.mjs +42 -1
  3. package/dist/esm/components/FaceLivenessDetector/LivenessCheck/LivenessCameraModule.mjs +199 -1
  4. package/dist/esm/components/FaceLivenessDetector/LivenessCheck/LivenessCheck.mjs +97 -1
  5. package/dist/esm/components/FaceLivenessDetector/displayText.mjs +50 -1
  6. package/dist/esm/components/FaceLivenessDetector/hooks/useLivenessActor.mjs +13 -1
  7. package/dist/esm/components/FaceLivenessDetector/hooks/useLivenessSelector.mjs +12 -1
  8. package/dist/esm/components/FaceLivenessDetector/hooks/useMediaStreamInVideo.mjs +38 -1
  9. package/dist/esm/components/FaceLivenessDetector/providers/FaceLivenessDetectorProvider.mjs +15 -1
  10. package/dist/esm/components/FaceLivenessDetector/service/machine/index.mjs +1130 -1
  11. package/dist/esm/components/FaceLivenessDetector/service/types/error.mjs +16 -1
  12. package/dist/esm/components/FaceLivenessDetector/service/types/faceDetection.mjs +15 -1
  13. package/dist/esm/components/FaceLivenessDetector/service/types/liveness.mjs +23 -1
  14. package/dist/esm/components/FaceLivenessDetector/service/utils/CustomWebSocketFetchHandler.mjs +200 -1
  15. package/dist/esm/components/FaceLivenessDetector/service/utils/blazefaceFaceDetection.mjs +102 -1
  16. package/dist/esm/components/FaceLivenessDetector/service/utils/constants.mjs +18 -1
  17. package/dist/esm/components/FaceLivenessDetector/service/utils/eventUtils.mjs +30 -1
  18. package/dist/esm/components/FaceLivenessDetector/service/utils/freshnessColorDisplay.mjs +131 -1
  19. package/dist/esm/components/FaceLivenessDetector/service/utils/liveness.mjs +462 -1
  20. package/dist/esm/components/FaceLivenessDetector/service/utils/streamProvider.mjs +144 -1
  21. package/dist/esm/components/FaceLivenessDetector/service/utils/support.mjs +14 -1
  22. package/dist/esm/components/FaceLivenessDetector/service/utils/videoRecorder.mjs +98 -1
  23. package/dist/esm/components/FaceLivenessDetector/shared/CancelButton.mjs +24 -1
  24. package/dist/esm/components/FaceLivenessDetector/shared/DefaultStartScreenComponents.mjs +41 -1
  25. package/dist/esm/components/FaceLivenessDetector/shared/FaceLivenessErrorModal.mjs +88 -1
  26. package/dist/esm/components/FaceLivenessDetector/shared/Hint.mjs +114 -1
  27. package/dist/esm/components/FaceLivenessDetector/shared/LandscapeErrorModal.mjs +30 -1
  28. package/dist/esm/components/FaceLivenessDetector/shared/LivenessIconWithPopover.mjs +37 -1
  29. package/dist/esm/components/FaceLivenessDetector/shared/MatchIndicator.mjs +24 -1
  30. package/dist/esm/components/FaceLivenessDetector/shared/Overlay.mjs +9 -1
  31. package/dist/esm/components/FaceLivenessDetector/shared/RecordingIcon.mjs +13 -1
  32. package/dist/esm/components/FaceLivenessDetector/shared/Toast.mjs +12 -1
  33. package/dist/esm/components/FaceLivenessDetector/types/classNames.mjs +54 -1
  34. package/dist/esm/components/FaceLivenessDetector/utils/device.mjs +24 -1
  35. package/dist/esm/components/FaceLivenessDetector/utils/getDisplayText.mjs +78 -1
  36. package/dist/esm/components/FaceLivenessDetector/utils/helpers.mjs +14 -0
  37. package/dist/esm/components/FaceLivenessDetector/utils/platform.mjs +8 -1
  38. package/dist/esm/index.mjs +2 -1
  39. package/dist/esm/version.mjs +3 -1
  40. package/dist/index.js +3208 -1
  41. package/dist/styles.css +343 -680
  42. package/dist/types/components/FaceLivenessDetector/FaceLivenessDetector.d.ts +1 -1
  43. package/dist/types/components/FaceLivenessDetector/FaceLivenessDetectorCore.d.ts +1 -3
  44. package/dist/types/components/FaceLivenessDetector/LivenessCheck/LivenessCameraModule.d.ts +7 -3
  45. package/dist/types/components/FaceLivenessDetector/LivenessCheck/LivenessCheck.d.ts +5 -3
  46. package/dist/types/components/FaceLivenessDetector/displayText.d.ts +3 -10
  47. package/dist/types/components/FaceLivenessDetector/service/machine/index.d.ts +1 -1
  48. package/dist/types/components/FaceLivenessDetector/service/types/faceDetection.d.ts +2 -0
  49. package/dist/types/components/FaceLivenessDetector/service/types/liveness.d.ts +1 -1
  50. package/dist/types/components/FaceLivenessDetector/service/types/machine.d.ts +3 -1
  51. package/dist/types/components/FaceLivenessDetector/service/utils/blazefaceFaceDetection.d.ts +4 -3
  52. package/dist/types/components/FaceLivenessDetector/service/utils/liveness.d.ts +5 -2
  53. package/dist/types/components/FaceLivenessDetector/shared/DefaultStartScreenComponents.d.ts +9 -15
  54. package/dist/types/components/FaceLivenessDetector/shared/Overlay.d.ts +2 -5
  55. package/dist/types/components/FaceLivenessDetector/shared/Toast.d.ts +1 -0
  56. package/dist/types/components/FaceLivenessDetector/types/classNames.d.ts +3 -0
  57. package/dist/types/version.d.ts +1 -1
  58. package/package.json +16 -37
  59. package/dist/esm/components/FaceLivenessDetector/StartLiveness/StartLiveness.mjs +0 -1
  60. package/dist/esm/components/FaceLivenessDetector/StartLiveness/helpers.mjs +0 -1
  61. package/dist/esm/components/FaceLivenessDetector/shared/GoodFitIllustration.mjs +0 -1
  62. package/dist/esm/components/FaceLivenessDetector/shared/StartScreenFigure.mjs +0 -1
  63. package/dist/esm/components/FaceLivenessDetector/shared/TooFarIllustration.mjs +0 -1
  64. package/dist/types/components/FaceLivenessDetector/StartLiveness/StartLiveness.d.ts +0 -9
  65. package/dist/types/components/FaceLivenessDetector/StartLiveness/index.d.ts +0 -1
  66. package/dist/types/components/FaceLivenessDetector/{StartLiveness → utils}/helpers.d.ts +0 -0
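
The migration-relevant pattern in this list: the StartLiveness screen modules (items 59-66) were removed in 3.0.0, with helpers.d.ts relocated from StartLiveness/ to utils/, and the start screen appears to have been folded into DefaultStartScreenComponents (item 24). Below is a minimal consumer sketch, assuming the documented FaceLivenessDetector props (sessionId, region, onAnalysisComplete, and the components slots that also appear in the bundled code further down); all names in the body are illustrative, not taken from this diff:

import * as React from 'react';
import { FaceLivenessDetector } from '@aws-amplify/ui-react-liveness';

// Sketch only: sessionId is assumed to come from a CreateFaceLivenessSession
// call made by your backend; the component itself does not create sessions.
export function LivenessPage({ sessionId }: { sessionId: string }) {
  return (
    <FaceLivenessDetector
      sessionId={sessionId}
      region="us-east-1"
      onAnalysisComplete={async () => {
        // Assumption: fetch the verdict server-side (GetFaceLivenessSessionResults).
      }}
      components={{
        // Header is one of the slot names visible in the bundled code below.
        Header: () => <p>Position your face in the oval</p>,
      }}
    />
  );
}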
package/dist/index.js CHANGED
@@ -1 +1,3208 @@
- "use strict";Object.defineProperty(exports,"__esModule",{value:!0});var e=require("tslib"),t=require("react"),a=require("@aws-amplify/core"),i=require("@xstate/react"),r=require("xstate"),n=require("@tensorflow/tfjs-core"),o=require("@tensorflow-models/blazeface"),s=require("@tensorflow/tfjs-backend-wasm");require("@tensorflow/tfjs-backend-cpu");var c=require("@aws-sdk/client-rekognitionstreaming"),l=require("@aws-sdk/util-format-url"),d=require("@smithy/eventstream-serde-browser"),h=require("@smithy/fetch-http-handler"),u=require("@smithy/protocol-http"),m=require("nanoid"),f=require("@aws-amplify/ui-react"),v=require("@aws-amplify/ui-react/internal"),g=require("@aws-amplify/ui"),p=require("classnames");function E(e){return e&&"object"==typeof e&&"default"in e?e:{default:e}}function T(e){if(e&&e.__esModule)return e;var t=Object.create(null);return e&&Object.keys(e).forEach((function(a){if("default"!==a){var i=Object.getOwnPropertyDescriptor(e,a);Object.defineProperty(t,a,i.get?i:{enumerable:!0,get:function(){return e[a]}})}})),t.default=e,Object.freeze(t)}var C,S,y=E(t),x=T(t),F=T(n),R=T(o),M=T(s),w=E(p);class A{triggerModelLoading(){this.modelLoadingPromise=this.loadModels()}}!function(e){e.DARK="dark",e.BRIGHT="bright",e.NORMAL="normal"}(C||(C={})),function(e){e.MATCHED="MATCHED",e.TOO_FAR="TOO FAR",e.TOO_CLOSE="TOO CLOSE",e.CANT_IDENTIFY="CANNOT IDENTIFY",e.FACE_IDENTIFIED="ONE FACE IDENTIFIED",e.TOO_MANY="TOO MANY FACES"}(S||(S={}));const _={TIMEOUT:"TIMEOUT",RUNTIME_ERROR:"RUNTIME_ERROR",FRESHNESS_TIMEOUT:"FRESHNESS_TIMEOUT",SERVER_ERROR:"SERVER_ERROR",CAMERA_FRAMERATE_ERROR:"CAMERA_FRAMERATE_ERROR",CAMERA_ACCESS_ERROR:"CAMERA_ACCESS_ERROR",FACE_DISTANCE_ERROR:"FACE_DISTANCE_ERROR",MOBILE_LANDSCAPE_ERROR:"MOBILE_LANDSCAPE_ERROR",MULTIPLE_FACES_ERROR:"MULTIPLE_FACES_ERROR"},D=1e3,O=4e3,I=4001,b=4003,P=4005,k=4007;function L(e,t,a){return e*(a-t)+t}function N(e){const t=e.flippedCenterX-e.width/2,a=e.flippedCenterX+e.width/2,i=e.centerY-e.height/2,r=e.centerY+e.height/2;return{ovalBoundingBox:{left:t,top:i,right:a,bottom:r},minOvalX:t,maxOvalX:a,minOvalY:i,maxOvalY:r}}function H(e,t){const a=Math.max(e.left,t.left),i=Math.max(e.top,t.top),r=Math.min(e.right,t.right),n=Math.min(e.bottom,t.bottom),o=Math.abs(Math.max(0,r-a)*Math.max(0,n-i));if(0===o)return 0;return o/(Math.abs((e.right-e.left)*(e.bottom-e.top))+Math.abs((t.right-t.left)*(t.bottom-t.top))-o)}function B({width:e,height:t,widthSeed:a=1,centerXSeed:i=.5,centerYSeed:r=.5}){const n=t;let o=e;const s=.8*a,c=Math.floor(7*e/16),l=Math.floor(9*e/16),d=Math.floor(7*t/16),h=Math.floor(9*t/16),u=L(i,c,l),m=L(r,d,h);e>=t&&(o=3/4*n);const f=s*o,v=1.618*f;return{flippedCenterX:Math.floor(o-u),centerX:Math.floor(u),centerY:Math.floor(m),width:Math.floor(f),height:Math.floor(v)}}function W(e){const{leftEye:t,rightEye:a,mouth:i}=e,r=[];r[0]=(t[0]+a[0])/2,r[1]=(t[1]+a[1])/2;return{pupilDistance:Math.sqrt(Math.pow(t[0]-a[0],2)+Math.pow(t[1]-a[1],2)),faceHeight:Math.sqrt(Math.pow(r[0]-i[0],2)+Math.pow(r[1]-i[1],2))}}function V(e,t){const{leftEye:a,rightEye:i,nose:r}=e,{height:n,centerY:o}=t,s=o-n/2,c=[];c[0]=(a[0]+i[0])/2,c[1]=(a[1]+i[1])/2;const{pupilDistance:l,faceHeight:d}=W(e),h=(2*l+1.8*d)/2,u=1.618*h;let m,f;c[1]<=(s+n)/2?(m=(c[0]+r[0])/2,f=(c[1]+r[1])/2):(m=c[0],f=c[1]);const v=m-h/2,g=f-u/2;return{left:v,top:g,right:v+h,bottom:g+u}}function j(e){const t=document.createElement("canvas");t.width=e.videoWidth,t.height=e.videoHeight;const a=t.getContext("2d");if(a){a.drawImage(e,0,0,t.width,t.height);const 
i=a.getImageData(0,0,t.width,t.height).data,r=8,n=new Array(r).fill(0);for(let e=0;e<i.length;e++){n[Math.round(.2126*i[e++]+.7152*i[e++]+.0722*i[e++])%32]++}let o=-1,s=0;for(let e=0;e<r;e++)n[e]>s&&(s=n[e],o=e);return t.remove(),0===o?C.DARK:o===r?C.BRIGHT:C.NORMAL}throw new Error("Cannot find Video Element.")}function z({ctx:e,prevColor:t,nextColor:a,fraction:i}){const r=e.canvas.width,n=e.canvas.height;e.fillStyle=a,e.fillRect(0,0,r,n*i),1!==i&&(e.fillStyle=t,e.fillRect(0,n*i,r,n*(1-i)))}const q=e=>!!e;function U(e){return e.slice(e.indexOf("(")+1,e.indexOf(")")).split(",").map((e=>parseInt(e)))}function Y({faceDetector:t,videoEl:a,ovalDetails:i,reduceThreshold:r=!1,isMobile:n=!1}){return e.__awaiter(this,void 0,void 0,(function*(){const e=yield t.detectFaces(a);let o,s,c=!1;switch(e.length){case 0:s=_.FACE_DISTANCE_ERROR;break;case 1:{o=e[0];const t=i.width,{pupilDistance:a,faceHeight:l}=W(o),d=2;t&&(c=(d*a+1.8*l)/2/d/t<(r?n?.37:.4:.32),c||(s=_.FACE_DISTANCE_ERROR));break}default:s=_.MULTIPLE_FACES_ERROR}return{isDistanceBelowThreshold:c,error:s}}))}function $({deviceHeight:e,deviceWidth:t,height:a,width:i,top:r,left:n}){return{Height:a/e,Width:i/t,Top:r/e,Left:n/t}}const G=`https://cdn.liveness.rekognition.amazonaws.com/face-detection/tensorflow/tfjs-backend-wasm/${M.version_wasm}/`;class X extends A{constructor(e,t){super(),this.faceModelUrl=null!=t?t:"https://cdn.liveness.rekognition.amazonaws.com/face-detection/tensorflow-models/blazeface/0.0.7/model/model.json",this.binaryPath=null!=e?e:G}loadModels(){return e.__awaiter(this,void 0,void 0,(function*(){!function(){try{return!(!window.WebAssembly||!window.WebAssembly.compile&&!window.WebAssembly.compileStreaming)}catch(e){return!1}}()?yield this._loadCPUBackend():yield this._loadWebAssemblyBackend();try{yield F.ready(),this._model=yield a.jitteredExponentialRetry(R.load,[{modelUrl:this.faceModelUrl}])}catch(e){throw new Error("There was an error loading the blazeface model. If you are using a custom blazeface model url ensure that it is a fully qualified url that returns a json file.")}}))}detectFaces(t){return e.__awaiter(this,void 0,void 0,(function*(){const e=yield this._model.estimateFaces(t,!1,!0,!0),a=Date.now();return e.filter((e=>!!e.landmarks)).map((e=>{const{topLeft:t,bottomRight:i,probability:r,landmarks:n}=e,[o,s]=t,[c,l]=i,d=Math.abs(o-c),h=Math.abs(l-s),u=n[0],m=n[1],f=n[2],v=n[3];return{top:s,left:c,width:d,height:h,timestampMs:a,probability:r[0],rightEye:u,leftEye:m,mouth:v,nose:f}}))}))}_loadWebAssemblyBackend(){return e.__awaiter(this,void 0,void 0,(function*(){try{M.setWasmPaths(this.binaryPath),yield a.jitteredExponentialRetry((()=>e.__awaiter(this,void 0,void 0,(function*(){if(!(yield F.setBackend("wasm")))throw new Error("Initialization of backend wasm failed")}))),[]),this.modelBackend="wasm"}catch(e){throw new Error('There was an error loading the TFJS WASM backend. If you are using a custom WASM path ensure that it ends with "/" and that it is not the full URL as @tensorflow/tfjs-backend-wasm will append the wasm binary file name. 
Read more: https://github.com/tensorflow/tfjs/blob/master/tfjs-backend-wasm/src/backend_wasm.ts#L475.')}}))}_loadCPUBackend(){return e.__awaiter(this,void 0,void 0,(function*(){yield F.setBackend("cpu"),this.modelBackend="cpu"}))}}class Z{constructor(e,t={}){if("undefined"==typeof MediaRecorder)throw Error("MediaRecorder is not supported by this browser");this._stream=e,this._options=t,this._chunks=[],this._recorder=new MediaRecorder(e,{bitsPerSecond:1e6}),this._setupCallbacks()}getState(){return this._recorder.state}start(e){this.clearRecordedData(),this.recordingStartApiTimestamp=Date.now(),this._recorder.start(e)}stop(){return e.__awaiter(this,void 0,void 0,(function*(){return"recording"===this.getState()&&this._recorder.stop(),this._recorderStopped}))}pause(){this._recorder.pause()}clearRecordedData(){this._chunks=[]}dispatch(e){this._recorder.dispatchEvent(e)}getVideoChunkSize(){return this._chunks.length}_setupCallbacks(){this.videoStream=new ReadableStream({start:e=>{this._recorder&&(this._recorder.ondataavailable=t=>{t.data&&t.data.size>0&&(0===this._chunks.length&&(this.firstChunkTimestamp=Date.now()),this._chunks.push(t.data),e.enqueue(t.data))},this._recorder.addEventListener("clientSesssionInfo",(t=>{e.enqueue(t.data.clientInfo)})),this._recorder.addEventListener("stopVideo",(()=>{e.enqueue("stopVideo")})),this._recorder.addEventListener("endStream",(()=>{e.close()})),this._recorder.addEventListener("endStreamWithCode",(t=>{e.enqueue({type:"endStreamWithCode",code:t.data.code})})))}}),this.recorderStarted=new Promise((e=>{this._recorder.onstart=()=>{this.recorderStartTimestamp=Date.now(),e()}})),this._recorderStopped=new Promise((e=>{this._recorder.onstop=()=>{this.recorderEndTimestamp=Date.now(),e()}})),this._recorder.onerror=()=>{"stopped"!==this.getState()&&this.stop()}}}const K=2e3,Q=t=>{return t[Symbol.asyncIterator]?t:(a=t,"function"==typeof ReadableStream&&a instanceof ReadableStream?d.readableStreamtoIterable(t):{[Symbol.asyncIterator]:function(){return e.__asyncGenerator(this,arguments,(function*(){yield yield e.__await(t)}))}});var a};class J{constructor(e,t=new h.FetchHttpHandler){this.metadata={handlerProtocol:"websocket/h1.1"},this.sockets={},this.utf8decoder=new TextDecoder,this.httpHandler=t,this.configPromise="function"==typeof e?e().then((e=>null!=e?e:{})):Promise.resolve(null!=e?e:{})}destroy(){for(const[e,t]of Object.entries(this.sockets)){for(const e of t)e.close(1e3,"Socket closed through destroy() call");delete this.sockets[e]}}handle(t){return e.__awaiter(this,void 0,void 0,(function*(){if(!(e=>"ws:"===e.protocol||"wss:"===e.protocol)(t))return this.httpHandler.handle(t);const e=l.formatUrl(t),a=new WebSocket(e);this.sockets[e]||(this.sockets[e]=[]),this.sockets[e].push(a),a.binaryType="arraybuffer";const{connectionTimeout:i=K}=yield this.configPromise;yield this.waitForReady(a,i);const{body:r}=t,n=Q(r),o=(e=>"function"==typeof ReadableStream?d.iterableToReadableStream(e):e)(this.connect(a,n));return{response:new u.HttpResponse({statusCode:200,body:o})}}))}removeNotUsableSockets(e){var t;this.sockets[e]=(null!==(t=this.sockets[e])&&void 0!==t?t:[]).filter((e=>![WebSocket.CLOSING,WebSocket.CLOSED].includes(e.readyState)))}waitForReady(e,t){return new Promise(((a,i)=>{const r=setTimeout((()=>{this.removeNotUsableSockets(e.url),i({$metadata:{httpStatusCode:500}})}),t);e.onopen=()=>{clearTimeout(r),a()}}))}connect(t,a){let i,r=!1,n=()=>{},o=()=>{};t.onmessage=e=>{o({done:!1,value:new 
Uint8Array(e.data)})},t.onerror=e=>{r=!0,t.close(),n(e)},t.onclose=()=>{this.removeNotUsableSockets(t.url),r||(i?n(i):o({done:!0,value:void 0}))};const s={[Symbol.asyncIterator]:()=>({next:()=>new Promise(((e,t)=>{o=e,n=t}))})};return(()=>{e.__awaiter(this,void 0,void 0,(function*(){var r,n,o,s;try{try{for(var c,l=!0,d=e.__asyncValues(a);!(r=(c=yield d.next()).done);){s=c.value,l=!1;try{const e=s,a=this.utf8decoder.decode(e);if(a.includes("closeCode")){const e=a.match(/"closeCode":([0-9]*)/);if(e){const a=e[1];t.close(parseInt(a))}continue}t.send(e)}finally{l=!0}}}catch(e){n={error:e}}finally{try{l||r||!(o=d.return)||(yield o.call(d))}finally{if(n)throw n.error}}}catch(e){i=e}finally{t.close(D)}}))})(),s}}function ee(e){return void 0!==e.Challenge}function te(e){return void 0!==e.code}class ae{constructor({sessionId:e,region:t,stream:a,videoEl:i,credentialProvider:r,endpointOverride:n}){this.sessionId=e,this.region=t,this._stream=a,this.videoEl=i,this.videoRecorder=new Z(a),this.credentialProvider=r,this.endpointOverride=n,this.initPromise=this.init()}getResponseStream(){return e.__awaiter(this,void 0,void 0,(function*(){return yield this.initPromise,this.responseStream}))}startRecordingLivenessVideo(){this.videoRecorder.start(1e3)}sendClientInfo(e){this.videoRecorder.dispatch(new MessageEvent("clientSesssionInfo",{data:{clientInfo:e}}))}stopVideo(){return e.__awaiter(this,void 0,void 0,(function*(){yield this.videoRecorder.stop()}))}dispatchStopVideoEvent(){this.videoRecorder.dispatch(new Event("stopVideo"))}endStreamWithCode(t){return e.__awaiter(this,void 0,void 0,(function*(){"recording"===this.videoRecorder.getState()&&(yield this.stopVideo()),this.videoRecorder.dispatch(new MessageEvent("endStreamWithCode",{data:{code:t}}))}))}init(){var t;return e.__awaiter(this,void 0,void 0,(function*(){const e=null!==(t=this.credentialProvider)&&void 0!==t?t:yield a.Credentials.get();if(!e)throw new Error("No credentials");const i={credentials:e,region:this.region,customUserAgent:`${a.getAmplifyUserAgent()} ui-react-liveness/2.0.11`,requestHandler:new J({connectionTimeout:1e4})};if(this.endpointOverride){const e=this.endpointOverride;i.endpointProvider=()=>({url:new URL(e)})}this._client=new c.RekognitionStreamingClient(i),this.responseStream=yield this.startLivenessVideoConnection()}))}getAsyncGeneratorFromReadableStream(t){const a=this;return this._reader=t.getReader(),function(){return e.__asyncGenerator(this,arguments,(function*(){for(;;){const{done:t,value:i}=yield e.__await(a._reader.read());if(t)return yield e.__await(void 0);if("stopVideo"===i)yield yield e.__await({VideoEvent:{VideoChunk:[],TimestampMillis:Date.now()}});else if(void 0!==i.arrayBuffer){const t=yield e.__await(i.arrayBuffer()),a=new Uint8Array(t);a.length>0&&(yield yield e.__await({VideoEvent:{VideoChunk:a,TimestampMillis:Date.now()}}))}else ee(i)?yield yield e.__await({ClientSessionInformationEvent:{Challenge:i.Challenge}}):te(i)&&(yield yield e.__await({VideoEvent:{VideoChunk:[],TimestampMillis:{closeCode:i.code}}}))}}))}}startLivenessVideoConnection(){return e.__awaiter(this,void 0,void 0,(function*(){const e=this.getAsyncGeneratorFromReadableStream(this.videoRecorder.videoStream)();return(yield this._client.send(new c.StartFaceLivenessSessionCommand({ChallengeVersions:"FaceMovementAndLightChallenge_1.0.0",SessionId:this.sessionId,LivenessRequestStream:e,VideoWidth:this.videoEl.videoWidth.toString(),VideoHeight:this.videoEl.videoHeight.toString()}))).LivenessResponseStream}))}}var 
ie;!function(e){e.SCROLLING="SCROLLING",e.FLAT="FLAT"}(ie||(ie={}));class re{constructor(e,t){this.context=e,this.freshnessColorsSequence=t,this.isFirstTick=!0}displayColorTick(){return e.__awaiter(this,void 0,void 0,(function*(){return new Promise(((e,t)=>{setTimeout((()=>{this.displayNextColorTick(e,t)}),Math.min(10))}))}))}init(){this.stageIndex=0,this.currColorIndex=0,this.currColorSequence=this.freshnessColorsSequence[0],this.prevColorSequence=this.freshnessColorsSequence[0],this.stage=ie.FLAT,this.timeLastFlatOrScrollChange=Date.now(),this.timeLastFaceMatchChecked=Date.now()}displayNextColorTick(e,t){const{freshnessColorEl:a}=this.context.freshnessColorAssociatedParams,{ovalDetails:i,scaleFactor:r}=this.context.ovalAssociatedParams,{videoEl:n}=this.context.videoAssociatedParams,o=Date.now();this.isFirstTick&&(this.init(),this.isFirstTick=!1,this.sendColorStartTime({tickStartTime:o,currColor:this.currColorSequence.color,prevColor:this.currColorSequence.color,currColorIndex:this.stageIndex}));let s=o-this.timeLastFlatOrScrollChange;if(a.style.display="block",(this.stage===ie.FLAT&&s>=this.currColorSequence.flatDisplayDuration||this.stage===ie.SCROLLING&&s>=this.currColorSequence.downscrollDuration)&&(this.incrementStageIndex(o),s=0),this.currColorIndex<this.freshnessColorsSequence.length){const t=s/(this.stage===ie.SCROLLING?this.currColorSequence.downscrollDuration:this.currColorSequence.flatDisplayDuration);!function({overlayCanvas:e,prevColor:t,nextColor:a,videoEl:i,ovalDetails:r,heightFraction:n,scaleFactor:o}){const{x:s,y:c}=i.getBoundingClientRect(),{flippedCenterX:l,centerY:d,width:h,height:u}=r,m=l*o+s,f=d*o+c,v=e.width,g=e.height,p=e.getContext("2d");if(!p)throw new Error("Cannot find Overlay Canvas.");p.canvas.width=window.innerWidth,p.canvas.height=window.innerHeight,p.clearRect(0,0,v,g),z({ctx:p,prevColor:t,nextColor:a,fraction:n}),p.save(),p.beginPath(),p.rect(0,0,v,g),p.clip(),p.clearRect(0,0,v,g),p.globalAlpha=.9,z({ctx:p,prevColor:t,nextColor:a,fraction:n}),p.beginPath(),p.ellipse(m,f,h*o/2,u*o/2,0,0,2*Math.PI),p.strokeStyle="white",p.lineWidth=8,p.stroke(),p.clip(),p.clearRect(0,0,v,g),p.globalAlpha=.75,z({ctx:p,prevColor:t,nextColor:a,fraction:n}),p.restore()}({overlayCanvas:a,prevColor:this.prevColorSequence.color,nextColor:this.currColorSequence.color,videoEl:n,ovalDetails:i,heightFraction:t,scaleFactor:r}),e(!1)}else a.style.display="none",e(!0)}incrementStageIndex(e){if(this.stageIndex+=1,this.prevColorSequence=this.freshnessColorsSequence[this.currColorIndex],this.stage===ie.FLAT)this.currColorIndex+=1,this.stage=ie.SCROLLING;else if(this.stage===ie.SCROLLING){this.freshnessColorsSequence[this.currColorIndex].flatDisplayDuration>0?this.stage=ie.FLAT:(this.stage=ie.SCROLLING,this.currColorIndex+=1)}this.currColorSequence=this.freshnessColorsSequence[this.currColorIndex],this.timeLastFlatOrScrollChange=Date.now(),this.currColorSequence&&this.sendColorStartTime({tickStartTime:e,currColor:this.currColorSequence.color,prevColor:this.prevColorSequence.color,currColorIndex:this.stageIndex})}sendColorStartTime({tickStartTime:e,currColor:t,prevColor:a,currColorIndex:i}){const{livenessStreamProvider:r,challengeId:n}=this.context;r.sendClientInfo({Challenge:{FaceMovementAndLightChallenge:{ChallengeId:n,ColorDisplayed:{CurrentColor:{RGB:U(t)},PreviousColor:{RGB:U(a)},SequenceNumber:i,CurrentColorStartTimestamp:e}}}})}}const ne=e=>!!(null==e?void 0:e.DisconnectionEvent),oe=e=>!!(null==e?void 0:e.ValidationException),se=e=>!!(null==e?void 
0:e.InternalServerException),ce=e=>!!(null==e?void 0:e.ThrottlingException),le=e=>!!(null==e?void 0:e.ServiceQuotaExceededException),de={width:{min:320,ideal:640},height:{min:240,ideal:480},frameRate:{min:15,ideal:30,max:30},facingMode:"user"};let he;const ue=r.createMachine({id:"livenessMachine",initial:"start",predictableActionArguments:!0,context:{challengeId:m.nanoid(),maxFailedAttempts:0,failedAttempts:0,componentProps:void 0,serverSessionInformation:void 0,videoAssociatedParams:{videoConstraints:de},ovalAssociatedParams:void 0,faceMatchAssociatedParams:{illuminationState:void 0,faceMatchState:void 0,faceMatchPercentage:25,currentDetectedFace:void 0,startFace:void 0,endFace:void 0,initialFaceMatchTime:void 0},freshnessColorAssociatedParams:{freshnessColorEl:void 0,freshnessColors:[],freshnessColorsComplete:!1,freshnessColorDisplay:void 0},errorState:void 0,livenessStreamProvider:void 0,responseStreamActorRef:void 0,shouldDisconnect:!1,faceMatchStateBeforeStart:void 0,isFaceFarEnoughBeforeRecording:void 0,isRecordingStopped:!1},on:{CANCEL:"userCancel",TIMEOUT:{target:"retryableTimeout",actions:"updateErrorStateForTimeout"},SET_SESSION_INFO:{internal:!0,actions:"updateSessionInfo"},DISCONNECT_EVENT:{internal:!0,actions:"updateShouldDisconnect"},SET_DOM_AND_CAMERA_DETAILS:{actions:"setDOMAndCameraDetails"},SERVER_ERROR:{target:"error",actions:"updateErrorStateForServer"},RUNTIME_ERROR:{target:"error"},MOBILE_LANDSCAPE_WARNING:{target:"mobileLandscapeWarning",actions:"updateErrorStateForServer"}},states:{start:{on:{BEGIN:"cameraCheck"}},cameraCheck:{entry:["resetErrorState","initializeFaceDetector"],invoke:{src:"checkVirtualCameraAndGetStream",onDone:{target:"waitForDOMAndCameraDetails",actions:["updateVideoMediaStream"]},onError:{target:"permissionDenied"}}},waitForDOMAndCameraDetails:{after:{0:{target:"detectFaceBeforeStart",cond:"hasDOMAndCameraDetails"},500:{target:"waitForDOMAndCameraDetails"}}},detectFaceBeforeStart:{invoke:{src:"detectFace",onDone:{target:"checkFaceDetectedBeforeStart",actions:["updateFaceMatchBeforeStartDetails"]}}},checkFaceDetectedBeforeStart:{after:{0:{target:"detectFaceDistanceBeforeRecording",cond:"hasSingleFaceBeforeStart"},100:{target:"detectFaceBeforeStart"}}},detectFaceDistanceBeforeRecording:{invoke:{src:"detectFaceDistance",onDone:{target:"checkFaceDistanceBeforeRecording",actions:["updateFaceDistanceBeforeRecording"]}}},checkFaceDistanceBeforeRecording:{after:{0:{target:"initializeLivenessStream",cond:"hasEnoughFaceDistanceBeforeRecording"},100:{target:"detectFaceDistanceBeforeRecording"}}},initializeLivenessStream:{invoke:{src:"openLivenessStreamConnection",onDone:{target:"notRecording",actions:["updateLivenessStreamProvider","spawnResponseStreamActor"]}}},notRecording:{on:{START_RECORDING:"recording"},initial:"waitForSessionInfo",states:{waitForSessionInfo:{after:{0:{target:"#livenessMachine.recording",cond:"hasServerSessionInfo"},100:{target:"waitForSessionInfo"}}}}},recording:{entry:["clearErrorState","startRecording"],initial:"ovalDrawing",states:{ovalDrawing:{entry:["sendTimeoutAfterOvalDrawingDelay"],invoke:{src:"detectInitialFaceAndDrawOval",onDone:{target:"checkFaceDetected",actions:["updateOvalAndFaceDetailsPostDraw","sendTimeoutAfterOvalMatchDelay"]},onError:{target:"#livenessMachine.error",actions:"updateErrorStateForRuntime"}}},checkFaceDetected:{after:{0:{target:"checkRecordingStarted",cond:"hasSingleFace"},100:{target:"ovalDrawing"}}},checkRecordingStarted:{after:{0:{target:"ovalMatching",cond:"hasRecordingStarted",actions:["updateRecordi
ngStartTimestampMs"]},100:{target:"checkRecordingStarted"}}},ovalMatching:{entry:["cancelOvalDrawingTimeout"],invoke:{src:"detectFaceAndMatchOval",onDone:{target:"checkMatch",actions:"updateFaceDetailsPostMatch"}}},checkMatch:{after:{0:{target:"flashFreshnessColors",cond:"hasFaceMatchedInOvalWithMinTime",actions:["updateEndFaceMatch","setupFlashFreshnessColors","cancelOvalMatchTimeout","cancelOvalDrawingTimeout"]},.1:{target:"ovalMatching",cond:"hasFaceMatchedInOval",actions:"setFaceMatchTimeAndStartFace"},1:{target:"ovalMatching",cond:"hasNotFaceMatchedInOval"}}},flashFreshnessColors:{invoke:{src:"flashColors",onDone:[{target:"success",cond:"hasFreshnessColorShown"},{target:"flashFreshnessColors",actions:"updateFreshnessDetails"}]}},success:{entry:["stopRecording"],type:"final"}},onDone:"uploading"},uploading:{initial:"pending",states:{pending:{entry:["sendTimeoutAfterWaitingForDisconnect","pauseVideoStream"],invoke:{src:"stopVideo",onDone:"waitForDisconnectEvent",onError:{target:"#livenessMachine.error",actions:"updateErrorStateForRuntime"}}},waitForDisconnectEvent:{after:{0:{target:"getLivenessResult",cond:"getShouldDisconnect"},100:{target:"waitForDisconnectEvent"}}},getLivenessResult:{entry:["cancelWaitForDisconnectTimeout","freezeStream"],invoke:{src:"getLiveness",onError:{target:"#livenessMachine.error",actions:"updateErrorStateForServer"}}}}},retryableTimeout:{entry:"updateFailedAttempts",always:[{target:"timeout",cond:"shouldTimeoutOnFailedAttempts"},{target:"notRecording"}]},permissionDenied:{entry:"callUserPermissionDeniedCallback",on:{RETRY_CAMERA_CHECK:"cameraCheck"}},mobileLandscapeWarning:{entry:"callMobileLandscapeWarningCallback",always:[{target:"error"}]},timeout:{entry:["cleanUpResources","callUserTimeoutCallback","freezeStream"]},error:{entry:["cleanUpResources","callErrorCallback","cancelOvalDrawingTimeout","cancelWaitForDisconnectTimeout","cancelOvalMatchTimeout","freezeStream"]},userCancel:{entry:["cleanUpResources","callUserCancelCallback","resetContext"],always:[{target:"start"}]}}},{actions:{spawnResponseStreamActor:r.assign({responseStreamActorRef:()=>r.spawn(me)}),updateFailedAttempts:r.assign({failedAttempts:e=>e.failedAttempts+1}),updateVideoMediaStream:r.assign({videoAssociatedParams:(e,t)=>{var a;return Object.assign(Object.assign({},e.videoAssociatedParams),{videoMediaStream:null===(a=t.data)||void 0===a?void 0:a.stream})}}),initializeFaceDetector:r.assign({ovalAssociatedParams:e=>{const{componentProps:t}=e,{faceModelUrl:a,binaryPath:i}=t.config,r=new X(i,a);return r.triggerModelLoading(),Object.assign(Object.assign({},e.ovalAssociatedParams),{faceDetector:r})}}),updateLivenessStreamProvider:r.assign({livenessStreamProvider:(e,t)=>{var a;return null===(a=t.data)||void 0===a?void 0:a.livenessStreamProvider}}),setDOMAndCameraDetails:r.assign({videoAssociatedParams:(e,t)=>{var a,i,r;return Object.assign(Object.assign({},e.videoAssociatedParams),{videoEl:null===(a=t.data)||void 0===a?void 0:a.videoEl,canvasEl:null===(i=t.data)||void 0===i?void 0:i.canvasEl,isMobile:null===(r=t.data)||void 0===r?void 0:r.isMobile})},freshnessColorAssociatedParams:(e,t)=>{var a;return Object.assign(Object.assign({},e.freshnessColorAssociatedParams),{freshnessColorEl:null===(a=t.data)||void 0===a?void 
0:a.freshnessColorEl})}}),updateRecordingStartTimestampMs:r.assign({videoAssociatedParams:e=>{const{challengeId:t,videoAssociatedParams:a,ovalAssociatedParams:i,livenessStreamProvider:r}=e,{recordingStartApiTimestamp:n,recorderStartTimestamp:o}=r.videoRecorder,{videoMediaStream:s}=a,{initialFace:c}=i,l=Math.round(.73*(o-n)+n),{width:d,height:h}=s.getTracks()[0].getSettings(),u=d-c.left-c.width;return e.livenessStreamProvider.sendClientInfo({Challenge:{FaceMovementAndLightChallenge:{ChallengeId:t,VideoStartTimestamp:l,InitialFace:{InitialFaceDetectedTimestamp:c.timestampMs,BoundingBox:$({deviceHeight:h,deviceWidth:d,height:c.height,width:c.width,top:c.top,left:u})}}}}),Object.assign(Object.assign({},e.videoAssociatedParams),{recordingStartTimestampMs:l})}}),startRecording:r.assign({videoAssociatedParams:e=>{if(!e.serverSessionInformation)throw new Error("Session information was not received from response stream");return e.livenessStreamProvider.videoRecorder&&"recording"!==e.livenessStreamProvider.videoRecorder.getState()&&e.livenessStreamProvider.startRecordingLivenessVideo(),Object.assign({},e.videoAssociatedParams)}}),stopRecording:e=>{},updateFaceMatchBeforeStartDetails:r.assign({faceMatchStateBeforeStart:(e,t)=>t.data.faceMatchState}),updateFaceDistanceBeforeRecording:r.assign({isFaceFarEnoughBeforeRecording:(e,t)=>t.data.isFaceFarEnoughBeforeRecording}),updateFaceDistanceWhileLoading:r.assign({isFaceFarEnoughBeforeRecording:(e,t)=>t.data.isFaceFarEnoughBeforeRecording,errorState:(e,t)=>{var a;return null===(a=t.data)||void 0===a?void 0:a.error}}),updateOvalAndFaceDetailsPostDraw:r.assign({ovalAssociatedParams:(e,t)=>Object.assign(Object.assign({},e.ovalAssociatedParams),{initialFace:t.data.initialFace,ovalDetails:t.data.ovalDetails,scaleFactor:t.data.scaleFactor}),faceMatchAssociatedParams:(e,t)=>Object.assign(Object.assign({},e.faceMatchAssociatedParams),{faceMatchState:t.data.faceMatchState,illuminationState:t.data.illuminationState})}),updateFaceDetailsPostMatch:r.assign({faceMatchAssociatedParams:(e,t)=>Object.assign(Object.assign({},e.faceMatchAssociatedParams),{faceMatchState:t.data.faceMatchState,faceMatchPercentage:t.data.faceMatchPercentage,illuminationState:t.data.illuminationState,currentDetectedFace:t.data.detectedFace})}),updateEndFaceMatch:r.assign({faceMatchAssociatedParams:e=>Object.assign(Object.assign({},e.faceMatchAssociatedParams),{endFace:e.faceMatchAssociatedParams.currentDetectedFace})}),setFaceMatchTimeAndStartFace:r.assign({faceMatchAssociatedParams:e=>Object.assign(Object.assign({},e.faceMatchAssociatedParams),{startFace:void 0===e.faceMatchAssociatedParams.startFace?e.faceMatchAssociatedParams.currentDetectedFace:e.faceMatchAssociatedParams.startFace,initialFaceMatchTime:void 0===e.faceMatchAssociatedParams.initialFaceMatchTime?Date.now():e.faceMatchAssociatedParams.initialFaceMatchTime})}),resetErrorState:r.assign({errorState:e=>{}}),updateErrorStateForTimeout:r.assign({errorState:(e,t)=>{var a;return(null===(a=t.data)||void 0===a?void 0:a.errorState)||_.TIMEOUT}}),updateErrorStateForRuntime:r.assign({errorState:(e,t)=>{var a;return(null===(a=t.data)||void 0===a?void 
0:a.errorState)||_.RUNTIME_ERROR}}),updateErrorStateForServer:r.assign({errorState:e=>_.SERVER_ERROR}),clearErrorState:r.assign({errorState:e=>{}}),updateSessionInfo:r.assign({serverSessionInformation:(e,t)=>t.data.sessionInfo}),updateShouldDisconnect:r.assign({shouldDisconnect:e=>!0}),updateFreshnessDetails:r.assign({freshnessColorAssociatedParams:(e,t)=>Object.assign(Object.assign({},e.freshnessColorAssociatedParams),{freshnessColorsComplete:t.data.freshnessColorsComplete})}),setupFlashFreshnessColors:r.assign({freshnessColorAssociatedParams:e=>{const{serverSessionInformation:t}=e,a=(t.Challenge.FaceMovementAndLightChallenge.ColorSequences||[]).map((({FreshnessColor:e,DownscrollDuration:t,FlatDisplayDuration:a})=>{const i=e.RGB,r=`rgb(${i[0]},${i[1]},${i[2]})`;return void 0!==r&&void 0!==t&&void 0!==a?{color:r,downscrollDuration:t,flatDisplayDuration:a}:void 0})).filter(q);const i=new re(e,a);return Object.assign(Object.assign({},e.freshnessColorAssociatedParams),{freshnessColorDisplay:i})}}),sendTimeoutAfterOvalDrawingDelay:r.actions.send({type:"TIMEOUT"},{delay:5e3,id:"ovalDrawingTimeout"}),cancelOvalDrawingTimeout:r.actions.cancel("ovalDrawingTimeout"),sendTimeoutAfterOvalMatchDelay:r.actions.send({type:"TIMEOUT"},{delay:e=>{var t,a,i,r;return(null===(r=null===(i=null===(a=null===(t=e.serverSessionInformation)||void 0===t?void 0:t.Challenge)||void 0===a?void 0:a.FaceMovementAndLightChallenge)||void 0===i?void 0:i.ChallengeConfig)||void 0===r?void 0:r.OvalFitTimeout)||7e3},id:"ovalMatchTimeout"}),cancelOvalMatchTimeout:r.actions.cancel("ovalMatchTimeout"),sendTimeoutAfterWaitingForDisconnect:r.actions.send({type:"TIMEOUT",data:{errorState:_.SERVER_ERROR}},{delay:2e4,id:"waitForDisconnectTimeout"}),cancelWaitForDisconnectTimeout:r.actions.cancel("waitForDisconnectTimeout"),sendTimeoutAfterFaceDistanceDelay:r.actions.send({type:"RUNTIME_ERROR",data:new Error("Avoid moving closer during countdown and ensure only one face is in front of camera.")},{delay:0,id:"faceDistanceTimeout"}),cancelFaceDistanceTimeout:r.actions.cancel("faceDistanceTimeout"),callUserPermissionDeniedCallback:r.assign({errorState:(e,t)=>{var a,i;let r;r=t.data.message.includes("15 fps")?_.CAMERA_FRAMERATE_ERROR:_.CAMERA_ACCESS_ERROR;const n=t.data.message||t.data.Message,o={state:r,error:new Error(n)};return null===(i=(a=e.componentProps).onError)||void 0===i||i.call(a,o),r}}),callMobileLandscapeWarningCallback:r.assign({errorState:e=>_.MOBILE_LANDSCAPE_ERROR}),callUserCancelCallback:t=>e.__awaiter(void 0,void 0,void 0,(function*(){var e,a;null===(a=(e=t.componentProps).onUserCancel)||void 0===a||a.call(e)})),callUserTimeoutCallback:t=>e.__awaiter(void 0,void 0,void 0,(function*(){var e,a;const i=new Error("Client Timeout");i.name=t.errorState;const r={state:t.errorState,error:i};null===(a=(e=t.componentProps).onError)||void 0===a||a.call(e,r)})),callErrorCallback:(t,a)=>e.__awaiter(void 0,void 0,void 0,(function*(){var e,i,r;const n={state:t.errorState,error:(null===(e=a.data)||void 0===e?void 0:e.error)||a.data};null===(r=(i=t.componentProps).onError)||void 0===r||r.call(i,n)})),cleanUpResources:t=>e.__awaiter(void 0,void 0,void 0,(function*(){var e;const{freshnessColorEl:a}=t.freshnessColorAssociatedParams;a&&(a.style.display="none");let i=O;t.errorState===_.TIMEOUT?i=I:t.errorState===_.RUNTIME_ERROR?i=P:t.errorState===_.FACE_DISTANCE_ERROR||t.errorState===_.MULTIPLE_FACES_ERROR?i=k:void 0===t.errorState&&(i=b),yield null===(e=t.livenessStreamProvider)||void 0===e?void 
0:e.endStreamWithCode(i)})),freezeStream:t=>e.__awaiter(void 0,void 0,void 0,(function*(){const{videoMediaStream:e,videoEl:a}=t.videoAssociatedParams;t.isRecordingStopped=!0,null==a||a.pause(),null==e||e.getTracks().forEach((function(e){e.stop()}))})),pauseVideoStream:t=>e.__awaiter(void 0,void 0,void 0,(function*(){const{videoEl:e}=t.videoAssociatedParams;t.isRecordingStopped=!0,e.pause()})),resetContext:r.assign({challengeId:m.nanoid(),maxFailedAttempts:0,failedAttempts:0,componentProps:e=>e.componentProps,serverSessionInformation:e=>{},videoAssociatedParams:e=>({videoConstraints:de}),ovalAssociatedParams:e=>{},errorState:e=>{},livenessStreamProvider:e=>{},responseStreamActorRef:e=>{},shouldDisconnect:!1,faceMatchStateBeforeStart:e=>{},isFaceFarEnoughBeforeRecording:e=>{},isRecordingStopped:!1})},guards:{shouldTimeoutOnFailedAttempts:e=>e.failedAttempts>=e.maxFailedAttempts,hasFaceMatchedInOvalWithMinTime:e=>{const{faceMatchState:t,initialFaceMatchTime:a}=e.faceMatchAssociatedParams,i=Date.now()-a;return t===S.MATCHED&&i>=500},hasFaceMatchedInOval:e=>e.faceMatchAssociatedParams.faceMatchState===S.MATCHED,hasNotFaceMatchedInOval:e=>e.faceMatchAssociatedParams.faceMatchState!==S.MATCHED,hasSingleFace:e=>e.faceMatchAssociatedParams.faceMatchState===S.FACE_IDENTIFIED,hasSingleFaceBeforeStart:e=>e.faceMatchStateBeforeStart===S.FACE_IDENTIFIED,hasEnoughFaceDistanceBeforeRecording:e=>e.isFaceFarEnoughBeforeRecording,hasNotEnoughFaceDistanceBeforeRecording:e=>!e.isFaceFarEnoughBeforeRecording,hasLivenessCheckSucceeded:(e,t,a)=>a.state.event.data.isLive,hasFreshnessColorShown:e=>e.freshnessColorAssociatedParams.freshnessColorsComplete,hasServerSessionInfo:e=>void 0!==e.serverSessionInformation,hasDOMAndCameraDetails:e=>void 0!==e.videoAssociatedParams.videoEl&&void 0!==e.videoAssociatedParams.canvasEl&&void 0!==e.freshnessColorAssociatedParams.freshnessColorEl,getShouldDisconnect:e=>!!e.shouldDisconnect,hasRecordingStarted:e=>void 0!==e.livenessStreamProvider.videoRecorder.firstChunkTimestamp},services:{checkVirtualCameraAndGetStream(t){return e.__awaiter(this,void 0,void 0,(function*(){const{videoConstraints:e}=t.videoAssociatedParams,a=yield navigator.mediaDevices.getUserMedia({video:e,audio:!1}),i=(yield navigator.mediaDevices.enumerateDevices()).filter((e=>"videoinput"===e.kind)).filter((e=>!function(e){return e.label.toLowerCase().includes("virtual")}(e)));if(!i.length)throw new Error("No real video devices found");const r=a.getTracks().filter((e=>e.getSettings().frameRate>=15));if(r.length<1)throw new Error("No camera found with more than 15 fps");const n=r[0].getSettings().deviceId;let o=a;return i.some((e=>e.deviceId===n))||(o=yield navigator.mediaDevices.getUserMedia({video:Object.assign(Object.assign({},e),{deviceId:{exact:i[0].deviceId}}),audio:!1})),{stream:o}}))},openLivenessStreamConnection(t){return e.__awaiter(this,void 0,void 0,(function*(){const{config:e}=t.componentProps,{credentialProvider:a,endpointOverride:i}=e,r=new ae({sessionId:t.componentProps.sessionId,region:t.componentProps.region,stream:t.videoAssociatedParams.videoMediaStream,videoEl:t.videoAssociatedParams.videoEl,credentialProvider:a,endpointOverride:i});return he=r.getResponseStream(),{livenessStreamProvider:r}}))},detectFace(t){return e.__awaiter(this,void 0,void 0,(function*(){const{videoEl:a}=t.videoAssociatedParams,{faceDetector:i}=t.ovalAssociatedParams;try{yield i.modelLoadingPromise}catch(e){console.log({err:e})}const r=yield function(t,a){return e.__awaiter(this,void 0,void 0,(function*(){let 
e;switch((yield t.detectFaces(a)).length){case 0:e=S.CANT_IDENTIFY;break;case 1:e=S.FACE_IDENTIFIED;break;default:e=S.TOO_MANY}return e}))}(i,a);return{faceMatchState:r}}))},detectFaceDistance(t){return e.__awaiter(this,void 0,void 0,(function*(){const{isFaceFarEnoughBeforeRecording:e}=t,{videoEl:a,videoMediaStream:i,isMobile:r}=t.videoAssociatedParams,{faceDetector:n}=t.ovalAssociatedParams,{width:o,height:s}=i.getTracks()[0].getSettings(),c=B({width:o,height:s}),{isDistanceBelowThreshold:l}=yield Y({faceDetector:n,videoEl:a,ovalDetails:c,reduceThreshold:e,isMobile:r});return{isFaceFarEnoughBeforeRecording:l}}))},detectFaceDistanceWhileLoading(t){return e.__awaiter(this,void 0,void 0,(function*(){const{isFaceFarEnoughBeforeRecording:e}=t,{videoEl:a,videoMediaStream:i,isMobile:r}=t.videoAssociatedParams,{faceDetector:n}=t.ovalAssociatedParams,{width:o,height:s}=i.getTracks()[0].getSettings(),c=B({width:o,height:s}),{isDistanceBelowThreshold:l,error:d}=yield Y({faceDetector:n,videoEl:a,ovalDetails:c,reduceThreshold:e,isMobile:r});return{isFaceFarEnoughBeforeRecording:l,error:d}}))},detectInitialFaceAndDrawOval(t){return e.__awaiter(this,void 0,void 0,(function*(){const{serverSessionInformation:e,livenessStreamProvider:a}=t,{videoEl:i,canvasEl:r,isMobile:n}=t.videoAssociatedParams,{faceDetector:o}=t.ovalAssociatedParams;try{yield o.modelLoadingPromise,yield a.videoRecorder.recorderStarted}catch(e){console.log({err:e})}const s=yield o.detectFaces(i);let c,l,d;switch(s.length){case 0:l=S.CANT_IDENTIFY,d=j(i);break;case 1:l=S.FACE_IDENTIFIED,c=s[0];break;default:l=S.TOO_MANY}if(!c)return{faceMatchState:l,illuminationState:d};const{width:h,height:u}=i.getBoundingClientRect();n?(r.width=window.innerWidth,r.height=window.innerHeight):(r.width=h,r.height=u);const m=h/i.videoWidth,f=function({sessionInformation:e,videoWidth:t}){var a,i;const r=null===(i=null===(a=null==e?void 0:e.Challenge)||void 0===a?void 0:a.FaceMovementAndLightChallenge)||void 0===i?void 0:i.OvalParameters;if(!(r&&r.CenterX&&r.CenterY&&r.Width&&r.Height))throw new Error("Oval parameters not returned from session information.");return{flippedCenterX:t-r.CenterX,centerX:r.CenterX,centerY:r.CenterY,width:r.Width,height:r.Height}}({sessionInformation:e,videoWidth:i.width}),v=V(c,f);return c.top=v.top,c.left=v.left,c.height=v.bottom-v.top,c.width=v.right-v.left,function({canvas:e,oval:t,scaleFactor:a,videoEl:i}){const{flippedCenterX:r,centerY:n,width:o,height:s}=t,{width:c,height:l}=e.getBoundingClientRect(),d=e.getContext("2d");if(!d)throw new Error("Cannot find Canvas.");{d.clearRect(0,0,c,l),d.fillStyle="rgba(255, 255, 255, 1.0)",d.fillRect(0,0,c,l);const e={width:i.videoWidth,height:i.videoHeight},t={x:(c-e.width*a)/2,y:(l-e.height*a)/2};d.setTransform(a,0,0,a,t.x,t.y),d.beginPath(),d.ellipse(r,n,o/2,s/2,0,0,2*Math.PI),d.strokeStyle="#AEB3B7",d.lineWidth=3,d.stroke(),d.clip(),d.setTransform(1,0,0,1,0,0),d.clearRect(0,0,c,l)}}({canvas:r,oval:f,scaleFactor:m,videoEl:i}),{faceMatchState:l,ovalDetails:f,scaleFactor:m,initialFace:c}}))},detectFaceAndMatchOval(t){return e.__awaiter(this,void 0,void 0,(function*(){const{serverSessionInformation:e}=t,{videoEl:a}=t.videoAssociatedParams,{faceDetector:i,ovalDetails:r,initialFace:n}=t.ovalAssociatedParams,o=yield i.detectFaces(a);let s,c,l,d=0;const h=V(n,r),{ovalBoundingBox:u}=N(r),m=H(h,u);switch(o.length){case 0:s=S.CANT_IDENTIFY,l=j(a);break;case 1:{c=o[0];const{faceMatchState:t,faceMatchPercentage:a}=function(e,t,a,i){var r,n;let o;const s=null===(n=null===(r=null==i?void 
0:i.Challenge)||void 0===r?void 0:r.FaceMovementAndLightChallenge)||void 0===n?void 0:n.ChallengeConfig;if(!(s&&s.OvalIouThreshold&&s.OvalIouHeightThreshold&&s.OvalIouWidthThreshold&&s.FaceIouHeightThreshold&&s.FaceIouWidthThreshold))throw new Error("Challenge information not returned from session information.");const{OvalIouThreshold:c,OvalIouHeightThreshold:l,OvalIouWidthThreshold:d,FaceIouHeightThreshold:h,FaceIouWidthThreshold:u}=s,m=V(e,t),f=m.left,v=m.right,g=m.top,p=m.bottom,{ovalBoundingBox:E,minOvalX:T,minOvalY:C,maxOvalX:y,maxOvalY:x}=N(t),F=H(m,E),R=c,M=t.width*d,w=t.height*l,A=t.width*u,_=t.height*h,D=100*Math.max(Math.min(1,.75*(F-a)/(R-a)+.25),0);return o=F>R&&Math.abs(T-f)<M&&Math.abs(y-v)<M&&Math.abs(x-p)<w?S.MATCHED:C-g>_||p-x>_||T-f>A&&v-y>A?S.TOO_CLOSE:S.TOO_FAR,{faceMatchState:o,faceMatchPercentage:D}}(c,r,m,e);s=t,d=a;break}default:s=S.TOO_MANY}return{faceMatchState:s,faceMatchPercentage:d,illuminationState:l,detectedFace:c}}))},flashColors(t){return e.__awaiter(this,void 0,void 0,(function*(){const{freshnessColorsComplete:e,freshnessColorDisplay:a}=t.freshnessColorAssociatedParams;if(e)return;return{freshnessColorsComplete:yield a.displayColorTick()}}))},stopVideo(t){return e.__awaiter(this,void 0,void 0,(function*(){const{challengeId:e,livenessStreamProvider:a}=t,{videoMediaStream:i}=t.videoAssociatedParams,{initialFace:r,ovalDetails:n}=t.ovalAssociatedParams,{startFace:o,endFace:s}=t.faceMatchAssociatedParams,{width:c,height:l}=i.getTracks()[0].getSettings(),d=c-r.left-r.width;yield a.stopVideo();const h={Challenge:{FaceMovementAndLightChallenge:{ChallengeId:e,InitialFace:{InitialFaceDetectedTimestamp:r.timestampMs,BoundingBox:$({deviceHeight:l,deviceWidth:c,height:r.height,width:r.width,top:r.top,left:d})},TargetFace:{FaceDetectedInTargetPositionStartTimestamp:o.timestampMs,FaceDetectedInTargetPositionEndTimestamp:s.timestampMs,BoundingBox:$({deviceHeight:l,deviceWidth:c,height:n.height,width:n.width,top:n.centerY-n.height/2,left:n.centerX-n.width/2})},VideoEndTimestamp:a.videoRecorder.recorderEndTimestamp}}};if(0===a.videoRecorder.getVideoChunkSize())throw new Error("Video chunks not recorded successfully.");a.sendClientInfo(h),yield a.dispatchStopVideoEvent()}))},getLiveness(t){return e.__awaiter(this,void 0,void 0,(function*(){const{onAnalysisComplete:e}=t.componentProps;yield e()}))}}}),me=t=>e.__awaiter(void 0,void 0,void 0,(function*(){var a,i,r,n,o;try{const d=yield he;try{for(var s,c=!0,l=e.__asyncValues(d);!(a=(s=yield l.next()).done);){n=s.value,c=!1;try{const e=n;(null==(o=e)?void 0:o.ServerSessionInformationEvent)?t({type:"SET_SESSION_INFO",data:{sessionInfo:e.ServerSessionInformationEvent.SessionInformation}}):ne(e)?t({type:"DISCONNECT_EVENT"}):oe(e)?t({type:"SERVER_ERROR",data:{error:Object.assign({},e.ValidationException)}}):se(e)?t({type:"SERVER_ERROR",data:{error:Object.assign({},e.InternalServerException)}}):ce(e)?t({type:"SERVER_ERROR",data:{error:Object.assign({},e.ThrottlingException)}}):le(e)&&t({type:"SERVER_ERROR",data:{error:Object.assign({},e.ServiceQuotaExceededException)}})}finally{c=!0}}}catch(e){i={error:e}}finally{try{c||a||!(r=l.return)||(yield r.call(l))}finally{if(i)throw i.error}}}catch(e){let a=e;(e=>{const{message:t,name:a}=e;return"InvalidSignatureException"===a&&t.includes("valid region")})(e)&&(a=new Error("Invalid region in FaceLivenessDetector or credentials are scoped to the wrong region.")),a instanceof Error&&t({type:"SERVER_ERROR",data:{error:a}})}})),fe=y.default.createContext(null);function 
ve(t){var{children:a}=t,i=e.__rest(t,["children"]);return y.default.createElement(fe.Provider,{value:i},a)}function ge(){const e=y.default.useContext(fe);if(null===e)throw new Error("useFaceLivenessDetector must be used within a FaceLivenessDetectorProvider");return e}function pe(){const{service:e}=ge();return i.useActor(e)}function Ee(e){const{service:t}=ge();return i.useSelector(t,e)}var Te;!function(e){e.CameraModule="amplify-liveness-camera-module",e.CancelContainer="amplify-liveness-cancel-container",e.CancelButton="amplify-liveness-cancel-button",e.CountdownContainer="amplify-liveness-countdown-container",e.DescriptionBullet="amplify-liveness-description-bullet",e.DescriptionBulletIndex="amplify-liveness-description-bullet__index",e.DescriptionBulletIndexText="amplify-liveness-description-bullet__index__text",e.DescriptionBulletMessage="amplify-liveness-description-bullet__message",e.ErrorModal="amplify-liveness-error-modal",e.ErrorModalHeading="amplify-liveness-error-modal__heading",e.FadeOut="amplify-liveness-fade-out",e.FreshnessCanvas="amplify-liveness-freshness-canvas",e.InstructionList="amplify-liveness-instruction-list",e.InstructionOverlay="amplify-liveness-instruction-overlay",e.Figure="amplify-liveness-figure",e.FigureCaption="amplify-liveness-figure__caption",e.FigureIcon="amplify-liveness-figure__icon",e.FigureImage="amplify-liveness-figure__image",e.Figures="amplify-liveness-figures",e.Hint="amplify-liveness-hint",e.HintText="amplify-liveness-hint__text",e.LandscapeErrorModal="amplify-liveness-landscape-error-modal",e.LandscapeErrorModalButton="amplify-liveness-landscape-error-modal__button",e.LandscapeErrorModalHeader="amplify-liveness-landscape-error-modal__header",e.Loader="amplify-liveness-loader",e.MatchIndicator="amplify-liveness-match-indicator",e.OvalCanvas="amplify-liveness-oval-canvas",e.OpaqueOverlay="amplify-liveness-overlay-opaque",e.Overlay="amplify-liveness-overlay",e.Popover="amplify-liveness-popover",e.PopoverContainer="amplify-liveness-popover__container",e.PopoverAnchor="amplify-liveness-popover__anchor",e.PopoverAnchorSecondary="amplify-liveness-popover__anchor-secondary",e.RecordingIconContainer="amplify-liveness-recording-icon-container",e.RecordingIcon="amplify-liveness-recording-icon",e.StartScreenHeader="amplify-liveness-start-screen-header",e.StartScreenHeaderBody="amplify-liveness-start-screen-header__body",e.StartScreenHeaderHeading="amplify-liveness-start-screen-header__heading",e.StartScreenWarning="amplify-liveness-start-screen-warning",e.StartScreenInstructions="amplify-liveness-start-screen-instructions",e.StartScreenInstructionsHeading="amplify-liveness-start-screen-instructions__heading",e.Toast="amplify-liveness-toast",e.ToastContainer="amplify-liveness-toast__container",e.ToastMessage="amplify-liveness-toast__message",e.Video="amplify-liveness-video",e.VideoAnchor="amplify-liveness-video-anchor"}(Te||(Te={}));const Ce=({ariaLabel:e})=>{const[t,a]=pe();return t.done?null:y.default.createElement(f.Button,{autoFocus:!0,variation:"link",onClick:()=>{a({type:"CANCEL"})},size:"large",className:Te.CancelButton,"aria-label":e},y.default.createElement(v.IconClose,{"aria-hidden":"true","data-testid":"close-icon"}))},Se=t=>{var{variation:a="default",size:i="medium",children:r}=t,n=e.__rest(t,["variation","size","children"]);return x.createElement(f.View,Object.assign({className:`${Te.Toast} ${Te.Toast}--${a} 
${Te.Toast}--${i}`,maxWidth:{base:"100%",small:"70%"}},n),x.createElement(f.Flex,{className:Te.ToastContainer},x.createElement(f.Flex,{className:Te.ToastMessage},r)))},ye=t=>{var{children:a,anchorOrigin:i={horizontal:"center",vertical:"center"},className:r}=t,n=e.__rest(t,["children","anchorOrigin","className"]);return x.createElement(f.Flex,Object.assign({className:`${Te.Overlay} ${r}`,alignItems:i.horizontal,justifyContent:i.vertical},n),a)},xe=e=>e.context.errorState,Fe=e=>e.context.faceMatchAssociatedParams.faceMatchState,Re=e=>e.context.faceMatchAssociatedParams.illuminationState,Me=e=>e.context.isFaceFarEnoughBeforeRecording,we=e=>e.context.faceMatchStateBeforeStart,Ae=({hintDisplayText:e})=>{const[t]=pe(),a=Ee(xe),i=Ee(Fe),r=Ee(Re),n=Ee(we),o=Ee(Me),s=t.matches("checkFaceDetectedBeforeStart"),c=t.matches("checkFaceDistanceBeforeRecording"),l=t.matches("recording"),d=t.matches("notRecording"),h=t.matches("uploading"),u=t.matches("checkSucceeded"),m=t.matches("checkFailed"),v=t.matches({recording:"flashFreshnessColors"}),g={[S.CANT_IDENTIFY]:e.hintCanNotIdentifyText,[S.FACE_IDENTIFIED]:e.hintTooFarText,[S.TOO_MANY]:e.hintTooManyFacesText,[S.TOO_CLOSE]:e.hintTooCloseText,[S.TOO_FAR]:e.hintTooFarText,[S.MATCHED]:e.hintHoldFaceForFreshnessText},p={[C.BRIGHT]:e.hintIlluminationTooBrightText,[C.DARK]:e.hintIlluminationTooDarkText,[C.NORMAL]:e.hintIlluminationNormalText},E=(()=>{if(!(a||m||u)){if(!l){if(s)return n===S.TOO_MANY?x.createElement(Se,null,g[n]):x.createElement(Se,null,e.hintMoveFaceFrontOfCameraText);if(c&&!1===o)return x.createElement(Se,null,e.hintTooCloseText);if(d)return x.createElement(Se,null,x.createElement(f.Flex,{className:Te.HintText},x.createElement(f.Loader,null),x.createElement(f.View,null,e.hintConnectingText)));if(h)return x.createElement(ye,{className:Te.OpaqueOverlay,anchorOrigin:{horizontal:"center",vertical:"end"}},x.createElement(Se,null,x.createElement(f.Flex,{className:Te.HintText},x.createElement(f.Loader,null),x.createElement(f.View,null,e.hintVerifyingText))));if(r&&r!==C.NORMAL)return x.createElement(Se,null,p[r])}return v?x.createElement(Se,{size:"large",variation:"primary"},e.hintHoldFaceForFreshnessText):l&&!v?x.createElement(Se,{size:"large",variation:i===S.TOO_CLOSE?"error":"primary"},i===S.TOO_CLOSE?g[S.TOO_CLOSE]:g[S.TOO_FAR]):null}})();return E||null},_e=({children:e})=>y.default.createElement(f.Flex,{className:Te.RecordingIcon},y.default.createElement(f.Flex,{"data-testid":"rec-icon",justifyContent:"center"},y.default.createElement(f.Icon,{viewBox:{width:20,height:20},width:"20",height:"20"},y.default.createElement("circle",{cx:"10",cy:"10",r:"8",fill:"red"}))),y.default.createElement(f.Text,{as:"span",fontWeight:"bold"},e)),De=t=>{var{children:a,caption:i,variation:r="default"}=t,n=e.__rest(t,["children","caption","variation"]);return y.default.createElement(f.Flex,Object.assign({as:"figure",className:`${Te.Figure} ${Te.Figure}--${r}`},n),y.default.createElement(f.View,{className:`${Te.FigureImage} ${Te.FigureImage}--${r}`},"success"===r?y.default.createElement("svg",{className:Te.FigureIcon,"aria-hidden":"true",width:"24",height:"24"},y.default.createElement("g",{fill:"none"},y.default.createElement("path",{fill:"#365E3D",d:"M0 0h24v24H0z"}),y.default.createElement("path",{fill:"#FFF",d:"m9.435 15.62-4.054-4.055L4 12.936l5.435 5.435L21.101 
6.704l-1.37-1.371z"}))):null,"error"===r?y.default.createElement("svg",{className:Te.FigureIcon,"aria-hidden":"true",width:"24",height:"24"},y.default.createElement("g",{fill:"none"},y.default.createElement("path",{fill:"#600",d:"M0 0h24v24H0z"}),y.default.createElement("path",{fill:"#FFF",d:"M19 6.41 17.59 5 12 10.59 6.41 5 5 6.41 10.59 12 5 17.59 6.41 19 12 13.41 17.59 19 19 17.59 13.41 12z"}))):null,a),y.default.createElement(f.View,{as:"figcaption",className:`${Te.FigureCaption} ${Te.FigureCaption}--${r}`},i))},Oe=({title:e,testId:t})=>y.default.createElement("svg",{width:"150",height:"150","data-testid":t},y.default.createElement("title",null,e),y.default.createElement("g",{fill:"none",fillRule:"evenodd",transform:"translate(0 -.001)"},y.default.createElement("path",{fill:"#5B361E",fillRule:"nonzero",d:"M124.655 0c9.173 8.155 9.394 17.812 13.258 32.385l.053.336.108.726.11.796.112.864.114.931.174 1.515.117 1.087.18 1.739.12 1.23.183 1.944.123 1.36.186 2.13.187 2.232.313 3.928.25 3.31.25 3.443.31 4.463.245 3.679.36 5.658.345 5.778.33 5.841.26 4.876.199 3.883.187 3.849.217 4.738.16 3.712.178 4.515.097 2.63v34.977L.519 150 .517 41.97c3-13.353 9.664-29.4 23.841-41.97h100.297Z"}),y.default.createElement("path",{fill:"#FCDDCC",fillRule:"nonzero",stroke:"#000",d:"m50.469 18.849.145.153c9.021 9.393 22.62 16.197 36.089 21.996l2.1.897 1.05.443 2.089.876 8.176 3.385 1.979.825 1.944.82c7.782 3.3 14.617 6.491 19.213 10.006 3.57 2.73 5.793 5.645 5.924 8.999v20.474l-.008.624-.016.669-.04 1.089-.04.777-.047.815-.06.853-.068.887-.08.918-.093.95-.104.978-.057.496-.123 1.016-.066.513-.144 1.049-.076.527-.165 1.077c-.057.36-.116.724-.178 1.086l-.193 1.103-.21 1.116-.11.557-.233 1.13c-.12.564-.247 1.13-.38 1.694l-.275 1.14c-1.037 4.147-2.426 8.3-4.271 11.978-6.17 9.34-12.996 16.035-19.28 20.691l-.8.584-.794.562-.784.539-1.165.77-1.147.724-.755.459c-.249.148-.497.294-.74.434l-.73.416-1.078.588-.702.367-1.033.517-.671.321-.657.303-.643.285-.541.23H68.149a75.81 75.81 0 0 1-.81-.284l-.918-.336a75.953 75.953 0 0 1-.935-.355l-.963-.382a85.513 85.513 0 0 1-1.988-.83l-1.032-.455c-.52-.233-1.05-.475-1.585-.727l-1.087-.517-1.113-.547c-.935-.465-1.893-.959-2.873-1.482l-1.193-.644a141.053 141.053 0 0 1-6.297-3.669l-1.33-.83c-17.11-10.783-22.636-33.458-23.66-49.98l-.071-1.267c-.02-.417-.038-.83-.053-1.235l-.037-1.212a86.317 86.317 0 0 1 .042-5.559l.047-1.002.06-.96.064-.843c1.09-2.51 2.164-4.304 3.296-5.882l.408-.558.415-.545.421-.538 2.026-2.492.481-.597.493-.624.507-.656.518-.69a61.722 61.722 0 0 0 3.769-5.754c4.03-6.917 7.127-14.806 9.544-21.668l.566-1.623.802-2.344 2.077-6.175.416-1.205.395-1.109.373-1.007.267-.682.253-.612c.47-.943.8-1.531 1.06-1.876l-.035.047Z"}),y.default.createElement("path",{fill:"#000",fillRule:"nonzero",d:"m94.566 121.353.722.895c-6.828 5.51-14.13 7.462-21.382 6.447-5.417-.758-10.535-3.2-13.987-6.186l-.318-.282.77-.854c3.285 2.964 8.343 5.434 13.694 6.183 6.797.95 13.632-.819 20.089-5.876l.412-.327Z"}),y.default.createElement("ellipse",{cx:"51.331",cy:"80.698",fill:"#000",fillRule:"nonzero",rx:"4.886",ry:"6.707"}),y.default.createElement("path",{fill:"#000",fillRule:"nonzero",d:"M42.539 63.719c4.453-2.586 11.355-3.268 17.22-.195l.35.19-.556 1.005c-5.437-3.01-11.946-2.479-16.175-.153l-.262.148-.577-.995Z"}),y.default.createElement("ellipse",{cx:"103.281",cy:"80.698",fill:"#000",fillRule:"nonzero",rx:"4.886",ry:"6.707"}),y.default.createElement("path",{fill:"#000",fillRule:"nonzero",d:"M94.492 63.719c4.453-2.586 11.355-3.268 17.22-.195l.35.19-.557 
1.005c-5.436-3.01-11.946-2.479-16.174-.153l-.262.148-.577-.995Zm-22.972 32.9c0 4.216 2.006 7.72 5.831 7.48l.232-.018.115 1.144c-4.774.477-7.239-3.571-7.326-8.345l-.003-.26 1.15-.001h.001Z"}),y.default.createElement("path",{fill:"#FFF",fillRule:"nonzero",d:"M75.002.001H0v150h150v-150H75.002Zm0 0c25.627 0 46.402 33.579 46.402 75s-20.775 75-46.402 75c-25.627 0-46.402-33.579-46.402-75s20.775-75 46.402-75Z"}),y.default.createElement("path",{stroke:"#AEB3B7",strokeWidth:"2",d:"M120.921 75.001c0 20.555-5.214 39.117-13.589 52.507-8.386 13.406-19.838 21.493-32.313 21.493-12.476 0-23.928-8.087-32.312-21.493-8.377-13.39-13.59-31.952-13.59-52.507 0-20.555 5.214-39.116 13.589-52.507C51.091 9.09 62.543 1.001 75.018 1.001c12.476 0 23.928 8.088 32.314 21.493 8.375 13.39 13.588 31.952 13.588 52.507h.001Z"}))),Ie=({title:e,testId:t})=>y.default.createElement("svg",{width:"150",height:"150","data-testid":t},y.default.createElement("title",null,e),y.default.createElement("defs",null,y.default.createElement("linearGradient",{id:"a",x1:"50%",x2:"50%",y1:"0%",y2:"100%"},y.default.createElement("stop",{offset:"0%",stopColor:"#C2C2C2"}),y.default.createElement("stop",{offset:"100%",stopColor:"#C2C2C2",stopOpacity:"0"}))),y.default.createElement("g",{fill:"none",fillRule:"evenodd",transform:"translate(2)"},y.default.createElement("path",{fill:"#FFF",fillRule:"nonzero",d:"M3.997 0h136v150h-136z"}),y.default.createElement("path",{fill:"url(#a)",fillRule:"nonzero",d:"M4.333 0h138v150h-138z"}),y.default.createElement("path",{fill:"#5B361E",fillRule:"nonzero",stroke:"#000",strokeWidth:"1.353",d:"m22.515 58.137-1.895 82.434 98.784-2.343c0-8.798.813-16.271.813-31.497 0-15.748-2.345-55.36-3.766-64.125C108.16 11.338 74.737 5.03 56.707 11.04c-28.425 9.475-33.64 35.095-34.192 47.097Z"}),y.default.createElement("path",{fill:"#EF992A",fillRule:"nonzero",d:"M38.104 138.228c8.339-3.98 5.1-1.525 15.916-3.104H90.5c5.448 0 9.541 3.104 28.904 3.104 6.391 0 5.987 8.988-2.473 8.988-24.315 1.08-66.07 1.08-78.281 0-77.975-6.896-10.97-4.014-.546-8.988Z"}),y.default.createElement("path",{fill:"#FCDDCC",fillRule:"nonzero",stroke:"#000",strokeWidth:"1.353",d:"m54.306 134.26 2.645-21.765h30.498l3.05 21.765c-15.778 14.791-30.703 6.163-36.193 0zm-27.59-54.458C25.42 66.68 33.467 67.18 37.653 69.07l2.837 25.314c-10.328-2.228-13.772-12.961-13.772-14.58zm89.512-.81c4.05-15.067-3.984-15.998-8.506-14.58L105.9 91.75c10.328-8.505 9.113-12.758 10.328-12.758z"}),y.default.createElement("path",{fill:"#FCDDCC",fillRule:"nonzero",stroke:"#000",strokeWidth:"1.353",d:"M53.564 109.804c-14.195-8.986-16.116-30.658-15.302-40.37 2.24-5.21 4.37-5.723 7.958-11.909 6.3-10.86 9.028-25.451 10.579-25.009 14.241 16.008 50.215 20.259 50.649 31.708v13.023c0 4.178-.911 14.358-4.558 21.65-8.986 13.674-20.131 18.612-24.58 19.372-2.334.922-10.55.521-24.746-8.465Z"}),y.default.createElement("path",{fill:"#000",fillRule:"nonzero",d:"m83.935 98.402.85 1.052c-7.974 6.435-17.2 5.243-23.018.18l-.23-.204.905-1.004c5.273 4.756 13.744 5.998 21.175.227l.318-.251Z"}),y.default.createElement("ellipse",{cx:"56.448",cy:"72.613",fill:"#000",fillRule:"nonzero",rx:"3.128",ry:"4.294"}),y.default.createElement("path",{fill:"#000",fillRule:"nonzero",d:"M50.664 61.476c2.917-1.694 7.404-2.147 11.244-.172l.31.165-.655 1.183c-3.348-1.854-7.361-1.545-9.985-.137l-.234.13-.68-1.17Z"}),y.default.createElement("ellipse",{cx:"89.709",cy:"72.613",fill:"#000",fillRule:"nonzero",rx:"3.128",ry:"4.294"}),y.default.createElement("path",{fill:"#000",fillRule:"nonzero",d:"M83.926 61.476c2.917-1.694 7.404-2.147 
11.244-.172l.31.165-.655 1.183c-3.348-1.854-7.361-1.545-9.985-.137l-.234.13-.68-1.17Z"}),y.default.createElement("path",{stroke:"#000",strokeWidth:"1.353",d:"M69.005 82.806c0 1.858.859 5.487 4.287 5.144"}),y.default.createElement("path",{fill:"#FFF",d:"M73.004 0H0v150h146V0H73.004Zm.496 0C98.629 0 119 33.579 119 75s-20.371 75-45.5 75S28 116.421 28 75 48.371 0 73.5 0Z"}),y.default.createElement("path",{stroke:"#AEB3B7",strokeWidth:"2",d:"M118.4 75c0 20.555-5.156 39.117-13.441 52.507C96.665 140.913 85.338 149 72.999 149c-12.34 0-23.667-8.087-31.961-21.493C32.753 114.117 27.597 95.555 27.597 75c0-20.555 5.156-39.117 13.44-52.507C49.333 9.087 60.66 1 72.999 1c12.34 0 23.667 8.087 31.961 21.493C113.244 35.883 118.4 54.445 118.4 75Z"}))),be=({children:e})=>{const t=v.useThemeBreakpoint(),[a,i]=x.useState(!1),r=x.useRef(null),n="base"===t;return x.useEffect((()=>{function e(e){a&&r.current&&!r.current.contains(e.target)&&i(!1)}return document.addEventListener("mousedown",e),()=>{document.removeEventListener("mousedown",e)}}),[r,a]),x.createElement(f.Flex,{className:Te.Popover,onClick:()=>i(!a),ref:r,testId:"popover-icon"},x.createElement(v.AlertIcon,{ariaHidden:!0,variation:"info"}),a&&x.createElement(x.Fragment,null,x.createElement(f.Flex,{className:Te.PopoverAnchor}),x.createElement(f.Flex,{className:Te.PopoverAnchorSecondary}),x.createElement(f.Flex,{className:Te.PopoverContainer,left:n?-190:-108,"data-testid":"popover-text"},e)))};be.displayName="LivenessIconWithPopover";const Pe=({headingText:e,bodyText:t})=>y.default.createElement(f.View,{className:Te.StartScreenHeader},y.default.createElement(f.View,{className:Te.StartScreenHeaderHeading},e),y.default.createElement(f.View,{className:Te.StartScreenHeaderBody},t)),ke=({headingText:e,bodyText:t,infoText:a})=>y.default.createElement(f.Flex,{className:`${f.ComponentClassNames.Alert} ${Te.StartScreenWarning}`},y.default.createElement(f.View,{flex:"1"},y.default.createElement(f.View,{className:f.ComponentClassNames.AlertHeading},e),y.default.createElement(f.View,{className:f.ComponentClassNames.AlertBody},t)),y.default.createElement(be,null,a)),Le=({headingText:e,goodFitCaptionText:t,goodFitAltText:a,tooFarCaptionText:i,tooFarAltText:r,steps:n})=>y.default.createElement(f.Flex,{direction:"column"},y.default.createElement(f.Text,{className:Te.StartScreenInstructionsHeading},e),y.default.createElement(f.Flex,{className:Te.Figures},y.default.createElement(De,{variation:"success",caption:t},y.default.createElement(Oe,{title:a})),y.default.createElement(De,{variation:"error",caption:i},y.default.createElement(Ie,{title:r}))),y.default.createElement(f.Flex,{as:"ol",className:Te.InstructionList},n.map(((e,t)=>y.default.createElement(f.Flex,{as:"li",key:t+1},y.default.createElement(f.Text,{as:"span","aria-hidden":"true"},t+1,"."),y.default.createElement(f.Text,{as:"span"},e)))))),Ne="liveness-detector-start";function He(e){const{beginLivenessCheck:t,components:a,instructionDisplayText:i}=e;return x.createElement(f.Card,{className:Ne,"data-testid":Ne},x.createElement(f.Flex,{direction:"column"},(null==a?void 0:a.Header)?x.createElement(a.Header,null):x.createElement(Pe,{headingText:i.instructionsHeaderHeadingText,bodyText:i.instructionsHeaderBodyText}),(null==a?void 0:a.PhotosensitiveWarning)?x.createElement(a.PhotosensitiveWarning,null):x.createElement(ke,{headingText:i.photosensitivyWarningHeadingText,bodyText:i.photosensitivyWarningBodyText,infoText:i.photosensitivyWarningInfoText}),(null==a?void 
0:a.Instructions)?x.createElement(a.Instructions,null):x.createElement(Le,{headingText:i.instructionListHeadingText,goodFitCaptionText:i.goodFitCaptionText,goodFitAltText:i.goodFitAltText,tooFarCaptionText:i.tooFarCaptionText,tooFarAltText:i.tooFarAltText,steps:[i.instructionListStepOneText,i.instructionListStepTwoText,i.instructionListStepThreeText,i.instructionListStepFourText]}),x.createElement(f.Flex,{justifyContent:"center"},x.createElement(f.Button,{variation:"primary",type:"button",onClick:t},i.instructionsBeginCheckText))))}const Be={timeoutHeaderText:"Time out",timeoutMessageText:"Face didn't fit inside oval in time limit. Try again and completely fill the oval with face in it.",faceDistanceHeaderText:"Forward movement detected",faceDistanceMessageText:"Avoid moving closer when connecting.",multipleFacesHeaderText:"Multiple faces detected",multipleFacesMessageText:"Ensure only one face is present in front of the camera when connecting.",clientHeaderText:"Client error",clientMessageText:"Check failed due to client issue",serverHeaderText:"Server issue",serverMessageText:"Cannot complete check due to server issue",landscapeHeaderText:"Landscape orientation not supported",landscapeMessageText:"Rotate your device to portrait (vertical) orientation.",portraitMessageText:"Ensure your device remains in portrait (vertical) orientation for the check’s duration.",tryAgainText:"Try again"},We=Object.assign({instructionsHeaderHeadingText:"Liveness check",instructionsHeaderBodyText:"You will go through a face verification process to prove that you are a real person.",instructionsBeginCheckText:"Begin check",photosensitivyWarningHeadingText:"Photosensitivity warning",photosensitivyWarningBodyText:"This check displays colored lights. Use caution if you are photosensitive.",photosensitivyWarningInfoText:"A small percentage of individuals may experience epileptic seizures when exposed to colored lights. 
Use caution if you, or anyone in your family, have an epileptic condition.",instructionListHeadingText:"Follow the instructions to complete the check:",goodFitCaptionText:"Good fit",goodFitAltText:"Illustration of a person's face, perfectly fitting inside of an oval.",tooFarCaptionText:"Too far",tooFarAltText:"Illustration of a person's face inside of an oval; there is a gap between the perimeter of the face and the boundaries of the oval.",instructionListStepOneText:"When an oval appears, follow the instructions to fit your face in it.",instructionListStepTwoText:"Maximize your screen's brightness.",instructionListStepThreeText:"Make sure your face is not covered with sunglasses or a mask.",instructionListStepFourText:"Move to a well-lit place that is not in direct sunlight.",cameraMinSpecificationsHeadingText:"Camera does not meet minimum specifications",cameraMinSpecificationsMessageText:"Camera must support at least 320*240 resolution and 15 frames per second.",cameraNotFoundHeadingText:"Camera not accessible.",cameraNotFoundMessageText:"Check that camera is connected and camera permissions are enabled in settings before retrying.",retryCameraPermissionsText:"Retry",cancelLivenessCheckText:"Cancel Liveness check",recordingIndicatorText:"Rec",hintMoveFaceFrontOfCameraText:"Move face in front of camera",hintTooManyFacesText:"Ensure only one face is in front of camera",hintFaceDetectedText:"Face detected",hintCanNotIdentifyText:"Move face in front of camera",hintTooCloseText:"Move back",hintTooFarText:"Move closer",hintHoldFacePositionCountdownText:"Hold face position during countdown",hintConnectingText:"Connecting...",hintVerifyingText:"Verifying...",hintIlluminationTooBrightText:"Move to dimmer area",hintIlluminationTooDarkText:"Move to brighter area",hintIlluminationNormalText:"Lighting conditions normal",hintHoldFaceForFreshnessText:"Hold still"},Be),Ve=({errorState:e,overrideErrorDisplayText:t})=>{const a=Object.assign(Object.assign({},Be),t);return e===_.CAMERA_ACCESS_ERROR||e===_.CAMERA_FRAMERATE_ERROR||e===_.MOBILE_LANDSCAPE_ERROR?null:(e=>{const{error:t,displayText:a}=e,{timeoutHeaderText:i,timeoutMessageText:r,faceDistanceHeaderText:n,faceDistanceMessageText:o,multipleFacesHeaderText:s,multipleFacesMessageText:c,clientHeaderText:l,clientMessageText:d,serverHeaderText:h,serverMessageText:u}=a;let m,g;switch(t){case _.TIMEOUT:m=i,g=r;break;case _.FACE_DISTANCE_ERROR:m=n,g=o;break;case _.MULTIPLE_FACES_ERROR:m=s,g=c;break;case _.RUNTIME_ERROR:m=l,g=d;break;case _.SERVER_ERROR:default:m=h,g=u}return y.default.createElement(y.default.Fragment,null,y.default.createElement(f.Flex,{className:Te.ErrorModal},y.default.createElement(v.AlertIcon,{ariaHidden:!0,variation:"error"}),y.default.createElement(f.Text,{className:Te.ErrorModalHeading},m)),g)})({error:e,displayText:a})},je=e=>{const{children:t,onRetry:a,displayText:i}=e,r=Object.assign(Object.assign({},Be),i),{tryAgainText:n}=r;return y.default.createElement(ye,{className:Te.OpaqueOverlay},y.default.createElement(Se,null,t,y.default.createElement(f.Flex,{justifyContent:"center"},y.default.createElement(f.Button,{variation:"primary",type:"button",onClick:a},n))))},ze=e=>{var t;return null===(t=e.context.videoAssociatedParams)||void 0===t?void 
0:t.videoMediaStream},qe=e=>e.context.faceMatchAssociatedParams.faceMatchPercentage,Ue=e=>e.context.faceMatchAssociatedParams.faceMatchState,Ye=y.default.createElement(f.Loader,{size:"large",className:Te.Loader,"data-testid":"centered-loader"}),$e=y.default.memo((({percentage:e,initialPercentage:t=25,testId:a})=>{const[i,r]=y.default.useState(t);y.default.useEffect((()=>{r(e<0?0:e>100?100:e)}),[e]);const n={"--percentage":`${i}%`};return y.default.createElement("div",{className:Te.MatchIndicator,"data-testid":a},y.default.createElement("div",{className:`${Te.MatchIndicator}__bar`,style:n}))})),Ge=e=>{const{isMobileScreen:a,isRecordingStopped:i,streamDisplayText:r,hintDisplayText:n,errorDisplayText:o,components:s,testId:c}=e,{cancelLivenessCheckText:l,recordingIndicatorText:d}=r,{ErrorView:h=je}=null!=s?s:{},[u,m]=pe(),v=Ee(ze),p=Ee(qe),E=Ee(Ue),T=Ee(xe),C=[S.TOO_FAR,S.CANT_IDENTIFY,S.FACE_IDENTIFIED,S.MATCHED],{videoRef:x,videoWidth:F,videoHeight:R}=function(e){const a=de.height.ideal,i=de.width.ideal,r=t.useRef(null),[n,o]=t.useState(a),[s,c]=t.useState(i);return t.useEffect((()=>{if(e){g.isObject(r.current)&&(r.current.srcObject=e);const{height:t,width:a}=e.getTracks()[0].getSettings();o(t),c(a)}return()=>{e&&e.getTracks().forEach((t=>{e.removeTrack(t),t.stop()}))}}),[e]),{videoRef:r,videoHeight:n,videoWidth:s}}(v),M=t.useRef(null),A=t.useRef(null),[_,D]=t.useState(!1),O=u.matches("cameraCheck"),I=u.matches("recording"),b=u.matches("checkSucceeded"),P=u.matches({recording:"flashFreshnessColors"}),[k,L]=t.useState(F),[N,H]=t.useState(R),[B,W]=t.useState((()=>F&&R?F/R:0));y.default.useLayoutEffect((()=>{_&&m({type:"SET_DOM_AND_CAMERA_DETAILS",data:{videoEl:x.current,canvasEl:M.current,freshnessColorEl:A.current,isMobile:a}}),x.current&&(L(x.current.videoWidth),H(x.current.videoHeight),W(x.current.videoWidth/x.current.videoHeight))}),[m,x,_,a]);return O?y.default.createElement(f.Flex,{height:R,width:"100%",position:"relative"},Ye):y.default.createElement(f.Flex,{className:w.default(Te.CameraModule,a&&`${Te.CameraModule}--mobile`),"data-testid":c},!_&&Ye,y.default.createElement(f.View,{as:"canvas",ref:A,className:Te.FreshnessCanvas,hidden:!0}),y.default.createElement(f.View,{className:Te.VideoAnchor,style:{aspectRatio:`${B}`}},y.default.createElement("video",{ref:x,muted:!0,autoPlay:!0,playsInline:!0,style:{transform:"scaleX(-1)"},width:k,height:N,onCanPlay:()=>{D(!0)},"data-testid":"video",className:Te.Video}),y.default.createElement(f.Flex,{className:w.default(Te.OvalCanvas,a&&`${Te.OvalCanvas}--mobile`,i&&Te.FadeOut)},y.default.createElement(f.View,{as:"canvas",width:"100%",height:"100%",ref:M})),I&&y.default.createElement(f.View,{className:Te.RecordingIconContainer},y.default.createElement(_e,null,d)),!b&&y.default.createElement(f.View,{className:Te.CancelContainer},y.default.createElement(Ce,{ariaLabel:l})),y.default.createElement(ye,{anchorOrigin:{horizontal:"center",vertical:I&&!P?"start":"space-between"},className:Te.InstructionOverlay},y.default.createElement(Ae,{hintDisplayText:n}),T&&y.default.createElement(h,{onRetry:()=>{m({type:"CANCEL"})}},Ve({errorState:T,overrideErrorDisplayText:o})),I&&!P&&C.includes(E)?y.default.createElement($e,{percentage:Math.ceil(p)}):null)))};function Xe(){return/Android|iPhone|iPad/i.test(navigator.userAgent)||/Macintosh/i.test(navigator.userAgent)&&!!navigator.maxTouchPoints&&navigator.maxTouchPoints>1}function Ze(){return window.matchMedia("(orientation: landscape)")}const 
Ke=e=>{const{onRetry:t,header:a,portraitMessage:i,landscapeMessage:r,tryAgainText:n}=e,[o,s]=x.useState(!0);return x.useLayoutEffect((()=>{const e=Ze();return s(e.matches),e.addEventListener("change",(e=>{s(e.matches)})),()=>{e.removeEventListener("change",(e=>s(e.matches)))}}),[]),x.createElement(f.Flex,{className:Te.LandscapeErrorModal,height:o?"auto":480},x.createElement(f.Text,{className:Te.LandscapeErrorModalHeader},a),x.createElement(f.Text,null,o?r:i),o?null:x.createElement(f.Flex,{className:Te.LandscapeErrorModalButton},x.createElement(f.Button,{variation:"primary",type:"button",onClick:t},n)))},Qe="liveness-detector-check",Je=e=>e.context.isRecordingStopped,et=({hintDisplayText:e,cameraDisplayText:t,streamDisplayText:a,errorDisplayText:i,components:r})=>{const[n,o]=pe(),s=Ee(xe),c=Ee(Je),l=n.matches("permissionDenied"),d=Xe(),h=()=>{o({type:"RETRY_CAMERA_CHECK"})},{cameraMinSpecificationsHeadingText:u,cameraMinSpecificationsMessageText:m,cameraNotFoundHeadingText:v,cameraNotFoundMessageText:g,retryCameraPermissionsText:p}=t,{cancelLivenessCheckText:E}=a;x.useLayoutEffect((()=>{if(d){const e=e=>{e&&o({type:"MOBILE_LANDSCAPE_WARNING"})},t=Ze();return e(t.matches),t.addEventListener("change",(t=>{e(t.matches)})),()=>{t.removeEventListener("change",(t=>e(t.matches)))}}}),[d,o]);return x.createElement(f.Flex,{direction:"column",position:"relative",testId:Qe,className:Qe},(()=>{if(s===_.MOBILE_LANDSCAPE_ERROR){const e=Object.assign(Object.assign({},Be),i),{landscapeHeaderText:t,portraitMessageText:a,landscapeMessageText:r,tryAgainText:n}=e;return x.createElement(f.Flex,{backgroundColor:"background.primary",direction:"column",textAlign:"center",alignItems:"center",justifyContent:"center",position:"absolute",width:"100%"},x.createElement(Ke,{header:t,portraitMessage:a,landscapeMessage:r,tryAgainText:n,onRetry:()=>{o({type:"CANCEL"})}}))}return l?x.createElement(f.Flex,{backgroundColor:"background.primary",direction:"column",textAlign:"center",alignItems:"center",justifyContent:"center",width:"100%",height:480},x.createElement(f.Text,{fontSize:"large",fontWeight:"bold"},s===_.CAMERA_FRAMERATE_ERROR?u:v),x.createElement(f.Text,{maxWidth:300},s===_.CAMERA_FRAMERATE_ERROR?m:g),x.createElement(f.Button,{variation:"primary",type:"button",onClick:h},p),x.createElement(f.View,{position:"absolute",top:"medium",right:"medium"},x.createElement(Ce,{ariaLabel:E}))):x.createElement(Ge,{isMobileScreen:d,isRecordingStopped:c,streamDisplayText:a,hintDisplayText:e,errorDisplayText:i,components:r})})())};const tt="liveness-detector";function at(e){const{disableInstructionScreen:t=!1,components:a,config:r,displayText:n}=e,o=x.useRef(null),{hintDisplayText:s,cameraDisplayText:c,instructionDisplayText:l,streamDisplayText:d,errorDisplayText:h}=function(e){const 
t=Object.assign(Object.assign({},We),e),{instructionsHeaderHeadingText:a,instructionsHeaderBodyText:i,instructionsBeginCheckText:r,photosensitivyWarningHeadingText:n,photosensitivyWarningBodyText:o,photosensitivyWarningInfoText:s,instructionListHeadingText:c,goodFitCaptionText:l,goodFitAltText:d,tooFarCaptionText:h,tooFarAltText:u,instructionListStepOneText:m,instructionListStepTwoText:f,instructionListStepThreeText:v,instructionListStepFourText:g,cameraMinSpecificationsHeadingText:p,cameraMinSpecificationsMessageText:E,cameraNotFoundHeadingText:T,cameraNotFoundMessageText:C,retryCameraPermissionsText:S,cancelLivenessCheckText:y,recordingIndicatorText:x,hintMoveFaceFrontOfCameraText:F,hintTooManyFacesText:R,hintFaceDetectedText:M,hintCanNotIdentifyText:w,hintTooCloseText:A,hintTooFarText:_,hintHoldFacePositionCountdownText:D,hintConnectingText:O,hintVerifyingText:I,hintIlluminationTooBrightText:b,hintIlluminationTooDarkText:P,hintIlluminationNormalText:k,hintHoldFaceForFreshnessText:L,timeoutHeaderText:N,timeoutMessageText:H,faceDistanceHeaderText:B,faceDistanceMessageText:W,multipleFacesHeaderText:V,multipleFacesMessageText:j,clientHeaderText:z,clientMessageText:q,serverHeaderText:U,serverMessageText:Y,landscapeHeaderText:$,landscapeMessageText:G,portraitMessageText:X,tryAgainText:Z}=t;return{hintDisplayText:{hintMoveFaceFrontOfCameraText:F,hintTooManyFacesText:R,hintFaceDetectedText:M,hintCanNotIdentifyText:w,hintTooCloseText:A,hintTooFarText:_,hintHoldFacePositionCountdownText:D,hintConnectingText:O,hintVerifyingText:I,hintIlluminationTooBrightText:b,hintIlluminationTooDarkText:P,hintIlluminationNormalText:k,hintHoldFaceForFreshnessText:L},cameraDisplayText:{cameraMinSpecificationsHeadingText:p,cameraMinSpecificationsMessageText:E,cameraNotFoundHeadingText:T,cameraNotFoundMessageText:C,retryCameraPermissionsText:S},instructionDisplayText:{instructionsHeaderHeadingText:a,instructionsHeaderBodyText:i,instructionsBeginCheckText:r,photosensitivyWarningHeadingText:n,photosensitivyWarningBodyText:o,photosensitivyWarningInfoText:s,instructionListHeadingText:c,goodFitCaptionText:l,goodFitAltText:d,tooFarCaptionText:h,tooFarAltText:u,instructionListStepOneText:m,instructionListStepTwoText:f,instructionListStepThreeText:v,instructionListStepFourText:g},streamDisplayText:{cancelLivenessCheckText:y,recordingIndicatorText:x},errorDisplayText:{timeoutHeaderText:N,timeoutMessageText:H,faceDistanceHeaderText:B,faceDistanceMessageText:W,multipleFacesHeaderText:V,multipleFacesMessageText:j,clientHeaderText:z,clientMessageText:q,serverHeaderText:U,serverMessageText:Y,landscapeHeaderText:$,landscapeMessageText:G,portraitMessageText:X,tryAgainText:Z}}}(n),u=i.useInterpret(ue,{devTools:"development"===process.env.NODE_ENV,context:{componentProps:Object.assign(Object.assign({},e),{config:null!=r?r:{}})}}),[m,v]=i.useActor(u),g=m.matches("start")||m.matches("userCancel"),p=x.useCallback((()=>{v({type:"BEGIN"})}),[v]);return x.useLayoutEffect((()=>{t&&g&&p()}),[p,t,g]),x.createElement(f.View,{className:tt,testId:tt},x.createElement(ve,{componentProps:e,service:u},x.createElement(f.Flex,{direction:"column",ref:o},g?x.createElement(He,{beginLivenessCheck:p,components:a,instructionDisplayText:l}):x.createElement(et,{hintDisplayText:s,cameraDisplayText:c,streamDisplayText:d,errorDisplayText:h,components:a}))))}const it=()=>e.__awaiter(void 0,void 0,void 0,(function*(){return yield a.Credentials.get()}));exports.FaceLivenessDetector=function(t){const{config:a}=t,i=e.__rest(t,["config"]);return 
x.createElement(at,Object.assign({},i,{config:Object.assign({credentialProvider:it},a)}))},exports.FaceLivenessDetectorCore=at;
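For context, a minimal usage sketch of the exported component (hedged: the JSX assumes a standard React build setup, the prop names follow the component props destructured above, and the session id is a placeholder produced by your backend's CreateFaceLivenessSession call):

    import { FaceLivenessDetector } from '@aws-amplify/ui-react-liveness';

    export default function LivenessCheckPage({ sessionId }) {
      return (
        <FaceLivenessDetector
          sessionId={sessionId} // placeholder: created server-side via CreateFaceLivenessSession
          region="us-east-1"
          onAnalysisComplete={async () => {
            // query your backend, which calls GetFaceLivenessSessionResults
          }}
          onError={(error) => console.error('Liveness error:', error)}
        />
      );
    }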
1
+ 'use strict';
2
+
3
+ Object.defineProperty(exports, '__esModule', { value: true });
4
+
5
+ var React = require('react');
6
+ var auth = require('aws-amplify/auth');
7
+ var react = require('@xstate/react');
8
+ var xstate = require('xstate');
9
+ var tfjsCore = require('@tensorflow/tfjs-core');
10
+ var faceDetection = require('@tensorflow-models/face-detection');
11
+ var tfjsBackendWasm = require('@tensorflow/tfjs-backend-wasm');
12
+ require('@tensorflow/tfjs-backend-cpu');
13
+ var utils = require('@aws-amplify/core/internals/utils');
14
+ var clientRekognitionstreaming = require('@aws-sdk/client-rekognitionstreaming');
15
+ var utilFormatUrl = require('@aws-sdk/util-format-url');
16
+ var eventstreamSerdeBrowser = require('@smithy/eventstream-serde-browser');
17
+ var fetchHttpHandler = require('@smithy/fetch-http-handler');
18
+ var protocolHttp = require('@smithy/protocol-http');
19
+ var nanoid = require('nanoid');
20
+ var uiReact = require('@aws-amplify/ui-react');
21
+ var ui = require('@aws-amplify/ui');
22
+ var internal = require('@aws-amplify/ui-react/internal');
23
+
24
+ function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
25
+
26
+ function _interopNamespace(e) {
27
+ if (e && e.__esModule) return e;
28
+ var n = Object.create(null);
29
+ if (e) {
30
+ Object.keys(e).forEach(function (k) {
31
+ if (k !== 'default') {
32
+ var d = Object.getOwnPropertyDescriptor(e, k);
33
+ Object.defineProperty(n, k, d.get ? d : {
34
+ enumerable: true,
35
+ get: function () { return e[k]; }
36
+ });
37
+ }
38
+ });
39
+ }
40
+ n["default"] = e;
41
+ return Object.freeze(n);
42
+ }
43
+
44
+ var React__default = /*#__PURE__*/_interopDefaultLegacy(React);
45
+ var React__namespace = /*#__PURE__*/_interopNamespace(React);
46
+
47
+ /**
48
+ * The abstract class representing FaceDetection
49
+ * to be implemented for different libraries.
50
+ */
51
+ class FaceDetection {
52
+ /**
53
+ * Triggers the `loadModels` method and stores
54
+ * the corresponding promise to be awaited later.
55
+ */
56
+ triggerModelLoading() {
57
+ this.modelLoadingPromise = this.loadModels();
58
+ }
59
+ }
60
+
61
+ /**
62
+ * The illumination states
63
+ */
64
+ var IlluminationState;
65
+ (function (IlluminationState) {
66
+ IlluminationState["DARK"] = "dark";
67
+ IlluminationState["BRIGHT"] = "bright";
68
+ IlluminationState["NORMAL"] = "normal";
69
+ })(IlluminationState || (IlluminationState = {}));
70
+ /**
71
+ * The detected face states with respect to the liveness oval
72
+ */
73
+ var FaceMatchState;
74
+ (function (FaceMatchState) {
75
+ FaceMatchState["MATCHED"] = "MATCHED";
76
+ FaceMatchState["TOO_FAR"] = "TOO FAR";
77
+ FaceMatchState["TOO_CLOSE"] = "TOO CLOSE";
78
+ FaceMatchState["CANT_IDENTIFY"] = "CANNOT IDENTIFY";
79
+ FaceMatchState["FACE_IDENTIFIED"] = "ONE FACE IDENTIFIED";
80
+ FaceMatchState["TOO_MANY"] = "TOO MANY FACES";
81
+ })(FaceMatchState || (FaceMatchState = {}));
82
+
83
+ /**
84
+ * The liveness error states
85
+ */
86
+ const LivenessErrorState = {
87
+ TIMEOUT: 'TIMEOUT',
88
+ RUNTIME_ERROR: 'RUNTIME_ERROR',
89
+ FRESHNESS_TIMEOUT: 'FRESHNESS_TIMEOUT',
90
+ SERVER_ERROR: 'SERVER_ERROR',
91
+ CAMERA_FRAMERATE_ERROR: 'CAMERA_FRAMERATE_ERROR',
92
+ CAMERA_ACCESS_ERROR: 'CAMERA_ACCESS_ERROR',
93
+ FACE_DISTANCE_ERROR: 'FACE_DISTANCE_ERROR',
94
+ MOBILE_LANDSCAPE_ERROR: 'MOBILE_LANDSCAPE_ERROR',
95
+ MULTIPLE_FACES_ERROR: 'MULTIPLE_FACES_ERROR',
96
+ };
97
+
98
+ // Face distance is calculated as pupilDistance / ovalWidth.
99
+ // The further away you are from the camera, the smaller the distance between your pupils, and thus the lower the pupilDistance / ovalWidth ratio.
100
+ // These FACE_DISTANCE_THRESHOLD values are determined by the science team and should only be changed with their approval.
101
+ // We check at the start of a liveness check that the user's pupilDistance/ovalWidth is below FACE_DISTANCE_THRESHOLD to ensure that they are starting
102
+ // a certain distance away from the camera.
103
+ const FACE_DISTANCE_THRESHOLD = 0.32;
104
+ const REDUCED_THRESHOLD = 0.4;
105
+ const REDUCED_THRESHOLD_MOBILE = 0.37;
106
+ const WS_CLOSURE_CODE = {
107
+ SUCCESS_CODE: 1000,
108
+ DEFAULT_ERROR_CODE: 4000,
109
+ FACE_FIT_TIMEOUT: 4001,
110
+ USER_CANCEL: 4003,
111
+ RUNTIME_ERROR: 4005,
112
+ USER_ERROR_DURING_CONNECTION: 4007,
113
+ };
114
+
115
+ /* eslint-disable */
116
+ /**
117
+ * Returns the value between min and max
118
+ * corresponding to the provided random seed.
119
+ */
120
+ function getScaledValueFromRandomSeed(randomSeed, min, max) {
121
+ return randomSeed * (max - min) + min;
122
+ }
123
+ /**
124
+ * Returns the bounding box details from an oval
125
+ */
126
+ function getOvalBoundingBox(ovalDetails) {
127
+ const minOvalX = ovalDetails.flippedCenterX - ovalDetails.width / 2;
128
+ const maxOvalX = ovalDetails.flippedCenterX + ovalDetails.width / 2;
129
+ const minOvalY = ovalDetails.centerY - ovalDetails.height / 2;
130
+ const maxOvalY = ovalDetails.centerY + ovalDetails.height / 2;
131
+ const ovalBoundingBox = {
132
+ left: minOvalX,
133
+ top: minOvalY,
134
+ right: maxOvalX,
135
+ bottom: maxOvalY,
136
+ };
137
+ return { ovalBoundingBox, minOvalX, maxOvalX, minOvalY, maxOvalY };
138
+ }
139
+ /**
140
+ * Returns the ratio of intersection and union of two bounding boxes.
141
+ */
142
+ function getIntersectionOverUnion(box1, box2) {
143
+ const xA = Math.max(box1.left, box2.left);
144
+ const yA = Math.max(box1.top, box2.top);
145
+ const xB = Math.min(box1.right, box2.right);
146
+ const yB = Math.min(box1.bottom, box2.bottom);
147
+ const intersectionArea = Math.abs(Math.max(0, xB - xA) * Math.max(0, yB - yA));
148
+ if (intersectionArea === 0) {
149
+ return 0;
150
+ }
151
+ const boxAArea = Math.abs((box1.right - box1.left) * (box1.bottom - box1.top));
152
+ const boxBArea = Math.abs((box2.right - box2.left) * (box2.bottom - box2.top));
153
+ return intersectionArea / (boxAArea + boxBArea - intersectionArea);
154
+ }
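A quick worked example of the IoU helper above, with illustrative box values:

    // Two 10x10 boxes overlapping in a 5x5 region:
    const box1 = { left: 0, top: 0, right: 10, bottom: 10 };
    const box2 = { left: 5, top: 5, right: 15, bottom: 15 };
    // intersection = 5 * 5 = 25; union = 100 + 100 - 25 = 175
    getIntersectionOverUnion(box1, box2); // => 25 / 175 ≈ 0.143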
155
+ /**
156
+ * Returns the details of a randomly generated liveness oval
157
+ * from the SDK session information
158
+ */
159
+ function getOvalDetailsFromSessionInformation({ sessionInformation, videoWidth, }) {
160
+ const ovalParameters = sessionInformation?.Challenge?.FaceMovementAndLightChallenge
161
+ ?.OvalParameters;
162
+ if (!ovalParameters ||
163
+ !ovalParameters.CenterX ||
164
+ !ovalParameters.CenterY ||
165
+ !ovalParameters.Width ||
166
+ !ovalParameters.Height) {
167
+ throw new Error('Oval parameters not returned from session information.');
168
+ }
169
+ // We need to include a flippedCenterX for visualizing the oval on a flipped camera view
170
+ // The camera view we show the customer is flipped to make moving left and right more obvious
171
+ // The video stream sent to the liveness service is not flipped
172
+ return {
173
+ flippedCenterX: videoWidth - ovalParameters.CenterX,
174
+ centerX: ovalParameters.CenterX,
175
+ centerY: ovalParameters.CenterY,
176
+ width: ovalParameters.Width,
177
+ height: ovalParameters.Height,
178
+ };
179
+ }
180
+ /**
181
+ * Returns the details of a statically generated liveness oval based on the video dimensions
182
+ */
183
+ function getStaticLivenessOvalDetails({ width, height, widthSeed = 1.0, centerXSeed = 0.5, centerYSeed = 0.5, ratioMultiplier = 0.8, }) {
184
+ const videoHeight = height;
185
+ let videoWidth = width;
186
+ const ovalRatio = widthSeed * ratioMultiplier;
187
+ const minOvalCenterX = Math.floor((7 * width) / 16);
188
+ const maxOvalCenterX = Math.floor((9 * width) / 16);
189
+ const minOvalCenterY = Math.floor((7 * height) / 16);
190
+ const maxOvalCenterY = Math.floor((9 * height) / 16);
191
+ const centerX = getScaledValueFromRandomSeed(centerXSeed, minOvalCenterX, maxOvalCenterX);
192
+ const centerY = getScaledValueFromRandomSeed(centerYSeed, minOvalCenterY, maxOvalCenterY);
193
+ if (width >= height) {
194
+ videoWidth = (3 / 4) * videoHeight;
195
+ }
196
+ const ovalWidth = ovalRatio * videoWidth;
197
+ const ovalHeight = 1.618 * ovalWidth;
198
+ return {
199
+ flippedCenterX: Math.floor(videoWidth - centerX),
200
+ centerX: Math.floor(centerX),
201
+ centerY: Math.floor(centerY),
202
+ width: Math.floor(ovalWidth),
203
+ height: Math.floor(ovalHeight),
204
+ };
205
+ }
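Tracing the defaults through this function with an assumed 480x640 portrait stream (the values follow directly from the math above):

    const oval = getStaticLivenessOvalDetails({ width: 480, height: 640 });
    // centerX = 240 and centerY = 320 (both center seeds default to 0.5, so dead center),
    // ovalWidth = 0.8 * 480 = 384, ovalHeight = floor(1.618 * 384) = 621,
    // flippedCenterX = 480 - 240 = 240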
206
+ function drawStaticOval(canvasEl, videoEl, videoMediaStream) {
207
+ const { width, height } = videoMediaStream.getTracks()[0].getSettings();
208
+ // Get width/height of video element so we can compute scaleFactor
209
+ // and set canvas width/height.
210
+ const { width: videoScaledWidth, height: videoScaledHeight } = videoEl.getBoundingClientRect();
211
+ canvasEl.width = Math.ceil(videoScaledWidth);
212
+ canvasEl.height = Math.ceil(videoScaledHeight);
213
+ const ovalDetails = getStaticLivenessOvalDetails({
214
+ width: width,
215
+ height: height,
216
+ ratioMultiplier: 0.5,
217
+ });
218
+ ovalDetails.flippedCenterX = width - ovalDetails.centerX;
219
+ // Compute scaleFactor which is how much our video element is scaled
220
+ // vs the intrinsic video resolution
221
+ const scaleFactor = videoScaledWidth / videoEl.videoWidth;
222
+ // Draw oval in canvas using ovalDetails and scaleFactor
223
+ drawLivenessOvalInCanvas({
224
+ canvas: canvasEl,
225
+ oval: ovalDetails,
226
+ scaleFactor,
227
+ videoEl: videoEl,
228
+ isStartScreen: true,
229
+ });
230
+ }
231
+ /**
232
+ * Draws the provided liveness oval on the canvas.
233
+ */
234
+ function drawLivenessOvalInCanvas({ canvas, oval, scaleFactor, videoEl, isStartScreen, }) {
235
+ const { flippedCenterX, centerY, width, height } = oval;
236
+ const { width: canvasWidth, height: canvasHeight } = canvas.getBoundingClientRect();
237
+ const ctx = canvas.getContext('2d');
238
+ if (ctx) {
239
+ ctx.restore();
240
+ ctx.clearRect(0, 0, canvasWidth, canvasHeight);
241
+ // fill the canvas with a transparent rectangle
242
+ ctx.fillStyle = isStartScreen
243
+ ? getComputedStyle(canvas).getPropertyValue('--amplify-colors-background-primary')
244
+ : '#fff';
245
+ ctx.fillRect(0, 0, canvasWidth, canvasHeight);
246
+ // On mobile our canvas is the width/height of the full screen.
247
+ // We need to calculate horizontal and vertical translation to reposition
248
+ // our canvas drawing so the oval is still placed relative to the dimensions
249
+ // of the video element.
250
+ const baseDims = { width: videoEl.videoWidth, height: videoEl.videoHeight };
251
+ const translate = {
252
+ x: (canvasWidth - baseDims.width * scaleFactor) / 2,
253
+ y: (canvasHeight - baseDims.height * scaleFactor) / 2,
254
+ };
255
+ // Set the transform to scale
256
+ ctx.setTransform(scaleFactor, 0, 0, scaleFactor, translate.x, translate.y);
257
+ // draw the oval path
258
+ ctx.beginPath();
259
+ ctx.ellipse(flippedCenterX, centerY, width / 2, height / 2, 0, 0, 2 * Math.PI);
260
+ // add stroke to the oval path
261
+ ctx.strokeStyle = getComputedStyle(canvas).getPropertyValue('--amplify-colors-border-secondary');
262
+ ctx.lineWidth = 3;
263
+ ctx.stroke();
264
+ ctx.save();
265
+ ctx.clip();
266
+ // Restore default canvas transform matrix
267
+ ctx.setTransform(1, 0, 0, 1, 0, 0);
268
+ // clear the oval content from the rectangle
269
+ ctx.clearRect(0, 0, canvasWidth, canvasHeight);
270
+ }
271
+ else {
272
+ throw new Error('Cannot find Canvas.');
273
+ }
274
+ }
275
+ /**
276
+ * Returns the state of the provided face with respect to the provided liveness oval.
277
+ */
278
+ function getFaceMatchStateInLivenessOval(face, ovalDetails, initialFaceIntersection, sessionInformation) {
279
+ let faceMatchState;
280
+ const challengeConfig = sessionInformation?.Challenge?.FaceMovementAndLightChallenge
281
+ ?.ChallengeConfig;
282
+ if (!challengeConfig ||
283
+ !challengeConfig.OvalIouThreshold ||
284
+ !challengeConfig.OvalIouHeightThreshold ||
285
+ !challengeConfig.OvalIouWidthThreshold ||
286
+ !challengeConfig.FaceIouHeightThreshold ||
287
+ !challengeConfig.FaceIouWidthThreshold) {
288
+ throw new Error('Challenge information not returned from session information.');
289
+ }
290
+ const { OvalIouThreshold, OvalIouHeightThreshold, OvalIouWidthThreshold, FaceIouHeightThreshold, FaceIouWidthThreshold, } = challengeConfig;
291
+ const faceBoundingBox = generateBboxFromLandmarks(face, ovalDetails);
292
+ const minFaceX = faceBoundingBox.left;
293
+ const maxFaceX = faceBoundingBox.right;
294
+ const minFaceY = faceBoundingBox.top;
295
+ const maxFaceY = faceBoundingBox.bottom;
296
+ const { ovalBoundingBox, minOvalX, minOvalY, maxOvalX, maxOvalY } = getOvalBoundingBox(ovalDetails);
297
+ const intersection = getIntersectionOverUnion(faceBoundingBox, ovalBoundingBox);
298
+ const intersectionThreshold = OvalIouThreshold;
299
+ const ovalMatchWidthThreshold = ovalDetails.width * OvalIouWidthThreshold;
300
+ const ovalMatchHeightThreshold = ovalDetails.height * OvalIouHeightThreshold;
301
+ const faceDetectionWidthThreshold = ovalDetails.width * FaceIouWidthThreshold;
302
+ const faceDetectionHeightThreshold = ovalDetails.height * FaceIouHeightThreshold;
303
+ /** From Science
304
+ * p = max(min(1, 0.75 * (si - s0) / (st - s0) + 0.25), 0), where si is the current face/oval IoU, s0 the initial IoU, and st the match threshold
305
+ */
306
+ const faceMatchPercentage = Math.max(Math.min(1, (0.75 * (intersection - initialFaceIntersection)) /
307
+ (intersectionThreshold - initialFaceIntersection) +
308
+ 0.25), 0) * 100;
309
+ if (intersection > intersectionThreshold &&
310
+ Math.abs(minOvalX - minFaceX) < ovalMatchWidthThreshold &&
311
+ Math.abs(maxOvalX - maxFaceX) < ovalMatchWidthThreshold &&
312
+ Math.abs(maxOvalY - maxFaceY) < ovalMatchHeightThreshold) {
313
+ faceMatchState = FaceMatchState.MATCHED;
314
+ }
315
+ else if (minOvalY - minFaceY > faceDetectionHeightThreshold ||
316
+ maxFaceY - maxOvalY > faceDetectionHeightThreshold ||
317
+ (minOvalX - minFaceX > faceDetectionWidthThreshold &&
318
+ maxFaceX - maxOvalX > faceDetectionWidthThreshold)) {
319
+ faceMatchState = FaceMatchState.TOO_CLOSE;
320
+ }
321
+ else {
322
+ faceMatchState = FaceMatchState.TOO_FAR;
323
+ }
324
+ return { faceMatchState, faceMatchPercentage };
325
+ }
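Plugging illustrative numbers into the percentage formula above (the IoU values are assumptions, not from the diff):

    // initial IoU s0 = 0.1, threshold st = 0.5, current IoU si = 0.3:
    // p = max(min(1, 0.75 * (0.3 - 0.1) / (0.5 - 0.1) + 0.25), 0) = 0.625
    // => faceMatchPercentage = 62.5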
326
+ function getPupilDistanceAndFaceHeight(face) {
327
+ const { leftEye, rightEye, mouth } = face;
328
+ const eyeCenter = [];
329
+ eyeCenter[0] = (leftEye[0] + rightEye[0]) / 2;
330
+ eyeCenter[1] = (leftEye[1] + rightEye[1]) / 2;
331
+ const pupilDistance = Math.sqrt((leftEye[0] - rightEye[0]) ** 2 + (leftEye[1] - rightEye[1]) ** 2);
332
+ const faceHeight = Math.sqrt((eyeCenter[0] - mouth[0]) ** 2 + (eyeCenter[1] - mouth[1]) ** 2);
333
+ return { pupilDistance, faceHeight };
334
+ }
335
+ function generateBboxFromLandmarks(face, oval) {
336
+ const { leftEye, rightEye, nose, leftEar, rightEar, top: faceTop, height: faceHeight, } = face;
337
+ const { height: ovalHeight, centerY } = oval;
338
+ const ovalTop = centerY - ovalHeight / 2;
339
+ const eyeCenter = [];
340
+ eyeCenter[0] = (leftEye[0] + rightEye[0]) / 2;
341
+ eyeCenter[1] = (leftEye[1] + rightEye[1]) / 2;
342
+ const { pupilDistance: pd, faceHeight: fh } = getPupilDistanceAndFaceHeight(face);
343
+ const alpha = 2.0, gamma = 1.8;
344
+ const ow = (alpha * pd + gamma * fh) / 2;
345
+ const oh = 1.618 * ow;
346
+ let cx;
347
+ if (eyeCenter[1] <= (ovalTop + ovalHeight) / 2) {
348
+ cx = (eyeCenter[0] + nose[0]) / 2;
349
+ }
350
+ else {
351
+ cx = eyeCenter[0];
352
+ }
353
+ const faceBottom = faceTop + faceHeight;
354
+ const top = faceBottom - oh;
355
+ const left = Math.min(cx - ow / 2, rightEar[0]);
356
+ const right = Math.max(cx + ow / 2, leftEar[0]);
357
+ return {
358
+ left: left,
359
+ top: top,
360
+ right: right,
361
+ bottom: faceBottom,
362
+ };
363
+ }
364
+ /**
365
+ * Returns the illumination state in the provided video frame.
366
+ */
367
+ function estimateIllumination(videoEl) {
368
+ const canvasEl = document.createElement('canvas');
369
+ canvasEl.width = videoEl.videoWidth;
370
+ canvasEl.height = videoEl.videoHeight;
371
+ const ctx = canvasEl.getContext('2d');
372
+ if (ctx) {
373
+ ctx.drawImage(videoEl, 0, 0, canvasEl.width, canvasEl.height);
374
+ const frame = ctx.getImageData(0, 0, canvasEl.width, canvasEl.height).data;
375
+ // build a histogram of pixel luminance with MAX_SCALE bins
376
+ const MAX_SCALE = 8;
377
+ const hist = new Array(MAX_SCALE).fill(0);
378
+ for (let i = 0; i < frame.length; i++) {
379
+ const luma = Math.round(frame[i++] * 0.2126 + frame[i++] * 0.7152 + frame[i++] * 0.0722); // Rec. 709 luma from RGB; the loop's own i++ skips the alpha byte
380
+ hist[Math.floor(luma / 32)]++; // map 0-255 luma into one of the MAX_SCALE (8) bins
381
+ }
382
+ let ind = -1, maxCount = 0;
383
+ for (let i = 0; i < MAX_SCALE; i++) {
384
+ if (hist[i] > maxCount) {
385
+ maxCount = hist[i];
386
+ ind = i;
387
+ }
388
+ }
389
+ canvasEl.remove();
390
+ if (ind === 0)
391
+ return IlluminationState.DARK;
392
+ if (ind === MAX_SCALE - 1)
393
+ return IlluminationState.BRIGHT;
394
+ return IlluminationState.NORMAL;
395
+ }
396
+ else {
397
+ throw new Error('Cannot find Video Element.');
398
+ }
399
+ }
400
+ /**
401
+ * Checks if the provided media device is a virtual camera.
402
+ * @param device
403
+ */
404
+ function isCameraDeviceVirtual(device) {
405
+ return device.label.toLowerCase().includes('virtual');
406
+ }
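A sketch of how this predicate can be combined with device enumeration (assumes an async context, and that device labels are populated, which requires a prior camera permission grant):

    const devices = await navigator.mediaDevices.enumerateDevices();
    const realCameras = devices.filter(
      (device) => device.kind === 'videoinput' && !isCameraDeviceVirtual(device)
    );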
407
+ const INITIAL_ALPHA = 0.9;
408
+ const SECONDARY_ALPHA = 0.75;
409
+ function fillFractionalContext({ ctx, prevColor, nextColor, fraction, }) {
410
+ const canvasWidth = ctx.canvas.width;
411
+ const canvasHeight = ctx.canvas.height;
412
+ ctx.fillStyle = nextColor;
413
+ ctx.fillRect(0, 0, canvasWidth, canvasHeight * fraction);
414
+ if (fraction !== 1) {
415
+ ctx.fillStyle = prevColor;
416
+ ctx.fillRect(0, canvasHeight * fraction, canvasWidth, canvasHeight * (1 - fraction));
417
+ }
418
+ }
419
+ function fillOverlayCanvasFractional({ overlayCanvas, prevColor, nextColor, videoEl, ovalDetails, heightFraction, scaleFactor, }) {
420
+ const { x: videoX, y: videoY } = videoEl.getBoundingClientRect();
421
+ const { flippedCenterX, centerY, width, height } = ovalDetails;
422
+ const updatedCenterX = flippedCenterX * scaleFactor + videoX;
423
+ const updatedCenterY = centerY * scaleFactor + videoY;
424
+ const canvasWidth = overlayCanvas.width;
425
+ const canvasHeight = overlayCanvas.height;
426
+ const ctx = overlayCanvas.getContext('2d');
427
+ if (ctx) {
428
+ // Because the canvas is set to 100% we need to manually set the width/height for the canvas to use pixel values
429
+ ctx.canvas.width = window.innerWidth;
430
+ ctx.canvas.height = window.innerHeight;
431
+ ctx.clearRect(0, 0, canvasWidth, canvasHeight);
432
+ // fill the complete canvas
433
+ fillFractionalContext({
434
+ ctx,
435
+ prevColor,
436
+ nextColor,
437
+ fraction: heightFraction,
438
+ });
439
+ // save the current state
440
+ ctx.save();
441
+ // draw the rectangle path and fill it
442
+ ctx.beginPath();
443
+ ctx.rect(0, 0, canvasWidth, canvasHeight);
444
+ ctx.clip();
445
+ ctx.clearRect(0, 0, canvasWidth, canvasHeight);
446
+ ctx.globalAlpha = INITIAL_ALPHA;
447
+ fillFractionalContext({
448
+ ctx,
449
+ prevColor,
450
+ nextColor,
451
+ fraction: heightFraction,
452
+ });
453
+ // draw the oval path and fill it
454
+ ctx.beginPath();
455
+ ctx.ellipse(updatedCenterX, updatedCenterY, (width * scaleFactor) / 2, (height * scaleFactor) / 2, 0, 0, 2 * Math.PI);
456
+ // add stroke to the oval path
457
+ ctx.strokeStyle = 'white';
458
+ ctx.lineWidth = 8;
459
+ ctx.stroke();
460
+ ctx.clip();
461
+ ctx.clearRect(0, 0, canvasWidth, canvasHeight);
462
+ ctx.globalAlpha = SECONDARY_ALPHA;
463
+ fillFractionalContext({
464
+ ctx,
465
+ prevColor,
466
+ nextColor,
467
+ fraction: heightFraction,
468
+ });
469
+ // restore the state
470
+ ctx.restore();
471
+ }
472
+ else {
473
+ throw new Error('Cannot find Overlay Canvas.');
474
+ }
475
+ }
476
+ const isClientFreshnessColorSequence = (obj) => !!obj;
477
+ function getColorsSequencesFromSessionInformation(sessionInformation) {
478
+ const colorSequenceFromSessionInfo = sessionInformation.Challenge.FaceMovementAndLightChallenge
479
+ .ColorSequences || [];
480
+ const colorSequences = colorSequenceFromSessionInfo.map(({ FreshnessColor, DownscrollDuration: downscrollDuration, FlatDisplayDuration: flatDisplayDuration, }) => {
481
+ const colorArray = FreshnessColor.RGB;
482
+ const color = `rgb(${colorArray[0]},${colorArray[1]},${colorArray[2]})`;
483
+ return typeof color !== 'undefined' &&
484
+ typeof downscrollDuration !== 'undefined' &&
485
+ typeof flatDisplayDuration !== 'undefined'
486
+ ? {
487
+ color,
488
+ downscrollDuration,
489
+ flatDisplayDuration,
490
+ }
491
+ : undefined;
492
+ });
493
+ return colorSequences.filter(isClientFreshnessColorSequence);
494
+ }
495
+ function getRGBArrayFromColorString(colorStr) {
496
+ return colorStr
497
+ .slice(colorStr.indexOf('(') + 1, colorStr.indexOf(')'))
498
+ .split(',')
499
+ .map((str) => parseInt(str));
500
+ }
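The parser simply slices the text between the parentheses, so for example:

    getRGBArrayFromColorString('rgb(255,128,0)'); // => [255, 128, 0]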
501
+ async function getFaceMatchState(faceDetector, videoEl) {
502
+ const detectedFaces = await faceDetector.detectFaces(videoEl);
503
+ let faceMatchState;
504
+ switch (detectedFaces.length) {
505
+ case 0: {
506
+ // no face detected
507
+ faceMatchState = FaceMatchState.CANT_IDENTIFY;
508
+ break;
509
+ }
510
+ case 1: {
511
+ // exactly one face detected; match face with oval
512
+ faceMatchState = FaceMatchState.FACE_IDENTIFIED;
513
+ break;
514
+ }
515
+ default: {
516
+ // more than one face detected
517
+ faceMatchState = FaceMatchState.TOO_MANY;
518
+ break;
519
+ }
520
+ }
521
+ return faceMatchState;
522
+ }
523
+ async function isFaceDistanceBelowThreshold({ faceDetector, videoEl, ovalDetails, reduceThreshold = false, isMobile = false, }) {
524
+ const detectedFaces = await faceDetector.detectFaces(videoEl);
525
+ let detectedFace;
526
+ let isDistanceBelowThreshold = false;
527
+ let error;
528
+ switch (detectedFaces.length) {
529
+ case 0: {
530
+ // no face detected
531
+ error = LivenessErrorState.FACE_DISTANCE_ERROR;
532
+ break;
533
+ }
534
+ case 1: {
535
+ // exactly one face detected; match face with oval
536
+ detectedFace = detectedFaces[0];
537
+ const width = ovalDetails.width;
538
+ const { pupilDistance, faceHeight } = getPupilDistanceAndFaceHeight(detectedFace);
539
+ const alpha = 2.0, gamma = 1.8;
540
+ const calibratedPupilDistance = (alpha * pupilDistance + gamma * faceHeight) / 2 / alpha;
541
+ if (width) {
542
+ isDistanceBelowThreshold =
543
+ calibratedPupilDistance / width <
544
+ (!reduceThreshold
545
+ ? FACE_DISTANCE_THRESHOLD
546
+ : isMobile
547
+ ? REDUCED_THRESHOLD_MOBILE
548
+ : REDUCED_THRESHOLD);
549
+ if (!isDistanceBelowThreshold) {
550
+ error = LivenessErrorState.FACE_DISTANCE_ERROR;
551
+ }
552
+ }
553
+ break;
554
+ }
555
+ default: {
556
+ // more than one face detected
557
+ error = LivenessErrorState.MULTIPLE_FACES_ERROR;
558
+ break;
559
+ }
560
+ }
561
+ return { isDistanceBelowThreshold, error };
562
+ }
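Working the distance check with assumed landmark measurements (illustrative numbers only):

    // pupilDistance = 60, faceHeight = 100, ovalDetails.width = 384:
    // calibratedPupilDistance = (2.0 * 60 + 1.8 * 100) / 2 / 2.0 = 75
    // 75 / 384 ≈ 0.195 < FACE_DISTANCE_THRESHOLD (0.32),
    // so isDistanceBelowThreshold is true and no error is set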
563
+ function getBoundingBox({ deviceHeight, deviceWidth, height, width, top, left, }) {
564
+ return {
565
+ Height: height / deviceHeight,
566
+ Width: width / deviceWidth,
567
+ Top: top / deviceHeight,
568
+ Left: left / deviceWidth,
569
+ };
570
+ }
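For example, normalizing an assumed face box against a 480x640 frame:

    getBoundingBox({
      deviceWidth: 480, deviceHeight: 640,
      width: 120, height: 192, top: 160, left: 180,
    });
    // => { Height: 0.3, Width: 0.25, Top: 0.25, Left: 0.375 }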
571
+
572
+ /**
573
+ * Checks whether WebAssembly is supported in the current environment.
574
+ */
575
+ function isWebAssemblySupported() {
576
+ try {
577
+ return (!!window.WebAssembly &&
578
+ (!!window.WebAssembly.compile || !!window.WebAssembly.compileStreaming));
579
+ }
580
+ catch (e) {
581
+ return false;
582
+ }
583
+ }
584
+
585
+ const BLAZEFACE_VERSION = '1.0.2';
586
+ /**
587
+ * WARNING: When updating these links,
588
+ * also make sure to update documentation and the link in the canary/e2e test "canary/e2e/features/liveness/face-detect.feature"
589
+ */
590
+ const DEFAULT_BLAZEFACE_URL = `https://cdn.liveness.rekognition.amazonaws.com/face-detection/tensorflow-models/blazeface/${BLAZEFACE_VERSION}/model/model.json`;
591
+ const DEFAULT_TFJS_WASM_URL = `https://cdn.liveness.rekognition.amazonaws.com/face-detection/tensorflow/tfjs-backend-wasm/${tfjsBackendWasm.version_wasm}/`;
592
+ /**
593
+ * The BlazeFace implementation of the FaceDetection interface.
594
+ */
595
+ class BlazeFaceFaceDetection extends FaceDetection {
596
+ constructor(binaryPath, faceModelUrl) {
597
+ super();
598
+ this.faceModelUrl = faceModelUrl ?? DEFAULT_BLAZEFACE_URL;
599
+ this.binaryPath = binaryPath ?? DEFAULT_TFJS_WASM_URL;
600
+ }
601
+ async loadModels() {
602
+ if (isWebAssemblySupported()) {
603
+ await this._loadWebAssemblyBackend();
604
+ }
605
+ else {
606
+ await this._loadCPUBackend();
607
+ }
608
+ try {
609
+ await tfjsCore.ready();
610
+ this._model = await faceDetection.createDetector(faceDetection.SupportedModels.MediaPipeFaceDetector, {
611
+ runtime: 'tfjs',
612
+ detectorModelUrl: this.faceModelUrl,
613
+ });
614
+ }
615
+ catch (e) {
616
+ throw new Error('There was an error loading the blazeface model. If you are using a custom blazeface model url ensure that it is a fully qualified url that returns a json file.');
617
+ }
618
+ }
619
+ async detectFaces(videoEl) {
620
+ const flipHorizontal = true;
621
+ const predictions = await this._model.estimateFaces(videoEl, {
622
+ flipHorizontal,
623
+ });
624
+ const timestampMs = Date.now();
625
+ const faces = predictions.map((prediction) => {
626
+ const { box, keypoints } = prediction;
627
+ const { xMin: left, yMin: top, width, height } = box;
628
+ const rightEye = this._getCoordinate(keypoints, 'rightEye');
629
+ const leftEye = this._getCoordinate(keypoints, 'leftEye');
630
+ const nose = this._getCoordinate(keypoints, 'noseTip');
631
+ const mouth = this._getCoordinate(keypoints, 'mouthCenter');
632
+ const rightEar = this._getCoordinate(keypoints, 'rightEarTragion');
633
+ const leftEar = this._getCoordinate(keypoints, 'leftEarTragion');
634
+ const probability = [90];
635
+ return {
636
+ top,
637
+ left,
638
+ width,
639
+ height,
640
+ timestampMs,
641
+ probability: probability[0],
642
+ rightEye,
643
+ leftEye,
644
+ mouth,
645
+ nose,
646
+ rightEar,
647
+ leftEar,
648
+ };
649
+ });
650
+ return faces;
651
+ }
652
+ _getCoordinate(keypoints, name) {
653
+ const keypoint = keypoints.find((k) => k.name === name);
654
+ return [keypoint.x, keypoint.y];
655
+ }
656
+ async _loadWebAssemblyBackend() {
657
+ try {
658
+ tfjsBackendWasm.setWasmPaths(this.binaryPath);
659
+ await utils.jitteredExponentialRetry(async () => {
660
+ const success = await tfjsCore.setBackend('wasm');
661
+ if (!success) {
662
+ throw new Error(`Initialization of backend wasm failed`);
663
+ }
664
+ }, []);
665
+ this.modelBackend = 'wasm';
666
+ }
667
+ catch (e) {
668
+ throw new Error('There was an error loading the TFJS WASM backend. If you are using a custom WASM path ensure that it ends with "/" and that it is not the full URL as @tensorflow/tfjs-backend-wasm will append the wasm binary file name. Read more: https://github.com/tensorflow/tfjs/blob/master/tfjs-backend-wasm/src/backend_wasm.ts#L475.');
669
+ }
670
+ }
671
+ async _loadCPUBackend() {
672
+ await tfjsCore.setBackend('cpu');
673
+ this.modelBackend = 'cpu';
674
+ }
675
+ }
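A sketch of the detector lifecycle (assumes a playing <video> element; with no constructor arguments the default CDN model and wasm URLs above are used):

    const detector = new BlazeFaceFaceDetection();
    detector.triggerModelLoading(); // kicks off loadModels() via the FaceDetection base class
    await detector.modelLoadingPromise; // wasm backend when supported, cpu otherwise
    const faces = await detector.detectFaces(videoEl);
    // each face: { top, left, width, height, leftEye, rightEye, nose, mouth, leftEar, rightEar, ... }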
676
+
677
+ /**
678
+ * Helper wrapper class over the native MediaRecorder.
679
+ */
680
+ class VideoRecorder {
681
+ constructor(stream, options = {}) {
682
+ if (typeof MediaRecorder === 'undefined') {
683
+ throw Error('MediaRecorder is not supported by this browser');
684
+ }
685
+ this._stream = stream;
686
+ this._options = options;
687
+ this._chunks = [];
688
+ this._recorder = new MediaRecorder(stream, { bitsPerSecond: 1000000 });
689
+ this._setupCallbacks();
690
+ }
691
+ getState() {
692
+ return this._recorder.state;
693
+ }
694
+ start(timeSlice) {
695
+ this.clearRecordedData();
696
+ this.recordingStartApiTimestamp = Date.now();
697
+ this._recorder.start(timeSlice);
698
+ }
699
+ async stop() {
700
+ if (this.getState() === 'recording') {
701
+ this._recorder.stop();
702
+ }
703
+ return this._recorderStopped;
704
+ }
705
+ pause() {
706
+ this._recorder.pause();
707
+ }
708
+ clearRecordedData() {
709
+ this._chunks = [];
710
+ }
711
+ dispatch(event) {
712
+ this._recorder.dispatchEvent(event);
713
+ }
714
+ getVideoChunkSize() {
715
+ return this._chunks.length;
716
+ }
717
+ _setupCallbacks() {
718
+ // Creates a ReadableStream of video chunks. Waits to receive a clientSessionInfo event before pushing
719
+ // a livenessActionDocument to the ReadableStream and finally closing the ReadableStream
720
+ this.videoStream = new ReadableStream({
721
+ start: (controller) => {
722
+ if (!this._recorder) {
723
+ return;
724
+ }
725
+ this._recorder.ondataavailable = (e) => {
726
+ if (e.data && e.data.size > 0) {
727
+ if (this._chunks.length === 0) {
728
+ this.firstChunkTimestamp = Date.now();
729
+ }
730
+ this._chunks.push(e.data);
731
+ controller.enqueue(e.data);
732
+ }
733
+ };
734
+ this._recorder.addEventListener('clientSesssionInfo', (e) => {
735
+ // eslint-disable-next-line @typescript-eslint/no-unsafe-argument, @typescript-eslint/no-unsafe-member-access
736
+ controller.enqueue(e.data.clientInfo);
737
+ });
738
+ this._recorder.addEventListener('stopVideo', () => {
739
+ controller.enqueue('stopVideo');
740
+ });
741
+ this._recorder.addEventListener('endStream', () => {
742
+ controller.close();
743
+ });
744
+ this._recorder.addEventListener('endStreamWithCode', (e) => {
745
+ // eslint-disable-next-line @typescript-eslint/no-unsafe-argument, @typescript-eslint/no-unsafe-member-access
746
+ controller.enqueue({
747
+ type: 'endStreamWithCode',
748
+ // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
749
+ code: e.data.code,
750
+ });
751
+ });
752
+ },
753
+ });
754
+ this.recorderStarted = new Promise((resolve) => {
755
+ this._recorder.onstart = () => {
756
+ this.recorderStartTimestamp = Date.now();
757
+ resolve();
758
+ };
759
+ });
760
+ this._recorderStopped = new Promise((resolve) => {
761
+ this._recorder.onstop = () => {
762
+ this.recorderEndTimestamp = Date.now();
763
+ resolve();
764
+ };
765
+ });
766
+ this._recorder.onerror = () => {
767
+ if (this.getState() !== 'stopped') {
768
+ this.stop();
769
+ }
770
+ };
771
+ }
772
+ }
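A usage sketch (assumes camera permission is granted; the 1000 ms time slice mirrors the TIME_SLICE constant that LivenessStreamProvider passes in below):

    const stream = await navigator.mediaDevices.getUserMedia({ video: true });
    const recorder = new VideoRecorder(stream);
    recorder.start(1000); // one chunk per second arrives via ondataavailable
    // ...record for a while, then:
    await recorder.stop(); // resolves once the recorder's onstop fires
    console.log(recorder.getVideoChunkSize()); // number of buffered chunks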
773
+
774
+ const VERSION = '3.0.0';
775
+
776
+ const BASE_USER_AGENT = `ui-react-liveness/${VERSION}`;
777
+ const getLivenessUserAgent = () => {
778
+ return BASE_USER_AGENT;
779
+ };
780
+
781
+ /**
782
+ * Note: This file was copied from https://github.com/aws/aws-sdk-js-v3/blob/main/packages/middleware-websocket/src/websocket-fetch-handler.ts#L176
783
+ * Because of this the file is not fully typed at this time but we should eventually work on fully typing this file.
784
+ */
785
+ const DEFAULT_WS_CONNECTION_TIMEOUT_MS = 2000;
786
+ const isWebSocketRequest = (request) => request.protocol === 'ws:' || request.protocol === 'wss:';
787
+ const isReadableStream = (payload) => typeof ReadableStream === 'function' && payload instanceof ReadableStream;
788
+ /**
789
+ * Transfer payload data to an AsyncIterable.
790
+ * When the ReadableStream API is available in the runtime (e.g. browser) and
791
+ * the request body is a ReadableStream, we need to transfer it to an AsyncIterable
792
+ * to make the stream consumable by WebSocket.
793
+ */
794
+ const getIterator = (stream) => {
795
+ // Noop if stream is already an async iterable
796
+ if (stream[Symbol.asyncIterator]) {
797
+ return stream;
798
+ }
799
+ if (isReadableStream(stream)) {
800
+ // If the stream is a ReadableStream, transfer it to an async iterable.
801
+ return eventstreamSerdeBrowser.readableStreamtoIterable(stream);
802
+ }
803
+ // For other types, just wrap them with an async iterable.
804
+ return {
805
+ [Symbol.asyncIterator]: async function* () {
806
+ yield stream;
807
+ },
808
+ };
809
+ };
810
+ /**
811
+ * Convert async iterable to a ReadableStream when ReadableStream API
812
+ * is available (browsers). Otherwise, leave it as is (React Native).
813
+ */
814
+ const toReadableStream = (asyncIterable) => typeof ReadableStream === 'function'
815
+ ? eventstreamSerdeBrowser.iterableToReadableStream(asyncIterable)
816
+ : asyncIterable;
817
+ /**
818
+ * Base handler for WebSocket requests and HTTP requests. By default, the request input and output
819
+ * body will be in a ReadableStream, because of interface consistency among middleware.
820
+ * If ReadableStream is not available, like in React-Native, the response body
821
+ * will be an async iterable.
822
+ */
823
+ class CustomWebSocketFetchHandler {
824
+ constructor(options, httpHandler = new fetchHttpHandler.FetchHttpHandler()) {
825
+ this.metadata = {
826
+ handlerProtocol: 'websocket/h1.1',
827
+ };
828
+ this.sockets = {};
829
+ this.utf8decoder = new TextDecoder(); // default 'utf-8' or 'utf8'
830
+ this.httpHandler = httpHandler;
831
+ if (typeof options === 'function') {
832
+ this.configPromise = options().then((opts) => opts ?? {});
833
+ }
834
+ else {
835
+ this.configPromise = Promise.resolve(options ?? {});
836
+ }
837
+ }
838
+ /**
839
+ * Destroys the WebSocketHandler.
840
+ * Closes all sockets from the socket pool.
841
+ */
842
+ destroy() {
843
+ for (const [key, sockets] of Object.entries(this.sockets)) {
844
+ for (const socket of sockets) {
845
+ socket.close(1000, `Socket closed through destroy() call`);
846
+ }
847
+ delete this.sockets[key];
848
+ }
849
+ }
850
+ async handle(request) {
851
+ if (!isWebSocketRequest(request)) {
852
+ return this.httpHandler.handle(request);
853
+ }
854
+ const url = utilFormatUrl.formatUrl(request);
855
+ const socket = new WebSocket(url);
856
+ // Add socket to sockets pool
857
+ if (!this.sockets[url]) {
858
+ this.sockets[url] = [];
859
+ }
860
+ this.sockets[url].push(socket);
861
+ socket.binaryType = 'arraybuffer';
862
+ const { connectionTimeout = DEFAULT_WS_CONNECTION_TIMEOUT_MS } = await this
863
+ .configPromise;
864
+ await this.waitForReady(socket, connectionTimeout);
865
+ const { body } = request;
866
+ const bodyStream = getIterator(body);
867
+ const asyncIterable = this.connect(socket, bodyStream);
868
+ const outputPayload = toReadableStream(asyncIterable);
869
+ return {
870
+ response: new protocolHttp.HttpResponse({
871
+ statusCode: 200,
872
+ body: outputPayload,
873
+ }),
874
+ };
875
+ }
876
+ /**
877
+ * Removes all closing/closed sockets from the socket pool for the given URL.
878
+ */
879
+ removeNotUsableSockets(url) {
880
+ this.sockets[url] = (this.sockets[url] ?? []).filter((socket) => ![WebSocket.CLOSING, WebSocket.CLOSED].includes(socket.readyState));
881
+ }
882
+ waitForReady(socket, connectionTimeout) {
883
+ return new Promise((resolve, reject) => {
884
+ const timeout = setTimeout(() => {
885
+ this.removeNotUsableSockets(socket.url);
886
+ reject({
887
+ $metadata: {
888
+ httpStatusCode: 500,
889
+ },
890
+ });
891
+ }, connectionTimeout);
892
+ socket.onopen = () => {
893
+ clearTimeout(timeout);
894
+ resolve();
895
+ };
896
+ });
897
+ }
898
+ connect(socket, data) {
899
+ // To notify output stream any error thrown after response
900
+ // is returned while data keeps streaming.
901
+ let streamError = undefined;
902
+ // To notify onclose event that error has occurred.
903
+ let socketErrorOccurred = false;
904
+ // initialize as no-op.
905
+ let reject = () => { };
906
+ let resolve = () => { };
907
+ socket.onmessage = (event) => {
908
+ resolve({
909
+ done: false,
910
+ value: new Uint8Array(event.data),
911
+ });
912
+ };
913
+ socket.onerror = (error) => {
914
+ socketErrorOccurred = true;
915
+ socket.close();
916
+ reject(error);
917
+ };
918
+ socket.onclose = () => {
919
+ this.removeNotUsableSockets(socket.url);
920
+ if (socketErrorOccurred)
921
+ return;
922
+ if (streamError) {
923
+ reject(streamError);
924
+ }
925
+ else {
926
+ resolve({
927
+ done: true,
928
+ value: undefined, // unchecked because done=true.
929
+ });
930
+ }
931
+ };
932
+ const outputStream = {
933
+ [Symbol.asyncIterator]: () => ({
934
+ next: () => {
935
+ return new Promise((_resolve, _reject) => {
936
+ resolve = _resolve;
937
+ reject = _reject;
938
+ });
939
+ },
940
+ }),
941
+ };
942
+ const send = async () => {
943
+ try {
944
+ for await (const inputChunk of data) {
945
+ const decodedString = this.utf8decoder.decode(inputChunk);
946
+ if (decodedString.includes('closeCode')) {
947
+ const match = decodedString.match(/"closeCode":([0-9]*)/);
948
+ if (match) {
949
+ const closeCode = match[1];
950
+ socket.close(parseInt(closeCode));
951
+ }
952
+ continue;
953
+ }
954
+ socket.send(inputChunk);
955
+ }
956
+ }
957
+ catch (err) {
958
+ // We don't throw the error here because the send()'s returned
959
+ // would already be settled by the time sending chunk throws error.
960
+ // Instead, the notify the output stream to throw if there's
961
+ // exceptions
962
+ streamError = err;
963
+ }
964
+ finally {
965
+ // WS status code: https://tools.ietf.org/html/rfc6455#section-7.4
966
+ socket.close(WS_CLOSURE_CODE.SUCCESS_CODE);
967
+ }
968
+ };
969
+ send();
970
+ return outputStream;
971
+ }
972
+ }
973
+
974
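Note the in-band close protocol in `send()` above: a chunk whose decoded text contains a `closeCode` field closes the socket with that code instead of being sent. A minimal sketch of what a caller-side byte stream might look like (names here are illustrative, not part of the API):

```js
// A chunk containing `"closeCode":<n>` closes the socket with code n rather
// than being transmitted as a frame.
const encoder = new TextEncoder();

async function* requestBody() {
  yield encoder.encode('hello');              // sent as a normal frame
  yield encoder.encode('{"closeCode":4001}'); // closes the socket with 4001
}

// Feeding requestBody() into connect() would send one frame and then close
// with code 4001; the regex match extracts "4001" as a string, which is why
// parseInt() is applied before socket.close().
```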
+ const TIME_SLICE = 1000;
+ function isBlob(obj) {
+ return obj.arrayBuffer !== undefined;
+ }
+ function isClientSessionInformationEvent(obj) {
+ return obj.Challenge !== undefined;
+ }
+ function isEndStreamWithCodeEvent(obj) {
+ return obj.code !== undefined;
+ }
+ class LivenessStreamProvider {
+ constructor({ sessionId, region, stream, videoEl, credentialProvider, endpointOverride, }) {
+ this.sessionId = sessionId;
+ this.region = region;
+ this._stream = stream;
+ this.videoEl = videoEl;
+ this.videoRecorder = new VideoRecorder(stream);
+ this.credentialProvider = credentialProvider;
+ this.endpointOverride = endpointOverride;
+ this.initPromise = this.init();
+ }
+ async getResponseStream() {
+ await this.initPromise;
+ return this.responseStream;
+ }
+ startRecordingLivenessVideo() {
+ this.videoRecorder.start(TIME_SLICE);
+ }
+ sendClientInfo(clientInfo) {
+ this.videoRecorder.dispatch(new MessageEvent('clientSesssionInfo', {
+ data: { clientInfo },
+ }));
+ }
+ async stopVideo() {
+ await this.videoRecorder.stop();
+ }
+ dispatchStopVideoEvent() {
+ this.videoRecorder.dispatch(new Event('stopVideo'));
+ }
+ async endStreamWithCode(code) {
+ if (this.videoRecorder.getState() === 'recording') {
+ await this.stopVideo();
+ }
+ this.videoRecorder.dispatch(new MessageEvent('endStreamWithCode', {
+ data: { code: code },
+ }));
+ return;
+ }
+ async init() {
+ const credentials = this.credentialProvider ?? (await auth.fetchAuthSession()).credentials;
+ if (!credentials) {
+ throw new Error('No credentials');
+ }
+ const clientconfig = {
+ credentials,
+ region: this.region,
+ customUserAgent: `${utils.getAmplifyUserAgent()} ${getLivenessUserAgent()}`,
+ requestHandler: new CustomWebSocketFetchHandler({
+ connectionTimeout: 10000,
+ }),
+ };
+ if (this.endpointOverride) {
+ const override = this.endpointOverride;
+ clientconfig.endpointProvider = () => {
+ const url = new URL(override);
+ return { url };
+ };
+ }
+ this._client = new clientRekognitionstreaming.RekognitionStreamingClient(clientconfig);
+ this.responseStream = await this.startLivenessVideoConnection();
+ }
+ // Creates a generator from a stream of video chunks and livenessActionDocuments and yields VideoEvents and ClientEvents
+ getAsyncGeneratorFromReadableStream(stream) {
+ // eslint-disable-next-line @typescript-eslint/no-this-alias
+ const current = this;
+ this._reader = stream.getReader();
+ return async function* () {
+ while (true) {
+ // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
+ const { done, value } = await current._reader.read();
+ if (done) {
+ return;
+ }
+ // Video chunk blobs should be sent as video events
+ if (value === 'stopVideo') {
+ // sending an empty video chunk signals that we have ended sending video
+ yield {
+ VideoEvent: {
+ VideoChunk: [],
+ TimestampMillis: Date.now(),
+ },
+ };
+ }
+ else if (isBlob(value)) {
+ const buffer = await value.arrayBuffer();
+ const chunk = new Uint8Array(buffer);
+ if (chunk.length > 0) {
+ yield {
+ VideoEvent: {
+ VideoChunk: chunk,
+ TimestampMillis: Date.now(),
+ },
+ };
+ }
+ }
+ else if (isClientSessionInformationEvent(value)) {
+ yield {
+ ClientSessionInformationEvent: {
+ Challenge: value.Challenge,
+ },
+ };
+ }
+ else if (isEndStreamWithCodeEvent(value)) {
+ yield {
+ VideoEvent: {
+ VideoChunk: [],
+ TimestampMillis: { closeCode: value.code },
+ },
+ };
+ }
+ }
+ };
+ }
+ async startLivenessVideoConnection() {
+ const livenessRequestGenerator = this.getAsyncGeneratorFromReadableStream(this.videoRecorder.videoStream)();
+ const response = await this._client.send(new clientRekognitionstreaming.StartFaceLivenessSessionCommand({
+ ChallengeVersions: 'FaceMovementAndLightChallenge_1.0.0',
+ SessionId: this.sessionId,
+ LivenessRequestStream: livenessRequestGenerator,
+ VideoWidth: this.videoEl.videoWidth.toString(),
+ VideoHeight: this.videoEl.videoHeight.toString(),
+ }));
+ return response.LivenessResponseStream;
+ }
+ }
+ 
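A rough usage sketch of the provider, assuming a liveness session has already been created server-side and that `cameraStream`/`videoElement` come from the host application:

```js
// Hypothetical wiring; sessionId/region/stream/videoEl must come from your
// own app state and a getUserMedia() call.
async function startLiveness(cameraStream, videoElement) {
  const provider = new LivenessStreamProvider({
    sessionId: 'session-id-from-create-liveness-session', // placeholder
    region: 'us-east-1',
    stream: cameraStream,   // a MediaStream from getUserMedia()
    videoEl: videoElement,  // the <video> element rendering that stream
  });

  const responseStream = await provider.getResponseStream();
  provider.startRecordingLivenessVideo();
  for await (const event of responseStream) {
    // events include ServerSessionInformationEvent, DisconnectionEvent, etc.
  }
}
```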
+ const TICK_RATE = 10; // ms -- the rate at which we will render/check colors
+ var COLOR_STAGE;
+ (function (COLOR_STAGE) {
+ COLOR_STAGE["SCROLLING"] = "SCROLLING";
+ COLOR_STAGE["FLAT"] = "FLAT";
+ })(COLOR_STAGE || (COLOR_STAGE = {}));
+ class FreshnessColorDisplay {
+ constructor(context, freshnessColorsSequence) {
+ this.context = context;
+ this.freshnessColorsSequence = freshnessColorsSequence;
+ this.isFirstTick = true;
+ }
+ async displayColorTick() {
+ return new Promise((resolve, reject) => {
+ setTimeout(() => {
+ this.displayNextColorTick(resolve, reject);
+ }, TICK_RATE);
+ });
+ }
+ init() {
+ this.stageIndex = 0;
+ this.currColorIndex = 0;
+ this.currColorSequence = this.freshnessColorsSequence[0];
+ this.prevColorSequence = this.freshnessColorsSequence[0];
+ this.stage = COLOR_STAGE.FLAT;
+ this.timeLastFlatOrScrollChange = Date.now();
+ this.timeLastFaceMatchChecked = Date.now();
+ }
+ displayNextColorTick(resolve, _) {
+ const { freshnessColorEl } = this.context.freshnessColorAssociatedParams;
+ const { ovalDetails, scaleFactor } = this.context.ovalAssociatedParams;
+ const { videoEl } = this.context.videoAssociatedParams;
+ const tickStartTime = Date.now();
+ // Send a colorStart time only for the first tick of the first color
+ if (this.isFirstTick) {
+ this.init();
+ this.isFirstTick = false;
+ this.sendColorStartTime({
+ tickStartTime: tickStartTime,
+ currColor: this.currColorSequence.color,
+ prevColor: this.currColorSequence.color,
+ currColorIndex: this.stageIndex,
+ });
+ }
+ let timeSinceLastColorChange = tickStartTime - this.timeLastFlatOrScrollChange;
+ freshnessColorEl.style.display = 'block';
+ // Every 10 ms tick, check whether the flat or scrolling duration threshold
+ // has elapsed; if so, advance to the next stage
+ if ((this.stage === COLOR_STAGE.FLAT &&
+ timeSinceLastColorChange >=
+ this.currColorSequence.flatDisplayDuration) ||
+ (this.stage === COLOR_STAGE.SCROLLING &&
+ timeSinceLastColorChange >= this.currColorSequence.downscrollDuration)) {
+ this.incrementStageIndex(tickStartTime);
+ timeSinceLastColorChange = 0;
+ }
+ // Every 10 ms tick we will update the colors displayed
+ if (this.currColorIndex < this.freshnessColorsSequence.length) {
+ const heightFraction = timeSinceLastColorChange /
+ (this.stage === COLOR_STAGE.SCROLLING
+ ? this.currColorSequence.downscrollDuration
+ : this.currColorSequence.flatDisplayDuration);
+ fillOverlayCanvasFractional({
+ overlayCanvas: freshnessColorEl,
+ prevColor: this.prevColorSequence.color,
+ nextColor: this.currColorSequence.color,
+ videoEl: videoEl,
+ ovalDetails: ovalDetails,
+ heightFraction,
+ scaleFactor: scaleFactor,
+ });
+ resolve(false);
+ }
+ else {
+ freshnessColorEl.style.display = 'none';
+ resolve(true);
+ }
+ }
+ // Stage sequence, by (prevColorIndex, currColorIndex):
+ // FLAT - prev = 0, curr = 0
+ // SCROLL - prev = 0, curr = 1
+ // FLAT - prev = 1, curr = 1
+ // SCROLL - prev = 1, curr = 2
+ // SCROLL - prev = 2, curr = 3
+ incrementStageIndex(tickStartTime) {
+ this.stageIndex += 1;
+ this.prevColorSequence = this.freshnessColorsSequence[this.currColorIndex];
+ if (this.stage === COLOR_STAGE.FLAT) {
+ this.currColorIndex += 1;
+ this.stage = COLOR_STAGE.SCROLLING;
+ }
+ else if (this.stage === COLOR_STAGE.SCROLLING) {
+ const nextFlatColor = this.freshnessColorsSequence[this.currColorIndex];
+ if (nextFlatColor.flatDisplayDuration > 0) {
+ this.stage = COLOR_STAGE.FLAT;
+ }
+ else {
+ this.stage = COLOR_STAGE.SCROLLING;
+ this.currColorIndex += 1;
+ }
+ }
+ this.currColorSequence = this.freshnessColorsSequence[this.currColorIndex];
+ this.timeLastFlatOrScrollChange = Date.now();
+ if (this.currColorSequence) {
+ this.sendColorStartTime({
+ tickStartTime: tickStartTime,
+ currColor: this.currColorSequence.color,
+ prevColor: this.prevColorSequence.color,
+ currColorIndex: this.stageIndex,
+ });
+ }
+ }
+ sendColorStartTime({ tickStartTime, currColor, prevColor, currColorIndex, }) {
+ const { livenessStreamProvider, challengeId } = this.context;
+ livenessStreamProvider.sendClientInfo({
+ Challenge: {
+ FaceMovementAndLightChallenge: {
+ ChallengeId: challengeId,
+ ColorDisplayed: {
+ CurrentColor: { RGB: getRGBArrayFromColorString(currColor) },
+ PreviousColor: { RGB: getRGBArrayFromColorString(prevColor) },
+ SequenceNumber: currColorIndex,
+ CurrentColorStartTimestamp: tickStartTime,
+ },
+ },
+ },
+ });
+ }
+ }
+ 
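`displayColorTick()` resolves `false` while a color is still being displayed and `true` once the full sequence has completed, so callers drive it in a loop; a minimal sketch, mirroring how the `flashFreshnessColors` state below re-invokes the `flashColors` service:

```js
// Illustrative driver loop for the tick-based color display.
async function runFreshnessSequence(display) {
  let complete = false;
  while (!complete) {
    // resolves false while a color is still rendering, true when done
    complete = await display.displayColorTick();
  }
}
```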
+ const isServerSessionInformationEvent = (value) => {
+ return !!value?.ServerSessionInformationEvent;
+ };
+ const isDisconnectionEvent = (value) => {
+ return !!value?.DisconnectionEvent;
+ };
+ const isValidationExceptionEvent = (value) => {
+ return !!value?.ValidationException;
+ };
+ const isInternalServerExceptionEvent = (value) => {
+ return !!value?.InternalServerException;
+ };
+ const isThrottlingExceptionEvent = (value) => {
+ return !!value?.ThrottlingException;
+ };
+ const isServiceQuotaExceededExceptionEvent = (value) => {
+ return !!value?.ServiceQuotaExceededException;
+ };
+ const isInvalidSignatureRegionException = (error) => {
+ const { message, name } = error;
+ return (name === 'InvalidSignatureException' && message.includes('valid region'));
+ };
+ 
+ const STATIC_VIDEO_CONSTRAINTS = {
+ width: {
+ min: 320,
+ ideal: 640,
+ },
+ height: {
+ min: 240,
+ ideal: 480,
+ },
+ frameRate: { min: 15, ideal: 30, max: 30 },
+ facingMode: 'user',
+ };
+ 
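These are plain `MediaTrackConstraints`; a minimal sketch of how they feed `getUserMedia` (mirroring the machine's `cameraCheck` service below):

```js
// min 320x240 @ 15fps, ideal 640x480 @ 30fps, front-facing camera.
async function openFrontCamera() {
  const stream = await navigator.mediaDevices.getUserMedia({
    video: STATIC_VIDEO_CONSTRAINTS,
    audio: false,
  });
  return stream;
}
```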
+ /* eslint-disable */
+ const MIN_FACE_MATCH_TIME = 1000;
+ const DEFAULT_FACE_FIT_TIMEOUT = 7000;
+ let responseStream;
+ const CAMERA_ID_KEY = 'AmplifyLivenessCameraId';
+ function getLastSelectedCameraId() {
+ return localStorage.getItem(CAMERA_ID_KEY);
+ }
+ function setLastSelectedCameraId(deviceId) {
+ localStorage.setItem(CAMERA_ID_KEY, deviceId);
+ }
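The selected camera is persisted under a fixed localStorage key so that later sessions can prefer the same device; a sketch of the round-trip used by `checkVirtualCameraAndGetStream` below:

```js
// A previously saved deviceId, if present, seeds the next getUserMedia call;
// the service then re-saves the chosen device via setLastSelectedCameraId().
const savedId = getLastSelectedCameraId(); // null on first run
const constraints = {
  video: { ...STATIC_VIDEO_CONSTRAINTS, ...(savedId ? { deviceId: savedId } : {}) },
  audio: false,
};
```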
+ const livenessMachine = xstate.createMachine({
+ id: 'livenessMachine',
+ initial: 'cameraCheck',
+ predictableActionArguments: true,
+ context: {
+ challengeId: nanoid.nanoid(),
+ maxFailedAttempts: 0,
+ failedAttempts: 0,
+ componentProps: undefined,
+ serverSessionInformation: undefined,
+ videoAssociatedParams: {
+ videoConstraints: STATIC_VIDEO_CONSTRAINTS,
+ selectableDevices: [],
+ },
+ ovalAssociatedParams: undefined,
+ faceMatchAssociatedParams: {
+ illuminationState: undefined,
+ faceMatchState: undefined,
+ /**
+ * faceMatchPercentage starts at a baseline of 25 so the progress bar
+ * has a visible starting point. This baseline feeds into the formula
+ * used to calculate faceMatchPercentage in
+ * getFaceMatchStateInLivenessOval.
+ */
+ faceMatchPercentage: 25,
+ currentDetectedFace: undefined,
+ startFace: undefined,
+ endFace: undefined,
+ initialFaceMatchTime: undefined,
+ },
+ freshnessColorAssociatedParams: {
+ freshnessColorEl: undefined,
+ freshnessColors: [],
+ freshnessColorsComplete: false,
+ freshnessColorDisplay: undefined,
+ },
+ errorState: undefined,
+ livenessStreamProvider: undefined,
+ responseStreamActorRef: undefined,
+ shouldDisconnect: false,
+ faceMatchStateBeforeStart: undefined,
+ isFaceFarEnoughBeforeRecording: undefined,
+ isRecordingStopped: false,
+ },
+ on: {
+ CANCEL: 'userCancel',
+ TIMEOUT: {
+ target: 'retryableTimeout',
+ actions: 'updateErrorStateForTimeout',
+ },
+ SET_SESSION_INFO: {
+ internal: true,
+ actions: 'updateSessionInfo',
+ },
+ DISCONNECT_EVENT: {
+ internal: true,
+ actions: 'updateShouldDisconnect',
+ },
+ SET_DOM_AND_CAMERA_DETAILS: {
+ actions: 'setDOMAndCameraDetails',
+ },
+ UPDATE_DEVICE_AND_STREAM: {
+ actions: 'updateDeviceAndStream',
+ },
+ SERVER_ERROR: {
+ target: 'error',
+ actions: 'updateErrorStateForServer',
+ },
+ RUNTIME_ERROR: {
+ target: 'error',
+ },
+ MOBILE_LANDSCAPE_WARNING: {
+ target: 'mobileLandscapeWarning',
+ actions: 'updateErrorStateForServer',
+ },
+ },
+ states: {
+ cameraCheck: {
+ entry: ['resetErrorState'],
+ invoke: {
+ src: 'checkVirtualCameraAndGetStream',
+ onDone: {
+ target: 'waitForDOMAndCameraDetails',
+ actions: ['updateVideoMediaStream'],
+ },
+ onError: {
+ target: 'permissionDenied',
+ },
+ },
+ },
+ waitForDOMAndCameraDetails: {
+ after: {
+ 0: {
+ target: 'start',
+ cond: 'hasDOMAndCameraDetails',
+ },
+ 10: { target: 'waitForDOMAndCameraDetails' },
+ },
+ },
+ start: {
+ entry: ['drawStaticOval', 'initializeFaceDetector'],
+ always: [
+ {
+ target: 'detectFaceBeforeStart',
+ cond: 'shouldSkipStartScreen',
+ },
+ ],
+ on: {
+ BEGIN: 'detectFaceBeforeStart',
+ },
+ },
+ detectFaceBeforeStart: {
+ invoke: {
+ src: 'detectFace',
+ onDone: {
+ target: 'checkFaceDetectedBeforeStart',
+ actions: ['updateFaceMatchBeforeStartDetails'],
+ },
+ },
+ },
+ checkFaceDetectedBeforeStart: {
+ after: {
+ 0: {
+ target: 'detectFaceDistanceBeforeRecording',
+ cond: 'hasSingleFaceBeforeStart',
+ },
+ 100: { target: 'detectFaceBeforeStart' },
+ },
+ },
+ detectFaceDistanceBeforeRecording: {
+ invoke: {
+ src: 'detectFaceDistance',
+ onDone: {
+ target: 'checkFaceDistanceBeforeRecording',
+ actions: ['updateFaceDistanceBeforeRecording'],
+ },
+ },
+ },
+ checkFaceDistanceBeforeRecording: {
+ after: {
+ 0: {
+ target: 'initializeLivenessStream',
+ cond: 'hasEnoughFaceDistanceBeforeRecording',
+ },
+ 100: { target: 'detectFaceDistanceBeforeRecording' },
+ },
+ },
+ initializeLivenessStream: {
+ invoke: {
+ src: 'openLivenessStreamConnection',
+ onDone: {
+ target: 'notRecording',
+ actions: [
+ 'updateLivenessStreamProvider',
+ 'spawnResponseStreamActor',
+ ],
+ },
+ },
+ },
+ notRecording: {
+ initial: 'waitForSessionInfo',
+ states: {
+ waitForSessionInfo: {
+ after: {
+ 0: {
+ target: '#livenessMachine.recording',
+ cond: 'hasServerSessionInfo',
+ },
+ 100: { target: 'waitForSessionInfo' },
+ },
+ },
+ },
+ },
+ recording: {
+ entry: ['clearErrorState', 'startRecording'],
+ initial: 'ovalDrawing',
+ states: {
+ ovalDrawing: {
+ entry: ['sendTimeoutAfterOvalDrawingDelay'],
+ invoke: {
+ src: 'detectInitialFaceAndDrawOval',
+ onDone: {
+ target: 'checkFaceDetected',
+ actions: [
+ 'updateOvalAndFaceDetailsPostDraw',
+ 'sendTimeoutAfterOvalMatchDelay',
+ ],
+ },
+ onError: {
+ target: '#livenessMachine.error',
+ actions: 'updateErrorStateForRuntime',
+ },
+ },
+ },
+ checkFaceDetected: {
+ after: {
+ 0: {
+ target: 'checkRecordingStarted',
+ cond: 'hasSingleFace',
+ },
+ 100: { target: 'ovalDrawing' },
+ },
+ },
+ checkRecordingStarted: {
+ after: {
+ 0: {
+ target: 'ovalMatching',
+ cond: 'hasRecordingStarted',
+ actions: ['updateRecordingStartTimestampMs'],
+ },
+ 100: { target: 'checkRecordingStarted' },
+ },
+ },
+ ovalMatching: {
+ entry: ['cancelOvalDrawingTimeout'],
+ invoke: {
+ src: 'detectFaceAndMatchOval',
+ onDone: {
+ target: 'checkMatch',
+ actions: 'updateFaceDetailsPostMatch',
+ },
+ },
+ },
+ checkMatch: {
+ after: {
+ 0: {
+ target: 'flashFreshnessColors',
+ cond: 'hasFaceMatchedInOvalWithMinTime',
+ actions: [
+ 'updateEndFaceMatch',
+ 'setupFlashFreshnessColors',
+ 'cancelOvalMatchTimeout',
+ 'cancelOvalDrawingTimeout',
+ ],
+ },
+ 0.1: {
+ target: 'ovalMatching',
+ cond: 'hasFaceMatchedInOval',
+ actions: 'setFaceMatchTimeAndStartFace',
+ },
+ 1: {
+ target: 'ovalMatching',
+ cond: 'hasNotFaceMatchedInOval',
+ },
+ },
+ },
+ flashFreshnessColors: {
+ invoke: {
+ src: 'flashColors',
+ onDone: [
+ {
+ target: 'success',
+ cond: 'hasFreshnessColorShown',
+ },
+ {
+ target: 'flashFreshnessColors',
+ actions: 'updateFreshnessDetails',
+ },
+ ],
+ },
+ },
+ success: {
+ entry: ['stopRecording'],
+ type: 'final',
+ },
+ },
+ onDone: 'uploading',
+ },
+ uploading: {
+ initial: 'pending',
+ states: {
+ pending: {
+ entry: ['sendTimeoutAfterWaitingForDisconnect', 'pauseVideoStream'],
+ invoke: {
+ src: 'stopVideo',
+ onDone: 'waitForDisconnectEvent',
+ onError: {
+ target: '#livenessMachine.error',
+ actions: 'updateErrorStateForRuntime',
+ },
+ },
+ },
+ waitForDisconnectEvent: {
+ after: {
+ 0: {
+ target: 'getLivenessResult',
+ cond: 'getShouldDisconnect',
+ },
+ 100: { target: 'waitForDisconnectEvent' },
+ },
+ },
+ getLivenessResult: {
+ entry: ['cancelWaitForDisconnectTimeout', 'freezeStream'],
+ invoke: {
+ src: 'getLiveness',
+ onError: {
+ target: '#livenessMachine.error',
+ actions: 'updateErrorStateForServer',
+ },
+ },
+ },
+ },
+ },
+ retryableTimeout: {
+ entry: 'updateFailedAttempts',
+ always: [
+ {
+ target: 'timeout',
+ cond: 'shouldTimeoutOnFailedAttempts',
+ },
+ { target: 'notRecording' },
+ ],
+ },
+ permissionDenied: {
+ entry: 'callUserPermissionDeniedCallback',
+ on: {
+ RETRY_CAMERA_CHECK: 'cameraCheck',
+ },
+ },
+ mobileLandscapeWarning: {
+ entry: 'callMobileLandscapeWarningCallback',
+ always: [{ target: 'error' }],
+ },
+ timeout: {
+ entry: ['cleanUpResources', 'callUserTimeoutCallback', 'freezeStream'],
+ },
+ error: {
+ entry: [
+ 'cleanUpResources',
+ 'callErrorCallback',
+ 'cancelOvalDrawingTimeout',
+ 'cancelWaitForDisconnectTimeout',
+ 'cancelOvalMatchTimeout',
+ 'freezeStream',
+ ],
+ },
+ userCancel: {
+ entry: ['cleanUpResources', 'callUserCancelCallback', 'resetContext'],
+ always: [{ target: 'cameraCheck' }],
+ },
+ },
+ }, {
+ actions: {
+ spawnResponseStreamActor: xstate.assign({
+ responseStreamActorRef: () => xstate.spawn(responseStreamActor),
+ }),
+ updateFailedAttempts: xstate.assign({
+ failedAttempts: (context) => {
+ return context.failedAttempts + 1;
+ },
+ }),
+ updateVideoMediaStream: xstate.assign({
+ videoAssociatedParams: (context, event) => ({
+ ...context.videoAssociatedParams,
+ videoMediaStream: event.data?.stream,
+ selectedDeviceId: event.data?.selectedDeviceId,
+ selectableDevices: event.data?.selectableDevices,
+ }),
+ }),
+ initializeFaceDetector: xstate.assign({
+ ovalAssociatedParams: (context) => {
+ const { componentProps } = context;
+ const { faceModelUrl, binaryPath } = componentProps.config;
+ const faceDetector = new BlazeFaceFaceDetection(binaryPath, faceModelUrl);
+ faceDetector.triggerModelLoading();
+ return {
+ ...context.ovalAssociatedParams,
+ faceDetector,
+ };
+ },
+ }),
+ updateLivenessStreamProvider: xstate.assign({
+ livenessStreamProvider: (context, event) => {
+ return event.data?.livenessStreamProvider;
+ },
+ }),
+ setDOMAndCameraDetails: xstate.assign({
+ videoAssociatedParams: (context, event) => {
+ return {
+ ...context.videoAssociatedParams,
+ videoEl: event.data?.videoEl,
+ canvasEl: event.data?.canvasEl,
+ isMobile: event.data?.isMobile,
+ };
+ },
+ freshnessColorAssociatedParams: (context, event) => ({
+ ...context.freshnessColorAssociatedParams,
+ freshnessColorEl: event.data?.freshnessColorEl,
+ }),
+ }),
+ updateDeviceAndStream: xstate.assign({
+ videoAssociatedParams: (context, event) => {
+ setLastSelectedCameraId(event.data?.newDeviceId);
+ return {
+ ...context.videoAssociatedParams,
+ selectedDeviceId: event.data?.newDeviceId,
+ videoMediaStream: event.data?.newStream,
+ };
+ },
+ }),
+ drawStaticOval: (context) => {
+ const { canvasEl, videoEl, videoMediaStream } = context.videoAssociatedParams;
+ drawStaticOval(canvasEl, videoEl, videoMediaStream);
+ },
+ updateRecordingStartTimestampMs: xstate.assign({
+ videoAssociatedParams: (context) => {
+ const { challengeId, videoAssociatedParams, ovalAssociatedParams, livenessStreamProvider, } = context;
+ const { recordingStartApiTimestamp, recorderStartTimestamp } = livenessStreamProvider.videoRecorder;
+ const { videoMediaStream } = videoAssociatedParams;
+ const { initialFace } = ovalAssociatedParams;
+ /**
+ * This calculation was provided by the science team after analysis of the
+ * unreliable .onstart() (recorderStartTimestamp) timestamp that is
+ * returned from mediaRecorder.
+ */
+ const timestamp = Math.round(0.73 * (recorderStartTimestamp - recordingStartApiTimestamp) +
+ recordingStartApiTimestamp);
+ // Send client info for initial face position
+ const { width, height } = videoMediaStream
+ .getTracks()[0]
+ .getSettings();
+ const flippedInitialFaceLeft = width - initialFace.left - initialFace.width;
+ context.livenessStreamProvider.sendClientInfo({
+ Challenge: {
+ FaceMovementAndLightChallenge: {
+ ChallengeId: challengeId,
+ VideoStartTimestamp: timestamp,
+ InitialFace: {
+ InitialFaceDetectedTimestamp: initialFace.timestampMs,
+ BoundingBox: getBoundingBox({
+ deviceHeight: height,
+ deviceWidth: width,
+ height: initialFace.height,
+ width: initialFace.width,
+ top: initialFace.top,
+ left: flippedInitialFaceLeft,
+ }),
+ },
+ },
+ },
+ });
+ return {
+ ...context.videoAssociatedParams,
+ recordingStartTimestampMs: timestamp,
+ };
+ },
+ }),
+ startRecording: xstate.assign({
+ videoAssociatedParams: (context) => {
+ if (!context.serverSessionInformation) {
+ throw new Error('Session information was not received from response stream');
+ }
+ if (context.livenessStreamProvider.videoRecorder &&
+ context.livenessStreamProvider.videoRecorder.getState() !==
+ 'recording') {
+ context.livenessStreamProvider.startRecordingLivenessVideo();
+ }
+ return {
+ ...context.videoAssociatedParams,
+ };
+ },
+ }),
+ stopRecording: (context) => { },
+ updateFaceMatchBeforeStartDetails: xstate.assign({
+ faceMatchStateBeforeStart: (_, event) => {
+ return event.data.faceMatchState;
+ },
+ }),
+ updateFaceDistanceBeforeRecording: xstate.assign({
+ isFaceFarEnoughBeforeRecording: (_, event) => {
+ return event.data.isFaceFarEnoughBeforeRecording;
+ },
+ }),
+ updateFaceDistanceWhileLoading: xstate.assign({
+ isFaceFarEnoughBeforeRecording: (_, event) => {
+ return event.data.isFaceFarEnoughBeforeRecording;
+ },
+ errorState: (_, event) => {
+ return event.data?.error;
+ },
+ }),
+ updateOvalAndFaceDetailsPostDraw: xstate.assign({
+ ovalAssociatedParams: (context, event) => ({
+ ...context.ovalAssociatedParams,
+ initialFace: event.data.initialFace,
+ ovalDetails: event.data.ovalDetails,
+ scaleFactor: event.data.scaleFactor,
+ }),
+ faceMatchAssociatedParams: (context, event) => ({
+ ...context.faceMatchAssociatedParams,
+ faceMatchState: event.data.faceMatchState,
+ illuminationState: event.data.illuminationState,
+ }),
+ }),
+ updateFaceDetailsPostMatch: xstate.assign({
+ faceMatchAssociatedParams: (context, event) => ({
+ ...context.faceMatchAssociatedParams,
+ faceMatchState: event.data.faceMatchState,
+ faceMatchPercentage: event.data.faceMatchPercentage,
+ illuminationState: event.data.illuminationState,
+ currentDetectedFace: event.data.detectedFace,
+ }),
+ }),
+ updateEndFaceMatch: xstate.assign({
+ faceMatchAssociatedParams: (context) => ({
+ ...context.faceMatchAssociatedParams,
+ endFace: context.faceMatchAssociatedParams.currentDetectedFace,
+ }),
+ }),
+ setFaceMatchTimeAndStartFace: xstate.assign({
+ faceMatchAssociatedParams: (context) => {
+ return {
+ ...context.faceMatchAssociatedParams,
+ startFace: context.faceMatchAssociatedParams.startFace === undefined
+ ? context.faceMatchAssociatedParams.currentDetectedFace
+ : context.faceMatchAssociatedParams.startFace,
+ initialFaceMatchTime: context.faceMatchAssociatedParams.initialFaceMatchTime ===
+ undefined
+ ? Date.now()
+ : context.faceMatchAssociatedParams.initialFaceMatchTime,
+ };
+ },
+ }),
+ resetErrorState: xstate.assign({
+ errorState: (_) => undefined,
+ }),
+ updateErrorStateForTimeout: xstate.assign({
+ errorState: (_, event) => {
+ return event.data?.errorState || LivenessErrorState.TIMEOUT;
+ },
+ }),
+ updateErrorStateForRuntime: xstate.assign({
+ errorState: (_, event) => {
+ return event.data?.errorState || LivenessErrorState.RUNTIME_ERROR;
+ },
+ }),
+ updateErrorStateForServer: xstate.assign({
+ errorState: (_) => LivenessErrorState.SERVER_ERROR,
+ }),
+ clearErrorState: xstate.assign({
+ errorState: (_) => undefined,
+ }),
+ updateSessionInfo: xstate.assign({
+ serverSessionInformation: (context, event) => {
+ return event.data.sessionInfo;
+ },
+ }),
+ updateShouldDisconnect: xstate.assign({
+ shouldDisconnect: (context) => {
+ return true;
+ },
+ }),
+ updateFreshnessDetails: xstate.assign({
+ freshnessColorAssociatedParams: (context, event) => {
+ return {
+ ...context.freshnessColorAssociatedParams,
+ freshnessColorsComplete: event.data.freshnessColorsComplete,
+ };
+ },
+ }),
+ setupFlashFreshnessColors: xstate.assign({
+ freshnessColorAssociatedParams: (context) => {
+ const { serverSessionInformation } = context;
+ const freshnessColors = getColorsSequencesFromSessionInformation(serverSessionInformation);
+ const freshnessColorDisplay = new FreshnessColorDisplay(context, freshnessColors);
+ return {
+ ...context.freshnessColorAssociatedParams,
+ freshnessColorDisplay,
+ };
+ },
+ }),
+ // timeouts
+ sendTimeoutAfterOvalDrawingDelay: xstate.actions.send({ type: 'TIMEOUT' }, {
+ delay: 5000,
+ id: 'ovalDrawingTimeout',
+ }),
+ cancelOvalDrawingTimeout: xstate.actions.cancel('ovalDrawingTimeout'),
+ sendTimeoutAfterOvalMatchDelay: xstate.actions.send({ type: 'TIMEOUT' }, {
+ delay: (context) => {
+ return (context.serverSessionInformation?.Challenge
+ ?.FaceMovementAndLightChallenge?.ChallengeConfig
+ ?.OvalFitTimeout || DEFAULT_FACE_FIT_TIMEOUT);
+ },
+ id: 'ovalMatchTimeout',
+ }),
+ cancelOvalMatchTimeout: xstate.actions.cancel('ovalMatchTimeout'),
+ sendTimeoutAfterWaitingForDisconnect: xstate.actions.send({
+ type: 'TIMEOUT',
+ data: { errorState: LivenessErrorState.SERVER_ERROR },
+ }, {
+ delay: 20000,
+ id: 'waitForDisconnectTimeout',
+ }),
+ cancelWaitForDisconnectTimeout: xstate.actions.cancel('waitForDisconnectTimeout'),
+ sendTimeoutAfterFaceDistanceDelay: xstate.actions.send({
+ type: 'RUNTIME_ERROR',
+ data: new Error('Avoid moving closer during countdown and ensure only one face is in front of camera.'),
+ }, {
+ delay: 0,
+ id: 'faceDistanceTimeout',
+ }),
+ cancelFaceDistanceTimeout: xstate.actions.cancel('faceDistanceTimeout'),
+ // callbacks
+ callUserPermissionDeniedCallback: xstate.assign({
+ errorState: (context, event) => {
+ let errorState;
+ if (event.data.message.includes('15 fps')) {
+ errorState = LivenessErrorState.CAMERA_FRAMERATE_ERROR;
+ }
+ else {
+ errorState = LivenessErrorState.CAMERA_ACCESS_ERROR;
+ }
+ const errorMessage = event.data.message || event.data.Message;
+ const error = new Error(errorMessage);
+ const livenessError = {
+ state: errorState,
+ error: error,
+ };
+ context.componentProps.onError?.(livenessError);
+ return errorState;
+ },
+ }),
+ callMobileLandscapeWarningCallback: xstate.assign({
+ errorState: (context) => {
+ return LivenessErrorState.MOBILE_LANDSCAPE_ERROR;
+ },
+ }),
+ callUserCancelCallback: async (context) => {
+ context.componentProps.onUserCancel?.();
+ },
+ callUserTimeoutCallback: async (context) => {
+ const error = new Error('Client Timeout');
+ error.name = context.errorState;
+ const livenessError = {
+ state: context.errorState,
+ error: error,
+ };
+ context.componentProps.onError?.(livenessError);
+ },
+ callErrorCallback: async (context, event) => {
+ const livenessError = {
+ state: context.errorState,
+ error: event.data?.error || event.data,
+ };
+ context.componentProps.onError?.(livenessError);
+ },
+ cleanUpResources: async (context) => {
+ const { freshnessColorEl } = context.freshnessColorAssociatedParams;
+ if (freshnessColorEl) {
+ freshnessColorEl.style.display = 'none';
+ }
+ let closureCode = WS_CLOSURE_CODE.DEFAULT_ERROR_CODE;
+ if (context.errorState === LivenessErrorState.TIMEOUT) {
+ closureCode = WS_CLOSURE_CODE.FACE_FIT_TIMEOUT;
+ }
+ else if (context.errorState === LivenessErrorState.RUNTIME_ERROR) {
+ closureCode = WS_CLOSURE_CODE.RUNTIME_ERROR;
+ }
+ else if (context.errorState === LivenessErrorState.FACE_DISTANCE_ERROR ||
+ context.errorState === LivenessErrorState.MULTIPLE_FACES_ERROR) {
+ closureCode = WS_CLOSURE_CODE.USER_ERROR_DURING_CONNECTION;
+ }
+ else if (context.errorState === undefined) {
+ closureCode = WS_CLOSURE_CODE.USER_CANCEL;
+ }
+ await context.livenessStreamProvider?.endStreamWithCode(closureCode);
+ },
+ freezeStream: async (context) => {
+ const { videoMediaStream, videoEl } = context.videoAssociatedParams;
+ context.isRecordingStopped = true;
+ videoEl?.pause();
+ videoMediaStream?.getTracks().forEach(function (track) {
+ track.stop();
+ });
+ },
+ pauseVideoStream: async (context) => {
+ const { videoEl } = context.videoAssociatedParams;
+ context.isRecordingStopped = true;
+ videoEl.pause();
+ },
+ resetContext: xstate.assign({
+ challengeId: nanoid.nanoid(),
+ maxFailedAttempts: 0,
+ failedAttempts: 0,
+ componentProps: (context) => context.componentProps,
+ serverSessionInformation: (_) => undefined,
+ videoAssociatedParams: (_) => {
+ return {
+ videoConstraints: STATIC_VIDEO_CONSTRAINTS,
+ };
+ },
+ ovalAssociatedParams: (_) => undefined,
+ errorState: (_) => undefined,
+ livenessStreamProvider: (_) => undefined,
+ responseStreamActorRef: (_) => undefined,
+ shouldDisconnect: false,
+ faceMatchStateBeforeStart: (_) => undefined,
+ isFaceFarEnoughBeforeRecording: (_) => undefined,
+ isRecordingStopped: false,
+ }),
+ },
+ guards: {
+ shouldTimeoutOnFailedAttempts: (context) => context.failedAttempts >= context.maxFailedAttempts,
+ hasFaceMatchedInOvalWithMinTime: (context) => {
+ const { faceMatchState, initialFaceMatchTime } = context.faceMatchAssociatedParams;
+ const timeSinceInitialFaceMatch = Date.now() - initialFaceMatchTime;
+ const hasMatched = faceMatchState === FaceMatchState.MATCHED &&
+ timeSinceInitialFaceMatch >= MIN_FACE_MATCH_TIME;
+ return hasMatched;
+ },
+ hasFaceMatchedInOval: (context) => {
+ return (context.faceMatchAssociatedParams.faceMatchState ===
+ FaceMatchState.MATCHED);
+ },
+ hasNotFaceMatchedInOval: (context) => {
+ return (context.faceMatchAssociatedParams.faceMatchState !==
+ FaceMatchState.MATCHED);
+ },
+ hasSingleFace: (context) => {
+ return (context.faceMatchAssociatedParams.faceMatchState ===
+ FaceMatchState.FACE_IDENTIFIED);
+ },
+ hasSingleFaceBeforeStart: (context) => {
+ return (context.faceMatchStateBeforeStart === FaceMatchState.FACE_IDENTIFIED);
+ },
+ hasEnoughFaceDistanceBeforeRecording: (context) => {
+ return context.isFaceFarEnoughBeforeRecording;
+ },
+ hasNotEnoughFaceDistanceBeforeRecording: (context) => {
+ return !context.isFaceFarEnoughBeforeRecording;
+ },
+ hasLivenessCheckSucceeded: (_, __, meta) => meta.state.event.data.isLive,
+ hasFreshnessColorShown: (context) => context.freshnessColorAssociatedParams.freshnessColorsComplete,
+ hasServerSessionInfo: (context) => {
+ return context.serverSessionInformation !== undefined;
+ },
+ hasDOMAndCameraDetails: (context) => {
+ return (context.videoAssociatedParams.videoEl !== undefined &&
+ context.videoAssociatedParams.canvasEl !== undefined &&
+ context.freshnessColorAssociatedParams.freshnessColorEl !== undefined);
+ },
+ getShouldDisconnect: (context) => {
+ return !!context.shouldDisconnect;
+ },
+ hasRecordingStarted: (context) => {
+ return (context.livenessStreamProvider.videoRecorder.firstChunkTimestamp !==
+ undefined);
+ },
+ shouldSkipStartScreen: (context) => {
+ return !!context.componentProps?.disableStartScreen;
+ },
+ },
+ services: {
+ async checkVirtualCameraAndGetStream(context) {
+ const { videoConstraints } = context.videoAssociatedParams;
+ // Get an initial stream so we can enumerate devices with non-empty labels
+ const existingDeviceId = getLastSelectedCameraId();
+ const initialStream = await navigator.mediaDevices.getUserMedia({
+ video: {
+ ...videoConstraints,
+ ...(existingDeviceId ? { deviceId: existingDeviceId } : {}),
+ },
+ audio: false,
+ });
+ const devices = await navigator.mediaDevices.enumerateDevices();
+ const realVideoDevices = devices
+ .filter((device) => device.kind === 'videoinput')
+ .filter((device) => !isCameraDeviceVirtual(device));
+ if (!realVideoDevices.length) {
+ throw new Error('No real video devices found');
+ }
+ // Ensure that at least one of the cameras is capable of at least 15 fps
+ const tracksWithMoreThan15Fps = initialStream
+ .getTracks()
+ .filter((track) => {
+ const settings = track.getSettings();
+ return settings.frameRate >= 15;
+ });
+ if (tracksWithMoreThan15Fps.length < 1) {
+ throw new Error('No camera found with more than 15 fps');
+ }
+ // If the initial stream is from a real camera, use it; otherwise use the first real camera
+ const initialStreamDeviceId = tracksWithMoreThan15Fps[0].getSettings().deviceId;
+ const isInitialStreamFromRealDevice = realVideoDevices.some((device) => device.deviceId === initialStreamDeviceId);
+ let deviceId = initialStreamDeviceId;
+ let realVideoDeviceStream = initialStream;
+ if (!isInitialStreamFromRealDevice) {
+ deviceId = realVideoDevices[0].deviceId;
+ realVideoDeviceStream = await navigator.mediaDevices.getUserMedia({
+ video: {
+ ...videoConstraints,
+ deviceId: { exact: realVideoDevices[0].deviceId },
+ },
+ audio: false,
+ });
+ }
+ setLastSelectedCameraId(deviceId);
+ return {
+ stream: realVideoDeviceStream,
+ selectedDeviceId: initialStreamDeviceId,
+ selectableDevices: realVideoDevices,
+ };
+ },
+ async openLivenessStreamConnection(context) {
+ const { config } = context.componentProps;
+ const { credentialProvider, endpointOverride } = config;
+ const livenessStreamProvider = new LivenessStreamProvider({
+ sessionId: context.componentProps.sessionId,
+ region: context.componentProps.region,
+ stream: context.videoAssociatedParams.videoMediaStream,
+ videoEl: context.videoAssociatedParams.videoEl,
+ credentialProvider: credentialProvider,
+ endpointOverride: endpointOverride,
+ });
+ responseStream = livenessStreamProvider.getResponseStream();
+ return { livenessStreamProvider };
+ },
+ async detectFace(context) {
+ const { videoEl } = context.videoAssociatedParams;
+ const { faceDetector } = context.ovalAssociatedParams;
+ // initialize models
+ try {
+ await faceDetector.modelLoadingPromise;
+ }
+ catch (err) {
+ console.log({ err });
+ }
+ // detect face
+ const faceMatchState = await getFaceMatchState(faceDetector, videoEl);
+ return { faceMatchState };
+ },
+ async detectFaceDistance(context) {
+ const { isFaceFarEnoughBeforeRecording: faceDistanceCheckBeforeRecording, } = context;
+ const { videoEl, videoMediaStream, isMobile } = context.videoAssociatedParams;
+ const { faceDetector } = context.ovalAssociatedParams;
+ const { width, height } = videoMediaStream
+ .getTracks()[0]
+ .getSettings();
+ const ovalDetails = getStaticLivenessOvalDetails({
+ width: width,
+ height: height,
+ });
+ const { isDistanceBelowThreshold: isFaceFarEnoughBeforeRecording } = await isFaceDistanceBelowThreshold({
+ faceDetector: faceDetector,
+ videoEl: videoEl,
+ ovalDetails,
+ reduceThreshold: faceDistanceCheckBeforeRecording,
+ isMobile,
+ });
+ return { isFaceFarEnoughBeforeRecording };
+ },
+ async detectFaceDistanceWhileLoading(context) {
+ const { isFaceFarEnoughBeforeRecording: faceDistanceCheckBeforeRecording, } = context;
+ const { videoEl, videoMediaStream, isMobile } = context.videoAssociatedParams;
+ const { faceDetector } = context.ovalAssociatedParams;
+ const { width, height } = videoMediaStream
+ .getTracks()[0]
+ .getSettings();
+ const ovalDetails = getStaticLivenessOvalDetails({
+ width: width,
+ height: height,
+ });
+ const { isDistanceBelowThreshold: isFaceFarEnoughBeforeRecording, error, } = await isFaceDistanceBelowThreshold({
+ faceDetector: faceDetector,
+ videoEl: videoEl,
+ ovalDetails,
+ reduceThreshold: faceDistanceCheckBeforeRecording,
+ isMobile,
+ });
+ return { isFaceFarEnoughBeforeRecording, error };
+ },
+ async detectInitialFaceAndDrawOval(context) {
+ const { serverSessionInformation, livenessStreamProvider } = context;
+ const { videoEl, canvasEl, isMobile } = context.videoAssociatedParams;
+ const { faceDetector } = context.ovalAssociatedParams;
+ // initialize models
+ try {
+ await faceDetector.modelLoadingPromise;
+ await livenessStreamProvider.videoRecorder.recorderStarted;
+ }
+ catch (err) {
+ console.log({ err });
+ }
+ // detect face
+ const detectedFaces = await faceDetector.detectFaces(videoEl);
+ let initialFace;
+ let faceMatchState;
+ let illuminationState;
+ switch (detectedFaces.length) {
+ case 0: {
+ // no face detected
+ faceMatchState = FaceMatchState.CANT_IDENTIFY;
+ illuminationState = estimateIllumination(videoEl);
+ break;
+ }
+ case 1: {
+ faceMatchState = FaceMatchState.FACE_IDENTIFIED;
+ initialFace = detectedFaces[0];
+ break;
+ }
+ default: {
+ // more than one face detected
+ faceMatchState = FaceMatchState.TOO_MANY;
+ break;
+ }
+ }
+ if (!initialFace) {
+ return { faceMatchState, illuminationState };
+ }
+ // Get width/height of video element so we can compute scaleFactor
+ // and set canvas width/height.
+ const { width: videoScaledWidth, height: videoScaledHeight } = videoEl.getBoundingClientRect();
+ if (isMobile) {
+ canvasEl.width = window.innerWidth;
+ canvasEl.height = window.innerHeight;
+ }
+ else {
+ canvasEl.width = videoScaledWidth;
+ canvasEl.height = videoScaledHeight;
+ }
+ // Compute scaleFactor, which is how much our video element is scaled
+ // vs the intrinsic video resolution
+ const scaleFactor = videoScaledWidth / videoEl.videoWidth;
+ // generate oval details from initialFace and video dimensions
+ const ovalDetails = getOvalDetailsFromSessionInformation({
+ sessionInformation: serverSessionInformation,
+ videoWidth: videoEl.width,
+ });
+ // renormalize initial face
+ const renormalizedFace = generateBboxFromLandmarks(initialFace, ovalDetails);
+ initialFace.top = renormalizedFace.top;
+ initialFace.left = renormalizedFace.left;
+ initialFace.height = renormalizedFace.bottom - renormalizedFace.top;
+ initialFace.width = renormalizedFace.right - renormalizedFace.left;
+ // Draw oval in canvas using ovalDetails and scaleFactor
+ drawLivenessOvalInCanvas({
+ canvas: canvasEl,
+ oval: ovalDetails,
+ scaleFactor,
+ videoEl: videoEl,
+ });
+ return {
+ faceMatchState,
+ ovalDetails,
+ scaleFactor,
+ initialFace,
+ };
+ },
+ async detectFaceAndMatchOval(context) {
+ const { serverSessionInformation } = context;
+ const { videoEl } = context.videoAssociatedParams;
+ const { faceDetector, ovalDetails, initialFace } = context.ovalAssociatedParams;
+ // detect face
+ const detectedFaces = await faceDetector.detectFaces(videoEl);
+ let faceMatchState;
+ let faceMatchPercentage = 0;
+ let detectedFace;
+ let illuminationState;
+ const initialFaceBoundingBox = generateBboxFromLandmarks(initialFace, ovalDetails);
+ const { ovalBoundingBox } = getOvalBoundingBox(ovalDetails);
+ const initialFaceIntersection = getIntersectionOverUnion(initialFaceBoundingBox, ovalBoundingBox);
+ switch (detectedFaces.length) {
+ case 0: {
+ // no face detected
+ faceMatchState = FaceMatchState.CANT_IDENTIFY;
+ illuminationState = estimateIllumination(videoEl);
+ break;
+ }
+ case 1: {
+ // exactly one face detected; match face with oval
+ detectedFace = detectedFaces[0];
+ const { faceMatchState: faceMatchStateInLivenessOval, faceMatchPercentage: faceMatchPercentageInLivenessOval, } = getFaceMatchStateInLivenessOval(detectedFace, ovalDetails, initialFaceIntersection, serverSessionInformation);
+ faceMatchState = faceMatchStateInLivenessOval;
+ faceMatchPercentage = faceMatchPercentageInLivenessOval;
+ break;
+ }
+ default: {
+ // more than one face detected
+ faceMatchState = FaceMatchState.TOO_MANY;
+ break;
+ }
+ }
+ return {
+ faceMatchState,
+ faceMatchPercentage,
+ illuminationState,
+ detectedFace,
+ };
+ },
+ async flashColors(context) {
+ const { freshnessColorsComplete, freshnessColorDisplay } = context.freshnessColorAssociatedParams;
+ if (freshnessColorsComplete) {
+ return;
+ }
+ const completed = await freshnessColorDisplay.displayColorTick();
+ return { freshnessColorsComplete: completed };
+ },
+ async stopVideo(context) {
+ const { challengeId, livenessStreamProvider } = context;
+ const { videoMediaStream } = context.videoAssociatedParams;
+ const { initialFace, ovalDetails } = context.ovalAssociatedParams;
+ const { startFace, endFace } = context.faceMatchAssociatedParams;
+ const { width, height } = videoMediaStream
+ .getTracks()[0]
+ .getSettings();
+ const flippedInitialFaceLeft = width - initialFace.left - initialFace.width;
+ await livenessStreamProvider.stopVideo();
+ const livenessActionDocument = {
+ Challenge: {
+ FaceMovementAndLightChallenge: {
+ ChallengeId: challengeId,
+ InitialFace: {
+ InitialFaceDetectedTimestamp: initialFace.timestampMs,
+ BoundingBox: getBoundingBox({
+ deviceHeight: height,
+ deviceWidth: width,
+ height: initialFace.height,
+ width: initialFace.width,
+ top: initialFace.top,
+ left: flippedInitialFaceLeft,
+ }),
+ },
+ TargetFace: {
+ FaceDetectedInTargetPositionStartTimestamp: startFace.timestampMs,
+ FaceDetectedInTargetPositionEndTimestamp: endFace.timestampMs,
+ BoundingBox: getBoundingBox({
+ deviceHeight: height,
+ deviceWidth: width,
+ height: ovalDetails.height,
+ width: ovalDetails.width,
+ top: ovalDetails.centerY - ovalDetails.height / 2,
+ left: ovalDetails.centerX - ovalDetails.width / 2,
+ }),
+ },
+ VideoEndTimestamp: livenessStreamProvider.videoRecorder.recorderEndTimestamp,
+ },
+ },
+ };
+ if (livenessStreamProvider.videoRecorder.getVideoChunkSize() === 0) {
+ throw new Error('Video chunks not recorded successfully.');
+ }
+ livenessStreamProvider.sendClientInfo(livenessActionDocument);
+ await livenessStreamProvider.dispatchStopVideoEvent();
+ },
+ async getLiveness(context) {
+ const { onAnalysisComplete } = context.componentProps;
+ // Get liveness result
+ await onAnalysisComplete();
+ },
+ },
+ });
+ const responseStreamActor = async (callback) => {
+ try {
+ const stream = await responseStream;
+ for await (const event of stream) {
+ if (isServerSessionInformationEvent(event)) {
+ callback({
+ type: 'SET_SESSION_INFO',
+ data: {
+ sessionInfo: event.ServerSessionInformationEvent.SessionInformation,
+ },
+ });
+ }
+ else if (isDisconnectionEvent(event)) {
+ callback({ type: 'DISCONNECT_EVENT' });
+ }
+ else if (isValidationExceptionEvent(event)) {
+ callback({
+ type: 'SERVER_ERROR',
+ data: { error: { ...event.ValidationException } },
+ });
+ }
+ else if (isInternalServerExceptionEvent(event)) {
+ callback({
+ type: 'SERVER_ERROR',
+ data: { error: { ...event.InternalServerException } },
+ });
+ }
+ else if (isThrottlingExceptionEvent(event)) {
+ callback({
+ type: 'SERVER_ERROR',
+ data: { error: { ...event.ThrottlingException } },
+ });
+ }
+ else if (isServiceQuotaExceededExceptionEvent(event)) {
+ callback({
+ type: 'SERVER_ERROR',
+ data: { error: { ...event.ServiceQuotaExceededException } },
+ });
+ }
+ }
+ }
+ catch (error) {
+ let returnedError = error;
+ if (isInvalidSignatureRegionException(error)) {
+ returnedError = new Error('Invalid region in FaceLivenessDetector or credentials are scoped to the wrong region.');
+ }
+ if (returnedError instanceof Error) {
+ callback({
+ type: 'SERVER_ERROR',
+ data: { error: returnedError },
+ });
+ }
+ }
+ };
+ 
+ const FaceLivenessDetectorContext = React__default["default"].createContext(null);
+ function FaceLivenessDetectorProvider({ children, ...props }) {
+ return (React__default["default"].createElement(FaceLivenessDetectorContext.Provider, { value: props }, children));
+ }
+ function useFaceLivenessDetector() {
+ const props = React__default["default"].useContext(FaceLivenessDetectorContext);
+ if (props === null) {
+ throw new Error('useFaceLivenessDetector must be used within a FaceLivenessDetectorProvider');
+ }
+ return props;
+ }
+ 
+ // TODO: Add type annotations. Currently typing the actors returned from XState is difficult
+ // because the interpreter and state cannot be used to form a type.
+ // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
+ function useLivenessActor() {
+ const { service } = useFaceLivenessDetector();
+ const actor = react.useActor(service);
+ return actor;
+ }
+ 
+ function createLivenessSelector(selector) {
+ return selector;
+ }
+ function useLivenessSelector(selector) {
+ const { service } = useFaceLivenessDetector();
+ return react.useSelector(service, selector);
+ }
+ 
+ function useMediaStreamInVideo(stream) {
+ const height = STATIC_VIDEO_CONSTRAINTS.height.ideal;
+ const width = STATIC_VIDEO_CONSTRAINTS.width.ideal;
+ const videoRef = React.useRef(null);
+ const [videoHeight, setVideoHeight] = React.useState(height);
+ const [videoWidth, setVideoWidth] = React.useState(width);
+ React.useEffect(() => {
+ if (stream) {
+ if (ui.isObject(videoRef.current)) {
+ videoRef.current.srcObject = stream;
+ }
+ const { height: streamHeight, width: streamWidth } = stream
+ .getTracks()[0]
+ .getSettings();
+ setVideoHeight(streamHeight);
+ setVideoWidth(streamWidth);
+ }
+ return () => {
+ if (stream) {
+ stream.getTracks().forEach((track) => {
+ stream.removeTrack(track);
+ track.stop();
+ });
+ }
+ };
+ }, [stream]);
+ return {
+ videoRef,
+ videoHeight,
+ videoWidth,
+ };
+ }
+ 
+ var LivenessClassNames;
2460
+ (function (LivenessClassNames) {
2461
+ LivenessClassNames["CameraModule"] = "amplify-liveness-camera-module";
2462
+ LivenessClassNames["CancelContainer"] = "amplify-liveness-cancel-container";
2463
+ LivenessClassNames["CancelButton"] = "amplify-liveness-cancel-button";
2464
+ LivenessClassNames["CountdownContainer"] = "amplify-liveness-countdown-container";
2465
+ LivenessClassNames["DescriptionBullet"] = "amplify-liveness-description-bullet";
2466
+ LivenessClassNames["DescriptionBulletIndex"] = "amplify-liveness-description-bullet__index";
2467
+ LivenessClassNames["DescriptionBulletIndexText"] = "amplify-liveness-description-bullet__index__text";
2468
+ LivenessClassNames["DescriptionBulletMessage"] = "amplify-liveness-description-bullet__message";
2469
+ LivenessClassNames["ErrorModal"] = "amplify-liveness-error-modal";
2470
+ LivenessClassNames["ErrorModalHeading"] = "amplify-liveness-error-modal__heading";
2471
+ LivenessClassNames["FadeOut"] = "amplify-liveness-fade-out";
2472
+ LivenessClassNames["FreshnessCanvas"] = "amplify-liveness-freshness-canvas";
2473
+ LivenessClassNames["InstructionList"] = "amplify-liveness-instruction-list";
2474
+ LivenessClassNames["InstructionOverlay"] = "amplify-liveness-instruction-overlay";
2475
+ LivenessClassNames["Figure"] = "amplify-liveness-figure";
2476
+ LivenessClassNames["FigureCaption"] = "amplify-liveness-figure__caption";
2477
+ LivenessClassNames["FigureIcon"] = "amplify-liveness-figure__icon";
2478
+ LivenessClassNames["FigureImage"] = "amplify-liveness-figure__image";
2479
+ LivenessClassNames["Figures"] = "amplify-liveness-figures";
2480
+ LivenessClassNames["Hint"] = "amplify-liveness-hint";
2481
+ LivenessClassNames["HintText"] = "amplify-liveness-hint__text";
2482
+ LivenessClassNames["LandscapeErrorModal"] = "amplify-liveness-landscape-error-modal";
2483
+ LivenessClassNames["LandscapeErrorModalButton"] = "amplify-liveness-landscape-error-modal__button";
2484
+ LivenessClassNames["LandscapeErrorModalHeader"] = "amplify-liveness-landscape-error-modal__header";
2485
+ LivenessClassNames["Loader"] = "amplify-liveness-loader";
2486
+ LivenessClassNames["MatchIndicator"] = "amplify-liveness-match-indicator";
2487
+ LivenessClassNames["OvalCanvas"] = "amplify-liveness-oval-canvas";
2488
+ LivenessClassNames["OpaqueOverlay"] = "amplify-liveness-overlay-opaque";
2489
+ LivenessClassNames["Overlay"] = "amplify-liveness-overlay";
2490
+ LivenessClassNames["Popover"] = "amplify-liveness-popover";
2491
+ LivenessClassNames["PopoverContainer"] = "amplify-liveness-popover__container";
2492
+ LivenessClassNames["PopoverAnchor"] = "amplify-liveness-popover__anchor";
2493
+ LivenessClassNames["PopoverAnchorSecondary"] = "amplify-liveness-popover__anchor-secondary";
2494
+ LivenessClassNames["RecordingIconContainer"] = "amplify-liveness-recording-icon-container";
2495
+ LivenessClassNames["RecordingIcon"] = "amplify-liveness-recording-icon";
2496
+ LivenessClassNames["StartScreenCameraSelect"] = "amplify-liveness-start-screen-camera-select";
2497
+ LivenessClassNames["StartScreenCameraSelectContainer"] = "amplify-liveness-start-screen-camera-select__container";
2498
+ LivenessClassNames["StartScreenCameraWaiting"] = "amplify-liveness-start-screen-camera-waiting";
2499
+ LivenessClassNames["StartScreenHeader"] = "amplify-liveness-start-screen-header";
2500
+ LivenessClassNames["StartScreenHeaderBody"] = "amplify-liveness-start-screen-header__body";
2501
+ LivenessClassNames["StartScreenHeaderHeading"] = "amplify-liveness-start-screen-header__heading";
2502
+ LivenessClassNames["StartScreenWarning"] = "amplify-liveness-start-screen-warning";
2503
+ LivenessClassNames["StartScreenInstructions"] = "amplify-liveness-start-screen-instructions";
2504
+ LivenessClassNames["StartScreenInstructionsHeading"] = "amplify-liveness-start-screen-instructions__heading";
2505
+ LivenessClassNames["Toast"] = "amplify-liveness-toast";
2506
+ LivenessClassNames["ToastContainer"] = "amplify-liveness-toast__container";
2507
+ LivenessClassNames["ToastMessage"] = "amplify-liveness-toast__message";
2508
+ LivenessClassNames["Video"] = "amplify-liveness-video";
2509
+ LivenessClassNames["VideoAnchor"] = "amplify-liveness-video-anchor";
2510
+ })(LivenessClassNames || (LivenessClassNames = {}));
2511
+
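The string enum compiles to a plain object of stable, BEM-style class names, so consumers can target them for style overrides. A sketch, assuming an app that injects its own stylesheet (the rules below are illustrative):

const style = document.createElement('style');
style.textContent = `
  .${LivenessClassNames.Hint} { font-size: 1.25rem; }
  .${LivenessClassNames.CancelButton} { color: crimson; }
`;
document.head.appendChild(style);
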
+ const CancelButton = ({ ariaLabel }) => {
+     const [state, send] = useLivenessActor();
+     const isFinalState = state.done;
+     const handleClick = () => {
+         send({
+             type: 'CANCEL',
+         });
+     };
+     if (isFinalState)
+         return null;
+     return (React__default["default"].createElement(uiReact.Button, { autoFocus: true, variation: "link", onClick: handleClick, size: "large", className: LivenessClassNames.CancelButton, "aria-label": ariaLabel },
+         React__default["default"].createElement(internal.IconClose, { "aria-hidden": "true", "data-testid": "close-icon" })));
+ };
+
+ const Toast = ({ variation = 'default', size = 'medium', children, isInitial = false, ...rest }) => {
+     const { tokens } = uiReact.useTheme();
+     return (React__namespace.createElement(uiReact.View, { className: `${LivenessClassNames.Toast} ${LivenessClassNames.Toast}--${variation} ${LivenessClassNames.Toast}--${size}`, ...(isInitial && { backgroundColor: tokens.colors.background.primary }), ...rest },
+         React__namespace.createElement(uiReact.Flex, { className: LivenessClassNames.ToastContainer },
+             React__namespace.createElement(uiReact.Flex, { className: LivenessClassNames.ToastMessage, ...(isInitial ? { color: tokens.colors.font.primary } : {}) }, children))));
+ };
+
+ const selectErrorState = createLivenessSelector((state) => state.context.errorState);
+ const selectFaceMatchState$1 = createLivenessSelector((state) => state.context.faceMatchAssociatedParams.faceMatchState);
+ const selectIlluminationState = createLivenessSelector((state) => state.context.faceMatchAssociatedParams.illuminationState);
+ const selectIsFaceFarEnoughBeforeRecording = createLivenessSelector((state) => state.context.isFaceFarEnoughBeforeRecording);
+ const selectFaceMatchStateBeforeStart = createLivenessSelector((state) => state.context.faceMatchStateBeforeStart);
+ const Hint = ({ hintDisplayText }) => {
+     const [state] = useLivenessActor();
+     // NOTE: Do not change the order of these selectors; the unit tests depend on this order
+     const errorState = useLivenessSelector(selectErrorState);
+     const faceMatchState = useLivenessSelector(selectFaceMatchState$1);
+     const illuminationState = useLivenessSelector(selectIlluminationState);
+     const faceMatchStateBeforeStart = useLivenessSelector(selectFaceMatchStateBeforeStart);
+     const isFaceFarEnoughBeforeRecordingState = useLivenessSelector(selectIsFaceFarEnoughBeforeRecording);
+     const isCheckFaceDetectedBeforeStart = state.matches('checkFaceDetectedBeforeStart');
+     const isCheckFaceDistanceBeforeRecording = state.matches('checkFaceDistanceBeforeRecording');
+     const isStartView = state.matches('start') || state.matches('userCancel');
+     const isRecording = state.matches('recording');
+     const isNotRecording = state.matches('notRecording');
+     const isUploading = state.matches('uploading');
+     const isCheckSuccessful = state.matches('checkSucceeded');
+     const isCheckFailed = state.matches('checkFailed');
+     const isFlashingFreshness = state.matches({
+         recording: 'flashFreshnessColors',
+     });
+     const FaceMatchStateStringMap = {
+         [FaceMatchState.CANT_IDENTIFY]: hintDisplayText.hintCanNotIdentifyText,
+         [FaceMatchState.FACE_IDENTIFIED]: hintDisplayText.hintTooFarText,
+         [FaceMatchState.TOO_MANY]: hintDisplayText.hintTooManyFacesText,
+         [FaceMatchState.TOO_CLOSE]: hintDisplayText.hintTooCloseText,
+         [FaceMatchState.TOO_FAR]: hintDisplayText.hintTooFarText,
+         [FaceMatchState.MATCHED]: hintDisplayText.hintHoldFaceForFreshnessText,
+     };
+     const IlluminationStateStringMap = {
+         [IlluminationState.BRIGHT]: hintDisplayText.hintIlluminationTooBrightText,
+         [IlluminationState.DARK]: hintDisplayText.hintIlluminationTooDarkText,
+         [IlluminationState.NORMAL]: hintDisplayText.hintIlluminationNormalText,
+     };
+     const getInstructionContent = () => {
+         if (isStartView) {
+             return (React__namespace.createElement(Toast, { size: "large", variation: "primary", isInitial: true }, hintDisplayText.hintCenterFaceText));
+         }
+         if (errorState ?? (isCheckFailed || isCheckSuccessful)) {
+             return;
+         }
+         if (!isRecording) {
+             if (isCheckFaceDetectedBeforeStart) {
+                 if (faceMatchStateBeforeStart === FaceMatchState.TOO_MANY) {
+                     return (React__namespace.createElement(Toast, { size: "large", variation: "primary" }, FaceMatchStateStringMap[faceMatchStateBeforeStart]));
+                 }
+                 return (React__namespace.createElement(Toast, { size: "large", variation: "primary" }, hintDisplayText.hintMoveFaceFrontOfCameraText));
+             }
+             // Specifically check for false here: the value is initially undefined, and we do not want to show the instruction in that case
+             if (isCheckFaceDistanceBeforeRecording &&
+                 isFaceFarEnoughBeforeRecordingState === false) {
+                 return (React__namespace.createElement(Toast, { size: "large", variation: "primary" }, hintDisplayText.hintTooCloseText));
+             }
+             if (isNotRecording) {
+                 return (React__namespace.createElement(Toast, null,
+                     React__namespace.createElement(uiReact.Flex, { className: LivenessClassNames.HintText },
+                         React__namespace.createElement(uiReact.Loader, null),
+                         React__namespace.createElement(uiReact.View, null, hintDisplayText.hintConnectingText))));
+             }
+             if (isUploading) {
+                 return (React__namespace.createElement(Toast, null,
+                     React__namespace.createElement(uiReact.Flex, { className: LivenessClassNames.HintText },
+                         React__namespace.createElement(uiReact.Loader, null),
+                         React__namespace.createElement(uiReact.View, null, hintDisplayText.hintVerifyingText))));
+             }
+             if (illuminationState && illuminationState !== IlluminationState.NORMAL) {
+                 return (React__namespace.createElement(Toast, { size: "large", variation: "primary" }, IlluminationStateStringMap[illuminationState]));
+             }
+         }
+         if (isFlashingFreshness) {
+             return (React__namespace.createElement(Toast, { size: "large", variation: "primary" }, hintDisplayText.hintHoldFaceForFreshnessText));
+         }
+         if (isRecording && !isFlashingFreshness) {
+             // During face matching, we only want to show the TOO_CLOSE or
+             // TOO_FAR texts. If FaceMatchState is TOO_CLOSE, we show the
+             // TOO_CLOSE text; for FACE_IDENTIFIED, CANT_IDENTIFY, and TOO_MANY
+             // we default to the TOO_FAR text (for now). For the MATCHED state,
+             // we don't want to show any toasts.
+             return (React__namespace.createElement(Toast, { size: "large", variation: faceMatchState === FaceMatchState.TOO_CLOSE ? 'error' : 'primary' }, faceMatchState === FaceMatchState.TOO_CLOSE
+                 ? FaceMatchStateStringMap[FaceMatchState.TOO_CLOSE]
+                 : FaceMatchStateStringMap[FaceMatchState.TOO_FAR]));
+         }
+         return null;
+     };
+     const instructionContent = getInstructionContent();
+     return instructionContent ? instructionContent : null;
+ };
+
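Hint reads machine state through memoized selectors created once at module scope and subscribed via useLivenessSelector, so the component re-renders only when a selected value changes. A sketch of the same pattern with a hypothetical selector and component (neither is part of the package):

// Selector over the machine's state value rather than its context.
const selectIsRecording = createLivenessSelector((state) =>
    state.matches('recording'));

function RecordingBadge() {
    const isRecording = useLivenessSelector(selectIsRecording);
    return isRecording
        ? React.createElement(RecordingIcon, null, 'Rec')
        : null;
}
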
+ const MatchIndicator = ({ percentage, initialPercentage = 25, testId, }) => {
+     const [matchPercentage, setMatchPercentage] = React__default["default"].useState(initialPercentage);
+     React__default["default"].useEffect(() => {
+         if (percentage < 0) {
+             setMatchPercentage(0);
+         }
+         else if (percentage > 100) {
+             setMatchPercentage(100);
+         }
+         else {
+             setMatchPercentage(percentage);
+         }
+     }, [percentage]);
+     const percentageStyles = {
+         '--percentage': `${matchPercentage}%`,
+     };
+     return (React__default["default"].createElement("div", { className: LivenessClassNames.MatchIndicator, "data-testid": testId },
+         React__default["default"].createElement("div", { className: `${LivenessClassNames.MatchIndicator}__bar`, style: percentageStyles })));
+ };
+
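The effect clamps the incoming percentage to [0, 100] and drives the bar's width through the `--percentage` CSS custom property set inline. A usage sketch with illustrative values:

React.createElement(MatchIndicator, { percentage: 62, testId: 'match' });
// Out-of-range values are clamped by the effect above:
React.createElement(MatchIndicator, { percentage: 150 }); // renders as 100%
React.createElement(MatchIndicator, { percentage: -10 }); // renders as 0%
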
+ const Overlay = ({ children, horizontal = 'center', vertical = 'center', className, ...rest }) => {
+     return (React__namespace.createElement(uiReact.Flex, { className: `${LivenessClassNames.Overlay} ${className}`, alignItems: horizontal, justifyContent: vertical, ...rest }, children));
+ };
+
+ const RecordingIcon = ({ children }) => {
+     return (React__default["default"].createElement(uiReact.Flex, { className: LivenessClassNames.RecordingIcon },
+         React__default["default"].createElement(uiReact.Flex, { "data-testid": "rec-icon", justifyContent: "center" },
+             React__default["default"].createElement(uiReact.Icon, { viewBox: { width: 20, height: 20 }, width: "20", height: "20" },
+                 React__default["default"].createElement("circle", { cx: "10", cy: "10", r: "8", fill: "red" }))),
+         React__default["default"].createElement(uiReact.Text, { as: "span", fontWeight: "bold" }, children)));
+ };
+
+ const defaultErrorDisplayText = {
+     timeoutHeaderText: 'Time out',
+     timeoutMessageText: "Face didn't fit inside the oval within the time limit. Try again and completely fill the oval with your face.",
+     faceDistanceHeaderText: 'Forward movement detected',
+     faceDistanceMessageText: 'Avoid moving closer when connecting.',
+     multipleFacesHeaderText: 'Multiple faces detected',
+     multipleFacesMessageText: 'Ensure only one face is present in front of the camera when connecting.',
+     clientHeaderText: 'Client error',
+     clientMessageText: 'Check failed due to client issue',
+     serverHeaderText: 'Server issue',
+     serverMessageText: 'Cannot complete check due to server issue',
+     landscapeHeaderText: 'Landscape orientation not supported',
+     landscapeMessageText: 'Rotate your device to portrait (vertical) orientation.',
+     portraitMessageText: 'Ensure your device remains in portrait (vertical) orientation for the check’s duration.',
+     tryAgainText: 'Try again',
+ };
+ const defaultLivenessDisplayText = {
+     hintCenterFaceText: 'Center your face',
+     startScreenBeginCheckText: 'Start video check',
+     photosensitivyWarningHeadingText: 'Photosensitivity warning',
+     photosensitivyWarningBodyText: 'This check flashes different colors. Use caution if you are photosensitive.',
+     photosensitivyWarningInfoText: 'Some people may experience epileptic seizures when exposed to colored lights. Use caution if you, or anyone in your family, have an epileptic condition.',
+     goodFitCaptionText: 'Good fit',
+     goodFitAltText: "Illustration of a person's face, perfectly fitting inside of an oval.",
+     tooFarCaptionText: 'Too far',
+     tooFarAltText: "Illustration of a person's face inside of an oval; there is a gap between the perimeter of the face and the boundaries of the oval.",
+     cameraMinSpecificationsHeadingText: 'Camera does not meet minimum specifications',
+     cameraMinSpecificationsMessageText: 'Camera must support at least 320*240 resolution and 15 frames per second.',
+     cameraNotFoundHeadingText: 'Camera is not accessible.',
+     cameraNotFoundMessageText: 'Check that a camera is connected and that no other application is using it. You may have to go into settings to grant camera permissions, close out all instances of your browser, and retry.',
+     retryCameraPermissionsText: 'Retry',
+     waitingCameraPermissionText: 'Waiting for you to allow camera permission.',
+     cancelLivenessCheckText: 'Cancel Liveness check',
+     recordingIndicatorText: 'Rec',
+     hintMoveFaceFrontOfCameraText: 'Move face in front of camera',
+     hintTooManyFacesText: 'Ensure only one face is in front of camera',
+     hintFaceDetectedText: 'Face detected',
+     hintCanNotIdentifyText: 'Move face in front of camera',
+     hintTooCloseText: 'Move back',
+     hintTooFarText: 'Move closer',
+     hintConnectingText: 'Connecting...',
+     hintVerifyingText: 'Verifying...',
+     hintIlluminationTooBrightText: 'Move to dimmer area',
+     hintIlluminationTooDarkText: 'Move to brighter area',
+     hintIlluminationNormalText: 'Lighting conditions normal',
+     hintHoldFaceForFreshnessText: 'Hold still',
+     ...defaultErrorDisplayText,
+ };
+
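Consumer overrides merge over these defaults with a shallow spread, so supplying a key replaces only that string. A sketch of building a merged display-text object (the override values are illustrative):

const displayText = {
    ...defaultLivenessDisplayText,
    // Only the named keys change; everything else keeps its default.
    hintCenterFaceText: 'Center your face in the oval',
    tryAgainText: 'Retry check',
};
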
+ const renderToastErrorModal = (props) => {
+     const { error: errorState, displayText } = props;
+     const { timeoutHeaderText, timeoutMessageText, faceDistanceHeaderText, faceDistanceMessageText, multipleFacesHeaderText, multipleFacesMessageText, clientHeaderText, clientMessageText, serverHeaderText, serverMessageText, } = displayText;
+     let heading;
+     let message;
+     switch (errorState) {
+         case LivenessErrorState.TIMEOUT:
+             heading = timeoutHeaderText;
+             message = timeoutMessageText;
+             break;
+         case LivenessErrorState.FACE_DISTANCE_ERROR:
+             heading = faceDistanceHeaderText;
+             message = faceDistanceMessageText;
+             break;
+         case LivenessErrorState.MULTIPLE_FACES_ERROR:
+             heading = multipleFacesHeaderText;
+             message = multipleFacesMessageText;
+             break;
+         case LivenessErrorState.RUNTIME_ERROR:
+             heading = clientHeaderText;
+             message = clientMessageText;
+             break;
+         case LivenessErrorState.SERVER_ERROR:
+         default:
+             heading = serverHeaderText;
+             message = serverMessageText;
+     }
+     return (React__default["default"].createElement(React__default["default"].Fragment, null,
+         React__default["default"].createElement(uiReact.Flex, { className: LivenessClassNames.ErrorModal },
+             React__default["default"].createElement(internal.AlertIcon, { ariaHidden: true, variation: "error" }),
+             React__default["default"].createElement(uiReact.Text, { className: LivenessClassNames.ErrorModalHeading }, heading)),
+         message));
+ };
+ const renderErrorModal = ({ errorState, overrideErrorDisplayText, }) => {
+     const displayText = {
+         ...defaultErrorDisplayText,
+         ...overrideErrorDisplayText,
+     };
+     if (errorState === LivenessErrorState.CAMERA_ACCESS_ERROR ||
+         errorState === LivenessErrorState.CAMERA_FRAMERATE_ERROR ||
+         errorState === LivenessErrorState.MOBILE_LANDSCAPE_ERROR) {
+         return null;
+     }
+     else {
+         return renderToastErrorModal({
+             error: errorState,
+             displayText,
+         });
+     }
+ };
+ const FaceLivenessErrorModal = (props) => {
+     const { children, onRetry, displayText: overrideErrorDisplayText } = props;
+     const displayText = {
+         ...defaultErrorDisplayText,
+         ...overrideErrorDisplayText,
+     };
+     const { tryAgainText } = displayText;
+     return (React__default["default"].createElement(Overlay, { className: LivenessClassNames.OpaqueOverlay },
+         React__default["default"].createElement(Toast, null,
+             children,
+             React__default["default"].createElement(uiReact.Flex, { justifyContent: "center" },
+                 React__default["default"].createElement(uiReact.Button, { variation: "primary", type: "button", onClick: onRetry }, tryAgainText)))));
+ };
+
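FaceLivenessErrorModal is the default `ErrorView`; LivenessCameraModule (below) renders whichever ErrorView comes through its `components` prop, passing the rendered error content as children and a retry callback. A sketch of a custom replacement (`MyErrorView` is illustrative, not a package export):

const MyErrorView = ({ children, onRetry }) =>
    React.createElement('div', { role: 'alert' },
        children,
        React.createElement('button', { onClick: onRetry }, 'Try again'));

// Passed down via the `components` prop consumed by LivenessCameraModule.
const components = { ErrorView: MyErrorView };
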
+ /**
+  * Copied from src/primitives/Alert/AlertIcon.tsx because we want to re-use the icon, but it is not currently exported by AlertIcon.
+  * We don't want to change the AlertIcon primitive itself and may expose the icon in the future; for now it is duplicated so as not to introduce cross-component dependencies.
+  */
+ const LivenessIconWithPopover = ({ children }) => {
+     const breakpoint = internal.useThemeBreakpoint();
+     const [shouldShowPopover, setShouldShowPopover] = React__namespace.useState(false);
+     const wrapperRef = React__namespace.useRef(null);
+     const isMobileScreen = breakpoint === 'base';
+     React__namespace.useEffect(() => {
+         function handleClickOutside(event) {
+             if (shouldShowPopover &&
+                 wrapperRef.current &&
+                 !wrapperRef.current.contains(event.target)) {
+                 setShouldShowPopover(false);
+             }
+         }
+         document.addEventListener('mousedown', handleClickOutside);
+         return () => {
+             document.removeEventListener('mousedown', handleClickOutside);
+         };
+     }, [wrapperRef, shouldShowPopover]);
+     return (React__namespace.createElement(uiReact.Flex, { className: LivenessClassNames.Popover, onClick: () => setShouldShowPopover(!shouldShowPopover), ref: wrapperRef, testId: "popover-icon" },
+         React__namespace.createElement(internal.AlertIcon, { ariaHidden: true, variation: "info" }),
+         shouldShowPopover && (React__namespace.createElement(React__namespace.Fragment, null,
+             React__namespace.createElement(uiReact.Flex, { className: LivenessClassNames.PopoverAnchor }),
+             React__namespace.createElement(uiReact.Flex, { className: LivenessClassNames.PopoverAnchorSecondary }),
+             React__namespace.createElement(uiReact.Flex, { className: LivenessClassNames.PopoverContainer, left: isMobileScreen ? -190 : -108, "data-testid": "popover-text" }, children)))));
+ };
+ LivenessIconWithPopover.displayName = 'LivenessIconWithPopover';
+
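The popover dismisses on any mousedown outside its wrapper by registering a document-level listener and removing the same function reference on cleanup. The same pattern, extracted as a standalone hook for illustration (not a package export):

function useClickOutside(ref, onOutside) {
    React.useEffect(() => {
        const handler = (event) => {
            // Ignore clicks that land inside the wrapped element.
            if (ref.current && !ref.current.contains(event.target)) {
                onOutside();
            }
        };
        document.addEventListener('mousedown', handler);
        return () => document.removeEventListener('mousedown', handler);
    }, [ref, onOutside]);
}
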
+ const DefaultPhotosensitiveWarning = ({ headingText, bodyText, infoText, }) => {
+     return (React__default["default"].createElement(uiReact.Flex, { className: `${ui.ComponentClassName.Alert} ${LivenessClassNames.StartScreenWarning}`, style: { zIndex: '3' } },
+         React__default["default"].createElement(uiReact.View, { flex: "1" },
+             React__default["default"].createElement(uiReact.View, { className: ui.ComponentClassName.AlertHeading }, headingText),
+             React__default["default"].createElement(uiReact.View, { className: ui.ComponentClassName.AlertBody }, bodyText)),
+         React__default["default"].createElement(LivenessIconWithPopover, null, infoText)));
+ };
+ const DefaultRecordingIcon = ({ recordingIndicatorText, }) => {
+     return (React__default["default"].createElement(uiReact.View, { className: LivenessClassNames.RecordingIconContainer },
+         React__default["default"].createElement(RecordingIcon, null, recordingIndicatorText)));
+ };
+ const DefaultCancelButton = ({ cancelLivenessCheckText, }) => {
+     return (React__default["default"].createElement(uiReact.View, { className: LivenessClassNames.CancelContainer },
+         React__default["default"].createElement(CancelButton, { ariaLabel: cancelLivenessCheckText })));
+ };
+
+ const selectVideoConstraints = createLivenessSelector((state) => state.context.videoAssociatedParams?.videoConstraints);
+ const selectVideoStream = createLivenessSelector((state) => state.context.videoAssociatedParams?.videoMediaStream);
+ const selectFaceMatchPercentage = createLivenessSelector((state) => state.context.faceMatchAssociatedParams?.faceMatchPercentage);
+ const selectFaceMatchState = createLivenessSelector((state) => state.context.faceMatchAssociatedParams?.faceMatchState);
+ const selectSelectedDeviceId = createLivenessSelector((state) => state.context.videoAssociatedParams?.selectedDeviceId);
+ const selectSelectableDevices = createLivenessSelector((state) => state.context.videoAssociatedParams?.selectableDevices);
+ const centeredLoader = (React__default["default"].createElement(uiReact.Loader, { size: "large", className: LivenessClassNames.Loader, "data-testid": "centered-loader" }));
+ const showMatchIndicatorStates = [
+     FaceMatchState.TOO_FAR,
+     FaceMatchState.CANT_IDENTIFY,
+     FaceMatchState.FACE_IDENTIFIED,
+     FaceMatchState.MATCHED,
+ ];
+ /**
+  * For now we memoize MatchIndicator to optimize renders.
+  * LivenessCameraModule itself still needs to be optimized for re-renders;
+  * at that point we should be able to remove this memoization.
+  */
+ const MemoizedMatchIndicator = React__default["default"].memo(MatchIndicator);
+ const LivenessCameraModule = (props) => {
+     const { isMobileScreen, isRecordingStopped, instructionDisplayText, streamDisplayText, hintDisplayText, errorDisplayText, cameraDisplayText, components: customComponents, testId, } = props;
+     const { cancelLivenessCheckText, recordingIndicatorText } = streamDisplayText;
+     const { ErrorView = FaceLivenessErrorModal } = customComponents ?? {};
+     const [state, send] = useLivenessActor();
+     const videoStream = useLivenessSelector(selectVideoStream);
+     const videoConstraints = useLivenessSelector(selectVideoConstraints);
+     const selectedDeviceId = useLivenessSelector(selectSelectedDeviceId);
+     const selectableDevices = useLivenessSelector(selectSelectableDevices);
+     const faceMatchPercentage = useLivenessSelector(selectFaceMatchPercentage);
+     const faceMatchState = useLivenessSelector(selectFaceMatchState);
+     const errorState = useLivenessSelector(selectErrorState);
+     const colorMode = internal.useColorMode();
+     const { videoRef, videoWidth, videoHeight } = useMediaStreamInVideo(videoStream);
+     const canvasRef = React.useRef(null);
+     const freshnessColorRef = React.useRef(null);
+     const [isCameraReady, setIsCameraReady] = React.useState(false);
+     const isCheckingCamera = state.matches('cameraCheck');
+     const isWaitingForCamera = state.matches('waitForDOMAndCameraDetails');
+     const isStartView = state.matches('start') || state.matches('userCancel');
+     const isRecording = state.matches('recording');
+     const isCheckSucceeded = state.matches('checkSucceeded');
+     const isFlashingFreshness = state.matches({
+         recording: 'flashFreshnessColors',
+     });
+     // Android/Firefox and iOS flip the values of width/height returned from
+     // getUserMedia, so we reset these in useLayoutEffect with the videoRef
+     // element's intrinsic videoWidth and videoHeight attributes
+     const [mediaWidth, setMediaWidth] = React.useState(videoWidth);
+     const [mediaHeight, setMediaHeight] = React.useState(videoHeight);
+     const [aspectRatio, setAspectRatio] = React.useState(() => videoWidth && videoHeight ? videoWidth / videoHeight : 0);
+     React__default["default"].useEffect(() => {
+         if (canvasRef &&
+             videoRef &&
+             canvasRef.current &&
+             videoRef.current &&
+             videoStream &&
+             isStartView) {
+             drawStaticOval(canvasRef.current, videoRef.current, videoStream);
+         }
+     }, [canvasRef, videoRef, videoStream, colorMode, isStartView]);
+     React__default["default"].useEffect(() => {
+         const updateColorModeHandler = (e) => {
+             if (e.matches &&
+                 canvasRef &&
+                 videoRef &&
+                 canvasRef.current &&
+                 videoRef.current &&
+                 videoStream &&
+                 isStartView) {
+                 drawStaticOval(canvasRef.current, videoRef.current, videoStream);
+             }
+         };
+         const darkModePreference = window.matchMedia('(prefers-color-scheme: dark)');
+         const lightModePreference = window.matchMedia('(prefers-color-scheme: light)');
+         darkModePreference.addEventListener('change', updateColorModeHandler);
+         lightModePreference.addEventListener('change', updateColorModeHandler);
+         return () => {
+             darkModePreference.removeEventListener('change', updateColorModeHandler);
+             lightModePreference.removeEventListener('change', updateColorModeHandler);
+         };
+     }, [canvasRef, videoRef, videoStream, isStartView]);
+     React__default["default"].useLayoutEffect(() => {
+         if (isCameraReady) {
+             send({
+                 type: 'SET_DOM_AND_CAMERA_DETAILS',
+                 data: {
+                     videoEl: videoRef.current,
+                     canvasEl: canvasRef.current,
+                     freshnessColorEl: freshnessColorRef.current,
+                     isMobile: isMobileScreen,
+                 },
+             });
+         }
+         if (videoRef.current) {
+             setMediaWidth(videoRef.current.videoWidth);
+             setMediaHeight(videoRef.current.videoHeight);
+             setAspectRatio(videoRef.current.videoWidth / videoRef.current.videoHeight);
+         }
+     }, [send, videoRef, isCameraReady, isMobileScreen]);
+     const photoSensitivtyWarning = React__default["default"].useMemo(() => {
+         return (React__default["default"].createElement(uiReact.View, { style: { visibility: isStartView ? 'visible' : 'hidden' } },
+             React__default["default"].createElement(DefaultPhotosensitiveWarning, { headingText: instructionDisplayText.photosensitivyWarningHeadingText, bodyText: instructionDisplayText.photosensitivyWarningBodyText, infoText: instructionDisplayText.photosensitivyWarningInfoText })));
+     }, [instructionDisplayText, isStartView]);
+     const handleMediaPlay = () => {
+         setIsCameraReady(true);
+     };
+     const beginLivenessCheck = React__default["default"].useCallback(() => {
+         send({
+             type: 'BEGIN',
+         });
+     }, [send]);
+     const onCameraChange = React__default["default"].useCallback((e) => {
+         const newDeviceId = e.target.value;
+         const changeCamera = async () => {
+             const newStream = await navigator.mediaDevices.getUserMedia({
+                 video: {
+                     ...videoConstraints,
+                     deviceId: { exact: newDeviceId },
+                 },
+                 audio: false,
+             });
+             send({
+                 type: 'UPDATE_DEVICE_AND_STREAM',
+                 data: { newDeviceId, newStream },
+             });
+         };
+         changeCamera();
+     }, [videoConstraints, send]);
+     if (isCheckingCamera) {
+         return (React__default["default"].createElement(uiReact.Flex, { justifyContent: 'center', className: LivenessClassNames.StartScreenCameraWaiting },
+             React__default["default"].createElement(uiReact.Loader, { size: "large", className: LivenessClassNames.Loader, "data-testid": "centered-loader", position: "unset" }),
+             React__default["default"].createElement(uiReact.Text, { fontSize: "large", fontWeight: "bold", "data-testid": "waiting-camera-permission", className: `${LivenessClassNames.StartScreenCameraWaiting}__text` }, cameraDisplayText.waitingCameraPermissionText)));
+     }
+     const isRecordingOnMobile = isMobileScreen && !isStartView && !isWaitingForCamera && isRecording;
+     return (React__default["default"].createElement(React__default["default"].Fragment, null,
+         photoSensitivtyWarning,
+         React__default["default"].createElement(uiReact.Flex, { className: ui.classNames(LivenessClassNames.CameraModule, isRecordingOnMobile && `${LivenessClassNames.CameraModule}--mobile`), "data-testid": testId, gap: "zero" },
+             !isCameraReady && centeredLoader,
+             React__default["default"].createElement(uiReact.View, { as: "canvas", ref: freshnessColorRef, className: LivenessClassNames.FreshnessCanvas, hidden: true }),
+             React__default["default"].createElement(uiReact.View, { className: LivenessClassNames.VideoAnchor, style: {
+                     aspectRatio: `${aspectRatio}`,
+                 } },
+                 React__default["default"].createElement("video", { ref: videoRef, muted: true, autoPlay: true, playsInline: true, width: mediaWidth, height: mediaHeight, onCanPlay: handleMediaPlay, "data-testid": "video", className: LivenessClassNames.Video }),
+                 React__default["default"].createElement(uiReact.Flex, { className: ui.classNames(LivenessClassNames.OvalCanvas, isRecordingOnMobile && `${LivenessClassNames.OvalCanvas}--mobile`, isRecordingStopped && LivenessClassNames.FadeOut) },
+                     React__default["default"].createElement(uiReact.View, { as: "canvas", ref: canvasRef })),
+                 isRecording && (React__default["default"].createElement(DefaultRecordingIcon, { recordingIndicatorText: recordingIndicatorText })),
+                 !isStartView && !isWaitingForCamera && !isCheckSucceeded && (React__default["default"].createElement(DefaultCancelButton, { cancelLivenessCheckText: cancelLivenessCheckText })),
+                 React__default["default"].createElement(Overlay, { horizontal: "center", vertical: isRecording && !isFlashingFreshness ? 'start' : 'space-between', className: LivenessClassNames.InstructionOverlay },
+                     React__default["default"].createElement(Hint, { hintDisplayText: hintDisplayText }),
+                     errorState && (React__default["default"].createElement(ErrorView, { onRetry: () => {
+                             send({ type: 'CANCEL' });
+                         } }, renderErrorModal({
+                         errorState,
+                         overrideErrorDisplayText: errorDisplayText,
+                     }))),
+                     isRecording &&
+                         !isFlashingFreshness &&
+                         showMatchIndicatorStates.includes(faceMatchState) ? (React__default["default"].createElement(MemoizedMatchIndicator, { percentage: Math.ceil(faceMatchPercentage) })) : null),
+                 isStartView &&
+                     !isMobileScreen &&
+                     selectableDevices &&
+                     selectableDevices.length > 1 && (React__default["default"].createElement(uiReact.Flex, { className: LivenessClassNames.StartScreenCameraSelect },
+                     React__default["default"].createElement(uiReact.View, { className: LivenessClassNames.StartScreenCameraSelectContainer },
+                         React__default["default"].createElement(uiReact.Label, { htmlFor: "amplify-liveness-camera-select", className: `${LivenessClassNames.StartScreenCameraSelect}__label` }, "Camera:"),
+                         React__default["default"].createElement(uiReact.SelectField, { id: "amplify-liveness-camera-select", label: "Camera", labelHidden: true, value: selectedDeviceId, onChange: onCameraChange }, selectableDevices?.map((device) => (React__default["default"].createElement("option", { value: device.deviceId, key: device.deviceId }, device.label))))))))),
+         isStartView && (React__default["default"].createElement(uiReact.Flex, { justifyContent: "center" },
+             React__default["default"].createElement(uiReact.Button, { variation: "primary", type: "button", onClick: beginLivenessCheck }, instructionDisplayText.startScreenBeginCheckText)))));
+ };
+
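Per the comment inside the component, some browsers flip the width/height reported by getUserMedia, so once frames arrive the element's intrinsic videoWidth/videoHeight are treated as ground truth. A standalone sketch of that correction (plain DOM, illustrative element lookup):

const videoEl = document.querySelector('video');
videoEl.addEventListener('loadedmetadata', () => {
    // Intrinsic dimensions reflect the actual decoded frames, regardless
    // of what getTracks()[0].getSettings() reported on this platform.
    const trueAspectRatio = videoEl.videoWidth / videoEl.videoHeight;
    console.log(videoEl.videoWidth, videoEl.videoHeight, trueAspectRatio);
});
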
+ function isNewerIpad() {
+     // iPads on iOS 13+ report a user agent as if they were a desktop Mac,
+     // so also check maxTouchPoints.
+     return (/Macintosh/i.test(navigator.userAgent) &&
+         !!navigator.maxTouchPoints &&
+         navigator.maxTouchPoints > 1);
+ }
+ function isMobileScreen() {
+     const isMobileDevice =
+         // Test Android/iPhone/iPad
+         /Android|iPhone|iPad/i.test(navigator.userAgent) || isNewerIpad();
+     return isMobileDevice;
+ }
+ /**
+  * Use window.matchMedia to detect landscape orientation.
+  * screen.orientation is not supported in Safari, so we use
+  * media query detection to listen for changes instead.
+  * @returns MediaQueryList object
+  */
+ function getLandscapeMediaQuery() {
+     return window.matchMedia('(orientation: landscape)');
+ }
+
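A usage sketch for the helper: subscribe to orientation flips with a single named handler so the same function reference can be removed on cleanup (removeEventListener is a no-op when given a different function).

const mql = getLandscapeMediaQuery();
const onOrientationChange = (e) => {
    console.log(e.matches ? 'landscape' : 'portrait');
};
mql.addEventListener('change', onOrientationChange);
// Later, clean up with the same reference:
mql.removeEventListener('change', onOrientationChange);
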
+ const LandscapeErrorModal = (props) => {
+     const { onRetry, header, portraitMessage, landscapeMessage, tryAgainText } = props;
+     const [isLandscape, setIsLandscape] = React__namespace.useState(true);
+     React__namespace.useLayoutEffect(() => {
+         // Get orientation: landscape media query
+         const landscapeMediaQuery = getLandscapeMediaQuery();
+         // Set ui state for initial orientation
+         setIsLandscape(landscapeMediaQuery.matches);
+         // Listen for future orientation changes; keep a single handler
+         // reference so cleanup removes the listener that was added
+         const handleOrientationChange = (e) => {
+             setIsLandscape(e.matches);
+         };
+         landscapeMediaQuery.addEventListener('change', handleOrientationChange);
+         // Remove matchMedia event listener
+         return () => {
+             landscapeMediaQuery.removeEventListener('change', handleOrientationChange);
+         };
+     }, []);
+     return (React__namespace.createElement(uiReact.Flex, { className: LivenessClassNames.LandscapeErrorModal, height: isLandscape ? 'auto' : 480 },
+         React__namespace.createElement(uiReact.Text, { className: LivenessClassNames.LandscapeErrorModalHeader }, header),
+         React__namespace.createElement(uiReact.Text, null, isLandscape ? landscapeMessage : portraitMessage),
+         !isLandscape ? (React__namespace.createElement(uiReact.Flex, { className: LivenessClassNames.LandscapeErrorModalButton },
+             React__namespace.createElement(uiReact.Button, { variation: "primary", type: "button", onClick: onRetry }, tryAgainText))) : null));
+ };
+
+ const CHECK_CLASS_NAME = 'liveness-detector-check';
+ const CAMERA_ERROR_TEXT_WIDTH = 420;
+ const selectIsRecordingStopped = createLivenessSelector((state) => state.context.isRecordingStopped);
+ const LivenessCheck = ({ instructionDisplayText, hintDisplayText, cameraDisplayText, streamDisplayText, errorDisplayText, components, }) => {
+     const [state, send] = useLivenessActor();
+     const errorState = useLivenessSelector(selectErrorState);
+     const isRecordingStopped = useLivenessSelector(selectIsRecordingStopped);
+     const isPermissionDenied = state.matches('permissionDenied');
+     const isMobile = isMobileScreen();
+     const recheckCameraPermissions = () => {
+         send({ type: 'RETRY_CAMERA_CHECK' });
+     };
+     const { cameraMinSpecificationsHeadingText, cameraMinSpecificationsMessageText, cameraNotFoundHeadingText, cameraNotFoundMessageText, retryCameraPermissionsText, } = cameraDisplayText;
+     const { cancelLivenessCheckText } = streamDisplayText;
+     React__namespace.useLayoutEffect(() => {
+         if (isMobile) {
+             const sendLandscapeWarning = (isLandscapeMatched) => {
+                 if (isLandscapeMatched) {
+                     send({ type: 'MOBILE_LANDSCAPE_WARNING' });
+                 }
+             };
+             // Get orientation: landscape media query
+             const landscapeMediaQuery = getLandscapeMediaQuery();
+             // Send warning based on initial orientation
+             sendLandscapeWarning(landscapeMediaQuery.matches);
+             // Listen for future orientation changes and send warning; use a
+             // single handler reference so cleanup removes the added listener
+             const handleOrientationChange = (e) => {
+                 sendLandscapeWarning(e.matches);
+             };
+             landscapeMediaQuery.addEventListener('change', handleOrientationChange);
+             // Remove matchMedia event listener
+             return () => {
+                 landscapeMediaQuery.removeEventListener('change', handleOrientationChange);
+             };
+         }
+     }, [isMobile, send]);
+     const renderCheck = () => {
+         if (errorState === LivenessErrorState.MOBILE_LANDSCAPE_ERROR) {
+             const displayText = {
+                 ...defaultErrorDisplayText,
+                 ...errorDisplayText,
+             };
+             const { landscapeHeaderText, portraitMessageText, landscapeMessageText, tryAgainText, } = displayText;
+             return (React__namespace.createElement(uiReact.Flex, { backgroundColor: "background.primary", direction: "column", textAlign: "center", alignItems: "center", justifyContent: "center", position: "absolute", width: "100%" },
+                 React__namespace.createElement(LandscapeErrorModal, { header: landscapeHeaderText, portraitMessage: portraitMessageText, landscapeMessage: landscapeMessageText, tryAgainText: tryAgainText, onRetry: () => {
+                         send({
+                             type: 'CANCEL',
+                         });
+                     } })));
+         }
+         else if (isPermissionDenied) {
+             return (React__namespace.createElement(uiReact.Flex, { backgroundColor: "background.primary", direction: "column", textAlign: "center", alignItems: "center", justifyContent: "center", width: "100%", height: 480 },
+                 React__namespace.createElement(uiReact.Text, { fontSize: "large", fontWeight: "bold" }, errorState === LivenessErrorState.CAMERA_FRAMERATE_ERROR
+                     ? cameraMinSpecificationsHeadingText
+                     : cameraNotFoundHeadingText),
+                 React__namespace.createElement(uiReact.Text, { maxWidth: CAMERA_ERROR_TEXT_WIDTH }, errorState === LivenessErrorState.CAMERA_FRAMERATE_ERROR
+                     ? cameraMinSpecificationsMessageText
+                     : cameraNotFoundMessageText),
+                 React__namespace.createElement(uiReact.Button, { variation: "primary", type: "button", onClick: recheckCameraPermissions }, retryCameraPermissionsText),
+                 React__namespace.createElement(uiReact.View, { position: "absolute", top: "medium", right: "medium" },
+                     React__namespace.createElement(CancelButton, { ariaLabel: cancelLivenessCheckText }))));
+         }
+         else {
+             return (React__namespace.createElement(LivenessCameraModule, { isMobileScreen: isMobile, isRecordingStopped: isRecordingStopped, instructionDisplayText: instructionDisplayText, streamDisplayText: streamDisplayText, hintDisplayText: hintDisplayText, errorDisplayText: errorDisplayText, cameraDisplayText: cameraDisplayText, components: components }));
+         }
+     };
+     return (React__namespace.createElement(uiReact.Flex, { direction: "column", position: "relative", testId: CHECK_CLASS_NAME, className: CHECK_CLASS_NAME, gap: "xl" }, renderCheck()));
+ };
+
+ /**
+  * Merges the optional displayText prop with defaultLivenessDisplayText and
+  * splits the result into smaller groups to pass down to child components of
+  * FaceLivenessDetector.
+  * @param overrideDisplayText
+  * @returns hintDisplayText, cameraDisplayText, instructionDisplayText, streamDisplayText, errorDisplayText
+  */
+ function getDisplayText(overrideDisplayText) {
+     const displayText = {
+         ...defaultLivenessDisplayText,
+         ...overrideDisplayText,
+     };
+     const { photosensitivyWarningHeadingText, photosensitivyWarningBodyText, photosensitivyWarningInfoText, goodFitCaptionText, goodFitAltText, tooFarCaptionText, tooFarAltText, cameraMinSpecificationsHeadingText, cameraMinSpecificationsMessageText, cameraNotFoundHeadingText, cameraNotFoundMessageText, retryCameraPermissionsText, waitingCameraPermissionText, cancelLivenessCheckText, recordingIndicatorText, hintMoveFaceFrontOfCameraText, hintTooManyFacesText, hintFaceDetectedText, hintCanNotIdentifyText, hintTooCloseText, hintTooFarText, hintConnectingText, hintVerifyingText, hintIlluminationTooBrightText, hintIlluminationTooDarkText, hintIlluminationNormalText, hintHoldFaceForFreshnessText, timeoutHeaderText, timeoutMessageText, faceDistanceHeaderText, faceDistanceMessageText, multipleFacesHeaderText, multipleFacesMessageText, clientHeaderText, clientMessageText, serverHeaderText, serverMessageText, landscapeHeaderText, landscapeMessageText, portraitMessageText, tryAgainText, startScreenBeginCheckText, hintCenterFaceText, } = displayText;
+     const hintDisplayText = {
+         hintMoveFaceFrontOfCameraText,
+         hintTooManyFacesText,
+         hintFaceDetectedText,
+         hintCanNotIdentifyText,
+         hintTooCloseText,
+         hintTooFarText,
+         hintConnectingText,
+         hintVerifyingText,
+         hintIlluminationTooBrightText,
+         hintIlluminationTooDarkText,
+         hintIlluminationNormalText,
+         hintHoldFaceForFreshnessText,
+         hintCenterFaceText,
+     };
+     const cameraDisplayText = {
+         cameraMinSpecificationsHeadingText,
+         cameraMinSpecificationsMessageText,
+         cameraNotFoundHeadingText,
+         cameraNotFoundMessageText,
+         retryCameraPermissionsText,
+         waitingCameraPermissionText,
+     };
+     const instructionDisplayText = {
+         photosensitivyWarningHeadingText,
+         photosensitivyWarningBodyText,
+         photosensitivyWarningInfoText,
+         goodFitCaptionText,
+         goodFitAltText,
+         tooFarCaptionText,
+         tooFarAltText,
+         startScreenBeginCheckText,
+     };
+     const streamDisplayText = {
+         cancelLivenessCheckText,
+         recordingIndicatorText,
+     };
+     const errorDisplayText = {
+         timeoutHeaderText,
+         timeoutMessageText,
+         faceDistanceHeaderText,
+         faceDistanceMessageText,
+         multipleFacesHeaderText,
+         multipleFacesMessageText,
+         clientHeaderText,
+         clientMessageText,
+         serverHeaderText,
+         serverMessageText,
+         landscapeHeaderText,
+         landscapeMessageText,
+         portraitMessageText,
+         tryAgainText,
+     };
+     return {
+         hintDisplayText,
+         cameraDisplayText,
+         instructionDisplayText,
+         streamDisplayText,
+         errorDisplayText,
+     };
+ }
+
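A usage sketch: one flat override object goes in, per-component buckets come out. The override values here are illustrative.

const { hintDisplayText, errorDisplayText } = getDisplayText({
    hintConnectingText: 'Connecting…',
    serverHeaderText: 'Service unavailable',
});
// hintDisplayText.hintConnectingText === 'Connecting…'
// errorDisplayText.serverHeaderText === 'Service unavailable'
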
+ const DETECTOR_CLASS_NAME = 'liveness-detector';
+ function FaceLivenessDetectorCore(props) {
+     const { components, config, displayText } = props;
+     const currElementRef = React__namespace.useRef(null);
+     const { hintDisplayText, cameraDisplayText, instructionDisplayText, streamDisplayText, errorDisplayText, } = getDisplayText(displayText);
+     const service = react.useInterpret(livenessMachine, {
+         devTools: process.env.NODE_ENV === 'development',
+         context: {
+             componentProps: {
+                 ...props,
+                 config: config ?? {},
+             },
+         },
+     });
+     return (React__namespace.createElement(uiReact.View, { className: DETECTOR_CLASS_NAME, testId: DETECTOR_CLASS_NAME },
+         React__namespace.createElement(FaceLivenessDetectorProvider, { componentProps: props, service: service },
+             React__namespace.createElement(uiReact.Flex, { direction: "column", ref: currElementRef },
+                 React__namespace.createElement(LivenessCheck, { instructionDisplayText: instructionDisplayText, hintDisplayText: hintDisplayText, cameraDisplayText: cameraDisplayText, streamDisplayText: streamDisplayText, errorDisplayText: errorDisplayText, components: components })))));
+ }
+
+ const credentialProvider = async () => {
+     const { credentials } = await auth.fetchAuthSession();
+     if (!credentials) {
+         throw new Error('No credentials provided');
+     }
+     return credentials;
+ };
+ function FaceLivenessDetector(props) {
+     const { config, ...rest } = props;
+     return (React__namespace.createElement(FaceLivenessDetectorCore, { ...rest, config: { credentialProvider, ...config } }));
+ }
+
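FaceLivenessDetector wires in an Amplify-session credentialProvider by default; because the default is spread before `...config`, a consumer-supplied provider wins. A sketch of supplying one (the placeholder values are not real credentials):

const myCredentialProvider = async () => ({
    // AWS credential shape expected by the streaming client; placeholders only.
    accessKeyId: '<accessKeyId>',
    secretAccessKey: '<secretAccessKey>',
    sessionToken: '<sessionToken>',
});
// Passed through `config`, this overrides the default provider, e.g.:
// <FaceLivenessDetectorCore config={{ credentialProvider: myCredentialProvider }} ... />
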
+ exports.FaceLivenessDetector = FaceLivenessDetector;
+ exports.FaceLivenessDetectorCore = FaceLivenessDetectorCore;