@aws-amplify/ui-react-liveness 2.0.10 → 3.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (66)
  1. package/dist/esm/components/FaceLivenessDetector/FaceLivenessDetector.mjs +17 -1
  2. package/dist/esm/components/FaceLivenessDetector/FaceLivenessDetectorCore.mjs +42 -1
  3. package/dist/esm/components/FaceLivenessDetector/LivenessCheck/LivenessCameraModule.mjs +199 -1
  4. package/dist/esm/components/FaceLivenessDetector/LivenessCheck/LivenessCheck.mjs +97 -1
  5. package/dist/esm/components/FaceLivenessDetector/displayText.mjs +50 -1
  6. package/dist/esm/components/FaceLivenessDetector/hooks/useLivenessActor.mjs +13 -1
  7. package/dist/esm/components/FaceLivenessDetector/hooks/useLivenessSelector.mjs +12 -1
  8. package/dist/esm/components/FaceLivenessDetector/hooks/useMediaStreamInVideo.mjs +38 -1
  9. package/dist/esm/components/FaceLivenessDetector/providers/FaceLivenessDetectorProvider.mjs +15 -1
  10. package/dist/esm/components/FaceLivenessDetector/service/machine/index.mjs +1130 -1
  11. package/dist/esm/components/FaceLivenessDetector/service/types/error.mjs +16 -1
  12. package/dist/esm/components/FaceLivenessDetector/service/types/faceDetection.mjs +15 -1
  13. package/dist/esm/components/FaceLivenessDetector/service/types/liveness.mjs +23 -1
  14. package/dist/esm/components/FaceLivenessDetector/service/utils/CustomWebSocketFetchHandler.mjs +200 -1
  15. package/dist/esm/components/FaceLivenessDetector/service/utils/blazefaceFaceDetection.mjs +102 -1
  16. package/dist/esm/components/FaceLivenessDetector/service/utils/constants.mjs +18 -1
  17. package/dist/esm/components/FaceLivenessDetector/service/utils/eventUtils.mjs +30 -1
  18. package/dist/esm/components/FaceLivenessDetector/service/utils/freshnessColorDisplay.mjs +131 -1
  19. package/dist/esm/components/FaceLivenessDetector/service/utils/liveness.mjs +462 -1
  20. package/dist/esm/components/FaceLivenessDetector/service/utils/streamProvider.mjs +144 -1
  21. package/dist/esm/components/FaceLivenessDetector/service/utils/support.mjs +14 -1
  22. package/dist/esm/components/FaceLivenessDetector/service/utils/videoRecorder.mjs +98 -1
  23. package/dist/esm/components/FaceLivenessDetector/shared/CancelButton.mjs +24 -1
  24. package/dist/esm/components/FaceLivenessDetector/shared/DefaultStartScreenComponents.mjs +41 -1
  25. package/dist/esm/components/FaceLivenessDetector/shared/FaceLivenessErrorModal.mjs +88 -1
  26. package/dist/esm/components/FaceLivenessDetector/shared/Hint.mjs +114 -1
  27. package/dist/esm/components/FaceLivenessDetector/shared/LandscapeErrorModal.mjs +30 -1
  28. package/dist/esm/components/FaceLivenessDetector/shared/LivenessIconWithPopover.mjs +37 -1
  29. package/dist/esm/components/FaceLivenessDetector/shared/MatchIndicator.mjs +24 -1
  30. package/dist/esm/components/FaceLivenessDetector/shared/Overlay.mjs +9 -1
  31. package/dist/esm/components/FaceLivenessDetector/shared/RecordingIcon.mjs +13 -1
  32. package/dist/esm/components/FaceLivenessDetector/shared/Toast.mjs +12 -1
  33. package/dist/esm/components/FaceLivenessDetector/types/classNames.mjs +54 -1
  34. package/dist/esm/components/FaceLivenessDetector/utils/device.mjs +24 -1
  35. package/dist/esm/components/FaceLivenessDetector/utils/getDisplayText.mjs +78 -1
  36. package/dist/esm/components/FaceLivenessDetector/utils/helpers.mjs +14 -0
  37. package/dist/esm/components/FaceLivenessDetector/utils/platform.mjs +8 -1
  38. package/dist/esm/index.mjs +2 -1
  39. package/dist/esm/version.mjs +3 -1
  40. package/dist/index.js +3208 -1
  41. package/dist/styles.css +343 -680
  42. package/dist/types/components/FaceLivenessDetector/FaceLivenessDetector.d.ts +1 -1
  43. package/dist/types/components/FaceLivenessDetector/FaceLivenessDetectorCore.d.ts +1 -3
  44. package/dist/types/components/FaceLivenessDetector/LivenessCheck/LivenessCameraModule.d.ts +7 -3
  45. package/dist/types/components/FaceLivenessDetector/LivenessCheck/LivenessCheck.d.ts +5 -3
  46. package/dist/types/components/FaceLivenessDetector/displayText.d.ts +3 -10
  47. package/dist/types/components/FaceLivenessDetector/service/machine/index.d.ts +1 -1
  48. package/dist/types/components/FaceLivenessDetector/service/types/faceDetection.d.ts +2 -0
  49. package/dist/types/components/FaceLivenessDetector/service/types/liveness.d.ts +1 -1
  50. package/dist/types/components/FaceLivenessDetector/service/types/machine.d.ts +3 -1
  51. package/dist/types/components/FaceLivenessDetector/service/utils/blazefaceFaceDetection.d.ts +4 -3
  52. package/dist/types/components/FaceLivenessDetector/service/utils/liveness.d.ts +5 -2
  53. package/dist/types/components/FaceLivenessDetector/shared/DefaultStartScreenComponents.d.ts +9 -15
  54. package/dist/types/components/FaceLivenessDetector/shared/Overlay.d.ts +2 -5
  55. package/dist/types/components/FaceLivenessDetector/shared/Toast.d.ts +1 -0
  56. package/dist/types/components/FaceLivenessDetector/types/classNames.d.ts +3 -0
  57. package/dist/types/version.d.ts +1 -1
  58. package/package.json +16 -37
  59. package/dist/esm/components/FaceLivenessDetector/StartLiveness/StartLiveness.mjs +0 -1
  60. package/dist/esm/components/FaceLivenessDetector/StartLiveness/helpers.mjs +0 -1
  61. package/dist/esm/components/FaceLivenessDetector/shared/GoodFitIllustration.mjs +0 -1
  62. package/dist/esm/components/FaceLivenessDetector/shared/StartScreenFigure.mjs +0 -1
  63. package/dist/esm/components/FaceLivenessDetector/shared/TooFarIllustration.mjs +0 -1
  64. package/dist/types/components/FaceLivenessDetector/StartLiveness/StartLiveness.d.ts +0 -9
  65. package/dist/types/components/FaceLivenessDetector/StartLiveness/index.d.ts +0 -1
  66. /package/dist/types/components/FaceLivenessDetector/{StartLiveness → utils}/helpers.d.ts +0 -0
@@ -1 +1,1130 @@
1
- import{__awaiter as e,__asyncValues as t}from"tslib";import{createMachine as a,assign as o,spawn as r,actions as i}from"xstate";import{getBoundingBox as s,getColorsSequencesFromSessionInformation as n,isCameraDeviceVirtual as c,getFaceMatchState as d,isFaceDistanceBelowThreshold as l,estimateIllumination as v,getOvalDetailsFromSessionInformation as m,generateBboxFromLandmarks as h,drawLivenessOvalInCanvas as g,getOvalBoundingBox as f,getIntersectionOverUnion as u,getFaceMatchStateInLivenessOval as S,getStaticLivenessOvalDetails as p}from"../utils/liveness.mjs";import{FaceMatchState as E}from"../types/liveness.mjs";import{LivenessErrorState as F}from"../types/error.mjs";import{BlazeFaceFaceDetection as R}from"../utils/blazefaceFaceDetection.mjs";import{LivenessStreamProvider as D}from"../utils/streamProvider.mjs";import{FreshnessColorDisplay as A}from"../utils/freshnessColorDisplay.mjs";import{nanoid as M}from"nanoid";import{isServerSesssionInformationEvent as C,isDisconnectionEvent as T,isValidationExceptionEvent as P,isInternalServerExceptionEvent as O,isThrottlingExceptionEvent as I,isServiceQuotaExceededExceptionEvent as w,isInvalidSignatureRegionException as y}from"../utils/eventUtils.mjs";import{STATIC_VIDEO_CONSTRAINTS as b}from"../../StartLiveness/helpers.mjs";import{WS_CLOSURE_CODE as _}from"../utils/constants.mjs";const k=500;let B;const N=a({id:"livenessMachine",initial:"start",predictableActionArguments:!0,context:{challengeId:M(),maxFailedAttempts:0,failedAttempts:0,componentProps:void 0,serverSessionInformation:void 0,videoAssociatedParams:{videoConstraints:b},ovalAssociatedParams:void 0,faceMatchAssociatedParams:{illuminationState:void 0,faceMatchState:void 0,faceMatchPercentage:25,currentDetectedFace:void 0,startFace:void 0,endFace:void 0,initialFaceMatchTime:void 0},freshnessColorAssociatedParams:{freshnessColorEl:void 0,freshnessColors:[],freshnessColorsComplete:!1,freshnessColorDisplay:void 0},errorState:void 0,livenessStreamProvider:void 
0,responseStreamActorRef:void 0,shouldDisconnect:!1,faceMatchStateBeforeStart:void 0,isFaceFarEnoughBeforeRecording:void 0,isRecordingStopped:!1},on:{CANCEL:"userCancel",TIMEOUT:{target:"retryableTimeout",actions:"updateErrorStateForTimeout"},SET_SESSION_INFO:{internal:!0,actions:"updateSessionInfo"},DISCONNECT_EVENT:{internal:!0,actions:"updateShouldDisconnect"},SET_DOM_AND_CAMERA_DETAILS:{actions:"setDOMAndCameraDetails"},SERVER_ERROR:{target:"error",actions:"updateErrorStateForServer"},RUNTIME_ERROR:{target:"error"},MOBILE_LANDSCAPE_WARNING:{target:"mobileLandscapeWarning",actions:"updateErrorStateForServer"}},states:{start:{on:{BEGIN:"cameraCheck"}},cameraCheck:{entry:["resetErrorState","initializeFaceDetector"],invoke:{src:"checkVirtualCameraAndGetStream",onDone:{target:"waitForDOMAndCameraDetails",actions:["updateVideoMediaStream"]},onError:{target:"permissionDenied"}}},waitForDOMAndCameraDetails:{after:{0:{target:"detectFaceBeforeStart",cond:"hasDOMAndCameraDetails"},500:{target:"waitForDOMAndCameraDetails"}}},detectFaceBeforeStart:{invoke:{src:"detectFace",onDone:{target:"checkFaceDetectedBeforeStart",actions:["updateFaceMatchBeforeStartDetails"]}}},checkFaceDetectedBeforeStart:{after:{0:{target:"detectFaceDistanceBeforeRecording",cond:"hasSingleFaceBeforeStart"},100:{target:"detectFaceBeforeStart"}}},detectFaceDistanceBeforeRecording:{invoke:{src:"detectFaceDistance",onDone:{target:"checkFaceDistanceBeforeRecording",actions:["updateFaceDistanceBeforeRecording"]}}},checkFaceDistanceBeforeRecording:{after:{0:{target:"initializeLivenessStream",cond:"hasEnoughFaceDistanceBeforeRecording"},100:{target:"detectFaceDistanceBeforeRecording"}}},initializeLivenessStream:{invoke:{src:"openLivenessStreamConnection",onDone:{target:"notRecording",actions:["updateLivenessStreamProvider","spawnResponseStreamActor"]}}},notRecording:{on:{START_RECORDING:"recording"},initial:"waitForSessionInfo",states:{waitForSessionInfo:{after:{0:{target:"#livenessMachine.recording",cond:"ha
sServerSessionInfo"},100:{target:"waitForSessionInfo"}}}}},recording:{entry:["clearErrorState","startRecording"],initial:"ovalDrawing",states:{ovalDrawing:{entry:["sendTimeoutAfterOvalDrawingDelay"],invoke:{src:"detectInitialFaceAndDrawOval",onDone:{target:"checkFaceDetected",actions:["updateOvalAndFaceDetailsPostDraw","sendTimeoutAfterOvalMatchDelay"]},onError:{target:"#livenessMachine.error",actions:"updateErrorStateForRuntime"}}},checkFaceDetected:{after:{0:{target:"checkRecordingStarted",cond:"hasSingleFace"},100:{target:"ovalDrawing"}}},checkRecordingStarted:{after:{0:{target:"ovalMatching",cond:"hasRecordingStarted",actions:["updateRecordingStartTimestampMs"]},100:{target:"checkRecordingStarted"}}},ovalMatching:{entry:["cancelOvalDrawingTimeout"],invoke:{src:"detectFaceAndMatchOval",onDone:{target:"checkMatch",actions:"updateFaceDetailsPostMatch"}}},checkMatch:{after:{0:{target:"flashFreshnessColors",cond:"hasFaceMatchedInOvalWithMinTime",actions:["updateEndFaceMatch","setupFlashFreshnessColors","cancelOvalMatchTimeout","cancelOvalDrawingTimeout"]},.1:{target:"ovalMatching",cond:"hasFaceMatchedInOval",actions:"setFaceMatchTimeAndStartFace"},1:{target:"ovalMatching",cond:"hasNotFaceMatchedInOval"}}},flashFreshnessColors:{invoke:{src:"flashColors",onDone:[{target:"success",cond:"hasFreshnessColorShown"},{target:"flashFreshnessColors",actions:"updateFreshnessDetails"}]}},success:{entry:["stopRecording"],type:"final"}},onDone:"uploading"},uploading:{initial:"pending",states:{pending:{entry:["sendTimeoutAfterWaitingForDisconnect","pauseVideoStream"],invoke:{src:"stopVideo",onDone:"waitForDisconnectEvent",onError:{target:"#livenessMachine.error",actions:"updateErrorStateForRuntime"}}},waitForDisconnectEvent:{after:{0:{target:"getLivenessResult",cond:"getShouldDisconnect"},100:{target:"waitForDisconnectEvent"}}},getLivenessResult:{entry:["cancelWaitForDisconnectTimeout","freezeStream"],invoke:{src:"getLiveness",onError:{target:"#livenessMachine.error",actions:"update
ErrorStateForServer"}}}}},retryableTimeout:{entry:"updateFailedAttempts",always:[{target:"timeout",cond:"shouldTimeoutOnFailedAttempts"},{target:"notRecording"}]},permissionDenied:{entry:"callUserPermissionDeniedCallback",on:{RETRY_CAMERA_CHECK:"cameraCheck"}},mobileLandscapeWarning:{entry:"callMobileLandscapeWarningCallback",always:[{target:"error"}]},timeout:{entry:["cleanUpResources","callUserTimeoutCallback","freezeStream"]},error:{entry:["cleanUpResources","callErrorCallback","cancelOvalDrawingTimeout","cancelWaitForDisconnectTimeout","cancelOvalMatchTimeout","freezeStream"]},userCancel:{entry:["cleanUpResources","callUserCancelCallback","resetContext"],always:[{target:"start"}]}}},{actions:{spawnResponseStreamActor:o({responseStreamActorRef:()=>r(j)}),updateFailedAttempts:o({failedAttempts:e=>e.failedAttempts+1}),updateVideoMediaStream:o({videoAssociatedParams:(e,t)=>{var a;return Object.assign(Object.assign({},e.videoAssociatedParams),{videoMediaStream:null===(a=t.data)||void 0===a?void 0:a.stream})}}),initializeFaceDetector:o({ovalAssociatedParams:e=>{const{componentProps:t}=e,{faceModelUrl:a,binaryPath:o}=t.config,r=new R(o,a);return r.triggerModelLoading(),Object.assign(Object.assign({},e.ovalAssociatedParams),{faceDetector:r})}}),updateLivenessStreamProvider:o({livenessStreamProvider:(e,t)=>{var a;return null===(a=t.data)||void 0===a?void 0:a.livenessStreamProvider}}),setDOMAndCameraDetails:o({videoAssociatedParams:(e,t)=>{var a,o,r;return Object.assign(Object.assign({},e.videoAssociatedParams),{videoEl:null===(a=t.data)||void 0===a?void 0:a.videoEl,canvasEl:null===(o=t.data)||void 0===o?void 0:o.canvasEl,isMobile:null===(r=t.data)||void 0===r?void 0:r.isMobile})},freshnessColorAssociatedParams:(e,t)=>{var a;return Object.assign(Object.assign({},e.freshnessColorAssociatedParams),{freshnessColorEl:null===(a=t.data)||void 0===a?void 
0:a.freshnessColorEl})}}),updateRecordingStartTimestampMs:o({videoAssociatedParams:e=>{const{challengeId:t,videoAssociatedParams:a,ovalAssociatedParams:o,livenessStreamProvider:r}=e,{recordingStartApiTimestamp:i,recorderStartTimestamp:n}=r.videoRecorder,{videoMediaStream:c}=a,{initialFace:d}=o,l=Math.round(.73*(n-i)+i),{width:v,height:m}=c.getTracks()[0].getSettings(),h=v-d.left-d.width;return e.livenessStreamProvider.sendClientInfo({Challenge:{FaceMovementAndLightChallenge:{ChallengeId:t,VideoStartTimestamp:l,InitialFace:{InitialFaceDetectedTimestamp:d.timestampMs,BoundingBox:s({deviceHeight:m,deviceWidth:v,height:d.height,width:d.width,top:d.top,left:h})}}}}),Object.assign(Object.assign({},e.videoAssociatedParams),{recordingStartTimestampMs:l})}}),startRecording:o({videoAssociatedParams:e=>{if(!e.serverSessionInformation)throw new Error("Session information was not received from response stream");return e.livenessStreamProvider.videoRecorder&&"recording"!==e.livenessStreamProvider.videoRecorder.getState()&&e.livenessStreamProvider.startRecordingLivenessVideo(),Object.assign({},e.videoAssociatedParams)}}),stopRecording:e=>{},updateFaceMatchBeforeStartDetails:o({faceMatchStateBeforeStart:(e,t)=>t.data.faceMatchState}),updateFaceDistanceBeforeRecording:o({isFaceFarEnoughBeforeRecording:(e,t)=>t.data.isFaceFarEnoughBeforeRecording}),updateFaceDistanceWhileLoading:o({isFaceFarEnoughBeforeRecording:(e,t)=>t.data.isFaceFarEnoughBeforeRecording,errorState:(e,t)=>{var a;return null===(a=t.data)||void 0===a?void 
0:a.error}}),updateOvalAndFaceDetailsPostDraw:o({ovalAssociatedParams:(e,t)=>Object.assign(Object.assign({},e.ovalAssociatedParams),{initialFace:t.data.initialFace,ovalDetails:t.data.ovalDetails,scaleFactor:t.data.scaleFactor}),faceMatchAssociatedParams:(e,t)=>Object.assign(Object.assign({},e.faceMatchAssociatedParams),{faceMatchState:t.data.faceMatchState,illuminationState:t.data.illuminationState})}),updateFaceDetailsPostMatch:o({faceMatchAssociatedParams:(e,t)=>Object.assign(Object.assign({},e.faceMatchAssociatedParams),{faceMatchState:t.data.faceMatchState,faceMatchPercentage:t.data.faceMatchPercentage,illuminationState:t.data.illuminationState,currentDetectedFace:t.data.detectedFace})}),updateEndFaceMatch:o({faceMatchAssociatedParams:e=>Object.assign(Object.assign({},e.faceMatchAssociatedParams),{endFace:e.faceMatchAssociatedParams.currentDetectedFace})}),setFaceMatchTimeAndStartFace:o({faceMatchAssociatedParams:e=>Object.assign(Object.assign({},e.faceMatchAssociatedParams),{startFace:void 0===e.faceMatchAssociatedParams.startFace?e.faceMatchAssociatedParams.currentDetectedFace:e.faceMatchAssociatedParams.startFace,initialFaceMatchTime:void 0===e.faceMatchAssociatedParams.initialFaceMatchTime?Date.now():e.faceMatchAssociatedParams.initialFaceMatchTime})}),resetErrorState:o({errorState:e=>{}}),updateErrorStateForTimeout:o({errorState:(e,t)=>{var a;return(null===(a=t.data)||void 0===a?void 0:a.errorState)||F.TIMEOUT}}),updateErrorStateForRuntime:o({errorState:(e,t)=>{var a;return(null===(a=t.data)||void 0===a?void 
0:a.errorState)||F.RUNTIME_ERROR}}),updateErrorStateForServer:o({errorState:e=>F.SERVER_ERROR}),clearErrorState:o({errorState:e=>{}}),updateSessionInfo:o({serverSessionInformation:(e,t)=>t.data.sessionInfo}),updateShouldDisconnect:o({shouldDisconnect:e=>!0}),updateFreshnessDetails:o({freshnessColorAssociatedParams:(e,t)=>Object.assign(Object.assign({},e.freshnessColorAssociatedParams),{freshnessColorsComplete:t.data.freshnessColorsComplete})}),setupFlashFreshnessColors:o({freshnessColorAssociatedParams:e=>{const{serverSessionInformation:t}=e,a=n(t),o=new A(e,a);return Object.assign(Object.assign({},e.freshnessColorAssociatedParams),{freshnessColorDisplay:o})}}),sendTimeoutAfterOvalDrawingDelay:i.send({type:"TIMEOUT"},{delay:5e3,id:"ovalDrawingTimeout"}),cancelOvalDrawingTimeout:i.cancel("ovalDrawingTimeout"),sendTimeoutAfterOvalMatchDelay:i.send({type:"TIMEOUT"},{delay:e=>{var t,a,o,r;return(null===(r=null===(o=null===(a=null===(t=e.serverSessionInformation)||void 0===t?void 0:t.Challenge)||void 0===a?void 0:a.FaceMovementAndLightChallenge)||void 0===o?void 0:o.ChallengeConfig)||void 0===r?void 0:r.OvalFitTimeout)||7e3},id:"ovalMatchTimeout"}),cancelOvalMatchTimeout:i.cancel("ovalMatchTimeout"),sendTimeoutAfterWaitingForDisconnect:i.send({type:"TIMEOUT",data:{errorState:F.SERVER_ERROR}},{delay:2e4,id:"waitForDisconnectTimeout"}),cancelWaitForDisconnectTimeout:i.cancel("waitForDisconnectTimeout"),sendTimeoutAfterFaceDistanceDelay:i.send({type:"RUNTIME_ERROR",data:new Error("Avoid moving closer during countdown and ensure only one face is in front of camera.")},{delay:0,id:"faceDistanceTimeout"}),cancelFaceDistanceTimeout:i.cancel("faceDistanceTimeout"),callUserPermissionDeniedCallback:o({errorState:(e,t)=>{var a,o;let r;r=t.data.message.includes("15 fps")?F.CAMERA_FRAMERATE_ERROR:F.CAMERA_ACCESS_ERROR;const i=t.data.message||t.data.Message,s={state:r,error:new Error(i)};return null===(o=(a=e.componentProps).onError)||void 
0===o||o.call(a,s),r}}),callMobileLandscapeWarningCallback:o({errorState:e=>F.MOBILE_LANDSCAPE_ERROR}),callUserCancelCallback:t=>e(void 0,void 0,void 0,(function*(){var e,a;null===(a=(e=t.componentProps).onUserCancel)||void 0===a||a.call(e)})),callUserTimeoutCallback:t=>e(void 0,void 0,void 0,(function*(){var e,a;const o=new Error("Client Timeout");o.name=t.errorState;const r={state:t.errorState,error:o};null===(a=(e=t.componentProps).onError)||void 0===a||a.call(e,r)})),callErrorCallback:(t,a)=>e(void 0,void 0,void 0,(function*(){var e,o,r;const i={state:t.errorState,error:(null===(e=a.data)||void 0===e?void 0:e.error)||a.data};null===(r=(o=t.componentProps).onError)||void 0===r||r.call(o,i)})),cleanUpResources:t=>e(void 0,void 0,void 0,(function*(){var e;const{freshnessColorEl:a}=t.freshnessColorAssociatedParams;a&&(a.style.display="none");let o=_.DEFAULT_ERROR_CODE;t.errorState===F.TIMEOUT?o=_.FACE_FIT_TIMEOUT:t.errorState===F.RUNTIME_ERROR?o=_.RUNTIME_ERROR:t.errorState===F.FACE_DISTANCE_ERROR||t.errorState===F.MULTIPLE_FACES_ERROR?o=_.USER_ERROR_DURING_CONNECTION:void 0===t.errorState&&(o=_.USER_CANCEL),yield null===(e=t.livenessStreamProvider)||void 0===e?void 0:e.endStreamWithCode(o)})),freezeStream:t=>e(void 0,void 0,void 0,(function*(){const{videoMediaStream:e,videoEl:a}=t.videoAssociatedParams;t.isRecordingStopped=!0,null==a||a.pause(),null==e||e.getTracks().forEach((function(e){e.stop()}))})),pauseVideoStream:t=>e(void 0,void 0,void 
0,(function*(){const{videoEl:e}=t.videoAssociatedParams;t.isRecordingStopped=!0,e.pause()})),resetContext:o({challengeId:M(),maxFailedAttempts:0,failedAttempts:0,componentProps:e=>e.componentProps,serverSessionInformation:e=>{},videoAssociatedParams:e=>({videoConstraints:b}),ovalAssociatedParams:e=>{},errorState:e=>{},livenessStreamProvider:e=>{},responseStreamActorRef:e=>{},shouldDisconnect:!1,faceMatchStateBeforeStart:e=>{},isFaceFarEnoughBeforeRecording:e=>{},isRecordingStopped:!1})},guards:{shouldTimeoutOnFailedAttempts:e=>e.failedAttempts>=e.maxFailedAttempts,hasFaceMatchedInOvalWithMinTime:e=>{const{faceMatchState:t,initialFaceMatchTime:a}=e.faceMatchAssociatedParams,o=Date.now()-a;return t===E.MATCHED&&o>=500},hasFaceMatchedInOval:e=>e.faceMatchAssociatedParams.faceMatchState===E.MATCHED,hasNotFaceMatchedInOval:e=>e.faceMatchAssociatedParams.faceMatchState!==E.MATCHED,hasSingleFace:e=>e.faceMatchAssociatedParams.faceMatchState===E.FACE_IDENTIFIED,hasSingleFaceBeforeStart:e=>e.faceMatchStateBeforeStart===E.FACE_IDENTIFIED,hasEnoughFaceDistanceBeforeRecording:e=>e.isFaceFarEnoughBeforeRecording,hasNotEnoughFaceDistanceBeforeRecording:e=>!e.isFaceFarEnoughBeforeRecording,hasLivenessCheckSucceeded:(e,t,a)=>a.state.event.data.isLive,hasFreshnessColorShown:e=>e.freshnessColorAssociatedParams.freshnessColorsComplete,hasServerSessionInfo:e=>void 0!==e.serverSessionInformation,hasDOMAndCameraDetails:e=>void 0!==e.videoAssociatedParams.videoEl&&void 0!==e.videoAssociatedParams.canvasEl&&void 0!==e.freshnessColorAssociatedParams.freshnessColorEl,getShouldDisconnect:e=>!!e.shouldDisconnect,hasRecordingStarted:e=>void 0!==e.livenessStreamProvider.videoRecorder.firstChunkTimestamp},services:{checkVirtualCameraAndGetStream(t){return e(this,void 0,void 0,(function*(){const{videoConstraints:e}=t.videoAssociatedParams,a=yield navigator.mediaDevices.getUserMedia({video:e,audio:!1}),o=(yield 
navigator.mediaDevices.enumerateDevices()).filter((e=>"videoinput"===e.kind)).filter((e=>!c(e)));if(!o.length)throw new Error("No real video devices found");const r=a.getTracks().filter((e=>e.getSettings().frameRate>=15));if(r.length<1)throw new Error("No camera found with more than 15 fps");const i=r[0].getSettings().deviceId;let s=a;return o.some((e=>e.deviceId===i))||(s=yield navigator.mediaDevices.getUserMedia({video:Object.assign(Object.assign({},e),{deviceId:{exact:o[0].deviceId}}),audio:!1})),{stream:s}}))},openLivenessStreamConnection(t){return e(this,void 0,void 0,(function*(){const{config:e}=t.componentProps,{credentialProvider:a,endpointOverride:o}=e,r=new D({sessionId:t.componentProps.sessionId,region:t.componentProps.region,stream:t.videoAssociatedParams.videoMediaStream,videoEl:t.videoAssociatedParams.videoEl,credentialProvider:a,endpointOverride:o});return B=r.getResponseStream(),{livenessStreamProvider:r}}))},detectFace(t){return e(this,void 0,void 0,(function*(){const{videoEl:e}=t.videoAssociatedParams,{faceDetector:a}=t.ovalAssociatedParams;try{yield a.modelLoadingPromise}catch(e){console.log({err:e})}return{faceMatchState:yield d(a,e)}}))},detectFaceDistance(t){return e(this,void 0,void 0,(function*(){const{isFaceFarEnoughBeforeRecording:e}=t,{videoEl:a,videoMediaStream:o,isMobile:r}=t.videoAssociatedParams,{faceDetector:i}=t.ovalAssociatedParams,{width:s,height:n}=o.getTracks()[0].getSettings(),c=p({width:s,height:n}),{isDistanceBelowThreshold:d}=yield l({faceDetector:i,videoEl:a,ovalDetails:c,reduceThreshold:e,isMobile:r});return{isFaceFarEnoughBeforeRecording:d}}))},detectFaceDistanceWhileLoading(t){return e(this,void 0,void 0,(function*(){const{isFaceFarEnoughBeforeRecording:e}=t,{videoEl:a,videoMediaStream:o,isMobile:r}=t.videoAssociatedParams,{faceDetector:i}=t.ovalAssociatedParams,{width:s,height:n}=o.getTracks()[0].getSettings(),c=p({width:s,height:n}),{isDistanceBelowThreshold:d,error:v}=yield 
l({faceDetector:i,videoEl:a,ovalDetails:c,reduceThreshold:e,isMobile:r});return{isFaceFarEnoughBeforeRecording:d,error:v}}))},detectInitialFaceAndDrawOval(t){return e(this,void 0,void 0,(function*(){const{serverSessionInformation:e,livenessStreamProvider:a}=t,{videoEl:o,canvasEl:r,isMobile:i}=t.videoAssociatedParams,{faceDetector:s}=t.ovalAssociatedParams;try{yield s.modelLoadingPromise,yield a.videoRecorder.recorderStarted}catch(e){console.log({err:e})}const n=yield s.detectFaces(o);let c,d,l;switch(n.length){case 0:d=E.CANT_IDENTIFY,l=v(o);break;case 1:d=E.FACE_IDENTIFIED,c=n[0];break;default:d=E.TOO_MANY}if(!c)return{faceMatchState:d,illuminationState:l};const{width:f,height:u}=o.getBoundingClientRect();i?(r.width=window.innerWidth,r.height=window.innerHeight):(r.width=f,r.height=u);const S=f/o.videoWidth,p=m({sessionInformation:e,videoWidth:o.width}),F=h(c,p);return c.top=F.top,c.left=F.left,c.height=F.bottom-F.top,c.width=F.right-F.left,g({canvas:r,oval:p,scaleFactor:S,videoEl:o}),{faceMatchState:d,ovalDetails:p,scaleFactor:S,initialFace:c}}))},detectFaceAndMatchOval(t){return e(this,void 0,void 0,(function*(){const{serverSessionInformation:e}=t,{videoEl:a}=t.videoAssociatedParams,{faceDetector:o,ovalDetails:r,initialFace:i}=t.ovalAssociatedParams,s=yield o.detectFaces(a);let n,c,d,l=0;const m=h(i,r),{ovalBoundingBox:g}=f(r),p=u(m,g);switch(s.length){case 0:n=E.CANT_IDENTIFY,d=v(a);break;case 1:{c=s[0];const{faceMatchState:t,faceMatchPercentage:a}=S(c,r,p,e);n=t,l=a;break}default:n=E.TOO_MANY}return{faceMatchState:n,faceMatchPercentage:l,illuminationState:d,detectedFace:c}}))},flashColors(t){return e(this,void 0,void 0,(function*(){const{freshnessColorsComplete:e,freshnessColorDisplay:a}=t.freshnessColorAssociatedParams;if(e)return;return{freshnessColorsComplete:yield a.displayColorTick()}}))},stopVideo(t){return e(this,void 0,void 
0,(function*(){const{challengeId:e,livenessStreamProvider:a}=t,{videoMediaStream:o}=t.videoAssociatedParams,{initialFace:r,ovalDetails:i}=t.ovalAssociatedParams,{startFace:n,endFace:c}=t.faceMatchAssociatedParams,{width:d,height:l}=o.getTracks()[0].getSettings(),v=d-r.left-r.width;yield a.stopVideo();const m={Challenge:{FaceMovementAndLightChallenge:{ChallengeId:e,InitialFace:{InitialFaceDetectedTimestamp:r.timestampMs,BoundingBox:s({deviceHeight:l,deviceWidth:d,height:r.height,width:r.width,top:r.top,left:v})},TargetFace:{FaceDetectedInTargetPositionStartTimestamp:n.timestampMs,FaceDetectedInTargetPositionEndTimestamp:c.timestampMs,BoundingBox:s({deviceHeight:l,deviceWidth:d,height:i.height,width:i.width,top:i.centerY-i.height/2,left:i.centerX-i.width/2})},VideoEndTimestamp:a.videoRecorder.recorderEndTimestamp}}};if(0===a.videoRecorder.getVideoChunkSize())throw new Error("Video chunks not recorded successfully.");a.sendClientInfo(m),yield a.dispatchStopVideoEvent()}))},getLiveness(t){return e(this,void 0,void 0,(function*(){const{onAnalysisComplete:e}=t.componentProps;yield e()}))}}}),j=a=>e(void 0,void 0,void 0,(function*(){var e,o,r,i;try{const d=yield B;try{for(var s,n=!0,c=t(d);!(e=(s=yield c.next()).done);){i=s.value,n=!1;try{const e=i;C(e)?a({type:"SET_SESSION_INFO",data:{sessionInfo:e.ServerSessionInformationEvent.SessionInformation}}):T(e)?a({type:"DISCONNECT_EVENT"}):P(e)?a({type:"SERVER_ERROR",data:{error:Object.assign({},e.ValidationException)}}):O(e)?a({type:"SERVER_ERROR",data:{error:Object.assign({},e.InternalServerException)}}):I(e)?a({type:"SERVER_ERROR",data:{error:Object.assign({},e.ThrottlingException)}}):w(e)&&a({type:"SERVER_ERROR",data:{error:Object.assign({},e.ServiceQuotaExceededException)}})}finally{n=!0}}}catch(e){o={error:e}}finally{try{n||e||!(r=c.return)||(yield r.call(c))}finally{if(o)throw o.error}}}catch(e){let t=e;y(e)&&(t=new Error("Invalid region in FaceLivenessDetector or credentials are scoped to the wrong region.")),t 
instanceof Error&&a({type:"SERVER_ERROR",data:{error:t}})}}));export{k as MIN_FACE_MATCH_TIME,N as livenessMachine};
1
+ import { createMachine, assign, spawn, actions } from 'xstate';
2
+ import { drawStaticOval, getBoundingBox, getColorsSequencesFromSessionInformation, isCameraDeviceVirtual, getFaceMatchState, isFaceDistanceBelowThreshold, estimateIllumination, getOvalDetailsFromSessionInformation, generateBboxFromLandmarks, drawLivenessOvalInCanvas, getOvalBoundingBox, getIntersectionOverUnion, getFaceMatchStateInLivenessOval, getStaticLivenessOvalDetails } from '../utils/liveness.mjs';
3
+ import { FaceMatchState } from '../types/liveness.mjs';
4
+ import { LivenessErrorState } from '../types/error.mjs';
5
+ import { BlazeFaceFaceDetection } from '../utils/blazefaceFaceDetection.mjs';
6
+ import { LivenessStreamProvider } from '../utils/streamProvider.mjs';
7
+ import { FreshnessColorDisplay } from '../utils/freshnessColorDisplay.mjs';
8
+ import { nanoid } from 'nanoid';
9
+ import { isServerSesssionInformationEvent, isDisconnectionEvent, isValidationExceptionEvent, isInternalServerExceptionEvent, isThrottlingExceptionEvent, isServiceQuotaExceededExceptionEvent, isInvalidSignatureRegionException } from '../utils/eventUtils.mjs';
10
+ import { STATIC_VIDEO_CONSTRAINTS } from '../../utils/helpers.mjs';
11
+ import { WS_CLOSURE_CODE } from '../utils/constants.mjs';
12
+
13
+ /* eslint-disable */
14
+ const MIN_FACE_MATCH_TIME = 1000;
15
+ const DEFAULT_FACE_FIT_TIMEOUT = 7000;
16
+ let responseStream;
17
+ const CAMERA_ID_KEY = 'AmplifyLivenessCameraId';
18
+ function getLastSelectedCameraId() {
19
+ return localStorage.getItem(CAMERA_ID_KEY);
20
+ }
21
+ function setLastSelectedCameraId(deviceId) {
22
+ localStorage.setItem(CAMERA_ID_KEY, deviceId);
23
+ }
24
+ const livenessMachine = createMachine({
25
+ id: 'livenessMachine',
26
+ initial: 'cameraCheck',
27
+ predictableActionArguments: true,
28
+ context: {
29
+ challengeId: nanoid(),
30
+ maxFailedAttempts: 0,
31
+ failedAttempts: 0,
32
+ componentProps: undefined,
33
+ serverSessionInformation: undefined,
34
+ videoAssociatedParams: {
35
+ videoConstraints: STATIC_VIDEO_CONSTRAINTS,
36
+ selectableDevices: [],
37
+ },
38
+ ovalAssociatedParams: undefined,
39
+ faceMatchAssociatedParams: {
40
+ illuminationState: undefined,
41
+ faceMatchState: undefined,
42
+ /**
43
+ * faceMatchPercentage is a starting point we set as a baseline
44
+ * for what we want our progress bar to visually start at. This correlates
45
+ * to the formula we use to calculate the faceMatchPercentage
46
+ * in getFaceMatchStateInLivenessOval
47
+ */
48
+ faceMatchPercentage: 25,
49
+ currentDetectedFace: undefined,
50
+ startFace: undefined,
51
+ endFace: undefined,
52
+ initialFaceMatchTime: undefined,
53
+ },
54
+ freshnessColorAssociatedParams: {
55
+ freshnessColorEl: undefined,
56
+ freshnessColors: [],
57
+ freshnessColorsComplete: false,
58
+ freshnessColorDisplay: undefined,
59
+ },
60
+ errorState: undefined,
61
+ livenessStreamProvider: undefined,
62
+ responseStreamActorRef: undefined,
63
+ shouldDisconnect: false,
64
+ faceMatchStateBeforeStart: undefined,
65
+ isFaceFarEnoughBeforeRecording: undefined,
66
+ isRecordingStopped: false,
67
+ },
68
+ on: {
69
+ CANCEL: 'userCancel',
70
+ TIMEOUT: {
71
+ target: 'retryableTimeout',
72
+ actions: 'updateErrorStateForTimeout',
73
+ },
74
+ SET_SESSION_INFO: {
75
+ internal: true,
76
+ actions: 'updateSessionInfo',
77
+ },
78
+ DISCONNECT_EVENT: {
79
+ internal: true,
80
+ actions: 'updateShouldDisconnect',
81
+ },
82
+ SET_DOM_AND_CAMERA_DETAILS: {
83
+ actions: 'setDOMAndCameraDetails',
84
+ },
85
+ UPDATE_DEVICE_AND_STREAM: {
86
+ actions: 'updateDeviceAndStream',
87
+ },
88
+ SERVER_ERROR: {
89
+ target: 'error',
90
+ actions: 'updateErrorStateForServer',
91
+ },
92
+ RUNTIME_ERROR: {
93
+ target: 'error',
94
+ },
95
+ MOBILE_LANDSCAPE_WARNING: {
96
+ target: 'mobileLandscapeWarning',
97
+ actions: 'updateErrorStateForServer',
98
+ },
99
+ },
100
+ states: {
101
+ cameraCheck: {
102
+ entry: ['resetErrorState'],
103
+ invoke: {
104
+ src: 'checkVirtualCameraAndGetStream',
105
+ onDone: {
106
+ target: 'waitForDOMAndCameraDetails',
107
+ actions: ['updateVideoMediaStream'],
108
+ },
109
+ onError: {
110
+ target: 'permissionDenied',
111
+ },
112
+ },
113
+ },
114
+ waitForDOMAndCameraDetails: {
115
+ after: {
116
+ 0: {
117
+ target: 'start',
118
+ cond: 'hasDOMAndCameraDetails',
119
+ },
120
+ 10: { target: 'waitForDOMAndCameraDetails' },
121
+ },
122
+ },
123
+ start: {
124
+ entry: ['drawStaticOval', 'initializeFaceDetector'],
125
+ always: [
126
+ {
127
+ target: 'detectFaceBeforeStart',
128
+ cond: 'shouldSkipStartScreen',
129
+ },
130
+ ],
131
+ on: {
132
+ BEGIN: 'detectFaceBeforeStart',
133
+ },
134
+ },
135
+ detectFaceBeforeStart: {
136
+ invoke: {
137
+ src: 'detectFace',
138
+ onDone: {
139
+ target: 'checkFaceDetectedBeforeStart',
140
+ actions: ['updateFaceMatchBeforeStartDetails'],
141
+ },
142
+ },
143
+ },
144
+ checkFaceDetectedBeforeStart: {
145
+ after: {
146
+ 0: {
147
+ target: 'detectFaceDistanceBeforeRecording',
148
+ cond: 'hasSingleFaceBeforeStart',
149
+ },
150
+ 100: { target: 'detectFaceBeforeStart' },
151
+ },
152
+ },
153
+ detectFaceDistanceBeforeRecording: {
154
+ invoke: {
155
+ src: 'detectFaceDistance',
156
+ onDone: {
157
+ target: 'checkFaceDistanceBeforeRecording',
158
+ actions: ['updateFaceDistanceBeforeRecording'],
159
+ },
160
+ },
161
+ },
162
+ checkFaceDistanceBeforeRecording: {
163
+ after: {
164
+ 0: {
165
+ target: 'initializeLivenessStream',
166
+ cond: 'hasEnoughFaceDistanceBeforeRecording',
167
+ },
168
+ 100: { target: 'detectFaceDistanceBeforeRecording' },
169
+ },
170
+ },
171
+ initializeLivenessStream: {
172
+ invoke: {
173
+ src: 'openLivenessStreamConnection',
174
+ onDone: {
175
+ target: 'notRecording',
176
+ actions: [
177
+ 'updateLivenessStreamProvider',
178
+ 'spawnResponseStreamActor',
179
+ ],
180
+ },
181
+ },
182
+ },
183
+ notRecording: {
184
+ initial: 'waitForSessionInfo',
185
+ states: {
186
+ waitForSessionInfo: {
187
+ after: {
188
+ 0: {
189
+ target: '#livenessMachine.recording',
190
+ cond: 'hasServerSessionInfo',
191
+ },
192
+ 100: { target: 'waitForSessionInfo' },
193
+ },
194
+ },
195
+ },
196
+ },
197
+ recording: {
198
+ entry: ['clearErrorState', 'startRecording'],
199
+ initial: 'ovalDrawing',
200
+ states: {
201
+ ovalDrawing: {
202
+ entry: ['sendTimeoutAfterOvalDrawingDelay'],
203
+ invoke: {
204
+ src: 'detectInitialFaceAndDrawOval',
205
+ onDone: {
206
+ target: 'checkFaceDetected',
207
+ actions: [
208
+ 'updateOvalAndFaceDetailsPostDraw',
209
+ 'sendTimeoutAfterOvalMatchDelay',
210
+ ],
211
+ },
212
+ onError: {
213
+ target: '#livenessMachine.error',
214
+ actions: 'updateErrorStateForRuntime',
215
+ },
216
+ },
217
+ },
218
+ checkFaceDetected: {
219
+ after: {
220
+ 0: {
221
+ target: 'checkRecordingStarted',
222
+ cond: 'hasSingleFace',
223
+ },
224
+ 100: { target: 'ovalDrawing' },
225
+ },
226
+ },
227
+ checkRecordingStarted: {
228
+ after: {
229
+ 0: {
230
+ target: 'ovalMatching',
231
+ cond: 'hasRecordingStarted',
232
+ actions: ['updateRecordingStartTimestampMs'],
233
+ },
234
+ 100: { target: 'checkRecordingStarted' },
235
+ },
236
+ },
237
+ ovalMatching: {
238
+ entry: ['cancelOvalDrawingTimeout'],
239
+ invoke: {
240
+ src: 'detectFaceAndMatchOval',
241
+ onDone: {
242
+ target: 'checkMatch',
243
+ actions: 'updateFaceDetailsPostMatch',
244
+ },
245
+ },
246
+ },
247
+ checkMatch: {
248
+ after: {
249
+ 0: {
250
+ target: 'flashFreshnessColors',
251
+ cond: 'hasFaceMatchedInOvalWithMinTime',
252
+ actions: [
253
+ 'updateEndFaceMatch',
254
+ 'setupFlashFreshnessColors',
255
+ 'cancelOvalMatchTimeout',
256
+ 'cancelOvalDrawingTimeout',
257
+ ],
258
+ },
259
+ 0.1: {
260
+ target: 'ovalMatching',
261
+ cond: 'hasFaceMatchedInOval',
262
+ actions: 'setFaceMatchTimeAndStartFace',
263
+ },
264
+ 1: {
265
+ target: 'ovalMatching',
266
+ cond: 'hasNotFaceMatchedInOval',
267
+ },
268
+ },
269
+ },
270
+ flashFreshnessColors: {
271
+ invoke: {
272
+ src: 'flashColors',
273
+ onDone: [
274
+ {
275
+ target: 'success',
276
+ cond: 'hasFreshnessColorShown',
277
+ },
278
+ {
279
+ target: 'flashFreshnessColors',
280
+ actions: 'updateFreshnessDetails',
281
+ },
282
+ ],
283
+ },
284
+ },
285
+ success: {
286
+ entry: ['stopRecording'],
287
+ type: 'final',
288
+ },
289
+ },
290
+ onDone: 'uploading',
291
+ },
292
+ uploading: {
293
+ initial: 'pending',
294
+ states: {
295
+ pending: {
296
+ entry: ['sendTimeoutAfterWaitingForDisconnect', 'pauseVideoStream'],
297
+ invoke: {
298
+ src: 'stopVideo',
299
+ onDone: 'waitForDisconnectEvent',
300
+ onError: {
301
+ target: '#livenessMachine.error',
302
+ actions: 'updateErrorStateForRuntime',
303
+ },
304
+ },
305
+ },
306
+ waitForDisconnectEvent: {
307
+ after: {
308
+ 0: {
309
+ target: 'getLivenessResult',
310
+ cond: 'getShouldDisconnect',
311
+ },
312
+ 100: { target: 'waitForDisconnectEvent' },
313
+ },
314
+ },
315
+ getLivenessResult: {
316
+ entry: ['cancelWaitForDisconnectTimeout', 'freezeStream'],
317
+ invoke: {
318
+ src: 'getLiveness',
319
+ onError: {
320
+ target: '#livenessMachine.error',
321
+ actions: 'updateErrorStateForServer',
322
+ },
323
+ },
324
+ },
325
+ },
326
+ },
327
+ retryableTimeout: {
328
+ entry: 'updateFailedAttempts',
329
+ always: [
330
+ {
331
+ target: 'timeout',
332
+ cond: 'shouldTimeoutOnFailedAttempts',
333
+ },
334
+ { target: 'notRecording' },
335
+ ],
336
+ },
337
+ permissionDenied: {
338
+ entry: 'callUserPermissionDeniedCallback',
339
+ on: {
340
+ RETRY_CAMERA_CHECK: 'cameraCheck',
341
+ },
342
+ },
343
+ mobileLandscapeWarning: {
344
+ entry: 'callMobileLandscapeWarningCallback',
345
+ always: [{ target: 'error' }],
346
+ },
347
+ timeout: {
348
+ entry: ['cleanUpResources', 'callUserTimeoutCallback', 'freezeStream'],
349
+ },
350
+ error: {
351
+ entry: [
352
+ 'cleanUpResources',
353
+ 'callErrorCallback',
354
+ 'cancelOvalDrawingTimeout',
355
+ 'cancelWaitForDisconnectTimeout',
356
+ 'cancelOvalMatchTimeout',
357
+ 'freezeStream',
358
+ ],
359
+ },
360
+ userCancel: {
361
+ entry: ['cleanUpResources', 'callUserCancelCallback', 'resetContext'],
362
+ always: [{ target: 'cameraCheck' }],
363
+ },
364
+ },
365
+ }, {
366
+ actions: {
367
+ spawnResponseStreamActor: assign({
368
+ responseStreamActorRef: () => spawn(responseStreamActor),
369
+ }),
370
+ updateFailedAttempts: assign({
371
+ failedAttempts: (context) => {
372
+ return context.failedAttempts + 1;
373
+ },
374
+ }),
375
+ updateVideoMediaStream: assign({
376
+ videoAssociatedParams: (context, event) => ({
377
+ ...context.videoAssociatedParams,
378
+ videoMediaStream: event.data?.stream,
379
+ selectedDeviceId: event.data?.selectedDeviceId,
380
+ selectableDevices: event.data?.selectableDevices,
381
+ }),
382
+ }),
383
+ initializeFaceDetector: assign({
384
+ ovalAssociatedParams: (context) => {
385
+ const { componentProps } = context;
386
+ const { faceModelUrl, binaryPath } = componentProps.config;
387
+ const faceDetector = new BlazeFaceFaceDetection(binaryPath, faceModelUrl);
388
+ faceDetector.triggerModelLoading();
389
+ return {
390
+ ...context.ovalAssociatedParams,
391
+ faceDetector,
392
+ };
393
+ },
394
+ }),
395
+ updateLivenessStreamProvider: assign({
396
+ livenessStreamProvider: (context, event) => {
397
+ return event.data?.livenessStreamProvider;
398
+ },
399
+ }),
400
+ setDOMAndCameraDetails: assign({
401
+ videoAssociatedParams: (context, event) => {
402
+ return {
403
+ ...context.videoAssociatedParams,
404
+ videoEl: event.data?.videoEl,
405
+ canvasEl: event.data?.canvasEl,
406
+ isMobile: event.data?.isMobile,
407
+ };
408
+ },
409
+ freshnessColorAssociatedParams: (context, event) => ({
410
+ ...context.freshnessColorAssociatedParams,
411
+ freshnessColorEl: event.data?.freshnessColorEl,
412
+ }),
413
+ }),
414
+ updateDeviceAndStream: assign({
415
+ videoAssociatedParams: (context, event) => {
416
+ setLastSelectedCameraId(event.data?.newDeviceId);
417
+ return {
418
+ ...context.videoAssociatedParams,
419
+ selectedDeviceId: event.data?.newDeviceId,
420
+ videoMediaStream: event.data?.newStream,
421
+ };
422
+ },
423
+ }),
424
+ drawStaticOval: (context) => {
425
+ const { canvasEl, videoEl, videoMediaStream } = context.videoAssociatedParams;
426
+ drawStaticOval(canvasEl, videoEl, videoMediaStream);
427
+ },
428
+ updateRecordingStartTimestampMs: assign({
429
+ videoAssociatedParams: (context) => {
430
+ const { challengeId, videoAssociatedParams, ovalAssociatedParams, livenessStreamProvider, } = context;
431
+ const { recordingStartApiTimestamp, recorderStartTimestamp } = livenessStreamProvider.videoRecorder;
432
+ const { videoMediaStream } = videoAssociatedParams;
433
+ const { initialFace } = ovalAssociatedParams;
434
+ /**
435
+ * This calculation is provided by Science team after doing analysis
436
+ * of unreliable .onstart() (recorderStartTimestamp) timestamp that is
437
+ * returned from mediaRecorder.
438
+ */
439
+ const timestamp = Math.round(0.73 * (recorderStartTimestamp - recordingStartApiTimestamp) +
440
+ recordingStartApiTimestamp);
441
+ // Send client info for initial face position
442
+ const { width, height } = videoMediaStream
443
+ .getTracks()[0]
444
+ .getSettings();
445
+ const flippedInitialFaceLeft = width - initialFace.left - initialFace.width;
446
+ context.livenessStreamProvider.sendClientInfo({
447
+ Challenge: {
448
+ FaceMovementAndLightChallenge: {
449
+ ChallengeId: challengeId,
450
+ VideoStartTimestamp: timestamp,
451
+ InitialFace: {
452
+ InitialFaceDetectedTimestamp: initialFace.timestampMs,
453
+ BoundingBox: getBoundingBox({
454
+ deviceHeight: height,
455
+ deviceWidth: width,
456
+ height: initialFace.height,
457
+ width: initialFace.width,
458
+ top: initialFace.top,
459
+ left: flippedInitialFaceLeft,
460
+ }),
461
+ },
462
+ },
463
+ },
464
+ });
465
+ return {
466
+ ...context.videoAssociatedParams,
467
+ recordingStartTimestampMs: timestamp,
468
+ };
469
+ },
470
+ }),
471
+ startRecording: assign({
472
+ videoAssociatedParams: (context) => {
473
+ if (!context.serverSessionInformation) {
474
+ throw new Error('Session information was not received from response stream');
475
+ }
476
+ if (context.livenessStreamProvider.videoRecorder &&
477
+ context.livenessStreamProvider.videoRecorder.getState() !==
478
+ 'recording') {
479
+ context.livenessStreamProvider.startRecordingLivenessVideo();
480
+ }
481
+ return {
482
+ ...context.videoAssociatedParams,
483
+ };
484
+ },
485
+ }),
486
+ stopRecording: (context) => { },
487
+ updateFaceMatchBeforeStartDetails: assign({
488
+ faceMatchStateBeforeStart: (_, event) => {
489
+ return event.data.faceMatchState;
490
+ },
491
+ }),
492
+ updateFaceDistanceBeforeRecording: assign({
493
+ isFaceFarEnoughBeforeRecording: (_, event) => {
494
+ return event.data.isFaceFarEnoughBeforeRecording;
495
+ },
496
+ }),
497
+ updateFaceDistanceWhileLoading: assign({
498
+ isFaceFarEnoughBeforeRecording: (_, event) => {
499
+ return event.data.isFaceFarEnoughBeforeRecording;
500
+ },
501
+ errorState: (_, event) => {
502
+ return event.data?.error;
503
+ },
504
+ }),
505
+ updateOvalAndFaceDetailsPostDraw: assign({
506
+ ovalAssociatedParams: (context, event) => ({
507
+ ...context.ovalAssociatedParams,
508
+ initialFace: event.data.initialFace,
509
+ ovalDetails: event.data.ovalDetails,
510
+ scaleFactor: event.data.scaleFactor,
511
+ }),
512
+ faceMatchAssociatedParams: (context, event) => ({
513
+ ...context.faceMatchAssociatedParams,
514
+ faceMatchState: event.data.faceMatchState,
515
+ illuminationState: event.data.illuminationState,
516
+ }),
517
+ }),
518
+ updateFaceDetailsPostMatch: assign({
519
+ faceMatchAssociatedParams: (context, event) => ({
520
+ ...context.faceMatchAssociatedParams,
521
+ faceMatchState: event.data.faceMatchState,
522
+ faceMatchPercentage: event.data.faceMatchPercentage,
523
+ illuminationState: event.data.illuminationState,
524
+ currentDetectedFace: event.data.detectedFace,
525
+ }),
526
+ }),
527
+ updateEndFaceMatch: assign({
528
+ faceMatchAssociatedParams: (context) => ({
529
+ ...context.faceMatchAssociatedParams,
530
+ endFace: context.faceMatchAssociatedParams.currentDetectedFace,
531
+ }),
532
+ }),
533
+ setFaceMatchTimeAndStartFace: assign({
534
+ faceMatchAssociatedParams: (context) => {
535
+ return {
536
+ ...context.faceMatchAssociatedParams,
537
+ startFace: context.faceMatchAssociatedParams.startFace === undefined
538
+ ? context.faceMatchAssociatedParams.currentDetectedFace
539
+ : context.faceMatchAssociatedParams.startFace,
540
+ initialFaceMatchTime: context.faceMatchAssociatedParams.initialFaceMatchTime ===
541
+ undefined
542
+ ? Date.now()
543
+ : context.faceMatchAssociatedParams.initialFaceMatchTime,
544
+ };
545
+ },
546
+ }),
547
+ resetErrorState: assign({
548
+ errorState: (_) => undefined,
549
+ }),
550
+ updateErrorStateForTimeout: assign({
551
+ errorState: (_, event) => {
552
+ return event.data?.errorState || LivenessErrorState.TIMEOUT;
553
+ },
554
+ }),
555
+ updateErrorStateForRuntime: assign({
556
+ errorState: (_, event) => {
557
+ return event.data?.errorState || LivenessErrorState.RUNTIME_ERROR;
558
+ },
559
+ }),
560
+ updateErrorStateForServer: assign({
561
+ errorState: (_) => LivenessErrorState.SERVER_ERROR,
562
+ }),
563
+ clearErrorState: assign({
564
+ errorState: (_) => undefined,
565
+ }),
566
+ updateSessionInfo: assign({
567
+ serverSessionInformation: (context, event) => {
568
+ return event.data.sessionInfo;
569
+ },
570
+ }),
571
+ updateShouldDisconnect: assign({
572
+ shouldDisconnect: (context) => {
573
+ return true;
574
+ },
575
+ }),
576
+ updateFreshnessDetails: assign({
577
+ freshnessColorAssociatedParams: (context, event) => {
578
+ return {
579
+ ...context.freshnessColorAssociatedParams,
580
+ freshnessColorsComplete: event.data.freshnessColorsComplete,
581
+ };
582
+ },
583
+ }),
584
+ setupFlashFreshnessColors: assign({
585
+ freshnessColorAssociatedParams: (context) => {
586
+ const { serverSessionInformation } = context;
587
+ const freshnessColors = getColorsSequencesFromSessionInformation(serverSessionInformation);
588
+ const freshnessColorDisplay = new FreshnessColorDisplay(context, freshnessColors);
589
+ return {
590
+ ...context.freshnessColorAssociatedParams,
591
+ freshnessColorDisplay,
592
+ };
593
+ },
594
+ }),
595
+ // timeouts
596
+ sendTimeoutAfterOvalDrawingDelay: actions.send({ type: 'TIMEOUT' }, {
597
+ delay: 5000,
598
+ id: 'ovalDrawingTimeout',
599
+ }),
600
+ cancelOvalDrawingTimeout: actions.cancel('ovalDrawingTimeout'),
601
+ sendTimeoutAfterOvalMatchDelay: actions.send({ type: 'TIMEOUT' }, {
602
+ delay: (context) => {
603
+ return (context.serverSessionInformation?.Challenge
604
+ ?.FaceMovementAndLightChallenge?.ChallengeConfig
605
+ ?.OvalFitTimeout || DEFAULT_FACE_FIT_TIMEOUT);
606
+ },
607
+ id: 'ovalMatchTimeout',
608
+ }),
609
+ cancelOvalMatchTimeout: actions.cancel('ovalMatchTimeout'),
610
+ sendTimeoutAfterWaitingForDisconnect: actions.send({
611
+ type: 'TIMEOUT',
612
+ data: { errorState: LivenessErrorState.SERVER_ERROR },
613
+ }, {
614
+ delay: 20000,
615
+ id: 'waitForDisconnectTimeout',
616
+ }),
617
+ cancelWaitForDisconnectTimeout: actions.cancel('waitForDisconnectTimeout'),
618
+ sendTimeoutAfterFaceDistanceDelay: actions.send({
619
+ type: 'RUNTIME_ERROR',
620
+ data: new Error('Avoid moving closer during countdown and ensure only one face is in front of camera.'),
621
+ }, {
622
+ delay: 0,
623
+ id: 'faceDistanceTimeout',
624
+ }),
625
+ cancelFaceDistanceTimeout: actions.cancel('faceDistanceTimeout'),
626
+ // callbacks
627
+ callUserPermissionDeniedCallback: assign({
628
+ errorState: (context, event) => {
629
+ let errorState;
630
+ if (event.data.message.includes('15 fps')) {
631
+ errorState = LivenessErrorState.CAMERA_FRAMERATE_ERROR;
632
+ }
633
+ else {
634
+ errorState = LivenessErrorState.CAMERA_ACCESS_ERROR;
635
+ }
636
+ const errorMessage = event.data.message || event.data.Message;
637
+ const error = new Error(errorMessage);
638
+ const livenessError = {
639
+ state: errorState,
640
+ error: error,
641
+ };
642
+ context.componentProps.onError?.(livenessError);
643
+ return errorState;
644
+ },
645
+ }),
646
+ callMobileLandscapeWarningCallback: assign({
647
+ errorState: (context) => {
648
+ return LivenessErrorState.MOBILE_LANDSCAPE_ERROR;
649
+ },
650
+ }),
651
+ callUserCancelCallback: async (context) => {
652
+ context.componentProps.onUserCancel?.();
653
+ },
654
+ callUserTimeoutCallback: async (context) => {
655
+ const error = new Error('Client Timeout');
656
+ error.name = context.errorState;
657
+ const livenessError = {
658
+ state: context.errorState,
659
+ error: error,
660
+ };
661
+ context.componentProps.onError?.(livenessError);
662
+ },
663
+ callErrorCallback: async (context, event) => {
664
+ const livenessError = {
665
+ state: context.errorState,
666
+ error: event.data?.error || event.data,
667
+ };
668
+ context.componentProps.onError?.(livenessError);
669
+ },
670
+ cleanUpResources: async (context) => {
671
+ const { freshnessColorEl } = context.freshnessColorAssociatedParams;
672
+ if (freshnessColorEl) {
673
+ freshnessColorEl.style.display = 'none';
674
+ }
675
+ let closureCode = WS_CLOSURE_CODE.DEFAULT_ERROR_CODE;
676
+ if (context.errorState === LivenessErrorState.TIMEOUT) {
677
+ closureCode = WS_CLOSURE_CODE.FACE_FIT_TIMEOUT;
678
+ }
679
+ else if (context.errorState === LivenessErrorState.RUNTIME_ERROR) {
680
+ closureCode = WS_CLOSURE_CODE.RUNTIME_ERROR;
681
+ }
682
+ else if (context.errorState === LivenessErrorState.FACE_DISTANCE_ERROR ||
683
+ context.errorState === LivenessErrorState.MULTIPLE_FACES_ERROR) {
684
+ closureCode = WS_CLOSURE_CODE.USER_ERROR_DURING_CONNECTION;
685
+ }
686
+ else if (context.errorState === undefined) {
687
+ closureCode = WS_CLOSURE_CODE.USER_CANCEL;
688
+ }
689
+ await context.livenessStreamProvider?.endStreamWithCode(closureCode);
690
+ },
691
+ freezeStream: async (context) => {
692
+ const { videoMediaStream, videoEl } = context.videoAssociatedParams;
693
+ context.isRecordingStopped = true;
694
+ videoEl?.pause();
695
+ videoMediaStream?.getTracks().forEach(function (track) {
696
+ track.stop();
697
+ });
698
+ },
699
+ pauseVideoStream: async (context) => {
700
+ const { videoEl } = context.videoAssociatedParams;
701
+ context.isRecordingStopped = true;
702
+ videoEl.pause();
703
+ },
704
+ resetContext: assign({
705
+ challengeId: nanoid(),
706
+ maxFailedAttempts: 0,
707
+ failedAttempts: 0,
708
+ componentProps: (context) => context.componentProps,
709
+ serverSessionInformation: (_) => undefined,
710
+ videoAssociatedParams: (_) => {
711
+ return {
712
+ videoConstraints: STATIC_VIDEO_CONSTRAINTS,
713
+ };
714
+ },
715
+ ovalAssociatedParams: (_) => undefined,
716
+ errorState: (_) => undefined,
717
+ livenessStreamProvider: (_) => undefined,
718
+ responseStreamActorRef: (_) => undefined,
719
+ shouldDisconnect: false,
720
+ faceMatchStateBeforeStart: (_) => undefined,
721
+ isFaceFarEnoughBeforeRecording: (_) => undefined,
722
+ isRecordingStopped: false,
723
+ }),
724
+ },
725
// Guard predicates referenced by `cond` throughout the state chart above.
guards: {
    // True once the user has exhausted the allowed retry budget.
    shouldTimeoutOnFailedAttempts: (context) => context.failedAttempts >= context.maxFailedAttempts,
    // Face currently matches the oval AND has been matched for at least
    // MIN_FACE_MATCH_TIME ms since the first match was recorded.
    hasFaceMatchedInOvalWithMinTime: (context) => {
        const { faceMatchState, initialFaceMatchTime } = context.faceMatchAssociatedParams;
        const timeSinceInitialFaceMatch = Date.now() - initialFaceMatchTime;
        const hasMatched = faceMatchState === FaceMatchState.MATCHED &&
            timeSinceInitialFaceMatch >= MIN_FACE_MATCH_TIME;
        return hasMatched;
    },
    // Face currently matches the oval (no minimum-hold-time requirement).
    hasFaceMatchedInOval: (context) => {
        return (context.faceMatchAssociatedParams.faceMatchState ===
            FaceMatchState.MATCHED);
    },
    hasNotFaceMatchedInOval: (context) => {
        return (context.faceMatchAssociatedParams.faceMatchState !==
            FaceMatchState.MATCHED);
    },
    // Exactly one face was detected during the recording flow.
    hasSingleFace: (context) => {
        return (context.faceMatchAssociatedParams.faceMatchState ===
            FaceMatchState.FACE_IDENTIFIED);
    },
    // Exactly one face was detected during the pre-recording check.
    hasSingleFaceBeforeStart: (context) => {
        return (context.faceMatchStateBeforeStart === FaceMatchState.FACE_IDENTIFIED);
    },
    hasEnoughFaceDistanceBeforeRecording: (context) => {
        return context.isFaceFarEnoughBeforeRecording;
    },
    hasNotEnoughFaceDistanceBeforeRecording: (context) => {
        return !context.isFaceFarEnoughBeforeRecording;
    },
    // Reads the invoked service's result off the transition meta; true when
    // the analysis reported the session as live.
    hasLivenessCheckSucceeded: (_, __, meta) => meta.state.event.data.isLive,
    // The freshness color flashing sequence has fully completed.
    hasFreshnessColorShown: (context) => context.freshnessColorAssociatedParams.freshnessColorsComplete,
    hasServerSessionInfo: (context) => {
        return context.serverSessionInformation !== undefined;
    },
    // Video, canvas, and freshness overlay elements have all been registered.
    hasDOMAndCameraDetails: (context) => {
        return (context.videoAssociatedParams.videoEl !== undefined &&
            context.videoAssociatedParams.canvasEl !== undefined &&
            context.freshnessColorAssociatedParams.freshnessColorEl !== undefined);
    },
    getShouldDisconnect: (context) => {
        return !!context.shouldDisconnect;
    },
    // The media recorder has produced its first data chunk.
    hasRecordingStarted: (context) => {
        return (context.livenessStreamProvider.videoRecorder.firstChunkTimestamp !==
            undefined);
    },
    shouldSkipStartScreen: (context) => {
        return !!context.componentProps?.disableStartScreen;
    },
},
776
+ services: {
777
+ async checkVirtualCameraAndGetStream(context) {
778
+ const { videoConstraints } = context.videoAssociatedParams;
779
+ // Get initial stream to enumerate devices with non-empty labels
780
+ const existingDeviceId = getLastSelectedCameraId();
781
+ const initialStream = await navigator.mediaDevices.getUserMedia({
782
+ video: {
783
+ ...videoConstraints,
784
+ ...(existingDeviceId ? { deviceId: existingDeviceId } : {}),
785
+ },
786
+ audio: false,
787
+ });
788
+ const devices = await navigator.mediaDevices.enumerateDevices();
789
+ const realVideoDevices = devices
790
+ .filter((device) => device.kind === 'videoinput')
791
+ .filter((device) => !isCameraDeviceVirtual(device));
792
+ if (!realVideoDevices.length) {
793
+ throw new Error('No real video devices found');
794
+ }
795
+ // Ensure that at least one of the cameras is capable of at least 15 fps
796
+ const tracksWithMoreThan15Fps = initialStream
797
+ .getTracks()
798
+ .filter((track) => {
799
+ const settings = track.getSettings();
800
+ return settings.frameRate >= 15;
801
+ });
802
+ if (tracksWithMoreThan15Fps.length < 1) {
803
+ throw new Error('No camera found with more than 15 fps');
804
+ }
805
+ // If the initial stream is of real camera, use it otherwise use the first real camera
806
+ const initialStreamDeviceId = tracksWithMoreThan15Fps[0].getSettings().deviceId;
807
+ const isInitialStreamFromRealDevice = realVideoDevices.some((device) => device.deviceId === initialStreamDeviceId);
808
+ let deviceId = initialStreamDeviceId;
809
+ let realVideoDeviceStream = initialStream;
810
+ if (!isInitialStreamFromRealDevice) {
811
+ deviceId = realVideoDevices[0].deviceId;
812
+ realVideoDeviceStream = await navigator.mediaDevices.getUserMedia({
813
+ video: {
814
+ ...videoConstraints,
815
+ deviceId: { exact: realVideoDevices[0].deviceId },
816
+ },
817
+ audio: false,
818
+ });
819
+ }
820
+ setLastSelectedCameraId(deviceId);
821
+ return {
822
+ stream: realVideoDeviceStream,
823
+ selectedDeviceId: initialStreamDeviceId,
824
+ selectableDevices: realVideoDevices,
825
+ };
826
+ },
827
/**
 * Invoked service: creates the streaming-liveness provider for this session
 * and stashes its websocket response stream in the module-level
 * `responseStream` variable, which `responseStreamActor` later awaits.
 * Returns { livenessStreamProvider } for the machine to store in context.
 */
async openLivenessStreamConnection(context) {
    const { config } = context.componentProps;
    const { credentialProvider, endpointOverride } = config;
    const livenessStreamProvider = new LivenessStreamProvider({
        sessionId: context.componentProps.sessionId,
        region: context.componentProps.region,
        stream: context.videoAssociatedParams.videoMediaStream,
        videoEl: context.videoAssociatedParams.videoEl,
        credentialProvider: credentialProvider,
        endpointOverride: endpointOverride,
    });
    // NOTE(review): module-level side effect — this must run before the
    // response-stream actor dereferences `responseStream`.
    responseStream = livenessStreamProvider.getResponseStream();
    return { livenessStreamProvider };
},
841
+ async detectFace(context) {
842
+ const { videoEl } = context.videoAssociatedParams;
843
+ const { faceDetector } = context.ovalAssociatedParams;
844
+ // initialize models
845
+ try {
846
+ await faceDetector.modelLoadingPromise;
847
+ }
848
+ catch (err) {
849
+ console.log({ err });
850
+ }
851
+ // detect face
852
+ const faceMatchState = await getFaceMatchState(faceDetector, videoEl);
853
+ return { faceMatchState };
854
+ },
855
+ async detectFaceDistance(context) {
856
+ const { isFaceFarEnoughBeforeRecording: faceDistanceCheckBeforeRecording, } = context;
857
+ const { videoEl, videoMediaStream, isMobile } = context.videoAssociatedParams;
858
+ const { faceDetector } = context.ovalAssociatedParams;
859
+ const { width, height } = videoMediaStream
860
+ .getTracks()[0]
861
+ .getSettings();
862
+ const ovalDetails = getStaticLivenessOvalDetails({
863
+ width: width,
864
+ height: height,
865
+ });
866
+ const { isDistanceBelowThreshold: isFaceFarEnoughBeforeRecording } = await isFaceDistanceBelowThreshold({
867
+ faceDetector: faceDetector,
868
+ videoEl: videoEl,
869
+ ovalDetails,
870
+ reduceThreshold: faceDistanceCheckBeforeRecording,
871
+ isMobile,
872
+ });
873
+ return { isFaceFarEnoughBeforeRecording };
874
+ },
875
+ async detectFaceDistanceWhileLoading(context) {
876
+ const { isFaceFarEnoughBeforeRecording: faceDistanceCheckBeforeRecording, } = context;
877
+ const { videoEl, videoMediaStream, isMobile } = context.videoAssociatedParams;
878
+ const { faceDetector } = context.ovalAssociatedParams;
879
+ const { width, height } = videoMediaStream
880
+ .getTracks()[0]
881
+ .getSettings();
882
+ const ovalDetails = getStaticLivenessOvalDetails({
883
+ width: width,
884
+ height: height,
885
+ });
886
+ const { isDistanceBelowThreshold: isFaceFarEnoughBeforeRecording, error, } = await isFaceDistanceBelowThreshold({
887
+ faceDetector: faceDetector,
888
+ videoEl: videoEl,
889
+ ovalDetails,
890
+ reduceThreshold: faceDistanceCheckBeforeRecording,
891
+ isMobile,
892
+ });
893
+ return { isFaceFarEnoughBeforeRecording, error };
894
+ },
895
/**
 * Invoked service: waits for the face model and the media recorder, detects
 * the user's face once, sizes the canvas, derives the oval from the server
 * session information, renormalizes the detected face's bounding box, and
 * draws the oval. Returns only { faceMatchState, illuminationState } unless
 * exactly one face was detected; otherwise also returns ovalDetails,
 * scaleFactor, and the (mutated) initialFace.
 */
async detectInitialFaceAndDrawOval(context) {
    const { serverSessionInformation, livenessStreamProvider } = context;
    const { videoEl, canvasEl, isMobile } = context.videoAssociatedParams;
    const { faceDetector } = context.ovalAssociatedParams;
    // initialize models — failures are logged and detection proceeds
    // best-effort rather than aborting
    try {
        await faceDetector.modelLoadingPromise;
        await livenessStreamProvider.videoRecorder.recorderStarted;
    }
    catch (err) {
        console.log({ err });
    }
    // detect face
    const detectedFaces = await faceDetector.detectFaces(videoEl);
    let initialFace;
    let faceMatchState;
    let illuminationState;
    switch (detectedFaces.length) {
        case 0: {
            // no face detected; estimate lighting to give user feedback
            faceMatchState = FaceMatchState.CANT_IDENTIFY;
            illuminationState = estimateIllumination(videoEl);
            break;
        }
        case 1: {
            faceMatchState = FaceMatchState.FACE_IDENTIFIED;
            initialFace = detectedFaces[0];
            break;
        }
        default: {
            //more than one face detected ;
            faceMatchState = FaceMatchState.TOO_MANY;
            break;
        }
    }
    if (!initialFace) {
        return { faceMatchState, illuminationState };
    }
    // Get width/height of video element so we can compute scaleFactor
    // and set canvas width/height.
    const { width: videoScaledWidth, height: videoScaledHeight } = videoEl.getBoundingClientRect();
    if (isMobile) {
        canvasEl.width = window.innerWidth;
        canvasEl.height = window.innerHeight;
    }
    else {
        canvasEl.width = videoScaledWidth;
        canvasEl.height = videoScaledHeight;
    }
    // Compute scaleFactor which is how much our video element is scaled
    // vs the intrinsic video resolution
    const scaleFactor = videoScaledWidth / videoEl.videoWidth;
    // generate oval details from initialFace and video dimensions
    // NOTE(review): this passes videoEl.width (element attribute) while
    // scaleFactor above uses videoEl.videoWidth (intrinsic resolution) —
    // confirm the mismatch is intentional.
    const ovalDetails = getOvalDetailsFromSessionInformation({
        sessionInformation: serverSessionInformation,
        videoWidth: videoEl.width,
    });
    // renormalize initial face (mutates initialFace in place)
    const renormalizedFace = generateBboxFromLandmarks(initialFace, ovalDetails);
    initialFace.top = renormalizedFace.top;
    initialFace.left = renormalizedFace.left;
    initialFace.height = renormalizedFace.bottom - renormalizedFace.top;
    initialFace.width = renormalizedFace.right - renormalizedFace.left;
    // Draw oval in canvas using ovalDetails and scaleFactor
    drawLivenessOvalInCanvas({
        canvas: canvasEl,
        oval: ovalDetails,
        scaleFactor,
        videoEl: videoEl,
    });
    return {
        faceMatchState,
        ovalDetails,
        scaleFactor,
        initialFace,
    };
},
972
/**
 * Invoked service: detects faces in the current frame and, when exactly one
 * is found, scores how well it matches the target oval. Returns
 * { faceMatchState, faceMatchPercentage, illuminationState, detectedFace };
 * faceMatchPercentage is 0 and detectedFace is undefined unless exactly one
 * face was found.
 */
async detectFaceAndMatchOval(context) {
    const { serverSessionInformation } = context;
    const { videoEl } = context.videoAssociatedParams;
    const { faceDetector, ovalDetails, initialFace } = context.ovalAssociatedParams;
    // detect face
    const detectedFaces = await faceDetector.detectFaces(videoEl);
    let faceMatchState;
    let faceMatchPercentage = 0;
    let detectedFace;
    let illuminationState;
    // Precompute the initial face's overlap with the oval; used as a
    // baseline input to the match scoring below.
    const initialFaceBoundingBox = generateBboxFromLandmarks(initialFace, ovalDetails);
    const { ovalBoundingBox } = getOvalBoundingBox(ovalDetails);
    const initialFaceIntersection = getIntersectionOverUnion(initialFaceBoundingBox, ovalBoundingBox);
    switch (detectedFaces.length) {
        case 0: {
            //no face detected; estimate lighting for user feedback
            faceMatchState = FaceMatchState.CANT_IDENTIFY;
            illuminationState = estimateIllumination(videoEl);
            break;
        }
        case 1: {
            //exactly one face detected, match face with oval;
            detectedFace = detectedFaces[0];
            const { faceMatchState: faceMatchStateInLivenessOval, faceMatchPercentage: faceMatchPercentageInLivenessOval, } = getFaceMatchStateInLivenessOval(detectedFace, ovalDetails, initialFaceIntersection, serverSessionInformation);
            faceMatchState = faceMatchStateInLivenessOval;
            faceMatchPercentage = faceMatchPercentageInLivenessOval;
            break;
        }
        default: {
            //more than one face detected ;
            faceMatchState = FaceMatchState.TOO_MANY;
            break;
        }
    }
    return {
        faceMatchState,
        faceMatchPercentage,
        illuminationState,
        detectedFace,
    };
},
1013
+ async flashColors(context) {
1014
+ const { freshnessColorsComplete, freshnessColorDisplay } = context.freshnessColorAssociatedParams;
1015
+ if (freshnessColorsComplete) {
1016
+ return;
1017
+ }
1018
+ const completed = await freshnessColorDisplay.displayColorTick();
1019
+ return { freshnessColorsComplete: completed };
1020
+ },
1021
/**
 * Invoked service: stops the recorder, then reports the final challenge
 * payload (initial face, target oval face, and video end timestamp) to the
 * liveness stream and dispatches the stop-video event. Throws if no video
 * chunks were recorded.
 */
async stopVideo(context) {
    const { challengeId, livenessStreamProvider } = context;
    const { videoMediaStream } = context.videoAssociatedParams;
    const { initialFace, ovalDetails } = context.ovalAssociatedParams;
    const { startFace, endFace } = context.faceMatchAssociatedParams;
    const { width, height } = videoMediaStream
        .getTracks()[0]
        .getSettings();
    // The camera preview is mirrored, so mirror the face's left edge back
    // into unmirrored video coordinates before reporting.
    const flippedInitialFaceLeft = width - initialFace.left - initialFace.width;
    await livenessStreamProvider.stopVideo();
    const livenessActionDocument = {
        Challenge: {
            FaceMovementAndLightChallenge: {
                ChallengeId: challengeId,
                InitialFace: {
                    InitialFaceDetectedTimestamp: initialFace.timestampMs,
                    BoundingBox: getBoundingBox({
                        deviceHeight: height,
                        deviceWidth: width,
                        height: initialFace.height,
                        width: initialFace.width,
                        top: initialFace.top,
                        left: flippedInitialFaceLeft,
                    }),
                },
                // The target face is reported as the oval's own bounding
                // box plus the first/last timestamps the face sat inside it.
                TargetFace: {
                    FaceDetectedInTargetPositionStartTimestamp: startFace.timestampMs,
                    FaceDetectedInTargetPositionEndTimestamp: endFace.timestampMs,
                    BoundingBox: getBoundingBox({
                        deviceHeight: height,
                        deviceWidth: width,
                        height: ovalDetails.height,
                        width: ovalDetails.width,
                        top: ovalDetails.centerY - ovalDetails.height / 2,
                        left: ovalDetails.centerX - ovalDetails.width / 2,
                    }),
                },
                VideoEndTimestamp: livenessStreamProvider.videoRecorder.recorderEndTimestamp,
            },
        },
    };
    // An empty recording means nothing was streamed — surface as an error.
    if (livenessStreamProvider.videoRecorder.getVideoChunkSize() === 0) {
        throw new Error('Video chunks not recorded successfully.');
    }
    livenessStreamProvider.sendClientInfo(livenessActionDocument);
    await livenessStreamProvider.dispatchStopVideoEvent();
},
1068
+ async getLiveness(context) {
1069
+ const { onAnalysisComplete } = context.componentProps;
1070
+ // Get liveness result
1071
+ await onAnalysisComplete();
1072
+ },
1073
+ },
1074
+ });
1075
/**
 * Callback actor: pumps the websocket response stream and translates each
 * server event into a state-machine event. Session info and disconnect
 * events get dedicated machine events; every recognized server exception is
 * forwarded as SERVER_ERROR. Stream failures are surfaced as SERVER_ERROR
 * too, with a clearer message for region-mismatch signature failures.
 */
const responseStreamActor = async (callback) => {
    // Each [predicate, payloadKey] pair routes one server exception type to
    // a SERVER_ERROR machine event carrying a copy of the exception payload.
    const exceptionRoutes = [
        [isValidationExceptionEvent, 'ValidationException'],
        [isInternalServerExceptionEvent, 'InternalServerException'],
        [isThrottlingExceptionEvent, 'ThrottlingException'],
        [isServiceQuotaExceededExceptionEvent, 'ServiceQuotaExceededException'],
    ];
    try {
        const stream = await responseStream;
        for await (const event of stream) {
            if (isServerSesssionInformationEvent(event)) {
                callback({
                    type: 'SET_SESSION_INFO',
                    data: {
                        sessionInfo: event.ServerSessionInformationEvent.SessionInformation,
                    },
                });
                continue;
            }
            if (isDisconnectionEvent(event)) {
                callback({ type: 'DISCONNECT_EVENT' });
                continue;
            }
            const route = exceptionRoutes.find(([matches]) => matches(event));
            if (route) {
                const [, payloadKey] = route;
                callback({
                    type: 'SERVER_ERROR',
                    data: { error: { ...event[payloadKey] } },
                });
            }
        }
    }
    catch (error) {
        let returnedError = error;
        if (isInvalidSignatureRegionException(error)) {
            returnedError = new Error('Invalid region in FaceLivenessDetector or credentials are scoped to the wrong region.');
        }
        // Only forward genuine Error instances to the machine.
        if (returnedError instanceof Error) {
            callback({
                type: 'SERVER_ERROR',
                data: { error: returnedError },
            });
        }
    }
};
1129
+
1130
+ export { MIN_FACE_MATCH_TIME, livenessMachine };