@aws-amplify/ui-react-liveness 3.4.7 → 3.5.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (23)
  1. package/dist/esm/components/FaceLivenessDetector/FaceLivenessDetectorCore.mjs +1 -1
  2. package/dist/esm/components/FaceLivenessDetector/LivenessCheck/LivenessCameraModule.mjs +1 -1
  3. package/dist/esm/components/FaceLivenessDetector/LivenessCheck/LivenessCheck.mjs +1 -1
  4. package/dist/esm/components/FaceLivenessDetector/service/machine/machine.mjs +89 -13
  5. package/dist/esm/components/FaceLivenessDetector/service/types/error.mjs +1 -0
  6. package/dist/esm/components/FaceLivenessDetector/service/utils/blazefaceFaceDetection.mjs +2 -2
  7. package/dist/esm/components/FaceLivenessDetector/shared/DefaultStartScreenComponents.mjs +1 -1
  8. package/dist/esm/components/FaceLivenessDetector/shared/FaceLivenessErrorModal.mjs +1 -1
  9. package/dist/esm/components/FaceLivenessDetector/shared/Hint.mjs +1 -1
  10. package/dist/esm/index.mjs +1 -1
  11. package/dist/esm/node_modules/@tensorflow-models/face-detection/dist/face-detection.esm.mjs +23 -0
  12. package/dist/esm/version.mjs +1 -1
  13. package/dist/index.js +114 -15
  14. package/dist/styles.css +17 -0
  15. package/dist/types/components/FaceLivenessDetector/service/machine/machine.d.ts +2 -1
  16. package/dist/types/components/FaceLivenessDetector/service/types/error.d.ts +1 -0
  17. package/dist/types/components/FaceLivenessDetector/service/types/liveness.d.ts +15 -3
  18. package/dist/types/components/FaceLivenessDetector/service/types/machine.d.ts +1 -1
  19. package/dist/types/components/FaceLivenessDetector/service/utils/liveness.d.ts +1 -0
  20. package/dist/types/components/FaceLivenessDetector/shared/Hint.d.ts +1 -1
  21. package/dist/types/version.d.ts +1 -1
  22. package/package.json +13 -11
  23. package/patches/@tensorflow-models+face-detection+1.0.3.patch +9 -0
package/dist/esm/components/FaceLivenessDetector/FaceLivenessDetectorCore.mjs CHANGED
@@ -3,7 +3,7 @@ import { useInterpret } from '@xstate/react';
  import { livenessMachine } from './service/machine/machine.mjs';
  import './service/types/liveness.mjs';
  import '@tensorflow/tfjs-core';
- import '@tensorflow-models/face-detection';
+ import '../../node_modules/@tensorflow-models/face-detection/dist/face-detection.esm.mjs';
  import '@tensorflow/tfjs-backend-wasm';
  import '@tensorflow/tfjs-backend-cpu';
  import '@aws-amplify/core/internals/utils';
package/dist/esm/components/FaceLivenessDetector/LivenessCheck/LivenessCameraModule.mjs CHANGED
@@ -5,7 +5,7 @@ import { useColorMode } from '@aws-amplify/ui-react/internal';
  import '../service/machine/machine.mjs';
  import { FaceMatchState } from '../service/types/liveness.mjs';
  import '@tensorflow/tfjs-core';
- import '@tensorflow-models/face-detection';
+ import '../../../node_modules/@tensorflow-models/face-detection/dist/face-detection.esm.mjs';
  import '@tensorflow/tfjs-backend-wasm';
  import '@tensorflow/tfjs-backend-cpu';
  import '@aws-amplify/core/internals/utils';
package/dist/esm/components/FaceLivenessDetector/LivenessCheck/LivenessCheck.mjs CHANGED
@@ -4,7 +4,7 @@ import '../service/machine/machine.mjs';
  import '../service/types/liveness.mjs';
  import { LivenessErrorState } from '../service/types/error.mjs';
  import '@tensorflow/tfjs-core';
- import '@tensorflow-models/face-detection';
+ import '../../../node_modules/@tensorflow-models/face-detection/dist/face-detection.esm.mjs';
  import '@tensorflow/tfjs-backend-wasm';
  import '@tensorflow/tfjs-backend-cpu';
  import '@aws-amplify/core/internals/utils';
package/dist/esm/components/FaceLivenessDetector/service/machine/machine.mjs CHANGED
@@ -19,6 +19,17 @@ import { TelemetryReporter } from '../utils/TelemetryReporter/TelemetryReporter.
  const CAMERA_ID_KEY = 'AmplifyLivenessCameraId';
  const DEFAULT_FACE_FIT_TIMEOUT = 7000;
  let responseStream;
+ // Helper function to get selected device info
+ const getSelectedDeviceInfo = (context) => {
+ const selected = context.videoAssociatedParams?.selectableDevices?.find((d) => d.deviceId === context.videoAssociatedParams?.selectedDeviceId);
+ return selected
+ ? {
+ deviceId: selected.deviceId,
+ groupId: selected.groupId,
+ label: selected.label ?? '',
+ }
+ : undefined;
+ };
  const responseStreamActor = async (callback) => {
  try {
  const stream = await responseStream;
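
The new getSelectedDeviceInfo helper resolves the currently selected camera from the machine context, and it is what gets forwarded to the public callbacks later in this diff. As a reading aid, here is a minimal standalone sketch of the same lookup in TypeScript; SelectedDeviceInfo and pickSelectedDevice are hypothetical names for illustration only (the published type lives in the updated liveness.d.ts, which is not shown in this excerpt).

// Hypothetical alias for the object the helper builds.
interface SelectedDeviceInfo {
  deviceId: string;
  groupId: string;
  label: string; // '' when the browser withholds the device label
}

// Same lookup as getSelectedDeviceInfo, written against plain MediaDeviceInfo.
function pickSelectedDevice(
  devices: MediaDeviceInfo[],
  selectedDeviceId?: string
): SelectedDeviceInfo | undefined {
  const selected = devices.find((d) => d.deviceId === selectedDeviceId);
  return selected
    ? { deviceId: selected.deviceId, groupId: selected.groupId, label: selected.label ?? '' }
    : undefined;
}
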
@@ -79,6 +90,9 @@ const responseStreamActor = async (callback) => {
  }
  }
  };
+ function getLastSelectedCameraId() {
+ return localStorage.getItem(CAMERA_ID_KEY);
+ }
  function setLastSelectedCameraId(deviceId) {
  localStorage.setItem(CAMERA_ID_KEY, deviceId);
  }
@@ -631,6 +645,9 @@ const livenessMachine = createMachine({
  if (event.data.message.includes('15 fps')) {
  errorState = LivenessErrorState.CAMERA_FRAMERATE_ERROR;
  }
+ else if (event.data.message.includes(LivenessErrorState.DEFAULT_CAMERA_NOT_FOUND_ERROR)) {
+ errorState = LivenessErrorState.DEFAULT_CAMERA_NOT_FOUND_ERROR;
+ }
  else {
  errorState = LivenessErrorState.CAMERA_ACCESS_ERROR;
  }
@@ -640,15 +657,26 @@ const livenessMachine = createMachine({
  state: errorState,
  error: error,
  };
- context.componentProps.onError?.(livenessError);
+ context.componentProps.onError?.(livenessError, getSelectedDeviceInfo(context));
  return errorState;
  },
  }),
  callMobileLandscapeWarningCallback: assign({
  errorState: () => LivenessErrorState.MOBILE_LANDSCAPE_ERROR,
  }),
+ getSelectedDeviceInfo: (context) => getSelectedDeviceInfo(context),
  callUserCancelCallback: (context) => {
- context.componentProps.onUserCancel?.();
+ const { onUserCancel } = context.componentProps ?? {};
+ if (!onUserCancel) {
+ return;
+ }
+ try {
+ onUserCancel();
+ }
+ catch (callbackError) {
+ // eslint-disable-next-line no-console
+ console.error('Error in onUserCancel callback:', callbackError);
+ }
  },
  callUserTimeoutCallback: (context) => {
  const error = new Error(context.errorMessage ?? 'Client Timeout');
@@ -657,14 +685,14 @@ const livenessMachine = createMachine({
  state: context.errorState,
  error: error,
  };
- context.componentProps.onError?.(livenessError);
+ context.componentProps.onError?.(livenessError, getSelectedDeviceInfo(context));
  },
  callErrorCallback: (context, event) => {
  const livenessError = {
  state: context.errorState,
  error: event.data?.error || event.data,
  };
- context.componentProps.onError?.(livenessError);
+ context.componentProps.onError?.(livenessError, getSelectedDeviceInfo(context));
  },
  cleanUpResources: (context) => {
  const { freshnessColorEl } = context.freshnessColorAssociatedParams;
@@ -775,10 +803,42 @@ const livenessMachine = createMachine({
  services: {
  async checkVirtualCameraAndGetStream(context) {
  const { videoConstraints } = context.videoAssociatedParams;
- // Get initial stream to enumerate devices with non-empty labels
- const initialStream = await navigator.mediaDevices.getUserMedia({
- video: { ...videoConstraints },
+ const { componentProps } = context;
+ let targetDeviceId;
+ let cameraNotFound = false;
+ if (componentProps?.config?.deviceId) {
+ targetDeviceId = componentProps.config.deviceId;
+ }
+ else {
+ targetDeviceId = getLastSelectedCameraId() ?? undefined;
+ }
+ const initialStream = await navigator.mediaDevices
+ .getUserMedia({
+ video: {
+ ...videoConstraints,
+ ...(targetDeviceId
+ ? { deviceId: { exact: targetDeviceId } }
+ : {}),
+ },
  audio: false,
+ })
+ .catch((error) => {
+ if (error instanceof DOMException &&
+ (error.name === 'NotFoundError' ||
+ error.name === 'OverconstrainedError')) {
+ // Mark camera as not found when a specific target device (either provided via props
+ // or previously selected/saved as default) cannot be accessed.
+ if (targetDeviceId && !cameraNotFound) {
+ cameraNotFound = true;
+ }
+ return navigator.mediaDevices.getUserMedia({
+ video: {
+ ...videoConstraints,
+ },
+ audio: false,
+ });
+ }
+ throw error;
  });
  const devices = await navigator.mediaDevices.enumerateDevices();
  const realVideoDevices = devices
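
Read in isolation, the rewritten service does three things: prefer an explicitly configured deviceId, otherwise fall back to the camera remembered in localStorage, and retry without the exact constraint when that device can no longer be opened. A standalone sketch of the same selection-and-fallback pattern follows, assuming only the standard Media Capture API; the function name and return shape are illustrative, not the package's exports.

// Sketch of the camera-selection fallback used above.
async function getStreamPreferringDevice(
  videoConstraints: MediaTrackConstraints,
  targetDeviceId?: string
): Promise<{ stream: MediaStream; targetWasMissing: boolean }> {
  try {
    const stream = await navigator.mediaDevices.getUserMedia({
      video: {
        ...videoConstraints,
        ...(targetDeviceId ? { deviceId: { exact: targetDeviceId } } : {}),
      },
      audio: false,
    });
    return { stream, targetWasMissing: false };
  } catch (error) {
    // Only fall back when the requested device cannot be matched.
    if (
      error instanceof DOMException &&
      (error.name === 'NotFoundError' || error.name === 'OverconstrainedError')
    ) {
      const stream = await navigator.mediaDevices.getUserMedia({
        video: { ...videoConstraints },
        audio: false,
      });
      return { stream, targetWasMissing: Boolean(targetDeviceId) };
    }
    throw error;
  }
}
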
@@ -792,7 +852,7 @@ const livenessMachine = createMachine({
  .getTracks()
  .filter((track) => {
  const settings = track.getSettings();
- return settings.frameRate >= 15;
+ return (settings.frameRate ?? 0) >= 15;
  });
  if (tracksWithMoreThan15Fps.length < 1) {
  throw new Error('No camera found with more than 15 fps');
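
The frameRate guard is a small correctness fix: MediaTrackSettings.frameRate is optional, and defaulting it to 0 keeps the 15 fps filter well defined when a browser omits the value. An equivalent standalone check, for reference:

// frameRate may be absent from getSettings(); treat a missing value as 0 fps.
const hasUsableFrameRate = (track: MediaStreamTrack): boolean =>
  (track.getSettings().frameRate ?? 0) >= 15;
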
@@ -811,11 +871,16 @@ const livenessMachine = createMachine({
  });
  }
  setLastSelectedCameraId(deviceId);
- return {
+ const result = {
  stream: realVideoDeviceStream,
  selectedDeviceId: initialStreamDeviceId,
  selectableDevices: realVideoDevices,
  };
+ // If a specific camera was requested but not found, trigger a specific error
+ if (cameraNotFound) {
+ throw new Error(LivenessErrorState.DEFAULT_CAMERA_NOT_FOUND_ERROR);
+ }
+ return result;
  },
  // eslint-disable-next-line @typescript-eslint/require-await
  async openLivenessStreamConnection(context) {
@@ -1068,11 +1133,22 @@ const livenessMachine = createMachine({
  livenessStreamProvider.dispatchStreamEvent({ type: 'streamStop' });
  },
  async getLiveness(context) {
- const { onAnalysisComplete } = context.componentProps;
- // Get liveness result
- await onAnalysisComplete();
+ const { onAnalysisComplete } = context.componentProps ?? {};
+ if (!onAnalysisComplete) {
+ return;
+ }
+ try {
+ const deviceInfo = getSelectedDeviceInfo(context);
+ await onAnalysisComplete(deviceInfo);
+ }
+ catch (callbackError) {
+ // eslint-disable-next-line no-console
+ console.error('Error in onAnalysisComplete callback:', callbackError);
+ // Rethrow to allow the state machine to handle the error
+ throw callbackError;
+ }
  },
  },
  });

- export { livenessMachine };
+ export { getSelectedDeviceInfo, livenessMachine };
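
For consumers, the practical effect is that onAnalysisComplete may now receive the selected device info as an argument, and a throwing callback is logged and re-thrown rather than failing silently. A hedged sketch of a handler, assuming the published signature takes the optional device-info parameter shown above (the type name below is illustrative):

// Assumed shape; mirrors the fields built by getSelectedDeviceInfo above.
type DeviceInfo = { deviceId: string; groupId: string; label: string };

const handleAnalysisComplete = async (deviceInfo?: DeviceInfo): Promise<void> => {
  // Fetch the session result from your backend as before; the device info is
  // extra context only (e.g. for logging which camera was used).
  console.log('Liveness ran on camera:', deviceInfo?.label ?? 'unknown');
};
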
package/dist/esm/components/FaceLivenessDetector/service/types/error.mjs CHANGED
@@ -11,6 +11,7 @@ const LivenessErrorState = {
  FACE_DISTANCE_ERROR: 'FACE_DISTANCE_ERROR',
  MOBILE_LANDSCAPE_ERROR: 'MOBILE_LANDSCAPE_ERROR',
  MULTIPLE_FACES_ERROR: 'MULTIPLE_FACES_ERROR',
+ DEFAULT_CAMERA_NOT_FOUND_ERROR: 'DEFAULT_CAMERA_NOT_FOUND_ERROR',
  };

  export { LivenessErrorState };
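
With the new DEFAULT_CAMERA_NOT_FOUND_ERROR state and the device info now passed to onError, an application that pins a camera (or relies on the remembered one) can distinguish "the requested camera is gone" from a generic access failure. A sketch of such a handler, under the assumption that onError receives the optional device info as a second argument as the machine changes above indicate (the local type names are illustrative):

// Assumed shapes for illustration; the published LivenessError type is in error.d.ts.
type CameraInfo = { deviceId: string; groupId: string; label: string };
type LivenessError = { state: string; error: Error };

const handleLivenessError = (err: LivenessError, deviceInfo?: CameraInfo): void => {
  if (err.state === 'DEFAULT_CAMERA_NOT_FOUND_ERROR') {
    // The configured or previously remembered camera could not be opened;
    // the component already fell back to another device before reporting this.
    console.warn('Requested camera unavailable', deviceInfo?.deviceId);
  }
};
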
package/dist/esm/components/FaceLivenessDetector/service/utils/blazefaceFaceDetection.mjs CHANGED
@@ -1,5 +1,5 @@
  import { ready, setBackend } from '@tensorflow/tfjs-core';
- import { createDetector, SupportedModels } from '@tensorflow-models/face-detection';
+ import { createDetector as te, SupportedModels as Q } from '../../../../node_modules/@tensorflow-models/face-detection/dist/face-detection.esm.mjs';
  import { version_wasm, setWasmPaths } from '@tensorflow/tfjs-backend-wasm';
  import '@tensorflow/tfjs-backend-cpu';
  import { jitteredExponentialRetry } from '@aws-amplify/core/internals/utils';
@@ -32,7 +32,7 @@ class BlazeFaceFaceDetection extends FaceDetection {
  }
  try {
  await ready();
- this._model = await createDetector(SupportedModels.MediaPipeFaceDetector, {
+ this._model = await te(Q.MediaPipeFaceDetector, {
  runtime: 'tfjs',
  detectorModelUrl: this.faceModelUrl,
  });
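
Behaviorally the detector setup is unchanged; only the import source moves from the external @tensorflow-models/face-detection package to the ESM copy bundled under dist/esm/node_modules, with the minified local names te and Q standing in for createDetector and SupportedModels. The un-minified equivalent of the new call, for readability (faceModelUrl stands in for the instance's this.faceModelUrl):

import { createDetector, SupportedModels } from '@tensorflow-models/face-detection';

// Same call as `await te(Q.MediaPipeFaceDetector, ...)` above.
async function loadBlazeFaceModel(faceModelUrl?: string) {
  return createDetector(SupportedModels.MediaPipeFaceDetector, {
    runtime: 'tfjs',
    detectorModelUrl: faceModelUrl,
  });
}
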
package/dist/esm/components/FaceLivenessDetector/shared/DefaultStartScreenComponents.mjs CHANGED
@@ -5,7 +5,7 @@ import { CancelButton } from './CancelButton.mjs';
  import '../service/machine/machine.mjs';
  import '../service/types/liveness.mjs';
  import '@tensorflow/tfjs-core';
- import '@tensorflow-models/face-detection';
+ import '../../../node_modules/@tensorflow-models/face-detection/dist/face-detection.esm.mjs';
  import '@tensorflow/tfjs-backend-wasm';
  import '@tensorflow/tfjs-backend-cpu';
  import '@aws-amplify/core/internals/utils';
package/dist/esm/components/FaceLivenessDetector/shared/FaceLivenessErrorModal.mjs CHANGED
@@ -5,7 +5,7 @@ import '../service/machine/machine.mjs';
  import '../service/types/liveness.mjs';
  import { LivenessErrorState } from '../service/types/error.mjs';
  import '@tensorflow/tfjs-core';
- import '@tensorflow-models/face-detection';
+ import '../../../node_modules/@tensorflow-models/face-detection/dist/face-detection.esm.mjs';
  import '@tensorflow/tfjs-backend-wasm';
  import '@tensorflow/tfjs-backend-cpu';
  import '@aws-amplify/core/internals/utils';
package/dist/esm/components/FaceLivenessDetector/shared/Hint.mjs CHANGED
@@ -3,7 +3,7 @@ import { VisuallyHidden, View } from '@aws-amplify/ui-react';
  import '../service/machine/machine.mjs';
  import { FaceMatchState, IlluminationState } from '../service/types/liveness.mjs';
  import '@tensorflow/tfjs-core';
- import '@tensorflow-models/face-detection';
+ import '../../../node_modules/@tensorflow-models/face-detection/dist/face-detection.esm.mjs';
  import '@tensorflow/tfjs-backend-wasm';
  import '@tensorflow/tfjs-backend-cpu';
  import '@aws-amplify/core/internals/utils';
package/dist/esm/index.mjs CHANGED
@@ -3,7 +3,7 @@ export { default as FaceLivenessDetectorCore } from './components/FaceLivenessDe
  import './components/FaceLivenessDetector/service/machine/machine.mjs';
  import './components/FaceLivenessDetector/service/types/liveness.mjs';
  import '@tensorflow/tfjs-core';
- import '@tensorflow-models/face-detection';
+ import './node_modules/@tensorflow-models/face-detection/dist/face-detection.esm.mjs';
  import '@tensorflow/tfjs-backend-wasm';
  import '@tensorflow/tfjs-backend-cpu';
  import '@aws-amplify/core/internals/utils';
package/dist/esm/node_modules/@tensorflow-models/face-detection/dist/face-detection.esm.mjs ADDED
@@ -0,0 +1,23 @@
+ import FaceDetection from '@mediapipe/face_detection';
+ import { dispose, tidy, cast, squeeze, image, expandDims, browser, Tensor, tensor1d, tensor2d, slice, add, mul, div, exp, sub, concat, reshape, sigmoid, clipByValue, util } from '@tensorflow/tfjs-core';
+ import { loadGraphModel } from '@tensorflow/tfjs-converter';
+
+ /**
+ * @license
+ * Copyright 2024 Google LLC. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * =============================================================================
+ */
+ var b=function(){return b=Object.assign||function(e){for(var t,n=1,i=arguments.length;n<i;n++)for(var o in t=arguments[n])Object.prototype.hasOwnProperty.call(t,o)&&(e[o]=t[o]);return e},b.apply(this,arguments)};function T(e,t,n,i){return new(n||(n=Promise))((function(o,r){function a(e){try{h(i.next(e));}catch(e){r(e);}}function s(e){try{h(i.throw(e));}catch(e){r(e);}}function h(e){var t;e.done?o(e.value):(t=e.value,t instanceof n?t:new n((function(e){e(t);}))).then(a,s);}h((i=i.apply(e,[])).next());}))}function C(e,t){var n,i,o,r,a={label:0,sent:function(){if(1&o[0])throw o[1];return o[1]},trys:[],ops:[]};return r={next:s(0),throw:s(1),return:s(2)},"function"==typeof Symbol&&(r[Symbol.iterator]=function(){return this}),r;function s(r){return function(s){return function(r){if(n)throw new TypeError("Generator is already executing.");for(;a;)try{if(n=1,i&&(o=2&r[0]?i.return:r[0]?i.throw||((o=i.return)&&o.call(i),0):i.next)&&!(o=o.call(i,r[1])).done)return o;switch(i=0,o&&(r=[2&r[0],o.value]),r[0]){case 0:case 1:o=r;break;case 4:return a.label++,{value:r[1],done:!1};case 5:a.label++,i=r[1],r=[0];continue;case 7:r=a.ops.pop(),a.trys.pop();continue;default:if(!(o=a.trys,(o=o.length>0&&o[o.length-1])||6!==r[0]&&2!==r[0])){a=0;continue}if(3===r[0]&&(!o||r[1]>o[0]&&r[1]<o[3])){a.label=r[1];break}if(6===r[0]&&a.label<o[1]){a.label=o[1],o=r;break}if(o&&a.label<o[2]){a.label=o[2],a.ops.push(r);break}o[2]&&a.ops.pop(),a.trys.pop();continue}r=t.call(e,a);}catch(e){r=[6,e],i=0;}finally{n=o=0;}if(5&r[0])throw r[1];return {value:r[0]?r[1]:void 0,done:true}}([r,s])}}}var O=["rightEye","leftEye","noseTip","mouthCenter","rightEarTragion","leftEarTragion"];var B={modelType:"short",runtime:"mediapipe",maxFaces:1};var z=function(){function i(t){var n=this;this.width=0,this.height=0,this.selfieMode=false,this.faceDetectorSolution=new FaceDetection({locateFile:function(e,n){if(t.solutionPath){var i=t.solutionPath.replace(/\/+$/,"");return "".concat(i,"/").concat(e)}return "".concat(n,"/").concat(e)}}),this.faceDetectorSolution.setOptions({selfieMode:this.selfieMode,model:t.modelType}),this.faceDetectorSolution.onResults((function(e){if(n.height=e.image.height,n.width=e.image.width,n.faces=[],null!==e.detections)for(var t=0,i=e.detections;t<i.length;t++){var o=i[t];n.faces.push(n.normalizedToAbsolute(o.landmarks,(r=o.boundingBox,a=void 0,s=void 0,h=void 0,a=r.xCenter-r.width/2,s=a+r.width,h=r.yCenter-r.height/2,{xMin:a,xMax:s,yMin:h,yMax:h+r.height,width:r.width,height:r.height})));}var r,a,s,h;}));}return i.prototype.normalizedToAbsolute=function(e,t){var n=this;return {keypoints:e.map((function(e,t){return {x:e.x*n.width,y:e.y*n.height,name:O[t]}})),box:{xMin:t.xMin*this.width,yMin:t.yMin*this.height,xMax:t.xMax*this.width,yMax:t.yMax*this.height,width:t.width*this.width,height:t.height*this.height}}},i.prototype.estimateFaces=function(e,i){return T(this,void 0,void 0,(function(){var o,r;return C(this,(function(a){switch(a.label){case 0:return i&&i.flipHorizontal&&i.flipHorizontal!==this.selfieMode&&(this.selfieMode=i.flipHorizontal,this.faceDetectorSolution.setOptions({selfieMode:this.selfieMode})),e instanceof Tensor?(r=ImageData.bind,[4,browser.toPixels(e)]):[3,2];case 1:return o=new(r.apply(ImageData,[void 0,a.sent(),e.shape[1],e.shape[0]])),[3,3];case 2:o=e,a.label=3;case 3:return e=o,[4,this.faceDetectorSolution.send({image:e})];case 4:return 
a.sent(),[2,this.faces]}}))}))},i.prototype.dispose=function(){this.faceDetectorSolution.close();},i.prototype.reset=function(){this.faceDetectorSolution.reset(),this.width=0,this.height=0,this.faces=null,this.selfieMode=false;},i.prototype.initialize=function(){return this.faceDetectorSolution.initialize()},i}();function D(e){return T(this,void 0,void 0,(function(){var t,n;return C(this,(function(i){switch(i.label){case 0:return t=function(e){if(null==e)return b({},B);var t=b({},e);return t.runtime="mediapipe",null==t.modelType&&(t.modelType=B.modelType),null==t.maxFaces&&(t.maxFaces=B.maxFaces),t}(e),[4,(n=new z(t)).initialize()];case 1:return i.sent(),[2,n]}}))}))}function A(e,t,n,i){var o=e.width,r=e.height,a=1,s=Math.cos(e.rotation),h=Math.sin(e.rotation),u=e.xCenter,c=e.yCenter,l=1/t,f=1/n,d=new Array(16);return d[0]=o*s*a*l,d[1]=-r*h*l,d[2]=0,d[3]=(-0.5*o*s*a+.5*r*h+u)*l,d[4]=o*h*a*f,d[5]=r*s*f,d[6]=0,d[7]=(-0.5*r*s-.5*o*h*a+c)*f,d[8]=0,d[9]=0,d[10]=o*l,d[11]=0,d[12]=0,d[13]=0,d[14]=0,d[15]=1,function(e){if(16!==e.length)throw new Error("Array length must be 16 but got ".concat(e.length));return [[e[0],e[1],e[2],e[3]],[e[4],e[5],e[6],e[7]],[e[8],e[9],e[10],e[11]],[e[12],e[13],e[14],e[15]]]}(d)}function F(e){return e instanceof Tensor?{height:e.shape[0],width:e.shape[1]}:{height:e.height,width:e.width}}function E(e){return e instanceof Tensor?e:browser.fromPixels(e)}function R(e,t){util.assert(0!==e.width,(function(){return "".concat(t," width cannot be 0.")})),util.assert(0!==e.height,(function(){return "".concat(t," height cannot be 0.")}));}function L(e,t){var n=function(e,t,n,i){var o=t-e,r=i-n;var a=r/o;return {scale:a,offset:n-e*a}}(0,255,t[0],t[1]);return tidy((function(){return add(mul(e,n.scale),n.offset)}))}function K(e,t,n){var i=t.outputTensorSize,r=t.keepAspectRatio,a=t.borderMode,l=t.outputTensorFloatRange,f=F(e),d=function(e,t){return {xCenter:.5*e.width,yCenter:.5*e.height,width:e.width,height:e.height,rotation:0}}(f),p=function(e,t,n){if(void 0===n&&(n=false),!n)return {top:0,left:0,right:0,bottom:0};var i=t.height,o=t.width;R(t,"targetSize"),R(e,"roi");var r,a,s=i/o,h=e.height/e.width,u=0,c=0;return s>h?(r=e.width,a=e.width*s,c=(1-h/s)/2):(r=e.height/s,a=e.height,u=(1-s/h)/2),e.width=r,e.height=a,{top:c,left:u,right:u,bottom:c}}(d,i,r),m=A(d,f.width,f.height),x=tidy((function(){var t=E(e),n=tensor2d(function(e,t,n){return R(n,"inputResolution"),[1/n.width*e[0][0]*t.width,1/n.height*e[0][1]*t.width,e[0][3]*t.width,1/n.width*e[1][0]*t.height,1/n.height*e[1][1]*t.height,e[1][3]*t.height,0,0]}(m,f,i),[1,8]),o="zero"===a?"constant":"nearest",r=image.transform(expandDims(cast(t,"float32")),n,"bilinear",o,0,[i.height,i.width]);return null!=l?L(r,l):r}));return {imageTensor:x,padding:p,transformationMatrix:m}}function k(e){null==e.reduceBoxesInLowestLayer&&(e.reduceBoxesInLowestLayer=false),null==e.interpolatedScaleAspectRatio&&(e.interpolatedScaleAspectRatio=1),null==e.fixedAnchorSize&&(e.fixedAnchorSize=false);for(var t=[],n=0;n<e.numLayers;){for(var i=[],o=[],r=[],a=[],s=n;s<e.strides.length&&e.strides[s]===e.strides[n];){var h=P(e.minScale,e.maxScale,s,e.strides.length);if(0===s&&e.reduceBoxesInLowestLayer)r.push(1),r.push(2),r.push(.5),a.push(.1),a.push(h),a.push(h);else {for(var u=0;u<e.aspectRatios.length;++u)r.push(e.aspectRatios[u]),a.push(h);if(e.interpolatedScaleAspectRatio>0){var c=s===e.strides.length-1?1:P(e.minScale,e.maxScale,s+1,e.strides.length);a.push(Math.sqrt(h*c)),r.push(e.interpolatedScaleAspectRatio);}}s++;}for(var l=0;l<r.length;++l){var 
f=Math.sqrt(r[l]);i.push(a[l]/f),o.push(a[l]*f);}var d=0,p=0;if(e.featureMapHeight.length>0)d=e.featureMapHeight[n],p=e.featureMapWidth[n];else {var m=e.strides[n];d=Math.ceil(e.inputSizeHeight/m),p=Math.ceil(e.inputSizeWidth/m);}for(var x=0;x<d;++x)for(var g=0;g<p;++g)for(var y=0;y<i.length;++y){var v={xCenter:(g+e.anchorOffsetX)/p,yCenter:(x+e.anchorOffsetY)/d,width:0,height:0};e.fixedAnchorSize?(v.width=1,v.height=1):(v.width=o[y],v.height=i[y]),t.push(v);}n=s;}return t}function P(e,t,n,i){return 1===i?.5*(e+t):e+(t-e)*n/(i-1)}function V(e,t){var n=t[0],i=t[1];return [n*e[0]+i*e[1]+e[3],n*e[4]+i*e[5]+e[7]]}function H(e){return tidy((function(){var t=function(e){return tidy((function(){return [slice(e,[0,0,0],[1,-1,1]),slice(e,[0,0,1],[1,-1,-1])]}))}(e),n=t[0],i=t[1];return {boxes:squeeze(i),logits:squeeze(n)}}))}function U(e,t,n,i){return T(this,void 0,void 0,(function(){var i,o,r,a,u;return C(this,(function(c){switch(c.label){case 0:return e.sort((function(e,t){return Math.max.apply(Math,t.score)-Math.max.apply(Math,e.score)})),i=tensor2d(e.map((function(e){return [e.locationData.relativeBoundingBox.yMin,e.locationData.relativeBoundingBox.xMin,e.locationData.relativeBoundingBox.yMax,e.locationData.relativeBoundingBox.xMax]}))),o=tensor1d(e.map((function(e){return e.score[0]}))),[4,image.nonMaxSuppressionAsync(i,o,t,n)];case 1:return [4,(r=c.sent()).array()];case 2:return a=c.sent(),u=e.filter((function(e,t){return a.indexOf(t)>-1})),dispose([i,o,r]),[2,u]}}))}))}function j(e,t,n){return T(this,void 0,void 0,(function(){var i,s,h,u,c;return C(this,(function(p){switch(p.label){case 0:return i=e[0],s=e[1],h=function(e,t,n){return tidy((function(){var i,o,s,h;n.reverseOutputOrder?(o=squeeze(slice(e,[0,n.boxCoordOffset+0],[-1,1])),i=squeeze(slice(e,[0,n.boxCoordOffset+1],[-1,1])),h=squeeze(slice(e,[0,n.boxCoordOffset+2],[-1,1])),s=squeeze(slice(e,[0,n.boxCoordOffset+3],[-1,1]))):(i=squeeze(slice(e,[0,n.boxCoordOffset+0],[-1,1])),o=squeeze(slice(e,[0,n.boxCoordOffset+1],[-1,1])),s=squeeze(slice(e,[0,n.boxCoordOffset+2],[-1,1])),h=squeeze(slice(e,[0,n.boxCoordOffset+3],[-1,1]))),o=add(mul(div(o,n.xScale),t.w),t.x),i=add(mul(div(i,n.yScale),t.h),t.y),n.applyExponentialOnBoxSize?(s=mul(exp(div(s,n.hScale)),t.h),h=mul(exp(div(h,n.wScale)),t.w)):(s=mul(div(s,n.hScale),t.h),h=mul(div(h,n.wScale),t.h));var u=sub(i,div(s,2)),c=sub(o,div(h,2)),d=add(i,div(s,2)),p=add(o,div(h,2)),w=concat([reshape(u,[n.numBoxes,1]),reshape(c,[n.numBoxes,1]),reshape(d,[n.numBoxes,1]),reshape(p,[n.numBoxes,1])],1);if(n.numKeypoints)for(var M=0;M<n.numKeypoints;++M){var S=n.keypointCoordOffset+M*n.numValuesPerKeypoint,b=void 0,T=void 0;n.reverseOutputOrder?(b=squeeze(slice(e,[0,S],[-1,1])),T=squeeze(slice(e,[0,S+1],[-1,1]))):(T=squeeze(slice(e,[0,S],[-1,1])),b=squeeze(slice(e,[0,S+1],[-1,1])));var C=add(mul(div(b,n.xScale),t.w),t.x),O=add(mul(div(T,n.yScale),t.h),t.y);w=concat([w,reshape(C,[n.numBoxes,1]),reshape(O,[n.numBoxes,1])],1);}return w}))}(s,t,n),u=tidy((function(){var e=i;return n.sigmoidScore?(null!=n.scoreClippingThresh&&(e=clipByValue(i,-n.scoreClippingThresh,n.scoreClippingThresh)),e=sigmoid(e)):e})),[4,I(h,u,n)];case 1:return c=p.sent(),dispose([h,u]),[2,c]}}))}))}function I(e,t,n){return T(this,void 0,void 0,(function(){var i,o,r,a,s,h,u,c,l,f,d,p;return C(this,(function(m){switch(m.label){case 0:return i=[],[4,e.data()];case 1:return o=m.sent(),[4,t.data()];case 
2:for(r=m.sent(),a=0;a<n.numBoxes;++a)if(!(null!=n.minScoreThresh&&r[a]<n.minScoreThresh||(s=a*n.numCoords,h=_(o[s+0],o[s+1],o[s+2],o[s+3],r[a],n.flipVertically,a),(u=h.locationData.relativeBoundingBox).width<0||u.height<0))){if(n.numKeypoints>0)for((c=h.locationData).relativeKeypoints=[],l=n.numKeypoints*n.numValuesPerKeypoint,f=0;f<l;f+=n.numValuesPerKeypoint)d=s+n.keypointCoordOffset+f,p={x:o[d+0],y:n.flipVertically?1-o[d+1]:o[d+1]},c.relativeKeypoints.push(p);i.push(h);}return [2,i]}}))}))}function _(e,t,n,i,o,r,a){return {score:[o],ind:a,locationData:{relativeBoundingBox:{xMin:t,yMin:r?1-n:e,xMax:i,yMax:r?1-e:n,width:i-t,height:n-e}}}}var N={reduceBoxesInLowestLayer:false,interpolatedScaleAspectRatio:1,featureMapHeight:[],featureMapWidth:[],numLayers:4,minScale:.1484375,maxScale:.75,inputSizeHeight:128,inputSizeWidth:128,anchorOffsetX:.5,anchorOffsetY:.5,strides:[8,16,16,16],aspectRatios:[1],fixedAnchorSize:true},W={reduceBoxesInLowestLayer:false,interpolatedScaleAspectRatio:0,featureMapHeight:[],featureMapWidth:[],numLayers:1,minScale:.1484375,maxScale:.75,inputSizeHeight:192,inputSizeWidth:192,anchorOffsetX:.5,anchorOffsetY:.5,strides:[4],aspectRatios:[1],fixedAnchorSize:true},X={runtime:"tfjs",modelType:"short",maxFaces:1,detectorModelUrl:"https://tfhub.dev/mediapipe/tfjs-model/face_detection/short/1"},Y={applyExponentialOnBoxSize:false,flipVertically:false,ignoreClasses:[],numClasses:1,numBoxes:896,numCoords:16,boxCoordOffset:0,keypointCoordOffset:4,numKeypoints:6,numValuesPerKeypoint:2,sigmoidScore:true,scoreClippingThresh:100,reverseOutputOrder:true,xScale:128,yScale:128,hScale:128,wScale:128,minScoreThresh:.5},q={applyExponentialOnBoxSize:false,flipVertically:false,ignoreClasses:[],numClasses:1,numBoxes:2304,numCoords:16,boxCoordOffset:0,keypointCoordOffset:4,numKeypoints:6,numValuesPerKeypoint:2,sigmoidScore:true,scoreClippingThresh:100,reverseOutputOrder:true,xScale:192,yScale:192,hScale:192,wScale:192,minScoreThresh:.6},G=.3,$={outputTensorSize:{width:128,height:128},keepAspectRatio:true,outputTensorFloatRange:[-1,1],borderMode:"zero"},J={outputTensorSize:{width:192,height:192},keepAspectRatio:true,outputTensorFloatRange:[-1,1],borderMode:"zero"};var Q,Z=function(){function e(e,t,n){this.detectorModel=t,this.maxFaces=n,"full"===e?(this.imageToTensorConfig=J,this.tensorsToDetectionConfig=q,this.anchors=k(W)):(this.imageToTensorConfig=$,this.tensorsToDetectionConfig=Y,this.anchors=k(N));var i=tensor1d(this.anchors.map((function(e){return e.width}))),o=tensor1d(this.anchors.map((function(e){return e.height}))),r=tensor1d(this.anchors.map((function(e){return e.xCenter}))),a=tensor1d(this.anchors.map((function(e){return e.yCenter})));this.anchorTensor={x:r,y:a,w:i,h:o};}return e.prototype.dispose=function(){this.detectorModel.dispose(),dispose([this.anchorTensor.x,this.anchorTensor.y,this.anchorTensor.w,this.anchorTensor.h]);},e.prototype.reset=function(){},e.prototype.detectFaces=function(e,t){return void 0===t&&(t=false),T(this,void 0,void 0,(function(){var n,i,r,a,s,l,p,m,x,g,y;return C(this,(function(v){switch(v.label){case 0:return null==e?(this.reset(),[2,[]]):(n=tidy((function(){var n=cast(E(e),"float32");if(t){n=squeeze(image.flipLeftRight(expandDims(n,0)),[0]);}return n})),i=K(n,this.imageToTensorConfig),r=i.imageTensor,a=i.transformationMatrix,s=this.detectorModel.execute(r,"Identity:0"),l=H(s),p=l.boxes,[4,j([m=l.logits,p],this.anchorTensor,this.tensorsToDetectionConfig)]);case 1:return 0===(x=v.sent()).length?(dispose([n,r,s,m,p]),[2,x]):[4,U(x,this.maxFaces,G)];case 
2:return g=v.sent(),y=function(e,t){ void 0===e&&(e=[]);var n,i=(n=t,[].concat.apply([],n));return e.forEach((function(e){var t=e.locationData;t.relativeKeypoints.forEach((function(e){var t=V(i,[e.x,e.y]),n=t[0],o=t[1];e.x=n,e.y=o;}));var n=t.relativeBoundingBox,o=Number.MAX_VALUE,r=Number.MAX_VALUE,a=Number.MIN_VALUE,s=Number.MIN_VALUE;[[n.xMin,n.yMin],[n.xMin+n.width,n.yMin],[n.xMin+n.width,n.yMin+n.height],[n.xMin,n.yMin+n.height]].forEach((function(e){var t=V(i,e),n=t[0],h=t[1];o=Math.min(o,n),a=Math.max(a,n),r=Math.min(r,h),s=Math.max(s,h);})),t.relativeBoundingBox={xMin:o,xMax:a,yMin:r,yMax:s,width:a-o,height:s-r};})),e}(g,a),dispose([n,r,s,m,p]),[2,y]}}))}))},e.prototype.estimateFaces=function(e,t){return T(this,void 0,void 0,(function(){var n,i;return C(this,(function(o){return n=F(e),i=!!t&&t.flipHorizontal,[2,this.detectFaces(e,i).then((function(e){return e.map((function(e){for(var t=e.locationData.relativeKeypoints.map((function(e,t){return b(b({},e),{x:e.x*n.width,y:e.y*n.height,name:O[t]})})),i=e.locationData.relativeBoundingBox,o=0,r=["width","xMax","xMin"];o<r.length;o++){i[r[o]]*=n.width;}for(var a=0,s=["height","yMax","yMin"];a<s.length;a++){i[s[a]]*=n.height;}return {keypoints:t,box:i}}))}))]}))}))},e}();function ee(e){return T(this,void 0,void 0,(function(){var t,n,i;return C(this,(function(o){switch(o.label){case 0:return t=function(e){if(null==e)return b({},X);var t=b({},e);null==t.modelType&&(t.modelType=X.modelType),null==t.maxFaces&&(t.maxFaces=X.maxFaces),null==t.detectorModelUrl&&("full"===t.modelType?t.detectorModelUrl="https://tfhub.dev/mediapipe/tfjs-model/face_detection/full/1":t.detectorModelUrl="https://tfhub.dev/mediapipe/tfjs-model/face_detection/short/1");return t}(e),n="string"==typeof t.detectorModelUrl&&t.detectorModelUrl.indexOf("https://tfhub.dev")>-1,[4,loadGraphModel(t.detectorModelUrl,{fromTFHub:n})];case 1:return i=o.sent(),[2,new Z(t.modelType,i,t.maxFaces)]}}))}))}function te(e,t){return T(this,void 0,void 0,(function(){var n,i;return C(this,(function(o){if(e===Q.MediaPipeFaceDetector){if(i=void 0,null!=(n=t)){if("tfjs"===n.runtime)return [2,ee(n)];if("mediapipe"===n.runtime)return [2,D(n)];i=n.runtime;}throw new Error("Expect modelConfig.runtime to be either 'tfjs' "+"or 'mediapipe', but got ".concat(i))}throw new Error("".concat(e," is not a supported model name."))}))}))}!function(e){e.MediaPipeFaceDetector="MediaPipeFaceDetector";}(Q||(Q={}));
+
+ export { z as MediaPipeFaceDetectorMediaPipe, Z as MediaPipeFaceDetectorTfjs, Q as SupportedModels, te as createDetector };
package/dist/esm/version.mjs CHANGED
@@ -1,3 +1,3 @@
- const VERSION = '3.4.7';
+ const VERSION = '3.5.1';

  export { VERSION };
package/dist/index.js CHANGED
@@ -8,7 +8,8 @@ var react = require('@xstate/react');
  var uuid = require('uuid');
  var xstate = require('xstate');
  var tfjsCore = require('@tensorflow/tfjs-core');
- var faceDetection = require('@tensorflow-models/face-detection');
+ var FaceDetection$1 = require('@mediapipe/face_detection');
+ var tfjsConverter = require('@tensorflow/tfjs-converter');
  var tfjsBackendWasm = require('@tensorflow/tfjs-backend-wasm');
  require('@tensorflow/tfjs-backend-cpu');
  var utils = require('@aws-amplify/core/internals/utils');
@@ -23,6 +24,8 @@ var signatureV4 = require('@smithy/signature-v4');
  var uiReact = require('@aws-amplify/ui-react');
  var internal = require('@aws-amplify/ui-react/internal');

+ function _interopDefault (e) { return e && e.__esModule ? e : { default: e }; }
+
  function _interopNamespace(e) {
  if (e && e.__esModule) return e;
  var n = Object.create(null);
@@ -42,6 +45,7 @@ function _interopNamespace(e) {
  }

  var React__namespace = /*#__PURE__*/_interopNamespace(React);
+ var FaceDetection__default = /*#__PURE__*/_interopDefault(FaceDetection$1);

  /**
  * The abstract class representing FaceDetection
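
The added _interopDefault shim is Rollup's standard helper for consuming a module's default export from CommonJS; it is needed here because the CJS bundle now requires @mediapipe/face_detection directly. In isolation it behaves like the following sketch (a typed re-statement, not the package's own code):

// If the required module was transpiled from ESM it already exposes `default`;
// otherwise wrap the plain CJS export so `.default` access works either way.
function interopDefault<T>(mod: T): { default: T } {
  return mod && (mod as { __esModule?: boolean }).__esModule
    ? (mod as unknown as { default: T })
    : { default: mod };
}
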
@@ -92,6 +96,7 @@ const LivenessErrorState = {
  FACE_DISTANCE_ERROR: 'FACE_DISTANCE_ERROR',
  MOBILE_LANDSCAPE_ERROR: 'MOBILE_LANDSCAPE_ERROR',
  MULTIPLE_FACES_ERROR: 'MULTIPLE_FACES_ERROR',
+ DEFAULT_CAMERA_NOT_FOUND_ERROR: 'DEFAULT_CAMERA_NOT_FOUND_ERROR',
  };

  // Face distance is calculated as pupilDistance / ovalWidth.
@@ -542,6 +547,24 @@ async function isFaceDistanceBelowThreshold({ parsedSessionInformation, faceDete
  return { isDistanceBelowThreshold, error };
  }

+ /**
+ * @license
+ * Copyright 2024 Google LLC. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * =============================================================================
+ */
+ var b=function(){return b=Object.assign||function(e){for(var t,n=1,i=arguments.length;n<i;n++)for(var o in t=arguments[n])Object.prototype.hasOwnProperty.call(t,o)&&(e[o]=t[o]);return e},b.apply(this,arguments)};function T(e,t,n,i){return new(n||(n=Promise))((function(o,r){function a(e){try{h(i.next(e));}catch(e){r(e);}}function s(e){try{h(i.throw(e));}catch(e){r(e);}}function h(e){var t;e.done?o(e.value):(t=e.value,t instanceof n?t:new n((function(e){e(t);}))).then(a,s);}h((i=i.apply(e,[])).next());}))}function C(e,t){var n,i,o,r,a={label:0,sent:function(){if(1&o[0])throw o[1];return o[1]},trys:[],ops:[]};return r={next:s(0),throw:s(1),return:s(2)},"function"==typeof Symbol&&(r[Symbol.iterator]=function(){return this}),r;function s(r){return function(s){return function(r){if(n)throw new TypeError("Generator is already executing.");for(;a;)try{if(n=1,i&&(o=2&r[0]?i.return:r[0]?i.throw||((o=i.return)&&o.call(i),0):i.next)&&!(o=o.call(i,r[1])).done)return o;switch(i=0,o&&(r=[2&r[0],o.value]),r[0]){case 0:case 1:o=r;break;case 4:return a.label++,{value:r[1],done:!1};case 5:a.label++,i=r[1],r=[0];continue;case 7:r=a.ops.pop(),a.trys.pop();continue;default:if(!(o=a.trys,(o=o.length>0&&o[o.length-1])||6!==r[0]&&2!==r[0])){a=0;continue}if(3===r[0]&&(!o||r[1]>o[0]&&r[1]<o[3])){a.label=r[1];break}if(6===r[0]&&a.label<o[1]){a.label=o[1],o=r;break}if(o&&a.label<o[2]){a.label=o[2],a.ops.push(r);break}o[2]&&a.ops.pop(),a.trys.pop();continue}r=t.call(e,a);}catch(e){r=[6,e],i=0;}finally{n=o=0;}if(5&r[0])throw r[1];return {value:r[0]?r[1]:void 0,done:true}}([r,s])}}}var O=["rightEye","leftEye","noseTip","mouthCenter","rightEarTragion","leftEarTragion"];var B={modelType:"short",runtime:"mediapipe",maxFaces:1};var z=function(){function i(t){var n=this;this.width=0,this.height=0,this.selfieMode=false,this.faceDetectorSolution=new FaceDetection__default["default"]({locateFile:function(e,n){if(t.solutionPath){var i=t.solutionPath.replace(/\/+$/,"");return "".concat(i,"/").concat(e)}return "".concat(n,"/").concat(e)}}),this.faceDetectorSolution.setOptions({selfieMode:this.selfieMode,model:t.modelType}),this.faceDetectorSolution.onResults((function(e){if(n.height=e.image.height,n.width=e.image.width,n.faces=[],null!==e.detections)for(var t=0,i=e.detections;t<i.length;t++){var o=i[t];n.faces.push(n.normalizedToAbsolute(o.landmarks,(r=o.boundingBox,a=void 0,s=void 0,h=void 0,a=r.xCenter-r.width/2,s=a+r.width,h=r.yCenter-r.height/2,{xMin:a,xMax:s,yMin:h,yMax:h+r.height,width:r.width,height:r.height})));}var r,a,s,h;}));}return i.prototype.normalizedToAbsolute=function(e,t){var n=this;return {keypoints:e.map((function(e,t){return {x:e.x*n.width,y:e.y*n.height,name:O[t]}})),box:{xMin:t.xMin*this.width,yMin:t.yMin*this.height,xMax:t.xMax*this.width,yMax:t.yMax*this.height,width:t.width*this.width,height:t.height*this.height}}},i.prototype.estimateFaces=function(e,i){return T(this,void 0,void 0,(function(){var o,r;return C(this,(function(a){switch(a.label){case 0:return i&&i.flipHorizontal&&i.flipHorizontal!==this.selfieMode&&(this.selfieMode=i.flipHorizontal,this.faceDetectorSolution.setOptions({selfieMode:this.selfieMode})),e instanceof tfjsCore.Tensor?(r=ImageData.bind,[4,tfjsCore.browser.toPixels(e)]):[3,2];case 1:return o=new(r.apply(ImageData,[void 0,a.sent(),e.shape[1],e.shape[0]])),[3,3];case 2:o=e,a.label=3;case 3:return e=o,[4,this.faceDetectorSolution.send({image:e})];case 4:return 
a.sent(),[2,this.faces]}}))}))},i.prototype.dispose=function(){this.faceDetectorSolution.close();},i.prototype.reset=function(){this.faceDetectorSolution.reset(),this.width=0,this.height=0,this.faces=null,this.selfieMode=false;},i.prototype.initialize=function(){return this.faceDetectorSolution.initialize()},i}();function D(e){return T(this,void 0,void 0,(function(){var t,n;return C(this,(function(i){switch(i.label){case 0:return t=function(e){if(null==e)return b({},B);var t=b({},e);return t.runtime="mediapipe",null==t.modelType&&(t.modelType=B.modelType),null==t.maxFaces&&(t.maxFaces=B.maxFaces),t}(e),[4,(n=new z(t)).initialize()];case 1:return i.sent(),[2,n]}}))}))}function A(e,t,n,i){var o=e.width,r=e.height,a=1,s=Math.cos(e.rotation),h=Math.sin(e.rotation),u=e.xCenter,c=e.yCenter,l=1/t,f=1/n,d=new Array(16);return d[0]=o*s*a*l,d[1]=-r*h*l,d[2]=0,d[3]=(-0.5*o*s*a+.5*r*h+u)*l,d[4]=o*h*a*f,d[5]=r*s*f,d[6]=0,d[7]=(-0.5*r*s-.5*o*h*a+c)*f,d[8]=0,d[9]=0,d[10]=o*l,d[11]=0,d[12]=0,d[13]=0,d[14]=0,d[15]=1,function(e){if(16!==e.length)throw new Error("Array length must be 16 but got ".concat(e.length));return [[e[0],e[1],e[2],e[3]],[e[4],e[5],e[6],e[7]],[e[8],e[9],e[10],e[11]],[e[12],e[13],e[14],e[15]]]}(d)}function F(e){return e instanceof tfjsCore.Tensor?{height:e.shape[0],width:e.shape[1]}:{height:e.height,width:e.width}}function E(e){return e instanceof tfjsCore.Tensor?e:tfjsCore.browser.fromPixels(e)}function R(e,t){tfjsCore.util.assert(0!==e.width,(function(){return "".concat(t," width cannot be 0.")})),tfjsCore.util.assert(0!==e.height,(function(){return "".concat(t," height cannot be 0.")}));}function L(e,t){var n=function(e,t,n,i){var o=t-e,r=i-n;var a=r/o;return {scale:a,offset:n-e*a}}(0,255,t[0],t[1]);return tfjsCore.tidy((function(){return tfjsCore.add(tfjsCore.mul(e,n.scale),n.offset)}))}function K(e,t,n){var i=t.outputTensorSize,r=t.keepAspectRatio,a=t.borderMode,l=t.outputTensorFloatRange,f=F(e),d=function(e,t){return {xCenter:.5*e.width,yCenter:.5*e.height,width:e.width,height:e.height,rotation:0}}(f),p=function(e,t,n){if(void 0===n&&(n=false),!n)return {top:0,left:0,right:0,bottom:0};var i=t.height,o=t.width;R(t,"targetSize"),R(e,"roi");var r,a,s=i/o,h=e.height/e.width,u=0,c=0;return s>h?(r=e.width,a=e.width*s,c=(1-h/s)/2):(r=e.height/s,a=e.height,u=(1-s/h)/2),e.width=r,e.height=a,{top:c,left:u,right:u,bottom:c}}(d,i,r),m=A(d,f.width,f.height),x=tfjsCore.tidy((function(){var t=E(e),n=tfjsCore.tensor2d(function(e,t,n){return R(n,"inputResolution"),[1/n.width*e[0][0]*t.width,1/n.height*e[0][1]*t.width,e[0][3]*t.width,1/n.width*e[1][0]*t.height,1/n.height*e[1][1]*t.height,e[1][3]*t.height,0,0]}(m,f,i),[1,8]),o="zero"===a?"constant":"nearest",r=tfjsCore.image.transform(tfjsCore.expandDims(tfjsCore.cast(t,"float32")),n,"bilinear",o,0,[i.height,i.width]);return null!=l?L(r,l):r}));return {imageTensor:x,padding:p,transformationMatrix:m}}function k(e){null==e.reduceBoxesInLowestLayer&&(e.reduceBoxesInLowestLayer=false),null==e.interpolatedScaleAspectRatio&&(e.interpolatedScaleAspectRatio=1),null==e.fixedAnchorSize&&(e.fixedAnchorSize=false);for(var t=[],n=0;n<e.numLayers;){for(var i=[],o=[],r=[],a=[],s=n;s<e.strides.length&&e.strides[s]===e.strides[n];){var h=P(e.minScale,e.maxScale,s,e.strides.length);if(0===s&&e.reduceBoxesInLowestLayer)r.push(1),r.push(2),r.push(.5),a.push(.1),a.push(h),a.push(h);else {for(var u=0;u<e.aspectRatios.length;++u)r.push(e.aspectRatios[u]),a.push(h);if(e.interpolatedScaleAspectRatio>0){var 
c=s===e.strides.length-1?1:P(e.minScale,e.maxScale,s+1,e.strides.length);a.push(Math.sqrt(h*c)),r.push(e.interpolatedScaleAspectRatio);}}s++;}for(var l=0;l<r.length;++l){var f=Math.sqrt(r[l]);i.push(a[l]/f),o.push(a[l]*f);}var d=0,p=0;if(e.featureMapHeight.length>0)d=e.featureMapHeight[n],p=e.featureMapWidth[n];else {var m=e.strides[n];d=Math.ceil(e.inputSizeHeight/m),p=Math.ceil(e.inputSizeWidth/m);}for(var x=0;x<d;++x)for(var g=0;g<p;++g)for(var y=0;y<i.length;++y){var v={xCenter:(g+e.anchorOffsetX)/p,yCenter:(x+e.anchorOffsetY)/d,width:0,height:0};e.fixedAnchorSize?(v.width=1,v.height=1):(v.width=o[y],v.height=i[y]),t.push(v);}n=s;}return t}function P(e,t,n,i){return 1===i?.5*(e+t):e+(t-e)*n/(i-1)}function V(e,t){var n=t[0],i=t[1];return [n*e[0]+i*e[1]+e[3],n*e[4]+i*e[5]+e[7]]}function H(e){return tfjsCore.tidy((function(){var t=function(e){return tfjsCore.tidy((function(){return [tfjsCore.slice(e,[0,0,0],[1,-1,1]),tfjsCore.slice(e,[0,0,1],[1,-1,-1])]}))}(e),n=t[0],i=t[1];return {boxes:tfjsCore.squeeze(i),logits:tfjsCore.squeeze(n)}}))}function U(e,t,n,i){return T(this,void 0,void 0,(function(){var i,o,r,a,u;return C(this,(function(c){switch(c.label){case 0:return e.sort((function(e,t){return Math.max.apply(Math,t.score)-Math.max.apply(Math,e.score)})),i=tfjsCore.tensor2d(e.map((function(e){return [e.locationData.relativeBoundingBox.yMin,e.locationData.relativeBoundingBox.xMin,e.locationData.relativeBoundingBox.yMax,e.locationData.relativeBoundingBox.xMax]}))),o=tfjsCore.tensor1d(e.map((function(e){return e.score[0]}))),[4,tfjsCore.image.nonMaxSuppressionAsync(i,o,t,n)];case 1:return [4,(r=c.sent()).array()];case 2:return a=c.sent(),u=e.filter((function(e,t){return a.indexOf(t)>-1})),tfjsCore.dispose([i,o,r]),[2,u]}}))}))}function j(e,t,n){return T(this,void 0,void 0,(function(){var i,s,h,u,c;return C(this,(function(p){switch(p.label){case 0:return i=e[0],s=e[1],h=function(e,t,n){return tfjsCore.tidy((function(){var i,o,s,h;n.reverseOutputOrder?(o=tfjsCore.squeeze(tfjsCore.slice(e,[0,n.boxCoordOffset+0],[-1,1])),i=tfjsCore.squeeze(tfjsCore.slice(e,[0,n.boxCoordOffset+1],[-1,1])),h=tfjsCore.squeeze(tfjsCore.slice(e,[0,n.boxCoordOffset+2],[-1,1])),s=tfjsCore.squeeze(tfjsCore.slice(e,[0,n.boxCoordOffset+3],[-1,1]))):(i=tfjsCore.squeeze(tfjsCore.slice(e,[0,n.boxCoordOffset+0],[-1,1])),o=tfjsCore.squeeze(tfjsCore.slice(e,[0,n.boxCoordOffset+1],[-1,1])),s=tfjsCore.squeeze(tfjsCore.slice(e,[0,n.boxCoordOffset+2],[-1,1])),h=tfjsCore.squeeze(tfjsCore.slice(e,[0,n.boxCoordOffset+3],[-1,1]))),o=tfjsCore.add(tfjsCore.mul(tfjsCore.div(o,n.xScale),t.w),t.x),i=tfjsCore.add(tfjsCore.mul(tfjsCore.div(i,n.yScale),t.h),t.y),n.applyExponentialOnBoxSize?(s=tfjsCore.mul(tfjsCore.exp(tfjsCore.div(s,n.hScale)),t.h),h=tfjsCore.mul(tfjsCore.exp(tfjsCore.div(h,n.wScale)),t.w)):(s=tfjsCore.mul(tfjsCore.div(s,n.hScale),t.h),h=tfjsCore.mul(tfjsCore.div(h,n.wScale),t.h));var u=tfjsCore.sub(i,tfjsCore.div(s,2)),c=tfjsCore.sub(o,tfjsCore.div(h,2)),d=tfjsCore.add(i,tfjsCore.div(s,2)),p=tfjsCore.add(o,tfjsCore.div(h,2)),w=tfjsCore.concat([tfjsCore.reshape(u,[n.numBoxes,1]),tfjsCore.reshape(c,[n.numBoxes,1]),tfjsCore.reshape(d,[n.numBoxes,1]),tfjsCore.reshape(p,[n.numBoxes,1])],1);if(n.numKeypoints)for(var M=0;M<n.numKeypoints;++M){var S=n.keypointCoordOffset+M*n.numValuesPerKeypoint,b=void 0,T=void 
0;n.reverseOutputOrder?(b=tfjsCore.squeeze(tfjsCore.slice(e,[0,S],[-1,1])),T=tfjsCore.squeeze(tfjsCore.slice(e,[0,S+1],[-1,1]))):(T=tfjsCore.squeeze(tfjsCore.slice(e,[0,S],[-1,1])),b=tfjsCore.squeeze(tfjsCore.slice(e,[0,S+1],[-1,1])));var C=tfjsCore.add(tfjsCore.mul(tfjsCore.div(b,n.xScale),t.w),t.x),O=tfjsCore.add(tfjsCore.mul(tfjsCore.div(T,n.yScale),t.h),t.y);w=tfjsCore.concat([w,tfjsCore.reshape(C,[n.numBoxes,1]),tfjsCore.reshape(O,[n.numBoxes,1])],1);}return w}))}(s,t,n),u=tfjsCore.tidy((function(){var e=i;return n.sigmoidScore?(null!=n.scoreClippingThresh&&(e=tfjsCore.clipByValue(i,-n.scoreClippingThresh,n.scoreClippingThresh)),e=tfjsCore.sigmoid(e)):e})),[4,I(h,u,n)];case 1:return c=p.sent(),tfjsCore.dispose([h,u]),[2,c]}}))}))}function I(e,t,n){return T(this,void 0,void 0,(function(){var i,o,r,a,s,h,u,c,l,f,d,p;return C(this,(function(m){switch(m.label){case 0:return i=[],[4,e.data()];case 1:return o=m.sent(),[4,t.data()];case 2:for(r=m.sent(),a=0;a<n.numBoxes;++a)if(!(null!=n.minScoreThresh&&r[a]<n.minScoreThresh||(s=a*n.numCoords,h=_(o[s+0],o[s+1],o[s+2],o[s+3],r[a],n.flipVertically,a),(u=h.locationData.relativeBoundingBox).width<0||u.height<0))){if(n.numKeypoints>0)for((c=h.locationData).relativeKeypoints=[],l=n.numKeypoints*n.numValuesPerKeypoint,f=0;f<l;f+=n.numValuesPerKeypoint)d=s+n.keypointCoordOffset+f,p={x:o[d+0],y:n.flipVertically?1-o[d+1]:o[d+1]},c.relativeKeypoints.push(p);i.push(h);}return [2,i]}}))}))}function _(e,t,n,i,o,r,a){return {score:[o],ind:a,locationData:{relativeBoundingBox:{xMin:t,yMin:r?1-n:e,xMax:i,yMax:r?1-e:n,width:i-t,height:n-e}}}}var N={reduceBoxesInLowestLayer:false,interpolatedScaleAspectRatio:1,featureMapHeight:[],featureMapWidth:[],numLayers:4,minScale:.1484375,maxScale:.75,inputSizeHeight:128,inputSizeWidth:128,anchorOffsetX:.5,anchorOffsetY:.5,strides:[8,16,16,16],aspectRatios:[1],fixedAnchorSize:true},W={reduceBoxesInLowestLayer:false,interpolatedScaleAspectRatio:0,featureMapHeight:[],featureMapWidth:[],numLayers:1,minScale:.1484375,maxScale:.75,inputSizeHeight:192,inputSizeWidth:192,anchorOffsetX:.5,anchorOffsetY:.5,strides:[4],aspectRatios:[1],fixedAnchorSize:true},X={runtime:"tfjs",modelType:"short",maxFaces:1,detectorModelUrl:"https://tfhub.dev/mediapipe/tfjs-model/face_detection/short/1"},Y={applyExponentialOnBoxSize:false,flipVertically:false,ignoreClasses:[],numClasses:1,numBoxes:896,numCoords:16,boxCoordOffset:0,keypointCoordOffset:4,numKeypoints:6,numValuesPerKeypoint:2,sigmoidScore:true,scoreClippingThresh:100,reverseOutputOrder:true,xScale:128,yScale:128,hScale:128,wScale:128,minScoreThresh:.5},q={applyExponentialOnBoxSize:false,flipVertically:false,ignoreClasses:[],numClasses:1,numBoxes:2304,numCoords:16,boxCoordOffset:0,keypointCoordOffset:4,numKeypoints:6,numValuesPerKeypoint:2,sigmoidScore:true,scoreClippingThresh:100,reverseOutputOrder:true,xScale:192,yScale:192,hScale:192,wScale:192,minScoreThresh:.6},G=.3,$={outputTensorSize:{width:128,height:128},keepAspectRatio:true,outputTensorFloatRange:[-1,1],borderMode:"zero"},J={outputTensorSize:{width:192,height:192},keepAspectRatio:true,outputTensorFloatRange:[-1,1],borderMode:"zero"};var Q,Z=function(){function e(e,t,n){this.detectorModel=t,this.maxFaces=n,"full"===e?(this.imageToTensorConfig=J,this.tensorsToDetectionConfig=q,this.anchors=k(W)):(this.imageToTensorConfig=$,this.tensorsToDetectionConfig=Y,this.anchors=k(N));var i=tfjsCore.tensor1d(this.anchors.map((function(e){return e.width}))),o=tfjsCore.tensor1d(this.anchors.map((function(e){return 
e.height}))),r=tfjsCore.tensor1d(this.anchors.map((function(e){return e.xCenter}))),a=tfjsCore.tensor1d(this.anchors.map((function(e){return e.yCenter})));this.anchorTensor={x:r,y:a,w:i,h:o};}return e.prototype.dispose=function(){this.detectorModel.dispose(),tfjsCore.dispose([this.anchorTensor.x,this.anchorTensor.y,this.anchorTensor.w,this.anchorTensor.h]);},e.prototype.reset=function(){},e.prototype.detectFaces=function(e,t){return void 0===t&&(t=false),T(this,void 0,void 0,(function(){var n,i,r,a,s,l,p,m,x,g,y;return C(this,(function(v){switch(v.label){case 0:return null==e?(this.reset(),[2,[]]):(n=tfjsCore.tidy((function(){var n=tfjsCore.cast(E(e),"float32");if(t){n=tfjsCore.squeeze(tfjsCore.image.flipLeftRight(tfjsCore.expandDims(n,0)),[0]);}return n})),i=K(n,this.imageToTensorConfig),r=i.imageTensor,a=i.transformationMatrix,s=this.detectorModel.execute(r,"Identity:0"),l=H(s),p=l.boxes,[4,j([m=l.logits,p],this.anchorTensor,this.tensorsToDetectionConfig)]);case 1:return 0===(x=v.sent()).length?(tfjsCore.dispose([n,r,s,m,p]),[2,x]):[4,U(x,this.maxFaces,G)];case 2:return g=v.sent(),y=function(e,t){ void 0===e&&(e=[]);var n,i=(n=t,[].concat.apply([],n));return e.forEach((function(e){var t=e.locationData;t.relativeKeypoints.forEach((function(e){var t=V(i,[e.x,e.y]),n=t[0],o=t[1];e.x=n,e.y=o;}));var n=t.relativeBoundingBox,o=Number.MAX_VALUE,r=Number.MAX_VALUE,a=Number.MIN_VALUE,s=Number.MIN_VALUE;[[n.xMin,n.yMin],[n.xMin+n.width,n.yMin],[n.xMin+n.width,n.yMin+n.height],[n.xMin,n.yMin+n.height]].forEach((function(e){var t=V(i,e),n=t[0],h=t[1];o=Math.min(o,n),a=Math.max(a,n),r=Math.min(r,h),s=Math.max(s,h);})),t.relativeBoundingBox={xMin:o,xMax:a,yMin:r,yMax:s,width:a-o,height:s-r};})),e}(g,a),tfjsCore.dispose([n,r,s,m,p]),[2,y]}}))}))},e.prototype.estimateFaces=function(e,t){return T(this,void 0,void 0,(function(){var n,i;return C(this,(function(o){return n=F(e),i=!!t&&t.flipHorizontal,[2,this.detectFaces(e,i).then((function(e){return e.map((function(e){for(var t=e.locationData.relativeKeypoints.map((function(e,t){return b(b({},e),{x:e.x*n.width,y:e.y*n.height,name:O[t]})})),i=e.locationData.relativeBoundingBox,o=0,r=["width","xMax","xMin"];o<r.length;o++){i[r[o]]*=n.width;}for(var a=0,s=["height","yMax","yMin"];a<s.length;a++){i[s[a]]*=n.height;}return {keypoints:t,box:i}}))}))]}))}))},e}();function ee(e){return T(this,void 0,void 0,(function(){var t,n,i;return C(this,(function(o){switch(o.label){case 0:return t=function(e){if(null==e)return b({},X);var t=b({},e);null==t.modelType&&(t.modelType=X.modelType),null==t.maxFaces&&(t.maxFaces=X.maxFaces),null==t.detectorModelUrl&&("full"===t.modelType?t.detectorModelUrl="https://tfhub.dev/mediapipe/tfjs-model/face_detection/full/1":t.detectorModelUrl="https://tfhub.dev/mediapipe/tfjs-model/face_detection/short/1");return t}(e),n="string"==typeof t.detectorModelUrl&&t.detectorModelUrl.indexOf("https://tfhub.dev")>-1,[4,tfjsConverter.loadGraphModel(t.detectorModelUrl,{fromTFHub:n})];case 1:return i=o.sent(),[2,new Z(t.modelType,i,t.maxFaces)]}}))}))}function te(e,t){return T(this,void 0,void 0,(function(){var n,i;return C(this,(function(o){if(e===Q.MediaPipeFaceDetector){if(i=void 0,null!=(n=t)){if("tfjs"===n.runtime)return [2,ee(n)];if("mediapipe"===n.runtime)return [2,D(n)];i=n.runtime;}throw new Error("Expect modelConfig.runtime to be either 'tfjs' "+"or 'mediapipe', but got ".concat(i))}throw new Error("".concat(e," is not a supported model name."))}))}))}!function(e){e.MediaPipeFaceDetector="MediaPipeFaceDetector";}(Q||(Q={}));
+
  /**
  * Checks whether WebAssembly is supported in the current environment.
  */
@@ -580,7 +603,7 @@ class BlazeFaceFaceDetection extends FaceDetection {
  }
  try {
  await tfjsCore.ready();
- this._model = await faceDetection.createDetector(faceDetection.SupportedModels.MediaPipeFaceDetector, {
+ this._model = await te(Q.MediaPipeFaceDetector, {
  runtime: 'tfjs',
  detectorModelUrl: this.faceModelUrl,
  });
@@ -1055,7 +1078,7 @@ function createRequestStreamGenerator(stream) {
  };
  }

- const VERSION = '3.4.7';
+ const VERSION = '3.5.1';

  const BASE_USER_AGENT = `ui-react-liveness/${VERSION}`;
  const getLivenessUserAgent = () => {
@@ -1610,6 +1633,17 @@ const STATIC_VIDEO_CONSTRAINTS = {
  const CAMERA_ID_KEY = 'AmplifyLivenessCameraId';
  const DEFAULT_FACE_FIT_TIMEOUT = 7000;
  let responseStream;
+ // Helper function to get selected device info
+ const getSelectedDeviceInfo = (context) => {
+ const selected = context.videoAssociatedParams?.selectableDevices?.find((d) => d.deviceId === context.videoAssociatedParams?.selectedDeviceId);
+ return selected
+ ? {
+ deviceId: selected.deviceId,
+ groupId: selected.groupId,
+ label: selected.label ?? '',
+ }
+ : undefined;
+ };
  const responseStreamActor = async (callback) => {
  try {
  const stream = await responseStream;
@@ -1670,6 +1704,9 @@ const responseStreamActor = async (callback) => {
  }
  }
  };
+ function getLastSelectedCameraId() {
+ return localStorage.getItem(CAMERA_ID_KEY);
+ }
  function setLastSelectedCameraId(deviceId) {
  localStorage.setItem(CAMERA_ID_KEY, deviceId);
  }
@@ -2222,6 +2259,9 @@ const livenessMachine = xstate.createMachine({
  if (event.data.message.includes('15 fps')) {
  errorState = LivenessErrorState.CAMERA_FRAMERATE_ERROR;
  }
+ else if (event.data.message.includes(LivenessErrorState.DEFAULT_CAMERA_NOT_FOUND_ERROR)) {
+ errorState = LivenessErrorState.DEFAULT_CAMERA_NOT_FOUND_ERROR;
+ }
  else {
  errorState = LivenessErrorState.CAMERA_ACCESS_ERROR;
  }
@@ -2231,15 +2271,26 @@ const livenessMachine = xstate.createMachine({
  state: errorState,
  error: error,
  };
- context.componentProps.onError?.(livenessError);
+ context.componentProps.onError?.(livenessError, getSelectedDeviceInfo(context));
  return errorState;
  },
  }),
  callMobileLandscapeWarningCallback: xstate.assign({
  errorState: () => LivenessErrorState.MOBILE_LANDSCAPE_ERROR,
  }),
+ getSelectedDeviceInfo: (context) => getSelectedDeviceInfo(context),
  callUserCancelCallback: (context) => {
- context.componentProps.onUserCancel?.();
+ const { onUserCancel } = context.componentProps ?? {};
+ if (!onUserCancel) {
+ return;
+ }
+ try {
+ onUserCancel();
+ }
+ catch (callbackError) {
+ // eslint-disable-next-line no-console
+ console.error('Error in onUserCancel callback:', callbackError);
+ }
  },
  callUserTimeoutCallback: (context) => {
  const error = new Error(context.errorMessage ?? 'Client Timeout');
@@ -2248,14 +2299,14 @@ const livenessMachine = xstate.createMachine({
  state: context.errorState,
  error: error,
  };
- context.componentProps.onError?.(livenessError);
+ context.componentProps.onError?.(livenessError, getSelectedDeviceInfo(context));
  },
  callErrorCallback: (context, event) => {
  const livenessError = {
  state: context.errorState,
  error: event.data?.error || event.data,
  };
- context.componentProps.onError?.(livenessError);
+ context.componentProps.onError?.(livenessError, getSelectedDeviceInfo(context));
  },
  cleanUpResources: (context) => {
  const { freshnessColorEl } = context.freshnessColorAssociatedParams;
@@ -2366,10 +2417,42 @@ const livenessMachine = xstate.createMachine({
  services: {
  async checkVirtualCameraAndGetStream(context) {
  const { videoConstraints } = context.videoAssociatedParams;
- // Get initial stream to enumerate devices with non-empty labels
- const initialStream = await navigator.mediaDevices.getUserMedia({
- video: { ...videoConstraints },
+ const { componentProps } = context;
+ let targetDeviceId;
+ let cameraNotFound = false;
+ if (componentProps?.config?.deviceId) {
+ targetDeviceId = componentProps.config.deviceId;
+ }
+ else {
+ targetDeviceId = getLastSelectedCameraId() ?? undefined;
+ }
+ const initialStream = await navigator.mediaDevices
+ .getUserMedia({
+ video: {
+ ...videoConstraints,
+ ...(targetDeviceId
+ ? { deviceId: { exact: targetDeviceId } }
+ : {}),
+ },
  audio: false,
+ })
+ .catch((error) => {
+ if (error instanceof DOMException &&
+ (error.name === 'NotFoundError' ||
+ error.name === 'OverconstrainedError')) {
+ // Mark camera as not found when a specific target device (either provided via props
+ // or previously selected/saved as default) cannot be accessed.
+ if (targetDeviceId && !cameraNotFound) {
+ cameraNotFound = true;
+ }
+ return navigator.mediaDevices.getUserMedia({
+ video: {
+ ...videoConstraints,
+ },
+ audio: false,
+ });
+ }
+ throw error;
  });
  const devices = await navigator.mediaDevices.enumerateDevices();
  const realVideoDevices = devices
@@ -2383,7 +2466,7 @@ const livenessMachine = xstate.createMachine({
2383
2466
  .getTracks()
2384
2467
  .filter((track) => {
2385
2468
  const settings = track.getSettings();
2386
- return settings.frameRate >= 15;
2469
+ return (settings.frameRate ?? 0) >= 15;
2387
2470
  });
2388
2471
  if (tracksWithMoreThan15Fps.length < 1) {
2389
2472
  throw new Error('No camera found with more than 15 fps');
@@ -2402,11 +2485,16 @@ const livenessMachine = xstate.createMachine({
2402
2485
  });
2403
2486
  }
2404
2487
  setLastSelectedCameraId(deviceId);
2405
- return {
2488
+ const result = {
2406
2489
  stream: realVideoDeviceStream,
2407
2490
  selectedDeviceId: initialStreamDeviceId,
2408
2491
  selectableDevices: realVideoDevices,
2409
2492
  };
2493
+ // If a specific camera was requested but not found, trigger a specific error
2494
+ if (cameraNotFound) {
2495
+ throw new Error(LivenessErrorState.DEFAULT_CAMERA_NOT_FOUND_ERROR);
2496
+ }
2497
+ return result;
2410
2498
  },
2411
2499
  // eslint-disable-next-line @typescript-eslint/require-await
2412
2500
  async openLivenessStreamConnection(context) {
@@ -2659,9 +2747,20 @@ const livenessMachine = xstate.createMachine({
2659
2747
  livenessStreamProvider.dispatchStreamEvent({ type: 'streamStop' });
2660
2748
  },
2661
2749
  async getLiveness(context) {
2662
- const { onAnalysisComplete } = context.componentProps;
2663
- // Get liveness result
2664
- await onAnalysisComplete();
2750
+ const { onAnalysisComplete } = context.componentProps ?? {};
2751
+ if (!onAnalysisComplete) {
2752
+ return;
2753
+ }
2754
+ try {
2755
+ const deviceInfo = getSelectedDeviceInfo(context);
2756
+ await onAnalysisComplete(deviceInfo);
2757
+ }
2758
+ catch (callbackError) {
2759
+ // eslint-disable-next-line no-console
2760
+ console.error('Error in onAnalysisComplete callback:', callbackError);
2761
+ // Rethrow to allow the state machine to handle the error
2762
+ throw callbackError;
2763
+ }
2665
2764
  },
2666
2765
  },
2667
2766
  });
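Note: the reworked `checkVirtualCameraAndGetStream` service above first requests the camera named by `componentProps.config.deviceId` (or the last camera id saved in localStorage) with an `exact` constraint, and only falls back to an unconstrained request when that device is missing, later surfacing `DEFAULT_CAMERA_NOT_FOUND_ERROR`. A minimal standalone sketch of that acquire-with-fallback pattern (the `CAMERA_ID_KEY` value and the `acquireCameraStream` helper name are illustrative assumptions, not the package's actual identifiers):

```ts
// Sketch only: mirrors the deviceId-targeted getUserMedia call with fallback.
// CAMERA_ID_KEY's value and this helper's name are illustrative assumptions.
const CAMERA_ID_KEY = 'amplify-liveness-camera-id';

async function acquireCameraStream(
  videoConstraints: MediaTrackConstraints,
  preferredDeviceId?: string
): Promise<{ stream: MediaStream; cameraNotFound: boolean }> {
  // Prefer an explicitly configured device, then the last selected one.
  const targetDeviceId =
    preferredDeviceId ?? localStorage.getItem(CAMERA_ID_KEY) ?? undefined;
  try {
    const stream = await navigator.mediaDevices.getUserMedia({
      video: {
        ...videoConstraints,
        ...(targetDeviceId ? { deviceId: { exact: targetDeviceId } } : {}),
      },
      audio: false,
    });
    return { stream, cameraNotFound: false };
  } catch (error) {
    const deviceMissing =
      error instanceof DOMException &&
      (error.name === 'NotFoundError' || error.name === 'OverconstrainedError');
    if (!deviceMissing) {
      throw error;
    }
    // Retry without the exact deviceId; report that the requested camera was not found.
    const stream = await navigator.mediaDevices.getUserMedia({
      video: { ...videoConstraints },
      audio: false,
    });
    return { stream, cameraNotFound: Boolean(targetDeviceId) };
  }
}
```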
package/dist/styles.css CHANGED
@@ -3565,6 +3565,23 @@ strong.amplify-text {
3565
3565
  gap: var(--amplify-space-medium);
3566
3566
  }
3567
3567
 
3568
+ [data-amplify-authenticator-passkeyprompt] .amplify-authenticator__passkey-success-icon {
3569
+ font-size: var(--amplify-font-sizes-xxxl);
3570
+ color: var(--amplify-colors-green-60, #34a853);
3571
+ }
3572
+ [data-amplify-authenticator-passkeyprompt] .amplify-authenticator__passkey-credential-item {
3573
+ padding: var(--amplify-space-medium);
3574
+ background-color: var(--amplify-colors-background-secondary);
3575
+ border-radius: var(--amplify-radii-small);
3576
+ }
3577
+ [data-amplify-authenticator-passkeyprompt] .amplify-authenticator__passkey-error {
3578
+ color: var(--amplify-colors-font-error);
3579
+ margin-top: var(--amplify-space-small);
3580
+ }
3581
+ [data-amplify-authenticator-passkeyprompt] .amplify-authenticator__passkey-icon {
3582
+ font-size: var(--amplify-components-authenticator-passkey-icon-size, 12rem);
3583
+ }
3584
+
3568
3585
  .amplify-avatar {
3569
3586
  --avatar-color: var(--amplify-components-avatar-color);
3570
3587
  --avatar-background-color: var(--amplify-components-avatar-background-color);
package/dist/types/components/FaceLivenessDetector/service/machine/machine.d.ts CHANGED
@@ -1,4 +1,5 @@
1
- import type { LivenessContext, LivenessEvent } from '../types';
1
+ import type { LivenessContext, LivenessEvent, DeviceInfo } from '../types';
2
+ export declare const getSelectedDeviceInfo: (context: LivenessContext) => DeviceInfo | undefined;
2
3
  export declare const livenessMachine: import("xstate").StateMachine<LivenessContext, any, LivenessEvent, {
3
4
  value: any;
4
5
  context: LivenessContext;
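The machine bundle now also exports `getSelectedDeviceInfo(context)`, whose result is threaded into `onError`, `callUserTimeoutCallback`, and `getLiveness` above. Its implementation is not shown in this diff; a plausible sketch, assuming it matches the selected device id against the enumerated video inputs, would be:

```ts
// Sketch only: a plausible shape for getSelectedDeviceInfo's return value,
// derived from the DeviceInfo type added in this release.
interface DeviceInfo {
  deviceId: string;
  groupId: string;
  label: string;
}

function toDeviceInfo(
  selectedDeviceId: string | undefined,
  selectableDevices: MediaDeviceInfo[] = []
): DeviceInfo | undefined {
  const device = selectableDevices.find((d) => d.deviceId === selectedDeviceId);
  return device
    ? { deviceId: device.deviceId, groupId: device.groupId, label: device.label }
    : undefined;
}
```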
package/dist/types/components/FaceLivenessDetector/service/types/error.d.ts CHANGED
@@ -12,5 +12,6 @@ export declare const LivenessErrorState: {
12
12
  readonly FACE_DISTANCE_ERROR: "FACE_DISTANCE_ERROR";
13
13
  readonly MOBILE_LANDSCAPE_ERROR: "MOBILE_LANDSCAPE_ERROR";
14
14
  readonly MULTIPLE_FACES_ERROR: "MULTIPLE_FACES_ERROR";
15
+ readonly DEFAULT_CAMERA_NOT_FOUND_ERROR: "DEFAULT_CAMERA_NOT_FOUND_ERROR";
15
16
  };
16
17
  export type ErrorState = keyof typeof LivenessErrorState;
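With the new member, `ErrorState` (i.e. `keyof typeof LivenessErrorState`) now includes `'DEFAULT_CAMERA_NOT_FOUND_ERROR'`. A small, illustrative narrowing helper (the type and function names here are hypothetical):

```ts
// Sketch only: the camera-related members of ErrorState, including the new one.
type CameraErrorState =
  | 'CAMERA_ACCESS_ERROR'
  | 'CAMERA_FRAMERATE_ERROR'
  | 'DEFAULT_CAMERA_NOT_FOUND_ERROR';

function isCameraError(state: string | undefined): state is CameraErrorState {
  return (
    state === 'CAMERA_ACCESS_ERROR' ||
    state === 'CAMERA_FRAMERATE_ERROR' ||
    state === 'DEFAULT_CAMERA_NOT_FOUND_ERROR'
  );
}
```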
package/dist/types/components/FaceLivenessDetector/service/types/liveness.d.ts CHANGED
@@ -1,5 +1,10 @@
1
1
  import type { AwsCredentialProvider } from './credentials';
2
2
  import type { ErrorState } from './error';
3
+ export interface DeviceInfo {
4
+ deviceId: string;
5
+ groupId: string;
6
+ label: string;
7
+ }
3
8
  /**
4
9
  * The props for the FaceLivenessDetectorCore which allows for full configuration of auth
5
10
  */
@@ -11,8 +16,9 @@ export interface FaceLivenessDetectorCoreProps {
11
16
  /**
12
17
  * Callback that signals when the liveness session has completed analysis.
13
18
  * At this point a request can be made to GetFaceLivenessSessionResults.
19
+ * @param deviceInfo Information about the selected device
14
20
  */
15
- onAnalysisComplete: () => Promise<void>;
21
+ onAnalysisComplete: (deviceInfo?: DeviceInfo) => Promise<void>;
16
22
  /**
17
23
  * The AWS region to stream the video to, for current regional support see the documentation here: FIXME LINK
18
24
  */
@@ -22,9 +28,11 @@ export interface FaceLivenessDetectorCoreProps {
22
28
  */
23
29
  onUserCancel?: () => void;
24
30
  /**
25
- * Callback called when there is error occured on any step
31
+ * Callback called when there is an error on any step
32
+ * @param livenessError The error that occurred
33
+ * @param deviceInfo Information about the selected device, if available
26
34
  */
27
- onError?: (livenessError: LivenessError) => void;
35
+ onError?: (livenessError: LivenessError, deviceInfo?: DeviceInfo) => void;
28
36
  /**
29
37
  * Optional parameter for the disabling the Start/Get Ready Screen, default: false
30
38
  */
@@ -44,6 +52,10 @@ export type FaceLivenessDetectorProps = Omit<FaceLivenessDetectorCoreProps, 'con
44
52
  config?: FaceLivenessDetectorConfig;
45
53
  };
46
54
  export interface FaceLivenessDetectorCoreConfig {
55
+ /**
56
+ * Optional device ID to pre-select a camera
57
+ */
58
+ deviceId?: string;
47
59
  /**
48
60
  * overrides the Wasm backend binary CDN path
49
61
  * default is https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@3.11.0/dist/.
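Taken together, the updated `FaceLivenessDetectorCoreProps` let callers pre-select a camera via `config.deviceId` and receive `DeviceInfo` in both callbacks. A hedged usage sketch (the session id, region, and device id values are placeholders):

```tsx
import * as React from 'react';
import { FaceLivenessDetectorCore } from '@aws-amplify/ui-react-liveness';

// Sketch only: sessionId, region, and deviceId below are placeholders.
export function LivenessCheck({ sessionId }: { sessionId: string }) {
  return (
    <FaceLivenessDetectorCore
      sessionId={sessionId}
      region="us-east-1"
      config={{ deviceId: 'preferred-camera-device-id' }} // optional camera pre-selection (new)
      onAnalysisComplete={async (deviceInfo) => {
        // deviceInfo (deviceId/groupId/label) is new in this release and may be undefined
        console.log('Analysis complete on camera:', deviceInfo?.label);
      }}
      onError={(livenessError, deviceInfo) => {
        if (livenessError.state === 'DEFAULT_CAMERA_NOT_FOUND_ERROR') {
          // The configured or previously saved camera was not found; a fallback camera was used.
        }
        console.error(livenessError.error, deviceInfo);
      }}
    />
  );
}
```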
package/dist/types/components/FaceLivenessDetector/service/types/machine.d.ts CHANGED
@@ -66,7 +66,7 @@ export interface LivenessContext {
66
66
  shouldDisconnect: boolean | undefined;
67
67
  videoAssociatedParams: VideoAssociatedParams | undefined;
68
68
  }
69
- export type LivenessEventTypes = 'BEGIN' | 'CONNECTION_TIMEOUT' | 'START_RECORDING' | 'TIMEOUT' | 'ERROR' | 'CANCEL' | 'SET_SESSION_INFO' | 'DISCONNECT_EVENT' | 'SET_DOM_AND_CAMERA_DETAILS' | 'UPDATE_DEVICE_AND_STREAM' | 'SERVER_ERROR' | 'RUNTIME_ERROR' | 'RETRY_CAMERA_CHECK' | 'MOBILE_LANDSCAPE_WARNING';
69
+ export type LivenessEventTypes = 'BEGIN' | 'CONNECTION_TIMEOUT' | 'START_RECORDING' | 'TIMEOUT' | 'ERROR' | 'CANCEL' | 'SET_SESSION_INFO' | 'DISCONNECT_EVENT' | 'SET_DOM_AND_CAMERA_DETAILS' | 'UPDATE_DEVICE_AND_STREAM' | 'CAMERA_NOT_FOUND' | 'SERVER_ERROR' | 'RUNTIME_ERROR' | 'RETRY_CAMERA_CHECK' | 'MOBILE_LANDSCAPE_WARNING';
70
70
  export type LivenessEventData = Record<PropertyKey, any>;
71
71
  export interface LivenessEvent {
72
72
  type: LivenessEventTypes;
package/dist/types/components/FaceLivenessDetector/service/utils/liveness.d.ts CHANGED
@@ -76,6 +76,7 @@ export declare const LivenessErrorStateStringMap: {
76
76
  CAMERA_ACCESS_ERROR: string;
77
77
  MOBILE_LANDSCAPE_ERROR: string;
78
78
  FRESHNESS_TIMEOUT: string;
79
+ DEFAULT_CAMERA_NOT_FOUND_ERROR: string;
79
80
  };
80
81
  interface FillOverlayCanvasFractionalInput {
81
82
  overlayCanvas: HTMLCanvasElement;
package/dist/types/components/FaceLivenessDetector/shared/Hint.d.ts CHANGED
@@ -1,7 +1,7 @@
1
1
  import * as React from 'react';
2
2
  import { IlluminationState, FaceMatchState } from '../service';
3
3
  import type { HintDisplayText } from '../displayText';
4
- export declare const selectErrorState: import("../hooks").LivenessSelectorFn<"CONNECTION_TIMEOUT" | "TIMEOUT" | "RUNTIME_ERROR" | "FRESHNESS_TIMEOUT" | "SERVER_ERROR" | "CAMERA_FRAMERATE_ERROR" | "CAMERA_ACCESS_ERROR" | "FACE_DISTANCE_ERROR" | "MOBILE_LANDSCAPE_ERROR" | "MULTIPLE_FACES_ERROR" | undefined>;
4
+ export declare const selectErrorState: import("../hooks").LivenessSelectorFn<"CONNECTION_TIMEOUT" | "TIMEOUT" | "RUNTIME_ERROR" | "FRESHNESS_TIMEOUT" | "SERVER_ERROR" | "CAMERA_FRAMERATE_ERROR" | "CAMERA_ACCESS_ERROR" | "FACE_DISTANCE_ERROR" | "MOBILE_LANDSCAPE_ERROR" | "MULTIPLE_FACES_ERROR" | "DEFAULT_CAMERA_NOT_FOUND_ERROR" | undefined>;
5
5
  export declare const selectFaceMatchState: import("../hooks").LivenessSelectorFn<FaceMatchState | undefined>;
6
6
  export declare const selectIlluminationState: import("../hooks").LivenessSelectorFn<IlluminationState | undefined>;
7
7
  export declare const selectIsFaceFarEnoughBeforeRecording: import("../hooks").LivenessSelectorFn<boolean | undefined>;
package/dist/types/version.d.ts CHANGED
@@ -1 +1 @@
1
- export declare const VERSION = "3.4.7";
1
+ export declare const VERSION = "3.5.1";
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@aws-amplify/ui-react-liveness",
3
- "version": "3.4.7",
3
+ "version": "3.5.1",
4
4
  "main": "dist/index.js",
5
5
  "module": "dist/esm/index.mjs",
6
6
  "exports": {
@@ -26,6 +26,7 @@
26
26
  },
27
27
  "files": [
28
28
  "dist",
29
+ "patches",
29
30
  "LICENSE"
30
31
  ],
31
32
  "scripts": {
@@ -35,7 +36,7 @@
35
36
  "clean": "rimraf dist node_modules",
36
37
  "dev": "yarn build:rollup --watch",
37
38
  "lint": "yarn typecheck && eslint .",
38
- "prebuild": "rimraf dist",
39
+ "prebuild": "rimraf dist && patch-package",
39
40
  "size": "yarn size-limit",
40
41
  "test": "jest",
41
42
  "test:watch": "yarn test --watch",
@@ -43,22 +44,21 @@
43
44
  },
44
45
  "peerDependencies": {
45
46
  "@aws-amplify/core": "*",
46
- "aws-amplify": "^6.14.3",
47
+ "aws-amplify": "^6.15.9",
47
48
  "react": "^16.14 || ^17 || ^18 || ^19",
48
49
  "react-dom": "^16.14 || ^17 || ^18 || ^19"
49
50
  },
50
51
  "dependencies": {
51
- "@aws-amplify/ui": "6.12.1",
52
- "@aws-amplify/ui-react": "6.13.1",
53
- "@aws-sdk/client-rekognitionstreaming": "3.828.0",
54
- "@aws-sdk/util-format-url": "3.609.0",
52
+ "@aws-amplify/ui": "6.14.0",
53
+ "@aws-amplify/ui-react": "6.14.0",
54
+ "@aws-sdk/client-rekognitionstreaming": "3.967.0",
55
+ "@aws-sdk/util-format-url": "3.965.0",
55
56
  "@smithy/eventstream-serde-browser": "^4.0.4",
56
57
  "@smithy/fetch-http-handler": "^5.0.4",
57
58
  "@smithy/protocol-http": "^3.0.3",
58
59
  "@smithy/signature-v4": "5.1.2",
59
60
  "@smithy/types": "^4.3.1",
60
- "@mediapipe/face_detection": "~0.4.0",
61
- "@tensorflow-models/face-detection": "1.0.2",
61
+ "@mediapipe/face_detection": "^0.4.1646425229",
62
62
  "@tensorflow/tfjs-backend-cpu": "4.11.0",
63
63
  "@tensorflow/tfjs-backend-wasm": "4.11.0",
64
64
  "@tensorflow/tfjs-converter": "4.11.0",
@@ -69,12 +69,14 @@
69
69
  "xstate": "^4.33.6"
70
70
  },
71
71
  "devDependencies": {
72
+ "@tensorflow-models/face-detection": "~1.0.3",
72
73
  "@types/jest-when": "^3.5.0",
73
74
  "blob-polyfill": "^7.0.20220408",
74
75
  "jest-canvas-mock": "^2.5.2",
75
76
  "jest-websocket-mock": "^2.4.1",
76
77
  "jest-when": "^3.5.1",
77
78
  "mock-socket": "^9.2.1",
79
+ "patch-package": "~8.0.1",
78
80
  "web-streams-polyfill": "^3.2.1"
79
81
  },
80
82
  "size-limit": [
@@ -82,7 +84,7 @@
82
84
  "name": "FaceLivenessDetector",
83
85
  "path": "dist/esm/index.mjs",
84
86
  "import": "{ FaceLivenessDetector }",
85
- "limit": "225 kB"
87
+ "limit": "229 kB"
86
88
  }
87
89
  ]
88
- }
90
+ }
package/patches/@tensorflow-models+face-detection+1.0.3.patch ADDED
@@ -0,0 +1,9 @@
1
+ diff --git a/node_modules/@tensorflow-models/face-detection/dist/face-detection.esm.js b/node_modules/@tensorflow-models/face-detection/dist/face-detection.esm.js
2
+ index 1eb84f8..05b860e 100644
3
+ --- a/node_modules/@tensorflow-models/face-detection/dist/face-detection.esm.js
4
+ +++ b/node_modules/@tensorflow-models/face-detection/dist/face-detection.esm.js
5
+ @@ -15,3 +15,3 @@
6
+ * =============================================================================
7
+ */
8
+ -import{FaceDetection as e}from"@mediapipe/face_detection";import{Tensor as t,browser as n,util as i,tidy as o,add as r,mul as a,tensor2d as s,image as h,expandDims as u,cast as c,slice as l,squeeze as f,dispose as d,tensor1d as p,div as m,exp as x,sub as g,concat as y,reshape as v,clipByValue as w,sigmoid as M}from"@tensorflow/tfjs-core";import{loadGraphModel as S}from"@tensorflow/tfjs-converter";var b=function(){return b=Object.assign||function(e){for(var t,n=1,i=arguments.length;n<i;n++)for(var o in t=arguments[n])Object.prototype.hasOwnProperty.call(t,o)&&(e[o]=t[o]);return e},b.apply(this,arguments)};function T(e,t,n,i){return new(n||(n=Promise))((function(o,r){function a(e){try{h(i.next(e))}catch(e){r(e)}}function s(e){try{h(i.throw(e))}catch(e){r(e)}}function h(e){var t;e.done?o(e.value):(t=e.value,t instanceof n?t:new n((function(e){e(t)}))).then(a,s)}h((i=i.apply(e,t||[])).next())}))}function C(e,t){var n,i,o,r,a={label:0,sent:function(){if(1&o[0])throw o[1];return o[1]},trys:[],ops:[]};return r={next:s(0),throw:s(1),return:s(2)},"function"==typeof Symbol&&(r[Symbol.iterator]=function(){return this}),r;function s(r){return function(s){return function(r){if(n)throw new TypeError("Generator is already executing.");for(;a;)try{if(n=1,i&&(o=2&r[0]?i.return:r[0]?i.throw||((o=i.return)&&o.call(i),0):i.next)&&!(o=o.call(i,r[1])).done)return o;switch(i=0,o&&(r=[2&r[0],o.value]),r[0]){case 0:case 1:o=r;break;case 4:return a.label++,{value:r[1],done:!1};case 5:a.label++,i=r[1],r=[0];continue;case 7:r=a.ops.pop(),a.trys.pop();continue;default:if(!(o=a.trys,(o=o.length>0&&o[o.length-1])||6!==r[0]&&2!==r[0])){a=0;continue}if(3===r[0]&&(!o||r[1]>o[0]&&r[1]<o[3])){a.label=r[1];break}if(6===r[0]&&a.label<o[1]){a.label=o[1],o=r;break}if(o&&a.label<o[2]){a.label=o[2],a.ops.push(r);break}o[2]&&a.ops.pop(),a.trys.pop();continue}r=t.call(e,a)}catch(e){r=[6,e],i=0}finally{n=o=0}if(5&r[0])throw r[1];return{value:r[0]?r[1]:void 0,done:!0}}([r,s])}}}var O=["rightEye","leftEye","noseTip","mouthCenter","rightEarTragion","leftEarTragion"];var B={modelType:"short",runtime:"mediapipe",maxFaces:1};var z=function(){function i(t){var n=this;this.width=0,this.height=0,this.selfieMode=!1,this.faceDetectorSolution=new e({locateFile:function(e,n){if(t.solutionPath){var i=t.solutionPath.replace(/\/+$/,"");return"".concat(i,"/").concat(e)}return"".concat(n,"/").concat(e)}}),this.faceDetectorSolution.setOptions({selfieMode:this.selfieMode,model:t.modelType}),this.faceDetectorSolution.onResults((function(e){if(n.height=e.image.height,n.width=e.image.width,n.faces=[],null!==e.detections)for(var t=0,i=e.detections;t<i.length;t++){var o=i[t];n.faces.push(n.normalizedToAbsolute(o.landmarks,(r=o.boundingBox,a=void 0,s=void 0,h=void 0,a=r.xCenter-r.width/2,s=a+r.width,h=r.yCenter-r.height/2,{xMin:a,xMax:s,yMin:h,yMax:h+r.height,width:r.width,height:r.height})))}var r,a,s,h}))}return i.prototype.normalizedToAbsolute=function(e,t){var n=this;return{keypoints:e.map((function(e,t){return{x:e.x*n.width,y:e.y*n.height,name:O[t]}})),box:{xMin:t.xMin*this.width,yMin:t.yMin*this.height,xMax:t.xMax*this.width,yMax:t.yMax*this.height,width:t.width*this.width,height:t.height*this.height}}},i.prototype.estimateFaces=function(e,i){return T(this,void 0,void 0,(function(){var o,r;return C(this,(function(a){switch(a.label){case 0:return i&&i.flipHorizontal&&i.flipHorizontal!==this.selfieMode&&(this.selfieMode=i.flipHorizontal,this.faceDetectorSolution.setOptions({selfieMode:this.selfieMode})),e instanceof 
t?(r=ImageData.bind,[4,n.toPixels(e)]):[3,2];case 1:return o=new(r.apply(ImageData,[void 0,a.sent(),e.shape[1],e.shape[0]])),[3,3];case 2:o=e,a.label=3;case 3:return e=o,[4,this.faceDetectorSolution.send({image:e})];case 4:return a.sent(),[2,this.faces]}}))}))},i.prototype.dispose=function(){this.faceDetectorSolution.close()},i.prototype.reset=function(){this.faceDetectorSolution.reset(),this.width=0,this.height=0,this.faces=null,this.selfieMode=!1},i.prototype.initialize=function(){return this.faceDetectorSolution.initialize()},i}();function D(e){return T(this,void 0,void 0,(function(){var t,n;return C(this,(function(i){switch(i.label){case 0:return t=function(e){if(null==e)return b({},B);var t=b({},e);return t.runtime="mediapipe",null==t.modelType&&(t.modelType=B.modelType),null==t.maxFaces&&(t.maxFaces=B.maxFaces),t}(e),[4,(n=new z(t)).initialize()];case 1:return i.sent(),[2,n]}}))}))}function A(e,t,n,i){var o=e.width,r=e.height,a=i?-1:1,s=Math.cos(e.rotation),h=Math.sin(e.rotation),u=e.xCenter,c=e.yCenter,l=1/t,f=1/n,d=new Array(16);return d[0]=o*s*a*l,d[1]=-r*h*l,d[2]=0,d[3]=(-.5*o*s*a+.5*r*h+u)*l,d[4]=o*h*a*f,d[5]=r*s*f,d[6]=0,d[7]=(-.5*r*s-.5*o*h*a+c)*f,d[8]=0,d[9]=0,d[10]=o*l,d[11]=0,d[12]=0,d[13]=0,d[14]=0,d[15]=1,function(e){if(16!==e.length)throw new Error("Array length must be 16 but got ".concat(e.length));return[[e[0],e[1],e[2],e[3]],[e[4],e[5],e[6],e[7]],[e[8],e[9],e[10],e[11]],[e[12],e[13],e[14],e[15]]]}(d)}function F(e){return e instanceof t?{height:e.shape[0],width:e.shape[1]}:{height:e.height,width:e.width}}function E(e){return e instanceof t?e:n.fromPixels(e)}function R(e,t){i.assert(0!==e.width,(function(){return"".concat(t," width cannot be 0.")})),i.assert(0!==e.height,(function(){return"".concat(t," height cannot be 0.")}))}function L(e,t){var n=function(e,t,n,i){var o=t-e,r=i-n;if(0===o)throw new Error("Original min and max are both ".concat(e,", range cannot be 0."));var a=r/o;return{scale:a,offset:n-e*a}}(0,255,t[0],t[1]);return o((function(){return r(a(e,n.scale),n.offset)}))}function K(e,t,n){var i=t.outputTensorSize,r=t.keepAspectRatio,a=t.borderMode,l=t.outputTensorFloatRange,f=F(e),d=function(e,t){return t?{xCenter:t.xCenter*e.width,yCenter:t.yCenter*e.height,width:t.width*e.width,height:t.height*e.height,rotation:t.rotation}:{xCenter:.5*e.width,yCenter:.5*e.height,width:e.width,height:e.height,rotation:0}}(f,n),p=function(e,t,n){if(void 0===n&&(n=!1),!n)return{top:0,left:0,right:0,bottom:0};var i=t.height,o=t.width;R(t,"targetSize"),R(e,"roi");var r,a,s=i/o,h=e.height/e.width,u=0,c=0;return s>h?(r=e.width,a=e.width*s,c=(1-h/s)/2):(r=e.height/s,a=e.height,u=(1-s/h)/2),e.width=r,e.height=a,{top:c,left:u,right:u,bottom:c}}(d,i,r),m=A(d,f.width,f.height,!1),x=o((function(){var t=E(e),n=s(function(e,t,n){return R(n,"inputResolution"),[1/n.width*e[0][0]*t.width,1/n.height*e[0][1]*t.width,e[0][3]*t.width,1/n.width*e[1][0]*t.height,1/n.height*e[1][1]*t.height,e[1][3]*t.height,0,0]}(m,f,i),[1,8]),o="zero"===a?"constant":"nearest",r=h.transform(u(c(t,"float32")),n,"bilinear",o,0,[i.height,i.width]);return null!=l?L(r,l):r}));return{imageTensor:x,padding:p,transformationMatrix:m}}function k(e){null==e.reduceBoxesInLowestLayer&&(e.reduceBoxesInLowestLayer=!1),null==e.interpolatedScaleAspectRatio&&(e.interpolatedScaleAspectRatio=1),null==e.fixedAnchorSize&&(e.fixedAnchorSize=!1);for(var t=[],n=0;n<e.numLayers;){for(var i=[],o=[],r=[],a=[],s=n;s<e.strides.length&&e.strides[s]===e.strides[n];){var 
h=P(e.minScale,e.maxScale,s,e.strides.length);if(0===s&&e.reduceBoxesInLowestLayer)r.push(1),r.push(2),r.push(.5),a.push(.1),a.push(h),a.push(h);else{for(var u=0;u<e.aspectRatios.length;++u)r.push(e.aspectRatios[u]),a.push(h);if(e.interpolatedScaleAspectRatio>0){var c=s===e.strides.length-1?1:P(e.minScale,e.maxScale,s+1,e.strides.length);a.push(Math.sqrt(h*c)),r.push(e.interpolatedScaleAspectRatio)}}s++}for(var l=0;l<r.length;++l){var f=Math.sqrt(r[l]);i.push(a[l]/f),o.push(a[l]*f)}var d=0,p=0;if(e.featureMapHeight.length>0)d=e.featureMapHeight[n],p=e.featureMapWidth[n];else{var m=e.strides[n];d=Math.ceil(e.inputSizeHeight/m),p=Math.ceil(e.inputSizeWidth/m)}for(var x=0;x<d;++x)for(var g=0;g<p;++g)for(var y=0;y<i.length;++y){var v={xCenter:(g+e.anchorOffsetX)/p,yCenter:(x+e.anchorOffsetY)/d,width:0,height:0};e.fixedAnchorSize?(v.width=1,v.height=1):(v.width=o[y],v.height=i[y]),t.push(v)}n=s}return t}function P(e,t,n,i){return 1===i?.5*(e+t):e+(t-e)*n/(i-1)}function V(e,t){var n=t[0],i=t[1];return[n*e[0]+i*e[1]+e[3],n*e[4]+i*e[5]+e[7]]}function H(e){return o((function(){var t=function(e){return o((function(){return[l(e,[0,0,0],[1,-1,1]),l(e,[0,0,1],[1,-1,-1])]}))}(e),n=t[0],i=t[1];return{boxes:f(i),logits:f(n)}}))}function U(e,t,n,i){return T(this,void 0,void 0,(function(){var i,o,r,a,u;return C(this,(function(c){switch(c.label){case 0:return e.sort((function(e,t){return Math.max.apply(Math,t.score)-Math.max.apply(Math,e.score)})),i=s(e.map((function(e){return[e.locationData.relativeBoundingBox.yMin,e.locationData.relativeBoundingBox.xMin,e.locationData.relativeBoundingBox.yMax,e.locationData.relativeBoundingBox.xMax]}))),o=p(e.map((function(e){return e.score[0]}))),[4,h.nonMaxSuppressionAsync(i,o,t,n)];case 1:return[4,(r=c.sent()).array()];case 2:return a=c.sent(),u=e.filter((function(e,t){return a.indexOf(t)>-1})),d([i,o,r]),[2,u]}}))}))}function j(e,t,n){return T(this,void 0,void 0,(function(){var i,s,h,u,c;return C(this,(function(p){switch(p.label){case 0:return i=e[0],s=e[1],h=function(e,t,n){return o((function(){var i,o,s,h;n.reverseOutputOrder?(o=f(l(e,[0,n.boxCoordOffset+0],[-1,1])),i=f(l(e,[0,n.boxCoordOffset+1],[-1,1])),h=f(l(e,[0,n.boxCoordOffset+2],[-1,1])),s=f(l(e,[0,n.boxCoordOffset+3],[-1,1]))):(i=f(l(e,[0,n.boxCoordOffset+0],[-1,1])),o=f(l(e,[0,n.boxCoordOffset+1],[-1,1])),s=f(l(e,[0,n.boxCoordOffset+2],[-1,1])),h=f(l(e,[0,n.boxCoordOffset+3],[-1,1]))),o=r(a(m(o,n.xScale),t.w),t.x),i=r(a(m(i,n.yScale),t.h),t.y),n.applyExponentialOnBoxSize?(s=a(x(m(s,n.hScale)),t.h),h=a(x(m(h,n.wScale)),t.w)):(s=a(m(s,n.hScale),t.h),h=a(m(h,n.wScale),t.h));var u=g(i,m(s,2)),c=g(o,m(h,2)),d=r(i,m(s,2)),p=r(o,m(h,2)),w=y([v(u,[n.numBoxes,1]),v(c,[n.numBoxes,1]),v(d,[n.numBoxes,1]),v(p,[n.numBoxes,1])],1);if(n.numKeypoints)for(var M=0;M<n.numKeypoints;++M){var S=n.keypointCoordOffset+M*n.numValuesPerKeypoint,b=void 0,T=void 0;n.reverseOutputOrder?(b=f(l(e,[0,S],[-1,1])),T=f(l(e,[0,S+1],[-1,1]))):(T=f(l(e,[0,S],[-1,1])),b=f(l(e,[0,S+1],[-1,1])));var C=r(a(m(b,n.xScale),t.w),t.x),O=r(a(m(T,n.yScale),t.h),t.y);w=y([w,v(C,[n.numBoxes,1]),v(O,[n.numBoxes,1])],1)}return w}))}(s,t,n),u=o((function(){var e=i;return n.sigmoidScore?(null!=n.scoreClippingThresh&&(e=w(i,-n.scoreClippingThresh,n.scoreClippingThresh)),e=M(e)):e})),[4,I(h,u,n)];case 1:return c=p.sent(),d([h,u]),[2,c]}}))}))}function I(e,t,n){return T(this,void 0,void 0,(function(){var i,o,r,a,s,h,u,c,l,f,d,p;return C(this,(function(m){switch(m.label){case 0:return i=[],[4,e.data()];case 1:return o=m.sent(),[4,t.data()];case 
2:for(r=m.sent(),a=0;a<n.numBoxes;++a)if(!(null!=n.minScoreThresh&&r[a]<n.minScoreThresh||(s=a*n.numCoords,h=_(o[s+0],o[s+1],o[s+2],o[s+3],r[a],n.flipVertically,a),(u=h.locationData.relativeBoundingBox).width<0||u.height<0))){if(n.numKeypoints>0)for((c=h.locationData).relativeKeypoints=[],l=n.numKeypoints*n.numValuesPerKeypoint,f=0;f<l;f+=n.numValuesPerKeypoint)d=s+n.keypointCoordOffset+f,p={x:o[d+0],y:n.flipVertically?1-o[d+1]:o[d+1]},c.relativeKeypoints.push(p);i.push(h)}return[2,i]}}))}))}function _(e,t,n,i,o,r,a){return{score:[o],ind:a,locationData:{relativeBoundingBox:{xMin:t,yMin:r?1-n:e,xMax:i,yMax:r?1-e:n,width:i-t,height:n-e}}}}var N={reduceBoxesInLowestLayer:!1,interpolatedScaleAspectRatio:1,featureMapHeight:[],featureMapWidth:[],numLayers:4,minScale:.1484375,maxScale:.75,inputSizeHeight:128,inputSizeWidth:128,anchorOffsetX:.5,anchorOffsetY:.5,strides:[8,16,16,16],aspectRatios:[1],fixedAnchorSize:!0},W={reduceBoxesInLowestLayer:!1,interpolatedScaleAspectRatio:0,featureMapHeight:[],featureMapWidth:[],numLayers:1,minScale:.1484375,maxScale:.75,inputSizeHeight:192,inputSizeWidth:192,anchorOffsetX:.5,anchorOffsetY:.5,strides:[4],aspectRatios:[1],fixedAnchorSize:!0},X={runtime:"tfjs",modelType:"short",maxFaces:1,detectorModelUrl:"https://tfhub.dev/mediapipe/tfjs-model/face_detection/short/1"},Y={applyExponentialOnBoxSize:!1,flipVertically:!1,ignoreClasses:[],numClasses:1,numBoxes:896,numCoords:16,boxCoordOffset:0,keypointCoordOffset:4,numKeypoints:6,numValuesPerKeypoint:2,sigmoidScore:!0,scoreClippingThresh:100,reverseOutputOrder:!0,xScale:128,yScale:128,hScale:128,wScale:128,minScoreThresh:.5},q={applyExponentialOnBoxSize:!1,flipVertically:!1,ignoreClasses:[],numClasses:1,numBoxes:2304,numCoords:16,boxCoordOffset:0,keypointCoordOffset:4,numKeypoints:6,numValuesPerKeypoint:2,sigmoidScore:!0,scoreClippingThresh:100,reverseOutputOrder:!0,xScale:192,yScale:192,hScale:192,wScale:192,minScoreThresh:.6},G=.3,$={outputTensorSize:{width:128,height:128},keepAspectRatio:!0,outputTensorFloatRange:[-1,1],borderMode:"zero"},J={outputTensorSize:{width:192,height:192},keepAspectRatio:!0,outputTensorFloatRange:[-1,1],borderMode:"zero"};var Q,Z=function(){function e(e,t,n){this.detectorModel=t,this.maxFaces=n,"full"===e?(this.imageToTensorConfig=J,this.tensorsToDetectionConfig=q,this.anchors=k(W)):(this.imageToTensorConfig=$,this.tensorsToDetectionConfig=Y,this.anchors=k(N));var i=p(this.anchors.map((function(e){return e.width}))),o=p(this.anchors.map((function(e){return e.height}))),r=p(this.anchors.map((function(e){return e.xCenter}))),a=p(this.anchors.map((function(e){return e.yCenter})));this.anchorTensor={x:r,y:a,w:i,h:o}}return e.prototype.dispose=function(){this.detectorModel.dispose(),d([this.anchorTensor.x,this.anchorTensor.y,this.anchorTensor.w,this.anchorTensor.h])},e.prototype.reset=function(){},e.prototype.detectFaces=function(e,t){return void 0===t&&(t=!1),T(this,void 0,void 0,(function(){var n,i,r,a,s,l,p,m,x,g,y;return C(this,(function(v){switch(v.label){case 0:return null==e?(this.reset(),[2,[]]):(n=o((function(){var n=c(E(e),"float32");if(t){n=f(h.flipLeftRight(u(n,0)),[0])}return n})),i=K(n,this.imageToTensorConfig),r=i.imageTensor,a=i.transformationMatrix,s=this.detectorModel.execute(r,"Identity:0"),l=H(s),p=l.boxes,[4,j([m=l.logits,p],this.anchorTensor,this.tensorsToDetectionConfig)]);case 1:return 0===(x=v.sent()).length?(d([n,r,s,m,p]),[2,x]):[4,U(x,this.maxFaces,G)];case 2:return g=v.sent(),y=function(e,t){void 0===e&&(e=[]);var n,i=(n=t,[].concat.apply([],n));return 
e.forEach((function(e){var t=e.locationData;t.relativeKeypoints.forEach((function(e){var t=V(i,[e.x,e.y]),n=t[0],o=t[1];e.x=n,e.y=o}));var n=t.relativeBoundingBox,o=Number.MAX_VALUE,r=Number.MAX_VALUE,a=Number.MIN_VALUE,s=Number.MIN_VALUE;[[n.xMin,n.yMin],[n.xMin+n.width,n.yMin],[n.xMin+n.width,n.yMin+n.height],[n.xMin,n.yMin+n.height]].forEach((function(e){var t=V(i,e),n=t[0],h=t[1];o=Math.min(o,n),a=Math.max(a,n),r=Math.min(r,h),s=Math.max(s,h)})),t.relativeBoundingBox={xMin:o,xMax:a,yMin:r,yMax:s,width:a-o,height:s-r}})),e}(g,a),d([n,r,s,m,p]),[2,y]}}))}))},e.prototype.estimateFaces=function(e,t){return T(this,void 0,void 0,(function(){var n,i;return C(this,(function(o){return n=F(e),i=!!t&&t.flipHorizontal,[2,this.detectFaces(e,i).then((function(e){return e.map((function(e){for(var t=e.locationData.relativeKeypoints.map((function(e,t){return b(b({},e),{x:e.x*n.width,y:e.y*n.height,name:O[t]})})),i=e.locationData.relativeBoundingBox,o=0,r=["width","xMax","xMin"];o<r.length;o++){i[r[o]]*=n.width}for(var a=0,s=["height","yMax","yMin"];a<s.length;a++){i[s[a]]*=n.height}return{keypoints:t,box:i}}))}))]}))}))},e}();function ee(e){return T(this,void 0,void 0,(function(){var t,n,i;return C(this,(function(o){switch(o.label){case 0:return t=function(e){if(null==e)return b({},X);var t=b({},e);null==t.modelType&&(t.modelType=X.modelType),null==t.maxFaces&&(t.maxFaces=X.maxFaces),null==t.detectorModelUrl&&("full"===t.modelType?t.detectorModelUrl="https://tfhub.dev/mediapipe/tfjs-model/face_detection/full/1":t.detectorModelUrl="https://tfhub.dev/mediapipe/tfjs-model/face_detection/short/1");return t}(e),n="string"==typeof t.detectorModelUrl&&t.detectorModelUrl.indexOf("https://tfhub.dev")>-1,[4,S(t.detectorModelUrl,{fromTFHub:n})];case 1:return i=o.sent(),[2,new Z(t.modelType,i,t.maxFaces)]}}))}))}function te(e,t){return T(this,void 0,void 0,(function(){var n,i;return C(this,(function(o){if(e===Q.MediaPipeFaceDetector){if(i=void 0,null!=(n=t)){if("tfjs"===n.runtime)return[2,ee(n)];if("mediapipe"===n.runtime)return[2,D(n)];i=n.runtime}throw new Error("Expect modelConfig.runtime to be either 'tfjs' "+"or 'mediapipe', but got ".concat(i))}throw new Error("".concat(e," is not a supported model name."))}))}))}!function(e){e.MediaPipeFaceDetector="MediaPipeFaceDetector"}(Q||(Q={}));export{z as MediaPipeFaceDetectorMediaPipe,Z as MediaPipeFaceDetectorTfjs,Q as SupportedModels,te as createDetector};
9
+ +import FaceDetection from"@mediapipe/face_detection";import{Tensor as t,browser as n,util as i,tidy as o,add as r,mul as a,tensor2d as s,image as h,expandDims as u,cast as c,slice as l,squeeze as f,dispose as d,tensor1d as p,div as m,exp as x,sub as g,concat as y,reshape as v,clipByValue as w,sigmoid as M}from"@tensorflow/tfjs-core";import{loadGraphModel as S}from"@tensorflow/tfjs-converter";var b=function(){return b=Object.assign||function(e){for(var t,n=1,i=arguments.length;n<i;n++)for(var o in t=arguments[n])Object.prototype.hasOwnProperty.call(t,o)&&(e[o]=t[o]);return e},b.apply(this,arguments)};function T(e,t,n,i){return new(n||(n=Promise))((function(o,r){function a(e){try{h(i.next(e))}catch(e){r(e)}}function s(e){try{h(i.throw(e))}catch(e){r(e)}}function h(e){var t;e.done?o(e.value):(t=e.value,t instanceof n?t:new n((function(e){e(t)}))).then(a,s)}h((i=i.apply(e,t||[])).next())}))}function C(e,t){var n,i,o,r,a={label:0,sent:function(){if(1&o[0])throw o[1];return o[1]},trys:[],ops:[]};return r={next:s(0),throw:s(1),return:s(2)},"function"==typeof Symbol&&(r[Symbol.iterator]=function(){return this}),r;function s(r){return function(s){return function(r){if(n)throw new TypeError("Generator is already executing.");for(;a;)try{if(n=1,i&&(o=2&r[0]?i.return:r[0]?i.throw||((o=i.return)&&o.call(i),0):i.next)&&!(o=o.call(i,r[1])).done)return o;switch(i=0,o&&(r=[2&r[0],o.value]),r[0]){case 0:case 1:o=r;break;case 4:return a.label++,{value:r[1],done:!1};case 5:a.label++,i=r[1],r=[0];continue;case 7:r=a.ops.pop(),a.trys.pop();continue;default:if(!(o=a.trys,(o=o.length>0&&o[o.length-1])||6!==r[0]&&2!==r[0])){a=0;continue}if(3===r[0]&&(!o||r[1]>o[0]&&r[1]<o[3])){a.label=r[1];break}if(6===r[0]&&a.label<o[1]){a.label=o[1],o=r;break}if(o&&a.label<o[2]){a.label=o[2],a.ops.push(r);break}o[2]&&a.ops.pop(),a.trys.pop();continue}r=t.call(e,a)}catch(e){r=[6,e],i=0}finally{n=o=0}if(5&r[0])throw r[1];return{value:r[0]?r[1]:void 0,done:!0}}([r,s])}}}var O=["rightEye","leftEye","noseTip","mouthCenter","rightEarTragion","leftEarTragion"];var B={modelType:"short",runtime:"mediapipe",maxFaces:1};var z=function(){function i(t){var n=this;this.width=0,this.height=0,this.selfieMode=!1,this.faceDetectorSolution=new FaceDetection({locateFile:function(e,n){if(t.solutionPath){var i=t.solutionPath.replace(/\/+$/,"");return"".concat(i,"/").concat(e)}return"".concat(n,"/").concat(e)}}),this.faceDetectorSolution.setOptions({selfieMode:this.selfieMode,model:t.modelType}),this.faceDetectorSolution.onResults((function(e){if(n.height=e.image.height,n.width=e.image.width,n.faces=[],null!==e.detections)for(var t=0,i=e.detections;t<i.length;t++){var o=i[t];n.faces.push(n.normalizedToAbsolute(o.landmarks,(r=o.boundingBox,a=void 0,s=void 0,h=void 0,a=r.xCenter-r.width/2,s=a+r.width,h=r.yCenter-r.height/2,{xMin:a,xMax:s,yMin:h,yMax:h+r.height,width:r.width,height:r.height})))}var r,a,s,h}))}return i.prototype.normalizedToAbsolute=function(e,t){var n=this;return{keypoints:e.map((function(e,t){return{x:e.x*n.width,y:e.y*n.height,name:O[t]}})),box:{xMin:t.xMin*this.width,yMin:t.yMin*this.height,xMax:t.xMax*this.width,yMax:t.yMax*this.height,width:t.width*this.width,height:t.height*this.height}}},i.prototype.estimateFaces=function(e,i){return T(this,void 0,void 0,(function(){var o,r;return C(this,(function(a){switch(a.label){case 0:return i&&i.flipHorizontal&&i.flipHorizontal!==this.selfieMode&&(this.selfieMode=i.flipHorizontal,this.faceDetectorSolution.setOptions({selfieMode:this.selfieMode})),e instanceof 
t?(r=ImageData.bind,[4,n.toPixels(e)]):[3,2];case 1:return o=new(r.apply(ImageData,[void 0,a.sent(),e.shape[1],e.shape[0]])),[3,3];case 2:o=e,a.label=3;case 3:return e=o,[4,this.faceDetectorSolution.send({image:e})];case 4:return a.sent(),[2,this.faces]}}))}))},i.prototype.dispose=function(){this.faceDetectorSolution.close()},i.prototype.reset=function(){this.faceDetectorSolution.reset(),this.width=0,this.height=0,this.faces=null,this.selfieMode=!1},i.prototype.initialize=function(){return this.faceDetectorSolution.initialize()},i}();function D(e){return T(this,void 0,void 0,(function(){var t,n;return C(this,(function(i){switch(i.label){case 0:return t=function(e){if(null==e)return b({},B);var t=b({},e);return t.runtime="mediapipe",null==t.modelType&&(t.modelType=B.modelType),null==t.maxFaces&&(t.maxFaces=B.maxFaces),t}(e),[4,(n=new z(t)).initialize()];case 1:return i.sent(),[2,n]}}))}))}function A(e,t,n,i){var o=e.width,r=e.height,a=i?-1:1,s=Math.cos(e.rotation),h=Math.sin(e.rotation),u=e.xCenter,c=e.yCenter,l=1/t,f=1/n,d=new Array(16);return d[0]=o*s*a*l,d[1]=-r*h*l,d[2]=0,d[3]=(-.5*o*s*a+.5*r*h+u)*l,d[4]=o*h*a*f,d[5]=r*s*f,d[6]=0,d[7]=(-.5*r*s-.5*o*h*a+c)*f,d[8]=0,d[9]=0,d[10]=o*l,d[11]=0,d[12]=0,d[13]=0,d[14]=0,d[15]=1,function(e){if(16!==e.length)throw new Error("Array length must be 16 but got ".concat(e.length));return[[e[0],e[1],e[2],e[3]],[e[4],e[5],e[6],e[7]],[e[8],e[9],e[10],e[11]],[e[12],e[13],e[14],e[15]]]}(d)}function F(e){return e instanceof t?{height:e.shape[0],width:e.shape[1]}:{height:e.height,width:e.width}}function E(e){return e instanceof t?e:n.fromPixels(e)}function R(e,t){i.assert(0!==e.width,(function(){return"".concat(t," width cannot be 0.")})),i.assert(0!==e.height,(function(){return"".concat(t," height cannot be 0.")}))}function L(e,t){var n=function(e,t,n,i){var o=t-e,r=i-n;if(0===o)throw new Error("Original min and max are both ".concat(e,", range cannot be 0."));var a=r/o;return{scale:a,offset:n-e*a}}(0,255,t[0],t[1]);return o((function(){return r(a(e,n.scale),n.offset)}))}function K(e,t,n){var i=t.outputTensorSize,r=t.keepAspectRatio,a=t.borderMode,l=t.outputTensorFloatRange,f=F(e),d=function(e,t){return t?{xCenter:t.xCenter*e.width,yCenter:t.yCenter*e.height,width:t.width*e.width,height:t.height*e.height,rotation:t.rotation}:{xCenter:.5*e.width,yCenter:.5*e.height,width:e.width,height:e.height,rotation:0}}(f,n),p=function(e,t,n){if(void 0===n&&(n=!1),!n)return{top:0,left:0,right:0,bottom:0};var i=t.height,o=t.width;R(t,"targetSize"),R(e,"roi");var r,a,s=i/o,h=e.height/e.width,u=0,c=0;return s>h?(r=e.width,a=e.width*s,c=(1-h/s)/2):(r=e.height/s,a=e.height,u=(1-s/h)/2),e.width=r,e.height=a,{top:c,left:u,right:u,bottom:c}}(d,i,r),m=A(d,f.width,f.height,!1),x=o((function(){var t=E(e),n=s(function(e,t,n){return R(n,"inputResolution"),[1/n.width*e[0][0]*t.width,1/n.height*e[0][1]*t.width,e[0][3]*t.width,1/n.width*e[1][0]*t.height,1/n.height*e[1][1]*t.height,e[1][3]*t.height,0,0]}(m,f,i),[1,8]),o="zero"===a?"constant":"nearest",r=h.transform(u(c(t,"float32")),n,"bilinear",o,0,[i.height,i.width]);return null!=l?L(r,l):r}));return{imageTensor:x,padding:p,transformationMatrix:m}}function k(e){null==e.reduceBoxesInLowestLayer&&(e.reduceBoxesInLowestLayer=!1),null==e.interpolatedScaleAspectRatio&&(e.interpolatedScaleAspectRatio=1),null==e.fixedAnchorSize&&(e.fixedAnchorSize=!1);for(var t=[],n=0;n<e.numLayers;){for(var i=[],o=[],r=[],a=[],s=n;s<e.strides.length&&e.strides[s]===e.strides[n];){var 
h=P(e.minScale,e.maxScale,s,e.strides.length);if(0===s&&e.reduceBoxesInLowestLayer)r.push(1),r.push(2),r.push(.5),a.push(.1),a.push(h),a.push(h);else{for(var u=0;u<e.aspectRatios.length;++u)r.push(e.aspectRatios[u]),a.push(h);if(e.interpolatedScaleAspectRatio>0){var c=s===e.strides.length-1?1:P(e.minScale,e.maxScale,s+1,e.strides.length);a.push(Math.sqrt(h*c)),r.push(e.interpolatedScaleAspectRatio)}}s++}for(var l=0;l<r.length;++l){var f=Math.sqrt(r[l]);i.push(a[l]/f),o.push(a[l]*f)}var d=0,p=0;if(e.featureMapHeight.length>0)d=e.featureMapHeight[n],p=e.featureMapWidth[n];else{var m=e.strides[n];d=Math.ceil(e.inputSizeHeight/m),p=Math.ceil(e.inputSizeWidth/m)}for(var x=0;x<d;++x)for(var g=0;g<p;++g)for(var y=0;y<i.length;++y){var v={xCenter:(g+e.anchorOffsetX)/p,yCenter:(x+e.anchorOffsetY)/d,width:0,height:0};e.fixedAnchorSize?(v.width=1,v.height=1):(v.width=o[y],v.height=i[y]),t.push(v)}n=s}return t}function P(e,t,n,i){return 1===i?.5*(e+t):e+(t-e)*n/(i-1)}function V(e,t){var n=t[0],i=t[1];return[n*e[0]+i*e[1]+e[3],n*e[4]+i*e[5]+e[7]]}function H(e){return o((function(){var t=function(e){return o((function(){return[l(e,[0,0,0],[1,-1,1]),l(e,[0,0,1],[1,-1,-1])]}))}(e),n=t[0],i=t[1];return{boxes:f(i),logits:f(n)}}))}function U(e,t,n,i){return T(this,void 0,void 0,(function(){var i,o,r,a,u;return C(this,(function(c){switch(c.label){case 0:return e.sort((function(e,t){return Math.max.apply(Math,t.score)-Math.max.apply(Math,e.score)})),i=s(e.map((function(e){return[e.locationData.relativeBoundingBox.yMin,e.locationData.relativeBoundingBox.xMin,e.locationData.relativeBoundingBox.yMax,e.locationData.relativeBoundingBox.xMax]}))),o=p(e.map((function(e){return e.score[0]}))),[4,h.nonMaxSuppressionAsync(i,o,t,n)];case 1:return[4,(r=c.sent()).array()];case 2:return a=c.sent(),u=e.filter((function(e,t){return a.indexOf(t)>-1})),d([i,o,r]),[2,u]}}))}))}function j(e,t,n){return T(this,void 0,void 0,(function(){var i,s,h,u,c;return C(this,(function(p){switch(p.label){case 0:return i=e[0],s=e[1],h=function(e,t,n){return o((function(){var i,o,s,h;n.reverseOutputOrder?(o=f(l(e,[0,n.boxCoordOffset+0],[-1,1])),i=f(l(e,[0,n.boxCoordOffset+1],[-1,1])),h=f(l(e,[0,n.boxCoordOffset+2],[-1,1])),s=f(l(e,[0,n.boxCoordOffset+3],[-1,1]))):(i=f(l(e,[0,n.boxCoordOffset+0],[-1,1])),o=f(l(e,[0,n.boxCoordOffset+1],[-1,1])),s=f(l(e,[0,n.boxCoordOffset+2],[-1,1])),h=f(l(e,[0,n.boxCoordOffset+3],[-1,1]))),o=r(a(m(o,n.xScale),t.w),t.x),i=r(a(m(i,n.yScale),t.h),t.y),n.applyExponentialOnBoxSize?(s=a(x(m(s,n.hScale)),t.h),h=a(x(m(h,n.wScale)),t.w)):(s=a(m(s,n.hScale),t.h),h=a(m(h,n.wScale),t.h));var u=g(i,m(s,2)),c=g(o,m(h,2)),d=r(i,m(s,2)),p=r(o,m(h,2)),w=y([v(u,[n.numBoxes,1]),v(c,[n.numBoxes,1]),v(d,[n.numBoxes,1]),v(p,[n.numBoxes,1])],1);if(n.numKeypoints)for(var M=0;M<n.numKeypoints;++M){var S=n.keypointCoordOffset+M*n.numValuesPerKeypoint,b=void 0,T=void 0;n.reverseOutputOrder?(b=f(l(e,[0,S],[-1,1])),T=f(l(e,[0,S+1],[-1,1]))):(T=f(l(e,[0,S],[-1,1])),b=f(l(e,[0,S+1],[-1,1])));var C=r(a(m(b,n.xScale),t.w),t.x),O=r(a(m(T,n.yScale),t.h),t.y);w=y([w,v(C,[n.numBoxes,1]),v(O,[n.numBoxes,1])],1)}return w}))}(s,t,n),u=o((function(){var e=i;return n.sigmoidScore?(null!=n.scoreClippingThresh&&(e=w(i,-n.scoreClippingThresh,n.scoreClippingThresh)),e=M(e)):e})),[4,I(h,u,n)];case 1:return c=p.sent(),d([h,u]),[2,c]}}))}))}function I(e,t,n){return T(this,void 0,void 0,(function(){var i,o,r,a,s,h,u,c,l,f,d,p;return C(this,(function(m){switch(m.label){case 0:return i=[],[4,e.data()];case 1:return o=m.sent(),[4,t.data()];case 
2:for(r=m.sent(),a=0;a<n.numBoxes;++a)if(!(null!=n.minScoreThresh&&r[a]<n.minScoreThresh||(s=a*n.numCoords,h=_(o[s+0],o[s+1],o[s+2],o[s+3],r[a],n.flipVertically,a),(u=h.locationData.relativeBoundingBox).width<0||u.height<0))){if(n.numKeypoints>0)for((c=h.locationData).relativeKeypoints=[],l=n.numKeypoints*n.numValuesPerKeypoint,f=0;f<l;f+=n.numValuesPerKeypoint)d=s+n.keypointCoordOffset+f,p={x:o[d+0],y:n.flipVertically?1-o[d+1]:o[d+1]},c.relativeKeypoints.push(p);i.push(h)}return[2,i]}}))}))}function _(e,t,n,i,o,r,a){return{score:[o],ind:a,locationData:{relativeBoundingBox:{xMin:t,yMin:r?1-n:e,xMax:i,yMax:r?1-e:n,width:i-t,height:n-e}}}}var N={reduceBoxesInLowestLayer:!1,interpolatedScaleAspectRatio:1,featureMapHeight:[],featureMapWidth:[],numLayers:4,minScale:.1484375,maxScale:.75,inputSizeHeight:128,inputSizeWidth:128,anchorOffsetX:.5,anchorOffsetY:.5,strides:[8,16,16,16],aspectRatios:[1],fixedAnchorSize:!0},W={reduceBoxesInLowestLayer:!1,interpolatedScaleAspectRatio:0,featureMapHeight:[],featureMapWidth:[],numLayers:1,minScale:.1484375,maxScale:.75,inputSizeHeight:192,inputSizeWidth:192,anchorOffsetX:.5,anchorOffsetY:.5,strides:[4],aspectRatios:[1],fixedAnchorSize:!0},X={runtime:"tfjs",modelType:"short",maxFaces:1,detectorModelUrl:"https://tfhub.dev/mediapipe/tfjs-model/face_detection/short/1"},Y={applyExponentialOnBoxSize:!1,flipVertically:!1,ignoreClasses:[],numClasses:1,numBoxes:896,numCoords:16,boxCoordOffset:0,keypointCoordOffset:4,numKeypoints:6,numValuesPerKeypoint:2,sigmoidScore:!0,scoreClippingThresh:100,reverseOutputOrder:!0,xScale:128,yScale:128,hScale:128,wScale:128,minScoreThresh:.5},q={applyExponentialOnBoxSize:!1,flipVertically:!1,ignoreClasses:[],numClasses:1,numBoxes:2304,numCoords:16,boxCoordOffset:0,keypointCoordOffset:4,numKeypoints:6,numValuesPerKeypoint:2,sigmoidScore:!0,scoreClippingThresh:100,reverseOutputOrder:!0,xScale:192,yScale:192,hScale:192,wScale:192,minScoreThresh:.6},G=.3,$={outputTensorSize:{width:128,height:128},keepAspectRatio:!0,outputTensorFloatRange:[-1,1],borderMode:"zero"},J={outputTensorSize:{width:192,height:192},keepAspectRatio:!0,outputTensorFloatRange:[-1,1],borderMode:"zero"};var Q,Z=function(){function e(e,t,n){this.detectorModel=t,this.maxFaces=n,"full"===e?(this.imageToTensorConfig=J,this.tensorsToDetectionConfig=q,this.anchors=k(W)):(this.imageToTensorConfig=$,this.tensorsToDetectionConfig=Y,this.anchors=k(N));var i=p(this.anchors.map((function(e){return e.width}))),o=p(this.anchors.map((function(e){return e.height}))),r=p(this.anchors.map((function(e){return e.xCenter}))),a=p(this.anchors.map((function(e){return e.yCenter})));this.anchorTensor={x:r,y:a,w:i,h:o}}return e.prototype.dispose=function(){this.detectorModel.dispose(),d([this.anchorTensor.x,this.anchorTensor.y,this.anchorTensor.w,this.anchorTensor.h])},e.prototype.reset=function(){},e.prototype.detectFaces=function(e,t){return void 0===t&&(t=!1),T(this,void 0,void 0,(function(){var n,i,r,a,s,l,p,m,x,g,y;return C(this,(function(v){switch(v.label){case 0:return null==e?(this.reset(),[2,[]]):(n=o((function(){var n=c(E(e),"float32");if(t){n=f(h.flipLeftRight(u(n,0)),[0])}return n})),i=K(n,this.imageToTensorConfig),r=i.imageTensor,a=i.transformationMatrix,s=this.detectorModel.execute(r,"Identity:0"),l=H(s),p=l.boxes,[4,j([m=l.logits,p],this.anchorTensor,this.tensorsToDetectionConfig)]);case 1:return 0===(x=v.sent()).length?(d([n,r,s,m,p]),[2,x]):[4,U(x,this.maxFaces,G)];case 2:return g=v.sent(),y=function(e,t){void 0===e&&(e=[]);var n,i=(n=t,[].concat.apply([],n));return 
e.forEach((function(e){var t=e.locationData;t.relativeKeypoints.forEach((function(e){var t=V(i,[e.x,e.y]),n=t[0],o=t[1];e.x=n,e.y=o}));var n=t.relativeBoundingBox,o=Number.MAX_VALUE,r=Number.MAX_VALUE,a=Number.MIN_VALUE,s=Number.MIN_VALUE;[[n.xMin,n.yMin],[n.xMin+n.width,n.yMin],[n.xMin+n.width,n.yMin+n.height],[n.xMin,n.yMin+n.height]].forEach((function(e){var t=V(i,e),n=t[0],h=t[1];o=Math.min(o,n),a=Math.max(a,n),r=Math.min(r,h),s=Math.max(s,h)})),t.relativeBoundingBox={xMin:o,xMax:a,yMin:r,yMax:s,width:a-o,height:s-r}})),e}(g,a),d([n,r,s,m,p]),[2,y]}}))}))},e.prototype.estimateFaces=function(e,t){return T(this,void 0,void 0,(function(){var n,i;return C(this,(function(o){return n=F(e),i=!!t&&t.flipHorizontal,[2,this.detectFaces(e,i).then((function(e){return e.map((function(e){for(var t=e.locationData.relativeKeypoints.map((function(e,t){return b(b({},e),{x:e.x*n.width,y:e.y*n.height,name:O[t]})})),i=e.locationData.relativeBoundingBox,o=0,r=["width","xMax","xMin"];o<r.length;o++){i[r[o]]*=n.width}for(var a=0,s=["height","yMax","yMin"];a<s.length;a++){i[s[a]]*=n.height}return{keypoints:t,box:i}}))}))]}))}))},e}();function ee(e){return T(this,void 0,void 0,(function(){var t,n,i;return C(this,(function(o){switch(o.label){case 0:return t=function(e){if(null==e)return b({},X);var t=b({},e);null==t.modelType&&(t.modelType=X.modelType),null==t.maxFaces&&(t.maxFaces=X.maxFaces),null==t.detectorModelUrl&&("full"===t.modelType?t.detectorModelUrl="https://tfhub.dev/mediapipe/tfjs-model/face_detection/full/1":t.detectorModelUrl="https://tfhub.dev/mediapipe/tfjs-model/face_detection/short/1");return t}(e),n="string"==typeof t.detectorModelUrl&&t.detectorModelUrl.indexOf("https://tfhub.dev")>-1,[4,S(t.detectorModelUrl,{fromTFHub:n})];case 1:return i=o.sent(),[2,new Z(t.modelType,i,t.maxFaces)]}}))}))}function te(e,t){return T(this,void 0,void 0,(function(){var n,i;return C(this,(function(o){if(e===Q.MediaPipeFaceDetector){if(i=void 0,null!=(n=t)){if("tfjs"===n.runtime)return[2,ee(n)];if("mediapipe"===n.runtime)return[2,D(n)];i=n.runtime}throw new Error("Expect modelConfig.runtime to be either 'tfjs' "+"or 'mediapipe', but got ".concat(i))}throw new Error("".concat(e," is not a supported model name."))}))}))}!function(e){e.MediaPipeFaceDetector="MediaPipeFaceDetector"}(Q||(Q={}));export{z as MediaPipeFaceDetectorMediaPipe,Z as MediaPipeFaceDetectorTfjs,Q as SupportedModels,te as createDetector};
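The only functional change in this bundled patch is the first import: the face-detection ESM build now uses the default export of `@mediapipe/face_detection` instead of the named `FaceDetection` binding, matching the constructor call later in the file. An illustrative comparison (assuming default-export interop is available for the CommonJS build of `@mediapipe/face_detection`):

```ts
// Before the patch (named import):
// import { FaceDetection } from '@mediapipe/face_detection';

// After the patch (default import), matching the `new FaceDetection({...})`
// constructor call in the patched bundle:
import FaceDetection from '@mediapipe/face_detection';

const faceDetector = new FaceDetection({
  // locateFile receives the asset file name and the base path.
  locateFile: (file: string, base: string) => `${base}/${file}`,
});
```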