pipwave-ekyc-uikit 0.0.1-beta.8 → 0.0.1-beta.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -73,3 +73,25 @@ pw.onClose(() => {
73
73
  // do your own handling here
74
74
  });
75
75
  ```
76
+
77
+ ### Configuration
78
+
79
+ #### Installation on Angular Base Project
80
+
81
+ For Angular-based projects, you'll need to include the `face-api.js` library in your project's `angular.json` configuration:
82
+
83
+ ```json
84
+ "architect": {
85
+ "build": {
86
+ "options": {
87
+ "scripts": ["./node_modules/face-api.js/dist/face-api.min.js"]
88
+ }
89
+ }
90
+ }
91
+ ```
92
+
93
+ ## Acknowledgements
94
+
95
+ We extend our gratitude to the following open-source projects that greatly contributed to the development of the Pipwave Ekyc Uikit: `pipwave-ekyc-sdk`, `webpack`, `face-api.js`, `moment`, `pikaday`.
96
+
97
+ Thank you to these remarkable projects and their dedicated teams for making our Uikit possible. Your efforts are truly appreciated.
package/dist/pw-bundle.js CHANGED
@@ -3814,7 +3814,7 @@ eval("\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _argument
3814
3814
  /***/ (function(__unused_webpack_module, exports, __webpack_require__) {
3815
3815
 
3816
3816
  "use strict";
3817
- eval("\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nconst faceapi = __importStar(__webpack_require__(/*! face-api.js */ \"./node_modules/face-api.js/build/es6/index.js\"));\nconst components_1 = __webpack_require__(/*! 
../components/components */ \"./src/components/components.ts\");\nlet loadedModelState = \"NOT_LOADED\";\nclass FACEDETECTIONSDK {\n constructor(val) {\n this.cameraId = \"\";\n this.isDone = false; // use to check if the face detection is done\n this.prevEmotionalState = \"\";\n this.currenEmotionaltState = \"\";\n this.isLiveness = false;\n // constant variable\n this.face_api_detection_options = new faceapi.TinyFaceDetectorOptions({\n inputSize: 224,\n });\n this.faceTrack_settings = {\n contrast: 3,\n brightness: 0.5,\n threshold: 100,\n minCorrelation: 0.17,\n minScore: 0.6,\n };\n // html element\n this.videoElement = null;\n this.canvasElement = null;\n this.overlayElement = null;\n this.titleElement = null;\n this.instructionElement = null;\n Object.assign(this, val);\n }\n getLoadedModelState() {\n return loadedModelState;\n }\n setCaptureImage(captureImage) {\n this.captureImage = captureImage;\n }\n // get the face position\n getFaceXY(pos) {\n const posF = {\n top: Math.min.apply(Math, pos.map(function (o) {\n return o.y;\n })),\n bottom: Math.max.apply(Math, pos.map(function (o) {\n return o.y;\n })),\n left: Math.min.apply(Math, pos.map(function (o) {\n return o.x;\n })),\n right: Math.max.apply(Math, pos.map(function (o) {\n return o.x;\n })),\n };\n return {\n x: posF[\"left\"],\n y: posF[\"top\"],\n w: posF[\"right\"] - posF[\"left\"],\n h: posF[\"bottom\"] - posF[\"top\"],\n };\n }\n // get the circle area position\n getCircleXY() {\n if (!this.canvasElement || !this.overlayElement)\n return { x: 0, y: 0, w: 0, h: 0 };\n // canvas rectangle position\n const posR = this.canvasElement.getBoundingClientRect();\n // circle position\n const posC = this.overlayElement.getBoundingClientRect();\n return {\n // get the coord x and y of the circle inside the canvas rectangle\n x: posC.x - posR.x,\n y: posC.y - posR.y,\n w: posC.width,\n h: posC.height,\n };\n }\n // to detect if your face is looking straight at the camera\n isAngleDetected(pos) {\n const 
angleOne = (Math.atan2(Math.abs(pos[7][\"y\"] - pos[33][\"y\"]), Math.abs(pos[7][\"x\"] - pos[33][\"x\"])) *\n 180) /\n Math.PI;\n const angleTwo = (Math.atan2(Math.abs(pos[60][\"y\"] - pos[33][\"y\"]), Math.abs(pos[60][\"x\"] - pos[33][\"x\"])) *\n 180) /\n Math.PI;\n const distance1 = Math.abs(pos[0][\"x\"] - pos[33][\"x\"]);\n const distance2 = Math.abs(pos[14][\"x\"] - pos[33][\"x\"]);\n const isAngle = !((angleOne > 40 && angleOne < 93) ||\n (angleTwo > 40 && angleTwo < 93));\n const isDistance = Math.abs(distance1 - distance2) / distance1 > 0.5;\n return isAngle || isDistance;\n }\n // check if the face is inside the circle area\n isFaceDetected(pos) {\n const posCircle = this.getCircleXY();\n const posFace = this.getFaceXY(pos);\n if (posFace.x + posFace.w < posCircle.x + posCircle.w &&\n posFace.x > posCircle.x &&\n posFace.y > posCircle.y &&\n posFace.y + posFace.h < posCircle.y + posCircle.h)\n return true;\n return false;\n }\n // check if the face emotional is changing\n detectLiveness(current) {\n this.prevEmotionalState = this.currenEmotionaltState;\n this.currenEmotionaltState = current;\n return (this.prevEmotionalState &&\n this.currenEmotionaltState &&\n this.prevEmotionalState != this.currenEmotionaltState);\n }\n // check if the face in the video is similar to the previous face\n detectSimilarFace(current) {\n this.prevFaceDescriptor = this.currentFaceDescriptor;\n this.currentFaceDescriptor = current;\n if (!this.prevFaceDescriptor || !this.currentFaceDescriptor)\n return 1;\n // the smaller the distance, the more similar the faces are\n return faceapi.euclideanDistance(this.prevFaceDescriptor, this.currentFaceDescriptor);\n }\n // reset instruction html text and all those checking\n resetInstructions() {\n var _a, _b, _c;\n this.isLiveness = false;\n this.currenEmotionaltState = \"\";\n this.prevEmotionalState = \"\";\n this.prevFaceDescriptor = undefined;\n this.currentFaceDescriptor = undefined;\n (_a = this.overlayElement) === null || _a 
=== void 0 ? void 0 : _a.classList.remove(\"overlay-element--selfie-smile\");\n (_b = this.overlayElement) === null || _b === void 0 ? void 0 : _b.classList.remove(\"overlay-element--selfie-neutral\");\n (_c = this.overlayElement) === null || _c === void 0 ? void 0 : _c.classList.remove(\"overlay-element--active\");\n }\n checkFace(livenessScore, pos, currentExpression, currentFaceDescriptor) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o;\n const isFace = this.isFaceDetected(pos);\n // 1. check if the face is inside the circle area\n if (!isFace) {\n this.resetInstructions();\n return;\n }\n // 2. check if the face is looking straight at the camera\n else if (this.isAngleDetected(pos)) {\n this.resetInstructions();\n const event = new CustomEvent(\"lookStraight\");\n (_a = this.overlayElement) === null || _a === void 0 ? void 0 : _a.dispatchEvent(event);\n if (this.instructionElement)\n this.instructionElement.innerHTML = \"Look straight at the camera.\";\n return;\n }\n // 3. check the liveness of the face\n if (this.detectLiveness(currentExpression))\n this.isLiveness = true;\n // 4. check the similarity of the face\n const isSimilarFace = this.detectSimilarFace(currentFaceDescriptor) < 0.6;\n if (this.isLiveness && !isSimilarFace) {\n this.resetInstructions();\n return;\n }\n if (currentExpression !== \"happy\") {\n const event = new CustomEvent(\"userNoSmile\");\n (_b = this.overlayElement) === null || _b === void 0 ? void 0 : _b.dispatchEvent(event);\n (_c = this.overlayElement) === null || _c === void 0 ? void 0 : _c.classList.add(\"overlay-element--selfie-smile\");\n (_d = this.overlayElement) === null || _d === void 0 ? void 0 : _d.classList.remove(\"overlay-element--selfie-neutral\");\n (_e = this.overlayElement) === null || _e === void 0 ? 
void 0 : _e.classList.remove(\"overlay-element--active\");\n }\n else if (this.isLiveness) {\n // emit event here STAY 3 seconds\n const event = new CustomEvent(\"userSmiled\");\n (_f = this.overlayElement) === null || _f === void 0 ? void 0 : _f.dispatchEvent(event);\n (_g = this.overlayElement) === null || _g === void 0 ? void 0 : _g.classList.remove(\"overlay-element--selfie-smile\");\n (_h = this.overlayElement) === null || _h === void 0 ? void 0 : _h.classList.remove(\"overlay-element--selfie-neutral\");\n (_j = this.overlayElement) === null || _j === void 0 ? void 0 : _j.classList.add(\"overlay-element--active\");\n if (isSimilarFace &&\n this.isLiveness &&\n this.currenEmotionaltState === \"happy\" &&\n this.captureImage) {\n this.captureImage(livenessScore);\n }\n }\n else if (!this.isDone) {\n const event = new CustomEvent(\"userSmiledButNotLiveness\");\n (_k = this.overlayElement) === null || _k === void 0 ? void 0 : _k.dispatchEvent(event);\n (_l = this.overlayElement) === null || _l === void 0 ? void 0 : _l.classList.remove(\"overlay-element--selfie-smile\");\n (_m = this.overlayElement) === null || _m === void 0 ? void 0 : _m.classList.add(\"overlay-element--selfie-neutral\");\n (_o = this.overlayElement) === null || _o === void 0 ? 
void 0 : _o.classList.remove(\"overlay-element--active\");\n }\n }\n detectFace() {\n var _a, _b;\n return __awaiter(this, void 0, void 0, function* () {\n if (!this.videoElement || !this.canvasElement || !this.overlayElement)\n return;\n const displaySize = {\n width: this.canvasElement.width,\n height: this.canvasElement.height,\n };\n const detections = yield faceapi\n .detectSingleFace(this.videoElement, this.face_api_detection_options)\n .withFaceLandmarks()\n .withFaceExpressions()\n .withFaceDescriptor();\n const event = new CustomEvent(\"placeFace\");\n if (typeof detections !== \"undefined\") {\n const expressions = detections.expressions;\n const currentExpression = Object.keys(expressions).reduce((a, b) => {\n return expressions[a] >\n expressions[b]\n ? a\n : b;\n });\n const resizedDetections = faceapi.resizeResults(detections, displaySize);\n const pos = resizedDetections.landmarks.positions;\n const score = detections.detection.score;\n if (score > this.faceTrack_settings.minScore && pos) {\n this.checkFace(score, pos, currentExpression, detections.descriptor);\n }\n else {\n this.resetInstructions();\n (_a = this.overlayElement) === null || _a === void 0 ? void 0 : _a.dispatchEvent(event);\n }\n // draw canvas points for developer view only\n // if (typeof resizedDetections !== \"undefined\" && this.canvasElement) {\n // this.canvasElement\n // .getContext(\"2d\")\n // ?.clearRect(\n // 0,\n // 0,\n // this.canvasElement.width,\n // this.canvasElement.height\n // );\n // if (resizedDetections) {\n // faceapi.draw.drawDetections(this.canvasElement, resizedDetections);\n // faceapi.draw.drawFaceLandmarks(this.canvasElement, resizedDetections);\n // faceapi.draw.drawFaceExpressions(\n // this.canvasElement,\n // resizedDetections\n // );\n // }\n // }\n }\n else {\n this.resetInstructions();\n (_b = this.overlayElement) === null || _b === void 0 ? 
void 0 : _b.dispatchEvent(event);\n }\n if (!this.isDone)\n this.iFrameRef = requestAnimationFrame(() => this.detectFace());\n });\n }\n disableDetection() {\n return __awaiter(this, void 0, void 0, function* () {\n this.isDone = true;\n if (this.iFrameRef)\n cancelAnimationFrame(this.iFrameRef);\n this.iFrameRef = undefined;\n this.resetInstructions();\n });\n }\n // use to calculate the cropped image size / ratio\n // from ui or exact image src\n calculateHeightNWidth(width, height) {\n var _a, _b;\n const videoWidth = ((_a = this.videoElement) === null || _a === void 0 ? void 0 : _a.clientWidth) || 0;\n const videoHeight = ((_b = this.videoElement) === null || _b === void 0 ? void 0 : _b.clientHeight) || 0;\n // calculate the circle size for the image\n // based on which has the largest size: height or width\n // then the circle will be 5/6 from that smaller size\n const circleSize = videoHeight > videoWidth ? width - width / 6 : height - height / 6;\n const newWidth = (circleSize / 5) * 4.5;\n const newHeight = circleSize;\n return { width: newWidth, height: newHeight };\n }\n resizeElement() {\n var _a, _b, _c, _d;\n if (!this.videoElement || !this.canvasElement)\n return;\n // 1. resize canvas to match video size\n this.canvasElement.width = this.videoElement.clientWidth;\n this.canvasElement.height = this.videoElement.clientHeight;\n // 2. resize circle overlay to match video size\n const { width, height } = this.calculateHeightNWidth(this.videoElement.clientWidth, this.videoElement.clientHeight);\n (_a = this.overlayElement) === null || _a === void 0 ? void 0 : _a.style.setProperty(\"border-radius\", \"50%\");\n (_b = this.overlayElement) === null || _b === void 0 ? void 0 : _b.style.setProperty(\"border\", \"solid white 3px\");\n (_c = this.overlayElement) === null || _c === void 0 ? void 0 : _c.style.setProperty(\"width\", `${width}px`);\n (_d = this.overlayElement) === null || _d === void 0 ? 
void 0 : _d.style.setProperty(\"height\", `${height}px`);\n }\n // train face-api models\n loadFaceModel() {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n if (loadedModelState !== \"NOT_LOADED\")\n return;\n const url = \"https://assets.pipwave.com/ekyc/sdk/face-models\" || 0;\n loadedModelState = \"LOADING\";\n yield Promise.all([\n faceapi.loadTinyFaceDetectorModel(url),\n faceapi.loadFaceLandmarkModel(url),\n faceapi.loadFaceRecognitionModel(url),\n faceapi.loadFaceExpressionModel(url),\n ]);\n loadedModelState = \"LOADED\";\n }\n catch (err) {\n loadedModelState = \"NOT_LOADED\";\n return Promise.reject(err);\n }\n });\n }\n init() {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n // show loading spinner in full pg\n (0, components_1.createFullLoading)(this.cameraId, \"Initializing Camera...\");\n this.isDone = false;\n this.resizeElement();\n if (loadedModelState === \"NOT_LOADED\")\n yield this.loadFaceModel();\n // Wait until loadFaceModel is LOADED\n while (loadedModelState === \"LOADING\") {\n yield new Promise((resolve) => setTimeout(resolve, 500));\n }\n if (!this.iFrameRef)\n yield this.detectFace();\n }\n finally {\n // hide loading spinner in full pg\n (0, components_1.hideFullLoading)(this.cameraId);\n }\n });\n }\n listenToUserNoSmile(cb) {\n var _a;\n (_a = this.overlayElement) === null || _a === void 0 ? void 0 : _a.addEventListener(\"userNoSmile\", () => {\n cb();\n });\n }\n listenToUserSmiled(cb) {\n var _a;\n (_a = this.overlayElement) === null || _a === void 0 ? void 0 : _a.addEventListener(\"userSmiled\", () => {\n cb();\n });\n }\n listenToUserSmiledButNotLiveness(cb) {\n var _a;\n (_a = this.overlayElement) === null || _a === void 0 ? void 0 : _a.addEventListener(\"userSmiledButNotLiveness\", () => {\n cb();\n });\n }\n listenToLookStraight(cb) {\n var _a;\n (_a = this.overlayElement) === null || _a === void 0 ? 
void 0 : _a.addEventListener(\"lookStraight\", () => {\n cb();\n });\n }\n listenToPlaceFace(cb) {\n var _a;\n (_a = this.overlayElement) === null || _a === void 0 ? void 0 : _a.addEventListener(\"placeFace\", () => {\n cb();\n });\n }\n}\nexports[\"default\"] = FACEDETECTIONSDK;\n\n\n//# sourceURL=webpack://PWUISDK/./src/plugins/face-detection.ts?");
3817
+ eval("\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nconst components_1 = __webpack_require__(/*! ../components/components */ \"./src/components/components.ts\");\nlet loadedModelState = \"NOT_LOADED\";\nlet faceApiInstance;\n// direct use faceapi if it is already import in client's project (especially angular)\nfaceApiInstance =\n typeof faceapi !== \"undefined\" ? faceapi : __webpack_require__(/*! 
face-api.js */ \"./node_modules/face-api.js/build/es6/index.js\");\nclass FACEDETECTIONSDK {\n constructor(val) {\n this.cameraId = \"\";\n this.isDone = false; // use to check if the face detection is done\n this.prevEmotionalState = \"\";\n this.currenEmotionaltState = \"\";\n this.isLiveness = false;\n // constant variable\n this.face_api_detection_options = new faceApiInstance.TinyFaceDetectorOptions({\n inputSize: 224,\n });\n this.faceTrack_settings = {\n contrast: 3,\n brightness: 0.5,\n threshold: 100,\n minCorrelation: 0.17,\n minScore: 0.6,\n };\n // html element\n this.videoElement = null;\n this.canvasElement = null;\n this.overlayElement = null;\n this.titleElement = null;\n this.instructionElement = null;\n Object.assign(this, val);\n }\n getLoadedModelState() {\n return loadedModelState;\n }\n setCaptureImage(captureImage) {\n this.captureImage = captureImage;\n }\n // get the face position\n getFaceXY(pos) {\n const posF = {\n top: Math.min.apply(Math, pos.map(function (o) {\n return o.y;\n })),\n bottom: Math.max.apply(Math, pos.map(function (o) {\n return o.y;\n })),\n left: Math.min.apply(Math, pos.map(function (o) {\n return o.x;\n })),\n right: Math.max.apply(Math, pos.map(function (o) {\n return o.x;\n })),\n };\n return {\n x: posF[\"left\"],\n y: posF[\"top\"],\n w: posF[\"right\"] - posF[\"left\"],\n h: posF[\"bottom\"] - posF[\"top\"],\n };\n }\n // get the circle area position\n getCircleXY() {\n if (!this.canvasElement || !this.overlayElement)\n return { x: 0, y: 0, w: 0, h: 0 };\n // canvas rectangle position\n const posR = this.canvasElement.getBoundingClientRect();\n // circle position\n const posC = this.overlayElement.getBoundingClientRect();\n return {\n // get the coord x and y of the circle inside the canvas rectangle\n x: posC.x - posR.x,\n y: posC.y - posR.y,\n w: posC.width,\n h: posC.height,\n };\n }\n // to detect if your face is looking straight at the camera\n isAngleDetected(pos) {\n const angleOne = 
(Math.atan2(Math.abs(pos[7][\"y\"] - pos[33][\"y\"]), Math.abs(pos[7][\"x\"] - pos[33][\"x\"])) *\n 180) /\n Math.PI;\n const angleTwo = (Math.atan2(Math.abs(pos[60][\"y\"] - pos[33][\"y\"]), Math.abs(pos[60][\"x\"] - pos[33][\"x\"])) *\n 180) /\n Math.PI;\n const distance1 = Math.abs(pos[0][\"x\"] - pos[33][\"x\"]);\n const distance2 = Math.abs(pos[14][\"x\"] - pos[33][\"x\"]);\n const isAngle = !((angleOne > 40 && angleOne < 93) ||\n (angleTwo > 40 && angleTwo < 93));\n const isDistance = Math.abs(distance1 - distance2) / distance1 > 0.5;\n return isAngle || isDistance;\n }\n // check if the face is inside the circle area\n isFaceDetected(pos) {\n const posCircle = this.getCircleXY();\n const posFace = this.getFaceXY(pos);\n if (posFace.x + posFace.w < posCircle.x + posCircle.w &&\n posFace.x > posCircle.x &&\n posFace.y > posCircle.y &&\n posFace.y + posFace.h < posCircle.y + posCircle.h)\n return true;\n return false;\n }\n // check if the face emotional is changing\n detectLiveness(current) {\n this.prevEmotionalState = this.currenEmotionaltState;\n this.currenEmotionaltState = current;\n return (this.prevEmotionalState &&\n this.currenEmotionaltState &&\n this.prevEmotionalState != this.currenEmotionaltState);\n }\n // check if the face in the video is similar to the previous face\n detectSimilarFace(current) {\n this.prevFaceDescriptor = this.currentFaceDescriptor;\n this.currentFaceDescriptor = current;\n if (!this.prevFaceDescriptor || !this.currentFaceDescriptor)\n return 1;\n // the smaller the distance, the more similar the faces are\n return faceApiInstance.euclideanDistance(this.prevFaceDescriptor, this.currentFaceDescriptor);\n }\n // reset instruction html text and all those checking\n resetInstructions() {\n var _a, _b, _c;\n this.isLiveness = false;\n this.currenEmotionaltState = \"\";\n this.prevEmotionalState = \"\";\n this.prevFaceDescriptor = undefined;\n this.currentFaceDescriptor = undefined;\n (_a = this.overlayElement) === null || _a === 
void 0 ? void 0 : _a.classList.remove(\"overlay-element--selfie-smile\");\n (_b = this.overlayElement) === null || _b === void 0 ? void 0 : _b.classList.remove(\"overlay-element--selfie-neutral\");\n (_c = this.overlayElement) === null || _c === void 0 ? void 0 : _c.classList.remove(\"overlay-element--active\");\n }\n checkFace(livenessScore, pos, currentExpression, currentFaceDescriptor) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o;\n const isFace = this.isFaceDetected(pos);\n // 1. check if the face is inside the circle area\n if (!isFace) {\n this.resetInstructions();\n return;\n }\n // 2. check if the face is looking straight at the camera\n else if (this.isAngleDetected(pos)) {\n this.resetInstructions();\n const event = new CustomEvent(\"lookStraight\");\n (_a = this.overlayElement) === null || _a === void 0 ? void 0 : _a.dispatchEvent(event);\n if (this.instructionElement)\n this.instructionElement.innerHTML = \"Look straight at the camera.\";\n return;\n }\n // 3. check the liveness of the face\n if (this.detectLiveness(currentExpression))\n this.isLiveness = true;\n // 4. check the similarity of the face\n const isSimilarFace = this.detectSimilarFace(currentFaceDescriptor) < 0.6;\n if (this.isLiveness && !isSimilarFace) {\n this.resetInstructions();\n return;\n }\n if (currentExpression !== \"happy\") {\n const event = new CustomEvent(\"userNoSmile\");\n (_b = this.overlayElement) === null || _b === void 0 ? void 0 : _b.dispatchEvent(event);\n (_c = this.overlayElement) === null || _c === void 0 ? void 0 : _c.classList.add(\"overlay-element--selfie-smile\");\n (_d = this.overlayElement) === null || _d === void 0 ? void 0 : _d.classList.remove(\"overlay-element--selfie-neutral\");\n (_e = this.overlayElement) === null || _e === void 0 ? 
void 0 : _e.classList.remove(\"overlay-element--active\");\n }\n else if (this.isLiveness) {\n // emit event here STAY 3 seconds\n const event = new CustomEvent(\"userSmiled\");\n (_f = this.overlayElement) === null || _f === void 0 ? void 0 : _f.dispatchEvent(event);\n (_g = this.overlayElement) === null || _g === void 0 ? void 0 : _g.classList.remove(\"overlay-element--selfie-smile\");\n (_h = this.overlayElement) === null || _h === void 0 ? void 0 : _h.classList.remove(\"overlay-element--selfie-neutral\");\n (_j = this.overlayElement) === null || _j === void 0 ? void 0 : _j.classList.add(\"overlay-element--active\");\n if (isSimilarFace &&\n this.isLiveness &&\n this.currenEmotionaltState === \"happy\" &&\n this.captureImage) {\n this.captureImage(livenessScore);\n }\n }\n else if (!this.isDone) {\n const event = new CustomEvent(\"userSmiledButNotLiveness\");\n (_k = this.overlayElement) === null || _k === void 0 ? void 0 : _k.dispatchEvent(event);\n (_l = this.overlayElement) === null || _l === void 0 ? void 0 : _l.classList.remove(\"overlay-element--selfie-smile\");\n (_m = this.overlayElement) === null || _m === void 0 ? void 0 : _m.classList.add(\"overlay-element--selfie-neutral\");\n (_o = this.overlayElement) === null || _o === void 0 ? 
void 0 : _o.classList.remove(\"overlay-element--active\");\n }\n }\n detectFace() {\n var _a, _b;\n return __awaiter(this, void 0, void 0, function* () {\n if (!this.videoElement || !this.canvasElement || !this.overlayElement)\n return;\n const displaySize = {\n width: this.canvasElement.width,\n height: this.canvasElement.height,\n };\n const detections = yield faceApiInstance\n .detectSingleFace(this.videoElement, this.face_api_detection_options)\n .withFaceLandmarks()\n .withFaceExpressions()\n .withFaceDescriptor();\n const event = new CustomEvent(\"placeFace\");\n if (typeof detections !== \"undefined\") {\n const expressions = detections.expressions;\n const currentExpression = Object.keys(expressions).reduce((a, b) => {\n return expressions[a] >\n expressions[b]\n ? a\n : b;\n });\n const resizedDetections = faceApiInstance.resizeResults(detections, displaySize);\n const pos = resizedDetections.landmarks.positions;\n const score = detections.detection.score;\n if (score > this.faceTrack_settings.minScore && pos) {\n this.checkFace(score, pos, currentExpression, detections.descriptor);\n }\n else {\n this.resetInstructions();\n (_a = this.overlayElement) === null || _a === void 0 ? void 0 : _a.dispatchEvent(event);\n }\n // draw canvas points for developer view only\n // if (typeof resizedDetections !== \"undefined\" && this.canvasElement) {\n // this.canvasElement\n // .getContext(\"2d\")\n // ?.clearRect(\n // 0,\n // 0,\n // this.canvasElement.width,\n // this.canvasElement.height\n // );\n // if (resizedDetections) {\n // faceApiInstance.draw.drawDetections(this.canvasElement, resizedDetections);\n // faceApiInstance.draw.drawFaceLandmarks(this.canvasElement, resizedDetections);\n // faceApiInstance.draw.drawFaceExpressions(\n // this.canvasElement,\n // resizedDetections\n // );\n // }\n // }\n }\n else {\n this.resetInstructions();\n (_b = this.overlayElement) === null || _b === void 0 ? 
void 0 : _b.dispatchEvent(event);\n }\n if (!this.isDone)\n this.iFrameRef = requestAnimationFrame(() => this.detectFace());\n });\n }\n disableDetection() {\n return __awaiter(this, void 0, void 0, function* () {\n this.isDone = true;\n if (this.iFrameRef)\n cancelAnimationFrame(this.iFrameRef);\n this.iFrameRef = undefined;\n this.resetInstructions();\n });\n }\n // use to calculate the cropped image size / ratio\n // from ui or exact image src\n calculateHeightNWidth(width, height) {\n var _a, _b;\n const videoWidth = ((_a = this.videoElement) === null || _a === void 0 ? void 0 : _a.clientWidth) || 0;\n const videoHeight = ((_b = this.videoElement) === null || _b === void 0 ? void 0 : _b.clientHeight) || 0;\n // calculate the circle size for the image\n // based on which has the largest size: height or width\n // then the circle will be 5/6 from that smaller size\n const circleSize = videoHeight > videoWidth ? width - width / 6 : height - height / 6;\n const newWidth = (circleSize / 5) * 4.5;\n const newHeight = circleSize;\n return { width: newWidth, height: newHeight };\n }\n resizeElement() {\n var _a, _b, _c, _d;\n if (!this.videoElement || !this.canvasElement)\n return;\n // 1. resize canvas to match video size\n this.canvasElement.width = this.videoElement.clientWidth;\n this.canvasElement.height = this.videoElement.clientHeight;\n // 2. resize circle overlay to match video size\n const { width, height } = this.calculateHeightNWidth(this.videoElement.clientWidth, this.videoElement.clientHeight);\n (_a = this.overlayElement) === null || _a === void 0 ? void 0 : _a.style.setProperty(\"border-radius\", \"50%\");\n (_b = this.overlayElement) === null || _b === void 0 ? void 0 : _b.style.setProperty(\"border\", \"solid white 3px\");\n (_c = this.overlayElement) === null || _c === void 0 ? void 0 : _c.style.setProperty(\"width\", `${width}px`);\n (_d = this.overlayElement) === null || _d === void 0 ? 
void 0 : _d.style.setProperty(\"height\", `${height}px`);\n }\n // train face-api models\n loadFaceModel() {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n if (loadedModelState !== \"NOT_LOADED\")\n return;\n const url = \"https://assets.pipwave.com/ekyc/sdk/face-models\" || 0;\n loadedModelState = \"LOADING\";\n yield Promise.all([\n faceApiInstance.loadTinyFaceDetectorModel(url),\n faceApiInstance.loadFaceLandmarkModel(url),\n faceApiInstance.loadFaceRecognitionModel(url),\n faceApiInstance.loadFaceExpressionModel(url),\n ]);\n loadedModelState = \"LOADED\";\n }\n catch (err) {\n loadedModelState = \"NOT_LOADED\";\n return Promise.reject(err);\n }\n });\n }\n init() {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n // show loading spinner in full pg\n (0, components_1.createFullLoading)(this.cameraId, \"Initializing Camera...\");\n this.isDone = false;\n this.resizeElement();\n if (loadedModelState === \"NOT_LOADED\")\n yield this.loadFaceModel();\n // Wait until loadFaceModel is LOADED\n while (loadedModelState === \"LOADING\") {\n yield new Promise((resolve) => setTimeout(resolve, 500));\n }\n if (!this.iFrameRef)\n yield this.detectFace();\n }\n finally {\n // hide loading spinner in full pg\n (0, components_1.hideFullLoading)(this.cameraId);\n }\n });\n }\n listenToUserNoSmile(cb) {\n var _a;\n (_a = this.overlayElement) === null || _a === void 0 ? void 0 : _a.addEventListener(\"userNoSmile\", () => {\n cb();\n });\n }\n listenToUserSmiled(cb) {\n var _a;\n (_a = this.overlayElement) === null || _a === void 0 ? void 0 : _a.addEventListener(\"userSmiled\", () => {\n cb();\n });\n }\n listenToUserSmiledButNotLiveness(cb) {\n var _a;\n (_a = this.overlayElement) === null || _a === void 0 ? void 0 : _a.addEventListener(\"userSmiledButNotLiveness\", () => {\n cb();\n });\n }\n listenToLookStraight(cb) {\n var _a;\n (_a = this.overlayElement) === null || _a === void 0 ? 
void 0 : _a.addEventListener(\"lookStraight\", () => {\n cb();\n });\n }\n listenToPlaceFace(cb) {\n var _a;\n (_a = this.overlayElement) === null || _a === void 0 ? void 0 : _a.addEventListener(\"placeFace\", () => {\n cb();\n });\n }\n}\nexports[\"default\"] = FACEDETECTIONSDK;\n\n\n//# sourceURL=webpack://PWUISDK/./src/plugins/face-detection.ts?");
3818
3818
 
3819
3819
  /***/ }),
3820
3820
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "pipwave-ekyc-uikit",
3
- "version": "0.0.1-beta.8",
3
+ "version": "0.0.1-beta.9",
4
4
  "description": "",
5
5
  "main": "dist/pw-bundle.js",
6
6
  "scripts": {