accurafaceplugin 1.0.13 → 1.0.14
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/build/accuramain.js +1 -1
- package/package.json +1 -1
package/build/accuramain.js
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
/*
 * FacePlugin (1.0.13 build, minified) — browser face-capture widget built on OpenCV.js.
 *
 * constructor(haarcascadeUrl, onResultReceived, { threshold, textSize, textColor,
 *   textWeight, textBgColor, BodyBgColor }):
 *   builds the DOM UI, injects CSS, loads piexif.js via a <script> tag, prepares a
 *   "ding" sound (unlocked on the first user click, to satisfy browser autoplay
 *   policies), then polls until window.cv (OpenCV.js) is available.
 *   NOTE(review): this 1.0.13 build resolves its assets from
 *   "./node_modules/accurafaceplugin/..." — changed to bare package paths in 1.0.14.
 *
 * Methods (minified local names):
 *   createUI()            — appends a .container holding <video#videoInput>,
 *                           <canvas#canvasOutput>, a loader <div> and an instruction <p>
 *                           to document.body.
 *   checkOpenCV()         — 100 ms polling loop for window.cv, then openCvReady().
 *   openCvReady()         — getUserMedia({video:true}); on loadedmetadata sizes the
 *                           video/canvas from the stream, allocates two cv.Mat frame
 *                           buffers, loads the Haar cascade through the global `Utils`
 *                           helper (presumably the OpenCV.js utils.js — TODO confirm it
 *                           is loaded by the host page), then starts the detect loop.
 *   isFaceInsideOval(r,c) — true when the face rect's centre lies inside the ellipse
 *                           inscribed in the canvas (standard ellipse inequality).
 *   facepluginopencv()    — per-frame loop re-scheduled with setTimeout paced to
 *                           this.FPS: reads a frame, centre-crops by ZOOM_FACTOR and
 *                           resizes into a scratch Mat (NOTE(review): that zoomed Mat is
 *                           never detected on or displayed, and the roi Mat `a` is never
 *                           .delete()d — looks like wasted work plus a per-frame WASM
 *                           heap leak; confirm against OpenCV.js Mat lifecycle rules),
 *                           runs detectMultiScale on the full RGBA frame, draws a padded
 *                           rectangle round a single detection, updates the instruction
 *                           text by face-area ratio (30–100% of the oval), and after
 *                           `threshold` qualifying frames shows the SVG success overlay,
 *                           plays the ding (if unlocked), then 1.5 s later captures the
 *                           face crop and stops every MediaStream track.
 *   convertToBase64WithMetadata(rect) — draws the current video frame to an offscreen
 *                           canvas, extracts `rect`, embeds EXIF Make/Model through the
 *                           global `piexif` (loaded by the constructor's <script> tag —
 *                           a race is possible if detection finishes before it loads;
 *                           TODO confirm), hides the UI and calls
 *                           onResultReceived({ base64, metadata }).
 *   addStyles()           — injects the widget CSS (template literal, caller-supplied
 *                           colours/sizes with defaults) via a <style> element.
 */
export default class FacePlugin{constructor(e,t,{threshold:n,textSize:i,textColor:s,textWeight:o,textBgColor:a,BodyBgColor:c}){this.haarcascadeUrl=e,this.faceDetectionCount=0,this.onResultReceived=t,this.FPS=30,this.ZOOM_FACTOR=1.5,this.lastFaceDetectionTime=Date.now(),this.opencvLoaded=!1,this.faceThreshold=n,this.instructSize=i,this.instructColor=s,this.instructWeight=o,this.instructBgColor=a,this.bgcolor=c,this.createUI(),this.addStyles(),this.video=document.getElementById("videoInput"),this.canvas=document.getElementById("canvasOutput"),this.instructionText=document.getElementById("instructionText"),this.loader=document.getElementById("loader");const d=document.createElement("script");d.src="./node_modules/accurafaceplugin/public/libs/piexif.js",document.head.appendChild(d),this.ding=new Audio("./node_modules/accurafaceplugin/public/audio/ding.mp3"),this.ding.preload="auto",this.audioUnlocked=!1,this.ding.muted=!0;const r=()=>{this.audioUnlocked||(this.ding.play().then((()=>{this.ding.pause(),this.ding.currentTime=0,this.ding.muted=!1,this.audioUnlocked=!0})).catch((()=>{})),document.removeEventListener("click",r))};document.addEventListener("click",r,{once:!0}),this.checkOpenCV()}createUI(){const e=document.createElement("div");e.className="container";const t=document.createElement("div");t.className="camera-input",t.style.position="relative",this.cameraInput=t;const video=document.createElement("video");video.id="videoInput",video.width=640,video.height=480,video.autoplay=!0,video.muted=!0;const canvas=document.createElement("canvas");canvas.id="canvasOutput",canvas.style.display="none";const n=document.createElement("div");n.id="loader",n.className="loader",n.style.display="none";const i=document.createElement("p");i.id="instructionText",t.appendChild(video),t.appendChild(canvas),t.appendChild(n),e.appendChild(t),e.appendChild(i),document.body.appendChild(e)}checkOpenCV(){const 
e=()=>{window.cv?(this.opencvLoaded=!0,this.openCvReady()):setTimeout(e,100)};e()}openCvReady(){navigator.mediaDevices.getUserMedia({video:!0,audio:!1}).then((e=>{this.video.srcObject=e,this.video.play(),this.isCanvasVisible=!1,this.video.addEventListener("loadedmetadata",(()=>{if(this.video.videoWidth&&this.video.videoHeight){this.video.width=this.video.videoWidth,this.video.height=this.video.videoHeight,this.canvas.width=this.video.videoWidth,this.canvas.height=this.video.videoHeight;const e=new window.cv.Mat(this.video.videoHeight,this.video.videoWidth,window.cv.CV_8UC4),t=new window.cv.Mat(this.video.videoHeight,this.video.videoWidth,window.cv.CV_8UC4),n=new window.cv.VideoCapture(this.video),i=new window.cv.CascadeClassifier,s=new window.Utils("errorMessage"),o=this.haarcascadeUrl;s.createFileFromUrl(o,o,(()=>{i.load(o),this.facepluginopencv(n,e,t,i)}))}}))})).catch((e=>{console.error("An error occurred while accessing media devices: ",e)}))}isFaceInsideOval(e,canvas){const t=canvas.width/2,n=canvas.height/2,i=canvas.width/2,s=canvas.height/2;return(e.x+e.width/2-t)**2/i**2+(e.y+e.height/2-n)**2/s**2<=1}facepluginopencv(e,t,n,i){const s=Date.now();e.read(t);const o=new window.cv.Rect(t.cols/2-t.cols/(2*this.ZOOM_FACTOR),t.rows/2-t.rows/(2*this.ZOOM_FACTOR),t.cols/this.ZOOM_FACTOR,t.rows/this.ZOOM_FACTOR);let a=t.roi(o);window.cv.resize(a,n,new window.cv.Size(t.cols,t.rows),0,0,window.cv.INTER_LINEAR),this.instructionText.textContent="Keep Your Face In The Frame";let c=new window.cv.RectVector;try{if(i.detectMultiScale(t,c,1.5,3,0),1===c.size()){const e=c.get(0),n=(new window.cv.Rect(e.x,e.y,e.width,e.height),.14);let i=Math.max(0,e.x-e.width*n),s=Math.max(0,e.y-e.height*n),o=Math.min(t.cols-i,e.width*(1+2*n)),a=Math.min(t.rows-s,e.height*(1+2*n));const d=new window.cv.Rect(i,s,o,a);window.cv.rectangle(t,new window.cv.Point(d.x,d.y),new window.cv.Point(d.x+d.width,d.y+d.height),[5,255,0,255],2);const 
r=e.width*e.height,h=r/(Math.PI*(this.canvas.width/2)*(this.canvas.height/2))*100;if(this.isFaceInsideOval(e,this.canvas)){if(h<30)this.instructionText.textContent="Stay closer to the window";else if(h>=30&&h<=100&&(this.instructionText.textContent="Processing....",this.faceDetectionCount++,this.faceDetectionCount===this.faceThreshold)){const e=document.createElement("div");return e.id="faceScanSuccess",e.innerHTML='\n <div class="scan-success-wrapper">\n \x3c!-- Viewfinder frame (your provided paths, scaled up a bit for better visibility) --\x3e\n <svg class="face-viewfinder" width="120" height="120" viewBox="0 0 64 64" fill="none" xmlns="http://www.w3.org/2000/svg">\n <path d="M8 18V8h10" stroke="#ffffff" stroke-width="5" stroke-linecap="round"/>\n <path d="M46 8h10v10" stroke="#ffffff" stroke-width="5" stroke-linecap="round"/>\n <path d="M56 46v10H46" stroke="#ffffff" stroke-width="5" stroke-linecap="round"/>\n <path d="M18 56H8V46" stroke="#ffffff" stroke-width="5" stroke-linecap="round"/>\n \x3c!-- Face representation (small circle + smile) --\x3e\n <circle cx="32" cy="26" r="9" stroke="#ffffff" stroke-width="4" fill="none"/>\n <path d="M22 42c4-8 16-8 20 0" stroke="#ffffff" stroke-width="4" stroke-linecap="round" fill="none"/>\n </svg>\n\n \x3c!-- Black checkmark overlay (appears on success) --\x3e\n <svg class="success-tick" viewBox="0 0 52 52" width="80" height="80">\n <path \n d="M14.1 27.2 L21.2 34.4 L37.9 17.6" \n stroke="green" \n stroke-width="6" \n stroke-linecap="round" \n stroke-linejoin="round" \n fill="none"\n />\n </svg>\n </div>\n',Object.assign(e.style,{position:"absolute",top:"50%",left:"50%",transform:"translate(-50%, -50%)",zIndex:"200",pointerEvents:"none",textAlign:"center"}),this.cameraInput.appendChild(e),setTimeout((()=>e.remove()),1500),this.audioUnlocked&&(this.ding.currentTime=0,this.ding.play().catch((()=>{}))),void setTimeout((()=>{this.convertToBase64WithMetadata(d);const 
e=this.video.srcObject;e&&e.getTracks().forEach((e=>e.stop()))}),1500)}}else this.instructionText.textContent="Move inside the oval window";this.lastFaceDetectionTime=Date.now()}}catch(e){console.error("Error processing video:",e)}finally{c.delete()}this.isCanvasVisible&&(window.cv.flip(t,t,1),window.cv.imshow(this.canvas.id,t));const d=1e3/this.FPS-(Date.now()-s);setTimeout((()=>this.facepluginopencv(e,t,n,i)),d)}convertToBase64WithMetadata(e){const t=document.createElement("canvas");t.width=this.video.videoWidth,t.height=this.video.videoHeight;const n=t.getContext("2d");n.drawImage(this.video,0,0,t.width,t.height);const i=document.createElement("canvas");i.width=e.width,i.height=e.height;const s=i.getContext("2d"),o=n.getImageData(e.x,e.y,e.width,e.height);s.putImageData(o,0,0);const a={"0th":{[piexif.ImageIFD.Make]:"AccuraFace",[piexif.ImageIFD.Model]:"FaceDetectionCamera"}},c=piexif.dump(a),d=piexif.insert(c,i.toDataURL("image/jpeg",.92));this.onResultReceived&&(document.querySelector(".container").style.display="none",this.onResultReceived({base64:d,metadata:a}))}addStyles(){const e=`\n body {\n display: flex;\n justify-content: center;\n align-items: center;\n height: 100vh;\n margin: 0;\n background-color: ${this.bgcolor||"rgb(255, 255, 255)"};\n font-family: Arial, sans-serif;\n flex-direction: column;\n }\n \n .container {\n display: flex;\n flex-direction: column;\n align-items: center;\n padding: 20px;\n background-color: transparent;\n border-radius: 10px;\n }\n \n #instructionText {\n display: flex;\n justify-content: center;\n align-items: center;\n padding: 10px;\n background-color: ${this.instructBgColor||"#fff"};\n color: ${this.instructColor||"rgb(10, 18, 131)"};\n box-shadow: 0 4px 8px rgba(0, 0, 0, 0.1);\n border-radius: 5px;\n margin-bottom: 20px;\n font-size: ${this.instructSize||"30px"};\n font-weight: ${this.instructWeight||"bold"};\n }\n \n .camera-input {\n width: 250px;\n height: 370px;\n position: relative;\n overflow: hidden;\n 
border-radius: 50%;\n transform: scale(1.0); \n }\n \n #videoInput {\n width: 100%;\n height: 100%;\n object-fit: cover;\n transition: transform 0.3s ease-in-out;\n transform: scaleX(-1) scale(1.5); \n }\n \n #canvasOutput {\n position: absolute;\n top: 0;\n left: 0;\n width: 100%;\n height: 100%;\n border-radius: 50%;\n transform: scale(1.5);\n }\n \n #response-text {\n font-size: 18px;\n color: #333;\n margin-top: 10px;\n }\n\n/* Desktop (default) */\n\n /* Laptop */\n @media (max-width: 1200px) {\n #instructionText {\n font-size: ${this.instructSize||"28px"};\n }\n }\n\n /* iPad / Tablet */\n @media (max-width: 992px) {\n #instructionText {\n font-size: ${this.instructSize||"28px"};\n }\n }\n\n /* Mobile */\n @media (max-width: 768px) {\n #instructionText {\n font-size: 25px;\n }\n }\n\n /* Small Mobile (Extra Small Devices) */\n @media (max-width: 480px) {\n #instructionText {\n font-size: ${this.instructSize||"18px"};\n }\n }\n\n .scan-success-wrapper {\n position: relative;\n width: 140px;\n height: 140px;\n margin: 0 auto;\n}\n\n.face-viewfinder {\n position: absolute;\n top: 50%;\n left: 50%;\n transform: translate(-50%, -50%);\n opacity: 0.9;\n animation: pulseFrame 2s ease-in-out forwards;\n}\n\n.success-tick {\n position: absolute;\n top: 50%;\n left: 50%;\n transform: translate(-50%, -50%) scale(0.6);\n opacity: 0;\n z-index: 2;\n animation: tickReveal 1.1s ease-out 0.4s forwards;\n}\n\n/* Animations */\n@keyframes pulseFrame {\n 0% { opacity: 0.6; transform: translate(-50%, -50%) scale(0.92); }\n 50% { opacity: 1; transform: translate(-50%, -50%) scale(1.08); }\n 100% { opacity: 0.85; transform: translate(-50%, -50%) scale(1); }\n}\n\n@keyframes tickReveal {\n 0% { \n transform: translate(-50%, -50%) scale(0.4); \n opacity: 0; \n stroke-dashoffset: 60; \n }\n 40% { \n transform: translate(-50%, -50%) scale(1.15); \n opacity: 1; \n }\n 70% { transform: translate(-50%, -50%) scale(0.95); }\n 100% { \n transform: translate(-50%, -50%) scale(1); \n 
opacity: 1; \n stroke-dashoffset: 0; \n }\n}\n\n/* Add stroke-dasharray to the tick path via JS or CSS for draw effect */\n.success-tick path {\n stroke-dasharray: 60;\n stroke-dashoffset: 60;\n animation: drawTick 0.8s ease-out 0.5s forwards;\n}\n\n@keyframes drawTick {\n to { stroke-dashoffset: 0; }\n}\n `,t=document.createElement("style");t.type="text/css",t.innerText=e,document.head.appendChild(t)}}
|
|
1
|
+
/*
 * FacePlugin (1.0.14 build, minified) — browser face-capture widget built on OpenCV.js.
 * Identical to the 1.0.13 build except asset URLs: piexif.js and ding.mp3 are now
 * resolved from bare "accurafaceplugin/..." paths (was "./node_modules/accurafaceplugin/...").
 *
 * constructor(haarcascadeUrl, onResultReceived, { threshold, textSize, textColor,
 *   textWeight, textBgColor, BodyBgColor }):
 *   builds the DOM UI, injects CSS, loads piexif.js via a <script> tag, prepares a
 *   "ding" sound (unlocked on the first user click, to satisfy browser autoplay
 *   policies), then polls until window.cv (OpenCV.js) is available.
 *
 * Methods (minified local names):
 *   createUI()            — appends a .container holding <video#videoInput>,
 *                           <canvas#canvasOutput>, a loader <div> and an instruction <p>
 *                           to document.body.
 *   checkOpenCV()         — 100 ms polling loop for window.cv, then openCvReady().
 *   openCvReady()         — getUserMedia({video:true}); on loadedmetadata sizes the
 *                           video/canvas from the stream, allocates two cv.Mat frame
 *                           buffers, loads the Haar cascade through the global `Utils`
 *                           helper (presumably the OpenCV.js utils.js — TODO confirm it
 *                           is loaded by the host page), then starts the detect loop.
 *   isFaceInsideOval(r,c) — true when the face rect's centre lies inside the ellipse
 *                           inscribed in the canvas (standard ellipse inequality).
 *   facepluginopencv()    — per-frame loop re-scheduled with setTimeout paced to
 *                           this.FPS: reads a frame, centre-crops by ZOOM_FACTOR and
 *                           resizes into a scratch Mat (NOTE(review): that zoomed Mat is
 *                           never detected on or displayed, and the roi Mat `a` is never
 *                           .delete()d — looks like wasted work plus a per-frame WASM
 *                           heap leak; confirm against OpenCV.js Mat lifecycle rules),
 *                           runs detectMultiScale on the full RGBA frame, draws a padded
 *                           rectangle round a single detection, updates the instruction
 *                           text by face-area ratio (30–100% of the oval), and after
 *                           `threshold` qualifying frames shows the SVG success overlay,
 *                           plays the ding (if unlocked), then 1.5 s later captures the
 *                           face crop and stops every MediaStream track.
 *   convertToBase64WithMetadata(rect) — draws the current video frame to an offscreen
 *                           canvas, extracts `rect`, embeds EXIF Make/Model through the
 *                           global `piexif` (loaded by the constructor's <script> tag —
 *                           a race is possible if detection finishes before it loads;
 *                           TODO confirm), hides the UI and calls
 *                           onResultReceived({ base64, metadata }).
 *   addStyles()           — injects the widget CSS (template literal, caller-supplied
 *                           colours/sizes with defaults) via a <style> element.
 */
export default class FacePlugin{constructor(e,t,{threshold:n,textSize:i,textColor:s,textWeight:o,textBgColor:a,BodyBgColor:c}){this.haarcascadeUrl=e,this.faceDetectionCount=0,this.onResultReceived=t,this.FPS=30,this.ZOOM_FACTOR=1.5,this.lastFaceDetectionTime=Date.now(),this.opencvLoaded=!1,this.faceThreshold=n,this.instructSize=i,this.instructColor=s,this.instructWeight=o,this.instructBgColor=a,this.bgcolor=c,this.createUI(),this.addStyles(),this.video=document.getElementById("videoInput"),this.canvas=document.getElementById("canvasOutput"),this.instructionText=document.getElementById("instructionText"),this.loader=document.getElementById("loader");const d=document.createElement("script");d.src="accurafaceplugin/public/libs/piexif.js",document.head.appendChild(d),this.ding=new Audio("accurafaceplugin/public/audio/ding.mp3"),this.ding.preload="auto",this.audioUnlocked=!1,this.ding.muted=!0;const r=()=>{this.audioUnlocked||(this.ding.play().then((()=>{this.ding.pause(),this.ding.currentTime=0,this.ding.muted=!1,this.audioUnlocked=!0})).catch((()=>{})),document.removeEventListener("click",r))};document.addEventListener("click",r,{once:!0}),this.checkOpenCV()}createUI(){const e=document.createElement("div");e.className="container";const t=document.createElement("div");t.className="camera-input",t.style.position="relative",this.cameraInput=t;const video=document.createElement("video");video.id="videoInput",video.width=640,video.height=480,video.autoplay=!0,video.muted=!0;const canvas=document.createElement("canvas");canvas.id="canvasOutput",canvas.style.display="none";const n=document.createElement("div");n.id="loader",n.className="loader",n.style.display="none";const i=document.createElement("p");i.id="instructionText",t.appendChild(video),t.appendChild(canvas),t.appendChild(n),e.appendChild(t),e.appendChild(i),document.body.appendChild(e)}checkOpenCV(){const 
e=()=>{window.cv?(this.opencvLoaded=!0,this.openCvReady()):setTimeout(e,100)};e()}openCvReady(){navigator.mediaDevices.getUserMedia({video:!0,audio:!1}).then((e=>{this.video.srcObject=e,this.video.play(),this.isCanvasVisible=!1,this.video.addEventListener("loadedmetadata",(()=>{if(this.video.videoWidth&&this.video.videoHeight){this.video.width=this.video.videoWidth,this.video.height=this.video.videoHeight,this.canvas.width=this.video.videoWidth,this.canvas.height=this.video.videoHeight;const e=new window.cv.Mat(this.video.videoHeight,this.video.videoWidth,window.cv.CV_8UC4),t=new window.cv.Mat(this.video.videoHeight,this.video.videoWidth,window.cv.CV_8UC4),n=new window.cv.VideoCapture(this.video),i=new window.cv.CascadeClassifier,s=new window.Utils("errorMessage"),o=this.haarcascadeUrl;s.createFileFromUrl(o,o,(()=>{i.load(o),this.facepluginopencv(n,e,t,i)}))}}))})).catch((e=>{console.error("An error occurred while accessing media devices: ",e)}))}isFaceInsideOval(e,canvas){const t=canvas.width/2,n=canvas.height/2,i=canvas.width/2,s=canvas.height/2;return(e.x+e.width/2-t)**2/i**2+(e.y+e.height/2-n)**2/s**2<=1}facepluginopencv(e,t,n,i){const s=Date.now();e.read(t);const o=new window.cv.Rect(t.cols/2-t.cols/(2*this.ZOOM_FACTOR),t.rows/2-t.rows/(2*this.ZOOM_FACTOR),t.cols/this.ZOOM_FACTOR,t.rows/this.ZOOM_FACTOR);let a=t.roi(o);window.cv.resize(a,n,new window.cv.Size(t.cols,t.rows),0,0,window.cv.INTER_LINEAR),this.instructionText.textContent="Keep Your Face In The Frame";let c=new window.cv.RectVector;try{if(i.detectMultiScale(t,c,1.5,3,0),1===c.size()){const e=c.get(0),n=(new window.cv.Rect(e.x,e.y,e.width,e.height),.14);let i=Math.max(0,e.x-e.width*n),s=Math.max(0,e.y-e.height*n),o=Math.min(t.cols-i,e.width*(1+2*n)),a=Math.min(t.rows-s,e.height*(1+2*n));const d=new window.cv.Rect(i,s,o,a);window.cv.rectangle(t,new window.cv.Point(d.x,d.y),new window.cv.Point(d.x+d.width,d.y+d.height),[5,255,0,255],2);const 
r=e.width*e.height,h=r/(Math.PI*(this.canvas.width/2)*(this.canvas.height/2))*100;if(this.isFaceInsideOval(e,this.canvas)){if(h<30)this.instructionText.textContent="Stay closer to the window";else if(h>=30&&h<=100&&(this.instructionText.textContent="Processing....",this.faceDetectionCount++,this.faceDetectionCount===this.faceThreshold)){const e=document.createElement("div");return e.id="faceScanSuccess",e.innerHTML='\n <div class="scan-success-wrapper">\n \x3c!-- Viewfinder frame (your provided paths, scaled up a bit for better visibility) --\x3e\n <svg class="face-viewfinder" width="120" height="120" viewBox="0 0 64 64" fill="none" xmlns="http://www.w3.org/2000/svg">\n <path d="M8 18V8h10" stroke="#ffffff" stroke-width="5" stroke-linecap="round"/>\n <path d="M46 8h10v10" stroke="#ffffff" stroke-width="5" stroke-linecap="round"/>\n <path d="M56 46v10H46" stroke="#ffffff" stroke-width="5" stroke-linecap="round"/>\n <path d="M18 56H8V46" stroke="#ffffff" stroke-width="5" stroke-linecap="round"/>\n \x3c!-- Face representation (small circle + smile) --\x3e\n <circle cx="32" cy="26" r="9" stroke="#ffffff" stroke-width="4" fill="none"/>\n <path d="M22 42c4-8 16-8 20 0" stroke="#ffffff" stroke-width="4" stroke-linecap="round" fill="none"/>\n </svg>\n\n \x3c!-- Black checkmark overlay (appears on success) --\x3e\n <svg class="success-tick" viewBox="0 0 52 52" width="80" height="80">\n <path \n d="M14.1 27.2 L21.2 34.4 L37.9 17.6" \n stroke="green" \n stroke-width="6" \n stroke-linecap="round" \n stroke-linejoin="round" \n fill="none"\n />\n </svg>\n </div>\n',Object.assign(e.style,{position:"absolute",top:"50%",left:"50%",transform:"translate(-50%, -50%)",zIndex:"200",pointerEvents:"none",textAlign:"center"}),this.cameraInput.appendChild(e),setTimeout((()=>e.remove()),1500),this.audioUnlocked&&(this.ding.currentTime=0,this.ding.play().catch((()=>{}))),void setTimeout((()=>{this.convertToBase64WithMetadata(d);const 
e=this.video.srcObject;e&&e.getTracks().forEach((e=>e.stop()))}),1500)}}else this.instructionText.textContent="Move inside the oval window";this.lastFaceDetectionTime=Date.now()}}catch(e){console.error("Error processing video:",e)}finally{c.delete()}this.isCanvasVisible&&(window.cv.flip(t,t,1),window.cv.imshow(this.canvas.id,t));const d=1e3/this.FPS-(Date.now()-s);setTimeout((()=>this.facepluginopencv(e,t,n,i)),d)}convertToBase64WithMetadata(e){const t=document.createElement("canvas");t.width=this.video.videoWidth,t.height=this.video.videoHeight;const n=t.getContext("2d");n.drawImage(this.video,0,0,t.width,t.height);const i=document.createElement("canvas");i.width=e.width,i.height=e.height;const s=i.getContext("2d"),o=n.getImageData(e.x,e.y,e.width,e.height);s.putImageData(o,0,0);const a={"0th":{[piexif.ImageIFD.Make]:"AccuraFace",[piexif.ImageIFD.Model]:"FaceDetectionCamera"}},c=piexif.dump(a),d=piexif.insert(c,i.toDataURL("image/jpeg",.92));this.onResultReceived&&(document.querySelector(".container").style.display="none",this.onResultReceived({base64:d,metadata:a}))}addStyles(){const e=`\n body {\n display: flex;\n justify-content: center;\n align-items: center;\n height: 100vh;\n margin: 0;\n background-color: ${this.bgcolor||"rgb(255, 255, 255)"};\n font-family: Arial, sans-serif;\n flex-direction: column;\n }\n \n .container {\n display: flex;\n flex-direction: column;\n align-items: center;\n padding: 20px;\n background-color: transparent;\n border-radius: 10px;\n }\n \n #instructionText {\n display: flex;\n justify-content: center;\n align-items: center;\n padding: 10px;\n background-color: ${this.instructBgColor||"#fff"};\n color: ${this.instructColor||"rgb(10, 18, 131)"};\n box-shadow: 0 4px 8px rgba(0, 0, 0, 0.1);\n border-radius: 5px;\n margin-bottom: 20px;\n font-size: ${this.instructSize||"30px"};\n font-weight: ${this.instructWeight||"bold"};\n }\n \n .camera-input {\n width: 250px;\n height: 370px;\n position: relative;\n overflow: hidden;\n 
border-radius: 50%;\n transform: scale(1.0); \n }\n \n #videoInput {\n width: 100%;\n height: 100%;\n object-fit: cover;\n transition: transform 0.3s ease-in-out;\n transform: scaleX(-1) scale(1.5); \n }\n \n #canvasOutput {\n position: absolute;\n top: 0;\n left: 0;\n width: 100%;\n height: 100%;\n border-radius: 50%;\n transform: scale(1.5);\n }\n \n #response-text {\n font-size: 18px;\n color: #333;\n margin-top: 10px;\n }\n\n/* Desktop (default) */\n\n /* Laptop */\n @media (max-width: 1200px) {\n #instructionText {\n font-size: ${this.instructSize||"28px"};\n }\n }\n\n /* iPad / Tablet */\n @media (max-width: 992px) {\n #instructionText {\n font-size: ${this.instructSize||"28px"};\n }\n }\n\n /* Mobile */\n @media (max-width: 768px) {\n #instructionText {\n font-size: 25px;\n }\n }\n\n /* Small Mobile (Extra Small Devices) */\n @media (max-width: 480px) {\n #instructionText {\n font-size: ${this.instructSize||"18px"};\n }\n }\n\n .scan-success-wrapper {\n position: relative;\n width: 140px;\n height: 140px;\n margin: 0 auto;\n}\n\n.face-viewfinder {\n position: absolute;\n top: 50%;\n left: 50%;\n transform: translate(-50%, -50%);\n opacity: 0.9;\n animation: pulseFrame 2s ease-in-out forwards;\n}\n\n.success-tick {\n position: absolute;\n top: 50%;\n left: 50%;\n transform: translate(-50%, -50%) scale(0.6);\n opacity: 0;\n z-index: 2;\n animation: tickReveal 1.1s ease-out 0.4s forwards;\n}\n\n/* Animations */\n@keyframes pulseFrame {\n 0% { opacity: 0.6; transform: translate(-50%, -50%) scale(0.92); }\n 50% { opacity: 1; transform: translate(-50%, -50%) scale(1.08); }\n 100% { opacity: 0.85; transform: translate(-50%, -50%) scale(1); }\n}\n\n@keyframes tickReveal {\n 0% { \n transform: translate(-50%, -50%) scale(0.4); \n opacity: 0; \n stroke-dashoffset: 60; \n }\n 40% { \n transform: translate(-50%, -50%) scale(1.15); \n opacity: 1; \n }\n 70% { transform: translate(-50%, -50%) scale(0.95); }\n 100% { \n transform: translate(-50%, -50%) scale(1); \n 
opacity: 1; \n stroke-dashoffset: 0; \n }\n}\n\n/* Add stroke-dasharray to the tick path via JS or CSS for draw effect */\n.success-tick path {\n stroke-dasharray: 60;\n stroke-dashoffset: 60;\n animation: drawTick 0.8s ease-out 0.5s forwards;\n}\n\n@keyframes drawTick {\n to { stroke-dashoffset: 0; }\n}\n `,t=document.createElement("style");t.type="text/css",t.innerText=e,document.head.appendChild(t)}}
|