samvyo-js-sdk 2.0.19 → 2.0.20

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
@@ -228,7 +228,7 @@
228 228   personMask = smoothstep(u_coverage.x, u_coverage.y, personMask);
229 229   outColor = vec4(frameColor * personMask + backgroundColor * (1.0 - personMask), 1.0);
230 230   }
231
- `,{width:g,height:R}=F,J=g/R,n=aE(A,A.VERTEX_SHADER,C),S=aE(A,A.FRAGMENT_SHADER,E),V=ZE(A,n,S,Q,B),N=A.getUniformLocation(V,"u_backgroundScale"),D=A.getUniformLocation(V,"u_backgroundOffset"),d=A.getUniformLocation(V,"u_inputFrame"),l=A.getUniformLocation(V,"u_personMask"),W=A.getUniformLocation(V,"u_background"),k=A.getUniformLocation(V,"u_coverage"),i=A.getUniformLocation(V,"u_lightWrapping"),Z=A.getUniformLocation(V,"u_blendMode");A.useProgram(V),A.uniform2f(N,1,1),A.uniform2f(D,0,0),A.uniform1i(d,0),A.uniform1i(l,1),A.uniform2f(k,0,1),A.uniform1f(i,0),A.uniform1f(Z,0);let a=null;function o(Q){a=oE(A,A.RGBA8,Q.naturalWidth,Q.naturalHeight,A.LINEAR,A.LINEAR),A.texSubImage2D(A.TEXTURE_2D,0,0,0,Q.naturalWidth,Q.naturalHeight,A.RGBA,A.UNSIGNED_BYTE,Q);let B=0,I=0,U=Q.naturalWidth,F=Q.naturalHeight;U/F<J?(F=U/J,I=(Q.naturalHeight-F)/2):(U=F*J,B=(Q.naturalWidth-U)/2);const C=U/Q.naturalWidth,E=F/Q.naturalHeight;B/=Q.naturalWidth,I/=Q.naturalHeight,A.uniform2f(N,C,E),A.uniform2f(D,B,I)}return U?.complete?o(U):U&&(U.onload=()=>{o(U)}),{render:function(){A.viewport(0,0,g,R),A.useProgram(V),A.activeTexture(A.TEXTURE1),A.bindTexture(A.TEXTURE_2D,I),null!==a&&(A.activeTexture(A.TEXTURE2),A.bindTexture(A.TEXTURE_2D,a),A.uniform1i(W,2)),A.bindFramebuffer(A.FRAMEBUFFER,null),A.drawArrays(A.TRIANGLE_STRIP,0,4)},updateCoverage:function(Q){A.useProgram(V),A.uniform2f(k,Q[0],Q[1])},updateLightWrapping:function(Q){A.useProgram(V),A.uniform1f(i,Q)},updateBlendMode:function(Q){A.useProgram(V),A.uniform1f(Z,"screen"===Q?0:1)},cleanUp:function(){A.deleteTexture(a),A.deleteProgram(V),A.deleteShader(S),A.deleteShader(n)}}}(V,d,l,i,Q,U);return{render:async function(){V.activeTexture(V.TEXTURE0),V.bindTexture(V.TEXTURE_2D,W),V.texImage2D(V.TEXTURE_2D,0,V.RGBA,V.RGBA,V.UNSIGNED_BYTE,A.htmlElement),V.bindVertexArray(D),await Z.render(),E(),F._runInference(),E(),a.render(),o.render(),h.render()},updatePostProcessingConfig:function(A){if(o.updateSigmaSpace(A.jointBilateralFilter.sigmaSpace),o.updateSigmaColor(A.jointBilateralFilter.sigmaColor),"image"===B.type){const Q=h;Q.updateCoverage(A.coverage),Q.updateLightWrapping(A.lightWrapping),Q.updateBlendMode(A.blendMode)}else if("blur"===B.type){h.updateCoverage(A.coverage)}else{const A=h;A.updateCoverage([0,.9999]),A.updateLightWrapping(0)}},cleanUp:function(){h.cleanUp(),o.cleanUp(),a.cleanUp(),Z.cleanUp(),V.deleteTexture(i),V.deleteTexture(k),V.deleteTexture(W),V.deleteBuffer(l),V.deleteBuffer(d),V.deleteVertexArray(D),V.deleteShader(N)}}}class wE{constructor(){this.pipeline=null,this.backgroundImageRef=null,this.canvasRef=null,this.fps=0,this.durations=[],this.isRunning=!1,this.timerWorker=null,this.renderTimeoutId=null,this.previousTime=0,this.beginTime=0,this.eventCount=0,this.frameCount=0,this.frameDurations=[]}async initialize(A,Q,B,I,U,F=null,C=null){this.stop(),this.backgroundImageRef=F,this.canvasRef=C;const E=1e3/B.targetFps;this.previousTime=0,this.beginTime=0,this.eventCount=0,this.frameCount=0,this.frameDurations=[],this.timerWorker=function(){const A=new Map,Q=new Blob(["\n const timeoutIds = new Map();\n \n addEventListener('message', (event) => {\n if (event.data.timeoutMs !== undefined) {\n const timeoutId = setTimeout(() => {\n postMessage({ callbackId: event.data.callbackId });\n timeoutIds.delete(event.data.callbackId);\n }, event.data.timeoutMs);\n timeoutIds.set(event.data.callbackId, timeoutId);\n } else {\n const timeoutId = timeoutIds.get(event.data.callbackId);\n if (timeoutId !== undefined) {\n clearTimeout(timeoutId);\n 
timeoutIds.delete(event.data.callbackId);\n }\n }\n });\n "],{type:"application/javascript"}),B=new Worker(URL.createObjectURL(Q));B.onmessage=Q=>{const B=A.get(Q.data.callbackId);B&&(A.delete(Q.data.callbackId),B())};let I=1;return{setTimeout:function(Q,U=0){const F=I++;return A.set(F,Q),B.postMessage({callbackId:F,timeoutMs:U}),F},clearTimeout:function(Q){A.has(Q)&&(B.postMessage({callbackId:Q}),A.delete(Q))},terminate:function(){A.clear(),B.terminate()}}}(),this.pipeline="webgl2"===B.pipeline?ME(A,this.backgroundImageRef,Q,B,this.canvasRef,U,this.timerWorker,this.addFrameEvent.bind(this)):kE(A,Q,B,this.canvasRef,I,U,this.addFrameEvent.bind(this));const g=async()=>{if(!this.isRunning)return;const A=performance.now();this.beginFrame(),await this.pipeline.render(),this.endFrame(),this.renderTimeoutId=this.timerWorker.setTimeout(g,Math.max(0,E-(performance.now()-A)))};return this.isRunning=!0,g(),{pipeline:this.pipeline,backgroundImageRef:this.backgroundImageRef,canvasRef:this.canvasRef,fps:this.fps,durations:this.getProcessingDurations()}}beginFrame(){this.beginTime=Date.now()}addFrameEvent(){const A=Date.now();this.frameDurations[this.eventCount]=A-this.beginTime,this.beginTime=A,this.eventCount++}endFrame(){const A=Date.now();this.frameDurations[this.eventCount]=A-this.beginTime,this.frameCount++,A>=this.previousTime+1e3&&(this.fps=1e3*this.frameCount/(A-this.previousTime),this.durations=[...this.frameDurations],this.previousTime=A,this.frameCount=0),this.eventCount=0}getProcessingDurations(){return this.frameDurations.length>=3?[this.frameDurations[0]||0,this.frameDurations[1]||0,this.frameDurations[2]||0]:[0,0,0]}stop(){this.isRunning=!1,this.timerWorker&&this.renderTimeoutId&&this.timerWorker.clearTimeout(this.renderTimeoutId),this.timerWorker&&(this.timerWorker.terminate(),this.timerWorker=null),this.pipeline&&(this.pipeline.cleanUp(),this.pipeline=null),this.renderTimeoutId=null}getState(){return{pipeline:this.pipeline,backgroundImageRef:this.backgroundImageRef,canvasRef:this.canvasRef,fps:this.fps,durations:this.getProcessingDurations(),isRunning:this.isRunning}}getFps(){return this.fps}getDurations(){return this.durations}isActive(){return this.isRunning&&null!==this.pipeline}async updateConfig(A,Q,B,I,U){return this.initialize(A,Q,B,I,U,this.backgroundImageRef,this.canvasRef)}destroy(){this.stop(),this.backgroundImageRef=null,this.canvasRef=null,this.fps=0,this.durations=[]}}const sE=new sB("Room");const TE=new Set(["240p","360p","480p","720p","1080p","1440p","2160p"]);function yE(A){return!!Array.isArray(A)&&A.every(A=>TE.has(A))}const cE=new sB("utils-verifyFiles"),YE=["mp4"];function tE(A,Q){try{const B=new URL(A).pathname;return B.split(".").pop().toLowerCase()===Q.toLowerCase()}catch{return!1}}const KE=new TextEncoder,LE=new TextDecoder;function HE(A){if(Uint8Array.fromBase64)return Uint8Array.fromBase64("string"==typeof A?A:LE.decode(A),{alphabet:"base64url"});let Q=A;Q instanceof Uint8Array&&(Q=LE.decode(Q)),Q=Q.replace(/-/g,"+").replace(/_/g,"/").replace(/\s/g,"");try{return function(A){if(Uint8Array.fromBase64)return Uint8Array.fromBase64(A);const Q=atob(A),B=new Uint8Array(Q.length);for(let I=0;I<Q.length;I++)B[I]=Q.charCodeAt(I);return B}(Q)}catch{throw new TypeError("The input to be decoded is not correctly encoded.")}}class bE extends Error{static code="ERR_JOSE_GENERIC";code="ERR_JOSE_GENERIC";constructor(A,Q){super(A,Q),this.name=this.constructor.name,Error.captureStackTrace?.(this,this.constructor)}}class pE extends bE{static 
code="ERR_JWT_CLAIM_VALIDATION_FAILED";code="ERR_JWT_CLAIM_VALIDATION_FAILED";claim;reason;payload;constructor(A,Q,B="unspecified",I="unspecified"){super(A,{cause:{claim:B,reason:I,payload:Q}}),this.claim=B,this.reason=I,this.payload=Q}}class rE extends bE{static code="ERR_JWT_EXPIRED";code="ERR_JWT_EXPIRED";claim;reason;payload;constructor(A,Q,B="unspecified",I="unspecified"){super(A,{cause:{claim:B,reason:I,payload:Q}}),this.claim=B,this.reason=I,this.payload=Q}}class eE extends bE{static code="ERR_JOSE_ALG_NOT_ALLOWED";code="ERR_JOSE_ALG_NOT_ALLOWED"}class mE extends bE{static code="ERR_JOSE_NOT_SUPPORTED";code="ERR_JOSE_NOT_SUPPORTED"}class qE extends bE{static code="ERR_JWS_INVALID";code="ERR_JWS_INVALID"}class xE extends bE{static code="ERR_JWT_INVALID";code="ERR_JWT_INVALID"}class zE extends bE{static code="ERR_JWS_SIGNATURE_VERIFICATION_FAILED";code="ERR_JWS_SIGNATURE_VERIFICATION_FAILED";constructor(A="signature verification failed",Q){super(A,Q)}}function XE(A,Q="algorithm.name"){return new TypeError(`CryptoKey does not support this operation, its ${Q} must be ${A}`)}function OE(A,Q){return A.name===Q}function jE(A){return parseInt(A.name.slice(4),10)}function vE(A,Q,B){switch(Q){case"HS256":case"HS384":case"HS512":{if(!OE(A.algorithm,"HMAC"))throw XE("HMAC");const B=parseInt(Q.slice(2),10);if(jE(A.algorithm.hash)!==B)throw XE(`SHA-${B}`,"algorithm.hash");break}case"RS256":case"RS384":case"RS512":{if(!OE(A.algorithm,"RSASSA-PKCS1-v1_5"))throw XE("RSASSA-PKCS1-v1_5");const B=parseInt(Q.slice(2),10);if(jE(A.algorithm.hash)!==B)throw XE(`SHA-${B}`,"algorithm.hash");break}case"PS256":case"PS384":case"PS512":{if(!OE(A.algorithm,"RSA-PSS"))throw XE("RSA-PSS");const B=parseInt(Q.slice(2),10);if(jE(A.algorithm.hash)!==B)throw XE(`SHA-${B}`,"algorithm.hash");break}case"Ed25519":case"EdDSA":if(!OE(A.algorithm,"Ed25519"))throw XE("Ed25519");break;case"ML-DSA-44":case"ML-DSA-65":case"ML-DSA-87":if(!OE(A.algorithm,Q))throw XE(Q);break;case"ES256":case"ES384":case"ES512":{if(!OE(A.algorithm,"ECDSA"))throw XE("ECDSA");const B=function(A){switch(A){case"ES256":return"P-256";case"ES384":return"P-384";case"ES512":return"P-521";default:throw new Error("unreachable")}}(Q);if(A.algorithm.namedCurve!==B)throw XE(B,"algorithm.namedCurve");break}default:throw new TypeError("CryptoKey does not support this operation")}!function(A,Q){if(!A.usages.includes(Q))throw new TypeError(`CryptoKey does not support this operation, its usages must include ${Q}.`)}(A,B)}function uE(A,Q,...B){if((B=B.filter(Boolean)).length>2){const Q=B.pop();A+=`one of type ${B.join(", ")}, or ${Q}.`}else 2===B.length?A+=`one of type ${B[0]} or ${B[1]}.`:A+=`of type ${B[0]}.`;return null==Q?A+=` Received ${Q}`:"function"==typeof Q&&Q.name?A+=` Received function ${Q.name}`:"object"==typeof Q&&null!=Q&&Q.constructor?.name&&(A+=` Received an instance of ${Q.constructor.name}`),A}function PE(A,Q,...B){return uE(`Key for the ${A} algorithm must be `,Q,...B)}function fE(A){return"CryptoKey"===A?.[Symbol.toStringTag]}function _E(A){return"KeyObject"===A?.[Symbol.toStringTag]}const $E=A=>fE(A)||_E(A);const Ag=A=>{if("object"!=typeof(Q=A)||null===Q||"[object Object]"!==Object.prototype.toString.call(A))return!1;var Q;if(null===Object.getPrototypeOf(A))return!0;let B=A;for(;null!==Object.getPrototypeOf(B);)B=Object.getPrototypeOf(B);return Object.getPrototypeOf(A)===B};const Qg=async A=>{if(!A.alg)throw new TypeError('"alg" argument is required when "jwk.alg" is not present');const{algorithm:Q,keyUsages:B}=function(A){let 
Q,B;switch(A.kty){case"AKP":switch(A.alg){case"ML-DSA-44":case"ML-DSA-65":case"ML-DSA-87":Q={name:A.alg},B=A.priv?["sign"]:["verify"];break;default:throw new mE('Invalid or unsupported JWK "alg" (Algorithm) Parameter value')}break;case"RSA":switch(A.alg){case"PS256":case"PS384":case"PS512":Q={name:"RSA-PSS",hash:`SHA-${A.alg.slice(-3)}`},B=A.d?["sign"]:["verify"];break;case"RS256":case"RS384":case"RS512":Q={name:"RSASSA-PKCS1-v1_5",hash:`SHA-${A.alg.slice(-3)}`},B=A.d?["sign"]:["verify"];break;case"RSA-OAEP":case"RSA-OAEP-256":case"RSA-OAEP-384":case"RSA-OAEP-512":Q={name:"RSA-OAEP",hash:`SHA-${parseInt(A.alg.slice(-3),10)||1}`},B=A.d?["decrypt","unwrapKey"]:["encrypt","wrapKey"];break;default:throw new mE('Invalid or unsupported JWK "alg" (Algorithm) Parameter value')}break;case"EC":switch(A.alg){case"ES256":Q={name:"ECDSA",namedCurve:"P-256"},B=A.d?["sign"]:["verify"];break;case"ES384":Q={name:"ECDSA",namedCurve:"P-384"},B=A.d?["sign"]:["verify"];break;case"ES512":Q={name:"ECDSA",namedCurve:"P-521"},B=A.d?["sign"]:["verify"];break;case"ECDH-ES":case"ECDH-ES+A128KW":case"ECDH-ES+A192KW":case"ECDH-ES+A256KW":Q={name:"ECDH",namedCurve:A.crv},B=A.d?["deriveBits"]:[];break;default:throw new mE('Invalid or unsupported JWK "alg" (Algorithm) Parameter value')}break;case"OKP":switch(A.alg){case"Ed25519":case"EdDSA":Q={name:"Ed25519"},B=A.d?["sign"]:["verify"];break;case"ECDH-ES":case"ECDH-ES+A128KW":case"ECDH-ES+A192KW":case"ECDH-ES+A256KW":Q={name:A.crv},B=A.d?["deriveBits"]:[];break;default:throw new mE('Invalid or unsupported JWK "alg" (Algorithm) Parameter value')}break;default:throw new mE('Invalid or unsupported JWK "kty" (Key Type) Parameter value')}return{algorithm:Q,keyUsages:B}}(A),I={...A};return"AKP"!==I.kty&&delete I.alg,delete I.use,crypto.subtle.importKey("jwk",I,Q,A.ext??(!A.d&&!A.priv),A.key_ops??B)};function Bg(A){return Ag(A)&&"string"==typeof A.kty}let Ig;const Ug=async(A,Q,B,I=!1)=>{Ig||=new WeakMap;let U=Ig.get(A);if(U?.[B])return U[B];const F=await Qg({...Q,alg:B});return I&&Object.freeze(A),U?U[B]=F:Ig.set(A,{[B]:F}),F},Fg=async(A,Q)=>{if(A instanceof Uint8Array)return A;if(fE(A))return A;if(_E(A)){if("secret"===A.type)return A.export();if("toCryptoKey"in A&&"function"==typeof A.toCryptoKey)try{return((A,Q)=>{Ig||=new WeakMap;let B=Ig.get(A);if(B?.[Q])return B[Q];const I="public"===A.type,U=!!I;let F;if("x25519"===A.asymmetricKeyType){switch(Q){case"ECDH-ES":case"ECDH-ES+A128KW":case"ECDH-ES+A192KW":case"ECDH-ES+A256KW":break;default:throw new TypeError("given KeyObject instance cannot be used for this algorithm")}F=A.toCryptoKey(A.asymmetricKeyType,U,I?[]:["deriveBits"])}if("ed25519"===A.asymmetricKeyType){if("EdDSA"!==Q&&"Ed25519"!==Q)throw new TypeError("given KeyObject instance cannot be used for this algorithm");F=A.toCryptoKey(A.asymmetricKeyType,U,[I?"verify":"sign"])}switch(A.asymmetricKeyType){case"ml-dsa-44":case"ml-dsa-65":case"ml-dsa-87":if(Q!==A.asymmetricKeyType.toUpperCase())throw new TypeError("given KeyObject instance cannot be used for this algorithm");F=A.toCryptoKey(A.asymmetricKeyType,U,[I?"verify":"sign"])}if("rsa"===A.asymmetricKeyType){let B;switch(Q){case"RSA-OAEP":B="SHA-1";break;case"RS256":case"PS256":case"RSA-OAEP-256":B="SHA-256";break;case"RS384":case"PS384":case"RSA-OAEP-384":B="SHA-384";break;case"RS512":case"PS512":case"RSA-OAEP-512":B="SHA-512";break;default:throw new TypeError("given KeyObject instance cannot be used for this algorithm")}if(Q.startsWith("RSA-OAEP"))return 
A.toCryptoKey({name:"RSA-OAEP",hash:B},U,I?["encrypt"]:["decrypt"]);F=A.toCryptoKey({name:Q.startsWith("PS")?"RSA-PSS":"RSASSA-PKCS1-v1_5",hash:B},U,[I?"verify":"sign"])}if("ec"===A.asymmetricKeyType){const B=new Map([["prime256v1","P-256"],["secp384r1","P-384"],["secp521r1","P-521"]]).get(A.asymmetricKeyDetails?.namedCurve);if(!B)throw new TypeError("given KeyObject instance cannot be used for this algorithm");"ES256"===Q&&"P-256"===B&&(F=A.toCryptoKey({name:"ECDSA",namedCurve:B},U,[I?"verify":"sign"])),"ES384"===Q&&"P-384"===B&&(F=A.toCryptoKey({name:"ECDSA",namedCurve:B},U,[I?"verify":"sign"])),"ES512"===Q&&"P-521"===B&&(F=A.toCryptoKey({name:"ECDSA",namedCurve:B},U,[I?"verify":"sign"])),Q.startsWith("ECDH-ES")&&(F=A.toCryptoKey({name:"ECDH",namedCurve:B},U,I?[]:["deriveBits"]))}if(!F)throw new TypeError("given KeyObject instance cannot be used for this algorithm");return B?B[Q]=F:Ig.set(A,{[Q]:F}),F})(A,Q)}catch(pg){if(pg instanceof TypeError)throw pg}let B=A.export({format:"jwk"});return Ug(A,B,Q)}if(Bg(A))return A.k?HE(A.k):Ug(A,A,Q,!0);throw new Error("unreachable")},Cg=A=>A?.[Symbol.toStringTag],Eg=(A,Q,B)=>{if(void 0!==Q.use){let A;switch(B){case"sign":case"verify":A="sig";break;case"encrypt":case"decrypt":A="enc"}if(Q.use!==A)throw new TypeError(`Invalid key for this operation, its "use" must be "${A}" when present`)}if(void 0!==Q.alg&&Q.alg!==A)throw new TypeError(`Invalid key for this operation, its "alg" must be "${A}" when present`);if(Array.isArray(Q.key_ops)){let I;switch(!0){case"verify"===B:case"dir"===A:case A.includes("CBC-HS"):I=B;break;case A.startsWith("PBES2"):I="deriveBits";break;case/^A\d{3}(?:GCM)?(?:KW)?$/.test(A):I=!A.includes("GCM")&&A.endsWith("KW")?"unwrapKey":B;break;case"encrypt"===B:I="wrapKey";break;case"decrypt"===B:I=A.startsWith("RSA")?"unwrapKey":"deriveBits"}if(I&&!1===Q.key_ops?.includes?.(I))throw new TypeError(`Invalid key for this operation, its "key_ops" must include "${I}" when present`)}return!0},gg=(A,Q,B)=>{A.startsWith("HS")||"dir"===A||A.startsWith("PBES2")||/^A(?:128|192|256)(?:GCM)?(?:KW)?$/.test(A)||/^A(?:128|192|256)CBC-HS(?:256|384|512)$/.test(A)?((A,Q,B)=>{if(!(Q instanceof Uint8Array)){if(Bg(Q)){if(function(A){return"oct"===A.kty&&"string"==typeof A.k}(Q)&&Eg(A,Q,B))return;throw new TypeError('JSON Web Key for symmetric algorithms must have JWK "kty" (Key Type) equal to "oct" and the JWK "k" (Key Value) present')}if(!$E(Q))throw new TypeError(PE(A,Q,"CryptoKey","KeyObject","JSON Web Key","Uint8Array"));if("secret"!==Q.type)throw new TypeError(`${Cg(Q)} instances for symmetric algorithms must be of type "secret"`)}})(A,Q,B):((A,Q,B)=>{if(Bg(Q))switch(B){case"decrypt":case"sign":if(function(A){return"oct"!==A.kty&&("AKP"===A.kty&&"string"==typeof A.priv||"string"==typeof A.d)}(Q)&&Eg(A,Q,B))return;throw new TypeError("JSON Web Key for this operation be a private JWK");case"encrypt":case"verify":if(function(A){return"oct"!==A.kty&&void 0===A.d&&void 0===A.priv}(Q)&&Eg(A,Q,B))return;throw new TypeError("JSON Web Key for this operation be a public JWK")}if(!$E(Q))throw new TypeError(PE(A,Q,"CryptoKey","KeyObject","JSON Web Key"));if("secret"===Q.type)throw new TypeError(`${Cg(Q)} instances for asymmetric algorithms must not be of type "secret"`);if("public"===Q.type)switch(B){case"sign":throw new TypeError(`${Cg(Q)} instances for asymmetric algorithm signing must be of type "private"`);case"decrypt":throw new TypeError(`${Cg(Q)} instances for asymmetric algorithm decryption must be of type 
"private"`)}if("private"===Q.type)switch(B){case"verify":throw new TypeError(`${Cg(Q)} instances for asymmetric algorithm verifying must be of type "public"`);case"encrypt":throw new TypeError(`${Cg(Q)} instances for asymmetric algorithm encryption must be of type "public"`)}})(A,Q,B)},Rg=async(A,Q,B)=>{if(Q instanceof Uint8Array){if(!A.startsWith("HS"))throw new TypeError(((A,...Q)=>uE("Key must be ",A,...Q))(Q,"CryptoKey","KeyObject","JSON Web Key"));return crypto.subtle.importKey("raw",Q,{hash:`SHA-${A.slice(-3)}`,name:"HMAC"},!1,[B])}return vE(Q,A,B),Q},Jg=async(A,Q,B,I)=>{const U=await Rg(A,Q,"verify");((A,Q)=>{if(A.startsWith("RS")||A.startsWith("PS")){const{modulusLength:B}=Q.algorithm;if("number"!=typeof B||B<2048)throw new TypeError(`${A} requires key modulusLength to be 2048 bits or larger`)}})(A,U);const F=((A,Q)=>{const B=`SHA-${A.slice(-3)}`;switch(A){case"HS256":case"HS384":case"HS512":return{hash:B,name:"HMAC"};case"PS256":case"PS384":case"PS512":return{hash:B,name:"RSA-PSS",saltLength:parseInt(A.slice(-3),10)>>3};case"RS256":case"RS384":case"RS512":return{hash:B,name:"RSASSA-PKCS1-v1_5"};case"ES256":case"ES384":case"ES512":return{hash:B,name:"ECDSA",namedCurve:Q.namedCurve};case"Ed25519":case"EdDSA":return{name:"Ed25519"};case"ML-DSA-44":case"ML-DSA-65":case"ML-DSA-87":return{name:A};default:throw new mE(`alg ${A} is not supported either by JOSE or your javascript runtime`)}})(A,U.algorithm);try{return await crypto.subtle.verify(F,U,B,I)}catch{return!1}};async function ng(A,Q,B){if(!Ag(A))throw new qE("Flattened JWS must be an object");if(void 0===A.protected&&void 0===A.header)throw new qE('Flattened JWS must have either of the "protected" or "header" members');if(void 0!==A.protected&&"string"!=typeof A.protected)throw new qE("JWS Protected Header incorrect type");if(void 0===A.payload)throw new qE("JWS Payload missing");if("string"!=typeof A.signature)throw new qE("JWS Signature missing or incorrect type");if(void 0!==A.header&&!Ag(A.header))throw new qE("JWS Unprotected Header incorrect type");let I={};if(A.protected)try{const Q=HE(A.protected);I=JSON.parse(LE.decode(Q))}catch{throw new qE("JWS Protected Header is invalid")}if(!((...A)=>{const Q=A.filter(Boolean);if(0===Q.length||1===Q.length)return!0;let B;for(const I of Q){const A=Object.keys(I);if(B&&0!==B.size)for(const Q of A){if(B.has(Q))return!1;B.add(Q)}else B=new Set(A)}return!0})(I,A.header))throw new qE("JWS Protected and JWS Unprotected Header Parameter names must be disjoint");const U={...I,...A.header},F=((A,Q,B,I,U)=>{if(void 0!==U.crit&&void 0===I?.crit)throw new A('"crit" (Critical) Header Parameter MUST be integrity protected');if(!I||void 0===I.crit)return new Set;if(!Array.isArray(I.crit)||0===I.crit.length||I.crit.some(A=>"string"!=typeof A||0===A.length))throw new A('"crit" (Critical) Header Parameter MUST be an array of non-empty strings when present');let F;F=void 0!==B?new Map([...Object.entries(B),...Q.entries()]):Q;for(const C of I.crit){if(!F.has(C))throw new mE(`Extension Header Parameter "${C}" is not recognized`);if(void 0===U[C])throw new A(`Extension Header Parameter "${C}" is missing`);if(F.get(C)&&void 0===I[C])throw new A(`Extension Header Parameter "${C}" MUST be integrity protected`)}return new Set(I.crit)})(qE,new Map([["b64",!0]]),B?.crit,I,U);let C=!0;if(F.has("b64")&&(C=I.b64,"boolean"!=typeof C))throw new qE('The "b64" (base64url-encode payload) Header Parameter must be a boolean');const{alg:E}=U;if("string"!=typeof E||!E)throw new qE('JWS "alg" (Algorithm) Header Parameter 
missing or invalid');const g=B&&((A,Q)=>{if(void 0!==Q&&(!Array.isArray(Q)||Q.some(A=>"string"!=typeof A)))throw new TypeError(`"${A}" option must be an array of strings`);if(Q)return new Set(Q)})("algorithms",B.algorithms);if(g&&!g.has(E))throw new eE('"alg" (Algorithm) Header Parameter value not allowed');if(C){if("string"!=typeof A.payload)throw new qE("JWS Payload must be a string")}else if("string"!=typeof A.payload&&!(A.payload instanceof Uint8Array))throw new qE("JWS Payload must be a string or an Uint8Array instance");let R=!1;"function"==typeof Q&&(Q=await Q(I,A),R=!0),gg(E,Q,"verify");const J=function(...A){const Q=A.reduce((A,{length:Q})=>A+Q,0),B=new Uint8Array(Q);let I=0;for(const U of A)B.set(U,I),I+=U.length;return B}(KE.encode(A.protected??""),KE.encode("."),"string"==typeof A.payload?KE.encode(A.payload):A.payload);let n;try{n=HE(A.signature)}catch{throw new qE("Failed to base64url decode the signature")}const S=await Fg(Q,E);if(!(await Jg(E,S,n,J)))throw new zE;let V;if(C)try{V=HE(A.payload)}catch{throw new qE("Failed to base64url decode the payload")}else V="string"==typeof A.payload?KE.encode(A.payload):A.payload;const N={payload:V};return void 0!==A.protected&&(N.protectedHeader=I),void 0!==A.header&&(N.unprotectedHeader=A.header),R?{...N,key:S}:N}const Sg=86400,Vg=/^(\+|\-)? ?(\d+|\d+\.\d+) ?(seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)(?: (ago|from now))?$/i,Ng=A=>{const Q=Vg.exec(A);if(!Q||Q[4]&&Q[1])throw new TypeError("Invalid time period format");const B=parseFloat(Q[2]);let I;switch(Q[3].toLowerCase()){case"sec":case"secs":case"second":case"seconds":case"s":I=Math.round(B);break;case"minute":case"minutes":case"min":case"mins":case"m":I=Math.round(60*B);break;case"hour":case"hours":case"hr":case"hrs":case"h":I=Math.round(3600*B);break;case"day":case"days":case"d":I=Math.round(B*Sg);break;case"week":case"weeks":case"w":I=Math.round(604800*B);break;default:I=Math.round(31557600*B)}return"-"===Q[1]||"ago"===Q[4]?-I:I},Dg=A=>A.includes("/")?A.toLowerCase():`application/${A.toLowerCase()}`;function dg(A,Q,B={}){let I;try{I=JSON.parse(LE.decode(Q))}catch{}if(!Ag(I))throw new xE("JWT Claims Set must be a top-level JSON object");const{typ:U}=B;if(U&&("string"!=typeof A.typ||Dg(A.typ)!==Dg(U)))throw new pE('unexpected "typ" JWT header value',I,"typ","check_failed");const{requiredClaims:F=[],issuer:C,subject:E,audience:g,maxTokenAge:R}=B,J=[...F];void 0!==R&&J.push("iat"),void 0!==g&&J.push("aud"),void 0!==E&&J.push("sub"),void 0!==C&&J.push("iss");for(const l of new Set(J.reverse()))if(!(l in I))throw new pE(`missing required "${l}" claim`,I,l,"missing");if(C&&!(Array.isArray(C)?C:[C]).includes(I.iss))throw new pE('unexpected "iss" claim value',I,"iss","check_failed");if(E&&I.sub!==E)throw new pE('unexpected "sub" claim value',I,"sub","check_failed");if(g&&(n=I.aud,S="string"==typeof g?[g]:g,!("string"==typeof n?S.includes(n):Array.isArray(n)&&S.some(Set.prototype.has.bind(new Set(n))))))throw new pE('unexpected "aud" claim value',I,"aud","check_failed");var n,S;let V;switch(typeof B.clockTolerance){case"string":V=Ng(B.clockTolerance);break;case"number":V=B.clockTolerance;break;case"undefined":V=0;break;default:throw new TypeError("Invalid clockTolerance option type")}const{currentDate:N}=B,D=(d=N||new Date,Math.floor(d.getTime()/1e3));var d;if((void 0!==I.iat||R)&&"number"!=typeof I.iat)throw new pE('"iat" claim must be a number',I,"iat","invalid");if(void 0!==I.nbf){if("number"!=typeof I.nbf)throw new pE('"nbf" claim must be a 
number',I,"nbf","invalid");if(I.nbf>D+V)throw new pE('"nbf" claim timestamp check failed',I,"nbf","check_failed")}if(void 0!==I.exp){if("number"!=typeof I.exp)throw new pE('"exp" claim must be a number',I,"exp","invalid");if(I.exp<=D-V)throw new rE('"exp" claim timestamp check failed',I,"exp","check_failed")}if(R){const A=D-I.iat;if(A-V>("number"==typeof R?R:Ng(R)))throw new rE('"iat" claim timestamp check failed (too far in the past)',I,"iat","check_failed");if(A<0-V)throw new pE('"iat" claim timestamp check failed (it should be in the past)',I,"iat","check_failed")}return I}async function lg(A,Q,B){const I=await async function(A,Q,B){if(A instanceof Uint8Array&&(A=LE.decode(A)),"string"!=typeof A)throw new qE("Compact JWS must be a string or Uint8Array");const{0:I,1:U,2:F,length:C}=A.split(".");if(3!==C)throw new qE("Invalid Compact JWS");const E=await ng({payload:U,protected:I,signature:F},Q,B),g={payload:E.payload,protectedHeader:E.protectedHeader};return"function"==typeof Q?{...g,key:E.key}:g}(A,Q,B);if(I.protectedHeader.crit?.includes("b64")&&!1===I.protectedHeader.b64)throw new xE("JWTs MUST NOT use unencoded payload");const U={payload:dg(I.protectedHeader,I.payload,B),protectedHeader:I.protectedHeader};return"function"==typeof Q?{...U,key:I.key}:U}const Wg=new sB("Room"),kg={small:{width:{ideal:160},height:{ideal:120}},qvga:{width:{ideal:320},height:{ideal:240}},vga:{width:{ideal:640},height:{ideal:480}},hd:{width:{ideal:1280},height:{ideal:720}}};let ig;const Zg=new class{constructor(){this.queue=new Map}push(A,Q){this.queue.set(A,Q)}get(A){return this.queue.get(A)}remove(A){this.queue.delete(A)}},ag=new class{constructor(){this._localVBStream=null,this._vbDetailsNew={},this._vbDetails=null,this._roomType=null,this._participants={},this._peerId=null,this._peerConnection=null,this._pipelineManager=null,this._updateInterval=null,this._pipelineManager=new wE,this.initializeTFLite()}getVBDetails(){return this._vbDetailsNew}getVBStream(){return this._localVBStream}hasLiveVBTrack(){try{return this._localVBStream&&"function"==typeof this._localVBStream.getVideoTracks&&this._localVBStream.getVideoTracks().length>0&&"live"===this._localVBStream.getVideoTracks()[0].readyState}catch(A){return!1}}setPipelineManager(A){this._pipelineManager=A}async initializeTFLite(){try{const A=await WE(dE);this._vbDetailsNew.tfLite=A.tflite,this._vbDetailsNew.isSIMDSupported=A.isSIMDSupported}catch(A){}}async initializePipeline(A,Q){sE.debug("initializePipeline called with videoTrack and backgroundConfig:%O,%O",A,Q);let B=null;try{const I=await this._createHiddenVideoElement(A);if(this._vbDetailsNew.hiddenCanvas=this._createHiddenCanvasElement(I),"image"===Q.type){const A=await this._createHiddenImageElement(Q);if(!A.success)return!1;B=A.hiddenImage}const U=I.htmlElement;U instanceof HTMLVideoElement&&U.paused&&(sE.debug("🎬 Video is paused, starting playback..."),await U.play());const F=await this._pipelineManager.initialize(I,Q,dE,null,this._vbDetailsNew.tfLite,B,this._vbDetailsNew.hiddenCanvas);sE.debug("Inside getUserMediaSuccess result",F),sE.debug("Pipeline manager active? 
:%s",this._pipelineManager.isActive()),sE.debug("camera stream live status:%s",A.readyState),F.pipeline.updatePostProcessingConfig(lE),this._setupPeriodicUpdates(),this._vbDetailsNew.hiddenImage=B,this._vbDetailsNew.sourcePlayback=I;const C=F.canvasRef.captureStream(30);return this._localVBStream=C,{success:!0,vbStream:C}}catch(I){sE.error("Failed to initialize pipeline:%O",I)}}async _createHiddenVideoElement(A){return new Promise(Q=>{const B=document.createElement("video");B.autoplay=!0,B.loop=!0,B.controls=!1,B.playsInline=!0,B.muted=!0,B.srcObject=new MediaStream([A]),B.style.cssText="position: fixed; top: 10px; right: 10px; width: 200px; height: 150px; border: 2px solid blue; z-index: 9999; ",document.body.appendChild(B),B.play(),B.onloadeddata=()=>{Q({htmlElement:B,width:B.videoWidth,height:B.videoHeight})}})}async _createHiddenVideoElement(A){return new Promise(Q=>{const B=document.createElement("video");B.style.display="none",B.autoplay=!0,B.loop=!0,B.controls=!1,B.playsInline=!0,B.muted=!0,B.srcObject=new MediaStream([A]),document.body.appendChild(B);const I=async()=>{try{await B.play()}catch(A){}B.readyState<2||!B.videoWidth||!B.videoHeight?requestAnimationFrame(I):Q({htmlElement:B,width:B.videoWidth,height:B.videoHeight})};B.addEventListener("loadedmetadata",I,{once:!0}),I()})}_createHiddenCanvasElement(A){const Q=document.createElement("canvas");return Q.style.display="none",Q.width=A.width,Q.height=A.height,document.body.appendChild(Q),Q}_createHiddenImageElement(A){return new Promise(async Q=>{const B=document.createElement("img");if(B.style.display="none",A?.url.includes("http"))try{(await this.testImageCORS(A?.url)).success?(B.crossOrigin="anonymous",document.body.appendChild(B),B.onload=()=>{Q({success:!0,hiddenImage:B})},B.src=A.url):(B.crossOrigin="anonymous",document.body.appendChild(B),B.onload=()=>{Q({success:!0,hiddenImage:B})})}catch(pg){B.crossOrigin="anonymous",document.body.appendChild(B),B.onload=()=>{Q({success:!0,hiddenImage:B})}}else B.crossOrigin="anonymous",document.body.appendChild(B),B.onload=()=>{Q({success:!0,hiddenImage:B})},B.src=A.url})}async testImageCORS(A,Q=1e4){return new Promise((B,I)=>{const U=new Image;U.crossOrigin="anonymous";const F=setTimeout(()=>{U.src="",I(new Error("CORS_TIMEOUT"))},Q);U.onload=()=>{clearTimeout(F);try{const Q=document.createElement("canvas");Q.width=U.width||100,Q.height=U.height||100;const I=Q.getContext("2d");I.drawImage(U,0,0),I.getImageData(0,0,1,1),B({success:!0,url:A,width:U.naturalWidth,height:U.naturalHeight,message:"CORS allowed"})}catch(Q){I(new Error("CORS_BLOCKED"))}},U.onerror=A=>{clearTimeout(F),I(new Error("IMAGE_LOAD_FAILED"))},U.src=A})}_setupPeriodicUpdates(){this._updateInterval&&clearInterval(this._updateInterval),this._updateInterval=setInterval(()=>{if(this._pipelineManager&&this._pipelineManager.isActive()){const A=this._pipelineManager.getState();this._vbDetailsNew.fps=A.fps;const[Q,B,I]=A.durations||[0,0,0];this._vbDetailsNew.resizingDuration=Q,this._vbDetailsNew.inferenceDuration=B,this._vbDetailsNew.postProcessingDuration=I}},1e3)}cleanup(){try{if(this._localVBStream&&"function"==typeof this._localVBStream.getVideoTracks)try{this._localVBStream.getVideoTracks().forEach(A=>{try{A.stop()}catch(Q){}})}catch(A){}try{this._pipelineManager&&"function"==typeof 
this._pipelineManager.stop&&this._pipelineManager.stop()}catch(A){}if(this._updateInterval){try{clearInterval(this._updateInterval)}catch(A){}this._updateInterval=null}try{if(this._vbDetailsNew?.sourcePlayback?.htmlElement){try{this._vbDetailsNew.sourcePlayback.htmlElement.srcObject=null}catch(A){}try{this._vbDetailsNew.sourcePlayback.htmlElement.remove()}catch(A){}}}catch(A){}try{if(this._vbDetailsNew?.hiddenCanvas)try{this._vbDetailsNew.hiddenCanvas.remove()}catch(A){}}catch(A){}try{if(this._vbDetailsNew?.hiddenImage)try{this._vbDetailsNew.hiddenImage.remove()}catch(A){}}catch(A){}this._localVBStream=null,this._vbDetailsNew&&(this._vbDetailsNew.sourcePlayback=null,this._vbDetailsNew.hiddenCanvas=null,this._vbDetailsNew.hiddenImage=null)}catch(A){}}};class og extends cB.EventEmitter{static async listDevices(){if(ig)return Wg.info("Device list already exists:%O",ig),{success:!0,deviceList:ig};const A=await $C();return A.success?(ig=A.deviceList,{success:!0,deviceList:A.deviceList}):{success:!1,reason:A.reason}}static async changeVB({track:A,details:Q}){if(Wg.debug("changeVB Received details are:%O",Q),Wg.debug("changeVB Received track are:%O",A),!Q)return Wg.debug("VB details not provided. Skipping VB processing."),{success:!1};if(!0===A.active){Wg.debug("Track is live, calling initializePipeline",A);const B=A.getVideoTracks()[0],I=await ag.initializePipeline(B,Q);return Wg.debug("response is :%o",I),I}throw Wg.error("Track is not live"),new Error("Track is not live")}static async init({sessionToken:A,roomId:Q,peerId:B}={}){if(!A)throw new Error("Session token is required to join the room.");try{let I;Wg.info("session token:%s",A);try{const Q=(new TextEncoder).encode("samvyo_tech_321"),{payload:B}=await lg(A,Q,{algorithms:["HS256"]});I=B,Wg.info("Decoded token:",I)}catch(pg){throw Wg.error("JWT verification failed:",pg),pg instanceof rE?new Error("Session token has expired"):pg instanceof pE?new Error("Session token not yet active"):new Error("Invalid session token: "+pg.message)}if(!I||"object"!=typeof I)throw new Error("Invalid token format");const{data:U,signallingServerUrl:F}=I;if(!U||!F)throw new Error("Missing required token data");return B||(B=JU()),Q||(Q=RU()),new og({peerId:B,roomId:Q,outputData:{sessionToken:A,innerSessionToken:U,signallingServerUrl:F}})}catch(I){throw Wg.error("Failed to initialize:",I.message),I}}constructor({peerId:A,roomId:Q,outputData:B}){super(),this._closed=!1,this._roomStatus="initialised",this._roomDisplayName=null,this._running=!1,this._cignal=null,this._socket=null,this._sendTransport=null,this._recvTransport=null,this._device=new SB.Device,this._webCamProducer=null,this._micProducer=null,this._shareProducer=null,this._shareAudioProducer=null,this._producers=new Map,this._consumers=new Map,this._peers=new Map,this._data={...B,inputParams:{peerId:A,roomId:Q,roomType:"conferencing"}},this._micStream=null,this._webCamStream=null,this._webcam={device:null,resolution:"hd"},this._mic={device:null},this._deviceList=ig||null,this._externalVideo=null,this._externalVideoStream=null,this._forceVP8=!1,this._forceH264=!1,this._forceVP9=!1,this._enableWebcamLayers=!0,this._numSimulcastStreams=3,this._enableSharingLayers=!0,this._client=GB.parse(window.navigator.userAgent),this._routerRtpCapabilities=null,this._recordingStartedByMe={},this._cignalConnected=!1,this._reconnectionInitiated=!1,this._restartIceInProgressSendTransport=!1,this._restartIceInProgressRecvTransport=!1,this._activeSpeaker=null,this._speechRecognition=null,this._transcriptStorage=new 
Map,this._audioContext=null,this._audioAnalyser=null,this._micMonitorStream=null,this._speakingWhileMutedInterval=null,this._speakingThreshold=-50,this._mutedSpeakingDetectionEnabled=!0,this._lastMutedSpeakingNotification=0,this._mutedSpeakingCooldown=3e3,this._audioTroubleShootData={lastDiagnostic:null,deviceTests:{},connectivityStatus:"unknown"},this._audioOutputDevices=[],this._currentSpeakerDevice=null,this._testAudioElements=new Map,this._speakerTestResults=new Map,this._remoteAudioElement=null,this._remoteCaption=null,this.initLocal()}get peerId(){return this._peerId}set peerId(A){this._peerId=A}get roomType(){return this._roomType}set roomType(A){this._roomType=A}get closed(){return this._closed}get data(){return this._data}set data(A){throw new Error("Setting the whole data object is not possible!")}get peers(){return this._peers}set peers(A){throw new Error("Setting the whole peers object is not possible!")}get transports(){return{produce:this._sendTransport,consume:this._recvTransport}}set transports(A){throw new Error("Setting of transport is not possible!")}get videoStream(){return this._webCamStream}get audioStream(){return this._micStream}get clientAgent(){return this._client}get activeParameters(){return this._data.inputParams}get deviceList(){return this._deviceList?this._deviceList:{videoDevices:[],audioDevices:[],audioOutputDevices:[]}}set deviceList(A){throw new Error("Setting of deviceList is not possible!")}get currentlyActiveSpeaker(){return this._activeSpeaker}set currentlyActiveSpeaker(A){throw new Error("Setting of currentActivespeaker is not possible!")}get roomDisplayName(){return this._roomDisplayName}set roomDisplayName(A){throw new Error("Setting of roomDisplayName is not possible!")}async initLocal(){const A=SB.detectDevice();Wg.debug("The device is:%O",A),await this._initSocket()}async _initSocket(){let A=this;const Q=this.data.signallingServerUrl.replace(/^(http|https):\/\//,""),B=`wss://${Q}/?sessionToken=${this.data.sessionToken}&roomId=${this.data.inputParams.roomId}&peerId=${this.data.inputParams.peerId}&roomType=${this.data.inputParams.roomType}`;Wg.info(`Going to create a new socket! with address: ${Q}`),this._socket=new gU(B,!0),this._listenToSocket(),this._socket.on("notify",({type:A,title:Q,message:B})=>{this.emit("notification",{eventType:A,eventText:`${Q}: ${B}`,roomId:this.data.inputParams.roomId,peerId:this.data.inputParams.peerId})}),this._socket.on("roomStartedP2p",A=>{Wg.info("P2P room successfully started"),this._running=!0}),this._socket.on("userError",Q=>{Wg.error("User Error happened with message:%O",Q),A.emit("notification",{eventType:Q.title,eventText:`${Q.text}`})}),this._socket.on("validationAlert",A=>{Wg.info("Validation alert happened")}),this._socket.on("alreadyActive",({title:A,text:Q})=>{this.emit("notification",{eventType:"alreadyActive",eventText:"This peer already has an active connection",roomId:this.data.inputParams.roomId,peerId:this.data.inputParams.peerId})}),this._socket.on("passwordDisabled",()=>{Wg.info("password disabled by moderator!"),this.emit("notification",{eventType:"passwordDisabled",eventText:"Password for this room has been disabled by moderator",roomId:this.data.inputParams.roomId})}),this._socket.on("close",({code:A,reason:Q})=>{if(Wg.info(`socket closed with code ${A}`),4500!==A&&4100!==A){let B=Q||"Connection to server closed unexpectedly! 
Trying to reconnect.";Wg.info(`socket close code is${A} with reason ${B}`)}else Wg.info("Socket is now closed!"),this.close()}),this._socket.on("connected",async()=>{Wg.info("Socket connected"),this.emit("initSuccess")}),this._socket.on("reconnected",async()=>{Wg.info("Socket re-connected"),A.pc&&A._sendTransport&&A._recvTransport?roomType===tC.P2P&&A.pc?(Wg.info("Socket seems to be reconnected in mid call! RestartIce needed for p2p call."),"failed"!==A.pc.iceConnectionState&&"disconnected"!==A.pc.iceConnectionState||A.restartICE()):(Wg.debug("Ice restarts for mediasoup transports for a joined peer"),A._sendTransport&&["failed","disconnected"].includes(A._sendTransport.connectionState)?(Wg.debug("Restart ice for sendtransport"),A.restartIce(A._sendTransport.id,"send")):Wg.error("Send transport not available!"),A._recvTransport&&["failed","disconnected"].includes(A._recvTransport.connectionState)?(Wg.debug("Restart ice for recvtransport"),A.restartIce(A._recvTransport.id,"recv")):Wg.error("Recv transport not available!")):(Wg.info("Connection getting connected for first time"),this.emit("initSuccess"))}),this._socket.on("defaultJoinStatus",async A=>{Wg.info(" Socket defaultjoinstatus:%O",A)})}_sendMessage(A){this._socket.send({usageType:"sdk",...A})}_listenToSocket(){this._socket.on("message",A=>{try{switch("currentlyActiveSpeaker"===A.id||"allStats"===A.id||Wg.info("message in Room is:%O",A),A.id){case"chatMessage":this.processChatMessage(A);break;case"customMessage":this.processCustomMessage(A);break;case"existingParticipants":this.onExistingParticipants(A);break;case"newPeerJoin":this.onNewPeer(A);break;case"recordingError":this.handleRecordingErrors(A);break;case"moderatorAuthentication":this.authenticateUser(A);break;case"authenticationRequested":this.authenticationRequested(A);break;case"toggleMyMic":this.toggleMyMic(A);break;case"toggleMyCamera":this.toggleMyCamera(A);break;case"logMeOut":this.logMeOutNew(A);break;case"userAlreadyAuthenticated":this.hideUserAuthenticationDialog(A);break;case"peerLeft":this.peerLeft(A);break;case"recordingStarted":this.setRecordingStatusStarted(A);break;case"recordingStopped":this.setRecordingStatusEnded(A);break;case"startDefaultRecording":this.startRecording(A);break;case"mediaToggled":this.mediaToggled(A);break;case"processingStarted":this.handleProcessingStart(A);break;case"processingCompleted":this.handleProcessingCompletion(A);break;case"processingError":this.handleProcessingError(A);break;case"createTransportResponse":this.handleCreateTransportRequest(A);break;case"connectTransportResponse":this.handleConnectTransportRequest(A);break;case"connectRecvTransportResponse":this.handleConnectRecvTransportRequest(A);break;case"sendTrackResponse":this.handleSendTrackRequest(A);break;case"recvTrackResponse":this.handleRecvTrackRequest(A);break;case"roomClosedByModerator":this.leaveRoomCommon(),this.roomClosed();break;case"currentlyActiveSpeaker":this.setCurrentlyActiveSpeaker(A);break;case"restartIceResponse":this.restartIceResponse(A);break;case"consumerClosed":this.closeConsumer(A);break;case"handRaise":this.handleHandRaise(A);break;case"updateCId":this.updateCId(A);break;case"upgradeParticipant":this.handleUpgradeParticipant(A);break;case"downgradeParticipant":this.handleDowngradeParticipant(A);break;case"switchMicOff":this.handleSwitchMicOff(A);break;case"screenShareLimitReached":this.handleScreenShareLimitReached(A);break;case"upgradeLimitReached":this.handleUpgradeLimitReached(A);break;case"modUpgradeReq":this.handleModUpgradeReq(A);break;case"lock
UnlockRoom":this.handleLockUnlockRoom(A);break;case"peersWaiting":this.handlePeersWaiting(A);break;case"remotePeerJoin":this.handleRemotePeerJoin(A);break;case"offer":Wg.debug("inside offer"),this.handleOffer(A);break;case"answer":Wg.debug("inside answer"),this.handleAnswer(A);break;case"candidate":Wg.debug("inside handle candidate"),this.handleCandidate(A.candidate);break;case"p2pRoomClosed":Wg.debug("inside p2p room close"),this.leaveRoomNewP2p("leaveAndCloseRoom");break;case"p2pUserLeft":Wg.debug("inside p2p user left"),this.userLeftRoom(A);break;case"iceRestart":this.handleIceRestart(A);break;case"iceRestarted":this.handleIceRestartResponse(A);break;case"screenShareP2p":this.handleScreenShareP2p(A);break;default:Wg.warn("Unrecognized message:%o",A)}}catch(pg){Wg.error("listentomessage:%O",pg)}})}joinRoom=async({peerName:A=null,produce:Q=!0,produceAudio:B=!0,produceVideo:I=!0,consume:U=!0,videoResolution:F="hd",forceVp8:C=!1,forceVp9:E=!1,forceH264:g=!1,h264Profile:R="high",forcePCMU:J=!1,forcePCMA:n=!1,forceFPS:S=25,enableWebcamLayers:V=!0,numSimulcastStreams:N=3,autoGainControl:D=!0,echoCancellation:d=!0,noiseSuppression:l=!0,sampleRate:W=44e3,channelCount:k=1,videoBitRates:i=[700,250,75],share:Z=!1,shareAudio:a=!1,enableSharingLayers:o=!0,shareBitRates:h=[2500,1250,500],audioDeviceId:G=null,videoDeviceId:M=null,peerType:w="participant",roomType:s=tC.CONFERENCING,authenticationRequired:T=!1,password:y=null,roomDisplayName:c=null,vbdetails:Y}={})=>{Wg.info("Going to join room"),["hd","vga","qvga"].includes(F)||(Wg.warn("Invalid video resolution value. setting it to default value of 'hd' "),F="hd"),"boolean"!=typeof Q&&(Wg.warn("Produe should either be true or false"),Q=Boolean(Q)),"boolean"!=typeof B&&(Wg.warn("ProduceAudio should either be true or false"),B=Boolean(B)),"boolean"!=typeof I&&(Wg.warn("ProduceVideo should either be true or false"),I=Boolean(I)),"boolean"!=typeof U&&(Wg.warn("Consume should either be true or false"),U=Boolean(U)),"boolean"!=typeof C&&(Wg.warn("forceVp8 should either be true or false"),C=Boolean(C)),"boolean"!=typeof E&&(Wg.warn("forceVp9 should either be true or false"),E=Boolean(E)),"boolean"!=typeof g&&(Wg.warn("forceH264 should either be true or false"),g=Boolean(g)),["high","low"].includes(R.toLowerCase())||(Wg.warn("h264Profile should either be 'high' or 'low'"),R="high"),(!Number.isInteger(S)||Number.isInteger(S)&&(S>65||S<5))&&(Wg.warn("forceFPS should be a number between 5 to 65, default value is 25 fps."),S=25),"boolean"!=typeof V&&(Wg.warn("enableWebcamLayers should either be true or false"),V=Boolean(V)),(!Number.isInteger(N)||Number.isInteger(N)&&(N>3||N<1))&&(Wg.warn("numSimulcastStreams should be a number between 1 to 3, default value is 3."),N=3),Array.isArray(i)&&i.length>=1&&i.length<=3&&i.every(A=>Number.isInteger(A)&&A>=75&&A<=800)?Wg.debug("videoBitRates values are correct"):(Wg.warn("videobitrates values should be an integer array with maximum 3 elements and minimum 1 element. 
The values in the array are '[700,250,75]'"),i=[700,250,75]),"boolean"!=typeof J&&(Wg.warn("forcePCMU should either be true or false"),J=Boolean(J)),"boolean"!=typeof n&&(Wg.warn("forcePCMA should either be true or false"),n=Boolean(n)),"boolean"!=typeof D&&(Wg.warn("autoGainControl should either be true or false"),D=Boolean(D)),"boolean"!=typeof d&&(Wg.warn("echoCancellation should either be true or false"),d=Boolean(d)),"boolean"!=typeof l&&(Wg.warn("noiseSuppression should either be true or false"),l=Boolean(l)),(!Number.isInteger(W)||Number.isInteger(W)&&(W>64e3||W<8e3))&&(Wg.warn("sampleRate should be a number between 8000 to 64000, default value is 44000 Khz."),W=44e3),(!Number.isInteger(k)||Number.isInteger(k)&&(k>2||k<1))&&(Wg.warn("sampleRate should be a number between 1 to 2, default value is 1, which is a mono audio."),k=1),"boolean"!=typeof Z&&(Wg.warn("share should either be true or false"),Z=Boolean(Z)),"boolean"!=typeof a&&(Wg.warn("shareAudio should either be true or false"),a=Boolean(a)),"boolean"!=typeof o&&(Wg.warn("enableSharingLayers should either be true or false"),o=Boolean(o)),Array.isArray(h)&&h.length>=1&&h.length<=3&&h.every(A=>Number.isInteger(A)&&A>=500&&A<=2500)?Wg.debug("shareBitRates values are correct"):(Wg.warn("sharebitrates values should be an integer array with maximum 3 elements and minimum 1 element. The values in the array are '[2500,1250,500]'"),h=[2500,1250,500]),["moderator","participant","attendee"].includes(w)?Wg.debug("peerType is valid:%s",w):(w="participant",Wg.debug("peerType is invalid:%s. By default set to: participant",w)),await this.listDevicesInternal(),this._videoResolution=F,this._forceVP8=Boolean(C),this._forceH264=Boolean(g),this._forceVP9=Boolean(E),this._enableWebcamLayers=Boolean(V),this._numSimulcastStreams=N,this._enableSharingLayers=Boolean(o);try{A||(A=fC()),this.data.inputParams={...this.data.inputParams,peerName:A,produce:Q,produceAudio:B,produceVideo:I,consume:U,videoResolution:F,forceVp8:C,forceVp9:E,forceH264:g,h264Profile:R,forceFPS:S,forcePCMU:J,forcePCMA:n,enableWebcamLayers:V,numSimulcastStreams:N,autoGainControl:D,echoCancellation:d,noiseSuppression:l,sampleRate:W,channelCount:k,videoBitRates:i,share:Z,shareAudio:a,enableSharingLayers:o,shareBitRates:h,audioDeviceId:G,videoDeviceId:M,peerType:w,roomType:s,authenticationRequired:T,password:y,roomDisplayName:c,vbdetails:Y},Wg.info("input params are:%O",this.data.inputParams);const t={id:"joinRoom",type:"r",peerId:this.data.inputParams.peerId,participantType:"attendee"===w?"viewer":w,roomType:s,roomDisplayName:c||`room-${1e5+Math.round(9e5*Math.random())}`,browser:this._client,name:this.data.inputParams.peerName,room:this.data.inputParams.roomId,authenticationRequired:T,isRoomPassword:!!y,roomPassword:y||null,usageType:"sdk"};this._sendMessage(t)}catch(t){return Wg.error("Failed to join room:",t.message),{success:!1,reason:t.message}}};authenticateUser=A=>{Wg.info("Moderator authentication requested:%O",A),this.emit("moderatorAuthentication",{moderatorName:A.moderatorName,requesterName:A.requesterName,requesterPeerId:A.requesterPeerId,text:A.title})};authenticationRequested=A=>{Wg.info("Moderator authentication requested:%O",A),this.emit("authenticationRequested",{requesterName:A.requesterName,requesterPeerId:this.data.inputParams.peerId,text:A.title})};allowRoomJoin=A=>{if(!A)return Wg.error("peerId can't be undefined!"),{success:!1,reason:"PeerId can't be undefined"};Wg.info("Allow user to join room:%O",A);let 
Q={id:"userAuthenticated",peerId:A,roomName:this.data.inputParams.roomId,moderator:this.data.inputParams.peerId};this._sendMessage(Q)};denyRoomJoin=A=>{if(!A)return Wg.error("peerId can't be undefined!"),{success:!1,reason:"PeerId can't be undefined"};Wg.info("Deny user to join room:%O",A);let Q={id:"userDenied",peerId:A,roomName:this.data.inputParams.roomId,moderator:this.data.inputParams.peerId};this._sendMessage(Q)};setSpeakingWhileMutedDetection(A=!0){this._mutedSpeakingDetectionEnabled=A,Wg.debug("Speaking while muted detection "+(A?"enabled":"disabled"))}setSpeakingThreshold(A=-50){this._speakingThreshold=A,Wg.debug(`Speaking threshold set to: ${A}dB`)}async _initializeAudioMonitoring(){if(this._micStream)try{this._audioContext=new AudioContext,this._audioAnalyser=this._audioContext.createAnalyser(),this._audioAnalyser.fftSize=512,this._audioAnalyser.smoothingTimeConstant=.3;const A={audio:{deviceId:this._mic.device?{exact:this._mic.device.deviceId}:void 0,echoCancellation:!1,noiseSuppression:!1,autoGainControl:!1}};this._micMonitorStream=await navigator.mediaDevices.getUserMedia(A);this._audioContext.createMediaStreamSource(this._micMonitorStream).connect(this._audioAnalyser),Wg.debug("Audio monitoring initialized successfully")}catch(A){Wg.error("Error initializing audio monitoring:%o",A)}}_getAudioLevel(){if(!this._audioAnalyser)return-1/0;const A=this._audioAnalyser.frequencyBinCount,Q=new Uint8Array(A);this._audioAnalyser.getByteFrequencyData(Q);let B=0;for(let F=0;F<A;F++)B+=Q[F];const I=B/A,U=20*Math.log10(I/255);return isFinite(U)?U:-1/0}_startSpeakingWhileMutedDetection(){this._mutedSpeakingDetectionEnabled&&this._audioAnalyser&&(this._speakingWhileMutedInterval=setInterval(()=>{if(!this._micProducer||!this._micProducer.paused)return;const A=this._getAudioLevel();if(A>this._speakingThreshold){const Q=Date.now();Q-this._lastMutedSpeakingNotification>this._mutedSpeakingCooldown&&(this._lastMutedSpeakingNotification=Q,this.data.inputParams.peerId===this.peerId&&(this.emit("speakingWhileMuted",{peerId:this.data.inputParams.peerId,audioLevel:A,timestamp:Q,message:"You appear to be speaking while muted"}),Wg.debug(`Speaking while muted detected - Audio level: ${A}dB`)))}},100))}_stopSpeakingWhileMutedDetection(){this._speakingWhileMutedInterval&&(clearInterval(this._speakingWhileMutedInterval),this._speakingWhileMutedInterval=null)}_cleanupAudioMonitoring(){this._stopSpeakingWhileMutedDetection(),this._micMonitorStream&&(this._micMonitorStream.getTracks().forEach(A=>A.stop()),this._micMonitorStream=null),this._audioContext&&"closed"!==this._audioContext.state&&(this._audioContext.close(),this._audioContext=null),this._audioAnalyser=null}async diagnoseAudio(){Wg.debug("Starting comprehensive audio diagnostic...");const A={timestamp:Date.now(),browser:this._client,permissions:{},devices:{},connectivity:{},currentSetup:{},recommendations:[]};try{return A.permissions=await this._testAudioPermissions(),A.devices=await this._testAudioDevices(),A.currentSetup=await this._testCurrentMicSetup(),A.connectivity=await this._testWebRTCConnectivity(),A.recommendations=this._generateAudioRecommendations(A),this._audioTroubleShootData.lastDiagnostic=A,this.emit("audioDiagnosticComplete",{peerId:this.data.inputParams.peerId,diagnostic:A}),A}catch(Q){return Wg.error("Audio diagnostic failed:",Q),A.error=Q.message,A}}async _testAudioPermissions(){const A={granted:!1,state:"unknown",error:null};try{if(navigator.permissions){const Q=await 
navigator.permissions.query({name:"microphone"});A.state=Q.state,A.granted="granted"===Q.state}const Q=await navigator.mediaDevices.getUserMedia({audio:!0,video:!1});A.granted=!0,A.actuallyGranted=!0,Q.getTracks().forEach(A=>A.stop())}catch(Q){A.error=Q.name,A.actuallyGranted=!1,Wg.error("Permission test failed:",Q)}return A}getSystemHealthStatus(){return{sdk:{roomStatus:this._roomStatus,isConnected:"connected"===this._roomStatus,micActive:!!this._micProducer&&!this._micProducer.closed,micMuted:this._micProducer?.paused,cameraActive:!!this._webcamProducer&&!this._webcamProducer.closed,screenSharing:!!this._shareProducer&&!this._shareProducer.closed},transports:{send:this._sendTransport?{id:this._sendTransport.id,connectionState:this._sendTransport.connectionState,iceState:this._sendTransport.iceState,dtlsState:this._sendTransport.dtlsState}:null,recv:this._recvTransport?{id:this._recvTransport.id,connectionState:this._recvTransport.connectionState,iceState:this._recvTransport.iceState,dtlsState:this._recvTransport.dtlsState}:null},audio:{context:this._audioContext?.state,analyser:!!this._audioAnalyser,currentLevel:this._getAudioLevel(),speaking:this._getAudioLevel()>this._speakingThreshold,monitorStream:!!this._micMonitorStream},streams:{mic:this._micStream?.active,camera:this._webCamStream?.active,micTracks:this._micStream?.getTracks()?.length||0,cameraTracks:this._webCamStream?.getTracks()?.length||0}}}async testNetworkConnectivity(){const A={timestamp:Date.now(),stun:{working:!1,latency:null},turn:{working:!1,latency:null},bandwidth:{upload:null,download:null},packetLoss:null};try{const Q=Date.now(),B=new RTCPeerConnection({iceServers:[{urls:"stun:stun.l.google.com:19302"}]}),I=(B.createDataChannel("test"),await B.createOffer());await B.setLocalDescription(I),await new Promise(A=>{B.onicecandidate=Q=>{Q.candidate||A()},setTimeout(A,5e3)}),A.stun.working="failed"!==B.iceConnectionState,A.stun.latency=Date.now()-Q,B.close()}catch(Q){}return A}async assessAudioQuality(A=5e3){if(!this._micStream)throw new Error("No active microphone stream");const Q={duration:A,samples:[],averageLevel:0,peakLevel:-1/0,quietSamples:0,clipSamples:0,quality:"unknown"};try{const B=new AudioContext,I=B.createAnalyser();I.fftSize=1024;B.createMediaStreamSource(this._micStream).connect(I);const U=I.frequencyBinCount,F=new Uint8Array(U),C=Date.now(),E=100;return new Promise(g=>{const R=setInterval(()=>{I.getByteFrequencyData(F);let E=0;for(let A=0;A<U;A++)E+=F[A];const J=E/U,n=20*Math.log10(J/255);if(isFinite(n)&&(Q.samples.push(n),Q.peakLevel=Math.max(Q.peakLevel,n),n<-70&&Q.quietSamples++,n>-3&&Q.clipSamples++),Date.now()-C>=A){clearInterval(R),B.close();const A=Q.samples.filter(A=>isFinite(A));Q.averageLevel=A.reduce((A,Q)=>A+Q,0)/A.length;const I=Q.quietSamples/A.length*100;Q.clipSamples/A.length*100>10?Q.quality="poor-clipping":I>80?Q.quality="poor-quiet":Q.averageLevel>-30?Q.quality="good":Q.averageLevel>-50?Q.quality="fair":Q.quality="poor-low",g(Q)}},E)})}catch(B){throw new Error(`Audio quality assessment failed: ${B.message}`)}}async attemptAutoRemediation(){const A=[],Q=this.getSystemHealthStatus();try{"failed"===Q.transports.send?.connectionState&&(await this.restartIce(Q.transports.send.id,"send"),A.push("Restarted send transport")),"failed"===Q.transports.recv?.connectionState&&(await this.restartIce(Q.transports.recv.id,"recv"),A.push("Restarted receive transport")),!Q.audio.analyser&&this._micStream&&(this._cleanupAudioMonitoring(),await this._initializeAudioMonitoring(),A.push("Restarted audio 
monitoring")),Q.sdk.micActive&&!Q.streams.mic&&(await this.disableMic(),await this.enableMic(),A.push("Restarted microphone"));const B=await this.diagnoseAudio();if(B.devices?.working?.length>0){const Q=B.currentSetup?.deviceLabel,I=B.devices.working[0];Q!==I.label&&(await this.changeAudioInput({deviceId:I.deviceId}),A.push(`Switched to working device: ${I.label}`))}return{success:!0,fixes:A}}catch(B){return{success:!1,error:B.message,fixes:A}}}async getEnhancedDeviceList(){try{const Q=await navigator.mediaDevices.enumerateDevices(),B=[];for(const I of Q){if("audioinput"!==I.kind)continue;const Q={deviceId:I.deviceId,label:I.label,groupId:I.groupId,capabilities:null,testResult:null};try{const A=(await navigator.mediaDevices.getUserMedia({audio:{deviceId:{exact:I.deviceId}}})).getAudioTracks()[0];Q.capabilities=A.getCapabilities(),Q.testResult=await this._testSpecificDevice(I.deviceId,1e3),A.stop()}catch(A){Q.testResult={working:!1,error:A.message}}B.push(Q)}return B}catch(A){throw new Error(`Enhanced device enumeration failed: ${A.message}`)}}async optimizeAudioSettings(){const A=this.getSystemHealthStatus(),Q=[];try{const B=await this.assessAudioQuality(3e3);if("poor-clipping"===B.quality?(await this.changeAudioInput({autoGainControl:!1,echoCancellation:!0,noiseSuppression:!0}),Q.push("Disabled auto-gain control to prevent clipping")):"poor-quiet"===B.quality&&(await this.changeAudioInput({autoGainControl:!0,echoCancellation:!0,noiseSuppression:!1}),Q.push("Enabled auto-gain control for low input levels")),"connected"===A.transports.send?.connectionState){await this._sendTransport.getStats()}return{success:!0,recommendations:Q,qualityAssessment:B}}catch(B){return{success:!1,error:B.message,recommendations:Q}}}async _testAudioDevices(){const A={available:[],current:null,working:[],failed:[]};try{const B=await navigator.mediaDevices.enumerateDevices();A.available=B.filter(A=>"audioinput"===A.kind).map(A=>({deviceId:A.deviceId,label:A.label,groupId:A.groupId})),A.current=this._mic.device;for(const I of A.available)try{const Q=await this._testSpecificDevice(I.deviceId);Q.working?A.working.push({...I,audioLevel:Q.audioLevel,testDuration:Q.duration}):A.failed.push({...I,error:Q.error})}catch(Q){A.failed.push({...I,error:Q.message})}}catch(Q){A.error=Q.message}return A}async _testSpecificDevice(A,Q=2e3){return new Promise(B=>{const I={working:!1,audioLevel:-1/0,duration:Q,error:null};let U=null,F=null,C=null;const E=()=>{U&&U.getTracks().forEach(A=>A.stop()),F&&"closed"!==F.state&&F.close()},g=setTimeout(()=>{E(),B(I)},Q);navigator.mediaDevices.getUserMedia({audio:{deviceId:{exact:A}}}).then(A=>{U=A,F=new(window.AudioContext||window.webkitAudioContext),C=F.createAnalyser(),C.fftSize=256;F.createMediaStreamSource(U).connect(C);const R=C.frequencyBinCount,J=new Uint8Array(R),n=setInterval(()=>{C.getByteFrequencyData(J);let A=0;for(let I=0;I<R;I++)A+=J[I];const Q=A/R,B=20*Math.log10(Q/255);isFinite(B)&&B>I.audioLevel&&(I.audioLevel=B),Q>0&&(I.working=!0)},100);setTimeout(()=>{clearInterval(n),clearTimeout(g),E(),B(I)},Q-100)}).catch(A=>{clearTimeout(g),I.error=A.message,E(),B(I)})})}async _testCurrentMicSetup(){const 
A={isActive:!1,isProducing:!1,isMuted:!1,audioLevel:-1/0,deviceLabel:null,streamActive:!1,producerStats:null};try{if(A.isActive=!!this._micProducer,A.isProducing=!(!this._micProducer||this._micProducer.closed),A.isMuted=!(!this._micProducer||!this._micProducer.paused),A.deviceLabel=this._mic.device?.label,A.streamActive=!(!this._micStream||!this._micStream.active),this._micStream){const Q=this._micStream.getAudioTracks();Q.length>0&&(A.trackEnabled=Q[0].enabled,A.trackReadyState=Q[0].readyState,A.trackSettings=Q[0].getSettings())}if(this._audioAnalyser&&(A.audioLevel=this._getAudioLevel()),this._micProducer&&this._sendTransport)try{const Q=await this._sendTransport.getStats();A.producerStats=Q}catch(Q){A.producerStatsError=Q.message}}catch(Q){A.error=Q.message}return A}async _testWebRTCConnectivity(){const A={sendTransport:null,recvTransport:null,iceConnectionState:null,dtlsState:null,error:null};try{this._sendTransport&&(A.sendTransport={id:this._sendTransport.id,connectionState:this._sendTransport.connectionState,iceState:this._sendTransport.iceState,dtlsState:this._sendTransport.dtlsState}),this._recvTransport&&(A.recvTransport={id:this._recvTransport.id,connectionState:this._recvTransport.connectionState,iceState:this._recvTransport.iceState,dtlsState:this._recvTransport.dtlsState})}catch(Q){A.error=Q.message}return A}_generateAudioRecommendations(A){const Q=[];return A.permissions.granted||Q.push({type:"critical",title:"Microphone Permission Required",description:"Please allow microphone access in your browser",action:"Grant microphone permission in browser settings"}),0===A.devices.working.length&&Q.push({type:"critical",title:"No Working Audio Devices",description:"No functioning microphone devices detected",action:"Check if microphone is connected and enabled in system settings"}),A.currentSetup.isActive&&!A.currentSetup.streamActive&&Q.push({type:"warning",title:"Current Microphone Not Working",description:"The selected microphone device appears to be inactive",action:"Try switching to a different microphone device"}),"failed"===A.connectivity.sendTransport?.connectionState&&Q.push({type:"critical",title:"Connection Failed",description:"Unable to establish audio connection to server",action:"Check internet connection and try rejoining the room"}),A.currentSetup.audioLevel<-60&&Q.push({type:"info",title:"Low Audio Level",description:"Your microphone level appears to be very low",action:"Check microphone volume in system settings or move closer to microphone"}),Q}async quickAudioTest(){const A={working:!1,issues:[],timestamp:Date.now()};try{if(!this._micProducer)return A.issues.push("Microphone not active"),A;if(this._micProducer.closed)return A.issues.push("Microphone producer is closed"),A;if(!this._micStream||!this._micStream.active)return A.issues.push("Microphone stream is not active"),A;const Q=this._micStream.getAudioTracks();if(0===Q.length)return A.issues.push("No audio tracks found"),A;if("live"!==Q[0].readyState)return A.issues.push("Audio track is not live"),A;if("connected"!==this._sendTransport.connectionState)return A.issues.push(`Send transport not connected: ${this._sendTransport.connectionState}`),A;A.working=!0}catch(Q){A.issues.push(`Test error: ${Q.message}`)}return this.emit("quickAudioTestComplete",{peerId:this.data.inputParams.peerId,result:A}),A}async listAudioOutputDevices(){try{if(this._deviceList&&this._deviceList.audioOutputDevices)return Wg.debug("Using cached audio output 
devices:",this._deviceList.audioOutputDevices),{success:!0,devices:this._deviceList.audioOutputDevices};const A=await navigator.mediaDevices.enumerateDevices();return this._audioOutputDevices=A.filter(A=>"audiooutput"===A.kind),Wg.debug("Found audio output devices:",this._audioOutputDevices),{success:!0,devices:this._audioOutputDevices.map(A=>({deviceId:A.deviceId,label:A.label||`Speaker ${A.deviceId.slice(-4)}`,groupId:A.groupId}))}}catch(A){return Wg.error("Failed to enumerate audio output devices:",A),{success:!1,error:A.message}}}async testSpeakerDevice(A,Q={}){Wg.debug("Testing speaker device",A);const{testDuration:B=3e3,testFrequencies:I=[440,1e3,2e3],volume:U=.3,requireUserConfirmation:F=!0}=Q,C=`speaker-test-${A}-${Date.now()}`;try{if(!HTMLAudioElement.prototype.setSinkId)throw new Error("setSinkId is not supported in this browser");const Q={deviceId:A,testId:C,timestamp:Date.now(),success:!1,frequencies:[],volume:U,duration:B,userConfirmed:!1,error:null},E=new Audio;E.volume=U,E.loop=!1,await E.setSinkId(A),Q.setSinkId=!0,this._testAudioElements.set(C,E);for(const A of I){const U=await this._playTestTone(E,A,B/I.length);Q.frequencies.push(U)}if(F){const B=await this._requestUserConfirmation(A,Q);Q.userConfirmed=B,Q.success=B}else Q.success=Q.frequencies.every(A=>A.played);return this._speakerTestResults.set(A,Q),this.emit("speakerTestComplete",{deviceId:A,testResult:Q}),{success:!0,testResult:Q}}catch(E){Wg.error(`Speaker test failed for device ${A}:`,E);const Q={deviceId:A,testId:C,timestamp:Date.now(),success:!1,error:E.message};return this._speakerTestResults.set(A,Q),this.emit("speakerTestComplete",{deviceId:A,testResult:Q}),{success:!1,error:E.message,testResult:Q}}finally{this._cleanupTestAudio(C)}}async _playTestTone(A,Q,B){return new Promise((I,U)=>{try{const U=new window.AudioContext,F=U.createOscillator(),C=U.createGain(),E=U.createMediaStreamDestination();F.connect(C),C.connect(E),F.frequency.setValueAtTime(Q,U.currentTime),F.type="sine",C.gain.setValueAtTime(0,U.currentTime),C.gain.linearRampToValueAtTime(.1,U.currentTime+.1),C.gain.linearRampToValueAtTime(.1,U.currentTime+B/1e3-.1),C.gain.linearRampToValueAtTime(0,U.currentTime+B/1e3),A.srcObject=E.stream,F.start(),F.stop(U.currentTime+B/1e3);const g=A.play();void 0!==g&&g.then(()=>{setTimeout(()=>{F.disconnect(),C.disconnect(),U.close(),I({frequency:Q,duration:B,played:!0,timestamp:Date.now()})},B)}).catch(A=>{U.close(),I({frequency:Q,duration:B,played:!1,error:A.message,timestamp:Date.now()})})}catch(F){U(F)}})}async _requestUserConfirmation(A,Q){return new Promise(B=>{this.emit("speakerTestConfirmationRequired",{deviceId:A,testResult:Q,onConfirm:A=>B(A)}),setTimeout(()=>B(!1),1e4)})}async testCurrentSpeakerOutput(){try{const Q={timestamp:Date.now(),currentDevice:this._currentSpeakerDevice,remoteAudioPresent:!1,audioElementFound:!1,volumeLevel:0,success:!1},B=document.querySelectorAll("audio"),I=document.querySelectorAll("video");let U=[];if(B.forEach(A=>{A.srcObject&&A.srcObject.getAudioTracks().length>0&&U.push(A)}),I.forEach(A=>{A.srcObject&&A.srcObject.getAudioTracks().length>0&&U.push(A)}),Q.audioElementFound=U.length>0,Q.elementsCount=U.length,U.length>0){for(const B of U)try{if(B.srcObject){const A=new AudioContext,I=A.createMediaStreamSource(B.srcObject),U=A.createAnalyser();I.connect(U),U.fftSize=256;const F=U.frequencyBinCount,C=new Uint8Array(F);U.getByteFrequencyData(C);const E=C.reduce((A,Q)=>A+Q,0)/F;Q.volumeLevel=Math.max(Q.volumeLevel,E),Q.remoteAudioPresent=E>0,A.close()}}catch(A){Wg.debug("Could 
not analyze remote audio element:",A)}Q.success=Q.remoteAudioPresent}return this.emit("currentSpeakerTestComplete",Q),{success:!0,testResult:Q}}catch(A){return Wg.error("Current speaker test failed:",A),{success:!1,error:A.message}}}async diagnoseSpeakers(){Wg.debug("Starting comprehensive speaker diagnostic");const A={timestamp:Date.now(),browser:this._client,support:{},devices:{},currentOutput:{},remoteAudio:{},recommendations:[]};try{A.support={setSinkId:!!HTMLAudioElement.prototype.setSinkId,enumerateDevices:!!navigator.mediaDevices?.enumerateDevices,audioContext:!!window.AudioContext};const Q=await this.listAudioOutputDevices();A.devices={available:Q.devices||[],count:Q.devices?.length||0,hasDefault:Q.devices?.some(A=>"default"===A.deviceId)||!1};const B=await this.testCurrentSpeakerOutput();return A.currentOutput=B.testResult,A.remoteAudio=this._analyzeRemoteAudioSetup(),A.recommendations=this._generateSpeakerRecommendations(A),this.emit("speakerDiagnosticComplete",{diagnostic:A}),A}catch(Q){return Wg.error("Speaker diagnostic failed:",Q),A.error=Q.message,A}}_analyzeRemoteAudioSetup(){const A={consumers:0,activeStreams:0,audioElements:0,videoElements:0,totalTracks:0};try{this._consumers&&this._consumers.forEach(Q=>{Q&&"audio"===Q.kind&&!Q.closed&&A.consumers++});const Q=document.querySelectorAll("audio"),B=document.querySelectorAll("video");Q.forEach(Q=>{A.audioElements++,Q.srcObject&&Q.srcObject.getAudioTracks().length>0&&(A.activeStreams++,A.totalTracks+=Q.srcObject.getAudioTracks().length)}),B.forEach(Q=>{A.videoElements++,Q.srcObject&&Q.srcObject.getAudioTracks().length>0&&(A.activeStreams++,A.totalTracks+=Q.srcObject.getAudioTracks().length)}),0===A.activeStreams&&0===A.totalTracks&&A.consumers>0&&(A.activeStreams=A.consumers,A.totalTracks=A.consumers)}catch(Q){Wg.error("Remote audio analysis failed:",Q),A.error=Q.message}return A}_generateSpeakerRecommendations(A){const Q=[];return A.support.setSinkId||Q.push({type:"critical",title:"Audio Output Selection Not Supported",description:"Your browser does not support changing audio output devices",actions:["Use Chrome, Edge, or Firefox for audio output selection","Change system default audio device instead","Consider using a different browser"]}),0===A.devices.count&&Q.push({type:"critical",title:"No Audio Output Devices Found",description:"No speakers or headphones detected",actions:["Check if speakers/headphones are connected","Verify audio drivers are installed","Try refreshing the page after connecting devices"]}),0===A.remoteAudio.consumers&&0===A.remoteAudio.activeStreams&&Q.push({type:"warning",title:"No Remote Audio Detected",description:"Not receiving audio from other participants",actions:["Ask other participants to unmute their microphones","Check if you have muted remote participants","Verify your internet connection"]}),!A.currentOutput.success&&A.currentOutput.audioElementFound&&Q.push({type:"warning",title:"Audio Output Issues",description:"Remote audio present but may not be playing correctly",actions:["Check system volume levels","Try switching to a different audio output device","Verify the selected output device is working"]}),0===Q.length&&Q.push({type:"success",title:"Audio Output System Healthy",description:"Speaker setup appears to be working correctly",actions:["Your audio output is configured properly","Run individual device tests if experiencing issues"]}),Q}async progressiveTestAllSpeakers(A={}){Wg.debug("Progressive speaker test 
started");const{testDuration:Q=2e3,requireConfirmation:B=!0,volume:I=.2}=A;try{const A=await this.listAudioOutputDevices();if(!A.success)throw new Error("Could not enumerate audio devices");const U=[];let F=0;this.emit("progressiveSpeakerTestStarted",{totalDevices:A.devices.length,testDuration:Q,requireConfirmation:B});for(const C of A.devices){F++,this.emit("progressiveSpeakerTestProgress",{currentIndex:F,totalDevices:A.devices.length,currentDevice:C,progress:F/A.devices.length*100});const E=await this.testSpeakerDevice(C.deviceId,{testDuration:Q,volume:I,requireUserConfirmation:B,testFrequencies:[1e3]});U.push({device:C,...E}),await new Promise(A=>setTimeout(A,500))}return this.emit("progressiveSpeakerTestComplete",{results:U,workingDevices:U.filter(A=>A.success),failedDevices:U.filter(A=>!A.success)}),{success:!0,results:U,summary:{total:U.length,working:U.filter(A=>A.success).length,failed:U.filter(A=>!A.success).length}}}catch(U){return Wg.error("Progressive speaker test failed:",U),{success:!1,error:U.message}}}_cleanupTestAudio(A){if(Wg.debug("Cleaning up test audio"),this._testAudioElements.has(A)){const B=this._testAudioElements.get(A);try{B.pause(),B.srcObject=null,B.src=""}catch(Q){Wg.debug("Error cleaning up test audio:",Q)}this._testAudioElements.delete(A)}}getCurrentSpeakerDevice(){return Wg.debug("Getting current speaker device"),this._currentSpeakerDevice}async meetingSafeSpeakerTest(A){return Wg.debug("Meeting safe speaker test started"),this.testSpeakerDevice(A,{testDuration:1500,testFrequencies:[800],volume:.1,requireUserConfirmation:!0})}hideUserAuthenticationDialog=A=>{Wg.debug("authentication already done message:%o",A),this.emit("moderatorAuthStatus",{requesterId:A.requesterId,moderatorActed:A.peerId})};onNewPeer(A){const{peerId:Q,displayName:B,participantType:I}=A;this._peers.set(Q,{displayName:B,participantType:I,consumers:[]}),this.emit("newPeer",{peerId:Q,peerName:B,type:this.data.inputParams.peerId===Q?"local":"remote",peerRole:I})}async onExistingParticipants(A){if(Wg.debug("Onexisting participant message:%O",A),this._routerRtpCapabilities=A.routerRtpCapabilities,this._roomStatus="connected",this._roomDisplayName=A.roomDisplayName,this._running=!0,this._socket.updateRoomJoinStatus(!0),this.emit("newPeer",{peerId:this.data.inputParams.peerId,peerName:this.data.inputParams.peerName,type:"local",peerRole:this.data.inputParams.peerType}),this.data.inputParams.produce?await this._createSendTransport():Wg.debug("Produce is false!"),this.data.inputParams.consume){await this._createRecvTransport();let Q=this;A.peers&&A.peers.length>0&&A.peers.forEach(A=>{Q.emit("newPeer",{peerId:A.peerId,peerName:A.name,type:"remote",peerRole:A.participantType})})}else Wg.debug("Consume is false!")}sendCustomMessage=(A,Q="general",B=null,I,U,F={})=>{const C={id:"customMessage",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,data:A,type:Q,recieverPeerId:B,senderType:I,messageType:U,customData:F};Wg.debug("Room sendCustomMessage",C),this._sendMessage(C)};processCustomMessage=A=>{Wg.debug("Room processCustomMessage",A),this.emit("customMessage",A)};updateCId=A=>{Wg.debug("Received updateCId 
message",A),A.targetPeerId!==this.data.inputParams.peerId&&A.targetPeerId||this.emit("updateCId",{message:A,cId:A.cId,peerId:this.data.inputParams.peerId,isMyCId:A.targetPeerId===this.data.inputParams.peerId})};setCurrentlyActiveSpeaker(A){const{peerId:Q,volume:B}=A.activeSpeaker;this._activeSpeaker=A.activeSpeaker,this.emit("activeSpeaker",{peerId:Q,volume:B})}_createSendTransport=async()=>{Wg.debug("Room _createSendTransport");try{this._device.loaded||(Wg.debug("Room _createSendTransport","Going to load device with routerrtpcapabilities"),await this._device.load({routerRtpCapabilities:this._routerRtpCapabilities}));let A="send";this._sendTransport||this._sendMessage({id:"createTransport",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,direction:A})}catch(pg){Wg.error("Room _createSendTransport",pg)}};_createRecvTransport=async()=>{this._device.loaded||(Wg.debug("loading device for creating recv transport"),await this._device.load({routerRtpCapabilities:this._routerRtpCapabilities}));this._recvTransport||(Wg.debug("receive transport created"),this._sendMessage({id:"createTransport",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,direction:"recv"}))};handleCreateTransportRequest=async A=>{Wg.debug("Room handleCreateTransportRequest():%O",A);let Q,{transportOptions:B,direction:I}=A;try{if("recv"===I)Q=await this._device.createRecvTransport(B),Wg.debug("Room",`handleCreateTransportRequest() recv transport created ${Q.id}`),this._recvTransport=Q,this.handleRecvTransportListeners();else{if("send"!==I)throw new Error(`bad transport 'direction': ${I}`);Q=await this._device.createSendTransport(B),Wg.debug("Room",`handleCreateTransportRequest() send transport created [id:%s]${Q.id}`),this._sendTransport=Q,this.handleSendTransportListeners(),this.produceMedia()}}catch(U){Wg.error("Room handleCreateTransportRequest() failed to create transport [error:%o]",U)}};handleSendTransportListeners=()=>{this._sendTransport.on("connect",this.handleTransportConnectEvent),this._sendTransport.on("produce",this.handleTransportProduceEvent);let A=this;this._sendTransport.on("connectionstatechange",async Q=>{if(Wg.debug(`ConferenceRoom sendTransport connectionState ${Q} & socketconnection state ${this._socket.wsManager.connectionState}`),"disconnected"===Q)setTimeout(async()=>{if("disconnected"===Q)if(Wg.debug("Connection state for Send Transport is:%s even after 5 seconds",Q),Wg.warn(`sendTransport connectionState ${Q} & socketconnection state ${this._socket.wsManager.connectionState}`),"connected"===A._socket.wsManager.connectionState)A.restartIce(A._sendTransport.id,"send");else{for(;"connected"!==A._socket.wsManager.connectionState;)Wg.debug(`socket not yet ready with state- ${A._socket.wsManager.connectionState}`),await FE(1e3);"connected"===this._roomStatus&&A.restartIce(A._sendTransport.id,"send")}},5e3);else if("failed"===Q)if(Wg.warn(`sendTransport connectionState ${Q} & socketconnection state ${this._socket.wsManager.connectionState}`),"connected"===A._socket.wsManager.connectionState)A.restartIce(A._sendTransport.id,"send");else{for(;"connected"!==A._socket.wsManager.connectionState;)Wg.debug(`handleSendTransportListeners() | socket not yet ready with state- ${A._socket.wsManager.connectionState}`),await FE(1e3);"connected"===this._roomStatus&&A.restartIce(A._sendTransport.id,"send")}Wg.debug("ConferenceRoom",`send transport connection state change [state:%s]${Q}`)})};handleTransportConnectEvent=({dtlsParameters:A},Q,B)=>{try{const 
B=A=>{Wg.debug("connect-transport action"),Q(),Zg.remove("connectTransport")};Zg.push("connectTransport",B);let I={id:"connectTransport",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,transportId:this._sendTransport.id,dtlsParameters:A,direction:"send"};this._sendMessage(I)}catch(I){Wg.error("handleTransportConnectEvent() failed [error:%o]",I),B(I)}};handleTransportProduceEvent=({kind:A,rtpParameters:Q,appData:B},I,U)=>{try{const U=A=>{Wg.debug("handleTransportProduceEvent callback [data:%o]",A),I({id:A.producerId}),Zg.remove("produce")};Zg.push("produce",U);let F="cam-audio"===B.mediaTag&&void 0!==this.data.inputParams.audioStatus&&!this.data.inputParams.audioStatus;Wg.debug(`handleTransportProduceEvent() | pause status->${F}`);let C={id:"sendTrack",transportId:this._sendTransport.id,peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,kind:A,rtpParameters:Q,paused:F,appData:B,clientOs:this._client.os.name,browser:this._client.browser};this._sendMessage(C)}catch(F){Wg.error("handleTransportProduceEvent() failed [error:%o]",F),U(F)}};produceMedia=async()=>{this.data.inputParams.produce?(this.data.inputParams.produceAudio?this.enableMic({deviceId:this.data.inputParams.audioDeviceId?this.data.inputParams.audioDeviceId:null}):Wg.debug("No need to produce audio!"),this._device.canProduce("video")&&(this.data.inputParams.produceVideo?(Wg.debug("going to enable cam with vbdetails",this.data.inputParams.vbdetails),this.enableCam({deviceId:this.data.inputParams.videoDeviceId?this.data.inputParams.videoDeviceId:null,vbdetails:this.data.inputParams.vbdetails})):Wg.debug("No need to produce video!"),this.data.inputParams.share&&this.enableShare({shareAudio:this.data.inputParams.shareAudio,enableSharingLayers:this._enableSharingLayers,shareBitRates:this.data.inputParams.shareBitRates}))):Wg.warn("produce is false!")};handleRecvTransportListeners=async()=>{this._recvTransport.on("connect",this.handleRecvTransportConnectEvent);let A=this;this._recvTransport.on("connectionstatechange",async Q=>{if("disconnected"===Q)setTimeout(async()=>{if("disconnected"===Q)if(Wg.warn("Connection state for Recv Transport is:%s even after 5 seconds:%s, socket state",Q,this._socket.wsManager.connectionState),"connected"===this._socket.wsManager.connectionState)A.restartIce(A._recvTransport.id,"recv");else{for(;"connected"!==this._socket.wsManager.connectionState;)Wg.debug(`handleRecvTransportListeners() | socket not yet ready with state- ${this._socket.wsManager.connectionState}`),await FE(1e3);"connected"===this._roomStatus&&A.restartIce(A._recvTransport.id,"recv")}},5e3);else if("failed"===Q)if(Wg.warn("Connection state for Recv Transport is:%s even after 5 seconds:%s, socket state",Q,this._socket.wsManager.connectionState),"connected"===this._socket.wsManager.connectionState)A.restartIce(A._recvTransport.id,"recv");else{for(;"connected"!==this._socket.wsManager.connectionState;)Wg.debug(`handleRecvTransportListeners() | socket not yet ready with state- ${this._socket.wsManager.connectionState}`),await FE(1e3);"connected"===this._roomStatus&&A.restartIce(A._recvTransport.id,"recv")}else Wg.debug("Connection state for Recv Transport is:%s even after 5 seconds:%s, socket state",Q,this._socket.wsManager.connectionState)});let Q={id:"transportsAvailable",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,rtpCapabilities:this._device.rtpCapabilities};this._sendMessage(Q)};handleRecvTransportConnectEvent=({dtlsParameters:A},Q,B)=>{try{const 
B=A=>{Wg.debug("ConferenceRoom","connect-recv-transport action"),Q(),Zg.remove("connectRecvTransport")};Zg.push("connectRecvTransport",B);let I={id:"connectTransport",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,transportId:this._recvTransport.id,dtlsParameters:A,direction:"recv"};this._sendMessage(I)}catch(I){Wg.error("handleTransportConnectEvent() failed [error:%o]",I),B(I)}};handleRecvTrackRequest=async A=>{if(Wg.debug("Room handleRecvTrackRequest",A),!this.data.inputParams.consume)return void Wg.warn("I do not want to consume");let{senderPeerId:Q,mediaTag:B,sender:I,audioStatus:U,videoStatus:F,senderParticipantType:C,type:E,producerPaused:g,...R}=A;Wg.debug("New consumer created",R),R.id=R.consumerId,delete R.consumerId,Wg.debug("ConferenceRoom",`senderPeerId is ->${Q}`);let J=await this._recvTransport.consume({...R,streamId:`${Q}-${"screen-video"===B||"screen-audio"===B?"share":"mic-webcam"}`,appData:{peerId:Q,mediaTag:B}});for(;this._recvTransport&&"connected"!==this._recvTransport.connectionState;)Wg.debug(`recv transport connstate${this._recvTransport.connectionState}`),await FE(100);this._consumers.set(J.id,J),J.on("transportclose",()=>{this._consumers.delete(J.id)});const{spatialLayers:n,temporalLayers:S}=SB.parseScalabilityMode(J.rtpParameters.encodings[0].scalabilityMode),V=this._peers.get(this.data.inputParams.peerId);Wg.debug(`Consumer created for sender peerId ${Q} for kind ${J.kind} for receiver peerId ${this.data.inputParams.peerId}`),Wg.info("The old peer data is :%O",V),V?(V["screen-video"===B||"screen-audio"===B?`ss${J.kind}`:J.kind]={consumerId:J.id,type:E,locallyPaused:!1,remotelyPaused:g,rtpParameters:J.rtpParameters,spatialLayers:n,temporalLayers:S,preferredSpatialLayer:n-1,preferredTemporalLayer:S-1,priority:1,codec:J.rtpParameters.codecs[0].mimeType.split("/")[1],track:J.track,share:"screen-video"===B||"screen-audio"===B},Wg.info("The new peer data is :%O",V),this._peers.set(this.data.inputParams.peerId,V)):(Wg.info("Peer not found!"),this._peers.set(this.data.inputParams.peerId,{["screen-video"===B||"screen-audio"===B?`ss${J.kind}`:J.kind]:{consumerId:J.id,type:E,locallyPaused:!1,remotelyPaused:g,rtpParameters:J.rtpParameters,spatialLayers:n,temporalLayers:S,preferredSpatialLayer:n-1,preferredTemporalLayer:S-1,priority:1,codec:J.rtpParameters.codecs[0].mimeType.split("/")[1],track:J.track,share:"screen-video"===B||"screen-audio"===B}})),await this.resumeConsumer(J),Wg.debug("Going to emit mic start / videostart"),"audio"===J.kind?"screen-audio"===B?this.emit("ssAudioStart",{peerId:Q,audioTrack:J.track,type:"remote"}):this.emit("micStart",{peerId:Q,audioTrack:J.track,type:"remote"}):"video"===J.kind&&("screen-video"===B?this.emit("ssVideoStart",{peerId:Q,videoTrack:J.track,type:"remote"}):this.emit("videoStart",{peerId:Q,videoTrack:J.track,type:"remote"}))};resumeConsumer=async A=>{if(A){Wg.debug("resume consumer",A.appData.peerId,A.appData.mediaTag);try{let Q={id:"resumeConsumer",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,consumerId:A.id};this._sendMessage(Q),await A.resume()}catch(Q){Wg.error("resumeConsumer error",Q)}}};handleConnectTransportRequest=async A=>{Wg.debug("handleTransportConnectRequest()");try{const Q=Zg.get("connectTransport");if(!Q)throw new Error("transport-connect action was not found");await Q(A)}catch(Q){Wg.error("handleTransportConnectRequest() failed [error:%o]",Q)}};handleConnectRecvTransportRequest=async A=>{Wg.debug("handleTransportConnectRequest()");try{const 
Q=Zg.get("connectRecvTransport");if(!Q)throw new Error("recv transport-connect action was not found");await Q(A)}catch(Q){Wg.error("handleRecvTransportConnectRequest() failed [error:%o]",Q)}};handleSendTrackRequest=async A=>{Wg.debug("ConferenceRoom","handleProduceRequest()");try{const Q=Zg.get("produce");if(!Q)throw new Error("produce action was not found");await Q(A)}catch(Q){Wg.error("handleProduceRequest() failed [error:%o]",Q)}};mediaToggled=A=>{switch(Wg.debug("Media Toggled message:%O",A),A.type){case"video":Wg.debug(`mediaToggled() | inside case video${A.videoStatus}`);break;case"audio":Wg.debug(`mediaToggled() | inside case audio${A.videoStatus}`),A.audioStatus?this.emit("peerUnMuted",{peerId:A.peerId,type:"remote"}):this.emit("peerMuted",{peerId:A.peerId,type:"remote"})}};closeConsumer=A=>{let{consumerId:Q}=A;const B=this._consumers.get(Q);if(!B)return void Wg.warn("Consumer with id not found!:%s",Q);const{peerId:I,mediaTag:U}=B.appData;Wg.debug("Consumer closed for consumerId:%s, type:%s, appData:%o",Q,B?.kind,B.appData);let F="screen-audio"===U||"screen-video"===U?`ss${B.kind}`:B.kind;B.close(),this._consumers.delete(Q);let C=this._peers.get(this.data.inputParams.peerId);Wg.debug("Peer data before deletion:%O",C),C[F]&&C[F].consumerId===Q&&delete C[F],Wg.debug("Peer data after deletion:%O",C),this._peers.set(this.data.inputParams.peerId,C),"audio"===B?.kind?(Wg.debug("Going to emit micEnd, consumer closed for audio"),"screen-audio"===U?this.emit("ssAudioStop",{peerId:I,track:null,type:"remote"}):this.emit("micEnd",{peerId:I,track:null,type:"remote"})):"video"===B?.kind&&(Wg.debug("Going to emit videoEnd, consumer closed for video"),"screen-video"===U?this.emit("ssVideoStop",{peerId:I,track:null,type:"remote"}):this.emit("videoEnd",{peerId:I,track:null,type:"remote"}))};peerLeft=A=>{Wg.debug("Peer Left message is:%o",A);let{peerId:Q}=A;this._peers.delete(Q),this.emit("peerLeft",{peerId:Q})};roomClosed=()=>{Wg.info("room closed by Moderator"),this._peers=null,this.emit("roomClosed",{roomId:this.data.inputParams.roomId})};close(){this._closed||(this._closed=!0,this._socket=null,this.data.inputParams={},Wg.info("Room close()"),this._sendTransport&&this._sendTransport.close(),this._recvTransport&&this._recvTransport.close(),this._roomStatus="closed",this._running=!1)}async leaveRoom(){Wg.debug("Leave room is called!!"),"connected"===this._roomStatus?(this._sendMessage({id:"leaveRoomNew",peerId:this.data.inputParams.peerId,roomLeaveType:"client"}),await this.leaveRoomCommon()):Wg.error("The room state is:%s",this._roomStatus)}async closeRoom(){"connected"===this._roomStatus?(this._sendMessage({id:"leaveAndCloseRoom",peerId:this.data.inputParams.peerId,roomCloseType:"client"}),await this.leaveRoomCommon()):Wg.error("The room state is:%s",this._roomStatus)}leaveRoomCommon=async()=>{try{Wg.debug("Starting comprehensive room leave cleanup...");try{this._cleanupAudioMonitoring(),this._stopSpeakingWhileMutedDetection(),ag&&"function"==typeof ag.cleanup&&ag.cleanup()}catch(A){}const I=new Set;this._webcamProducer?.track&&I.add(this._webcamProducer.track),this._micProducer?.track&&I.add(this._micProducer.track),this._shareProducer?.track&&I.add(this._shareProducer.track),this._shareAudioProducer?.track&&I.add(this._shareAudioProducer.track),this._producers&&this._producers.size>0&&this._producers.forEach(A=>{A?.track&&I.add(A.track)}),this._webCamStream&&this._webCamStream.getTracks().forEach(A=>I.add(A)),this._micStream&&this._micStream.getTracks().forEach(A=>I.add(A));try{const 
A=ag?.getVBStream?.()||ag?._localVBStream;A&&"function"==typeof A.getTracks&&A.getTracks().forEach(A=>I.add(A))}catch(A){}const U=[];document.querySelectorAll("video, audio").forEach(A=>{if(A.srcObject&&"function"==typeof A.srcObject.getTracks){A.srcObject.getTracks().forEach(A=>I.add(A)),U.push({element:A,stream:A.srcObject})}}),Wg.debug(`Found ${I.size} total tracks to stop`);let F=0;for(const A of I)try{A&&"live"===A.readyState&&"function"==typeof A.stop&&(A.stop(),F++,Wg.debug(`Stopped ${A.kind} track: ${A.label||A.id}`))}catch(Q){Wg.warn("Error stopping track:",Q)}if(Wg.debug(`Stopped ${F} tracks`),this._sendTransport){try{this._sendTransport.close()}catch(A){}this._sendTransport=null}if(this._recvTransport){try{this._recvTransport.close()}catch(A){}this._recvTransport=null}this._webcamProducer=null,this._micProducer=null,this._shareProducer=null,this._shareAudioProducer=null,this._webCamStream=null,this._micStream=null,this._producers&&this._producers.clear(),this._consumers&&this._consumers.clear(),this._roomStatus="closed",this._running=!1,this._routerRtpCapabilities=null,await new Promise(A=>setTimeout(A,100));let C=0;document.querySelectorAll("video, audio").forEach(A=>{try{A.srcObject&&(A.srcObject=null,"function"==typeof A.pause&&A.pause(),"function"==typeof A.load&&A.load(),C++,Wg.debug(`Force cleared ${A.nodeName} element`))}catch(Q){Wg.warn("Error clearing element:",Q)}}),Wg.debug(`Cleared ${C} DOM elements`);try{I.forEach(A=>{A&&"function"==typeof A.removeEventListener&&(A.removeEventListener("ended",()=>{}),A.removeEventListener("mute",()=>{}),A.removeEventListener("unmute",()=>{}))})}catch(A){}if(window.gc&&"function"==typeof window.gc)try{window.gc()}catch(A){}await new Promise(A=>setTimeout(A,200));try{const A=await this.reportActiveMediaUse();Wg.debug("Final media usage report:",A);const Q=[],B=A.dom.mediaElements.filter(A=>A.hasSrcObject&&A.tracks.length>0);B.forEach(A=>{A.tracks.forEach(B=>{"live"===B.readyState&&Q.push({kind:B.kind,label:B.label,id:B.id,element:A.nodeName})})}),(Q.length>0||B.length>0)&&(Wg.warn("WARNING: Media elements or live tracks still detected after cleanup:",{liveTracks:Q,elementsWithTracks:B.length}),await this.emergencyTrackCleanup())}catch(B){Wg.error("Failed to generate final media usage report:",B)}}catch(Q){Wg.error("Error during room leave cleanup:",Q),await this.emergencyTrackCleanup()}};emergencyTrackCleanup=async()=>{Wg.debug("Performing emergency track cleanup...");try{const A=[];document.querySelectorAll("video, audio").forEach(Q=>{if(Q.srcObject&&"function"==typeof Q.srcObject.getTracks){A.push(...Q.srcObject.getTracks()),Q.srcObject=null,"function"==typeof Q.pause&&Q.pause(),"function"==typeof Q.load&&Q.load();try{Q.src=""}catch(B){}}}),A.forEach(A=>{try{"live"===A.readyState&&(A.stop(),Wg.debug(`Emergency stopped ${A.kind}: ${A.label||A.id}`))}catch(Q){}}),Wg.debug(`Emergency cleanup completed - stopped ${A.length} tracks`),await new Promise(A=>setTimeout(A,300))}catch(A){Wg.error("Emergency cleanup failed:",A)}};reportActiveMediaUse=async(A=!1)=>{const Q={sdk:{micStreamTracks:[],camStreamTracks:[],vbStreamTracks:[],shareTracks:[],producers:[],consumers:[]},dom:{mediaElements:[]},timestamp:Date.now()},B=A=>{try{return{kind:A?.kind,enabled:A?.enabled,readyState:A?.readyState,label:A?.label,id:A?.id,muted:A?.muted}}catch(Q){return{error:!0}}};try{this._micStream&&"function"==typeof this._micStream.getTracks&&(Q.sdk.micStreamTracks=this._micStream.getTracks().map(B))}catch(U){}try{this._webCamStream&&"function"==typeof 
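// ---------------------------------------------------------------------------
// Editorial sketch (not part of the published bundle). leaveRoom() /
// closeRoom() trigger the cleanup minified above (stop tracks, close
// transports, clear media elements, emergency cleanup if anything survives);
// reportActiveMediaUse() returns the inventory used for that check.
// Assumptions: `room` is an SDK room instance; the helper name is illustrative.
async function leaveAndVerify(room) {
  await room.leaveRoom(); // room.closeRoom() sends leaveAndCloseRoom instead
  // Optional: confirm nothing is still holding a live capture after cleanup.
  const report = await room.reportActiveMediaUse();
  const stillLive = report.dom.mediaElements.filter(
    (el) => el.hasSrcObject && el.tracks.length > 0
  );
  if (stillLive.length > 0) {
    console.warn("media elements still holding tracks:", stillLive);
  }
}
// ---------------------------------------------------------------------------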
this._webCamStream.getTracks&&(Q.sdk.camStreamTracks=this._webCamStream.getTracks().map(B))}catch(U){}try{const A=ag?.getVBStream?.()||ag?._localVBStream;A&&"function"==typeof A.getTracks&&(Q.sdk.vbStreamTracks=A.getTracks().map(B))}catch(U){}try{this._shareProducer?.track&&Q.sdk.shareTracks.push(B(this._shareProducer.track)),this._shareAudioProducer?.track&&Q.sdk.shareTracks.push(B(this._shareAudioProducer.track))}catch(U){}try{this._producers&&this._producers.size>0&&this._producers.forEach((A,I)=>{Q.sdk.producers.push({key:I,track:A?.track?B(A.track):null,id:A?.id,paused:A?.paused})})}catch(U){}try{this._consumers&&this._consumers.size>0&&this._consumers.forEach((A,I)=>{Q.sdk.consumers.push({key:I,track:A?.track?B(A.track):null,id:A?.id,paused:A?.paused})})}catch(U){}try{const A=Array.from(document.querySelectorAll("video, audio"));Q.dom.mediaElements=A.map(A=>{let Q=[],I=null;try{const U=A.srcObject;U&&"function"==typeof U.getTracks&&(Q=U.getTracks().map(B),I=U.id)}catch(U){}return{nodeName:A.nodeName,muted:!!A.muted,paused:!!A.paused,hasSrcObject:!!A.srcObject,streamId:I,tracks:Q,src:A.src||null,currentSrc:A.currentSrc||null}})}catch(U){}const I=[...Q.sdk.micStreamTracks,...Q.sdk.camStreamTracks,...Q.sdk.vbStreamTracks,...Q.sdk.shareTracks,...Q.sdk.producers.map(A=>A.track).filter(Boolean),...Q.sdk.consumers.map(A=>A.track).filter(Boolean),...Q.dom.mediaElements.flatMap(A=>A.tracks)].filter(A=>A&&"live"===A.readyState);return Q.summary={totalLiveTracks:I.length,elementsWithSrcObject:Q.dom.mediaElements.filter(A=>A.hasSrcObject).length,elementsWithTracks:Q.dom.mediaElements.filter(A=>A.tracks.length>0).length},Q};async listDevicesInternal(){if(navigator.mediaDevices.ondevicechange=async A=>{let Q=await _C();Wg.info("Media devices changed!:%O",Q),Q.audioDevices&&Q.audioDevices.length>0&&(this._deviceList.audioDevices=Q.audioDevices),Q.videoDevices&&Q.videoDevices.length>0&&(this._deviceList.videoDevices=Q.videoDevices),Q.audioDevices&&Q.audioDevices.length>0&&(this._deviceList.audioOutputDevices=Q.audioDevicesOutput),ig=this._deviceList,this.emit("deviceListUpdated")},!this._deviceList){const A=await $C();if(A.success)return this._deviceList=A.deviceList,void(ig=this._deviceList)}}restartIce=async(A,Q)=>{if("send"===Q&&"connected"===this._sendTransport.connectionState||"recv"===Q&&"connected"===this._recvTransport.connectionState)return void Wg.debug("no need to restart ICE as transport now connected");Wg.debug("websocket is ready and connectionstate is still disconnected, therefore going to restart ICE");let B={id:"restartIce",transportId:A,roomName:this.data.inputParams.roomId,peerId:this.data.inputParams.peerId};this._sendMessage(B)};restartIceResponse=A=>{Wg.debug("restart ICE response:%o",A);let{transportId:Q,iceParameters:B}=A;this._sendTransport&&this._sendTransport.id===Q?this._sendTransport.restartIce({iceParameters:B}):this._recvTransport&&this._recvTransport.id===Q&&this._recvTransport.restartIce({iceParameters:B})};startRecording=({recordingType:A=null,outputType:Q=null,outputQualities:B=null}={})=>{Wg.debug("recording type requested is:%s,outputType:%s, outputQualties:%o",A,Q,B);const I=!A||"av"!==A?.toLowerCase()&&"audiovideo"!==A?.toLowerCase()?"mergedA":"mergedAV";if((!Q||"hls"!==Q.toLowerCase()&&"mp4"!==Q.toLowerCase())&&Q)return Wg.error("Invalid outut type"),{success:!1,reason:`Invalid outputType: ${Q}. `};if(Q&&"hls"===Q.toLowerCase()&&B&&!yE(B))return Wg.error("Invalid outut qualities"),{success:!1,reason:`Invalid outputQualities: ${JSON.stringify(B)}. 
Allowed values are ${Array.from(TE).join(", ")}.`};let U={id:"startRecording",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,url:window.location.hostname,type:pC,recordingStrategy:I,outputQualities:B,outputType:Q?.toLowerCase()};this._sendMessage(U),this._recordingStartedByMe={...this._recordingStartedByMe,"main-room":{recordingNo:null}}};stopRecording=()=>{Wg.debug("going to stop recording for recordingStartedByMe:%o",this._recordingStartedByMe);let A="main-room";if(!this._recordingStartedByMe[A])return{success:!1,error:!0,code:"RRID001",text:"Error while trying to stop recording. Either the recording has not been started yet Or The same user need to stop recording who started it."};{let Q={id:"stopRecording",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,recordingNo:this._recordingStartedByMe[A].recordingNo,type:pC};this._sendMessage(Q),delete this._recordingStartedByMe[A],this.emit("recordingEnded",{peerId:this.data.inputParams.peerId})}};setRecordingStatusStarted=A=>{Wg.debug("Recording/Streaming started by moderator!!:%O",A);let{breakOutRoom:Q,recordingStartTime:B,recordingNo:I,type:U}=A;[pC,rC].includes(U)&&(this._recordingStartedByMe["main-room"]?(Wg.debug("This recording has been started by me."),this._recordingStartedByMe["main-room"].recordingNo=I,this.emit("recordingStarted",{peerId:this.data.inputParams.peerId,startTime:B})):this.emit("recordingStarted",{startTime:B}))};setRecordingStatusEnded=A=>{Wg.debug("Recording ended by moderator!!, data:%O",A);let{breakOutRoom:Q,type:B}=A;"rtmpStream"===B||this.emit("recordingEnded",{})};startProcessing=async({inputFiles:A=[],outputQualities:Q=null,bucket:B=null,cloud:I=null,region:U=null}={})=>{Wg.debug("Processing of Files requested for:%o",A);const F=Math.round(1e7*Math.random()),C=await async function(A){if(cE.info("The input files are:%o, length:%s",A,A.length),A.length>0){cE.info("Files array length is:%s",A.length);for(const{type:Q,url:B}of A){if(cE.info("The file detais are type:%s, url:%s",Q,B),!YE.includes(Q))return{success:!1,reason:`Type "${Q}" is not allowed.`};if(!tE(B,Q))return{success:!1,reason:`Extension mismatch for ${B}; expected .${Q}`}}return{success:!0}}return{success:!1,reason:"There are no files for processing!"}}(A);if(C.success){if(Q&&!yE(Q))return Wg.error("Invalid outut qualities"),{success:!1,reason:`Invalid outputQualities: ${JSON.stringify(Q)}. 
Allowed values are ${Array.from(TE).join(", ")}.`};this._processingStartedByMe={...this._processingStartedByMe,[F]:{}};for(const{type:Q,url:B}of A)this._processingStartedByMe={...this._processingStartedByMe,[F]:{...this._processingStartedByMe[F],[B]:{type:Q,url:B,status:"pending"}}};let C={id:"processVideos",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,inputFiles:A,outputQualities:Q,bucket:B,cloud:I,region:U,requestId:F,type:"process"};return this._sendMessage(C),{success:!0}}return C};checkProcessingStatus=({requestId:A})=>(Wg.debug("Going to check processing status for request Id:%s",A),this._processingStartedByMe[A]?{success:!0,details:this._processingStartedByMe[A]}:{success:!0,details:this._processingStartedByMe});handleProcessingStart=A=>{const{processingStartTime:Q,processingNo:B,requestId:I}=A;Wg.debug("handleProcessingStart()| received message is:%o",A),this.emit("processingStarted",{processingStartTime:Q,requestId:I})};handleProcessingCompletion=A=>{const{totalProcessingTime:Q,hlsfileKey:B,size:I,originalFile:U,lastFile:F,requestId:C}=A;if(Wg.debug("handleProcessingCompletion()| received message is:%o",A),Wg.debug("Before update, Total files to be processed are:%o",this._processingStartedByMe),this._processingStartedByMe[U]&&(this._processingStartedByMe={...this._processingStartedByMe,[C]:{...this._processingStartedByMe[C],[U]:{...this._processingStartedByMe[U],status:"completed",hlsfileKey:B,size:I,totalProcessingTime:Q}}}),Wg.debug("After update, Total files to be processed are:%o",this._processingStartedByMe),this.emit("processingCompleted",A),F){Wg.debug("The last file processing has been completed! Remove all the files that has been completed with the same requesterId");let A={...this._processingStartedByMe};delete A[C],Wg.debug("After deleting the current requestId:%o",A),this._processingStartedByMe=A}};handleProcessingError=A=>{const{totalProcessingTime:Q,hlsfileKey:B,size:I,originalFile:U,lastFile:F,requestId:C,error:E}=A;Wg.debug("handleProcessingCompletion()| received message is:%o",A),Wg.debug("Before update, Total files to be processed are:%o",this._processingStartedByMe),this._processingStartedByMe[U]&&(this._processingStartedByMe={...this._processingStartedByMe,[C]:{...this._processingStartedByMe[C],[U]:{...this._processingStartedByMe[U],status:"error",hlsfileKey:B,size:I,totalProcessingTime:Q,error:E}}}),Wg.debug("After update, Total files to be processed are:%o",this._processingStartedByMe),this.emit("processingError",A)};async enableMic({deviceId:A=null,autoGainControl:Q,noiseSuppression:B,echoCancellation:I,channelCount:U,sampleRate:F,forcePCMU:C,forcePCMA:E}={}){if(Wg.debug("enableMic()"),!this.data.inputParams.produce)return Wg.debug("Produce status is set to false!"),{success:!1,error:!0,code:"REID007",text:"Error while trying to start Mic/Audio. Produce flag need to set to true while joining room in order to enable Mic/Audio."};if("connected"!==this._roomStatus)return Wg.debug("Room status is not connected yet!"),{success:!1,error:!0,code:"REID008",text:`Error while trying to start Mic/Audio as room not in connected status. Current room status:!${this._roomStatus}`,possibleReasons:"Did you forget to call joinRoom before enabling Mic? OR if you have already initiated the joinRoom process, then Mic will be enabled automatically once room join process completes."};if(this._micProducer)return Wg.debug("Mic is already active!"),{success:!1,warning:!0,code:"RWID002",text:"Error while trying to start Mic/Audio. 
Mic/Audio is already active!"};if(!this._device.canProduce("audio"))return Wg.error("enableMic() | cannot produce audio"),{success:!1,error:!0,code:"REID009",text:"Error while trying to start Mic/Audio. Mic/Audio couldnot be activated due to limitations on this device. If you think this device has a functional Mic and the problem persists even after multiple retries, please contact technical support with the error code."};let g,R;C&&"boolean"==typeof C&&(this.data.inputParams.forcePCMU=C),E&&"boolean"==typeof E&&(this.data.inputParams.forcePCMA=E),Q&&"boolean"==typeof Q&&(this.data.inputParams.autoGainControl=Q),I&&"boolean"==typeof I&&(this.data.inputParams.echoCancellation=I),B&&"boolean"==typeof B&&(this.data.inputParams.noiseSuppression=B),F&&Number.isInteger(F)&&F<64e3&&F>8e3&&(this.data.inputParams.sampleRate=F),U&&Number.isInteger(U)&&U>0&&U<3&&(this.data.inputParams.channelCount=U);try{if(this._externalVideo)this._micStream=await this._getExternalVideoStream(),g=this._micStream.getAudioTracks()[0].clone();else{if(A?(R=this._deviceList.audioDevices.find(Q=>Q.deviceId===A),R||(Wg.warn("Selected audio input deviceId:%s not found",A),R=this._deviceList.audioDevices[0])):R=this._deviceList.audioDevices[0],this._mic.device=R,!R)return Wg.error("No mic device found! Can't start audio!"),{success:!1,reason:"No mic available for starting audio!"};A&&this.data.inputParams.audioDeviceId!==A&&(this.data.inputParams.audioDeviceId=A),Wg.debug("enableMic() | calling getUserMedia()");try{this._micStream=await navigator.mediaDevices.getUserMedia({audio:{deviceId:{exact:R.deviceId},echoCancellation:this.data.inputParams.echoCancellation,noiseSuppression:this.data.inputParams.noiseSuppression,autoGainControl:this.data.inputParams.autoGainControl,sampleRate:this.data.inputParams.sampleRate,channelCount:this.data.inputParams.channelCount}}),g=this._micStream.getAudioTracks()[0]}catch(pg){throw new Error("Error while acquiring mic. 
Possible issue with audio constraint values",pg)}}this._micProducer=await this._sendTransport.produce({track:g,codecOptions:this.data.inputParams.forcePCMU||this.data.inputParams.forcePCMA?void 0:{opusStereo:!1,opusDtx:!0,opusFec:!0,opusNack:!0},codec:this.data.inputParams.forcePCMU?this._device.rtpCapabilities.codecs.find(A=>"audio/pcmu"===A.mimeType.toLowerCase()):this.data.inputParams.forcePCMA?this._device.rtpCapabilities.codecs.find(A=>"audio/pcma"===A.mimeType.toLowerCase()):void 0,appData:{mediaTag:"cam-audio"}}),this._producers.set("audio",{id:this._micProducer.id,paused:this._micProducer.paused,track:this._micProducer.track,rtpParameters:this._micProducer.rtpParameters,codec:this._micProducer.rtpParameters.codecs[0].mimeType.split("/")[1]}),this.emit("micStart",{peerId:this.data.inputParams.peerId,audioTrack:this._micProducer.track,type:"local"}),this._micProducer.on("transportclose",()=>{this._micProducer=null}),this._micProducer.on("trackended",()=>{this.disableMic().catch(()=>{})}),await this._initializeAudioMonitoring(),this._startSpeakingWhileMutedDetection()}catch(J){Wg.error("enableMic() | failed:%o",J),this.emit("error",{code:"EID002",text:"Error enabling microphone!"}),g&&g.stop()}}async disableMic(){if(Wg.debug("disableMic()"),this._cleanupAudioMonitoring(),this._micStream&&this._micStream.getAudioTracks().forEach(A=>A.stop()),this._micProducer){this._micProducer.close(),this._producers.delete("audio");try{let A={id:"closeProducerSDK",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,type:"audio",producerId:this._micProducer.id};this._sendMessage(A),this.emit("micEnd",{peerId:this.data.inputParams.peerId,audioTrack:null,type:"local"})}catch(A){this.emit("error",{code:"EID003",text:"Error disabling microphone!"})}this._micProducer=null}}async muteMic(){Wg.debug("muteMic()"),this._micProducer.pause();try{let A={id:"toggleMedia",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,type:"audio",audioStatus:!1,producerId:this._micProducer.id};this._sendMessage(A),this.emit("peerMuted",{peerId:this.data.inputParams.peerId,type:"local"})}catch(A){Wg.error("muteMic() | failed: %o",A),this.emit("error",{code:"EID004",text:"Error muting local microphone!"})}}async unmuteMic(){Wg.debug("unmuteMic()"),this._micProducer||(Wg.debug("Mic is not active!"),await this.enableMic()),this._micProducer.resume();try{let A={id:"toggleMedia",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,type:"audio",audioStatus:!0,producerId:this._micProducer.id};this._sendMessage(A),this.emit("peerUnMuted",{peerId:this.data.inputParams.peerId,type:"local"})}catch(A){Wg.error("unmuteMic() | failed: %o",A),this.emit("error",{code:"EID005",text:"Error unmuting local microphone!"})}}startSpeechRecognition(A={}){const Q={lang:"es-ES",continuous:!0,interimResults:!0,maxAlternatives:3,autoRestart:!0,restartDelayMs:250,...A},B=window.SpeechRecognition||window.webkitSpeechRecognition,I=window.SpeechGrammarList||window.webkitSpeechGrammarList;if(!B)return this.emit("sttError",{code:"UNSUPPORTED",message:"Web Speech API not supported"}),{success:!1,reason:"unsupported"};try{if(this._speechRecognition){try{this._speechRecognition.onend=null,this._speechRecognition.onresult=null,this._speechRecognition.onerror=null}catch{}try{this._speechRecognition.stop()}catch{}this._speechRecognition=null}const A=new 
B;if(A.lang=Q.lang,A.continuous=!!Q.continuous,A.interimResults=!!Q.interimResults,A.maxAlternatives=Q.maxAlternatives,Q.grammars&&Array.isArray(Q.grammars)&&I){const B=new I,U=`#JSGF V1.0; grammar terms; public <term> = ${Q.grammars.join(" | ")};`;B.addFromString&&B.addFromString(U,1),A.grammars&&(A.grammars=B)}return this._sttShouldRun=!0,this._sttAutoRestart=!!Q.autoRestart,this._sttRestartDelayMs=Number(Q.restartDelayMs)||250,A.onstart=()=>this.emit("sttStart",{timestamp:Date.now(),lang:A.lang}),A.onresult=Q=>{const B=Q.results[Q.results.length-1],I=B&&B[0]?B[0]:null,U={transcript:I?I.transcript:"",confidence:I?I.confidence:0,isFinal:!!B&&B.isFinal,timestamp:Date.now(),lang:A.lang},F=U.timestamp,C=this.data.inputParams.peerId||"local";this._transcriptStorage.has(F)||this._transcriptStorage.set(F,new Map);this._transcriptStorage.get(F).set(C,{transcript:U.transcript,isFinal:U.isFinal}),this.emit("sttResult",U)},A.onerror=A=>{this.emit("sttError",{code:A.error||"UNKNOWN",message:A.message||"Speech recognition error"})},A.onend=()=>{if(this.emit("sttEnd",{timestamp:Date.now()}),this._sttShouldRun&&this._sttAutoRestart){const B=this._sttRestartDelayMs;try{setTimeout(()=>{if(this._sttShouldRun&&this._sttAutoRestart)try{A.start()}catch(Q){}},B)}catch(Q){}}},A.start(),this._speechRecognition=A,{success:!0}}catch(pg){return this.emit("sttError",{code:"INIT_FAILED",message:pg.message}),{success:!1,reason:pg.message}}}stopSpeechRecognition(){try{if(this._sttShouldRun=!1,this._sttAutoRestart=!1,this._speechRecognition){try{this._speechRecognition.onend=null,this._speechRecognition.onresult=null,this._speechRecognition.onerror=null}catch{}this._speechRecognition.stop(),this._speechRecognition=null}return{success:!0}}catch(pg){return this.emit("sttError",{code:"STOP_FAILED",message:pg.message}),{success:!1,reason:pg.message}}}async startRemoteCaption({lang:A}={}){try{if(this._remoteCaption&&this._remoteCaption.enabled)return{success:!0,alreadyRunning:!0};if(!this._voskModule)try{this._voskModule=await Promise.resolve().then(()=>bg)}catch(Q){return this.emit("remoteCaptionError",{code:"VOSK_IMPORT_FAILED",message:Q?.message||"Failed to import vosk-browser"}),{success:!1,reason:"import_failed"}}let I;switch(A){case"es-ES":I="vosk-model-small-es-0.42.tar.gz";break;case"de-DE":I="vosk-model-small-de-0.15.tar.gz";break;default:I="vosk-model-small-en-us-0.15.tar.gz"}const U=[`/models/${I}`,`models/${I}`,`/samvyo-js-sdk/lib/models/${I}`],F=async A=>{for(const B of A)try{if((await fetch(B,{method:"HEAD"})).ok)return B}catch(Q){}return A[0]},C=await F(U);this._remoteCaption||(this._remoteCaption={});const E=this._remoteCaption;E.model||(E.model=await this._voskModule.createModel(C)),E.enabled=!0,E.audioTracks=E.audioTracks||new Map,E.currentSpeaker=null,E.recognizer=null,E.audioCtx=null,E.source=null,E.processor=null;const g=()=>{try{E.recognizer&&E.recognizer.remove&&E.recognizer.remove()}catch(A){}E.recognizer=null;try{E.processor&&E.processor.disconnect&&E.processor.disconnect()}catch(A){}E.processor=null;try{E.source&&E.source.disconnect&&E.source.disconnect()}catch(A){}E.source=null;try{E.audioCtx&&"closed"!==E.audioCtx.state&&E.audioCtx.close&&E.audioCtx.close()}catch(A){}E.audioCtx=null},R=async A=>{if(!E.enabled)return;if(!A)return;if(E.currentSpeaker===A)return;E.currentSpeaker=A,g();const Q=E.audioTracks.get(A);if(!Q)return;let B;try{B=new E.model.KaldiRecognizer(16e3)}catch(R){try{B=new E.model.KaldiRecognizer(void 0,16e3)}catch(J){try{B=new E.model.KaldiRecognizer}catch(n){return void 
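// ---------------------------------------------------------------------------
// Editorial sketch (not part of the published bundle). startSpeechRecognition()
// (minified above) wraps the browser Web Speech API and emits sttStart,
// sttResult, sttError and sttEnd. Assumptions: `room` is an SDK room instance
// with .on(); the helper name and the "en-US" override are illustrative (the
// bundle's default lang is "es-ES").
function captureLocalTranscripts(room) {
  room.on("sttResult", ({ transcript, confidence, isFinal }) => {
    if (isFinal) console.log(`"${transcript}" (confidence ${confidence.toFixed(2)})`);
  });
  room.on("sttError", ({ code, message }) => console.warn("STT error:", code, message));
  const started = room.startSpeechRecognition({
    lang: "en-US",
    interimResults: true,
    autoRestart: true,      // the recognizer is restarted after ~250 ms on end
  });
  if (!started.success) {
    console.warn("speech recognition unavailable:", started.reason);
  }
  // Later: room.stopSpeechRecognition();
}
// ---------------------------------------------------------------------------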
this.emit("remoteCaptionError",{code:"RECOGNIZER_CREATE_FAILED",message:n?.message||""})}}}E.recognizer=B,B.on&&B.on("result",Q=>{const B=Q?.result?.text||"";B&&E.enabled&&this.emit("remoteCaption",{peerId:A,text:B,isFinal:!0})}),B.on&&B.on("partialresult",Q=>{const B=Q?.result?.partial||"";B&&E.enabled&&this.emit("remoteCaption",{peerId:A,text:B,isFinal:!1})});const I=new MediaStream([Q]),U=new(window.AudioContext||window.webkitAudioContext)({sampleRate:16e3});if(E.audioCtx=U,"suspended"===U.state)try{await U.resume()}catch(S){}const F=U.createMediaStreamSource(I);E.source=F;const C=U.createScriptProcessor(4096,1,1);E.processor=C,F.connect(C),C.connect(U.destination),C.onaudioprocess=A=>{if(E.enabled&&E.recognizer)try{E.recognizer.acceptWaveform(A.inputBuffer)}catch(S){}}};E._onMicStart=({peerId:A,audioTrack:Q})=>{E.enabled&&(Q&&E.audioTracks.set(A,Q),this._activeSpeaker&&this._activeSpeaker.peerId===A&&R(A))},E._onMicEnd=({peerId:A})=>{E.enabled&&(E.audioTracks.delete(A),E.currentSpeaker===A&&(g(),E.currentSpeaker=null))},E._onActiveSpeaker=({peerId:A})=>{E.enabled&&R(A)},this.on("micStart",E._onMicStart),this.on("micEnd",E._onMicEnd),this.on("activeSpeaker",E._onActiveSpeaker);try{this._micProducer?.track&&E.audioTracks.set(this.data.inputParams.peerId,this._micProducer.track)}catch(B){}try{this._consumers&&this._consumers.size>0&&this._consumers.forEach(A=>{try{"audio"===A?.kind&&A?.track&&A?.appData?.peerId&&E.audioTracks.set(A.appData.peerId,A.track)}catch(B){}})}catch(B){}return this._activeSpeaker?.peerId&&R(this._activeSpeaker.peerId),{success:!0,modelUrl:C}}catch(pg){return this.emit("remoteCaptionError",{code:"START_FAILED",message:pg?.message||String(pg)}),{success:!1,reason:pg?.message||"unknown"}}}stopRemoteCaption({unloadModel:A=!1}={}){try{const B=this._remoteCaption;if(!B)return{success:!0,alreadyStopped:!0};if(B.enabled=!1,B._onMicStart)try{this.off("micStart",B._onMicStart)}catch(Q){}if(B._onMicEnd)try{this.off("micEnd",B._onMicEnd)}catch(Q){}if(B._onActiveSpeaker)try{this.off("activeSpeaker",B._onActiveSpeaker)}catch(Q){}try{B.recognizer&&B.recognizer.remove&&B.recognizer.remove()}catch(Q){}B.recognizer=null;try{B.processor&&B.processor.disconnect&&B.processor.disconnect()}catch(Q){}B.processor=null;try{B.source&&B.source.disconnect&&B.source.disconnect()}catch(Q){}B.source=null;try{B.audioCtx&&"closed"!==B.audioCtx.state&&B.audioCtx.close&&B.audioCtx.close()}catch(Q){}if(B.audioCtx=null,B.currentSpeaker=null,B.audioTracks=new Map,A){try{B.model&&B.model.terminate&&B.model.terminate()}catch(Q){}B.model=null}return{success:!0}}catch(pg){return this.emit("remoteCaptionError",{code:"STOP_FAILED",message:pg?.message||String(pg)}),{success:!1,reason:pg?.message||"unknown"}}}async enableCam({deviceId:A=null,videoResolution:Q,forceVp8:B,forceVp9:I,forceH264:U,h264Profile:F,forceFPS:C,enableWebcamLayers:E,numSimulcastStreams:g,videoBitRates:R,vbdetails:J}={}){if(Wg.debug("enableWebcam()"),Wg.debug("first vbdetails in enablecam",J),!this.data.inputParams.produce)return Wg.debug("Produce status is set to false!"),{success:!1,error:!0,code:"REID004",text:"Error while trying to start Camera. Produce flag need to set to true while joining room in order to enable Camera."};if("connected"!==this._roomStatus)return Wg.debug("Room status is not connected yet!"),{success:!1,error:!0,code:"REID005",text:`Error while trying to start Camera as room not in connected status. Current room status:!${this._roomStatus}`,possibleReasons:"Did you forget to call joinRoom before enabling Camera? 
OR if you have already initiated the joinRoom process, then Camera will be enabled automatically once room join process completes."};if(this._webcamProducer)return Wg.debug("Camera is already active!"),{success:!1,warning:!0,code:"RWID003",text:"Error while trying to start Camera. Camera is already active!"};if(!this._device.canProduce("video"))return Wg.error("enableWebcam() | cannot produce video"),{success:!1,error:!0,code:"REID006",text:"Error while trying to start Camera. Camera couldnot be activated due to limitations on this device. If you think this device has a functional camera and the problem persists even after multiple retries, please contact technical support with the error code."};let n,S;["hd","vga","qvga"].includes(Q)&&(this.data.inputParams.videoResolution=Q,this._webcam.resolution=Q),B&&"boolean"==typeof B&&(this.data.inputParams.forceVp8=B),I&&"boolean"==typeof I&&(this.data.inputParams.forceVp9=I),U&&"boolean"==typeof U&&(this.data.inputParams.forceH264=U),F&&["high","low"].includes(F.toLowerCase())&&(this.data.inputParams.h264Profile=F),C&&Number.isInteger(C)&&C<65&&C>5&&(this.data.inputParams.forceFPS=25),E&&"boolean"==typeof E&&(this.data.inputParams.enableWebcamLayers=E,this._enableWebcamLayers=E),g&&Number.isInteger(g)&&g<4&&g>0&&(this.data.inputParams.numSimulcastStreams=g,this._numSimulcastStreams=g),Array.isArray(R)&&R.length>=1&&R.length<=3&&R.every(A=>Number.isInteger(A)&&A>=75&&A<=800)?(Wg.debug("videoBitRates values are correct"),this.data.inputParams.videoBitRates=R):Wg.warn("videobitrates values should be an integer array with maximum 3 elements and minimum 1 element. The values in the array are '[700,250,75]'");try{if(this._externalVideo)S={label:"external video"},this._webCamStream=await this._getExternalVideoStream(),n=this._webCamStream.getVideoTracks()[0].clone();else{A?(S=this._deviceList.videoDevices.find(Q=>Q.deviceId===A),S||(Wg.warn("Selected deviceId:%s not found",A),S=this._deviceList.videoDevices[0])):S=this._deviceList.videoDevices[0],this._webcam.device=S;const{resolution:Q}=this._webcam;if(!S)return Wg.error("No wencam device found! 
Can't start video!"),{success:!1,reason:"No Webcam available for starting video!"};A&&this.data.inputParams.videoDeviceId!==A&&(this.data.inputParams.videoDeviceId=A),Wg.debug("enableWebcam() | calling getUserMedia()"),this._webCamStream=await navigator.mediaDevices.getUserMedia({video:{deviceId:{exact:S.deviceId},...kg[Q],frameRate:{ideal:this.data.inputParams.forceFPS}}}),n=this._webCamStream.getVideoTracks()[0]}let Q,B;const I={videoGoogleStartBitrate:1e3};if(Wg.debug("Current device codec options are:%O",this._device.rtpCapabilities.codecs),this._forceVP8){if(B=this._device.rtpCapabilities.codecs.find(A=>"video/vp8"===A.mimeType.toLowerCase()),!B)throw new Error("desired VP8 codec+configuration is not supported")}else if(this._forceH264){if("high"===this.data.inputParams.h264Profile?B=this._device.rtpCapabilities.codecs.find(A=>"video/h264"===A.mimeType.toLowerCase()&&"4d001f"===A.parameters["profile-level-id"]):"low"===this.data.inputParams.h264Profile&&(B=this._device.rtpCapabilities.codecs.find(A=>"video/h264"===A.mimeType.toLowerCase()&&"42e01f"===A.parameters["profile-level-id"])),!B)throw new Error("desired H264 codec+configuration is not supported");Wg.debug("Selected h264 codec is:%O",B)}else if(this._forceVP9&&(B=this._device.rtpCapabilities.codecs.find(A=>"video/vp9"===A.mimeType.toLowerCase()),!B))throw new Error("desired VP9 codec+configuration is not supported");if(this._enableWebcamLayers){const A=this._device.rtpCapabilities.codecs.find(A=>"video"===A.kind);this._forceVP9&&B||"video/vp9"===A.mimeType.toLowerCase()?Q=[{maxBitrate:5e6,scalabilityMode:this._webcamScalabilityMode||"L3T3_KEY"}]:(Q=[{scaleResolutionDownBy:1,maxBitrate:1e3*this.data.inputParams.videoBitRates[0],scalabilityMode:this._webcamScalabilityMode||"L1T3"}],this._numSimulcastStreams>1&&Q.unshift({scaleResolutionDownBy:2,maxBitrate:1e3*this.data.inputParams.videoBitRates[1],scalabilityMode:this._webcamScalabilityMode||"L1T3"}),this._numSimulcastStreams>2&&Q.unshift({scaleResolutionDownBy:4,maxBitrate:1e3*this.data.inputParams.videoBitRates[2],scalabilityMode:this._webcamScalabilityMode||"L1T3"}))}if(J)try{const A=ag&&ag._localVBStream;let Q=null;if(A&&"function"==typeof A.getVideoTracks&&A.getVideoTracks().length>0&&"live"===A.getVideoTracks()[0].readyState)Q=A.getVideoTracks()[0],Wg.debug("Using existing Virtual Background track");else{const A=await ag.initializePipeline(n,J);A&&A.vbStream&&"function"==typeof A.vbStream.getVideoTracks&&A.vbStream.getVideoTracks().length>0&&(Q=A.vbStream.getVideoTracks()[0],Wg.debug("Initialized new Virtual Background pipeline"))}Q&&(n=Q)}catch(V){Wg.debug("VB init failed or skipped in enableCam")}this._webcamProducer=await this._sendTransport.produce({track:n,encodings:Q,codecOptions:I,codec:B,appData:{mediaTag:"cam-video"}}),this._producers.set("video",{id:this._webcamProducer.id,deviceLabel:S.label,type:this._getWebcamType(S),paused:this._webcamProducer.paused,track:this._webcamProducer.track,rtpParameters:this._webcamProducer.rtpParameters,codec:this._webcamProducer.rtpParameters.codecs[0].mimeType.split("/")[1]}),this.emit("videoStart",{peerId:this.data.inputParams.peerId,videoTrack:this._webcamProducer.track,type:"local"}),this._webcamProducer.on("transportclose",()=>{this._webcamProducer=null}),this._webcamProducer.on("trackended",()=>{this.disableCam().catch(()=>{})})}catch(N){Wg.error("enableWebcam() | failed:%o",N),this.emit("error",{code:"EID011",text:"Enable Webcam failed!"}),n&&n.stop()}}async 
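// ---------------------------------------------------------------------------
// Editorial sketch (not part of the published bundle). enableCam() (minified
// above) accepts resolution, codec and simulcast options, while
// startRemoteCaption() transcribes the current active speaker with
// vosk-browser models fetched from a /models/... path. Assumptions: `room` is
// an SDK room instance with .on(); the option values below are examples only.
async function startVideoAndCaptions(room, deviceId) {
  // videoResolution is "hd" | "vga" | "qvga"; simulcast uses up to three layers
  // whose bitrates (kbps, 75-800) come from videoBitRates.
  await room.enableCam({
    deviceId,
    videoResolution: "hd",
    enableWebcamLayers: true,
    numSimulcastStreams: 3,
    videoBitRates: [700, 250, 75],
  });
  // Captions follow the active speaker; small Vosk models exist for the
  // default English plus "es-ES" and "de-DE".
  room.on("remoteCaption", ({ peerId, text, isFinal }) => {
    if (isFinal) console.log(`[${peerId}] ${text}`);
  });
  await room.startRemoteCaption({ lang: "en-US" });
  // Later: await room.stopRemoteCaption({ unloadModel: true }); await room.disableCam();
}
// ---------------------------------------------------------------------------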
disableCam(){if(Wg.debug("disableWebcam()"),this._webcamProducer){this._webcamProducer.close(),this._producers.delete("video");try{let A={id:"closeProducerSDK",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,type:"video",producerId:this._webcamProducer.id};this._sendMessage(A),this.emit("videoEnd",{peerId:this.data.inputParams.peerId,videoTrack:null})}catch(A){this.emit("error",{code:"EID012",text:"Error while closing server side producer!"})}try{this._webCamStream&&"function"==typeof this._webCamStream.getTracks&&this._webCamStream.getTracks().forEach(A=>{try{A.stop()}catch(Q){}})}catch(Q){}this._webCamStream=null,this._webcamProducer=null}}async _updateWebcams(){Wg.debug("_updateWebcams()"),this._webcams=new Map,Wg.debug("_updateWebcams() | calling enumerateDevices()");const A=await navigator.mediaDevices.enumerateDevices();for(const U of A)"videoinput"===U.kind&&this._webcams.set(U.deviceId,U);const Q=Array.from(this._webcams.values()),B=Q.length,I=this._webcam.device?this._webcam.device.deviceId:void 0;Wg.debug("_updateWebcams() [webcams:%o]",Q),0===B?this._webcam.device=null:this._webcams.has(I)||(this._webcam.device=Q[0])}async _getExternalVideoStream(){if(this._externalVideoStream)return this._externalVideoStream;if(this._externalVideo.readyState<3&&await new Promise(A=>this._externalVideo.addEventListener("canplay",A)),this._externalVideo.captureStream)this._externalVideoStream=this._externalVideo.captureStream();else{if(!this._externalVideo.mozCaptureStream)throw new Error("video.captureStream() not supported");this._externalVideoStream=this._externalVideo.mozCaptureStream()}return this._externalVideoStream}_getWebcamType(A){return/(back|rear)/i.test(A.label)?(Wg.debug("_getWebcamType() | it seems to be a back camera"),"back"):(Wg.debug("_getWebcamType() | it seems to be a front camera"),"front")}async changeVideoInput({resolution:A,deviceId:Q,fps:B,vbdetails:I}){if(this._webcamProducer){return await this._changeVideoInput({resolution:A,deviceId:Q,fps:B,vbdetails:I})}return Wg.error("No webcam producer available!"),{success:!1,reason:"You are not sharing your camera yet. Camera Input can be changed to a new camera only when you are sharing an existing camera. 
"}}async _changeVideoInput({resolution:A,deviceId:Q,fps:B,vbdetails:I}){Wg.info("_changeVideoInput() | Inside"),A&&["hd","vga","qvga"].includes(A)?this._webcam.resolution=A:Wg.warn("Invalid video resolution value "),B&&Number.isInteger(B)&&B<65&&B>5?this.data.inputParams.forceFPS=B:Wg.warn("forceFPS should be a number between 5 to 65, default value is 25 fps.");let U=this._deviceList.videoDevices.find(A=>Q&&A.deviceId===Q);if(!U)return Wg.error("The selected deviceId not found!"),{success:!1,reason:"Invalid deviceId!"};this._webcam.device=U;try{this._webCamStream.getVideoTracks().forEach(A=>A.stop()),this._webCamStream=null,Wg.debug("changeVideoInput() | calling getUserMedia()"),this._webCamStream=await navigator.mediaDevices.getUserMedia({video:{deviceId:{exact:U.deviceId},...kg[this._webcam.resolution],frameRate:{ideal:this.data.inputParams.forceFPS}}});let A=this._webCamStream.getVideoTracks()[0];if(Wg.debug("The new video track is:%O",A),I)try{const Q=await ag.initializePipeline(A,I);Q&&Q.vbStream&&"function"==typeof Q.vbStream.getVideoTracks&&Q.vbStream.getVideoTracks()[0]&&(A=Q.vbStream.getVideoTracks()[0],Wg.debug("Reinitialized VB pipeline for changed camera"))}catch(F){Wg.debug("VB init skipped/failed on changeVideoInput")}await this._webcamProducer.replaceTrack({track:A});let Q=this._producers.get("video");return Q.deviceLabel=U.label,Q.type=this._getWebcamType(U),Q.track=this._webcamProducer.track,Wg.debug("Updated producer values are:%O",Q),this._producers.set("video",Q),this.emit("videoStart",{peerId:this.data.inputParams.peerId,videoTrack:A,type:"local"}),{success:!0}}catch(C){return Wg.error("Error while changing input:%O",C),{success:!1,reason:"Couldn't change video input",error:C}}}async changeAudioInput({autoGainControl:A,echoCancellation:Q,noiseSuppression:B,sampleRate:I,channelCount:U,deviceId:F}){if(this._micProducer){return await this._changeAudioInput({autoGainControl:A,echoCancellation:Q,noiseSuppression:B,sampleRate:I,channelCount:U,deviceId:F})}return{success:!1,reason:"You are not sharing your mic yet. Mic Input can be changed to a new mic only when you are sharing an existing mic. 
"}}async _changeAudioInput({autoGainControl:A,echoCancellation:Q,noiseSuppression:B,sampleRate:I,channelCount:U,deviceId:F}){A&&"boolean"==typeof A&&(this.data.inputParams.autoGainControl=A),Q&&"boolean"==typeof Q&&(this.data.inputParams.echoCancellation=Boolean(Q)),B&&"boolean"==typeof B&&(this.data.inputParams.noiseSuppression=Boolean(B)),I&&Number.isInteger(I)&&I<64e3&&I>8e3&&(this.data.inputParams.sampleRate=I),U&&Number.isInteger(U)&&U>0&&U<3&&(this.data.inputParams.channelCount=U);let C=this._deviceList.audioDevices.find(A=>F&&A.deviceId===F);if(!C)return{success:!1,reason:"Invalid deviceId!"};this._mic.device=C,this._micStream&&this._micStream.getAudioTracks().forEach(A=>A.stop()),this._micStream=null;try{this._micStream=await navigator.mediaDevices.getUserMedia({audio:{deviceId:{exact:C.deviceId},echoCancellation:this.data.inputParams.echoCancellation,noiseSuppression:this.data.inputParams.noiseSuppression,autoGainControl:this.data.inputParams.autoGainControl,sampleRate:this.data.inputParams.sampleRate,channelCount:this.data.inputParams.channelCount}});const A=this._micStream.getAudioTracks()[0];this._micProducer.replaceTrack({track:A});let Q=this._producers.get("audio");return Q.deviceLabel=C.label,Q.track=this._micProducer.track,Wg.debug("Updated producer values are:%O",Q),this._producers.set("audio",Q),this.emit("micStart",{peerId:this.data.inputParams.peerId,audioTrack:this._micProducer.track,type:"local"}),{success:!0}}catch(pg){return Wg.error("Error while changing input:%O",pg),{success:!1,reason:"Couldn't change audio input",err:pg}}}toggleVB=async A=>{if(A&&this._localCamVideo.getVideoTracks()[0]){const A=await this.initializePipeline(this._localCamVideo.getVideoTracks()[0],{type:"blur"});if(Wg.debug("response is :%o, localVBTrack is:%O",A,this._localVBStream.getVideoTracks()[0]),A.success&&this._localVBStream.getVideoTracks()[0]){if(this._roomType===HC||this._roomType===bC)if(this._camVideoProducer&&store.getState().conf.joined){await this._camVideoProducer.replaceTrack({track:this._localVBStream.getVideoTracks()[0].clone()});let A={...this._participants,[this._peerId]:{...this._participants[this._peerId],videoRef:this._localVBStream.getVideoTracks()[0]}};this._participants=A,store.dispatch(confActions.addParticipant(A))}else Wg.debug("Camvideoproducer not available! virtual background changes in the landing page! ");else if(this._roomType===LC&&this._peerConnection){const A=this._peerConnection.getSenders().find(function(A){return"video"===A.track.kind});if(Wg.debug("found sender:%o",A),A&&this._localVBStream.getVideoTracks()[0]){A.replaceTrack(this._localVBStream.getVideoTracks()[0]);let Q={...this._participants,[this._peerId]:{...this._participants[this._peerId],videoRef:this._localVBStream.getVideoTracks()[0]}};this._participants=Q,store.dispatch(confActions.addParticipant(Q))}else this.showNotification("danger","Error!","Unable to switch off virtual background! 
Try again OR contact support with code:CM-FE-RC-VB-E04")}}else Wg.error("Virtual background procesing can't be enabled")}else if(this._localVBStream?.getVideoTracks()[0].stop(),this._localVBStream=null,this._pipelineManager.stop(),this._vbDetailsNew.sourcePlayback&&(this._vbDetailsNew.sourcePlayback.htmlElement.srcObject=null),this._vbDetailsNew.sourcePlayback?.htmlElement.remove(),this._vbDetailsNew?.hiddenCanvas.remove(),this._vbDetailsNew?.hiddenImage?.remove(),this._vbDetailsNew.sourcePlayback=null,this._vbDetailsNew.hiddenCanvas=null,this._vbDetailsNew.hiddenImage=null,store.dispatch(confActions.setVBItemsStatus(A)),Wg.debug("Garbage collection completed. Set the video to original video!"),store.getState().conf.videoStatus)if(this._roomType===HC||this._roomType===bC)if(this._camVideoProducer&&store.getState().conf.joined&&this._localCamVideo.getVideoTracks()[0]&&store.getState().conf.joined&&"live"===this._localCamVideo.getVideoTracks()[0].readyState){await this._camVideoProducer.replaceTrack({track:this._localCamVideo.getVideoTracks()[0].clone()});let A={...this._participants,[this._peerId]:{...this._participants[this._peerId],videoRef:this._localCamVideo.getVideoTracks()[0]}};this._participants=A,store.dispatch(confActions.addParticipant(A))}else Wg.debug("Camvideoproducer not available! virtual background changes in the landing page! ");else if(this._roomType===LC&&this._peerConnection){const A=this._peerConnection.getSenders().find(function(A){return"video"===A.track.kind});if(Wg.debug("found sender:%o",A),A&&this._localCamVideo.getVideoTracks()[0]&&"live"===this._localCamVideo.getVideoTracks()[0].readyState){A.replaceTrack(this._localCamVideo.getVideoTracks()[0]);let Q={...this._participants,[this._peerId]:{...this._participants[this._peerId],videoRef:this._localCamVideo.getVideoTracks()[0]}};this._participants=Q,store.dispatch(confActions.addParticipant(Q))}else this.showNotification("danger","Error!","Unable to switch off virtual background! Try again OR contact support with code:CM-FE-RC-VB-E04")}};setVBDetails=async A=>{if(this._vbDetails=A,this._roomType!==HC&&this._roomType!==bC||!store.getState().conf.joined){if(this._roomType===LC&&this._peerConnection){const Q=this._peerConnection.getSenders().find(function(A){return"video"===A.track.kind});Wg.debug("found sender:%o",Q),Q?Q.replaceTrack(A.stream.getVideoTracks()[0]):this.showNotification("danger","Error!","Unable to set virtual background! Try again OR contact support with code:CM-FE-RC-VB-E02")}}else if(this._camVideoProducer){Wg.debug("Going to replace the video track for cam video producer!");try{await this._camVideoProducer.replaceTrack({track:A.stream.getVideoTracks()[0]})}catch(pg){Wg.debug("vb set error",pg)}Wg.debug("all participants",this._participants),Wg.debug("this._localCamVideo",this._localCamVideo)}else Wg.warn("Camvideo producer is not available yet!")};async enableShare({shareAudio:A=!1,enableSharingLayers:Q=!0,shareBitRates:B=[2500,1250,500]}={}){if(Wg.debug("enableShare()"),!this.data.inputParams.produce)return Wg.debug("Produce status is set to false!"),{success:!1,error:!0,code:"REID003",text:"Error while trying to start screen share. Produce flag need to set to true while joining room in order to enable screen share."};if("connected"!==this._roomStatus)return Wg.debug("Room status is not connected yet!"),{success:!1,error:!0,code:"REID001",text:`Error while trying to start screen share as room not in connected status. 
Current room status:!${this._roomStatus}`,possibleReasons:"Did you forget to call joinRoom before enabling screen share? OR if you have already initiated the joinRoom process, then try enabling screen share after some seconds."};if(this._shareProducer)return Wg.debug("Screen share is already active!"),{success:!1,warning:!0,code:"RWID001",text:"Error while trying to start screen share. Screen share is already active!"};if(!this._device.canProduce("video"))return Wg.error("enableShare() | cannot produce video"),{success:!1,error:!0,code:"REID002",text:"Error while trying to start screen share. Screen share couldnot be activated due to limitations on this device. If you think this device is capable of screen share and the problem persists even after multiple retries, please contact technical support with the error code."};let I,U;this._enableSharingLayers="boolean"!=typeof Q?Boolean(Q):Q,Array.isArray(B)&&B.length>=1&&B.length<=3&&B.every(A=>Number.isInteger(A)&&A>=500&&A<=2500)?this.data.inputParams.shareBitRates=B:this.data.inputParams.shareBitRates=[2500,1250,500];try{Wg.debug("enableShare() | calling getDisplayMedia()");const Q=await navigator.mediaDevices.getDisplayMedia({audio:!!A,video:{displaySurface:"monitor",logicalSurface:!0,cursor:!0,width:{max:1920},height:{max:1080},frameRate:{max:30}}});if(!Q)return Wg.error("Unable to capture screen."),void this.emit("error",{code:"EID013",text:"Error while trying to start screen share. Not able to capture screen!"});let B,F;U=Q.getAudioTracks()[0],U&&(this._shareAudioProducer=await this._sendTransport.produce({track:U,codecOptions:this.data.inputParams.forcePCMU?void 0:{opusStereo:!1,opusDtx:!0,opusFec:!0,opusNack:!0},codec:this.data.inputParams.forcePCMU?this._device.rtpCapabilities.codecs.find(A=>"audio/pcmu"===A.mimeType.toLowerCase()):void 0,appData:{mediaTag:"screen-audio"}}),this._producers.set("ssAudio",{id:this._shareAudioProducer.id,type:"shareAudio",paused:this._shareAudioProducer.paused,track:this._shareAudioProducer.track,rtpParameters:this._shareAudioProducer.rtpParameters,codec:this._shareAudioProducer.rtpParameters.codecs[0].mimeType.split("/")[1]}),this.emit("ssAudioStart",{peerId:this.data.inputParams.peerId,audioTrack:this._shareAudioProducer.track,type:"local"})),I=Q.getVideoTracks()[0];const C={videoGoogleStartBitrate:1e3};if(this._forceVP8){if(F=this._device.rtpCapabilities.codecs.find(A=>"video/vp8"===A.mimeType.toLowerCase()),!F)throw new Error("desired VP8 codec+configuration is not supported")}else if(this._forceH264){if("high"===this.data.inputParams.h264Profile?F=this._device.rtpCapabilities.codecs.find(A=>"video/h264"===A.mimeType.toLowerCase()&&"4d001f"===A.parameters["profile-level-id"]):"low"===this.data.inputParams.h264Profile&&(F=this._device.rtpCapabilities.codecs.find(A=>"video/h264"===A.mimeType.toLowerCase()&&"42e01f"===A.parameters["profile-level-id"])),!F)throw new Error("desired H264 codec+configuration is not supported");Wg.debug("Selected h264 codec is:%O",F)}else if(this._forceVP9&&(F=this._device.rtpCapabilities.codecs.find(A=>"video/vp9"===A.mimeType.toLowerCase()),!F))throw new Error("desired VP9 codec+configuration is not supported");if(this._enableSharingLayers){const 
A=this._device.rtpCapabilities.codecs.find(A=>"video"===A.kind);this._forceVP9&&F||"video/vp9"===A.mimeType.toLowerCase()?B=[{maxBitrate:1e3*this.data.inputParams.shareBitRates[0],scalabilityMode:this._sharingScalabilityMode||"L3T3",dtx:!0}]:(B=[{scaleResolutionDownBy:1,maxBitrate:1e3*this.data.inputParams.shareBitRates[0],scalabilityMode:this._sharingScalabilityMode||"L1T3",dtx:!0}],this._numSimulcastStreams>1&&B.unshift({scaleResolutionDownBy:2,maxBitrate:1e3*this.data.inputParams.shareBitRates[1],scalabilityMode:this._sharingScalabilityMode||"L1T3",dtx:!0}),this._numSimulcastStreams>2&&B.unshift({scaleResolutionDownBy:4,maxBitrate:1e3*this.data.inputParams.shareBitRates[2],scalabilityMode:this._sharingScalabilityMode||"L1T3",dtx:!0}))}this._shareProducer=await this._sendTransport.produce({track:I,encodings:B,codecOptions:C,codec:F,appData:{mediaTag:"screen-video"}}),this._producers.set("ssVideo",{id:this._shareProducer.id,type:"shareVideo",paused:this._shareProducer.paused,track:this._shareProducer.track,rtpParameters:this._shareProducer.rtpParameters,codec:this._shareProducer.rtpParameters.codecs[0].mimeType.split("/")[1]}),this.emit("ssVideoStart",{peerId:this.data.inputParams.peerId,videoTrack:this._shareProducer.track,type:"local"}),this._shareProducer.on("transportclose",()=>{this._shareProducer=null}),this._shareProducer.on("trackended",()=>{this.disableShare().catch(()=>{})})}catch(F){Wg.error("enableShare() | failed:%o",F),"NotAllowedError"!==F.name&&this.emit("error",{code:"EID014",text:`Error while trying to start screen share. Error is: ${F}!`}),I&&I.stop()}}async disableShare(){if(Wg.debug("disableShare()"),!this._shareProducer)return Wg.warn("Screen share doesn't seem to be on!"),void this.emit("error",{code:"EID017",text:"Error while trying to stop screen share. Is the screen share on!"});if(this._shareProducer.close(),this._shareAudioProducer){this._shareAudioProducer.close();try{let Q={id:"closeProducerSDK",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,type:"audio",producerId:this._shareAudioProducer.id};this._sendMessage(Q),this.emit("ssAudioStop",{peerId:this.data.inputParams.peerId,videoTrack:null,type:"local"});try{this._shareAudioProducer.track&&this._shareAudioProducer.track.stop&&this._shareAudioProducer.track.stop()}catch(A){}}catch(Q){this.emit("error",{code:"EID015",text:`Error while trying to stop screen share audio. Error is: ${Q}!`})}}try{let Q={id:"closeProducerSDK",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,type:"video",producerId:this._shareProducer.id};this._sendMessage(Q),this.emit("ssVideoStop",{peerId:this.data.inputParams.peerId,videoTrack:null,type:"local"});try{this._shareProducer.track&&this._shareProducer.track.stop&&this._shareProducer.track.stop()}catch(A){}}catch(Q){this.emit("error",{code:"EID016",text:`Error while trying to stop screen share video. 
Error is: ${Q}!`})}this._shareAudioProducer=null,this._shareProducer=null}logMeOutNew=async()=>{try{Wg.debug("Room","inside log me out new"),this.emit("roomClosed",{roomId:this.data.inputParams.roomId,reason:"removed_by_moderator"})}catch(A){}await this.leaveRoom()};logThisUserOutOfMeeting=A=>{if(Wg.debug("Room","inside log this user out of meeting"),A===this.data.inputParams.peerId)Wg.debug("ConferenceRoom","logging myself Out"),this.leaveRoom();else try{var Q={id:"logThisUserOut",peerId:A,moderatorPeerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId};this._sendMessage(Q)}catch(B){Wg.error("Room",B)}}}const hg="cp2p-client";class Gg{constructor(A){A?(this._debug=MB(`${hg}:${A}`),this._info=MB(`${hg}:INFO:${A}`),this._warn=MB(`${hg}:WARN:${A}`),this._error=MB(`${hg}:ERROR:${A}`)):(this._debug=MB(hg),this._info=MB(`${hg}:INFO`),this._warn=MB(`${hg}:WARN`),this._error=MB(`${hg}:ERROR`)),this._debug.log=function(){}.bind(),this._info.log=function(){}.bind(),this._warn.log=function(){}.bind(),this._error.log=function(){}.bind()}get debug(){return this._debug}get info(){return this._info}get warn(){return this._warn}get error(){return this._error}}const Mg={audio:{deviceId:{exact:void 0}},video:!1},wg={video:{deviceId:{exact:void 0},width:{min:320,ideal:640,max:1280},height:{min:240,ideal:480,max:720},frameRate:{min:15,max:30}}},sg={audio:!0,video:{width:{min:320,ideal:1280,max:1280},height:{min:240,ideal:720,max:720},aspectRatio:1.777777778,frameRate:{min:15,max:30}}},Tg=new Gg("socket");class yg extends cB.EventEmitter{constructor({url:A,roomId:Q,peerId:B,peerName:I,role:U}){super(),Tg.debug("constructor():%o ",{url:A,roomId:Q,peerId:B,peerName:I,role:U}),this._closed=!1,this._params={url:A,roomId:Q,peerId:B,peerName:I,role:U},this._socket=null,this._connectionStatus=null,this._createSocket()}get closed(){return this._closed}get connectionStatus(){return this._connectionStatus}close(){if(!this._closed){Tg.debug("close()"),this._closed=!0,this.emit("close");try{this._socket.disconnect()}catch(A){Tg.error("close() | error closing the Socket:%o",A)}}}async send(A){if(this._closed)throw new Error("transport closed");try{this._socket.send(JSON.stringify(A))}catch(Q){throw Tg.warn("send() failed:%o",Q),Q}}async request({type:A,message:Q}){return new Promise(B=>{if(this._closed)throw new Error("transport closed");try{this._socket.emit(A,JSON.stringify(Q),A=>{B(A)})}catch(I){throw Tg.warn("emit() failed:%o",I),I}})}async _createSocket(){let A=this;const Q=io(this._params.url,{query:{roomId:this._params.roomId,peerId:this._params.peerId,peerName:this._params.peerName,role:this._params.role}});Q.on("connect",()=>{Tg.debug("Socket connected!!"),A._connectionStatus=!0,A.emit("connected")}),Q.on("disconnect",()=>{Tg.debug("Socket disconnected!!"),A._connectionStatus=!1,A.emit("disconnected")}),Q.on("reconnect",()=>{Tg.debug("Socket reconnected after disconnect!!"),Q.emit("reconnected")}),Q.on("message",Q=>{const B=JSON.parse(Q);Tg.debug("New mesage received with id:%s",B.type),A.emit("message",B)}),this._socket=Q}}
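For orientation, the bundled code above implements the SDK's room workflow: the static init() verifies the HS256 session token, opens the signalling socket and emits "initSuccess", while instance methods such as enableShare(), disableCam(), changeVideoInput() and changeAudioInput() manage media producers and emit events like "videoStart", "ssVideoStart" and "error". The sketch below is an illustration only, not documented API: the package's default export name, the need to complete joinRoom() first (suggested by the error strings), and the exact method names are assumptions inferred from the strings visible in this diff.

    // Hypothetical usage sketch; names marked below are assumptions, not documented API.
    import Room from "samvyo-js-sdk"; // assumed default export name

    async function startDemo(sessionToken) {
      // init() is visible in the bundle: it verifies the HS256 session token
      // and returns a room instance (roomId/peerId are generated if omitted).
      const room = await Room.init({ sessionToken, roomId: "demo-room", peerId: "peer-1" });

      // "initSuccess" is emitted once the signalling socket connects.
      room.on("initSuccess", async () => {
        // enableShare() options seen in the bundle: shareAudio, enableSharingLayers, shareBitRates.
        // The error strings suggest the room must be joined ("connected") before this succeeds.
        await room.enableShare({ shareAudio: false, shareBitRates: [2500, 1250, 500] });
      });

      // Event payloads below match the emit() calls visible in the bundled code.
      room.on("videoStart", ({ peerId, videoTrack, type }) => {
        console.log("video started for", peerId, type, videoTrack);
      });
      room.on("error", (err) => console.error(err.code, err.text));
    }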
231
+ `,{width:g,height:R}=F,J=g/R,n=aE(A,A.VERTEX_SHADER,C),S=aE(A,A.FRAGMENT_SHADER,E),V=ZE(A,n,S,Q,B),N=A.getUniformLocation(V,"u_backgroundScale"),D=A.getUniformLocation(V,"u_backgroundOffset"),d=A.getUniformLocation(V,"u_inputFrame"),l=A.getUniformLocation(V,"u_personMask"),W=A.getUniformLocation(V,"u_background"),k=A.getUniformLocation(V,"u_coverage"),i=A.getUniformLocation(V,"u_lightWrapping"),Z=A.getUniformLocation(V,"u_blendMode");A.useProgram(V),A.uniform2f(N,1,1),A.uniform2f(D,0,0),A.uniform1i(d,0),A.uniform1i(l,1),A.uniform2f(k,0,1),A.uniform1f(i,0),A.uniform1f(Z,0);let a=null;function o(Q){a=oE(A,A.RGBA8,Q.naturalWidth,Q.naturalHeight,A.LINEAR,A.LINEAR),A.texSubImage2D(A.TEXTURE_2D,0,0,0,Q.naturalWidth,Q.naturalHeight,A.RGBA,A.UNSIGNED_BYTE,Q);let B=0,I=0,U=Q.naturalWidth,F=Q.naturalHeight;U/F<J?(F=U/J,I=(Q.naturalHeight-F)/2):(U=F*J,B=(Q.naturalWidth-U)/2);const C=U/Q.naturalWidth,E=F/Q.naturalHeight;B/=Q.naturalWidth,I/=Q.naturalHeight,A.uniform2f(N,C,E),A.uniform2f(D,B,I)}return U?.complete?o(U):U&&(U.onload=()=>{o(U)}),{render:function(){A.viewport(0,0,g,R),A.useProgram(V),A.activeTexture(A.TEXTURE1),A.bindTexture(A.TEXTURE_2D,I),null!==a&&(A.activeTexture(A.TEXTURE2),A.bindTexture(A.TEXTURE_2D,a),A.uniform1i(W,2)),A.bindFramebuffer(A.FRAMEBUFFER,null),A.drawArrays(A.TRIANGLE_STRIP,0,4)},updateCoverage:function(Q){A.useProgram(V),A.uniform2f(k,Q[0],Q[1])},updateLightWrapping:function(Q){A.useProgram(V),A.uniform1f(i,Q)},updateBlendMode:function(Q){A.useProgram(V),A.uniform1f(Z,"screen"===Q?0:1)},cleanUp:function(){A.deleteTexture(a),A.deleteProgram(V),A.deleteShader(S),A.deleteShader(n)}}}(V,d,l,i,Q,U);return{render:async function(){V.activeTexture(V.TEXTURE0),V.bindTexture(V.TEXTURE_2D,W),V.texImage2D(V.TEXTURE_2D,0,V.RGBA,V.RGBA,V.UNSIGNED_BYTE,A.htmlElement),V.bindVertexArray(D),await Z.render(),E(),F._runInference(),E(),a.render(),o.render(),h.render()},updatePostProcessingConfig:function(A){if(o.updateSigmaSpace(A.jointBilateralFilter.sigmaSpace),o.updateSigmaColor(A.jointBilateralFilter.sigmaColor),"image"===B.type){const Q=h;Q.updateCoverage(A.coverage),Q.updateLightWrapping(A.lightWrapping),Q.updateBlendMode(A.blendMode)}else if("blur"===B.type){h.updateCoverage(A.coverage)}else{const A=h;A.updateCoverage([0,.9999]),A.updateLightWrapping(0)}},cleanUp:function(){h.cleanUp(),o.cleanUp(),a.cleanUp(),Z.cleanUp(),V.deleteTexture(i),V.deleteTexture(k),V.deleteTexture(W),V.deleteBuffer(l),V.deleteBuffer(d),V.deleteVertexArray(D),V.deleteShader(N)}}}class wE{constructor(){this.pipeline=null,this.backgroundImageRef=null,this.canvasRef=null,this.fps=0,this.durations=[],this.isRunning=!1,this.timerWorker=null,this.renderTimeoutId=null,this.previousTime=0,this.beginTime=0,this.eventCount=0,this.frameCount=0,this.frameDurations=[]}async initialize(A,Q,B,I,U,F=null,C=null){this.stop(),this.backgroundImageRef=F,this.canvasRef=C;const E=1e3/B.targetFps;this.previousTime=0,this.beginTime=0,this.eventCount=0,this.frameCount=0,this.frameDurations=[],this.timerWorker=function(){const A=new Map,Q=new Blob(["\n const timeoutIds = new Map();\n \n addEventListener('message', (event) => {\n if (event.data.timeoutMs !== undefined) {\n const timeoutId = setTimeout(() => {\n postMessage({ callbackId: event.data.callbackId });\n timeoutIds.delete(event.data.callbackId);\n }, event.data.timeoutMs);\n timeoutIds.set(event.data.callbackId, timeoutId);\n } else {\n const timeoutId = timeoutIds.get(event.data.callbackId);\n if (timeoutId !== undefined) {\n clearTimeout(timeoutId);\n 
timeoutIds.delete(event.data.callbackId);\n }\n }\n });\n "],{type:"application/javascript"}),B=new Worker(URL.createObjectURL(Q));B.onmessage=Q=>{const B=A.get(Q.data.callbackId);B&&(A.delete(Q.data.callbackId),B())};let I=1;return{setTimeout:function(Q,U=0){const F=I++;return A.set(F,Q),B.postMessage({callbackId:F,timeoutMs:U}),F},clearTimeout:function(Q){A.has(Q)&&(B.postMessage({callbackId:Q}),A.delete(Q))},terminate:function(){A.clear(),B.terminate()}}}(),this.pipeline="webgl2"===B.pipeline?ME(A,this.backgroundImageRef,Q,B,this.canvasRef,U,this.timerWorker,this.addFrameEvent.bind(this)):kE(A,Q,B,this.canvasRef,I,U,this.addFrameEvent.bind(this));const g=async()=>{if(!this.isRunning)return;const A=performance.now();this.beginFrame(),await this.pipeline.render(),this.endFrame(),this.renderTimeoutId=this.timerWorker.setTimeout(g,Math.max(0,E-(performance.now()-A)))};return this.isRunning=!0,g(),{pipeline:this.pipeline,backgroundImageRef:this.backgroundImageRef,canvasRef:this.canvasRef,fps:this.fps,durations:this.getProcessingDurations()}}beginFrame(){this.beginTime=Date.now()}addFrameEvent(){const A=Date.now();this.frameDurations[this.eventCount]=A-this.beginTime,this.beginTime=A,this.eventCount++}endFrame(){const A=Date.now();this.frameDurations[this.eventCount]=A-this.beginTime,this.frameCount++,A>=this.previousTime+1e3&&(this.fps=1e3*this.frameCount/(A-this.previousTime),this.durations=[...this.frameDurations],this.previousTime=A,this.frameCount=0),this.eventCount=0}getProcessingDurations(){return this.frameDurations.length>=3?[this.frameDurations[0]||0,this.frameDurations[1]||0,this.frameDurations[2]||0]:[0,0,0]}stop(){this.isRunning=!1,this.timerWorker&&this.renderTimeoutId&&this.timerWorker.clearTimeout(this.renderTimeoutId),this.timerWorker&&(this.timerWorker.terminate(),this.timerWorker=null),this.pipeline&&(this.pipeline.cleanUp(),this.pipeline=null),this.renderTimeoutId=null}getState(){return{pipeline:this.pipeline,backgroundImageRef:this.backgroundImageRef,canvasRef:this.canvasRef,fps:this.fps,durations:this.getProcessingDurations(),isRunning:this.isRunning}}getFps(){return this.fps}getDurations(){return this.durations}isActive(){return this.isRunning&&null!==this.pipeline}async updateConfig(A,Q,B,I,U){return this.initialize(A,Q,B,I,U,this.backgroundImageRef,this.canvasRef)}destroy(){this.stop(),this.backgroundImageRef=null,this.canvasRef=null,this.fps=0,this.durations=[]}}const sE=new sB("Room");const TE=new Set(["240p","360p","480p","720p","1080p","1440p","2160p"]);function yE(A){return!!Array.isArray(A)&&A.every(A=>TE.has(A))}const cE=new sB("utils-verifyFiles"),YE=["mp4"];function tE(A,Q){try{const B=new URL(A).pathname;return B.split(".").pop().toLowerCase()===Q.toLowerCase()}catch{return!1}}const KE=new TextEncoder,LE=new TextDecoder;function HE(A){if(Uint8Array.fromBase64)return Uint8Array.fromBase64("string"==typeof A?A:LE.decode(A),{alphabet:"base64url"});let Q=A;Q instanceof Uint8Array&&(Q=LE.decode(Q)),Q=Q.replace(/-/g,"+").replace(/_/g,"/").replace(/\s/g,"");try{return function(A){if(Uint8Array.fromBase64)return Uint8Array.fromBase64(A);const Q=atob(A),B=new Uint8Array(Q.length);for(let I=0;I<Q.length;I++)B[I]=Q.charCodeAt(I);return B}(Q)}catch{throw new TypeError("The input to be decoded is not correctly encoded.")}}class bE extends Error{static code="ERR_JOSE_GENERIC";code="ERR_JOSE_GENERIC";constructor(A,Q){super(A,Q),this.name=this.constructor.name,Error.captureStackTrace?.(this,this.constructor)}}class pE extends bE{static 
code="ERR_JWT_CLAIM_VALIDATION_FAILED";code="ERR_JWT_CLAIM_VALIDATION_FAILED";claim;reason;payload;constructor(A,Q,B="unspecified",I="unspecified"){super(A,{cause:{claim:B,reason:I,payload:Q}}),this.claim=B,this.reason=I,this.payload=Q}}class rE extends bE{static code="ERR_JWT_EXPIRED";code="ERR_JWT_EXPIRED";claim;reason;payload;constructor(A,Q,B="unspecified",I="unspecified"){super(A,{cause:{claim:B,reason:I,payload:Q}}),this.claim=B,this.reason=I,this.payload=Q}}class eE extends bE{static code="ERR_JOSE_ALG_NOT_ALLOWED";code="ERR_JOSE_ALG_NOT_ALLOWED"}class mE extends bE{static code="ERR_JOSE_NOT_SUPPORTED";code="ERR_JOSE_NOT_SUPPORTED"}class qE extends bE{static code="ERR_JWS_INVALID";code="ERR_JWS_INVALID"}class xE extends bE{static code="ERR_JWT_INVALID";code="ERR_JWT_INVALID"}class zE extends bE{static code="ERR_JWS_SIGNATURE_VERIFICATION_FAILED";code="ERR_JWS_SIGNATURE_VERIFICATION_FAILED";constructor(A="signature verification failed",Q){super(A,Q)}}function XE(A,Q="algorithm.name"){return new TypeError(`CryptoKey does not support this operation, its ${Q} must be ${A}`)}function OE(A,Q){return A.name===Q}function jE(A){return parseInt(A.name.slice(4),10)}function vE(A,Q,B){switch(Q){case"HS256":case"HS384":case"HS512":{if(!OE(A.algorithm,"HMAC"))throw XE("HMAC");const B=parseInt(Q.slice(2),10);if(jE(A.algorithm.hash)!==B)throw XE(`SHA-${B}`,"algorithm.hash");break}case"RS256":case"RS384":case"RS512":{if(!OE(A.algorithm,"RSASSA-PKCS1-v1_5"))throw XE("RSASSA-PKCS1-v1_5");const B=parseInt(Q.slice(2),10);if(jE(A.algorithm.hash)!==B)throw XE(`SHA-${B}`,"algorithm.hash");break}case"PS256":case"PS384":case"PS512":{if(!OE(A.algorithm,"RSA-PSS"))throw XE("RSA-PSS");const B=parseInt(Q.slice(2),10);if(jE(A.algorithm.hash)!==B)throw XE(`SHA-${B}`,"algorithm.hash");break}case"Ed25519":case"EdDSA":if(!OE(A.algorithm,"Ed25519"))throw XE("Ed25519");break;case"ML-DSA-44":case"ML-DSA-65":case"ML-DSA-87":if(!OE(A.algorithm,Q))throw XE(Q);break;case"ES256":case"ES384":case"ES512":{if(!OE(A.algorithm,"ECDSA"))throw XE("ECDSA");const B=function(A){switch(A){case"ES256":return"P-256";case"ES384":return"P-384";case"ES512":return"P-521";default:throw new Error("unreachable")}}(Q);if(A.algorithm.namedCurve!==B)throw XE(B,"algorithm.namedCurve");break}default:throw new TypeError("CryptoKey does not support this operation")}!function(A,Q){if(!A.usages.includes(Q))throw new TypeError(`CryptoKey does not support this operation, its usages must include ${Q}.`)}(A,B)}function uE(A,Q,...B){if((B=B.filter(Boolean)).length>2){const Q=B.pop();A+=`one of type ${B.join(", ")}, or ${Q}.`}else 2===B.length?A+=`one of type ${B[0]} or ${B[1]}.`:A+=`of type ${B[0]}.`;return null==Q?A+=` Received ${Q}`:"function"==typeof Q&&Q.name?A+=` Received function ${Q.name}`:"object"==typeof Q&&null!=Q&&Q.constructor?.name&&(A+=` Received an instance of ${Q.constructor.name}`),A}function PE(A,Q,...B){return uE(`Key for the ${A} algorithm must be `,Q,...B)}function fE(A){return"CryptoKey"===A?.[Symbol.toStringTag]}function _E(A){return"KeyObject"===A?.[Symbol.toStringTag]}const $E=A=>fE(A)||_E(A);const Ag=A=>{if("object"!=typeof(Q=A)||null===Q||"[object Object]"!==Object.prototype.toString.call(A))return!1;var Q;if(null===Object.getPrototypeOf(A))return!0;let B=A;for(;null!==Object.getPrototypeOf(B);)B=Object.getPrototypeOf(B);return Object.getPrototypeOf(A)===B};const Qg=async A=>{if(!A.alg)throw new TypeError('"alg" argument is required when "jwk.alg" is not present');const{algorithm:Q,keyUsages:B}=function(A){let 
Q,B;switch(A.kty){case"AKP":switch(A.alg){case"ML-DSA-44":case"ML-DSA-65":case"ML-DSA-87":Q={name:A.alg},B=A.priv?["sign"]:["verify"];break;default:throw new mE('Invalid or unsupported JWK "alg" (Algorithm) Parameter value')}break;case"RSA":switch(A.alg){case"PS256":case"PS384":case"PS512":Q={name:"RSA-PSS",hash:`SHA-${A.alg.slice(-3)}`},B=A.d?["sign"]:["verify"];break;case"RS256":case"RS384":case"RS512":Q={name:"RSASSA-PKCS1-v1_5",hash:`SHA-${A.alg.slice(-3)}`},B=A.d?["sign"]:["verify"];break;case"RSA-OAEP":case"RSA-OAEP-256":case"RSA-OAEP-384":case"RSA-OAEP-512":Q={name:"RSA-OAEP",hash:`SHA-${parseInt(A.alg.slice(-3),10)||1}`},B=A.d?["decrypt","unwrapKey"]:["encrypt","wrapKey"];break;default:throw new mE('Invalid or unsupported JWK "alg" (Algorithm) Parameter value')}break;case"EC":switch(A.alg){case"ES256":Q={name:"ECDSA",namedCurve:"P-256"},B=A.d?["sign"]:["verify"];break;case"ES384":Q={name:"ECDSA",namedCurve:"P-384"},B=A.d?["sign"]:["verify"];break;case"ES512":Q={name:"ECDSA",namedCurve:"P-521"},B=A.d?["sign"]:["verify"];break;case"ECDH-ES":case"ECDH-ES+A128KW":case"ECDH-ES+A192KW":case"ECDH-ES+A256KW":Q={name:"ECDH",namedCurve:A.crv},B=A.d?["deriveBits"]:[];break;default:throw new mE('Invalid or unsupported JWK "alg" (Algorithm) Parameter value')}break;case"OKP":switch(A.alg){case"Ed25519":case"EdDSA":Q={name:"Ed25519"},B=A.d?["sign"]:["verify"];break;case"ECDH-ES":case"ECDH-ES+A128KW":case"ECDH-ES+A192KW":case"ECDH-ES+A256KW":Q={name:A.crv},B=A.d?["deriveBits"]:[];break;default:throw new mE('Invalid or unsupported JWK "alg" (Algorithm) Parameter value')}break;default:throw new mE('Invalid or unsupported JWK "kty" (Key Type) Parameter value')}return{algorithm:Q,keyUsages:B}}(A),I={...A};return"AKP"!==I.kty&&delete I.alg,delete I.use,crypto.subtle.importKey("jwk",I,Q,A.ext??(!A.d&&!A.priv),A.key_ops??B)};function Bg(A){return Ag(A)&&"string"==typeof A.kty}let Ig;const Ug=async(A,Q,B,I=!1)=>{Ig||=new WeakMap;let U=Ig.get(A);if(U?.[B])return U[B];const F=await Qg({...Q,alg:B});return I&&Object.freeze(A),U?U[B]=F:Ig.set(A,{[B]:F}),F},Fg=async(A,Q)=>{if(A instanceof Uint8Array)return A;if(fE(A))return A;if(_E(A)){if("secret"===A.type)return A.export();if("toCryptoKey"in A&&"function"==typeof A.toCryptoKey)try{return((A,Q)=>{Ig||=new WeakMap;let B=Ig.get(A);if(B?.[Q])return B[Q];const I="public"===A.type,U=!!I;let F;if("x25519"===A.asymmetricKeyType){switch(Q){case"ECDH-ES":case"ECDH-ES+A128KW":case"ECDH-ES+A192KW":case"ECDH-ES+A256KW":break;default:throw new TypeError("given KeyObject instance cannot be used for this algorithm")}F=A.toCryptoKey(A.asymmetricKeyType,U,I?[]:["deriveBits"])}if("ed25519"===A.asymmetricKeyType){if("EdDSA"!==Q&&"Ed25519"!==Q)throw new TypeError("given KeyObject instance cannot be used for this algorithm");F=A.toCryptoKey(A.asymmetricKeyType,U,[I?"verify":"sign"])}switch(A.asymmetricKeyType){case"ml-dsa-44":case"ml-dsa-65":case"ml-dsa-87":if(Q!==A.asymmetricKeyType.toUpperCase())throw new TypeError("given KeyObject instance cannot be used for this algorithm");F=A.toCryptoKey(A.asymmetricKeyType,U,[I?"verify":"sign"])}if("rsa"===A.asymmetricKeyType){let B;switch(Q){case"RSA-OAEP":B="SHA-1";break;case"RS256":case"PS256":case"RSA-OAEP-256":B="SHA-256";break;case"RS384":case"PS384":case"RSA-OAEP-384":B="SHA-384";break;case"RS512":case"PS512":case"RSA-OAEP-512":B="SHA-512";break;default:throw new TypeError("given KeyObject instance cannot be used for this algorithm")}if(Q.startsWith("RSA-OAEP"))return 
A.toCryptoKey({name:"RSA-OAEP",hash:B},U,I?["encrypt"]:["decrypt"]);F=A.toCryptoKey({name:Q.startsWith("PS")?"RSA-PSS":"RSASSA-PKCS1-v1_5",hash:B},U,[I?"verify":"sign"])}if("ec"===A.asymmetricKeyType){const B=new Map([["prime256v1","P-256"],["secp384r1","P-384"],["secp521r1","P-521"]]).get(A.asymmetricKeyDetails?.namedCurve);if(!B)throw new TypeError("given KeyObject instance cannot be used for this algorithm");"ES256"===Q&&"P-256"===B&&(F=A.toCryptoKey({name:"ECDSA",namedCurve:B},U,[I?"verify":"sign"])),"ES384"===Q&&"P-384"===B&&(F=A.toCryptoKey({name:"ECDSA",namedCurve:B},U,[I?"verify":"sign"])),"ES512"===Q&&"P-521"===B&&(F=A.toCryptoKey({name:"ECDSA",namedCurve:B},U,[I?"verify":"sign"])),Q.startsWith("ECDH-ES")&&(F=A.toCryptoKey({name:"ECDH",namedCurve:B},U,I?[]:["deriveBits"]))}if(!F)throw new TypeError("given KeyObject instance cannot be used for this algorithm");return B?B[Q]=F:Ig.set(A,{[Q]:F}),F})(A,Q)}catch(pg){if(pg instanceof TypeError)throw pg}let B=A.export({format:"jwk"});return Ug(A,B,Q)}if(Bg(A))return A.k?HE(A.k):Ug(A,A,Q,!0);throw new Error("unreachable")},Cg=A=>A?.[Symbol.toStringTag],Eg=(A,Q,B)=>{if(void 0!==Q.use){let A;switch(B){case"sign":case"verify":A="sig";break;case"encrypt":case"decrypt":A="enc"}if(Q.use!==A)throw new TypeError(`Invalid key for this operation, its "use" must be "${A}" when present`)}if(void 0!==Q.alg&&Q.alg!==A)throw new TypeError(`Invalid key for this operation, its "alg" must be "${A}" when present`);if(Array.isArray(Q.key_ops)){let I;switch(!0){case"verify"===B:case"dir"===A:case A.includes("CBC-HS"):I=B;break;case A.startsWith("PBES2"):I="deriveBits";break;case/^A\d{3}(?:GCM)?(?:KW)?$/.test(A):I=!A.includes("GCM")&&A.endsWith("KW")?"unwrapKey":B;break;case"encrypt"===B:I="wrapKey";break;case"decrypt"===B:I=A.startsWith("RSA")?"unwrapKey":"deriveBits"}if(I&&!1===Q.key_ops?.includes?.(I))throw new TypeError(`Invalid key for this operation, its "key_ops" must include "${I}" when present`)}return!0},gg=(A,Q,B)=>{A.startsWith("HS")||"dir"===A||A.startsWith("PBES2")||/^A(?:128|192|256)(?:GCM)?(?:KW)?$/.test(A)||/^A(?:128|192|256)CBC-HS(?:256|384|512)$/.test(A)?((A,Q,B)=>{if(!(Q instanceof Uint8Array)){if(Bg(Q)){if(function(A){return"oct"===A.kty&&"string"==typeof A.k}(Q)&&Eg(A,Q,B))return;throw new TypeError('JSON Web Key for symmetric algorithms must have JWK "kty" (Key Type) equal to "oct" and the JWK "k" (Key Value) present')}if(!$E(Q))throw new TypeError(PE(A,Q,"CryptoKey","KeyObject","JSON Web Key","Uint8Array"));if("secret"!==Q.type)throw new TypeError(`${Cg(Q)} instances for symmetric algorithms must be of type "secret"`)}})(A,Q,B):((A,Q,B)=>{if(Bg(Q))switch(B){case"decrypt":case"sign":if(function(A){return"oct"!==A.kty&&("AKP"===A.kty&&"string"==typeof A.priv||"string"==typeof A.d)}(Q)&&Eg(A,Q,B))return;throw new TypeError("JSON Web Key for this operation be a private JWK");case"encrypt":case"verify":if(function(A){return"oct"!==A.kty&&void 0===A.d&&void 0===A.priv}(Q)&&Eg(A,Q,B))return;throw new TypeError("JSON Web Key for this operation be a public JWK")}if(!$E(Q))throw new TypeError(PE(A,Q,"CryptoKey","KeyObject","JSON Web Key"));if("secret"===Q.type)throw new TypeError(`${Cg(Q)} instances for asymmetric algorithms must not be of type "secret"`);if("public"===Q.type)switch(B){case"sign":throw new TypeError(`${Cg(Q)} instances for asymmetric algorithm signing must be of type "private"`);case"decrypt":throw new TypeError(`${Cg(Q)} instances for asymmetric algorithm decryption must be of type 
"private"`)}if("private"===Q.type)switch(B){case"verify":throw new TypeError(`${Cg(Q)} instances for asymmetric algorithm verifying must be of type "public"`);case"encrypt":throw new TypeError(`${Cg(Q)} instances for asymmetric algorithm encryption must be of type "public"`)}})(A,Q,B)},Rg=async(A,Q,B)=>{if(Q instanceof Uint8Array){if(!A.startsWith("HS"))throw new TypeError(((A,...Q)=>uE("Key must be ",A,...Q))(Q,"CryptoKey","KeyObject","JSON Web Key"));return crypto.subtle.importKey("raw",Q,{hash:`SHA-${A.slice(-3)}`,name:"HMAC"},!1,[B])}return vE(Q,A,B),Q},Jg=async(A,Q,B,I)=>{const U=await Rg(A,Q,"verify");((A,Q)=>{if(A.startsWith("RS")||A.startsWith("PS")){const{modulusLength:B}=Q.algorithm;if("number"!=typeof B||B<2048)throw new TypeError(`${A} requires key modulusLength to be 2048 bits or larger`)}})(A,U);const F=((A,Q)=>{const B=`SHA-${A.slice(-3)}`;switch(A){case"HS256":case"HS384":case"HS512":return{hash:B,name:"HMAC"};case"PS256":case"PS384":case"PS512":return{hash:B,name:"RSA-PSS",saltLength:parseInt(A.slice(-3),10)>>3};case"RS256":case"RS384":case"RS512":return{hash:B,name:"RSASSA-PKCS1-v1_5"};case"ES256":case"ES384":case"ES512":return{hash:B,name:"ECDSA",namedCurve:Q.namedCurve};case"Ed25519":case"EdDSA":return{name:"Ed25519"};case"ML-DSA-44":case"ML-DSA-65":case"ML-DSA-87":return{name:A};default:throw new mE(`alg ${A} is not supported either by JOSE or your javascript runtime`)}})(A,U.algorithm);try{return await crypto.subtle.verify(F,U,B,I)}catch{return!1}};async function ng(A,Q,B){if(!Ag(A))throw new qE("Flattened JWS must be an object");if(void 0===A.protected&&void 0===A.header)throw new qE('Flattened JWS must have either of the "protected" or "header" members');if(void 0!==A.protected&&"string"!=typeof A.protected)throw new qE("JWS Protected Header incorrect type");if(void 0===A.payload)throw new qE("JWS Payload missing");if("string"!=typeof A.signature)throw new qE("JWS Signature missing or incorrect type");if(void 0!==A.header&&!Ag(A.header))throw new qE("JWS Unprotected Header incorrect type");let I={};if(A.protected)try{const Q=HE(A.protected);I=JSON.parse(LE.decode(Q))}catch{throw new qE("JWS Protected Header is invalid")}if(!((...A)=>{const Q=A.filter(Boolean);if(0===Q.length||1===Q.length)return!0;let B;for(const I of Q){const A=Object.keys(I);if(B&&0!==B.size)for(const Q of A){if(B.has(Q))return!1;B.add(Q)}else B=new Set(A)}return!0})(I,A.header))throw new qE("JWS Protected and JWS Unprotected Header Parameter names must be disjoint");const U={...I,...A.header},F=((A,Q,B,I,U)=>{if(void 0!==U.crit&&void 0===I?.crit)throw new A('"crit" (Critical) Header Parameter MUST be integrity protected');if(!I||void 0===I.crit)return new Set;if(!Array.isArray(I.crit)||0===I.crit.length||I.crit.some(A=>"string"!=typeof A||0===A.length))throw new A('"crit" (Critical) Header Parameter MUST be an array of non-empty strings when present');let F;F=void 0!==B?new Map([...Object.entries(B),...Q.entries()]):Q;for(const C of I.crit){if(!F.has(C))throw new mE(`Extension Header Parameter "${C}" is not recognized`);if(void 0===U[C])throw new A(`Extension Header Parameter "${C}" is missing`);if(F.get(C)&&void 0===I[C])throw new A(`Extension Header Parameter "${C}" MUST be integrity protected`)}return new Set(I.crit)})(qE,new Map([["b64",!0]]),B?.crit,I,U);let C=!0;if(F.has("b64")&&(C=I.b64,"boolean"!=typeof C))throw new qE('The "b64" (base64url-encode payload) Header Parameter must be a boolean');const{alg:E}=U;if("string"!=typeof E||!E)throw new qE('JWS "alg" (Algorithm) Header Parameter 
missing or invalid');const g=B&&((A,Q)=>{if(void 0!==Q&&(!Array.isArray(Q)||Q.some(A=>"string"!=typeof A)))throw new TypeError(`"${A}" option must be an array of strings`);if(Q)return new Set(Q)})("algorithms",B.algorithms);if(g&&!g.has(E))throw new eE('"alg" (Algorithm) Header Parameter value not allowed');if(C){if("string"!=typeof A.payload)throw new qE("JWS Payload must be a string")}else if("string"!=typeof A.payload&&!(A.payload instanceof Uint8Array))throw new qE("JWS Payload must be a string or an Uint8Array instance");let R=!1;"function"==typeof Q&&(Q=await Q(I,A),R=!0),gg(E,Q,"verify");const J=function(...A){const Q=A.reduce((A,{length:Q})=>A+Q,0),B=new Uint8Array(Q);let I=0;for(const U of A)B.set(U,I),I+=U.length;return B}(KE.encode(A.protected??""),KE.encode("."),"string"==typeof A.payload?KE.encode(A.payload):A.payload);let n;try{n=HE(A.signature)}catch{throw new qE("Failed to base64url decode the signature")}const S=await Fg(Q,E);if(!(await Jg(E,S,n,J)))throw new zE;let V;if(C)try{V=HE(A.payload)}catch{throw new qE("Failed to base64url decode the payload")}else V="string"==typeof A.payload?KE.encode(A.payload):A.payload;const N={payload:V};return void 0!==A.protected&&(N.protectedHeader=I),void 0!==A.header&&(N.unprotectedHeader=A.header),R?{...N,key:S}:N}const Sg=86400,Vg=/^(\+|\-)? ?(\d+|\d+\.\d+) ?(seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)(?: (ago|from now))?$/i,Ng=A=>{const Q=Vg.exec(A);if(!Q||Q[4]&&Q[1])throw new TypeError("Invalid time period format");const B=parseFloat(Q[2]);let I;switch(Q[3].toLowerCase()){case"sec":case"secs":case"second":case"seconds":case"s":I=Math.round(B);break;case"minute":case"minutes":case"min":case"mins":case"m":I=Math.round(60*B);break;case"hour":case"hours":case"hr":case"hrs":case"h":I=Math.round(3600*B);break;case"day":case"days":case"d":I=Math.round(B*Sg);break;case"week":case"weeks":case"w":I=Math.round(604800*B);break;default:I=Math.round(31557600*B)}return"-"===Q[1]||"ago"===Q[4]?-I:I},Dg=A=>A.includes("/")?A.toLowerCase():`application/${A.toLowerCase()}`;function dg(A,Q,B={}){let I;try{I=JSON.parse(LE.decode(Q))}catch{}if(!Ag(I))throw new xE("JWT Claims Set must be a top-level JSON object");const{typ:U}=B;if(U&&("string"!=typeof A.typ||Dg(A.typ)!==Dg(U)))throw new pE('unexpected "typ" JWT header value',I,"typ","check_failed");const{requiredClaims:F=[],issuer:C,subject:E,audience:g,maxTokenAge:R}=B,J=[...F];void 0!==R&&J.push("iat"),void 0!==g&&J.push("aud"),void 0!==E&&J.push("sub"),void 0!==C&&J.push("iss");for(const l of new Set(J.reverse()))if(!(l in I))throw new pE(`missing required "${l}" claim`,I,l,"missing");if(C&&!(Array.isArray(C)?C:[C]).includes(I.iss))throw new pE('unexpected "iss" claim value',I,"iss","check_failed");if(E&&I.sub!==E)throw new pE('unexpected "sub" claim value',I,"sub","check_failed");if(g&&(n=I.aud,S="string"==typeof g?[g]:g,!("string"==typeof n?S.includes(n):Array.isArray(n)&&S.some(Set.prototype.has.bind(new Set(n))))))throw new pE('unexpected "aud" claim value',I,"aud","check_failed");var n,S;let V;switch(typeof B.clockTolerance){case"string":V=Ng(B.clockTolerance);break;case"number":V=B.clockTolerance;break;case"undefined":V=0;break;default:throw new TypeError("Invalid clockTolerance option type")}const{currentDate:N}=B,D=(d=N||new Date,Math.floor(d.getTime()/1e3));var d;if((void 0!==I.iat||R)&&"number"!=typeof I.iat)throw new pE('"iat" claim must be a number',I,"iat","invalid");if(void 0!==I.nbf){if("number"!=typeof I.nbf)throw new pE('"nbf" claim must be a 
number',I,"nbf","invalid");if(I.nbf>D+V)throw new pE('"nbf" claim timestamp check failed',I,"nbf","check_failed")}if(void 0!==I.exp){if("number"!=typeof I.exp)throw new pE('"exp" claim must be a number',I,"exp","invalid");if(I.exp<=D-V)throw new rE('"exp" claim timestamp check failed',I,"exp","check_failed")}if(R){const A=D-I.iat;if(A-V>("number"==typeof R?R:Ng(R)))throw new rE('"iat" claim timestamp check failed (too far in the past)',I,"iat","check_failed");if(A<0-V)throw new pE('"iat" claim timestamp check failed (it should be in the past)',I,"iat","check_failed")}return I}async function lg(A,Q,B){const I=await async function(A,Q,B){if(A instanceof Uint8Array&&(A=LE.decode(A)),"string"!=typeof A)throw new qE("Compact JWS must be a string or Uint8Array");const{0:I,1:U,2:F,length:C}=A.split(".");if(3!==C)throw new qE("Invalid Compact JWS");const E=await ng({payload:U,protected:I,signature:F},Q,B),g={payload:E.payload,protectedHeader:E.protectedHeader};return"function"==typeof Q?{...g,key:E.key}:g}(A,Q,B);if(I.protectedHeader.crit?.includes("b64")&&!1===I.protectedHeader.b64)throw new xE("JWTs MUST NOT use unencoded payload");const U={payload:dg(I.protectedHeader,I.payload,B),protectedHeader:I.protectedHeader};return"function"==typeof Q?{...U,key:I.key}:U}const Wg=new sB("Room"),kg={small:{width:{ideal:160},height:{ideal:120}},qvga:{width:{ideal:320},height:{ideal:240}},vga:{width:{ideal:640},height:{ideal:480}},hd:{width:{ideal:1280},height:{ideal:720}}};let ig;const Zg=new class{constructor(){this.queue=new Map}push(A,Q){this.queue.set(A,Q)}get(A){return this.queue.get(A)}remove(A){this.queue.delete(A)}},ag=new class{constructor(){this._localVBStream=null,this._vbDetailsNew={},this._vbDetails=null,this._roomType=null,this._participants={},this._peerId=null,this._peerConnection=null,this._pipelineManager=null,this._updateInterval=null,this._pipelineManager=new wE,this.initializeTFLite()}getVBDetails(){return this._vbDetailsNew}getVBStream(){return this._localVBStream}hasLiveVBTrack(){try{return this._localVBStream&&"function"==typeof this._localVBStream.getVideoTracks&&this._localVBStream.getVideoTracks().length>0&&"live"===this._localVBStream.getVideoTracks()[0].readyState}catch(A){return!1}}setPipelineManager(A){this._pipelineManager=A}async initializeTFLite(){try{const A=await WE(dE);this._vbDetailsNew.tfLite=A.tflite,this._vbDetailsNew.isSIMDSupported=A.isSIMDSupported}catch(A){}}async initializePipeline(A,Q){sE.debug("initializePipeline called with videoTrack and backgroundConfig:%O,%O",A,Q);let B=null;try{const I=await this._createHiddenVideoElement(A);if(this._vbDetailsNew.hiddenCanvas=this._createHiddenCanvasElement(I),"image"===Q.type){const A=await this._createHiddenImageElement(Q);if(!A.success)return!1;B=A.hiddenImage}const U=I.htmlElement;U instanceof HTMLVideoElement&&U.paused&&(sE.debug("🎬 Video is paused, starting playback..."),await U.play());const F=await this._pipelineManager.initialize(I,Q,dE,null,this._vbDetailsNew.tfLite,B,this._vbDetailsNew.hiddenCanvas);sE.debug("Inside getUserMediaSuccess result",F),sE.debug("Pipeline manager active? 
:%s",this._pipelineManager.isActive()),sE.debug("camera stream live status:%s",A.readyState),F.pipeline.updatePostProcessingConfig(lE),this._setupPeriodicUpdates(),this._vbDetailsNew.hiddenImage=B,this._vbDetailsNew.sourcePlayback=I;const C=F.canvasRef.captureStream(30);return this._localVBStream=C,{success:!0,vbStream:C}}catch(I){sE.error("Failed to initialize pipeline:%O",I)}}async _createHiddenVideoElement(A){return new Promise(Q=>{const B=document.createElement("video");B.autoplay=!0,B.loop=!0,B.controls=!1,B.playsInline=!0,B.muted=!0,B.srcObject=new MediaStream([A]),B.style.cssText="position: fixed; top: 10px; right: 10px; width: 200px; height: 150px; border: 2px solid blue; z-index: 9999; ",document.body.appendChild(B),B.play(),B.onloadeddata=()=>{Q({htmlElement:B,width:B.videoWidth,height:B.videoHeight})}})}async _createHiddenVideoElement(A){return new Promise(Q=>{const B=document.createElement("video");B.style.display="none",B.autoplay=!0,B.loop=!0,B.controls=!1,B.playsInline=!0,B.muted=!0,B.srcObject=new MediaStream([A]),document.body.appendChild(B);const I=async()=>{try{await B.play()}catch(A){}B.readyState<2||!B.videoWidth||!B.videoHeight?requestAnimationFrame(I):Q({htmlElement:B,width:B.videoWidth,height:B.videoHeight})};B.addEventListener("loadedmetadata",I,{once:!0}),I()})}_createHiddenCanvasElement(A){const Q=document.createElement("canvas");return Q.style.display="none",Q.width=A.width,Q.height=A.height,document.body.appendChild(Q),Q}_createHiddenImageElement(A){return new Promise(async Q=>{const B=document.createElement("img");if(B.style.display="none",A?.url.includes("http"))try{(await this.testImageCORS(A?.url)).success?(B.crossOrigin="anonymous",document.body.appendChild(B),B.onload=()=>{Q({success:!0,hiddenImage:B})},B.src=A.url):(B.crossOrigin="anonymous",document.body.appendChild(B),B.onload=()=>{Q({success:!0,hiddenImage:B})})}catch(pg){B.crossOrigin="anonymous",document.body.appendChild(B),B.onload=()=>{Q({success:!0,hiddenImage:B})}}else B.crossOrigin="anonymous",document.body.appendChild(B),B.onload=()=>{Q({success:!0,hiddenImage:B})},B.src=A.url})}async testImageCORS(A,Q=1e4){return new Promise((B,I)=>{const U=new Image;U.crossOrigin="anonymous";const F=setTimeout(()=>{U.src="",I(new Error("CORS_TIMEOUT"))},Q);U.onload=()=>{clearTimeout(F);try{const Q=document.createElement("canvas");Q.width=U.width||100,Q.height=U.height||100;const I=Q.getContext("2d");I.drawImage(U,0,0),I.getImageData(0,0,1,1),B({success:!0,url:A,width:U.naturalWidth,height:U.naturalHeight,message:"CORS allowed"})}catch(Q){I(new Error("CORS_BLOCKED"))}},U.onerror=A=>{clearTimeout(F),I(new Error("IMAGE_LOAD_FAILED"))},U.src=A})}_setupPeriodicUpdates(){this._updateInterval&&clearInterval(this._updateInterval),this._updateInterval=setInterval(()=>{if(this._pipelineManager&&this._pipelineManager.isActive()){const A=this._pipelineManager.getState();this._vbDetailsNew.fps=A.fps;const[Q,B,I]=A.durations||[0,0,0];this._vbDetailsNew.resizingDuration=Q,this._vbDetailsNew.inferenceDuration=B,this._vbDetailsNew.postProcessingDuration=I}},1e3)}cleanup(){try{if(this._localVBStream&&"function"==typeof this._localVBStream.getVideoTracks)try{this._localVBStream.getVideoTracks().forEach(A=>{try{A.stop()}catch(Q){}})}catch(A){}try{this._pipelineManager&&"function"==typeof 
this._pipelineManager.stop&&this._pipelineManager.stop()}catch(A){}if(this._updateInterval){try{clearInterval(this._updateInterval)}catch(A){}this._updateInterval=null}try{if(this._vbDetailsNew?.sourcePlayback?.htmlElement){try{this._vbDetailsNew.sourcePlayback.htmlElement.srcObject=null}catch(A){}try{this._vbDetailsNew.sourcePlayback.htmlElement.remove()}catch(A){}}}catch(A){}try{if(this._vbDetailsNew?.hiddenCanvas)try{this._vbDetailsNew.hiddenCanvas.remove()}catch(A){}}catch(A){}try{if(this._vbDetailsNew?.hiddenImage)try{this._vbDetailsNew.hiddenImage.remove()}catch(A){}}catch(A){}this._localVBStream=null,this._vbDetailsNew&&(this._vbDetailsNew.sourcePlayback=null,this._vbDetailsNew.hiddenCanvas=null,this._vbDetailsNew.hiddenImage=null)}catch(A){}}};class og extends cB.EventEmitter{static async listDevices(){if(ig)return Wg.info("Device list already exists:%O",ig),{success:!0,deviceList:ig};const A=await $C();return A.success?(ig=A.deviceList,{success:!0,deviceList:A.deviceList}):{success:!1,reason:A.reason}}static async changeVB({track:A,details:Q}){if(Wg.debug("changeVB Received details are:%O",Q),Wg.debug("changeVB Received track are:%O",A),!Q)return Wg.debug("VB details not provided. Skipping VB processing."),{success:!1};if(!0===A.active){Wg.debug("Track is live, calling initializePipeline",A);const B=A.getVideoTracks()[0],I=await ag.initializePipeline(B,Q);return Wg.debug("response is :%o",I),I}throw Wg.error("Track is not live"),new Error("Track is not live")}static async init({sessionToken:A,roomId:Q,peerId:B}={}){if(!A)throw new Error("Session token is required to join the room.");try{let I;Wg.info("session token:%s",A);try{const Q=(new TextEncoder).encode("samvyo_tech_321"),{payload:B}=await lg(A,Q,{algorithms:["HS256"]});I=B,Wg.info("Decoded token:",I)}catch(pg){throw Wg.error("JWT verification failed:",pg),pg instanceof rE?new Error("Session token has expired"):pg instanceof pE?new Error("Session token not yet active"):new Error("Invalid session token: "+pg.message)}if(!I||"object"!=typeof I)throw new Error("Invalid token format");const{data:U,signallingServerUrl:F}=I;if(!U||!F)throw new Error("Missing required token data");return B||(B=JU()),Q||(Q=RU()),new og({peerId:B,roomId:Q,outputData:{sessionToken:A,innerSessionToken:U,signallingServerUrl:F}})}catch(I){throw Wg.error("Failed to initialize:",I.message),I}}constructor({peerId:A,roomId:Q,outputData:B}){super(),this._closed=!1,this._roomStatus="initialised",this._roomDisplayName=null,this._running=!1,this._cignal=null,this._socket=null,this._sendTransport=null,this._recvTransport=null,this._device=new SB.Device,this._webCamProducer=null,this._micProducer=null,this._shareProducer=null,this._shareAudioProducer=null,this._producers=new Map,this._consumers=new Map,this._peers=new Map,this._data={...B,inputParams:{peerId:A,roomId:Q,roomType:"conferencing"}},this._micStream=null,this._webCamStream=null,this._webcam={device:null,resolution:"hd"},this._mic={device:null},this._deviceList=ig||null,this._externalVideo=null,this._externalVideoStream=null,this._forceVP8=!1,this._forceH264=!1,this._forceVP9=!1,this._enableWebcamLayers=!0,this._numSimulcastStreams=3,this._enableSharingLayers=!0,this._client=GB.parse(window.navigator.userAgent),this._routerRtpCapabilities=null,this._recordingStartedByMe={},this._cignalConnected=!1,this._reconnectionInitiated=!1,this._restartIceInProgressSendTransport=!1,this._restartIceInProgressRecvTransport=!1,this._activeSpeaker=null,this._speechRecognition=null,this._transcriptStorage=new 
Map,this._audioContext=null,this._audioAnalyser=null,this._micMonitorStream=null,this._speakingWhileMutedInterval=null,this._speakingThreshold=-50,this._mutedSpeakingDetectionEnabled=!0,this._lastMutedSpeakingNotification=0,this._mutedSpeakingCooldown=3e3,this._audioTroubleShootData={lastDiagnostic:null,deviceTests:{},connectivityStatus:"unknown"},this._audioOutputDevices=[],this._currentSpeakerDevice=null,this._testAudioElements=new Map,this._speakerTestResults=new Map,this._remoteAudioElement=null,this._remoteCaption=null,this.initLocal()}get peerId(){return this._peerId}set peerId(A){this._peerId=A}get roomType(){return this._roomType}set roomType(A){this._roomType=A}get closed(){return this._closed}get data(){return this._data}set data(A){throw new Error("Setting the whole data object is not possible!")}get peers(){return this._peers}set peers(A){throw new Error("Setting the whole peers object is not possible!")}get transports(){return{produce:this._sendTransport,consume:this._recvTransport}}set transports(A){throw new Error("Setting of transport is not possible!")}get videoStream(){return this._webCamStream}get audioStream(){return this._micStream}get clientAgent(){return this._client}get activeParameters(){return this._data.inputParams}get deviceList(){return this._deviceList?this._deviceList:{videoDevices:[],audioDevices:[],audioOutputDevices:[]}}set deviceList(A){throw new Error("Setting of deviceList is not possible!")}get currentlyActiveSpeaker(){return this._activeSpeaker}set currentlyActiveSpeaker(A){throw new Error("Setting of currentActivespeaker is not possible!")}get roomDisplayName(){return this._roomDisplayName}set roomDisplayName(A){throw new Error("Setting of roomDisplayName is not possible!")}async initLocal(){const A=SB.detectDevice();Wg.debug("The device is:%O",A),await this._initSocket()}async _initSocket(){let A=this;const Q=this.data.signallingServerUrl.replace(/^(http|https):\/\//,""),B=`wss://${Q}/?sessionToken=${this.data.sessionToken}&roomId=${this.data.inputParams.roomId}&peerId=${this.data.inputParams.peerId}&roomType=${this.data.inputParams.roomType}`;Wg.info(`Going to create a new socket! with address: ${Q}`),this._socket=new gU(B,!0),this._listenToSocket(),this._socket.on("notify",({type:A,title:Q,message:B})=>{this.emit("notification",{eventType:A,eventText:`${Q}: ${B}`,roomId:this.data.inputParams.roomId,peerId:this.data.inputParams.peerId})}),this._socket.on("roomStartedP2p",A=>{Wg.info("P2P room successfully started"),this._running=!0}),this._socket.on("userError",Q=>{Wg.error("User Error happened with message:%O",Q),A.emit("notification",{eventType:Q.title,eventText:`${Q.text}`})}),this._socket.on("validationAlert",A=>{Wg.info("Validation alert happened")}),this._socket.on("alreadyActive",({title:A,text:Q})=>{this.emit("notification",{eventType:"alreadyActive",eventText:"This peer already has an active connection",roomId:this.data.inputParams.roomId,peerId:this.data.inputParams.peerId})}),this._socket.on("passwordDisabled",()=>{Wg.info("password disabled by moderator!"),this.emit("notification",{eventType:"passwordDisabled",eventText:"Password for this room has been disabled by moderator",roomId:this.data.inputParams.roomId})}),this._socket.on("close",({code:A,reason:Q})=>{if(Wg.info(`socket closed with code ${A}`),4500!==A&&4100!==A){let B=Q||"Connection to server closed unexpectedly! 
Trying to reconnect.";Wg.info(`socket close code is${A} with reason ${B}`)}else Wg.info("Socket is now closed!"),this.close()}),this._socket.on("connected",async()=>{Wg.info("Socket connected"),this.emit("initSuccess")}),this._socket.on("reconnected",async()=>{Wg.info("Socket re-connected"),A.pc&&A._sendTransport&&A._recvTransport?roomType===tC.P2P&&A.pc?(Wg.info("Socket seems to be reconnected in mid call! RestartIce needed for p2p call."),"failed"!==A.pc.iceConnectionState&&"disconnected"!==A.pc.iceConnectionState||A.restartICE()):(Wg.debug("Ice restarts for mediasoup transports for a joined peer"),A._sendTransport&&["failed","disconnected"].includes(A._sendTransport.connectionState)?(Wg.debug("Restart ice for sendtransport"),A.restartIce(A._sendTransport.id,"send")):Wg.error("Send transport not available!"),A._recvTransport&&["failed","disconnected"].includes(A._recvTransport.connectionState)?(Wg.debug("Restart ice for recvtransport"),A.restartIce(A._recvTransport.id,"recv")):Wg.error("Recv transport not available!")):(Wg.info("Connection getting connected for first time"),this.emit("initSuccess"))}),this._socket.on("defaultJoinStatus",async A=>{Wg.info(" Socket defaultjoinstatus:%O",A)})}_sendMessage(A){this._socket.send({usageType:"sdk",...A})}_listenToSocket(){this._socket.on("message",A=>{try{switch("currentlyActiveSpeaker"===A.id||"allStats"===A.id||Wg.info("message in Room is:%O",A),A.id){case"chatMessage":this.processChatMessage(A);break;case"customMessage":this.processCustomMessage(A);break;case"existingParticipants":this.onExistingParticipants(A);break;case"newPeerJoin":this.onNewPeer(A);break;case"recordingError":this.handleRecordingErrors(A);break;case"moderatorAuthentication":this.authenticateUser(A);break;case"authenticationRequested":this.authenticationRequested(A);break;case"toggleMyMic":this.toggleMyMic(A);break;case"toggleMyCamera":this.toggleMyCamera(A);break;case"logMeOut":this.logMeOutNew(A);break;case"userAlreadyAuthenticated":this.hideUserAuthenticationDialog(A);break;case"peerLeft":this.peerLeft(A);break;case"recordingStarted":this.setRecordingStatusStarted(A);break;case"recordingStopped":this.setRecordingStatusEnded(A);break;case"startDefaultRecording":this.startRecording(A);break;case"mediaToggled":this.mediaToggled(A);break;case"processingStarted":this.handleProcessingStart(A);break;case"processingCompleted":this.handleProcessingCompletion(A);break;case"processingError":this.handleProcessingError(A);break;case"createTransportResponse":this.handleCreateTransportRequest(A);break;case"connectTransportResponse":this.handleConnectTransportRequest(A);break;case"connectRecvTransportResponse":this.handleConnectRecvTransportRequest(A);break;case"sendTrackResponse":this.handleSendTrackRequest(A);break;case"recvTrackResponse":this.handleRecvTrackRequest(A);break;case"roomClosedByModerator":this.leaveRoomCommon(),this.roomClosed();break;case"currentlyActiveSpeaker":this.setCurrentlyActiveSpeaker(A);break;case"restartIceResponse":this.restartIceResponse(A);break;case"consumerClosed":this.closeConsumer(A);break;case"handRaise":this.handleHandRaise(A);break;case"updateCId":this.updateCId(A);break;case"upgradeParticipant":this.handleUpgradeParticipant(A);break;case"downgradeParticipant":this.handleDowngradeParticipant(A);break;case"switchMicOff":this.handleSwitchMicOff(A);break;case"screenShareLimitReached":this.handleScreenShareLimitReached(A);break;case"upgradeLimitReached":this.handleUpgradeLimitReached(A);break;case"modUpgradeReq":this.handleModUpgradeReq(A);break;case"lock
UnlockRoom":this.handleLockUnlockRoom(A);break;case"peersWaiting":this.handlePeersWaiting(A);break;case"remotePeerJoin":this.handleRemotePeerJoin(A);break;case"offer":Wg.debug("inside offer"),this.handleOffer(A);break;case"answer":Wg.debug("inside answer"),this.handleAnswer(A);break;case"candidate":Wg.debug("inside handle candidate"),this.handleCandidate(A.candidate);break;case"p2pRoomClosed":Wg.debug("inside p2p room close"),this.leaveRoomNewP2p("leaveAndCloseRoom");break;case"p2pUserLeft":Wg.debug("inside p2p user left"),this.userLeftRoom(A);break;case"iceRestart":this.handleIceRestart(A);break;case"iceRestarted":this.handleIceRestartResponse(A);break;case"screenShareP2p":this.handleScreenShareP2p(A);break;default:Wg.warn("Unrecognized message:%o",A)}}catch(pg){Wg.error("listentomessage:%O",pg)}})}joinRoom=async({peerName:A=null,produce:Q=!0,produceAudio:B=!0,produceVideo:I=!0,consume:U=!0,videoResolution:F="hd",forceVp8:C=!1,forceVp9:E=!1,forceH264:g=!1,h264Profile:R="high",forcePCMU:J=!1,forcePCMA:n=!1,forceFPS:S=25,enableWebcamLayers:V=!0,numSimulcastStreams:N=3,autoGainControl:D=!0,echoCancellation:d=!0,noiseSuppression:l=!0,sampleRate:W=44e3,channelCount:k=1,videoBitRates:i=[700,250,75],share:Z=!1,shareAudio:a=!1,enableSharingLayers:o=!0,shareBitRates:h=[2500,1250,500],audioDeviceId:G=null,videoDeviceId:M=null,peerType:w="participant",roomType:s=tC.CONFERENCING,authenticationRequired:T=!1,password:y=null,roomDisplayName:c=null,vbdetails:Y}={})=>{Wg.info("Going to join room"),["hd","vga","qvga"].includes(F)||(Wg.warn("Invalid video resolution value. setting it to default value of 'hd' "),F="hd"),"boolean"!=typeof Q&&(Wg.warn("Produe should either be true or false"),Q=Boolean(Q)),"boolean"!=typeof B&&(Wg.warn("ProduceAudio should either be true or false"),B=Boolean(B)),"boolean"!=typeof I&&(Wg.warn("ProduceVideo should either be true or false"),I=Boolean(I)),"boolean"!=typeof U&&(Wg.warn("Consume should either be true or false"),U=Boolean(U)),"boolean"!=typeof C&&(Wg.warn("forceVp8 should either be true or false"),C=Boolean(C)),"boolean"!=typeof E&&(Wg.warn("forceVp9 should either be true or false"),E=Boolean(E)),"boolean"!=typeof g&&(Wg.warn("forceH264 should either be true or false"),g=Boolean(g)),["high","low"].includes(R.toLowerCase())||(Wg.warn("h264Profile should either be 'high' or 'low'"),R="high"),(!Number.isInteger(S)||Number.isInteger(S)&&(S>65||S<5))&&(Wg.warn("forceFPS should be a number between 5 to 65, default value is 25 fps."),S=25),"boolean"!=typeof V&&(Wg.warn("enableWebcamLayers should either be true or false"),V=Boolean(V)),(!Number.isInteger(N)||Number.isInteger(N)&&(N>3||N<1))&&(Wg.warn("numSimulcastStreams should be a number between 1 to 3, default value is 3."),N=3),Array.isArray(i)&&i.length>=1&&i.length<=3&&i.every(A=>Number.isInteger(A)&&A>=75&&A<=800)?Wg.debug("videoBitRates values are correct"):(Wg.warn("videobitrates values should be an integer array with maximum 3 elements and minimum 1 element. 
The values in the array are '[700,250,75]'"),i=[700,250,75]),"boolean"!=typeof J&&(Wg.warn("forcePCMU should either be true or false"),J=Boolean(J)),"boolean"!=typeof n&&(Wg.warn("forcePCMA should either be true or false"),n=Boolean(n)),"boolean"!=typeof D&&(Wg.warn("autoGainControl should either be true or false"),D=Boolean(D)),"boolean"!=typeof d&&(Wg.warn("echoCancellation should either be true or false"),d=Boolean(d)),"boolean"!=typeof l&&(Wg.warn("noiseSuppression should either be true or false"),l=Boolean(l)),(!Number.isInteger(W)||Number.isInteger(W)&&(W>64e3||W<8e3))&&(Wg.warn("sampleRate should be a number between 8000 to 64000, default value is 44000 Khz."),W=44e3),(!Number.isInteger(k)||Number.isInteger(k)&&(k>2||k<1))&&(Wg.warn("sampleRate should be a number between 1 to 2, default value is 1, which is a mono audio."),k=1),"boolean"!=typeof Z&&(Wg.warn("share should either be true or false"),Z=Boolean(Z)),"boolean"!=typeof a&&(Wg.warn("shareAudio should either be true or false"),a=Boolean(a)),"boolean"!=typeof o&&(Wg.warn("enableSharingLayers should either be true or false"),o=Boolean(o)),Array.isArray(h)&&h.length>=1&&h.length<=3&&h.every(A=>Number.isInteger(A)&&A>=500&&A<=2500)?Wg.debug("shareBitRates values are correct"):(Wg.warn("sharebitrates values should be an integer array with maximum 3 elements and minimum 1 element. The values in the array are '[2500,1250,500]'"),h=[2500,1250,500]),["moderator","participant","attendee"].includes(w)?Wg.debug("peerType is valid:%s",w):(w="participant",Wg.debug("peerType is invalid:%s. By default set to: participant",w)),await this.listDevicesInternal(),this._videoResolution=F,this._forceVP8=Boolean(C),this._forceH264=Boolean(g),this._forceVP9=Boolean(E),this._enableWebcamLayers=Boolean(V),this._numSimulcastStreams=N,this._enableSharingLayers=Boolean(o);try{A||(A=fC()),this.data.inputParams={...this.data.inputParams,peerName:A,produce:Q,produceAudio:B,produceVideo:I,consume:U,videoResolution:F,forceVp8:C,forceVp9:E,forceH264:g,h264Profile:R,forceFPS:S,forcePCMU:J,forcePCMA:n,enableWebcamLayers:V,numSimulcastStreams:N,autoGainControl:D,echoCancellation:d,noiseSuppression:l,sampleRate:W,channelCount:k,videoBitRates:i,share:Z,shareAudio:a,enableSharingLayers:o,shareBitRates:h,audioDeviceId:G,videoDeviceId:M,peerType:w,roomType:s,authenticationRequired:T,password:y,roomDisplayName:c,vbdetails:Y},Wg.info("input params are:%O",this.data.inputParams);const t={id:"joinRoom",type:"r",peerId:this.data.inputParams.peerId,participantType:"attendee"===w?"viewer":w,roomType:s,roomDisplayName:c||`room-${1e5+Math.round(9e5*Math.random())}`,browser:this._client,name:this.data.inputParams.peerName,room:this.data.inputParams.roomId,authenticationRequired:T,isRoomPassword:!!y,roomPassword:y||null,usageType:"sdk"};this._sendMessage(t)}catch(t){return Wg.error("Failed to join room:",t.message),{success:!1,reason:t.message}}};authenticateUser=A=>{Wg.info("Moderator authentication requested:%O",A),this.emit("moderatorAuthentication",{moderatorName:A.moderatorName,requesterName:A.requesterName,requesterPeerId:A.requesterPeerId,text:A.title})};authenticationRequested=A=>{Wg.info("Moderator authentication requested:%O",A),this.emit("authenticationRequested",{requesterName:A.requesterName,requesterPeerId:this.data.inputParams.peerId,text:A.title})};allowRoomJoin=A=>{if(!A)return Wg.error("peerId can't be undefined!"),{success:!1,reason:"PeerId can't be undefined"};Wg.info("Allow user to join room:%O",A);let 
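The `joinRoom()` options above are validated and clamped before the `joinRoom` message is sent (resolution limited to `hd`/`vga`/`qvga`, `forceFPS` to 5–65, `numSimulcastStreams` to 1–3, `videoBitRates` to 1–3 integers in 75–800 kbps, `sampleRate` to 8000–64000, `channelCount` to 1–2). A hypothetical usage sketch follows; the export name `Room` and the call shape are assumptions, while the option names and ranges come from the validation code itself.

```js
// Hypothetical usage sketch -- only the option names/ranges are taken from the bundle.
import { Room } from 'samvyo-js-sdk'; // assumed export name

async function joinConference(sessionToken) {
  const room = await Room.init({ sessionToken });
  room.on('initSuccess', () => {
    room.joinRoom({
      peerName: 'Alice',
      produceAudio: true,
      produceVideo: true,
      videoResolution: 'hd',          // 'hd' | 'vga' | 'qvga'
      forceFPS: 25,                   // accepted range 5-65
      numSimulcastStreams: 3,         // 1-3
      videoBitRates: [700, 250, 75],  // kbps, 1-3 entries within 75-800
      peerType: 'participant',        // 'moderator' | 'participant' | 'attendee'
    });
  });
  return room;
}
```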
Q={id:"userAuthenticated",peerId:A,roomName:this.data.inputParams.roomId,moderator:this.data.inputParams.peerId};this._sendMessage(Q)};denyRoomJoin=A=>{if(!A)return Wg.error("peerId can't be undefined!"),{success:!1,reason:"PeerId can't be undefined"};Wg.info("Deny user to join room:%O",A);let Q={id:"userDenied",peerId:A,roomName:this.data.inputParams.roomId,moderator:this.data.inputParams.peerId};this._sendMessage(Q)};setSpeakingWhileMutedDetection(A=!0){this._mutedSpeakingDetectionEnabled=A,Wg.debug("Speaking while muted detection "+(A?"enabled":"disabled"))}setSpeakingThreshold(A=-50){this._speakingThreshold=A,Wg.debug(`Speaking threshold set to: ${A}dB`)}async _initializeAudioMonitoring(){if(this._micStream)try{this._audioContext=new AudioContext,this._audioAnalyser=this._audioContext.createAnalyser(),this._audioAnalyser.fftSize=512,this._audioAnalyser.smoothingTimeConstant=.3;const A={audio:{deviceId:this._mic.device?{exact:this._mic.device.deviceId}:void 0,echoCancellation:!1,noiseSuppression:!1,autoGainControl:!1}};this._micMonitorStream=await navigator.mediaDevices.getUserMedia(A);this._audioContext.createMediaStreamSource(this._micMonitorStream).connect(this._audioAnalyser),Wg.debug("Audio monitoring initialized successfully")}catch(A){Wg.error("Error initializing audio monitoring:%o",A)}}_getAudioLevel(){if(!this._audioAnalyser)return-1/0;const A=this._audioAnalyser.frequencyBinCount,Q=new Uint8Array(A);this._audioAnalyser.getByteFrequencyData(Q);let B=0;for(let F=0;F<A;F++)B+=Q[F];const I=B/A,U=20*Math.log10(I/255);return isFinite(U)?U:-1/0}_startSpeakingWhileMutedDetection(){this._mutedSpeakingDetectionEnabled&&this._audioAnalyser&&(this._speakingWhileMutedInterval=setInterval(()=>{if(!this._micProducer||!this._micProducer.paused)return;const A=this._getAudioLevel();if(A>this._speakingThreshold){const Q=Date.now();Q-this._lastMutedSpeakingNotification>this._mutedSpeakingCooldown&&(this._lastMutedSpeakingNotification=Q,this.data.inputParams.peerId===this.peerId&&(this.emit("speakingWhileMuted",{peerId:this.data.inputParams.peerId,audioLevel:A,timestamp:Q,message:"You appear to be speaking while muted"}),Wg.debug(`Speaking while muted detected - Audio level: ${A}dB`)))}},100))}_stopSpeakingWhileMutedDetection(){this._speakingWhileMutedInterval&&(clearInterval(this._speakingWhileMutedInterval),this._speakingWhileMutedInterval=null)}_cleanupAudioMonitoring(){this._stopSpeakingWhileMutedDetection(),this._micMonitorStream&&(this._micMonitorStream.getTracks().forEach(A=>A.stop()),this._micMonitorStream=null),this._audioContext&&"closed"!==this._audioContext.state&&(this._audioContext.close(),this._audioContext=null),this._audioAnalyser=null}async diagnoseAudio(){Wg.debug("Starting comprehensive audio diagnostic...");const A={timestamp:Date.now(),browser:this._client,permissions:{},devices:{},connectivity:{},currentSetup:{},recommendations:[]};try{return A.permissions=await this._testAudioPermissions(),A.devices=await this._testAudioDevices(),A.currentSetup=await this._testCurrentMicSetup(),A.connectivity=await this._testWebRTCConnectivity(),A.recommendations=this._generateAudioRecommendations(A),this._audioTroubleShootData.lastDiagnostic=A,this.emit("audioDiagnosticComplete",{peerId:this.data.inputParams.peerId,diagnostic:A}),A}catch(Q){return Wg.error("Audio diagnostic failed:",Q),A.error=Q.message,A}}async _testAudioPermissions(){const A={granted:!1,state:"unknown",error:null};try{if(navigator.permissions){const Q=await 
navigator.permissions.query({name:"microphone"});A.state=Q.state,A.granted="granted"===Q.state}const Q=await navigator.mediaDevices.getUserMedia({audio:!0,video:!1});A.granted=!0,A.actuallyGranted=!0,Q.getTracks().forEach(A=>A.stop())}catch(Q){A.error=Q.name,A.actuallyGranted=!1,Wg.error("Permission test failed:",Q)}return A}getSystemHealthStatus(){return{sdk:{roomStatus:this._roomStatus,isConnected:"connected"===this._roomStatus,micActive:!!this._micProducer&&!this._micProducer.closed,micMuted:this._micProducer?.paused,cameraActive:!!this._webcamProducer&&!this._webcamProducer.closed,screenSharing:!!this._shareProducer&&!this._shareProducer.closed},transports:{send:this._sendTransport?{id:this._sendTransport.id,connectionState:this._sendTransport.connectionState,iceState:this._sendTransport.iceState,dtlsState:this._sendTransport.dtlsState}:null,recv:this._recvTransport?{id:this._recvTransport.id,connectionState:this._recvTransport.connectionState,iceState:this._recvTransport.iceState,dtlsState:this._recvTransport.dtlsState}:null},audio:{context:this._audioContext?.state,analyser:!!this._audioAnalyser,currentLevel:this._getAudioLevel(),speaking:this._getAudioLevel()>this._speakingThreshold,monitorStream:!!this._micMonitorStream},streams:{mic:this._micStream?.active,camera:this._webCamStream?.active,micTracks:this._micStream?.getTracks()?.length||0,cameraTracks:this._webCamStream?.getTracks()?.length||0}}}async testNetworkConnectivity(){const A={timestamp:Date.now(),stun:{working:!1,latency:null},turn:{working:!1,latency:null},bandwidth:{upload:null,download:null},packetLoss:null};try{const Q=Date.now(),B=new RTCPeerConnection({iceServers:[{urls:"stun:stun.l.google.com:19302"}]}),I=(B.createDataChannel("test"),await B.createOffer());await B.setLocalDescription(I),await new Promise(A=>{B.onicecandidate=Q=>{Q.candidate||A()},setTimeout(A,5e3)}),A.stun.working="failed"!==B.iceConnectionState,A.stun.latency=Date.now()-Q,B.close()}catch(Q){}return A}async assessAudioQuality(A=5e3){if(!this._micStream)throw new Error("No active microphone stream");const Q={duration:A,samples:[],averageLevel:0,peakLevel:-1/0,quietSamples:0,clipSamples:0,quality:"unknown"};try{const B=new AudioContext,I=B.createAnalyser();I.fftSize=1024;B.createMediaStreamSource(this._micStream).connect(I);const U=I.frequencyBinCount,F=new Uint8Array(U),C=Date.now(),E=100;return new Promise(g=>{const R=setInterval(()=>{I.getByteFrequencyData(F);let E=0;for(let A=0;A<U;A++)E+=F[A];const J=E/U,n=20*Math.log10(J/255);if(isFinite(n)&&(Q.samples.push(n),Q.peakLevel=Math.max(Q.peakLevel,n),n<-70&&Q.quietSamples++,n>-3&&Q.clipSamples++),Date.now()-C>=A){clearInterval(R),B.close();const A=Q.samples.filter(A=>isFinite(A));Q.averageLevel=A.reduce((A,Q)=>A+Q,0)/A.length;const I=Q.quietSamples/A.length*100;Q.clipSamples/A.length*100>10?Q.quality="poor-clipping":I>80?Q.quality="poor-quiet":Q.averageLevel>-30?Q.quality="good":Q.averageLevel>-50?Q.quality="fair":Q.quality="poor-low",g(Q)}},E)})}catch(B){throw new Error(`Audio quality assessment failed: ${B.message}`)}}async attemptAutoRemediation(){const A=[],Q=this.getSystemHealthStatus();try{"failed"===Q.transports.send?.connectionState&&(await this.restartIce(Q.transports.send.id,"send"),A.push("Restarted send transport")),"failed"===Q.transports.recv?.connectionState&&(await this.restartIce(Q.transports.recv.id,"recv"),A.push("Restarted receive transport")),!Q.audio.analyser&&this._micStream&&(this._cleanupAudioMonitoring(),await this._initializeAudioMonitoring(),A.push("Restarted audio 
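`testNetworkConnectivity()` above probes STUN by creating a throwaway `RTCPeerConnection`, forcing ICE gathering with a data channel, and timing how long gathering takes. A sketch of the same idea; it reports success when a server-reflexive candidate is seen, which is a slightly stricter check than the bundle's `iceConnectionState !== 'failed'` test.

```js
// Sketch of a STUN reachability probe (public Google STUN server, 5 s cap, as above).
async function probeStun(stunUrl = 'stun:stun.l.google.com:19302', timeoutMs = 5000) {
  const started = Date.now();
  const pc = new RTCPeerConnection({ iceServers: [{ urls: stunUrl }] });
  pc.createDataChannel('probe');                    // forces an ICE gathering round
  await pc.setLocalDescription(await pc.createOffer());

  let sawSrflx = false;
  await new Promise((resolve) => {
    pc.onicecandidate = (e) => {
      if (!e.candidate) return resolve();           // null candidate => gathering done
      if (e.candidate.candidate.includes('typ srflx')) sawSrflx = true;
    };
    setTimeout(resolve, timeoutMs);                 // don't hang if gathering stalls
  });

  pc.close();
  return { reachable: sawSrflx, latencyMs: Date.now() - started };
}
```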
monitoring")),Q.sdk.micActive&&!Q.streams.mic&&(await this.disableMic(),await this.enableMic(),A.push("Restarted microphone"));const B=await this.diagnoseAudio();if(B.devices?.working?.length>0){const Q=B.currentSetup?.deviceLabel,I=B.devices.working[0];Q!==I.label&&(await this.changeAudioInput({deviceId:I.deviceId}),A.push(`Switched to working device: ${I.label}`))}return{success:!0,fixes:A}}catch(B){return{success:!1,error:B.message,fixes:A}}}async getEnhancedDeviceList(){try{const Q=await navigator.mediaDevices.enumerateDevices(),B=[];for(const I of Q){if("audioinput"!==I.kind)continue;const Q={deviceId:I.deviceId,label:I.label,groupId:I.groupId,capabilities:null,testResult:null};try{const A=(await navigator.mediaDevices.getUserMedia({audio:{deviceId:{exact:I.deviceId}}})).getAudioTracks()[0];Q.capabilities=A.getCapabilities(),Q.testResult=await this._testSpecificDevice(I.deviceId,1e3),A.stop()}catch(A){Q.testResult={working:!1,error:A.message}}B.push(Q)}return B}catch(A){throw new Error(`Enhanced device enumeration failed: ${A.message}`)}}async optimizeAudioSettings(){const A=this.getSystemHealthStatus(),Q=[];try{const B=await this.assessAudioQuality(3e3);if("poor-clipping"===B.quality?(await this.changeAudioInput({autoGainControl:!1,echoCancellation:!0,noiseSuppression:!0}),Q.push("Disabled auto-gain control to prevent clipping")):"poor-quiet"===B.quality&&(await this.changeAudioInput({autoGainControl:!0,echoCancellation:!0,noiseSuppression:!1}),Q.push("Enabled auto-gain control for low input levels")),"connected"===A.transports.send?.connectionState){await this._sendTransport.getStats()}return{success:!0,recommendations:Q,qualityAssessment:B}}catch(B){return{success:!1,error:B.message,recommendations:Q}}}async _testAudioDevices(){const A={available:[],current:null,working:[],failed:[]};try{const B=await navigator.mediaDevices.enumerateDevices();A.available=B.filter(A=>"audioinput"===A.kind).map(A=>({deviceId:A.deviceId,label:A.label,groupId:A.groupId})),A.current=this._mic.device;for(const I of A.available)try{const Q=await this._testSpecificDevice(I.deviceId);Q.working?A.working.push({...I,audioLevel:Q.audioLevel,testDuration:Q.duration}):A.failed.push({...I,error:Q.error})}catch(Q){A.failed.push({...I,error:Q.message})}}catch(Q){A.error=Q.message}return A}async _testSpecificDevice(A,Q=2e3){return new Promise(B=>{const I={working:!1,audioLevel:-1/0,duration:Q,error:null};let U=null,F=null,C=null;const E=()=>{U&&U.getTracks().forEach(A=>A.stop()),F&&"closed"!==F.state&&F.close()},g=setTimeout(()=>{E(),B(I)},Q);navigator.mediaDevices.getUserMedia({audio:{deviceId:{exact:A}}}).then(A=>{U=A,F=new(window.AudioContext||window.webkitAudioContext),C=F.createAnalyser(),C.fftSize=256;F.createMediaStreamSource(U).connect(C);const R=C.frequencyBinCount,J=new Uint8Array(R),n=setInterval(()=>{C.getByteFrequencyData(J);let A=0;for(let I=0;I<R;I++)A+=J[I];const Q=A/R,B=20*Math.log10(Q/255);isFinite(B)&&B>I.audioLevel&&(I.audioLevel=B),Q>0&&(I.working=!0)},100);setTimeout(()=>{clearInterval(n),clearTimeout(g),E(),B(I)},Q-100)}).catch(A=>{clearTimeout(g),I.error=A.message,E(),B(I)})})}async _testCurrentMicSetup(){const 
A={isActive:!1,isProducing:!1,isMuted:!1,audioLevel:-1/0,deviceLabel:null,streamActive:!1,producerStats:null};try{if(A.isActive=!!this._micProducer,A.isProducing=!(!this._micProducer||this._micProducer.closed),A.isMuted=!(!this._micProducer||!this._micProducer.paused),A.deviceLabel=this._mic.device?.label,A.streamActive=!(!this._micStream||!this._micStream.active),this._micStream){const Q=this._micStream.getAudioTracks();Q.length>0&&(A.trackEnabled=Q[0].enabled,A.trackReadyState=Q[0].readyState,A.trackSettings=Q[0].getSettings())}if(this._audioAnalyser&&(A.audioLevel=this._getAudioLevel()),this._micProducer&&this._sendTransport)try{const Q=await this._sendTransport.getStats();A.producerStats=Q}catch(Q){A.producerStatsError=Q.message}}catch(Q){A.error=Q.message}return A}async _testWebRTCConnectivity(){const A={sendTransport:null,recvTransport:null,iceConnectionState:null,dtlsState:null,error:null};try{this._sendTransport&&(A.sendTransport={id:this._sendTransport.id,connectionState:this._sendTransport.connectionState,iceState:this._sendTransport.iceState,dtlsState:this._sendTransport.dtlsState}),this._recvTransport&&(A.recvTransport={id:this._recvTransport.id,connectionState:this._recvTransport.connectionState,iceState:this._recvTransport.iceState,dtlsState:this._recvTransport.dtlsState})}catch(Q){A.error=Q.message}return A}_generateAudioRecommendations(A){const Q=[];return A.permissions.granted||Q.push({type:"critical",title:"Microphone Permission Required",description:"Please allow microphone access in your browser",action:"Grant microphone permission in browser settings"}),0===A.devices.working.length&&Q.push({type:"critical",title:"No Working Audio Devices",description:"No functioning microphone devices detected",action:"Check if microphone is connected and enabled in system settings"}),A.currentSetup.isActive&&!A.currentSetup.streamActive&&Q.push({type:"warning",title:"Current Microphone Not Working",description:"The selected microphone device appears to be inactive",action:"Try switching to a different microphone device"}),"failed"===A.connectivity.sendTransport?.connectionState&&Q.push({type:"critical",title:"Connection Failed",description:"Unable to establish audio connection to server",action:"Check internet connection and try rejoining the room"}),A.currentSetup.audioLevel<-60&&Q.push({type:"info",title:"Low Audio Level",description:"Your microphone level appears to be very low",action:"Check microphone volume in system settings or move closer to microphone"}),Q}async quickAudioTest(){const A={working:!1,issues:[],timestamp:Date.now()};try{if(!this._micProducer)return A.issues.push("Microphone not active"),A;if(this._micProducer.closed)return A.issues.push("Microphone producer is closed"),A;if(!this._micStream||!this._micStream.active)return A.issues.push("Microphone stream is not active"),A;const Q=this._micStream.getAudioTracks();if(0===Q.length)return A.issues.push("No audio tracks found"),A;if("live"!==Q[0].readyState)return A.issues.push("Audio track is not live"),A;if("connected"!==this._sendTransport.connectionState)return A.issues.push(`Send transport not connected: ${this._sendTransport.connectionState}`),A;A.working=!0}catch(Q){A.issues.push(`Test error: ${Q.message}`)}return this.emit("quickAudioTestComplete",{peerId:this.data.inputParams.peerId,result:A}),A}async listAudioOutputDevices(){try{if(this._deviceList&&this._deviceList.audioOutputDevices)return Wg.debug("Using cached audio output 
devices:",this._deviceList.audioOutputDevices),{success:!0,devices:this._deviceList.audioOutputDevices};const A=await navigator.mediaDevices.enumerateDevices();return this._audioOutputDevices=A.filter(A=>"audiooutput"===A.kind),Wg.debug("Found audio output devices:",this._audioOutputDevices),{success:!0,devices:this._audioOutputDevices.map(A=>({deviceId:A.deviceId,label:A.label||`Speaker ${A.deviceId.slice(-4)}`,groupId:A.groupId}))}}catch(A){return Wg.error("Failed to enumerate audio output devices:",A),{success:!1,error:A.message}}}async testSpeakerDevice(A,Q={}){Wg.debug("Testing speaker device",A);const{testDuration:B=3e3,testFrequencies:I=[440,1e3,2e3],volume:U=.3,requireUserConfirmation:F=!0}=Q,C=`speaker-test-${A}-${Date.now()}`;try{if(!HTMLAudioElement.prototype.setSinkId)throw new Error("setSinkId is not supported in this browser");const Q={deviceId:A,testId:C,timestamp:Date.now(),success:!1,frequencies:[],volume:U,duration:B,userConfirmed:!1,error:null},E=new Audio;E.volume=U,E.loop=!1,await E.setSinkId(A),Q.setSinkId=!0,this._testAudioElements.set(C,E);for(const A of I){const U=await this._playTestTone(E,A,B/I.length);Q.frequencies.push(U)}if(F){const B=await this._requestUserConfirmation(A,Q);Q.userConfirmed=B,Q.success=B}else Q.success=Q.frequencies.every(A=>A.played);return this._speakerTestResults.set(A,Q),this.emit("speakerTestComplete",{deviceId:A,testResult:Q}),{success:!0,testResult:Q}}catch(E){Wg.error(`Speaker test failed for device ${A}:`,E);const Q={deviceId:A,testId:C,timestamp:Date.now(),success:!1,error:E.message};return this._speakerTestResults.set(A,Q),this.emit("speakerTestComplete",{deviceId:A,testResult:Q}),{success:!1,error:E.message,testResult:Q}}finally{this._cleanupTestAudio(C)}}async _playTestTone(A,Q,B){return new Promise((I,U)=>{try{const U=new window.AudioContext,F=U.createOscillator(),C=U.createGain(),E=U.createMediaStreamDestination();F.connect(C),C.connect(E),F.frequency.setValueAtTime(Q,U.currentTime),F.type="sine",C.gain.setValueAtTime(0,U.currentTime),C.gain.linearRampToValueAtTime(.1,U.currentTime+.1),C.gain.linearRampToValueAtTime(.1,U.currentTime+B/1e3-.1),C.gain.linearRampToValueAtTime(0,U.currentTime+B/1e3),A.srcObject=E.stream,F.start(),F.stop(U.currentTime+B/1e3);const g=A.play();void 0!==g&&g.then(()=>{setTimeout(()=>{F.disconnect(),C.disconnect(),U.close(),I({frequency:Q,duration:B,played:!0,timestamp:Date.now()})},B)}).catch(A=>{U.close(),I({frequency:Q,duration:B,played:!1,error:A.message,timestamp:Date.now()})})}catch(F){U(F)}})}async _requestUserConfirmation(A,Q){return new Promise(B=>{this.emit("speakerTestConfirmationRequired",{deviceId:A,testResult:Q,onConfirm:A=>B(A)}),setTimeout(()=>B(!1),1e4)})}async testCurrentSpeakerOutput(){try{const Q={timestamp:Date.now(),currentDevice:this._currentSpeakerDevice,remoteAudioPresent:!1,audioElementFound:!1,volumeLevel:0,success:!1},B=document.querySelectorAll("audio"),I=document.querySelectorAll("video");let U=[];if(B.forEach(A=>{A.srcObject&&A.srcObject.getAudioTracks().length>0&&U.push(A)}),I.forEach(A=>{A.srcObject&&A.srcObject.getAudioTracks().length>0&&U.push(A)}),Q.audioElementFound=U.length>0,Q.elementsCount=U.length,U.length>0){for(const B of U)try{if(B.srcObject){const A=new AudioContext,I=A.createMediaStreamSource(B.srcObject),U=A.createAnalyser();I.connect(U),U.fftSize=256;const F=U.frequencyBinCount,C=new Uint8Array(F);U.getByteFrequencyData(C);const E=C.reduce((A,Q)=>A+Q,0)/F;Q.volumeLevel=Math.max(Q.volumeLevel,E),Q.remoteAudioPresent=E>0,A.close()}}catch(A){Wg.debug("Could 
not analyze remote audio element:",A)}Q.success=Q.remoteAudioPresent}return this.emit("currentSpeakerTestComplete",Q),{success:!0,testResult:Q}}catch(A){return Wg.error("Current speaker test failed:",A),{success:!1,error:A.message}}}async diagnoseSpeakers(){Wg.debug("Starting comprehensive speaker diagnostic");const A={timestamp:Date.now(),browser:this._client,support:{},devices:{},currentOutput:{},remoteAudio:{},recommendations:[]};try{A.support={setSinkId:!!HTMLAudioElement.prototype.setSinkId,enumerateDevices:!!navigator.mediaDevices?.enumerateDevices,audioContext:!!window.AudioContext};const Q=await this.listAudioOutputDevices();A.devices={available:Q.devices||[],count:Q.devices?.length||0,hasDefault:Q.devices?.some(A=>"default"===A.deviceId)||!1};const B=await this.testCurrentSpeakerOutput();return A.currentOutput=B.testResult,A.remoteAudio=this._analyzeRemoteAudioSetup(),A.recommendations=this._generateSpeakerRecommendations(A),this.emit("speakerDiagnosticComplete",{diagnostic:A}),A}catch(Q){return Wg.error("Speaker diagnostic failed:",Q),A.error=Q.message,A}}_analyzeRemoteAudioSetup(){const A={consumers:0,activeStreams:0,audioElements:0,videoElements:0,totalTracks:0};try{this._consumers&&this._consumers.forEach(Q=>{Q&&"audio"===Q.kind&&!Q.closed&&A.consumers++});const Q=document.querySelectorAll("audio"),B=document.querySelectorAll("video");Q.forEach(Q=>{A.audioElements++,Q.srcObject&&Q.srcObject.getAudioTracks().length>0&&(A.activeStreams++,A.totalTracks+=Q.srcObject.getAudioTracks().length)}),B.forEach(Q=>{A.videoElements++,Q.srcObject&&Q.srcObject.getAudioTracks().length>0&&(A.activeStreams++,A.totalTracks+=Q.srcObject.getAudioTracks().length)}),0===A.activeStreams&&0===A.totalTracks&&A.consumers>0&&(A.activeStreams=A.consumers,A.totalTracks=A.consumers)}catch(Q){Wg.error("Remote audio analysis failed:",Q),A.error=Q.message}return A}_generateSpeakerRecommendations(A){const Q=[];return A.support.setSinkId||Q.push({type:"critical",title:"Audio Output Selection Not Supported",description:"Your browser does not support changing audio output devices",actions:["Use Chrome, Edge, or Firefox for audio output selection","Change system default audio device instead","Consider using a different browser"]}),0===A.devices.count&&Q.push({type:"critical",title:"No Audio Output Devices Found",description:"No speakers or headphones detected",actions:["Check if speakers/headphones are connected","Verify audio drivers are installed","Try refreshing the page after connecting devices"]}),0===A.remoteAudio.consumers&&0===A.remoteAudio.activeStreams&&Q.push({type:"warning",title:"No Remote Audio Detected",description:"Not receiving audio from other participants",actions:["Ask other participants to unmute their microphones","Check if you have muted remote participants","Verify your internet connection"]}),!A.currentOutput.success&&A.currentOutput.audioElementFound&&Q.push({type:"warning",title:"Audio Output Issues",description:"Remote audio present but may not be playing correctly",actions:["Check system volume levels","Try switching to a different audio output device","Verify the selected output device is working"]}),0===Q.length&&Q.push({type:"success",title:"Audio Output System Healthy",description:"Speaker setup appears to be working correctly",actions:["Your audio output is configured properly","Run individual device tests if experiencing issues"]}),Q}async progressiveTestAllSpeakers(A={}){Wg.debug("Progressive speaker test 
started");const{testDuration:Q=2e3,requireConfirmation:B=!0,volume:I=.2}=A;try{const A=await this.listAudioOutputDevices();if(!A.success)throw new Error("Could not enumerate audio devices");const U=[];let F=0;this.emit("progressiveSpeakerTestStarted",{totalDevices:A.devices.length,testDuration:Q,requireConfirmation:B});for(const C of A.devices){F++,this.emit("progressiveSpeakerTestProgress",{currentIndex:F,totalDevices:A.devices.length,currentDevice:C,progress:F/A.devices.length*100});const E=await this.testSpeakerDevice(C.deviceId,{testDuration:Q,volume:I,requireUserConfirmation:B,testFrequencies:[1e3]});U.push({device:C,...E}),await new Promise(A=>setTimeout(A,500))}return this.emit("progressiveSpeakerTestComplete",{results:U,workingDevices:U.filter(A=>A.success),failedDevices:U.filter(A=>!A.success)}),{success:!0,results:U,summary:{total:U.length,working:U.filter(A=>A.success).length,failed:U.filter(A=>!A.success).length}}}catch(U){return Wg.error("Progressive speaker test failed:",U),{success:!1,error:U.message}}}_cleanupTestAudio(A){if(Wg.debug("Cleaning up test audio"),this._testAudioElements.has(A)){const B=this._testAudioElements.get(A);try{B.pause(),B.srcObject=null,B.src=""}catch(Q){Wg.debug("Error cleaning up test audio:",Q)}this._testAudioElements.delete(A)}}getCurrentSpeakerDevice(){return Wg.debug("Getting current speaker device"),this._currentSpeakerDevice}async meetingSafeSpeakerTest(A){return Wg.debug("Meeting safe speaker test started"),this.testSpeakerDevice(A,{testDuration:1500,testFrequencies:[800],volume:.1,requireUserConfirmation:!0})}hideUserAuthenticationDialog=A=>{Wg.debug("authentication already done message:%o",A),this.emit("moderatorAuthStatus",{requesterId:A.requesterId,moderatorActed:A.peerId})};onNewPeer(A){const{peerId:Q,displayName:B,participantType:I}=A;this._peers.set(Q,{displayName:B,participantType:I,consumers:[]}),this.emit("newPeer",{peerId:Q,peerName:B,type:this.data.inputParams.peerId===Q?"local":"remote",peerRole:I})}async onExistingParticipants(A){if(Wg.debug("Onexisting participant message:%O",A),this._routerRtpCapabilities=A.routerRtpCapabilities,this._roomStatus="connected",this._roomDisplayName=A.roomDisplayName,this._running=!0,this._socket.updateRoomJoinStatus(!0),this.emit("newPeer",{peerId:this.data.inputParams.peerId,peerName:this.data.inputParams.peerName,type:"local",peerRole:this.data.inputParams.peerType}),this.data.inputParams.produce?await this._createSendTransport():Wg.debug("Produce is false!"),this.data.inputParams.consume){await this._createRecvTransport();let Q=this;A.peers&&A.peers.length>0&&A.peers.forEach(A=>{Q.emit("newPeer",{peerId:A.peerId,peerName:A.name,type:"remote",peerRole:A.participantType})})}else Wg.debug("Consume is false!")}sendCustomMessage=(A,Q="general",B=null,I,U,F={})=>{const C={id:"customMessage",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,data:A,type:Q,recieverPeerId:B,senderType:I,messageType:U,customData:F};Wg.debug("Room sendCustomMessage",C),this._sendMessage(C)};processCustomMessage=A=>{Wg.debug("Room processCustomMessage",A),this.emit("customMessage",A)};updateCId=A=>{Wg.debug("Received updateCId 
message",A),A.targetPeerId!==this.data.inputParams.peerId&&A.targetPeerId||this.emit("updateCId",{message:A,cId:A.cId,peerId:this.data.inputParams.peerId,isMyCId:A.targetPeerId===this.data.inputParams.peerId})};setCurrentlyActiveSpeaker(A){const{peerId:Q,volume:B}=A.activeSpeaker;this._activeSpeaker=A.activeSpeaker,this.emit("activeSpeaker",{peerId:Q,volume:B})}_createSendTransport=async()=>{Wg.debug("Room _createSendTransport");try{this._device.loaded||(Wg.debug("Room _createSendTransport","Going to load device with routerrtpcapabilities"),await this._device.load({routerRtpCapabilities:this._routerRtpCapabilities}));let A="send";this._sendTransport||this._sendMessage({id:"createTransport",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,direction:A})}catch(pg){Wg.error("Room _createSendTransport",pg)}};_createRecvTransport=async()=>{this._device.loaded||(Wg.debug("loading device for creating recv transport"),await this._device.load({routerRtpCapabilities:this._routerRtpCapabilities}));this._recvTransport||(Wg.debug("receive transport created"),this._sendMessage({id:"createTransport",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,direction:"recv"}))};handleCreateTransportRequest=async A=>{Wg.debug("Room handleCreateTransportRequest():%O",A);let Q,{transportOptions:B,direction:I}=A;try{if("recv"===I)Q=await this._device.createRecvTransport(B),Wg.debug("Room",`handleCreateTransportRequest() recv transport created ${Q.id}`),this._recvTransport=Q,this.handleRecvTransportListeners();else{if("send"!==I)throw new Error(`bad transport 'direction': ${I}`);Q=await this._device.createSendTransport(B),Wg.debug("Room",`handleCreateTransportRequest() send transport created [id:%s]${Q.id}`),this._sendTransport=Q,this.handleSendTransportListeners(),this.produceMedia()}}catch(U){Wg.error("Room handleCreateTransportRequest() failed to create transport [error:%o]",U)}};handleSendTransportListeners=()=>{this._sendTransport.on("connect",this.handleTransportConnectEvent),this._sendTransport.on("produce",this.handleTransportProduceEvent);let A=this;this._sendTransport.on("connectionstatechange",async Q=>{if(Wg.debug(`ConferenceRoom sendTransport connectionState ${Q} & socketconnection state ${this._socket.wsManager.connectionState}`),"disconnected"===Q)setTimeout(async()=>{if("disconnected"===Q)if(Wg.debug("Connection state for Send Transport is:%s even after 5 seconds",Q),Wg.warn(`sendTransport connectionState ${Q} & socketconnection state ${this._socket.wsManager.connectionState}`),"connected"===A._socket.wsManager.connectionState)A.restartIce(A._sendTransport.id,"send");else{for(;"connected"!==A._socket.wsManager.connectionState;)Wg.debug(`socket not yet ready with state- ${A._socket.wsManager.connectionState}`),await FE(1e3);"connected"===this._roomStatus&&A.restartIce(A._sendTransport.id,"send")}},5e3);else if("failed"===Q)if(Wg.warn(`sendTransport connectionState ${Q} & socketconnection state ${this._socket.wsManager.connectionState}`),"connected"===A._socket.wsManager.connectionState)A.restartIce(A._sendTransport.id,"send");else{for(;"connected"!==A._socket.wsManager.connectionState;)Wg.debug(`handleSendTransportListeners() | socket not yet ready with state- ${A._socket.wsManager.connectionState}`),await FE(1e3);"connected"===this._roomStatus&&A.restartIce(A._sendTransport.id,"send")}Wg.debug("ConferenceRoom",`send transport connection state change [state:%s]${Q}`)})};handleTransportConnectEvent=({dtlsParameters:A},Q,B)=>{try{const 
B=A=>{Wg.debug("connect-transport action"),Q(),Zg.remove("connectTransport")};Zg.push("connectTransport",B);let I={id:"connectTransport",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,transportId:this._sendTransport.id,dtlsParameters:A,direction:"send"};this._sendMessage(I)}catch(I){Wg.error("handleTransportConnectEvent() failed [error:%o]",I),B(I)}};handleTransportProduceEvent=({kind:A,rtpParameters:Q,appData:B},I,U)=>{try{const U=A=>{Wg.debug("handleTransportProduceEvent callback [data:%o]",A),I({id:A.producerId}),Zg.remove("produce")};Zg.push("produce",U);let F="cam-audio"===B.mediaTag&&void 0!==this.data.inputParams.audioStatus&&!this.data.inputParams.audioStatus;Wg.debug(`handleTransportProduceEvent() | pause status->${F}`);let C={id:"sendTrack",transportId:this._sendTransport.id,peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,kind:A,rtpParameters:Q,paused:F,appData:B,clientOs:this._client.os.name,browser:this._client.browser};this._sendMessage(C)}catch(F){Wg.error("handleTransportProduceEvent() failed [error:%o]",F),U(F)}};produceMedia=async()=>{this.data.inputParams.produce?(this.data.inputParams.produceAudio?this.enableMic({deviceId:this.data.inputParams.audioDeviceId?this.data.inputParams.audioDeviceId:null}):Wg.debug("No need to produce audio!"),this._device.canProduce("video")&&(this.data.inputParams.produceVideo?(Wg.debug("going to enable cam with vbdetails",this.data.inputParams.vbdetails),this.enableCam({deviceId:this.data.inputParams.videoDeviceId?this.data.inputParams.videoDeviceId:null,vbdetails:this.data.inputParams.vbdetails})):Wg.debug("No need to produce video!"),this.data.inputParams.share&&this.enableShare({shareAudio:this.data.inputParams.shareAudio,enableSharingLayers:this._enableSharingLayers,shareBitRates:this.data.inputParams.shareBitRates}))):Wg.warn("produce is false!")};handleRecvTransportListeners=async()=>{this._recvTransport.on("connect",this.handleRecvTransportConnectEvent);let A=this;this._recvTransport.on("connectionstatechange",async Q=>{if("disconnected"===Q)setTimeout(async()=>{if("disconnected"===Q)if(Wg.warn("Connection state for Recv Transport is:%s even after 5 seconds:%s, socket state",Q,this._socket.wsManager.connectionState),"connected"===this._socket.wsManager.connectionState)A.restartIce(A._recvTransport.id,"recv");else{for(;"connected"!==this._socket.wsManager.connectionState;)Wg.debug(`handleRecvTransportListeners() | socket not yet ready with state- ${this._socket.wsManager.connectionState}`),await FE(1e3);"connected"===this._roomStatus&&A.restartIce(A._recvTransport.id,"recv")}},5e3);else if("failed"===Q)if(Wg.warn("Connection state for Recv Transport is:%s even after 5 seconds:%s, socket state",Q,this._socket.wsManager.connectionState),"connected"===this._socket.wsManager.connectionState)A.restartIce(A._recvTransport.id,"recv");else{for(;"connected"!==this._socket.wsManager.connectionState;)Wg.debug(`handleRecvTransportListeners() | socket not yet ready with state- ${this._socket.wsManager.connectionState}`),await FE(1e3);"connected"===this._roomStatus&&A.restartIce(A._recvTransport.id,"recv")}else Wg.debug("Connection state for Recv Transport is:%s even after 5 seconds:%s, socket state",Q,this._socket.wsManager.connectionState)});let Q={id:"transportsAvailable",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,rtpCapabilities:this._device.rtpCapabilities};this._sendMessage(Q)};handleRecvTransportConnectEvent=({dtlsParameters:A},Q,B)=>{try{const 
B=A=>{Wg.debug("ConferenceRoom","connect-recv-transport action"),Q(),Zg.remove("connectRecvTransport")};Zg.push("connectRecvTransport",B);let I={id:"connectTransport",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,transportId:this._recvTransport.id,dtlsParameters:A,direction:"recv"};this._sendMessage(I)}catch(I){Wg.error("handleTransportConnectEvent() failed [error:%o]",I),B(I)}};handleRecvTrackRequest=async A=>{if(Wg.debug("Room handleRecvTrackRequest",A),!this.data.inputParams.consume)return void Wg.warn("I do not want to consume");let{senderPeerId:Q,mediaTag:B,sender:I,audioStatus:U,videoStatus:F,senderParticipantType:C,type:E,producerPaused:g,...R}=A;Wg.debug("New consumer created",R),R.id=R.consumerId,delete R.consumerId,Wg.debug("ConferenceRoom",`senderPeerId is ->${Q}`);let J=await this._recvTransport.consume({...R,streamId:`${Q}-${"screen-video"===B||"screen-audio"===B?"share":"mic-webcam"}`,appData:{peerId:Q,mediaTag:B}});for(;this._recvTransport&&"connected"!==this._recvTransport.connectionState;)Wg.debug(`recv transport connstate${this._recvTransport.connectionState}`),await FE(100);this._consumers.set(J.id,J),J.on("transportclose",()=>{this._consumers.delete(J.id)});const{spatialLayers:n,temporalLayers:S}=SB.parseScalabilityMode(J.rtpParameters.encodings[0].scalabilityMode),V=this._peers.get(this.data.inputParams.peerId);Wg.debug(`Consumer created for sender peerId ${Q} for kind ${J.kind} for receiver peerId ${this.data.inputParams.peerId}`),Wg.info("The old peer data is :%O",V),V?(V["screen-video"===B||"screen-audio"===B?`ss${J.kind}`:J.kind]={consumerId:J.id,type:E,locallyPaused:!1,remotelyPaused:g,rtpParameters:J.rtpParameters,spatialLayers:n,temporalLayers:S,preferredSpatialLayer:n-1,preferredTemporalLayer:S-1,priority:1,codec:J.rtpParameters.codecs[0].mimeType.split("/")[1],track:J.track,share:"screen-video"===B||"screen-audio"===B},Wg.info("The new peer data is :%O",V),this._peers.set(this.data.inputParams.peerId,V)):(Wg.info("Peer not found!"),this._peers.set(this.data.inputParams.peerId,{["screen-video"===B||"screen-audio"===B?`ss${J.kind}`:J.kind]:{consumerId:J.id,type:E,locallyPaused:!1,remotelyPaused:g,rtpParameters:J.rtpParameters,spatialLayers:n,temporalLayers:S,preferredSpatialLayer:n-1,preferredTemporalLayer:S-1,priority:1,codec:J.rtpParameters.codecs[0].mimeType.split("/")[1],track:J.track,share:"screen-video"===B||"screen-audio"===B}})),await this.resumeConsumer(J),Wg.debug("Going to emit mic start / videostart"),"audio"===J.kind?"screen-audio"===B?this.emit("ssAudioStart",{peerId:Q,audioTrack:J.track,type:"remote"}):this.emit("micStart",{peerId:Q,audioTrack:J.track,type:"remote"}):"video"===J.kind&&("screen-video"===B?this.emit("ssVideoStart",{peerId:Q,videoTrack:J.track,type:"remote"}):this.emit("videoStart",{peerId:Q,videoTrack:J.track,type:"remote"}))};resumeConsumer=async A=>{if(A){Wg.debug("resume consumer",A.appData.peerId,A.appData.mediaTag);try{let Q={id:"resumeConsumer",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,consumerId:A.id};this._sendMessage(Q),await A.resume()}catch(Q){Wg.error("resumeConsumer error",Q)}}};handleConnectTransportRequest=async A=>{Wg.debug("handleTransportConnectRequest()");try{const Q=Zg.get("connectTransport");if(!Q)throw new Error("transport-connect action was not found");await Q(A)}catch(Q){Wg.error("handleTransportConnectRequest() failed [error:%o]",Q)}};handleConnectRecvTransportRequest=async A=>{Wg.debug("handleTransportConnectRequest()");try{const 
Q=Zg.get("connectRecvTransport");if(!Q)throw new Error("recv transport-connect action was not found");await Q(A)}catch(Q){Wg.error("handleRecvTransportConnectRequest() failed [error:%o]",Q)}};handleSendTrackRequest=async A=>{Wg.debug("ConferenceRoom","handleProduceRequest()");try{const Q=Zg.get("produce");if(!Q)throw new Error("produce action was not found");await Q(A)}catch(Q){Wg.error("handleProduceRequest() failed [error:%o]",Q)}};mediaToggled=A=>{switch(Wg.debug("Media Toggled message:%O",A),A.type){case"video":Wg.debug(`mediaToggled() | inside case video${A.videoStatus}`);break;case"audio":Wg.debug(`mediaToggled() | inside case audio${A.videoStatus}`),A.audioStatus?this.emit("peerUnMuted",{peerId:A.peerId,type:"remote"}):this.emit("peerMuted",{peerId:A.peerId,type:"remote"})}};closeConsumer=A=>{let{consumerId:Q}=A;const B=this._consumers.get(Q);if(!B)return void Wg.warn("Consumer with id not found!:%s",Q);const{peerId:I,mediaTag:U}=B.appData;Wg.debug("Consumer closed for consumerId:%s, type:%s, appData:%o",Q,B?.kind,B.appData);let F="screen-audio"===U||"screen-video"===U?`ss${B.kind}`:B.kind;B.close(),this._consumers.delete(Q);let C=this._peers.get(this.data.inputParams.peerId);Wg.debug("Peer data before deletion:%O",C),C[F]&&C[F].consumerId===Q&&delete C[F],Wg.debug("Peer data after deletion:%O",C),this._peers.set(this.data.inputParams.peerId,C),"audio"===B?.kind?(Wg.debug("Going to emit micEnd, consumer closed for audio"),"screen-audio"===U?this.emit("ssAudioStop",{peerId:I,track:null,type:"remote"}):this.emit("micEnd",{peerId:I,track:null,type:"remote"})):"video"===B?.kind&&(Wg.debug("Going to emit videoEnd, consumer closed for video"),"screen-video"===U?this.emit("ssVideoStop",{peerId:I,track:null,type:"remote"}):this.emit("videoEnd",{peerId:I,track:null,type:"remote"}))};peerLeft=A=>{Wg.debug("Peer Left message is:%o",A);let{peerId:Q}=A;this._peers.delete(Q),this.emit("peerLeft",{peerId:Q})};roomClosed=()=>{Wg.info("room closed by Moderator"),this._peers=null,this.emit("roomClosed",{roomId:this.data.inputParams.roomId})};close(){this._closed||(this._closed=!0,this._socket=null,this.data.inputParams={},Wg.info("Room close()"),this._sendTransport&&this._sendTransport.close(),this._recvTransport&&this._recvTransport.close(),this._roomStatus="closed",this._running=!1)}async leaveRoom(){Wg.debug("Leave room is called!!"),"connected"===this._roomStatus?(this._sendMessage({id:"leaveRoomNew",peerId:this.data.inputParams.peerId,roomLeaveType:"client"}),await this.leaveRoomCommon()):Wg.error("The room state is:%s",this._roomStatus)}async closeRoom(){"connected"===this._roomStatus?(this._sendMessage({id:"leaveAndCloseRoom",peerId:this.data.inputParams.peerId,roomCloseType:"client"}),await this.leaveRoomCommon()):Wg.error("The room state is:%s",this._roomStatus)}leaveRoomCommon=async()=>{try{Wg.debug("Starting comprehensive room leave cleanup...");try{this._cleanupAudioMonitoring(),this._stopSpeakingWhileMutedDetection(),ag&&"function"==typeof ag.cleanup&&ag.cleanup()}catch(A){}const I=new Set;this._webcamProducer?.track&&I.add(this._webcamProducer.track),this._micProducer?.track&&I.add(this._micProducer.track),this._shareProducer?.track&&I.add(this._shareProducer.track),this._shareAudioProducer?.track&&I.add(this._shareAudioProducer.track),this._producers&&this._producers.size>0&&this._producers.forEach(A=>{A?.track&&I.add(A.track)}),this._webCamStream&&this._webCamStream.getTracks().forEach(A=>I.add(A)),this._micStream&&this._micStream.getTracks().forEach(A=>I.add(A));try{const 
A=ag?.getVBStream?.()||ag?._localVBStream;A&&"function"==typeof A.getTracks&&A.getTracks().forEach(A=>I.add(A))}catch(A){}const U=[];document.querySelectorAll("video, audio").forEach(A=>{if(A.srcObject&&"function"==typeof A.srcObject.getTracks){A.srcObject.getTracks().forEach(A=>I.add(A)),U.push({element:A,stream:A.srcObject})}}),Wg.debug(`Found ${I.size} total tracks to stop`);let F=0;for(const A of I)try{A&&"live"===A.readyState&&"function"==typeof A.stop&&(A.stop(),F++,Wg.debug(`Stopped ${A.kind} track: ${A.label||A.id}`))}catch(Q){Wg.warn("Error stopping track:",Q)}if(Wg.debug(`Stopped ${F} tracks`),this._sendTransport){try{this._sendTransport.close()}catch(A){}this._sendTransport=null}if(this._recvTransport){try{this._recvTransport.close()}catch(A){}this._recvTransport=null}this._webcamProducer=null,this._micProducer=null,this._shareProducer=null,this._shareAudioProducer=null,this._webCamStream=null,this._micStream=null,this._producers&&this._producers.clear(),this._consumers&&this._consumers.clear(),this._roomStatus="closed",this._running=!1,this._routerRtpCapabilities=null,await new Promise(A=>setTimeout(A,100));let C=0;document.querySelectorAll("video, audio").forEach(A=>{try{A.srcObject&&(A.srcObject=null,"function"==typeof A.pause&&A.pause(),"function"==typeof A.load&&A.load(),C++,Wg.debug(`Force cleared ${A.nodeName} element`))}catch(Q){Wg.warn("Error clearing element:",Q)}}),Wg.debug(`Cleared ${C} DOM elements`);try{I.forEach(A=>{A&&"function"==typeof A.removeEventListener&&(A.removeEventListener("ended",()=>{}),A.removeEventListener("mute",()=>{}),A.removeEventListener("unmute",()=>{}))})}catch(A){}if(window.gc&&"function"==typeof window.gc)try{window.gc()}catch(A){}await new Promise(A=>setTimeout(A,200));try{const A=await this.reportActiveMediaUse();Wg.debug("Final media usage report:",A);const Q=[],B=A.dom.mediaElements.filter(A=>A.hasSrcObject&&A.tracks.length>0);B.forEach(A=>{A.tracks.forEach(B=>{"live"===B.readyState&&Q.push({kind:B.kind,label:B.label,id:B.id,element:A.nodeName})})}),(Q.length>0||B.length>0)&&(Wg.warn("WARNING: Media elements or live tracks still detected after cleanup:",{liveTracks:Q,elementsWithTracks:B.length}),await this.emergencyTrackCleanup())}catch(B){Wg.error("Failed to generate final media usage report:",B)}}catch(Q){Wg.error("Error during room leave cleanup:",Q),await this.emergencyTrackCleanup()}};emergencyTrackCleanup=async()=>{Wg.debug("Performing emergency track cleanup...");try{const A=[];document.querySelectorAll("video, audio").forEach(Q=>{if(Q.srcObject&&"function"==typeof Q.srcObject.getTracks){A.push(...Q.srcObject.getTracks()),Q.srcObject=null,"function"==typeof Q.pause&&Q.pause(),"function"==typeof Q.load&&Q.load();try{Q.src=""}catch(B){}}}),A.forEach(A=>{try{"live"===A.readyState&&(A.stop(),Wg.debug(`Emergency stopped ${A.kind}: ${A.label||A.id}`))}catch(Q){}}),Wg.debug(`Emergency cleanup completed - stopped ${A.length} tracks`),await new Promise(A=>setTimeout(A,300))}catch(A){Wg.error("Emergency cleanup failed:",A)}};reportActiveMediaUse=async(A=!1)=>{const Q={sdk:{micStreamTracks:[],camStreamTracks:[],vbStreamTracks:[],shareTracks:[],producers:[],consumers:[]},dom:{mediaElements:[]},timestamp:Date.now()},B=A=>{try{return{kind:A?.kind,enabled:A?.enabled,readyState:A?.readyState,label:A?.label,id:A?.id,muted:A?.muted}}catch(Q){return{error:!0}}};try{this._micStream&&"function"==typeof this._micStream.getTracks&&(Q.sdk.micStreamTracks=this._micStream.getTracks().map(B))}catch(U){}try{this._webCamStream&&"function"==typeof 
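The leave/cleanup path above is deliberately aggressive: it collects every `MediaStreamTrack` held by producers, local streams, and any `<video>`/`<audio>` element on the page, stops the live ones, and detaches the elements so device indicators turn off. A condensed sketch of that strategy:

```js
// Sketch of the "stop everything" cleanup used when leaving a room.
function stopAllLocalMedia(streams = []) {
  const tracks = new Set();
  streams.filter(Boolean).forEach((s) => s.getTracks().forEach((t) => tracks.add(t)));

  document.querySelectorAll('video, audio').forEach((el) => {
    if (el.srcObject && typeof el.srcObject.getTracks === 'function') {
      el.srcObject.getTracks().forEach((t) => tracks.add(t));
      el.srcObject = null;            // detach so the element releases the stream
      el.pause();
    }
  });

  let stopped = 0;
  tracks.forEach((t) => {
    if (t.readyState === 'live') {
      t.stop();
      stopped += 1;
    }
  });
  return stopped;                      // the bundle logs this count for debugging
}
```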
this._webCamStream.getTracks&&(Q.sdk.camStreamTracks=this._webCamStream.getTracks().map(B))}catch(U){}try{const A=ag?.getVBStream?.()||ag?._localVBStream;A&&"function"==typeof A.getTracks&&(Q.sdk.vbStreamTracks=A.getTracks().map(B))}catch(U){}try{this._shareProducer?.track&&Q.sdk.shareTracks.push(B(this._shareProducer.track)),this._shareAudioProducer?.track&&Q.sdk.shareTracks.push(B(this._shareAudioProducer.track))}catch(U){}try{this._producers&&this._producers.size>0&&this._producers.forEach((A,I)=>{Q.sdk.producers.push({key:I,track:A?.track?B(A.track):null,id:A?.id,paused:A?.paused})})}catch(U){}try{this._consumers&&this._consumers.size>0&&this._consumers.forEach((A,I)=>{Q.sdk.consumers.push({key:I,track:A?.track?B(A.track):null,id:A?.id,paused:A?.paused})})}catch(U){}try{const A=Array.from(document.querySelectorAll("video, audio"));Q.dom.mediaElements=A.map(A=>{let Q=[],I=null;try{const U=A.srcObject;U&&"function"==typeof U.getTracks&&(Q=U.getTracks().map(B),I=U.id)}catch(U){}return{nodeName:A.nodeName,muted:!!A.muted,paused:!!A.paused,hasSrcObject:!!A.srcObject,streamId:I,tracks:Q,src:A.src||null,currentSrc:A.currentSrc||null}})}catch(U){}const I=[...Q.sdk.micStreamTracks,...Q.sdk.camStreamTracks,...Q.sdk.vbStreamTracks,...Q.sdk.shareTracks,...Q.sdk.producers.map(A=>A.track).filter(Boolean),...Q.sdk.consumers.map(A=>A.track).filter(Boolean),...Q.dom.mediaElements.flatMap(A=>A.tracks)].filter(A=>A&&"live"===A.readyState);return Q.summary={totalLiveTracks:I.length,elementsWithSrcObject:Q.dom.mediaElements.filter(A=>A.hasSrcObject).length,elementsWithTracks:Q.dom.mediaElements.filter(A=>A.tracks.length>0).length},Q};async listDevicesInternal(){if(navigator.mediaDevices.ondevicechange=async A=>{let Q=await _C();Wg.info("Media devices changed!:%O",Q),Q.audioDevices&&Q.audioDevices.length>0&&(this._deviceList.audioDevices=Q.audioDevices),Q.videoDevices&&Q.videoDevices.length>0&&(this._deviceList.videoDevices=Q.videoDevices),Q.audioDevices&&Q.audioDevices.length>0&&(this._deviceList.audioOutputDevices=Q.audioDevicesOutput),ig=this._deviceList,this.emit("deviceListUpdated")},!this._deviceList){const A=await $C();if(A.success)return this._deviceList=A.deviceList,void(ig=this._deviceList)}}restartIce=async(A,Q)=>{if("send"===Q&&"connected"===this._sendTransport.connectionState||"recv"===Q&&"connected"===this._recvTransport.connectionState)return void Wg.debug("no need to restart ICE as transport now connected");Wg.debug("websocket is ready and connectionstate is still disconnected, therefore going to restart ICE");let B={id:"restartIce",transportId:A,roomName:this.data.inputParams.roomId,peerId:this.data.inputParams.peerId};this._sendMessage(B)};restartIceResponse=A=>{Wg.debug("restart ICE response:%o",A);let{transportId:Q,iceParameters:B}=A;this._sendTransport&&this._sendTransport.id===Q?this._sendTransport.restartIce({iceParameters:B}):this._recvTransport&&this._recvTransport.id===Q&&this._recvTransport.restartIce({iceParameters:B})};startRecording=({recordingType:A=null,outputType:Q=null,outputQualities:B=null}={})=>{Wg.debug("recording type requested is:%s,outputType:%s, outputQualties:%o",A,Q,B);const I=!A||"av"!==A?.toLowerCase()&&"audiovideo"!==A?.toLowerCase()?"mergedA":"mergedAV";if((!Q||"hls"!==Q.toLowerCase()&&"mp4"!==Q.toLowerCase())&&Q)return Wg.error("Invalid outut type"),{success:!1,reason:`Invalid outputType: ${Q}. `};if(Q&&"hls"===Q.toLowerCase()&&B&&!yE(B))return Wg.error("Invalid outut qualities"),{success:!1,reason:`Invalid outputQualities: ${JSON.stringify(B)}. 
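`restartIce()`/`restartIceResponse()` above implement the usual mediasoup ICE-restart round trip: skip it if the transport has already recovered, otherwise request fresh ICE parameters from the server and apply them to the matching transport. A minimal sketch with a placeholder `signal()` request helper:

```js
// Sketch of the ICE-restart round trip for a mediasoup-client transport.
async function maybeRestartIce(transport, signal) {
  if (!transport || transport.connectionState === 'connected') {
    return; // nothing to restart; the bundle short-circuits the same way
  }
  // The server's "restartIceResponse" carries fresh iceParameters for this transport.
  const { iceParameters } = await signal({ id: 'restartIce', transportId: transport.id });
  await transport.restartIce({ iceParameters });
}
```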
Allowed values are ${Array.from(TE).join(", ")}.`};let U={id:"startRecording",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,url:window.location.hostname,type:pC,recordingStrategy:I,outputQualities:B,outputType:Q?.toLowerCase()};this._sendMessage(U),this._recordingStartedByMe={...this._recordingStartedByMe,"main-room":{recordingNo:null}}};stopRecording=()=>{Wg.debug("going to stop recording for recordingStartedByMe:%o",this._recordingStartedByMe);let A="main-room";if(!this._recordingStartedByMe[A])return{success:!1,error:!0,code:"RRID001",text:"Error while trying to stop recording. Either the recording has not been started yet Or The same user need to stop recording who started it."};{let Q={id:"stopRecording",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,recordingNo:this._recordingStartedByMe[A].recordingNo,type:pC};this._sendMessage(Q),delete this._recordingStartedByMe[A],this.emit("recordingEnded",{peerId:this.data.inputParams.peerId})}};setRecordingStatusStarted=A=>{Wg.debug("Recording/Streaming started by moderator!!:%O",A);let{breakOutRoom:Q,recordingStartTime:B,recordingNo:I,type:U}=A;[pC,rC].includes(U)&&(this._recordingStartedByMe["main-room"]?(Wg.debug("This recording has been started by me."),this._recordingStartedByMe["main-room"].recordingNo=I,this.emit("recordingStarted",{peerId:this.data.inputParams.peerId,startTime:B})):this.emit("recordingStarted",{startTime:B}))};setRecordingStatusEnded=A=>{Wg.debug("Recording ended by moderator!!, data:%O",A);let{breakOutRoom:Q,type:B}=A;"rtmpStream"===B||this.emit("recordingEnded",{})};startProcessing=async({inputFiles:A=[],outputQualities:Q=null,bucket:B=null,cloud:I=null,region:U=null}={})=>{Wg.debug("Processing of Files requested for:%o",A);const F=Math.round(1e7*Math.random()),C=await async function(A){if(cE.info("The input files are:%o, length:%s",A,A.length),A.length>0){cE.info("Files array length is:%s",A.length);for(const{type:Q,url:B}of A){if(cE.info("The file detais are type:%s, url:%s",Q,B),!YE.includes(Q))return{success:!1,reason:`Type "${Q}" is not allowed.`};if(!tE(B,Q))return{success:!1,reason:`Extension mismatch for ${B}; expected .${Q}`}}return{success:!0}}return{success:!1,reason:"There are no files for processing!"}}(A);if(C.success){if(Q&&!yE(Q))return Wg.error("Invalid outut qualities"),{success:!1,reason:`Invalid outputQualities: ${JSON.stringify(Q)}. 
Allowed values are ${Array.from(TE).join(", ")}.`};this._processingStartedByMe={...this._processingStartedByMe,[F]:{}};for(const{type:Q,url:B}of A)this._processingStartedByMe={...this._processingStartedByMe,[F]:{...this._processingStartedByMe[F],[B]:{type:Q,url:B,status:"pending"}}};let C={id:"processVideos",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,inputFiles:A,outputQualities:Q,bucket:B,cloud:I,region:U,requestId:F,type:"process"};return this._sendMessage(C),{success:!0}}return C};checkProcessingStatus=({requestId:A})=>(Wg.debug("Going to check processing status for request Id:%s",A),this._processingStartedByMe[A]?{success:!0,details:this._processingStartedByMe[A]}:{success:!0,details:this._processingStartedByMe});handleProcessingStart=A=>{const{processingStartTime:Q,processingNo:B,requestId:I}=A;Wg.debug("handleProcessingStart()| received message is:%o",A),this.emit("processingStarted",{processingStartTime:Q,requestId:I})};handleProcessingCompletion=A=>{const{totalProcessingTime:Q,hlsfileKey:B,size:I,originalFile:U,lastFile:F,requestId:C}=A;if(Wg.debug("handleProcessingCompletion()| received message is:%o",A),Wg.debug("Before update, Total files to be processed are:%o",this._processingStartedByMe),this._processingStartedByMe[U]&&(this._processingStartedByMe={...this._processingStartedByMe,[C]:{...this._processingStartedByMe[C],[U]:{...this._processingStartedByMe[U],status:"completed",hlsfileKey:B,size:I,totalProcessingTime:Q}}}),Wg.debug("After update, Total files to be processed are:%o",this._processingStartedByMe),this.emit("processingCompleted",A),F){Wg.debug("The last file processing has been completed! Remove all the files that has been completed with the same requesterId");let A={...this._processingStartedByMe};delete A[C],Wg.debug("After deleting the current requestId:%o",A),this._processingStartedByMe=A}};handleProcessingError=A=>{const{totalProcessingTime:Q,hlsfileKey:B,size:I,originalFile:U,lastFile:F,requestId:C,error:E}=A;Wg.debug("handleProcessingCompletion()| received message is:%o",A),Wg.debug("Before update, Total files to be processed are:%o",this._processingStartedByMe),this._processingStartedByMe[U]&&(this._processingStartedByMe={...this._processingStartedByMe,[C]:{...this._processingStartedByMe[C],[U]:{...this._processingStartedByMe[U],status:"error",hlsfileKey:B,size:I,totalProcessingTime:Q,error:E}}}),Wg.debug("After update, Total files to be processed are:%o",this._processingStartedByMe),this.emit("processingError",A)};async enableMic({deviceId:A=null,autoGainControl:Q,noiseSuppression:B,echoCancellation:I,channelCount:U,sampleRate:F,forcePCMU:C,forcePCMA:E}={}){if(Wg.debug("enableMic()"),!this.data.inputParams.produce)return Wg.debug("Produce status is set to false!"),{success:!1,error:!0,code:"REID007",text:"Error while trying to start Mic/Audio. Produce flag need to set to true while joining room in order to enable Mic/Audio."};if("connected"!==this._roomStatus)return Wg.debug("Room status is not connected yet!"),{success:!1,error:!0,code:"REID008",text:`Error while trying to start Mic/Audio as room not in connected status. Current room status:!${this._roomStatus}`,possibleReasons:"Did you forget to call joinRoom before enabling Mic? OR if you have already initiated the joinRoom process, then Mic will be enabled automatically once room join process completes."};if(this._micProducer)return Wg.debug("Mic is already active!"),{success:!1,warning:!0,code:"RWID002",text:"Error while trying to start Mic/Audio. 
Mic/Audio is already active!"};if(!this._device.canProduce("audio"))return Wg.error("enableMic() | cannot produce audio"),{success:!1,error:!0,code:"REID009",text:"Error while trying to start Mic/Audio. Mic/Audio couldnot be activated due to limitations on this device. If you think this device has a functional Mic and the problem persists even after multiple retries, please contact technical support with the error code."};let g,R;C&&"boolean"==typeof C&&(this.data.inputParams.forcePCMU=C),E&&"boolean"==typeof E&&(this.data.inputParams.forcePCMA=E),Q&&"boolean"==typeof Q&&(this.data.inputParams.autoGainControl=Q),I&&"boolean"==typeof I&&(this.data.inputParams.echoCancellation=I),B&&"boolean"==typeof B&&(this.data.inputParams.noiseSuppression=B),F&&Number.isInteger(F)&&F<64e3&&F>8e3&&(this.data.inputParams.sampleRate=F),U&&Number.isInteger(U)&&U>0&&U<3&&(this.data.inputParams.channelCount=U);try{if(this._externalVideo)this._micStream=await this._getExternalVideoStream(),g=this._micStream.getAudioTracks()[0].clone();else{if(A?(R=this._deviceList.audioDevices.find(Q=>Q.deviceId===A),R||(Wg.warn("Selected audio input deviceId:%s not found",A),R=this._deviceList.audioDevices[0])):R=this._deviceList.audioDevices[0],this._mic.device=R,!R)return Wg.error("No mic device found! Can't start audio!"),{success:!1,reason:"No mic available for starting audio!"};A&&this.data.inputParams.audioDeviceId!==A&&(this.data.inputParams.audioDeviceId=A),Wg.debug("enableMic() | calling getUserMedia()");try{this._micStream=await navigator.mediaDevices.getUserMedia({audio:{deviceId:{exact:R.deviceId},echoCancellation:this.data.inputParams.echoCancellation,noiseSuppression:this.data.inputParams.noiseSuppression,autoGainControl:this.data.inputParams.autoGainControl,sampleRate:this.data.inputParams.sampleRate,channelCount:this.data.inputParams.channelCount}}),g=this._micStream.getAudioTracks()[0]}catch(pg){throw new Error("Error while acquiring mic. 
Possible issue with audio constraint values",pg)}}this._micProducer=await this._sendTransport.produce({track:g,codecOptions:this.data.inputParams.forcePCMU||this.data.inputParams.forcePCMA?void 0:{opusStereo:!1,opusDtx:!0,opusFec:!0,opusNack:!0},codec:this.data.inputParams.forcePCMU?this._device.rtpCapabilities.codecs.find(A=>"audio/pcmu"===A.mimeType.toLowerCase()):this.data.inputParams.forcePCMA?this._device.rtpCapabilities.codecs.find(A=>"audio/pcma"===A.mimeType.toLowerCase()):void 0,appData:{mediaTag:"cam-audio"}}),this._producers.set("audio",{id:this._micProducer.id,paused:this._micProducer.paused,track:this._micProducer.track,rtpParameters:this._micProducer.rtpParameters,codec:this._micProducer.rtpParameters.codecs[0].mimeType.split("/")[1]}),this.emit("micStart",{peerId:this.data.inputParams.peerId,audioTrack:this._micProducer.track,type:"local"}),this._micProducer.on("transportclose",()=>{this._micProducer=null}),this._micProducer.on("trackended",()=>{this.disableMic().catch(()=>{})}),await this._initializeAudioMonitoring(),this._startSpeakingWhileMutedDetection()}catch(J){Wg.error("enableMic() | failed:%o",J),this.emit("error",{code:"EID002",text:"Error enabling microphone!"}),g&&g.stop()}}async disableMic(){if(Wg.debug("disableMic()"),this._cleanupAudioMonitoring(),this._micStream&&this._micStream.getAudioTracks().forEach(A=>A.stop()),this._micProducer){this._micProducer.close(),this._producers.delete("audio");try{let A={id:"closeProducerSDK",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,type:"audio",producerId:this._micProducer.id};this._sendMessage(A),this.emit("micEnd",{peerId:this.data.inputParams.peerId,audioTrack:null,type:"local"})}catch(A){this.emit("error",{code:"EID003",text:"Error disabling microphone!"})}this._micProducer=null}}async muteMic(){Wg.debug("muteMic()"),this._micProducer.pause();try{let A={id:"toggleMedia",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,type:"audio",audioStatus:!1,producerId:this._micProducer.id};this._sendMessage(A),this.emit("peerMuted",{peerId:this.data.inputParams.peerId,type:"local"})}catch(A){Wg.error("muteMic() | failed: %o",A),this.emit("error",{code:"EID004",text:"Error muting local microphone!"})}}async unmuteMic(){Wg.debug("unmuteMic()"),this._micProducer||(Wg.debug("Mic is not active!"),await this.enableMic()),this._micProducer.resume();try{let A={id:"toggleMedia",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,type:"audio",audioStatus:!0,producerId:this._micProducer.id};this._sendMessage(A),this.emit("peerUnMuted",{peerId:this.data.inputParams.peerId,type:"local"})}catch(A){Wg.error("unmuteMic() | failed: %o",A),this.emit("error",{code:"EID005",text:"Error unmuting local microphone!"})}}startSpeechRecognition(A={}){const Q={lang:"es-ES",continuous:!0,interimResults:!0,maxAlternatives:3,autoRestart:!0,restartDelayMs:250,...A},B=window.SpeechRecognition||window.webkitSpeechRecognition,I=window.SpeechGrammarList||window.webkitSpeechGrammarList;if(!B)return this.emit("sttError",{code:"UNSUPPORTED",message:"Web Speech API not supported"}),{success:!1,reason:"unsupported"};try{if(this._speechRecognition){try{this._speechRecognition.onend=null,this._speechRecognition.onresult=null,this._speechRecognition.onerror=null}catch{}try{this._speechRecognition.stop()}catch{}this._speechRecognition=null}const A=new 
B;if(A.lang=Q.lang,A.continuous=!!Q.continuous,A.interimResults=!!Q.interimResults,A.maxAlternatives=Q.maxAlternatives,Q.grammars&&Array.isArray(Q.grammars)&&I){const B=new I,U=`#JSGF V1.0; grammar terms; public <term> = ${Q.grammars.join(" | ")};`;B.addFromString&&B.addFromString(U,1),A.grammars&&(A.grammars=B)}return this._sttShouldRun=!0,this._sttAutoRestart=!!Q.autoRestart,this._sttRestartDelayMs=Number(Q.restartDelayMs)||250,A.onstart=()=>this.emit("sttStart",{timestamp:Date.now(),lang:A.lang}),A.onresult=Q=>{const B=Q.results[Q.results.length-1],I=B&&B[0]?B[0]:null,U={transcript:I?I.transcript:"",confidence:I?I.confidence:0,isFinal:!!B&&B.isFinal,timestamp:Date.now(),lang:A.lang},F=U.timestamp,C=this.data.inputParams.peerId||"local";this._transcriptStorage.has(F)||this._transcriptStorage.set(F,new Map);this._transcriptStorage.get(F).set(C,{transcript:U.transcript,isFinal:U.isFinal}),this.emit("sttResult",U)},A.onerror=A=>{this.emit("sttError",{code:A.error||"UNKNOWN",message:A.message||"Speech recognition error"})},A.onend=()=>{if(this.emit("sttEnd",{timestamp:Date.now()}),this._sttShouldRun&&this._sttAutoRestart){const B=this._sttRestartDelayMs;try{setTimeout(()=>{if(this._sttShouldRun&&this._sttAutoRestart)try{A.start()}catch(Q){}},B)}catch(Q){}}},A.start(),this._speechRecognition=A,{success:!0}}catch(pg){return this.emit("sttError",{code:"INIT_FAILED",message:pg.message}),{success:!1,reason:pg.message}}}stopSpeechRecognition(){try{if(this._sttShouldRun=!1,this._sttAutoRestart=!1,this._speechRecognition){try{this._speechRecognition.onend=null,this._speechRecognition.onresult=null,this._speechRecognition.onerror=null}catch{}this._speechRecognition.stop(),this._speechRecognition=null}return{success:!0}}catch(pg){return this.emit("sttError",{code:"STOP_FAILED",message:pg.message}),{success:!1,reason:pg.message}}}async startRemoteCaption({lang:A}={}){try{if(this._remoteCaption&&this._remoteCaption.enabled)return{success:!0,alreadyRunning:!0};if(!this._voskModule)try{this._voskModule=await Promise.resolve().then(()=>bg)}catch(Q){return this.emit("remoteCaptionError",{code:"VOSK_IMPORT_FAILED",message:Q?.message||"Failed to import vosk-browser"}),{success:!1,reason:"import_failed"}}let I;switch(A){case"es-ES":I="vosk-model-small-es-0.42.tar.gz";break;case"fr-FR":I="vosk-model-small-fr-0.22.tar.gz";break;default:I="vosk-model-small-en-us-0.15.tar.gz"}const U=[`/models/${I}`,`models/${I}`,`/samvyo-js-sdk/lib/models/${I}`],F=async A=>{for(const B of A)try{if((await fetch(B,{method:"HEAD"})).ok)return B}catch(Q){}return A[0]},C=await F(U);this._remoteCaption||(this._remoteCaption={});const E=this._remoteCaption;E.model||(E.model=await this._voskModule.createModel(C)),E.enabled=!0,E.audioTracks=E.audioTracks||new Map,E.currentSpeaker=null,E.recognizer=null,E.audioCtx=null,E.source=null,E.processor=null;const g=()=>{try{E.recognizer&&E.recognizer.remove&&E.recognizer.remove()}catch(A){}E.recognizer=null;try{E.processor&&E.processor.disconnect&&E.processor.disconnect()}catch(A){}E.processor=null;try{E.source&&E.source.disconnect&&E.source.disconnect()}catch(A){}E.source=null;try{E.audioCtx&&"closed"!==E.audioCtx.state&&E.audioCtx.close&&E.audioCtx.close()}catch(A){}E.audioCtx=null},R=async A=>{if(!E.enabled)return;if(!A)return;if(E.currentSpeaker===A)return;E.currentSpeaker=A,g();const Q=E.audioTracks.get(A);if(!Q)return;let B;try{B=new E.model.KaldiRecognizer(16e3)}catch(R){try{B=new E.model.KaldiRecognizer(void 0,16e3)}catch(J){try{B=new E.model.KaldiRecognizer}catch(n){return void 
this.emit("remoteCaptionError",{code:"RECOGNIZER_CREATE_FAILED",message:n?.message||""})}}}E.recognizer=B,B.on&&B.on("result",Q=>{const B=Q?.result?.text||"";B&&E.enabled&&this.emit("remoteCaption",{peerId:A,text:B,isFinal:!0})}),B.on&&B.on("partialresult",Q=>{const B=Q?.result?.partial||"";B&&E.enabled&&this.emit("remoteCaption",{peerId:A,text:B,isFinal:!1})});const I=new MediaStream([Q]),U=new(window.AudioContext||window.webkitAudioContext)({sampleRate:16e3});if(E.audioCtx=U,"suspended"===U.state)try{await U.resume()}catch(S){}const F=U.createMediaStreamSource(I);E.source=F;const C=U.createScriptProcessor(4096,1,1);E.processor=C,F.connect(C),C.connect(U.destination),C.onaudioprocess=A=>{if(E.enabled&&E.recognizer)try{E.recognizer.acceptWaveform(A.inputBuffer)}catch(S){}}};E._onMicStart=({peerId:A,audioTrack:Q})=>{E.enabled&&(Q&&E.audioTracks.set(A,Q),this._activeSpeaker&&this._activeSpeaker.peerId===A&&R(A))},E._onMicEnd=({peerId:A})=>{E.enabled&&(E.audioTracks.delete(A),E.currentSpeaker===A&&(g(),E.currentSpeaker=null))},E._onActiveSpeaker=({peerId:A})=>{E.enabled&&R(A)},this.on("micStart",E._onMicStart),this.on("micEnd",E._onMicEnd),this.on("activeSpeaker",E._onActiveSpeaker);try{this._micProducer?.track&&E.audioTracks.set(this.data.inputParams.peerId,this._micProducer.track)}catch(B){}try{this._consumers&&this._consumers.size>0&&this._consumers.forEach(A=>{try{"audio"===A?.kind&&A?.track&&A?.appData?.peerId&&E.audioTracks.set(A.appData.peerId,A.track)}catch(B){}})}catch(B){}return this._activeSpeaker?.peerId&&R(this._activeSpeaker.peerId),{success:!0,modelUrl:C}}catch(pg){return this.emit("remoteCaptionError",{code:"START_FAILED",message:pg?.message||String(pg)}),{success:!1,reason:pg?.message||"unknown"}}}stopRemoteCaption({unloadModel:A=!1}={}){try{const B=this._remoteCaption;if(!B)return{success:!0,alreadyStopped:!0};if(B.enabled=!1,B._onMicStart)try{this.off("micStart",B._onMicStart)}catch(Q){}if(B._onMicEnd)try{this.off("micEnd",B._onMicEnd)}catch(Q){}if(B._onActiveSpeaker)try{this.off("activeSpeaker",B._onActiveSpeaker)}catch(Q){}try{B.recognizer&&B.recognizer.remove&&B.recognizer.remove()}catch(Q){}B.recognizer=null;try{B.processor&&B.processor.disconnect&&B.processor.disconnect()}catch(Q){}B.processor=null;try{B.source&&B.source.disconnect&&B.source.disconnect()}catch(Q){}B.source=null;try{B.audioCtx&&"closed"!==B.audioCtx.state&&B.audioCtx.close&&B.audioCtx.close()}catch(Q){}if(B.audioCtx=null,B.currentSpeaker=null,B.audioTracks=new Map,A){try{B.model&&B.model.terminate&&B.model.terminate()}catch(Q){}B.model=null}return{success:!0}}catch(pg){return this.emit("remoteCaptionError",{code:"STOP_FAILED",message:pg?.message||String(pg)}),{success:!1,reason:pg?.message||"unknown"}}}async enableCam({deviceId:A=null,videoResolution:Q,forceVp8:B,forceVp9:I,forceH264:U,h264Profile:F,forceFPS:C,enableWebcamLayers:E,numSimulcastStreams:g,videoBitRates:R,vbdetails:J}={}){if(Wg.debug("enableWebcam()"),Wg.debug("first vbdetails in enablecam",J),!this.data.inputParams.produce)return Wg.debug("Produce status is set to false!"),{success:!1,error:!0,code:"REID004",text:"Error while trying to start Camera. Produce flag need to set to true while joining room in order to enable Camera."};if("connected"!==this._roomStatus)return Wg.debug("Room status is not connected yet!"),{success:!1,error:!0,code:"REID005",text:`Error while trying to start Camera as room not in connected status. Current room status:!${this._roomStatus}`,possibleReasons:"Did you forget to call joinRoom before enabling Camera? 
OR if you have already initiated the joinRoom process, then Camera will be enabled automatically once room join process completes."};if(this._webcamProducer)return Wg.debug("Camera is already active!"),{success:!1,warning:!0,code:"RWID003",text:"Error while trying to start Camera. Camera is already active!"};if(!this._device.canProduce("video"))return Wg.error("enableWebcam() | cannot produce video"),{success:!1,error:!0,code:"REID006",text:"Error while trying to start Camera. Camera couldnot be activated due to limitations on this device. If you think this device has a functional camera and the problem persists even after multiple retries, please contact technical support with the error code."};let n,S;["hd","vga","qvga"].includes(Q)&&(this.data.inputParams.videoResolution=Q,this._webcam.resolution=Q),B&&"boolean"==typeof B&&(this.data.inputParams.forceVp8=B),I&&"boolean"==typeof I&&(this.data.inputParams.forceVp9=I),U&&"boolean"==typeof U&&(this.data.inputParams.forceH264=U),F&&["high","low"].includes(F.toLowerCase())&&(this.data.inputParams.h264Profile=F),C&&Number.isInteger(C)&&C<65&&C>5&&(this.data.inputParams.forceFPS=25),E&&"boolean"==typeof E&&(this.data.inputParams.enableWebcamLayers=E,this._enableWebcamLayers=E),g&&Number.isInteger(g)&&g<4&&g>0&&(this.data.inputParams.numSimulcastStreams=g,this._numSimulcastStreams=g),Array.isArray(R)&&R.length>=1&&R.length<=3&&R.every(A=>Number.isInteger(A)&&A>=75&&A<=800)?(Wg.debug("videoBitRates values are correct"),this.data.inputParams.videoBitRates=R):Wg.warn("videobitrates values should be an integer array with maximum 3 elements and minimum 1 element. The values in the array are '[700,250,75]'");try{if(this._externalVideo)S={label:"external video"},this._webCamStream=await this._getExternalVideoStream(),n=this._webCamStream.getVideoTracks()[0].clone();else{A?(S=this._deviceList.videoDevices.find(Q=>Q.deviceId===A),S||(Wg.warn("Selected deviceId:%s not found",A),S=this._deviceList.videoDevices[0])):S=this._deviceList.videoDevices[0],this._webcam.device=S;const{resolution:Q}=this._webcam;if(!S)return Wg.error("No wencam device found! 
Can't start video!"),{success:!1,reason:"No Webcam available for starting video!"};A&&this.data.inputParams.videoDeviceId!==A&&(this.data.inputParams.videoDeviceId=A),Wg.debug("enableWebcam() | calling getUserMedia()"),this._webCamStream=await navigator.mediaDevices.getUserMedia({video:{deviceId:{exact:S.deviceId},...kg[Q],frameRate:{ideal:this.data.inputParams.forceFPS}}}),n=this._webCamStream.getVideoTracks()[0]}let Q,B;const I={videoGoogleStartBitrate:1e3};if(Wg.debug("Current device codec options are:%O",this._device.rtpCapabilities.codecs),this._forceVP8){if(B=this._device.rtpCapabilities.codecs.find(A=>"video/vp8"===A.mimeType.toLowerCase()),!B)throw new Error("desired VP8 codec+configuration is not supported")}else if(this._forceH264){if("high"===this.data.inputParams.h264Profile?B=this._device.rtpCapabilities.codecs.find(A=>"video/h264"===A.mimeType.toLowerCase()&&"4d001f"===A.parameters["profile-level-id"]):"low"===this.data.inputParams.h264Profile&&(B=this._device.rtpCapabilities.codecs.find(A=>"video/h264"===A.mimeType.toLowerCase()&&"42e01f"===A.parameters["profile-level-id"])),!B)throw new Error("desired H264 codec+configuration is not supported");Wg.debug("Selected h264 codec is:%O",B)}else if(this._forceVP9&&(B=this._device.rtpCapabilities.codecs.find(A=>"video/vp9"===A.mimeType.toLowerCase()),!B))throw new Error("desired VP9 codec+configuration is not supported");if(this._enableWebcamLayers){const A=this._device.rtpCapabilities.codecs.find(A=>"video"===A.kind);this._forceVP9&&B||"video/vp9"===A.mimeType.toLowerCase()?Q=[{maxBitrate:5e6,scalabilityMode:this._webcamScalabilityMode||"L3T3_KEY"}]:(Q=[{scaleResolutionDownBy:1,maxBitrate:1e3*this.data.inputParams.videoBitRates[0],scalabilityMode:this._webcamScalabilityMode||"L1T3"}],this._numSimulcastStreams>1&&Q.unshift({scaleResolutionDownBy:2,maxBitrate:1e3*this.data.inputParams.videoBitRates[1],scalabilityMode:this._webcamScalabilityMode||"L1T3"}),this._numSimulcastStreams>2&&Q.unshift({scaleResolutionDownBy:4,maxBitrate:1e3*this.data.inputParams.videoBitRates[2],scalabilityMode:this._webcamScalabilityMode||"L1T3"}))}if(J)try{const A=ag&&ag._localVBStream;let Q=null;if(A&&"function"==typeof A.getVideoTracks&&A.getVideoTracks().length>0&&"live"===A.getVideoTracks()[0].readyState)Q=A.getVideoTracks()[0],Wg.debug("Using existing Virtual Background track");else{const A=await ag.initializePipeline(n,J);A&&A.vbStream&&"function"==typeof A.vbStream.getVideoTracks&&A.vbStream.getVideoTracks().length>0&&(Q=A.vbStream.getVideoTracks()[0],Wg.debug("Initialized new Virtual Background pipeline"))}Q&&(n=Q)}catch(V){Wg.debug("VB init failed or skipped in enableCam")}this._webcamProducer=await this._sendTransport.produce({track:n,encodings:Q,codecOptions:I,codec:B,appData:{mediaTag:"cam-video"}}),this._producers.set("video",{id:this._webcamProducer.id,deviceLabel:S.label,type:this._getWebcamType(S),paused:this._webcamProducer.paused,track:this._webcamProducer.track,rtpParameters:this._webcamProducer.rtpParameters,codec:this._webcamProducer.rtpParameters.codecs[0].mimeType.split("/")[1]}),this.emit("videoStart",{peerId:this.data.inputParams.peerId,videoTrack:this._webcamProducer.track,type:"local"}),this._webcamProducer.on("transportclose",()=>{this._webcamProducer=null}),this._webcamProducer.on("trackended",()=>{this.disableCam().catch(()=>{})})}catch(N){Wg.error("enableWebcam() | failed:%o",N),this.emit("error",{code:"EID011",text:"Enable Webcam failed!"}),n&&n.stop()}}async 
disableCam(){if(Wg.debug("disableWebcam()"),this._webcamProducer){this._webcamProducer.close(),this._producers.delete("video");try{let A={id:"closeProducerSDK",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,type:"video",producerId:this._webcamProducer.id};this._sendMessage(A),this.emit("videoEnd",{peerId:this.data.inputParams.peerId,videoTrack:null})}catch(A){this.emit("error",{code:"EID012",text:"Error while closing server side producer!"})}try{this._webCamStream&&"function"==typeof this._webCamStream.getTracks&&this._webCamStream.getTracks().forEach(A=>{try{A.stop()}catch(Q){}})}catch(Q){}this._webCamStream=null,this._webcamProducer=null}}async _updateWebcams(){Wg.debug("_updateWebcams()"),this._webcams=new Map,Wg.debug("_updateWebcams() | calling enumerateDevices()");const A=await navigator.mediaDevices.enumerateDevices();for(const U of A)"videoinput"===U.kind&&this._webcams.set(U.deviceId,U);const Q=Array.from(this._webcams.values()),B=Q.length,I=this._webcam.device?this._webcam.device.deviceId:void 0;Wg.debug("_updateWebcams() [webcams:%o]",Q),0===B?this._webcam.device=null:this._webcams.has(I)||(this._webcam.device=Q[0])}async _getExternalVideoStream(){if(this._externalVideoStream)return this._externalVideoStream;if(this._externalVideo.readyState<3&&await new Promise(A=>this._externalVideo.addEventListener("canplay",A)),this._externalVideo.captureStream)this._externalVideoStream=this._externalVideo.captureStream();else{if(!this._externalVideo.mozCaptureStream)throw new Error("video.captureStream() not supported");this._externalVideoStream=this._externalVideo.mozCaptureStream()}return this._externalVideoStream}_getWebcamType(A){return/(back|rear)/i.test(A.label)?(Wg.debug("_getWebcamType() | it seems to be a back camera"),"back"):(Wg.debug("_getWebcamType() | it seems to be a front camera"),"front")}async changeVideoInput({resolution:A,deviceId:Q,fps:B,vbdetails:I}){if(this._webcamProducer){return await this._changeVideoInput({resolution:A,deviceId:Q,fps:B,vbdetails:I})}return Wg.error("No webcam producer available!"),{success:!1,reason:"You are not sharing your camera yet. Camera Input can be changed to a new camera only when you are sharing an existing camera. 
"}}async _changeVideoInput({resolution:A,deviceId:Q,fps:B,vbdetails:I}){Wg.info("_changeVideoInput() | Inside"),A&&["hd","vga","qvga"].includes(A)?this._webcam.resolution=A:Wg.warn("Invalid video resolution value "),B&&Number.isInteger(B)&&B<65&&B>5?this.data.inputParams.forceFPS=B:Wg.warn("forceFPS should be a number between 5 to 65, default value is 25 fps.");let U=this._deviceList.videoDevices.find(A=>Q&&A.deviceId===Q);if(!U)return Wg.error("The selected deviceId not found!"),{success:!1,reason:"Invalid deviceId!"};this._webcam.device=U;try{this._webCamStream.getVideoTracks().forEach(A=>A.stop()),this._webCamStream=null,Wg.debug("changeVideoInput() | calling getUserMedia()"),this._webCamStream=await navigator.mediaDevices.getUserMedia({video:{deviceId:{exact:U.deviceId},...kg[this._webcam.resolution],frameRate:{ideal:this.data.inputParams.forceFPS}}});let A=this._webCamStream.getVideoTracks()[0];if(Wg.debug("The new video track is:%O",A),I)try{const Q=await ag.initializePipeline(A,I);Q&&Q.vbStream&&"function"==typeof Q.vbStream.getVideoTracks&&Q.vbStream.getVideoTracks()[0]&&(A=Q.vbStream.getVideoTracks()[0],Wg.debug("Reinitialized VB pipeline for changed camera"))}catch(F){Wg.debug("VB init skipped/failed on changeVideoInput")}await this._webcamProducer.replaceTrack({track:A});let Q=this._producers.get("video");return Q.deviceLabel=U.label,Q.type=this._getWebcamType(U),Q.track=this._webcamProducer.track,Wg.debug("Updated producer values are:%O",Q),this._producers.set("video",Q),this.emit("videoStart",{peerId:this.data.inputParams.peerId,videoTrack:A,type:"local"}),{success:!0}}catch(C){return Wg.error("Error while changing input:%O",C),{success:!1,reason:"Couldn't change video input",error:C}}}async changeAudioInput({autoGainControl:A,echoCancellation:Q,noiseSuppression:B,sampleRate:I,channelCount:U,deviceId:F}){if(this._micProducer){return await this._changeAudioInput({autoGainControl:A,echoCancellation:Q,noiseSuppression:B,sampleRate:I,channelCount:U,deviceId:F})}return{success:!1,reason:"You are not sharing your mic yet. Mic Input can be changed to a new mic only when you are sharing an existing mic. 
"}}async _changeAudioInput({autoGainControl:A,echoCancellation:Q,noiseSuppression:B,sampleRate:I,channelCount:U,deviceId:F}){A&&"boolean"==typeof A&&(this.data.inputParams.autoGainControl=A),Q&&"boolean"==typeof Q&&(this.data.inputParams.echoCancellation=Boolean(Q)),B&&"boolean"==typeof B&&(this.data.inputParams.noiseSuppression=Boolean(B)),I&&Number.isInteger(I)&&I<64e3&&I>8e3&&(this.data.inputParams.sampleRate=I),U&&Number.isInteger(U)&&U>0&&U<3&&(this.data.inputParams.channelCount=U);let C=this._deviceList.audioDevices.find(A=>F&&A.deviceId===F);if(!C)return{success:!1,reason:"Invalid deviceId!"};this._mic.device=C,this._micStream&&this._micStream.getAudioTracks().forEach(A=>A.stop()),this._micStream=null;try{this._micStream=await navigator.mediaDevices.getUserMedia({audio:{deviceId:{exact:C.deviceId},echoCancellation:this.data.inputParams.echoCancellation,noiseSuppression:this.data.inputParams.noiseSuppression,autoGainControl:this.data.inputParams.autoGainControl,sampleRate:this.data.inputParams.sampleRate,channelCount:this.data.inputParams.channelCount}});const A=this._micStream.getAudioTracks()[0];this._micProducer.replaceTrack({track:A});let Q=this._producers.get("audio");return Q.deviceLabel=C.label,Q.track=this._micProducer.track,Wg.debug("Updated producer values are:%O",Q),this._producers.set("audio",Q),this.emit("micStart",{peerId:this.data.inputParams.peerId,audioTrack:this._micProducer.track,type:"local"}),{success:!0}}catch(pg){return Wg.error("Error while changing input:%O",pg),{success:!1,reason:"Couldn't change audio input",err:pg}}}toggleVB=async A=>{if(A&&this._localCamVideo.getVideoTracks()[0]){const A=await this.initializePipeline(this._localCamVideo.getVideoTracks()[0],{type:"blur"});if(Wg.debug("response is :%o, localVBTrack is:%O",A,this._localVBStream.getVideoTracks()[0]),A.success&&this._localVBStream.getVideoTracks()[0]){if(this._roomType===HC||this._roomType===bC)if(this._camVideoProducer&&store.getState().conf.joined){await this._camVideoProducer.replaceTrack({track:this._localVBStream.getVideoTracks()[0].clone()});let A={...this._participants,[this._peerId]:{...this._participants[this._peerId],videoRef:this._localVBStream.getVideoTracks()[0]}};this._participants=A,store.dispatch(confActions.addParticipant(A))}else Wg.debug("Camvideoproducer not available! virtual background changes in the landing page! ");else if(this._roomType===LC&&this._peerConnection){const A=this._peerConnection.getSenders().find(function(A){return"video"===A.track.kind});if(Wg.debug("found sender:%o",A),A&&this._localVBStream.getVideoTracks()[0]){A.replaceTrack(this._localVBStream.getVideoTracks()[0]);let Q={...this._participants,[this._peerId]:{...this._participants[this._peerId],videoRef:this._localVBStream.getVideoTracks()[0]}};this._participants=Q,store.dispatch(confActions.addParticipant(Q))}else this.showNotification("danger","Error!","Unable to switch off virtual background! 
Try again OR contact support with code:CM-FE-RC-VB-E04")}}else Wg.error("Virtual background procesing can't be enabled")}else if(this._localVBStream?.getVideoTracks()[0].stop(),this._localVBStream=null,this._pipelineManager.stop(),this._vbDetailsNew.sourcePlayback&&(this._vbDetailsNew.sourcePlayback.htmlElement.srcObject=null),this._vbDetailsNew.sourcePlayback?.htmlElement.remove(),this._vbDetailsNew?.hiddenCanvas.remove(),this._vbDetailsNew?.hiddenImage?.remove(),this._vbDetailsNew.sourcePlayback=null,this._vbDetailsNew.hiddenCanvas=null,this._vbDetailsNew.hiddenImage=null,store.dispatch(confActions.setVBItemsStatus(A)),Wg.debug("Garbage collection completed. Set the video to original video!"),store.getState().conf.videoStatus)if(this._roomType===HC||this._roomType===bC)if(this._camVideoProducer&&store.getState().conf.joined&&this._localCamVideo.getVideoTracks()[0]&&store.getState().conf.joined&&"live"===this._localCamVideo.getVideoTracks()[0].readyState){await this._camVideoProducer.replaceTrack({track:this._localCamVideo.getVideoTracks()[0].clone()});let A={...this._participants,[this._peerId]:{...this._participants[this._peerId],videoRef:this._localCamVideo.getVideoTracks()[0]}};this._participants=A,store.dispatch(confActions.addParticipant(A))}else Wg.debug("Camvideoproducer not available! virtual background changes in the landing page! ");else if(this._roomType===LC&&this._peerConnection){const A=this._peerConnection.getSenders().find(function(A){return"video"===A.track.kind});if(Wg.debug("found sender:%o",A),A&&this._localCamVideo.getVideoTracks()[0]&&"live"===this._localCamVideo.getVideoTracks()[0].readyState){A.replaceTrack(this._localCamVideo.getVideoTracks()[0]);let Q={...this._participants,[this._peerId]:{...this._participants[this._peerId],videoRef:this._localCamVideo.getVideoTracks()[0]}};this._participants=Q,store.dispatch(confActions.addParticipant(Q))}else this.showNotification("danger","Error!","Unable to switch off virtual background! Try again OR contact support with code:CM-FE-RC-VB-E04")}};setVBDetails=async A=>{if(this._vbDetails=A,this._roomType!==HC&&this._roomType!==bC||!store.getState().conf.joined){if(this._roomType===LC&&this._peerConnection){const Q=this._peerConnection.getSenders().find(function(A){return"video"===A.track.kind});Wg.debug("found sender:%o",Q),Q?Q.replaceTrack(A.stream.getVideoTracks()[0]):this.showNotification("danger","Error!","Unable to set virtual background! Try again OR contact support with code:CM-FE-RC-VB-E02")}}else if(this._camVideoProducer){Wg.debug("Going to replace the video track for cam video producer!");try{await this._camVideoProducer.replaceTrack({track:A.stream.getVideoTracks()[0]})}catch(pg){Wg.debug("vb set error",pg)}Wg.debug("all participants",this._participants),Wg.debug("this._localCamVideo",this._localCamVideo)}else Wg.warn("Camvideo producer is not available yet!")};async enableShare({shareAudio:A=!1,enableSharingLayers:Q=!0,shareBitRates:B=[2500,1250,500]}={}){if(Wg.debug("enableShare()"),!this.data.inputParams.produce)return Wg.debug("Produce status is set to false!"),{success:!1,error:!0,code:"REID003",text:"Error while trying to start screen share. Produce flag need to set to true while joining room in order to enable screen share."};if("connected"!==this._roomStatus)return Wg.debug("Room status is not connected yet!"),{success:!1,error:!0,code:"REID001",text:`Error while trying to start screen share as room not in connected status. 
Current room status:!${this._roomStatus}`,possibleReasons:"Did you forget to call joinRoom before enabling screen share? OR if you have already initiated the joinRoom process, then try enabling screen share after some seconds."};if(this._shareProducer)return Wg.debug("Screen share is already active!"),{success:!1,warning:!0,code:"RWID001",text:"Error while trying to start screen share. Screen share is already active!"};if(!this._device.canProduce("video"))return Wg.error("enableShare() | cannot produce video"),{success:!1,error:!0,code:"REID002",text:"Error while trying to start screen share. Screen share couldnot be activated due to limitations on this device. If you think this device is capable of screen share and the problem persists even after multiple retries, please contact technical support with the error code."};let I,U;this._enableSharingLayers="boolean"!=typeof Q?Boolean(Q):Q,Array.isArray(B)&&B.length>=1&&B.length<=3&&B.every(A=>Number.isInteger(A)&&A>=500&&A<=2500)?this.data.inputParams.shareBitRates=B:this.data.inputParams.shareBitRates=[2500,1250,500];try{Wg.debug("enableShare() | calling getDisplayMedia()");const Q=await navigator.mediaDevices.getDisplayMedia({audio:!!A,video:{displaySurface:"monitor",logicalSurface:!0,cursor:!0,width:{max:1920},height:{max:1080},frameRate:{max:30}}});if(!Q)return Wg.error("Unable to capture screen."),void this.emit("error",{code:"EID013",text:"Error while trying to start screen share. Not able to capture screen!"});let B,F;U=Q.getAudioTracks()[0],U&&(this._shareAudioProducer=await this._sendTransport.produce({track:U,codecOptions:this.data.inputParams.forcePCMU?void 0:{opusStereo:!1,opusDtx:!0,opusFec:!0,opusNack:!0},codec:this.data.inputParams.forcePCMU?this._device.rtpCapabilities.codecs.find(A=>"audio/pcmu"===A.mimeType.toLowerCase()):void 0,appData:{mediaTag:"screen-audio"}}),this._producers.set("ssAudio",{id:this._shareAudioProducer.id,type:"shareAudio",paused:this._shareAudioProducer.paused,track:this._shareAudioProducer.track,rtpParameters:this._shareAudioProducer.rtpParameters,codec:this._shareAudioProducer.rtpParameters.codecs[0].mimeType.split("/")[1]}),this.emit("ssAudioStart",{peerId:this.data.inputParams.peerId,audioTrack:this._shareAudioProducer.track,type:"local"})),I=Q.getVideoTracks()[0];const C={videoGoogleStartBitrate:1e3};if(this._forceVP8){if(F=this._device.rtpCapabilities.codecs.find(A=>"video/vp8"===A.mimeType.toLowerCase()),!F)throw new Error("desired VP8 codec+configuration is not supported")}else if(this._forceH264){if("high"===this.data.inputParams.h264Profile?F=this._device.rtpCapabilities.codecs.find(A=>"video/h264"===A.mimeType.toLowerCase()&&"4d001f"===A.parameters["profile-level-id"]):"low"===this.data.inputParams.h264Profile&&(F=this._device.rtpCapabilities.codecs.find(A=>"video/h264"===A.mimeType.toLowerCase()&&"42e01f"===A.parameters["profile-level-id"])),!F)throw new Error("desired H264 codec+configuration is not supported");Wg.debug("Selected h264 codec is:%O",F)}else if(this._forceVP9&&(F=this._device.rtpCapabilities.codecs.find(A=>"video/vp9"===A.mimeType.toLowerCase()),!F))throw new Error("desired VP9 codec+configuration is not supported");if(this._enableSharingLayers){const 
A=this._device.rtpCapabilities.codecs.find(A=>"video"===A.kind);this._forceVP9&&F||"video/vp9"===A.mimeType.toLowerCase()?B=[{maxBitrate:1e3*this.data.inputParams.shareBitRates[0],scalabilityMode:this._sharingScalabilityMode||"L3T3",dtx:!0}]:(B=[{scaleResolutionDownBy:1,maxBitrate:1e3*this.data.inputParams.shareBitRates[0],scalabilityMode:this._sharingScalabilityMode||"L1T3",dtx:!0}],this._numSimulcastStreams>1&&B.unshift({scaleResolutionDownBy:2,maxBitrate:1e3*this.data.inputParams.shareBitRates[1],scalabilityMode:this._sharingScalabilityMode||"L1T3",dtx:!0}),this._numSimulcastStreams>2&&B.unshift({scaleResolutionDownBy:4,maxBitrate:1e3*this.data.inputParams.shareBitRates[2],scalabilityMode:this._sharingScalabilityMode||"L1T3",dtx:!0}))}this._shareProducer=await this._sendTransport.produce({track:I,encodings:B,codecOptions:C,codec:F,appData:{mediaTag:"screen-video"}}),this._producers.set("ssVideo",{id:this._shareProducer.id,type:"shareVideo",paused:this._shareProducer.paused,track:this._shareProducer.track,rtpParameters:this._shareProducer.rtpParameters,codec:this._shareProducer.rtpParameters.codecs[0].mimeType.split("/")[1]}),this.emit("ssVideoStart",{peerId:this.data.inputParams.peerId,videoTrack:this._shareProducer.track,type:"local"}),this._shareProducer.on("transportclose",()=>{this._shareProducer=null}),this._shareProducer.on("trackended",()=>{this.disableShare().catch(()=>{})})}catch(F){Wg.error("enableShare() | failed:%o",F),"NotAllowedError"!==F.name&&this.emit("error",{code:"EID014",text:`Error while trying to start screen share. Error is: ${F}!`}),I&&I.stop()}}async disableShare(){if(Wg.debug("disableShare()"),!this._shareProducer)return Wg.warn("Screen share doesn't seem to be on!"),void this.emit("error",{code:"EID017",text:"Error while trying to stop screen share. Is the screen share on!"});if(this._shareProducer.close(),this._shareAudioProducer){this._shareAudioProducer.close();try{let Q={id:"closeProducerSDK",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,type:"audio",producerId:this._shareAudioProducer.id};this._sendMessage(Q),this.emit("ssAudioStop",{peerId:this.data.inputParams.peerId,videoTrack:null,type:"local"});try{this._shareAudioProducer.track&&this._shareAudioProducer.track.stop&&this._shareAudioProducer.track.stop()}catch(A){}}catch(Q){this.emit("error",{code:"EID015",text:`Error while trying to stop screen share audio. Error is: ${Q}!`})}}try{let Q={id:"closeProducerSDK",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,type:"video",producerId:this._shareProducer.id};this._sendMessage(Q),this.emit("ssVideoStop",{peerId:this.data.inputParams.peerId,videoTrack:null,type:"local"});try{this._shareProducer.track&&this._shareProducer.track.stop&&this._shareProducer.track.stop()}catch(A){}}catch(Q){this.emit("error",{code:"EID016",text:`Error while trying to stop screen share video. 
Error is: ${Q}!`})}this._shareAudioProducer=null,this._shareProducer=null}logMeOutNew=async()=>{try{Wg.debug("Room","inside log me out new"),this.emit("roomClosed",{roomId:this.data.inputParams.roomId,reason:"removed_by_moderator"})}catch(A){}await this.leaveRoom()};logThisUserOutOfMeeting=A=>{if(Wg.debug("Room","inside log this user out of meeting"),A===this.data.inputParams.peerId)Wg.debug("ConferenceRoom","logging myself Out"),this.leaveRoom();else try{var Q={id:"logThisUserOut",peerId:A,moderatorPeerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId};this._sendMessage(Q)}catch(B){Wg.error("Room",B)}}}const hg="cp2p-client";class Gg{constructor(A){A?(this._debug=MB(`${hg}:${A}`),this._info=MB(`${hg}:INFO:${A}`),this._warn=MB(`${hg}:WARN:${A}`),this._error=MB(`${hg}:ERROR:${A}`)):(this._debug=MB(hg),this._info=MB(`${hg}:INFO`),this._warn=MB(`${hg}:WARN`),this._error=MB(`${hg}:ERROR`)),this._debug.log=function(){}.bind(),this._info.log=function(){}.bind(),this._warn.log=function(){}.bind(),this._error.log=function(){}.bind()}get debug(){return this._debug}get info(){return this._info}get warn(){return this._warn}get error(){return this._error}}const Mg={audio:{deviceId:{exact:void 0}},video:!1},wg={video:{deviceId:{exact:void 0},width:{min:320,ideal:640,max:1280},height:{min:240,ideal:480,max:720},frameRate:{min:15,max:30}}},sg={audio:!0,video:{width:{min:320,ideal:1280,max:1280},height:{min:240,ideal:720,max:720},aspectRatio:1.777777778,frameRate:{min:15,max:30}}},Tg=new Gg("socket");class yg extends cB.EventEmitter{constructor({url:A,roomId:Q,peerId:B,peerName:I,role:U}){super(),Tg.debug("constructor():%o ",{url:A,roomId:Q,peerId:B,peerName:I,role:U}),this._closed=!1,this._params={url:A,roomId:Q,peerId:B,peerName:I,role:U},this._socket=null,this._connectionStatus=null,this._createSocket()}get closed(){return this._closed}get connectionStatus(){return this._connectionStatus}close(){if(!this._closed){Tg.debug("close()"),this._closed=!0,this.emit("close");try{this._socket.disconnect()}catch(A){Tg.error("close() | error closing the Socket:%o",A)}}}async send(A){if(this._closed)throw new Error("transport closed");try{this._socket.send(JSON.stringify(A))}catch(Q){throw Tg.warn("send() failed:%o",Q),Q}}async request({type:A,message:Q}){return new Promise(B=>{if(this._closed)throw new Error("transport closed");try{this._socket.emit(A,JSON.stringify(Q),A=>{B(A)})}catch(I){throw Tg.warn("emit() failed:%o",I),I}})}async _createSocket(){let A=this;const Q=io(this._params.url,{query:{roomId:this._params.roomId,peerId:this._params.peerId,peerName:this._params.peerName,role:this._params.role}});Q.on("connect",()=>{Tg.debug("Socket connected!!"),A._connectionStatus=!0,A.emit("connected")}),Q.on("disconnect",()=>{Tg.debug("Socket disconnected!!"),A._connectionStatus=!1,A.emit("disconnected")}),Q.on("reconnect",()=>{Tg.debug("Socket reconnected after disconnect!!"),Q.emit("reconnected")}),Q.on("message",Q=>{const B=JSON.parse(Q);Tg.debug("New mesage received with id:%s",B.type),A.emit("message",B)}),this._socket=Q}}
  /*!
  * Platform.js
  * Copyright 2014-2020 Benjamin Tan
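Editor's note: the bundle hunk above walks through the SDK's media surface (enableMic, enableCam, enableShare, startRecording, changeVideoInput, etc.) in minified form, so the call shapes are hard to read. The sketch below is a minimal, illustrative usage example only: the method names, option names, and event names are taken from the minified source above, but `roomClient` is a hypothetical stand-in for an already-constructed and joined room instance, since construction and joinRoom are not part of this hunk, and return shapes are simplified.

```js
// Hedged usage sketch, assuming `roomClient` is an SDK room instance that has
// already completed joinRoom (not shown in this diff hunk).
async function startLocalMedia(roomClient) {
  // Many methods report failures as { success: false, code, text } rather than throwing.
  roomClient.on('error', (e) => console.warn('sdk error', e.code, e.text));
  roomClient.on('micStart', ({ peerId, audioTrack }) => console.log('mic on for', peerId, audioTrack?.id));
  roomClient.on('videoStart', ({ peerId, videoTrack }) => console.log('cam on for', peerId, videoTrack?.id));

  // Audio: per the validation above, sampleRate must be an integer between
  // 8000 and 64000, channelCount 1 or 2; the boolean constraint flags are optional.
  await roomClient.enableMic({
    echoCancellation: true,
    noiseSuppression: true,
    autoGainControl: true,
  });

  // Video: videoResolution accepts 'hd' | 'vga' | 'qvga'; videoBitRates is an
  // array of 1–3 integers in the 75–800 kbps range when simulcast layers are enabled.
  await roomClient.enableCam({
    videoResolution: 'hd',
    enableWebcamLayers: true,
    numSimulcastStreams: 3,
    videoBitRates: [700, 250, 75],
  });

  // Screen share, optionally with tab/system audio.
  await roomClient.enableShare({ shareAudio: true });

  // Server-side recording: outputType may be 'hls' or 'mp4'; on success the
  // method returns nothing and emits 'recordingStarted' later.
  const rec = roomClient.startRecording({ recordingType: 'audioVideo', outputType: 'hls' });
  if (rec && rec.success === false) console.warn('recording rejected:', rec.reason);
}
```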
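The same hunk also introduces two captioning helpers: startSpeechRecognition(), which wraps the browser Web Speech API for the local microphone, and startRemoteCaption(), which loads a Vosk model (en-US by default, with es-ES and fr-FR variants in this build) and transcribes the active remote speaker. The sketch below again assumes a hypothetical `roomClient` instance; method, option, and event names come from the minified source above.

```js
// Hedged sketch of the caption helpers visible in this diff.
function enableCaptions(roomClient) {
  // Local speech-to-text (requires a browser exposing (webkit)SpeechRecognition).
  const stt = roomClient.startSpeechRecognition({
    lang: 'en-US',
    interimResults: true,
    autoRestart: true,
  });
  if (!stt.success) console.warn('local STT unavailable:', stt.reason);

  roomClient.on('sttResult', ({ transcript, isFinal }) => {
    if (isFinal) console.log('local caption:', transcript);
  });
  roomClient.on('sttError', (e) => console.warn('stt error', e.code, e.message));

  // Remote captions follow the active speaker; the Vosk model is fetched from
  // one of the /models/ paths probed in the source above.
  roomClient.startRemoteCaption({ lang: 'en-US' }).then((res) => {
    if (!res.success) console.warn('remote captions failed:', res.reason);
  });
  roomClient.on('remoteCaption', ({ peerId, text, isFinal }) => {
    console.log(`[${peerId}]${isFinal ? '' : ' (partial)'} ${text}`);
  });
}

// Teardown, when captions are no longer needed:
// roomClient.stopSpeechRecognition();
// roomClient.stopRemoteCaption({ unloadModel: true });
```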
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "samvyo-js-sdk",
- "version": "2.0.19",
+ "version": "2.0.20",
  "description": "This is the client js sdk for cutting-edge Samvyo real-time voice/video cloud.",
  "repository": "git@github.com:sauravkp/VidScale-client-js.git",
  "author": "sauravkp <saurav.codes@gmail.com>",