samvyo-js-sdk 2.0.34-test.1 → 2.0.34-test.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
@@ -222,4 +222,4 @@ function A(A){return A&&A.__esModule&&Object.prototype.hasOwnProperty.call(A,"de
  personMask = smoothstep(u_coverage.x, u_coverage.y, personMask);
  outColor = vec4(frameColor * personMask + backgroundColor * (1.0 - personMask), 1.0);
  }
- `,{width:D,height:n}=B,o=D/n,s=Ci(A,A.VERTEX_SHADER,E),w=Ci(A,A.FRAGMENT_SHADER,i),a=gi(A,s,w,I,g),y=A.getUniformLocation(a,"u_backgroundScale"),G=A.getUniformLocation(a,"u_backgroundOffset"),R=A.getUniformLocation(a,"u_inputFrame"),S=A.getUniformLocation(a,"u_personMask"),h=A.getUniformLocation(a,"u_background"),N=A.getUniformLocation(a,"u_coverage"),K=A.getUniformLocation(a,"u_lightWrapping"),M=A.getUniformLocation(a,"u_blendMode");A.useProgram(a),A.uniform2f(y,1,1),A.uniform2f(G,0,0),A.uniform1i(R,0),A.uniform1i(S,1),A.uniform2f(N,0,1),A.uniform1f(K,0),A.uniform1f(M,0);let F=null;function L(I){F=Qi(A,A.RGBA8,I.naturalWidth,I.naturalHeight,A.LINEAR,A.LINEAR),A.texSubImage2D(A.TEXTURE_2D,0,0,0,I.naturalWidth,I.naturalHeight,A.RGBA,A.UNSIGNED_BYTE,I);let g=0,C=0,Q=I.naturalWidth,B=I.naturalHeight;Q/B<o?(B=Q/o,C=(I.naturalHeight-B)/2):(Q=B*o,g=(I.naturalWidth-Q)/2);const E=Q/I.naturalWidth,i=B/I.naturalHeight;g/=I.naturalWidth,C/=I.naturalHeight,A.uniform2f(y,E,i),A.uniform2f(G,g,C)}return Q?.complete?L(Q):Q&&(Q.onload=()=>{L(Q)}),{render:function(){A.viewport(0,0,D,n),A.useProgram(a),A.activeTexture(A.TEXTURE1),A.bindTexture(A.TEXTURE_2D,C),null!==F&&(A.activeTexture(A.TEXTURE2),A.bindTexture(A.TEXTURE_2D,F),A.uniform1i(h,2)),A.bindFramebuffer(A.FRAMEBUFFER,null),A.drawArrays(A.TRIANGLE_STRIP,0,4)},updateCoverage:function(I){A.useProgram(a),A.uniform2f(N,I[0],I[1])},updateLightWrapping:function(I){A.useProgram(a),A.uniform1f(K,I)},updateBlendMode:function(I){A.useProgram(a),A.uniform1f(M,"screen"===I?0:1)},cleanUp:function(){A.deleteTexture(F),A.deleteProgram(a),A.deleteShader(w),A.deleteShader(s)}}}(a,R,S,K,I,Q);return{render:async function(){a.activeTexture(a.TEXTURE0),a.bindTexture(a.TEXTURE_2D,h),a.texImage2D(a.TEXTURE_2D,0,a.RGBA,a.RGBA,a.UNSIGNED_BYTE,A.htmlElement),a.bindVertexArray(G),await M.render(),i(),B._runInference(),i(),F.render(),L.render(),k.render()},updatePostProcessingConfig:function(A){if(L.updateSigmaSpace(A.jointBilateralFilter.sigmaSpace),L.updateSigmaColor(A.jointBilateralFilter.sigmaColor),"image"===g.type){const I=k;I.updateCoverage(A.coverage),I.updateLightWrapping(A.lightWrapping),I.updateBlendMode(A.blendMode)}else if("blur"===g.type)k.updateCoverage(A.coverage);else{const A=k;A.updateCoverage([0,.9999]),A.updateLightWrapping(0)}},cleanUp:function(){k.cleanUp(),L.cleanUp(),F.cleanUp(),M.cleanUp(),a.deleteTexture(K),a.deleteTexture(N),a.deleteTexture(h),a.deleteBuffer(S),a.deleteBuffer(R),a.deleteVertexArray(G),a.deleteShader(y)}}}class Ei{constructor(){this.pipeline=null,this.backgroundImageRef=null,this.canvasRef=null,this.fps=0,this.durations=[],this.isRunning=!1,this.timerWorker=null,this.renderTimeoutId=null,this.previousTime=0,this.beginTime=0,this.eventCount=0,this.frameCount=0,this.frameDurations=[]}async initialize(A,I,g,C,Q,B=null,E=null){this.stop(),this.backgroundImageRef=B,this.canvasRef=E;const i=1e3/g.targetFps;this.previousTime=0,this.beginTime=0,this.eventCount=0,this.frameCount=0,this.frameDurations=[],this.timerWorker=function(){const A=new Map,I=new Blob(["\n const timeoutIds = new Map();\n \n addEventListener('message', (event) => {\n if (event.data.timeoutMs !== undefined) {\n const timeoutId = setTimeout(() => {\n postMessage({ callbackId: event.data.callbackId });\n timeoutIds.delete(event.data.callbackId);\n }, event.data.timeoutMs);\n timeoutIds.set(event.data.callbackId, timeoutId);\n } else {\n const timeoutId = timeoutIds.get(event.data.callbackId);\n if (timeoutId !== undefined) {\n clearTimeout(timeoutId);\n 
timeoutIds.delete(event.data.callbackId);\n }\n }\n });\n "],{type:"application/javascript"}),g=new Worker(URL.createObjectURL(I));g.onmessage=I=>{const g=A.get(I.data.callbackId);g&&(A.delete(I.data.callbackId),g())};let C=1;return{setTimeout:function(I,Q=0){const B=C++;return A.set(B,I),g.postMessage({callbackId:B,timeoutMs:Q}),B},clearTimeout:function(I){A.has(I)&&(g.postMessage({callbackId:I}),A.delete(I))},terminate:function(){A.clear(),g.terminate()}}}(),this.pipeline="webgl2"===g.pipeline?Bi(A,this.backgroundImageRef,I,g,this.canvasRef,Q,this.timerWorker,this.addFrameEvent.bind(this)):function(A,I,g,C,Q,B,E){const i=C.getContext("2d"),[D,n]=XE[g.inputResolution],o=D*n,s=new ImageData(D,n),w=document.createElement("canvas");w.width=D,w.height=n;const a=w.getContext("2d"),y=B._getInputMemoryOffset()/4,G=B._getOutputMemoryOffset()/4;let R;return{render:async function(){"none"!==I.type&&function(){if(a.drawImage(A.htmlElement,0,0,A.width,A.height,0,0,D,n),"meet"===g.model||"mlkit"===g.model){const A=a.getImageData(0,0,D,n);for(let I=0;I<o;I++)B.HEAPF32[y+3*I]=A.data[4*I]/255,B.HEAPF32[y+3*I+1]=A.data[4*I+1]/255,B.HEAPF32[y+3*I+2]=A.data[4*I+2]/255}}(),E(),"none"!==I.type&&("bodyPix"===g.model?await async function(){const A=await Q.segmentPerson(w);for(let I=0;I<o;I++)s.data[4*I+3]=A.data[I]?255:0;a.putImageData(s,0,0)}():function(){B._runInference();for(let A=0;A<o;A++)if("meet"===g.model){const I=B.HEAPF32[G+2*A],g=B.HEAPF32[G+2*A+1],C=Math.max(I,g),Q=Math.exp(I-C),E=Math.exp(g-C);s.data[4*A+3]=255*E/(Q+E)}else if("mlkit"===g.model){const I=B.HEAPF32[G+A];s.data[4*A+3]=255*I}a.putImageData(s,0,0)}()),E(),i.globalCompositeOperation="copy",i.filter="none",R?.smoothSegmentationMask&&("blur"===I.type?i.filter="blur(8px)":"image"===I.type&&(i.filter="blur(4px)")),"none"!==I.type&&(i.drawImage(w,0,0,D,n,0,0,A.width,A.height),i.globalCompositeOperation="source-in",i.filter="none"),i.drawImage(A.htmlElement,0,0),"blur"===I.type&&(i.globalCompositeOperation="destination-over",i.filter="blur(8px)",i.drawImage(A.htmlElement,0,0))},updatePostProcessingConfig:function(A){R=A},cleanUp:function(){}}}(A,I,g,this.canvasRef,C,Q,this.addFrameEvent.bind(this));const D=async()=>{if(!this.isRunning)return;const A=performance.now();this.beginFrame(),await this.pipeline.render(),this.endFrame(),this.renderTimeoutId=this.timerWorker.setTimeout(D,Math.max(0,i-(performance.now()-A)))};return this.isRunning=!0,D(),{pipeline:this.pipeline,backgroundImageRef:this.backgroundImageRef,canvasRef:this.canvasRef,fps:this.fps,durations:this.getProcessingDurations()}}beginFrame(){this.beginTime=Date.now()}addFrameEvent(){const A=Date.now();this.frameDurations[this.eventCount]=A-this.beginTime,this.beginTime=A,this.eventCount++}endFrame(){const A=Date.now();this.frameDurations[this.eventCount]=A-this.beginTime,this.frameCount++,A>=this.previousTime+1e3&&(this.fps=1e3*this.frameCount/(A-this.previousTime),this.durations=[...this.frameDurations],this.previousTime=A,this.frameCount=0),this.eventCount=0}getProcessingDurations(){return 
this.frameDurations.length>=3?[this.frameDurations[0]||0,this.frameDurations[1]||0,this.frameDurations[2]||0]:[0,0,0]}stop(){this.isRunning=!1,this.timerWorker&&this.renderTimeoutId&&this.timerWorker.clearTimeout(this.renderTimeoutId),this.timerWorker&&(this.timerWorker.terminate(),this.timerWorker=null),this.pipeline&&(this.pipeline.cleanUp(),this.pipeline=null),this.renderTimeoutId=null}getState(){return{pipeline:this.pipeline,backgroundImageRef:this.backgroundImageRef,canvasRef:this.canvasRef,fps:this.fps,durations:this.getProcessingDurations(),isRunning:this.isRunning}}getFps(){return this.fps}getDurations(){return this.durations}isActive(){return this.isRunning&&null!==this.pipeline}async updateConfig(A,I,g,C,Q){return this.initialize(A,I,g,C,Q,this.backgroundImageRef,this.canvasRef)}destroy(){this.stop(),this.backgroundImageRef=null,this.canvasRef=null,this.fps=0,this.durations=[]}}const ii=new Gg("Room"),Di=new Set(["240p","360p","480p","720p","1080p","1440p","2160p"]);function ni(A){return!!Array.isArray(A)&&A.every(A=>Di.has(A))}const oi={audio:!1,video:{displaySurface:"monitor",logicalSurface:!0,cursor:!0,width:{max:1920},height:{max:1080},frameRate:{max:30}}},si={audio:{deviceId:{exact:void 0}},video:!1},wi={audio:!1,video:{deviceId:{exact:void 0},width:{min:320,ideal:1280,max:1280},height:{min:240,ideal:720,max:720},aspectRatio:1.777777778,frameRate:{min:15,max:30}}},ai=new Gg("utils-verifyFiles"),yi=["mp4"];function Gi(A,I){try{return new URL(A).pathname.split(".").pop().toLowerCase()===I.toLowerCase()}catch{return!1}}const Ri=new TextEncoder,Si=new TextDecoder;function hi(A){if(Uint8Array.fromBase64)return Uint8Array.fromBase64("string"==typeof A?A:Si.decode(A),{alphabet:"base64url"});let I=A;I instanceof Uint8Array&&(I=Si.decode(I)),I=I.replace(/-/g,"+").replace(/_/g,"/").replace(/\s/g,"");try{return function(A){if(Uint8Array.fromBase64)return Uint8Array.fromBase64(A);const I=atob(A),g=new Uint8Array(I.length);for(let A=0;A<I.length;A++)g[A]=I.charCodeAt(A);return g}(I)}catch{throw new TypeError("The input to be decoded is not correctly encoded.")}}class Ni extends Error{static code="ERR_JOSE_GENERIC";code="ERR_JOSE_GENERIC";constructor(A,I){super(A,I),this.name=this.constructor.name,Error.captureStackTrace?.(this,this.constructor)}}class Ki extends Ni{static code="ERR_JWT_CLAIM_VALIDATION_FAILED";code="ERR_JWT_CLAIM_VALIDATION_FAILED";claim;reason;payload;constructor(A,I,g="unspecified",C="unspecified"){super(A,{cause:{claim:g,reason:C,payload:I}}),this.claim=g,this.reason=C,this.payload=I}}class Mi extends Ni{static code="ERR_JWT_EXPIRED";code="ERR_JWT_EXPIRED";claim;reason;payload;constructor(A,I,g="unspecified",C="unspecified"){super(A,{cause:{claim:g,reason:C,payload:I}}),this.claim=g,this.reason=C,this.payload=I}}class Fi extends Ni{static code="ERR_JOSE_ALG_NOT_ALLOWED";code="ERR_JOSE_ALG_NOT_ALLOWED"}class Li extends Ni{static code="ERR_JOSE_NOT_SUPPORTED";code="ERR_JOSE_NOT_SUPPORTED"}class ki extends Ni{static code="ERR_JWS_INVALID";code="ERR_JWS_INVALID"}class Ji extends Ni{static code="ERR_JWT_INVALID";code="ERR_JWT_INVALID"}class Ui extends Ni{static code="ERR_JWS_SIGNATURE_VERIFICATION_FAILED";code="ERR_JWS_SIGNATURE_VERIFICATION_FAILED";constructor(A="signature verification failed",I){super(A,I)}}function ti(A,I="algorithm.name"){return new TypeError(`CryptoKey does not support this operation, its ${I} must be ${A}`)}function qi(A,I){return A.name===I}function ri(A){return parseInt(A.name.slice(4),10)}function 
ci(A,I,...g){if((g=g.filter(Boolean)).length>2){const I=g.pop();A+=`one of type ${g.join(", ")}, or ${I}.`}else 2===g.length?A+=`one of type ${g[0]} or ${g[1]}.`:A+=`of type ${g[0]}.`;return null==I?A+=` Received ${I}`:"function"==typeof I&&I.name?A+=` Received function ${I.name}`:"object"==typeof I&&null!=I&&I.constructor?.name&&(A+=` Received an instance of ${I.constructor.name}`),A}function Hi(A,I,...g){return ci(`Key for the ${A} algorithm must be `,I,...g)}function ei(A){return"CryptoKey"===A?.[Symbol.toStringTag]}function Yi(A){return"KeyObject"===A?.[Symbol.toStringTag]}const di=A=>ei(A)||Yi(A),bi=A=>{if("object"!=typeof(I=A)||null===I||"[object Object]"!==Object.prototype.toString.call(A))return!1;var I;if(null===Object.getPrototypeOf(A))return!0;let g=A;for(;null!==Object.getPrototypeOf(g);)g=Object.getPrototypeOf(g);return Object.getPrototypeOf(A)===g};function xi(A){return bi(A)&&"string"==typeof A.kty}let ui;const pi=async(A,I,g,C=!1)=>{ui||=new WeakMap;let Q=ui.get(A);if(Q?.[g])return Q[g];const B=await(async A=>{if(!A.alg)throw new TypeError('"alg" argument is required when "jwk.alg" is not present');const{algorithm:I,keyUsages:g}=function(A){let I,g;switch(A.kty){case"AKP":switch(A.alg){case"ML-DSA-44":case"ML-DSA-65":case"ML-DSA-87":I={name:A.alg},g=A.priv?["sign"]:["verify"];break;default:throw new Li('Invalid or unsupported JWK "alg" (Algorithm) Parameter value')}break;case"RSA":switch(A.alg){case"PS256":case"PS384":case"PS512":I={name:"RSA-PSS",hash:`SHA-${A.alg.slice(-3)}`},g=A.d?["sign"]:["verify"];break;case"RS256":case"RS384":case"RS512":I={name:"RSASSA-PKCS1-v1_5",hash:`SHA-${A.alg.slice(-3)}`},g=A.d?["sign"]:["verify"];break;case"RSA-OAEP":case"RSA-OAEP-256":case"RSA-OAEP-384":case"RSA-OAEP-512":I={name:"RSA-OAEP",hash:`SHA-${parseInt(A.alg.slice(-3),10)||1}`},g=A.d?["decrypt","unwrapKey"]:["encrypt","wrapKey"];break;default:throw new Li('Invalid or unsupported JWK "alg" (Algorithm) Parameter value')}break;case"EC":switch(A.alg){case"ES256":I={name:"ECDSA",namedCurve:"P-256"},g=A.d?["sign"]:["verify"];break;case"ES384":I={name:"ECDSA",namedCurve:"P-384"},g=A.d?["sign"]:["verify"];break;case"ES512":I={name:"ECDSA",namedCurve:"P-521"},g=A.d?["sign"]:["verify"];break;case"ECDH-ES":case"ECDH-ES+A128KW":case"ECDH-ES+A192KW":case"ECDH-ES+A256KW":I={name:"ECDH",namedCurve:A.crv},g=A.d?["deriveBits"]:[];break;default:throw new Li('Invalid or unsupported JWK "alg" (Algorithm) Parameter value')}break;case"OKP":switch(A.alg){case"Ed25519":case"EdDSA":I={name:"Ed25519"},g=A.d?["sign"]:["verify"];break;case"ECDH-ES":case"ECDH-ES+A128KW":case"ECDH-ES+A192KW":case"ECDH-ES+A256KW":I={name:A.crv},g=A.d?["deriveBits"]:[];break;default:throw new Li('Invalid or unsupported JWK "alg" (Algorithm) Parameter value')}break;default:throw new Li('Invalid or unsupported JWK "kty" (Key Type) Parameter value')}return{algorithm:I,keyUsages:g}}(A),C={...A};return"AKP"!==C.kty&&delete C.alg,delete C.use,crypto.subtle.importKey("jwk",C,I,A.ext??(!A.d&&!A.priv),A.key_ops??g)})({...I,alg:g});return C&&Object.freeze(A),Q?Q[g]=B:ui.set(A,{[g]:B}),B},Ti=A=>A?.[Symbol.toStringTag],mi=(A,I,g)=>{if(void 0!==I.use){let A;switch(g){case"sign":case"verify":A="sig";break;case"encrypt":case"decrypt":A="enc"}if(I.use!==A)throw new TypeError(`Invalid key for this operation, its "use" must be "${A}" when present`)}if(void 0!==I.alg&&I.alg!==A)throw new TypeError(`Invalid key for this operation, its "alg" must be "${A}" when present`);if(Array.isArray(I.key_ops)){let C;switch(!0){case"verify"===g:case"dir"===A:case 
A.includes("CBC-HS"):C=g;break;case A.startsWith("PBES2"):C="deriveBits";break;case/^A\d{3}(?:GCM)?(?:KW)?$/.test(A):C=!A.includes("GCM")&&A.endsWith("KW")?"unwrapKey":g;break;case"encrypt"===g:C="wrapKey";break;case"decrypt"===g:C=A.startsWith("RSA")?"unwrapKey":"deriveBits"}if(C&&!1===I.key_ops?.includes?.(C))throw new TypeError(`Invalid key for this operation, its "key_ops" must include "${C}" when present`)}return!0},li=async(A,I,g)=>{if(I instanceof Uint8Array){if(!A.startsWith("HS"))throw new TypeError(ci("Key must be ",I,"CryptoKey","KeyObject","JSON Web Key"));return crypto.subtle.importKey("raw",I,{hash:`SHA-${A.slice(-3)}`,name:"HMAC"},!1,[g])}return function(A,I,g){switch(I){case"HS256":case"HS384":case"HS512":{if(!qi(A.algorithm,"HMAC"))throw ti("HMAC");const g=parseInt(I.slice(2),10);if(ri(A.algorithm.hash)!==g)throw ti(`SHA-${g}`,"algorithm.hash");break}case"RS256":case"RS384":case"RS512":{if(!qi(A.algorithm,"RSASSA-PKCS1-v1_5"))throw ti("RSASSA-PKCS1-v1_5");const g=parseInt(I.slice(2),10);if(ri(A.algorithm.hash)!==g)throw ti(`SHA-${g}`,"algorithm.hash");break}case"PS256":case"PS384":case"PS512":{if(!qi(A.algorithm,"RSA-PSS"))throw ti("RSA-PSS");const g=parseInt(I.slice(2),10);if(ri(A.algorithm.hash)!==g)throw ti(`SHA-${g}`,"algorithm.hash");break}case"Ed25519":case"EdDSA":if(!qi(A.algorithm,"Ed25519"))throw ti("Ed25519");break;case"ML-DSA-44":case"ML-DSA-65":case"ML-DSA-87":if(!qi(A.algorithm,I))throw ti(I);break;case"ES256":case"ES384":case"ES512":{if(!qi(A.algorithm,"ECDSA"))throw ti("ECDSA");const g=function(A){switch(A){case"ES256":return"P-256";case"ES384":return"P-384";case"ES512":return"P-521";default:throw new Error("unreachable")}}(I);if(A.algorithm.namedCurve!==g)throw ti(g,"algorithm.namedCurve");break}default:throw new TypeError("CryptoKey does not support this operation")}!function(A,I){if(!A.usages.includes(I))throw new TypeError(`CryptoKey does not support this operation, its usages must include ${I}.`)}(A,g)}(I,A,g),I};async function Oi(A,I,g){if(!bi(A))throw new ki("Flattened JWS must be an object");if(void 0===A.protected&&void 0===A.header)throw new ki('Flattened JWS must have either of the "protected" or "header" members');if(void 0!==A.protected&&"string"!=typeof A.protected)throw new ki("JWS Protected Header incorrect type");if(void 0===A.payload)throw new ki("JWS Payload missing");if("string"!=typeof A.signature)throw new ki("JWS Signature missing or incorrect type");if(void 0!==A.header&&!bi(A.header))throw new ki("JWS Unprotected Header incorrect type");let C={};if(A.protected)try{const I=hi(A.protected);C=JSON.parse(Si.decode(I))}catch{throw new ki("JWS Protected Header is invalid")}if(!((...A)=>{const I=A.filter(Boolean);if(0===I.length||1===I.length)return!0;let g;for(const A of I){const I=Object.keys(A);if(g&&0!==g.size)for(const A of I){if(g.has(A))return!1;g.add(A)}else g=new Set(I)}return!0})(C,A.header))throw new ki("JWS Protected and JWS Unprotected Header Parameter names must be disjoint");const Q={...C,...A.header},B=((A,I,g,C,Q)=>{if(void 0!==Q.crit&&void 0===C?.crit)throw new A('"crit" (Critical) Header Parameter MUST be integrity protected');if(!C||void 0===C.crit)return new Set;if(!Array.isArray(C.crit)||0===C.crit.length||C.crit.some(A=>"string"!=typeof A||0===A.length))throw new A('"crit" (Critical) Header Parameter MUST be an array of non-empty strings when present');let B;B=void 0!==g?new Map([...Object.entries(g),...I.entries()]):I;for(const I of C.crit){if(!B.has(I))throw new Li(`Extension Header Parameter "${I}" is not 
recognized`);if(void 0===Q[I])throw new A(`Extension Header Parameter "${I}" is missing`);if(B.get(I)&&void 0===C[I])throw new A(`Extension Header Parameter "${I}" MUST be integrity protected`)}return new Set(C.crit)})(ki,new Map([["b64",!0]]),g?.crit,C,Q);let E=!0;if(B.has("b64")&&(E=C.b64,"boolean"!=typeof E))throw new ki('The "b64" (base64url-encode payload) Header Parameter must be a boolean');const{alg:i}=Q;if("string"!=typeof i||!i)throw new ki('JWS "alg" (Algorithm) Header Parameter missing or invalid');const D=g&&((A,I)=>{if(void 0!==I&&(!Array.isArray(I)||I.some(A=>"string"!=typeof A)))throw new TypeError('"algorithms" option must be an array of strings');if(I)return new Set(I)})(0,g.algorithms);if(D&&!D.has(i))throw new Fi('"alg" (Algorithm) Header Parameter value not allowed');if(E){if("string"!=typeof A.payload)throw new ki("JWS Payload must be a string")}else if("string"!=typeof A.payload&&!(A.payload instanceof Uint8Array))throw new ki("JWS Payload must be a string or an Uint8Array instance");let n=!1;"function"==typeof I&&(I=await I(C,A),n=!0),((A,I,g)=>{A.startsWith("HS")||"dir"===A||A.startsWith("PBES2")||/^A(?:128|192|256)(?:GCM)?(?:KW)?$/.test(A)||/^A(?:128|192|256)CBC-HS(?:256|384|512)$/.test(A)?((A,I,g)=>{if(!(I instanceof Uint8Array)){if(xi(I)){if(function(A){return"oct"===A.kty&&"string"==typeof A.k}(I)&&mi(A,I,g))return;throw new TypeError('JSON Web Key for symmetric algorithms must have JWK "kty" (Key Type) equal to "oct" and the JWK "k" (Key Value) present')}if(!di(I))throw new TypeError(Hi(A,I,"CryptoKey","KeyObject","JSON Web Key","Uint8Array"));if("secret"!==I.type)throw new TypeError(`${Ti(I)} instances for symmetric algorithms must be of type "secret"`)}})(A,I,g):((A,I,g)=>{if(xi(I))switch(g){case"decrypt":case"sign":if(function(A){return"oct"!==A.kty&&("AKP"===A.kty&&"string"==typeof A.priv||"string"==typeof A.d)}(I)&&mi(A,I,g))return;throw new TypeError("JSON Web Key for this operation be a private JWK");case"encrypt":case"verify":if(function(A){return"oct"!==A.kty&&void 0===A.d&&void 0===A.priv}(I)&&mi(A,I,g))return;throw new TypeError("JSON Web Key for this operation be a public JWK")}if(!di(I))throw new TypeError(Hi(A,I,"CryptoKey","KeyObject","JSON Web Key"));if("secret"===I.type)throw new TypeError(`${Ti(I)} instances for asymmetric algorithms must not be of type "secret"`);if("public"===I.type)switch(g){case"sign":throw new TypeError(`${Ti(I)} instances for asymmetric algorithm signing must be of type "private"`);case"decrypt":throw new TypeError(`${Ti(I)} instances for asymmetric algorithm decryption must be of type "private"`)}if("private"===I.type)switch(g){case"verify":throw new TypeError(`${Ti(I)} instances for asymmetric algorithm verifying must be of type "public"`);case"encrypt":throw new TypeError(`${Ti(I)} instances for asymmetric algorithm encryption must be of type "public"`)}})(A,I,g)})(i,I,"verify");const o=function(...A){const I=A.reduce((A,{length:I})=>A+I,0),g=new Uint8Array(I);let C=0;for(const I of A)g.set(I,C),C+=I.length;return g}(Ri.encode(A.protected??""),Ri.encode("."),"string"==typeof A.payload?Ri.encode(A.payload):A.payload);let s;try{s=hi(A.signature)}catch{throw new ki("Failed to base64url decode the signature")}const w=await(async(A,I)=>{if(A instanceof Uint8Array)return A;if(ei(A))return A;if(Yi(A)){if("secret"===A.type)return A.export();if("toCryptoKey"in A&&"function"==typeof A.toCryptoKey)try{return((A,I)=>{ui||=new WeakMap;let g=ui.get(A);if(g?.[I])return g[I];const C="public"===A.type,Q=!!C;let 
B;if("x25519"===A.asymmetricKeyType){switch(I){case"ECDH-ES":case"ECDH-ES+A128KW":case"ECDH-ES+A192KW":case"ECDH-ES+A256KW":break;default:throw new TypeError("given KeyObject instance cannot be used for this algorithm")}B=A.toCryptoKey(A.asymmetricKeyType,Q,C?[]:["deriveBits"])}if("ed25519"===A.asymmetricKeyType){if("EdDSA"!==I&&"Ed25519"!==I)throw new TypeError("given KeyObject instance cannot be used for this algorithm");B=A.toCryptoKey(A.asymmetricKeyType,Q,[C?"verify":"sign"])}switch(A.asymmetricKeyType){case"ml-dsa-44":case"ml-dsa-65":case"ml-dsa-87":if(I!==A.asymmetricKeyType.toUpperCase())throw new TypeError("given KeyObject instance cannot be used for this algorithm");B=A.toCryptoKey(A.asymmetricKeyType,Q,[C?"verify":"sign"])}if("rsa"===A.asymmetricKeyType){let g;switch(I){case"RSA-OAEP":g="SHA-1";break;case"RS256":case"PS256":case"RSA-OAEP-256":g="SHA-256";break;case"RS384":case"PS384":case"RSA-OAEP-384":g="SHA-384";break;case"RS512":case"PS512":case"RSA-OAEP-512":g="SHA-512";break;default:throw new TypeError("given KeyObject instance cannot be used for this algorithm")}if(I.startsWith("RSA-OAEP"))return A.toCryptoKey({name:"RSA-OAEP",hash:g},Q,C?["encrypt"]:["decrypt"]);B=A.toCryptoKey({name:I.startsWith("PS")?"RSA-PSS":"RSASSA-PKCS1-v1_5",hash:g},Q,[C?"verify":"sign"])}if("ec"===A.asymmetricKeyType){const g=new Map([["prime256v1","P-256"],["secp384r1","P-384"],["secp521r1","P-521"]]).get(A.asymmetricKeyDetails?.namedCurve);if(!g)throw new TypeError("given KeyObject instance cannot be used for this algorithm");"ES256"===I&&"P-256"===g&&(B=A.toCryptoKey({name:"ECDSA",namedCurve:g},Q,[C?"verify":"sign"])),"ES384"===I&&"P-384"===g&&(B=A.toCryptoKey({name:"ECDSA",namedCurve:g},Q,[C?"verify":"sign"])),"ES512"===I&&"P-521"===g&&(B=A.toCryptoKey({name:"ECDSA",namedCurve:g},Q,[C?"verify":"sign"])),I.startsWith("ECDH-ES")&&(B=A.toCryptoKey({name:"ECDH",namedCurve:g},Q,C?[]:["deriveBits"]))}if(!B)throw new TypeError("given KeyObject instance cannot be used for this algorithm");return g?g[I]=B:ui.set(A,{[I]:B}),B})(A,I)}catch(A){if(A instanceof TypeError)throw A}let g=A.export({format:"jwk"});return pi(A,g,I)}if(xi(A))return A.k?hi(A.k):pi(A,A,I,!0);throw new Error("unreachable")})(I,i);if(!await(async(A,I,g,C)=>{const Q=await li(A,I,"verify");((A,I)=>{if(A.startsWith("RS")||A.startsWith("PS")){const{modulusLength:g}=I.algorithm;if("number"!=typeof g||g<2048)throw new TypeError(`${A} requires key modulusLength to be 2048 bits or larger`)}})(A,Q);const B=((A,I)=>{const g=`SHA-${A.slice(-3)}`;switch(A){case"HS256":case"HS384":case"HS512":return{hash:g,name:"HMAC"};case"PS256":case"PS384":case"PS512":return{hash:g,name:"RSA-PSS",saltLength:parseInt(A.slice(-3),10)>>3};case"RS256":case"RS384":case"RS512":return{hash:g,name:"RSASSA-PKCS1-v1_5"};case"ES256":case"ES384":case"ES512":return{hash:g,name:"ECDSA",namedCurve:I.namedCurve};case"Ed25519":case"EdDSA":return{name:"Ed25519"};case"ML-DSA-44":case"ML-DSA-65":case"ML-DSA-87":return{name:A};default:throw new Li(`alg ${A} is not supported either by JOSE or your javascript runtime`)}})(A,Q.algorithm);try{return await crypto.subtle.verify(B,Q,g,C)}catch{return!1}})(i,w,s,o))throw new Ui;let a;if(E)try{a=hi(A.payload)}catch{throw new ki("Failed to base64url decode the payload")}else a="string"==typeof A.payload?Ri.encode(A.payload):A.payload;const y={payload:a};return void 0!==A.protected&&(y.protectedHeader=C),void 0!==A.header&&(y.unprotectedHeader=A.header),n?{...y,key:w}:y}const fi=/^(\+|\-)? 
?(\d+|\d+\.\d+) ?(seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)(?: (ago|from now))?$/i,Pi=A=>{const I=fi.exec(A);if(!I||I[4]&&I[1])throw new TypeError("Invalid time period format");const g=parseFloat(I[2]);let C;switch(I[3].toLowerCase()){case"sec":case"secs":case"second":case"seconds":case"s":C=Math.round(g);break;case"minute":case"minutes":case"min":case"mins":case"m":C=Math.round(60*g);break;case"hour":case"hours":case"hr":case"hrs":case"h":C=Math.round(3600*g);break;case"day":case"days":case"d":C=Math.round(86400*g);break;case"week":case"weeks":case"w":C=Math.round(604800*g);break;default:C=Math.round(31557600*g)}return"-"===I[1]||"ago"===I[4]?-C:C},Wi=A=>A.includes("/")?A.toLowerCase():`application/${A.toLowerCase()}`;function zi(A,I,g={}){let C;try{C=JSON.parse(Si.decode(I))}catch{}if(!bi(C))throw new Ji("JWT Claims Set must be a top-level JSON object");const{typ:Q}=g;if(Q&&("string"!=typeof A.typ||Wi(A.typ)!==Wi(Q)))throw new Ki('unexpected "typ" JWT header value',C,"typ","check_failed");const{requiredClaims:B=[],issuer:E,subject:i,audience:D,maxTokenAge:n}=g,o=[...B];void 0!==n&&o.push("iat"),void 0!==D&&o.push("aud"),void 0!==i&&o.push("sub"),void 0!==E&&o.push("iss");for(const A of new Set(o.reverse()))if(!(A in C))throw new Ki(`missing required "${A}" claim`,C,A,"missing");if(E&&!(Array.isArray(E)?E:[E]).includes(C.iss))throw new Ki('unexpected "iss" claim value',C,"iss","check_failed");if(i&&C.sub!==i)throw new Ki('unexpected "sub" claim value',C,"sub","check_failed");if(D&&(w="string"==typeof D?[D]:D,!("string"==typeof(s=C.aud)?w.includes(s):Array.isArray(s)&&w.some(Set.prototype.has.bind(new Set(s))))))throw new Ki('unexpected "aud" claim value',C,"aud","check_failed");var s,w;let a;switch(typeof g.clockTolerance){case"string":a=Pi(g.clockTolerance);break;case"number":a=g.clockTolerance;break;case"undefined":a=0;break;default:throw new TypeError("Invalid clockTolerance option type")}const{currentDate:y}=g,G=(R=y||new Date,Math.floor(R.getTime()/1e3));var R;if((void 0!==C.iat||n)&&"number"!=typeof C.iat)throw new Ki('"iat" claim must be a number',C,"iat","invalid");if(void 0!==C.nbf){if("number"!=typeof C.nbf)throw new Ki('"nbf" claim must be a number',C,"nbf","invalid");if(C.nbf>G+a)throw new Ki('"nbf" claim timestamp check failed',C,"nbf","check_failed")}if(void 0!==C.exp){if("number"!=typeof C.exp)throw new Ki('"exp" claim must be a number',C,"exp","invalid");if(C.exp<=G-a)throw new Mi('"exp" claim timestamp check failed',C,"exp","check_failed")}if(n){const A=G-C.iat;if(A-a>("number"==typeof n?n:Pi(n)))throw new Mi('"iat" claim timestamp check failed (too far in the past)',C,"iat","check_failed");if(A<0-a)throw new Ki('"iat" claim timestamp check failed (it should be in the past)',C,"iat","check_failed")}return C}const ji=new Gg("Room"),Vi={small:{width:{ideal:160},height:{ideal:120}},qvga:{width:{ideal:320},height:{ideal:240}},vga:{width:{ideal:640},height:{ideal:480}},hd:{width:{ideal:1280},height:{ideal:720}}};let vi;const Zi=new class{constructor(){this.queue=new Map}push(A,I){this.queue.set(A,I)}get(A){return this.queue.get(A)}remove(A){this.queue.delete(A)}},Xi=new class{constructor(){this._localVBStream=null,this._vbDetailsNew={},this._vbDetails=null,this._roomType=null,this._participants={},this._peerId=null,this._peerConnection=null,this._pipelineManager=null,this._updateInterval=null,this._pipelineManager=new Ei,this.initializeTFLite()}getVBDetails(){return this._vbDetailsNew}getVBStream(){return 
this._localVBStream}hasLiveVBTrack(){try{return this._localVBStream&&"function"==typeof this._localVBStream.getVideoTracks&&this._localVBStream.getVideoTracks().length>0&&"live"===this._localVBStream.getVideoTracks()[0].readyState}catch(A){return!1}}setPipelineManager(A){this._pipelineManager=A}async initializeTFLite(){try{const A=await Ai(_E);this._vbDetailsNew.tfLite=A.tflite,this._vbDetailsNew.isSIMDSupported=A.isSIMDSupported}catch(A){}}async initializePipeline(A,I){ii.debug("initializePipeline called with videoTrack and backgroundConfig:%O,%O",A,I);let g=null;try{const C=await this._createHiddenVideoElement(A);if(this._vbDetailsNew.hiddenCanvas=this._createHiddenCanvasElement(C),"image"===I.type){const A=await this._createHiddenImageElement(I);if(!A.success)return!1;g=A.hiddenImage}const Q=C.htmlElement;Q instanceof HTMLVideoElement&&Q.paused&&(ii.debug("🎬 Video is paused, starting playback..."),await Q.play());const B=await this._pipelineManager.initialize(C,I,_E,null,this._vbDetailsNew.tfLite,g,this._vbDetailsNew.hiddenCanvas);ii.debug("Inside getUserMediaSuccess result",B),ii.debug("Pipeline manager active? :%s",this._pipelineManager.isActive()),ii.debug("camera stream live status:%s",A.readyState),B.pipeline.updatePostProcessingConfig($E),this._setupPeriodicUpdates(),this._vbDetailsNew.hiddenImage=g,this._vbDetailsNew.sourcePlayback=C;const E=B.canvasRef.captureStream(30);return this._localVBStream=E,{success:!0,vbStream:E}}catch(A){ii.error("Failed to initialize pipeline:%O",A)}}async _createHiddenVideoElement(A){return new Promise(I=>{const g=document.createElement("video");g.autoplay=!0,g.loop=!0,g.controls=!1,g.playsInline=!0,g.muted=!0,g.srcObject=new MediaStream([A]),g.style.cssText="position: fixed; top: 10px; right: 10px; width: 200px; height: 150px; border: 2px solid blue; z-index: 9999; ",document.body.appendChild(g),g.play(),g.onloadeddata=()=>{I({htmlElement:g,width:g.videoWidth,height:g.videoHeight})}})}async _createHiddenVideoElement(A){return new Promise(I=>{const g=document.createElement("video");g.style.display="none",g.autoplay=!0,g.loop=!0,g.controls=!1,g.playsInline=!0,g.muted=!0,g.srcObject=new MediaStream([A]),document.body.appendChild(g);const C=async()=>{try{await g.play()}catch(A){}g.readyState<2||!g.videoWidth||!g.videoHeight?requestAnimationFrame(C):I({htmlElement:g,width:g.videoWidth,height:g.videoHeight})};g.addEventListener("loadedmetadata",C,{once:!0}),C()})}_createHiddenCanvasElement(A){const I=document.createElement("canvas");return I.style.display="none",I.width=A.width,I.height=A.height,document.body.appendChild(I),I}_createHiddenImageElement(A){return new Promise(async I=>{const g=document.createElement("img");if(g.style.display="none",A?.url.includes("http"))try{(await this.testImageCORS(A?.url)).success?(g.crossOrigin="anonymous",document.body.appendChild(g),g.onload=()=>{I({success:!0,hiddenImage:g})},g.src=A.url):(g.crossOrigin="anonymous",document.body.appendChild(g),g.onload=()=>{I({success:!0,hiddenImage:g})})}catch(A){g.crossOrigin="anonymous",document.body.appendChild(g),g.onload=()=>{I({success:!0,hiddenImage:g})}}else g.crossOrigin="anonymous",document.body.appendChild(g),g.onload=()=>{I({success:!0,hiddenImage:g})},g.src=A.url})}async testImageCORS(A,I=1e4){return new Promise((g,C)=>{const Q=new Image;Q.crossOrigin="anonymous";const B=setTimeout(()=>{Q.src="",C(new Error("CORS_TIMEOUT"))},I);Q.onload=()=>{clearTimeout(B);try{const I=document.createElement("canvas");I.width=Q.width||100,I.height=Q.height||100;const 
C=I.getContext("2d");C.drawImage(Q,0,0),C.getImageData(0,0,1,1),g({success:!0,url:A,width:Q.naturalWidth,height:Q.naturalHeight,message:"CORS allowed"})}catch(A){C(new Error("CORS_BLOCKED"))}},Q.onerror=A=>{clearTimeout(B),C(new Error("IMAGE_LOAD_FAILED"))},Q.src=A})}_setupPeriodicUpdates(){this._updateInterval&&clearInterval(this._updateInterval),this._updateInterval=setInterval(()=>{if(this._pipelineManager&&this._pipelineManager.isActive()){const A=this._pipelineManager.getState();this._vbDetailsNew.fps=A.fps;const[I,g,C]=A.durations||[0,0,0];this._vbDetailsNew.resizingDuration=I,this._vbDetailsNew.inferenceDuration=g,this._vbDetailsNew.postProcessingDuration=C}},1e3)}cleanup(){try{if(this._localVBStream&&"function"==typeof this._localVBStream.getVideoTracks)try{this._localVBStream.getVideoTracks().forEach(A=>{try{A.stop()}catch(A){}})}catch(A){}try{this._pipelineManager&&"function"==typeof this._pipelineManager.stop&&this._pipelineManager.stop()}catch(A){}if(this._updateInterval){try{clearInterval(this._updateInterval)}catch(A){}this._updateInterval=null}try{if(this._vbDetailsNew?.sourcePlayback?.htmlElement){try{this._vbDetailsNew.sourcePlayback.htmlElement.srcObject=null}catch(A){}try{this._vbDetailsNew.sourcePlayback.htmlElement.remove()}catch(A){}}}catch(A){}try{if(this._vbDetailsNew?.hiddenCanvas)try{this._vbDetailsNew.hiddenCanvas.remove()}catch(A){}}catch(A){}try{if(this._vbDetailsNew?.hiddenImage)try{this._vbDetailsNew.hiddenImage.remove()}catch(A){}}catch(A){}this._localVBStream=null,this._vbDetailsNew&&(this._vbDetailsNew.sourcePlayback=null,this._vbDetailsNew.hiddenCanvas=null,this._vbDetailsNew.hiddenImage=null)}catch(A){}}};class _i extends hg.EventEmitter{static async listDevices(){if(vi)return ji.info("Device list already exists:%O",vi),{success:!0,deviceList:vi};const A=await xE();return A.success?(vi=A.deviceList,{success:!0,deviceList:A.deviceList}):{success:!1,reason:A.reason}}static async changeVB({track:A,details:I}){if(ji.debug("changeVB Received details are:%O",I),ji.debug("changeVB Received track are:%O",A),!I)return ji.debug("VB details not provided. 
Skipping VB processing."),{success:!1};if(!0===A.active){ji.debug("Track is live, calling initializePipeline",A);const g=A.getVideoTracks()[0],C=await Xi.initializePipeline(g,I);return ji.debug("response is :%o",C),C}throw ji.error("Track is not live"),new Error("Track is not live")}static async init({sessionToken:A,roomId:I,peerId:g,roomType:C}={}){if(!A)throw new Error("Session token is required to join the room.");try{let Q;ji.info("session token:%s",A);try{const I=(new TextEncoder).encode("samvyo_tech_321"),{payload:g}=await async function(A,I,g){const C=await async function(A,I,g){if(A instanceof Uint8Array&&(A=Si.decode(A)),"string"!=typeof A)throw new ki("Compact JWS must be a string or Uint8Array");const{0:C,1:Q,2:B,length:E}=A.split(".");if(3!==E)throw new ki("Invalid Compact JWS");const i=await Oi({payload:Q,protected:C,signature:B},I,g),D={payload:i.payload,protectedHeader:i.protectedHeader};return"function"==typeof I?{...D,key:i.key}:D}(A,I,g);if(C.protectedHeader.crit?.includes("b64")&&!1===C.protectedHeader.b64)throw new Ji("JWTs MUST NOT use unencoded payload");const Q={payload:zi(C.protectedHeader,C.payload,g),protectedHeader:C.protectedHeader};return"function"==typeof I?{...Q,key:C.key}:Q}(A,I,{algorithms:["HS256"]});Q=g,ji.info("Decoded token:",Q)}catch(A){throw ji.error("JWT verification failed:",A),A instanceof Mi?new Error("Session token has expired"):A instanceof Ki?new Error("Session token not yet active"):new Error("Invalid session token: "+A.message)}if(!Q||"object"!=typeof Q)throw new Error("Invalid token format");const{data:B,signallingServerUrl:E}=Q;if(!B||!E)throw new Error("Missing required token data");return g||(g=vC()),I||(I=VC()),new _i({peerId:g,roomId:I,outputData:{sessionToken:A,innerSessionToken:B,signallingServerUrl:E},roomType:C})}catch(A){throw ji.error("Failed to initialize:",A.message),A}}constructor({peerId:A,roomId:I,outputData:g,roomType:C}){super(),this._closed=!1,this._roomStatus="initialised",this._roomDisplayName=null,this._running=!1,this._cignal=null,this._socket=null,this._sendTransport=null,this._recvTransport=null,this._device=new $I.Device,this._selfDisconnectTimer=null,this._webCamProducer=null,this._micProducer=null,this._shareProducer=null,this._shareAudioProducer=null,this._producers=new Map,this._consumers=new Map,this._peers=new Map,this._subscribedPeers=new Map,this._activeSubscribedPeerIds=new Set,this._availableTracks=new Map,this._subscriptionDebug={admissionDenied:[],preemptions:[],lastResponses:{subscribePeersResponse:null,stagePeersResponse:null,unsubscribePeersResponse:null}},this._data={...g,inputParams:{peerId:A,roomId:I,roomType:C||"conferencing"}},this._micStream=null,this._webCamStream=null,this._webcam={device:null,resolution:"hd"},this._mic={device:null},this._deviceList=vi||null,this._externalVideo=null,this._externalVideoStream=null,this._forceVP8=!1,this._forceH264=!1,this._forceVP9=!1,this._enableWebcamLayers=!0,this._numSimulcastStreams=3,this._enableSharingLayers=!0,this._client=wg.parse(window.navigator.userAgent),this._routerRtpCapabilities=null,this._recordingStartedByMe={},this._liveStreamingStartedByMe={},this._cignalConnected=!1,this._reconnectionInitiated=!1,this._restartIceInProgressSendTransport=!1,this._restartIceInProgressRecvTransport=!1,this._activeSpeaker=null,this._speechRecognition=null,this._transcriptStorage=new 
Map,this._audioContext=null,this._audioAnalyser=null,this._micMonitorStream=null,this._speakingWhileMutedInterval=null,this._speakingThreshold=-50,this._mutedSpeakingDetectionEnabled=!0,this._lastMutedSpeakingNotification=0,this._mutedSpeakingCooldown=3e3,this._audioTroubleShootData={lastDiagnostic:null,deviceTests:{},connectivityStatus:"unknown"},this._audioOutputDevices=[],this._currentSpeakerDevice=null,this._testAudioElements=new Map,this._speakerTestResults=new Map,this._remoteAudioElement=null,this._remoteCaption=null,this._transcriptionRecorder=null,this._transcriptionActive=!1,this._transcriptionChunks=[],this._currentTranscriptionPeerId=null,this._transcriptionEmittedStart=!1,this._peerConnection=null,this._localStream=null,this._screenShareStream=null,this._remoteStream=null,this._remoteDisplayName=null,this._remotePeerId=null,this._remotePeerType=null,this._remoteVideoStatus=null,this._remoteAudioStatus=null,this._videoWasActiveP2p=null,this._statsTimer=null,this._lastStatCache={},this._statsIntervalMs=5e3,this.initLocal()}get peerId(){return this._peerId}set peerId(A){this._peerId=A}get roomType(){return this._roomType}set roomType(A){this._roomType=A}get closed(){return this._closed}get data(){return this._data}set data(A){throw new Error("Setting the whole data object is not possible!")}get peers(){return this._peers}set peers(A){throw new Error("Setting the whole peers object is not possible!")}get transports(){return{produce:this._sendTransport,consume:this._recvTransport}}set transports(A){throw new Error("Setting of transport is not possible!")}get videoStream(){return this._webCamStream}get audioStream(){return this._micStream}get clientAgent(){return this._client}get activeParameters(){return this._data.inputParams}get deviceList(){return this._deviceList?this._deviceList:{videoDevices:[],audioDevices:[],audioOutputDevices:[]}}set deviceList(A){throw new Error("Setting of deviceList is not possible!")}get currentlyActiveSpeaker(){return this._activeSpeaker}set currentlyActiveSpeaker(A){throw new Error("Setting of currentActivespeaker is not possible!")}get roomDisplayName(){return this._roomDisplayName}set roomDisplayName(A){throw new Error("Setting of roomDisplayName is not possible!")}async initLocal(){const A=$I.detectDevice();ji.debug("The device is:%O",A),await this._initSocket()}async _initSocket(){let A=this;const I=this.data.signallingServerUrl.replace(/^(http|https):\/\//,""),g=`wss://${I}/?sessionToken=${this.data.sessionToken}&roomId=${this.data.inputParams.roomId}&peerId=${this.data.inputParams.peerId}&roomType=${this.data.inputParams.roomType}`;ji.info(`Going to create a new socket! 
with address: ${I}`),this._socket=new jC(g,!0),this._listenToSocket(),this._socket.on("notify",({type:A,title:I,message:g})=>{this.emit("notification",{eventType:A,eventText:`${I}: ${g}`,roomId:this.data.inputParams.roomId,peerId:this.data.inputParams.peerId})}),this._socket.on("roomStartedP2p",A=>{ji.info("P2P room successfully started:%O",A),this._running=!0;const I=this.data.inputParams.peerId;try{this.handleRemotePeerJoin(A)}catch(A){ji.error("Error handling remote peer join on roomStartedP2p:%o",A)}this.emit("joinSuccess",{message:A.message||"Successfully joined P2P room",peerId:I,callStartTime:A.callStartTime,cId:A.cId})}),this._socket.on("userError",I=>{ji.error("User Error happened with message:%O",I),A.emit("notification",{eventType:I.title,eventText:`${I.text}`})}),this._socket.on("validationAlert",A=>{ji.info("Validation alert happened")}),this._socket.on("alreadyActive",({title:A,text:I})=>{this.emit("notification",{eventType:"alreadyActive",eventText:"This peer already has an active connection",roomId:this.data.inputParams.roomId,peerId:this.data.inputParams.peerId})}),this._socket.on("passwordDisabled",()=>{ji.info("password disabled by moderator!"),this.emit("notification",{eventType:"passwordDisabled",eventText:"Password for this room has been disabled by moderator",roomId:this.data.inputParams.roomId})}),this._socket.on("close",({code:A,reason:I})=>{if(ji.info(`socket closed with code ${A}`),this.emit("connectionStateChange",{status:"disconnected",code:A,reason:I||"Connection closed"}),4500!==A&&4100!==A){let g=I||"Connection to server closed unexpectedly! Trying to reconnect.";ji.info(`socket close code is${A} with reason ${g}`)}else ji.info("Socket is now closed!"),this.close()}),this._socket.on("connected",async()=>{ji.info("Socket connected"),this._clearSelfDisconnectGuard(),this.emit("connectionStateChange",{status:"connected"}),this.emit("initSuccess")}),this._socket.on("reconnected",async()=>{if(ji.info("Socket re-connected"),this._clearSelfDisconnectGuard(),this.emit("connectionStateChange",{status:"reconnected"}),this.roomType===RE&&A.pc)ji.info("Socket seems to be reconnected in mid call! RestartIce needed for p2p call."),"failed"!==A.pc.iceConnectionState&&"disconnected"!==A.pc.iceConnectionState||A.restartICE();else{ji.debug("Ice restarts for mediasoup transports for a joined peer");const I=!A._sendTransport||"closed"===A._sendTransport.connectionState,g=!A._recvTransport||"closed"===A._recvTransport.connectionState;(I||g)&&ji.warn("Transports missing/closed after reconnect. Recreating. 
sendClosed=%s recvClosed=%s",I,g),A._sendTransport&&["failed","disconnected"].includes(A._sendTransport.connectionState)?(ji.debug("Restart ice for sendtransport"),A.restartIce(A._sendTransport.id,"send")):A._sendTransport?ji.debug("Send transport state post-reconnect: %s (no ICE restart)",A._sendTransport.connectionState):ji.error("Send transport not available!"),A._recvTransport&&["failed","disconnected"].includes(A._recvTransport.connectionState)?(ji.debug("Restart ice for recvtransport"),A.restartIce(A._recvTransport.id,"recv")):A._recvTransport?ji.debug("Recv transport state post-reconnect: %s (no ICE restart)",A._recvTransport.connectionState):ji.error("Recv transport not available!")}this._sendTransport&&this._recvTransport&&"connected"===this._sendTransport.connectionState&&"connected"===this._recvTransport.connectionState&&this.emit("connectionStateChange",{status:"media-restored"})}),this._socket.on("defaultJoinStatus",async A=>{ji.info(" Socket defaultjoinstatus:%O",A)})}_startSelfDisconnectGuard(A=9e4){this._selfDisconnectTimer&&clearTimeout(this._selfDisconnectTimer),this._selfDisconnectTimer=setTimeout(async()=>{ji.warn("Self disconnect guard elapsed; emitting roomClosed to avoid stale client state");try{this.emit("roomClosed",{roomId:this.data.inputParams.roomId,reason:"self-timeout"})}catch(A){}try{await this.leaveRoomCommon()}catch(A){ji.error("Failed to leave room after self-timeout:%o",A)}},A)}_clearSelfDisconnectGuard(){this._selfDisconnectTimer&&(clearTimeout(this._selfDisconnectTimer),this._selfDisconnectTimer=null)}_sendMessage(A){this._socket.send({usageType:"sdk",...A})}_listenToSocket(){this._socket.on("message",A=>{try{switch("currentlyActiveSpeaker"===A.id||"allStats"===A.id||ji.info("message in Room is:%O",A),A.id){case"chatMessage":this.processChatMessage(A);break;case"customMessage":this.processCustomMessage(A);break;case"existingParticipants":this.onExistingParticipants(A);break;case"newPeerJoin":this.onNewPeer(A);break;case"recordingError":this.handleRecordingErrors(A);break;case"moderatorAuthentication":this.authenticateUser(A);break;case"authenticationRequested":this.authenticationRequested(A);break;case"toggleMyMic":this.toggleMyMic(A);break;case"toggleMyCamera":this.toggleMyCamera(A);break;case"logMeOut":this.logMeOutNew(A);break;case"userAlreadyAuthenticated":this.hideUserAuthenticationDialog(A);break;case"peerLeft":this.peerLeft(A);break;case"recordingStarted":this.setRecordingStatusStarted(A);break;case"recordingStopped":this.setRecordingStatusEnded(A);break;case"startDefaultRecording":this.startRecording(A);break;case"mediaToggled":this.mediaToggled(A);break;case"processingStarted":this.handleProcessingStart(A);break;case"processingCompleted":this.handleProcessingCompletion(A);break;case"processingError":this.handleProcessingError(A);break;case"createTransportResponse":this.handleCreateTransportRequest(A);break;case"connectTransportResponse":this.handleConnectTransportRequest(A);break;case"connectRecvTransportResponse":this.handleConnectRecvTransportRequest(A);break;case"sendTrackResponse":this.handleSendTrackRequest(A);break;case"recvTrackResponse":this.handleRecvTrackRequest(A);break;case"consumerAdmissionDenied":try{try{this._subscriptionDebug?.admissionDenied&&(this._subscriptionDebug.admissionDenied.push({ts:Date.now(),...A}),this._subscriptionDebug.admissionDenied.length>200&&this._subscriptionDebug.admissionDenied.shift())}catch(A){}this.emit("consumerAdmissionDenied",A)}catch(A){}break;case"subscribePeersResponse":case"stagePeersResponse":cas
e"unsubscribePeersResponse":try{try{this._subscriptionDebug?.lastResponses&&(this._subscriptionDebug.lastResponses[A.id]={ts:Date.now(),...A})}catch(A){}this.emit(A.id,A)}catch(A){}break;case"existingTracks":try{const I=Array.isArray(A.tracks)?A.tracks:[];for(const A of I){if(!A)continue;const I=`${A.senderPeerId||A.peerId}:${A.mediaTag}`;this._availableTracks.set(I,A)}this.emit("existingTracks",{tracks:I})}catch(A){ji.error("Failed handling existingTracks:%O",A)}break;case"trackAvailable":try{const I=A.senderPeerId||A.peerId,g=A.mediaTag;if(I&&g){const C=`${I}:${g}`;this._availableTracks.set(C,A)}this.emit("trackAvailable",A)}catch(A){ji.error("Failed handling trackAvailable:%O",A)}break;case"trackUnavailable":try{const I=A.senderPeerId||A.peerId,g=A.mediaTag;if(I&&g){const A=`${I}:${g}`;this._availableTracks.delete(A)}this.emit("trackUnavailable",A)}catch(A){ji.error("Failed handling trackUnavailable:%O",A)}break;case"roomClosedByModerator":this.leaveRoomCommon(),this.roomClosed();break;case"currentlyActiveSpeaker":this.setCurrentlyActiveSpeaker(A);break;case"restartIceResponse":this.restartIceResponse(A);break;case"consumerClosed":this.closeConsumer(A);break;case"handRaise":this.handleHandRaise(A);break;case"updateCId":this.updateCId(A);break;case"upgradeParticipant":this.handleUpgradeParticipant(A);break;case"downgradeParticipant":this.handleDowngradeParticipant(A);break;case"switchMicOff":this.handleSwitchMicOff(A);break;case"screenShareLimitReached":this.handleScreenShareLimitReached(A);break;case"upgradeLimitReached":this.handleUpgradeLimitReached(A);break;case"modUpgradeReq":this.handleModUpgradeReq(A);break;case"lockUnlockRoom":this.handleLockUnlockRoom(A);break;case"peersWaiting":this.handlePeersWaiting(A);break;case"checkTransportStatus":this.checkTransportStatus(A);break;case"remotePeerJoin":this.handleRemotePeerJoin(A);break;case"offer":ji.debug("inside offer"),this.handleOffer(A);break;case"answer":ji.debug("inside answer"),this.handleAnswer(A);break;case"candidate":ji.debug("inside handle candidate"),this.handleCandidate(A.candidate);break;case"p2pRoomClosed":ji.debug("inside p2p room close"),this.leaveRoomNewP2p(hE),this.emit("roomClosed",{roomId:this.data.inputParams.roomId});break;case"p2pUserLeft":ji.debug("inside p2p user left"),this.userLeftRoom(A);break;case"iceRestart":this.handleIceRestart(A);break;case"iceRestarted":this.handleIceRestartResponse(A);break;case"screenShareP2p":this.handleScreenShareP2p(A);break;case"transcription":this._processTranscriptionMessage(A);break;default:ji.warn("Unrecognized message:%o",A)}}catch(A){ji.error("listentomessage:%O",A)}})}joinRoom=async({peerName:A=null,produce:I=!0,produceAudio:g=!0,produceVideo:C=!0,consume:Q=!0,manualSubscription:B=!1,videoResolution:E="hd",forceVp8:i=!1,forceVp9:D=!1,forceH264:n=!1,h264Profile:o="high",forcePCMU:s=!1,forcePCMA:w=!1,forceFPS:a=25,enableWebcamLayers:y=!0,numSimulcastStreams:G=3,autoGainControl:R=!0,echoCancellation:S=!0,noiseSuppression:h=!0,sampleRate:N=44e3,channelCount:K=1,videoBitRates:M=[700,250,75],share:F=!1,shareAudio:L=!1,enableSharingLayers:k=!0,shareBitRates:J=[2500,1250,500],audioDeviceId:U=null,videoDeviceId:t=null,peerType:q="participant",roomType:r=SE,authenticationRequired:c=!1,password:H=null,roomDisplayName:e=null,vbdetails:Y,enableTranscription:d=!1,enableChatOption:b=!1,enableScreenSharing:x=!1}={})=>{ji.info("Going to join room",d),["hd","vga","qvga"].includes(E)||(ji.warn("Invalid video resolution value. 
setting it to default value of 'hd' "),E="hd"),"boolean"!=typeof I&&(ji.warn("Produe should either be true or false"),I=Boolean(I)),"boolean"!=typeof g&&(ji.warn("ProduceAudio should either be true or false"),g=Boolean(g)),"boolean"!=typeof C&&(ji.warn("ProduceVideo should either be true or false"),C=Boolean(C)),"boolean"!=typeof Q&&(ji.warn("Consume should either be true or false"),Q=Boolean(Q)),"boolean"!=typeof i&&(ji.warn("forceVp8 should either be true or false"),i=Boolean(i)),"boolean"!=typeof D&&(ji.warn("forceVp9 should either be true or false"),D=Boolean(D)),"boolean"!=typeof n&&(ji.warn("forceH264 should either be true or false"),n=Boolean(n)),["high","low"].includes(o.toLowerCase())||(ji.warn("h264Profile should either be 'high' or 'low'"),o="high"),(!Number.isInteger(a)||Number.isInteger(a)&&(a>65||a<5))&&(ji.warn("forceFPS should be a number between 5 to 65, default value is 25 fps."),a=25),"boolean"!=typeof y&&(ji.warn("enableWebcamLayers should either be true or false"),y=Boolean(y)),(!Number.isInteger(G)||Number.isInteger(G)&&(G>3||G<1))&&(ji.warn("numSimulcastStreams should be a number between 1 to 3, default value is 3."),G=3),Array.isArray(M)&&M.length>=1&&M.length<=3&&M.every(A=>Number.isInteger(A)&&A>=75&&A<=800)?ji.debug("videoBitRates values are correct"):(ji.warn("videobitrates values should be an integer array with maximum 3 elements and minimum 1 element. The values in the array are '[700,250,75]'"),M=[700,250,75]),"boolean"!=typeof s&&(ji.warn("forcePCMU should either be true or false"),s=Boolean(s)),"boolean"!=typeof w&&(ji.warn("forcePCMA should either be true or false"),w=Boolean(w)),"boolean"!=typeof R&&(ji.warn("autoGainControl should either be true or false"),R=Boolean(R)),"boolean"!=typeof S&&(ji.warn("echoCancellation should either be true or false"),S=Boolean(S)),"boolean"!=typeof h&&(ji.warn("noiseSuppression should either be true or false"),h=Boolean(h)),(!Number.isInteger(N)||Number.isInteger(N)&&(N>64e3||N<8e3))&&(ji.warn("sampleRate should be a number between 8000 to 64000, default value is 44000 Khz."),N=44e3),(!Number.isInteger(K)||Number.isInteger(K)&&(K>2||K<1))&&(ji.warn("sampleRate should be a number between 1 to 2, default value is 1, which is a mono audio."),K=1),"boolean"!=typeof F&&(ji.warn("share should either be true or false"),F=Boolean(F)),"boolean"!=typeof L&&(ji.warn("shareAudio should either be true or false"),L=Boolean(L)),"boolean"!=typeof k&&(ji.warn("enableSharingLayers should either be true or false"),k=Boolean(k)),Array.isArray(J)&&J.length>=1&&J.length<=3&&J.every(A=>Number.isInteger(A)&&A>=500&&A<=2500)?ji.debug("shareBitRates values are correct"):(ji.warn("sharebitrates values should be an integer array with maximum 3 elements and minimum 1 element. The values in the array are '[2500,1250,500]'"),J=[2500,1250,500]),["moderator","participant","attendee"].includes(q)?ji.debug("peerType is valid:%s",q):(q="participant",ji.debug("peerType is invalid:%s. 
By default set to: participant",q)),await this.listDevicesInternal(),this._videoResolution=E,this._forceVP8=Boolean(i),this._forceH264=Boolean(n),this._forceVP9=Boolean(D),this._enableWebcamLayers=Boolean(y),this._numSimulcastStreams=G,this._enableSharingLayers=Boolean(k);try{if(A||(A=dE()),this.data.inputParams={...this.data.inputParams,peerName:A,produce:I,produceAudio:g,produceVideo:C,consume:Q,manualSubscription:B,videoResolution:E,forceVp8:i,forceVp9:D,forceH264:n,h264Profile:o,forceFPS:a,forcePCMU:s,forcePCMA:w,enableWebcamLayers:y,numSimulcastStreams:G,autoGainControl:R,echoCancellation:S,noiseSuppression:h,sampleRate:N,channelCount:K,videoBitRates:M,share:F,shareAudio:L,enableSharingLayers:k,shareBitRates:J,audioDeviceId:U,videoDeviceId:t,peerType:q,roomType:r,authenticationRequired:c,password:H,roomDisplayName:e,vbdetails:Y,enableTranscription:d,enableChatOption:b,enableScreenSharing:x},ji.info("input params are:%O",this.data.inputParams),r===RE)return ji.info("Joining P2P room"),await this._joinRoomP2p({peerName:A,produce:I,produceAudio:g,produceVideo:C,audioDeviceId:U,videoDeviceId:t,peerType:q,authenticationRequired:c,password:H,roomDisplayName:e,vbdetails:Y,enableTranscription:d,enableChatOption:b,enableScreenSharing:x});{const A={id:"joinRoom",type:"r",peerId:this.data.inputParams.peerId,participantType:"attendee"===q?"viewer":q,roomType:r,roomDisplayName:e||`room-${1e5+Math.round(9e5*Math.random())}`,browser:this._client,name:this.data.inputParams.peerName,room:this.data.inputParams.roomId,authenticationRequired:c,isRoomPassword:!!H,roomPassword:H||null,usageType:"sdk",enableTranscription:d,enableChatOption:b,enableScreenSharing:x,manualSubscription:Boolean(B)};this._sendMessage(A)}}catch(A){return ji.error("Failed to join room:",A.message),{success:!1,reason:A.message}}};_joinRoomP2p=async({peerName:A,produce:I,produceAudio:g,produceVideo:C,audioDeviceId:Q,videoDeviceId:B,peerType:E,authenticationRequired:i,password:D,roomDisplayName:n,vbdetails:o,enableTranscription:s,enableChatOption:w,enableScreenSharing:a})=>{ji.info("P2P room join started");try{if(I){if(C){ji.info("Acquiring video for P2P");try{await this._acquireVideoForP2p({deviceId:B,vbdetails:o})}catch(A){ji.warn("Video acquisition failed for P2P, continuing with audio if available:%O",A)}}if(g){ji.info("Acquiring audio for P2P");try{await this._acquireAudioForP2p({deviceId:Q})}catch(A){ji.warn("Audio acquisition failed for P2P, continuing with video if available:%O",A)}}}if(this._localStream=new MediaStream,this._micStream&&this._micStream.getTracks().forEach(A=>{this._localStream.addTrack(A)}),this._webCamStream&&this._webCamStream.getTracks().forEach(A=>{this._localStream.addTrack(A)}),0===this._localStream.getTracks().length)return ji.error("No media tracks acquired for P2P call"),this.emit("error",{type:"mediaError",message:"At least mic or camera is needed to join P2P room"}),{success:!1,reason:"No media tracks available"};const y=!(!g||!this._micStream),G=!(!C||!this._webCamStream);!0===i&&"moderator"!==E?(ji.info("Authentication required; deferring self-add until moderator approval"),this._pendingP2pAuth=!0,this._pendingSelfP2pInfo={peerName:A,peerType:E,audioStatus:y,videoStatus:G}):(ji.info("Adding self to peers"),this._addSelfToPeersP2p(A,E,y,G,w,a));const 
R={id:"joinRoom",type:"r",peerId:this.data.inputParams.peerId,name:A,room:this.data.inputParams.roomId,roomDisplayName:n||`room-${1e5+Math.round(9e5*Math.random())}`,roomPassword:D,isRoomPassword:!!D,participantType:"moderator"===E?"moderator":"participant",camera:B||"default",mic:Q||"default",videoStatus:!(!C||!this._webCamStream),audioStatus:!(!g||!this._micStream),authenticationRequired:i,roomType:this.data.inputParams.roomType,browser:this._client,usageType:"sdk",enableTranscription:s,enableChatOption:w,enableScreenSharing:a};return ji.info("Sending P2P join message:%O",R),this._sendMessage(R),{success:!0}}catch(A){return ji.error("Failed to join P2P room:%O",A),this.emit("error",{type:"joinError",message:A.message}),{success:!1,reason:A.message}}};_acquireVideoForP2p=async({deviceId:A,vbdetails:I})=>{ji.info("Acquiring video for P2P with deviceId:%s",A);try{const g={deviceId:A?{exact:A}:void 0,width:{ideal:1280},height:{ideal:720},frameRate:{ideal:this.data.inputParams.forceFPS||25}},C=await navigator.mediaDevices.getUserMedia({video:g,audio:!1});let Q=C.getVideoTracks()[0];if(I){ji.info("vbdetails provided in _acquireVideoForP2p",I);try{const A=Xi?.getVBStream?.()||Xi?._localVBStream;let g=null;if(A&&"function"==typeof A.getVideoTracks&&A.getVideoTracks().length>0&&"live"===A.getVideoTracks()[0].readyState)g=A.getVideoTracks()[0],ji.debug("Using existing Virtual Background track (P2P acquire)");else{const A=await Xi.initializePipeline(Q,I);A&&A.vbStream&&"function"==typeof A.vbStream.getVideoTracks&&A.vbStream.getVideoTracks().length>0&&(g=A.vbStream.getVideoTracks()[0],ji.debug("Initialized new Virtual Background pipeline (P2P acquire)"))}if(g){const A=new MediaStream([g]);this._webCamStream=A}else this._webCamStream=C}catch(A){ji.debug("VB init failed or skipped in _acquireVideoForP2p"),this._webCamStream=C}}else this._webCamStream=C;return ji.info("Video acquired successfully for P2P"),C}catch(A){throw ji.error("Failed to acquire video for P2P:%O",A),A}};_acquireAudioForP2p=async({deviceId:A})=>{ji.info("Acquiring audio for P2P with deviceId:%s",A);try{const I={deviceId:A?{exact:A}:void 0,autoGainControl:this.data.inputParams.autoGainControl??!0,echoCancellation:this.data.inputParams.echoCancellation??!0,noiseSuppression:this.data.inputParams.noiseSuppression??!0,sampleRate:this.data.inputParams.sampleRate||44e3,channelCount:this.data.inputParams.channelCount||1},g=await navigator.mediaDevices.getUserMedia({audio:I,video:!1});return this._micStream=g,ji.info("Audio acquired successfully for P2P"),g}catch(A){throw ji.error("Failed to acquire audio for P2P:%O",A),A}};_addSelfToPeersP2p=(A,I,g,C,Q,B)=>{const E=this.data.inputParams.roomType===RE?"moderator"===I?"moderator":"presenter":I,i={id:this.data.inputParams.peerId,displayName:`${A} (You)`,participantType:E,audioStatus:g,videoStatus:C,audioTrack:this._micStream?this._micStream.getAudioTracks()[0]:null,videoTrack:this._webCamStream?this._webCamStream.getVideoTracks()[0]:null};this._peers.set(this.data.inputParams.peerId,i),this.emit("newPeer",{peerId:this.data.inputParams.peerId,peerName:`${A} (You)`,type:"local",peerRole:E,participantType:E,enableChatOption:Q,enableScreenSharing:B}),ji.info("Added self to 
peers:%O",i);try{!1===g?this.emit("peerMuted",{peerId:this.data.inputParams.peerId,type:"local"}):!0===g&&this.emit("peerUnMuted",{peerId:this.data.inputParams.peerId,type:"local"}),!1===C&&this.emit("videoEnd",{peerId:this.data.inputParams.peerId,videoTrack:null}),!0===g&&this._micStream&&!this._emittedLocalMicStart&&(this.emit("micStart",{peerId:this.data.inputParams.peerId,audioTrack:this._micStream.getAudioTracks()[0],type:"local"}),this._emittedLocalMicStart=!0),!0===C&&this._webCamStream&&!this._emittedLocalVideoStart&&(this.emit("videoStart",{peerId:this.data.inputParams.peerId,videoTrack:this._webCamStream.getVideoTracks()[0],type:"local"}),this._emittedLocalVideoStart=!0)}catch(A){}};authenticateUser=A=>{ji.info("Moderator authentication requested:%O",A),this.emit("moderatorAuthentication",{moderatorName:A.moderatorName,requesterName:A.requesterName,requesterPeerId:A.requesterPeerId,text:A.title})};authenticationRequested=A=>{ji.info("Moderator authentication requested:%O",A),this.emit("authenticationRequested",{requesterName:A.requesterName,requesterPeerId:this.data.inputParams.peerId,text:A.title})};allowRoomJoin=A=>{if(!A)return ji.error("peerId can't be undefined!"),{success:!1,reason:"PeerId can't be undefined"};ji.info("Allow user to join room:%O",A);let I={id:"userAuthenticated",peerId:A,roomName:this.data.inputParams.roomId,moderator:this.data.inputParams.peerId};this._sendMessage(I)};denyRoomJoin=A=>{if(!A)return ji.error("peerId can't be undefined!"),{success:!1,reason:"PeerId can't be undefined"};ji.info("Deny user to join room:%O",A);let I={id:"userDenied",peerId:A,roomName:this.data.inputParams.roomId,moderator:this.data.inputParams.peerId};this._sendMessage(I)};setSpeakingWhileMutedDetection(A=!0){this._mutedSpeakingDetectionEnabled=A,ji.debug("Speaking while muted detection "+(A?"enabled":"disabled"))}setSpeakingThreshold(A=-50){this._speakingThreshold=A,ji.debug(`Speaking threshold set to: ${A}dB`)}async _initializeAudioMonitoring(){if(this._micStream)try{this._audioContext=new AudioContext,this._audioAnalyser=this._audioContext.createAnalyser(),this._audioAnalyser.fftSize=512,this._audioAnalyser.smoothingTimeConstant=.3;const A={audio:{deviceId:this._mic.device?{exact:this._mic.device.deviceId}:void 0,echoCancellation:!1,noiseSuppression:!1,autoGainControl:!1}};this._micMonitorStream=await navigator.mediaDevices.getUserMedia(A),this._audioContext.createMediaStreamSource(this._micMonitorStream).connect(this._audioAnalyser),ji.debug("Audio monitoring initialized successfully")}catch(A){ji.error("Error initializing audio monitoring:%o",A)}}_getAudioLevel(){if(!this._audioAnalyser)return-1/0;const A=this._audioAnalyser.frequencyBinCount,I=new Uint8Array(A);this._audioAnalyser.getByteFrequencyData(I);let g=0;for(let C=0;C<A;C++)g+=I[C];const C=g/A,Q=20*Math.log10(C/255);return isFinite(Q)?Q:-1/0}_startSpeakingWhileMutedDetection(){this._mutedSpeakingDetectionEnabled&&this._audioAnalyser&&(this._speakingWhileMutedInterval=setInterval(()=>{if(!this._micProducer||!this._micProducer.paused)return;const A=this._getAudioLevel();if(A>this._speakingThreshold){const I=Date.now();I-this._lastMutedSpeakingNotification>this._mutedSpeakingCooldown&&(this._lastMutedSpeakingNotification=I,this.data.inputParams.peerId===this.peerId&&(this.emit("speakingWhileMuted",{peerId:this.data.inputParams.peerId,audioLevel:A,timestamp:I,message:"You appear to be speaking while muted"}),ji.debug(`Speaking while muted detected - Audio level: 
${A}dB`)))}},100))}_stopSpeakingWhileMutedDetection(){this._speakingWhileMutedInterval&&(clearInterval(this._speakingWhileMutedInterval),this._speakingWhileMutedInterval=null)}_cleanupAudioMonitoring(){this._stopSpeakingWhileMutedDetection(),this._micMonitorStream&&(this._micMonitorStream.getTracks().forEach(A=>A.stop()),this._micMonitorStream=null),this._audioContext&&"closed"!==this._audioContext.state&&(this._audioContext.close(),this._audioContext=null),this._audioAnalyser=null}async diagnoseAudio(){ji.debug("Starting comprehensive audio diagnostic...");const A={timestamp:Date.now(),browser:this._client,permissions:{},devices:{},connectivity:{},currentSetup:{},recommendations:[]};try{return A.permissions=await this._testAudioPermissions(),A.devices=await this._testAudioDevices(),A.currentSetup=await this._testCurrentMicSetup(),A.connectivity=await this._testWebRTCConnectivity(),A.recommendations=this._generateAudioRecommendations(A),this._audioTroubleShootData.lastDiagnostic=A,this.emit("audioDiagnosticComplete",{peerId:this.data.inputParams.peerId,diagnostic:A}),A}catch(I){return ji.error("Audio diagnostic failed:",I),A.error=I.message,A}}async _testAudioPermissions(){const A={granted:!1,state:"unknown",error:null};try{if(navigator.permissions){const I=await navigator.permissions.query({name:"microphone"});A.state=I.state,A.granted="granted"===I.state}const I=await navigator.mediaDevices.getUserMedia({audio:!0,video:!1});A.granted=!0,A.actuallyGranted=!0,I.getTracks().forEach(A=>A.stop())}catch(I){A.error=I.name,A.actuallyGranted=!1,ji.error("Permission test failed:",I)}return A}getSystemHealthStatus(){return{sdk:{roomStatus:this._roomStatus,isConnected:"connected"===this._roomStatus,micActive:!!this._micProducer&&!this._micProducer.closed,micMuted:this._micProducer?.paused,cameraActive:!!this._webcamProducer&&!this._webcamProducer.closed,screenSharing:!!this._shareProducer&&!this._shareProducer.closed},transports:{send:this._sendTransport?{id:this._sendTransport.id,connectionState:this._sendTransport.connectionState,iceState:this._sendTransport.iceState,dtlsState:this._sendTransport.dtlsState}:null,recv:this._recvTransport?{id:this._recvTransport.id,connectionState:this._recvTransport.connectionState,iceState:this._recvTransport.iceState,dtlsState:this._recvTransport.dtlsState}:null},audio:{context:this._audioContext?.state,analyser:!!this._audioAnalyser,currentLevel:this._getAudioLevel(),speaking:this._getAudioLevel()>this._speakingThreshold,monitorStream:!!this._micMonitorStream},streams:{mic:this._micStream?.active,camera:this._webCamStream?.active,micTracks:this._micStream?.getTracks()?.length||0,cameraTracks:this._webCamStream?.getTracks()?.length||0}}}async testNetworkConnectivity(){const A={timestamp:Date.now(),stun:{working:!1,latency:null},turn:{working:!1,latency:null},bandwidth:{upload:null,download:null},packetLoss:null};try{const I=Date.now(),g=new RTCPeerConnection({iceServers:[{urls:"stun:stun.l.google.com:19302"}]}),C=(g.createDataChannel("test"),await g.createOffer());await g.setLocalDescription(C),await new Promise(A=>{g.onicecandidate=I=>{I.candidate||A()},setTimeout(A,5e3)}),A.stun.working="failed"!==g.iceConnectionState,A.stun.latency=Date.now()-I,g.close()}catch(A){console.error("Network connectivity test failed:",A)}return A}async assessAudioQuality(A=5e3){if(!this._micStream)throw new Error("No active microphone stream");const I={duration:A,samples:[],averageLevel:0,peakLevel:-1/0,quietSamples:0,clipSamples:0,quality:"unknown"};try{const g=new 
AudioContext,C=g.createAnalyser();C.fftSize=1024,g.createMediaStreamSource(this._micStream).connect(C);const Q=C.frequencyBinCount,B=new Uint8Array(Q),E=Date.now(),i=100;return new Promise(D=>{const n=setInterval(()=>{C.getByteFrequencyData(B);let i=0;for(let A=0;A<Q;A++)i+=B[A];const o=i/Q,s=20*Math.log10(o/255);if(isFinite(s)&&(I.samples.push(s),I.peakLevel=Math.max(I.peakLevel,s),s<-70&&I.quietSamples++,s>-3&&I.clipSamples++),Date.now()-E>=A){clearInterval(n),g.close();const A=I.samples.filter(A=>isFinite(A));I.averageLevel=A.reduce((A,I)=>A+I,0)/A.length;const C=I.quietSamples/A.length*100;I.clipSamples/A.length*100>10?I.quality="poor-clipping":C>80?I.quality="poor-quiet":I.averageLevel>-30?I.quality="good":I.averageLevel>-50?I.quality="fair":I.quality="poor-low",D(I)}},i)})}catch(A){throw new Error(`Audio quality assessment failed: ${A.message}`)}}async attemptAutoRemediation(){const A=[],I=this.getSystemHealthStatus();try{"failed"===I.transports.send?.connectionState&&(await this.restartIce(I.transports.send.id,"send"),A.push("Restarted send transport")),"failed"===I.transports.recv?.connectionState&&(await this.restartIce(I.transports.recv.id,"recv"),A.push("Restarted receive transport")),!I.audio.analyser&&this._micStream&&(this._cleanupAudioMonitoring(),await this._initializeAudioMonitoring(),A.push("Restarted audio monitoring")),I.sdk.micActive&&!I.streams.mic&&(await this.disableMic(),await this.enableMic(),A.push("Restarted microphone"));const g=await this.diagnoseAudio();if(g.devices?.working?.length>0){const I=g.currentSetup?.deviceLabel,C=g.devices.working[0];I!==C.label&&(await this.changeAudioInput({deviceId:C.deviceId}),A.push(`Switched to working device: ${C.label}`))}return{success:!0,fixes:A}}catch(I){return{success:!1,error:I.message,fixes:A}}}async getEnhancedDeviceList(){try{const A=await navigator.mediaDevices.enumerateDevices(),I=[];for(const g of A){if("audioinput"!==g.kind)continue;const A={deviceId:g.deviceId,label:g.label,groupId:g.groupId,capabilities:null,testResult:null};try{const I=(await navigator.mediaDevices.getUserMedia({audio:{deviceId:{exact:g.deviceId}}})).getAudioTracks()[0];A.capabilities=I.getCapabilities(),A.testResult=await this._testSpecificDevice(g.deviceId,1e3),I.stop()}catch(I){A.testResult={working:!1,error:I.message}}I.push(A)}return I}catch(A){throw new Error(`Enhanced device enumeration failed: ${A.message}`)}}async optimizeAudioSettings(){const A=this.getSystemHealthStatus(),I=[];try{const g=await this.assessAudioQuality(3e3);return"poor-clipping"===g.quality?(await this.changeAudioInput({autoGainControl:!1,echoCancellation:!0,noiseSuppression:!0}),I.push("Disabled auto-gain control to prevent clipping")):"poor-quiet"===g.quality&&(await this.changeAudioInput({autoGainControl:!0,echoCancellation:!0,noiseSuppression:!1}),I.push("Enabled auto-gain control for low input levels")),"connected"===A.transports.send?.connectionState&&await this._sendTransport.getStats(),{success:!0,recommendations:I,qualityAssessment:g}}catch(A){return{success:!1,error:A.message,recommendations:I}}}async _testAudioDevices(){const A={available:[],current:null,working:[],failed:[]};try{const I=await navigator.mediaDevices.enumerateDevices();A.available=I.filter(A=>"audioinput"===A.kind).map(A=>({deviceId:A.deviceId,label:A.label,groupId:A.groupId})),A.current=this._mic.device;for(const I of A.available)try{const g=await 
this._testSpecificDevice(I.deviceId);g.working?A.working.push({...I,audioLevel:g.audioLevel,testDuration:g.duration}):A.failed.push({...I,error:g.error})}catch(g){A.failed.push({...I,error:g.message})}}catch(I){A.error=I.message}return A}async _testSpecificDevice(A,I=2e3){return new Promise(g=>{const C={working:!1,audioLevel:-1/0,duration:I,error:null};let Q=null,B=null,E=null;const i=()=>{Q&&Q.getTracks().forEach(A=>A.stop()),B&&"closed"!==B.state&&B.close()},D=setTimeout(()=>{i(),g(C)},I);navigator.mediaDevices.getUserMedia({audio:{deviceId:{exact:A}}}).then(A=>{Q=A,B=new(window.AudioContext||window.webkitAudioContext),E=B.createAnalyser(),E.fftSize=256,B.createMediaStreamSource(Q).connect(E);const n=E.frequencyBinCount,o=new Uint8Array(n),s=setInterval(()=>{E.getByteFrequencyData(o);let A=0;for(let I=0;I<n;I++)A+=o[I];const I=A/n,g=20*Math.log10(I/255);isFinite(g)&&g>C.audioLevel&&(C.audioLevel=g),I>0&&(C.working=!0)},100);setTimeout(()=>{clearInterval(s),clearTimeout(D),i(),g(C)},I-100)}).catch(A=>{clearTimeout(D),C.error=A.message,i(),g(C)})})}async _testCurrentMicSetup(){const A={isActive:!1,isProducing:!1,isMuted:!1,audioLevel:-1/0,deviceLabel:null,streamActive:!1,producerStats:null};try{if(A.isActive=!!this._micProducer,A.isProducing=!(!this._micProducer||this._micProducer.closed),A.isMuted=!(!this._micProducer||!this._micProducer.paused),A.deviceLabel=this._mic.device?.label,A.streamActive=!(!this._micStream||!this._micStream.active),this._micStream){const I=this._micStream.getAudioTracks();I.length>0&&(A.trackEnabled=I[0].enabled,A.trackReadyState=I[0].readyState,A.trackSettings=I[0].getSettings())}if(this._audioAnalyser&&(A.audioLevel=this._getAudioLevel()),this._micProducer&&this._sendTransport)try{const I=await this._sendTransport.getStats();A.producerStats=I}catch(I){A.producerStatsError=I.message}}catch(I){A.error=I.message}return A}async _testWebRTCConnectivity(){const A={sendTransport:null,recvTransport:null,iceConnectionState:null,dtlsState:null,error:null};try{this._sendTransport&&(A.sendTransport={id:this._sendTransport.id,connectionState:this._sendTransport.connectionState,iceState:this._sendTransport.iceState,dtlsState:this._sendTransport.dtlsState}),this._recvTransport&&(A.recvTransport={id:this._recvTransport.id,connectionState:this._recvTransport.connectionState,iceState:this._recvTransport.iceState,dtlsState:this._recvTransport.dtlsState})}catch(I){A.error=I.message}return A}_generateAudioRecommendations(A){const I=[];return A.permissions.granted||I.push({type:"critical",title:"Microphone Permission Required",description:"Please allow microphone access in your browser",action:"Grant microphone permission in browser settings"}),0===A.devices.working.length&&I.push({type:"critical",title:"No Working Audio Devices",description:"No functioning microphone devices detected",action:"Check if microphone is connected and enabled in system settings"}),A.currentSetup.isActive&&!A.currentSetup.streamActive&&I.push({type:"warning",title:"Current Microphone Not Working",description:"The selected microphone device appears to be inactive",action:"Try switching to a different microphone device"}),"failed"===A.connectivity.sendTransport?.connectionState&&I.push({type:"critical",title:"Connection Failed",description:"Unable to establish audio connection to server",action:"Check internet connection and try rejoining the room"}),A.currentSetup.audioLevel<-60&&I.push({type:"info",title:"Low Audio Level",description:"Your microphone level appears to be very low",action:"Check microphone 
volume in system settings or move closer to microphone"}),I}async quickAudioTest(){const A={working:!1,issues:[],timestamp:Date.now()};try{if(!this._micProducer)return A.issues.push("Microphone not active"),A;if(this._micProducer.closed)return A.issues.push("Microphone producer is closed"),A;if(!this._micStream||!this._micStream.active)return A.issues.push("Microphone stream is not active"),A;const I=this._micStream.getAudioTracks();if(0===I.length)return A.issues.push("No audio tracks found"),A;if("live"!==I[0].readyState)return A.issues.push("Audio track is not live"),A;if("connected"!==this._sendTransport.connectionState)return A.issues.push(`Send transport not connected: ${this._sendTransport.connectionState}`),A;A.working=!0}catch(I){A.issues.push(`Test error: ${I.message}`)}return this.emit("quickAudioTestComplete",{peerId:this.data.inputParams.peerId,result:A}),A}async listAudioOutputDevices(){try{if(this._deviceList&&this._deviceList.audioOutputDevices)return ji.debug("Using cached audio output devices:",this._deviceList.audioOutputDevices),{success:!0,devices:this._deviceList.audioOutputDevices};const A=await navigator.mediaDevices.enumerateDevices();return this._audioOutputDevices=A.filter(A=>"audiooutput"===A.kind),ji.debug("Found audio output devices:",this._audioOutputDevices),{success:!0,devices:this._audioOutputDevices.map(A=>({deviceId:A.deviceId,label:A.label||`Speaker ${A.deviceId.slice(-4)}`,groupId:A.groupId}))}}catch(A){return ji.error("Failed to enumerate audio output devices:",A),{success:!1,error:A.message}}}async testSpeakerDevice(A,I={}){ji.debug("Testing speaker device",A);const{testDuration:g=3e3,testFrequencies:C=[440,1e3,2e3],volume:Q=.3,requireUserConfirmation:B=!0}=I,E=`speaker-test-${A}-${Date.now()}`;try{if(!HTMLAudioElement.prototype.setSinkId)throw new Error("setSinkId is not supported in this browser");const I={deviceId:A,testId:E,timestamp:Date.now(),success:!1,frequencies:[],volume:Q,duration:g,userConfirmed:!1,error:null},i=new Audio;i.volume=Q,i.loop=!1,await i.setSinkId(A),I.setSinkId=!0,this._testAudioElements.set(E,i);for(const A of C){const Q=await this._playTestTone(i,A,g/C.length);I.frequencies.push(Q)}if(B){const g=await this._requestUserConfirmation(A,I);I.userConfirmed=g,I.success=g}else I.success=I.frequencies.every(A=>A.played);return this._speakerTestResults.set(A,I),this.emit("speakerTestComplete",{deviceId:A,testResult:I}),{success:!0,testResult:I}}catch(I){ji.error(`Speaker test failed for device ${A}:`,I);const g={deviceId:A,testId:E,timestamp:Date.now(),success:!1,error:I.message};return this._speakerTestResults.set(A,g),this.emit("speakerTestComplete",{deviceId:A,testResult:g}),{success:!1,error:I.message,testResult:g}}finally{this._cleanupTestAudio(E)}}async _playTestTone(A,I,g){return new Promise((C,Q)=>{try{const Q=new window.AudioContext,B=Q.createOscillator(),E=Q.createGain(),i=Q.createMediaStreamDestination();B.connect(E),E.connect(i),B.frequency.setValueAtTime(I,Q.currentTime),B.type="sine",E.gain.setValueAtTime(0,Q.currentTime),E.gain.linearRampToValueAtTime(.1,Q.currentTime+.1),E.gain.linearRampToValueAtTime(.1,Q.currentTime+g/1e3-.1),E.gain.linearRampToValueAtTime(0,Q.currentTime+g/1e3),A.srcObject=i.stream,B.start(),B.stop(Q.currentTime+g/1e3);const D=A.play();void 0!==D&&D.then(()=>{setTimeout(()=>{B.disconnect(),E.disconnect(),Q.close(),C({frequency:I,duration:g,played:!0,timestamp:Date.now()})},g)}).catch(A=>{Q.close(),C({frequency:I,duration:g,played:!1,error:A.message,timestamp:Date.now()})})}catch(A){Q(A)}})}async 
_requestUserConfirmation(A,I){return new Promise(g=>{this.emit("speakerTestConfirmationRequired",{deviceId:A,testResult:I,onConfirm:A=>g(A)}),setTimeout(()=>g(!1),1e4)})}async testCurrentSpeakerOutput(){try{const A={timestamp:Date.now(),currentDevice:this._currentSpeakerDevice,remoteAudioPresent:!1,audioElementFound:!1,volumeLevel:0,success:!1},I=document.querySelectorAll("audio"),g=document.querySelectorAll("video");let C=[];if(I.forEach(A=>{A.srcObject&&A.srcObject.getAudioTracks().length>0&&C.push(A)}),g.forEach(A=>{A.srcObject&&A.srcObject.getAudioTracks().length>0&&C.push(A)}),A.audioElementFound=C.length>0,A.elementsCount=C.length,C.length>0){for(const I of C)try{if(I.srcObject){const g=new AudioContext,C=g.createMediaStreamSource(I.srcObject),Q=g.createAnalyser();C.connect(Q),Q.fftSize=256;const B=Q.frequencyBinCount,E=new Uint8Array(B);Q.getByteFrequencyData(E);const i=E.reduce((A,I)=>A+I,0)/B;A.volumeLevel=Math.max(A.volumeLevel,i),A.remoteAudioPresent=i>0,g.close()}}catch(A){ji.debug("Could not analyze remote audio element:",A)}A.success=A.remoteAudioPresent}return this.emit("currentSpeakerTestComplete",A),{success:!0,testResult:A}}catch(A){return ji.error("Current speaker test failed:",A),{success:!1,error:A.message}}}async diagnoseSpeakers(){ji.debug("Starting comprehensive speaker diagnostic");const A={timestamp:Date.now(),browser:this._client,support:{},devices:{},currentOutput:{},remoteAudio:{},recommendations:[]};try{A.support={setSinkId:!!HTMLAudioElement.prototype.setSinkId,enumerateDevices:!!navigator.mediaDevices?.enumerateDevices,audioContext:!!window.AudioContext};const I=await this.listAudioOutputDevices();A.devices={available:I.devices||[],count:I.devices?.length||0,hasDefault:I.devices?.some(A=>"default"===A.deviceId)||!1};const g=await this.testCurrentSpeakerOutput();return A.currentOutput=g.testResult,A.remoteAudio=this._analyzeRemoteAudioSetup(),A.recommendations=this._generateSpeakerRecommendations(A),this.emit("speakerDiagnosticComplete",{diagnostic:A}),A}catch(I){return ji.error("Speaker diagnostic failed:",I),A.error=I.message,A}}_analyzeRemoteAudioSetup(){const A={consumers:0,activeStreams:0,audioElements:0,videoElements:0,totalTracks:0};try{this._consumers&&this._consumers.forEach(I=>{I&&"audio"===I.kind&&!I.closed&&A.consumers++});const I=document.querySelectorAll("audio"),g=document.querySelectorAll("video");I.forEach(I=>{A.audioElements++,I.srcObject&&I.srcObject.getAudioTracks().length>0&&(A.activeStreams++,A.totalTracks+=I.srcObject.getAudioTracks().length)}),g.forEach(I=>{A.videoElements++,I.srcObject&&I.srcObject.getAudioTracks().length>0&&(A.activeStreams++,A.totalTracks+=I.srcObject.getAudioTracks().length)}),0===A.activeStreams&&0===A.totalTracks&&A.consumers>0&&(A.activeStreams=A.consumers,A.totalTracks=A.consumers)}catch(I){ji.error("Remote audio analysis failed:",I),A.error=I.message}return A}_generateSpeakerRecommendations(A){const I=[];return A.support.setSinkId||I.push({type:"critical",title:"Audio Output Selection Not Supported",description:"Your browser does not support changing audio output devices",actions:["Use Chrome, Edge, or Firefox for audio output selection","Change system default audio device instead","Consider using a different browser"]}),0===A.devices.count&&I.push({type:"critical",title:"No Audio Output Devices Found",description:"No speakers or headphones detected",actions:["Check if speakers/headphones are connected","Verify audio drivers are installed","Try refreshing the page after connecting 
devices"]}),0===A.remoteAudio.consumers&&0===A.remoteAudio.activeStreams&&I.push({type:"warning",title:"No Remote Audio Detected",description:"Not receiving audio from other participants",actions:["Ask other participants to unmute their microphones","Check if you have muted remote participants","Verify your internet connection"]}),!A.currentOutput.success&&A.currentOutput.audioElementFound&&I.push({type:"warning",title:"Audio Output Issues",description:"Remote audio present but may not be playing correctly",actions:["Check system volume levels","Try switching to a different audio output device","Verify the selected output device is working"]}),0===I.length&&I.push({type:"success",title:"Audio Output System Healthy",description:"Speaker setup appears to be working correctly",actions:["Your audio output is configured properly","Run individual device tests if experiencing issues"]}),I}async progressiveTestAllSpeakers(A={}){ji.debug("Progressive speaker test started");const{testDuration:I=2e3,requireConfirmation:g=!0,volume:C=.2}=A;try{const A=await this.listAudioOutputDevices();if(!A.success)throw new Error("Could not enumerate audio devices");const Q=[];let B=0;this.emit("progressiveSpeakerTestStarted",{totalDevices:A.devices.length,testDuration:I,requireConfirmation:g});for(const E of A.devices){B++,this.emit("progressiveSpeakerTestProgress",{currentIndex:B,totalDevices:A.devices.length,currentDevice:E,progress:B/A.devices.length*100});const i=await this.testSpeakerDevice(E.deviceId,{testDuration:I,volume:C,requireUserConfirmation:g,testFrequencies:[1e3]});Q.push({device:E,...i}),await new Promise(A=>setTimeout(A,500))}return this.emit("progressiveSpeakerTestComplete",{results:Q,workingDevices:Q.filter(A=>A.success),failedDevices:Q.filter(A=>!A.success)}),{success:!0,results:Q,summary:{total:Q.length,working:Q.filter(A=>A.success).length,failed:Q.filter(A=>!A.success).length}}}catch(A){return ji.error("Progressive speaker test failed:",A),{success:!1,error:A.message}}}_cleanupTestAudio(A){if(ji.debug("Cleaning up test audio"),this._testAudioElements.has(A)){const I=this._testAudioElements.get(A);try{I.pause(),I.srcObject=null,I.src=""}catch(A){ji.debug("Error cleaning up test audio:",A)}this._testAudioElements.delete(A)}}getCurrentSpeakerDevice(){return ji.debug("Getting current speaker device"),this._currentSpeakerDevice}async meetingSafeSpeakerTest(A){return ji.debug("Meeting safe speaker test started"),this.testSpeakerDevice(A,{testDuration:1500,testFrequencies:[800],volume:.1,requireUserConfirmation:!0})}hideUserAuthenticationDialog=A=>{ji.debug("authentication already done message:%o",A),this.emit("moderatorAuthStatus",{requesterId:A.requesterId,moderatorActed:A.peerId})};onNewPeer(A){const{peerId:I,displayName:g,participantType:C}=A;this._peers.set(I,{displayName:g,participantType:C,consumers:[]}),this.emit("newPeer",{peerId:I,peerName:g,type:this.data.inputParams.peerId===I?"local":"remote",peerRole:C})}async onExistingParticipants(A){if(ji.debug("Onexisting participant 
message:%O",A),this._routerRtpCapabilities=A.routerRtpCapabilities,this._roomStatus="connected",this._roomDisplayName=A.roomDisplayName,this._running=!0,this._socket.updateRoomJoinStatus(!0),this.emit("newPeer",{peerId:this.data.inputParams.peerId,peerName:this.data.inputParams.peerName,type:"local",peerRole:this.data.inputParams.peerType,userSettings:A.userSettings,participantType:A.participantType,stageModeSettings:A.stageMode,enableChatOption:A.enableChatOption,enableScreenSharing:A.enableScreenSharing}),this.data.inputParams.produce?await this._createSendTransport():ji.debug("Produce is false!"),this.data.inputParams.consume){await this._createRecvTransport();let I=this;A.peers&&A.peers.length>0&&A.peers.forEach(A=>{I.emit("newPeer",{peerId:A.peerId,peerName:A.name,type:"remote",peerRole:A.participantType})})}else ji.debug("Consume is false!")}setSubscriptions=({peerIds:A=[],stagedPeerIds:I=[],mediaTags:g=[]}={})=>{try{if(!Boolean(this.data?.inputParams?.manualSubscription))return ji.debug("setSubscriptions() ignored (manualSubscription is disabled)"),{success:!1,reason:"manualSubscription is disabled"};ji.debug("setSubscriptions() peerIds:%O",A);const C=this.data?.inputParams?.peerId,Q=(Array.isArray(A)?A:[]).filter(Boolean).filter(A=>A!==C),B=(Array.isArray(I)?I:[]).filter(Boolean).filter(A=>A!==C),E=new Set(Q),i=new Set(B),D=new Set([...E,...i]),n=Array.isArray(g)?g.filter(Boolean):[],o=n.length>0,s=o?new Set(n):null,w=[],a=new Map,y=[],G=new Map,R=[],S=new Map,h=[];for(const A of D){const I=this._subscribedPeers.get(A);if(o){if(null===I)continue;const g=I instanceof Set?I:new Set,C=[];for(const A of s)g.has(A)||C.push(A);if(C.length>0){E.has(A)?a.set(A,new Set(C)):S.set(A,new Set(C));for(const A of C)g.add(A);this._subscribedPeers.set(A,g)}else void 0===I&&this._subscribedPeers.set(A,g)}else(void 0===I||I instanceof Set)&&(E.has(A)?w.push(A):R.push(A),this._subscribedPeers.set(A,null))}for(const[A,I]of Array.from(this._subscribedPeers.entries()))if(!D.has(A))if(o)if(null===I)y.push(A),this._subscribedPeers.delete(A);else if(I instanceof Set){const g=[];for(const A of s)I.has(A)&&g.push(A);if(g.length>0){G.set(A,new Set(g));for(const A of g)I.delete(A);0===I.size?this._subscribedPeers.delete(A):this._subscribedPeers.set(A,I)}}else this._subscribedPeers.delete(A);else y.push(A),this._subscribedPeers.delete(A);const N=new Set(this._activeSubscribedPeerIds);this._activeSubscribedPeerIds=new Set(Array.from(E));for(const A of D){const I=N.has(A),g=E.has(A);!I&&g&&h.push(A),I&&!g&&i.has(A)&&(R.includes(A)||R.push(A))}y.length>0&&this.unsubscribePeers(y,n),w.length>0&&this.subscribePeers(w,n),R.length>0&&this.stagePeers(R,n),h.length>0&&this.unstagePeers(h,n);for(const[A,I]of G.entries())this.unsubscribePeers([A],Array.from(I));for(const[A,I]of a.entries())this.subscribePeers([A],Array.from(I));for(const[A,I]of S.entries())this.stagePeers([A],Array.from(I));try{for(const A of h)this._resumeExistingConsumersForPeer(A);for(const A of R)E.has(A)||this._pauseExistingConsumersForPeer(A)}catch(A){}return{success:!0,subscribedPeers:Array.from(this._subscribedPeers.keys()),activePeers:Array.from(this._activeSubscribedPeerIds.values())}}catch(A){return ji.error("setSubscriptions() failed:%O",A),{success:!1,reason:A?.message||"unknown error"}}};subscribeToPeer=({peerId:A,mediaTags:I=[]}={})=>this.subscribePeers([A],I);unsubscribeFromPeer=({peerId:A,mediaTags:I=[]}={})=>this.unsubscribePeers([A],I);subscribePeers=(A=[],I=[])=>{if(ji.debug("subscribePeers() 
peerIds:%O",A),!Boolean(this.data?.inputParams?.manualSubscription))return{success:!1,reason:"manualSubscription is disabled"};const g=(Array.isArray(A)?A:[]).filter(Boolean);if(0===g.length)return{success:!0};this._sendMessage({id:"subscribePeers",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,peerIds:g,mediaTags:Array.isArray(I)?I:[]});const C=I.length>0?new Set(I):null;for(const A of g)this._resumeExistingConsumersForPeer(A,C);return{success:!0}};stagePeers=(A=[],I=[])=>{if(ji.debug("stagePeers() peerIds:%O",A),!Boolean(this.data?.inputParams?.manualSubscription))return{success:!1,reason:"manualSubscription is disabled"};const g=(Array.isArray(A)?A:[]).filter(Boolean);if(0===g.length)return{success:!0};this._sendMessage({id:"stagePeers",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,peerIds:g,mediaTags:Array.isArray(I)?I:[],staged:!0});const C=I.length>0?new Set(I):null;for(const A of g)this._pauseExistingConsumersForPeer(A,C);return{success:!0}};unstagePeers=(A=[],I=[])=>this.subscribePeers(A,I);setSubscribedTracks=(A,I={})=>{if(ji.debug("setSubscribedTracks() peerId:%s tracks:%O",A,I),!Boolean(this.data?.inputParams?.manualSubscription))return{success:!1,reason:"manualSubscription is disabled"};if(!A)return{success:!1,reason:"peerId is required"};const g={audio:"cam-audio",video:"cam-video",screenVideo:"screen-video",screenAudio:"screen-audio"},C=[],Q=[],B=[];for(const[A,E]of Object.entries(I)){const I=g[A];I?!0===E?C.push(I):"staged"===E?Q.push(I):!1===E&&B.push(I):ji.warn("setSubscribedTracks: unknown track type:%s",A)}return B.length>0&&this.unsubscribePeers([A],B),Q.length>0&&this.stagePeers([A],Q),C.length>0&&this.subscribePeers([A],C),{success:!0}};batchSetSubscribedTracks=(A={})=>{if(ji.debug("batchSetSubscribedTracks() peerTracks:%O",A),!Boolean(this.data?.inputParams?.manualSubscription))return{success:!1,reason:"manualSubscription is disabled"};const I={audio:"cam-audio",video:"cam-video",screenVideo:"screen-video",screenAudio:"screen-audio"},g=new Map,C=new Map,Q=new Map;for(const[B,E]of Object.entries(A))if(B&&E)for(const[A,i]of Object.entries(E)){const E=I[A];E&&(!0===i?(g.has(E)||g.set(E,new Set),g.get(E).add(B)):"staged"===i?(C.has(E)||C.set(E,new Set),C.get(E).add(B)):!1===i&&(Q.has(E)||Q.set(E,new Set),Q.get(E).add(B)))}for(const[A,I]of Q.entries())I.size>0&&this.unsubscribePeers(Array.from(I),[A]);for(const[A,I]of C.entries())I.size>0&&this.stagePeers(Array.from(I),[A]);for(const[A,I]of g.entries())I.size>0&&this.subscribePeers(Array.from(I),[A]);return{success:!0}};getEffectiveSubscriptionSnapshot=({includeHistory:A=!0}={})=>{try{const I=Boolean(this.data?.inputParams?.manualSubscription),g=this.data?.inputParams?.roomId,C=this.data?.inputParams?.peerId,Q=Array.from(this._subscribedPeers?.keys?.()||[]),B=Array.from(this._activeSubscribedPeerIds?.values?.()||[]),E=new Set(B),i=Q.filter(A=>A&&!E.has(A)),D={},n=Array.from(this._consumers?.values?.()||[]);for(const A of n){if(!A||A.closed)continue;const I=A?.appData?.peerId,g=A?.appData?.mediaTag;if(!I||!g)continue;const C=Boolean(A.paused),Q=A.kind;D[I]||(D[I]={peerId:I,mode:"none",tracks:{},audio:{cam:{active:!1,staged:!1},screen:{active:!1,staged:!1}},video:{cam:{active:!1,staged:!1},screen:{active:!1,staged:!1}}}),D[I].tracks[g]={consumerId:A.id,kind:Q,mediaTag:g,paused:C,staged:C,active:!C};const 
B="screen-audio"===g||"screen-video"===g?"screen":"cam";"audio"===Q?C?D[I].audio[B].staged=!0:D[I].audio[B].active=!0:"video"===Q&&(C?D[I].video[B].staged=!0:D[I].video[B].active=!0)}const o=[],s=[],w=[];for(const[A,I]of Object.entries(D)){const g=Object.values(I.tracks).some(A=>A&&A.active),C=Object.values(I.tracks).some(A=>A&&A.staged);I.mode=g?"subscribed":C?"staged":"none","subscribed"===I.mode?o.push(A):"staged"===I.mode&&s.push(A)}for(const A of Q)A&&(D[A]||w.push(A));const a={ts:Date.now(),manualSubscription:I,roomId:g,selfPeerId:C,desired:{peers:Q,activePeers:B,stagedPeers:i},effective:{peers:D,activePeers:o,stagedPeers:s,nonePeers:w}};return A&&(a.history={admissionDenied:Array.from(this._subscriptionDebug?.admissionDenied||[]),preemptions:Array.from(this._subscriptionDebug?.preemptions||[]),lastResponses:this._subscriptionDebug?.lastResponses||{}}),a}catch(A){return{ts:Date.now(),error:A?.message||"failed to compute snapshot"}}};getEffectiveSubscriptionsSnapshot=(A={})=>this.getEffectiveSubscriptionSnapshot(A);emitEffectiveSubscriptionSnapshot=(A={})=>{const I=this.getEffectiveSubscriptionSnapshot(A);try{this.emit("effectiveSubscriptionSnapshot",I)}catch(A){}return I};unsubscribePeers=(A=[],I=[])=>{if(ji.debug("unsubscribePeers() peerIds:%O",A),!Boolean(this.data?.inputParams?.manualSubscription))return{success:!1,reason:"manualSubscription is disabled"};const g=(Array.isArray(A)?A:[]).filter(Boolean);return 0===g.length||this._sendMessage({id:"unsubscribePeers",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,peerIds:g,mediaTags:Array.isArray(I)?I:[]}),{success:!0}};sendCustomMessage=(A,I="general",g=null,C,Q,B={})=>{const E={id:"customMessage",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,data:A,type:I,recieverPeerId:g,senderType:C,messageType:Q,customData:B};ji.debug("Room sendCustomMessage",E),this._sendMessage(E)};setCaptionPreference(A){try{const I={type:"captions_pref",show:!!A};return ji.debug("Room setCaptionPreference",I),this.sendCustomMessage(JSON.stringify(I),"custom",null,"participant","preference"),{success:!0}}catch(A){return ji.error("Failed to send caption preference",A),{success:!1,reason:A?.message}}}processCustomMessage=A=>{ji.debug("Room processCustomMessage",A),this.emit("customMessage",A)};updateCId=A=>{ji.debug("Received updateCId message",A),A.targetPeerId!==this.data.inputParams.peerId&&A.targetPeerId||this.emit("updateCId",{message:A,cId:A.cId,peerId:this.data.inputParams.peerId,isMyCId:A.targetPeerId===this.data.inputParams.peerId})};setCurrentlyActiveSpeaker(A){const{peerId:I,volume:g}=A.activeSpeaker;this._activeSpeaker=A.activeSpeaker,this.emit("activeSpeaker",{peerId:I,volume:g})}_createSendTransport=async()=>{ji.debug("Room _createSendTransport");try{this._device.loaded||(ji.debug("Room _createSendTransport","Going to load device with routerrtpcapabilities"),await this._device.load({routerRtpCapabilities:this._routerRtpCapabilities}));let A="send";this._sendTransport&&!this._sendTransport.closed||this._sendMessage({id:"createTransport",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,direction:A})}catch(A){ji.error("Room _createSendTransport",A)}};_createRecvTransport=async()=>{this._device.loaded||(ji.debug("loading device for creating recv transport"),await this._device.load({routerRtpCapabilities:this._routerRtpCapabilities})),this._recvTransport&&!this._recvTransport.closed||(ji.debug("receive transport 
created"),this._sendMessage({id:"createTransport",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,direction:"recv"}))};handleCreateTransportRequest=async A=>{ji.debug("Room handleCreateTransportRequest():%O",A);let I,{transportOptions:g,direction:C}=A;try{if("recv"===C)I=await this._device.createRecvTransport(g),ji.debug("Room",`handleCreateTransportRequest() recv transport created ${I.id}`),this._recvTransport=I,this.handleRecvTransportListeners();else{if("send"!==C)throw new Error(`bad transport 'direction': ${C}`);I=await this._device.createSendTransport(g),ji.debug("Room",`handleCreateTransportRequest() send transport created [id:%s]${I.id}`),this._sendTransport=I,this.handleSendTransportListeners(),this.produceMedia()}}catch(A){ji.error("Room handleCreateTransportRequest() failed to create transport [error:%o]",A)}};checkTransportStatus(A){ji.debug("The transport status is:%s",A),ji.debug("sendTransport conectionState:%s, recvTransport ConnectionState:%s",this._sendTransport.connectionState,this._recvTransport.connectionState),this._sendMessage({id:"peerTransportState",sendTransport:this._sendTransport.connectionState,recvTransport:this._recvTransport.connectionState})}handleSendTransportListeners=()=>{this._sendTransport.on("connect",this.handleTransportConnectEvent),this._sendTransport.on("produce",this.handleTransportProduceEvent);let A=this;this._sendTransport.on("connectionstatechange",async I=>{if(ji.debug(`ConferenceRoom sendTransport connectionState ${I} & socketconnection state ${this._socket._ws?this._socket._ws.readyState:"null"}`),ji.debug(`ConferenceRoom sendTransport connectionState ${I} & socketconnection state ${A._socket.wsManager.connectionState}`),"disconnected"===I)setTimeout(async()=>{if("disconnected"===I)if(ji.debug("Connection state for Send Transport is:%s even after 5 seconds",I),ji.warn(`sendTransport connectionState ${I} & socketconnection state ${A._socket.wsManager.connectionState}`),"connected"===A._socket.wsManager.connectionState)A.restartIce(A._sendTransport.id,"send");else{for(;"connected"!==A._socket.wsManager.connectionState;)ji.debug(`socket not yet ready with state- ${A._socket.wsManager.connectionState}`),await OE(1e3);"connected"===A._roomStatus&&A.restartIce(A._sendTransport.id,"send")}},5e3);else if("failed"===I)if(ji.warn(`sendTransport connectionState ${I} & socketconnection state ${A._socket.wsManager.connectionState}`),"connected"===A._socket.wsManager.connectionState)A.restartIce(A._sendTransport.id,"send");else{for(;"connected"!==A._socket.wsManager.connectionState;)ji.debug(`handleSendTransportListeners() | socket not yet ready with state- ${A._socket.wsManager.connectionState}`),await OE(1e3);"connected"===A._roomStatus&&A.restartIce(A._sendTransport.id,"send")}ji.debug("ConferenceRoom",`send transport connection state change [state:%s]${I}`)})};handleTransportConnectEvent=({dtlsParameters:A},I,g)=>{try{const g=A=>{ji.debug("connect-transport action"),I(),Zi.remove("connectTransport")};Zi.push("connectTransport",g);let C={id:"connectTransport",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,transportId:this._sendTransport.id,dtlsParameters:A,direction:"send"};this._sendMessage(C)}catch(A){ji.error("handleTransportConnectEvent() failed [error:%o]",A),g(A)}};handleTransportProduceEvent=({kind:A,rtpParameters:I,appData:g},C,Q)=>{try{const Q=A=>{ji.debug("handleTransportProduceEvent callback [data:%o]",A),C({id:A.producerId}),Zi.remove("produce")};Zi.push("produce",Q);let 
B="cam-audio"===g.mediaTag&&void 0!==this.data.inputParams.audioStatus&&!this.data.inputParams.audioStatus;ji.debug(`handleTransportProduceEvent() | pause status->${B}`);let E={id:"sendTrack",transportId:this._sendTransport.id,peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,kind:A,rtpParameters:I,paused:B,appData:g,clientOs:this._client.os.name,browser:this._client.browser};this._sendMessage(E)}catch(A){ji.error("handleTransportProduceEvent() failed [error:%o]",A),Q(A)}};produceMedia=async()=>{this.data.inputParams.produce?(this.data.inputParams.produceAudio?this.enableMic({deviceId:this.data.inputParams.audioDeviceId?this.data.inputParams.audioDeviceId:null}):ji.debug("No need to produce audio!"),this._device.canProduce("video")&&(this.data.inputParams.produceVideo?(ji.debug("going to enable cam with vbdetails",this.data.inputParams.vbdetails),this.enableCam({deviceId:this.data.inputParams.videoDeviceId?this.data.inputParams.videoDeviceId:null,vbdetails:this.data.inputParams.vbdetails})):ji.debug("No need to produce video!"),this.data.inputParams.share&&this.enableShare({shareAudio:this.data.inputParams.shareAudio,enableSharingLayers:this._enableSharingLayers,shareBitRates:this.data.inputParams.shareBitRates}))):ji.warn("produce is false!")};handleRecvTransportListeners=async()=>{this._recvTransport.on("connect",this.handleRecvTransportConnectEvent);let A=this;this._recvTransport.on("connectionstatechange",async I=>{if(ji.debug(`ConferenceRoom recvTransport connectionState ${I} & socketconnection state ${this._socket._ws?this._socket._ws.readyState:"null"}`),"disconnected"===I)setTimeout(async()=>{if("disconnected"===I)if(ji.warn("Connection state for Recv Transport is:%s even after 5 seconds:%s, socket state",I,A._socket.wsManager.connectionState),"connected"===A._socket.wsManager.connectionState)A.restartIce(A._recvTransport.id,"recv");else{for(;"connected"!==A._socket.wsManager.connectionState;)ji.debug(`handleRecvTransportListeners() | socket not yet ready with state- ${A._socket.wsManager.connectionState}`),await OE(1e3);"connected"===A._roomStatus&&A.restartIce(A._recvTransport.id,"recv")}},5e3);else if("failed"===I)if(ji.warn("Connection state for Recv Transport is:%s even after 5 seconds:%s, socket state",I,A._socket.wsManager.connectionState),"connected"===A._socket.wsManager.connectionState)A.restartIce(A._recvTransport.id,"recv");else{for(;"connected"!==A._socket.wsManager.connectionState;)ji.debug(`handleRecvTransportListeners() | socket not yet ready with state- ${A._socket.wsManager.connectionState}`),await OE(1e3);"connected"===A._roomStatus&&A.restartIce(A._recvTransport.id,"recv")}else ji.debug("Connection state for Recv Transport is:%s even after 5 seconds:%s, socket state",I,A._socket.wsManager.connectionState)});let I={id:"transportsAvailable",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,rtpCapabilities:this._device.rtpCapabilities};this._sendMessage(I)};handleRecvTransportConnectEvent=({dtlsParameters:A},I,g)=>{try{const g=A=>{ji.debug("ConferenceRoom","connect-recv-transport action"),I(),Zi.remove("connectRecvTransport")};Zi.push("connectRecvTransport",g);let C={id:"connectTransport",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,transportId:this._recvTransport.id,dtlsParameters:A,direction:"recv"};this._sendMessage(C)}catch(A){ji.error("handleTransportConnectEvent() failed [error:%o]",A),g(A)}};handleRecvTrackRequest=async A=>{if(ji.debug("Room 
handleRecvTrackRequest",A),!this.data.inputParams.consume)return void ji.warn("I do not want to consume");let{senderPeerId:I,mediaTag:g,sender:C,audioStatus:Q,videoStatus:B,senderParticipantType:E,type:i,producerPaused:D,staged:n,...o}=A;ji.debug("New consumer created",o),o.id=o.consumerId,delete o.consumerId,ji.debug("ConferenceRoom",`senderPeerId is ->${I}`);let s=await this._recvTransport.consume({...o,streamId:`${I}-${"screen-video"===g||"screen-audio"===g?"share":"mic-webcam"}`,appData:{peerId:I,mediaTag:g}});for(;this._recvTransport&&"connected"!==this._recvTransport.connectionState;)ji.debug(`recv transport connstate${this._recvTransport.connectionState}`),await OE(100);this._consumers.set(s.id,s),s.on("transportclose",()=>{this._consumers.delete(s.id)});const{spatialLayers:w,temporalLayers:a}=$I.parseScalabilityMode(s.rtpParameters.encodings[0].scalabilityMode),y=this._peers.get(this.data.inputParams.peerId);ji.debug(`Consumer created for sender peerId ${I} for kind ${s.kind} for receiver peerId ${this.data.inputParams.peerId}`),ji.info("The old peer data is :%O",y),y?(y["screen-video"===g||"screen-audio"===g?`ss${s.kind}`:s.kind]={consumerId:s.id,type:i,locallyPaused:!1,remotelyPaused:D,rtpParameters:s.rtpParameters,spatialLayers:w,temporalLayers:a,preferredSpatialLayer:w-1,preferredTemporalLayer:a-1,priority:1,codec:s.rtpParameters.codecs[0].mimeType.split("/")[1],track:s.track,share:"screen-video"===g||"screen-audio"===g},ji.info("The new peer data is :%O",y),this._peers.set(this.data.inputParams.peerId,y)):(ji.info("Peer not found!"),this._peers.set(this.data.inputParams.peerId,{["screen-video"===g||"screen-audio"===g?`ss${s.kind}`:s.kind]:{consumerId:s.id,type:i,locallyPaused:!1,remotelyPaused:D,rtpParameters:s.rtpParameters,spatialLayers:w,temporalLayers:a,preferredSpatialLayer:w-1,preferredTemporalLayer:a-1,priority:1,codec:s.rtpParameters.codecs[0].mimeType.split("/")[1],track:s.track,share:"screen-video"===g||"screen-audio"===g}}));const G=Boolean(this.data?.inputParams?.manualSubscription),R=!G||this._activeSubscribedPeerIds.has(I),S=Boolean(n);if(!G||R&&!S)await this.resumeConsumer(s),this._emitConsumerStart({consumer:s,senderPeerId:I,mediaTag:g});else try{s.paused||await s.pause()}catch(A){}};_emitConsumerStart=({consumer:A,senderPeerId:I,mediaTag:g})=>{try{ji.debug("Emitting consumer start events for",I,g),"audio"===A.kind?"screen-audio"===g?this.emit("ssAudioStart",{peerId:I,audioTrack:A.track,type:"remote"}):this.emit("micStart",{peerId:I,audioTrack:A.track,type:"remote"}):"video"===A.kind&&("screen-video"===g?this.emit("ssVideoStart",{peerId:I,videoTrack:A.track,type:"remote"}):this.emit("videoStart",{peerId:I,videoTrack:A.track,type:"remote"}))}catch(A){ji.debug("_emitConsumerStart failed:%O",A)}};_pauseExistingConsumersForPeer=(A,I=null)=>{try{for(const g of Array.from(this._consumers.values()))if(g?.appData&&g.appData.peerId===A){if(null!==I){const A=g?.appData?.mediaTag;if(!A||!I.has(A))continue}try{g.paused||(g.pause(),ji.debug("Paused local consumer for peer:%s tag:%s",A,g?.appData?.mediaTag))}catch(A){}}}catch(A){}};_resumeExistingConsumersForPeer=async(A,I=null)=>{try{for(const g of Array.from(this._consumers.values()))if(g?.appData&&g.appData.peerId===A){if(null!==I){const A=g?.appData?.mediaTag;if(!A||!I.has(A))continue}try{g.paused&&(await this.resumeConsumer(g),this._emitConsumerStart({consumer:g,senderPeerId:A,mediaTag:g?.appData?.mediaTag}),ji.debug("Resumed consumer for peer:%s 
tag:%s",A,g?.appData?.mediaTag))}catch(A){}}}catch(A){}};resumeConsumer=async A=>{if(A){ji.debug("resume consumer",A.appData.peerId,A.appData.mediaTag);try{let I={id:"resumeConsumer",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,consumerId:A.id};this._sendMessage(I),await A.resume()}catch(A){ji.error("resumeConsumer error",A)}}};handleConnectTransportRequest=async A=>{ji.debug("handleTransportConnectRequest()");try{const I=Zi.get("connectTransport");if(!I)throw new Error("transport-connect action was not found");await I(A)}catch(A){ji.error("handleTransportConnectRequest() failed [error:%o]",A)}};handleConnectRecvTransportRequest=async A=>{ji.debug("handleTransportConnectRequest()");try{const I=Zi.get("connectRecvTransport");if(!I)throw new Error("recv transport-connect action was not found");await I(A)}catch(A){ji.error("handleRecvTransportConnectRequest() failed [error:%o]",A)}};handleSendTrackRequest=async A=>{ji.debug("ConferenceRoom","handleProduceRequest()");try{const I=Zi.get("produce");if(!I)throw new Error("produce action was not found");await I(A)}catch(A){ji.error("handleProduceRequest() failed [error:%o]",A)}};mediaToggled=A=>{switch(ji.debug("Media Toggled message:%O",A),A.type){case"video":ji.debug(`mediaToggled() | inside case video${A.videoStatus}`);const I=A.peerId??A.peer;if(A.videoStatus)if(this.data.inputParams.roomType===RE){const A=this._peers.get(I);A&&A.videoTrack?(this.emit("videoStart",{peerId:I,videoTrack:A.videoTrack,type:"cam-video"}),ji.info("Emitted videoStart for enabled video - track already exists")):(this.emit("videoStart",{peerId:I,videoTrack:null,type:"cam-video"}),ji.debug("Emitted videoStart for enabled video - track will arrive via ontrack"))}else this.emit("videoStart",{peerId:I,videoTrack:null,type:"cam-video"}),ji.debug("Emitted videoStart for enabled video in conference room");else this.emit("videoEnd",{peerId:I,type:"cam-video"}),ji.debug("Emitted videoEnd for disabled video");break;case"audio":ji.debug(`mediaToggled() | inside case audio${A.audioStatus}`);const g=A.peerId??A.peer;A.audioStatus?this.emit("peerUnMuted",{peerId:g,type:"remote"}):this.emit("peerMuted",{peerId:g,type:"remote"})}};closeConsumer=A=>{let{consumerId:I}=A;const g=this._consumers.get(I);if(!g)return void ji.warn("Consumer with id not found!:%s",I);const{peerId:C,mediaTag:Q}=g.appData;try{if("preempted"===A?.reason){try{this._subscriptionDebug?.preemptions&&(this._subscriptionDebug.preemptions.push({ts:Date.now(),...A,consumerId:I,senderPeerId:A?.senderPeerId||C,mediaTag:Q}),this._subscriptionDebug.preemptions.length>200&&this._subscriptionDebug.preemptions.shift())}catch(A){}this.emit("consumerPreempted",{consumerId:I,senderPeerId:A?.senderPeerId||C,mediaTag:Q,reason:A?.reason,preemptedBy:A?.preemptedBy,source:A?.source})}}catch(A){}ji.debug("Consumer closed for consumerId:%s, type:%s, appData:%o",I,g?.kind,g.appData);let B="screen-audio"===Q||"screen-video"===Q?`ss${g.kind}`:g.kind;g.close(),this._consumers.delete(I);let E=this._peers.get(this.data.inputParams.peerId);ji.debug("Peer data before deletion:%O",E),E[B]&&E[B].consumerId===I&&delete E[B],ji.debug("Peer data after deletion:%O",E),this._peers.set(this.data.inputParams.peerId,E),"audio"===g?.kind?(ji.debug("Going to emit micEnd, consumer closed for audio"),"screen-audio"===Q?this.emit("ssAudioStop",{peerId:C,track:null,type:"remote"}):this.emit("micEnd",{peerId:C,track:null,type:"remote"})):"video"===g?.kind&&(ji.debug("Going to emit videoEnd, consumer closed for 
video"),"screen-video"===Q?this.emit("ssVideoStop",{peerId:C,track:null,type:"remote"}):this.emit("videoEnd",{peerId:C,track:null,type:"remote"}))};peerLeft=A=>{ji.debug("Peer Left message is:%o",A);let{peerId:I}=A;if(I!==this.data.inputParams.peerId)this._peers.delete(I),this.emit("peerLeft",{peerId:I});else{try{this.emit("connectionStateChange",{status:"removed"}),this.emit("roomClosed",{roomId:this.data.inputParams.roomId,reason:"self-removed-server"})}catch(A){}this.leaveRoomCommon().catch(A=>ji.error("Failed to leaveRoomCommon after self peerLeft:%o",A))}};roomClosed=()=>{ji.info("room closed by Moderator"),this._peers=null,this.emit("roomClosed",{roomId:this.data.inputParams.roomId})};handleRemotePeerJoin=async A=>{ji.info("Handling remote peer join in P2P:%O",A);const{peer:I,name:g,audioStatus:C,videoStatus:Q,participantType:B,iceServers:E,callStartTime:i,cId:D,enableChatOption:n,enableScreenSharing:o}=A;if(this._remotePeerId=I,this._remoteDisplayName=g,this._remotePeerType=B,this._remoteAudioStatus=C,this._remoteVideoStatus=Q,E&&(this._iceServers=E),i&&this.emit("callStartTime",{callStartTime:i}),D&&this.emit("cIdUpdate",{cId:D}),!0===this._pendingP2pAuth&&this._pendingSelfP2pInfo&&!this._peers.has(this.data.inputParams.peerId)){try{const{peerName:A,peerType:I,audioStatus:g,videoStatus:C}=this._pendingSelfP2pInfo;this._addSelfToPeersP2p(A,I,g,C,n,o)}catch(A){}this._pendingP2pAuth=!1,this._pendingSelfP2pInfo=null}if(I){const A={id:I,displayName:g,participantType:B,audioStatus:C,videoStatus:Q,audioTrack:null,videoTrack:null};this._peers.set(I,A),this.emit("newPeer",{peerId:I,peerName:g,type:"remote",peerRole:B,participantType:B}),ji.info("Added remote peer to peers:%O",A),"connected"!==this._roomStatus&&(this._roomStatus="connected",ji.debug("Room status marked connected for P2P flow"));try{!1===C?this.emit("peerMuted",{peerId:I,type:"remote"}):!0===C&&this.emit("peerUnMuted",{peerId:I,type:"remote"}),!1===Q&&this.emit("videoEnd",{peerId:I,type:"remote"})}catch(A){}}E&&"moderator"===this.data.inputParams.peerType?(ji.info("Moderator creating P2P offer"),await this.createPeerOfferP2p(E)):ji.info("Participant waiting for offer")};createPeerOfferP2p=async A=>{ji.info("Creating P2P peer connection and offer");try{if(this._peerConnection=new RTCPeerConnection({iceServers:A}),this._startPeerStatsLoop(),this._peerConnection.addEventListener("iceconnectionstatechange",()=>{const A=this._peerConnection.iceConnectionState;"failed"!==A&&"disconnected"!==A&&"closed"!==A||this._stopPeerStatsLoop()}),this._localStream&&this._localStream.getTracks().length>0){const A=this._screenShareStream&&this._screenShareStream.getVideoTracks&&this._screenShareStream.getVideoTracks()[0]?this._screenShareStream.getVideoTracks()[0]:null,I=this._vbDetails&&this._vbDetails.stream&&this._vbDetails.stream.getVideoTracks&&this._vbDetails.stream.getVideoTracks()[0]?this._vbDetails.stream.getVideoTracks()[0]:null;this._localStream.getTracks().forEach(g=>{ji.info("Adding local track to peer connection:%s",g.kind),"video"===g.kind?A?(ji.info("Using screen share track for initial offer"),this._peerConnection.addTrack(A,this._localStream)):I?(ji.info("Using virtual background track for initial offer"),this._peerConnection.addTrack(I,this._localStream)):this._peerConnection.addTrack(g,this._localStream):this._peerConnection.addTrack(g,this._localStream)})}else ji.warn("No local tracks available when creating offer");this._peerConnection.onicecandidate=A=>{A.candidate&&(ji.debug("Sending ICE 
candidate:%O",A.candidate),this._sendMessage({id:"candidate",candidate:A.candidate,peer:this._remotePeerId}))},this._peerConnection.ontrack=A=>{ji.info("Received remote track:%s",A.track.kind),this.gotRemoteTrackP2p(A)},this._peerConnection.onremovetrack=A=>{ji.info("Remote track removed:%s",A.track.kind),this.handleRemoteTrackRemoved(A)},this._peerConnection.addEventListener("iceconnectionstatechange",()=>{ji.info("ICE connection state:%s",this._peerConnection.iceConnectionState),"connected"===this._peerConnection.iceConnectionState?this.emit("peerConnected",{peerId:this._remotePeerId}):"failed"!==this._peerConnection.iceConnectionState&&"disconnected"!==this._peerConnection.iceConnectionState||(ji.warn("ICE connection failed/disconnected"),this.emit("peerDisconnected",{peerId:this._remotePeerId}))}),this._peerConnection.onnegotiationneeded=async()=>{ji.info("Negotiation needed - creating new offer for track changes");try{const A=await this._peerConnection.createOffer();await this._peerConnection.setLocalDescription(A);const I=!!(this._screenShareStream&&this._screenShareStream.getVideoTracks&&this._screenShareStream.getVideoTracks()[0]);this._sendMessage({id:"offer",offer:this._peerConnection.localDescription,peer:this._remotePeerId,screenShare:I})}catch(A){ji.error("Failed to renegotiate P2P offer:%O",A)}};try{const A=!!(this._screenShareStream&&this._screenShareStream.getVideoTracks&&this._screenShareStream.getVideoTracks()[0]),I=await this._peerConnection.createOffer();await this._peerConnection.setLocalDescription(I),this._sendMessage({id:"offer",offer:this._peerConnection.localDescription,peer:this._remotePeerId,screenShare:A})}catch(A){ji.error("Error creating/sending initial P2P offer:%O",A),this.emit("error",{type:"connectionError",message:"Failed to create initial P2P offer"})}}catch(A){ji.error("Failed to create P2P offer:%O",A),this.emit("error",{type:"connectionError",message:"Failed to create P2P connection"})}};_startPeerStatsLoop(){!this._statsTimer&&this._peerConnection&&(this._statsTimer=setInterval(async()=>{try{const A=await this._peerConnection.getStats(),I=this._parsePeerStats(A);if(!I)return;this.emit?.("peerStats",I),this._sendMessage({id:"p2pConnectionStats",roomId:this.data.inputParams.roomId,peerId:this.data.inputParams.peerId,stats:I})}catch(A){}},this._statsIntervalMs))}_stopPeerStatsLoop(){this._statsTimer&&(clearInterval(this._statsTimer),this._statsTimer=null)}_parsePeerStats(A){let I;const g={peerId:this.data.inputParams.peerId,callId:this.data.inputParams.roomId,timestamp:Date.now(),audio:{inbound:{},outbound:{}},video:{inbound:{},outbound:{}}};return A.forEach(A=>{switch(A.type){case"candidate-pair":A.nominated&&(I=A);break;case"outbound-rtp":this._populateOutbound(A,g);break;case"inbound-rtp":this._populateInbound(A,g);break;case"track":"audio"===A.kind?(g.audioLevel=A.audioLevel??null,g.totalAudioEnergy=A.totalAudioEnergy??null):"video"===A.kind&&(g.video.trackFramesDropped=A.framesDropped??null);break;case"media-source":"video"===A.kind&&(g.video.frameWidth=A.width??null,g.video.frameHeight=A.height??null)}}),A.forEach(A=>{if("remote-inbound-rtp"===A.type){const 
I=A.kind;I&&g[I]&&g[I].outbound&&(g[I].outbound.producerScore=A.score??null,g[I].outbound.roundTripTime=A.roundTripTime??null,g[I].outbound.packetsLost=A.packetsLost??null,g[I].outbound.fractionLost=A.fractionLost??null)}}),["audio","video"].forEach(A=>{g[A].inbound&&(g[A].inbound.consumerScore=this._calculateConsumerScore(g[A].inbound,A))}),g.roundTripTime=I?.currentRoundTripTime??null,g.availableOutgoingBitrate=I?.availableOutgoingBitrate??null,g.availableIncomingBitrate=I?.availableIncomingBitrate??null,g.iceState=this._peerConnection?.iceConnectionState??null,g.candidatePairState=I?.state??null,g.qualityScore=this._calculateQualityScore(g),g}_populateOutbound(A,I){const g="audio"===A.kind?I.audio.outbound:I.video.outbound;g.packetsLost=A.packetsLost??null,g.framesPerSecond=A.framesPerSecond??null,g.keyFramesSent=A.keyFramesSent??null,g.framesDropped=A.framesDropped??null,g.qualityLimitationReason=A.qualityLimitationReason??null,g.packetsRetransmitted=A.retransmittedPacketsSent??null,g.nackCount=A.nackCount??null,g.bitrateKbps=this._calcBitrate(A,"bytesSent"),g.producerScore=A.score??null}_populateInbound(A,I){const g="audio"===A.kind?I.audio.inbound:I.video.inbound;g.packetsLost=A.packetsLost??null,g.jitter=A.jitter??null,g.framesPerSecond=A.framesPerSecond??null,g.frameWidth=A.frameWidth??null,g.frameHeight=A.frameHeight??null,g.packetsRetransmitted=A.retransmittedPacketsReceived??null,g.packetsReceived=A.packetsReceived??null,g.bitrateKbps=this._calcBitrate(A,"bytesReceived"),g.consumerScore=A.score??null}_calcBitrate(A,I){if(!A||"number"!=typeof A[I])return null;const g=this._lastStatCache[A.id],C=A.timestamp;let Q=null;if(g){const B=A[I]-g[I],E=(C-g.timestamp)/1e3;E>0&&(Q=Math.max(8*B/E/1e3,0))}return this._lastStatCache[A.id]={[I]:A[I],timestamp:C},Q}_calculateConsumerScore(A,I){let g=10;if(A.packetsReceived&&A.packetsLost){const I=A.packetsLost/(A.packetsReceived+A.packetsLost)*100;I>5?g-=3:I>3?g-=2:I>1?g-=1:I>.5&&(g-=.5)}if(A.jitter){const I=1e3*A.jitter;I>50?g-=1.5:I>30&&(g-=.8)}if("video"===I){const I=500,C=A.bitrateKbps??0;C>0&&(C<200?g-=2.5:C<I&&(g-=1.5))}else if("audio"===I){const I=20,C=A.bitrateKbps??0;C>0&&C<I&&(g-=2)}return Math.max(0,Math.min(10,g))}_calculateQualityScore(A){let I=10;if("connected"!==A.iceState&&"completed"!==A.iceState)return 0;const g=A.video?.inbound;if(g?.packetsReceived&&g?.packetsLost){const A=g.packetsLost/(g.packetsReceived+g.packetsLost)*100;A>5?I-=3:A>3?I-=2:A>1?I-=1:A>.5&&(I-=.5)}const C=g?.bitrateKbps??0;C>0&&(C<200?I-=2.5:C<500&&(I-=1.5));const Q=1e3*(g?.jitter??0);Q>50?I-=1.5:Q>30&&(I-=.8);const B=1e3*(A.roundTripTime??0);B>300?I-=2:B>200?I-=1:B>150&&(I-=.5);const E=A.video?.trackFramesDropped??0;return E>100?I-=1.5:E>50?I-=1:E>10&&(I-=.5),"bandwidth"===A.video?.outbound?.qualityLimitationReason&&(I-=1),Math.max(0,Math.min(10,I))}handleNegotiationNeededEventP2p=async()=>{ji.info("Negotiation needed - creating new offer for track changes"),this.restartICE()};handleOffer=async({offer:A,iceServers:I,screenShare:g})=>{ji.info("Handling P2P offer");try{const C=I||this._iceServers||[{urls:"stun:stun.l.google.com:19302"}];this._peerConnection=new RTCPeerConnection({iceServers:C}),this._startPeerStatsLoop(),this._peerConnection.addEventListener("iceconnectionstatechange",()=>{const A=this._peerConnection.iceConnectionState;"failed"!==A&&"disconnected"!==A&&"closed"!==A||this._stopPeerStatsLoop()}),this._localStream&&this._localStream.getTracks().length>0?this._localStream.getTracks().forEach(A=>{ji.info("Adding local track to peer 
connection:%s",A.kind),this._peerConnection.addTrack(A,this._localStream)}):ji.warn("No local tracks available when handling offer"),this._peerConnection.onicecandidate=A=>{A.candidate&&(ji.debug("Sending ICE candidate:%O",A.candidate),this._sendMessage({id:"candidate",candidate:A.candidate,peer:this._remotePeerId}))},this._peerConnection.ontrack=A=>{ji.info("Received remote track:%s",A.track.kind),this.gotRemoteTrackP2p(A,g)},this._peerConnection.onremovetrack=A=>{ji.info("Remote track removed:%s",A.track.kind),this.handleRemoteTrackRemoved(A,g)},this._peerConnection.onnegotiationneeded=async()=>{ji.info("Negotiation needed - creating new offer for track changes");try{const A=await this._peerConnection.createOffer();await this._peerConnection.setLocalDescription(A),ji.info("Sending negotiation offer to remote peer"),this._sendMessage({id:"offer",offer:this._peerConnection.localDescription,peer:this._remotePeerId,screenShare:!!(this._screenShareStream&&this._screenShareStream.getVideoTracks&&this._screenShareStream.getVideoTracks()[0])})}catch(A){ji.error("Failed to handle negotiation needed:%O",A),this.emit("error",{type:"negotiationError",message:"Failed to negotiate new track"})}},await this._peerConnection.setRemoteDescription(new RTCSessionDescription(A)),this._peerConnection.addEventListener("iceconnectionstatechange",()=>{ji.info("ICE connection state:%s",this._peerConnection.iceConnectionState),"connected"===this._peerConnection.iceConnectionState?this.emit("peerConnected",{peerId:this._remotePeerId}):"failed"!==this._peerConnection.iceConnectionState&&"disconnected"!==this._peerConnection.iceConnectionState||(ji.warn("ICE connection failed/disconnected"),this.restartICE(),this.emit("peerDisconnected",{peerId:this._remotePeerId}))});const Q=await this._peerConnection.createAnswer();await this._peerConnection.setLocalDescription(Q),ji.info("Sending answer to remote peer"),this._sendMessage({id:"answer",answer:this._peerConnection.localDescription,peer:this._remotePeerId})}catch(A){ji.error("Failed to handle P2P offer:%O",A),this.emit("error",{type:"connectionError",message:"Failed to handle P2P offer"})}};handleAnswer=async({answer:A})=>{ji.info("Handling P2P answer");try{await this._peerConnection.setRemoteDescription(new RTCSessionDescription(A)),ji.info("Remote description set successfully")}catch(A){ji.error("Failed to handle P2P answer:%O",A)}};handleCandidate=async A=>{ji.debug("Handling ICE candidate:%O",A);try{this._peerConnection?(await this._peerConnection.addIceCandidate(new RTCIceCandidate(A)),ji.debug("ICE candidate added successfully")):ji.warn("Received ICE candidate before peer connection exists")}catch(A){ji.error("Failed to add ICE candidate:%O",A)}};gotRemoteTrackP2p=(A,I=!1)=>{ji.info("Got remote track - kind:%s, screenShare:%s",A.track.kind,I);const g=A.track;if(this._remoteStream||(this._remoteStream=new MediaStream),this._remoteStream.addTrack(g),this._peers.has(this._remotePeerId)){const A=this._peers.get(this._remotePeerId);"audio"===g.kind?A.audioTrack=g:"video"===g.kind&&(A.videoTrack=g),this._peers.set(this._remotePeerId,A)}const C=I?"video"===g.kind?"screen-video":"screen-audio":"video"===g.kind?"cam-video":"cam-audio";I&&"video"===g.kind?this.emit("ssVideoStart",{peerId:this._remotePeerId,videoTrack:g,type:C}):"video"===g.kind?this.emit("videoStart",{peerId:this._remotePeerId,videoTrack:g,type:C}):"audio"===g.kind&&this.emit("micStart",{peerId:this._remotePeerId,audioTrack:g,type:C}),ji.info("Emitted track event for peer:%s 
type:%s",this._remotePeerId,C)};handleRemoteTrackRemoved=(A,I=!1)=>{ji.info("Handling remote track removal - kind:%s",A.track.kind);const g=A.track,C=this._remotePeerId;if(this._remoteStream&&this._remoteStream.removeTrack(g),this._peers.has(C)){const A=this._peers.get(C);"audio"===g.kind?(A.audioTrack=null,this.emit("micEnd",{peerId:C}),this.emit("peerMuted",{peerId:C,type:"remote"})):"video"===g.kind&&(A.videoTrack=null,I?this.emit("ssVideoStop",{peerId:C}):this.emit("videoEnd",{peerId:C,type:"cam-video"})),this._peers.set(C,A)}ji.info("Emitted track removal events for peer:%s kind:%s",C,g.kind)};userLeftRoom=A=>{ji.info("User left P2P room:%O",A);const{peerId:I}=A;this._peerConnection&&(this._peerConnection.close(),this._peerConnection=null,ji.info("Peer connection closed")),this._remoteStream&&(this._remoteStream.getTracks().forEach(A=>A.stop()),this._remoteStream=null),this._peers.has(I)&&(this._peers.delete(I),this.emit("peerLeft",{peerId:I})),this._remotePeerId=null,ji.info("Remote peer state cleared")};leaveRoomNewP2p=async A=>{ji.info("Leaving P2P room - action:%s",A);try{this._localStream&&this._localStream.getTracks().forEach(A=>{A.stop(),ji.debug("Stopped local track:%s",A.kind)}),this._micStream&&(this._micStream.getTracks().forEach(A=>A.stop()),this._micStream=null),this._webCamStream&&(this._webCamStream.getTracks().forEach(A=>A.stop()),this._webCamStream=null),this._peerConnection&&(this._peerConnection.close(),this._peerConnection=null);const I=A===hE?"leaveAndCloseRoom":"leaveRoomNew";this._sendMessage({id:I,peerId:this.data.inputParams.peerId,peer:this._remotePeerId}),this._clearP2pState(),this.emit("roomLeft",{roomId:this.data.inputParams.roomId}),ji.info("Left P2P room successfully")}catch(A){ji.error("Error leaving P2P room:%O",A)}};handleIceRestart=async A=>{ji.debug("Ice restart message received!!");try{this._peerConnection.ontrack=I=>{ji.info("Received remote track:%s",I.track.kind),this.gotRemoteTrackP2p(I,!!A.screenShare)}}catch(A){}await this._peerConnection.setRemoteDescription(A.offer);const I=await this._peerConnection.createAnswer();await this._peerConnection.setLocalDescription(I),this._sendMessage({id:"iceRestarted",answer:I,peer:this._remotePeerId})};handleIceRestartResponse=async A=>{ji.debug("Ice restart message response received!!"),this._peerConnection.setRemoteDescription(A.answer)};handleScreenShareP2p=A=>{if(ji.debug("Screen share P2P message received:%O",A),"start"===A?.type){const I=A.peerId||this._remotePeerId||A.peer;this.emit("ssVideoStart",{type:"start",peerId:I})}else if("end"===A?.type){const I=A.peerId||this._remotePeerId||A.peer;this.emit("ssVideoStop",{type:"end",peerId:I})}else ji.error("Unknown screen share P2P message type:%s",A?.type),this.emit("ssVideoStart",A)};_clearP2pState=()=>{this._peerConnection=null,this._localStream=null,this._remoteStream=null,this._remotePeerId=null,this._remoteDisplayName=null,this._remotePeerType=null,this._remoteAudioStatus=null,this._remoteVideoStatus=null,this._iceServers=null,this._peers=new Map,ji.debug("P2P state cleared")};async toggleMicP2p(A){if(this.data.inputParams.roomType!==RE)return void ji.warn("toggleMicP2p() called but not in P2P room");ji.info("Toggle mic P2P - enabled:%s",A);let I=null;try{if(this._peerConnection){const g=this._peerConnection.getSenders().find(A=>"audio"===A.track?.kind);if(g&&g.track){if(I=g.track,g.track.enabled=A,ji.info("Toggled audio track enabled state to:%s",A),this._localStream){const I=this._localStream.getAudioTracks()[0];I&&(I.enabled=A)}}else A&&(await 
this._acquireAudioForP2p({deviceId:this.data.inputParams.audioDeviceId}),this._micStream&&this._micStream.getAudioTracks()[0]&&(I=this._micStream.getAudioTracks()[0],this._peerConnection.addTrack(this._micStream.getAudioTracks()[0],this._localStream),ji.info("Added audio track to peer connection"),this._localStream&&!this._localStream.getAudioTracks().find(A=>A.id===this._micStream.getAudioTracks()[0].id)&&this._localStream.addTrack(this._micStream.getAudioTracks()[0])))}else A?(await this._acquireAudioForP2p({deviceId:this.data.inputParams.audioDeviceId}),this._micStream&&this._micStream.getAudioTracks()[0]&&(I=this._micStream.getAudioTracks()[0]),this._micStream&&this._localStream&&!this._localStream.getAudioTracks().find(A=>A.id===this._micStream.getAudioTracks()[0].id)&&this._localStream.addTrack(this._micStream.getAudioTracks()[0])):this._micStream&&(this._micStream.getTracks().forEach(A=>{A.stop(),this._localStream&&this._localStream.removeTrack(A)}),this._micStream=null);this._sendMessage({id:"mediaToggled",audioStatus:A,peer:this._remotePeerId,type:"audio",peerId:this.data.inputParams.peerId}),A&&I?this.emit("micStart",{peerId:this.data.inputParams.peerId,audioTrack:I,type:"local"}):A||this.emit("micEnd",{peerId:this.data.inputParams.peerId})}catch(A){ji.error("Error toggling mic P2P:%O",A),this.emit("error",{type:"mediaError",message:"Failed to toggle microphone"})}}async toggleCameraP2p(A){if(this.data.inputParams.roomType===RE){ji.info("Toggle camera P2P - enabled:%s",A);try{if(A){if(await this._acquireVideoForP2p({deviceId:this.data.inputParams.videoDeviceId,vbdetails:this.data.inputParams.vbdetails}),this._peerConnection&&this._webCamStream){const A=this._peerConnection.getSenders().find(A=>"video"===A.track?.kind);A&&A.track?(await A.replaceTrack(this._webCamStream.getVideoTracks()[0]),ji.info("Replaced video track in peer connection")):(this._peerConnection.addTrack(this._webCamStream.getVideoTracks()[0],this._localStream),ji.info("Added video track to peer connection")),this._localStream&&this._localStream.addTrack(this._webCamStream.getVideoTracks()[0])}this._sendMessage({id:"mediaToggled",videoStatus:!0,peer:this._remotePeerId,type:"video",peerId:this.data.inputParams.peerId}),this._webCamStream&&this._webCamStream.getVideoTracks()[0]&&this.emit("videoStart",{peerId:this.data.inputParams.peerId,videoTrack:this._webCamStream.getVideoTracks()[0],type:"local"})}else{if(this._peerConnection){const A=this._peerConnection.getSenders().find(A=>"video"===A.track?.kind);if(A){if(ji.info("Removing video track from peer connection"),this._peerConnection.removeTrack(A),this._localStream){const A=this._localStream.getVideoTracks()[0];A&&(A.stop(),this._localStream.removeTrack(A),ji.info("Stopped and removed video track from local stream"))}this._webCamStream&&(this._webCamStream.getTracks().forEach(A=>{A.stop()}),this._webCamStream=null)}}else this._webCamStream&&(this._webCamStream.getTracks().forEach(A=>{A.stop(),this._localStream&&this._localStream.removeTrack(A)}),this._webCamStream=null);this._sendMessage({id:"mediaToggled",videoStatus:!1,peer:this._remotePeerId,type:"video",peerId:this.data.inputParams.peerId}),this.emit("videoEnd",{peerId:this.data.inputParams.peerId,type:"cam-video"})}}catch(A){ji.error("Error toggling camera P2P:%O",A),this.emit("error",{type:"mediaError",message:"Failed to toggle camera"})}}else ji.warn("toggleCameraP2p() called but not in P2P room")}async changeAudioInputP2p(A){if(this.data.inputParams.roomType===RE){ji.info("Changing audio input P2P - 
deviceId:%s",A);try{if(this._micStream&&this._micStream.getTracks().forEach(A=>A.stop()),await this._acquireAudioForP2p({deviceId:A}),this._peerConnection&&this._micStream){const A=this._peerConnection.getSenders().find(A=>"audio"===A.track?.kind);A&&(await A.replaceTrack(this._micStream.getAudioTracks()[0]),ji.info("Replaced audio track with new device"))}this.emit("audioInputChanged",{deviceId:A})}catch(A){ji.error("Error changing audio input P2P:%O",A),this.emit("error",{type:"mediaError",message:"Failed to change audio input"})}}else ji.warn("changeAudioInputP2p() called but not in P2P room")}async changeVideoInputP2p(A){if(this.data.inputParams.roomType===RE){ji.info("Changing video input P2P - deviceId:%s",A);try{if(this._webCamStream&&this._webCamStream.getTracks().forEach(A=>A.stop()),await this._acquireVideoForP2p({deviceId:A,vbdetails:this.data.inputParams.vbdetails}),this._peerConnection&&this._webCamStream){const A=this._peerConnection.getSenders().find(A=>"video"===A.track?.kind);A&&(await A.replaceTrack(this._webCamStream.getVideoTracks()[0]),ji.info("Replaced video track with new device"))}this.emit("videoInputChanged",{deviceId:A})}catch(A){ji.error("Error changing video input P2P:%O",A),this.emit("error",{type:"mediaError",message:"Failed to change video input"})}}else ji.warn("changeVideoInputP2p() called but not in P2P room")}async enableScreenShareP2p(){if(this.data.inputParams.roomType===RE){ji.info("Enabling screen share P2P");try{if(this._screenShareStream)return void await this.disableScreenShareP2p();if(!this._peerConnection)return ji.warn("No peer connection available for screen share"),void this.emit("error",{type:"screenShareError",message:"No peer connection available"});const A=await navigator.mediaDevices.getDisplayMedia(oi);this._screenShareStream=A;let I=A.getVideoTracks()[0];if(!I)return ji.error("No video track in screen share stream"),void this.emit("error",{type:"screenShareError",message:"Failed to get screen share track"});ji.info("Adding screen share track alongside camera (no replace)");const g=this._peerConnection.addTrack(I,this._localStream);try{const A=this._peerConnection.getSenders();this._screenShareSender=A.find(A=>A.track===I)||g}catch(A){}this._videoWasActiveP2p=null,this._sendMessage({id:"screenShareP2p",peer:this._remotePeerId,type:"start",peerId:this.data.inputParams.peerId}),this.emit("screenShareStarted",{peerId:this.data.inputParams.peerId,track:I}),this.emit("ssVideoStart",{peerId:this.data.inputParams.peerId,videoTrack:I,type:"screen-video"}),I.onended=async()=>{ji.info("Screen share ended by user"),await this.disableScreenShareP2p()}}catch(A){ji.error("Error enabling screen share P2P:%O",A),this.emit("error",{type:"screenShareError",message:"Failed to start screen share"})}}else ji.warn("enableScreenShareP2p() called but not in P2P room")}async disableScreenShareP2p(){if(this.data.inputParams.roomType===RE){ji.info("Disabling screen share P2P");try{if(this._screenShareStream){const A=this._screenShareStream.getVideoTracks()[0];if(this._screenShareStream.getTracks().forEach(A=>A.stop()),this._screenShareStream=null,this._peerConnection){try{const I=this._peerConnection.getSenders(),g=this._screenShareSender||I.find(I=>I.track===A);g&&this._peerConnection.removeTrack(g)}catch(A){}if(this._localStream&&A){const 
I=this._localStream.getVideoTracks().find(I=>I===A);I&&this._localStream.removeTrack(I)}this._sendMessage({id:"screenShareP2p",peer:this._remotePeerId,type:"end",peerId:this.data.inputParams.peerId})}this.emit("ssVideoStop",{peerId:this.data.inputParams.peerId}),this.emit("screenShareStopped",{peerId:this.data.inputParams.peerId}),this._screenShareSender=null,this._videoWasActiveP2p=null}}catch(A){ji.error("Error disabling screen share P2P:%O",A),this.emit("error",{type:"screenShareError",message:"Failed to disable screen share"})}}else ji.warn("disableScreenShareP2p() called but not in P2P room")}async setCurrentlyActiveSpeakerP2p(){if(this.data.inputParams.roomType!==RE||!this._peerConnection)return{producerId:null,volume:null,peerId:null};try{let A={producerId:null,volume:null,peerId:null};const I=this._peerConnection.getReceivers().find(A=>A.track&&"audio"===A.track.kind),g=this._peerConnection.getSenders().find(A=>A.track&&"audio"===A.track.kind);if(!I||!g)return A;const C=await I.getStats();let Q=Math.round(1e3*[...C.values()].filter(A=>"audio"===A.kind&&A.id.includes("RTCInboundRTPAudioStream"))[0]?.audioLevel);const B=await g.getStats();let E=Math.round(1e3*[...B.values()].filter(A=>A.id.includes("RTCAudioSource")&&"audio"===A.kind)[0]?.audioLevel);return ji.debug("receiverAudioLevel:%s, senderAudioLevel:%s",Q,E),Q>0&&Q>E?(A.volume=Q,A.peerId=this._remotePeerId):E>0&&E>Q&&(A.volume=E,A.peerId=this.data.inputParams.peerId),A}catch(A){return ji.error("Error getting active speaker P2P:%O",A),{producerId:null,volume:null,peerId:null}}}async restartICE(){if(this.data.inputParams.roomType===RE&&this._peerConnection)try{ji.info("Restarting ICE for P2P connection");const A=await this._peerConnection.createOffer({iceRestart:!0});await this._peerConnection.setLocalDescription(A),this._sendMessage({id:"iceRestart",offer:A,screenShare:!!(this._screenShareStream&&this._screenShareStream.getVideoTracks&&this._screenShareStream.getVideoTracks()[0]),peer:this._remotePeerId}),ji.info("ICE restart offer sent")}catch(A){ji.error("Failed to restart ICE:%O",A),this.emit("error",{type:"connectionError",message:"Failed to restart ICE connection"})}else ji.warn("restartICE() called but not in P2P room or peer connection not available")}async handleReplaceTrackP2p({deviceLabel:A,mediaType:I,deviceList:g}){if(this.data.inputParams.roomType!==RE||!this._peerConnection)return ji.warn("handleReplaceTrackP2p() called but not in P2P room"),{success:!1};try{let C,Q,B,E;ji.info("Replacing track P2P - label:%s, type:%s",A,I);let i=null;if("video"===I){const I=(g||this.deviceList)?.videoDevices?.find(I=>I.label===A);if(i=I?.deviceId,!i)return ji.error("Video device not found with label:%s",A),{success:!1};wi.video.deviceId.exact=i,B=wi,C=this._localStream?.getVideoTracks()[0]}else{if("audio"!==I)return ji.error("Unknown media type:%s",I),{success:!1};{const I=(g||this.deviceList)?.audioDevices?.find(I=>I.label===A);if(i=I?.deviceId,!i)return ji.error("Audio device not found with label:%s",A),{success:!1};si.audio.deviceId.exact=i,B=si,C=this._localStream?.getAudioTracks()[0]}}if(!C)return ji.warn("No old track found to replace"),{success:!1};let D=C.enabled;E=await navigator.mediaDevices.getUserMedia(B),"video"===I?Q=E.getVideoTracks()[0]:"audio"===I&&(Q=E.getAudioTracks()[0]),Q.enabled=D;const n=this._peerConnection.getSenders().find(A=>A.track&&A.track.kind===Q.kind);return n&&(await n.replaceTrack(Q),ji.info("Replaced %s track in peer 
connection",I)),this._localStream.removeTrack(C),this._localStream.addTrack(Q),C.stop(),"video"===I?(this._webCamStream&&this._webCamStream.getTracks().forEach(A=>A.stop()),this._webCamStream=E):"audio"===I&&(this._micStream&&this._micStream.getTracks().forEach(A=>A.stop()),this._micStream=E),"video"===I?(this.emit("videoInputChanged",{deviceId:i,deviceLabel:A}),this.emit("videoStart",{peerId:this.data.inputParams.peerId,videoTrack:Q,type:"cam-video"})):(this.emit("audioInputChanged",{deviceId:i,deviceLabel:A}),this.emit("micStart",{peerId:this.data.inputParams.peerId,audioTrack:Q,type:"cam-audio"})),{success:!0}}catch(A){return ji.error("Error replacing track P2P:%O",A),this.emit("error",{type:"mediaError",message:"Failed to replace media track"}),{success:!1}}}close(){this._closed||(this._closed=!0,this._socket=null,this.data.inputParams={},ji.info("Room close()"),this._sendTransport&&this._sendTransport.close(),this._recvTransport&&this._recvTransport.close(),this._roomStatus="closed",this._running=!1)}async leaveRoom(){ji.debug("Leave room is called!!"),this.data.inputParams.roomType===RE?(ji.info("Leaving P2P room"),await this.leaveRoomNewP2p("leaveRoomNew")):"connected"===this._roomStatus?(this._sendMessage({id:"leaveRoomNew",peerId:this.data.inputParams.peerId,roomLeaveType:"client"}),await this.leaveRoomCommon()):ji.error("The room state is:%s",this._roomStatus)}async closeRoom(){ji.debug("Close room is called!!"),this.data.inputParams.roomType===RE?(ji.info("Closing P2P room"),await this.leaveRoomNewP2p(hE)):"connected"===this._roomStatus?(this._sendMessage({id:"leaveAndCloseRoom",peerId:this.data.inputParams.peerId,roomCloseType:"client"}),await this.leaveRoomCommon()):ji.error("The room state is:%s",this._roomStatus)}leaveRoomCommon=async()=>{try{ji.debug("Starting comprehensive room leave cleanup...");try{this._cleanupAudioMonitoring(),this._stopSpeakingWhileMutedDetection(),this._stopAutoAudioStreaming(),this._transcriptionActive&&this.stopTranscription(),Xi&&"function"==typeof Xi.cleanup&&Xi.cleanup()}catch(A){}const A=new Set;this._webcamProducer?.track&&A.add(this._webcamProducer.track),this._micProducer?.track&&A.add(this._micProducer.track),this._shareProducer?.track&&A.add(this._shareProducer.track),this._shareAudioProducer?.track&&A.add(this._shareAudioProducer.track),this._producers&&this._producers.size>0&&this._producers.forEach(I=>{I?.track&&A.add(I.track)}),this._webCamStream&&this._webCamStream.getTracks().forEach(I=>A.add(I)),this._micStream&&this._micStream.getTracks().forEach(I=>A.add(I));try{const I=Xi?.getVBStream?.()||Xi?._localVBStream;I&&"function"==typeof I.getTracks&&I.getTracks().forEach(I=>A.add(I))}catch(A){}const I=[];document.querySelectorAll("video, audio").forEach(g=>{g.srcObject&&"function"==typeof g.srcObject.getTracks&&(g.srcObject.getTracks().forEach(I=>A.add(I)),I.push({element:g,stream:g.srcObject}))}),ji.debug(`Found ${A.size} total tracks to stop`);let g=0;for(const I of A)try{I&&"live"===I.readyState&&"function"==typeof I.stop&&(I.stop(),g++,ji.debug(`Stopped ${I.kind} track: ${I.label||I.id}`))}catch(A){ji.warn("Error stopping track:",A)}if(ji.debug(`Stopped ${g} 
tracks`),this._sendTransport){try{this._sendTransport.close()}catch(A){}this._sendTransport=null}if(this._recvTransport){try{this._recvTransport.close()}catch(A){}this._recvTransport=null}this._webcamProducer=null,this._micProducer=null,this._shareProducer=null,this._shareAudioProducer=null,this._webCamStream=null,this._micStream=null,this._producers&&this._producers.clear(),this._consumers&&this._consumers.clear(),this._roomStatus="closed",this._running=!1,this._routerRtpCapabilities=null,await new Promise(A=>setTimeout(A,100));let C=0;document.querySelectorAll("video, audio").forEach(A=>{try{A.srcObject&&(A.srcObject=null,"function"==typeof A.pause&&A.pause(),"function"==typeof A.load&&A.load(),C++,ji.debug(`Force cleared ${A.nodeName} element`))}catch(A){ji.warn("Error clearing element:",A)}}),ji.debug(`Cleared ${C} DOM elements`);try{A.forEach(A=>{A&&"function"==typeof A.removeEventListener&&(A.removeEventListener("ended",()=>{}),A.removeEventListener("mute",()=>{}),A.removeEventListener("unmute",()=>{}))})}catch(A){}if(window.gc&&"function"==typeof window.gc)try{window.gc()}catch(A){}await new Promise(A=>setTimeout(A,200));try{const A=await this.reportActiveMediaUse();ji.debug("Final media usage report:",A);const I=[],g=A.dom.mediaElements.filter(A=>A.hasSrcObject&&A.tracks.length>0);g.forEach(A=>{A.tracks.forEach(g=>{"live"===g.readyState&&I.push({kind:g.kind,label:g.label,id:g.id,element:A.nodeName})})}),(I.length>0||g.length>0)&&(ji.warn("WARNING: Media elements or live tracks still detected after cleanup:",{liveTracks:I,elementsWithTracks:g.length}),await this.emergencyTrackCleanup())}catch(A){ji.error("Failed to generate final media usage report:",A)}}catch(A){ji.error("Error during room leave cleanup:",A),await this.emergencyTrackCleanup()}};emergencyTrackCleanup=async()=>{ji.debug("Performing emergency track cleanup...");try{const A=[];document.querySelectorAll("video, audio").forEach(I=>{if(I.srcObject&&"function"==typeof I.srcObject.getTracks){A.push(...I.srcObject.getTracks()),I.srcObject=null,"function"==typeof I.pause&&I.pause(),"function"==typeof I.load&&I.load();try{I.src=""}catch(A){}}}),A.forEach(A=>{try{"live"===A.readyState&&(A.stop(),ji.debug(`Emergency stopped ${A.kind}: ${A.label||A.id}`))}catch(A){}}),ji.debug(`Emergency cleanup completed - stopped ${A.length} tracks`),await new Promise(A=>setTimeout(A,300))}catch(A){ji.error("Emergency cleanup failed:",A)}};reportActiveMediaUse=async(A=!1)=>{const I={sdk:{micStreamTracks:[],camStreamTracks:[],vbStreamTracks:[],shareTracks:[],producers:[],consumers:[]},dom:{mediaElements:[]},timestamp:Date.now()},g=A=>{try{return{kind:A?.kind,enabled:A?.enabled,readyState:A?.readyState,label:A?.label,id:A?.id,muted:A?.muted}}catch(A){return{error:!0}}};try{this._micStream&&"function"==typeof this._micStream.getTracks&&(I.sdk.micStreamTracks=this._micStream.getTracks().map(g))}catch(A){}try{this._webCamStream&&"function"==typeof this._webCamStream.getTracks&&(I.sdk.camStreamTracks=this._webCamStream.getTracks().map(g))}catch(A){}try{const A=Xi?.getVBStream?.()||Xi?._localVBStream;A&&"function"==typeof 
A.getTracks&&(I.sdk.vbStreamTracks=A.getTracks().map(g))}catch(A){}try{this._shareProducer?.track&&I.sdk.shareTracks.push(g(this._shareProducer.track)),this._shareAudioProducer?.track&&I.sdk.shareTracks.push(g(this._shareAudioProducer.track))}catch(A){}try{this._producers&&this._producers.size>0&&this._producers.forEach((A,C)=>{I.sdk.producers.push({key:C,track:A?.track?g(A.track):null,id:A?.id,paused:A?.paused})})}catch(A){}try{this._consumers&&this._consumers.size>0&&this._consumers.forEach((A,C)=>{I.sdk.consumers.push({key:C,track:A?.track?g(A.track):null,id:A?.id,paused:A?.paused})})}catch(A){}try{const A=Array.from(document.querySelectorAll("video, audio"));I.dom.mediaElements=A.map(A=>{let I=[],C=null;try{const Q=A.srcObject;Q&&"function"==typeof Q.getTracks&&(I=Q.getTracks().map(g),C=Q.id)}catch(A){}return{nodeName:A.nodeName,muted:!!A.muted,paused:!!A.paused,hasSrcObject:!!A.srcObject,streamId:C,tracks:I,src:A.src||null,currentSrc:A.currentSrc||null}})}catch(A){}const C=[...I.sdk.micStreamTracks,...I.sdk.camStreamTracks,...I.sdk.vbStreamTracks,...I.sdk.shareTracks,...I.sdk.producers.map(A=>A.track).filter(Boolean),...I.sdk.consumers.map(A=>A.track).filter(Boolean),...I.dom.mediaElements.flatMap(A=>A.tracks)].filter(A=>A&&"live"===A.readyState);return I.summary={totalLiveTracks:C.length,elementsWithSrcObject:I.dom.mediaElements.filter(A=>A.hasSrcObject).length,elementsWithTracks:I.dom.mediaElements.filter(A=>A.tracks.length>0).length},I};async listDevicesInternal(){if(navigator.mediaDevices.ondevicechange=async A=>{let I=await bE();ji.info("Media devices changed!:%O",I),I.audioDevices&&I.audioDevices.length>0&&(this._deviceList.audioDevices=I.audioDevices),I.videoDevices&&I.videoDevices.length>0&&(this._deviceList.videoDevices=I.videoDevices),I.audioDevices&&I.audioDevices.length>0&&(this._deviceList.audioOutputDevices=I.audioDevicesOutput),vi=this._deviceList,this.emit("deviceListUpdated")},!this._deviceList){const A=await xE();if(A.success)return this._deviceList=A.deviceList,void(vi=this._deviceList)}}restartIce=async(A,I)=>{if("send"===I&&"connected"===this._sendTransport.connectionState||"recv"===I&&"connected"===this._recvTransport.connectionState)return void ji.debug("no need to restart ICE as transport now connected");ji.debug("websocket is ready and connectionstate is still disconnected, therefore going to restart ICE");let g={id:"restartIce",transportId:A,roomName:this.data.inputParams.roomId,peerId:this.data.inputParams.peerId};this._sendMessage(g)};restartIceResponse=A=>{ji.debug("restart ICE response:%o",A);const{transportId:I,iceParameters:g,error:C}=A;if(C){ji.error("restartIce failed for transport %s with error: %s. Recreating transports as fallback.",I,C);const A=this._sendTransport&&this._sendTransport.id===I,g=this._recvTransport&&this._recvTransport.id===I;return void(A?(this._sendTransport=null,this._createSendTransport()):g?(this._recvTransport=null,this._createRecvTransport()):(ji.warn("restartIceResponse error for unknown transportId:%s, recreating both transports",I),this._recreateTransportsAfterReconnection()))}this._sendTransport&&this._sendTransport.id===I?this._sendTransport.restartIce({iceParameters:g}):this._recvTransport&&this._recvTransport.id===I?this._recvTransport.restartIce({iceParameters:g}):ji.warn("restartIceResponse received for unknown transportId:%s. 
Ignoring.",I)};_recreateTransportsAfterReconnection=async()=>{try{ji.info("Recreating transports after reconnection");const A=!!this._sendTransport,I=!!this._recvTransport;this._sendTransport=null,this._recvTransport=null,A&&(ji.info("Recreating send transport"),await this._createSendTransport()),I&&(ji.info("Recreating recv transport"),await this._createRecvTransport()),ji.info("Transport recreation after reconnection complete")}catch(A){ji.error("Failed to recreate transports after reconnection: %o",A),this.emit("transportRecreationFailed",{error:A})}};startRecording=({recordingType:A=null,outputType:I=null,outputQualities:g=null}={})=>{ji.debug("recording type requested is:%s,outputType:%s, outputQualties:%o",A,I,g);const C=!A||"av"!==A?.toLowerCase()&&"audiovideo"!==A?.toLowerCase()?"mergedA":"mergedAV";if((!I||"hls"!==I.toLowerCase()&&"mp4"!==I.toLowerCase())&&I)return ji.error("Invalid outut type"),{success:!1,reason:`Invalid outputType: ${I}. `};if(I&&"hls"===I.toLowerCase()&&g&&!ni(g))return ji.error("Invalid outut qualities"),{success:!1,reason:`Invalid outputQualities: ${JSON.stringify(g)}. Allowed values are ${Array.from(Di).join(", ")}.`};let Q={id:"startRecording",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,url:window.location.hostname,type:FE,recordingStrategy:C,outputQualities:g,outputType:I?.toLowerCase()};this._sendMessage(Q),this._recordingStartedByMe={...this._recordingStartedByMe,"main-room":{recordingNo:null}}};stopRecording=()=>{ji.debug("going to stop recording for recordingStartedByMe:%o",this._recordingStartedByMe);let A="main-room";if(!this._recordingStartedByMe[A])return{success:!1,error:!0,code:"RRID001",text:"Error while trying to stop recording. Either the recording has not been started yet Or The same user need to stop recording who started it."};{let I={id:"stopRecording",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,recordingNo:this._recordingStartedByMe[A].recordingNo,type:FE};this._sendMessage(I),delete this._recordingStartedByMe[A],this.emit("recordingEnded",{peerId:this.data.inputParams.peerId})}};setRecordingStatusStarted=A=>{ji.debug("Recording/Streaming started by moderator!!:%O",A);let{recordingStartTime:I,recordingNo:g,type:C}=A;[FE,kE].includes(C)?this._recordingStartedByMe["main-room"]?(ji.debug("This recording has been started by me."),this._recordingStartedByMe["main-room"].recordingNo=g,this.emit("recordingStarted",{peerId:this.data.inputParams.peerId,startTime:I})):this.emit("recordingStarted",{startTime:I}):C===LE?(this._liveStreamingStartedByMe&&this._liveStreamingStartedByMe["main-room"]&&(this._liveStreamingStartedByMe["main-room"].recordingNo=g),this.emit("liveStreamingStarted",{startTime:I})):this.emit("liveStreamingStarted",{startTime:I})};setRecordingStatusEnded=A=>{ji.debug("Recording ended by moderator!!, data:%O",A);let{breakOutRoom:I,type:g}=A;g===LE?(this.emit("liveStreamingEnded",{}),this._liveStreamingStartedByMe=null):(this.emit("recordingEnded",{}),this._recordingStartedByMe=null)};startLiveStreaming=A=>{ji.debug("Live streaming started by moderator!!, data:%O",A);let{streamUrl:I,streamKey:g,type:C}=A;if(this._liveStreamingStartedByMe||(this._liveStreamingStartedByMe={}),this._liveStreamingStartedByMe["main-room"])return{success:!1,error:!0,code:"LRID002",text:"Live streaming is already started"};let Q={id:"startRecording",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,url:window.location.hostname,rtmpUrl:`${I}/${g}`,type:LE};return 
this._sendMessage(Q),this._liveStreamingStartedByMe={...this._liveStreamingStartedByMe,"main-room":{recordingNo:null}},{success:!0}};stopLiveStreaming=()=>{ji.debug("Live streaming stopped by moderator!!");let A="main-room";if(!this._liveStreamingStartedByMe||!this._liveStreamingStartedByMe[A])return{success:!1,error:!0,code:"LRID001",text:"Error while trying to stop live streaming. Either the live stream has not been started yet or it was started by another user."};{let I={id:"stopRecording",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,type:LE,recordingNo:this._liveStreamingStartedByMe[A].recordingNo};this._sendMessage(I)}delete this._liveStreamingStartedByMe[A],this.emit("liveStreamingEnded",{peerId:this.data.inputParams.peerId})};startProcessing=async({inputFiles:A=[],outputQualities:I=null,bucket:g=null,cloud:C=null,region:Q=null}={})=>{ji.debug("Processing of Files requested for:%o",A);const B=Math.round(1e7*Math.random()),E=await async function(A){if(ai.info("The input files are:%o, length:%s",A,A.length),A.length>0){ai.info("Files array length is:%s",A.length);for(const{type:I,url:g}of A){if(ai.info("The file details are type:%s, url:%s",I,g),!yi.includes(I))return console.warn(`Type "${I}" is not allowed.`),{success:!1,reason:`Type "${I}" is not allowed.`};if(!Gi(g,I))return console.warn(`Extension mismatch for ${g}; expected .${I}`),{success:!1,reason:`Extension mismatch for ${g}; expected .${I}`}}return{success:!0}}return{success:!1,reason:"There are no files for processing!"}}(A);if(E.success){if(I&&!ni(I))return ji.error("Invalid output qualities"),{success:!1,reason:`Invalid outputQualities: ${JSON.stringify(I)}. Allowed values are ${Array.from(Di).join(", ")}.`};this._processingStartedByMe={...this._processingStartedByMe,[B]:{}};for(const{type:I,url:g}of A)this._processingStartedByMe={...this._processingStartedByMe,[B]:{...this._processingStartedByMe[B],[g]:{type:I,url:g,status:"pending"}}};let E={id:"processVideos",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,inputFiles:A,outputQualities:I,bucket:g,cloud:C,region:Q,requestId:B,type:"process"};return this._sendMessage(E),{success:!0}}return E};checkProcessingStatus=({requestId:A})=>(ji.debug("Going to check processing status for request Id:%s",A),this._processingStartedByMe[A]?{success:!0,details:this._processingStartedByMe[A]}:{success:!0,details:this._processingStartedByMe});handleProcessingStart=A=>{const{processingStartTime:I,processingNo:g,requestId:C}=A;ji.debug("handleProcessingStart()| received message is:%o",A),this.emit("processingStarted",{processingStartTime:I,requestId:C})};handleProcessingCompletion=A=>{const{totalProcessingTime:I,hlsfileKey:g,size:C,originalFile:Q,lastFile:B,requestId:E}=A;if(ji.debug("handleProcessingCompletion()| received message is:%o",A),ji.debug("Before update, Total files to be processed are:%o",this._processingStartedByMe),this._processingStartedByMe[Q]&&(this._processingStartedByMe={...this._processingStartedByMe,[E]:{...this._processingStartedByMe[E],[Q]:{...this._processingStartedByMe[Q],status:"completed",hlsfileKey:g,size:C,totalProcessingTime:I}}}),ji.debug("After update, Total files to be processed are:%o",this._processingStartedByMe),this.emit("processingCompleted",A),B){ji.debug("The last file processing has been completed! 
Remove all the files that have been completed with the same requestId");let A={...this._processingStartedByMe};delete A[E],ji.debug("After deleting the current requestId:%o",A),this._processingStartedByMe=A}};handleProcessingError=A=>{const{totalProcessingTime:I,hlsfileKey:g,size:C,originalFile:Q,lastFile:B,requestId:E,error:i}=A;ji.debug("handleProcessingError()| received message is:%o",A),ji.debug("Before update, Total files to be processed are:%o",this._processingStartedByMe),this._processingStartedByMe[Q]&&(this._processingStartedByMe={...this._processingStartedByMe,[E]:{...this._processingStartedByMe[E],[Q]:{...this._processingStartedByMe[Q],status:"error",hlsfileKey:g,size:C,totalProcessingTime:I,error:i}}}),ji.debug("After update, Total files to be processed are:%o",this._processingStartedByMe),this.emit("processingError",A)};async enableMic({deviceId:A=null,autoGainControl:I,noiseSuppression:g,echoCancellation:C,channelCount:Q,sampleRate:B,forcePCMU:E,forcePCMA:i}={}){if(ji.debug("enableMic()"),this.data.inputParams.roomType===RE)return ji.debug("Enabling mic for P2P room"),A&&(this.data.inputParams.audioDeviceId=A),void 0!==I&&(this.data.inputParams.autoGainControl=I),void 0!==g&&(this.data.inputParams.noiseSuppression=g),void 0!==C&&(this.data.inputParams.echoCancellation=C),void 0!==B&&(this.data.inputParams.sampleRate=B),void 0!==Q&&(this.data.inputParams.channelCount=Q),await this.toggleMicP2p(!0);if(!this.data.inputParams.produce)return ji.debug("Produce status is set to false!"),{success:!1,error:!0,code:"REID007",text:"Error while trying to start Mic/Audio. Produce flag needs to be set to true while joining room in order to enable Mic/Audio."};if("connected"!==this._roomStatus)return ji.debug("Room status is not connected yet!"),{success:!1,error:!0,code:"REID008",text:`Error while trying to start Mic/Audio as room not in connected status. Current room status: ${this._roomStatus}`,possibleReasons:"Did you forget to call joinRoom before enabling Mic? OR if you have already initiated the joinRoom process, then Mic will be enabled automatically once room join process completes."};if(this._micProducer)return ji.debug("Mic is already active!"),{success:!1,warning:!0,code:"RWID002",text:"Error while trying to start Mic/Audio. Mic/Audio is already active!"};if(!this._device.canProduce("audio"))return ji.error("enableMic() | cannot produce audio"),{success:!1,error:!0,code:"REID009",text:"Error while trying to start Mic/Audio. Mic/Audio could not be activated due to limitations on this device. 
If you think this device has a functional Mic and the problem persists even after multiple retries, please contact technical support with the error code."};let D,n;E&&"boolean"==typeof E&&(this.data.inputParams.forcePCMU=E),i&&"boolean"==typeof i&&(this.data.inputParams.forcePCMA=i),I&&"boolean"==typeof I&&(this.data.inputParams.autoGainControl=I),C&&"boolean"==typeof C&&(this.data.inputParams.echoCancellation=C),g&&"boolean"==typeof g&&(this.data.inputParams.noiseSuppression=g),B&&Number.isInteger(B)&&B<64e3&&B>8e3&&(this.data.inputParams.sampleRate=B),Q&&Number.isInteger(Q)&&Q>0&&Q<3&&(this.data.inputParams.channelCount=Q);try{if(this._externalVideo)this._micStream=await this._getExternalVideoStream(),D=this._micStream.getAudioTracks()[0].clone();else{if(A?(n=this._deviceList.audioDevices.find(I=>I.deviceId===A),n||(ji.warn("Selected audio input deviceId:%s not found",A),n=this._deviceList.audioDevices[0])):n=this._deviceList.audioDevices[0],this._mic.device=n,!n)return ji.error("No mic device found! Can't start audio!"),{success:!1,reason:"No mic available for starting audio!"};A&&this.data.inputParams.audioDeviceId!==A&&(this.data.inputParams.audioDeviceId=A),ji.debug("enableMic() | calling getUserMedia()");try{this._micStream=await navigator.mediaDevices.getUserMedia({audio:{deviceId:{exact:n.deviceId},echoCancellation:this.data.inputParams.echoCancellation,noiseSuppression:this.data.inputParams.noiseSuppression,autoGainControl:this.data.inputParams.autoGainControl,sampleRate:this.data.inputParams.sampleRate,channelCount:this.data.inputParams.channelCount}}),D=this._micStream.getAudioTracks()[0]}catch(A){throw new Error("Error while acquiring mic. Possible issue with audio constraint values",A)}}this._micProducer=await this._sendTransport.produce({track:D,codecOptions:this.data.inputParams.forcePCMU||this.data.inputParams.forcePCMA?void 0:{opusStereo:!1,opusDtx:!0,opusFec:!0,opusNack:!0},codec:this.data.inputParams.forcePCMU?this._device.rtpCapabilities.codecs.find(A=>"audio/pcmu"===A.mimeType.toLowerCase()):this.data.inputParams.forcePCMA?this._device.rtpCapabilities.codecs.find(A=>"audio/pcma"===A.mimeType.toLowerCase()):void 0,appData:{mediaTag:"cam-audio"}}),this._producers.set("audio",{id:this._micProducer.id,paused:this._micProducer.paused,track:this._micProducer.track,rtpParameters:this._micProducer.rtpParameters,codec:this._micProducer.rtpParameters.codecs[0].mimeType.split("/")[1]}),this.emit("micStart",{peerId:this.data.inputParams.peerId,audioTrack:this._micProducer.track,type:"local"}),this._micProducer.on("transportclose",()=>{this._micProducer=null}),this._micProducer.on("trackended",()=>{this.disableMic().catch(()=>{})}),await this._initializeAudioMonitoring(),this._startSpeakingWhileMutedDetection()}catch(A){ji.error("enableMic() | failed:%o",A),this.emit("error",{code:"EID002",text:"Error enabling microphone!"}),D&&D.stop()}}async disableMic(){if(ji.debug("disableMic()"),this.data.inputParams.roomType===RE)return ji.debug("Disabling mic for P2P room"),this._cleanupAudioMonitoring(),await this.toggleMicP2p(!1);if(this._cleanupAudioMonitoring(),this._micStream&&this._micStream.getAudioTracks().forEach(A=>A.stop()),this._micProducer){this._micProducer.close(),this._producers.delete("audio");try{let 
A={id:"closeProducerSDK",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,type:"audio",producerId:this._micProducer.id};this._sendMessage(A),this.emit("micEnd",{peerId:this.data.inputParams.peerId,audioTrack:null,type:"local"})}catch(A){this.emit("error",{code:"EID003",text:"Error disabling microphone!"})}this._micProducer=null}else ji.debug("No mic producer available")}async muteMic(){if(ji.debug("muteMic()"),this.data.inputParams.roomType!==RE)if(this._micProducer){this._micProducer.pause();try{let A={id:"toggleMedia",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,type:"audio",audioStatus:!1,producerId:this._micProducer.id};this._sendMessage(A),this.emit("peerMuted",{peerId:this.data.inputParams.peerId,type:"local"})}catch(A){ji.error("muteMic() | failed: %o",A),this.emit("error",{code:"EID004",text:"Error muting local microphone!"})}}else ji.warn("No mic producer available");else try{if(this._peerConnection){const A=this._peerConnection.getSenders().find(A=>"audio"===A.track?.kind);A&&A.track&&(A.track.enabled=!1,ji.info("Disabled audio track in P2P peer connection"))}this._sendMessage({id:"mediaToggled",type:"audio",audioStatus:!1,peer:this._remotePeerId,peerId:this.data.inputParams.peerId}),this.emit("peerMuted",{peerId:this.data.inputParams.peerId,type:"local"})}catch(A){ji.error("muteMic() P2P | failed: %o",A),this.emit("error",{code:"EID004",text:"Error muting local microphone in P2P!"})}}async unmuteMic(){if(ji.debug("unmuteMic()"),this.data.inputParams.roomType!==RE){this._micProducer||(ji.debug("Mic is not active!"),await this.enableMic()),this._micProducer.resume();try{let A={id:"toggleMedia",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,type:"audio",audioStatus:!0,producerId:this._micProducer.id};this._sendMessage(A),this.emit("peerUnMuted",{peerId:this.data.inputParams.peerId,type:"local"})}catch(A){ji.error("unmuteMic() | failed: %o",A),this.emit("error",{code:"EID005",text:"Error unmuting local microphone!"})}}else try{if(this._peerConnection){const A=this._peerConnection.getSenders().find(A=>"audio"===A.track?.kind);if(A&&A.track)A.track.enabled=!0,ji.info("Enabled audio track in P2P peer connection");else{if(this._micStream||(ji.debug("Mic stream not available, acquiring..."),await this._acquireAudioForP2p({deviceId:this.data.inputParams.audioDeviceId})),!this._micStream||!this._micStream.getAudioTracks()[0])return ji.error("Failed to acquire audio for unmute in P2P"),void this.emit("error",{code:"EID005",text:"Error acquiring audio for unmute in P2P!"});if(this._peerConnection.addTrack(this._micStream.getAudioTracks()[0],this._localStream),ji.info("Added audio track to P2P peer connection"),this._localStream){const A=this._localStream.getAudioTracks()[0];A!==this._micStream.getAudioTracks()[0]&&(A&&this._localStream.removeTrack(A),this._localStream.addTrack(this._micStream.getAudioTracks()[0]))}}}this._sendMessage({id:"mediaToggled",type:"audio",audioStatus:!0,peer:this._remotePeerId,peerId:this.data.inputParams.peerId}),this.emit("peerUnMuted",{peerId:this.data.inputParams.peerId,type:"local"})}catch(A){ji.error("unmuteMic() P2P | failed: %o",A),this.emit("error",{code:"EID005",text:"Error unmuting local microphone in P2P!"})}}handleRoomSettingsGeneral=async({allowScreenShare:A,noOfScreenShare:I,noOfUpgradeRequests:g})=>{const C={type:"roomSetting:generalSettings",allowScreenShare:A,noOfScreenShare:I,noOfUpgradeRequests:g};return ji.debug("Room settings - General: 
%O",C),this.sendCustomMessage(JSON.stringify(C),"custom",null,"moderator","roomSetting:generalSettings",{}),{success:!0}};handleAuthSettings=async({moderatorApproval:A,passwordRequired:I})=>{const g={type:"roomSetting:authSettings",moderatorApproval:A,passwordRequired:I};return ji.debug("Room settings - Auth: %O",g),this.sendCustomMessage(JSON.stringify(g),"custom",null,"moderator","roomSetting:authSettings",{}),{success:!0}};handleRoomSettingsStage=A=>{const I={type:"roomSetting:stageSettings",stageStatus:A.stageStatus,stagePeers:A.stagePeers,backStageStatus:A.backStageStatus,backStagePeers:A.backStagePeers};return this.sendCustomMessage(JSON.stringify(I),"custom",null,"moderator","roomSetting:stageSettings",{}),{success:!0}};handlePresenterSettings=async A=>{const I={type:"roomSetting:presenterSettings",presenterSettings:A};return ji.debug("presenter settings: %O",I),this.sendCustomMessage(JSON.stringify(I),"custom",null,"moderator","roomSetting:presenterSettings",{}),{success:!0}};handleParticipantSettings=async A=>{ji.debug("Going to update participant settings with values:%s",A);const I={type:"roomSetting:participantSettings",participantSettings:A};return this.sendCustomMessage(JSON.stringify(I),"custom",null,"moderator","roomSetting:participantSettings",{}),{success:!0}};startSpeechRecognition(A={}){const I={lang:"es-ES",continuous:!0,interimResults:!0,maxAlternatives:3,autoRestart:!0,restartDelayMs:250,...A},g=window.SpeechRecognition||window.webkitSpeechRecognition,C=window.SpeechGrammarList||window.webkitSpeechGrammarList;if(!g)return this.emit("sttError",{code:"UNSUPPORTED",message:"Web Speech API not supported"}),{success:!1,reason:"unsupported"};try{if(this._speechRecognition){try{this._speechRecognition.onend=null,this._speechRecognition.onresult=null,this._speechRecognition.onerror=null}catch{}try{this._speechRecognition.stop()}catch{}this._speechRecognition=null}const A=new g;if(A.lang=I.lang,A.continuous=!!I.continuous,A.interimResults=!!I.interimResults,A.maxAlternatives=I.maxAlternatives,I.grammars&&Array.isArray(I.grammars)&&C){const g=new C,Q=`#JSGF V1.0; grammar terms; public <term> = ${I.grammars.join(" | ")};`;g.addFromString&&g.addFromString(Q,1),A.grammars&&(A.grammars=g)}return this._sttShouldRun=!0,this._sttAutoRestart=!!I.autoRestart,this._sttRestartDelayMs=Number(I.restartDelayMs)||250,A.onstart=()=>this.emit("sttStart",{timestamp:Date.now(),lang:A.lang}),A.onresult=I=>{const g=I.results[I.results.length-1],C=g&&g[0]?g[0]:null,Q={transcript:C?C.transcript:"",confidence:C?C.confidence:0,isFinal:!!g&&g.isFinal,timestamp:Date.now(),lang:A.lang},B=Q.timestamp,E=this.data.inputParams.peerId||"local";new Date(B).toISOString(),this._transcriptStorage.has(B)||this._transcriptStorage.set(B,new Map),this._transcriptStorage.get(B).set(E,{transcript:Q.transcript,isFinal:Q.isFinal}),this.emit("sttResult",Q)},A.onerror=A=>{this.emit("sttError",{code:A.error||"UNKNOWN",message:A.message||"Speech recognition error"})},A.onend=()=>{if(this.emit("sttEnd",{timestamp:Date.now()}),this._sttShouldRun&&this._sttAutoRestart){const I=this._sttRestartDelayMs;try{setTimeout(()=>{if(this._sttShouldRun&&this._sttAutoRestart)try{A.start()}catch(A){}},I)}catch(A){}}},A.start(),this._speechRecognition=A,{success:!0}}catch(A){return 
this.emit("sttError",{code:"INIT_FAILED",message:A.message}),{success:!1,reason:A.message}}}stopSpeechRecognition(){try{if(this._sttShouldRun=!1,this._sttAutoRestart=!1,this._speechRecognition){try{this._speechRecognition.onend=null,this._speechRecognition.onresult=null,this._speechRecognition.onerror=null}catch{}this._speechRecognition.stop(),this._speechRecognition=null}return{success:!0}}catch(A){return this.emit("sttError",{code:"STOP_FAILED",message:A.message}),{success:!1,reason:A.message}}}startTranscription(){try{return this._transcriptionActive?(ji.debug("Transcription already active"),{success:!1,reason:"already_active"}):(this._transcriptionActive=!0,this._transcriptionChunks=[],this._currentTranscriptionPeerId=null,this._transcriptionEmittedStart=!1,ji.debug("Transcription started - waiting for audio buffer from client"),this.emit("transcriptionStarted"),{success:!0})}catch(A){return ji.error("Failed to start transcription:",A),this.emit("transcriptionError",{error:A.message}),{success:!1,reason:A.message}}}stopTranscription(){try{return this._transcriptionActive?(this._transcriptionRecorder&&"inactive"!==this._transcriptionRecorder.state&&this._transcriptionRecorder.stop(),this._transcriptionRecorder=null,this._transcriptionActive=!1,this._transcriptionChunks=[],this._currentTranscriptionPeerId=null,this._transcriptionEmittedStart=!1,ji.debug("Transcription stopped completely"),this.emit("transcriptionStopped"),{success:!0}):(ji.debug("Transcription not active"),{success:!1,reason:"not_active"})}catch(A){return ji.error("Failed to stop transcription:",A),this.emit("transcriptionError",{error:A.message}),{success:!1,reason:A.message}}}sendAudioForTranscription(A,I){try{return A?(this.sendCustomMessage({audioBuffer:A,timestamp:Date.now(),activeSpeakerPeerId:I},"general",null,"participant","deepgram:audio"),ji.debug(`Audio buffer sent for transcription from peer: ${I}`),{success:!0}):(ji.debug("No audio buffer provided"),{success:!1,reason:"no_audio_buffer"})}catch(A){return ji.error("Failed to send audio for transcription:",A),{success:!1,reason:A.message}}}isTranscriptionActive(){return this._transcriptionActive||!1}_processTranscriptionMessage=A=>{try{if("transcription"===A.type&&A.data){const I=A.data;this.emit("transcription",{transcript:I.transcript,confidence:I.confidence,isFinal:I.isFinal,timestamp:I.timestamp,speaker:I.speaker}),ji.debug("Transcription received:",I.transcript)}}catch(A){ji.error("Failed to process transcription message:",A)}};async enableCam({deviceId:A=null,videoResolution:I,forceVp8:g,forceVp9:C,forceH264:Q,h264Profile:B,forceFPS:E,enableWebcamLayers:i,numSimulcastStreams:D,videoBitRates:n,vbdetails:o}={}){if(ji.debug("enableWebcam()"),ji.debug("first vbdetails in enablecam",o),!this.data.inputParams.produce)return ji.debug("Produce status is set to false!"),{success:!1,error:!0,code:"REID004",text:"Error while trying to start Camera. Produce flag need to set to true while joining room in order to enable Camera."};if("connected"!==this._roomStatus)return ji.debug("Room status is not connected yet!"),{success:!1,error:!0,code:"REID005",text:`Error while trying to start Camera as room not in connected status. Current room status:!${this._roomStatus}`,possibleReasons:"Did you forget to call joinRoom before enabling Camera? 
OR if you have already initiated the joinRoom process, then Camera will be enabled automatically once room join process completes."};if(this._webcamProducer)return ji.debug("Camera is already active!"),{success:!1,warning:!0,code:"RWID003",text:"Error while trying to start Camera. Camera is already active!"};if(!this._device.canProduce("video"))return ji.error("enableWebcam() | cannot produce video"),{success:!1,error:!0,code:"REID006",text:"Error while trying to start Camera. Camera could not be activated due to limitations on this device. If you think this device has a functional camera and the problem persists even after multiple retries, please contact technical support with the error code."};let s,w;if(["hd","vga","qvga"].includes(I)&&(this.data.inputParams.videoResolution=I,this._webcam.resolution=I),g&&"boolean"==typeof g&&(this.data.inputParams.forceVp8=g),C&&"boolean"==typeof C&&(this.data.inputParams.forceVp9=C),Q&&"boolean"==typeof Q&&(this.data.inputParams.forceH264=Q),B&&["high","low"].includes(B.toLowerCase())&&(this.data.inputParams.h264Profile=B),E&&Number.isInteger(E)&&E<65&&E>5&&(this.data.inputParams.forceFPS=25),i&&"boolean"==typeof i&&(this.data.inputParams.enableWebcamLayers=i,this._enableWebcamLayers=i),D&&Number.isInteger(D)&&D<4&&D>0&&(this.data.inputParams.numSimulcastStreams=D,this._numSimulcastStreams=D),Array.isArray(n)&&n.length>=1&&n.length<=3&&n.every(A=>Number.isInteger(A)&&A>=75&&A<=800)?(ji.debug("videoBitRates values are correct"),this.data.inputParams.videoBitRates=n):ji.warn("videoBitRates should be an integer array with a minimum of 1 and a maximum of 3 elements, for example '[700,250,75]'"),this.data.inputParams.roomType===RE)return ji.debug("Enabling camera for P2P room"),A&&(this.data.inputParams.videoDeviceId=A),o&&(this.data.inputParams.vbdetails=o),await this.toggleCameraP2p(!0);try{if(this._externalVideo)w={label:"external video"},this._webCamStream=await this._getExternalVideoStream(),s=this._webCamStream.getVideoTracks()[0].clone();else{A?(w=this._deviceList.videoDevices.find(I=>I.deviceId===A),w||(ji.warn("Selected deviceId:%s not found",A),w=this._deviceList.videoDevices[0])):w=this._deviceList.videoDevices[0],this._webcam.device=w;const{resolution:I}=this._webcam;if(!w)return ji.error("No webcam device found! 
Can't start video!"),{success:!1,reason:"No Webcam available for starting video!"};A&&this.data.inputParams.videoDeviceId!==A&&(this.data.inputParams.videoDeviceId=A),ji.debug("enableWebcam() | calling getUserMedia()"),this._webCamStream=await navigator.mediaDevices.getUserMedia({video:{deviceId:{exact:w.deviceId},...Vi[I],frameRate:{ideal:this.data.inputParams.forceFPS}}}),s=this._webCamStream.getVideoTracks()[0]}let I,g;const C={videoGoogleStartBitrate:1e3};if(ji.debug("Current device codec options are:%O",this._device.rtpCapabilities.codecs),this._forceVP8){if(g=this._device.rtpCapabilities.codecs.find(A=>"video/vp8"===A.mimeType.toLowerCase()),!g)throw new Error("desired VP8 codec+configuration is not supported")}else if(this._forceH264){if("high"===this.data.inputParams.h264Profile?g=this._device.rtpCapabilities.codecs.find(A=>"video/h264"===A.mimeType.toLowerCase()&&"4d001f"===A.parameters["profile-level-id"]):"low"===this.data.inputParams.h264Profile&&(g=this._device.rtpCapabilities.codecs.find(A=>"video/h264"===A.mimeType.toLowerCase()&&"42e01f"===A.parameters["profile-level-id"])),!g)throw new Error("desired H264 codec+configuration is not supported");ji.debug("Selected h264 codec is:%O",g)}else if(this._forceVP9&&(g=this._device.rtpCapabilities.codecs.find(A=>"video/vp9"===A.mimeType.toLowerCase()),!g))throw new Error("desired VP9 codec+configuration is not supported");if(this._enableWebcamLayers){const A=this._device.rtpCapabilities.codecs.find(A=>"video"===A.kind);this._forceVP9&&g||"video/vp9"===A.mimeType.toLowerCase()?I=[{maxBitrate:5e6,scalabilityMode:this._webcamScalabilityMode||"L3T3_KEY"}]:(I=[{scaleResolutionDownBy:1,maxBitrate:1e3*this.data.inputParams.videoBitRates[0],scalabilityMode:this._webcamScalabilityMode||"L1T3"}],this._numSimulcastStreams>1&&I.unshift({scaleResolutionDownBy:2,maxBitrate:1e3*this.data.inputParams.videoBitRates[1],scalabilityMode:this._webcamScalabilityMode||"L1T3"}),this._numSimulcastStreams>2&&I.unshift({scaleResolutionDownBy:4,maxBitrate:1e3*this.data.inputParams.videoBitRates[2],scalabilityMode:this._webcamScalabilityMode||"L1T3"}))}if(o)try{const A=Xi&&Xi._localVBStream;let I=null;if(A&&"function"==typeof A.getVideoTracks&&A.getVideoTracks().length>0&&"live"===A.getVideoTracks()[0].readyState)I=A.getVideoTracks()[0],ji.debug("Using existing Virtual Background track");else{const A=await Xi.initializePipeline(s,o);A&&A.vbStream&&"function"==typeof A.vbStream.getVideoTracks&&A.vbStream.getVideoTracks().length>0&&(I=A.vbStream.getVideoTracks()[0],ji.debug("Initialized new Virtual Background pipeline"))}I&&(s=I)}catch(A){ji.debug("VB init failed or skipped in enableCam")}this._webcamProducer=await this._sendTransport.produce({track:s,encodings:I,codecOptions:C,codec:g,appData:{mediaTag:"cam-video"}}),this._producers.set("video",{id:this._webcamProducer.id,deviceLabel:w.label,type:this._getWebcamType(w),paused:this._webcamProducer.paused,track:this._webcamProducer.track,rtpParameters:this._webcamProducer.rtpParameters,codec:this._webcamProducer.rtpParameters.codecs[0].mimeType.split("/")[1]}),this.emit("videoStart",{peerId:this.data.inputParams.peerId,videoTrack:this._webcamProducer.track,type:"local"}),this._webcamProducer.on("transportclose",()=>{this._webcamProducer=null}),this._webcamProducer.on("trackended",()=>{this.disableCam().catch(()=>{})})}catch(A){ji.error("enableWebcam() | failed:%o",A),this.emit("error",{code:"EID011",text:"Enable Webcam failed!"}),s&&s.stop()}}async 
disableCam(){if(ji.debug("disableWebcam()"),this.data.inputParams.roomType===RE)return ji.debug("Disabling camera for P2P room"),await this.toggleCameraP2p(!1);if(this._webcamProducer){this._webcamProducer.close(),this._producers.delete("video");try{let A={id:"closeProducerSDK",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,type:"video",producerId:this._webcamProducer.id};this._sendMessage(A),this.emit("videoEnd",{peerId:this.data.inputParams.peerId,videoTrack:null})}catch(A){this.emit("error",{code:"EID012",text:"Error while closing server side producer!"})}try{this._webCamStream&&"function"==typeof this._webCamStream.getTracks&&this._webCamStream.getTracks().forEach(A=>{try{A.stop()}catch(A){}})}catch(A){}this._webCamStream=null,this._webcamProducer=null}else ji.debug("No webcam producer available")}async _updateWebcams(){ji.debug("_updateWebcams()"),this._webcams=new Map,ji.debug("_updateWebcams() | calling enumerateDevices()");const A=await navigator.mediaDevices.enumerateDevices();for(const I of A)"videoinput"===I.kind&&this._webcams.set(I.deviceId,I);const I=Array.from(this._webcams.values()),g=I.length,C=this._webcam.device?this._webcam.device.deviceId:void 0;ji.debug("_updateWebcams() [webcams:%o]",I),0===g?this._webcam.device=null:this._webcams.has(C)||(this._webcam.device=I[0])}async _getExternalVideoStream(){if(this._externalVideoStream)return this._externalVideoStream;if(this._externalVideo.readyState<3&&await new Promise(A=>this._externalVideo.addEventListener("canplay",A)),this._externalVideo.captureStream)this._externalVideoStream=this._externalVideo.captureStream();else{if(!this._externalVideo.mozCaptureStream)throw new Error("video.captureStream() not supported");this._externalVideoStream=this._externalVideo.mozCaptureStream()}return this._externalVideoStream}_getWebcamType(A){return/(back|rear)/i.test(A.label)?(ji.debug("_getWebcamType() | it seems to be a back camera"),"back"):(ji.debug("_getWebcamType() | it seems to be a front camera"),"front")}async changeVideoInput({resolution:A,deviceId:I,fps:g,vbdetails:C}){if(this.data?.inputParams?.roomType===RE){ji.info("changeVideoInput() | P2P room detected, delegating to changeVideoInputP2p");try{C&&(this.data.inputParams.vbdetails=C)}catch(A){}return await this.changeVideoInputP2p(I)}return this._webcamProducer?await this._changeVideoInput({resolution:A,deviceId:I,fps:g,vbdetails:C}):(ji.error("No webcam producer available!"),{success:!1,reason:"You are not sharing your camera yet. Camera Input can be changed to a new camera only when you are sharing an existing camera. 
"})}async _changeVideoInput({resolution:A,deviceId:I,fps:g,vbdetails:C}){ji.info("_changeVideoInput() | Inside"),A&&["hd","vga","qvga"].includes(A)?this._webcam.resolution=A:ji.warn("Invalid video resolution value "),g&&Number.isInteger(g)&&g<65&&g>5?this.data.inputParams.forceFPS=g:ji.warn("forceFPS should be a number between 5 to 65, default value is 25 fps.");let Q=this._deviceList.videoDevices.find(A=>I&&A.deviceId===I);if(!Q)return ji.error("The selected deviceId not found!"),{success:!1,reason:"Invalid deviceId!"};this._webcam.device=Q;try{this._webCamStream.getVideoTracks().forEach(A=>A.stop()),this._webCamStream=null,ji.debug("changeVideoInput() | calling getUserMedia()"),this._webCamStream=await navigator.mediaDevices.getUserMedia({video:{deviceId:{exact:Q.deviceId},...Vi[this._webcam.resolution],frameRate:{ideal:this.data.inputParams.forceFPS}}});let A=this._webCamStream.getVideoTracks()[0];if(ji.debug("The new video track is:%O",A),C)try{const I=await Xi.initializePipeline(A,C);I&&I.vbStream&&"function"==typeof I.vbStream.getVideoTracks&&I.vbStream.getVideoTracks()[0]&&(A=I.vbStream.getVideoTracks()[0],ji.debug("Reinitialized VB pipeline for changed camera"))}catch(A){ji.debug("VB init skipped/failed on changeVideoInput")}await this._webcamProducer.replaceTrack({track:A});let I=this._producers.get("video");return I.deviceLabel=Q.label,I.type=this._getWebcamType(Q),I.track=this._webcamProducer.track,ji.debug("Updated producer values are:%O",I),this._producers.set("video",I),this.emit("videoStart",{peerId:this.data.inputParams.peerId,videoTrack:A,type:"local"}),{success:!0}}catch(A){return ji.error("Error while changing input:%O",A),{success:!1,reason:"Couldn't change video input",error:A}}}async changeAudioInput({autoGainControl:A,echoCancellation:I,noiseSuppression:g,sampleRate:C,channelCount:Q,deviceId:B}){if(this.data?.inputParams?.roomType===RE){ji.info("changeAudioInput() | P2P room detected, delegating to changeAudioInputP2p");try{B&&(this.data.inputParams.audioDeviceId=B)}catch(A){}return await this.changeAudioInputP2p(B)}return this._micProducer?await this._changeAudioInput({autoGainControl:A,echoCancellation:I,noiseSuppression:g,sampleRate:C,channelCount:Q,deviceId:B}):{success:!1,reason:"You are not sharing your mic yet. Mic Input can be changed to a new mic only when you are sharing an existing mic. 
"}}async _changeAudioInput({autoGainControl:A,echoCancellation:I,noiseSuppression:g,sampleRate:C,channelCount:Q,deviceId:B}){A&&"boolean"==typeof A&&(this.data.inputParams.autoGainControl=A),I&&"boolean"==typeof I&&(this.data.inputParams.echoCancellation=Boolean(I)),g&&"boolean"==typeof g&&(this.data.inputParams.noiseSuppression=Boolean(g)),C&&Number.isInteger(C)&&C<64e3&&C>8e3&&(this.data.inputParams.sampleRate=C),Q&&Number.isInteger(Q)&&Q>0&&Q<3&&(this.data.inputParams.channelCount=Q);let E=this._deviceList.audioDevices.find(A=>B&&A.deviceId===B);if(!E)return{success:!1,reason:"Invalid deviceId!"};this._mic.device=E,this._micStream&&this._micStream.getAudioTracks().forEach(A=>A.stop()),this._micStream=null;try{this._micStream=await navigator.mediaDevices.getUserMedia({audio:{deviceId:{exact:E.deviceId},echoCancellation:this.data.inputParams.echoCancellation,noiseSuppression:this.data.inputParams.noiseSuppression,autoGainControl:this.data.inputParams.autoGainControl,sampleRate:this.data.inputParams.sampleRate,channelCount:this.data.inputParams.channelCount}});const A=this._micStream.getAudioTracks()[0];this._micProducer.replaceTrack({track:A});let I=this._producers.get("audio");return I.deviceLabel=E.label,I.track=this._micProducer.track,ji.debug("Updated producer values are:%O",I),this._producers.set("audio",I),this.emit("micStart",{peerId:this.data.inputParams.peerId,audioTrack:this._micProducer.track,type:"local"}),{success:!0}}catch(A){return ji.error("Error while changing input:%O",A),{success:!1,reason:"Couldn't change audio input",err:A}}}toggleVB=async A=>{if(A&&this._localCamVideo.getVideoTracks()[0]){const A=await this.initializePipeline(this._localCamVideo.getVideoTracks()[0],{type:"blur"});if(ji.debug("response is :%o, localVBTrack is:%O",A,this._localVBStream.getVideoTracks()[0]),A.success&&this._localVBStream.getVideoTracks()[0]){if(this._roomType===KE||this._roomType===ME)if(this._camVideoProducer&&store.getState().conf.joined){await this._camVideoProducer.replaceTrack({track:this._localVBStream.getVideoTracks()[0].clone()});let A={...this._participants,[this._peerId]:{...this._participants[this._peerId],videoRef:this._localVBStream.getVideoTracks()[0]}};this._participants=A,store.dispatch(confActions.addParticipant(A))}else ji.debug("Camvideoproducer not available! virtual background changes in the landing page! ");else if(this._roomType===NE&&this._peerConnection){const A=this._peerConnection.getSenders().find(function(A){return"video"===A.track.kind});if(ji.debug("found sender:%o",A),A&&this._localVBStream.getVideoTracks()[0]){A.replaceTrack(this._localVBStream.getVideoTracks()[0]);let I={...this._participants,[this._peerId]:{...this._participants[this._peerId],videoRef:this._localVBStream.getVideoTracks()[0]}};this._participants=I,store.dispatch(confActions.addParticipant(I))}else this.showNotification("danger","Error!","Unable to switch off virtual background! 
Try again OR contact support with code:CM-FE-RC-VB-E04")}}else ji.error("Virtual background procesing can't be enabled")}else if(this._localVBStream?.getVideoTracks()[0].stop(),this._localVBStream=null,this._pipelineManager.stop(),this._vbDetailsNew.sourcePlayback&&(this._vbDetailsNew.sourcePlayback.htmlElement.srcObject=null),this._vbDetailsNew.sourcePlayback?.htmlElement.remove(),this._vbDetailsNew?.hiddenCanvas.remove(),this._vbDetailsNew?.hiddenImage?.remove(),this._vbDetailsNew.sourcePlayback=null,this._vbDetailsNew.hiddenCanvas=null,this._vbDetailsNew.hiddenImage=null,store.dispatch(confActions.setVBItemsStatus(A)),ji.debug("Garbage collection completed. Set the video to original video!"),store.getState().conf.videoStatus)if(this._roomType===KE||this._roomType===ME)if(this._camVideoProducer&&store.getState().conf.joined&&this._localCamVideo.getVideoTracks()[0]&&store.getState().conf.joined&&"live"===this._localCamVideo.getVideoTracks()[0].readyState){await this._camVideoProducer.replaceTrack({track:this._localCamVideo.getVideoTracks()[0].clone()});let A={...this._participants,[this._peerId]:{...this._participants[this._peerId],videoRef:this._localCamVideo.getVideoTracks()[0]}};this._participants=A,store.dispatch(confActions.addParticipant(A))}else ji.debug("Camvideoproducer not available! virtual background changes in the landing page! ");else if(this._roomType===NE&&this._peerConnection){const A=this._peerConnection.getSenders().find(function(A){return"video"===A.track.kind});if(ji.debug("found sender:%o",A),A&&this._localCamVideo.getVideoTracks()[0]&&"live"===this._localCamVideo.getVideoTracks()[0].readyState){A.replaceTrack(this._localCamVideo.getVideoTracks()[0]);let I={...this._participants,[this._peerId]:{...this._participants[this._peerId],videoRef:this._localCamVideo.getVideoTracks()[0]}};this._participants=I,store.dispatch(confActions.addParticipant(I))}else this.showNotification("danger","Error!","Unable to switch off virtual background! Try again OR contact support with code:CM-FE-RC-VB-E04")}};setVBDetails=async A=>{this._vbDetails=A;try{this.data&&this.data.inputParams&&(this.data.inputParams.vbdetails=A)}catch(A){}if(this._roomType!==KE&&this._roomType!==ME||!store.getState().conf.joined){if(this._roomType===NE&&this._peerConnection){const I=this._peerConnection.getSenders().find(function(A){return"video"===A.track.kind});ji.debug("found sender:%o",I),I?I.replaceTrack(A.stream.getVideoTracks()[0]):this.showNotification("danger","Error!","Unable to set virtual background! 
Try again OR contact support with code:CM-FE-RC-VB-E02")}}else if(this._camVideoProducer){ji.debug("Going to replace the video track for cam video producer!");try{await this._camVideoProducer.replaceTrack({track:A.stream.getVideoTracks()[0]})}catch(A){ji.debug("vb set error",A)}ji.debug("all participants",this._participants),ji.debug("this._localCamVideo",this._localCamVideo)}else ji.warn("Camvideo producer is not available yet!")};async enableShare({shareAudio:A=!1,enableSharingLayers:I=!0,shareBitRates:g=[2500,1250,500]}={}){if(ji.debug("enableShare()"),this.data.inputParams.roomType===RE){ji.debug("Room type is P2P - using P2P screen share");try{return await this.enableScreenShareP2p(),{success:!0}}catch(A){return ji.error("Error enabling screen share in P2P mode:%O",A),{success:!1,error:!0,code:"REID014",text:`Error while trying to start screen share in P2P mode: ${A.message}`}}}if(!this.data.inputParams.produce)return ji.debug("Produce status is set to false!"),{success:!1,error:!0,code:"REID003",text:"Error while trying to start screen share. Produce flag need to set to true while joining room in order to enable screen share."};if("connected"!==this._roomStatus)return ji.debug("Room status is not connected yet!"),{success:!1,error:!0,code:"REID001",text:`Error while trying to start screen share as room not in connected status. Current room status:!${this._roomStatus}`,possibleReasons:"Did you forget to call joinRoom before enabling screen share? OR if you have already initiated the joinRoom process, then try enabling screen share after some seconds."};if(this._shareProducer)return ji.debug("Screen share is already active!"),{success:!1,warning:!0,code:"RWID001",text:"Error while trying to start screen share. Screen share is already active!"};if(!this._device.canProduce("video"))return ji.error("enableShare() | cannot produce video"),{success:!1,error:!0,code:"REID002",text:"Error while trying to start screen share. Screen share couldnot be activated due to limitations on this device. If you think this device is capable of screen share and the problem persists even after multiple retries, please contact technical support with the error code."};let C,Q;this._enableSharingLayers="boolean"!=typeof I?Boolean(I):I,Array.isArray(g)&&g.length>=1&&g.length<=3&&g.every(A=>Number.isInteger(A)&&A>=500&&A<=2500)?this.data.inputParams.shareBitRates=g:this.data.inputParams.shareBitRates=[2500,1250,500];try{ji.debug("enableShare() | calling getDisplayMedia()");const I=await navigator.mediaDevices.getDisplayMedia({audio:!!A,video:{displaySurface:"monitor",logicalSurface:!0,cursor:!0,width:{max:1920},height:{max:1080},frameRate:{max:30}}});if(!I)return ji.error("Unable to capture screen."),void this.emit("error",{code:"EID013",text:"Error while trying to start screen share. 
Not able to capture screen!"});let g,B;Q=I.getAudioTracks()[0],Q&&(this._shareAudioProducer=await this._sendTransport.produce({track:Q,codecOptions:this.data.inputParams.forcePCMU?void 0:{opusStereo:!1,opusDtx:!0,opusFec:!0,opusNack:!0},codec:this.data.inputParams.forcePCMU?this._device.rtpCapabilities.codecs.find(A=>"audio/pcmu"===A.mimeType.toLowerCase()):void 0,appData:{mediaTag:"screen-audio"}}),this._producers.set("ssAudio",{id:this._shareAudioProducer.id,type:"shareAudio",paused:this._shareAudioProducer.paused,track:this._shareAudioProducer.track,rtpParameters:this._shareAudioProducer.rtpParameters,codec:this._shareAudioProducer.rtpParameters.codecs[0].mimeType.split("/")[1]}),this.emit("ssAudioStart",{peerId:this.data.inputParams.peerId,audioTrack:this._shareAudioProducer.track,type:"local"})),C=I.getVideoTracks()[0];const E={videoGoogleStartBitrate:1e3};if(this._forceVP8){if(B=this._device.rtpCapabilities.codecs.find(A=>"video/vp8"===A.mimeType.toLowerCase()),!B)throw new Error("desired VP8 codec+configuration is not supported")}else if(this._forceH264){if("high"===this.data.inputParams.h264Profile?B=this._device.rtpCapabilities.codecs.find(A=>"video/h264"===A.mimeType.toLowerCase()&&"4d001f"===A.parameters["profile-level-id"]):"low"===this.data.inputParams.h264Profile&&(B=this._device.rtpCapabilities.codecs.find(A=>"video/h264"===A.mimeType.toLowerCase()&&"42e01f"===A.parameters["profile-level-id"])),!B)throw new Error("desired H264 codec+configuration is not supported");ji.debug("Selected h264 codec is:%O",B)}else if(this._forceVP9&&(B=this._device.rtpCapabilities.codecs.find(A=>"video/vp9"===A.mimeType.toLowerCase()),!B))throw new Error("desired VP9 codec+configuration is not supported");if(this._enableSharingLayers){const A=this._device.rtpCapabilities.codecs.find(A=>"video"===A.kind);this._forceVP9&&B||"video/vp9"===A.mimeType.toLowerCase()?g=[{maxBitrate:1e3*this.data.inputParams.shareBitRates[0],scalabilityMode:this._sharingScalabilityMode||"L3T3",dtx:!0}]:(g=[{scaleResolutionDownBy:1,maxBitrate:1e3*this.data.inputParams.shareBitRates[0],scalabilityMode:this._sharingScalabilityMode||"L1T3",dtx:!0}],this._numSimulcastStreams>1&&g.unshift({scaleResolutionDownBy:2,maxBitrate:1e3*this.data.inputParams.shareBitRates[1],scalabilityMode:this._sharingScalabilityMode||"L1T3",dtx:!0}),this._numSimulcastStreams>2&&g.unshift({scaleResolutionDownBy:4,maxBitrate:1e3*this.data.inputParams.shareBitRates[2],scalabilityMode:this._sharingScalabilityMode||"L1T3",dtx:!0}))}this._shareProducer=await this._sendTransport.produce({track:C,encodings:g,codecOptions:E,codec:B,appData:{mediaTag:"screen-video"}}),this._producers.set("ssVideo",{id:this._shareProducer.id,type:"shareVideo",paused:this._shareProducer.paused,track:this._shareProducer.track,rtpParameters:this._shareProducer.rtpParameters,codec:this._shareProducer.rtpParameters.codecs[0].mimeType.split("/")[1]}),this.emit("ssVideoStart",{peerId:this.data.inputParams.peerId,videoTrack:this._shareProducer.track,type:"local"}),this._shareProducer.on("transportclose",()=>{this._shareProducer=null}),this._shareProducer.on("trackended",()=>{this.disableShare().catch(()=>{})})}catch(A){ji.error("enableShare() | failed:%o",A),"NotAllowedError"!==A.name&&this.emit("error",{code:"EID014",text:`Error while trying to start screen share. 
Error is: ${A}!`}),C&&C.stop()}}async disableShare(){if(ji.debug("disableShare()"),this.data.inputParams.roomType===RE){ji.debug("Room type is P2P - using P2P screen share disable");try{return await this.disableScreenShareP2p(),{success:!0}}catch(A){return ji.error("Error disabling screen share in P2P mode:%O",A),void this.emit("error",{code:"EID017",text:`Error while trying to stop screen share in P2P mode: ${A.message}`})}}if(!this._shareProducer)return ji.warn("Screen share doesn't seem to be on!"),void this.emit("error",{code:"EID017",text:"Error while trying to stop screen share. Is the screen share on!"});if(this._shareProducer.close(),this._shareAudioProducer){this._shareAudioProducer.close();try{let A={id:"closeProducerSDK",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,type:"audio",producerId:this._shareAudioProducer.id};this._sendMessage(A),this.emit("ssAudioStop",{peerId:this.data.inputParams.peerId,videoTrack:null,type:"local"});try{this._shareAudioProducer.track&&this._shareAudioProducer.track.stop&&this._shareAudioProducer.track.stop()}catch(A){}}catch(A){this.emit("error",{code:"EID015",text:`Error while trying to stop screen share audio. Error is: ${A}!`})}}try{let A={id:"closeProducerSDK",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,type:"video",producerId:this._shareProducer.id};this._sendMessage(A),this.emit("ssVideoStop",{peerId:this.data.inputParams.peerId,videoTrack:null,type:"local"});try{this._shareProducer.track&&this._shareProducer.track.stop&&this._shareProducer.track.stop()}catch(A){}}catch(A){this.emit("error",{code:"EID016",text:`Error while trying to stop screen share video. Error is: ${A}!`})}this._shareAudioProducer=null,this._shareProducer=null}upgradeParticipant=(A,I=!0,g=!1)=>{ji.debug("JsSdk upgradeParticipant()",`Upgrading ${A}`);const C={id:"upgradeParticipant",peerId:A,roomName:this.data.inputParams.roomId,audioStatus:I,videoStatus:g};this._sendMessage(C)};downgradeParticipant=A=>{ji.debug("JsSdk downgradeParticipant()",`Downgrading ${A}`);const I={id:"downgradeParticipant",peerId:A,roomName:this.data.inputParams.roomId,sendTransportId:null};this._sendMessage(I)};sendUpgradeRequest=(A,I=!0)=>{ji.debug("JsSdk sendUpgradeRequest()",`Sending upgrade request to ${A}, status: ${I}`);const g={id:"modUpgradeReq",peerId:A,status:I,moderator:this.data.inputParams.peerId};this._sendMessage(g),this.emit("upgradeRequestSent",{peerId:A,status:I})};acceptUpgradeRequest=(A=!0,I=!1)=>{ji.debug("JsSdk acceptUpgradeRequest()","Accepting upgrade request");const g={id:"upgradeReqAccepted",audioStatus:A,videoStatus:I};this._sendMessage(g)};rejectUpgradeRequest=A=>{ji.debug("JsSdk rejectUpgradeRequest()","Rejecting upgrade request");const I={id:"modUpgradeReq",peerId:this.data.inputParams.peerId,status:!1};this._sendMessage(I),this.emit("upgradeRequestRejected",{moderatorPeerId:A})};raiseHand=()=>{ji.debug("JsSdk raiseHand()");const A={id:"handRaise",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,status:!0,handRaised:!0};this._sendMessage(A),this.emit("handRaised",{peerId:this.data.inputParams.peerId})};dropHand=(A=null,I=!1)=>{const g=A||this.data.inputParams.peerId;ji.debug("JsSdk dropHand()",`Dropping hand for ${g}`);const C={id:"handRaise",peerId:g,roomName:this.data.inputParams.roomId,status:!1,handRaised:!1,moderator:I};this._sendMessage(C),this.emit("handDropped",{peerId:g,moderator:I})};requestUpgradeToPresenter=async()=>{try{const 
A={id:"handRaise",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,status:!0,handRaised:!0,upgradeRequest:!0};return this._sendMessage(A),this.emit("upgradeRequestSent",{status:"success",message:"Upgrade request sent"}),{success:!0}}catch(A){return ji.error("requestUpgradeToPresenter failed",A),{success:!1,reason:A?.message||"unknown_error"}}};handleUpgradeParticipant=async A=>{const{peerId:I,audioStatus:g,videoStatus:C}=A;ji.debug("JsSdk handleUpgradeParticipant()",`Upgrade received for ${I}`),this.emit("participantUpgraded",{peerId:I,audioStatus:g,videoStatus:C,participantType:"presenter"}),I===this.data.inputParams.peerId&&(ji.debug("JsSdk handleUpgradeParticipant()","Current user upgraded to presenter"),this.data.inputParams.peerType="presenter",this.data.inputParams.produce=!0,this.data.inputParams.produceAudio=!0,this.data.inputParams.produceVideo=!0,this._sendTransport||await this._createSendTransport(),this.emit("upgraded",{audioStatus:g,videoStatus:C,participantType:"presenter"}))};handleDowngradeParticipant=async A=>{const{peerId:I}=A;if(ji.debug("JsSdk handleDowngradeParticipant()",`Downgrade received for ${I}`),this.emit("participantDowngraded",{peerId:I,participantType:"viewer"}),I===this.data.inputParams.peerId){if(ji.debug("JsSdk handleDowngradeParticipant()","Current user downgraded to viewer"),this.data.inputParams.peerType="viewer",this.data.inputParams.produce=!1,this.data.inputParams.produceAudio=!1,this.data.inputParams.produceVideo=!1,this._sendTransport&&(this._sendTransport.close(),this._sendTransport=null),this._camAudioProducer){try{this._camAudioProducer.track&&this._camAudioProducer.track.stop&&this._camAudioProducer.track.stop()}catch(A){}this._camAudioProducer.close(),this._camAudioProducer=null}if(this._camVideoProducer){try{this._camVideoProducer.track&&this._camVideoProducer.track.stop&&this._camVideoProducer.track.stop()}catch(A){}this._camVideoProducer.close(),this._camVideoProducer=null}if(this._shareProducer){try{this._shareProducer.track&&this._shareProducer.track.stop&&this._shareProducer.track.stop()}catch(A){}this._shareProducer.close(),this._shareProducer=null,this.emit("ssVideoStop",{peerId:this.data.inputParams.peerId,type:"local"})}if(this._shareAudioProducer){try{this._shareAudioProducer.track&&this._shareAudioProducer.track.stop&&this._shareAudioProducer.track.stop()}catch(A){}this._shareAudioProducer.close(),this._shareAudioProducer=null}this._localCamVideo&&(this._localCamVideo.stop(),this._localCamVideo=null),this._localMicAudio&&(this._localMicAudio.stop(),this._localMicAudio=null),this._webCamStream&&(this._webCamStream.getTracks().forEach(A=>A.stop()),this._webCamStream=null),this._micStream&&(this._micStream.getTracks().forEach(A=>A.stop()),this._micStream=null),this.emit("downgraded",{participantType:"viewer"})}};handleModUpgradeReq=A=>{const{peerId:I,status:g,moderator:C}=A;ji.debug("JsSdk handleModUpgradeReq()",`Moderator upgrade request received. 
Status: ${g}`),"moderator"!==this.data.inputParams.peerType||g?g?this.emit("upgradeRequestReceived",{peerId:this.data.inputParams.peerId,moderator:C,message:"Moderator requested Participant to Presenter upgrade"}):this.emit("upgradeRequestCancelled",{peerId:this.data.inputParams.peerId,moderator:C,message:"Moderator cancelled Participant to Presenter upgrade request"}):this.emit("upgradeRequestRejected",{peerId:I,moderator:C,message:"Participant rejected request for upgrade to Presenter"})};handleUpgradeLimitReached=A=>{ji.debug("JsSdk handleUpgradeLimitReached()",A),this.emit("upgradeLimitReached",{message:A.text||"Maximum number of presenters reached",limit:A.limit})};handleHandRaise=A=>{const{peerId:I,handRaised:g,peerName:C,upgradeRequest:Q}=A;ji.debug("JsSdk handleHandRaise()",`Hand raise status: ${g} for ${I}`),this.emit("handRaise",{peerId:I,handRaised:g,peerName:C,upgradeRequest:!!Q})};handleSwitchMicOff=A=>{ji.debug("JsSdk handleSwitchMicOff()",A),this.emit("micForcedOff",{message:A.text||"Moderator turned off your microphone"}),this._camAudioProducer&&!this._camAudioProducer.paused&&this.muteMic()};handleScreenShareLimitReached=A=>{ji.debug("JsSdk handleScreenShareLimitReached()",A),this.emit("screenShareLimitReached",{message:A.text||"Maximum number of screen shares reached",limit:A.limit})};handleLockUnlockRoom=A=>{const{locked:I}=A;ji.debug("JsSdk handleLockUnlockRoom()","Room "+(I?"locked":"unlocked")),this.emit("roomLockStatusChanged",{locked:I,message:I?"Room has been locked":"Room has been unlocked"})};handlePeersWaiting=A=>{const{peersWaiting:I}=A;ji.debug("JsSdk handlePeersWaiting()",`${I?.length||0} peers waiting`),this.emit("peersWaiting",{peersWaiting:I||[],count:I?.length||0})};logMeOutNew=async()=>{try{ji.debug("Room","inside log me out new"),this.emit("roomClosed",{roomId:this.data.inputParams.roomId,reason:"removed_by_moderator"})}catch(A){}await this.leaveRoom()};logThisUserOutOfMeeting=A=>{if(ji.debug("Room","inside log this user out of meeting"),A===this.data.inputParams.peerId)ji.debug("ConferenceRoom","logging myself Out"),this.leaveRoom();else try{var I={id:"logThisUserOut",peerId:A,moderatorPeerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId};this._sendMessage(I)}catch(A){ji.error("Room",A)}}}const $i="cp2p-client";class AD{constructor(A){A?(this._debug=ag(`${$i}:${A}`),this._info=ag(`${$i}:INFO:${A}`),this._warn=ag(`${$i}:WARN:${A}`),this._error=ag(`${$i}:ERROR:${A}`)):(this._debug=ag($i),this._info=ag(`${$i}:INFO`),this._warn=ag(`${$i}:WARN`),this._error=ag(`${$i}:ERROR`)),this._debug.log=console.info.bind(console),this._info.log=console.info.bind(console),this._warn.log=console.warn.bind(console),this._error.log=console.error.bind(console)}get debug(){return this._debug}get info(){return this._info}get warn(){return this._warn}get error(){return this._error}}const ID={audio:{deviceId:{exact:void 0}},video:!1},gD={video:{deviceId:{exact:void 0},width:{min:320,ideal:640,max:1280},height:{min:240,ideal:480,max:720},frameRate:{min:15,max:30}}},CD={audio:!0,video:{width:{min:320,ideal:1280,max:1280},height:{min:240,ideal:720,max:720},aspectRatio:1.777777778,frameRate:{min:15,max:30}}},QD=new AD("socket");class BD extends hg.EventEmitter{constructor({url:A,roomId:I,peerId:g,peerName:C,role:Q}){super(),QD.debug("constructor():%o ",{url:A,roomId:I,peerId:g,peerName:C,role:Q}),this._closed=!1,this._params={url:A,roomId:I,peerId:g,peerName:C,role:Q},this._socket=null,this._connectionStatus=null,this._createSocket()}get closed(){return 
this._closed}get connectionStatus(){return this._connectionStatus}close(){if(!this._closed){QD.debug("close()"),this._closed=!0,this.emit("close");try{this._socket.disconnect()}catch(A){QD.error("close() | error closing the Socket:%o",A)}}}async send(A){if(this._closed)throw new Error("transport closed");try{this._socket.send(JSON.stringify(A))}catch(A){throw QD.warn("send() failed:%o",A),A}}async request({type:A,message:I}){return new Promise(g=>{if(this._closed)throw new Error("transport closed");try{this._socket.emit(A,JSON.stringify(I),A=>{g(A)})}catch(A){throw QD.warn("emit() failed:%o",A),A}})}async _createSocket(){let A=this;const I=io(this._params.url,{query:{roomId:this._params.roomId,peerId:this._params.peerId,peerName:this._params.peerName,role:this._params.role}});I.on("connect",()=>{QD.debug("Socket connected!!"),A._connectionStatus=!0,A.emit("connected")}),I.on("disconnect",()=>{QD.debug("Socket disconnected!!"),A._connectionStatus=!1,A.emit("disconnected")}),I.on("reconnect",()=>{QD.debug("Socket reconnected after disconnect!!"),I.emit("reconnected")}),I.on("message",I=>{const g=JSON.parse(I);QD.debug("New mesage received with id:%s",g.type),A.emit("message",g)}),this._socket=I}}(function(){var A={function:!0,object:!0},I=A[typeof window]&&window||this,g=A[typeof exports]&&exports,C=A[typeof module]&&module&&!module.nodeType&&module,Q=g&&C&&"object"==typeof globalThis&&globalThis;!Q||Q.global!==Q&&Q.window!==Q&&Q.self!==Q||(I=Q);var B=Math.pow(2,53)-1,E=/\bOpera/,i=Object.prototype,D=i.hasOwnProperty,n=i.toString;function o(A){return(A=String(A)).charAt(0).toUpperCase()+A.slice(1)}function s(A){return A=R(A),/^(?:webOS|i(?:OS|P))/.test(A)?A:o(A)}function w(A,I){for(var g in A)D.call(A,g)&&I(A[g],g,A)}function a(A){return null==A?o(A):n.call(A).slice(8,-1)}function y(A){return String(A).replace(/([ -])(?!$)/g,"$1?")}function G(A,I){var g=null;return function(A,I){var g=-1,C=A?A.length:0;if("number"==typeof C&&C>-1&&C<=B)for(;++g<C;)I(A[g],g);else w(A,I)}(A,function(C,Q){g=I(g,C,Q,A)}),g}function R(A){return String(A).replace(/^ +| +$/g,"")}var S=function A(g){var C=I,Q=g&&"object"==typeof g&&"String"!=a(g);Q&&(C=g,g=null);var B=C.navigator||{},i=B.userAgent||"";g||(g=i);var D,o,S,h,N,K=Q?!!B.likeChrome:/\bChrome\b/.test(g)&&!/internal|\n/i.test(n.toString()),M="Object",F=Q?M:"ScriptBridgingProxyObject",L=Q?M:"Environment",k=Q&&C.java?"JavaPackage":a(C.java),J=Q?M:"RuntimeObject",U=/\bJava/.test(k)&&C.java,t=U&&a(C.environment)==L,q=U?"a":"α",r=U?"b":"β",c=C.document||{},H=C.operamini||C.opera,e=E.test(e=Q&&H?H["[[Class]]"]:a(H))?e:H=null,Y=g,d=[],b=null,x=g==i,u=x&&H&&"function"==typeof H.version&&H.version(),p=G([{label:"EdgeHTML",pattern:"Edge"},"Trident",{label:"WebKit",pattern:"AppleWebKit"},"iCab","Presto","NetFront","Tasman","KHTML","Gecko"],function(A,I){return A||RegExp("\\b"+(I.pattern||y(I))+"\\b","i").exec(g)&&(I.label||I)}),T=G(["Adobe AIR","Arora","Avant Browser","Breach","Camino","Electron","Epiphany","Fennec","Flock","Galeon","GreenBrowser","iCab","Iceweasel","K-Meleon","Konqueror","Lunascape","Maxthon",{label:"Microsoft Edge",pattern:"(?:Edge|Edg|EdgA|EdgiOS)"},"Midori","Nook Browser","PaleMoon","PhantomJS","Raven","Rekonq","RockMelt",{label:"Samsung Internet",pattern:"SamsungBrowser"},"SeaMonkey",{label:"Silk",pattern:"(?:Cloud9|Silk-Accelerated)"},"Sleipnir","SlimBrowser",{label:"SRWare Iron",pattern:"Iron"},"Sunrise","Swiftfox","Vivaldi","Waterfox","WebPositive",{label:"Yandex Browser",pattern:"YaBrowser"},{label:"UC Browser",pattern:"UCBrowser"},"Opera 
Mini",{label:"Opera Mini",pattern:"OPiOS"},"Opera",{label:"Opera",pattern:"OPR"},"Chromium","Chrome",{label:"Chrome",pattern:"(?:HeadlessChrome)"},{label:"Chrome Mobile",pattern:"(?:CriOS|CrMo)"},{label:"Firefox",pattern:"(?:Firefox|Minefield)"},{label:"Firefox for iOS",pattern:"FxiOS"},{label:"IE",pattern:"IEMobile"},{label:"IE",pattern:"MSIE"},"Safari"],function(A,I){return A||RegExp("\\b"+(I.pattern||y(I))+"\\b","i").exec(g)&&(I.label||I)}),m=f([{label:"BlackBerry",pattern:"BB10"},"BlackBerry",{label:"Galaxy S",pattern:"GT-I9000"},{label:"Galaxy S2",pattern:"GT-I9100"},{label:"Galaxy S3",pattern:"GT-I9300"},{label:"Galaxy S4",pattern:"GT-I9500"},{label:"Galaxy S5",pattern:"SM-G900"},{label:"Galaxy S6",pattern:"SM-G920"},{label:"Galaxy S6 Edge",pattern:"SM-G925"},{label:"Galaxy S7",pattern:"SM-G930"},{label:"Galaxy S7 Edge",pattern:"SM-G935"},"Google TV","Lumia","iPad","iPod","iPhone","Kindle",{label:"Kindle Fire",pattern:"(?:Cloud9|Silk-Accelerated)"},"Nexus","Nook","PlayBook","PlayStation Vita","PlayStation","TouchPad","Transformer",{label:"Wii U",pattern:"WiiU"},"Wii","Xbox One",{label:"Xbox 360",pattern:"Xbox"},"Xoom"]),l=G({Apple:{iPad:1,iPhone:1,iPod:1},Alcatel:{},Archos:{},Amazon:{Kindle:1,"Kindle Fire":1},Asus:{Transformer:1},"Barnes & Noble":{Nook:1},BlackBerry:{PlayBook:1},Google:{"Google TV":1,Nexus:1},HP:{TouchPad:1},HTC:{},Huawei:{},Lenovo:{},LG:{},Microsoft:{Xbox:1,"Xbox One":1},Motorola:{Xoom:1},Nintendo:{"Wii U":1,Wii:1},Nokia:{Lumia:1},Oppo:{},Samsung:{"Galaxy S":1,"Galaxy S2":1,"Galaxy S3":1,"Galaxy S4":1},Sony:{PlayStation:1,"PlayStation Vita":1},Xiaomi:{Mi:1,Redmi:1}},function(A,I,C){return A||(I[m]||I[/^[a-z]+(?: +[a-z]+\b)*/i.exec(m)]||RegExp("\\b"+y(C)+"(?:\\b|\\w*\\d)","i").exec(g))&&C}),O=G(["Windows Phone","KaiOS","Android","CentOS",{label:"Chrome OS",pattern:"CrOS"},"Debian",{label:"DragonFly BSD",pattern:"DragonFly"},"Fedora","FreeBSD","Gentoo","Haiku","Kubuntu","Linux Mint","OpenBSD","Red Hat","SuSE","Ubuntu","Xubuntu","Cygwin","Symbian OS","hpwOS","webOS ","webOS","Tablet OS","Tizen","Linux","Mac OS X","Macintosh","Mac","Windows 98;","Windows "],function(A,I){var C=I.pattern||y(I);return!A&&(A=RegExp("\\b"+C+"(?:/[\\d.]+|[ \\w.]*)","i").exec(g))&&(A=function(A,I,g){var C={"10.0":"10",6.4:"10 Technical Preview",6.3:"8.1",6.2:"8",6.1:"Server 2008 R2 / 7","6.0":"Server 2008 / Vista",5.2:"Server 2003 / XP 64-bit",5.1:"XP",5.01:"2000 SP1","5.0":"2000","4.0":"NT","4.90":"ME"};return I&&g&&/^Win/i.test(A)&&!/^Windows Phone /i.test(A)&&(C=C[/[\d.]+$/.exec(A)])&&(A="Windows "+C),A=String(A),I&&g&&(A=A.replace(RegExp(I,"i"),g)),s(A.replace(/ ce$/i," CE").replace(/\bhpw/i,"web").replace(/\bMacintosh\b/,"Mac OS").replace(/_PowerPC\b/i," OS").replace(/\b(OS X) [^ \d]+/i,"$1").replace(/\bMac (OS X)\b/,"$1").replace(/\/(\d)/," $1").replace(/_/g,".").replace(/(?: BePC|[ .]*fc[ \d.]+)$/i,"").replace(/\bx86\.64\b/gi,"x86_64").replace(/\b(Windows Phone) OS\b/,"$1").replace(/\b(Chrome OS \w+) [\d.]+\b/,"$1").split(" on ")[0])}(A,C,I.label||I)),A});function f(A){return G(A,function(A,I){var C=I.pattern||y(I);return!A&&(A=RegExp("\\b"+C+" *\\d+[.\\w_]*","i").exec(g)||RegExp("\\b"+C+" *\\w+-[\\w]*","i").exec(g)||RegExp("\\b"+C+"(?:; *(?:[a-z]+[_-])?[a-z]+\\d+|[^ ();-]*)","i").exec(g))&&((A=String(I.label&&!RegExp(C,"i").test(I.label)?I.label:A).split("/"))[1]&&!/[\d.]+/.test(A[0])&&(A[0]+=" "+A[1]),I=I.label||I,A=s(A[0].replace(RegExp(C,"i"),I).replace(RegExp("; *(?:"+I+"[_-])?","i")," ").replace(RegExp("("+I+")[-_.]?(\\w)","i"),"$1 $2"))),A})}function P(A){return 
G(A,function(A,I){return A||(RegExp(I+"(?:-[\\d.]+/|(?: for [\\w-]+)?[ /-])([\\d.]+[^ ();/_-]*)","i").exec(g)||0)[1]||null})}if(p&&(p=[p]),/\bAndroid\b/.test(O)&&!m&&(D=/\bAndroid[^;]*;(.*?)(?:Build|\) AppleWebKit)\b/i.exec(g))&&(m=R(D[1]).replace(/^[a-z]{2}-[a-z]{2};\s*/i,"")||null),l&&!m?m=f([l]):l&&m&&(m=m.replace(RegExp("^("+y(l)+")[-_.\\s]","i"),l+" ").replace(RegExp("^("+y(l)+")[-_.]?(\\w)","i"),l+" $2")),(D=/\bGoogle TV\b/.exec(m))&&(m=D[0]),/\bSimulator\b/i.test(g)&&(m=(m?m+" ":"")+"Simulator"),"Opera Mini"==T&&/\bOPiOS\b/.test(g)&&d.push("running in Turbo/Uncompressed mode"),"IE"==T&&/\blike iPhone OS\b/.test(g)?(l=(D=A(g.replace(/like iPhone OS/,""))).manufacturer,m=D.product):/^iP/.test(m)?(T||(T="Safari"),O="iOS"+((D=/ OS ([\d_]+)/i.exec(g))?" "+D[1].replace(/_/g,"."):"")):"Konqueror"==T&&/^Linux\b/i.test(O)?O="Kubuntu":l&&"Google"!=l&&(/Chrome/.test(T)&&!/\bMobile Safari\b/i.test(g)||/\bVita\b/.test(m))||/\bAndroid\b/.test(O)&&/^Chrome/.test(T)&&/\bVersion\//i.test(g)?(T="Android Browser",O=/\bAndroid\b/.test(O)?O:"Android"):"Silk"==T?(/\bMobi/i.test(g)||(O="Android",d.unshift("desktop mode")),/Accelerated *= *true/i.test(g)&&d.unshift("accelerated")):"UC Browser"==T&&/\bUCWEB\b/.test(g)?d.push("speed mode"):"PaleMoon"==T&&(D=/\bFirefox\/([\d.]+)\b/.exec(g))?d.push("identifying as Firefox "+D[1]):"Firefox"==T&&(D=/\b(Mobile|Tablet|TV)\b/i.exec(g))?(O||(O="Firefox OS"),m||(m=D[1])):!T||(D=!/\bMinefield\b/i.test(g)&&/\b(?:Firefox|Safari)\b/.exec(T))?(T&&!m&&/[\/,]|^[^(]+?\)/.test(g.slice(g.indexOf(D+"/")+8))&&(T=null),(D=m||l||O)&&(m||l||/\b(?:Android|Symbian OS|Tablet OS|webOS)\b/.test(O))&&(T=/[a-z]+(?: Hat)?/i.exec(/\bAndroid\b/.test(O)?O:D)+" Browser")):"Electron"==T&&(D=(/\bChrome\/([\d.]+)\b/.exec(g)||0)[1])&&d.push("Chromium "+D),u||(u=P(["(?:Cloud9|CriOS|CrMo|Edge|Edg|EdgA|EdgiOS|FxiOS|HeadlessChrome|IEMobile|Iron|Opera ?Mini|OPiOS|OPR|Raven|SamsungBrowser|Silk(?!/[\\d.]+$)|UCBrowser|YaBrowser)","Version",y(T),"(?:Firefox|Minefield|NetFront)"])),(D=("iCab"==p&&parseFloat(u)>3?"WebKit":/\bOpera\b/.test(T)&&(/\bOPR\b/.test(g)?"Blink":"Presto"))||/\b(?:Midori|Nook|Safari)\b/i.test(g)&&!/^(?:Trident|EdgeHTML)$/.test(p)&&"WebKit"||!p&&/\bMSIE\b/i.test(g)&&("Mac OS"==O?"Tasman":"Trident")||"WebKit"==p&&/\bPlayStation\b(?! Vita\b)/i.test(T)&&"NetFront")&&(p=[D]),"IE"==T&&(D=(/; *(?:XBLWP|ZuneWP)(\d+)/i.exec(g)||0)[1])?(T+=" Mobile",O="Windows Phone "+(/\+$/.test(D)?D:D+".x"),d.unshift("desktop mode")):/\bWPDesktop\b/i.test(g)?(T="IE Mobile",O="Windows Phone 8.x",d.unshift("desktop mode"),u||(u=(/\brv:([\d.]+)/.exec(g)||0)[1])):"IE"!=T&&"Trident"==p&&(D=/\brv:([\d.]+)/.exec(g))&&(T&&d.push("identifying as "+T+(u?" 
"+u:"")),T="IE",u=D[1]),x){if(h="global",N=null!=(S=C)?typeof S[h]:"number",/^(?:boolean|number|string|undefined)$/.test(N)||"object"==N&&!S[h])a(D=C.runtime)==F?(T="Adobe AIR",O=D.flash.system.Capabilities.os):a(D=C.phantom)==J?(T="PhantomJS",u=(D=D.version||null)&&D.major+"."+D.minor+"."+D.patch):"number"==typeof c.documentMode&&(D=/\bTrident\/(\d+)/i.exec(g))?(u=[u,c.documentMode],(D=+D[1]+4)!=u[1]&&(d.push("IE "+u[1]+" mode"),p&&(p[1]=""),u[1]=D),u="IE"==T?String(u[1].toFixed(1)):u[0]):"number"==typeof c.documentMode&&/^(?:Chrome|Firefox)\b/.test(T)&&(d.push("masking as "+T+" "+u),T="IE",u="11.0",p=["Trident"],O="Windows");else if(U&&(Y=(D=U.lang.System).getProperty("os.arch"),O=O||D.getProperty("os.name")+" "+D.getProperty("os.version")),t){try{u=C.require("ringo/engine").version.join("."),T="RingoJS"}catch(A){(D=C.system)&&D.global.system==C.system&&(T="Narwhal",O||(O=D[0].os||null))}T||(T="Rhino")}else"object"==typeof C.process&&!C.process.browser&&(D=C.process)&&("object"==typeof D.versions&&("string"==typeof D.versions.electron?(d.push("Node "+D.versions.node),T="Electron",u=D.versions.electron):"string"==typeof D.versions.nw&&(d.push("Chromium "+u,"Node "+D.versions.node),T="NW.js",u=D.versions.nw)),T||(T="Node.js",Y=D.arch,O=D.platform,u=(u=/[\d.]+/.exec(D.version))?u[0]:null));O=O&&s(O)}if(u&&(D=/(?:[ab]|dp|pre|[ab]\d+pre)(?:\d+\+?)?$/i.exec(u)||/(?:alpha|beta)(?: ?\d)?/i.exec(g+";"+(x&&B.appMinorVersion))||/\bMinefield\b/i.test(g)&&"a")&&(b=/b/i.test(D)?"beta":"alpha",u=u.replace(RegExp(D+"\\+?$"),"")+("beta"==b?r:q)+(/\d+\+?/.exec(D)||"")),"Fennec"==T||"Firefox"==T&&/\b(?:Android|Firefox OS|KaiOS)\b/.test(O))T="Firefox Mobile";else if("Maxthon"==T&&u)u=u.replace(/\.[\d.]+/,".x");else if(/\bXbox\b/i.test(m))"Xbox 360"==m&&(O=null),"Xbox 360"==m&&/\bIEMobile\b/.test(g)&&d.unshift("mobile mode");else if(!/^(?:Chrome|IE|Opera)$/.test(T)&&(!T||m||/Browser|Mobi/.test(T))||"Windows CE"!=O&&!/Mobi/i.test(g))if("IE"==T&&x)try{null===C.external&&d.unshift("platform preview")}catch(A){d.unshift("embedded")}else(/\bBlackBerry\b/.test(m)||/\bBB10\b/.test(g))&&(D=(RegExp(m.replace(/ +/g," *")+"/([.\\d]+)","i").exec(g)||0)[1]||u)?(O=((D=[D,/BB10/.test(g)])[1]?(m=null,l="BlackBerry"):"Device Software")+" "+D[0],u=null):this!=w&&"Wii"!=m&&(x&&H||/Opera/.test(T)&&/\b(?:MSIE|Firefox)\b/i.test(g)||"Firefox"==T&&/\bOS X (?:\d+\.){2,}/.test(O)||"IE"==T&&(O&&!/^Win/.test(O)&&u>5.5||/\bWindows XP\b/.test(O)&&u>8||8==u&&!/\bTrident\b/.test(g)))&&!E.test(D=A.call(w,g.replace(E,"")+";"))&&D.name&&(D="ing as "+D.name+((D=D.version)?" 
"+D:""),E.test(T)?(/\bIE\b/.test(D)&&"Mac OS"==O&&(O=null),D="identify"+D):(D="mask"+D,T=e?s(e.replace(/([a-z])([A-Z])/g,"$1 $2")):"Opera",/\bIE\b/.test(D)&&(O=null),x||(u=null)),p=["Presto"],d.push(D));else T+=" Mobile";(D=(/\bAppleWebKit\/([\d.]+\+?)/i.exec(g)||0)[1])&&(D=[parseFloat(D.replace(/\.(\d)$/,".0$1")),D],"Safari"==T&&"+"==D[1].slice(-1)?(T="WebKit Nightly",b="alpha",u=D[1].slice(0,-1)):u!=D[1]&&u!=(D[2]=(/\bSafari\/([\d.]+\+?)/i.exec(g)||0)[1])||(u=null),D[1]=(/\b(?:Headless)?Chrome\/([\d.]+)/i.exec(g)||0)[1],537.36==D[0]&&537.36==D[2]&&parseFloat(D[1])>=28&&"WebKit"==p&&(p=["Blink"]),x&&(K||D[1])?(p&&(p[1]="like Chrome"),D=D[1]||((D=D[0])<530?1:D<532?2:D<532.05?3:D<533?4:D<534.03?5:D<534.07?6:D<534.1?7:D<534.13?8:D<534.16?9:D<534.24?10:D<534.3?11:D<535.01?12:D<535.02?"13+":D<535.07?15:D<535.11?16:D<535.19?17:D<536.05?18:D<536.1?19:D<537.01?20:D<537.11?"21+":D<537.13?23:D<537.18?24:D<537.24?25:D<537.36?26:"Blink"!=p?"27":"28")):(p&&(p[1]="like Safari"),D=(D=D[0])<400?1:D<500?2:D<526?3:D<533?4:D<534?"4+":D<535?5:D<537?6:D<538?7:D<601?8:D<602?9:D<604?10:D<606?11:D<608?12:"12"),p&&(p[1]+=" "+(D+="number"==typeof D?".x":/[.+]/.test(D)?"":"+")),"Safari"==T&&(!u||parseInt(u)>45)?u=D:"Chrome"==T&&/\bHeadlessChrome/i.test(g)&&d.unshift("headless")),"Opera"==T&&(D=/\bzbov|zvav$/.exec(O))?(T+=" ",d.unshift("desktop mode"),"zvav"==D?(T+="Mini",u=null):T+="Mobile",O=O.replace(RegExp(" *"+D+"$"),"")):"Safari"==T&&/\bChrome\b/.exec(p&&p[1])?(d.unshift("desktop mode"),T="Chrome Mobile",u=null,/\bOS X\b/.test(O)?(l="Apple",O="iOS 4.3+"):O=null):/\bSRWare Iron\b/.test(T)&&!u&&(u=P("Chrome")),u&&0==u.indexOf(D=/[\d.]+$/.exec(O))&&g.indexOf("/"+D+"-")>-1&&(O=R(O.replace(D,""))),O&&-1!=O.indexOf(T)&&!RegExp(T+" OS").test(O)&&(O=O.replace(RegExp(" *"+y(T)+" *"),"")),p&&!/\b(?:Avant|Nook)\b/.test(T)&&(/Browser|Lunascape|Maxthon/.test(T)||"Safari"!=T&&/^iOS/.test(O)&&/\bSafari\b/.test(p[1])||/^(?:Adobe|Arora|Breach|Midori|Opera|Phantom|Rekonq|Rock|Samsung Internet|Sleipnir|SRWare Iron|Vivaldi|Web)/.test(T)&&p[1])&&(D=p[p.length-1])&&d.push(D),d.length&&(d=["("+d.join("; ")+")"]),l&&m&&m.indexOf(l)<0&&d.push("on "+l),m&&d.push((/^on /.test(d[d.length-1])?"":"on ")+m),O&&(D=/ ([\d.+]+)$/.exec(O),o=D&&"/"==O.charAt(O.length-D[0].length-1),O={architecture:32,family:D&&!o?O.replace(D[0],""):O,version:D?D[1]:null,toString:function(){var A=this.version;return this.family+(A&&!o?" "+A:"")+(64==this.architecture?" 
64-bit":"")}}),(D=/\b(?:AMD|IA|Win|WOW|x86_|x)64\b/i.exec(Y))&&!/\bi686\b/i.test(Y)?(O&&(O.architecture=64,O.family=O.family.replace(RegExp(" *"+D),"")),T&&(/\bWOW64\b/i.test(g)||x&&/\w(?:86|32)$/.test(B.cpuClass||B.platform)&&!/\bWin64; x64\b/i.test(g))&&d.unshift("32-bit")):O&&/^OS X/.test(O.family)&&"Chrome"==T&&parseFloat(u)>=39&&(O.architecture=64),g||(g=null);var W={};return W.description=g,W.layout=p&&p[0],W.manufacturer=l,W.name=T,W.prerelease=b,W.product=m,W.ua=g,W.version=T&&u,W.os=O||{architecture:null,family:null,version:null,toString:function(){return"null"}},W.parse=A,W.toString=function(){return this.description||""},W.version&&d.unshift(u),W.name&&d.unshift(T),O&&T&&(O!=String(O).split(" ")[0]||O!=T.split(" ")[0]&&!m)&&d.push(m?"("+O+")":"on "+O),d.length&&(W.description=d.join(" ")),W}();"function"==typeof define&&"object"==typeof define.amd&&define.amd?(I.platform=S,define(function(){return S})):g&&C?w(S,function(A,I){g[I]=A}):I.platform=S}).call(void 0);const ED=new AD("Room");class iD extends hg.EventEmitter{static async createRoom({roomId:A,peerId:I,peerName:g,role:C="caller"}){return I||(I="111-111-1111".replace(/[018]/g,()=>(15&crypto.getRandomValues(new Uint8Array(1))[0]).toString(16))),A?new Room({peerId:I,roomId:A,peerName:g,role:C,url:"https://cp2p.centedge.io"}):{success:!1,reason:"roomId is required!"}}constructor({peerId:A,roomId:I,peerName:g,role:C,url:Q}){super(),this._closed=!1,this._roomId=I,this._socket=null,this._peerConnection=null,this._myDisplayName=g,this._myPeerId=A,this._myRole=C,this._localStream=null,this._remoteDisplayName=null,this._remotePeerId=null,this._remoteRole=null,this._remoteStream=null,this._canvas=null,this._imageData=null,this._imageWidth=1280,this._imageHeight=720,this._availableMediaInputs=null,this._userAgent={name:platform.name,version:platform.version,os:platform.os.family,description:platform.description},this._remoteMediaInputs=null,this._remoteUserAgent=null,this._statsCaptureInterval=null,this._localStats=null,this._remoteStats=null,this._otherPeerConnected=null,this._data={},this._url=Q,this.prepareForCall(),ED.debug("user agent is:%o",this._userAgent)}get imageData(){return this._imageData}get roomId(){return this._roomId}get localStream(){return this._localStream}get remoteStream(){return this._remoteStream}get myName(){return this._myDisplayName}set myName(A){this._myDisplayName=A}get myId(){return this._myPeerId}get myRole(){return this._myRole}get remoteName(){return this._remoteDisplayName}get remoteId(){return this._remotePeerId}get remoteRole(){return this._remoteRole}get imageHeight(){return this._imageHeight}get imageWidth(){return this._imageWidth}get userAgent(){return this._userAgent}get mediaInputs(){return this._availableMediaInputs}get remoteUserAgent(){return this._remoteUserAgent}get remoteMediaInputs(){return this._remoteMediaInputs}get localStats(){return this._localStats}get remoteStats(){return this._remoteStats}get pc(){return this._peerConnection}get data(){return this._data}set data(A){throw new Error("Can't set the whole of data object.")}setImageProps({width:A,height:I}){A&&I?(this._imageHeight=I,this._imageWidth=A):this.clientErrorHandler({reason:"Both width and height are required to set the new image properties. 
",error:null})}async prepareForCall(){let A=this;(async()=>{let A=[],I=[];return navigator.mediaDevices.enumerateDevices().then(function(g){let C=void 0!==g.find(A=>"videoinput"===A.kind),Q=void 0!==g.find(A=>"audioinput"===A.kind);return new Promise((g,B)=>{navigator.mediaDevices.getUserMedia({audio:Q,video:C}).then(C=>{navigator.mediaDevices.enumerateDevices().then(function(Q){Q.forEach(function(g){"audioinput"===g.kind&&I.push(g.label),"videoinput"===g.kind&&A.push(g.label)}),C.getTracks().forEach(A=>{A.stop()}),I.length>0&&A.length>0?g({videoLabels:A,audioLabels:I}):g({audioLabels:I,videoLabels:A})})}).catch(function(A){A.name,A.message,g({error:A.name,reason:A.message})})})}).catch(function(A){A.name,A.message})})().then(async I=>{if(I.error)switch(I.error){case"NotAllowedError":let A=`${I.error} : ${I.reason}!! You have denied the access to your Mic!! If you wish to join the room with video, please allow the camera access by clicking on the camera icon in the address bar Or by going to " chrome://settings/content#media-stream-mic " link in your chrome.`;this.clientErrorHandler({reason:A,error:null});break;case"NotFoundError":let g=`${I.error} : ${I.reason}!! No Microphone could be found!! If you have a camera or microphone in your computer but it is not able to detect them, please refresh your browser again or restart the browser after closing all the opened tabs.`;this.clientErrorHandler({reason:g,error:null});break;case"NotReadableError":let C=`${I.error} : ${I.reason}!! A hardware error occured at the OS OR browser Or Webpage level. If you have a microphone in your computer but it is not able to detect them, please refresh your browser again or restart the browser after closing all the opened tabs.`;this.clientErrorHandler({reason:C,error:null})}else I.audioLabels&&I.audioLabels.length>0&&I.videoLabels&&I.videoLabels.length>0&&(A._availableMediaInputs={audioInputs:I.audioLabels,videoInputs:I.videoLabels},ED.debug("available A/V inputs:%o",A.mediaInputs),await A.getUserMedia(),A.emit("mediaInputs"))}),navigator.mediaDevices.ondevicechange=async I=>{ED.info("Media devices changed!");let g=await(async()=>{let A=[],I=[];return new Promise((g,C)=>{navigator.mediaDevices.enumerateDevices().then(function(C){C.forEach(g=>{"audioinput"===g.kind&&I.push(g.label),"videoinput"===g.kind&&A.push(g.label)}),g({audioLabels:I,videoLabels:A})})})})();g.audioLabels&&g.audioLabels.length>0&&g.videoLabels&&g.videoLabels.length>0&&(A._availableMediaInputs={audioInputs:g.audioLabels,videoInputs:g.videoLabels},ED.debug("updated A/V inputs:%o",A.mediaInputs),A.emit("mediaInputs"),"client"===A.myRole&&A.pc&&(ED.info("Going to update the agent about the updated media devices list."),A.send({type:"mediaListUpdated",mediaInputs:A.mediaInputs})))}}send(A){this._closed||this._socket.send(A)}inform(A){if(this._closed)return;let I={type:"information",msg:A};this._socket.send(I)}async request({type:A,message:I}){const g=await this._socket.request({type:A,message:I});if(g)return g}gotMessageFromServer(A){switch(ED.debug("Got message:%o",A),A.type){case"login":this.handleLogin(A.details);break;case"offer":ED.debug("inside offer"),this.handleOffer({offer:A.offer,peer:A.peer,name:A.name});break;case"answer":ED.debug("inside answer"),this.handleAnswer(A);break;case"candidate":ED.debug("inside handle 
candidate"),this.handleCandidate(A.candidate);break;case"leave":this.handleLeave(),this.emit("peerHangUp");break;case"audioToggle":this.emit("audioToggle",{status:A.status,peer:"other"});break;case"videoToggle":this.emit("videoToggle",{status:A.status,peer:"other"});break;case"remoteStreamToggle":this.handleRemoteStreamToggle(A);break;case"chatMessage":this.emit("chatMessage",A.message);break;case"imageCapture":this.captureImage("local");break;case"mediaChange":this.changeRemoteMediaSource(A);break;case"requestStats":this.sendRequestedstats();break;case"statsFetched":this._remoteStats=A.stats,this.emit("remoteStats");break;case"information":this.emit("information",A.msg);break;case"iceRestart":this.handleIceRestart(A);break;case"iceRestarted":this.handleIceRestartResponse(A);break;case"nameUpdate":this._remoteDisplayName=A.name,this.emit("peerName");break;case"mediaListUpdated":this._remoteMediaInputs=A.mediaInputs,this.emit("remoteMediaInputs");break;case"peerLeft":this.pc||(this._remoteDisplayName=null,this._remotePeerId=null,this._remoteRole=null,this._otherPeerConnected=!1,this.emit("peerJoin","None"),alert("Other person disconnected from the room"));break;case"error":this.emit("serverError",A.details)}}async getUserMedia(){ED.debug("Inside getusermedia"),this._localStream?ED.debug("Local stream already acquired! Waiting for call!"):navigator.mediaDevices.getUserMedia?navigator.mediaDevices.getUserMedia(CD).then(A=>this.getUserMediaSuccess(A)).catch(A=>this.clientErrorHandler({reason:"Error while acquiring camera/mic",error:A})):alert("Your browser does not support getUserMedia API")}getUserMediaSuccess(A){let I=this;ED.debug("Inside getUserMediaSuccess"),this._localStream=A,this.emit("localStream"),this._socket=new BD({url:this._url,roomId:this._roomId,peerId:this._myPeerId,peerName:this._myDisplayName,role:this._myRole}),this._socket.on("message",A=>this.gotMessageFromServer(A)),this._socket.on("connected",async()=>{I.pc&&(ED.debug("Socket reconnection happened after a disconnection, ICEState:%s",I._peerConnection.iceConnectionState),"failed"!==I._peerConnection.iceConnectionState&&"disconnected"!==I._peerConnection.iceConnectionState||(ED.debug("Going to restart ICE as ICE is not in connected state!!"),await I.restartICE()))})}handleAudioToggle(){let A=this._localStream.getAudioTracks()[0];A.enabled=!A.enabled,this.send({type:"audioToggle",status:A.enabled}),this.emit("audioToggle",{status:A.enabled,peer:"self"})}handleVideoToggle(){let A=this._localStream.getVideoTracks()[0];A.enabled=!A.enabled,this.send({type:"videoToggle",status:A.enabled}),this.emit("videoToggle",{status:A.enabled,peer:"self"})}handleVideoToggleNew(){let A=this;if(this._peerConnection){const I=this._peerConnection.getSenders().find(function(A){return A.track&&"video"==A.track.kind});if(I){ED.debug("found sender:%o",I),this._peerConnection.removeTrack(I);let A=this._localStream.getVideoTracks()[0];A&&(A.stop(),this._localStream.removeTrack(A)),this.send({type:"videoToggle",status:!1}),this.emit("videoToggle",{status:!1,peer:"self"})}else ED.debug("sender not found. 
Going to create a new video track."),navigator.mediaDevices.getUserMedia?navigator.mediaDevices.getUserMedia({...CD,audio:!1}).then(I=>{let g=I.getVideoTracks()[0];A._localStream.addTrack(g),A._peerConnection.addTrack(g),A.send({type:"videoToggle",status:!0}),A.emit("localStream"),A.emit("videoToggle",{status:!0,peer:"self"})}).catch(A=>this.clientErrorHandler({reason:"Error while acquiring camera/mic",error:A})):alert("Your browser does not support getUserMedia API")}else ED.warn("Peer connection not found!")}handleRemoteAudioToggle(){this.send({type:"remoteStreamToggle",streamType:"audio"})}handleRemoteVideoToggle(){this.send({type:"remoteStreamToggle",streamType:"video"})}handleRemoteStreamToggle(A){ED.debug("inside remote stream toggle"),"audio"===A.streamType?this.handleAudioToggle():"video"===A.streamType?this.handleVideoToggle():ED.warn("Invalid stream type!")}sendChatMessage(A){A&&" "!==A?this.send({type:"chatMessage",message:A}):this.clientErrorHandler({reason:"Chat message need to have a valid value!",error:null})}async informPeer(){this.remoteId&&this.send({type:"nameUpdate",name:this.myName})}handleLogin(A){A&&(ED.debug("All available users:%o",A),this._remoteDisplayName=A.displayName,this._remotePeerId=A.peerId,this._remoteRole=A.role),this._otherPeerConnected=!0,this.emit("peerJoin",this._remoteDisplayName)}async gotRemoteTrack(A){ED.debug("inside got remote track:%o",A),A.streams&&A.streams[0]?(ED.debug("Got a remote stream"),this._remoteStream=A.streams[0]):(this._remoteStream=new MediaStream,this._remoteStream.addTrack(A.track),ED.debug("Got a remote track")),this.emit("remoteStream")}_onICeCandidate(A){ED.debug("onicecandidate",A.candidate),A.candidate&&this.send({type:"candidate",candidate:A.candidate,peer:this._remotePeerId})}handleNegotiationNeededEvent=()=>{ED.debug("handleNegotiationNeededEvent() | needed to negotiate a new negotiation"),this.restartICE()};handleRemoveTrackEvent=()=>{ED.debug("handleRemoveTrackEvent() | Track removed by remote peer. Need to handle UI locally."),this.emit("videoToggle",{status:!1,peer:"other"})};async createPeerOffer(A){let I,g=this;I=await this._socket.request({type:"fetchIceServers",message:{}}),this._peerConnection=new RTCPeerConnection({iceServers:I}),ED.debug("connection state in createPeeroffer():%s",this._peerConnection.connectionState),this._peerConnection.onicecandidate=A=>{this._onICeCandidate(A)},this._localStream.getTracks().forEach(A=>this._peerConnection.addTrack(A,this._localStream));const C=await this._peerConnection.createOffer().catch(function(A){alert("Error when creating an offer",A),ED.error("Error when creating an offer:%o",A)});await this._peerConnection.setLocalDescription(C),this.send({type:"offer",offer:g._peerConnection.localDescription,peer:g._remotePeerId,name:g.myName}),this._peerConnection.ontrack=A=>g.gotRemoteTrack(A),this._peerConnection.addEventListener("iceconnectionstatechange",async A=>{ED.debug("Current ICEState:%s",g._peerConnection.iceConnectionState),"failed"!==g._peerConnection.iceConnectionState&&"disconnected"!==g._peerConnection.iceConnectionState||(ED.debug("Ice connection failed!! 
with socket connection status:%s",g._socket.connectionStatus),g.emit("iceFailure"))}),this._peerConnection.onnegotiationneeded=()=>this.handleNegotiationNeededEvent(),this._peerConnection.onremovetrack=()=>this.handleRemoveTrackEvent()}async handleOffer({peer:A,name:I,offer:g}){let C=this;const Q=await this._socket.request({type:"fetchIceServers",message:{}});this._peerConnection=new RTCPeerConnection({iceServers:Q}),ED.debug("Peer connection in handle offer is:%s",this._peerConnection),this._remoteDisplayName=I,this._remotePeerId=A,this._peerConnection.setRemoteDescription(new RTCSessionDescription(g)),this._localStream.getTracks().forEach(A=>this._peerConnection.addTrack(A,this._localStream)),this._peerConnection.createAnswer().then(function(A){return C._peerConnection.setLocalDescription(A)}).then(function(){ED.debug("Ice already completed. sending answer!!"),C.send({type:"answer",answer:C._peerConnection.localDescription,peer:C._remotePeerId,mediaInputs:C.mediaInputs,userAgent:C.userAgent})}).catch(function(A){alert("Error when creating an answer"),ED.error("Error while creating answer:%o",A)}),this._peerConnection.ontrack=A=>C.gotRemoteTrack(A),this._peerConnection.onicecandidate=A=>{this._onICeCandidate(A)},this._peerConnection.addEventListener("iceconnectionstatechange",async A=>{ED.debug("Current ICEState:%s",C._peerConnection.iceConnectionState),"failed"!==C._peerConnection.iceConnectionState&&"disconnected"!==C._peerConnection.iceConnectionState||(ED.debug("Ice connection failed!! with socket connection status:%s",C._socket.connectionStatus),C.emit("iceFailure"))}),this._peerConnection.onnegotiationneeded=()=>this.handleNegotiationNeededEvent(),this._peerConnection.onremovetrack=()=>this.handleRemoveTrackEvent(),this.emit("offerReceived")}handleAnswer(A){ED.debug("answer:%o ",A),this._peerConnection.setRemoteDescription(new RTCSessionDescription(A.answer)),this._remoteMediaInputs=A.mediaInputs,this._remoteUserAgent=A.userAgent,this.emit("remoteMediaInputs")}async handleCandidate(A){this.pc?(ED.debug("Peerconnection available, Inside handle candidate for remote icecandidates"),this._peerConnection.addIceCandidate(new RTCIceCandidate(A)).catch(A=>{ED.error("Error while adding iceCandidate",A)})):ED.warn("Candidate arrived before peer connection instantiation:%o",A)}async restartICE(){const A=await this._peerConnection.createOffer({iceRestart:!0});await this._peerConnection.setLocalDescription(A),this.send({type:"iceRestart",offer:A})}async handleIceRestart(A){ED.debug("Ice restart message received!!"),this._peerConnection.setRemoteDescription(A.offer);const I=await this._peerConnection.createAnswer();await this._peerConnection.setLocalDescription(I),this.send({type:"iceRestarted",answer:I})}async handleIceRestartResponse(A){ED.debug("Ice restart message response received!!"),this._peerConnection.setRemoteDescription(A.answer)}async informRoomLeave(){this.send({type:"leave",peer:this.remoteId}),await this.handleLeave()}async 
handleLeave(){this._peerConnection?(this._peerConnection.ontrack=null,this._peerConnection.onremovetrack=null,this._peerConnection.onremovestream=null,this._peerConnection.onicecandidate=null,this._peerConnection.oniceconnectionstatechange=null,this._peerConnection.onsignalingstatechange=null,this._peerConnection.onicegatheringstatechange=null,this._peerConnection.onnegotiationneeded=null,this._localStream&&this._localStream.getTracks().forEach(A=>A.stop()),this._remoteStream&&this._remoteStream.getTracks().forEach(A=>A.stop()),this._peerConnection.close(),this._peerConnection=null,this._remoteDisplayName=null,this._remotePeerId=null,this._remoteStream=null,this._localStream=null,this._canvas=null,this._imageData=null,this._imageWidth=640,this._imageHeight=480,this._socket.close()):ED.error("No peerconnection object found!")}async changeRemoteMediaSource(A){let{label:I,mediaType:g}=A;!I||"video"!==g&&"audio"!==g?this.send({type:"error",details:{reason:"Can't change remote media source as either media label or media type is not available!"}}):this.handleReplaceTrack({label:I,mediaType:g})}async handleReplaceTrack({label:A,mediaType:I}){try{let g,C,Q;const B=await(A=>new Promise(I=>{navigator.mediaDevices.enumerateDevices().then(function(g){g.forEach(function(g){g.label===A&&I(g.deviceId)}),g[0].label||I({error:"noLabel",reason:"Device label names not found!"})}).catch(function(A){ConsoleHelper(A.name+": "+A.message),I({error:A.name,reason:A.message})})}))(A);"video"===I?(gD.video.deviceId.exact=B,Q=gD,g=this._localStream.getVideoTracks()[0]):"audio"===I?(ID.audio.deviceId.exact=B,Q=ID,g=this._localStream.getAudioTracks()[0]):ED.error("Unknown media type:%s",I);let E=g.enabled;g.stop(),ED.debug("selected constraints are:%o",Q);let i=await navigator.mediaDevices.getUserMedia(Q);if(ED.debug("new stream is:%o",i),"video"===I?C=i.getVideoTracks()[0]:"audio"===I&&(C=i.getAudioTracks()[0]),C.enabled=E,this._peerConnection){const A=this._peerConnection.getSenders().find(function(A){return A.track.kind==C.kind});ED.debug("found sender:%o",A),A.replaceTrack(C)}this._localStream.removeTrack(g),this._localStream.addTrack(C)}catch(A){ED.error("Error while replace media track:%o",A),this.clientErrorHandler({reason:"Error while replacing media track with new source",error:A})}}async captureImage(A){const I=document.createElement("canvas"),g=I.getContext("2d"),C=this._imageWidth,Q=this._imageHeight;ED.debug("height:%s and width:%s",C,Q),I.width=C,I.height=Q,"local"===A?g.drawImage(localVideo,0,0,C,Q):g.drawImage(remoteVideo,0,0,C,Q);let B=I.toDataURL("image/png");this._canvas=I,this._imageData=B,this.emit("imageCaptured")}async clearImage(){const A=this._canvas,I=A.getContext("2d");I.fillStyle="#AAA",I.fillRect(0,0,A.width,A.height);let g=A.toDataURL("image/png");this._imageData=g}captureRemoteImage(){this.send({type:"imageCapture"})}remoteCamChange(A){A?this.send({type:"mediaChange",mediaType:"video",label:A}):this.clientErrorHandler({reason:"A valid camera name must be selected for to change the remote camera!",error:null})}remoteMicChange(A){A?this.send({type:"mediaChange",mediaType:"audio",label:A}):this.clientErrorHandler({reason:"A valid mic name must be selected for to change the remote mic!",error:null})}requestStats(){this.send({type:"requestStats"})}async captureStats(){let A=this;return new Promise(I=>{this._peerConnection.getStats(null).then(g=>{let 
C={};g.forEach(A=>{["inbound-rtp","outbound-rtp","remote-inbound-rtp","remote-outbound-rtp","local-candidate","remote-candidate","candidate-pair"].includes(A.type)&&(C={...C,[`${A.type}`]:A})}),A._localStats=C,I(C)})})}displayStats({stats:A,element:I}){let g="";Object.values(A).forEach(A=>{g+=`<h2>Report: ${A.type}</h2>\n<strong>ID:</strong> ${A.id}<br>\n<strong>Timestamp:</strong> ${A.timestamp}<br>\n`,Object.keys(A).forEach(I=>{"id"!==I&&"timestamp"!==I&&"type"!==I&&(g+=`<strong>${I}:</strong> ${A[I]}<br>\n`)})}),document.querySelector(I).innerHTML=g}async sendRequestedstats(){let A=await this.captureStats();ED.debug("Local stats are:%o",A),this.send({type:"statsFetched",stats:A})}clientErrorHandler({reason:A,error:I}){this.emit("clientError",{reason:A,error:I})}copyLink(A){navigator.clipboard.writeText(A).then(function(){ED.debug("Async: Copying to clipboard was successful!"),alert("Link copied!!")},function(A){ED.error("Async: Could not copy text:%o ",A)})}}const DD={JsSdk_v1:lE,JsSdk:_i,P2pSdk:iD};export{_i as JsSdk,lE as JsSdk_v1,iD as P2pSdk,DD as default};
225
+ `,{width:D,height:n}=B,o=D/n,s=Ci(A,A.VERTEX_SHADER,E),w=Ci(A,A.FRAGMENT_SHADER,i),a=gi(A,s,w,I,g),y=A.getUniformLocation(a,"u_backgroundScale"),G=A.getUniformLocation(a,"u_backgroundOffset"),R=A.getUniformLocation(a,"u_inputFrame"),S=A.getUniformLocation(a,"u_personMask"),h=A.getUniformLocation(a,"u_background"),N=A.getUniformLocation(a,"u_coverage"),K=A.getUniformLocation(a,"u_lightWrapping"),M=A.getUniformLocation(a,"u_blendMode");A.useProgram(a),A.uniform2f(y,1,1),A.uniform2f(G,0,0),A.uniform1i(R,0),A.uniform1i(S,1),A.uniform2f(N,0,1),A.uniform1f(K,0),A.uniform1f(M,0);let F=null;function L(I){F=Qi(A,A.RGBA8,I.naturalWidth,I.naturalHeight,A.LINEAR,A.LINEAR),A.texSubImage2D(A.TEXTURE_2D,0,0,0,I.naturalWidth,I.naturalHeight,A.RGBA,A.UNSIGNED_BYTE,I);let g=0,C=0,Q=I.naturalWidth,B=I.naturalHeight;Q/B<o?(B=Q/o,C=(I.naturalHeight-B)/2):(Q=B*o,g=(I.naturalWidth-Q)/2);const E=Q/I.naturalWidth,i=B/I.naturalHeight;g/=I.naturalWidth,C/=I.naturalHeight,A.uniform2f(y,E,i),A.uniform2f(G,g,C)}return Q?.complete?L(Q):Q&&(Q.onload=()=>{L(Q)}),{render:function(){A.viewport(0,0,D,n),A.useProgram(a),A.activeTexture(A.TEXTURE1),A.bindTexture(A.TEXTURE_2D,C),null!==F&&(A.activeTexture(A.TEXTURE2),A.bindTexture(A.TEXTURE_2D,F),A.uniform1i(h,2)),A.bindFramebuffer(A.FRAMEBUFFER,null),A.drawArrays(A.TRIANGLE_STRIP,0,4)},updateCoverage:function(I){A.useProgram(a),A.uniform2f(N,I[0],I[1])},updateLightWrapping:function(I){A.useProgram(a),A.uniform1f(K,I)},updateBlendMode:function(I){A.useProgram(a),A.uniform1f(M,"screen"===I?0:1)},cleanUp:function(){A.deleteTexture(F),A.deleteProgram(a),A.deleteShader(w),A.deleteShader(s)}}}(a,R,S,K,I,Q);return{render:async function(){a.activeTexture(a.TEXTURE0),a.bindTexture(a.TEXTURE_2D,h),a.texImage2D(a.TEXTURE_2D,0,a.RGBA,a.RGBA,a.UNSIGNED_BYTE,A.htmlElement),a.bindVertexArray(G),await M.render(),i(),B._runInference(),i(),F.render(),L.render(),k.render()},updatePostProcessingConfig:function(A){if(L.updateSigmaSpace(A.jointBilateralFilter.sigmaSpace),L.updateSigmaColor(A.jointBilateralFilter.sigmaColor),"image"===g.type){const I=k;I.updateCoverage(A.coverage),I.updateLightWrapping(A.lightWrapping),I.updateBlendMode(A.blendMode)}else if("blur"===g.type)k.updateCoverage(A.coverage);else{const A=k;A.updateCoverage([0,.9999]),A.updateLightWrapping(0)}},cleanUp:function(){k.cleanUp(),L.cleanUp(),F.cleanUp(),M.cleanUp(),a.deleteTexture(K),a.deleteTexture(N),a.deleteTexture(h),a.deleteBuffer(S),a.deleteBuffer(R),a.deleteVertexArray(G),a.deleteShader(y)}}}class Ei{constructor(){this.pipeline=null,this.backgroundImageRef=null,this.canvasRef=null,this.fps=0,this.durations=[],this.isRunning=!1,this.timerWorker=null,this.renderTimeoutId=null,this.previousTime=0,this.beginTime=0,this.eventCount=0,this.frameCount=0,this.frameDurations=[]}async initialize(A,I,g,C,Q,B=null,E=null){this.stop(),this.backgroundImageRef=B,this.canvasRef=E;const i=1e3/g.targetFps;this.previousTime=0,this.beginTime=0,this.eventCount=0,this.frameCount=0,this.frameDurations=[],this.timerWorker=function(){const A=new Map,I=new Blob(["\n const timeoutIds = new Map();\n \n addEventListener('message', (event) => {\n if (event.data.timeoutMs !== undefined) {\n const timeoutId = setTimeout(() => {\n postMessage({ callbackId: event.data.callbackId });\n timeoutIds.delete(event.data.callbackId);\n }, event.data.timeoutMs);\n timeoutIds.set(event.data.callbackId, timeoutId);\n } else {\n const timeoutId = timeoutIds.get(event.data.callbackId);\n if (timeoutId !== undefined) {\n clearTimeout(timeoutId);\n 
timeoutIds.delete(event.data.callbackId);\n }\n }\n });\n "],{type:"application/javascript"}),g=new Worker(URL.createObjectURL(I));g.onmessage=I=>{const g=A.get(I.data.callbackId);g&&(A.delete(I.data.callbackId),g())};let C=1;return{setTimeout:function(I,Q=0){const B=C++;return A.set(B,I),g.postMessage({callbackId:B,timeoutMs:Q}),B},clearTimeout:function(I){A.has(I)&&(g.postMessage({callbackId:I}),A.delete(I))},terminate:function(){A.clear(),g.terminate()}}}(),this.pipeline="webgl2"===g.pipeline?Bi(A,this.backgroundImageRef,I,g,this.canvasRef,Q,this.timerWorker,this.addFrameEvent.bind(this)):function(A,I,g,C,Q,B,E){const i=C.getContext("2d"),[D,n]=XE[g.inputResolution],o=D*n,s=new ImageData(D,n),w=document.createElement("canvas");w.width=D,w.height=n;const a=w.getContext("2d"),y=B._getInputMemoryOffset()/4,G=B._getOutputMemoryOffset()/4;let R;return{render:async function(){"none"!==I.type&&function(){if(a.drawImage(A.htmlElement,0,0,A.width,A.height,0,0,D,n),"meet"===g.model||"mlkit"===g.model){const A=a.getImageData(0,0,D,n);for(let I=0;I<o;I++)B.HEAPF32[y+3*I]=A.data[4*I]/255,B.HEAPF32[y+3*I+1]=A.data[4*I+1]/255,B.HEAPF32[y+3*I+2]=A.data[4*I+2]/255}}(),E(),"none"!==I.type&&("bodyPix"===g.model?await async function(){const A=await Q.segmentPerson(w);for(let I=0;I<o;I++)s.data[4*I+3]=A.data[I]?255:0;a.putImageData(s,0,0)}():function(){B._runInference();for(let A=0;A<o;A++)if("meet"===g.model){const I=B.HEAPF32[G+2*A],g=B.HEAPF32[G+2*A+1],C=Math.max(I,g),Q=Math.exp(I-C),E=Math.exp(g-C);s.data[4*A+3]=255*E/(Q+E)}else if("mlkit"===g.model){const I=B.HEAPF32[G+A];s.data[4*A+3]=255*I}a.putImageData(s,0,0)}()),E(),i.globalCompositeOperation="copy",i.filter="none",R?.smoothSegmentationMask&&("blur"===I.type?i.filter="blur(8px)":"image"===I.type&&(i.filter="blur(4px)")),"none"!==I.type&&(i.drawImage(w,0,0,D,n,0,0,A.width,A.height),i.globalCompositeOperation="source-in",i.filter="none"),i.drawImage(A.htmlElement,0,0),"blur"===I.type&&(i.globalCompositeOperation="destination-over",i.filter="blur(8px)",i.drawImage(A.htmlElement,0,0))},updatePostProcessingConfig:function(A){R=A},cleanUp:function(){}}}(A,I,g,this.canvasRef,C,Q,this.addFrameEvent.bind(this));const D=async()=>{if(!this.isRunning)return;const A=performance.now();this.beginFrame(),await this.pipeline.render(),this.endFrame(),this.renderTimeoutId=this.timerWorker.setTimeout(D,Math.max(0,i-(performance.now()-A)))};return this.isRunning=!0,D(),{pipeline:this.pipeline,backgroundImageRef:this.backgroundImageRef,canvasRef:this.canvasRef,fps:this.fps,durations:this.getProcessingDurations()}}beginFrame(){this.beginTime=Date.now()}addFrameEvent(){const A=Date.now();this.frameDurations[this.eventCount]=A-this.beginTime,this.beginTime=A,this.eventCount++}endFrame(){const A=Date.now();this.frameDurations[this.eventCount]=A-this.beginTime,this.frameCount++,A>=this.previousTime+1e3&&(this.fps=1e3*this.frameCount/(A-this.previousTime),this.durations=[...this.frameDurations],this.previousTime=A,this.frameCount=0),this.eventCount=0}getProcessingDurations(){return 
this.frameDurations.length>=3?[this.frameDurations[0]||0,this.frameDurations[1]||0,this.frameDurations[2]||0]:[0,0,0]}stop(){this.isRunning=!1,this.timerWorker&&this.renderTimeoutId&&this.timerWorker.clearTimeout(this.renderTimeoutId),this.timerWorker&&(this.timerWorker.terminate(),this.timerWorker=null),this.pipeline&&(this.pipeline.cleanUp(),this.pipeline=null),this.renderTimeoutId=null}getState(){return{pipeline:this.pipeline,backgroundImageRef:this.backgroundImageRef,canvasRef:this.canvasRef,fps:this.fps,durations:this.getProcessingDurations(),isRunning:this.isRunning}}getFps(){return this.fps}getDurations(){return this.durations}isActive(){return this.isRunning&&null!==this.pipeline}async updateConfig(A,I,g,C,Q){return this.initialize(A,I,g,C,Q,this.backgroundImageRef,this.canvasRef)}destroy(){this.stop(),this.backgroundImageRef=null,this.canvasRef=null,this.fps=0,this.durations=[]}}const ii=new Gg("Room"),Di=new Set(["240p","360p","480p","720p","1080p","1440p","2160p"]);function ni(A){return!!Array.isArray(A)&&A.every(A=>Di.has(A))}const oi={audio:!1,video:{displaySurface:"monitor",logicalSurface:!0,cursor:!0,width:{max:1920},height:{max:1080},frameRate:{max:30}}},si={audio:{deviceId:{exact:void 0}},video:!1},wi={audio:!1,video:{deviceId:{exact:void 0},width:{min:320,ideal:1280,max:1280},height:{min:240,ideal:720,max:720},aspectRatio:1.777777778,frameRate:{min:15,max:30}}},ai=new Gg("utils-verifyFiles"),yi=["mp4"];function Gi(A,I){try{return new URL(A).pathname.split(".").pop().toLowerCase()===I.toLowerCase()}catch{return!1}}const Ri=new TextEncoder,Si=new TextDecoder;function hi(A){if(Uint8Array.fromBase64)return Uint8Array.fromBase64("string"==typeof A?A:Si.decode(A),{alphabet:"base64url"});let I=A;I instanceof Uint8Array&&(I=Si.decode(I)),I=I.replace(/-/g,"+").replace(/_/g,"/").replace(/\s/g,"");try{return function(A){if(Uint8Array.fromBase64)return Uint8Array.fromBase64(A);const I=atob(A),g=new Uint8Array(I.length);for(let A=0;A<I.length;A++)g[A]=I.charCodeAt(A);return g}(I)}catch{throw new TypeError("The input to be decoded is not correctly encoded.")}}class Ni extends Error{static code="ERR_JOSE_GENERIC";code="ERR_JOSE_GENERIC";constructor(A,I){super(A,I),this.name=this.constructor.name,Error.captureStackTrace?.(this,this.constructor)}}class Ki extends Ni{static code="ERR_JWT_CLAIM_VALIDATION_FAILED";code="ERR_JWT_CLAIM_VALIDATION_FAILED";claim;reason;payload;constructor(A,I,g="unspecified",C="unspecified"){super(A,{cause:{claim:g,reason:C,payload:I}}),this.claim=g,this.reason=C,this.payload=I}}class Mi extends Ni{static code="ERR_JWT_EXPIRED";code="ERR_JWT_EXPIRED";claim;reason;payload;constructor(A,I,g="unspecified",C="unspecified"){super(A,{cause:{claim:g,reason:C,payload:I}}),this.claim=g,this.reason=C,this.payload=I}}class Fi extends Ni{static code="ERR_JOSE_ALG_NOT_ALLOWED";code="ERR_JOSE_ALG_NOT_ALLOWED"}class Li extends Ni{static code="ERR_JOSE_NOT_SUPPORTED";code="ERR_JOSE_NOT_SUPPORTED"}class ki extends Ni{static code="ERR_JWS_INVALID";code="ERR_JWS_INVALID"}class Ji extends Ni{static code="ERR_JWT_INVALID";code="ERR_JWT_INVALID"}class Ui extends Ni{static code="ERR_JWS_SIGNATURE_VERIFICATION_FAILED";code="ERR_JWS_SIGNATURE_VERIFICATION_FAILED";constructor(A="signature verification failed",I){super(A,I)}}function ti(A,I="algorithm.name"){return new TypeError(`CryptoKey does not support this operation, its ${I} must be ${A}`)}function qi(A,I){return A.name===I}function ri(A){return parseInt(A.name.slice(4),10)}function 
ci(A,I,...g){if((g=g.filter(Boolean)).length>2){const I=g.pop();A+=`one of type ${g.join(", ")}, or ${I}.`}else 2===g.length?A+=`one of type ${g[0]} or ${g[1]}.`:A+=`of type ${g[0]}.`;return null==I?A+=` Received ${I}`:"function"==typeof I&&I.name?A+=` Received function ${I.name}`:"object"==typeof I&&null!=I&&I.constructor?.name&&(A+=` Received an instance of ${I.constructor.name}`),A}function Hi(A,I,...g){return ci(`Key for the ${A} algorithm must be `,I,...g)}function ei(A){return"CryptoKey"===A?.[Symbol.toStringTag]}function Yi(A){return"KeyObject"===A?.[Symbol.toStringTag]}const di=A=>ei(A)||Yi(A),bi=A=>{if("object"!=typeof(I=A)||null===I||"[object Object]"!==Object.prototype.toString.call(A))return!1;var I;if(null===Object.getPrototypeOf(A))return!0;let g=A;for(;null!==Object.getPrototypeOf(g);)g=Object.getPrototypeOf(g);return Object.getPrototypeOf(A)===g};function xi(A){return bi(A)&&"string"==typeof A.kty}let ui;const pi=async(A,I,g,C=!1)=>{ui||=new WeakMap;let Q=ui.get(A);if(Q?.[g])return Q[g];const B=await(async A=>{if(!A.alg)throw new TypeError('"alg" argument is required when "jwk.alg" is not present');const{algorithm:I,keyUsages:g}=function(A){let I,g;switch(A.kty){case"AKP":switch(A.alg){case"ML-DSA-44":case"ML-DSA-65":case"ML-DSA-87":I={name:A.alg},g=A.priv?["sign"]:["verify"];break;default:throw new Li('Invalid or unsupported JWK "alg" (Algorithm) Parameter value')}break;case"RSA":switch(A.alg){case"PS256":case"PS384":case"PS512":I={name:"RSA-PSS",hash:`SHA-${A.alg.slice(-3)}`},g=A.d?["sign"]:["verify"];break;case"RS256":case"RS384":case"RS512":I={name:"RSASSA-PKCS1-v1_5",hash:`SHA-${A.alg.slice(-3)}`},g=A.d?["sign"]:["verify"];break;case"RSA-OAEP":case"RSA-OAEP-256":case"RSA-OAEP-384":case"RSA-OAEP-512":I={name:"RSA-OAEP",hash:`SHA-${parseInt(A.alg.slice(-3),10)||1}`},g=A.d?["decrypt","unwrapKey"]:["encrypt","wrapKey"];break;default:throw new Li('Invalid or unsupported JWK "alg" (Algorithm) Parameter value')}break;case"EC":switch(A.alg){case"ES256":I={name:"ECDSA",namedCurve:"P-256"},g=A.d?["sign"]:["verify"];break;case"ES384":I={name:"ECDSA",namedCurve:"P-384"},g=A.d?["sign"]:["verify"];break;case"ES512":I={name:"ECDSA",namedCurve:"P-521"},g=A.d?["sign"]:["verify"];break;case"ECDH-ES":case"ECDH-ES+A128KW":case"ECDH-ES+A192KW":case"ECDH-ES+A256KW":I={name:"ECDH",namedCurve:A.crv},g=A.d?["deriveBits"]:[];break;default:throw new Li('Invalid or unsupported JWK "alg" (Algorithm) Parameter value')}break;case"OKP":switch(A.alg){case"Ed25519":case"EdDSA":I={name:"Ed25519"},g=A.d?["sign"]:["verify"];break;case"ECDH-ES":case"ECDH-ES+A128KW":case"ECDH-ES+A192KW":case"ECDH-ES+A256KW":I={name:A.crv},g=A.d?["deriveBits"]:[];break;default:throw new Li('Invalid or unsupported JWK "alg" (Algorithm) Parameter value')}break;default:throw new Li('Invalid or unsupported JWK "kty" (Key Type) Parameter value')}return{algorithm:I,keyUsages:g}}(A),C={...A};return"AKP"!==C.kty&&delete C.alg,delete C.use,crypto.subtle.importKey("jwk",C,I,A.ext??(!A.d&&!A.priv),A.key_ops??g)})({...I,alg:g});return C&&Object.freeze(A),Q?Q[g]=B:ui.set(A,{[g]:B}),B},Ti=A=>A?.[Symbol.toStringTag],mi=(A,I,g)=>{if(void 0!==I.use){let A;switch(g){case"sign":case"verify":A="sig";break;case"encrypt":case"decrypt":A="enc"}if(I.use!==A)throw new TypeError(`Invalid key for this operation, its "use" must be "${A}" when present`)}if(void 0!==I.alg&&I.alg!==A)throw new TypeError(`Invalid key for this operation, its "alg" must be "${A}" when present`);if(Array.isArray(I.key_ops)){let C;switch(!0){case"verify"===g:case"dir"===A:case 
A.includes("CBC-HS"):C=g;break;case A.startsWith("PBES2"):C="deriveBits";break;case/^A\d{3}(?:GCM)?(?:KW)?$/.test(A):C=!A.includes("GCM")&&A.endsWith("KW")?"unwrapKey":g;break;case"encrypt"===g:C="wrapKey";break;case"decrypt"===g:C=A.startsWith("RSA")?"unwrapKey":"deriveBits"}if(C&&!1===I.key_ops?.includes?.(C))throw new TypeError(`Invalid key for this operation, its "key_ops" must include "${C}" when present`)}return!0},li=async(A,I,g)=>{if(I instanceof Uint8Array){if(!A.startsWith("HS"))throw new TypeError(ci("Key must be ",I,"CryptoKey","KeyObject","JSON Web Key"));return crypto.subtle.importKey("raw",I,{hash:`SHA-${A.slice(-3)}`,name:"HMAC"},!1,[g])}return function(A,I,g){switch(I){case"HS256":case"HS384":case"HS512":{if(!qi(A.algorithm,"HMAC"))throw ti("HMAC");const g=parseInt(I.slice(2),10);if(ri(A.algorithm.hash)!==g)throw ti(`SHA-${g}`,"algorithm.hash");break}case"RS256":case"RS384":case"RS512":{if(!qi(A.algorithm,"RSASSA-PKCS1-v1_5"))throw ti("RSASSA-PKCS1-v1_5");const g=parseInt(I.slice(2),10);if(ri(A.algorithm.hash)!==g)throw ti(`SHA-${g}`,"algorithm.hash");break}case"PS256":case"PS384":case"PS512":{if(!qi(A.algorithm,"RSA-PSS"))throw ti("RSA-PSS");const g=parseInt(I.slice(2),10);if(ri(A.algorithm.hash)!==g)throw ti(`SHA-${g}`,"algorithm.hash");break}case"Ed25519":case"EdDSA":if(!qi(A.algorithm,"Ed25519"))throw ti("Ed25519");break;case"ML-DSA-44":case"ML-DSA-65":case"ML-DSA-87":if(!qi(A.algorithm,I))throw ti(I);break;case"ES256":case"ES384":case"ES512":{if(!qi(A.algorithm,"ECDSA"))throw ti("ECDSA");const g=function(A){switch(A){case"ES256":return"P-256";case"ES384":return"P-384";case"ES512":return"P-521";default:throw new Error("unreachable")}}(I);if(A.algorithm.namedCurve!==g)throw ti(g,"algorithm.namedCurve");break}default:throw new TypeError("CryptoKey does not support this operation")}!function(A,I){if(!A.usages.includes(I))throw new TypeError(`CryptoKey does not support this operation, its usages must include ${I}.`)}(A,g)}(I,A,g),I};async function Oi(A,I,g){if(!bi(A))throw new ki("Flattened JWS must be an object");if(void 0===A.protected&&void 0===A.header)throw new ki('Flattened JWS must have either of the "protected" or "header" members');if(void 0!==A.protected&&"string"!=typeof A.protected)throw new ki("JWS Protected Header incorrect type");if(void 0===A.payload)throw new ki("JWS Payload missing");if("string"!=typeof A.signature)throw new ki("JWS Signature missing or incorrect type");if(void 0!==A.header&&!bi(A.header))throw new ki("JWS Unprotected Header incorrect type");let C={};if(A.protected)try{const I=hi(A.protected);C=JSON.parse(Si.decode(I))}catch{throw new ki("JWS Protected Header is invalid")}if(!((...A)=>{const I=A.filter(Boolean);if(0===I.length||1===I.length)return!0;let g;for(const A of I){const I=Object.keys(A);if(g&&0!==g.size)for(const A of I){if(g.has(A))return!1;g.add(A)}else g=new Set(I)}return!0})(C,A.header))throw new ki("JWS Protected and JWS Unprotected Header Parameter names must be disjoint");const Q={...C,...A.header},B=((A,I,g,C,Q)=>{if(void 0!==Q.crit&&void 0===C?.crit)throw new A('"crit" (Critical) Header Parameter MUST be integrity protected');if(!C||void 0===C.crit)return new Set;if(!Array.isArray(C.crit)||0===C.crit.length||C.crit.some(A=>"string"!=typeof A||0===A.length))throw new A('"crit" (Critical) Header Parameter MUST be an array of non-empty strings when present');let B;B=void 0!==g?new Map([...Object.entries(g),...I.entries()]):I;for(const I of C.crit){if(!B.has(I))throw new Li(`Extension Header Parameter "${I}" is not 
recognized`);if(void 0===Q[I])throw new A(`Extension Header Parameter "${I}" is missing`);if(B.get(I)&&void 0===C[I])throw new A(`Extension Header Parameter "${I}" MUST be integrity protected`)}return new Set(C.crit)})(ki,new Map([["b64",!0]]),g?.crit,C,Q);let E=!0;if(B.has("b64")&&(E=C.b64,"boolean"!=typeof E))throw new ki('The "b64" (base64url-encode payload) Header Parameter must be a boolean');const{alg:i}=Q;if("string"!=typeof i||!i)throw new ki('JWS "alg" (Algorithm) Header Parameter missing or invalid');const D=g&&((A,I)=>{if(void 0!==I&&(!Array.isArray(I)||I.some(A=>"string"!=typeof A)))throw new TypeError('"algorithms" option must be an array of strings');if(I)return new Set(I)})(0,g.algorithms);if(D&&!D.has(i))throw new Fi('"alg" (Algorithm) Header Parameter value not allowed');if(E){if("string"!=typeof A.payload)throw new ki("JWS Payload must be a string")}else if("string"!=typeof A.payload&&!(A.payload instanceof Uint8Array))throw new ki("JWS Payload must be a string or an Uint8Array instance");let n=!1;"function"==typeof I&&(I=await I(C,A),n=!0),((A,I,g)=>{A.startsWith("HS")||"dir"===A||A.startsWith("PBES2")||/^A(?:128|192|256)(?:GCM)?(?:KW)?$/.test(A)||/^A(?:128|192|256)CBC-HS(?:256|384|512)$/.test(A)?((A,I,g)=>{if(!(I instanceof Uint8Array)){if(xi(I)){if(function(A){return"oct"===A.kty&&"string"==typeof A.k}(I)&&mi(A,I,g))return;throw new TypeError('JSON Web Key for symmetric algorithms must have JWK "kty" (Key Type) equal to "oct" and the JWK "k" (Key Value) present')}if(!di(I))throw new TypeError(Hi(A,I,"CryptoKey","KeyObject","JSON Web Key","Uint8Array"));if("secret"!==I.type)throw new TypeError(`${Ti(I)} instances for symmetric algorithms must be of type "secret"`)}})(A,I,g):((A,I,g)=>{if(xi(I))switch(g){case"decrypt":case"sign":if(function(A){return"oct"!==A.kty&&("AKP"===A.kty&&"string"==typeof A.priv||"string"==typeof A.d)}(I)&&mi(A,I,g))return;throw new TypeError("JSON Web Key for this operation be a private JWK");case"encrypt":case"verify":if(function(A){return"oct"!==A.kty&&void 0===A.d&&void 0===A.priv}(I)&&mi(A,I,g))return;throw new TypeError("JSON Web Key for this operation be a public JWK")}if(!di(I))throw new TypeError(Hi(A,I,"CryptoKey","KeyObject","JSON Web Key"));if("secret"===I.type)throw new TypeError(`${Ti(I)} instances for asymmetric algorithms must not be of type "secret"`);if("public"===I.type)switch(g){case"sign":throw new TypeError(`${Ti(I)} instances for asymmetric algorithm signing must be of type "private"`);case"decrypt":throw new TypeError(`${Ti(I)} instances for asymmetric algorithm decryption must be of type "private"`)}if("private"===I.type)switch(g){case"verify":throw new TypeError(`${Ti(I)} instances for asymmetric algorithm verifying must be of type "public"`);case"encrypt":throw new TypeError(`${Ti(I)} instances for asymmetric algorithm encryption must be of type "public"`)}})(A,I,g)})(i,I,"verify");const o=function(...A){const I=A.reduce((A,{length:I})=>A+I,0),g=new Uint8Array(I);let C=0;for(const I of A)g.set(I,C),C+=I.length;return g}(Ri.encode(A.protected??""),Ri.encode("."),"string"==typeof A.payload?Ri.encode(A.payload):A.payload);let s;try{s=hi(A.signature)}catch{throw new ki("Failed to base64url decode the signature")}const w=await(async(A,I)=>{if(A instanceof Uint8Array)return A;if(ei(A))return A;if(Yi(A)){if("secret"===A.type)return A.export();if("toCryptoKey"in A&&"function"==typeof A.toCryptoKey)try{return((A,I)=>{ui||=new WeakMap;let g=ui.get(A);if(g?.[I])return g[I];const C="public"===A.type,Q=!!C;let 
B;if("x25519"===A.asymmetricKeyType){switch(I){case"ECDH-ES":case"ECDH-ES+A128KW":case"ECDH-ES+A192KW":case"ECDH-ES+A256KW":break;default:throw new TypeError("given KeyObject instance cannot be used for this algorithm")}B=A.toCryptoKey(A.asymmetricKeyType,Q,C?[]:["deriveBits"])}if("ed25519"===A.asymmetricKeyType){if("EdDSA"!==I&&"Ed25519"!==I)throw new TypeError("given KeyObject instance cannot be used for this algorithm");B=A.toCryptoKey(A.asymmetricKeyType,Q,[C?"verify":"sign"])}switch(A.asymmetricKeyType){case"ml-dsa-44":case"ml-dsa-65":case"ml-dsa-87":if(I!==A.asymmetricKeyType.toUpperCase())throw new TypeError("given KeyObject instance cannot be used for this algorithm");B=A.toCryptoKey(A.asymmetricKeyType,Q,[C?"verify":"sign"])}if("rsa"===A.asymmetricKeyType){let g;switch(I){case"RSA-OAEP":g="SHA-1";break;case"RS256":case"PS256":case"RSA-OAEP-256":g="SHA-256";break;case"RS384":case"PS384":case"RSA-OAEP-384":g="SHA-384";break;case"RS512":case"PS512":case"RSA-OAEP-512":g="SHA-512";break;default:throw new TypeError("given KeyObject instance cannot be used for this algorithm")}if(I.startsWith("RSA-OAEP"))return A.toCryptoKey({name:"RSA-OAEP",hash:g},Q,C?["encrypt"]:["decrypt"]);B=A.toCryptoKey({name:I.startsWith("PS")?"RSA-PSS":"RSASSA-PKCS1-v1_5",hash:g},Q,[C?"verify":"sign"])}if("ec"===A.asymmetricKeyType){const g=new Map([["prime256v1","P-256"],["secp384r1","P-384"],["secp521r1","P-521"]]).get(A.asymmetricKeyDetails?.namedCurve);if(!g)throw new TypeError("given KeyObject instance cannot be used for this algorithm");"ES256"===I&&"P-256"===g&&(B=A.toCryptoKey({name:"ECDSA",namedCurve:g},Q,[C?"verify":"sign"])),"ES384"===I&&"P-384"===g&&(B=A.toCryptoKey({name:"ECDSA",namedCurve:g},Q,[C?"verify":"sign"])),"ES512"===I&&"P-521"===g&&(B=A.toCryptoKey({name:"ECDSA",namedCurve:g},Q,[C?"verify":"sign"])),I.startsWith("ECDH-ES")&&(B=A.toCryptoKey({name:"ECDH",namedCurve:g},Q,C?[]:["deriveBits"]))}if(!B)throw new TypeError("given KeyObject instance cannot be used for this algorithm");return g?g[I]=B:ui.set(A,{[I]:B}),B})(A,I)}catch(A){if(A instanceof TypeError)throw A}let g=A.export({format:"jwk"});return pi(A,g,I)}if(xi(A))return A.k?hi(A.k):pi(A,A,I,!0);throw new Error("unreachable")})(I,i);if(!await(async(A,I,g,C)=>{const Q=await li(A,I,"verify");((A,I)=>{if(A.startsWith("RS")||A.startsWith("PS")){const{modulusLength:g}=I.algorithm;if("number"!=typeof g||g<2048)throw new TypeError(`${A} requires key modulusLength to be 2048 bits or larger`)}})(A,Q);const B=((A,I)=>{const g=`SHA-${A.slice(-3)}`;switch(A){case"HS256":case"HS384":case"HS512":return{hash:g,name:"HMAC"};case"PS256":case"PS384":case"PS512":return{hash:g,name:"RSA-PSS",saltLength:parseInt(A.slice(-3),10)>>3};case"RS256":case"RS384":case"RS512":return{hash:g,name:"RSASSA-PKCS1-v1_5"};case"ES256":case"ES384":case"ES512":return{hash:g,name:"ECDSA",namedCurve:I.namedCurve};case"Ed25519":case"EdDSA":return{name:"Ed25519"};case"ML-DSA-44":case"ML-DSA-65":case"ML-DSA-87":return{name:A};default:throw new Li(`alg ${A} is not supported either by JOSE or your javascript runtime`)}})(A,Q.algorithm);try{return await crypto.subtle.verify(B,Q,g,C)}catch{return!1}})(i,w,s,o))throw new Ui;let a;if(E)try{a=hi(A.payload)}catch{throw new ki("Failed to base64url decode the payload")}else a="string"==typeof A.payload?Ri.encode(A.payload):A.payload;const y={payload:a};return void 0!==A.protected&&(y.protectedHeader=C),void 0!==A.header&&(y.unprotectedHeader=A.header),n?{...y,key:w}:y}const fi=/^(\+|\-)? 
?(\d+|\d+\.\d+) ?(seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)(?: (ago|from now))?$/i,Pi=A=>{const I=fi.exec(A);if(!I||I[4]&&I[1])throw new TypeError("Invalid time period format");const g=parseFloat(I[2]);let C;switch(I[3].toLowerCase()){case"sec":case"secs":case"second":case"seconds":case"s":C=Math.round(g);break;case"minute":case"minutes":case"min":case"mins":case"m":C=Math.round(60*g);break;case"hour":case"hours":case"hr":case"hrs":case"h":C=Math.round(3600*g);break;case"day":case"days":case"d":C=Math.round(86400*g);break;case"week":case"weeks":case"w":C=Math.round(604800*g);break;default:C=Math.round(31557600*g)}return"-"===I[1]||"ago"===I[4]?-C:C},Wi=A=>A.includes("/")?A.toLowerCase():`application/${A.toLowerCase()}`;function zi(A,I,g={}){let C;try{C=JSON.parse(Si.decode(I))}catch{}if(!bi(C))throw new Ji("JWT Claims Set must be a top-level JSON object");const{typ:Q}=g;if(Q&&("string"!=typeof A.typ||Wi(A.typ)!==Wi(Q)))throw new Ki('unexpected "typ" JWT header value',C,"typ","check_failed");const{requiredClaims:B=[],issuer:E,subject:i,audience:D,maxTokenAge:n}=g,o=[...B];void 0!==n&&o.push("iat"),void 0!==D&&o.push("aud"),void 0!==i&&o.push("sub"),void 0!==E&&o.push("iss");for(const A of new Set(o.reverse()))if(!(A in C))throw new Ki(`missing required "${A}" claim`,C,A,"missing");if(E&&!(Array.isArray(E)?E:[E]).includes(C.iss))throw new Ki('unexpected "iss" claim value',C,"iss","check_failed");if(i&&C.sub!==i)throw new Ki('unexpected "sub" claim value',C,"sub","check_failed");if(D&&(w="string"==typeof D?[D]:D,!("string"==typeof(s=C.aud)?w.includes(s):Array.isArray(s)&&w.some(Set.prototype.has.bind(new Set(s))))))throw new Ki('unexpected "aud" claim value',C,"aud","check_failed");var s,w;let a;switch(typeof g.clockTolerance){case"string":a=Pi(g.clockTolerance);break;case"number":a=g.clockTolerance;break;case"undefined":a=0;break;default:throw new TypeError("Invalid clockTolerance option type")}const{currentDate:y}=g,G=(R=y||new Date,Math.floor(R.getTime()/1e3));var R;if((void 0!==C.iat||n)&&"number"!=typeof C.iat)throw new Ki('"iat" claim must be a number',C,"iat","invalid");if(void 0!==C.nbf){if("number"!=typeof C.nbf)throw new Ki('"nbf" claim must be a number',C,"nbf","invalid");if(C.nbf>G+a)throw new Ki('"nbf" claim timestamp check failed',C,"nbf","check_failed")}if(void 0!==C.exp){if("number"!=typeof C.exp)throw new Ki('"exp" claim must be a number',C,"exp","invalid");if(C.exp<=G-a)throw new Mi('"exp" claim timestamp check failed',C,"exp","check_failed")}if(n){const A=G-C.iat;if(A-a>("number"==typeof n?n:Pi(n)))throw new Mi('"iat" claim timestamp check failed (too far in the past)',C,"iat","check_failed");if(A<0-a)throw new Ki('"iat" claim timestamp check failed (it should be in the past)',C,"iat","check_failed")}return C}const ji=new Gg("Room"),Vi={small:{width:{ideal:160},height:{ideal:120}},qvga:{width:{ideal:320},height:{ideal:240}},vga:{width:{ideal:640},height:{ideal:480}},hd:{width:{ideal:1280},height:{ideal:720}}};let vi;const Zi=new class{constructor(){this.queue=new Map}push(A,I){this.queue.set(A,I)}get(A){return this.queue.get(A)}remove(A){this.queue.delete(A)}},Xi=new class{constructor(){this._localVBStream=null,this._vbDetailsNew={},this._vbDetails=null,this._roomType=null,this._participants={},this._peerId=null,this._peerConnection=null,this._pipelineManager=null,this._updateInterval=null,this._pipelineManager=new Ei,this.initializeTFLite()}getVBDetails(){return this._vbDetailsNew}getVBStream(){return 
this._localVBStream}hasLiveVBTrack(){try{return this._localVBStream&&"function"==typeof this._localVBStream.getVideoTracks&&this._localVBStream.getVideoTracks().length>0&&"live"===this._localVBStream.getVideoTracks()[0].readyState}catch(A){return!1}}setPipelineManager(A){this._pipelineManager=A}async initializeTFLite(){try{const A=await Ai(_E);this._vbDetailsNew.tfLite=A.tflite,this._vbDetailsNew.isSIMDSupported=A.isSIMDSupported}catch(A){}}async initializePipeline(A,I){ii.debug("initializePipeline called with videoTrack and backgroundConfig:%O,%O",A,I);let g=null;try{const C=await this._createHiddenVideoElement(A);if(this._vbDetailsNew.hiddenCanvas=this._createHiddenCanvasElement(C),"image"===I.type){const A=await this._createHiddenImageElement(I);if(!A.success)return!1;g=A.hiddenImage}const Q=C.htmlElement;Q instanceof HTMLVideoElement&&Q.paused&&(ii.debug("🎬 Video is paused, starting playback..."),await Q.play());const B=await this._pipelineManager.initialize(C,I,_E,null,this._vbDetailsNew.tfLite,g,this._vbDetailsNew.hiddenCanvas);ii.debug("Inside getUserMediaSuccess result",B),ii.debug("Pipeline manager active? :%s",this._pipelineManager.isActive()),ii.debug("camera stream live status:%s",A.readyState),B.pipeline.updatePostProcessingConfig($E),this._setupPeriodicUpdates(),this._vbDetailsNew.hiddenImage=g,this._vbDetailsNew.sourcePlayback=C;const E=B.canvasRef.captureStream(30);return this._localVBStream=E,{success:!0,vbStream:E}}catch(A){ii.error("Failed to initialize pipeline:%O",A)}}async _createHiddenVideoElement(A){return new Promise(I=>{const g=document.createElement("video");g.autoplay=!0,g.loop=!0,g.controls=!1,g.playsInline=!0,g.muted=!0,g.srcObject=new MediaStream([A]),g.style.cssText="position: fixed; top: 10px; right: 10px; width: 200px; height: 150px; border: 2px solid blue; z-index: 9999; ",document.body.appendChild(g),g.play(),g.onloadeddata=()=>{I({htmlElement:g,width:g.videoWidth,height:g.videoHeight})}})}async _createHiddenVideoElement(A){return new Promise(I=>{const g=document.createElement("video");g.style.display="none",g.autoplay=!0,g.loop=!0,g.controls=!1,g.playsInline=!0,g.muted=!0,g.srcObject=new MediaStream([A]),document.body.appendChild(g);const C=async()=>{try{await g.play()}catch(A){}g.readyState<2||!g.videoWidth||!g.videoHeight?requestAnimationFrame(C):I({htmlElement:g,width:g.videoWidth,height:g.videoHeight})};g.addEventListener("loadedmetadata",C,{once:!0}),C()})}_createHiddenCanvasElement(A){const I=document.createElement("canvas");return I.style.display="none",I.width=A.width,I.height=A.height,document.body.appendChild(I),I}_createHiddenImageElement(A){return new Promise(async I=>{const g=document.createElement("img");if(g.style.display="none",A?.url.includes("http"))try{(await this.testImageCORS(A?.url)).success?(g.crossOrigin="anonymous",document.body.appendChild(g),g.onload=()=>{I({success:!0,hiddenImage:g})},g.src=A.url):(g.crossOrigin="anonymous",document.body.appendChild(g),g.onload=()=>{I({success:!0,hiddenImage:g})})}catch(A){g.crossOrigin="anonymous",document.body.appendChild(g),g.onload=()=>{I({success:!0,hiddenImage:g})}}else g.crossOrigin="anonymous",document.body.appendChild(g),g.onload=()=>{I({success:!0,hiddenImage:g})},g.src=A.url})}async testImageCORS(A,I=1e4){return new Promise((g,C)=>{const Q=new Image;Q.crossOrigin="anonymous";const B=setTimeout(()=>{Q.src="",C(new Error("CORS_TIMEOUT"))},I);Q.onload=()=>{clearTimeout(B);try{const I=document.createElement("canvas");I.width=Q.width||100,I.height=Q.height||100;const 
C=I.getContext("2d");C.drawImage(Q,0,0),C.getImageData(0,0,1,1),g({success:!0,url:A,width:Q.naturalWidth,height:Q.naturalHeight,message:"CORS allowed"})}catch(A){C(new Error("CORS_BLOCKED"))}},Q.onerror=A=>{clearTimeout(B),C(new Error("IMAGE_LOAD_FAILED"))},Q.src=A})}_setupPeriodicUpdates(){this._updateInterval&&clearInterval(this._updateInterval),this._updateInterval=setInterval(()=>{if(this._pipelineManager&&this._pipelineManager.isActive()){const A=this._pipelineManager.getState();this._vbDetailsNew.fps=A.fps;const[I,g,C]=A.durations||[0,0,0];this._vbDetailsNew.resizingDuration=I,this._vbDetailsNew.inferenceDuration=g,this._vbDetailsNew.postProcessingDuration=C}},1e3)}cleanup(){try{if(this._localVBStream&&"function"==typeof this._localVBStream.getVideoTracks)try{this._localVBStream.getVideoTracks().forEach(A=>{try{A.stop()}catch(A){}})}catch(A){}try{this._pipelineManager&&"function"==typeof this._pipelineManager.stop&&this._pipelineManager.stop()}catch(A){}if(this._updateInterval){try{clearInterval(this._updateInterval)}catch(A){}this._updateInterval=null}try{if(this._vbDetailsNew?.sourcePlayback?.htmlElement){try{this._vbDetailsNew.sourcePlayback.htmlElement.srcObject=null}catch(A){}try{this._vbDetailsNew.sourcePlayback.htmlElement.remove()}catch(A){}}}catch(A){}try{if(this._vbDetailsNew?.hiddenCanvas)try{this._vbDetailsNew.hiddenCanvas.remove()}catch(A){}}catch(A){}try{if(this._vbDetailsNew?.hiddenImage)try{this._vbDetailsNew.hiddenImage.remove()}catch(A){}}catch(A){}this._localVBStream=null,this._vbDetailsNew&&(this._vbDetailsNew.sourcePlayback=null,this._vbDetailsNew.hiddenCanvas=null,this._vbDetailsNew.hiddenImage=null)}catch(A){}}};class _i extends hg.EventEmitter{static async listDevices(){if(vi)return ji.info("Device list already exists:%O",vi),{success:!0,deviceList:vi};const A=await xE();return A.success?(vi=A.deviceList,{success:!0,deviceList:A.deviceList}):{success:!1,reason:A.reason}}static async changeVB({track:A,details:I}){if(ji.debug("changeVB Received details are:%O",I),ji.debug("changeVB Received track are:%O",A),!I)return ji.debug("VB details not provided. 
Skipping VB processing."),{success:!1};if(!0===A.active){ji.debug("Track is live, calling initializePipeline",A);const g=A.getVideoTracks()[0],C=await Xi.initializePipeline(g,I);return ji.debug("response is :%o",C),C}throw ji.error("Track is not live"),new Error("Track is not live")}static async init({sessionToken:A,roomId:I,peerId:g,roomType:C}={}){if(!A)throw new Error("Session token is required to join the room.");try{let Q;ji.info("session token:%s",A);try{const I=(new TextEncoder).encode("samvyo_tech_321"),{payload:g}=await async function(A,I,g){const C=await async function(A,I,g){if(A instanceof Uint8Array&&(A=Si.decode(A)),"string"!=typeof A)throw new ki("Compact JWS must be a string or Uint8Array");const{0:C,1:Q,2:B,length:E}=A.split(".");if(3!==E)throw new ki("Invalid Compact JWS");const i=await Oi({payload:Q,protected:C,signature:B},I,g),D={payload:i.payload,protectedHeader:i.protectedHeader};return"function"==typeof I?{...D,key:i.key}:D}(A,I,g);if(C.protectedHeader.crit?.includes("b64")&&!1===C.protectedHeader.b64)throw new Ji("JWTs MUST NOT use unencoded payload");const Q={payload:zi(C.protectedHeader,C.payload,g),protectedHeader:C.protectedHeader};return"function"==typeof I?{...Q,key:C.key}:Q}(A,I,{algorithms:["HS256"]});Q=g,ji.info("Decoded token:",Q)}catch(A){throw ji.error("JWT verification failed:",A),A instanceof Mi?new Error("Session token has expired"):A instanceof Ki?new Error("Session token not yet active"):new Error("Invalid session token: "+A.message)}if(!Q||"object"!=typeof Q)throw new Error("Invalid token format");const{data:B,signallingServerUrl:E}=Q;if(!B||!E)throw new Error("Missing required token data");return g||(g=vC()),I||(I=VC()),new _i({peerId:g,roomId:I,outputData:{sessionToken:A,innerSessionToken:B,signallingServerUrl:E},roomType:C})}catch(A){throw ji.error("Failed to initialize:",A.message),A}}constructor({peerId:A,roomId:I,outputData:g,roomType:C}){super(),this._closed=!1,this._roomStatus="initialised",this._roomDisplayName=null,this._running=!1,this._cignal=null,this._socket=null,this._sendTransport=null,this._recvTransport=null,this._device=new $I.Device,this._selfDisconnectTimer=null,this._webCamProducer=null,this._micProducer=null,this._shareProducer=null,this._shareAudioProducer=null,this._producers=new Map,this._consumers=new Map,this._peers=new Map,this._subscribedPeers=new Map,this._activeSubscribedPeerIds=new Set,this._availableTracks=new Map,this._subscriptionDebug={admissionDenied:[],preemptions:[],lastResponses:{subscribePeersResponse:null,stagePeersResponse:null,unsubscribePeersResponse:null}},this._data={...g,inputParams:{peerId:A,roomId:I,roomType:C||"conferencing"}},this._micStream=null,this._webCamStream=null,this._webcam={device:null,resolution:"hd"},this._mic={device:null},this._deviceList=vi||null,this._externalVideo=null,this._externalVideoStream=null,this._forceVP8=!1,this._forceH264=!1,this._forceVP9=!1,this._enableWebcamLayers=!0,this._numSimulcastStreams=3,this._enableSharingLayers=!0,this._client=wg.parse(window.navigator.userAgent),this._routerRtpCapabilities=null,this._recordingStartedByMe={},this._liveStreamingStartedByMe={},this._cignalConnected=!1,this._reconnectionInitiated=!1,this._restartIceInProgressSendTransport=!1,this._restartIceInProgressRecvTransport=!1,this._activeSpeaker=null,this._speechRecognition=null,this._transcriptStorage=new 
Map,this._audioContext=null,this._audioAnalyser=null,this._micMonitorStream=null,this._speakingWhileMutedInterval=null,this._speakingThreshold=-50,this._mutedSpeakingDetectionEnabled=!0,this._lastMutedSpeakingNotification=0,this._mutedSpeakingCooldown=3e3,this._audioTroubleShootData={lastDiagnostic:null,deviceTests:{},connectivityStatus:"unknown"},this._audioOutputDevices=[],this._currentSpeakerDevice=null,this._testAudioElements=new Map,this._speakerTestResults=new Map,this._remoteAudioElement=null,this._remoteCaption=null,this._transcriptionRecorder=null,this._transcriptionActive=!1,this._transcriptionChunks=[],this._currentTranscriptionPeerId=null,this._transcriptionEmittedStart=!1,this._peerConnection=null,this._localStream=null,this._screenShareStream=null,this._remoteStream=null,this._remoteDisplayName=null,this._remotePeerId=null,this._remotePeerType=null,this._remoteVideoStatus=null,this._remoteAudioStatus=null,this._videoWasActiveP2p=null,this._statsTimer=null,this._lastStatCache={},this._statsIntervalMs=5e3,this.initLocal()}get peerId(){return this._peerId}set peerId(A){this._peerId=A}get roomType(){return this._roomType}set roomType(A){this._roomType=A}get closed(){return this._closed}get data(){return this._data}set data(A){throw new Error("Setting the whole data object is not possible!")}get peers(){return this._peers}set peers(A){throw new Error("Setting the whole peers object is not possible!")}get transports(){return{produce:this._sendTransport,consume:this._recvTransport}}set transports(A){throw new Error("Setting of transport is not possible!")}get videoStream(){return this._webCamStream}get audioStream(){return this._micStream}get clientAgent(){return this._client}get activeParameters(){return this._data.inputParams}get deviceList(){return this._deviceList?this._deviceList:{videoDevices:[],audioDevices:[],audioOutputDevices:[]}}set deviceList(A){throw new Error("Setting of deviceList is not possible!")}get currentlyActiveSpeaker(){return this._activeSpeaker}set currentlyActiveSpeaker(A){throw new Error("Setting of currentActivespeaker is not possible!")}get roomDisplayName(){return this._roomDisplayName}set roomDisplayName(A){throw new Error("Setting of roomDisplayName is not possible!")}async initLocal(){const A=$I.detectDevice();ji.debug("The device is:%O",A),await this._initSocket()}async _initSocket(){let A=this;const I=this.data.signallingServerUrl.replace(/^(http|https):\/\//,""),g=`wss://${I}/?sessionToken=${this.data.sessionToken}&roomId=${this.data.inputParams.roomId}&peerId=${this.data.inputParams.peerId}&roomType=${this.data.inputParams.roomType}`;ji.info(`Going to create a new socket! 
with address: ${I}`),this._socket=new jC(g,!0),this._listenToSocket(),this._socket.on("notify",({type:A,title:I,message:g})=>{this.emit("notification",{eventType:A,eventText:`${I}: ${g}`,roomId:this.data.inputParams.roomId,peerId:this.data.inputParams.peerId})}),this._socket.on("roomStartedP2p",A=>{ji.info("P2P room successfully started:%O",A),this._running=!0;const I=this.data.inputParams.peerId;try{this.handleRemotePeerJoin(A)}catch(A){ji.error("Error handling remote peer join on roomStartedP2p:%o",A)}this.emit("joinSuccess",{message:A.message||"Successfully joined P2P room",peerId:I,callStartTime:A.callStartTime,cId:A.cId})}),this._socket.on("userError",I=>{ji.error("User Error happened with message:%O",I),A.emit("notification",{eventType:I.title,eventText:`${I.text}`})}),this._socket.on("validationAlert",A=>{ji.info("Validation alert happened")}),this._socket.on("alreadyActive",({title:A,text:I})=>{this.emit("notification",{eventType:"alreadyActive",eventText:"This peer already has an active connection",roomId:this.data.inputParams.roomId,peerId:this.data.inputParams.peerId})}),this._socket.on("passwordDisabled",()=>{ji.info("password disabled by moderator!"),this.emit("notification",{eventType:"passwordDisabled",eventText:"Password for this room has been disabled by moderator",roomId:this.data.inputParams.roomId})}),this._socket.on("close",({code:A,reason:I})=>{if(ji.info(`socket closed with code ${A}`),this.emit("connectionStateChange",{status:"disconnected",code:A,reason:I||"Connection closed"}),4500!==A&&4100!==A){let g=I||"Connection to server closed unexpectedly! Trying to reconnect.";ji.info(`socket close code is${A} with reason ${g}`)}else ji.info("Socket is now closed!"),this.close()}),this._socket.on("connected",async()=>{ji.info("Socket connected"),this._clearSelfDisconnectGuard(),this.emit("connectionStateChange",{status:"connected"}),this.emit("initSuccess")}),this._socket.on("reconnected",async()=>{if(ji.info("Socket re-connected"),this._clearSelfDisconnectGuard(),this.emit("connectionStateChange",{status:"reconnected"}),this.roomType===RE&&A.pc)ji.info("Socket seems to be reconnected in mid call! RestartIce needed for p2p call."),"failed"!==A.pc.iceConnectionState&&"disconnected"!==A.pc.iceConnectionState||A.restartICE();else{ji.debug("Ice restarts for mediasoup transports for a joined peer");const I=!A._sendTransport||"closed"===A._sendTransport.connectionState,g=!A._recvTransport||"closed"===A._recvTransport.connectionState;(I||g)&&ji.warn("Transports missing/closed after reconnect. Recreating. 
sendClosed=%s recvClosed=%s",I,g),A._sendTransport&&["failed","disconnected"].includes(A._sendTransport.connectionState)?(ji.debug("Restart ice for sendtransport"),A.restartIce(A._sendTransport.id,"send")):A._sendTransport?ji.debug("Send transport state post-reconnect: %s (no ICE restart)",A._sendTransport.connectionState):ji.error("Send transport not available!"),A._recvTransport&&["failed","disconnected"].includes(A._recvTransport.connectionState)?(ji.debug("Restart ice for recvtransport"),A.restartIce(A._recvTransport.id,"recv")):A._recvTransport?ji.debug("Recv transport state post-reconnect: %s (no ICE restart)",A._recvTransport.connectionState):ji.error("Recv transport not available!")}this._sendTransport&&this._recvTransport&&"connected"===this._sendTransport.connectionState&&"connected"===this._recvTransport.connectionState&&this.emit("connectionStateChange",{status:"media-restored"})}),this._socket.on("defaultJoinStatus",async A=>{ji.info(" Socket defaultjoinstatus:%O",A)})}_startSelfDisconnectGuard(A=9e4){this._selfDisconnectTimer&&clearTimeout(this._selfDisconnectTimer),this._selfDisconnectTimer=setTimeout(async()=>{ji.warn("Self disconnect guard elapsed; emitting roomClosed to avoid stale client state");try{this.emit("roomClosed",{roomId:this.data.inputParams.roomId,reason:"self-timeout"})}catch(A){}try{await this.leaveRoomCommon()}catch(A){ji.error("Failed to leave room after self-timeout:%o",A)}},A)}_clearSelfDisconnectGuard(){this._selfDisconnectTimer&&(clearTimeout(this._selfDisconnectTimer),this._selfDisconnectTimer=null)}_sendMessage(A){this._socket.send({usageType:"sdk",...A})}_listenToSocket(){this._socket.on("message",A=>{try{switch("currentlyActiveSpeaker"===A.id||"allStats"===A.id||ji.info("message in Room is:%O",A),A.id){case"chatMessage":this.processChatMessage(A);break;case"customMessage":this.processCustomMessage(A);break;case"existingParticipants":this.onExistingParticipants(A);break;case"newPeerJoin":this.onNewPeer(A);break;case"recordingError":this.handleRecordingErrors(A);break;case"moderatorAuthentication":this.authenticateUser(A);break;case"authenticationRequested":this.authenticationRequested(A);break;case"toggleMyMic":this.toggleMyMic(A);break;case"toggleMyCamera":this.toggleMyCamera(A);break;case"logMeOut":this.logMeOutNew(A);break;case"userAlreadyAuthenticated":this.hideUserAuthenticationDialog(A);break;case"peerLeft":this.peerLeft(A);break;case"recordingStarted":this.setRecordingStatusStarted(A);break;case"recordingStopped":this.setRecordingStatusEnded(A);break;case"startDefaultRecording":this.startRecording(A);break;case"mediaToggled":this.mediaToggled(A);break;case"processingStarted":this.handleProcessingStart(A);break;case"processingCompleted":this.handleProcessingCompletion(A);break;case"processingError":this.handleProcessingError(A);break;case"createTransportResponse":this.handleCreateTransportRequest(A);break;case"connectTransportResponse":this.handleConnectTransportRequest(A);break;case"connectRecvTransportResponse":this.handleConnectRecvTransportRequest(A);break;case"sendTrackResponse":this.handleSendTrackRequest(A);break;case"recvTrackResponse":this.handleRecvTrackRequest(A);break;case"consumerAdmissionDenied":try{try{this._subscriptionDebug?.admissionDenied&&(this._subscriptionDebug.admissionDenied.push({ts:Date.now(),...A}),this._subscriptionDebug.admissionDenied.length>200&&this._subscriptionDebug.admissionDenied.shift())}catch(A){}this.emit("consumerAdmissionDenied",A)}catch(A){}break;case"subscribePeersResponse":case"stagePeersResponse":cas
e"unsubscribePeersResponse":try{try{this._subscriptionDebug?.lastResponses&&(this._subscriptionDebug.lastResponses[A.id]={ts:Date.now(),...A})}catch(A){}this.emit(A.id,A)}catch(A){}break;case"existingTracks":try{const I=Array.isArray(A.tracks)?A.tracks:[];for(const A of I){if(!A)continue;const I=`${A.senderPeerId||A.peerId}:${A.mediaTag}`;this._availableTracks.set(I,A)}this.emit("existingTracks",{tracks:I})}catch(A){ji.error("Failed handling existingTracks:%O",A)}break;case"trackAvailable":try{const I=A.senderPeerId||A.peerId,g=A.mediaTag;if(I&&g){const C=`${I}:${g}`;this._availableTracks.set(C,A)}this.emit("trackAvailable",A)}catch(A){ji.error("Failed handling trackAvailable:%O",A)}break;case"trackUnavailable":try{const I=A.senderPeerId||A.peerId,g=A.mediaTag;if(I&&g){const A=`${I}:${g}`;this._availableTracks.delete(A)}this.emit("trackUnavailable",A)}catch(A){ji.error("Failed handling trackUnavailable:%O",A)}break;case"roomClosedByModerator":this.leaveRoomCommon(),this.roomClosed();break;case"currentlyActiveSpeaker":this.setCurrentlyActiveSpeaker(A);break;case"restartIceResponse":this.restartIceResponse(A);break;case"consumerClosed":this.closeConsumer(A);break;case"handRaise":this.handleHandRaise(A);break;case"updateCId":this.updateCId(A);break;case"upgradeParticipant":this.handleUpgradeParticipant(A);break;case"downgradeParticipant":this.handleDowngradeParticipant(A);break;case"switchMicOff":this.handleSwitchMicOff(A);break;case"screenShareLimitReached":this.handleScreenShareLimitReached(A);break;case"upgradeLimitReached":this.handleUpgradeLimitReached(A);break;case"modUpgradeReq":this.handleModUpgradeReq(A);break;case"lockUnlockRoom":this.handleLockUnlockRoom(A);break;case"peersWaiting":this.handlePeersWaiting(A);break;case"checkTransportStatus":this.checkTransportStatus(A);break;case"remotePeerJoin":this.handleRemotePeerJoin(A);break;case"offer":ji.debug("inside offer"),this.handleOffer(A);break;case"answer":ji.debug("inside answer"),this.handleAnswer(A);break;case"candidate":ji.debug("inside handle candidate"),this.handleCandidate(A.candidate);break;case"p2pRoomClosed":ji.debug("inside p2p room close"),this.leaveRoomNewP2p(hE),this.emit("roomClosed",{roomId:this.data.inputParams.roomId});break;case"p2pUserLeft":ji.debug("inside p2p user left"),this.userLeftRoom(A);break;case"iceRestart":this.handleIceRestart(A);break;case"iceRestarted":this.handleIceRestartResponse(A);break;case"screenShareP2p":this.handleScreenShareP2p(A);break;case"transcription":this._processTranscriptionMessage(A);break;default:ji.warn("Unrecognized message:%o",A)}}catch(A){ji.error("listentomessage:%O",A)}})}joinRoom=async({peerName:A=null,produce:I=!0,produceAudio:g=!0,produceVideo:C=!0,consume:Q=!0,manualSubscription:B=!1,videoResolution:E="hd",forceVp8:i=!1,forceVp9:D=!1,forceH264:n=!1,h264Profile:o="high",forcePCMU:s=!1,forcePCMA:w=!1,forceFPS:a=25,enableWebcamLayers:y=!0,numSimulcastStreams:G=3,autoGainControl:R=!0,echoCancellation:S=!0,noiseSuppression:h=!0,sampleRate:N=44e3,channelCount:K=1,videoBitRates:M=[700,250,75],share:F=!1,shareAudio:L=!1,enableSharingLayers:k=!0,shareBitRates:J=[2500,1250,500],audioDeviceId:U=null,videoDeviceId:t=null,peerType:q="participant",roomType:r=SE,authenticationRequired:c=!1,password:H=null,roomDisplayName:e=null,vbdetails:Y,enableTranscription:d=!1,enableChatOption:b=!1,enableScreenSharing:x=!1}={})=>{ji.info("Going to join room",d),["hd","vga","qvga"].includes(E)||(ji.warn("Invalid video resolution value. 
setting it to default value of 'hd' "),E="hd"),"boolean"!=typeof I&&(ji.warn("Produe should either be true or false"),I=Boolean(I)),"boolean"!=typeof g&&(ji.warn("ProduceAudio should either be true or false"),g=Boolean(g)),"boolean"!=typeof C&&(ji.warn("ProduceVideo should either be true or false"),C=Boolean(C)),"boolean"!=typeof Q&&(ji.warn("Consume should either be true or false"),Q=Boolean(Q)),"boolean"!=typeof i&&(ji.warn("forceVp8 should either be true or false"),i=Boolean(i)),"boolean"!=typeof D&&(ji.warn("forceVp9 should either be true or false"),D=Boolean(D)),"boolean"!=typeof n&&(ji.warn("forceH264 should either be true or false"),n=Boolean(n)),["high","low"].includes(o.toLowerCase())||(ji.warn("h264Profile should either be 'high' or 'low'"),o="high"),(!Number.isInteger(a)||Number.isInteger(a)&&(a>65||a<5))&&(ji.warn("forceFPS should be a number between 5 to 65, default value is 25 fps."),a=25),"boolean"!=typeof y&&(ji.warn("enableWebcamLayers should either be true or false"),y=Boolean(y)),(!Number.isInteger(G)||Number.isInteger(G)&&(G>3||G<1))&&(ji.warn("numSimulcastStreams should be a number between 1 to 3, default value is 3."),G=3),Array.isArray(M)&&M.length>=1&&M.length<=3&&M.every(A=>Number.isInteger(A)&&A>=75&&A<=800)?ji.debug("videoBitRates values are correct"):(ji.warn("videobitrates values should be an integer array with maximum 3 elements and minimum 1 element. The values in the array are '[700,250,75]'"),M=[700,250,75]),"boolean"!=typeof s&&(ji.warn("forcePCMU should either be true or false"),s=Boolean(s)),"boolean"!=typeof w&&(ji.warn("forcePCMA should either be true or false"),w=Boolean(w)),"boolean"!=typeof R&&(ji.warn("autoGainControl should either be true or false"),R=Boolean(R)),"boolean"!=typeof S&&(ji.warn("echoCancellation should either be true or false"),S=Boolean(S)),"boolean"!=typeof h&&(ji.warn("noiseSuppression should either be true or false"),h=Boolean(h)),(!Number.isInteger(N)||Number.isInteger(N)&&(N>64e3||N<8e3))&&(ji.warn("sampleRate should be a number between 8000 to 64000, default value is 44000 Khz."),N=44e3),(!Number.isInteger(K)||Number.isInteger(K)&&(K>2||K<1))&&(ji.warn("sampleRate should be a number between 1 to 2, default value is 1, which is a mono audio."),K=1),"boolean"!=typeof F&&(ji.warn("share should either be true or false"),F=Boolean(F)),"boolean"!=typeof L&&(ji.warn("shareAudio should either be true or false"),L=Boolean(L)),"boolean"!=typeof k&&(ji.warn("enableSharingLayers should either be true or false"),k=Boolean(k)),Array.isArray(J)&&J.length>=1&&J.length<=3&&J.every(A=>Number.isInteger(A)&&A>=500&&A<=2500)?ji.debug("shareBitRates values are correct"):(ji.warn("sharebitrates values should be an integer array with maximum 3 elements and minimum 1 element. The values in the array are '[2500,1250,500]'"),J=[2500,1250,500]),["moderator","participant","attendee","bot"].includes(q)?ji.debug("peerType is valid:%s",q):(q="participant",ji.debug("peerType is invalid:%s. 
By default set to: participant",q)),I?await this.listDevicesInternal():ji.debug("Skipping device enumeration for receive-only client (produce: false)"),this._videoResolution=E,this._forceVP8=Boolean(i),this._forceH264=Boolean(n),this._forceVP9=Boolean(D),this._enableWebcamLayers=Boolean(y),this._numSimulcastStreams=G,this._enableSharingLayers=Boolean(k);try{if(A||(A=dE()),this.data.inputParams={...this.data.inputParams,peerName:A,produce:I,produceAudio:g,produceVideo:C,consume:Q,manualSubscription:B,videoResolution:E,forceVp8:i,forceVp9:D,forceH264:n,h264Profile:o,forceFPS:a,forcePCMU:s,forcePCMA:w,enableWebcamLayers:y,numSimulcastStreams:G,autoGainControl:R,echoCancellation:S,noiseSuppression:h,sampleRate:N,channelCount:K,videoBitRates:M,share:F,shareAudio:L,enableSharingLayers:k,shareBitRates:J,audioDeviceId:U,videoDeviceId:t,peerType:q,roomType:r,authenticationRequired:c,password:H,roomDisplayName:e,vbdetails:Y,enableTranscription:d,enableChatOption:b,enableScreenSharing:x},ji.info("input params are:%O",this.data.inputParams),r===RE)return ji.info("Joining P2P room"),await this._joinRoomP2p({peerName:A,produce:I,produceAudio:g,produceVideo:C,audioDeviceId:U,videoDeviceId:t,peerType:q,authenticationRequired:c,password:H,roomDisplayName:e,vbdetails:Y,enableTranscription:d,enableChatOption:b,enableScreenSharing:x});{const A={id:"joinRoom",type:"r",peerId:this.data.inputParams.peerId,participantType:"attendee"===q?"viewer":"bot"===q?"bot":q,roomType:r,roomDisplayName:e||`room-${1e5+Math.round(9e5*Math.random())}`,browser:this._client,name:this.data.inputParams.peerName,room:this.data.inputParams.roomId,authenticationRequired:c,isRoomPassword:!!H,roomPassword:H||null,usageType:"sdk",enableTranscription:d,enableChatOption:b,enableScreenSharing:x,manualSubscription:Boolean(B)};this._sendMessage(A)}}catch(A){return ji.error("Failed to join room:",A.message),{success:!1,reason:A.message}}};_joinRoomP2p=async({peerName:A,produce:I,produceAudio:g,produceVideo:C,audioDeviceId:Q,videoDeviceId:B,peerType:E,authenticationRequired:i,password:D,roomDisplayName:n,vbdetails:o,enableTranscription:s,enableChatOption:w,enableScreenSharing:a})=>{ji.info("P2P room join started");try{if(I){if(C){ji.info("Acquiring video for P2P");try{await this._acquireVideoForP2p({deviceId:B,vbdetails:o})}catch(A){ji.warn("Video acquisition failed for P2P, continuing with audio if available:%O",A)}}if(g){ji.info("Acquiring audio for P2P");try{await this._acquireAudioForP2p({deviceId:Q})}catch(A){ji.warn("Audio acquisition failed for P2P, continuing with video if available:%O",A)}}}if(this._localStream=new MediaStream,this._micStream&&this._micStream.getTracks().forEach(A=>{this._localStream.addTrack(A)}),this._webCamStream&&this._webCamStream.getTracks().forEach(A=>{this._localStream.addTrack(A)}),0===this._localStream.getTracks().length)return ji.error("No media tracks acquired for P2P call"),this.emit("error",{type:"mediaError",message:"At least mic or camera is needed to join P2P room"}),{success:!1,reason:"No media tracks available"};const y=!(!g||!this._micStream),G=!(!C||!this._webCamStream);!0===i&&"moderator"!==E&&"bot"!==E?(ji.info("Authentication required; deferring self-add until moderator approval"),this._pendingP2pAuth=!0,this._pendingSelfP2pInfo={peerName:A,peerType:E,audioStatus:y,videoStatus:G}):(ji.info("Adding self to peers"),this._addSelfToPeersP2p(A,E,y,G,w,a));const 
R={id:"joinRoom",type:"r",peerId:this.data.inputParams.peerId,name:A,room:this.data.inputParams.roomId,roomDisplayName:n||`room-${1e5+Math.round(9e5*Math.random())}`,roomPassword:D,isRoomPassword:!!D,participantType:"moderator"===E?"moderator":"bot"===E?"bot":"participant",camera:B||"default",mic:Q||"default",videoStatus:!(!C||!this._webCamStream),audioStatus:!(!g||!this._micStream),authenticationRequired:i,roomType:this.data.inputParams.roomType,browser:this._client,usageType:"sdk",enableTranscription:s,enableChatOption:w,enableScreenSharing:a};return ji.info("Sending P2P join message:%O",R),this._sendMessage(R),{success:!0}}catch(A){return ji.error("Failed to join P2P room:%O",A),this.emit("error",{type:"joinError",message:A.message}),{success:!1,reason:A.message}}};_acquireVideoForP2p=async({deviceId:A,vbdetails:I})=>{ji.info("Acquiring video for P2P with deviceId:%s",A);try{const g={deviceId:A?{exact:A}:void 0,width:{ideal:1280},height:{ideal:720},frameRate:{ideal:this.data.inputParams.forceFPS||25}},C=await navigator.mediaDevices.getUserMedia({video:g,audio:!1});let Q=C.getVideoTracks()[0];if(I){ji.info("vbdetails provided in _acquireVideoForP2p",I);try{const A=Xi?.getVBStream?.()||Xi?._localVBStream;let g=null;if(A&&"function"==typeof A.getVideoTracks&&A.getVideoTracks().length>0&&"live"===A.getVideoTracks()[0].readyState)g=A.getVideoTracks()[0],ji.debug("Using existing Virtual Background track (P2P acquire)");else{const A=await Xi.initializePipeline(Q,I);A&&A.vbStream&&"function"==typeof A.vbStream.getVideoTracks&&A.vbStream.getVideoTracks().length>0&&(g=A.vbStream.getVideoTracks()[0],ji.debug("Initialized new Virtual Background pipeline (P2P acquire)"))}if(g){const A=new MediaStream([g]);this._webCamStream=A}else this._webCamStream=C}catch(A){ji.debug("VB init failed or skipped in _acquireVideoForP2p"),this._webCamStream=C}}else this._webCamStream=C;return ji.info("Video acquired successfully for P2P"),C}catch(A){throw ji.error("Failed to acquire video for P2P:%O",A),A}};_acquireAudioForP2p=async({deviceId:A})=>{ji.info("Acquiring audio for P2P with deviceId:%s",A);try{const I={deviceId:A?{exact:A}:void 0,autoGainControl:this.data.inputParams.autoGainControl??!0,echoCancellation:this.data.inputParams.echoCancellation??!0,noiseSuppression:this.data.inputParams.noiseSuppression??!0,sampleRate:this.data.inputParams.sampleRate||44e3,channelCount:this.data.inputParams.channelCount||1},g=await navigator.mediaDevices.getUserMedia({audio:I,video:!1});return this._micStream=g,ji.info("Audio acquired successfully for P2P"),g}catch(A){throw ji.error("Failed to acquire audio for P2P:%O",A),A}};_addSelfToPeersP2p=(A,I,g,C,Q,B)=>{const E=this.data.inputParams.roomType===RE?"moderator"===I?"moderator":"presenter":I,i={id:this.data.inputParams.peerId,displayName:`${A} (You)`,participantType:E,audioStatus:g,videoStatus:C,audioTrack:this._micStream?this._micStream.getAudioTracks()[0]:null,videoTrack:this._webCamStream?this._webCamStream.getVideoTracks()[0]:null};this._peers.set(this.data.inputParams.peerId,i),this.emit("newPeer",{peerId:this.data.inputParams.peerId,peerName:`${A} (You)`,type:"local",peerRole:E,participantType:E,enableChatOption:Q,enableScreenSharing:B}),ji.info("Added self to 
peers:%O",i);try{!1===g?this.emit("peerMuted",{peerId:this.data.inputParams.peerId,type:"local"}):!0===g&&this.emit("peerUnMuted",{peerId:this.data.inputParams.peerId,type:"local"}),!1===C&&this.emit("videoEnd",{peerId:this.data.inputParams.peerId,videoTrack:null}),!0===g&&this._micStream&&!this._emittedLocalMicStart&&(this.emit("micStart",{peerId:this.data.inputParams.peerId,audioTrack:this._micStream.getAudioTracks()[0],type:"local"}),this._emittedLocalMicStart=!0),!0===C&&this._webCamStream&&!this._emittedLocalVideoStart&&(this.emit("videoStart",{peerId:this.data.inputParams.peerId,videoTrack:this._webCamStream.getVideoTracks()[0],type:"local"}),this._emittedLocalVideoStart=!0)}catch(A){}};authenticateUser=A=>{ji.info("Moderator authentication requested:%O",A),this.emit("moderatorAuthentication",{moderatorName:A.moderatorName,requesterName:A.requesterName,requesterPeerId:A.requesterPeerId,text:A.title})};authenticationRequested=A=>{ji.info("Moderator authentication requested:%O",A),this.emit("authenticationRequested",{requesterName:A.requesterName,requesterPeerId:this.data.inputParams.peerId,text:A.title})};allowRoomJoin=A=>{if(!A)return ji.error("peerId can't be undefined!"),{success:!1,reason:"PeerId can't be undefined"};ji.info("Allow user to join room:%O",A);let I={id:"userAuthenticated",peerId:A,roomName:this.data.inputParams.roomId,moderator:this.data.inputParams.peerId};this._sendMessage(I)};denyRoomJoin=A=>{if(!A)return ji.error("peerId can't be undefined!"),{success:!1,reason:"PeerId can't be undefined"};ji.info("Deny user to join room:%O",A);let I={id:"userDenied",peerId:A,roomName:this.data.inputParams.roomId,moderator:this.data.inputParams.peerId};this._sendMessage(I)};setSpeakingWhileMutedDetection(A=!0){this._mutedSpeakingDetectionEnabled=A,ji.debug("Speaking while muted detection "+(A?"enabled":"disabled"))}setSpeakingThreshold(A=-50){this._speakingThreshold=A,ji.debug(`Speaking threshold set to: ${A}dB`)}async _initializeAudioMonitoring(){if(this._micStream)try{this._audioContext=new AudioContext,this._audioAnalyser=this._audioContext.createAnalyser(),this._audioAnalyser.fftSize=512,this._audioAnalyser.smoothingTimeConstant=.3;const A={audio:{deviceId:this._mic.device?{exact:this._mic.device.deviceId}:void 0,echoCancellation:!1,noiseSuppression:!1,autoGainControl:!1}};this._micMonitorStream=await navigator.mediaDevices.getUserMedia(A),this._audioContext.createMediaStreamSource(this._micMonitorStream).connect(this._audioAnalyser),ji.debug("Audio monitoring initialized successfully")}catch(A){ji.error("Error initializing audio monitoring:%o",A)}}_getAudioLevel(){if(!this._audioAnalyser)return-1/0;const A=this._audioAnalyser.frequencyBinCount,I=new Uint8Array(A);this._audioAnalyser.getByteFrequencyData(I);let g=0;for(let C=0;C<A;C++)g+=I[C];const C=g/A,Q=20*Math.log10(C/255);return isFinite(Q)?Q:-1/0}_startSpeakingWhileMutedDetection(){this._mutedSpeakingDetectionEnabled&&this._audioAnalyser&&(this._speakingWhileMutedInterval=setInterval(()=>{if(!this._micProducer||!this._micProducer.paused)return;const A=this._getAudioLevel();if(A>this._speakingThreshold){const I=Date.now();I-this._lastMutedSpeakingNotification>this._mutedSpeakingCooldown&&(this._lastMutedSpeakingNotification=I,this.data.inputParams.peerId===this.peerId&&(this.emit("speakingWhileMuted",{peerId:this.data.inputParams.peerId,audioLevel:A,timestamp:I,message:"You appear to be speaking while muted"}),ji.debug(`Speaking while muted detected - Audio level: 
${A}dB`)))}},100))}_stopSpeakingWhileMutedDetection(){this._speakingWhileMutedInterval&&(clearInterval(this._speakingWhileMutedInterval),this._speakingWhileMutedInterval=null)}_cleanupAudioMonitoring(){this._stopSpeakingWhileMutedDetection(),this._micMonitorStream&&(this._micMonitorStream.getTracks().forEach(A=>A.stop()),this._micMonitorStream=null),this._audioContext&&"closed"!==this._audioContext.state&&(this._audioContext.close(),this._audioContext=null),this._audioAnalyser=null}async diagnoseAudio(){ji.debug("Starting comprehensive audio diagnostic...");const A={timestamp:Date.now(),browser:this._client,permissions:{},devices:{},connectivity:{},currentSetup:{},recommendations:[]};try{return A.permissions=await this._testAudioPermissions(),A.devices=await this._testAudioDevices(),A.currentSetup=await this._testCurrentMicSetup(),A.connectivity=await this._testWebRTCConnectivity(),A.recommendations=this._generateAudioRecommendations(A),this._audioTroubleShootData.lastDiagnostic=A,this.emit("audioDiagnosticComplete",{peerId:this.data.inputParams.peerId,diagnostic:A}),A}catch(I){return ji.error("Audio diagnostic failed:",I),A.error=I.message,A}}async _testAudioPermissions(){const A={granted:!1,state:"unknown",error:null};try{if(navigator.permissions){const I=await navigator.permissions.query({name:"microphone"});A.state=I.state,A.granted="granted"===I.state}const I=await navigator.mediaDevices.getUserMedia({audio:!0,video:!1});A.granted=!0,A.actuallyGranted=!0,I.getTracks().forEach(A=>A.stop())}catch(I){A.error=I.name,A.actuallyGranted=!1,ji.error("Permission test failed:",I)}return A}getSystemHealthStatus(){return{sdk:{roomStatus:this._roomStatus,isConnected:"connected"===this._roomStatus,micActive:!!this._micProducer&&!this._micProducer.closed,micMuted:this._micProducer?.paused,cameraActive:!!this._webcamProducer&&!this._webcamProducer.closed,screenSharing:!!this._shareProducer&&!this._shareProducer.closed},transports:{send:this._sendTransport?{id:this._sendTransport.id,connectionState:this._sendTransport.connectionState,iceState:this._sendTransport.iceState,dtlsState:this._sendTransport.dtlsState}:null,recv:this._recvTransport?{id:this._recvTransport.id,connectionState:this._recvTransport.connectionState,iceState:this._recvTransport.iceState,dtlsState:this._recvTransport.dtlsState}:null},audio:{context:this._audioContext?.state,analyser:!!this._audioAnalyser,currentLevel:this._getAudioLevel(),speaking:this._getAudioLevel()>this._speakingThreshold,monitorStream:!!this._micMonitorStream},streams:{mic:this._micStream?.active,camera:this._webCamStream?.active,micTracks:this._micStream?.getTracks()?.length||0,cameraTracks:this._webCamStream?.getTracks()?.length||0}}}async testNetworkConnectivity(){const A={timestamp:Date.now(),stun:{working:!1,latency:null},turn:{working:!1,latency:null},bandwidth:{upload:null,download:null},packetLoss:null};try{const I=Date.now(),g=new RTCPeerConnection({iceServers:[{urls:"stun:stun.l.google.com:19302"}]}),C=(g.createDataChannel("test"),await g.createOffer());await g.setLocalDescription(C),await new Promise(A=>{g.onicecandidate=I=>{I.candidate||A()},setTimeout(A,5e3)}),A.stun.working="failed"!==g.iceConnectionState,A.stun.latency=Date.now()-I,g.close()}catch(A){console.error("Network connectivity test failed:",A)}return A}async assessAudioQuality(A=5e3){if(!this._micStream)throw new Error("No active microphone stream");const I={duration:A,samples:[],averageLevel:0,peakLevel:-1/0,quietSamples:0,clipSamples:0,quality:"unknown"};try{const g=new 
AudioContext,C=g.createAnalyser();C.fftSize=1024,g.createMediaStreamSource(this._micStream).connect(C);const Q=C.frequencyBinCount,B=new Uint8Array(Q),E=Date.now(),i=100;return new Promise(D=>{const n=setInterval(()=>{C.getByteFrequencyData(B);let i=0;for(let A=0;A<Q;A++)i+=B[A];const o=i/Q,s=20*Math.log10(o/255);if(isFinite(s)&&(I.samples.push(s),I.peakLevel=Math.max(I.peakLevel,s),s<-70&&I.quietSamples++,s>-3&&I.clipSamples++),Date.now()-E>=A){clearInterval(n),g.close();const A=I.samples.filter(A=>isFinite(A));I.averageLevel=A.reduce((A,I)=>A+I,0)/A.length;const C=I.quietSamples/A.length*100;I.clipSamples/A.length*100>10?I.quality="poor-clipping":C>80?I.quality="poor-quiet":I.averageLevel>-30?I.quality="good":I.averageLevel>-50?I.quality="fair":I.quality="poor-low",D(I)}},i)})}catch(A){throw new Error(`Audio quality assessment failed: ${A.message}`)}}async attemptAutoRemediation(){const A=[],I=this.getSystemHealthStatus();try{"failed"===I.transports.send?.connectionState&&(await this.restartIce(I.transports.send.id,"send"),A.push("Restarted send transport")),"failed"===I.transports.recv?.connectionState&&(await this.restartIce(I.transports.recv.id,"recv"),A.push("Restarted receive transport")),!I.audio.analyser&&this._micStream&&(this._cleanupAudioMonitoring(),await this._initializeAudioMonitoring(),A.push("Restarted audio monitoring")),I.sdk.micActive&&!I.streams.mic&&(await this.disableMic(),await this.enableMic(),A.push("Restarted microphone"));const g=await this.diagnoseAudio();if(g.devices?.working?.length>0){const I=g.currentSetup?.deviceLabel,C=g.devices.working[0];I!==C.label&&(await this.changeAudioInput({deviceId:C.deviceId}),A.push(`Switched to working device: ${C.label}`))}return{success:!0,fixes:A}}catch(I){return{success:!1,error:I.message,fixes:A}}}async getEnhancedDeviceList(){try{const A=await navigator.mediaDevices.enumerateDevices(),I=[];for(const g of A){if("audioinput"!==g.kind)continue;const A={deviceId:g.deviceId,label:g.label,groupId:g.groupId,capabilities:null,testResult:null};try{const I=(await navigator.mediaDevices.getUserMedia({audio:{deviceId:{exact:g.deviceId}}})).getAudioTracks()[0];A.capabilities=I.getCapabilities(),A.testResult=await this._testSpecificDevice(g.deviceId,1e3),I.stop()}catch(I){A.testResult={working:!1,error:I.message}}I.push(A)}return I}catch(A){throw new Error(`Enhanced device enumeration failed: ${A.message}`)}}async optimizeAudioSettings(){const A=this.getSystemHealthStatus(),I=[];try{const g=await this.assessAudioQuality(3e3);return"poor-clipping"===g.quality?(await this.changeAudioInput({autoGainControl:!1,echoCancellation:!0,noiseSuppression:!0}),I.push("Disabled auto-gain control to prevent clipping")):"poor-quiet"===g.quality&&(await this.changeAudioInput({autoGainControl:!0,echoCancellation:!0,noiseSuppression:!1}),I.push("Enabled auto-gain control for low input levels")),"connected"===A.transports.send?.connectionState&&await this._sendTransport.getStats(),{success:!0,recommendations:I,qualityAssessment:g}}catch(A){return{success:!1,error:A.message,recommendations:I}}}async _testAudioDevices(){const A={available:[],current:null,working:[],failed:[]};try{const I=await navigator.mediaDevices.enumerateDevices();A.available=I.filter(A=>"audioinput"===A.kind).map(A=>({deviceId:A.deviceId,label:A.label,groupId:A.groupId})),A.current=this._mic.device;for(const I of A.available)try{const g=await 
this._testSpecificDevice(I.deviceId);g.working?A.working.push({...I,audioLevel:g.audioLevel,testDuration:g.duration}):A.failed.push({...I,error:g.error})}catch(g){A.failed.push({...I,error:g.message})}}catch(I){A.error=I.message}return A}async _testSpecificDevice(A,I=2e3){return new Promise(g=>{const C={working:!1,audioLevel:-1/0,duration:I,error:null};let Q=null,B=null,E=null;const i=()=>{Q&&Q.getTracks().forEach(A=>A.stop()),B&&"closed"!==B.state&&B.close()},D=setTimeout(()=>{i(),g(C)},I);navigator.mediaDevices.getUserMedia({audio:{deviceId:{exact:A}}}).then(A=>{Q=A,B=new(window.AudioContext||window.webkitAudioContext),E=B.createAnalyser(),E.fftSize=256,B.createMediaStreamSource(Q).connect(E);const n=E.frequencyBinCount,o=new Uint8Array(n),s=setInterval(()=>{E.getByteFrequencyData(o);let A=0;for(let I=0;I<n;I++)A+=o[I];const I=A/n,g=20*Math.log10(I/255);isFinite(g)&&g>C.audioLevel&&(C.audioLevel=g),I>0&&(C.working=!0)},100);setTimeout(()=>{clearInterval(s),clearTimeout(D),i(),g(C)},I-100)}).catch(A=>{clearTimeout(D),C.error=A.message,i(),g(C)})})}async _testCurrentMicSetup(){const A={isActive:!1,isProducing:!1,isMuted:!1,audioLevel:-1/0,deviceLabel:null,streamActive:!1,producerStats:null};try{if(A.isActive=!!this._micProducer,A.isProducing=!(!this._micProducer||this._micProducer.closed),A.isMuted=!(!this._micProducer||!this._micProducer.paused),A.deviceLabel=this._mic.device?.label,A.streamActive=!(!this._micStream||!this._micStream.active),this._micStream){const I=this._micStream.getAudioTracks();I.length>0&&(A.trackEnabled=I[0].enabled,A.trackReadyState=I[0].readyState,A.trackSettings=I[0].getSettings())}if(this._audioAnalyser&&(A.audioLevel=this._getAudioLevel()),this._micProducer&&this._sendTransport)try{const I=await this._sendTransport.getStats();A.producerStats=I}catch(I){A.producerStatsError=I.message}}catch(I){A.error=I.message}return A}async _testWebRTCConnectivity(){const A={sendTransport:null,recvTransport:null,iceConnectionState:null,dtlsState:null,error:null};try{this._sendTransport&&(A.sendTransport={id:this._sendTransport.id,connectionState:this._sendTransport.connectionState,iceState:this._sendTransport.iceState,dtlsState:this._sendTransport.dtlsState}),this._recvTransport&&(A.recvTransport={id:this._recvTransport.id,connectionState:this._recvTransport.connectionState,iceState:this._recvTransport.iceState,dtlsState:this._recvTransport.dtlsState})}catch(I){A.error=I.message}return A}_generateAudioRecommendations(A){const I=[];return A.permissions.granted||I.push({type:"critical",title:"Microphone Permission Required",description:"Please allow microphone access in your browser",action:"Grant microphone permission in browser settings"}),0===A.devices.working.length&&I.push({type:"critical",title:"No Working Audio Devices",description:"No functioning microphone devices detected",action:"Check if microphone is connected and enabled in system settings"}),A.currentSetup.isActive&&!A.currentSetup.streamActive&&I.push({type:"warning",title:"Current Microphone Not Working",description:"The selected microphone device appears to be inactive",action:"Try switching to a different microphone device"}),"failed"===A.connectivity.sendTransport?.connectionState&&I.push({type:"critical",title:"Connection Failed",description:"Unable to establish audio connection to server",action:"Check internet connection and try rejoining the room"}),A.currentSetup.audioLevel<-60&&I.push({type:"info",title:"Low Audio Level",description:"Your microphone level appears to be very low",action:"Check microphone 
volume in system settings or move closer to microphone"}),I}async quickAudioTest(){const A={working:!1,issues:[],timestamp:Date.now()};try{if(!this._micProducer)return A.issues.push("Microphone not active"),A;if(this._micProducer.closed)return A.issues.push("Microphone producer is closed"),A;if(!this._micStream||!this._micStream.active)return A.issues.push("Microphone stream is not active"),A;const I=this._micStream.getAudioTracks();if(0===I.length)return A.issues.push("No audio tracks found"),A;if("live"!==I[0].readyState)return A.issues.push("Audio track is not live"),A;if("connected"!==this._sendTransport.connectionState)return A.issues.push(`Send transport not connected: ${this._sendTransport.connectionState}`),A;A.working=!0}catch(I){A.issues.push(`Test error: ${I.message}`)}return this.emit("quickAudioTestComplete",{peerId:this.data.inputParams.peerId,result:A}),A}async listAudioOutputDevices(){try{if(this._deviceList&&this._deviceList.audioOutputDevices)return ji.debug("Using cached audio output devices:",this._deviceList.audioOutputDevices),{success:!0,devices:this._deviceList.audioOutputDevices};const A=await navigator.mediaDevices.enumerateDevices();return this._audioOutputDevices=A.filter(A=>"audiooutput"===A.kind),ji.debug("Found audio output devices:",this._audioOutputDevices),{success:!0,devices:this._audioOutputDevices.map(A=>({deviceId:A.deviceId,label:A.label||`Speaker ${A.deviceId.slice(-4)}`,groupId:A.groupId}))}}catch(A){return ji.error("Failed to enumerate audio output devices:",A),{success:!1,error:A.message}}}async testSpeakerDevice(A,I={}){ji.debug("Testing speaker device",A);const{testDuration:g=3e3,testFrequencies:C=[440,1e3,2e3],volume:Q=.3,requireUserConfirmation:B=!0}=I,E=`speaker-test-${A}-${Date.now()}`;try{if(!HTMLAudioElement.prototype.setSinkId)throw new Error("setSinkId is not supported in this browser");const I={deviceId:A,testId:E,timestamp:Date.now(),success:!1,frequencies:[],volume:Q,duration:g,userConfirmed:!1,error:null},i=new Audio;i.volume=Q,i.loop=!1,await i.setSinkId(A),I.setSinkId=!0,this._testAudioElements.set(E,i);for(const A of C){const Q=await this._playTestTone(i,A,g/C.length);I.frequencies.push(Q)}if(B){const g=await this._requestUserConfirmation(A,I);I.userConfirmed=g,I.success=g}else I.success=I.frequencies.every(A=>A.played);return this._speakerTestResults.set(A,I),this.emit("speakerTestComplete",{deviceId:A,testResult:I}),{success:!0,testResult:I}}catch(I){ji.error(`Speaker test failed for device ${A}:`,I);const g={deviceId:A,testId:E,timestamp:Date.now(),success:!1,error:I.message};return this._speakerTestResults.set(A,g),this.emit("speakerTestComplete",{deviceId:A,testResult:g}),{success:!1,error:I.message,testResult:g}}finally{this._cleanupTestAudio(E)}}async _playTestTone(A,I,g){return new Promise((C,Q)=>{try{const Q=new window.AudioContext,B=Q.createOscillator(),E=Q.createGain(),i=Q.createMediaStreamDestination();B.connect(E),E.connect(i),B.frequency.setValueAtTime(I,Q.currentTime),B.type="sine",E.gain.setValueAtTime(0,Q.currentTime),E.gain.linearRampToValueAtTime(.1,Q.currentTime+.1),E.gain.linearRampToValueAtTime(.1,Q.currentTime+g/1e3-.1),E.gain.linearRampToValueAtTime(0,Q.currentTime+g/1e3),A.srcObject=i.stream,B.start(),B.stop(Q.currentTime+g/1e3);const D=A.play();void 0!==D&&D.then(()=>{setTimeout(()=>{B.disconnect(),E.disconnect(),Q.close(),C({frequency:I,duration:g,played:!0,timestamp:Date.now()})},g)}).catch(A=>{Q.close(),C({frequency:I,duration:g,played:!1,error:A.message,timestamp:Date.now()})})}catch(A){Q(A)}})}async 
_requestUserConfirmation(A,I){return new Promise(g=>{this.emit("speakerTestConfirmationRequired",{deviceId:A,testResult:I,onConfirm:A=>g(A)}),setTimeout(()=>g(!1),1e4)})}async testCurrentSpeakerOutput(){try{const A={timestamp:Date.now(),currentDevice:this._currentSpeakerDevice,remoteAudioPresent:!1,audioElementFound:!1,volumeLevel:0,success:!1},I=document.querySelectorAll("audio"),g=document.querySelectorAll("video");let C=[];if(I.forEach(A=>{A.srcObject&&A.srcObject.getAudioTracks().length>0&&C.push(A)}),g.forEach(A=>{A.srcObject&&A.srcObject.getAudioTracks().length>0&&C.push(A)}),A.audioElementFound=C.length>0,A.elementsCount=C.length,C.length>0){for(const I of C)try{if(I.srcObject){const g=new AudioContext,C=g.createMediaStreamSource(I.srcObject),Q=g.createAnalyser();C.connect(Q),Q.fftSize=256;const B=Q.frequencyBinCount,E=new Uint8Array(B);Q.getByteFrequencyData(E);const i=E.reduce((A,I)=>A+I,0)/B;A.volumeLevel=Math.max(A.volumeLevel,i),A.remoteAudioPresent=i>0,g.close()}}catch(A){ji.debug("Could not analyze remote audio element:",A)}A.success=A.remoteAudioPresent}return this.emit("currentSpeakerTestComplete",A),{success:!0,testResult:A}}catch(A){return ji.error("Current speaker test failed:",A),{success:!1,error:A.message}}}async diagnoseSpeakers(){ji.debug("Starting comprehensive speaker diagnostic");const A={timestamp:Date.now(),browser:this._client,support:{},devices:{},currentOutput:{},remoteAudio:{},recommendations:[]};try{A.support={setSinkId:!!HTMLAudioElement.prototype.setSinkId,enumerateDevices:!!navigator.mediaDevices?.enumerateDevices,audioContext:!!window.AudioContext};const I=await this.listAudioOutputDevices();A.devices={available:I.devices||[],count:I.devices?.length||0,hasDefault:I.devices?.some(A=>"default"===A.deviceId)||!1};const g=await this.testCurrentSpeakerOutput();return A.currentOutput=g.testResult,A.remoteAudio=this._analyzeRemoteAudioSetup(),A.recommendations=this._generateSpeakerRecommendations(A),this.emit("speakerDiagnosticComplete",{diagnostic:A}),A}catch(I){return ji.error("Speaker diagnostic failed:",I),A.error=I.message,A}}_analyzeRemoteAudioSetup(){const A={consumers:0,activeStreams:0,audioElements:0,videoElements:0,totalTracks:0};try{this._consumers&&this._consumers.forEach(I=>{I&&"audio"===I.kind&&!I.closed&&A.consumers++});const I=document.querySelectorAll("audio"),g=document.querySelectorAll("video");I.forEach(I=>{A.audioElements++,I.srcObject&&I.srcObject.getAudioTracks().length>0&&(A.activeStreams++,A.totalTracks+=I.srcObject.getAudioTracks().length)}),g.forEach(I=>{A.videoElements++,I.srcObject&&I.srcObject.getAudioTracks().length>0&&(A.activeStreams++,A.totalTracks+=I.srcObject.getAudioTracks().length)}),0===A.activeStreams&&0===A.totalTracks&&A.consumers>0&&(A.activeStreams=A.consumers,A.totalTracks=A.consumers)}catch(I){ji.error("Remote audio analysis failed:",I),A.error=I.message}return A}_generateSpeakerRecommendations(A){const I=[];return A.support.setSinkId||I.push({type:"critical",title:"Audio Output Selection Not Supported",description:"Your browser does not support changing audio output devices",actions:["Use Chrome, Edge, or Firefox for audio output selection","Change system default audio device instead","Consider using a different browser"]}),0===A.devices.count&&I.push({type:"critical",title:"No Audio Output Devices Found",description:"No speakers or headphones detected",actions:["Check if speakers/headphones are connected","Verify audio drivers are installed","Try refreshing the page after connecting 
devices"]}),0===A.remoteAudio.consumers&&0===A.remoteAudio.activeStreams&&I.push({type:"warning",title:"No Remote Audio Detected",description:"Not receiving audio from other participants",actions:["Ask other participants to unmute their microphones","Check if you have muted remote participants","Verify your internet connection"]}),!A.currentOutput.success&&A.currentOutput.audioElementFound&&I.push({type:"warning",title:"Audio Output Issues",description:"Remote audio present but may not be playing correctly",actions:["Check system volume levels","Try switching to a different audio output device","Verify the selected output device is working"]}),0===I.length&&I.push({type:"success",title:"Audio Output System Healthy",description:"Speaker setup appears to be working correctly",actions:["Your audio output is configured properly","Run individual device tests if experiencing issues"]}),I}async progressiveTestAllSpeakers(A={}){ji.debug("Progressive speaker test started");const{testDuration:I=2e3,requireConfirmation:g=!0,volume:C=.2}=A;try{const A=await this.listAudioOutputDevices();if(!A.success)throw new Error("Could not enumerate audio devices");const Q=[];let B=0;this.emit("progressiveSpeakerTestStarted",{totalDevices:A.devices.length,testDuration:I,requireConfirmation:g});for(const E of A.devices){B++,this.emit("progressiveSpeakerTestProgress",{currentIndex:B,totalDevices:A.devices.length,currentDevice:E,progress:B/A.devices.length*100});const i=await this.testSpeakerDevice(E.deviceId,{testDuration:I,volume:C,requireUserConfirmation:g,testFrequencies:[1e3]});Q.push({device:E,...i}),await new Promise(A=>setTimeout(A,500))}return this.emit("progressiveSpeakerTestComplete",{results:Q,workingDevices:Q.filter(A=>A.success),failedDevices:Q.filter(A=>!A.success)}),{success:!0,results:Q,summary:{total:Q.length,working:Q.filter(A=>A.success).length,failed:Q.filter(A=>!A.success).length}}}catch(A){return ji.error("Progressive speaker test failed:",A),{success:!1,error:A.message}}}_cleanupTestAudio(A){if(ji.debug("Cleaning up test audio"),this._testAudioElements.has(A)){const I=this._testAudioElements.get(A);try{I.pause(),I.srcObject=null,I.src=""}catch(A){ji.debug("Error cleaning up test audio:",A)}this._testAudioElements.delete(A)}}getCurrentSpeakerDevice(){return ji.debug("Getting current speaker device"),this._currentSpeakerDevice}async meetingSafeSpeakerTest(A){return ji.debug("Meeting safe speaker test started"),this.testSpeakerDevice(A,{testDuration:1500,testFrequencies:[800],volume:.1,requireUserConfirmation:!0})}hideUserAuthenticationDialog=A=>{ji.debug("authentication already done message:%o",A),this.emit("moderatorAuthStatus",{requesterId:A.requesterId,moderatorActed:A.peerId})};onNewPeer(A){const{peerId:I,displayName:g,participantType:C}=A;this._peers.set(I,{displayName:g,participantType:C,consumers:[]}),this.emit("newPeer",{peerId:I,peerName:g,type:this.data.inputParams.peerId===I?"local":"remote",peerRole:C})}async onExistingParticipants(A){if(ji.debug("Onexisting participant 
message:%O",A),this._routerRtpCapabilities=A.routerRtpCapabilities,this._roomStatus="connected",this._roomDisplayName=A.roomDisplayName,this._running=!0,this._socket.updateRoomJoinStatus(!0),this.emit("newPeer",{peerId:this.data.inputParams.peerId,peerName:this.data.inputParams.peerName,type:"local",peerRole:this.data.inputParams.peerType,userSettings:A.userSettings,participantType:A.participantType,stageModeSettings:A.stageMode,enableChatOption:A.enableChatOption,enableScreenSharing:A.enableScreenSharing}),this.data.inputParams.produce?await this._createSendTransport():ji.debug("Produce is false!"),this.data.inputParams.consume){await this._createRecvTransport();let I=this;A.peers&&A.peers.length>0&&A.peers.forEach(A=>{I.emit("newPeer",{peerId:A.peerId,peerName:A.name,type:"remote",peerRole:A.participantType})})}else ji.debug("Consume is false!")}setSubscriptions=({peerIds:A=[],stagedPeerIds:I=[],mediaTags:g=[]}={})=>{try{if(!Boolean(this.data?.inputParams?.manualSubscription))return ji.debug("setSubscriptions() ignored (manualSubscription is disabled)"),{success:!1,reason:"manualSubscription is disabled"};ji.debug("setSubscriptions() peerIds:%O",A);const C=this.data?.inputParams?.peerId,Q=(Array.isArray(A)?A:[]).filter(Boolean).filter(A=>A!==C),B=(Array.isArray(I)?I:[]).filter(Boolean).filter(A=>A!==C),E=new Set(Q),i=new Set(B),D=new Set([...E,...i]),n=Array.isArray(g)?g.filter(Boolean):[],o=n.length>0,s=o?new Set(n):null,w=[],a=new Map,y=[],G=new Map,R=[],S=new Map,h=[];for(const A of D){const I=this._subscribedPeers.get(A);if(o){if(null===I)continue;const g=I instanceof Set?I:new Set,C=[];for(const A of s)g.has(A)||C.push(A);if(C.length>0){E.has(A)?a.set(A,new Set(C)):S.set(A,new Set(C));for(const A of C)g.add(A);this._subscribedPeers.set(A,g)}else void 0===I&&this._subscribedPeers.set(A,g)}else(void 0===I||I instanceof Set)&&(E.has(A)?w.push(A):R.push(A),this._subscribedPeers.set(A,null))}for(const[A,I]of Array.from(this._subscribedPeers.entries()))if(!D.has(A))if(o)if(null===I)y.push(A),this._subscribedPeers.delete(A);else if(I instanceof Set){const g=[];for(const A of s)I.has(A)&&g.push(A);if(g.length>0){G.set(A,new Set(g));for(const A of g)I.delete(A);0===I.size?this._subscribedPeers.delete(A):this._subscribedPeers.set(A,I)}}else this._subscribedPeers.delete(A);else y.push(A),this._subscribedPeers.delete(A);const N=new Set(this._activeSubscribedPeerIds);this._activeSubscribedPeerIds=new Set(Array.from(E));for(const A of D){const I=N.has(A),g=E.has(A);!I&&g&&h.push(A),I&&!g&&i.has(A)&&(R.includes(A)||R.push(A))}y.length>0&&this.unsubscribePeers(y,n),w.length>0&&this.subscribePeers(w,n),R.length>0&&this.stagePeers(R,n),h.length>0&&this.unstagePeers(h,n);for(const[A,I]of G.entries())this.unsubscribePeers([A],Array.from(I));for(const[A,I]of a.entries())this.subscribePeers([A],Array.from(I));for(const[A,I]of S.entries())this.stagePeers([A],Array.from(I));try{for(const A of h)this._resumeExistingConsumersForPeer(A);for(const A of R)E.has(A)||this._pauseExistingConsumersForPeer(A)}catch(A){}return{success:!0,subscribedPeers:Array.from(this._subscribedPeers.keys()),activePeers:Array.from(this._activeSubscribedPeerIds.values())}}catch(A){return ji.error("setSubscriptions() failed:%O",A),{success:!1,reason:A?.message||"unknown error"}}};subscribeToPeer=({peerId:A,mediaTags:I=[]}={})=>this.subscribePeers([A],I);unsubscribeFromPeer=({peerId:A,mediaTags:I=[]}={})=>this.unsubscribePeers([A],I);subscribePeers=(A=[],I=[])=>{if(ji.debug("subscribePeers() 
peerIds:%O",A),!Boolean(this.data?.inputParams?.manualSubscription))return{success:!1,reason:"manualSubscription is disabled"};const g=(Array.isArray(A)?A:[]).filter(Boolean);if(0===g.length)return{success:!0};this._sendMessage({id:"subscribePeers",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,peerIds:g,mediaTags:Array.isArray(I)?I:[]});const C=I.length>0?new Set(I):null;for(const A of g)this._resumeExistingConsumersForPeer(A,C);return{success:!0}};stagePeers=(A=[],I=[])=>{if(ji.debug("stagePeers() peerIds:%O",A),!Boolean(this.data?.inputParams?.manualSubscription))return{success:!1,reason:"manualSubscription is disabled"};const g=(Array.isArray(A)?A:[]).filter(Boolean);if(0===g.length)return{success:!0};this._sendMessage({id:"stagePeers",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,peerIds:g,mediaTags:Array.isArray(I)?I:[],staged:!0});const C=I.length>0?new Set(I):null;for(const A of g)this._pauseExistingConsumersForPeer(A,C);return{success:!0}};unstagePeers=(A=[],I=[])=>this.subscribePeers(A,I);setSubscribedTracks=(A,I={})=>{if(ji.debug("setSubscribedTracks() peerId:%s tracks:%O",A,I),!Boolean(this.data?.inputParams?.manualSubscription))return{success:!1,reason:"manualSubscription is disabled"};if(!A)return{success:!1,reason:"peerId is required"};const g={audio:"cam-audio",video:"cam-video",screenVideo:"screen-video",screenAudio:"screen-audio"},C=[],Q=[],B=[];for(const[A,E]of Object.entries(I)){const I=g[A];I?!0===E?C.push(I):"staged"===E?Q.push(I):!1===E&&B.push(I):ji.warn("setSubscribedTracks: unknown track type:%s",A)}return B.length>0&&this.unsubscribePeers([A],B),Q.length>0&&this.stagePeers([A],Q),C.length>0&&this.subscribePeers([A],C),{success:!0}};batchSetSubscribedTracks=(A={})=>{if(ji.debug("batchSetSubscribedTracks() peerTracks:%O",A),!Boolean(this.data?.inputParams?.manualSubscription))return{success:!1,reason:"manualSubscription is disabled"};const I={audio:"cam-audio",video:"cam-video",screenVideo:"screen-video",screenAudio:"screen-audio"},g=new Map,C=new Map,Q=new Map;for(const[B,E]of Object.entries(A))if(B&&E)for(const[A,i]of Object.entries(E)){const E=I[A];E&&(!0===i?(g.has(E)||g.set(E,new Set),g.get(E).add(B)):"staged"===i?(C.has(E)||C.set(E,new Set),C.get(E).add(B)):!1===i&&(Q.has(E)||Q.set(E,new Set),Q.get(E).add(B)))}for(const[A,I]of Q.entries())I.size>0&&this.unsubscribePeers(Array.from(I),[A]);for(const[A,I]of C.entries())I.size>0&&this.stagePeers(Array.from(I),[A]);for(const[A,I]of g.entries())I.size>0&&this.subscribePeers(Array.from(I),[A]);return{success:!0}};getEffectiveSubscriptionSnapshot=({includeHistory:A=!0}={})=>{try{const I=Boolean(this.data?.inputParams?.manualSubscription),g=this.data?.inputParams?.roomId,C=this.data?.inputParams?.peerId,Q=Array.from(this._subscribedPeers?.keys?.()||[]),B=Array.from(this._activeSubscribedPeerIds?.values?.()||[]),E=new Set(B),i=Q.filter(A=>A&&!E.has(A)),D={},n=Array.from(this._consumers?.values?.()||[]);for(const A of n){if(!A||A.closed)continue;const I=A?.appData?.peerId,g=A?.appData?.mediaTag;if(!I||!g)continue;const C=Boolean(A.paused),Q=A.kind;D[I]||(D[I]={peerId:I,mode:"none",tracks:{},audio:{cam:{active:!1,staged:!1},screen:{active:!1,staged:!1}},video:{cam:{active:!1,staged:!1},screen:{active:!1,staged:!1}}}),D[I].tracks[g]={consumerId:A.id,kind:Q,mediaTag:g,paused:C,staged:C,active:!C};const 
B="screen-audio"===g||"screen-video"===g?"screen":"cam";"audio"===Q?C?D[I].audio[B].staged=!0:D[I].audio[B].active=!0:"video"===Q&&(C?D[I].video[B].staged=!0:D[I].video[B].active=!0)}const o=[],s=[],w=[];for(const[A,I]of Object.entries(D)){const g=Object.values(I.tracks).some(A=>A&&A.active),C=Object.values(I.tracks).some(A=>A&&A.staged);I.mode=g?"subscribed":C?"staged":"none","subscribed"===I.mode?o.push(A):"staged"===I.mode&&s.push(A)}for(const A of Q)A&&(D[A]||w.push(A));const a={ts:Date.now(),manualSubscription:I,roomId:g,selfPeerId:C,desired:{peers:Q,activePeers:B,stagedPeers:i},effective:{peers:D,activePeers:o,stagedPeers:s,nonePeers:w}};return A&&(a.history={admissionDenied:Array.from(this._subscriptionDebug?.admissionDenied||[]),preemptions:Array.from(this._subscriptionDebug?.preemptions||[]),lastResponses:this._subscriptionDebug?.lastResponses||{}}),a}catch(A){return{ts:Date.now(),error:A?.message||"failed to compute snapshot"}}};getEffectiveSubscriptionsSnapshot=(A={})=>this.getEffectiveSubscriptionSnapshot(A);emitEffectiveSubscriptionSnapshot=(A={})=>{const I=this.getEffectiveSubscriptionSnapshot(A);try{this.emit("effectiveSubscriptionSnapshot",I)}catch(A){}return I};unsubscribePeers=(A=[],I=[])=>{if(ji.debug("unsubscribePeers() peerIds:%O",A),!Boolean(this.data?.inputParams?.manualSubscription))return{success:!1,reason:"manualSubscription is disabled"};const g=(Array.isArray(A)?A:[]).filter(Boolean);return 0===g.length||this._sendMessage({id:"unsubscribePeers",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,peerIds:g,mediaTags:Array.isArray(I)?I:[]}),{success:!0}};sendCustomMessage=(A,I="general",g=null,C,Q,B={})=>{const E={id:"customMessage",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,data:A,type:I,recieverPeerId:g,senderType:C,messageType:Q,customData:B};ji.debug("Room sendCustomMessage",E),this._sendMessage(E)};setCaptionPreference(A){try{const I={type:"captions_pref",show:!!A};return ji.debug("Room setCaptionPreference",I),this.sendCustomMessage(JSON.stringify(I),"custom",null,"participant","preference"),{success:!0}}catch(A){return ji.error("Failed to send caption preference",A),{success:!1,reason:A?.message}}}processCustomMessage=A=>{ji.debug("Room processCustomMessage",A),this.emit("customMessage",A)};updateCId=A=>{ji.debug("Received updateCId message",A),A.targetPeerId!==this.data.inputParams.peerId&&A.targetPeerId||this.emit("updateCId",{message:A,cId:A.cId,peerId:this.data.inputParams.peerId,isMyCId:A.targetPeerId===this.data.inputParams.peerId})};setCurrentlyActiveSpeaker(A){const{peerId:I,volume:g}=A.activeSpeaker;this._activeSpeaker=A.activeSpeaker,this.emit("activeSpeaker",{peerId:I,volume:g})}_createSendTransport=async()=>{ji.debug("Room _createSendTransport");try{this._device.loaded||(ji.debug("Room _createSendTransport","Going to load device with routerrtpcapabilities"),await this._device.load({routerRtpCapabilities:this._routerRtpCapabilities}));let A="send";this._sendTransport&&!this._sendTransport.closed||this._sendMessage({id:"createTransport",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,direction:A})}catch(A){ji.error("Room _createSendTransport",A)}};_createRecvTransport=async()=>{this._device.loaded||(ji.debug("loading device for creating recv transport"),await this._device.load({routerRtpCapabilities:this._routerRtpCapabilities})),this._recvTransport&&!this._recvTransport.closed||(ji.debug("receive transport 
created"),this._sendMessage({id:"createTransport",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,direction:"recv"}))};handleCreateTransportRequest=async A=>{ji.debug("Room handleCreateTransportRequest():%O",A);let I,{transportOptions:g,direction:C}=A;try{if("recv"===C)I=await this._device.createRecvTransport(g),ji.debug("Room",`handleCreateTransportRequest() recv transport created ${I.id}`),this._recvTransport=I,this.handleRecvTransportListeners();else{if("send"!==C)throw new Error(`bad transport 'direction': ${C}`);I=await this._device.createSendTransport(g),ji.debug("Room",`handleCreateTransportRequest() send transport created [id:%s]${I.id}`),this._sendTransport=I,this.handleSendTransportListeners(),this.produceMedia()}}catch(A){ji.error("Room handleCreateTransportRequest() failed to create transport [error:%o]",A)}};checkTransportStatus(A){ji.debug("The transport status is:%s",A),ji.debug("sendTransport conectionState:%s, recvTransport ConnectionState:%s",this._sendTransport.connectionState,this._recvTransport.connectionState),this._sendMessage({id:"peerTransportState",sendTransport:this._sendTransport.connectionState,recvTransport:this._recvTransport.connectionState})}handleSendTransportListeners=()=>{this._sendTransport.on("connect",this.handleTransportConnectEvent),this._sendTransport.on("produce",this.handleTransportProduceEvent);let A=this;this._sendTransport.on("connectionstatechange",async I=>{if(ji.debug(`ConferenceRoom sendTransport connectionState ${I} & socketconnection state ${this._socket._ws?this._socket._ws.readyState:"null"}`),ji.debug(`ConferenceRoom sendTransport connectionState ${I} & socketconnection state ${A._socket.wsManager.connectionState}`),"disconnected"===I)setTimeout(async()=>{if("disconnected"===I)if(ji.debug("Connection state for Send Transport is:%s even after 5 seconds",I),ji.warn(`sendTransport connectionState ${I} & socketconnection state ${A._socket.wsManager.connectionState}`),"connected"===A._socket.wsManager.connectionState)A.restartIce(A._sendTransport.id,"send");else{for(;"connected"!==A._socket.wsManager.connectionState;)ji.debug(`socket not yet ready with state- ${A._socket.wsManager.connectionState}`),await OE(1e3);"connected"===A._roomStatus&&A.restartIce(A._sendTransport.id,"send")}},5e3);else if("failed"===I)if(ji.warn(`sendTransport connectionState ${I} & socketconnection state ${A._socket.wsManager.connectionState}`),"connected"===A._socket.wsManager.connectionState)A.restartIce(A._sendTransport.id,"send");else{for(;"connected"!==A._socket.wsManager.connectionState;)ji.debug(`handleSendTransportListeners() | socket not yet ready with state- ${A._socket.wsManager.connectionState}`),await OE(1e3);"connected"===A._roomStatus&&A.restartIce(A._sendTransport.id,"send")}ji.debug("ConferenceRoom",`send transport connection state change [state:%s]${I}`)})};handleTransportConnectEvent=({dtlsParameters:A},I,g)=>{try{const g=A=>{ji.debug("connect-transport action"),I(),Zi.remove("connectTransport")};Zi.push("connectTransport",g);let C={id:"connectTransport",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,transportId:this._sendTransport.id,dtlsParameters:A,direction:"send"};this._sendMessage(C)}catch(A){ji.error("handleTransportConnectEvent() failed [error:%o]",A),g(A)}};handleTransportProduceEvent=({kind:A,rtpParameters:I,appData:g},C,Q)=>{try{const Q=A=>{ji.debug("handleTransportProduceEvent callback [data:%o]",A),C({id:A.producerId}),Zi.remove("produce")};Zi.push("produce",Q);let 
B="cam-audio"===g.mediaTag&&void 0!==this.data.inputParams.audioStatus&&!this.data.inputParams.audioStatus;ji.debug(`handleTransportProduceEvent() | pause status->${B}`);let E={id:"sendTrack",transportId:this._sendTransport.id,peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,kind:A,rtpParameters:I,paused:B,appData:g,clientOs:this._client.os.name,browser:this._client.browser};this._sendMessage(E)}catch(A){ji.error("handleTransportProduceEvent() failed [error:%o]",A),Q(A)}};produceMedia=async()=>{this.data.inputParams.produce?(this.data.inputParams.produceAudio?this.enableMic({deviceId:this.data.inputParams.audioDeviceId?this.data.inputParams.audioDeviceId:null}):ji.debug("No need to produce audio!"),this._device.canProduce("video")&&(this.data.inputParams.produceVideo?(ji.debug("going to enable cam with vbdetails",this.data.inputParams.vbdetails),this.enableCam({deviceId:this.data.inputParams.videoDeviceId?this.data.inputParams.videoDeviceId:null,vbdetails:this.data.inputParams.vbdetails})):ji.debug("No need to produce video!"),this.data.inputParams.share&&this.enableShare({shareAudio:this.data.inputParams.shareAudio,enableSharingLayers:this._enableSharingLayers,shareBitRates:this.data.inputParams.shareBitRates}))):ji.warn("produce is false!")};handleRecvTransportListeners=async()=>{this._recvTransport.on("connect",this.handleRecvTransportConnectEvent);let A=this;this._recvTransport.on("connectionstatechange",async I=>{if(ji.debug(`ConferenceRoom recvTransport connectionState ${I} & socketconnection state ${this._socket._ws?this._socket._ws.readyState:"null"}`),"disconnected"===I)setTimeout(async()=>{if("disconnected"===I)if(ji.warn("Connection state for Recv Transport is:%s even after 5 seconds:%s, socket state",I,A._socket.wsManager.connectionState),"connected"===A._socket.wsManager.connectionState)A.restartIce(A._recvTransport.id,"recv");else{for(;"connected"!==A._socket.wsManager.connectionState;)ji.debug(`handleRecvTransportListeners() | socket not yet ready with state- ${A._socket.wsManager.connectionState}`),await OE(1e3);"connected"===A._roomStatus&&A.restartIce(A._recvTransport.id,"recv")}},5e3);else if("failed"===I)if(ji.warn("Connection state for Recv Transport is:%s even after 5 seconds:%s, socket state",I,A._socket.wsManager.connectionState),"connected"===A._socket.wsManager.connectionState)A.restartIce(A._recvTransport.id,"recv");else{for(;"connected"!==A._socket.wsManager.connectionState;)ji.debug(`handleRecvTransportListeners() | socket not yet ready with state- ${A._socket.wsManager.connectionState}`),await OE(1e3);"connected"===A._roomStatus&&A.restartIce(A._recvTransport.id,"recv")}else ji.debug("Connection state for Recv Transport is:%s even after 5 seconds:%s, socket state",I,A._socket.wsManager.connectionState)});let I={id:"transportsAvailable",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,rtpCapabilities:this._device.rtpCapabilities};this._sendMessage(I)};handleRecvTransportConnectEvent=({dtlsParameters:A},I,g)=>{try{const g=A=>{ji.debug("ConferenceRoom","connect-recv-transport action"),I(),Zi.remove("connectRecvTransport")};Zi.push("connectRecvTransport",g);let C={id:"connectTransport",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,transportId:this._recvTransport.id,dtlsParameters:A,direction:"recv"};this._sendMessage(C)}catch(A){ji.error("handleTransportConnectEvent() failed [error:%o]",A),g(A)}};handleRecvTrackRequest=async A=>{if(ji.debug("Room 
handleRecvTrackRequest",A),!this.data.inputParams.consume)return void ji.warn("I do not want to consume");let{senderPeerId:I,mediaTag:g,sender:C,audioStatus:Q,videoStatus:B,senderParticipantType:E,type:i,producerPaused:D,staged:n,...o}=A;ji.debug("New consumer created",o),o.id=o.consumerId,delete o.consumerId,ji.debug("ConferenceRoom",`senderPeerId is ->${I}`);let s=await this._recvTransport.consume({...o,streamId:`${I}-${"screen-video"===g||"screen-audio"===g?"share":"mic-webcam"}`,appData:{peerId:I,mediaTag:g}});for(;this._recvTransport&&"connected"!==this._recvTransport.connectionState;)ji.debug(`recv transport connstate${this._recvTransport.connectionState}`),await OE(100);this._consumers.set(s.id,s),s.on("transportclose",()=>{this._consumers.delete(s.id)});const{spatialLayers:w,temporalLayers:a}=$I.parseScalabilityMode(s.rtpParameters.encodings[0].scalabilityMode),y=this._peers.get(this.data.inputParams.peerId);ji.debug(`Consumer created for sender peerId ${I} for kind ${s.kind} for receiver peerId ${this.data.inputParams.peerId}`),ji.info("The old peer data is :%O",y),y?(y["screen-video"===g||"screen-audio"===g?`ss${s.kind}`:s.kind]={consumerId:s.id,type:i,locallyPaused:!1,remotelyPaused:D,rtpParameters:s.rtpParameters,spatialLayers:w,temporalLayers:a,preferredSpatialLayer:w-1,preferredTemporalLayer:a-1,priority:1,codec:s.rtpParameters.codecs[0].mimeType.split("/")[1],track:s.track,share:"screen-video"===g||"screen-audio"===g},ji.info("The new peer data is :%O",y),this._peers.set(this.data.inputParams.peerId,y)):(ji.info("Peer not found!"),this._peers.set(this.data.inputParams.peerId,{["screen-video"===g||"screen-audio"===g?`ss${s.kind}`:s.kind]:{consumerId:s.id,type:i,locallyPaused:!1,remotelyPaused:D,rtpParameters:s.rtpParameters,spatialLayers:w,temporalLayers:a,preferredSpatialLayer:w-1,preferredTemporalLayer:a-1,priority:1,codec:s.rtpParameters.codecs[0].mimeType.split("/")[1],track:s.track,share:"screen-video"===g||"screen-audio"===g}}));const G=Boolean(this.data?.inputParams?.manualSubscription),R=!G||this._activeSubscribedPeerIds.has(I),S=Boolean(n);if(!G||R&&!S)await this.resumeConsumer(s),this._emitConsumerStart({consumer:s,senderPeerId:I,mediaTag:g});else try{s.paused||await s.pause()}catch(A){}};_emitConsumerStart=({consumer:A,senderPeerId:I,mediaTag:g})=>{try{ji.debug("Emitting consumer start events for",I,g),"audio"===A.kind?"screen-audio"===g?this.emit("ssAudioStart",{peerId:I,audioTrack:A.track,type:"remote"}):this.emit("micStart",{peerId:I,audioTrack:A.track,type:"remote"}):"video"===A.kind&&("screen-video"===g?this.emit("ssVideoStart",{peerId:I,videoTrack:A.track,type:"remote"}):this.emit("videoStart",{peerId:I,videoTrack:A.track,type:"remote"}))}catch(A){ji.debug("_emitConsumerStart failed:%O",A)}};_pauseExistingConsumersForPeer=(A,I=null)=>{try{for(const g of Array.from(this._consumers.values()))if(g?.appData&&g.appData.peerId===A){if(null!==I){const A=g?.appData?.mediaTag;if(!A||!I.has(A))continue}try{g.paused||(g.pause(),ji.debug("Paused local consumer for peer:%s tag:%s",A,g?.appData?.mediaTag))}catch(A){}}}catch(A){}};_resumeExistingConsumersForPeer=async(A,I=null)=>{try{for(const g of Array.from(this._consumers.values()))if(g?.appData&&g.appData.peerId===A){if(null!==I){const A=g?.appData?.mediaTag;if(!A||!I.has(A))continue}try{g.paused&&(await this.resumeConsumer(g),this._emitConsumerStart({consumer:g,senderPeerId:A,mediaTag:g?.appData?.mediaTag}),ji.debug("Resumed consumer for peer:%s 
tag:%s",A,g?.appData?.mediaTag))}catch(A){}}}catch(A){}};resumeConsumer=async A=>{if(A){ji.debug("resume consumer",A.appData.peerId,A.appData.mediaTag);try{let I={id:"resumeConsumer",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,consumerId:A.id};this._sendMessage(I),await A.resume()}catch(A){ji.error("resumeConsumer error",A)}}};handleConnectTransportRequest=async A=>{ji.debug("handleTransportConnectRequest()");try{const I=Zi.get("connectTransport");if(!I)throw new Error("transport-connect action was not found");await I(A)}catch(A){ji.error("handleTransportConnectRequest() failed [error:%o]",A)}};handleConnectRecvTransportRequest=async A=>{ji.debug("handleTransportConnectRequest()");try{const I=Zi.get("connectRecvTransport");if(!I)throw new Error("recv transport-connect action was not found");await I(A)}catch(A){ji.error("handleRecvTransportConnectRequest() failed [error:%o]",A)}};handleSendTrackRequest=async A=>{ji.debug("ConferenceRoom","handleProduceRequest()");try{const I=Zi.get("produce");if(!I)throw new Error("produce action was not found");await I(A)}catch(A){ji.error("handleProduceRequest() failed [error:%o]",A)}};mediaToggled=A=>{switch(ji.debug("Media Toggled message:%O",A),A.type){case"video":ji.debug(`mediaToggled() | inside case video${A.videoStatus}`);const I=A.peerId??A.peer;if(A.videoStatus)if(this.data.inputParams.roomType===RE){const A=this._peers.get(I);A&&A.videoTrack?(this.emit("videoStart",{peerId:I,videoTrack:A.videoTrack,type:"cam-video"}),ji.info("Emitted videoStart for enabled video - track already exists")):(this.emit("videoStart",{peerId:I,videoTrack:null,type:"cam-video"}),ji.debug("Emitted videoStart for enabled video - track will arrive via ontrack"))}else this.emit("videoStart",{peerId:I,videoTrack:null,type:"cam-video"}),ji.debug("Emitted videoStart for enabled video in conference room");else this.emit("videoEnd",{peerId:I,type:"cam-video"}),ji.debug("Emitted videoEnd for disabled video");break;case"audio":ji.debug(`mediaToggled() | inside case audio${A.audioStatus}`);const g=A.peerId??A.peer;A.audioStatus?this.emit("peerUnMuted",{peerId:g,type:"remote"}):this.emit("peerMuted",{peerId:g,type:"remote"})}};closeConsumer=A=>{let{consumerId:I}=A;const g=this._consumers.get(I);if(!g)return void ji.warn("Consumer with id not found!:%s",I);const{peerId:C,mediaTag:Q}=g.appData;try{if("preempted"===A?.reason){try{this._subscriptionDebug?.preemptions&&(this._subscriptionDebug.preemptions.push({ts:Date.now(),...A,consumerId:I,senderPeerId:A?.senderPeerId||C,mediaTag:Q}),this._subscriptionDebug.preemptions.length>200&&this._subscriptionDebug.preemptions.shift())}catch(A){}this.emit("consumerPreempted",{consumerId:I,senderPeerId:A?.senderPeerId||C,mediaTag:Q,reason:A?.reason,preemptedBy:A?.preemptedBy,source:A?.source})}}catch(A){}ji.debug("Consumer closed for consumerId:%s, type:%s, appData:%o",I,g?.kind,g.appData);let B="screen-audio"===Q||"screen-video"===Q?`ss${g.kind}`:g.kind;g.close(),this._consumers.delete(I);let E=this._peers.get(this.data.inputParams.peerId);ji.debug("Peer data before deletion:%O",E),E[B]&&E[B].consumerId===I&&delete E[B],ji.debug("Peer data after deletion:%O",E),this._peers.set(this.data.inputParams.peerId,E),"audio"===g?.kind?(ji.debug("Going to emit micEnd, consumer closed for audio"),"screen-audio"===Q?this.emit("ssAudioStop",{peerId:C,track:null,type:"remote"}):this.emit("micEnd",{peerId:C,track:null,type:"remote"})):"video"===g?.kind&&(ji.debug("Going to emit videoEnd, consumer closed for 
video"),"screen-video"===Q?this.emit("ssVideoStop",{peerId:C,track:null,type:"remote"}):this.emit("videoEnd",{peerId:C,track:null,type:"remote"}))};peerLeft=A=>{ji.debug("Peer Left message is:%o",A);let{peerId:I}=A;if(I!==this.data.inputParams.peerId)this._peers.delete(I),this.emit("peerLeft",{peerId:I});else{try{this.emit("connectionStateChange",{status:"removed"}),this.emit("roomClosed",{roomId:this.data.inputParams.roomId,reason:"self-removed-server"})}catch(A){}this.leaveRoomCommon().catch(A=>ji.error("Failed to leaveRoomCommon after self peerLeft:%o",A))}};roomClosed=()=>{ji.info("room closed by Moderator"),this._peers=null,this.emit("roomClosed",{roomId:this.data.inputParams.roomId})};handleRemotePeerJoin=async A=>{ji.info("Handling remote peer join in P2P:%O",A);const{peer:I,name:g,audioStatus:C,videoStatus:Q,participantType:B,iceServers:E,callStartTime:i,cId:D,enableChatOption:n,enableScreenSharing:o}=A;if(this._remotePeerId=I,this._remoteDisplayName=g,this._remotePeerType=B,this._remoteAudioStatus=C,this._remoteVideoStatus=Q,E&&(this._iceServers=E),i&&this.emit("callStartTime",{callStartTime:i}),D&&this.emit("cIdUpdate",{cId:D}),!0===this._pendingP2pAuth&&this._pendingSelfP2pInfo&&!this._peers.has(this.data.inputParams.peerId)){try{const{peerName:A,peerType:I,audioStatus:g,videoStatus:C}=this._pendingSelfP2pInfo;this._addSelfToPeersP2p(A,I,g,C,n,o)}catch(A){}this._pendingP2pAuth=!1,this._pendingSelfP2pInfo=null}if(I){const A={id:I,displayName:g,participantType:B,audioStatus:C,videoStatus:Q,audioTrack:null,videoTrack:null};this._peers.set(I,A),this.emit("newPeer",{peerId:I,peerName:g,type:"remote",peerRole:B,participantType:B}),ji.info("Added remote peer to peers:%O",A),"connected"!==this._roomStatus&&(this._roomStatus="connected",ji.debug("Room status marked connected for P2P flow"));try{!1===C?this.emit("peerMuted",{peerId:I,type:"remote"}):!0===C&&this.emit("peerUnMuted",{peerId:I,type:"remote"}),!1===Q&&this.emit("videoEnd",{peerId:I,type:"remote"})}catch(A){}}E&&"moderator"===this.data.inputParams.peerType?(ji.info("Moderator creating P2P offer"),await this.createPeerOfferP2p(E)):ji.info("Participant waiting for offer")};createPeerOfferP2p=async A=>{ji.info("Creating P2P peer connection and offer");try{if(this._peerConnection=new RTCPeerConnection({iceServers:A}),this._startPeerStatsLoop(),this._peerConnection.addEventListener("iceconnectionstatechange",()=>{const A=this._peerConnection.iceConnectionState;"failed"!==A&&"disconnected"!==A&&"closed"!==A||this._stopPeerStatsLoop()}),this._localStream&&this._localStream.getTracks().length>0){const A=this._screenShareStream&&this._screenShareStream.getVideoTracks&&this._screenShareStream.getVideoTracks()[0]?this._screenShareStream.getVideoTracks()[0]:null,I=this._vbDetails&&this._vbDetails.stream&&this._vbDetails.stream.getVideoTracks&&this._vbDetails.stream.getVideoTracks()[0]?this._vbDetails.stream.getVideoTracks()[0]:null;this._localStream.getTracks().forEach(g=>{ji.info("Adding local track to peer connection:%s",g.kind),"video"===g.kind?A?(ji.info("Using screen share track for initial offer"),this._peerConnection.addTrack(A,this._localStream)):I?(ji.info("Using virtual background track for initial offer"),this._peerConnection.addTrack(I,this._localStream)):this._peerConnection.addTrack(g,this._localStream):this._peerConnection.addTrack(g,this._localStream)})}else ji.warn("No local tracks available when creating offer");this._peerConnection.onicecandidate=A=>{A.candidate&&(ji.debug("Sending ICE 
candidate:%O",A.candidate),this._sendMessage({id:"candidate",candidate:A.candidate,peer:this._remotePeerId}))},this._peerConnection.ontrack=A=>{ji.info("Received remote track:%s",A.track.kind),this.gotRemoteTrackP2p(A)},this._peerConnection.onremovetrack=A=>{ji.info("Remote track removed:%s",A.track.kind),this.handleRemoteTrackRemoved(A)},this._peerConnection.addEventListener("iceconnectionstatechange",()=>{ji.info("ICE connection state:%s",this._peerConnection.iceConnectionState),"connected"===this._peerConnection.iceConnectionState?this.emit("peerConnected",{peerId:this._remotePeerId}):"failed"!==this._peerConnection.iceConnectionState&&"disconnected"!==this._peerConnection.iceConnectionState||(ji.warn("ICE connection failed/disconnected"),this.emit("peerDisconnected",{peerId:this._remotePeerId}))}),this._peerConnection.onnegotiationneeded=async()=>{ji.info("Negotiation needed - creating new offer for track changes");try{const A=await this._peerConnection.createOffer();await this._peerConnection.setLocalDescription(A);const I=!!(this._screenShareStream&&this._screenShareStream.getVideoTracks&&this._screenShareStream.getVideoTracks()[0]);this._sendMessage({id:"offer",offer:this._peerConnection.localDescription,peer:this._remotePeerId,screenShare:I})}catch(A){ji.error("Failed to renegotiate P2P offer:%O",A)}};try{const A=!!(this._screenShareStream&&this._screenShareStream.getVideoTracks&&this._screenShareStream.getVideoTracks()[0]),I=await this._peerConnection.createOffer();await this._peerConnection.setLocalDescription(I),this._sendMessage({id:"offer",offer:this._peerConnection.localDescription,peer:this._remotePeerId,screenShare:A})}catch(A){ji.error("Error creating/sending initial P2P offer:%O",A),this.emit("error",{type:"connectionError",message:"Failed to create initial P2P offer"})}}catch(A){ji.error("Failed to create P2P offer:%O",A),this.emit("error",{type:"connectionError",message:"Failed to create P2P connection"})}};_startPeerStatsLoop(){!this._statsTimer&&this._peerConnection&&(this._statsTimer=setInterval(async()=>{try{const A=await this._peerConnection.getStats(),I=this._parsePeerStats(A);if(!I)return;this.emit?.("peerStats",I),this._sendMessage({id:"p2pConnectionStats",roomId:this.data.inputParams.roomId,peerId:this.data.inputParams.peerId,stats:I})}catch(A){}},this._statsIntervalMs))}_stopPeerStatsLoop(){this._statsTimer&&(clearInterval(this._statsTimer),this._statsTimer=null)}_parsePeerStats(A){let I;const g={peerId:this.data.inputParams.peerId,callId:this.data.inputParams.roomId,timestamp:Date.now(),audio:{inbound:{},outbound:{}},video:{inbound:{},outbound:{}}};return A.forEach(A=>{switch(A.type){case"candidate-pair":A.nominated&&(I=A);break;case"outbound-rtp":this._populateOutbound(A,g);break;case"inbound-rtp":this._populateInbound(A,g);break;case"track":"audio"===A.kind?(g.audioLevel=A.audioLevel??null,g.totalAudioEnergy=A.totalAudioEnergy??null):"video"===A.kind&&(g.video.trackFramesDropped=A.framesDropped??null);break;case"media-source":"video"===A.kind&&(g.video.frameWidth=A.width??null,g.video.frameHeight=A.height??null)}}),A.forEach(A=>{if("remote-inbound-rtp"===A.type){const 
I=A.kind;I&&g[I]&&g[I].outbound&&(g[I].outbound.producerScore=A.score??null,g[I].outbound.roundTripTime=A.roundTripTime??null,g[I].outbound.packetsLost=A.packetsLost??null,g[I].outbound.fractionLost=A.fractionLost??null)}}),["audio","video"].forEach(A=>{g[A].inbound&&(g[A].inbound.consumerScore=this._calculateConsumerScore(g[A].inbound,A))}),g.roundTripTime=I?.currentRoundTripTime??null,g.availableOutgoingBitrate=I?.availableOutgoingBitrate??null,g.availableIncomingBitrate=I?.availableIncomingBitrate??null,g.iceState=this._peerConnection?.iceConnectionState??null,g.candidatePairState=I?.state??null,g.qualityScore=this._calculateQualityScore(g),g}_populateOutbound(A,I){const g="audio"===A.kind?I.audio.outbound:I.video.outbound;g.packetsLost=A.packetsLost??null,g.framesPerSecond=A.framesPerSecond??null,g.keyFramesSent=A.keyFramesSent??null,g.framesDropped=A.framesDropped??null,g.qualityLimitationReason=A.qualityLimitationReason??null,g.packetsRetransmitted=A.retransmittedPacketsSent??null,g.nackCount=A.nackCount??null,g.bitrateKbps=this._calcBitrate(A,"bytesSent"),g.producerScore=A.score??null}_populateInbound(A,I){const g="audio"===A.kind?I.audio.inbound:I.video.inbound;g.packetsLost=A.packetsLost??null,g.jitter=A.jitter??null,g.framesPerSecond=A.framesPerSecond??null,g.frameWidth=A.frameWidth??null,g.frameHeight=A.frameHeight??null,g.packetsRetransmitted=A.retransmittedPacketsReceived??null,g.packetsReceived=A.packetsReceived??null,g.bitrateKbps=this._calcBitrate(A,"bytesReceived"),g.consumerScore=A.score??null}_calcBitrate(A,I){if(!A||"number"!=typeof A[I])return null;const g=this._lastStatCache[A.id],C=A.timestamp;let Q=null;if(g){const B=A[I]-g[I],E=(C-g.timestamp)/1e3;E>0&&(Q=Math.max(8*B/E/1e3,0))}return this._lastStatCache[A.id]={[I]:A[I],timestamp:C},Q}_calculateConsumerScore(A,I){let g=10;if(A.packetsReceived&&A.packetsLost){const I=A.packetsLost/(A.packetsReceived+A.packetsLost)*100;I>5?g-=3:I>3?g-=2:I>1?g-=1:I>.5&&(g-=.5)}if(A.jitter){const I=1e3*A.jitter;I>50?g-=1.5:I>30&&(g-=.8)}if("video"===I){const I=500,C=A.bitrateKbps??0;C>0&&(C<200?g-=2.5:C<I&&(g-=1.5))}else if("audio"===I){const I=20,C=A.bitrateKbps??0;C>0&&C<I&&(g-=2)}return Math.max(0,Math.min(10,g))}_calculateQualityScore(A){let I=10;if("connected"!==A.iceState&&"completed"!==A.iceState)return 0;const g=A.video?.inbound;if(g?.packetsReceived&&g?.packetsLost){const A=g.packetsLost/(g.packetsReceived+g.packetsLost)*100;A>5?I-=3:A>3?I-=2:A>1?I-=1:A>.5&&(I-=.5)}const C=g?.bitrateKbps??0;C>0&&(C<200?I-=2.5:C<500&&(I-=1.5));const Q=1e3*(g?.jitter??0);Q>50?I-=1.5:Q>30&&(I-=.8);const B=1e3*(A.roundTripTime??0);B>300?I-=2:B>200?I-=1:B>150&&(I-=.5);const E=A.video?.trackFramesDropped??0;return E>100?I-=1.5:E>50?I-=1:E>10&&(I-=.5),"bandwidth"===A.video?.outbound?.qualityLimitationReason&&(I-=1),Math.max(0,Math.min(10,I))}handleNegotiationNeededEventP2p=async()=>{ji.info("Negotiation needed - creating new offer for track changes"),this.restartICE()};handleOffer=async({offer:A,iceServers:I,screenShare:g})=>{ji.info("Handling P2P offer");try{const C=I||this._iceServers||[{urls:"stun:stun.l.google.com:19302"}];this._peerConnection=new RTCPeerConnection({iceServers:C}),this._startPeerStatsLoop(),this._peerConnection.addEventListener("iceconnectionstatechange",()=>{const A=this._peerConnection.iceConnectionState;"failed"!==A&&"disconnected"!==A&&"closed"!==A||this._stopPeerStatsLoop()}),this._localStream&&this._localStream.getTracks().length>0?this._localStream.getTracks().forEach(A=>{ji.info("Adding local track to peer 
connection:%s",A.kind),this._peerConnection.addTrack(A,this._localStream)}):ji.warn("No local tracks available when handling offer"),this._peerConnection.onicecandidate=A=>{A.candidate&&(ji.debug("Sending ICE candidate:%O",A.candidate),this._sendMessage({id:"candidate",candidate:A.candidate,peer:this._remotePeerId}))},this._peerConnection.ontrack=A=>{ji.info("Received remote track:%s",A.track.kind),this.gotRemoteTrackP2p(A,g)},this._peerConnection.onremovetrack=A=>{ji.info("Remote track removed:%s",A.track.kind),this.handleRemoteTrackRemoved(A,g)},this._peerConnection.onnegotiationneeded=async()=>{ji.info("Negotiation needed - creating new offer for track changes");try{const A=await this._peerConnection.createOffer();await this._peerConnection.setLocalDescription(A),ji.info("Sending negotiation offer to remote peer"),this._sendMessage({id:"offer",offer:this._peerConnection.localDescription,peer:this._remotePeerId,screenShare:!!(this._screenShareStream&&this._screenShareStream.getVideoTracks&&this._screenShareStream.getVideoTracks()[0])})}catch(A){ji.error("Failed to handle negotiation needed:%O",A),this.emit("error",{type:"negotiationError",message:"Failed to negotiate new track"})}},await this._peerConnection.setRemoteDescription(new RTCSessionDescription(A)),this._peerConnection.addEventListener("iceconnectionstatechange",()=>{ji.info("ICE connection state:%s",this._peerConnection.iceConnectionState),"connected"===this._peerConnection.iceConnectionState?this.emit("peerConnected",{peerId:this._remotePeerId}):"failed"!==this._peerConnection.iceConnectionState&&"disconnected"!==this._peerConnection.iceConnectionState||(ji.warn("ICE connection failed/disconnected"),this.restartICE(),this.emit("peerDisconnected",{peerId:this._remotePeerId}))});const Q=await this._peerConnection.createAnswer();await this._peerConnection.setLocalDescription(Q),ji.info("Sending answer to remote peer"),this._sendMessage({id:"answer",answer:this._peerConnection.localDescription,peer:this._remotePeerId})}catch(A){ji.error("Failed to handle P2P offer:%O",A),this.emit("error",{type:"connectionError",message:"Failed to handle P2P offer"})}};handleAnswer=async({answer:A})=>{ji.info("Handling P2P answer");try{await this._peerConnection.setRemoteDescription(new RTCSessionDescription(A)),ji.info("Remote description set successfully")}catch(A){ji.error("Failed to handle P2P answer:%O",A)}};handleCandidate=async A=>{ji.debug("Handling ICE candidate:%O",A);try{this._peerConnection?(await this._peerConnection.addIceCandidate(new RTCIceCandidate(A)),ji.debug("ICE candidate added successfully")):ji.warn("Received ICE candidate before peer connection exists")}catch(A){ji.error("Failed to add ICE candidate:%O",A)}};gotRemoteTrackP2p=(A,I=!1)=>{ji.info("Got remote track - kind:%s, screenShare:%s",A.track.kind,I);const g=A.track;if(this._remoteStream||(this._remoteStream=new MediaStream),this._remoteStream.addTrack(g),this._peers.has(this._remotePeerId)){const A=this._peers.get(this._remotePeerId);"audio"===g.kind?A.audioTrack=g:"video"===g.kind&&(A.videoTrack=g),this._peers.set(this._remotePeerId,A)}const C=I?"video"===g.kind?"screen-video":"screen-audio":"video"===g.kind?"cam-video":"cam-audio";I&&"video"===g.kind?this.emit("ssVideoStart",{peerId:this._remotePeerId,videoTrack:g,type:C}):"video"===g.kind?this.emit("videoStart",{peerId:this._remotePeerId,videoTrack:g,type:C}):"audio"===g.kind&&this.emit("micStart",{peerId:this._remotePeerId,audioTrack:g,type:C}),ji.info("Emitted track event for peer:%s 
type:%s",this._remotePeerId,C)};handleRemoteTrackRemoved=(A,I=!1)=>{ji.info("Handling remote track removal - kind:%s",A.track.kind);const g=A.track,C=this._remotePeerId;if(this._remoteStream&&this._remoteStream.removeTrack(g),this._peers.has(C)){const A=this._peers.get(C);"audio"===g.kind?(A.audioTrack=null,this.emit("micEnd",{peerId:C}),this.emit("peerMuted",{peerId:C,type:"remote"})):"video"===g.kind&&(A.videoTrack=null,I?this.emit("ssVideoStop",{peerId:C}):this.emit("videoEnd",{peerId:C,type:"cam-video"})),this._peers.set(C,A)}ji.info("Emitted track removal events for peer:%s kind:%s",C,g.kind)};userLeftRoom=A=>{ji.info("User left P2P room:%O",A);const{peerId:I}=A;this._peerConnection&&(this._peerConnection.close(),this._peerConnection=null,ji.info("Peer connection closed")),this._remoteStream&&(this._remoteStream.getTracks().forEach(A=>A.stop()),this._remoteStream=null),this._peers.has(I)&&(this._peers.delete(I),this.emit("peerLeft",{peerId:I})),this._remotePeerId=null,ji.info("Remote peer state cleared")};leaveRoomNewP2p=async A=>{ji.info("Leaving P2P room - action:%s",A);try{this._localStream&&this._localStream.getTracks().forEach(A=>{A.stop(),ji.debug("Stopped local track:%s",A.kind)}),this._micStream&&(this._micStream.getTracks().forEach(A=>A.stop()),this._micStream=null),this._webCamStream&&(this._webCamStream.getTracks().forEach(A=>A.stop()),this._webCamStream=null),this._peerConnection&&(this._peerConnection.close(),this._peerConnection=null);const I=A===hE?"leaveAndCloseRoom":"leaveRoomNew";this._sendMessage({id:I,peerId:this.data.inputParams.peerId,peer:this._remotePeerId}),this._clearP2pState(),this.emit("roomLeft",{roomId:this.data.inputParams.roomId}),ji.info("Left P2P room successfully")}catch(A){ji.error("Error leaving P2P room:%O",A)}};handleIceRestart=async A=>{ji.debug("Ice restart message received!!");try{this._peerConnection.ontrack=I=>{ji.info("Received remote track:%s",I.track.kind),this.gotRemoteTrackP2p(I,!!A.screenShare)}}catch(A){}await this._peerConnection.setRemoteDescription(A.offer);const I=await this._peerConnection.createAnswer();await this._peerConnection.setLocalDescription(I),this._sendMessage({id:"iceRestarted",answer:I,peer:this._remotePeerId})};handleIceRestartResponse=async A=>{ji.debug("Ice restart message response received!!"),this._peerConnection.setRemoteDescription(A.answer)};handleScreenShareP2p=A=>{if(ji.debug("Screen share P2P message received:%O",A),"start"===A?.type){const I=A.peerId||this._remotePeerId||A.peer;this.emit("ssVideoStart",{type:"start",peerId:I})}else if("end"===A?.type){const I=A.peerId||this._remotePeerId||A.peer;this.emit("ssVideoStop",{type:"end",peerId:I})}else ji.error("Unknown screen share P2P message type:%s",A?.type),this.emit("ssVideoStart",A)};_clearP2pState=()=>{this._peerConnection=null,this._localStream=null,this._remoteStream=null,this._remotePeerId=null,this._remoteDisplayName=null,this._remotePeerType=null,this._remoteAudioStatus=null,this._remoteVideoStatus=null,this._iceServers=null,this._peers=new Map,ji.debug("P2P state cleared")};async toggleMicP2p(A){if(this.data.inputParams.roomType!==RE)return void ji.warn("toggleMicP2p() called but not in P2P room");ji.info("Toggle mic P2P - enabled:%s",A);let I=null;try{if(this._peerConnection){const g=this._peerConnection.getSenders().find(A=>"audio"===A.track?.kind);if(g&&g.track){if(I=g.track,g.track.enabled=A,ji.info("Toggled audio track enabled state to:%s",A),this._localStream){const I=this._localStream.getAudioTracks()[0];I&&(I.enabled=A)}}else A&&(await 
this._acquireAudioForP2p({deviceId:this.data.inputParams.audioDeviceId}),this._micStream&&this._micStream.getAudioTracks()[0]&&(I=this._micStream.getAudioTracks()[0],this._peerConnection.addTrack(this._micStream.getAudioTracks()[0],this._localStream),ji.info("Added audio track to peer connection"),this._localStream&&!this._localStream.getAudioTracks().find(A=>A.id===this._micStream.getAudioTracks()[0].id)&&this._localStream.addTrack(this._micStream.getAudioTracks()[0])))}else A?(await this._acquireAudioForP2p({deviceId:this.data.inputParams.audioDeviceId}),this._micStream&&this._micStream.getAudioTracks()[0]&&(I=this._micStream.getAudioTracks()[0]),this._micStream&&this._localStream&&!this._localStream.getAudioTracks().find(A=>A.id===this._micStream.getAudioTracks()[0].id)&&this._localStream.addTrack(this._micStream.getAudioTracks()[0])):this._micStream&&(this._micStream.getTracks().forEach(A=>{A.stop(),this._localStream&&this._localStream.removeTrack(A)}),this._micStream=null);this._sendMessage({id:"mediaToggled",audioStatus:A,peer:this._remotePeerId,type:"audio",peerId:this.data.inputParams.peerId}),A&&I?this.emit("micStart",{peerId:this.data.inputParams.peerId,audioTrack:I,type:"local"}):A||this.emit("micEnd",{peerId:this.data.inputParams.peerId})}catch(A){ji.error("Error toggling mic P2P:%O",A),this.emit("error",{type:"mediaError",message:"Failed to toggle microphone"})}}async toggleCameraP2p(A){if(this.data.inputParams.roomType===RE){ji.info("Toggle camera P2P - enabled:%s",A);try{if(A){if(await this._acquireVideoForP2p({deviceId:this.data.inputParams.videoDeviceId,vbdetails:this.data.inputParams.vbdetails}),this._peerConnection&&this._webCamStream){const A=this._peerConnection.getSenders().find(A=>"video"===A.track?.kind);A&&A.track?(await A.replaceTrack(this._webCamStream.getVideoTracks()[0]),ji.info("Replaced video track in peer connection")):(this._peerConnection.addTrack(this._webCamStream.getVideoTracks()[0],this._localStream),ji.info("Added video track to peer connection")),this._localStream&&this._localStream.addTrack(this._webCamStream.getVideoTracks()[0])}this._sendMessage({id:"mediaToggled",videoStatus:!0,peer:this._remotePeerId,type:"video",peerId:this.data.inputParams.peerId}),this._webCamStream&&this._webCamStream.getVideoTracks()[0]&&this.emit("videoStart",{peerId:this.data.inputParams.peerId,videoTrack:this._webCamStream.getVideoTracks()[0],type:"local"})}else{if(this._peerConnection){const A=this._peerConnection.getSenders().find(A=>"video"===A.track?.kind);if(A){if(ji.info("Removing video track from peer connection"),this._peerConnection.removeTrack(A),this._localStream){const A=this._localStream.getVideoTracks()[0];A&&(A.stop(),this._localStream.removeTrack(A),ji.info("Stopped and removed video track from local stream"))}this._webCamStream&&(this._webCamStream.getTracks().forEach(A=>{A.stop()}),this._webCamStream=null)}}else this._webCamStream&&(this._webCamStream.getTracks().forEach(A=>{A.stop(),this._localStream&&this._localStream.removeTrack(A)}),this._webCamStream=null);this._sendMessage({id:"mediaToggled",videoStatus:!1,peer:this._remotePeerId,type:"video",peerId:this.data.inputParams.peerId}),this.emit("videoEnd",{peerId:this.data.inputParams.peerId,type:"cam-video"})}}catch(A){ji.error("Error toggling camera P2P:%O",A),this.emit("error",{type:"mediaError",message:"Failed to toggle camera"})}}else ji.warn("toggleCameraP2p() called but not in P2P room")}async changeAudioInputP2p(A){if(this.data.inputParams.roomType===RE){ji.info("Changing audio input P2P - 
deviceId:%s",A);try{if(this._micStream&&this._micStream.getTracks().forEach(A=>A.stop()),await this._acquireAudioForP2p({deviceId:A}),this._peerConnection&&this._micStream){const A=this._peerConnection.getSenders().find(A=>"audio"===A.track?.kind);A&&(await A.replaceTrack(this._micStream.getAudioTracks()[0]),ji.info("Replaced audio track with new device"))}this.emit("audioInputChanged",{deviceId:A})}catch(A){ji.error("Error changing audio input P2P:%O",A),this.emit("error",{type:"mediaError",message:"Failed to change audio input"})}}else ji.warn("changeAudioInputP2p() called but not in P2P room")}async changeVideoInputP2p(A){if(this.data.inputParams.roomType===RE){ji.info("Changing video input P2P - deviceId:%s",A);try{if(this._webCamStream&&this._webCamStream.getTracks().forEach(A=>A.stop()),await this._acquireVideoForP2p({deviceId:A,vbdetails:this.data.inputParams.vbdetails}),this._peerConnection&&this._webCamStream){const A=this._peerConnection.getSenders().find(A=>"video"===A.track?.kind);A&&(await A.replaceTrack(this._webCamStream.getVideoTracks()[0]),ji.info("Replaced video track with new device"))}this.emit("videoInputChanged",{deviceId:A})}catch(A){ji.error("Error changing video input P2P:%O",A),this.emit("error",{type:"mediaError",message:"Failed to change video input"})}}else ji.warn("changeVideoInputP2p() called but not in P2P room")}async enableScreenShareP2p(){if(this.data.inputParams.roomType===RE){ji.info("Enabling screen share P2P");try{if(this._screenShareStream)return void await this.disableScreenShareP2p();if(!this._peerConnection)return ji.warn("No peer connection available for screen share"),void this.emit("error",{type:"screenShareError",message:"No peer connection available"});const A=await navigator.mediaDevices.getDisplayMedia(oi);this._screenShareStream=A;let I=A.getVideoTracks()[0];if(!I)return ji.error("No video track in screen share stream"),void this.emit("error",{type:"screenShareError",message:"Failed to get screen share track"});ji.info("Adding screen share track alongside camera (no replace)");const g=this._peerConnection.addTrack(I,this._localStream);try{const A=this._peerConnection.getSenders();this._screenShareSender=A.find(A=>A.track===I)||g}catch(A){}this._videoWasActiveP2p=null,this._sendMessage({id:"screenShareP2p",peer:this._remotePeerId,type:"start",peerId:this.data.inputParams.peerId}),this.emit("screenShareStarted",{peerId:this.data.inputParams.peerId,track:I}),this.emit("ssVideoStart",{peerId:this.data.inputParams.peerId,videoTrack:I,type:"screen-video"}),I.onended=async()=>{ji.info("Screen share ended by user"),await this.disableScreenShareP2p()}}catch(A){ji.error("Error enabling screen share P2P:%O",A),this.emit("error",{type:"screenShareError",message:"Failed to start screen share"})}}else ji.warn("enableScreenShareP2p() called but not in P2P room")}async disableScreenShareP2p(){if(this.data.inputParams.roomType===RE){ji.info("Disabling screen share P2P");try{if(this._screenShareStream){const A=this._screenShareStream.getVideoTracks()[0];if(this._screenShareStream.getTracks().forEach(A=>A.stop()),this._screenShareStream=null,this._peerConnection){try{const I=this._peerConnection.getSenders(),g=this._screenShareSender||I.find(I=>I.track===A);g&&this._peerConnection.removeTrack(g)}catch(A){}if(this._localStream&&A){const 
I=this._localStream.getVideoTracks().find(I=>I===A);I&&this._localStream.removeTrack(I)}this._sendMessage({id:"screenShareP2p",peer:this._remotePeerId,type:"end",peerId:this.data.inputParams.peerId})}this.emit("ssVideoStop",{peerId:this.data.inputParams.peerId}),this.emit("screenShareStopped",{peerId:this.data.inputParams.peerId}),this._screenShareSender=null,this._videoWasActiveP2p=null}}catch(A){ji.error("Error disabling screen share P2P:%O",A),this.emit("error",{type:"screenShareError",message:"Failed to disable screen share"})}}else ji.warn("disableScreenShareP2p() called but not in P2P room")}async setCurrentlyActiveSpeakerP2p(){if(this.data.inputParams.roomType!==RE||!this._peerConnection)return{producerId:null,volume:null,peerId:null};try{let A={producerId:null,volume:null,peerId:null};const I=this._peerConnection.getReceivers().find(A=>A.track&&"audio"===A.track.kind),g=this._peerConnection.getSenders().find(A=>A.track&&"audio"===A.track.kind);if(!I||!g)return A;const C=await I.getStats();let Q=Math.round(1e3*[...C.values()].filter(A=>"audio"===A.kind&&A.id.includes("RTCInboundRTPAudioStream"))[0]?.audioLevel);const B=await g.getStats();let E=Math.round(1e3*[...B.values()].filter(A=>A.id.includes("RTCAudioSource")&&"audio"===A.kind)[0]?.audioLevel);return ji.debug("receiverAudioLevel:%s, senderAudioLevel:%s",Q,E),Q>0&&Q>E?(A.volume=Q,A.peerId=this._remotePeerId):E>0&&E>Q&&(A.volume=E,A.peerId=this.data.inputParams.peerId),A}catch(A){return ji.error("Error getting active speaker P2P:%O",A),{producerId:null,volume:null,peerId:null}}}async restartICE(){if(this.data.inputParams.roomType===RE&&this._peerConnection)try{ji.info("Restarting ICE for P2P connection");const A=await this._peerConnection.createOffer({iceRestart:!0});await this._peerConnection.setLocalDescription(A),this._sendMessage({id:"iceRestart",offer:A,screenShare:!!(this._screenShareStream&&this._screenShareStream.getVideoTracks&&this._screenShareStream.getVideoTracks()[0]),peer:this._remotePeerId}),ji.info("ICE restart offer sent")}catch(A){ji.error("Failed to restart ICE:%O",A),this.emit("error",{type:"connectionError",message:"Failed to restart ICE connection"})}else ji.warn("restartICE() called but not in P2P room or peer connection not available")}async handleReplaceTrackP2p({deviceLabel:A,mediaType:I,deviceList:g}){if(this.data.inputParams.roomType!==RE||!this._peerConnection)return ji.warn("handleReplaceTrackP2p() called but not in P2P room"),{success:!1};try{let C,Q,B,E;ji.info("Replacing track P2P - label:%s, type:%s",A,I);let i=null;if("video"===I){const I=(g||this.deviceList)?.videoDevices?.find(I=>I.label===A);if(i=I?.deviceId,!i)return ji.error("Video device not found with label:%s",A),{success:!1};wi.video.deviceId.exact=i,B=wi,C=this._localStream?.getVideoTracks()[0]}else{if("audio"!==I)return ji.error("Unknown media type:%s",I),{success:!1};{const I=(g||this.deviceList)?.audioDevices?.find(I=>I.label===A);if(i=I?.deviceId,!i)return ji.error("Audio device not found with label:%s",A),{success:!1};si.audio.deviceId.exact=i,B=si,C=this._localStream?.getAudioTracks()[0]}}if(!C)return ji.warn("No old track found to replace"),{success:!1};let D=C.enabled;E=await navigator.mediaDevices.getUserMedia(B),"video"===I?Q=E.getVideoTracks()[0]:"audio"===I&&(Q=E.getAudioTracks()[0]),Q.enabled=D;const n=this._peerConnection.getSenders().find(A=>A.track&&A.track.kind===Q.kind);return n&&(await n.replaceTrack(Q),ji.info("Replaced %s track in peer 
connection",I)),this._localStream.removeTrack(C),this._localStream.addTrack(Q),C.stop(),"video"===I?(this._webCamStream&&this._webCamStream.getTracks().forEach(A=>A.stop()),this._webCamStream=E):"audio"===I&&(this._micStream&&this._micStream.getTracks().forEach(A=>A.stop()),this._micStream=E),"video"===I?(this.emit("videoInputChanged",{deviceId:i,deviceLabel:A}),this.emit("videoStart",{peerId:this.data.inputParams.peerId,videoTrack:Q,type:"cam-video"})):(this.emit("audioInputChanged",{deviceId:i,deviceLabel:A}),this.emit("micStart",{peerId:this.data.inputParams.peerId,audioTrack:Q,type:"cam-audio"})),{success:!0}}catch(A){return ji.error("Error replacing track P2P:%O",A),this.emit("error",{type:"mediaError",message:"Failed to replace media track"}),{success:!1}}}close(){this._closed||(this._closed=!0,this._socket=null,this.data.inputParams={},ji.info("Room close()"),this._sendTransport&&this._sendTransport.close(),this._recvTransport&&this._recvTransport.close(),this._roomStatus="closed",this._running=!1)}async leaveRoom(){ji.debug("Leave room is called!!"),this.data.inputParams.roomType===RE?(ji.info("Leaving P2P room"),await this.leaveRoomNewP2p("leaveRoomNew")):"connected"===this._roomStatus?(this._sendMessage({id:"leaveRoomNew",peerId:this.data.inputParams.peerId,roomLeaveType:"client"}),await this.leaveRoomCommon()):ji.error("The room state is:%s",this._roomStatus)}async closeRoom(){ji.debug("Close room is called!!"),this.data.inputParams.roomType===RE?(ji.info("Closing P2P room"),await this.leaveRoomNewP2p(hE)):"connected"===this._roomStatus?(this._sendMessage({id:"leaveAndCloseRoom",peerId:this.data.inputParams.peerId,roomCloseType:"client"}),await this.leaveRoomCommon()):ji.error("The room state is:%s",this._roomStatus)}leaveRoomCommon=async()=>{try{ji.debug("Starting comprehensive room leave cleanup...");try{this._cleanupAudioMonitoring(),this._stopSpeakingWhileMutedDetection(),this._stopAutoAudioStreaming(),this._transcriptionActive&&this.stopTranscription(),Xi&&"function"==typeof Xi.cleanup&&Xi.cleanup()}catch(A){}const A=new Set;this._webcamProducer?.track&&A.add(this._webcamProducer.track),this._micProducer?.track&&A.add(this._micProducer.track),this._shareProducer?.track&&A.add(this._shareProducer.track),this._shareAudioProducer?.track&&A.add(this._shareAudioProducer.track),this._producers&&this._producers.size>0&&this._producers.forEach(I=>{I?.track&&A.add(I.track)}),this._webCamStream&&this._webCamStream.getTracks().forEach(I=>A.add(I)),this._micStream&&this._micStream.getTracks().forEach(I=>A.add(I));try{const I=Xi?.getVBStream?.()||Xi?._localVBStream;I&&"function"==typeof I.getTracks&&I.getTracks().forEach(I=>A.add(I))}catch(A){}const I=[];document.querySelectorAll("video, audio").forEach(g=>{g.srcObject&&"function"==typeof g.srcObject.getTracks&&(g.srcObject.getTracks().forEach(I=>A.add(I)),I.push({element:g,stream:g.srcObject}))}),ji.debug(`Found ${A.size} total tracks to stop`);let g=0;for(const I of A)try{I&&"live"===I.readyState&&"function"==typeof I.stop&&(I.stop(),g++,ji.debug(`Stopped ${I.kind} track: ${I.label||I.id}`))}catch(A){ji.warn("Error stopping track:",A)}if(ji.debug(`Stopped ${g} 
tracks`),this._sendTransport){try{this._sendTransport.close()}catch(A){}this._sendTransport=null}if(this._recvTransport){try{this._recvTransport.close()}catch(A){}this._recvTransport=null}this._webcamProducer=null,this._micProducer=null,this._shareProducer=null,this._shareAudioProducer=null,this._webCamStream=null,this._micStream=null,this._producers&&this._producers.clear(),this._consumers&&this._consumers.clear(),this._roomStatus="closed",this._running=!1,this._routerRtpCapabilities=null,await new Promise(A=>setTimeout(A,100));let C=0;document.querySelectorAll("video, audio").forEach(A=>{try{A.srcObject&&(A.srcObject=null,"function"==typeof A.pause&&A.pause(),"function"==typeof A.load&&A.load(),C++,ji.debug(`Force cleared ${A.nodeName} element`))}catch(A){ji.warn("Error clearing element:",A)}}),ji.debug(`Cleared ${C} DOM elements`);try{A.forEach(A=>{A&&"function"==typeof A.removeEventListener&&(A.removeEventListener("ended",()=>{}),A.removeEventListener("mute",()=>{}),A.removeEventListener("unmute",()=>{}))})}catch(A){}if(window.gc&&"function"==typeof window.gc)try{window.gc()}catch(A){}await new Promise(A=>setTimeout(A,200));try{const A=await this.reportActiveMediaUse();ji.debug("Final media usage report:",A);const I=[],g=A.dom.mediaElements.filter(A=>A.hasSrcObject&&A.tracks.length>0);g.forEach(A=>{A.tracks.forEach(g=>{"live"===g.readyState&&I.push({kind:g.kind,label:g.label,id:g.id,element:A.nodeName})})}),(I.length>0||g.length>0)&&(ji.warn("WARNING: Media elements or live tracks still detected after cleanup:",{liveTracks:I,elementsWithTracks:g.length}),await this.emergencyTrackCleanup())}catch(A){ji.error("Failed to generate final media usage report:",A)}}catch(A){ji.error("Error during room leave cleanup:",A),await this.emergencyTrackCleanup()}};emergencyTrackCleanup=async()=>{ji.debug("Performing emergency track cleanup...");try{const A=[];document.querySelectorAll("video, audio").forEach(I=>{if(I.srcObject&&"function"==typeof I.srcObject.getTracks){A.push(...I.srcObject.getTracks()),I.srcObject=null,"function"==typeof I.pause&&I.pause(),"function"==typeof I.load&&I.load();try{I.src=""}catch(A){}}}),A.forEach(A=>{try{"live"===A.readyState&&(A.stop(),ji.debug(`Emergency stopped ${A.kind}: ${A.label||A.id}`))}catch(A){}}),ji.debug(`Emergency cleanup completed - stopped ${A.length} tracks`),await new Promise(A=>setTimeout(A,300))}catch(A){ji.error("Emergency cleanup failed:",A)}};reportActiveMediaUse=async(A=!1)=>{const I={sdk:{micStreamTracks:[],camStreamTracks:[],vbStreamTracks:[],shareTracks:[],producers:[],consumers:[]},dom:{mediaElements:[]},timestamp:Date.now()},g=A=>{try{return{kind:A?.kind,enabled:A?.enabled,readyState:A?.readyState,label:A?.label,id:A?.id,muted:A?.muted}}catch(A){return{error:!0}}};try{this._micStream&&"function"==typeof this._micStream.getTracks&&(I.sdk.micStreamTracks=this._micStream.getTracks().map(g))}catch(A){}try{this._webCamStream&&"function"==typeof this._webCamStream.getTracks&&(I.sdk.camStreamTracks=this._webCamStream.getTracks().map(g))}catch(A){}try{const A=Xi?.getVBStream?.()||Xi?._localVBStream;A&&"function"==typeof 
A.getTracks&&(I.sdk.vbStreamTracks=A.getTracks().map(g))}catch(A){}try{this._shareProducer?.track&&I.sdk.shareTracks.push(g(this._shareProducer.track)),this._shareAudioProducer?.track&&I.sdk.shareTracks.push(g(this._shareAudioProducer.track))}catch(A){}try{this._producers&&this._producers.size>0&&this._producers.forEach((A,C)=>{I.sdk.producers.push({key:C,track:A?.track?g(A.track):null,id:A?.id,paused:A?.paused})})}catch(A){}try{this._consumers&&this._consumers.size>0&&this._consumers.forEach((A,C)=>{I.sdk.consumers.push({key:C,track:A?.track?g(A.track):null,id:A?.id,paused:A?.paused})})}catch(A){}try{const A=Array.from(document.querySelectorAll("video, audio"));I.dom.mediaElements=A.map(A=>{let I=[],C=null;try{const Q=A.srcObject;Q&&"function"==typeof Q.getTracks&&(I=Q.getTracks().map(g),C=Q.id)}catch(A){}return{nodeName:A.nodeName,muted:!!A.muted,paused:!!A.paused,hasSrcObject:!!A.srcObject,streamId:C,tracks:I,src:A.src||null,currentSrc:A.currentSrc||null}})}catch(A){}const C=[...I.sdk.micStreamTracks,...I.sdk.camStreamTracks,...I.sdk.vbStreamTracks,...I.sdk.shareTracks,...I.sdk.producers.map(A=>A.track).filter(Boolean),...I.sdk.consumers.map(A=>A.track).filter(Boolean),...I.dom.mediaElements.flatMap(A=>A.tracks)].filter(A=>A&&"live"===A.readyState);return I.summary={totalLiveTracks:C.length,elementsWithSrcObject:I.dom.mediaElements.filter(A=>A.hasSrcObject).length,elementsWithTracks:I.dom.mediaElements.filter(A=>A.tracks.length>0).length},I};async listDevicesInternal(){if(navigator.mediaDevices.ondevicechange=async A=>{let I=await bE();ji.info("Media devices changed!:%O",I),I.audioDevices&&I.audioDevices.length>0&&(this._deviceList.audioDevices=I.audioDevices),I.videoDevices&&I.videoDevices.length>0&&(this._deviceList.videoDevices=I.videoDevices),I.audioDevices&&I.audioDevices.length>0&&(this._deviceList.audioOutputDevices=I.audioDevicesOutput),vi=this._deviceList,this.emit("deviceListUpdated")},!this._deviceList){const A=await xE();if(A.success)return this._deviceList=A.deviceList,void(vi=this._deviceList)}}restartIce=async(A,I)=>{if("send"===I&&"connected"===this._sendTransport.connectionState||"recv"===I&&"connected"===this._recvTransport.connectionState)return void ji.debug("no need to restart ICE as transport now connected");ji.debug("websocket is ready and connectionstate is still disconnected, therefore going to restart ICE");let g={id:"restartIce",transportId:A,roomName:this.data.inputParams.roomId,peerId:this.data.inputParams.peerId};this._sendMessage(g)};restartIceResponse=A=>{ji.debug("restart ICE response:%o",A);const{transportId:I,iceParameters:g,error:C}=A;if(C){ji.error("restartIce failed for transport %s with error: %s. Recreating transports as fallback.",I,C);const A=this._sendTransport&&this._sendTransport.id===I,g=this._recvTransport&&this._recvTransport.id===I;return void(A?(this._sendTransport=null,this._createSendTransport()):g?(this._recvTransport=null,this._createRecvTransport()):(ji.warn("restartIceResponse error for unknown transportId:%s, recreating both transports",I),this._recreateTransportsAfterReconnection()))}this._sendTransport&&this._sendTransport.id===I?this._sendTransport.restartIce({iceParameters:g}):this._recvTransport&&this._recvTransport.id===I?this._recvTransport.restartIce({iceParameters:g}):ji.warn("restartIceResponse received for unknown transportId:%s. 
Ignoring.",I)};_recreateTransportsAfterReconnection=async()=>{try{ji.info("Recreating transports after reconnection");const A=!!this._sendTransport,I=!!this._recvTransport;this._sendTransport=null,this._recvTransport=null,A&&(ji.info("Recreating send transport"),await this._createSendTransport()),I&&(ji.info("Recreating recv transport"),await this._createRecvTransport()),ji.info("Transport recreation after reconnection complete")}catch(A){ji.error("Failed to recreate transports after reconnection: %o",A),this.emit("transportRecreationFailed",{error:A})}};startRecording=({recordingType:A=null,outputType:I=null,outputQualities:g=null}={})=>{ji.debug("recording type requested is:%s,outputType:%s, outputQualties:%o",A,I,g);const C=!A||"av"!==A?.toLowerCase()&&"audiovideo"!==A?.toLowerCase()?"mergedA":"mergedAV";if((!I||"hls"!==I.toLowerCase()&&"mp4"!==I.toLowerCase())&&I)return ji.error("Invalid outut type"),{success:!1,reason:`Invalid outputType: ${I}. `};if(I&&"hls"===I.toLowerCase()&&g&&!ni(g))return ji.error("Invalid outut qualities"),{success:!1,reason:`Invalid outputQualities: ${JSON.stringify(g)}. Allowed values are ${Array.from(Di).join(", ")}.`};let Q={id:"startRecording",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,url:window.location.hostname,type:FE,recordingStrategy:C,outputQualities:g,outputType:I?.toLowerCase()};this._sendMessage(Q),this._recordingStartedByMe={...this._recordingStartedByMe,"main-room":{recordingNo:null}}};stopRecording=()=>{ji.debug("going to stop recording for recordingStartedByMe:%o",this._recordingStartedByMe);let A="main-room";if(!this._recordingStartedByMe[A])return{success:!1,error:!0,code:"RRID001",text:"Error while trying to stop recording. Either the recording has not been started yet Or The same user need to stop recording who started it."};{let I={id:"stopRecording",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,recordingNo:this._recordingStartedByMe[A].recordingNo,type:FE};this._sendMessage(I),delete this._recordingStartedByMe[A],this.emit("recordingEnded",{peerId:this.data.inputParams.peerId})}};setRecordingStatusStarted=A=>{ji.debug("Recording/Streaming started by moderator!!:%O",A);let{recordingStartTime:I,recordingNo:g,type:C}=A;[FE,kE].includes(C)?this._recordingStartedByMe["main-room"]?(ji.debug("This recording has been started by me."),this._recordingStartedByMe["main-room"].recordingNo=g,this.emit("recordingStarted",{peerId:this.data.inputParams.peerId,startTime:I})):this.emit("recordingStarted",{startTime:I}):C===LE?(this._liveStreamingStartedByMe&&this._liveStreamingStartedByMe["main-room"]&&(this._liveStreamingStartedByMe["main-room"].recordingNo=g),this.emit("liveStreamingStarted",{startTime:I})):this.emit("liveStreamingStarted",{startTime:I})};setRecordingStatusEnded=A=>{ji.debug("Recording ended by moderator!!, data:%O",A);let{breakOutRoom:I,type:g}=A;g===LE?(this.emit("liveStreamingEnded",{}),this._liveStreamingStartedByMe=null):(this.emit("recordingEnded",{}),this._recordingStartedByMe=null)};startLiveStreaming=A=>{ji.debug("Live streaming started by moderator!!, data:%O",A);let{streamUrl:I,streamKey:g,type:C}=A;if(this._liveStreamingStartedByMe||(this._liveStreamingStartedByMe={}),this._liveStreamingStartedByMe["main-room"])return{success:!1,error:!0,code:"LRID002",text:"Live streaming is already started"};let Q={id:"startRecording",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,url:window.location.hostname,rtmpUrl:`${I}/${g}`,type:LE};return 
this._sendMessage(Q),this._liveStreamingStartedByMe={...this._liveStreamingStartedByMe,"main-room":{recordingNo:null}},{success:!0}};stopLiveStreaming=()=>{ji.debug("Live streaming stopped by moderator!!");let A="main-room";if(!this._liveStreamingStartedByMe||!this._liveStreamingStartedByMe[A])return{success:!1,error:!0,code:"LRID001",text:"Error while trying to stop live streaming. Either the live stream has not been started yet or it was started by another user."};{let I={id:"stopRecording",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,type:LE,recordingNo:this._liveStreamingStartedByMe[A].recordingNo};this._sendMessage(I)}delete this._liveStreamingStartedByMe[A],this.emit("liveStreamingEnded",{peerId:this.data.inputParams.peerId})};startProcessing=async({inputFiles:A=[],outputQualities:I=null,bucket:g=null,cloud:C=null,region:Q=null}={})=>{ji.debug("Processing of Files requested for:%o",A);const B=Math.round(1e7*Math.random()),E=await async function(A){if(ai.info("The input files are:%o, length:%s",A,A.length),A.length>0){ai.info("Files array length is:%s",A.length);for(const{type:I,url:g}of A){if(ai.info("The file detais are type:%s, url:%s",I,g),!yi.includes(I))return console.warn(`Type "${I}" is not allowed.`),{success:!1,reason:`Type "${I}" is not allowed.`};if(!Gi(g,I))return console.warn(`Extension mismatch for ${g}; expected .${I}`),{success:!1,reason:`Extension mismatch for ${g}; expected .${I}`}}return{success:!0}}return{success:!1,reason:"There are no files for processing!"}}(A);if(E.success){if(I&&!ni(I))return ji.error("Invalid outut qualities"),{success:!1,reason:`Invalid outputQualities: ${JSON.stringify(I)}. Allowed values are ${Array.from(Di).join(", ")}.`};this._processingStartedByMe={...this._processingStartedByMe,[B]:{}};for(const{type:I,url:g}of A)this._processingStartedByMe={...this._processingStartedByMe,[B]:{...this._processingStartedByMe[B],[g]:{type:I,url:g,status:"pending"}}};let E={id:"processVideos",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,inputFiles:A,outputQualities:I,bucket:g,cloud:C,region:Q,requestId:B,type:"process"};return this._sendMessage(E),{success:!0}}return E};checkProcessingStatus=({requestId:A})=>(ji.debug("Going to check processing status for request Id:%s",A),this._processingStartedByMe[A]?{success:!0,details:this._processingStartedByMe[A]}:{success:!0,details:this._processingStartedByMe});handleProcessingStart=A=>{const{processingStartTime:I,processingNo:g,requestId:C}=A;ji.debug("handleProcessingStart()| received message is:%o",A),this.emit("processingStarted",{processingStartTime:I,requestId:C})};handleProcessingCompletion=A=>{const{totalProcessingTime:I,hlsfileKey:g,size:C,originalFile:Q,lastFile:B,requestId:E}=A;if(ji.debug("handleProcessingCompletion()| received message is:%o",A),ji.debug("Before update, Total files to be processed are:%o",this._processingStartedByMe),this._processingStartedByMe[Q]&&(this._processingStartedByMe={...this._processingStartedByMe,[E]:{...this._processingStartedByMe[E],[Q]:{...this._processingStartedByMe[Q],status:"completed",hlsfileKey:g,size:C,totalProcessingTime:I}}}),ji.debug("After update, Total files to be processed are:%o",this._processingStartedByMe),this.emit("processingCompleted",A),B){ji.debug("The last file processing has been completed! 
Remove all the files that has been completed with the same requesterId");let A={...this._processingStartedByMe};delete A[E],ji.debug("After deleting the current requestId:%o",A),this._processingStartedByMe=A}};handleProcessingError=A=>{const{totalProcessingTime:I,hlsfileKey:g,size:C,originalFile:Q,lastFile:B,requestId:E,error:i}=A;ji.debug("handleProcessingCompletion()| received message is:%o",A),ji.debug("Before update, Total files to be processed are:%o",this._processingStartedByMe),this._processingStartedByMe[Q]&&(this._processingStartedByMe={...this._processingStartedByMe,[E]:{...this._processingStartedByMe[E],[Q]:{...this._processingStartedByMe[Q],status:"error",hlsfileKey:g,size:C,totalProcessingTime:I,error:i}}}),ji.debug("After update, Total files to be processed are:%o",this._processingStartedByMe),this.emit("processingError",A)};async enableMic({deviceId:A=null,autoGainControl:I,noiseSuppression:g,echoCancellation:C,channelCount:Q,sampleRate:B,forcePCMU:E,forcePCMA:i}={}){if(ji.debug("enableMic()"),this.data.inputParams.roomType===RE)return ji.debug("Enabling mic for P2P room"),A&&(this.data.inputParams.audioDeviceId=A),void 0!==I&&(this.data.inputParams.autoGainControl=I),void 0!==g&&(this.data.inputParams.noiseSuppression=g),void 0!==C&&(this.data.inputParams.echoCancellation=C),void 0!==B&&(this.data.inputParams.sampleRate=B),void 0!==Q&&(this.data.inputParams.channelCount=Q),await this.toggleMicP2p(!0);if(!this.data.inputParams.produce)return ji.debug("Produce status is set to false!"),{success:!1,error:!0,code:"REID007",text:"Error while trying to start Mic/Audio. Produce flag need to set to true while joining room in order to enable Mic/Audio."};if("connected"!==this._roomStatus)return ji.debug("Room status is not connected yet!"),{success:!1,error:!0,code:"REID008",text:`Error while trying to start Mic/Audio as room not in connected status. Current room status:!${this._roomStatus}`,possibleReasons:"Did you forget to call joinRoom before enabling Mic? OR if you have already initiated the joinRoom process, then Mic will be enabled automatically once room join process completes."};if(this._micProducer)return ji.debug("Mic is already active!"),{success:!1,warning:!0,code:"RWID002",text:"Error while trying to start Mic/Audio. Mic/Audio is already active!"};if(!this._device.canProduce("audio"))return ji.error("enableMic() | cannot produce audio"),{success:!1,error:!0,code:"REID009",text:"Error while trying to start Mic/Audio. Mic/Audio couldnot be activated due to limitations on this device. 
If you think this device has a functional Mic and the problem persists even after multiple retries, please contact technical support with the error code."};let D,n;E&&"boolean"==typeof E&&(this.data.inputParams.forcePCMU=E),i&&"boolean"==typeof i&&(this.data.inputParams.forcePCMA=i),I&&"boolean"==typeof I&&(this.data.inputParams.autoGainControl=I),C&&"boolean"==typeof C&&(this.data.inputParams.echoCancellation=C),g&&"boolean"==typeof g&&(this.data.inputParams.noiseSuppression=g),B&&Number.isInteger(B)&&B<64e3&&B>8e3&&(this.data.inputParams.sampleRate=B),Q&&Number.isInteger(Q)&&Q>0&&Q<3&&(this.data.inputParams.channelCount=Q);try{if(this._externalVideo)this._micStream=await this._getExternalVideoStream(),D=this._micStream.getAudioTracks()[0].clone();else{if(A?(n=this._deviceList.audioDevices.find(I=>I.deviceId===A),n||(ji.warn("Selected audio input deviceId:%s not found",A),n=this._deviceList.audioDevices[0])):n=this._deviceList.audioDevices[0],this._mic.device=n,!n)return ji.error("No mic device found! Can't start audio!"),{success:!1,reason:"No mic available for starting audio!"};A&&this.data.inputParams.audioDeviceId!==A&&(this.data.inputParams.audioDeviceId=A),ji.debug("enableMic() | calling getUserMedia()");try{this._micStream=await navigator.mediaDevices.getUserMedia({audio:{deviceId:{exact:n.deviceId},echoCancellation:this.data.inputParams.echoCancellation,noiseSuppression:this.data.inputParams.noiseSuppression,autoGainControl:this.data.inputParams.autoGainControl,sampleRate:this.data.inputParams.sampleRate,channelCount:this.data.inputParams.channelCount}}),D=this._micStream.getAudioTracks()[0]}catch(A){throw new Error("Error while acquiring mic. Possible issue with audio constraint values",A)}}this._micProducer=await this._sendTransport.produce({track:D,codecOptions:this.data.inputParams.forcePCMU||this.data.inputParams.forcePCMA?void 0:{opusStereo:!1,opusDtx:!0,opusFec:!0,opusNack:!0},codec:this.data.inputParams.forcePCMU?this._device.rtpCapabilities.codecs.find(A=>"audio/pcmu"===A.mimeType.toLowerCase()):this.data.inputParams.forcePCMA?this._device.rtpCapabilities.codecs.find(A=>"audio/pcma"===A.mimeType.toLowerCase()):void 0,appData:{mediaTag:"cam-audio"}}),this._producers.set("audio",{id:this._micProducer.id,paused:this._micProducer.paused,track:this._micProducer.track,rtpParameters:this._micProducer.rtpParameters,codec:this._micProducer.rtpParameters.codecs[0].mimeType.split("/")[1]}),this.emit("micStart",{peerId:this.data.inputParams.peerId,audioTrack:this._micProducer.track,type:"local"}),this._micProducer.on("transportclose",()=>{this._micProducer=null}),this._micProducer.on("trackended",()=>{this.disableMic().catch(()=>{})}),await this._initializeAudioMonitoring(),this._startSpeakingWhileMutedDetection()}catch(A){ji.error("enableMic() | failed:%o",A),this.emit("error",{code:"EID002",text:"Error enabling microphone!"}),D&&D.stop()}}async disableMic(){if(ji.debug("disableMic()"),this.data.inputParams.roomType===RE)return ji.debug("Disabling mic for P2P room"),this._cleanupAudioMonitoring(),await this.toggleMicP2p(!1);if(this._cleanupAudioMonitoring(),this._micStream&&this._micStream.getAudioTracks().forEach(A=>A.stop()),this._micProducer){this._micProducer.close(),this._producers.delete("audio");try{let 
A={id:"closeProducerSDK",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,type:"audio",producerId:this._micProducer.id};this._sendMessage(A),this.emit("micEnd",{peerId:this.data.inputParams.peerId,audioTrack:null,type:"local"})}catch(A){this.emit("error",{code:"EID003",text:"Error disabling microphone!"})}this._micProducer=null}else ji.debug("No mic producer available")}async muteMic(){if(ji.debug("muteMic()"),this.data.inputParams.roomType!==RE)if(this._micProducer){this._micProducer.pause();try{let A={id:"toggleMedia",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,type:"audio",audioStatus:!1,producerId:this._micProducer.id};this._sendMessage(A),this.emit("peerMuted",{peerId:this.data.inputParams.peerId,type:"local"})}catch(A){ji.error("muteMic() | failed: %o",A),this.emit("error",{code:"EID004",text:"Error muting local microphone!"})}}else ji.warn("No mic producer available");else try{if(this._peerConnection){const A=this._peerConnection.getSenders().find(A=>"audio"===A.track?.kind);A&&A.track&&(A.track.enabled=!1,ji.info("Disabled audio track in P2P peer connection"))}this._sendMessage({id:"mediaToggled",type:"audio",audioStatus:!1,peer:this._remotePeerId,peerId:this.data.inputParams.peerId}),this.emit("peerMuted",{peerId:this.data.inputParams.peerId,type:"local"})}catch(A){ji.error("muteMic() P2P | failed: %o",A),this.emit("error",{code:"EID004",text:"Error muting local microphone in P2P!"})}}async unmuteMic(){if(ji.debug("unmuteMic()"),this.data.inputParams.roomType!==RE){this._micProducer||(ji.debug("Mic is not active!"),await this.enableMic()),this._micProducer.resume();try{let A={id:"toggleMedia",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,type:"audio",audioStatus:!0,producerId:this._micProducer.id};this._sendMessage(A),this.emit("peerUnMuted",{peerId:this.data.inputParams.peerId,type:"local"})}catch(A){ji.error("unmuteMic() | failed: %o",A),this.emit("error",{code:"EID005",text:"Error unmuting local microphone!"})}}else try{if(this._peerConnection){const A=this._peerConnection.getSenders().find(A=>"audio"===A.track?.kind);if(A&&A.track)A.track.enabled=!0,ji.info("Enabled audio track in P2P peer connection");else{if(this._micStream||(ji.debug("Mic stream not available, acquiring..."),await this._acquireAudioForP2p({deviceId:this.data.inputParams.audioDeviceId})),!this._micStream||!this._micStream.getAudioTracks()[0])return ji.error("Failed to acquire audio for unmute in P2P"),void this.emit("error",{code:"EID005",text:"Error acquiring audio for unmute in P2P!"});if(this._peerConnection.addTrack(this._micStream.getAudioTracks()[0],this._localStream),ji.info("Added audio track to P2P peer connection"),this._localStream){const A=this._localStream.getAudioTracks()[0];A!==this._micStream.getAudioTracks()[0]&&(A&&this._localStream.removeTrack(A),this._localStream.addTrack(this._micStream.getAudioTracks()[0]))}}}this._sendMessage({id:"mediaToggled",type:"audio",audioStatus:!0,peer:this._remotePeerId,peerId:this.data.inputParams.peerId}),this.emit("peerUnMuted",{peerId:this.data.inputParams.peerId,type:"local"})}catch(A){ji.error("unmuteMic() P2P | failed: %o",A),this.emit("error",{code:"EID005",text:"Error unmuting local microphone in P2P!"})}}handleRoomSettingsGeneral=async({allowScreenShare:A,noOfScreenShare:I,noOfUpgradeRequests:g})=>{const C={type:"roomSetting:generalSettings",allowScreenShare:A,noOfScreenShare:I,noOfUpgradeRequests:g};return ji.debug("Room settings - General: 
%O",C),this.sendCustomMessage(JSON.stringify(C),"custom",null,"moderator","roomSetting:generalSettings",{}),{success:!0}};handleAuthSettings=async({moderatorApproval:A,passwordRequired:I})=>{const g={type:"roomSetting:authSettings",moderatorApproval:A,passwordRequired:I};return ji.debug("Room settings - Auth: %O",g),this.sendCustomMessage(JSON.stringify(g),"custom",null,"moderator","roomSetting:authSettings",{}),{success:!0}};handleRoomSettingsStage=A=>{const I={type:"roomSetting:stageSettings",stageStatus:A.stageStatus,stagePeers:A.stagePeers,backStageStatus:A.backStageStatus,backStagePeers:A.backStagePeers};return this.sendCustomMessage(JSON.stringify(I),"custom",null,"moderator","roomSetting:stageSettings",{}),{success:!0}};handlePresenterSettings=async A=>{const I={type:"roomSetting:presenterSettings",presenterSettings:A};return ji.debug("presenter settings: %O",I),this.sendCustomMessage(JSON.stringify(I),"custom",null,"moderator","roomSetting:presenterSettings",{}),{success:!0}};handleParticipantSettings=async A=>{ji.debug("Going to update participant settings with values:%s",A);const I={type:"roomSetting:participantSettings",participantSettings:A};return this.sendCustomMessage(JSON.stringify(I),"custom",null,"moderator","roomSetting:participantSettings",{}),{success:!0}};startSpeechRecognition(A={}){const I={lang:"es-ES",continuous:!0,interimResults:!0,maxAlternatives:3,autoRestart:!0,restartDelayMs:250,...A},g=window.SpeechRecognition||window.webkitSpeechRecognition,C=window.SpeechGrammarList||window.webkitSpeechGrammarList;if(!g)return this.emit("sttError",{code:"UNSUPPORTED",message:"Web Speech API not supported"}),{success:!1,reason:"unsupported"};try{if(this._speechRecognition){try{this._speechRecognition.onend=null,this._speechRecognition.onresult=null,this._speechRecognition.onerror=null}catch{}try{this._speechRecognition.stop()}catch{}this._speechRecognition=null}const A=new g;if(A.lang=I.lang,A.continuous=!!I.continuous,A.interimResults=!!I.interimResults,A.maxAlternatives=I.maxAlternatives,I.grammars&&Array.isArray(I.grammars)&&C){const g=new C,Q=`#JSGF V1.0; grammar terms; public <term> = ${I.grammars.join(" | ")};`;g.addFromString&&g.addFromString(Q,1),A.grammars&&(A.grammars=g)}return this._sttShouldRun=!0,this._sttAutoRestart=!!I.autoRestart,this._sttRestartDelayMs=Number(I.restartDelayMs)||250,A.onstart=()=>this.emit("sttStart",{timestamp:Date.now(),lang:A.lang}),A.onresult=I=>{const g=I.results[I.results.length-1],C=g&&g[0]?g[0]:null,Q={transcript:C?C.transcript:"",confidence:C?C.confidence:0,isFinal:!!g&&g.isFinal,timestamp:Date.now(),lang:A.lang},B=Q.timestamp,E=this.data.inputParams.peerId||"local";new Date(B).toISOString(),this._transcriptStorage.has(B)||this._transcriptStorage.set(B,new Map),this._transcriptStorage.get(B).set(E,{transcript:Q.transcript,isFinal:Q.isFinal}),this.emit("sttResult",Q)},A.onerror=A=>{this.emit("sttError",{code:A.error||"UNKNOWN",message:A.message||"Speech recognition error"})},A.onend=()=>{if(this.emit("sttEnd",{timestamp:Date.now()}),this._sttShouldRun&&this._sttAutoRestart){const I=this._sttRestartDelayMs;try{setTimeout(()=>{if(this._sttShouldRun&&this._sttAutoRestart)try{A.start()}catch(A){}},I)}catch(A){}}},A.start(),this._speechRecognition=A,{success:!0}}catch(A){return 
this.emit("sttError",{code:"INIT_FAILED",message:A.message}),{success:!1,reason:A.message}}}stopSpeechRecognition(){try{if(this._sttShouldRun=!1,this._sttAutoRestart=!1,this._speechRecognition){try{this._speechRecognition.onend=null,this._speechRecognition.onresult=null,this._speechRecognition.onerror=null}catch{}this._speechRecognition.stop(),this._speechRecognition=null}return{success:!0}}catch(A){return this.emit("sttError",{code:"STOP_FAILED",message:A.message}),{success:!1,reason:A.message}}}startTranscription(){try{return this._transcriptionActive?(ji.debug("Transcription already active"),{success:!1,reason:"already_active"}):(this._transcriptionActive=!0,this._transcriptionChunks=[],this._currentTranscriptionPeerId=null,this._transcriptionEmittedStart=!1,ji.debug("Transcription started - waiting for audio buffer from client"),this.emit("transcriptionStarted"),{success:!0})}catch(A){return ji.error("Failed to start transcription:",A),this.emit("transcriptionError",{error:A.message}),{success:!1,reason:A.message}}}stopTranscription(){try{return this._transcriptionActive?(this._transcriptionRecorder&&"inactive"!==this._transcriptionRecorder.state&&this._transcriptionRecorder.stop(),this._transcriptionRecorder=null,this._transcriptionActive=!1,this._transcriptionChunks=[],this._currentTranscriptionPeerId=null,this._transcriptionEmittedStart=!1,ji.debug("Transcription stopped completely"),this.emit("transcriptionStopped"),{success:!0}):(ji.debug("Transcription not active"),{success:!1,reason:"not_active"})}catch(A){return ji.error("Failed to stop transcription:",A),this.emit("transcriptionError",{error:A.message}),{success:!1,reason:A.message}}}sendAudioForTranscription(A,I){try{return A?(this.sendCustomMessage({audioBuffer:A,timestamp:Date.now(),activeSpeakerPeerId:I},"general",null,"participant","deepgram:audio"),ji.debug(`Audio buffer sent for transcription from peer: ${I}`),{success:!0}):(ji.debug("No audio buffer provided"),{success:!1,reason:"no_audio_buffer"})}catch(A){return ji.error("Failed to send audio for transcription:",A),{success:!1,reason:A.message}}}isTranscriptionActive(){return this._transcriptionActive||!1}_processTranscriptionMessage=A=>{try{if("transcription"===A.type&&A.data){const I=A.data;this.emit("transcription",{transcript:I.transcript,confidence:I.confidence,isFinal:I.isFinal,timestamp:I.timestamp,speaker:I.speaker}),ji.debug("Transcription received:",I.transcript)}}catch(A){ji.error("Failed to process transcription message:",A)}};async enableCam({deviceId:A=null,videoResolution:I,forceVp8:g,forceVp9:C,forceH264:Q,h264Profile:B,forceFPS:E,enableWebcamLayers:i,numSimulcastStreams:D,videoBitRates:n,vbdetails:o}={}){if(ji.debug("enableWebcam()"),ji.debug("first vbdetails in enablecam",o),!this.data.inputParams.produce)return ji.debug("Produce status is set to false!"),{success:!1,error:!0,code:"REID004",text:"Error while trying to start Camera. Produce flag need to set to true while joining room in order to enable Camera."};if("connected"!==this._roomStatus)return ji.debug("Room status is not connected yet!"),{success:!1,error:!0,code:"REID005",text:`Error while trying to start Camera as room not in connected status. Current room status:!${this._roomStatus}`,possibleReasons:"Did you forget to call joinRoom before enabling Camera? 
OR if you have already initiated the joinRoom process, then Camera will be enabled automatically once room join process completes."};if(this._webcamProducer)return ji.debug("Camera is already active!"),{success:!1,warning:!0,code:"RWID003",text:"Error while trying to start Camera. Camera is already active!"};if(!this._device.canProduce("video"))return ji.error("enableWebcam() | cannot produce video"),{success:!1,error:!0,code:"REID006",text:"Error while trying to start Camera. Camera couldnot be activated due to limitations on this device. If you think this device has a functional camera and the problem persists even after multiple retries, please contact technical support with the error code."};let s,w;if(["hd","vga","qvga"].includes(I)&&(this.data.inputParams.videoResolution=I,this._webcam.resolution=I),g&&"boolean"==typeof g&&(this.data.inputParams.forceVp8=g),C&&"boolean"==typeof C&&(this.data.inputParams.forceVp9=C),Q&&"boolean"==typeof Q&&(this.data.inputParams.forceH264=Q),B&&["high","low"].includes(B.toLowerCase())&&(this.data.inputParams.h264Profile=B),E&&Number.isInteger(E)&&E<65&&E>5&&(this.data.inputParams.forceFPS=25),i&&"boolean"==typeof i&&(this.data.inputParams.enableWebcamLayers=i,this._enableWebcamLayers=i),D&&Number.isInteger(D)&&D<4&&D>0&&(this.data.inputParams.numSimulcastStreams=D,this._numSimulcastStreams=D),Array.isArray(n)&&n.length>=1&&n.length<=3&&n.every(A=>Number.isInteger(A)&&A>=75&&A<=800)?(ji.debug("videoBitRates values are correct"),this.data.inputParams.videoBitRates=n):ji.warn("videobitrates values should be an integer array with maximum 3 elements and minimum 1 element. The values in the array are '[700,250,75]'"),this.data.inputParams.roomType===RE)return ji.debug("Enabling camera for P2P room"),A&&(this.data.inputParams.videoDeviceId=A),o&&(this.data.inputParams.vbdetails=o),await this.toggleCameraP2p(!0);try{if(this._externalVideo)w={label:"external video"},this._webCamStream=await this._getExternalVideoStream(),s=this._webCamStream.getVideoTracks()[0].clone();else{A?(w=this._deviceList.videoDevices.find(I=>I.deviceId===A),w||(ji.warn("Selected deviceId:%s not found",A),w=this._deviceList.videoDevices[0])):w=this._deviceList.videoDevices[0],this._webcam.device=w;const{resolution:I}=this._webcam;if(!w)return ji.error("No wencam device found! 
Can't start video!"),{success:!1,reason:"No Webcam available for starting video!"};A&&this.data.inputParams.videoDeviceId!==A&&(this.data.inputParams.videoDeviceId=A),ji.debug("enableWebcam() | calling getUserMedia()"),this._webCamStream=await navigator.mediaDevices.getUserMedia({video:{deviceId:{exact:w.deviceId},...Vi[I],frameRate:{ideal:this.data.inputParams.forceFPS}}}),s=this._webCamStream.getVideoTracks()[0]}let I,g;const C={videoGoogleStartBitrate:1e3};if(ji.debug("Current device codec options are:%O",this._device.rtpCapabilities.codecs),this._forceVP8){if(g=this._device.rtpCapabilities.codecs.find(A=>"video/vp8"===A.mimeType.toLowerCase()),!g)throw new Error("desired VP8 codec+configuration is not supported")}else if(this._forceH264){if("high"===this.data.inputParams.h264Profile?g=this._device.rtpCapabilities.codecs.find(A=>"video/h264"===A.mimeType.toLowerCase()&&"4d001f"===A.parameters["profile-level-id"]):"low"===this.data.inputParams.h264Profile&&(g=this._device.rtpCapabilities.codecs.find(A=>"video/h264"===A.mimeType.toLowerCase()&&"42e01f"===A.parameters["profile-level-id"])),!g)throw new Error("desired H264 codec+configuration is not supported");ji.debug("Selected h264 codec is:%O",g)}else if(this._forceVP9&&(g=this._device.rtpCapabilities.codecs.find(A=>"video/vp9"===A.mimeType.toLowerCase()),!g))throw new Error("desired VP9 codec+configuration is not supported");if(this._enableWebcamLayers){const A=this._device.rtpCapabilities.codecs.find(A=>"video"===A.kind);this._forceVP9&&g||"video/vp9"===A.mimeType.toLowerCase()?I=[{maxBitrate:5e6,scalabilityMode:this._webcamScalabilityMode||"L3T3_KEY"}]:(I=[{scaleResolutionDownBy:1,maxBitrate:1e3*this.data.inputParams.videoBitRates[0],scalabilityMode:this._webcamScalabilityMode||"L1T3"}],this._numSimulcastStreams>1&&I.unshift({scaleResolutionDownBy:2,maxBitrate:1e3*this.data.inputParams.videoBitRates[1],scalabilityMode:this._webcamScalabilityMode||"L1T3"}),this._numSimulcastStreams>2&&I.unshift({scaleResolutionDownBy:4,maxBitrate:1e3*this.data.inputParams.videoBitRates[2],scalabilityMode:this._webcamScalabilityMode||"L1T3"}))}if(o)try{const A=Xi&&Xi._localVBStream;let I=null;if(A&&"function"==typeof A.getVideoTracks&&A.getVideoTracks().length>0&&"live"===A.getVideoTracks()[0].readyState)I=A.getVideoTracks()[0],ji.debug("Using existing Virtual Background track");else{const A=await Xi.initializePipeline(s,o);A&&A.vbStream&&"function"==typeof A.vbStream.getVideoTracks&&A.vbStream.getVideoTracks().length>0&&(I=A.vbStream.getVideoTracks()[0],ji.debug("Initialized new Virtual Background pipeline"))}I&&(s=I)}catch(A){ji.debug("VB init failed or skipped in enableCam")}this._webcamProducer=await this._sendTransport.produce({track:s,encodings:I,codecOptions:C,codec:g,appData:{mediaTag:"cam-video"}}),this._producers.set("video",{id:this._webcamProducer.id,deviceLabel:w.label,type:this._getWebcamType(w),paused:this._webcamProducer.paused,track:this._webcamProducer.track,rtpParameters:this._webcamProducer.rtpParameters,codec:this._webcamProducer.rtpParameters.codecs[0].mimeType.split("/")[1]}),this.emit("videoStart",{peerId:this.data.inputParams.peerId,videoTrack:this._webcamProducer.track,type:"local"}),this._webcamProducer.on("transportclose",()=>{this._webcamProducer=null}),this._webcamProducer.on("trackended",()=>{this.disableCam().catch(()=>{})})}catch(A){ji.error("enableWebcam() | failed:%o",A),this.emit("error",{code:"EID011",text:"Enable Webcam failed!"}),s&&s.stop()}}async 
disableCam(){if(ji.debug("disableWebcam()"),this.data.inputParams.roomType===RE)return ji.debug("Disabling camera for P2P room"),await this.toggleCameraP2p(!1);if(this._webcamProducer){this._webcamProducer.close(),this._producers.delete("video");try{let A={id:"closeProducerSDK",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,type:"video",producerId:this._webcamProducer.id};this._sendMessage(A),this.emit("videoEnd",{peerId:this.data.inputParams.peerId,videoTrack:null})}catch(A){this.emit("error",{code:"EID012",text:"Error while closing server side producer!"})}try{this._webCamStream&&"function"==typeof this._webCamStream.getTracks&&this._webCamStream.getTracks().forEach(A=>{try{A.stop()}catch(A){}})}catch(A){}this._webCamStream=null,this._webcamProducer=null}else ji.debug("No webcam producer available")}async _updateWebcams(){ji.debug("_updateWebcams()"),this._webcams=new Map,ji.debug("_updateWebcams() | calling enumerateDevices()");const A=await navigator.mediaDevices.enumerateDevices();for(const I of A)"videoinput"===I.kind&&this._webcams.set(I.deviceId,I);const I=Array.from(this._webcams.values()),g=I.length,C=this._webcam.device?this._webcam.device.deviceId:void 0;ji.debug("_updateWebcams() [webcams:%o]",I),0===g?this._webcam.device=null:this._webcams.has(C)||(this._webcam.device=I[0])}async _getExternalVideoStream(){if(this._externalVideoStream)return this._externalVideoStream;if(this._externalVideo.readyState<3&&await new Promise(A=>this._externalVideo.addEventListener("canplay",A)),this._externalVideo.captureStream)this._externalVideoStream=this._externalVideo.captureStream();else{if(!this._externalVideo.mozCaptureStream)throw new Error("video.captureStream() not supported");this._externalVideoStream=this._externalVideo.mozCaptureStream()}return this._externalVideoStream}_getWebcamType(A){return/(back|rear)/i.test(A.label)?(ji.debug("_getWebcamType() | it seems to be a back camera"),"back"):(ji.debug("_getWebcamType() | it seems to be a front camera"),"front")}async changeVideoInput({resolution:A,deviceId:I,fps:g,vbdetails:C}){if(this.data?.inputParams?.roomType===RE){ji.info("changeVideoInput() | P2P room detected, delegating to changeVideoInputP2p");try{C&&(this.data.inputParams.vbdetails=C)}catch(A){}return await this.changeVideoInputP2p(I)}return this._webcamProducer?await this._changeVideoInput({resolution:A,deviceId:I,fps:g,vbdetails:C}):(ji.error("No webcam producer available!"),{success:!1,reason:"You are not sharing your camera yet. Camera Input can be changed to a new camera only when you are sharing an existing camera. 
"})}async _changeVideoInput({resolution:A,deviceId:I,fps:g,vbdetails:C}){ji.info("_changeVideoInput() | Inside"),A&&["hd","vga","qvga"].includes(A)?this._webcam.resolution=A:ji.warn("Invalid video resolution value "),g&&Number.isInteger(g)&&g<65&&g>5?this.data.inputParams.forceFPS=g:ji.warn("forceFPS should be a number between 5 to 65, default value is 25 fps.");let Q=this._deviceList.videoDevices.find(A=>I&&A.deviceId===I);if(!Q)return ji.error("The selected deviceId not found!"),{success:!1,reason:"Invalid deviceId!"};this._webcam.device=Q;try{this._webCamStream.getVideoTracks().forEach(A=>A.stop()),this._webCamStream=null,ji.debug("changeVideoInput() | calling getUserMedia()"),this._webCamStream=await navigator.mediaDevices.getUserMedia({video:{deviceId:{exact:Q.deviceId},...Vi[this._webcam.resolution],frameRate:{ideal:this.data.inputParams.forceFPS}}});let A=this._webCamStream.getVideoTracks()[0];if(ji.debug("The new video track is:%O",A),C)try{const I=await Xi.initializePipeline(A,C);I&&I.vbStream&&"function"==typeof I.vbStream.getVideoTracks&&I.vbStream.getVideoTracks()[0]&&(A=I.vbStream.getVideoTracks()[0],ji.debug("Reinitialized VB pipeline for changed camera"))}catch(A){ji.debug("VB init skipped/failed on changeVideoInput")}await this._webcamProducer.replaceTrack({track:A});let I=this._producers.get("video");return I.deviceLabel=Q.label,I.type=this._getWebcamType(Q),I.track=this._webcamProducer.track,ji.debug("Updated producer values are:%O",I),this._producers.set("video",I),this.emit("videoStart",{peerId:this.data.inputParams.peerId,videoTrack:A,type:"local"}),{success:!0}}catch(A){return ji.error("Error while changing input:%O",A),{success:!1,reason:"Couldn't change video input",error:A}}}async changeAudioInput({autoGainControl:A,echoCancellation:I,noiseSuppression:g,sampleRate:C,channelCount:Q,deviceId:B}){if(this.data?.inputParams?.roomType===RE){ji.info("changeAudioInput() | P2P room detected, delegating to changeAudioInputP2p");try{B&&(this.data.inputParams.audioDeviceId=B)}catch(A){}return await this.changeAudioInputP2p(B)}return this._micProducer?await this._changeAudioInput({autoGainControl:A,echoCancellation:I,noiseSuppression:g,sampleRate:C,channelCount:Q,deviceId:B}):{success:!1,reason:"You are not sharing your mic yet. Mic Input can be changed to a new mic only when you are sharing an existing mic. 
"}}async _changeAudioInput({autoGainControl:A,echoCancellation:I,noiseSuppression:g,sampleRate:C,channelCount:Q,deviceId:B}){A&&"boolean"==typeof A&&(this.data.inputParams.autoGainControl=A),I&&"boolean"==typeof I&&(this.data.inputParams.echoCancellation=Boolean(I)),g&&"boolean"==typeof g&&(this.data.inputParams.noiseSuppression=Boolean(g)),C&&Number.isInteger(C)&&C<64e3&&C>8e3&&(this.data.inputParams.sampleRate=C),Q&&Number.isInteger(Q)&&Q>0&&Q<3&&(this.data.inputParams.channelCount=Q);let E=this._deviceList.audioDevices.find(A=>B&&A.deviceId===B);if(!E)return{success:!1,reason:"Invalid deviceId!"};this._mic.device=E,this._micStream&&this._micStream.getAudioTracks().forEach(A=>A.stop()),this._micStream=null;try{this._micStream=await navigator.mediaDevices.getUserMedia({audio:{deviceId:{exact:E.deviceId},echoCancellation:this.data.inputParams.echoCancellation,noiseSuppression:this.data.inputParams.noiseSuppression,autoGainControl:this.data.inputParams.autoGainControl,sampleRate:this.data.inputParams.sampleRate,channelCount:this.data.inputParams.channelCount}});const A=this._micStream.getAudioTracks()[0];this._micProducer.replaceTrack({track:A});let I=this._producers.get("audio");return I.deviceLabel=E.label,I.track=this._micProducer.track,ji.debug("Updated producer values are:%O",I),this._producers.set("audio",I),this.emit("micStart",{peerId:this.data.inputParams.peerId,audioTrack:this._micProducer.track,type:"local"}),{success:!0}}catch(A){return ji.error("Error while changing input:%O",A),{success:!1,reason:"Couldn't change audio input",err:A}}}toggleVB=async A=>{if(A&&this._localCamVideo.getVideoTracks()[0]){const A=await this.initializePipeline(this._localCamVideo.getVideoTracks()[0],{type:"blur"});if(ji.debug("response is :%o, localVBTrack is:%O",A,this._localVBStream.getVideoTracks()[0]),A.success&&this._localVBStream.getVideoTracks()[0]){if(this._roomType===KE||this._roomType===ME)if(this._camVideoProducer&&store.getState().conf.joined){await this._camVideoProducer.replaceTrack({track:this._localVBStream.getVideoTracks()[0].clone()});let A={...this._participants,[this._peerId]:{...this._participants[this._peerId],videoRef:this._localVBStream.getVideoTracks()[0]}};this._participants=A,store.dispatch(confActions.addParticipant(A))}else ji.debug("Camvideoproducer not available! virtual background changes in the landing page! ");else if(this._roomType===NE&&this._peerConnection){const A=this._peerConnection.getSenders().find(function(A){return"video"===A.track.kind});if(ji.debug("found sender:%o",A),A&&this._localVBStream.getVideoTracks()[0]){A.replaceTrack(this._localVBStream.getVideoTracks()[0]);let I={...this._participants,[this._peerId]:{...this._participants[this._peerId],videoRef:this._localVBStream.getVideoTracks()[0]}};this._participants=I,store.dispatch(confActions.addParticipant(I))}else this.showNotification("danger","Error!","Unable to switch off virtual background! 
Try again OR contact support with code:CM-FE-RC-VB-E04")}}else ji.error("Virtual background procesing can't be enabled")}else if(this._localVBStream?.getVideoTracks()[0].stop(),this._localVBStream=null,this._pipelineManager.stop(),this._vbDetailsNew.sourcePlayback&&(this._vbDetailsNew.sourcePlayback.htmlElement.srcObject=null),this._vbDetailsNew.sourcePlayback?.htmlElement.remove(),this._vbDetailsNew?.hiddenCanvas.remove(),this._vbDetailsNew?.hiddenImage?.remove(),this._vbDetailsNew.sourcePlayback=null,this._vbDetailsNew.hiddenCanvas=null,this._vbDetailsNew.hiddenImage=null,store.dispatch(confActions.setVBItemsStatus(A)),ji.debug("Garbage collection completed. Set the video to original video!"),store.getState().conf.videoStatus)if(this._roomType===KE||this._roomType===ME)if(this._camVideoProducer&&store.getState().conf.joined&&this._localCamVideo.getVideoTracks()[0]&&store.getState().conf.joined&&"live"===this._localCamVideo.getVideoTracks()[0].readyState){await this._camVideoProducer.replaceTrack({track:this._localCamVideo.getVideoTracks()[0].clone()});let A={...this._participants,[this._peerId]:{...this._participants[this._peerId],videoRef:this._localCamVideo.getVideoTracks()[0]}};this._participants=A,store.dispatch(confActions.addParticipant(A))}else ji.debug("Camvideoproducer not available! virtual background changes in the landing page! ");else if(this._roomType===NE&&this._peerConnection){const A=this._peerConnection.getSenders().find(function(A){return"video"===A.track.kind});if(ji.debug("found sender:%o",A),A&&this._localCamVideo.getVideoTracks()[0]&&"live"===this._localCamVideo.getVideoTracks()[0].readyState){A.replaceTrack(this._localCamVideo.getVideoTracks()[0]);let I={...this._participants,[this._peerId]:{...this._participants[this._peerId],videoRef:this._localCamVideo.getVideoTracks()[0]}};this._participants=I,store.dispatch(confActions.addParticipant(I))}else this.showNotification("danger","Error!","Unable to switch off virtual background! Try again OR contact support with code:CM-FE-RC-VB-E04")}};setVBDetails=async A=>{this._vbDetails=A;try{this.data&&this.data.inputParams&&(this.data.inputParams.vbdetails=A)}catch(A){}if(this._roomType!==KE&&this._roomType!==ME||!store.getState().conf.joined){if(this._roomType===NE&&this._peerConnection){const I=this._peerConnection.getSenders().find(function(A){return"video"===A.track.kind});ji.debug("found sender:%o",I),I?I.replaceTrack(A.stream.getVideoTracks()[0]):this.showNotification("danger","Error!","Unable to set virtual background! 
Try again OR contact support with code:CM-FE-RC-VB-E02")}}else if(this._camVideoProducer){ji.debug("Going to replace the video track for cam video producer!");try{await this._camVideoProducer.replaceTrack({track:A.stream.getVideoTracks()[0]})}catch(A){ji.debug("vb set error",A)}ji.debug("all participants",this._participants),ji.debug("this._localCamVideo",this._localCamVideo)}else ji.warn("Camvideo producer is not available yet!")};async enableShare({shareAudio:A=!1,enableSharingLayers:I=!0,shareBitRates:g=[2500,1250,500]}={}){if(ji.debug("enableShare()"),this.data.inputParams.roomType===RE){ji.debug("Room type is P2P - using P2P screen share");try{return await this.enableScreenShareP2p(),{success:!0}}catch(A){return ji.error("Error enabling screen share in P2P mode:%O",A),{success:!1,error:!0,code:"REID014",text:`Error while trying to start screen share in P2P mode: ${A.message}`}}}if(!this.data.inputParams.produce)return ji.debug("Produce status is set to false!"),{success:!1,error:!0,code:"REID003",text:"Error while trying to start screen share. Produce flag need to set to true while joining room in order to enable screen share."};if("connected"!==this._roomStatus)return ji.debug("Room status is not connected yet!"),{success:!1,error:!0,code:"REID001",text:`Error while trying to start screen share as room not in connected status. Current room status:!${this._roomStatus}`,possibleReasons:"Did you forget to call joinRoom before enabling screen share? OR if you have already initiated the joinRoom process, then try enabling screen share after some seconds."};if(this._shareProducer)return ji.debug("Screen share is already active!"),{success:!1,warning:!0,code:"RWID001",text:"Error while trying to start screen share. Screen share is already active!"};if(!this._device.canProduce("video"))return ji.error("enableShare() | cannot produce video"),{success:!1,error:!0,code:"REID002",text:"Error while trying to start screen share. Screen share couldnot be activated due to limitations on this device. If you think this device is capable of screen share and the problem persists even after multiple retries, please contact technical support with the error code."};let C,Q;this._enableSharingLayers="boolean"!=typeof I?Boolean(I):I,Array.isArray(g)&&g.length>=1&&g.length<=3&&g.every(A=>Number.isInteger(A)&&A>=500&&A<=2500)?this.data.inputParams.shareBitRates=g:this.data.inputParams.shareBitRates=[2500,1250,500];try{ji.debug("enableShare() | calling getDisplayMedia()");const I=await navigator.mediaDevices.getDisplayMedia({audio:!!A,video:{displaySurface:"monitor",logicalSurface:!0,cursor:!0,width:{max:1920},height:{max:1080},frameRate:{max:30}}});if(!I)return ji.error("Unable to capture screen."),void this.emit("error",{code:"EID013",text:"Error while trying to start screen share. 
Not able to capture screen!"});let g,B;Q=I.getAudioTracks()[0],Q&&(this._shareAudioProducer=await this._sendTransport.produce({track:Q,codecOptions:this.data.inputParams.forcePCMU?void 0:{opusStereo:!1,opusDtx:!0,opusFec:!0,opusNack:!0},codec:this.data.inputParams.forcePCMU?this._device.rtpCapabilities.codecs.find(A=>"audio/pcmu"===A.mimeType.toLowerCase()):void 0,appData:{mediaTag:"screen-audio"}}),this._producers.set("ssAudio",{id:this._shareAudioProducer.id,type:"shareAudio",paused:this._shareAudioProducer.paused,track:this._shareAudioProducer.track,rtpParameters:this._shareAudioProducer.rtpParameters,codec:this._shareAudioProducer.rtpParameters.codecs[0].mimeType.split("/")[1]}),this.emit("ssAudioStart",{peerId:this.data.inputParams.peerId,audioTrack:this._shareAudioProducer.track,type:"local"})),C=I.getVideoTracks()[0];const E={videoGoogleStartBitrate:1e3};if(this._forceVP8){if(B=this._device.rtpCapabilities.codecs.find(A=>"video/vp8"===A.mimeType.toLowerCase()),!B)throw new Error("desired VP8 codec+configuration is not supported")}else if(this._forceH264){if("high"===this.data.inputParams.h264Profile?B=this._device.rtpCapabilities.codecs.find(A=>"video/h264"===A.mimeType.toLowerCase()&&"4d001f"===A.parameters["profile-level-id"]):"low"===this.data.inputParams.h264Profile&&(B=this._device.rtpCapabilities.codecs.find(A=>"video/h264"===A.mimeType.toLowerCase()&&"42e01f"===A.parameters["profile-level-id"])),!B)throw new Error("desired H264 codec+configuration is not supported");ji.debug("Selected h264 codec is:%O",B)}else if(this._forceVP9&&(B=this._device.rtpCapabilities.codecs.find(A=>"video/vp9"===A.mimeType.toLowerCase()),!B))throw new Error("desired VP9 codec+configuration is not supported");if(this._enableSharingLayers){const A=this._device.rtpCapabilities.codecs.find(A=>"video"===A.kind);this._forceVP9&&B||"video/vp9"===A.mimeType.toLowerCase()?g=[{maxBitrate:1e3*this.data.inputParams.shareBitRates[0],scalabilityMode:this._sharingScalabilityMode||"L3T3",dtx:!0}]:(g=[{scaleResolutionDownBy:1,maxBitrate:1e3*this.data.inputParams.shareBitRates[0],scalabilityMode:this._sharingScalabilityMode||"L1T3",dtx:!0}],this._numSimulcastStreams>1&&g.unshift({scaleResolutionDownBy:2,maxBitrate:1e3*this.data.inputParams.shareBitRates[1],scalabilityMode:this._sharingScalabilityMode||"L1T3",dtx:!0}),this._numSimulcastStreams>2&&g.unshift({scaleResolutionDownBy:4,maxBitrate:1e3*this.data.inputParams.shareBitRates[2],scalabilityMode:this._sharingScalabilityMode||"L1T3",dtx:!0}))}this._shareProducer=await this._sendTransport.produce({track:C,encodings:g,codecOptions:E,codec:B,appData:{mediaTag:"screen-video"}}),this._producers.set("ssVideo",{id:this._shareProducer.id,type:"shareVideo",paused:this._shareProducer.paused,track:this._shareProducer.track,rtpParameters:this._shareProducer.rtpParameters,codec:this._shareProducer.rtpParameters.codecs[0].mimeType.split("/")[1]}),this.emit("ssVideoStart",{peerId:this.data.inputParams.peerId,videoTrack:this._shareProducer.track,type:"local"}),this._shareProducer.on("transportclose",()=>{this._shareProducer=null}),this._shareProducer.on("trackended",()=>{this.disableShare().catch(()=>{})})}catch(A){ji.error("enableShare() | failed:%o",A),"NotAllowedError"!==A.name&&this.emit("error",{code:"EID014",text:`Error while trying to start screen share. 
Error is: ${A}!`}),C&&C.stop()}}async disableShare(){if(ji.debug("disableShare()"),this.data.inputParams.roomType===RE){ji.debug("Room type is P2P - using P2P screen share disable");try{return await this.disableScreenShareP2p(),{success:!0}}catch(A){return ji.error("Error disabling screen share in P2P mode:%O",A),void this.emit("error",{code:"EID017",text:`Error while trying to stop screen share in P2P mode: ${A.message}`})}}if(!this._shareProducer)return ji.warn("Screen share doesn't seem to be on!"),void this.emit("error",{code:"EID017",text:"Error while trying to stop screen share. Is the screen share on!"});if(this._shareProducer.close(),this._shareAudioProducer){this._shareAudioProducer.close();try{let A={id:"closeProducerSDK",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,type:"audio",producerId:this._shareAudioProducer.id};this._sendMessage(A),this.emit("ssAudioStop",{peerId:this.data.inputParams.peerId,videoTrack:null,type:"local"});try{this._shareAudioProducer.track&&this._shareAudioProducer.track.stop&&this._shareAudioProducer.track.stop()}catch(A){}}catch(A){this.emit("error",{code:"EID015",text:`Error while trying to stop screen share audio. Error is: ${A}!`})}}try{let A={id:"closeProducerSDK",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,type:"video",producerId:this._shareProducer.id};this._sendMessage(A),this.emit("ssVideoStop",{peerId:this.data.inputParams.peerId,videoTrack:null,type:"local"});try{this._shareProducer.track&&this._shareProducer.track.stop&&this._shareProducer.track.stop()}catch(A){}}catch(A){this.emit("error",{code:"EID016",text:`Error while trying to stop screen share video. Error is: ${A}!`})}this._shareAudioProducer=null,this._shareProducer=null}upgradeParticipant=(A,I=!0,g=!1)=>{ji.debug("JsSdk upgradeParticipant()",`Upgrading ${A}`);const C={id:"upgradeParticipant",peerId:A,roomName:this.data.inputParams.roomId,audioStatus:I,videoStatus:g};this._sendMessage(C)};downgradeParticipant=A=>{ji.debug("JsSdk downgradeParticipant()",`Downgrading ${A}`);const I={id:"downgradeParticipant",peerId:A,roomName:this.data.inputParams.roomId,sendTransportId:null};this._sendMessage(I)};sendUpgradeRequest=(A,I=!0)=>{ji.debug("JsSdk sendUpgradeRequest()",`Sending upgrade request to ${A}, status: ${I}`);const g={id:"modUpgradeReq",peerId:A,status:I,moderator:this.data.inputParams.peerId};this._sendMessage(g),this.emit("upgradeRequestSent",{peerId:A,status:I})};acceptUpgradeRequest=(A=!0,I=!1)=>{ji.debug("JsSdk acceptUpgradeRequest()","Accepting upgrade request");const g={id:"upgradeReqAccepted",audioStatus:A,videoStatus:I};this._sendMessage(g)};rejectUpgradeRequest=A=>{ji.debug("JsSdk rejectUpgradeRequest()","Rejecting upgrade request");const I={id:"modUpgradeReq",peerId:this.data.inputParams.peerId,status:!1};this._sendMessage(I),this.emit("upgradeRequestRejected",{moderatorPeerId:A})};raiseHand=()=>{ji.debug("JsSdk raiseHand()");const A={id:"handRaise",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,status:!0,handRaised:!0};this._sendMessage(A),this.emit("handRaised",{peerId:this.data.inputParams.peerId})};dropHand=(A=null,I=!1)=>{const g=A||this.data.inputParams.peerId;ji.debug("JsSdk dropHand()",`Dropping hand for ${g}`);const C={id:"handRaise",peerId:g,roomName:this.data.inputParams.roomId,status:!1,handRaised:!1,moderator:I};this._sendMessage(C),this.emit("handDropped",{peerId:g,moderator:I})};requestUpgradeToPresenter=async()=>{try{const 
A={id:"handRaise",peerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId,status:!0,handRaised:!0,upgradeRequest:!0};return this._sendMessage(A),this.emit("upgradeRequestSent",{status:"success",message:"Upgrade request sent"}),{success:!0}}catch(A){return ji.error("requestUpgradeToPresenter failed",A),{success:!1,reason:A?.message||"unknown_error"}}};handleUpgradeParticipant=async A=>{const{peerId:I,audioStatus:g,videoStatus:C}=A;ji.debug("JsSdk handleUpgradeParticipant()",`Upgrade received for ${I}`),this.emit("participantUpgraded",{peerId:I,audioStatus:g,videoStatus:C,participantType:"presenter"}),I===this.data.inputParams.peerId&&(ji.debug("JsSdk handleUpgradeParticipant()","Current user upgraded to presenter"),this.data.inputParams.peerType="presenter",this.data.inputParams.produce=!0,this.data.inputParams.produceAudio=!0,this.data.inputParams.produceVideo=!0,this._sendTransport||await this._createSendTransport(),this.emit("upgraded",{audioStatus:g,videoStatus:C,participantType:"presenter"}))};handleDowngradeParticipant=async A=>{const{peerId:I}=A;if(ji.debug("JsSdk handleDowngradeParticipant()",`Downgrade received for ${I}`),this.emit("participantDowngraded",{peerId:I,participantType:"viewer"}),I===this.data.inputParams.peerId){if(ji.debug("JsSdk handleDowngradeParticipant()","Current user downgraded to viewer"),this.data.inputParams.peerType="viewer",this.data.inputParams.produce=!1,this.data.inputParams.produceAudio=!1,this.data.inputParams.produceVideo=!1,this._sendTransport&&(this._sendTransport.close(),this._sendTransport=null),this._camAudioProducer){try{this._camAudioProducer.track&&this._camAudioProducer.track.stop&&this._camAudioProducer.track.stop()}catch(A){}this._camAudioProducer.close(),this._camAudioProducer=null}if(this._camVideoProducer){try{this._camVideoProducer.track&&this._camVideoProducer.track.stop&&this._camVideoProducer.track.stop()}catch(A){}this._camVideoProducer.close(),this._camVideoProducer=null}if(this._shareProducer){try{this._shareProducer.track&&this._shareProducer.track.stop&&this._shareProducer.track.stop()}catch(A){}this._shareProducer.close(),this._shareProducer=null,this.emit("ssVideoStop",{peerId:this.data.inputParams.peerId,type:"local"})}if(this._shareAudioProducer){try{this._shareAudioProducer.track&&this._shareAudioProducer.track.stop&&this._shareAudioProducer.track.stop()}catch(A){}this._shareAudioProducer.close(),this._shareAudioProducer=null}this._localCamVideo&&(this._localCamVideo.stop(),this._localCamVideo=null),this._localMicAudio&&(this._localMicAudio.stop(),this._localMicAudio=null),this._webCamStream&&(this._webCamStream.getTracks().forEach(A=>A.stop()),this._webCamStream=null),this._micStream&&(this._micStream.getTracks().forEach(A=>A.stop()),this._micStream=null),this.emit("downgraded",{participantType:"viewer"})}};handleModUpgradeReq=A=>{const{peerId:I,status:g,moderator:C}=A;ji.debug("JsSdk handleModUpgradeReq()",`Moderator upgrade request received. 
Status: ${g}`),"moderator"!==this.data.inputParams.peerType||g?g?this.emit("upgradeRequestReceived",{peerId:this.data.inputParams.peerId,moderator:C,message:"Moderator requested Participant to Presenter upgrade"}):this.emit("upgradeRequestCancelled",{peerId:this.data.inputParams.peerId,moderator:C,message:"Moderator cancelled Participant to Presenter upgrade request"}):this.emit("upgradeRequestRejected",{peerId:I,moderator:C,message:"Participant rejected request for upgrade to Presenter"})};handleUpgradeLimitReached=A=>{ji.debug("JsSdk handleUpgradeLimitReached()",A),this.emit("upgradeLimitReached",{message:A.text||"Maximum number of presenters reached",limit:A.limit})};handleHandRaise=A=>{const{peerId:I,handRaised:g,peerName:C,upgradeRequest:Q}=A;ji.debug("JsSdk handleHandRaise()",`Hand raise status: ${g} for ${I}`),this.emit("handRaise",{peerId:I,handRaised:g,peerName:C,upgradeRequest:!!Q})};handleSwitchMicOff=A=>{ji.debug("JsSdk handleSwitchMicOff()",A),this.emit("micForcedOff",{message:A.text||"Moderator turned off your microphone"}),this._camAudioProducer&&!this._camAudioProducer.paused&&this.muteMic()};handleScreenShareLimitReached=A=>{ji.debug("JsSdk handleScreenShareLimitReached()",A),this.emit("screenShareLimitReached",{message:A.text||"Maximum number of screen shares reached",limit:A.limit})};handleLockUnlockRoom=A=>{const{locked:I}=A;ji.debug("JsSdk handleLockUnlockRoom()","Room "+(I?"locked":"unlocked")),this.emit("roomLockStatusChanged",{locked:I,message:I?"Room has been locked":"Room has been unlocked"})};handlePeersWaiting=A=>{const{peersWaiting:I}=A;ji.debug("JsSdk handlePeersWaiting()",`${I?.length||0} peers waiting`),this.emit("peersWaiting",{peersWaiting:I||[],count:I?.length||0})};logMeOutNew=async()=>{try{ji.debug("Room","inside log me out new"),this.emit("roomClosed",{roomId:this.data.inputParams.roomId,reason:"removed_by_moderator"})}catch(A){}await this.leaveRoom()};logThisUserOutOfMeeting=A=>{if(ji.debug("Room","inside log this user out of meeting"),A===this.data.inputParams.peerId)ji.debug("ConferenceRoom","logging myself Out"),this.leaveRoom();else try{var I={id:"logThisUserOut",peerId:A,moderatorPeerId:this.data.inputParams.peerId,roomName:this.data.inputParams.roomId};this._sendMessage(I)}catch(A){ji.error("Room",A)}}}const $i="cp2p-client";class AD{constructor(A){A?(this._debug=ag(`${$i}:${A}`),this._info=ag(`${$i}:INFO:${A}`),this._warn=ag(`${$i}:WARN:${A}`),this._error=ag(`${$i}:ERROR:${A}`)):(this._debug=ag($i),this._info=ag(`${$i}:INFO`),this._warn=ag(`${$i}:WARN`),this._error=ag(`${$i}:ERROR`)),this._debug.log=console.info.bind(console),this._info.log=console.info.bind(console),this._warn.log=console.warn.bind(console),this._error.log=console.error.bind(console)}get debug(){return this._debug}get info(){return this._info}get warn(){return this._warn}get error(){return this._error}}const ID={audio:{deviceId:{exact:void 0}},video:!1},gD={video:{deviceId:{exact:void 0},width:{min:320,ideal:640,max:1280},height:{min:240,ideal:480,max:720},frameRate:{min:15,max:30}}},CD={audio:!0,video:{width:{min:320,ideal:1280,max:1280},height:{min:240,ideal:720,max:720},aspectRatio:1.777777778,frameRate:{min:15,max:30}}},QD=new AD("socket");class BD extends hg.EventEmitter{constructor({url:A,roomId:I,peerId:g,peerName:C,role:Q}){super(),QD.debug("constructor():%o ",{url:A,roomId:I,peerId:g,peerName:C,role:Q}),this._closed=!1,this._params={url:A,roomId:I,peerId:g,peerName:C,role:Q},this._socket=null,this._connectionStatus=null,this._createSocket()}get closed(){return 
this._closed}get connectionStatus(){return this._connectionStatus}close(){if(!this._closed){QD.debug("close()"),this._closed=!0,this.emit("close");try{this._socket.disconnect()}catch(A){QD.error("close() | error closing the Socket:%o",A)}}}async send(A){if(this._closed)throw new Error("transport closed");try{this._socket.send(JSON.stringify(A))}catch(A){throw QD.warn("send() failed:%o",A),A}}async request({type:A,message:I}){return new Promise(g=>{if(this._closed)throw new Error("transport closed");try{this._socket.emit(A,JSON.stringify(I),A=>{g(A)})}catch(A){throw QD.warn("emit() failed:%o",A),A}})}async _createSocket(){let A=this;const I=io(this._params.url,{query:{roomId:this._params.roomId,peerId:this._params.peerId,peerName:this._params.peerName,role:this._params.role}});I.on("connect",()=>{QD.debug("Socket connected!!"),A._connectionStatus=!0,A.emit("connected")}),I.on("disconnect",()=>{QD.debug("Socket disconnected!!"),A._connectionStatus=!1,A.emit("disconnected")}),I.on("reconnect",()=>{QD.debug("Socket reconnected after disconnect!!"),I.emit("reconnected")}),I.on("message",I=>{const g=JSON.parse(I);QD.debug("New mesage received with id:%s",g.type),A.emit("message",g)}),this._socket=I}}(function(){var A={function:!0,object:!0},I=A[typeof window]&&window||this,g=A[typeof exports]&&exports,C=A[typeof module]&&module&&!module.nodeType&&module,Q=g&&C&&"object"==typeof globalThis&&globalThis;!Q||Q.global!==Q&&Q.window!==Q&&Q.self!==Q||(I=Q);var B=Math.pow(2,53)-1,E=/\bOpera/,i=Object.prototype,D=i.hasOwnProperty,n=i.toString;function o(A){return(A=String(A)).charAt(0).toUpperCase()+A.slice(1)}function s(A){return A=R(A),/^(?:webOS|i(?:OS|P))/.test(A)?A:o(A)}function w(A,I){for(var g in A)D.call(A,g)&&I(A[g],g,A)}function a(A){return null==A?o(A):n.call(A).slice(8,-1)}function y(A){return String(A).replace(/([ -])(?!$)/g,"$1?")}function G(A,I){var g=null;return function(A,I){var g=-1,C=A?A.length:0;if("number"==typeof C&&C>-1&&C<=B)for(;++g<C;)I(A[g],g);else w(A,I)}(A,function(C,Q){g=I(g,C,Q,A)}),g}function R(A){return String(A).replace(/^ +| +$/g,"")}var S=function A(g){var C=I,Q=g&&"object"==typeof g&&"String"!=a(g);Q&&(C=g,g=null);var B=C.navigator||{},i=B.userAgent||"";g||(g=i);var D,o,S,h,N,K=Q?!!B.likeChrome:/\bChrome\b/.test(g)&&!/internal|\n/i.test(n.toString()),M="Object",F=Q?M:"ScriptBridgingProxyObject",L=Q?M:"Environment",k=Q&&C.java?"JavaPackage":a(C.java),J=Q?M:"RuntimeObject",U=/\bJava/.test(k)&&C.java,t=U&&a(C.environment)==L,q=U?"a":"α",r=U?"b":"β",c=C.document||{},H=C.operamini||C.opera,e=E.test(e=Q&&H?H["[[Class]]"]:a(H))?e:H=null,Y=g,d=[],b=null,x=g==i,u=x&&H&&"function"==typeof H.version&&H.version(),p=G([{label:"EdgeHTML",pattern:"Edge"},"Trident",{label:"WebKit",pattern:"AppleWebKit"},"iCab","Presto","NetFront","Tasman","KHTML","Gecko"],function(A,I){return A||RegExp("\\b"+(I.pattern||y(I))+"\\b","i").exec(g)&&(I.label||I)}),T=G(["Adobe AIR","Arora","Avant Browser","Breach","Camino","Electron","Epiphany","Fennec","Flock","Galeon","GreenBrowser","iCab","Iceweasel","K-Meleon","Konqueror","Lunascape","Maxthon",{label:"Microsoft Edge",pattern:"(?:Edge|Edg|EdgA|EdgiOS)"},"Midori","Nook Browser","PaleMoon","PhantomJS","Raven","Rekonq","RockMelt",{label:"Samsung Internet",pattern:"SamsungBrowser"},"SeaMonkey",{label:"Silk",pattern:"(?:Cloud9|Silk-Accelerated)"},"Sleipnir","SlimBrowser",{label:"SRWare Iron",pattern:"Iron"},"Sunrise","Swiftfox","Vivaldi","Waterfox","WebPositive",{label:"Yandex Browser",pattern:"YaBrowser"},{label:"UC Browser",pattern:"UCBrowser"},"Opera 
Mini",{label:"Opera Mini",pattern:"OPiOS"},"Opera",{label:"Opera",pattern:"OPR"},"Chromium","Chrome",{label:"Chrome",pattern:"(?:HeadlessChrome)"},{label:"Chrome Mobile",pattern:"(?:CriOS|CrMo)"},{label:"Firefox",pattern:"(?:Firefox|Minefield)"},{label:"Firefox for iOS",pattern:"FxiOS"},{label:"IE",pattern:"IEMobile"},{label:"IE",pattern:"MSIE"},"Safari"],function(A,I){return A||RegExp("\\b"+(I.pattern||y(I))+"\\b","i").exec(g)&&(I.label||I)}),m=f([{label:"BlackBerry",pattern:"BB10"},"BlackBerry",{label:"Galaxy S",pattern:"GT-I9000"},{label:"Galaxy S2",pattern:"GT-I9100"},{label:"Galaxy S3",pattern:"GT-I9300"},{label:"Galaxy S4",pattern:"GT-I9500"},{label:"Galaxy S5",pattern:"SM-G900"},{label:"Galaxy S6",pattern:"SM-G920"},{label:"Galaxy S6 Edge",pattern:"SM-G925"},{label:"Galaxy S7",pattern:"SM-G930"},{label:"Galaxy S7 Edge",pattern:"SM-G935"},"Google TV","Lumia","iPad","iPod","iPhone","Kindle",{label:"Kindle Fire",pattern:"(?:Cloud9|Silk-Accelerated)"},"Nexus","Nook","PlayBook","PlayStation Vita","PlayStation","TouchPad","Transformer",{label:"Wii U",pattern:"WiiU"},"Wii","Xbox One",{label:"Xbox 360",pattern:"Xbox"},"Xoom"]),l=G({Apple:{iPad:1,iPhone:1,iPod:1},Alcatel:{},Archos:{},Amazon:{Kindle:1,"Kindle Fire":1},Asus:{Transformer:1},"Barnes & Noble":{Nook:1},BlackBerry:{PlayBook:1},Google:{"Google TV":1,Nexus:1},HP:{TouchPad:1},HTC:{},Huawei:{},Lenovo:{},LG:{},Microsoft:{Xbox:1,"Xbox One":1},Motorola:{Xoom:1},Nintendo:{"Wii U":1,Wii:1},Nokia:{Lumia:1},Oppo:{},Samsung:{"Galaxy S":1,"Galaxy S2":1,"Galaxy S3":1,"Galaxy S4":1},Sony:{PlayStation:1,"PlayStation Vita":1},Xiaomi:{Mi:1,Redmi:1}},function(A,I,C){return A||(I[m]||I[/^[a-z]+(?: +[a-z]+\b)*/i.exec(m)]||RegExp("\\b"+y(C)+"(?:\\b|\\w*\\d)","i").exec(g))&&C}),O=G(["Windows Phone","KaiOS","Android","CentOS",{label:"Chrome OS",pattern:"CrOS"},"Debian",{label:"DragonFly BSD",pattern:"DragonFly"},"Fedora","FreeBSD","Gentoo","Haiku","Kubuntu","Linux Mint","OpenBSD","Red Hat","SuSE","Ubuntu","Xubuntu","Cygwin","Symbian OS","hpwOS","webOS ","webOS","Tablet OS","Tizen","Linux","Mac OS X","Macintosh","Mac","Windows 98;","Windows "],function(A,I){var C=I.pattern||y(I);return!A&&(A=RegExp("\\b"+C+"(?:/[\\d.]+|[ \\w.]*)","i").exec(g))&&(A=function(A,I,g){var C={"10.0":"10",6.4:"10 Technical Preview",6.3:"8.1",6.2:"8",6.1:"Server 2008 R2 / 7","6.0":"Server 2008 / Vista",5.2:"Server 2003 / XP 64-bit",5.1:"XP",5.01:"2000 SP1","5.0":"2000","4.0":"NT","4.90":"ME"};return I&&g&&/^Win/i.test(A)&&!/^Windows Phone /i.test(A)&&(C=C[/[\d.]+$/.exec(A)])&&(A="Windows "+C),A=String(A),I&&g&&(A=A.replace(RegExp(I,"i"),g)),s(A.replace(/ ce$/i," CE").replace(/\bhpw/i,"web").replace(/\bMacintosh\b/,"Mac OS").replace(/_PowerPC\b/i," OS").replace(/\b(OS X) [^ \d]+/i,"$1").replace(/\bMac (OS X)\b/,"$1").replace(/\/(\d)/," $1").replace(/_/g,".").replace(/(?: BePC|[ .]*fc[ \d.]+)$/i,"").replace(/\bx86\.64\b/gi,"x86_64").replace(/\b(Windows Phone) OS\b/,"$1").replace(/\b(Chrome OS \w+) [\d.]+\b/,"$1").split(" on ")[0])}(A,C,I.label||I)),A});function f(A){return G(A,function(A,I){var C=I.pattern||y(I);return!A&&(A=RegExp("\\b"+C+" *\\d+[.\\w_]*","i").exec(g)||RegExp("\\b"+C+" *\\w+-[\\w]*","i").exec(g)||RegExp("\\b"+C+"(?:; *(?:[a-z]+[_-])?[a-z]+\\d+|[^ ();-]*)","i").exec(g))&&((A=String(I.label&&!RegExp(C,"i").test(I.label)?I.label:A).split("/"))[1]&&!/[\d.]+/.test(A[0])&&(A[0]+=" "+A[1]),I=I.label||I,A=s(A[0].replace(RegExp(C,"i"),I).replace(RegExp("; *(?:"+I+"[_-])?","i")," ").replace(RegExp("("+I+")[-_.]?(\\w)","i"),"$1 $2"))),A})}function P(A){return 
G(A,function(A,I){return A||(RegExp(I+"(?:-[\\d.]+/|(?: for [\\w-]+)?[ /-])([\\d.]+[^ ();/_-]*)","i").exec(g)||0)[1]||null})}if(p&&(p=[p]),/\bAndroid\b/.test(O)&&!m&&(D=/\bAndroid[^;]*;(.*?)(?:Build|\) AppleWebKit)\b/i.exec(g))&&(m=R(D[1]).replace(/^[a-z]{2}-[a-z]{2};\s*/i,"")||null),l&&!m?m=f([l]):l&&m&&(m=m.replace(RegExp("^("+y(l)+")[-_.\\s]","i"),l+" ").replace(RegExp("^("+y(l)+")[-_.]?(\\w)","i"),l+" $2")),(D=/\bGoogle TV\b/.exec(m))&&(m=D[0]),/\bSimulator\b/i.test(g)&&(m=(m?m+" ":"")+"Simulator"),"Opera Mini"==T&&/\bOPiOS\b/.test(g)&&d.push("running in Turbo/Uncompressed mode"),"IE"==T&&/\blike iPhone OS\b/.test(g)?(l=(D=A(g.replace(/like iPhone OS/,""))).manufacturer,m=D.product):/^iP/.test(m)?(T||(T="Safari"),O="iOS"+((D=/ OS ([\d_]+)/i.exec(g))?" "+D[1].replace(/_/g,"."):"")):"Konqueror"==T&&/^Linux\b/i.test(O)?O="Kubuntu":l&&"Google"!=l&&(/Chrome/.test(T)&&!/\bMobile Safari\b/i.test(g)||/\bVita\b/.test(m))||/\bAndroid\b/.test(O)&&/^Chrome/.test(T)&&/\bVersion\//i.test(g)?(T="Android Browser",O=/\bAndroid\b/.test(O)?O:"Android"):"Silk"==T?(/\bMobi/i.test(g)||(O="Android",d.unshift("desktop mode")),/Accelerated *= *true/i.test(g)&&d.unshift("accelerated")):"UC Browser"==T&&/\bUCWEB\b/.test(g)?d.push("speed mode"):"PaleMoon"==T&&(D=/\bFirefox\/([\d.]+)\b/.exec(g))?d.push("identifying as Firefox "+D[1]):"Firefox"==T&&(D=/\b(Mobile|Tablet|TV)\b/i.exec(g))?(O||(O="Firefox OS"),m||(m=D[1])):!T||(D=!/\bMinefield\b/i.test(g)&&/\b(?:Firefox|Safari)\b/.exec(T))?(T&&!m&&/[\/,]|^[^(]+?\)/.test(g.slice(g.indexOf(D+"/")+8))&&(T=null),(D=m||l||O)&&(m||l||/\b(?:Android|Symbian OS|Tablet OS|webOS)\b/.test(O))&&(T=/[a-z]+(?: Hat)?/i.exec(/\bAndroid\b/.test(O)?O:D)+" Browser")):"Electron"==T&&(D=(/\bChrome\/([\d.]+)\b/.exec(g)||0)[1])&&d.push("Chromium "+D),u||(u=P(["(?:Cloud9|CriOS|CrMo|Edge|Edg|EdgA|EdgiOS|FxiOS|HeadlessChrome|IEMobile|Iron|Opera ?Mini|OPiOS|OPR|Raven|SamsungBrowser|Silk(?!/[\\d.]+$)|UCBrowser|YaBrowser)","Version",y(T),"(?:Firefox|Minefield|NetFront)"])),(D=("iCab"==p&&parseFloat(u)>3?"WebKit":/\bOpera\b/.test(T)&&(/\bOPR\b/.test(g)?"Blink":"Presto"))||/\b(?:Midori|Nook|Safari)\b/i.test(g)&&!/^(?:Trident|EdgeHTML)$/.test(p)&&"WebKit"||!p&&/\bMSIE\b/i.test(g)&&("Mac OS"==O?"Tasman":"Trident")||"WebKit"==p&&/\bPlayStation\b(?! Vita\b)/i.test(T)&&"NetFront")&&(p=[D]),"IE"==T&&(D=(/; *(?:XBLWP|ZuneWP)(\d+)/i.exec(g)||0)[1])?(T+=" Mobile",O="Windows Phone "+(/\+$/.test(D)?D:D+".x"),d.unshift("desktop mode")):/\bWPDesktop\b/i.test(g)?(T="IE Mobile",O="Windows Phone 8.x",d.unshift("desktop mode"),u||(u=(/\brv:([\d.]+)/.exec(g)||0)[1])):"IE"!=T&&"Trident"==p&&(D=/\brv:([\d.]+)/.exec(g))&&(T&&d.push("identifying as "+T+(u?" 
"+u:"")),T="IE",u=D[1]),x){if(h="global",N=null!=(S=C)?typeof S[h]:"number",/^(?:boolean|number|string|undefined)$/.test(N)||"object"==N&&!S[h])a(D=C.runtime)==F?(T="Adobe AIR",O=D.flash.system.Capabilities.os):a(D=C.phantom)==J?(T="PhantomJS",u=(D=D.version||null)&&D.major+"."+D.minor+"."+D.patch):"number"==typeof c.documentMode&&(D=/\bTrident\/(\d+)/i.exec(g))?(u=[u,c.documentMode],(D=+D[1]+4)!=u[1]&&(d.push("IE "+u[1]+" mode"),p&&(p[1]=""),u[1]=D),u="IE"==T?String(u[1].toFixed(1)):u[0]):"number"==typeof c.documentMode&&/^(?:Chrome|Firefox)\b/.test(T)&&(d.push("masking as "+T+" "+u),T="IE",u="11.0",p=["Trident"],O="Windows");else if(U&&(Y=(D=U.lang.System).getProperty("os.arch"),O=O||D.getProperty("os.name")+" "+D.getProperty("os.version")),t){try{u=C.require("ringo/engine").version.join("."),T="RingoJS"}catch(A){(D=C.system)&&D.global.system==C.system&&(T="Narwhal",O||(O=D[0].os||null))}T||(T="Rhino")}else"object"==typeof C.process&&!C.process.browser&&(D=C.process)&&("object"==typeof D.versions&&("string"==typeof D.versions.electron?(d.push("Node "+D.versions.node),T="Electron",u=D.versions.electron):"string"==typeof D.versions.nw&&(d.push("Chromium "+u,"Node "+D.versions.node),T="NW.js",u=D.versions.nw)),T||(T="Node.js",Y=D.arch,O=D.platform,u=(u=/[\d.]+/.exec(D.version))?u[0]:null));O=O&&s(O)}if(u&&(D=/(?:[ab]|dp|pre|[ab]\d+pre)(?:\d+\+?)?$/i.exec(u)||/(?:alpha|beta)(?: ?\d)?/i.exec(g+";"+(x&&B.appMinorVersion))||/\bMinefield\b/i.test(g)&&"a")&&(b=/b/i.test(D)?"beta":"alpha",u=u.replace(RegExp(D+"\\+?$"),"")+("beta"==b?r:q)+(/\d+\+?/.exec(D)||"")),"Fennec"==T||"Firefox"==T&&/\b(?:Android|Firefox OS|KaiOS)\b/.test(O))T="Firefox Mobile";else if("Maxthon"==T&&u)u=u.replace(/\.[\d.]+/,".x");else if(/\bXbox\b/i.test(m))"Xbox 360"==m&&(O=null),"Xbox 360"==m&&/\bIEMobile\b/.test(g)&&d.unshift("mobile mode");else if(!/^(?:Chrome|IE|Opera)$/.test(T)&&(!T||m||/Browser|Mobi/.test(T))||"Windows CE"!=O&&!/Mobi/i.test(g))if("IE"==T&&x)try{null===C.external&&d.unshift("platform preview")}catch(A){d.unshift("embedded")}else(/\bBlackBerry\b/.test(m)||/\bBB10\b/.test(g))&&(D=(RegExp(m.replace(/ +/g," *")+"/([.\\d]+)","i").exec(g)||0)[1]||u)?(O=((D=[D,/BB10/.test(g)])[1]?(m=null,l="BlackBerry"):"Device Software")+" "+D[0],u=null):this!=w&&"Wii"!=m&&(x&&H||/Opera/.test(T)&&/\b(?:MSIE|Firefox)\b/i.test(g)||"Firefox"==T&&/\bOS X (?:\d+\.){2,}/.test(O)||"IE"==T&&(O&&!/^Win/.test(O)&&u>5.5||/\bWindows XP\b/.test(O)&&u>8||8==u&&!/\bTrident\b/.test(g)))&&!E.test(D=A.call(w,g.replace(E,"")+";"))&&D.name&&(D="ing as "+D.name+((D=D.version)?" 
"+D:""),E.test(T)?(/\bIE\b/.test(D)&&"Mac OS"==O&&(O=null),D="identify"+D):(D="mask"+D,T=e?s(e.replace(/([a-z])([A-Z])/g,"$1 $2")):"Opera",/\bIE\b/.test(D)&&(O=null),x||(u=null)),p=["Presto"],d.push(D));else T+=" Mobile";(D=(/\bAppleWebKit\/([\d.]+\+?)/i.exec(g)||0)[1])&&(D=[parseFloat(D.replace(/\.(\d)$/,".0$1")),D],"Safari"==T&&"+"==D[1].slice(-1)?(T="WebKit Nightly",b="alpha",u=D[1].slice(0,-1)):u!=D[1]&&u!=(D[2]=(/\bSafari\/([\d.]+\+?)/i.exec(g)||0)[1])||(u=null),D[1]=(/\b(?:Headless)?Chrome\/([\d.]+)/i.exec(g)||0)[1],537.36==D[0]&&537.36==D[2]&&parseFloat(D[1])>=28&&"WebKit"==p&&(p=["Blink"]),x&&(K||D[1])?(p&&(p[1]="like Chrome"),D=D[1]||((D=D[0])<530?1:D<532?2:D<532.05?3:D<533?4:D<534.03?5:D<534.07?6:D<534.1?7:D<534.13?8:D<534.16?9:D<534.24?10:D<534.3?11:D<535.01?12:D<535.02?"13+":D<535.07?15:D<535.11?16:D<535.19?17:D<536.05?18:D<536.1?19:D<537.01?20:D<537.11?"21+":D<537.13?23:D<537.18?24:D<537.24?25:D<537.36?26:"Blink"!=p?"27":"28")):(p&&(p[1]="like Safari"),D=(D=D[0])<400?1:D<500?2:D<526?3:D<533?4:D<534?"4+":D<535?5:D<537?6:D<538?7:D<601?8:D<602?9:D<604?10:D<606?11:D<608?12:"12"),p&&(p[1]+=" "+(D+="number"==typeof D?".x":/[.+]/.test(D)?"":"+")),"Safari"==T&&(!u||parseInt(u)>45)?u=D:"Chrome"==T&&/\bHeadlessChrome/i.test(g)&&d.unshift("headless")),"Opera"==T&&(D=/\bzbov|zvav$/.exec(O))?(T+=" ",d.unshift("desktop mode"),"zvav"==D?(T+="Mini",u=null):T+="Mobile",O=O.replace(RegExp(" *"+D+"$"),"")):"Safari"==T&&/\bChrome\b/.exec(p&&p[1])?(d.unshift("desktop mode"),T="Chrome Mobile",u=null,/\bOS X\b/.test(O)?(l="Apple",O="iOS 4.3+"):O=null):/\bSRWare Iron\b/.test(T)&&!u&&(u=P("Chrome")),u&&0==u.indexOf(D=/[\d.]+$/.exec(O))&&g.indexOf("/"+D+"-")>-1&&(O=R(O.replace(D,""))),O&&-1!=O.indexOf(T)&&!RegExp(T+" OS").test(O)&&(O=O.replace(RegExp(" *"+y(T)+" *"),"")),p&&!/\b(?:Avant|Nook)\b/.test(T)&&(/Browser|Lunascape|Maxthon/.test(T)||"Safari"!=T&&/^iOS/.test(O)&&/\bSafari\b/.test(p[1])||/^(?:Adobe|Arora|Breach|Midori|Opera|Phantom|Rekonq|Rock|Samsung Internet|Sleipnir|SRWare Iron|Vivaldi|Web)/.test(T)&&p[1])&&(D=p[p.length-1])&&d.push(D),d.length&&(d=["("+d.join("; ")+")"]),l&&m&&m.indexOf(l)<0&&d.push("on "+l),m&&d.push((/^on /.test(d[d.length-1])?"":"on ")+m),O&&(D=/ ([\d.+]+)$/.exec(O),o=D&&"/"==O.charAt(O.length-D[0].length-1),O={architecture:32,family:D&&!o?O.replace(D[0],""):O,version:D?D[1]:null,toString:function(){var A=this.version;return this.family+(A&&!o?" "+A:"")+(64==this.architecture?" 
64-bit":"")}}),(D=/\b(?:AMD|IA|Win|WOW|x86_|x)64\b/i.exec(Y))&&!/\bi686\b/i.test(Y)?(O&&(O.architecture=64,O.family=O.family.replace(RegExp(" *"+D),"")),T&&(/\bWOW64\b/i.test(g)||x&&/\w(?:86|32)$/.test(B.cpuClass||B.platform)&&!/\bWin64; x64\b/i.test(g))&&d.unshift("32-bit")):O&&/^OS X/.test(O.family)&&"Chrome"==T&&parseFloat(u)>=39&&(O.architecture=64),g||(g=null);var W={};return W.description=g,W.layout=p&&p[0],W.manufacturer=l,W.name=T,W.prerelease=b,W.product=m,W.ua=g,W.version=T&&u,W.os=O||{architecture:null,family:null,version:null,toString:function(){return"null"}},W.parse=A,W.toString=function(){return this.description||""},W.version&&d.unshift(u),W.name&&d.unshift(T),O&&T&&(O!=String(O).split(" ")[0]||O!=T.split(" ")[0]&&!m)&&d.push(m?"("+O+")":"on "+O),d.length&&(W.description=d.join(" ")),W}();"function"==typeof define&&"object"==typeof define.amd&&define.amd?(I.platform=S,define(function(){return S})):g&&C?w(S,function(A,I){g[I]=A}):I.platform=S}).call(void 0);const ED=new AD("Room");class iD extends hg.EventEmitter{static async createRoom({roomId:A,peerId:I,peerName:g,role:C="caller"}){return I||(I="111-111-1111".replace(/[018]/g,()=>(15&crypto.getRandomValues(new Uint8Array(1))[0]).toString(16))),A?new Room({peerId:I,roomId:A,peerName:g,role:C,url:"https://cp2p.centedge.io"}):{success:!1,reason:"roomId is required!"}}constructor({peerId:A,roomId:I,peerName:g,role:C,url:Q}){super(),this._closed=!1,this._roomId=I,this._socket=null,this._peerConnection=null,this._myDisplayName=g,this._myPeerId=A,this._myRole=C,this._localStream=null,this._remoteDisplayName=null,this._remotePeerId=null,this._remoteRole=null,this._remoteStream=null,this._canvas=null,this._imageData=null,this._imageWidth=1280,this._imageHeight=720,this._availableMediaInputs=null,this._userAgent={name:platform.name,version:platform.version,os:platform.os.family,description:platform.description},this._remoteMediaInputs=null,this._remoteUserAgent=null,this._statsCaptureInterval=null,this._localStats=null,this._remoteStats=null,this._otherPeerConnected=null,this._data={},this._url=Q,this.prepareForCall(),ED.debug("user agent is:%o",this._userAgent)}get imageData(){return this._imageData}get roomId(){return this._roomId}get localStream(){return this._localStream}get remoteStream(){return this._remoteStream}get myName(){return this._myDisplayName}set myName(A){this._myDisplayName=A}get myId(){return this._myPeerId}get myRole(){return this._myRole}get remoteName(){return this._remoteDisplayName}get remoteId(){return this._remotePeerId}get remoteRole(){return this._remoteRole}get imageHeight(){return this._imageHeight}get imageWidth(){return this._imageWidth}get userAgent(){return this._userAgent}get mediaInputs(){return this._availableMediaInputs}get remoteUserAgent(){return this._remoteUserAgent}get remoteMediaInputs(){return this._remoteMediaInputs}get localStats(){return this._localStats}get remoteStats(){return this._remoteStats}get pc(){return this._peerConnection}get data(){return this._data}set data(A){throw new Error("Can't set the whole of data object.")}setImageProps({width:A,height:I}){A&&I?(this._imageHeight=I,this._imageWidth=A):this.clientErrorHandler({reason:"Both width and height are required to set the new image properties. 
",error:null})}async prepareForCall(){let A=this;(async()=>{let A=[],I=[];return navigator.mediaDevices.enumerateDevices().then(function(g){let C=void 0!==g.find(A=>"videoinput"===A.kind),Q=void 0!==g.find(A=>"audioinput"===A.kind);return new Promise((g,B)=>{navigator.mediaDevices.getUserMedia({audio:Q,video:C}).then(C=>{navigator.mediaDevices.enumerateDevices().then(function(Q){Q.forEach(function(g){"audioinput"===g.kind&&I.push(g.label),"videoinput"===g.kind&&A.push(g.label)}),C.getTracks().forEach(A=>{A.stop()}),I.length>0&&A.length>0?g({videoLabels:A,audioLabels:I}):g({audioLabels:I,videoLabels:A})})}).catch(function(A){A.name,A.message,g({error:A.name,reason:A.message})})})}).catch(function(A){A.name,A.message})})().then(async I=>{if(I.error)switch(I.error){case"NotAllowedError":let A=`${I.error} : ${I.reason}!! You have denied the access to your Mic!! If you wish to join the room with video, please allow the camera access by clicking on the camera icon in the address bar Or by going to " chrome://settings/content#media-stream-mic " link in your chrome.`;this.clientErrorHandler({reason:A,error:null});break;case"NotFoundError":let g=`${I.error} : ${I.reason}!! No Microphone could be found!! If you have a camera or microphone in your computer but it is not able to detect them, please refresh your browser again or restart the browser after closing all the opened tabs.`;this.clientErrorHandler({reason:g,error:null});break;case"NotReadableError":let C=`${I.error} : ${I.reason}!! A hardware error occured at the OS OR browser Or Webpage level. If you have a microphone in your computer but it is not able to detect them, please refresh your browser again or restart the browser after closing all the opened tabs.`;this.clientErrorHandler({reason:C,error:null})}else I.audioLabels&&I.audioLabels.length>0&&I.videoLabels&&I.videoLabels.length>0&&(A._availableMediaInputs={audioInputs:I.audioLabels,videoInputs:I.videoLabels},ED.debug("available A/V inputs:%o",A.mediaInputs),await A.getUserMedia(),A.emit("mediaInputs"))}),navigator.mediaDevices.ondevicechange=async I=>{ED.info("Media devices changed!");let g=await(async()=>{let A=[],I=[];return new Promise((g,C)=>{navigator.mediaDevices.enumerateDevices().then(function(C){C.forEach(g=>{"audioinput"===g.kind&&I.push(g.label),"videoinput"===g.kind&&A.push(g.label)}),g({audioLabels:I,videoLabels:A})})})})();g.audioLabels&&g.audioLabels.length>0&&g.videoLabels&&g.videoLabels.length>0&&(A._availableMediaInputs={audioInputs:g.audioLabels,videoInputs:g.videoLabels},ED.debug("updated A/V inputs:%o",A.mediaInputs),A.emit("mediaInputs"),"client"===A.myRole&&A.pc&&(ED.info("Going to update the agent about the updated media devices list."),A.send({type:"mediaListUpdated",mediaInputs:A.mediaInputs})))}}send(A){this._closed||this._socket.send(A)}inform(A){if(this._closed)return;let I={type:"information",msg:A};this._socket.send(I)}async request({type:A,message:I}){const g=await this._socket.request({type:A,message:I});if(g)return g}gotMessageFromServer(A){switch(ED.debug("Got message:%o",A),A.type){case"login":this.handleLogin(A.details);break;case"offer":ED.debug("inside offer"),this.handleOffer({offer:A.offer,peer:A.peer,name:A.name});break;case"answer":ED.debug("inside answer"),this.handleAnswer(A);break;case"candidate":ED.debug("inside handle 
candidate"),this.handleCandidate(A.candidate);break;case"leave":this.handleLeave(),this.emit("peerHangUp");break;case"audioToggle":this.emit("audioToggle",{status:A.status,peer:"other"});break;case"videoToggle":this.emit("videoToggle",{status:A.status,peer:"other"});break;case"remoteStreamToggle":this.handleRemoteStreamToggle(A);break;case"chatMessage":this.emit("chatMessage",A.message);break;case"imageCapture":this.captureImage("local");break;case"mediaChange":this.changeRemoteMediaSource(A);break;case"requestStats":this.sendRequestedstats();break;case"statsFetched":this._remoteStats=A.stats,this.emit("remoteStats");break;case"information":this.emit("information",A.msg);break;case"iceRestart":this.handleIceRestart(A);break;case"iceRestarted":this.handleIceRestartResponse(A);break;case"nameUpdate":this._remoteDisplayName=A.name,this.emit("peerName");break;case"mediaListUpdated":this._remoteMediaInputs=A.mediaInputs,this.emit("remoteMediaInputs");break;case"peerLeft":this.pc||(this._remoteDisplayName=null,this._remotePeerId=null,this._remoteRole=null,this._otherPeerConnected=!1,this.emit("peerJoin","None"),alert("Other person disconnected from the room"));break;case"error":this.emit("serverError",A.details)}}async getUserMedia(){ED.debug("Inside getusermedia"),this._localStream?ED.debug("Local stream already acquired! Waiting for call!"):navigator.mediaDevices.getUserMedia?navigator.mediaDevices.getUserMedia(CD).then(A=>this.getUserMediaSuccess(A)).catch(A=>this.clientErrorHandler({reason:"Error while acquiring camera/mic",error:A})):alert("Your browser does not support getUserMedia API")}getUserMediaSuccess(A){let I=this;ED.debug("Inside getUserMediaSuccess"),this._localStream=A,this.emit("localStream"),this._socket=new BD({url:this._url,roomId:this._roomId,peerId:this._myPeerId,peerName:this._myDisplayName,role:this._myRole}),this._socket.on("message",A=>this.gotMessageFromServer(A)),this._socket.on("connected",async()=>{I.pc&&(ED.debug("Socket reconnection happened after a disconnection, ICEState:%s",I._peerConnection.iceConnectionState),"failed"!==I._peerConnection.iceConnectionState&&"disconnected"!==I._peerConnection.iceConnectionState||(ED.debug("Going to restart ICE as ICE is not in connected state!!"),await I.restartICE()))})}handleAudioToggle(){let A=this._localStream.getAudioTracks()[0];A.enabled=!A.enabled,this.send({type:"audioToggle",status:A.enabled}),this.emit("audioToggle",{status:A.enabled,peer:"self"})}handleVideoToggle(){let A=this._localStream.getVideoTracks()[0];A.enabled=!A.enabled,this.send({type:"videoToggle",status:A.enabled}),this.emit("videoToggle",{status:A.enabled,peer:"self"})}handleVideoToggleNew(){let A=this;if(this._peerConnection){const I=this._peerConnection.getSenders().find(function(A){return A.track&&"video"==A.track.kind});if(I){ED.debug("found sender:%o",I),this._peerConnection.removeTrack(I);let A=this._localStream.getVideoTracks()[0];A&&(A.stop(),this._localStream.removeTrack(A)),this.send({type:"videoToggle",status:!1}),this.emit("videoToggle",{status:!1,peer:"self"})}else ED.debug("sender not found. 
Going to create a new video track."),navigator.mediaDevices.getUserMedia?navigator.mediaDevices.getUserMedia({...CD,audio:!1}).then(I=>{let g=I.getVideoTracks()[0];A._localStream.addTrack(g),A._peerConnection.addTrack(g),A.send({type:"videoToggle",status:!0}),A.emit("localStream"),A.emit("videoToggle",{status:!0,peer:"self"})}).catch(A=>this.clientErrorHandler({reason:"Error while acquiring camera/mic",error:A})):alert("Your browser does not support getUserMedia API")}else ED.warn("Peer connection not found!")}handleRemoteAudioToggle(){this.send({type:"remoteStreamToggle",streamType:"audio"})}handleRemoteVideoToggle(){this.send({type:"remoteStreamToggle",streamType:"video"})}handleRemoteStreamToggle(A){ED.debug("inside remote stream toggle"),"audio"===A.streamType?this.handleAudioToggle():"video"===A.streamType?this.handleVideoToggle():ED.warn("Invalid stream type!")}sendChatMessage(A){A&&" "!==A?this.send({type:"chatMessage",message:A}):this.clientErrorHandler({reason:"Chat message need to have a valid value!",error:null})}async informPeer(){this.remoteId&&this.send({type:"nameUpdate",name:this.myName})}handleLogin(A){A&&(ED.debug("All available users:%o",A),this._remoteDisplayName=A.displayName,this._remotePeerId=A.peerId,this._remoteRole=A.role),this._otherPeerConnected=!0,this.emit("peerJoin",this._remoteDisplayName)}async gotRemoteTrack(A){ED.debug("inside got remote track:%o",A),A.streams&&A.streams[0]?(ED.debug("Got a remote stream"),this._remoteStream=A.streams[0]):(this._remoteStream=new MediaStream,this._remoteStream.addTrack(A.track),ED.debug("Got a remote track")),this.emit("remoteStream")}_onICeCandidate(A){ED.debug("onicecandidate",A.candidate),A.candidate&&this.send({type:"candidate",candidate:A.candidate,peer:this._remotePeerId})}handleNegotiationNeededEvent=()=>{ED.debug("handleNegotiationNeededEvent() | needed to negotiate a new negotiation"),this.restartICE()};handleRemoveTrackEvent=()=>{ED.debug("handleRemoveTrackEvent() | Track removed by remote peer. Need to handle UI locally."),this.emit("videoToggle",{status:!1,peer:"other"})};async createPeerOffer(A){let I,g=this;I=await this._socket.request({type:"fetchIceServers",message:{}}),this._peerConnection=new RTCPeerConnection({iceServers:I}),ED.debug("connection state in createPeeroffer():%s",this._peerConnection.connectionState),this._peerConnection.onicecandidate=A=>{this._onICeCandidate(A)},this._localStream.getTracks().forEach(A=>this._peerConnection.addTrack(A,this._localStream));const C=await this._peerConnection.createOffer().catch(function(A){alert("Error when creating an offer",A),ED.error("Error when creating an offer:%o",A)});await this._peerConnection.setLocalDescription(C),this.send({type:"offer",offer:g._peerConnection.localDescription,peer:g._remotePeerId,name:g.myName}),this._peerConnection.ontrack=A=>g.gotRemoteTrack(A),this._peerConnection.addEventListener("iceconnectionstatechange",async A=>{ED.debug("Current ICEState:%s",g._peerConnection.iceConnectionState),"failed"!==g._peerConnection.iceConnectionState&&"disconnected"!==g._peerConnection.iceConnectionState||(ED.debug("Ice connection failed!! 
with socket connection status:%s",g._socket.connectionStatus),g.emit("iceFailure"))}),this._peerConnection.onnegotiationneeded=()=>this.handleNegotiationNeededEvent(),this._peerConnection.onremovetrack=()=>this.handleRemoveTrackEvent()}async handleOffer({peer:A,name:I,offer:g}){let C=this;const Q=await this._socket.request({type:"fetchIceServers",message:{}});this._peerConnection=new RTCPeerConnection({iceServers:Q}),ED.debug("Peer connection in handle offer is:%s",this._peerConnection),this._remoteDisplayName=I,this._remotePeerId=A,this._peerConnection.setRemoteDescription(new RTCSessionDescription(g)),this._localStream.getTracks().forEach(A=>this._peerConnection.addTrack(A,this._localStream)),this._peerConnection.createAnswer().then(function(A){return C._peerConnection.setLocalDescription(A)}).then(function(){ED.debug("Ice already completed. sending answer!!"),C.send({type:"answer",answer:C._peerConnection.localDescription,peer:C._remotePeerId,mediaInputs:C.mediaInputs,userAgent:C.userAgent})}).catch(function(A){alert("Error when creating an answer"),ED.error("Error while creating answer:%o",A)}),this._peerConnection.ontrack=A=>C.gotRemoteTrack(A),this._peerConnection.onicecandidate=A=>{this._onICeCandidate(A)},this._peerConnection.addEventListener("iceconnectionstatechange",async A=>{ED.debug("Current ICEState:%s",C._peerConnection.iceConnectionState),"failed"!==C._peerConnection.iceConnectionState&&"disconnected"!==C._peerConnection.iceConnectionState||(ED.debug("Ice connection failed!! with socket connection status:%s",C._socket.connectionStatus),C.emit("iceFailure"))}),this._peerConnection.onnegotiationneeded=()=>this.handleNegotiationNeededEvent(),this._peerConnection.onremovetrack=()=>this.handleRemoveTrackEvent(),this.emit("offerReceived")}handleAnswer(A){ED.debug("answer:%o ",A),this._peerConnection.setRemoteDescription(new RTCSessionDescription(A.answer)),this._remoteMediaInputs=A.mediaInputs,this._remoteUserAgent=A.userAgent,this.emit("remoteMediaInputs")}async handleCandidate(A){this.pc?(ED.debug("Peerconnection available, Inside handle candidate for remote icecandidates"),this._peerConnection.addIceCandidate(new RTCIceCandidate(A)).catch(A=>{ED.error("Error while adding iceCandidate",A)})):ED.warn("Candidate arrived before peer connection instantiation:%o",A)}async restartICE(){const A=await this._peerConnection.createOffer({iceRestart:!0});await this._peerConnection.setLocalDescription(A),this.send({type:"iceRestart",offer:A})}async handleIceRestart(A){ED.debug("Ice restart message received!!"),this._peerConnection.setRemoteDescription(A.offer);const I=await this._peerConnection.createAnswer();await this._peerConnection.setLocalDescription(I),this.send({type:"iceRestarted",answer:I})}async handleIceRestartResponse(A){ED.debug("Ice restart message response received!!"),this._peerConnection.setRemoteDescription(A.answer)}async informRoomLeave(){this.send({type:"leave",peer:this.remoteId}),await this.handleLeave()}async 
handleLeave(){this._peerConnection?(this._peerConnection.ontrack=null,this._peerConnection.onremovetrack=null,this._peerConnection.onremovestream=null,this._peerConnection.onicecandidate=null,this._peerConnection.oniceconnectionstatechange=null,this._peerConnection.onsignalingstatechange=null,this._peerConnection.onicegatheringstatechange=null,this._peerConnection.onnegotiationneeded=null,this._localStream&&this._localStream.getTracks().forEach(A=>A.stop()),this._remoteStream&&this._remoteStream.getTracks().forEach(A=>A.stop()),this._peerConnection.close(),this._peerConnection=null,this._remoteDisplayName=null,this._remotePeerId=null,this._remoteStream=null,this._localStream=null,this._canvas=null,this._imageData=null,this._imageWidth=640,this._imageHeight=480,this._socket.close()):ED.error("No peerconnection object found!")}async changeRemoteMediaSource(A){let{label:I,mediaType:g}=A;!I||"video"!==g&&"audio"!==g?this.send({type:"error",details:{reason:"Can't change remote media source as either media label or media type is not available!"}}):this.handleReplaceTrack({label:I,mediaType:g})}async handleReplaceTrack({label:A,mediaType:I}){try{let g,C,Q;const B=await(A=>new Promise(I=>{navigator.mediaDevices.enumerateDevices().then(function(g){g.forEach(function(g){g.label===A&&I(g.deviceId)}),g[0].label||I({error:"noLabel",reason:"Device label names not found!"})}).catch(function(A){ConsoleHelper(A.name+": "+A.message),I({error:A.name,reason:A.message})})}))(A);"video"===I?(gD.video.deviceId.exact=B,Q=gD,g=this._localStream.getVideoTracks()[0]):"audio"===I?(ID.audio.deviceId.exact=B,Q=ID,g=this._localStream.getAudioTracks()[0]):ED.error("Unknown media type:%s",I);let E=g.enabled;g.stop(),ED.debug("selected constraints are:%o",Q);let i=await navigator.mediaDevices.getUserMedia(Q);if(ED.debug("new stream is:%o",i),"video"===I?C=i.getVideoTracks()[0]:"audio"===I&&(C=i.getAudioTracks()[0]),C.enabled=E,this._peerConnection){const A=this._peerConnection.getSenders().find(function(A){return A.track.kind==C.kind});ED.debug("found sender:%o",A),A.replaceTrack(C)}this._localStream.removeTrack(g),this._localStream.addTrack(C)}catch(A){ED.error("Error while replace media track:%o",A),this.clientErrorHandler({reason:"Error while replacing media track with new source",error:A})}}async captureImage(A){const I=document.createElement("canvas"),g=I.getContext("2d"),C=this._imageWidth,Q=this._imageHeight;ED.debug("height:%s and width:%s",C,Q),I.width=C,I.height=Q,"local"===A?g.drawImage(localVideo,0,0,C,Q):g.drawImage(remoteVideo,0,0,C,Q);let B=I.toDataURL("image/png");this._canvas=I,this._imageData=B,this.emit("imageCaptured")}async clearImage(){const A=this._canvas,I=A.getContext("2d");I.fillStyle="#AAA",I.fillRect(0,0,A.width,A.height);let g=A.toDataURL("image/png");this._imageData=g}captureRemoteImage(){this.send({type:"imageCapture"})}remoteCamChange(A){A?this.send({type:"mediaChange",mediaType:"video",label:A}):this.clientErrorHandler({reason:"A valid camera name must be selected for to change the remote camera!",error:null})}remoteMicChange(A){A?this.send({type:"mediaChange",mediaType:"audio",label:A}):this.clientErrorHandler({reason:"A valid mic name must be selected for to change the remote mic!",error:null})}requestStats(){this.send({type:"requestStats"})}async captureStats(){let A=this;return new Promise(I=>{this._peerConnection.getStats(null).then(g=>{let 
C={};g.forEach(A=>{["inbound-rtp","outbound-rtp","remote-inbound-rtp","remote-outbound-rtp","local-candidate","remote-candidate","candidate-pair"].includes(A.type)&&(C={...C,[`${A.type}`]:A})}),A._localStats=C,I(C)})})}displayStats({stats:A,element:I}){let g="";Object.values(A).forEach(A=>{g+=`<h2>Report: ${A.type}</h2>\n<strong>ID:</strong> ${A.id}<br>\n<strong>Timestamp:</strong> ${A.timestamp}<br>\n`,Object.keys(A).forEach(I=>{"id"!==I&&"timestamp"!==I&&"type"!==I&&(g+=`<strong>${I}:</strong> ${A[I]}<br>\n`)})}),document.querySelector(I).innerHTML=g}async sendRequestedstats(){let A=await this.captureStats();ED.debug("Local stats are:%o",A),this.send({type:"statsFetched",stats:A})}clientErrorHandler({reason:A,error:I}){this.emit("clientError",{reason:A,error:I})}copyLink(A){navigator.clipboard.writeText(A).then(function(){ED.debug("Async: Copying to clipboard was successful!"),alert("Link copied!!")},function(A){ED.error("Async: Could not copy text:%o ",A)})}}const DD={JsSdk_v1:lE,JsSdk:_i,P2pSdk:iD};export{_i as JsSdk,lE as JsSdk_v1,iD as P2pSdk,DD as default};
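The bundle above exports JsSdk, JsSdk_v1 and P2pSdk. Below is a minimal, hypothetical usage sketch for the SFU-style JsSdk room, inferred only from the method and event names visible in this chunk (joinRoom, enableShare, changeVideoInput, disableCam, the videoStart/error events); the constructor options and the import path are assumptions, not something this diff confirms.

// Hypothetical sketch inferred from method/event names in the minified bundle above.
// The constructor option shape and the import specifier are assumptions.
import { JsSdk } from "samvyo-js-sdk";

const room = new JsSdk(/* { roomId, peerId, peerName, produce: true, ... } — shape not shown in this chunk */);

room.on("videoStart", ({ peerId, videoTrack }) => {
  // attach videoTrack to a <video> element for the local preview
});
room.on("error", ({ code, text }) => console.error(code, text));

// joinRoom() is referenced by enableShare()'s error text ("Did you forget to call joinRoom ...").
// await room.joinRoom(/* ... */);

// Screen share with optional simulcast layers; shareBitRates are integers 500–2500 kbps, max 3 layers:
// await room.enableShare({ shareAudio: false, enableSharingLayers: true, shareBitRates: [2500, 1250, 500] });

// Switch camera while producing; resolution must be "hd" | "vga" | "qvga", fps between 5 and 65:
// await room.changeVideoInput({ deviceId, resolution: "hd", fps: 25 });

// Stop the camera producer and release the underlying webcam tracks:
// await room.disableCam();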
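For the peer-to-peer P2pSdk, a similarly hedged sketch based on the createRoom() parameters and the events emitted in this chunk (mediaInputs, localStream, remoteStream, peerJoin, chatMessage); again, the import path is an assumption.

// Hypothetical sketch based on P2pSdk.createRoom() and the events emitted in the bundle above.
import { P2pSdk } from "samvyo-js-sdk";

// createRoom() requires a roomId and generates a random peerId when none is supplied.
const p2p = await P2pSdk.createRoom({ roomId: "demo-room", peerName: "Alice", role: "caller" });

p2p.on("mediaInputs", () => console.log(p2p.mediaInputs)); // available camera/mic labels
p2p.on("localStream", () => { /* render p2p.localStream */ });
p2p.on("remoteStream", () => { /* render p2p.remoteStream */ });
p2p.on("peerJoin", (name) => console.log(`${name} joined`));
p2p.on("chatMessage", (msg) => console.log("peer:", msg));

// p2p.sendChatMessage("hello");   // plain-text chat over the signalling socket
// p2p.handleAudioToggle();        // mute/unmute the local audio track
// await p2p.informRoomLeave();    // notify the peer and tear down the connection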