@spiffcommerce/preview 2.1.0 → 2.1.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/module.js CHANGED
@@ -1,2 +1,1357 @@
1
- import{ActionManager as e}from"@babylonjs/core/Actions/actionManager";import{ExecuteCodeAction as t}from"@babylonjs/core/Actions/directActions";import{Engine as i}from"@babylonjs/core/Engines/engine";import{NullEngine as a}from"@babylonjs/core/Engines/nullEngine";import{GlowLayer as s}from"@babylonjs/core/Layers/glowLayer";import{HighlightLayer as n}from"@babylonjs/core/Layers/highlightLayer";import{PBRMaterial as o}from"@babylonjs/core/Materials/PBR/pbrMaterial";import{CubeTexture as r}from"@babylonjs/core/Materials/Textures/cubeTexture";import{DynamicTexture as c}from"@babylonjs/core/Materials/Textures/dynamicTexture";import{Color3 as l,Color4 as h}from"@babylonjs/core/Maths/math.color";import{Vector3 as m}from"@babylonjs/core/Maths/math.vector";import{DracoCompression as u}from"@babylonjs/core/Meshes/Compression/dracoCompression";import{Observable as d}from"@babylonjs/core/Misc/observable";import{Tools as g}from"@babylonjs/core/Misc/tools";import{Scene as p}from"@babylonjs/core/scene";import{SceneLoader as f}from"@babylonjs/core/Loading/sceneLoader";import{Texture as b}from"@babylonjs/core/Materials/Textures/texture";import{Animation as v}from"@babylonjs/core/Animations/animation";import{QuadraticEase as w,EasingFunction as y}from"@babylonjs/core/Animations/easing";import{ArcRotateCamera as M}from"@babylonjs/core/Cameras/arcRotateCamera";import{AssetsManager as T}from"@babylonjs/core/Misc/assetsManager";import{WebXRHitTest as C}from"@babylonjs/core/XR/features/WebXRHitTest";import{WebXRState as L}from"@babylonjs/core/XR/webXRTypes";function R(e,t,i,a){Object.defineProperty(e,t,{get:i,set:a,enumerable:!0,configurable:!0})}var x={};let A;var O;let E;var S;R(x,"ProductCameraRig",(()=>A)),R(x,"MaterialEffectMode",(()=>E)),(O=A||(A={}))[O.Orbit=0]="Orbit",O[O.Pan=1]="Pan",(S=E||(E={})).None="None",S.RemoveWhenSelected="RemoveWhenSelected",S.ApplyWhenSelected="ApplyWhenSelected";var 
P={};R(P,"SpiffCommerce3DPreviewService",(()=>te)),R(P,"createBaseModel",(()=>ie));const I=new Map;async function B(e,t,i){return new Promise(((a,s)=>{const n=I.get(e);if(n&&n.scene.uid===t.uid)return a(n);f.LoadAssetContainerAsync(e,void 0,t,i).then((t=>{I.set(e,t),a(t)})).catch(s)}))}function F(e,t,i){return t in e?Object.defineProperty(e,t,{value:i,enumerable:!0,configurable:!0,writable:!0}):e[t]=i,e}class N{constructor(e){F(this,"customOptions",void 0),F(this,"getSceneClearColor",(()=>{const e=this.customOptions?.backgroundImage?0:1;if(this.customOptions&&this.customOptions.backgroundImage)return new h(0,0,0,e).toLinearSpace();if(this.customOptions&&this.customOptions.backgroundColor){const t=l.FromHexString(this.customOptions.backgroundColor);return new h(t.r,t.g,t.b,e).toLinearSpace()}return new h(.98,.98,.98,e).toLinearSpace()})),F(this,"highlightColorFromConfig",(()=>this.customOptions&&this.customOptions.highlightColor?this.hexToColor4(this.customOptions.highlightColor):new h(.98,.98,.98,1).toLinearSpace())),F(this,"hexToColor4",((e,t=1)=>{const i=l.FromHexString(e);return new h(i.r,i.g,i.b,t).toLinearSpace()})),this.customOptions=e}get options(){return this.customOptions}get scene(){return{clearColor:this.getSceneClearColor(),transparentBackground:this.customOptions?.backgroundImage,environmentFile:this.customOptions?.environmentFile??"assets/model-viewer/default.env"}}get camera(){return{autoOrientation:this.customOptions?.disableAutomaticOrientation??!0,autoRotation:{enabled:this.customOptions?.autoRotation??!1,idleTimeMs:this.customOptions?.idleTimeBeforeRotation??5e3},limits:{min:{alpha:this.customOptions?.lowerAlphaLimitDeg?this.customOptions?.lowerAlphaLimitDeg*Math.PI/180:void 0,beta:this.customOptions?.lowerBetaLimitDeg?this.customOptions?.lowerBetaLimitDeg*Math.PI/180:void 0,radius:this.customOptions?.minZoomOverride},max:{alpha:this.customOptions?.upperAlphaLimitDeg?this.customOptions?.upperAlphaLimitDeg*Math.PI/180:void 
0,beta:this.customOptions?.upperBetaLimitDeg?this.customOptions?.upperBetaLimitDeg*Math.PI/180:void 0,radius:this.customOptions?.maxZoomOverride}}}}get highlights(){return{enabled:this.customOptions?.highlightOnMaterialHover??!1,color:this.highlightColorFromConfig()}}}var D={};R(D,"RenderingConfiguration",(()=>k)),R(D,"REFLECTION_PROBE_RESOLUTION",(()=>_));class k{static getDynamicTextureResolution(){return this.getIsMobile()||!k.offscreenRenderingSupported()?{width:1024,height:1024}:{width:2048,height:2048}}static shouldMipMap(){return!0}static offscreenRenderingSupported(){return!navigator.userAgent.includes("SamsungBrowser")&&(!!window.Worker&&!!window.OffscreenCanvas)}static getMirrorTextureResolution(){return this.getIsMobile()?512:1024}static getIsMobile(){return window.innerWidth<=480}}const _=128;function j(e,t,a){t.forEach((t=>{const s=t.getID(),n=t.getName(),o=k.getDynamicTextureResolution();e.materials.filter((e=>e.name===n)).forEach((r=>{const l=a.get(s);if(l)z(r,l),l.update(false);else{const l=function(e,t,a,s){const n=new c(e,{width:a,height:s},t,k.shouldMipMap(),b.TRILINEAR_SAMPLINGMODE,i.TEXTUREFORMAT_RGBA),o=n.getContext();o&&(o.fillStyle="#f5f5f5",o.fillRect(0,0,a,s),n.update());return n}(n,e,o.width,o.height);a.set(s,l),t.setStaticContext(l.getContext()),z(r,l),l.onLoadObservable.addOnce((()=>{l.update(false)}))}}))}))}function z(e,t){if(e instanceof o){const i=e,a=i.albedoTexture;a?(t.wrapU=a.wrapU,t.wrapV=a.wrapV):(t.wrapU=1,t.wrapV=1),i.albedoTexture=t}else{const i=e,a=i.diffuseTexture;a&&(t.wrapU=a.wrapU,t.wrapV=a.wrapV),i.diffuseTexture=t}}function V(e,t,i,a){const s=e.animationGroups,n=e=>e.targetedAnimations.map((e=>e.animation.framePerSecond)).reduce(((e,t)=>e+t),0)/e.targetedAnimations.length||0;void 0===a||void 0===i||a!==i?s.forEach((e=>{e.stop();const s=n(e);e.start(t,1,a*s,i*s)})):s.forEach((e=>{e.stop();const i=n(e),s=a*i;e.start(t,1,s,s)}))}function H(e){e.animationGroups.forEach((e=>{e.stop()}))}function U(e,t,i){var 
a,s,n;e.stopAnimation(t),t.animations=[],Math.abs(t.alpha)>2*Math.PI&&(t.alpha=(a=t.alpha,s=0,n=2*Math.PI,a<s?n-(s-a)%(n-s):s+(a-s)%(n-s)));const o=[],r=i.target,c=r?1:0;if(i.target&&o.push(K("cameraTargetLerp","target",(new m).copyFrom(t.target),new m(i.target.x,i.target.y,i.target.z),v.ANIMATIONTYPE_VECTOR3,0)),o.push(K("cameraAlphaLerp","alpha",t.alpha,W(i.lonDeg),v.ANIMATIONTYPE_FLOAT,c)),o.push(K("cameraBetaLerp","beta",t.beta,W(i.latDeg),v.ANIMATIONTYPE_FLOAT,c)),void 0!==i.radius){const e=Math.max(.01,i.radius);o.push(K("cameraRadiusLerp","radius",t.radius,e,v.ANIMATIONTYPE_FLOAT,c))}t.animations.push(...o);const l=t.useAutoRotationBehavior;t.disableAutoRotationBehavior(),e.beginAnimation(t,0,r?120:60,!1,1,(()=>{t.animations=[],l&&t.enableAutoRotationBehavior()}))}function W(e){return e*Math.PI/180}function K(e,t,i,a,s,n=0,o=v.ANIMATIONLOOPMODE_CONSTANT){const r=new w;r.setEasingMode(y.EASINGMODE_EASEINOUT);const c=new v(e,t,60,s,o),l=[];return n>0&&l.push({frame:0,value:i}),l.push({frame:60*n,value:i}),l.push({frame:60*(n+1),value:a}),c.setKeys(l),c.setEasingFunction(r),c}class q extends M{constructor(e,t,i,a,s,n,o,r){var c,l,h;super(e,t,i,a,s,n,r),c=this,l="lastFocus",h=new m(0,0,0),l in 
c?Object.defineProperty(c,l,{value:h,enumerable:!0,configurable:!0,writable:!0}):c[l]=h,this.minZ=.01,this.maxZ=1e3*this.radius,this.lowerRadiusLimit=.01*this.radius,this.enableFramingBehavior(),this.upperRadiusLimit=1.5*this.radius,this.wheelPrecision=100/this.radius,this.pinchPrecision=300/this.radius,this.wheelDeltaPercentage=.01,this.pinchDeltaPercentage=.005,this.useNaturalPinchZoom=!0,o.camera.autoOrientation&&(this.alpha+=Math.PI),o&&(o.camera.limits.min.beta&&(this.lowerBetaLimit=o.camera.limits.min.beta),o.camera.limits.max.beta&&(this.upperBetaLimit=o.camera.limits.max.beta),o.camera.limits.min.alpha&&(this.lowerAlphaLimit=o.camera.limits.min.alpha),o.camera.limits.max.alpha&&(this.upperAlphaLimit=o.camera.limits.max.alpha),o.camera.limits.min.radius&&(this.lowerRadiusLimit=o.camera.limits.min.radius),o.camera.limits.max.radius&&(this.upperRadiusLimit=o.camera.limits.max.radius),o.camera.autoRotation.enabled&&this.enableAutoRotationBehavior(o.camera.autoRotation.idleTimeMs))}getFramingBehavior(){return this.getBehaviorByName("Framing")}getAutoRotationBehavior(){const e=this.getBehaviorByName("AutoRotation");return e||void 0}enableFramingBehavior(){this.useFramingBehavior=!0;const e=this.getFramingBehavior();e.attach(this),e.framingTime=0,e.elevationReturnTime=-1,e.zoomStopsAnimation=!1,this.lowerRadiusLimit=null;const t=G(this._scene);return e.zoomOnBoundingInfo(t.min,t.max),this.wheelPrecision=100/this.radius,null===this.lowerRadiusLimit&&(this.lowerRadiusLimit=.1),this.lastFocus.copyFrom(this.target),e}rerunFramingBehavior(e){const t=this.getFramingBehavior();t.framingTime=800;const i=G(this._scene);t.zoomOnBoundingInfo(i.min,i.max,void 0,(()=>{e()})),t.framingTime=0}enableAutoRotationBehavior(e=5e3){this.useAutoRotationBehavior=!0;const t=this.getAutoRotationBehavior();t&&(t.idleRotationWaitTime=e)}disableAutoRotationBehavior(){this.useAutoRotationBehavior=!1}static create(e,t){e.activeCamera&&(e.activeCamera.dispose(),e.activeCamera=null);const 
i=G(e),a=i.max.subtract(i.min),s=i.min.add(a.scale(.5)),n=new q("ProductCamera",-Math.PI/2,Math.PI/2,1.5*a.length(),s,e,t);return n.panningInertia=0,n.panningOriginTarget.copyFrom(s),n.onAfterCheckInputsObservable.add((()=>{n.panningSensibility=1e3/a.length()})),e.activeCamera=n,n}}function G(e){const t=e.meshes.filter((e=>e.name.toLowerCase().endsWith("_t")||e.name.toLowerCase().includes("_t_")));return e.getWorldExtends((e=>e.isVisible&&e.isEnabled()&&(0===t.length||t.includes(e))))}const X={albedoTexture:"albedoMapKey",bumpTexture:"normalMapKey",ambientTexture:"ambientMapKey",emissiveTexture:"emissionMapKey",opacityTexture:"alphaMapKey",metallicTexture:"metallicMapKey",refractionTexture:"refractionMapKey"};function Z(e,t,i,a){["albedoTexture","bumpTexture","ambientTexture","emissiveTexture","opacityTexture","metallicTexture","refractionTexture"].forEach((s=>{!function(e,t,i,a,s){const n=X[e];if(!n)throw new Error("Unexpected texture name encountered.");const o=t[n];o?a.addTextureTask(e,o,!1,!1):s&&i[e]&&(i[e]&&i[e].dispose(),i[e]=null,function(e,t){"opacityTexture"===e&&(t.useAlphaFromAlbedoTexture=!0);"metallicTexture"===e&&(t.useRoughnessFromMetallicTextureAlpha=!1,t.useRoughnessFromMetallicTextureGreen=!1,t.useMetallnessFromMetallicTextureBlue=!1);"refractionTexture"===e&&(t.subSurface.isRefractionEnabled=!1,t.subSurface.refractionIntensity=1)}(e,i))}(s,e,t,i,a)})),function(e,t){if(!e.clearCoat)return;e.clearCoat===E.RemoveWhenSelected?(t.clearCoat.isEnabled=!1,t.clearCoat.indexOfRefraction=1.5):e.clearCoat===E.ApplyWhenSelected&&(t.clearCoat.isEnabled=!0,t.clearCoat.indexOfRefraction=e.clearCoatIOR||t.clearCoat.indexOfRefraction)}(e,t)}function 
Y(e,t,i,a){"opacityTexture"===e&&(t.useAlphaFromAlbedoTexture=!1),"metallicTexture"===e&&(t.useRoughnessFromMetallicTextureAlpha=!1,t.useRoughnessFromMetallicTextureGreen=!0,t.useMetallnessFromMetallicTextureBlue=!0),"refractionTexture"===e&&(t.subSurface.isRefractionEnabled=!0,t.subSurface.refractionIntensity=i.refractionIntensity||1),t[e]=a}function Q(e,t,i){return t in e?Object.defineProperty(e,t,{value:i,enumerable:!0,configurable:!0,writable:!0}):e[t]=i,e}class ${constructor(){Q(this,"materialVariantMap",new Map),Q(this,"keysThatRemovedBaseModel",[]),Q(this,"loadedContainerForKey",new Map)}async applyMaterial(e,t,i,a,s){return new Promise((n=>{const o=e.materials.filter((e=>e.name===t)),r=this.materialVariantMap.get(t);if(this.materialVariantMap.set(t,{...r,...i}),0===o.length)return void n(void 0);const c=new T(e);c.useDefaultLoadingScreen=!1,o.forEach((e=>Z(i,e,c,s))),c.onProgress=(e,t,i)=>{a&&a(e/t*100,100,i.name)},c.onFinish=e=>{e.forEach((e=>{const t=e;a&&a(100,100,e.name),o.forEach((a=>Y(e.name,a,i,t.texture)))})),n(void 0)},c.loadAsync()}))}async applyModel(e,t,i,a,s){if(a&&i&&!this.keysThatRemovedBaseModel.includes(t)&&this.keysThatRemovedBaseModel.push(t),!a){const i=this.keysThatRemovedBaseModel.includes(t);return this.loadedContainerForKey.get(t)?.removeAllFromScene(),e&&i&&await ie(e.metadata.baseModel,e),Promise.resolve()}const n=await B(a,e,s);if(this.loadedContainerForKey.has(t)&&this.loadedContainerForKey.get(t)?.removeAllFromScene(),i){(await B(e.metadata.baseModel,e)).removeAllFromScene()}n.addAllToScene(),this.loadedContainerForKey.set(t,n);const o=[];this.materialVariantMap.forEach((async(t,i)=>{o.push(this.applyMaterial(e,i,t))})),await Promise.all(o)}}var J=async(e,t)=>{const i=await 
t.createDefaultXRExperienceAsync({uiOptions:{sessionMode:"immersive-ar",referenceSpaceType:"local-floor",onError:e=>{console.log(e)}}}),a=i.baseExperience.featuresManager.enableFeature(C.Name,"latest",{disablePermanentHitTest:!0,enableTransientHitTest:!0,useReferenceSpace:!0});let s=null;a.onHitTestResultObservable.add((e=>{s=e.length?e[0]:void 0})),t.onPointerDown=()=>{if(s&&i.baseExperience.state===L.IN_XR){const e=t.getNodeByName("__root__");s.transformationMatrix.decompose(void 0,e.rotationQuaternion,e.position)}}};function ee(e,t,i){return t in e?Object.defineProperty(e,t,{value:i,enumerable:!0,configurable:!0,writable:!0}):e[t]=i,e}class te{constructor(e,t){ee(this,"engine",void 0),ee(this,"scene",void 0),ee(this,"configuration",void 0),ee(this,"loadProgress",new Map([["initialScene",0]])),ee(this,"materialReadyToLoadCallbacks",new Map),ee(this,"modelReadyToLoadCallbacks",new Map),ee(this,"queuedModelAnimation",void 0),ee(this,"queuedCameraAnimation",void 0),ee(this,"focusLostNotified",!1),ee(this,"loadObservable",new d),ee(this,"focusLostObservable",new d),ee(this,"dynamicTextures",new Map),ee(this,"highlightLayer",void 0),ee(this,"variantManager",new $),this.configuration=new N(t);u.Configuration={decoder:{wasmUrl:"https://www.gstatic.com/draco/versioned/decoders/1.5.3/draco_wasm_wrapper_gltf.js",wasmBinaryUrl:"https://www.gstatic.com/draco/versioned/decoders/1.5.3/draco_decoder_gltf.wasm",fallbackUrl:"https://www.gstatic.com/draco/versioned/decoders/1.5.3/draco_decoder_gltf.js"}},e&&(e.getContext("webgl2")||e.getContext("webgl"));const s=console.log;console.log=()=>{};const n=e?new i(e,!0,{premultipliedAlpha:!1,preserveDrawingBuffer:!!t?.backgroundImage,audioEngine:!1,stencil:this.configuration.highlights.enabled,forceSRGBBufferSupportState:!0}):new a;console.log=s,n.hideLoadingUI(),window.addEventListener("resize",this.fireResizeEvent.bind(this)),this.engine=n,this.scene=new 
p(n)}registerFocusLostListener(e){this.focusLostObservable.add(e)}unregisterFocusLostListener(e){this.focusLostObservable.removeCallback(e)}registerLoadProgressListener(e){this.loadObservable.add(e),e(this.getLoadListenerEvent())}unregisterLoadProgressListener(e){this.loadObservable.removeCallback(e)}registerView(e){const t=e.height,i=e.width;this.engine.registerView(e),e.setAttribute("height",t.toString()),e.setAttribute("width",i.toString()),this.orbitEnabled()||this.setCameraState(A.Pan)}getNumViewports(){return this.engine.views?.length||0}unregisterView(e){this.engine.unRegisterView(e),this.engine.inputElement=null}shutdown(){this.engine&&this.engine.dispose(),window.removeEventListener("resize",this.fireResizeEvent)}getSceneInitializationProgress(){return this.loadProgress.get("initialScene")}async initialize(i,a){if(this.scene.clearColor=this.configuration.scene.clearColor,this.scene.metadata={baseModel:i},this.scene.environmentTexture=r.CreateFromPrefilteredData(this.configuration.scene.environmentFile,this.scene),i){const e=99;await ie(i,this.scene,(t=>{this.loadProgress.set("initialScene",t.loaded*e/t.total),this.notifyLoadHandlers()}))}this.configuration.highlights.enabled&&this.scene.meshes.forEach((i=>{"targetcube_t"!==i.name&&"backgroundShell"!==i.name&&(i.isPickable=!0,i.actionManager||(i.actionManager=new e(this.scene)),i.actionManager.registerAction(new t(e.OnPointerOutTrigger,(()=>{this.setHighlights([])}))),i.actionManager.registerAction(new t(e.OnPointerOverTrigger,(e=>{if(e.meshUnderPointer){const t=e.meshUnderPointer.material;t&&this.setHighlights([t])}}))))}));const n=q.create(this.scene,this.configuration),o=a?.getAll()||new Map;j(this.scene,o,this.dynamicTextures);if(this.scene.materials.some((e=>"emissiveTexture"in e&&null!==e.emissiveTexture))){new s("GlowLayer",this.scene).intensity=.5}H(this.scene),this.engine.runRenderLoop((()=>{if(!this.engine.views)return;const 
e=this.engine.views[0],t=this.engine.inputElement;(!t||e&&t&&t.id!==e.target.id)&&this.reattachControls(e.target),o.forEach(((e,t)=>{const i=this.dynamicTextures.get(t);i&&e.getStaticContextDirty()&&i.isReady()&&(i.update(!1),e.setStaticContextDirty(!1))})),this.configuration.scene.transparentBackground&&this.engine.views.forEach((e=>{const t=this.engine.getRenderingCanvas();e.target.getContext("2d").clearRect(0,0,t.width,t.height)})),this.scene.render(),100!==this.getSceneInitializationProgress()&&(this.loadProgress.set("initialScene",100),this.materialReadyToLoadCallbacks.forEach(((e,t)=>{e.forEach(((e,i)=>{this.applyMaterialVariant(t,i,e)}))})),this.materialReadyToLoadCallbacks.clear(),this.modelReadyToLoadCallbacks.forEach((e=>{e(this.scene)})),this.modelReadyToLoadCallbacks.clear(),this.queuedModelAnimation&&(this.executeModelAnimation(this.queuedModelAnimation),this.queuedModelAnimation=void 0),this.queuedCameraAnimation&&(this.executeCameraAnimation(this.queuedCameraAnimation),this.queuedCameraAnimation=void 0),this.notifyLoadHandlers()),n.target.equalsWithEpsilon(n.lastFocus,.1)||this.focusLostNotified||(this.focusLostObservable.notifyObservers(void 0),this.focusLostNotified=!0)}))}executeModelAnimation(e){100===this.getSceneInitializationProgress()?V(this.scene,e.loop,e.to,e.from):this.queuedModelAnimation=e}executeCameraAnimation(e){100===this.getSceneInitializationProgress()&&this.scene.activeCamera?U(this.scene,this.scene.activeCamera,e):this.queuedCameraAnimation=e}getCameraPose(){if(this.scene){const e=this.scene.activeCamera;if(e)return{lonDeg:Math.round(180*e.alpha/Math.PI),latDeg:Math.round(180*e.beta/Math.PI),radius:Math.round(1e4*(e.radius+Number.EPSILON))/1e4,target:{x:e.target.x,y:e.target.y,z:e.target.z}}}}setCameraPose(e){if(this.scene){const t=this.scene.activeCamera;t&&(t.target=new m(e.target.x,e.target.y,e.target.z),t.radius=e.radius,t.alpha=e.latDeg,t.beta=e.lonDeg)}}async 
applyMaterialVariant(e,t,i,a){if(100===this.getSceneInitializationProgress())await this.variantManager.applyMaterial(this.scene,e,i,((e,t,i)=>{this.loadProgress.set(`key_${i}`,e/t*100),this.notifyLoadHandlers()}),a);else if(this.materialReadyToLoadCallbacks.has(e)){this.materialReadyToLoadCallbacks.get(e)?.set(t,i)}else{this.materialReadyToLoadCallbacks.set(e,new Map);this.materialReadyToLoadCallbacks.get(e)?.set(t,i)}}async applyModelVariant(e,t,i){100===this.getSceneInitializationProgress()?(await this.variantManager.applyModel(this.scene,e,i||!1,t.model,(t=>{this.loadProgress.set(e,100*t.loaded/t.total),this.notifyLoadHandlers()})),this.scene.materials.forEach((e=>{if(e&&0===e.getBindedMeshes().length){const t=e instanceof o&&!(e.albedoTexture instanceof c);e.dispose(!1,t)}})),j(this.scene,t.contextService.getAll(),this.dynamicTextures),H(this.scene)):this.modelReadyToLoadCallbacks.set(e,(()=>{this.applyModelVariant(e,t,i)}))}setCameraState(e){if(!this.engine?.views||!this.engine?.views[0])throw new Error("No views attached, camera state requires a view to attach controls onto.");e===A.Orbit?this.reattachControls(this.engine.views[0].target,2):this.reattachControls(this.engine.views[0].target,0)}animateToLastCameraFocus(){return new Promise((e=>{const t=this.scene.activeCamera,i=this.configuration;t.rerunFramingBehavior((()=>{this.focusLostNotified=!1,i.camera.limits.min.radius&&(t.lowerRadiusLimit=i.camera.limits.min.radius),i.camera.limits.max.radius&&(t.upperRadiusLimit=i.camera.limits.max.radius),e()}))}))}setAutoRotation(e){const t=this.scene.activeCamera;this.configuration.camera.autoRotation.enabled&&t&&(e?t.enableAutoRotationBehavior(this.configuration.camera.autoRotation.idleTimeMs):t.disableAutoRotationBehavior())}getCurrentConfiguration(){return this.configuration.options}async renderSceneScreenshot(e,t){const i=this.scene.activeCamera;if(!i)throw new Error("Missing product camera, cannot render screenshot!");var 
a;(a=i).getScene().stopAnimation(a),a.animations=[];const s=i.alpha,n=i.beta,o=i.radius,r=t.latDeg*Math.PI/180,c=t.lonDeg*Math.PI/180;i.alpha=c,i.beta=r,i.radius=t.radius||i.radius;const l=await g.CreateScreenshotUsingRenderTargetAsync(this.engine,i,e,"image/png",2,!0);return i.alpha=s,i.beta=n,i.radius=o,l}orbitEnabled(){const e=this.configuration;if(!e)return!0;const t=e.camera.limits.min.alpha,i=e.camera.limits.max.alpha,a=e.camera.limits.min.beta,s=e.camera.limits.max.beta;if(void 0===t||void 0===i||void 0===a||void 0===s)return!0;const n=[a,s],o=[t,i].every((e=>e===t)),r=n.every((e=>e===a));return!o&&!r}fireResizeEvent(){this.getNumViewports()>0&&this.engine.resize()}onMaterialSelected(i){this.scene.meshes.forEach((a=>{"targetcube_t"!==a.name&&"backgroundShell"!==a.name&&(a.actionManager||(a.actionManager=new e(this.scene)),a.actionManager.registerAction(new t(e.OnPickDownTrigger,(e=>{if(e.meshUnderPointer){const t=e.meshUnderPointer.material;t&&i({id:t.id,name:t.name})}}))))}))}listMaterials(){const e=this.scene?.materials;return e?e.map((e=>({id:e.id,name:e.name}))):[]}setHighlights(e,t){0===e.length&&(this.highlightLayer?.dispose(),this.highlightLayer=void 0),this.highlightLayer||(this.highlightLayer=new n("highlights",this.scene,{isStroke:!0,blurVerticalSize:.85,blurHorizontalSize:.85}),this.highlightLayer.innerGlow=!0,this.highlightLayer.outerGlow=!1),this.highlightLayer.removeAllMeshes();const i=t?new l(t[0],t[1],t[2]).toLinearSpace():void 0;e.forEach((e=>{const t=this.scene.materials.find((t=>t.name===e.name&&t.id===e.id));t&&t.getBindedMeshes().forEach((e=>this.highlightLayer?.addMesh(e,i||l.FromHexString("#fcba03"))))}))}initXRExperience(e){J(0,this.scene)}reattachControls(e,t=2){this.scene.detachControl(),this.engine.inputElement=e;const i=this.scene.activeCamera;if(i){i.attachControl(!0,!1,t);i.inputs.attached.pointers.multiTouchPanning=!1}this.scene.attachControl(!0,!0,!0)}getLoadListenerEvent(){const 
e=Array.from(this.loadProgress.values()).filter((e=>e<100)),t=e.reduce(((e,t)=>e+t),0)/e.length||0;return{loadValue:0===e.length?100:t,sceneInitialized:100===this.getSceneInitializationProgress()}}notifyLoadHandlers(){this.loadObservable.notifyObservers(this.getLoadListenerEvent())}}async function ie(e,t,i){(await B(e,t,i)).addAllToScene()}export{A as ProductCameraRig,E as MaterialEffectMode,te as SpiffCommerce3DPreviewService,ie as createBaseModel,k as RenderingConfiguration,_ as REFLECTION_PROBE_RESOLUTION};
1
+ import {ActionManager as $hgUW1$ActionManager} from "@babylonjs/core/Actions/actionManager";
2
+ import {ExecuteCodeAction as $hgUW1$ExecuteCodeAction} from "@babylonjs/core/Actions/directActions";
3
+ import {Engine as $hgUW1$Engine} from "@babylonjs/core/Engines/engine";
4
+ import {NullEngine as $hgUW1$NullEngine} from "@babylonjs/core/Engines/nullEngine";
5
+ import {GlowLayer as $hgUW1$GlowLayer} from "@babylonjs/core/Layers/glowLayer";
6
+ import {HighlightLayer as $hgUW1$HighlightLayer} from "@babylonjs/core/Layers/highlightLayer";
7
+ import {PBRMaterial as $hgUW1$PBRMaterial} from "@babylonjs/core/Materials/PBR/pbrMaterial";
8
+ import {CubeTexture as $hgUW1$CubeTexture} from "@babylonjs/core/Materials/Textures/cubeTexture";
9
+ import {DynamicTexture as $hgUW1$DynamicTexture} from "@babylonjs/core/Materials/Textures/dynamicTexture";
10
+ import {Color3 as $hgUW1$Color3, Color4 as $hgUW1$Color4} from "@babylonjs/core/Maths/math.color";
11
+ import {Vector3 as $hgUW1$Vector3} from "@babylonjs/core/Maths/math.vector";
12
+ import {DracoCompression as $hgUW1$DracoCompression} from "@babylonjs/core/Meshes/Compression/dracoCompression";
13
+ import {Observable as $hgUW1$Observable} from "@babylonjs/core/Misc/observable";
14
+ import {Tools as $hgUW1$Tools} from "@babylonjs/core/Misc/tools";
15
+ import {Scene as $hgUW1$Scene} from "@babylonjs/core/scene";
16
+ import "@babylonjs/core/Engines/Extensions/engine.views";
17
+ import "@babylonjs/core/Materials/Textures/Loaders/ddsTextureLoader";
18
+ import "@babylonjs/core/Materials/Textures/Loaders/envTextureLoader";
19
+ import "@babylonjs/core/Materials/Textures/Loaders/ktxTextureLoader";
20
+ import "@babylonjs/core/Misc/screenshotTools";
21
+ import "@babylonjs/core/Rendering/boundingBoxRenderer";
22
+ import {SceneLoader as $hgUW1$SceneLoader} from "@babylonjs/core/Loading/sceneLoader";
23
+ import {GLTF2 as $hgUW1$GLTF2} from "@babylonjs/loaders/glTF";
24
+ import "@babylonjs/loaders/glTF/2.0/Extensions";
25
+ import {Texture as $hgUW1$Texture} from "@babylonjs/core/Materials/Textures/texture";
26
+ import {MirrorTexture as $hgUW1$MirrorTexture} from "@babylonjs/core/Materials/Textures/mirrorTexture";
27
+ import {Color3 as $hgUW1$Color31, Vector3 as $hgUW1$Vector31, Plane as $hgUW1$Plane} from "@babylonjs/core/Maths/math";
28
+ import {ReflectionProbe as $hgUW1$ReflectionProbe} from "@babylonjs/core/Probes/reflectionProbe";
29
+ import {Animation as $hgUW1$Animation} from "@babylonjs/core/Animations/animation";
30
+ import {QuadraticEase as $hgUW1$QuadraticEase, EasingFunction as $hgUW1$EasingFunction} from "@babylonjs/core/Animations/easing";
31
+ import {ArcRotateCamera as $hgUW1$ArcRotateCamera} from "@babylonjs/core/Cameras/arcRotateCamera";
32
+ import {AssetsManager as $hgUW1$AssetsManager} from "@babylonjs/core/Misc/assetsManager";
33
+
34
+ function $parcel$export(e, n, v, s) {
35
+ Object.defineProperty(e, n, {get: v, set: s, enumerable: true, configurable: true});
36
+ }
37
+ var $0822e2c84c7d8f0f$exports = {};
38
+
39
+ $parcel$export($0822e2c84c7d8f0f$exports, "ProductCameraRig", () => $0822e2c84c7d8f0f$export$9beaec53ae7234b4);
40
+ $parcel$export($0822e2c84c7d8f0f$exports, "MaterialEffectMode", () => $0822e2c84c7d8f0f$export$5185910364c29078);
41
+ let $0822e2c84c7d8f0f$export$9beaec53ae7234b4;
42
+ (function(ProductCameraRig) {
43
+ ProductCameraRig[ProductCameraRig["Orbit"] = 0] = "Orbit";
44
+ ProductCameraRig[ProductCameraRig["Pan"] = 1] = "Pan";
45
+ })($0822e2c84c7d8f0f$export$9beaec53ae7234b4 || ($0822e2c84c7d8f0f$export$9beaec53ae7234b4 = {}));
46
+ let $0822e2c84c7d8f0f$export$5185910364c29078;
47
+ (function(MaterialEffectMode) {
48
+ MaterialEffectMode[/**
49
+ * When a material variant effect specifies 'None' the effect doesn't change in any way. This is the default behavior.
50
+ */ "None"] = "None";
51
+ MaterialEffectMode[/**
52
+ * When a material variant effect specifies 'RemoveWhenSelected' the effect is removed.
53
+ */ "RemoveWhenSelected"] = "RemoveWhenSelected";
54
+ MaterialEffectMode[/**
55
+ * When a material variant effect specifies 'ApplyWhenSelected' the effect is enabled.
56
+ */ "ApplyWhenSelected"] = "ApplyWhenSelected";
57
+ })($0822e2c84c7d8f0f$export$5185910364c29078 || ($0822e2c84c7d8f0f$export$5185910364c29078 = {}));
58
+
59
+
60
+ var $52383f320efd34d4$exports = {};
61
+
62
+ $parcel$export($52383f320efd34d4$exports, "SpiffCommerce3DPreviewService", () => $52383f320efd34d4$export$bbc4f3921db721d6);
63
+ $parcel$export($52383f320efd34d4$exports, "createBaseModel", () => $52383f320efd34d4$export$fdb7ab7bd2bafbea);
64
+
65
+
66
+
67
+
68
+
69
+
70
+
71
+
72
+
73
+
74
+
75
+
76
+
77
+
78
+
79
+
80
+
81
+
82
+ const $a38b99fca407fe64$export$b495560913bf8cdc = new Map();
83
+ async function $a38b99fca407fe64$export$a2966fb0929452fe(src, scene, progressHandler) {
84
+ return new Promise((resolve, reject)=>{
85
+ const existingContainer = $a38b99fca407fe64$export$b495560913bf8cdc.get(src);
86
+ if (existingContainer && existingContainer.scene.uid === scene.uid) return resolve(existingContainer);
87
+ else (0, $hgUW1$SceneLoader).LoadAssetContainerAsync(src, undefined, scene, progressHandler).then((newContainer)=>{
88
+ $a38b99fca407fe64$export$b495560913bf8cdc.set(src, newContainer);
89
+ resolve(newContainer);
90
+ }).catch(reject);
91
+ });
92
+ }
93
+
94
+
95
+
96
+ function $0e1e2b2fa84204ab$var$_defineProperty(obj, key, value) {
97
+ if (key in obj) Object.defineProperty(obj, key, {
98
+ value: value,
99
+ enumerable: true,
100
+ configurable: true,
101
+ writable: true
102
+ });
103
+ else obj[key] = value;
104
+ return obj;
105
+ }
106
+ class $0e1e2b2fa84204ab$export$fa89be98fe0936f2 {
107
+ constructor(options){
108
+ $0e1e2b2fa84204ab$var$_defineProperty(this, "customOptions", void 0);
109
+ $0e1e2b2fa84204ab$var$_defineProperty(this, "getSceneClearColor", ()=>{
110
+ // It looks like the browser combines RGB values with background elements regardless of the alpha value.
111
+ // To counteract this we set RGB to black in the case of transparency.
112
+ const alpha = this.customOptions?.backgroundImage ? 0 : 1;
113
+ if (this.customOptions && this.customOptions.backgroundImage) return new (0, $hgUW1$Color4)(0, 0, 0, alpha).toLinearSpace();
114
+ if (this.customOptions && this.customOptions.backgroundColor) {
115
+ const parsedColor = (0, $hgUW1$Color3).FromHexString(this.customOptions.backgroundColor);
116
+ return new (0, $hgUW1$Color4)(parsedColor.r, parsedColor.g, parsedColor.b, alpha).toLinearSpace();
117
+ }
118
+ return new (0, $hgUW1$Color4)(0.98, 0.98, 0.98, alpha).toLinearSpace();
119
+ });
120
+ $0e1e2b2fa84204ab$var$_defineProperty(this, "highlightColorFromConfig", ()=>{
121
+ if (this.customOptions && this.customOptions.highlightColor) return this.hexToColor4(this.customOptions.highlightColor);
122
+ return new (0, $hgUW1$Color4)(0.98, 0.98, 0.98, 1).toLinearSpace();
123
+ });
124
+ $0e1e2b2fa84204ab$var$_defineProperty(this, "hexToColor4", (hex, alpha = 1)=>{
125
+ const parsedColor = (0, $hgUW1$Color3).FromHexString(hex);
126
+ return new (0, $hgUW1$Color4)(parsedColor.r, parsedColor.g, parsedColor.b, alpha).toLinearSpace();
127
+ });
128
+ this.customOptions = options;
129
+ }
130
+ get options() {
131
+ return this.customOptions;
132
+ }
133
+ /**
134
+ * Configuration related to the scene
135
+ */ get scene() {
136
+ return {
137
+ clearColor: this.getSceneClearColor(),
138
+ transparentBackground: this.customOptions?.backgroundImage,
139
+ environmentFile: this.customOptions?.environmentFile ?? "assets/model-viewer/default.env"
140
+ };
141
+ }
142
+ /**
143
+ * Configuration related to the camera used to view and interact with the scene.
144
+ */ get camera() {
145
+ return {
146
+ autoOrientation: this.customOptions?.disableAutomaticOrientation ?? true,
147
+ autoRotation: {
148
+ enabled: this.customOptions?.autoRotation ?? false,
149
+ idleTimeMs: this.customOptions?.idleTimeBeforeRotation ?? 5000
150
+ },
151
+ limits: {
152
+ min: {
153
+ alpha: this.customOptions?.lowerAlphaLimitDeg ? this.customOptions?.lowerAlphaLimitDeg * Math.PI / 180 : undefined,
154
+ beta: this.customOptions?.lowerBetaLimitDeg ? this.customOptions?.lowerBetaLimitDeg * Math.PI / 180 : undefined,
155
+ radius: this.customOptions?.minZoomOverride
156
+ },
157
+ max: {
158
+ alpha: this.customOptions?.upperAlphaLimitDeg ? this.customOptions?.upperAlphaLimitDeg * Math.PI / 180 : undefined,
159
+ beta: this.customOptions?.upperBetaLimitDeg ? this.customOptions?.upperBetaLimitDeg * Math.PI / 180 : undefined,
160
+ radius: this.customOptions?.maxZoomOverride
161
+ }
162
+ }
163
+ };
164
+ }
165
+ /**
166
+ * Configuration related to the highlighting system. Highlights are used to add
167
+ * a visual cue to the user that something is moused over in the preview.
168
+ */ get highlights() {
169
+ return {
170
+ enabled: this.customOptions?.highlightOnMaterialHover ?? false,
171
+ color: this.highlightColorFromConfig()
172
+ };
173
+ }
174
+ }
175
+
176
+
177
+
178
+
179
+
180
+
181
+ var $db6a29689d313881$exports = {};
182
+
183
+ $parcel$export($db6a29689d313881$exports, "RenderingConfiguration", () => $db6a29689d313881$export$902ee4e8040a4cdb);
184
+ $parcel$export($db6a29689d313881$exports, "REFLECTION_PROBE_RESOLUTION", () => $db6a29689d313881$export$fbb2744b8f74e4f2);
185
+ class $db6a29689d313881$export$902ee4e8040a4cdb {
186
+ /**
187
+ * Returns the resolution expected for generated textures.
188
+ */ static getDynamicTextureResolution() {
189
+ return this.getIsMobile() || !$db6a29689d313881$export$902ee4e8040a4cdb.offscreenRenderingSupported() ? {
190
+ width: 1024,
191
+ height: 1024
192
+ } : {
193
+ width: 2048,
194
+ height: 2048
195
+ };
196
+ }
197
+ /**
198
+ * Returns true when textures should generate mip maps
199
+ */ static shouldMipMap() {
200
+ return true;
201
+ }
202
+ /**
203
+ * Returns true when multithreaded rendering is supported.
204
+ */ static offscreenRenderingSupported() {
205
+ // Samsung browser has broken web worker custom fonts from v14. To be
206
+ // safe, if we detect this browser we immediately disable offscreen canvas. There
207
+ // may be a more specific feature we can detect to be more clean about this moving forwards.
208
+ if (navigator.userAgent.includes("SamsungBrowser")) return false;
209
+ // Otherwise we check for relevant required functionality
210
+ return !!window.Worker && !!window.OffscreenCanvas;
211
+ }
212
+ /**
213
+ * Returns the resolution expected for mirror textures.
214
+ */ static getMirrorTextureResolution() {
215
+ return this.getIsMobile() ? 512 : 1024;
216
+ }
217
+ static getIsMobile() {
218
+ return window.innerWidth <= 480;
219
+ }
220
+ }
221
+ const $db6a29689d313881$export$fbb2744b8f74e4f2 = 128;
222
+
223
+
224
+ function $373f30fedccc1cd5$export$e2d6a39fe5dbab66(scene, renderableContexts, existingTextures) {
225
+ renderableContexts.forEach((context)=>{
226
+ const contextID = context.getID();
227
+ const contextName = context.getName();
228
+ const renderDims = (0, $db6a29689d313881$export$902ee4e8040a4cdb).getDynamicTextureResolution(); // Find any materials in the scene with the name of
229
+ // this layout and are included in our filter if one is provided.
230
+ const targetMaterials = scene.materials.filter((mat)=>{
231
+ return mat.name === contextName;
232
+ });
233
+ targetMaterials.forEach((mat)=>{
234
+ const curDynamicTexture = existingTextures.get(contextID);
235
+ const invertYAxis = false;
236
+ if (!curDynamicTexture) {
237
+ const newTexture = $373f30fedccc1cd5$var$createDynamicTexture(contextName, scene, renderDims.width, renderDims.height);
238
+ existingTextures.set(contextID, newTexture);
239
+ context.setStaticContext(newTexture.getContext());
240
+ $373f30fedccc1cd5$var$applyDynamicTexture(mat, newTexture);
241
+ newTexture.onLoadObservable.addOnce(()=>{
242
+ newTexture.update(invertYAxis);
243
+ });
244
+ } else {
245
+ $373f30fedccc1cd5$var$applyDynamicTexture(mat, curDynamicTexture);
246
+ curDynamicTexture.update(invertYAxis);
247
+ }
248
+ });
249
+ });
250
+ }
251
+ /**
252
+ * Construct a new dynamic texture to listen for changes to a panel.
253
+ * @param scene The screen the resource will be associated to.
254
+ * @param layoutKey The key for the layout this dynamic texture relates to.
255
+ * @param RenderableContextService The canvas service, used to pull the layout with the given key.
256
+ */ function $373f30fedccc1cd5$var$createDynamicTexture(name, scene, width, height) {
257
+ const dynamicTex = new (0, $hgUW1$DynamicTexture)(name, {
258
+ width: width,
259
+ height: height
260
+ }, scene, (0, $db6a29689d313881$export$902ee4e8040a4cdb).shouldMipMap(), (0, $hgUW1$Texture).TRILINEAR_SAMPLINGMODE, (0, $hgUW1$Engine).TEXTUREFORMAT_RGBA);
261
+ const ctx = dynamicTex.getContext();
262
+ if (ctx) {
263
+ ctx.fillStyle = "#f5f5f5";
264
+ ctx.fillRect(0, 0, width, height);
265
+ dynamicTex.update();
266
+ }
267
+ return dynamicTex;
268
+ }
269
+ /**
270
+ * Applies a given dynamic texture to a target material.
271
+ * @param mat The material to apply to.
272
+ * @param dynamicTexture The dynamic texture we want to apply.
273
+ */ function $373f30fedccc1cd5$var$applyDynamicTexture(mat, dynamicTexture) {
274
+ if (mat instanceof (0, $hgUW1$PBRMaterial)) {
275
+ const pbrMat = mat;
276
+ const albedoTexture = pbrMat.albedoTexture;
277
+ if (albedoTexture) {
278
+ dynamicTexture.wrapU = albedoTexture.wrapU;
279
+ dynamicTexture.wrapV = albedoTexture.wrapV;
280
+ } else {
281
+ dynamicTexture.wrapU = 1;
282
+ dynamicTexture.wrapV = 1;
283
+ }
284
+ pbrMat.albedoTexture = dynamicTexture;
285
+ } else {
286
+ const standardMat = mat;
287
+ const diffuseTexture = standardMat.diffuseTexture;
288
+ if (diffuseTexture) {
289
+ dynamicTexture.wrapU = diffuseTexture.wrapU;
290
+ dynamicTexture.wrapV = diffuseTexture.wrapV;
291
+ }
292
+ standardMat.diffuseTexture = dynamicTexture;
293
+ }
294
+ }
295
+
296
+
297
+
298
+
299
+
300
+
301
+
302
+ function $f2c8dd674be56293$var$_defineProperty(obj, key, value) {
303
+ if (key in obj) Object.defineProperty(obj, key, {
304
+ value: value,
305
+ enumerable: true,
306
+ configurable: true,
307
+ writable: true
308
+ });
309
+ else obj[key] = value;
310
+ return obj;
311
+ }
312
+ class $f2c8dd674be56293$export$3fb1f3d61bcfe989 {
313
+ constructor(loader){
314
+ $f2c8dd674be56293$var$_defineProperty(this, "name", void 0);
315
+ $f2c8dd674be56293$var$_defineProperty(this, "enabled", void 0);
316
+ $f2c8dd674be56293$var$_defineProperty(this, "loader", void 0);
317
+ this.name = "glbPostProcessor";
318
+ this.enabled = true;
319
+ this.loader = loader;
320
+ }
321
+ onReady() {
322
+ this.applyReflections(this.loader.babylonScene);
323
+ }
324
+ /**
325
+ * Any custom properties set in blender on transform nodes will appear here. We can
326
+ * write the properties into the object metadata for retrieval at runtime.
327
+ */ loadNodeAsync(context, node, assign) {
328
+ return this.loader.loadNodeAsync(context, node, function(babylonMesh) {
329
+ node.extras && Object.keys(node.extras).forEach((key)=>{
330
+ const value = node.extras[key];
331
+ babylonMesh.metadata[key] = value;
332
+ });
333
+ assign(babylonMesh);
334
+ });
335
+ }
336
+ /**
337
+ * Material properties can be set at load time as we don't depend
338
+ * on the entire scene graph being loaded (for reflection render lists).
339
+ * The need for these properties may go over time as the relevant exporters/importers are updated.
340
+ */ loadMaterialPropertiesAsync(context, material, babylonMaterial) {
341
+ this.enableMaterialExtrasIfRequired(material, babylonMaterial);
342
+ return this.loader.loadMaterialPropertiesAsync(context, material, babylonMaterial);
343
+ }
344
+ dispose() {}
345
+ /**
346
+ * With a given material descriptor and babylon material instance, looks for any
347
+ * metadata defined on the descriptor and applies to the babylon material.
348
+ * @param material A material descriptor
349
+ * @param babylonMaterial An instance of a material representation in babylon.
350
+ */ enableMaterialExtrasIfRequired(material, babylonMaterial) {
351
+ if (!material.extras || !(babylonMaterial instanceof (0, $hgUW1$PBRMaterial))) return;
352
+ if (material.extras.sheen) {
353
+ const mat = babylonMaterial;
354
+ mat.sheen.isEnabled = true;
355
+ mat.sheen.intensity = material.extras.sheen;
356
+ }
357
+ if (material.extras.translucency) {
358
+ const mat1 = babylonMaterial;
359
+ mat1.subSurface.isTranslucencyEnabled = true;
360
+ mat1.subSurface.translucencyIntensity = material.extras.translucency;
361
+ if (material.extras.translucencyR && material.extras.translucencyG && material.extras.translucencyB) mat1.subSurface.tintColor = new (0, $hgUW1$Color31)(material.extras.translucencyR, material.extras.translucencyG, material.extras.translucencyB);
362
+ }
363
+ if (material.extras.useDepthPrePass) {
364
+ const mat2 = babylonMaterial;
365
+ mat2.needDepthPrePass = true;
366
+ mat2.forceIrradianceInFragment = true; // https://forum.babylonjs.com/t/rendering-artificats-on-safari-ios-14-pbrmaterial-depth-prepass-ibl/15670/5
367
+ }
368
+ if (material.extras.useParallax) {
369
+ const mat3 = babylonMaterial;
370
+ mat3.useParallax = true;
371
+ mat3.useParallaxOcclusion = true;
372
+ mat3.parallaxScaleBias = material.extras.useParallax;
373
+ }
374
+ }
375
+ /**
376
+ * Called with a loaded scene. Will finish initialization
377
+ * of any reflective properties within the scene.
378
+ * @param scene The scene to parse.
379
+ */ applyReflections(scene) {
380
+ function getReflectiveMeshes(scene) {
381
+ const reflectiveMeshes = [];
382
+ scene.transformNodes.forEach((node)=>{
383
+ if (node.metadata && node.metadata.reflective) reflectiveMeshes.push(...node.getChildMeshes());
384
+ });
385
+ scene.meshes.forEach((mesh)=>{
386
+ if (mesh.metadata && mesh.metadata.reflective && !reflectiveMeshes.includes(mesh)) reflectiveMeshes.push(mesh);
387
+ });
388
+ return reflectiveMeshes;
389
+ }
390
+ function buildMirrorTexture(mesh, reflectivity = 1) {
391
+ const mat = mesh.material;
392
+ if (!mat) return;
393
+ // Create a new mirror texture
394
+ const mirrorTex = new (0, $hgUW1$MirrorTexture)("mirror", (0, $db6a29689d313881$export$902ee4e8040a4cdb).getMirrorTextureResolution(), scene, true);
395
+ mirrorTex.renderList = getReflectiveMeshes(scene); // Calculate reflectance plane for mirror based on target mesh transform
396
+ const targetVertices = mesh.getVerticesData("normal");
397
+ if (!targetVertices) throw new Error("Mirror attribute specified on: " + mesh.name + "But no normals exist to generate a mirror from!");
398
+ mesh.computeWorldMatrix(true);
399
+ const worldMatrix = mesh.getWorldMatrix();
400
+ const normal = (0, $hgUW1$Vector31).TransformNormal(new (0, $hgUW1$Vector31)(targetVertices[0], targetVertices[1], targetVertices[2]), worldMatrix).normalize();
401
+ const reflector = (0, $hgUW1$Plane).FromPositionAndNormal(mesh.position, normal.scale(-1));
402
+ mirrorTex.mirrorPlane = reflector;
403
+ mirrorTex.level = reflectivity;
404
+ mat.reflectionTexture = mirrorTex;
405
+ }
406
+ function buildReflectionProbe(mesh) {
407
+ const mat = mesh.material;
408
+ const probe = new (0, $hgUW1$ReflectionProbe)("probe-" + mat.name, (0, $db6a29689d313881$export$fbb2744b8f74e4f2), scene);
409
+ probe.attachToMesh(mesh);
410
+ probe.renderList && probe.renderList.push(...getReflectiveMeshes(scene));
411
+ mat.reflectionTexture = probe.cubeTexture;
412
+ }
413
+ scene.meshes.forEach((mesh)=>{
414
+ const tags = mesh.metadata;
415
+ if (!tags) return;
416
+ if (tags.mirrorTexture) buildMirrorTexture(mesh, tags.mirrorTexture);
417
+ if (tags.reflectionProbe) buildReflectionProbe(mesh);
418
+ });
419
+ }
420
+ }
421
+
422
+
423
+
424
+
425
+
426
+ const $163294662caa594a$var$CAMERA_FRAME_RATE = 60;
427
+ const $163294662caa594a$var$SPEED_RATIO = 1; // TODO: This function is quite limited in how animations can be expressed, make it better (With backwards compat of course...)!
428
+ function $163294662caa594a$export$267f51f3588f9216(scene, loop, to, from) {
429
+ const animGroups = scene.animationGroups;
430
+ const getAvgFrameRateForAnimGroup = (animGroup)=>{
431
+ const sum = animGroup.targetedAnimations.map((ta)=>ta.animation.framePerSecond).reduce((a, b)=>a + b, 0);
432
+ return sum / animGroup.targetedAnimations.length || 0;
433
+ }; // When our to & from values match we want to go immediately
434
+ // to a specific sequence of the animation.
435
+ if (from !== undefined && to !== undefined && from === to) {
436
+ animGroups.forEach((animGroup)=>{
437
+ animGroup.stop();
438
+ const frameRate = getAvgFrameRateForAnimGroup(animGroup);
439
+ const targetFrame = from * frameRate;
440
+ animGroup.start(loop, $163294662caa594a$var$SPEED_RATIO, targetFrame, targetFrame);
441
+ });
442
+ return;
443
+ } // Otherwise we go to the 'from' value and play through to the 'to'
444
+ // value, looping if requested.
445
+ animGroups.forEach((animGroup)=>{
446
+ animGroup.stop();
447
+ const frameRate = getAvgFrameRateForAnimGroup(animGroup);
448
+ animGroup.start(loop, $163294662caa594a$var$SPEED_RATIO, from * frameRate, to * frameRate);
449
+ });
450
+ }
451
+ function $163294662caa594a$export$f2095917e1db8d3a(scene) {
452
+ const animGroups = scene.animationGroups;
453
+ animGroups.forEach((animGroup)=>{
454
+ animGroup.stop();
455
+ });
456
+ }
457
+ function $163294662caa594a$export$307c7dce7e89b5b5(camera) {
458
+ camera.getScene().stopAnimation(camera);
459
+ camera.animations = [];
460
+ }
461
+ function $163294662caa594a$export$f9de9f8269cb3b3a(scene, camera, cameraAnimation) {
462
+ // Clear any existing animation, not doing so will hang this animation.
463
+ scene.stopAnimation(camera);
464
+ camera.animations = []; // We set the alpha to the equivalent location of its current value but with a value 0 - PI * 2
465
+ if (Math.abs(camera.alpha) > 2 * Math.PI) camera.alpha = $163294662caa594a$var$wrap2Range(camera.alpha, 0, 2 * Math.PI);
466
+ const animSet = []; // Indices for ordering the animation operations, we want to rotate before
467
+ // we pan.
468
+ const hasTargetTween = cameraAnimation.target;
469
+ const panSequenceIndex = 0;
470
+ const rotationSequenceIndex = hasTargetTween ? 1 : 0; // Build target animation
471
+ if (cameraAnimation.target) animSet.push($163294662caa594a$var$createAnimation("cameraTargetLerp", "target", new (0, $hgUW1$Vector3)().copyFrom(camera.target), new (0, $hgUW1$Vector3)(cameraAnimation.target.x, cameraAnimation.target.y, cameraAnimation.target.z), (0, $hgUW1$Animation).ANIMATIONTYPE_VECTOR3, panSequenceIndex));
472
+ // Build the alpha & beta animations
473
+ animSet.push($163294662caa594a$var$createAnimation("cameraAlphaLerp", "alpha", camera.alpha, $163294662caa594a$var$degToRad(cameraAnimation.lonDeg), (0, $hgUW1$Animation).ANIMATIONTYPE_FLOAT, rotationSequenceIndex));
474
+ animSet.push($163294662caa594a$var$createAnimation("cameraBetaLerp", "beta", camera.beta, $163294662caa594a$var$degToRad(cameraAnimation.latDeg), (0, $hgUW1$Animation).ANIMATIONTYPE_FLOAT, rotationSequenceIndex)); // Build the radius animation
475
+ if (cameraAnimation.radius !== undefined) {
476
+ const targetRadius = Math.max(0.01, cameraAnimation.radius);
477
+ animSet.push($163294662caa594a$var$createAnimation("cameraRadiusLerp", "radius", camera.radius, targetRadius, (0, $hgUW1$Animation).ANIMATIONTYPE_FLOAT, rotationSequenceIndex));
478
+ } // Execute the animations, cleaning up after it completes.
479
+ camera.animations.push(...animSet); // When we run a camera animation we must disable any existing
480
+ // auto rotation too prevent any animations conflicting with each other.
481
+ // If it was enabled we re-enable again when the animation has finished running.
482
+ const rotationWasActive = camera.useAutoRotationBehavior;
483
+ camera.disableAutoRotationBehavior(); // Finally, execute the animation on the scene.
484
+ scene.beginAnimation(camera, 0, hasTargetTween ? $163294662caa594a$var$CAMERA_FRAME_RATE * 2 : $163294662caa594a$var$CAMERA_FRAME_RATE, false, 1, ()=>{
485
+ camera.animations = [];
486
+ if (rotationWasActive) camera.enableAutoRotationBehavior();
487
+ });
488
+ }
489
+ /**
490
+ * Small helper for converting Degrees to Radians
491
+ * @param value The value to convert
492
+ */ function $163294662caa594a$var$degToRad(value) {
493
+ return value * Math.PI / 180;
494
+ }
495
+ /**
496
+ * A generic function for tweens between two values which is a concept used
497
+ * heavily in our camera animation system.
498
+ * @param animName The name of the animation.
499
+ * @param targetProperty A target property to affect on the object holding the animation.
500
+ * @param fromValue The initial value of the property.
501
+ * @param toValue The end value of the property.
502
+ * @param dataType The data type being modified eg. Animation.ANIMATIONTYPE_VECTOR3
503
+ * @param ordering animations are sequenced via ordering, an ordering of 0 means the animation will play from frame 0 -> FRAME_RATE
504
+ * an ordering of 1 means the animation will play from FRAME_RATE -> FRAME_RATE * 2 allowing us to play animations in a specific order.
505
+ * @param loopmode What to do when the animation completes eg. Animation.ANIMATIONLOOPMODE_CYCLE
506
+ */ function $163294662caa594a$var$createAnimation(animName, targetProperty, fromValue, toValue, dataType, ordering = 0, loopmode = (0, $hgUW1$Animation).ANIMATIONLOOPMODE_CONSTANT) {
507
+ const easingFunc = new (0, $hgUW1$QuadraticEase)();
508
+ easingFunc.setEasingMode((0, $hgUW1$EasingFunction).EASINGMODE_EASEINOUT);
509
+ const targetAnim = new (0, $hgUW1$Animation)(animName, targetProperty, $163294662caa594a$var$CAMERA_FRAME_RATE, dataType, loopmode);
510
+ const targetKeys = [];
511
+ ordering > 0 && targetKeys.push({
512
+ frame: 0,
513
+ value: fromValue
514
+ });
515
+ targetKeys.push({
516
+ frame: $163294662caa594a$var$CAMERA_FRAME_RATE * ordering,
517
+ value: fromValue
518
+ });
519
+ targetKeys.push({
520
+ frame: $163294662caa594a$var$CAMERA_FRAME_RATE * (ordering + 1),
521
+ value: toValue
522
+ });
523
+ targetAnim.setKeys(targetKeys);
524
+ targetAnim.setEasingFunction(easingFunc);
525
+ return targetAnim;
526
+ } // Given min, max. Return a value that wraps from max to min or vice versa if required.
527
+ function $163294662caa594a$var$wrap2Range(val, min, max) {
528
+ if (val < min) return val = max - (min - val) % (max - min);
529
+ else return val = min + (val - min) % (max - min);
530
+ }
531
+
532
+
533
+
534
+
535
+ function $f04b030f3c397053$var$_defineProperty(obj, key, value) {
536
+ if (key in obj) Object.defineProperty(obj, key, {
537
+ value: value,
538
+ enumerable: true,
539
+ configurable: true,
540
+ writable: true
541
+ });
542
+ else obj[key] = value;
543
+ return obj;
544
+ }
545
+ class $f04b030f3c397053$export$eaa74e880b82d23a extends (0, $hgUW1$ArcRotateCamera) {
546
+ constructor(name, alpha, beta, radius, target, scene, configuration, setActiveOnSceneIfNoneActive){
547
+ super(name, alpha, beta, radius, target, scene, setActiveOnSceneIfNoneActive); // We set some sensible defaults for camera behavior. Some of
548
+ // these defaults are dependant on calculated camera parameters like radius
549
+ // which is why we need to ensure that these values are computing straight after the framing behavior
550
+ // has run so our calculations are correctly computed based on scene scale.
551
+ $f04b030f3c397053$var$_defineProperty(this, "lastFocus", new (0, $hgUW1$Vector3)(0, 0, 0));
552
+ this.minZ = 0.01;
553
+ this.maxZ = this.radius * 1000;
554
+ this.lowerRadiusLimit = this.radius * 0.01;
555
+ this.enableFramingBehavior();
556
+ this.upperRadiusLimit = 1.5 * this.radius;
557
+ this.wheelPrecision = 100 / this.radius;
558
+ this.pinchPrecision = 300 / this.radius;
559
+ this.wheelDeltaPercentage = 0.01;
560
+ this.pinchDeltaPercentage = 0.005;
561
+ this.useNaturalPinchZoom = true; // glTF assets use a +Z forward convention while the default camera faces +Z.
562
+ // Rotate the camera to look at the front of the asset by default unless configured not to.
563
+ if (configuration.camera.autoOrientation) this.alpha += Math.PI;
564
+ // When configuration is available we can go through the camera specific
565
+ // fields and apply them.
566
+ if (configuration) {
567
+ if (configuration.camera.limits.min.beta) this.lowerBetaLimit = configuration.camera.limits.min.beta;
568
+ if (configuration.camera.limits.max.beta) this.upperBetaLimit = configuration.camera.limits.max.beta;
569
+ if (configuration.camera.limits.min.alpha) this.lowerAlphaLimit = configuration.camera.limits.min.alpha;
570
+ if (configuration.camera.limits.max.alpha) this.upperAlphaLimit = configuration.camera.limits.max.alpha;
571
+ if (configuration.camera.limits.min.radius) this.lowerRadiusLimit = configuration.camera.limits.min.radius;
572
+ if (configuration.camera.limits.max.radius) this.upperRadiusLimit = configuration.camera.limits.max.radius;
573
+ configuration.camera.autoRotation.enabled && this.enableAutoRotationBehavior(configuration.camera.autoRotation.idleTimeMs);
574
+ }
575
+ }
576
+ /**
577
+ * Stores the current executed target point of the camera. If the camera target changes
578
+ * via panning we can notify ui about the fact to help users reset the camera.
579
+ */ /**
580
+ * Returns the framing behavior of this camera.
581
+ */ getFramingBehavior() {
582
+ return this.getBehaviorByName("Framing");
583
+ }
584
+ /**
585
+ * Returns the auto rotation behavior of this camera, this may
586
+ * not always be available.
587
+ */ getAutoRotationBehavior() {
588
+ const autoRotate = this.getBehaviorByName("AutoRotation");
589
+ if (autoRotate) return autoRotate;
590
+ else return undefined;
591
+ }
592
+ /**
593
+ * Activates framing on this camera causing the camera to focus on
594
+ * the scene and bound itself in a sensible fashion to the scene content.
595
+ */ enableFramingBehavior() {
596
+ this.useFramingBehavior = true;
597
+ const framingBehavior = this.getFramingBehavior();
598
+ framingBehavior.attach(this);
599
+ framingBehavior.framingTime = 0;
600
+ framingBehavior.elevationReturnTime = -1;
601
+ framingBehavior.zoomStopsAnimation = false;
602
+ this.lowerRadiusLimit = null; // Set the target bounding box for the camera
603
+ const worldExtends = $f04b030f3c397053$var$calculateWorldExtends(this._scene);
604
+ framingBehavior.zoomOnBoundingInfo(worldExtends.min, worldExtends.max); // Calculate a sensible default for wheel zoom speed based on newly calculated radius.
605
+ this.wheelPrecision = 100 / this.radius; // If we have no lower radius at this point we must fall back to a default
606
+ // to prevent the user zooming through the model...
607
+ if (this.lowerRadiusLimit === null) this.lowerRadiusLimit = 0.1;
608
+ this.lastFocus.copyFrom(this.target);
609
+ return framingBehavior;
610
+ }
611
+ /**
612
+ * Animates the camera back to the initial target when it has drifted to another position.
613
+ * @param onAnimationComplete A callback when the camera has finished animating.
614
+ */ rerunFramingBehavior(onAnimationComplete) {
615
+ const framingBehavior = this.getFramingBehavior();
616
+ framingBehavior.framingTime = 800;
617
+ const handleAnimationComplete = ()=>{
618
+ onAnimationComplete();
619
+ };
620
+ const worldExtends = $f04b030f3c397053$var$calculateWorldExtends(this._scene);
621
+ framingBehavior.zoomOnBoundingInfo(worldExtends.min, worldExtends.max, undefined, handleAnimationComplete);
622
+ framingBehavior.framingTime = 0;
623
+ }
624
+ /**
625
+ * Activates the auto rotation behavior causing the camera to rotate slowly around
626
+ * it's target.
627
+ * @param idleTime The number of milliseconds before the camera starts rotating after the user last interacted.
628
+ */ enableAutoRotationBehavior(idleTime = 5000) {
629
+ this.useAutoRotationBehavior = true;
630
+ const autoRotation = this.getAutoRotationBehavior();
631
+ if (!autoRotation) return;
632
+ autoRotation.idleRotationWaitTime = idleTime;
633
+ }
634
+ /**
635
+ * Stops the auto rotation functionality immediately.
636
+ */ disableAutoRotationBehavior() {
637
+ this.useAutoRotationBehavior = false;
638
+ }
639
+ /**
640
+ * A static function used to instantiate a single product camera instance on a scene. This camera will assume
641
+ * the active camera role on the scene and any existing active camera will be disposed.
642
+ * @param scene The scene to attach the camera to.
643
+ * @param configuration A configuration object to define the cameras limitations.
644
+ */ static create(scene, configuration) {
645
+ // We don't want duplicate cameras, dispose any existing.
646
+ if (scene.activeCamera) {
647
+ scene.activeCamera.dispose();
648
+ scene.activeCamera = null;
649
+ } // Build camera
650
+ const worldExtends = $f04b030f3c397053$var$calculateWorldExtends(scene);
651
+ const worldSize = worldExtends.max.subtract(worldExtends.min);
652
+ const worldCenter = worldExtends.min.add(worldSize.scale(0.5));
653
+ const camera = new $f04b030f3c397053$export$eaa74e880b82d23a("ProductCamera", -(Math.PI / 2), Math.PI / 2, worldSize.length() * 1.5, worldCenter, scene, configuration); // Attach an observer to manage dynamic recalculation of pan speed based on zoom. This will
654
+ // allow us to keep pan speed consistent regardless of zoom level.
655
+ camera.panningInertia = 0;
656
+ camera.panningOriginTarget.copyFrom(worldCenter);
657
+ camera.onAfterCheckInputsObservable.add(()=>{
658
+ // TODO: There's still an unknown factor here. Dividing 1000 results
659
+ // in a fairly good experience across the board but really tiny/massive models still
660
+ // are too fast/slow respectively. Camera.radius also seems to be related...
661
+ camera.panningSensibility = 1000 / worldSize.length();
662
+ });
663
+ scene.activeCamera = camera;
664
+ return camera;
665
+ }
666
+ }
667
+ /**
668
+ * Determines the bounding box of all content in the scene that should be included in
669
+ * the focus of the product camera.
670
+ */ function $f04b030f3c397053$var$calculateWorldExtends(scene) {
671
+ // TODO: Move away from depending on mesh name to determine whether or not a mesh should be targeted
672
+ const targetMeshes = scene.meshes.filter((mesh)=>{
673
+ return mesh.name.toLowerCase().endsWith("_t") || mesh.name.toLowerCase().includes("_t_");
674
+ });
675
+ return scene.getWorldExtends((mesh)=>{
676
+ return mesh.isVisible && mesh.isEnabled() && (targetMeshes.length === 0 || targetMeshes.includes(mesh));
677
+ });
678
+ }
679
+
680
+
681
+
682
+
683
+
684
+
685
+
686
+ const $11b2720e07c11c64$var$keyToTextureNameMap = {
687
+ albedoTexture: "albedoMapKey",
688
+ bumpTexture: "normalMapKey",
689
+ ambientTexture: "ambientMapKey",
690
+ emissiveTexture: "emissionMapKey",
691
+ opacityTexture: "alphaMapKey",
692
+ metallicTexture: "metallicMapKey",
693
+ refractionTexture: "refractionMapKey"
694
+ };
695
+ function $11b2720e07c11c64$export$a0161ad02fcaac1(materialVariant, targetMaterial, assetsManager, removeWhenUndefined) {
696
+ const supportedTextures = [
697
+ "albedoTexture",
698
+ "bumpTexture",
699
+ "ambientTexture",
700
+ "emissiveTexture",
701
+ "opacityTexture",
702
+ "metallicTexture",
703
+ "refractionTexture"
704
+ ];
705
+ supportedTextures.forEach((textureName)=>{
706
+ $11b2720e07c11c64$var$calculateAndApplyTextureProperties(textureName, materialVariant, targetMaterial, assetsManager, removeWhenUndefined);
707
+ });
708
+ $11b2720e07c11c64$var$calculateClearcoatProperties(materialVariant, targetMaterial);
709
+ }
710
+ function $11b2720e07c11c64$var$calculateAndApplyTextureProperties(textureName, materialVariant, targetMaterial, assetsManager, removeWhenUndefined) {
711
+ const resourceKeyForTexture = $11b2720e07c11c64$var$keyToTextureNameMap[textureName];
712
+ if (!resourceKeyForTexture) throw new Error("Unexpected texture name encountered.");
713
+ const textureKey = materialVariant[resourceKeyForTexture];
714
+ if (textureKey) assetsManager.addTextureTask(textureName, textureKey, false, false);
715
+ else if (removeWhenUndefined && targetMaterial[textureName]) {
716
+ targetMaterial[textureName] && targetMaterial[textureName].dispose();
717
+ targetMaterial[textureName] = null;
718
+ $11b2720e07c11c64$var$applyTextureSpecificChangesOnRemoval(textureName, targetMaterial);
719
+ }
720
+ }
721
+ /**
722
+ * Certain textures require specific behavior to be undone on removal, such
723
+ * as flags that would have enabled no longer desired behavior. We filter those out here.
724
+ * @param textureName The texture name being handled.
725
+ * @param targetMat The material this texture resides on.
726
+ */ function $11b2720e07c11c64$var$applyTextureSpecificChangesOnRemoval(textureName, targetMat) {
727
+ if (textureName === "opacityTexture") targetMat.useAlphaFromAlbedoTexture = true;
728
+ if (textureName === "metallicTexture") {
729
+ targetMat.useRoughnessFromMetallicTextureAlpha = false;
730
+ targetMat.useRoughnessFromMetallicTextureGreen = false;
731
+ targetMat.useMetallnessFromMetallicTextureBlue = false;
732
+ }
733
+ if (textureName === "refractionTexture") {
734
+ targetMat.subSurface.isRefractionEnabled = false;
735
+ targetMat.subSurface.refractionIntensity = 1;
736
+ }
737
+ }
738
+ function $11b2720e07c11c64$export$e17dfe56aa21b90(textureName, targetMat, materialResource, downloadedTexture) {
739
+ if (textureName === "opacityTexture") targetMat.useAlphaFromAlbedoTexture = false;
740
+ if (textureName === "metallicTexture") {
741
+ targetMat.useRoughnessFromMetallicTextureAlpha = false;
742
+ targetMat.useRoughnessFromMetallicTextureGreen = true;
743
+ targetMat.useMetallnessFromMetallicTextureBlue = true;
744
+ }
745
+ if (textureName === "refractionTexture") {
746
+ targetMat.subSurface.isRefractionEnabled = true;
747
+ targetMat.subSurface.refractionIntensity = materialResource.refractionIntensity || 1;
748
+ }
749
+ targetMat[textureName] = downloadedTexture;
750
+ }
751
+ /**
752
+ * Applies a clearcoat parameter to a material if requested.
753
+ * @param materialVariant The variant to read for clearcoat.
754
+ * @param targetMaterial The target material to be applied to.
755
+ */ function $11b2720e07c11c64$var$calculateClearcoatProperties(materialVariant, targetMaterial) {
756
+ if (!materialVariant.clearCoat) return;
757
+ if (materialVariant.clearCoat === (0, $0822e2c84c7d8f0f$export$5185910364c29078).RemoveWhenSelected) {
758
+ targetMaterial.clearCoat.isEnabled = false;
759
+ targetMaterial.clearCoat.indexOfRefraction = 1.5; // Default value
760
+ } else if (materialVariant.clearCoat === (0, $0822e2c84c7d8f0f$export$5185910364c29078).ApplyWhenSelected) {
761
+ targetMaterial.clearCoat.isEnabled = true;
762
+ targetMaterial.clearCoat.indexOfRefraction = materialVariant.clearCoatIOR || targetMaterial.clearCoat.indexOfRefraction;
763
+ }
764
+ }
765
+
766
+
767
+ function $645f6c7a5d045644$var$_defineProperty(obj, key, value) {
768
+ if (key in obj) Object.defineProperty(obj, key, {
769
+ value: value,
770
+ enumerable: true,
771
+ configurable: true,
772
+ writable: true
773
+ });
774
+ else obj[key] = value;
775
+ return obj;
776
+ }
777
class $645f6c7a5d045644$export$2e2bcd8739ae039 {
    constructor(){
        // Maps a target material name to the accumulated variant values applied to it.
        $645f6c7a5d045644$var$_defineProperty(this, "materialVariantMap", new Map());
        // Keys whose model variant replaced the base product model, so we know to restore it.
        $645f6c7a5d045644$var$_defineProperty(this, "keysThatRemovedBaseModel", []);
        // Maps a variant key to the asset container currently loaded for it.
        $645f6c7a5d045644$var$_defineProperty(this, "loadedContainerForKey", new Map());
    }
    /**
     * Apply a material variant to a given scene.
     * @param scene The scene to add the material changes to.
     * @param targetMaterial The name of the material in the scene we should be targeting.
     * @param material The material variant to apply.
     * @param onProgress A progress callback function for loading bars and event timing.
     * @param removeWhenUndefined material application is an additive process, setting this bool to true will
     * instead remove material textures when they aren't defined. This is useful for material editor applications
     * where we want to undo changes and remove effects from display.
     */ async applyMaterial(scene, targetMaterial, material, onProgress, removeWhenUndefined) {
        return new Promise((resolve)=>{
            // Attempt to find the requested material in the scene
            const targetedMaterials = scene.materials.filter((mat)=>mat.name === targetMaterial);
            // Keep track of material changes so they can be replayed after model swaps.
            const existingMaterialResource = this.materialVariantMap.get(targetMaterial);
            this.materialVariantMap.set(targetMaterial, {
                ...existingMaterialResource,
                ...material
            });
            // At this point we know the scene has been loaded. If the target material doesn't exist then
            // the step wasn't configured correctly to begin with so just ignore the request.
            if (targetedMaterials.length === 0) {
                resolve(undefined);
                return;
            }
            // Create an async task for each type of available texture given.
            const assetsManager = new (0, $hgUW1$AssetsManager)(scene);
            assetsManager.useDefaultLoadingScreen = false;
            // Loop through the materials found in scene that match our target material name and
            // notify the asset manager of the various material values that need to change on each.
            // (Local renamed from `targetMaterial`, which shadowed the string parameter above.)
            targetedMaterials.forEach((sceneMat)=>(0, $11b2720e07c11c64$export$a0161ad02fcaac1)(material, sceneMat, assetsManager, removeWhenUndefined));
            // Notify caller on progress towards loading material changes.
            assetsManager.onProgress = (remaining, total, task)=>{
                onProgress && onProgress(remaining / total * 100, 100, task.name);
            };
            // Execute the configured texture tasks. For each task received, we iterate over each targeted
            // material and apply the requirements of the task.
            assetsManager.onFinish = (tasks)=>{
                tasks.forEach((task)=>{
                    const textureTask = task;
                    onProgress && onProgress(100, 100, task.name);
                    targetedMaterials.forEach((targetMat)=>(0, $11b2720e07c11c64$export$e17dfe56aa21b90)(task.name, targetMat, material, textureTask.texture));
                });
                resolve(undefined);
            };
            assetsManager.loadAsync();
        });
    }
    /**
     * Applies a model variant to the scene.
     * @param scene The scene we're working out of.
     * @param key The key to uniquely identify this model variant application.
     * @param replaceProductModel When true, we should replace the base product model.
     * @param model The details for the new model, when undefined we should remove the variant associated to the given key.
     * @param onProgress A load progress callback that can be used for loading bars and event timing.
     */ async applyModel(scene, key, replaceProductModel, model, onProgress) {
        /**
         * If we're adding a new model to the scene and the key hasn't been
         * encountered before and the product model is being replaced, let's
         * store the key so we know to replace the product model when encountered later.
         */ if (model && replaceProductModel && !this.keysThatRemovedBaseModel.includes(key)) this.keysThatRemovedBaseModel.push(key);
        // When no model details are provided that means the model variant has been
        // requested to be removed entirely. We remove that and also re-add any base
        // model if it was removed to fit this variant.
        if (!model) {
            const shouldRestoreBaseModel = this.keysThatRemovedBaseModel.includes(key);
            this.loadedContainerForKey.get(key)?.removeAllFromScene();
            scene && shouldRestoreBaseModel && await (0, $52383f320efd34d4$export$fdb7ab7bd2bafbea)(scene.metadata.baseModel, scene);
            return;
        }
        // Load new model data
        const container = await (0, $a38b99fca407fe64$export$a2966fb0929452fe)(model, scene, onProgress);
        // Remove any existing asset for the given key.
        if (this.loadedContainerForKey.has(key)) this.loadedContainerForKey.get(key)?.removeAllFromScene();
        // Remove product base model if required.
        if (replaceProductModel) {
            const base = await (0, $a38b99fca407fe64$export$a2966fb0929452fe)(scene.metadata.baseModel, scene);
            base.removeAllFromScene();
        }
        // Add new model to scene.
        container.addAllToScene();
        this.loadedContainerForKey.set(key, container);
        // Reapply any encountered material variants to the scene, this ensures no material
        // information is lost. FIX: the callback is no longer `async` — the previous version
        // created floating promises whose rejections would go unobserved; the promises that
        // matter are the ones pushed into promiseList and awaited below.
        const promiseList = [];
        this.materialVariantMap.forEach((value, materialName)=>{
            promiseList.push(this.applyMaterial(scene, materialName, value));
        });
        await Promise.all(promiseList);
    }
}
864
+
865
+
866
+
867
+
868
+
869
+
870
+
871
+
872
+
873
function $52383f320efd34d4$var$_defineProperty(obj, key, value) {
    // Babel-style helper: existing keys (own or inherited) are redefined through
    // Object.defineProperty with a fully-open descriptor; brand-new keys can be
    // set with ordinary assignment, which produces the same descriptor shape.
    const descriptor = {
        value: value,
        enumerable: true,
        configurable: true,
        writable: true
    };
    if (key in obj) Object.defineProperty(obj, key, descriptor);
    else obj[key] = value;
    return obj;
}
883
// Register the "glbPostProcessor" extension factory with
// the GLTF loader provided by babylonjs.
// NOTE(review): presumably the extension applies Spiff-specific post-processing
// after GLB import — confirm against the extension class's implementation.
(0, $hgUW1$GLTF2).GLTFLoader.RegisterExtension("glbPostProcessor", function(loader) {
    return new (0, $f2c8dd674be56293$export$3fb1f3d61bcfe989)(loader);
});
// Whenever the glTF loader plugin activates, enable HTTP range requests
// (so large GLBs can be fetched in chunks) and transparency-as-coverage.
(0, $hgUW1$SceneLoader).OnPluginActivatedObservable.add((plugin)=>{
    if (plugin.name === "gltf") {
        const loader = plugin;
        loader.useRangeRequests = true;
        loader.transparencyAsCoverage = true;
    }
}); // Side effects for view related logic
// Key under which the initial scene/base-model load progress is tracked
// in the service's loadProgress map.
const $52383f320efd34d4$var$InitialSceneLoadProgressKey = "initialScene";
895
class $52383f320efd34d4$export$bbc4f3921db721d6 {
    /**
     * 3D preview service: wraps a babylon Engine/Scene, manages view registration,
     * load-progress reporting, camera control, material/model variants,
     * highlighting and screenshots.
     * @param canvas The working canvas. When omitted a NullEngine is created
     * (headless/server use).
     * @param options Raw rendering configuration options.
     */ constructor(canvas, options){
        $52383f320efd34d4$var$_defineProperty(this, "engine", void 0);
        $52383f320efd34d4$var$_defineProperty(this, "scene", void 0);
        $52383f320efd34d4$var$_defineProperty(this, "configuration", void 0);
        // The current progress of loading the scene. A map of task key -> value between 0-100.
        $52383f320efd34d4$var$_defineProperty(this, "loadProgress", new Map([
            [
                $52383f320efd34d4$var$InitialSceneLoadProgressKey,
                0
            ]
        ]));
        // A map relating target materials to a list of materials to apply via an option.
        // TODO: We need to clean this up and make it generic.
        $52383f320efd34d4$var$_defineProperty(this, "materialReadyToLoadCallbacks", new Map());
        // A map relating option id to a callback provided by someone who wants
        // to load a model variant once the scene is ready.
        $52383f320efd34d4$var$_defineProperty(this, "modelReadyToLoadCallbacks", new Map());
        // The last model animation requested if the scene wasn't ready.
        $52383f320efd34d4$var$_defineProperty(this, "queuedModelAnimation", void 0);
        // The last camera animation requested if the scene wasn't ready.
        $52383f320efd34d4$var$_defineProperty(this, "queuedCameraAnimation", void 0);
        // When listeners have been notified of a change in camera target position
        // this field will be set to true.
        $52383f320efd34d4$var$_defineProperty(this, "focusLostNotified", false);
        // Observable notifying listeners about the progress of scene loading.
        $52383f320efd34d4$var$_defineProperty(this, "loadObservable", new (0, $hgUW1$Observable)());
        // An observable handling loss of target focus by the camera.
        $52383f320efd34d4$var$_defineProperty(this, "focusLostObservable", new (0, $hgUW1$Observable)());
        // A set of dynamic textures linked to workflow canvases; these textures
        // update themselves when changes are made to the underlying canvas.
        $52383f320efd34d4$var$_defineProperty(this, "dynamicTextures", new Map());
        // When instantiated, contains a list of meshes that contribute to highlighting in the scene.
        $52383f320efd34d4$var$_defineProperty(this, "highlightLayer", void 0);
        // A manager class encapsulating the logic required to manipulate material and
        // model variants in the scene.
        $52383f320efd34d4$var$_defineProperty(this, "variantManager", new (0, $645f6c7a5d045644$export$2e2bcd8739ae039)());
        // The bound resize handler. Stored so shutdown() can unregister the exact
        // same function reference registered below (see FIX note there).
        $52383f320efd34d4$var$_defineProperty(this, "boundResizeHandler", this.fireResizeEvent.bind(this));
        // Options are required all over the place so set this first.
        this.configuration = new (0, $0e1e2b2fa84204ab$export$fa89be98fe0936f2)(options);
        // We serve the draco decoder logic from google's CDN. To ensure we use the latest version, we need to
        // move this to our CDN one day when we are big and powerful...
        const DRACO_VERSION = "1.5.3";
        (0, $hgUW1$DracoCompression).Configuration = {
            decoder: {
                wasmUrl: `https://www.gstatic.com/draco/versioned/decoders/${DRACO_VERSION}/draco_wasm_wrapper_gltf.js`,
                wasmBinaryUrl: `https://www.gstatic.com/draco/versioned/decoders/${DRACO_VERSION}/draco_decoder_gltf.wasm`,
                fallbackUrl: `https://www.gstatic.com/draco/versioned/decoders/${DRACO_VERSION}/draco_decoder_gltf.js`
            }
        };
        // HOTFIX: Resolve issue on safari 15, initializing engine without first requesting a context causes page reload.
        if (canvas && !canvas.getContext("webgl2")) canvas.getContext("webgl");
        // Try to instantiate the engine, Engine may throw an exception when WebGL is deemed unavailable. We
        // can expect consumers of this interface to handle this at a higher level as failure to bring the 3D preview
        // service up could represent a major change in how workflows function...
        // console.log is temporarily silenced because engine construction is noisy.
        const logger = console.log;
        console.log = ()=>{};
        const engine = !canvas ? new (0, $hgUW1$NullEngine)() : new (0, $hgUW1$Engine)(canvas, true, {
            premultipliedAlpha: false,
            preserveDrawingBuffer: !!options?.backgroundImage,
            audioEngine: false,
            stencil: this.configuration.highlights.enabled,
            forceSRGBBufferSupportState: true
        });
        console.log = logger;
        // We implement our own loading UI in react, disable the
        // default babylon behavior here.
        engine.hideLoadingUI();
        // If the window is resized we want to update internal dimensions to reflect any changes.
        // FIX: register the stored bound handler — previously a fresh `.bind(this)` result
        // was registered here, which removeEventListener in shutdown() could never match,
        // leaking the listener.
        window.addEventListener("resize", this.boundResizeHandler);
        // Set the engine and scene to be used for the lifetime of this service.
        this.engine = engine;
        this.scene = new (0, $hgUW1$Scene)(engine);
    }
    registerFocusLostListener(listener) {
        this.focusLostObservable.add(listener);
    }
    unregisterFocusLostListener(listener) {
        this.focusLostObservable.removeCallback(listener);
    }
    registerLoadProgressListener(listener) {
        this.loadObservable.add(listener);
        // Immediately report the current state so late subscribers catch up.
        listener(this.getLoadListenerEvent());
    }
    unregisterLoadProgressListener(listener) {
        this.loadObservable.removeCallback(listener);
    }
    registerView(canvas) {
        // Register the view with the engine
        // In babylon 4.1.0 the registered canvas' height and width will
        // get overwritten with the working canvas' client height and
        // width so until the release of 4.2.0 we need to
        // manually restore the registered canvas' dimensions.
        // https://github.com/BabylonJS/Babylon.js/commit/baedd30f2b62fc793fc23188048ca27b21c068f4
        // TODO: We're already using 4.2, can we remove this width/height code now? Test when possible.
        const height = canvas.height;
        const width = canvas.width;
        this.engine.registerView(canvas);
        canvas.setAttribute("height", height.toString());
        canvas.setAttribute("width", width.toString());
        // Lock camera state if orbit is disabled.
        if (!this.orbitEnabled()) this.setCameraState((0, $0822e2c84c7d8f0f$export$9beaec53ae7234b4).Pan);
    }
    getNumViewports() {
        return this.engine.views?.length || 0;
    }
    unregisterView(canvas) {
        this.engine.unRegisterView(canvas);
        this.engine.inputElement = null;
    }
    shutdown() {
        if (this.engine) this.engine.dispose();
        // FIX: remove the exact bound listener registered in the constructor.
        // Previously the unbound method reference was passed here, which never
        // matched the bound function that was registered, so the listener leaked.
        window.removeEventListener("resize", this.boundResizeHandler);
    }
    getSceneInitializationProgress() {
        return this.loadProgress.get($52383f320efd34d4$var$InitialSceneLoadProgressKey);
    }
    async initialize(src, contextService) {
        this.scene.clearColor = this.configuration.scene.clearColor;
        // Load the given base model. If no model is provided we load
        // an empty scene instead.
        this.scene.metadata = {
            baseModel: src
        };
        // Setup environment lighting, we export .dds files from IBL Baker and
        // load them into babylon to be optimized and exported again in the .env.local format.
        this.scene.environmentTexture = (0, $hgUW1$CubeTexture).CreateFromPrefilteredData(this.configuration.scene.environmentFile, this.scene);
        // If a base model is specified, load here. We await this to ensure it's ready before scene load.
        if (src) {
            const maximumLoadProgress = 99;
            await $52383f320efd34d4$export$fdb7ab7bd2bafbea(src, this.scene, (event)=>{
                this.loadProgress.set($52383f320efd34d4$var$InitialSceneLoadProgressKey, event.loaded * maximumLoadProgress / event.total);
                this.notifyLoadHandlers();
            });
        }
        // Wire up pointer-over/out highlight actions on pickable meshes when enabled.
        if (this.configuration.highlights.enabled) this.scene.meshes.forEach((m)=>{
            if (m.name === "targetcube_t" || m.name === "backgroundShell") return;
            m.isPickable = true;
            if (!m.actionManager) m.actionManager = new (0, $hgUW1$ActionManager)(this.scene);
            m.actionManager.registerAction(new (0, $hgUW1$ExecuteCodeAction)((0, $hgUW1$ActionManager).OnPointerOutTrigger, ()=>{
                this.setHighlights([]);
            }));
            m.actionManager.registerAction(new (0, $hgUW1$ExecuteCodeAction)((0, $hgUW1$ActionManager).OnPointerOverTrigger, (e)=>{
                if (e.meshUnderPointer) {
                    const material = e.meshUnderPointer.material;
                    if (material) this.setHighlights([
                        material
                    ]);
                }
            }));
        });
        // Construct the camera used to view the scene.
        const camera = (0, $f04b030f3c397053$export$eaa74e880b82d23a).create(this.scene, this.configuration);
        // Create & apply dynamic textures
        const canvasPanels = contextService?.getAll() || new Map();
        (0, $373f30fedccc1cd5$export$e2d6a39fe5dbab66)(this.scene, canvasPanels, this.dynamicTextures);
        // Enable glow layer if any materials use emission.
        const shouldEnableGlow = this.scene.materials.some((mat)=>"emissiveTexture" in mat && mat.emissiveTexture !== null);
        if (shouldEnableGlow) {
            const gl = new (0, $hgUW1$GlowLayer)("GlowLayer", this.scene);
            gl.intensity = 0.5;
        }
        // In cases where we start off with no model animation, we should
        // default to paused. Otherwise the animation will loop repeatedly.
        (0, $163294662caa594a$export$f2095917e1db8d3a)(this.scene);
        // Start main render loop
        this.engine.runRenderLoop(()=>{
            if (!this.engine.views) return;
            // Recapture input if the view has changed.
            const view = this.engine.views[0];
            const curInputEl = this.engine.inputElement;
            if (!curInputEl || view && curInputEl && curInputEl.id !== view.target.id) this.reattachControls(view.target);
            // Update dynamic textures where canvas changes have been made.
            canvasPanels.forEach((canvasPanel, layoutKey)=>{
                const dynamicTexture = this.dynamicTextures.get(layoutKey);
                if (dynamicTexture && canvasPanel.getStaticContextDirty() && dynamicTexture.isReady()) {
                    dynamicTexture.update(false);
                    canvasPanel.setStaticContextDirty(false);
                }
            });
            // A Patch for missing behavior from Babylon v5.0, ensures that views are
            // cleared entirely before rendering to prevent smearing.
            if (this.configuration.scene.transparentBackground) this.engine.views.forEach((view)=>{
                const renderingCanvas = this.engine.getRenderingCanvas();
                const viewCtx = view.target.getContext("2d");
                viewCtx.clearRect(0, 0, renderingCanvas.width, renderingCanvas.height);
            });
            // Render the scene
            this.scene.render();
            // If load wasn't finalized by this point, we've done our first render and should
            // callback to finalize the load process.
            if (this.getSceneInitializationProgress() !== 100) {
                this.loadProgress.set($52383f320efd34d4$var$InitialSceneLoadProgressKey, 100);
                // Finalize load process: flush material/model variants and animations
                // that were queued while the scene was still loading.
                this.materialReadyToLoadCallbacks.forEach((value, targetMaterial)=>{
                    value.forEach((material, key)=>{
                        this.applyMaterialVariant(targetMaterial, key, material);
                    });
                });
                this.materialReadyToLoadCallbacks.clear();
                this.modelReadyToLoadCallbacks.forEach((callback)=>{
                    callback(this.scene);
                });
                this.modelReadyToLoadCallbacks.clear();
                if (this.queuedModelAnimation) {
                    this.executeModelAnimation(this.queuedModelAnimation);
                    this.queuedModelAnimation = undefined;
                }
                if (this.queuedCameraAnimation) {
                    this.executeCameraAnimation(this.queuedCameraAnimation);
                    this.queuedCameraAnimation = undefined;
                }
                this.notifyLoadHandlers();
            }
            // When the camera moves from its last set focus point via panning
            // let's notify any listeners so they can act on the fact
            if (!camera.target.equalsWithEpsilon(camera.lastFocus, 0.1) && !this.focusLostNotified) {
                this.focusLostObservable.notifyObservers(undefined);
                this.focusLostNotified = true;
            }
        });
    }
    executeModelAnimation(animation) {
        // Queue the animation if the scene hasn't finished loading yet.
        if (this.getSceneInitializationProgress() !== 100) {
            this.queuedModelAnimation = animation;
            return;
        }
        (0, $163294662caa594a$export$267f51f3588f9216)(this.scene, animation.loop, animation.to, animation.from);
    }
    executeCameraAnimation(animation) {
        // Queue the animation if the scene or the camera isn't ready yet.
        if (this.getSceneInitializationProgress() !== 100 || !this.scene.activeCamera) {
            this.queuedCameraAnimation = animation;
            return;
        }
        (0, $163294662caa594a$export$f9de9f8269cb3b3a)(this.scene, this.scene.activeCamera, animation);
    }
    getCameraPose() {
        // Reports the active camera's orientation in degrees plus radius/target.
        if (this.scene) {
            const camera = this.scene.activeCamera;
            if (camera) return {
                lonDeg: Math.round(camera.alpha * 180 / Math.PI),
                latDeg: Math.round(camera.beta * 180 / Math.PI),
                radius: Math.round((camera.radius + Number.EPSILON) * 10000) / 10000,
                target: {
                    x: camera.target.x,
                    y: camera.target.y,
                    z: camera.target.z
                }
            };
        }
        return undefined;
    }
    setCameraPose(cameraPose) {
        if (this.scene) {
            const camera = this.scene.activeCamera;
            if (camera) {
                camera.target = new (0, $hgUW1$Vector3)(cameraPose.target.x, cameraPose.target.y, cameraPose.target.z);
                camera.radius = cameraPose.radius;
                // FIX: mirror getCameraPose() — longitude maps to alpha and latitude
                // to beta, converted from degrees back to radians. Previously the two
                // were swapped AND left in degrees, producing a wildly wrong pose.
                camera.alpha = cameraPose.lonDeg * Math.PI / 180;
                camera.beta = cameraPose.latDeg * Math.PI / 180;
            }
        }
    }
    async applyMaterialVariant(targetMaterial, key, material, removeWhenUndefined) {
        // If the scene hasn't finished loading yet, let's queue the material changes
        // up to have them play back when the scene is ready. NOTE: We queue using
        // option as a key, this prevents multiple options applying and making each other's changes redundant.
        if (this.getSceneInitializationProgress() !== 100) {
            if (this.materialReadyToLoadCallbacks.has(targetMaterial)) {
                const entry = this.materialReadyToLoadCallbacks.get(targetMaterial);
                entry?.set(key, material);
            } else {
                this.materialReadyToLoadCallbacks.set(targetMaterial, new Map());
                const newEntry = this.materialReadyToLoadCallbacks.get(targetMaterial);
                newEntry?.set(key, material);
            }
            return;
        }
        await this.variantManager.applyMaterial(this.scene, targetMaterial, material, (remaining, total, taskName)=>{
            this.loadProgress.set(`key_${taskName}`, remaining / total * 100);
            this.notifyLoadHandlers();
        }, removeWhenUndefined);
    }
    async applyModelVariant(key, modelDetails, replaceProductModel) {
        // If the scene hasn't finished loading yet, let's queue the model changes
        // up to have them play back when the scene is ready.
        if (this.getSceneInitializationProgress() !== 100) {
            this.modelReadyToLoadCallbacks.set(key, ()=>{
                this.applyModelVariant(key, modelDetails, replaceProductModel);
            });
            return;
        }
        // Load the model variant, we attach a listener to keep track of progress.
        await this.variantManager.applyModel(this.scene, key, replaceProductModel || false, modelDetails.model, (e)=>{
            this.loadProgress.set(key, e.loaded * 100 / e.total);
            this.notifyLoadHandlers();
        });
        // Dispose of any memory that is no longer required.
        // TODO Can we remove?
        this.scene.materials.forEach((m)=>{
            if (m && m.getBindedMeshes().length === 0) {
                // If this material's related textures aren't dynamic, let's kill them.
                const forceRemoveTextures = m instanceof (0, $hgUW1$PBRMaterial) && !(m.albedoTexture instanceof (0, $hgUW1$DynamicTexture));
                m.dispose(false, forceRemoveTextures);
            }
        });
        // Re-attach dynamic textures if required.
        (0, $373f30fedccc1cd5$export$e2d6a39fe5dbab66)(this.scene, modelDetails.contextService.getAll(), this.dynamicTextures);
        (0, $163294662caa594a$export$f2095917e1db8d3a)(this.scene);
    }
    setCameraState(rigType) {
        if (!this.engine?.views || !this.engine?.views[0]) throw new Error("No views attached, camera state requires a view to attach controls onto.");
        // Orbit rig pans with the right mouse button (2); otherwise pan is on left (0).
        if (rigType === (0, $0822e2c84c7d8f0f$export$9beaec53ae7234b4).Orbit) this.reattachControls(this.engine.views[0].target, 2);
        else this.reattachControls(this.engine.views[0].target, 0);
    }
    animateToLastCameraFocus() {
        return new Promise((resolve)=>{
            const camera = this.scene.activeCamera;
            const configuration = this.configuration;
            camera.rerunFramingBehavior(()=>{
                this.focusLostNotified = false;
                // Restore configured radius limits once framing completes.
                if (configuration.camera.limits.min.radius) camera.lowerRadiusLimit = configuration.camera.limits.min.radius;
                if (configuration.camera.limits.max.radius) camera.upperRadiusLimit = configuration.camera.limits.max.radius;
                resolve();
            });
        });
    }
    setAutoRotation(shouldAutoRotate) {
        const camera = this.scene.activeCamera;
        if (!this.configuration.camera.autoRotation.enabled || !camera) return;
        if (shouldAutoRotate) camera.enableAutoRotationBehavior(this.configuration.camera.autoRotation.idleTimeMs);
        else camera.disableAutoRotationBehavior();
    }
    getCurrentConfiguration() {
        return this.configuration.options;
    }
    async renderSceneScreenshot(resolution, camAnim) {
        const productCamera = this.scene.activeCamera;
        if (!productCamera) throw new Error("Missing product camera, cannot render screenshot!");
        // To ensure in-progress animations don't cause trouble we reset the camera's anims.
        (0, $163294662caa594a$export$307c7dce7e89b5b5)(productCamera);
        // Store initial values of both lon and lat of the camera so the pose
        // can be restored after the screenshot is taken.
        const initialAlpha = productCamera.alpha;
        const initialBeta = productCamera.beta;
        const initialRadius = productCamera.radius;
        const latRad = camAnim.latDeg * Math.PI / 180;
        const lonRad = camAnim.lonDeg * Math.PI / 180;
        productCamera.alpha = lonRad;
        productCamera.beta = latRad;
        productCamera.radius = camAnim.radius || productCamera.radius;
        const response = await (0, $hgUW1$Tools).CreateScreenshotUsingRenderTargetAsync(this.engine, productCamera, resolution, "image/png", 2, true);
        productCamera.alpha = initialAlpha;
        productCamera.beta = initialBeta;
        productCamera.radius = initialRadius;
        return response;
    }
    orbitEnabled() {
        // Orbit is considered disabled only when BOTH the alpha and beta ranges
        // are fully collapsed (min === max). Missing limits mean unrestricted orbit.
        const configuration = this.configuration;
        if (!configuration) return true;
        const lowerA = configuration.camera.limits.min.alpha;
        const upperA = configuration.camera.limits.max.alpha;
        const lowerB = configuration.camera.limits.min.beta;
        const upperB = configuration.camera.limits.max.beta;
        if (lowerA === undefined || upperA === undefined || lowerB === undefined || upperB === undefined) return true;
        const alphaBounds = [
            lowerA,
            upperA
        ];
        const betaBounds = [
            lowerB,
            upperB
        ];
        const alphaLocked = alphaBounds.every((val)=>val === lowerA);
        const betaLocked = betaBounds.every((val)=>val === lowerB);
        return !alphaLocked && !betaLocked;
    }
    fireResizeEvent() {
        if (this.getNumViewports() > 0) this.engine.resize();
    }
    onMaterialSelected(cb) {
        // Attach a pick-down action to every selectable mesh that reports the
        // picked mesh's material back through the supplied callback.
        this.scene.meshes.forEach((m)=>{
            if (m.name === "targetcube_t" || m.name === "backgroundShell") return;
            if (!m.actionManager) m.actionManager = new (0, $hgUW1$ActionManager)(this.scene);
            m.actionManager.registerAction(new (0, $hgUW1$ExecuteCodeAction)((0, $hgUW1$ActionManager).OnPickDownTrigger, (e)=>{
                if (e.meshUnderPointer) {
                    const material = e.meshUnderPointer.material;
                    if (material) cb({
                        id: material.id,
                        name: material.name
                    });
                }
            }));
        });
    }
    listMaterials() {
        const materials = this.scene?.materials;
        if (!materials) return [];
        return materials.map((material)=>{
            return {
                id: material.id,
                name: material.name
            };
        });
    }
    setHighlights(materials, color) {
        // If no materials were requested, let's clear the highlight layer entirely.
        // (An empty replacement layer is then created below.)
        if (materials.length === 0) {
            this.highlightLayer?.dispose();
            this.highlightLayer = undefined;
        }
        // Instantiate and build mesh list for any new highlight layer.
        if (!this.highlightLayer) {
            this.highlightLayer = new (0, $hgUW1$HighlightLayer)("highlights", this.scene, {
                isStroke: true,
                blurVerticalSize: 0.85,
                blurHorizontalSize: 0.85
            });
            this.highlightLayer.innerGlow = true;
            this.highlightLayer.outerGlow = false;
        }
        // Clear existing selections.
        this.highlightLayer.removeAllMeshes();
        const providedColor = color ? new (0, $hgUW1$Color3)(color[0], color[1], color[2]).toLinearSpace() : undefined;
        // Highlight all passed meshes, if they're found. Ignore otherwise.
        materials.forEach((material)=>{
            const sceneMaterial = this.scene.materials.find((sm)=>sm.name === material.name && sm.id === material.id);
            if (sceneMaterial) sceneMaterial.getBindedMeshes().forEach((boundMesh)=>this.highlightLayer?.addMesh(boundMesh, providedColor || (0, $hgUW1$Color3).FromHexString("#fcba03")));
        });
    }
    /**
     * Given a valid canvas element, will remove any existing input controls
     * and re-attach them to the given canvas. The pan mouse button can be set
     * to either 0 (left mouse) or 2 (right mouse).
     */ reattachControls(targetCanvas, panMouseButton = 2) {
        this.scene.detachControl();
        this.engine.inputElement = targetCanvas;
        const camera = this.scene.activeCamera;
        if (camera) {
            camera.attachControl(true, false, panMouseButton);
            // Importing ArcRotateCameraPointersInput breaks the build only in jenkins. casting as any for now.
            // eslint-disable-next-line @typescript-eslint/no-explicit-any
            const pointerInputController = camera.inputs.attached.pointers;
            pointerInputController.multiTouchPanning = false;
        }
        this.scene.attachControl(true, true, true);
    }
    /**
     * Computes a load listener event based on current state of scene.
     * Only in-flight tasks (< 100) participate in the average; when nothing
     * is loading, progress reads as 100.
     */ getLoadListenerEvent() {
        const values = Array.from(this.loadProgress.values()).filter((v)=>v < 100);
        const sum = values.reduce((a, b)=>a + b, 0);
        const avg = sum / values.length || 0;
        return {
            loadValue: values.length === 0 ? 100 : avg,
            sceneInitialized: this.getSceneInitializationProgress() === 100
        };
    }
    /**
     * Computes the average loading time across all loading events and notifies
     * listeners of the current load progress.
     */ notifyLoadHandlers() {
        this.loadObservable.notifyObservers(this.getLoadListenerEvent());
    }
}
1347
async function $52383f320efd34d4$export$fdb7ab7bd2bafbea(src, scene, progressHandler) {
    // Load the asset container for the given source (reporting progress through
    // the optional handler) and merge everything it holds into the scene.
    (await (0, $a38b99fca407fe64$export$a2966fb0929452fe)(src, scene, progressHandler)).addAllToScene();
}
1351
+
1352
+
1353
+
1354
+
1355
+
1356
+ export {$0822e2c84c7d8f0f$export$9beaec53ae7234b4 as ProductCameraRig, $0822e2c84c7d8f0f$export$5185910364c29078 as MaterialEffectMode, $52383f320efd34d4$export$bbc4f3921db721d6 as SpiffCommerce3DPreviewService, $52383f320efd34d4$export$fdb7ab7bd2bafbea as createBaseModel, $db6a29689d313881$export$902ee4e8040a4cdb as RenderingConfiguration, $db6a29689d313881$export$fbb2744b8f74e4f2 as REFLECTION_PROBE_RESOLUTION};
2
1357
  //# sourceMappingURL=module.js.map