@spiffcommerce/preview 3.6.2-rc.8 → 4.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45) hide show
  1. package/dist/index.esm.js +1576 -38
  2. package/dist/index.umd.js +1 -0
  3. package/package.json +4 -6
  4. package/dist/_tslib.esm.js +0 -33
  5. package/dist/animation.esm.js +0 -1364
  6. package/dist/assetCache.esm.js +0 -6
  7. package/dist/assetCache.esm2.js +0 -825
  8. package/dist/blurPostProcess.esm.js +0 -327
  9. package/dist/bumpVertex.esm.js +0 -497
  10. package/dist/compatibilityOptions.esm.js +0 -68
  11. package/dist/configuration.esm.js +0 -121
  12. package/dist/core.esm.js +0 -8135
  13. package/dist/dynamicTexture.esm.js +0 -105
  14. package/dist/dynamicTexture.esm2.js +0 -238
  15. package/dist/easing.esm.js +0 -130
  16. package/dist/effectFallbacks.esm.js +0 -378
  17. package/dist/engine.esm.js +0 -25504
  18. package/dist/glbLoaderExtensions.esm.js +0 -690
  19. package/dist/glowLayer.esm.js +0 -1621
  20. package/dist/glowLayerManager.esm.js +0 -50
  21. package/dist/guid.esm.js +0 -21
  22. package/dist/hdrFilteringFunctions.esm.js +0 -816
  23. package/dist/helperFunctions.esm.js +0 -5145
  24. package/dist/material.esm.js +0 -115
  25. package/dist/material.esm2.js +0 -5245
  26. package/dist/math.axis.esm.js +0 -35
  27. package/dist/math.color.esm.js +0 -1661
  28. package/dist/math.path.esm.js +0 -15
  29. package/dist/math.size.esm.js +0 -137
  30. package/dist/mesh.esm.js +0 -11170
  31. package/dist/modelContainer.esm.js +0 -1895
  32. package/dist/node.esm.js +0 -795
  33. package/dist/pbrBRDFFunctions.esm.js +0 -124
  34. package/dist/pbrMaterial.esm.js +8 -8739
  35. package/dist/productAnimations.esm.js +0 -182
  36. package/dist/productCamera.esm.js +0 -14
  37. package/dist/productCamera.esm2.js +0 -3870
  38. package/dist/renderConstants.esm.js +0 -116
  39. package/dist/renderingPipeline.esm.js +0 -18
  40. package/dist/renderingPipeline.esm2.js +1 -3594
  41. package/dist/sceneLoaderFlags.esm.js +0 -51
  42. package/dist/types.esm.js +0 -30
  43. package/dist/variants.esm.js +0 -16
  44. package/dist/variants.esm2.js +0 -3097
  45. package/dist/webRequest.esm.js +0 -7777
@@ -1,690 +0,0 @@
1
- import { _ as __awaiter } from './_tslib.esm.js';
2
- import { P as PBRMaterial } from './pbrMaterial.esm.js';
3
- import { M as Matrix, V as Vector3, b as Vector2, _ as __decorate, c as serializeAsMeshReference, d as serializeAsVector3, S as SerializationHelper } from './webRequest.esm.js';
4
- import { T as Texture, R as RenderTargetTexture } from './helperFunctions.esm.js';
5
- import { B as BlurPostProcess } from './blurPostProcess.esm.js';
6
- import { P as Plane, A as AbstractScene } from './engine.esm.js';
7
- import './math.axis.esm.js';
8
- import { a as Color3 } from './math.color.esm.js';
9
- import './math.path.esm.js';
10
- import { RenderingConfiguration, REFLECTION_PROBE_RESOLUTION } from './renderConstants.esm.js';
11
- import './hdrFilteringFunctions.esm.js';
12
- import './pbrBRDFFunctions.esm.js';
13
- import './material.esm2.js';
14
- import './node.esm.js';
15
- import './compatibilityOptions.esm.js';
16
- import './bumpVertex.esm.js';
17
- import './effectFallbacks.esm.js';
18
- import './math.size.esm.js';
19
-
20
/**
 * Mirror texture can be used to simulate the view from a mirror in a scene.
 * It will dynamically be rendered every frame to adapt to the camera point of view.
 * You can then easily use it as a reflectionTexture on a flat surface.
 * In case the surface is not a plane, please consider relying on reflection probes.
 * @see https://doc.babylonjs.com/features/featuresDeepDive/materials/using/reflectionTexture#mirrortexture
 */
class MirrorTexture extends RenderTargetTexture {
    /**
     * Define the blur ratio used to blur the reflection if needed.
     */
    set blurRatio(value) {
        if (this._blurRatio === value) {
            return;
        }
        this._blurRatio = value;
        // The blur post processes are sized from the ratio, so rebuild them.
        this._preparePostProcesses();
    }
    get blurRatio() {
        return this._blurRatio;
    }
    /**
     * Define the adaptive blur kernel used to blur the reflection if needed.
     * This will autocompute the closest best match for the `blurKernel`
     */
    set adaptiveBlurKernel(value) {
        this._adaptiveBlurKernel = value;
        this._autoComputeBlurKernel();
    }
    /**
     * Define the blur kernel used to blur the reflection if needed.
     * Please consider using `adaptiveBlurKernel` as it could find the closest best value for you.
     */
    set blurKernel(value) {
        // Convenience setter: applies the same kernel on both axes.
        this.blurKernelX = value;
        this.blurKernelY = value;
    }
    /**
     * Define the blur kernel on the X Axis used to blur the reflection if needed.
     * Please consider using `adaptiveBlurKernel` as it could find the closest best value for you.
     */
    set blurKernelX(value) {
        if (this._blurKernelX === value) {
            return;
        }
        this._blurKernelX = value;
        this._preparePostProcesses();
    }
    get blurKernelX() {
        return this._blurKernelX;
    }
    /**
     * Define the blur kernel on the Y Axis used to blur the reflection if needed.
     * Please consider using `adaptiveBlurKernel` as it could find the closest best value for you.
     */
    set blurKernelY(value) {
        if (this._blurKernelY === value) {
            return;
        }
        this._blurKernelY = value;
        this._preparePostProcesses();
    }
    get blurKernelY() {
        return this._blurKernelY;
    }
    // Scales the adaptive kernel by the ratio between this texture's render
    // size and the engine's render size, per axis.
    _autoComputeBlurKernel() {
        const engine = this.getScene().getEngine();
        const dw = this.getRenderWidth() / engine.getRenderWidth();
        const dh = this.getRenderHeight() / engine.getRenderHeight();
        this.blurKernelX = this._adaptiveBlurKernel * dw;
        this.blurKernelY = this._adaptiveBlurKernel * dh;
    }
    // Called when the engine render size changes and this texture uses a size
    // ratio: resize, then recompute blur (adaptively or by rebuilding).
    _onRatioRescale() {
        if (this._sizeRatio) {
            this.resize(this._initialSizeParameter);
            if (!this._adaptiveBlurKernel) {
                this._preparePostProcesses();
            }
        }
        if (this._adaptiveBlurKernel) {
            this._autoComputeBlurKernel();
        }
    }
    // Keeps gammaSpace in sync with the scene's image processing configuration.
    _updateGammaSpace() {
        const scene = this.getScene();
        if (!scene) {
            return;
        }
        this.gammaSpace = !scene.imageProcessingConfiguration.isEnabled || !scene.imageProcessingConfiguration.applyByPostProcess;
    }
    /**
     * Instantiates a Mirror Texture.
     * Mirror texture can be used to simulate the view from a mirror in a scene.
     * It will dynamically be rendered every frame to adapt to the camera point of view.
     * You can then easily use it as a reflectionTexture on a flat surface.
     * In case the surface is not a plane, please consider relying on reflection probes.
     * @see https://doc.babylonjs.com/features/featuresDeepDive/materials/using/reflectionTexture#mirrors
     * @param name name of the texture
     * @param size size of the render target texture
     * @param scene hosting scene
     * @param generateMipMaps true to generate mip maps
     * @param type texture type (0 by default)
     * @param samplingMode sampling mode (bilinear by default)
     * @param generateDepthBuffer true to generate a depth buffer (default)
     */
    constructor(name, size, scene, generateMipMaps, type = 0, samplingMode = Texture.BILINEAR_SAMPLINGMODE, generateDepthBuffer = true) {
        super(name, size, scene, generateMipMaps, true, type, false, samplingMode, generateDepthBuffer);
        /**
         * Define the reflection plane we want to use. The mirrorPlane is usually set to the constructed reflector.
         * It is possible to directly set the mirrorPlane by directly using a Plane(a, b, c, d) where a, b and c give the plane normal vector (a, b, c) and d is a scalar displacement from the mirrorPlane to the origin. However in all but the very simplest of situations it is more straight forward to set it to the reflector as stated in the doc.
         * @see https://doc.babylonjs.com/features/featuresDeepDive/materials/using/reflectionTexture#mirrors
         */
        this.mirrorPlane = new Plane(0, 1, 0, 1);
        this._transformMatrix = Matrix.Zero();
        this._mirrorMatrix = Matrix.Zero();
        this._adaptiveBlurKernel = 0;
        this._blurKernelX = 0;
        this._blurKernelY = 0;
        this._blurRatio = 1.0;
        // Re-read the scene from the base class: the super call may have
        // normalized/stored it, and it can legitimately be null here.
        scene = this.getScene();
        if (!scene) {
            return this;
        }
        this.ignoreCameraViewport = true;
        this._updateGammaSpace();
        this._imageProcessingConfigChangeObserver = scene.imageProcessingConfiguration.onUpdateParameters.add(() => {
            this._updateGammaSpace();
        });
        const engine = scene.getEngine();
        if (engine.supportsUniformBuffers) {
            // Dedicated scene UBO so mirror rendering does not clobber the main pass.
            this._sceneUBO = scene.createSceneUniformBuffer(`Scene for Mirror Texture (name "${name}")`);
        }
        this.onBeforeBindObservable.add(() => {
            var _a;
            // Optional GPU debug marker (transpiled `engine._debugPushGroup?.(...)`).
            (_a = engine._debugPushGroup) === null || _a === void 0 ? void 0 : _a.call(engine, `mirror generation for ${name}`, 1);
        });
        this.onAfterUnbindObservable.add(() => {
            var _a;
            (_a = engine._debugPopGroup) === null || _a === void 0 ? void 0 : _a.call(engine, 1);
        });
        let saveClipPlane;
        this.onBeforeRenderObservable.add(() => {
            if (this._sceneUBO) {
                // Swap in our UBO; the previous one is restored after rendering.
                this._currentSceneUBO = scene.getSceneUniformBuffer();
                scene.setSceneUniformBuffer(this._sceneUBO);
                scene.getSceneUniformBuffer().unbindEffect();
            }
            // Render the scene through the mirror: reflect the view matrix across
            // the mirror plane and clip geometry behind the mirror surface.
            Matrix.ReflectionToRef(this.mirrorPlane, this._mirrorMatrix);
            this._mirrorMatrix.multiplyToRef(scene.getViewMatrix(), this._transformMatrix);
            scene.setTransformMatrix(this._transformMatrix, scene.getProjectionMatrix());
            saveClipPlane = scene.clipPlane;
            scene.clipPlane = this.mirrorPlane;
            scene._mirroredCameraPosition = Vector3.TransformCoordinates(scene.activeCamera.globalPosition, this._mirrorMatrix);
        });
        this.onAfterRenderObservable.add(() => {
            // Restore the scene state saved in onBeforeRenderObservable.
            if (this._sceneUBO) {
                scene.setSceneUniformBuffer(this._currentSceneUBO);
            }
            scene.updateTransformMatrix();
            scene._mirroredCameraPosition = null;
            scene.clipPlane = saveClipPlane;
        });
    }
    // (Re)creates the X/Y blur post processes, or tears them down when no blur
    // kernel is configured.
    _preparePostProcesses() {
        this.clearPostProcesses(true);
        if (this._blurKernelX && this._blurKernelY) {
            const engine = this.getScene().getEngine();
            // Prefer float render targets when filterable, otherwise half float.
            const textureType = engine.getCaps().textureFloatRender && engine.getCaps().textureFloatLinearFiltering ? 1 : 2;
            this._blurX = new BlurPostProcess("horizontal blur", new Vector2(1.0, 0), this._blurKernelX, this._blurRatio, null, Texture.BILINEAR_SAMPLINGMODE, engine, false, textureType);
            this._blurX.autoClear = false;
            if (this._blurRatio === 1 && this.samples < 2 && this._texture) {
                // Full-resolution, non-MSAA: blur straight from our render target.
                this._blurX.inputTexture = this._renderTarget;
            }
            else {
                this._blurX.alwaysForcePOT = true;
            }
            this._blurY = new BlurPostProcess("vertical blur", new Vector2(0, 1.0), this._blurKernelY, this._blurRatio, null, Texture.BILINEAR_SAMPLINGMODE, engine, false, textureType);
            this._blurY.autoClear = false;
            this._blurY.alwaysForcePOT = this._blurRatio !== 1;
            this.addPostProcess(this._blurX);
            this.addPostProcess(this._blurY);
        }
        else {
            if (this._blurY) {
                this.removePostProcess(this._blurY);
                this._blurY.dispose();
                this._blurY = null;
            }
            if (this._blurX) {
                this.removePostProcess(this._blurX);
                this._blurX.dispose();
                this._blurX = null;
            }
        }
    }
    /**
     * Clone the mirror texture.
     * @returns the cloned texture
     */
    clone() {
        const scene = this.getScene();
        if (!scene) {
            return this;
        }
        const textureSize = this.getSize();
        const newTexture = new MirrorTexture(this.name, textureSize.width, scene, this._renderTargetOptions.generateMipMaps, this._renderTargetOptions.type, this._renderTargetOptions.samplingMode, this._renderTargetOptions.generateDepthBuffer);
        // Base texture
        newTexture.hasAlpha = this.hasAlpha;
        newTexture.level = this.level;
        // Mirror Texture
        newTexture.mirrorPlane = this.mirrorPlane.clone();
        if (this.renderList) {
            newTexture.renderList = this.renderList.slice(0);
        }
        return newTexture;
    }
    /**
     * Serialize the texture to a JSON representation you could use in Parse later on
     * @returns the serialized JSON representation (null when the texture is unnamed)
     */
    serialize() {
        if (!this.name) {
            return null;
        }
        const serializationObject = super.serialize();
        serializationObject.mirrorPlane = this.mirrorPlane.asArray();
        return serializationObject;
    }
    /**
     * Dispose the texture and release its associated resources.
     */
    dispose() {
        var _a;
        super.dispose();
        const scene = this.getScene();
        if (scene) {
            // Detach the image-processing observer registered in the constructor.
            scene.imageProcessingConfiguration.onUpdateParameters.remove(this._imageProcessingConfigChangeObserver);
        }
        (_a = this._sceneUBO) === null || _a === void 0 ? void 0 : _a.dispose();
    }
}
261
// Factory hook used by the Texture class to create mirror textures without a
// hard dependency on MirrorTexture at module-evaluation time.
Texture._CreateMirror = (name, renderTargetSize, scene, generateMipMaps) =>
    new MirrorTexture(name, renderTargetSize, scene, generateMipMaps);
264
-
265
/**
 * Removes the given reflection probe from this scene.
 * @param toRemove the reflection probe to remove
 * @returns the index the probe occupied, or -1 if it was not found (or the
 * scene has no probe list yet)
 */
AbstractScene.prototype.removeReflectionProbe = function (toRemove) {
    const probes = this.reflectionProbes;
    if (!probes) {
        return -1;
    }
    const position = probes.indexOf(toRemove);
    if (position !== -1) {
        probes.splice(position, 1);
    }
    return position;
};
275
/**
 * Adds the given reflection probe to this scene, lazily creating the probe
 * list on first use.
 * @param newReflectionProbe the reflection probe to add
 */
AbstractScene.prototype.addReflectionProbe = function (newReflectionProbe) {
    const probes = this.reflectionProbes || (this.reflectionProbes = []);
    probes.push(newReflectionProbe);
};
281
/**
 * Class used to generate realtime reflection / refraction cube textures
 * @see https://doc.babylonjs.com/features/featuresDeepDive/environment/reflectionProbes
 */
class ReflectionProbe {
    /**
     * Creates a new reflection probe
     * @param name defines the name of the probe
     * @param size defines the texture resolution (for each face)
     * @param scene defines the hosting scene
     * @param generateMipMaps defines if mip maps should be generated automatically (true by default)
     * @param useFloat defines if HDR data (float data) should be used to store colors (false by default)
     * @param linearSpace defines if the probe should be generated in linear space or not (false by default)
     */
    constructor(
    /** defines the name of the probe */
    name, size, scene, generateMipMaps = true, useFloat = false, linearSpace = false) {
        this.name = name;
        this._viewMatrix = Matrix.Identity();
        this._target = Vector3.Zero();
        // Per-face look direction, filled in before each face render.
        this._add = Vector3.Zero();
        this._invertYAxis = false;
        /** Gets or sets probe position (center of the cube map) */
        this.position = Vector3.Zero();
        /**
         * Gets or sets an object used to store user defined information for the reflection probe.
         */
        this.metadata = null;
        /** @internal */
        this._parentContainer = null;
        this._scene = scene;
        if (scene.getEngine().supportsUniformBuffers) {
            // One scene UBO per cube face so face renders don't share state.
            this._sceneUBOs = [];
            for (let i = 0; i < 6; ++i) {
                this._sceneUBOs.push(scene.createSceneUniformBuffer(`Scene for Reflection Probe (name "${name}") face #${i}`));
            }
        }
        // Create the scene field if not exist.
        if (!this._scene.reflectionProbes) {
            this._scene.reflectionProbes = new Array();
        }
        this._scene.reflectionProbes.push(this);
        let textureType = 0;
        if (useFloat) {
            // Prefer half float when the hardware can render to it; fall back
            // to full float, then to the default (0).
            const caps = this._scene.getEngine().getCaps();
            if (caps.textureHalfFloatRender) {
                textureType = 2;
            }
            else if (caps.textureFloatRender) {
                textureType = 1;
            }
        }
        this._renderTargetTexture = new RenderTargetTexture(name, size, scene, generateMipMaps, true, textureType, true);
        this._renderTargetTexture.gammaSpace = !linearSpace;
        this._renderTargetTexture.invertZ = scene.useRightHandedSystem;
        const useReverseDepthBuffer = scene.getEngine().useReverseDepthBuffer;
        this._renderTargetTexture.onBeforeRenderObservable.add((faceIndex) => {
            if (this._sceneUBOs) {
                scene.setSceneUniformBuffer(this._sceneUBOs[faceIndex]);
                scene.getSceneUniformBuffer().unbindEffect();
            }
            // Pick the cube-face look direction: +X, -X, ±Y, ±Z (Y and Z signs
            // depend on _invertYAxis / handedness).
            switch (faceIndex) {
                case 0:
                    this._add.copyFromFloats(1, 0, 0);
                    break;
                case 1:
                    this._add.copyFromFloats(-1, 0, 0);
                    break;
                case 2:
                    this._add.copyFromFloats(0, this._invertYAxis ? 1 : -1, 0);
                    break;
                case 3:
                    this._add.copyFromFloats(0, this._invertYAxis ? -1 : 1, 0);
                    break;
                case 4:
                    this._add.copyFromFloats(0, 0, scene.useRightHandedSystem ? -1 : 1);
                    break;
                case 5:
                    this._add.copyFromFloats(0, 0, scene.useRightHandedSystem ? 1 : -1);
                    break;
            }
            if (this._attachedMesh) {
                // Follow the attached mesh so the probe renders from its location.
                this.position.copyFrom(this._attachedMesh.getAbsolutePosition());
            }
            this.position.addToRef(this._add, this._target);
            const lookAtFunction = scene.useRightHandedSystem ? Matrix.LookAtRHToRef : Matrix.LookAtLHToRef;
            const perspectiveFunction = scene.useRightHandedSystem ? Matrix.PerspectiveFovRH : Matrix.PerspectiveFovLH;
            lookAtFunction(this.position, this._target, Vector3.Up(), this._viewMatrix);
            if (scene.activeCamera) {
                // 90° FOV, square aspect; near/far are swapped when the engine
                // uses a reverse depth buffer.
                this._projectionMatrix = perspectiveFunction(Math.PI / 2, 1, useReverseDepthBuffer ? scene.activeCamera.maxZ : scene.activeCamera.minZ, useReverseDepthBuffer ? scene.activeCamera.minZ : scene.activeCamera.maxZ, this._scene.getEngine().isNDCHalfZRange);
                scene.setTransformMatrix(this._viewMatrix, this._projectionMatrix);
                if (scene.activeCamera.isRigCamera && !this._renderTargetTexture.activeCamera) {
                    this._renderTargetTexture.activeCamera = scene.activeCamera.rigParent || null;
                }
            }
            scene._forcedViewPosition = this.position;
        });
        let currentApplyByPostProcess;
        this._renderTargetTexture.onBeforeBindObservable.add(() => {
            var _a, _b;
            this._currentSceneUBO = scene.getSceneUniformBuffer();
            // Optional GPU debug marker (transpiled optional call).
            (_b = (_a = scene.getEngine())._debugPushGroup) === null || _b === void 0 ? void 0 : _b.call(_a, `reflection probe generation for ${name}`, 1);
            currentApplyByPostProcess = this._scene.imageProcessingConfiguration.applyByPostProcess;
            if (linearSpace) {
                scene.imageProcessingConfiguration.applyByPostProcess = true;
            }
        });
        this._renderTargetTexture.onAfterUnbindObservable.add(() => {
            var _a, _b;
            // Restore everything touched during the probe render.
            scene.imageProcessingConfiguration.applyByPostProcess = currentApplyByPostProcess;
            scene._forcedViewPosition = null;
            if (this._sceneUBOs) {
                scene.setSceneUniformBuffer(this._currentSceneUBO);
            }
            scene.updateTransformMatrix(true);
            (_b = (_a = scene.getEngine())._debugPopGroup) === null || _b === void 0 ? void 0 : _b.call(_a, 1);
        });
    }
    /** Gets or sets the number of samples to use for multi-sampling (0 by default). Required WebGL2 */
    get samples() {
        return this._renderTargetTexture.samples;
    }
    set samples(value) {
        this._renderTargetTexture.samples = value;
    }
    /** Gets or sets the refresh rate to use (on every frame by default) */
    get refreshRate() {
        return this._renderTargetTexture.refreshRate;
    }
    set refreshRate(value) {
        this._renderTargetTexture.refreshRate = value;
    }
    /**
     * Gets the hosting scene
     * @returns a Scene
     */
    getScene() {
        return this._scene;
    }
    /** Gets the internal CubeTexture used to render to */
    get cubeTexture() {
        return this._renderTargetTexture;
    }
    /** Gets the list of meshes to render */
    get renderList() {
        return this._renderTargetTexture.renderList;
    }
    /**
     * Attach the probe to a specific mesh (Rendering will be done from attached mesh's position)
     * @param mesh defines the mesh to attach to
     */
    attachToMesh(mesh) {
        this._attachedMesh = mesh;
    }
    /**
     * Specifies whether or not the stencil and depth buffer are cleared between two rendering groups
     * @param renderingGroupId The rendering group id corresponding to its index
     * @param autoClearDepthStencil Automatically clears depth and stencil between groups if true.
     */
    setRenderingAutoClearDepthStencil(renderingGroupId, autoClearDepthStencil) {
        this._renderTargetTexture.setRenderingAutoClearDepthStencil(renderingGroupId, autoClearDepthStencil);
    }
    /**
     * Clean all associated resources
     */
    dispose() {
        const index = this._scene.reflectionProbes.indexOf(this);
        if (index !== -1) {
            // Remove from the scene if found
            this._scene.reflectionProbes.splice(index, 1);
        }
        if (this._parentContainer) {
            const index = this._parentContainer.reflectionProbes.indexOf(this);
            if (index > -1) {
                this._parentContainer.reflectionProbes.splice(index, 1);
            }
            this._parentContainer = null;
        }
        if (this._renderTargetTexture) {
            this._renderTargetTexture.dispose();
            this._renderTargetTexture = null;
        }
        if (this._sceneUBOs) {
            for (const ubo of this._sceneUBOs) {
                ubo.dispose();
            }
            this._sceneUBOs = [];
        }
    }
    /**
     * Converts the reflection probe information to a readable string for debug purpose.
     * @param fullDetails Supports for multiple levels of logging within scene loading
     * @returns the human readable reflection probe info
     */
    toString(fullDetails) {
        let ret = "Name: " + this.name;
        if (fullDetails) {
            ret += ", position: " + this.position.toString();
            if (this._attachedMesh) {
                ret += ", attached mesh: " + this._attachedMesh.name;
            }
        }
        return ret;
    }
    /**
     * Get the class name of the reflection probe.
     * @returns "ReflectionProbe"
     */
    getClassName() {
        return "ReflectionProbe";
    }
    /**
     * Serialize the reflection probe to a JSON representation we can easily use in the respective Parse function.
     * @returns The JSON representation of the texture
     */
    serialize() {
        const serializationObject = SerializationHelper.Serialize(this, this._renderTargetTexture.serialize());
        serializationObject.isReflectionProbe = true;
        serializationObject.metadata = this.metadata;
        return serializationObject;
    }
    /**
     * Parse the JSON representation of a reflection probe in order to recreate the reflection probe in the given scene.
     * @param parsedReflectionProbe Define the JSON representation of the reflection probe
     * @param scene Define the scene the parsed reflection probe should be instantiated in
     * @param rootUrl Define the root url of the parsing sequence in the case of relative dependencies
     * @returns The parsed reflection probe if successful
     */
    static Parse(parsedReflectionProbe, scene, rootUrl) {
        let reflectionProbe = null;
        // Reuse an existing probe with the same name instead of duplicating it.
        if (scene.reflectionProbes) {
            for (let index = 0; index < scene.reflectionProbes.length; index++) {
                const rp = scene.reflectionProbes[index];
                if (rp.name === parsedReflectionProbe.name) {
                    reflectionProbe = rp;
                    break;
                }
            }
        }
        reflectionProbe = SerializationHelper.Parse(() => reflectionProbe || new ReflectionProbe(parsedReflectionProbe.name, parsedReflectionProbe.renderTargetSize, scene, parsedReflectionProbe._generateMipMaps), parsedReflectionProbe, scene, rootUrl);
        // Render list is resolved later, once the referenced meshes exist.
        reflectionProbe.cubeTexture._waitingRenderList = parsedReflectionProbe.renderList;
        if (parsedReflectionProbe._attachedMesh) {
            reflectionProbe.attachToMesh(scene.getMeshById(parsedReflectionProbe._attachedMesh));
        }
        if (parsedReflectionProbe.metadata) {
            reflectionProbe.metadata = parsedReflectionProbe.metadata;
        }
        return reflectionProbe;
    }
}
531
// Serialization metadata for ReflectionProbe: persist the attached mesh as a
// mesh reference and the probe position as a Vector3 (used by SerializationHelper).
__decorate([
    serializeAsMeshReference()
], ReflectionProbe.prototype, "_attachedMesh", void 0);
__decorate([
    serializeAsVector3()
], ReflectionProbe.prototype, "position", void 0);
537
-
538
/**
 * We extend the GLTF loader with our own custom logic. This allows us to read nodes
 * and materials as they load in and parse any additional metadata set in the glb file that
 * babylon would ignore otherwise. eg. metadata set by Blender.
 */
class GLBLoaderExtension {
    /**
     * @param loader The GLTF loader this extension is registered on.
     */
    constructor(loader) {
        this.name = 'glbPostProcessor';
        this.enabled = true;
        this.loader = loader;
    }
    /** Called once the whole glTF is loaded; wires up scene reflections. */
    onReady() {
        this.applyReflections(this.loader.babylonScene);
    }
    /**
     * Any custom properties set in blender on transform nodes will appear here. We can
     * write the properties into the object metadata for retrieval at runtime.
     * @param context The loading context
     * @param node The glTF node descriptor being loaded
     * @param assign Callback invoked with the created babylon mesh
     * @returns The loader's node promise
     */
    loadNodeAsync(context, node, assign) {
        return this.loader.loadNodeAsync(context, node, function (babylonMesh) {
            if (node.extras) {
                // Robustness fix: the loader may hand us a mesh whose metadata
                // object has not been initialized yet.
                if (!babylonMesh.metadata) {
                    babylonMesh.metadata = {};
                }
                Object.keys(node.extras).forEach((key) => {
                    babylonMesh.metadata[key] = node.extras[key];
                });
            }
            assign(babylonMesh);
        });
    }
    /**
     * Material properties can be set at load time as we don't depend
     * on the entire scene graph being loaded (for reflection render lists).
     * The need for these properties may go over time as the relevant exporters/importers are updated.
     * @param context The loading context
     * @param material The glTF material descriptor
     * @param babylonMaterial The instantiated babylon material
     */
    loadMaterialPropertiesAsync(context, material, babylonMaterial) {
        return __awaiter(this, void 0, void 0, function* () {
            yield this.loader.loadMaterialPropertiesAsync(context, material, babylonMaterial);
            this.enableMaterialExtrasIfRequired(material, babylonMaterial);
            // Depth pre-pass is forced on every loaded material.
            babylonMaterial.needDepthPrePass = true;
        });
    }
    dispose() {
        // Nothing to dispose, we have to implement this however..
    }
    /**
     * With a given material descriptor and babylon material instance, looks for any
     * metadata defined on the descriptor and applies to the babylon material.
     * @param material A material descriptor
     * @param babylonMaterial An instance of a material representation in babylon.
     */
    enableMaterialExtrasIfRequired(material, babylonMaterial) {
        if (!material.extras || !(babylonMaterial instanceof PBRMaterial)) {
            return;
        }
        const extras = material.extras;
        const mat = babylonMaterial;
        if (extras.sheen) {
            // The extras value doubles as the sheen intensity.
            mat.sheen.isEnabled = true;
            mat.sheen.intensity = extras.sheen;
        }
        if (extras.translucency) {
            mat.subSurface.isTranslucencyEnabled = true;
            mat.subSurface.translucencyIntensity = extras.translucency;
            if (extras.translucencyR && extras.translucencyG && extras.translucencyB) {
                mat.subSurface.tintColor = new Color3(extras.translucencyR, extras.translucencyG, extras.translucencyB);
            }
        }
        if (extras.refractionIOR) {
            mat.subSurface.isRefractionEnabled = true;
            mat.subSurface.volumeIndexOfRefraction = extras.refractionIOR;
        }
        if (extras.useDepthPrePass) {
            mat.needDepthPrePass = true;
            mat.forceIrradianceInFragment = true; // https://forum.babylonjs.com/t/rendering-artificats-on-safari-ios-14-pbrmaterial-depth-prepass-ibl/15670/5
        }
        if (extras.useParallax) {
            mat.useParallax = true;
            mat.useParallaxOcclusion = true;
            // The extras value doubles as the parallax scale bias.
            mat.parallaxScaleBias = extras.useParallax;
        }
    }
    /**
     * Called with a loaded scene. Will finish initialization
     * of any reflective properties within the scene.
     * @param scene The scene to parse.
     */
    applyReflections(scene) {
        // Collects every mesh tagged reflective, either directly or via a
        // reflective transform node ancestor.
        function getReflectiveMeshes(scene) {
            const reflectiveMeshes = [];
            scene.transformNodes.forEach((node) => {
                if (node.metadata && node.metadata.reflective) {
                    reflectiveMeshes.push(...node.getChildMeshes());
                }
            });
            scene.meshes.forEach((mesh) => {
                if (mesh.metadata &&
                    mesh.metadata.reflective &&
                    !reflectiveMeshes.includes(mesh)) {
                    reflectiveMeshes.push(mesh);
                }
            });
            return reflectiveMeshes;
        }
        // Builds a planar MirrorTexture from the mesh's first vertex normal and
        // assigns it as the material's reflection texture.
        function buildMirrorTexture(mesh, reflectivity = 1) {
            const mat = mesh.material;
            if (!mat) {
                return;
            }
            // Create a new mirror texture
            const mirrorTex = new MirrorTexture('mirror', RenderingConfiguration.getMirrorTextureResolution(), scene, true);
            mirrorTex.renderList = getReflectiveMeshes(scene);
            // Calculate reflectance plane for mirror based on target mesh transform
            const targetVertices = mesh.getVerticesData('normal');
            if (!targetVertices) {
                // Fix: the original message concatenated the mesh name straight
                // into the next sentence ("<name>But no normals...").
                throw new Error('Mirror attribute specified on: ' +
                    mesh.name +
                    '. But no normals exist to generate a mirror from!');
            }
            mesh.computeWorldMatrix(true);
            const worldMatrix = mesh.getWorldMatrix();
            const normal = Vector3.TransformNormal(new Vector3(targetVertices[0], targetVertices[1], targetVertices[2]), worldMatrix).normalize();
            const reflector = Plane.FromPositionAndNormal(mesh.position, normal.scale(-1));
            mirrorTex.mirrorPlane = reflector;
            mirrorTex.level = reflectivity;
            mat.reflectionTexture = mirrorTex;
        }
        // Builds a ReflectionProbe attached to the mesh and assigns its cube
        // texture as the material's reflection texture.
        function buildReflectionProbe(mesh) {
            const mat = mesh.material;
            // Robustness fix: guard against meshes without a material, matching
            // the guard in buildMirrorTexture (previously this threw on mat.name).
            if (!mat) {
                return;
            }
            const probe = new ReflectionProbe('probe-' + mat.name, REFLECTION_PROBE_RESOLUTION, scene);
            probe.attachToMesh(mesh);
            probe.renderList && probe.renderList.push(...getReflectiveMeshes(scene));
            mat.reflectionTexture = probe.cubeTexture;
        }
        scene.meshes.forEach((mesh) => {
            const tags = mesh.metadata;
            if (!tags) {
                return;
            }
            if (tags.mirrorTexture) {
                buildMirrorTexture(mesh, tags.mirrorTexture);
            }
            if (tags.reflectionProbe) {
                buildReflectionProbe(mesh);
            }
        });
    }
}
689
-
690
- export { GLBLoaderExtension };