@spiffcommerce/preview 3.6.2-rc.8 → 4.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.esm.js +1576 -38
- package/dist/index.umd.js +1 -0
- package/package.json +4 -6
- package/dist/_tslib.esm.js +0 -33
- package/dist/animation.esm.js +0 -1364
- package/dist/assetCache.esm.js +0 -6
- package/dist/assetCache.esm2.js +0 -825
- package/dist/blurPostProcess.esm.js +0 -327
- package/dist/bumpVertex.esm.js +0 -497
- package/dist/compatibilityOptions.esm.js +0 -68
- package/dist/configuration.esm.js +0 -121
- package/dist/core.esm.js +0 -8135
- package/dist/dynamicTexture.esm.js +0 -105
- package/dist/dynamicTexture.esm2.js +0 -238
- package/dist/easing.esm.js +0 -130
- package/dist/effectFallbacks.esm.js +0 -378
- package/dist/engine.esm.js +0 -25504
- package/dist/glbLoaderExtensions.esm.js +0 -690
- package/dist/glowLayer.esm.js +0 -1621
- package/dist/glowLayerManager.esm.js +0 -50
- package/dist/guid.esm.js +0 -21
- package/dist/hdrFilteringFunctions.esm.js +0 -816
- package/dist/helperFunctions.esm.js +0 -5145
- package/dist/material.esm.js +0 -115
- package/dist/material.esm2.js +0 -5245
- package/dist/math.axis.esm.js +0 -35
- package/dist/math.color.esm.js +0 -1661
- package/dist/math.path.esm.js +0 -15
- package/dist/math.size.esm.js +0 -137
- package/dist/mesh.esm.js +0 -11170
- package/dist/modelContainer.esm.js +0 -1895
- package/dist/node.esm.js +0 -795
- package/dist/pbrBRDFFunctions.esm.js +0 -124
- package/dist/pbrMaterial.esm.js +8 -8739
- package/dist/productAnimations.esm.js +0 -182
- package/dist/productCamera.esm.js +0 -14
- package/dist/productCamera.esm2.js +0 -3870
- package/dist/renderConstants.esm.js +0 -116
- package/dist/renderingPipeline.esm.js +0 -18
- package/dist/renderingPipeline.esm2.js +1 -3594
- package/dist/sceneLoaderFlags.esm.js +0 -51
- package/dist/types.esm.js +0 -30
- package/dist/variants.esm.js +0 -16
- package/dist/variants.esm2.js +0 -3097
- package/dist/webRequest.esm.js +0 -7777
|
@@ -1,11394 +0,0 @@
|
|
|
1
|
-
|
|
2
|
-
import { A as ArrayTools, R as RegisterClass, b as ToGammaSpace, C as Color4, a as Color3, S as Scalar } from './math.color.esm.js';
|
|
3
|
-
import { V as Vector3, Q as Quaternion, M as Matrix, T as TmpVectors, _ as __decorate, s as serialize, S as SerializationHelper, E as EngineStore, b as Vector2, O as Observable, a as _WarnImport, l as serializeAsTexture, e as Vector4 } from './webRequest.esm.js';
|
|
4
|
-
import { P as PostProcess, T as Texture, R as RenderTargetTexture } from './helperFunctions.esm.js';
|
|
5
|
-
import { G as GlowLayer } from './glowLayer.esm.js';
|
|
6
|
-
import { M as MaterialFlags } from './bumpVertex.esm.js';
|
|
7
|
-
import { B as BlurPostProcess } from './blurPostProcess.esm.js';
|
|
8
|
-
import { D as DynamicTexture } from './dynamicTexture.esm2.js';
|
|
9
|
-
import { C as Camera, b as addClipPlaneUniforms, a as MaterialHelper, p as prepareStringDefinesForClipPlanes, M as Material, c as bindClipPlane } from './material.esm2.js';
|
|
10
|
-
import { P as PassPostProcess } from './pbrBRDFFunctions.esm.js';
|
|
11
|
-
import './effectFallbacks.esm.js';
|
|
12
|
-
import { A as Animation } from './animation.esm.js';
|
|
13
|
-
import { S as Size } from './math.size.esm.js';
|
|
14
|
-
import { N as Node } from './node.esm.js';
|
|
15
|
-
import { S as Space } from './math.axis.esm.js';
|
|
16
|
-
import { renderingPipelineDefaults } from './renderConstants.esm.js';
|
|
17
|
-
|
|
18
|
-
/**
|
|
19
|
-
* Class used to store bone information
|
|
20
|
-
* @see https://doc.babylonjs.com/features/featuresDeepDive/mesh/bonesSkeletons
|
|
21
|
-
*/
|
|
22
|
-
class Bone extends Node {
|
|
23
|
-
/** @internal */
|
|
24
|
-
get _matrix() {
|
|
25
|
-
this._compose();
|
|
26
|
-
return this._localMatrix;
|
|
27
|
-
}
|
|
28
|
-
/** @internal */
|
|
29
|
-
set _matrix(value) {
|
|
30
|
-
// skip if the matrices are the same
|
|
31
|
-
if (value.updateFlag === this._localMatrix.updateFlag && !this._needToCompose) {
|
|
32
|
-
return;
|
|
33
|
-
}
|
|
34
|
-
this._needToCompose = false; // in case there was a pending compose
|
|
35
|
-
this._localMatrix.copyFrom(value);
|
|
36
|
-
this._markAsDirtyAndDecompose();
|
|
37
|
-
}
|
|
38
|
-
/**
|
|
39
|
-
* Create a new bone
|
|
40
|
-
* @param name defines the bone name
|
|
41
|
-
* @param skeleton defines the parent skeleton
|
|
42
|
-
* @param parentBone defines the parent (can be null if the bone is the root)
|
|
43
|
-
* @param localMatrix defines the local matrix (default: identity)
|
|
44
|
-
* @param restMatrix defines the rest matrix (default: localMatrix)
|
|
45
|
-
* @param bindMatrix defines the bind matrix (default: localMatrix)
|
|
46
|
-
* @param index defines index of the bone in the hierarchy (default: null)
|
|
47
|
-
*/
|
|
48
|
-
constructor(
|
|
49
|
-
/**
|
|
50
|
-
* defines the bone name
|
|
51
|
-
*/
|
|
52
|
-
name, skeleton, parentBone = null, localMatrix = null, restMatrix = null, bindMatrix = null, index = null) {
|
|
53
|
-
var _a;
|
|
54
|
-
super(name, skeleton.getScene());
|
|
55
|
-
this.name = name;
|
|
56
|
-
/**
|
|
57
|
-
* Gets the list of child bones
|
|
58
|
-
*/
|
|
59
|
-
this.children = new Array();
|
|
60
|
-
/** Gets the animations associated with this bone */
|
|
61
|
-
this.animations = new Array();
|
|
62
|
-
/**
|
|
63
|
-
* @internal Internal only
|
|
64
|
-
* Set this value to map this bone to a different index in the transform matrices
|
|
65
|
-
* Set this value to -1 to exclude the bone from the transform matrices
|
|
66
|
-
*/
|
|
67
|
-
this._index = null;
|
|
68
|
-
this._scalingDeterminant = 1;
|
|
69
|
-
this._needToDecompose = true;
|
|
70
|
-
this._needToCompose = false;
|
|
71
|
-
/** @internal */
|
|
72
|
-
this._linkedTransformNode = null;
|
|
73
|
-
/** @internal */
|
|
74
|
-
this._waitingTransformNodeId = null;
|
|
75
|
-
this._skeleton = skeleton;
|
|
76
|
-
this._localMatrix = (_a = localMatrix === null || localMatrix === void 0 ? void 0 : localMatrix.clone()) !== null && _a !== void 0 ? _a : Matrix.Identity();
|
|
77
|
-
this._restMatrix = restMatrix !== null && restMatrix !== void 0 ? restMatrix : this._localMatrix.clone();
|
|
78
|
-
this._bindMatrix = bindMatrix !== null && bindMatrix !== void 0 ? bindMatrix : this._localMatrix.clone();
|
|
79
|
-
this._index = index;
|
|
80
|
-
this._absoluteMatrix = new Matrix();
|
|
81
|
-
this._absoluteBindMatrix = new Matrix();
|
|
82
|
-
this._absoluteInverseBindMatrix = new Matrix();
|
|
83
|
-
this._finalMatrix = new Matrix();
|
|
84
|
-
skeleton.bones.push(this);
|
|
85
|
-
this.setParent(parentBone, false);
|
|
86
|
-
this._updateAbsoluteBindMatrices();
|
|
87
|
-
}
|
|
88
|
-
/**
|
|
89
|
-
* Gets the current object class name.
|
|
90
|
-
* @returns the class name
|
|
91
|
-
*/
|
|
92
|
-
getClassName() {
|
|
93
|
-
return "Bone";
|
|
94
|
-
}
|
|
95
|
-
// Members
|
|
96
|
-
/**
|
|
97
|
-
* Gets the parent skeleton
|
|
98
|
-
* @returns a skeleton
|
|
99
|
-
*/
|
|
100
|
-
getSkeleton() {
|
|
101
|
-
return this._skeleton;
|
|
102
|
-
}
|
|
103
|
-
get parent() {
|
|
104
|
-
return this._parentNode;
|
|
105
|
-
}
|
|
106
|
-
/**
|
|
107
|
-
* Gets parent bone
|
|
108
|
-
* @returns a bone or null if the bone is the root of the bone hierarchy
|
|
109
|
-
*/
|
|
110
|
-
getParent() {
|
|
111
|
-
return this.parent;
|
|
112
|
-
}
|
|
113
|
-
/**
|
|
114
|
-
* Returns an array containing the children of the bone
|
|
115
|
-
* @returns an array containing the children of the bone (can be empty if the bone has no children)
|
|
116
|
-
*/
|
|
117
|
-
getChildren() {
|
|
118
|
-
return this.children;
|
|
119
|
-
}
|
|
120
|
-
/**
|
|
121
|
-
* Gets the node index in matrix array generated for rendering
|
|
122
|
-
* @returns the node index
|
|
123
|
-
*/
|
|
124
|
-
getIndex() {
|
|
125
|
-
return this._index === null ? this.getSkeleton().bones.indexOf(this) : this._index;
|
|
126
|
-
}
|
|
127
|
-
set parent(newParent) {
|
|
128
|
-
this.setParent(newParent);
|
|
129
|
-
}
|
|
130
|
-
/**
|
|
131
|
-
* Sets the parent bone
|
|
132
|
-
* @param parent defines the parent (can be null if the bone is the root)
|
|
133
|
-
* @param updateAbsoluteBindMatrices defines if the absolute bind and absolute inverse bind matrices must be updated
|
|
134
|
-
*/
|
|
135
|
-
setParent(parent, updateAbsoluteBindMatrices = true) {
|
|
136
|
-
if (this.parent === parent) {
|
|
137
|
-
return;
|
|
138
|
-
}
|
|
139
|
-
if (this.parent) {
|
|
140
|
-
const index = this.parent.children.indexOf(this);
|
|
141
|
-
if (index !== -1) {
|
|
142
|
-
this.parent.children.splice(index, 1);
|
|
143
|
-
}
|
|
144
|
-
}
|
|
145
|
-
this._parentNode = parent;
|
|
146
|
-
if (this.parent) {
|
|
147
|
-
this.parent.children.push(this);
|
|
148
|
-
}
|
|
149
|
-
if (updateAbsoluteBindMatrices) {
|
|
150
|
-
this._updateAbsoluteBindMatrices();
|
|
151
|
-
}
|
|
152
|
-
this.markAsDirty();
|
|
153
|
-
}
|
|
154
|
-
/**
|
|
155
|
-
* Gets the local matrix
|
|
156
|
-
* @returns the local matrix
|
|
157
|
-
*/
|
|
158
|
-
getLocalMatrix() {
|
|
159
|
-
this._compose();
|
|
160
|
-
return this._localMatrix;
|
|
161
|
-
}
|
|
162
|
-
/**
|
|
163
|
-
* Gets the bind matrix
|
|
164
|
-
* @returns the bind matrix
|
|
165
|
-
*/
|
|
166
|
-
getBindMatrix() {
|
|
167
|
-
return this._bindMatrix;
|
|
168
|
-
}
|
|
169
|
-
/**
|
|
170
|
-
* Gets the bind matrix.
|
|
171
|
-
* @returns the bind matrix
|
|
172
|
-
* @deprecated Please use getBindMatrix instead
|
|
173
|
-
*/
|
|
174
|
-
getBaseMatrix() {
|
|
175
|
-
return this.getBindMatrix();
|
|
176
|
-
}
|
|
177
|
-
/**
|
|
178
|
-
* Gets the rest matrix
|
|
179
|
-
* @returns the rest matrix
|
|
180
|
-
*/
|
|
181
|
-
getRestMatrix() {
|
|
182
|
-
return this._restMatrix;
|
|
183
|
-
}
|
|
184
|
-
/**
|
|
185
|
-
* Gets the rest matrix
|
|
186
|
-
* @returns the rest matrix
|
|
187
|
-
* @deprecated Please use getRestMatrix instead
|
|
188
|
-
*/
|
|
189
|
-
getRestPose() {
|
|
190
|
-
return this.getRestMatrix();
|
|
191
|
-
}
|
|
192
|
-
/**
|
|
193
|
-
* Sets the rest matrix
|
|
194
|
-
* @param matrix the local-space rest matrix to set for this bone
|
|
195
|
-
*/
|
|
196
|
-
setRestMatrix(matrix) {
|
|
197
|
-
this._restMatrix.copyFrom(matrix);
|
|
198
|
-
}
|
|
199
|
-
/**
|
|
200
|
-
* Sets the rest matrix
|
|
201
|
-
* @param matrix the local-space rest to set for this bone
|
|
202
|
-
* @deprecated Please use setRestMatrix instead
|
|
203
|
-
*/
|
|
204
|
-
setRestPose(matrix) {
|
|
205
|
-
this.setRestMatrix(matrix);
|
|
206
|
-
}
|
|
207
|
-
/**
|
|
208
|
-
* Gets the bind matrix
|
|
209
|
-
* @returns the bind matrix
|
|
210
|
-
* @deprecated Please use getBindMatrix instead
|
|
211
|
-
*/
|
|
212
|
-
getBindPose() {
|
|
213
|
-
return this.getBindMatrix();
|
|
214
|
-
}
|
|
215
|
-
/**
|
|
216
|
-
* Sets the bind matrix
|
|
217
|
-
* This will trigger a recomputation of the absolute bind and absolute inverse bind matrices for this bone and its children
|
|
218
|
-
* Note that the local matrix will also be set with the matrix passed in parameter!
|
|
219
|
-
* @param matrix the local-space bind matrix to set for this bone
|
|
220
|
-
*/
|
|
221
|
-
setBindMatrix(matrix) {
|
|
222
|
-
this.updateMatrix(matrix);
|
|
223
|
-
}
|
|
224
|
-
/**
|
|
225
|
-
* Sets the bind matrix
|
|
226
|
-
* @param matrix the local-space bind to set for this bone
|
|
227
|
-
* @deprecated Please use setBindMatrix instead
|
|
228
|
-
*/
|
|
229
|
-
setBindPose(matrix) {
|
|
230
|
-
this.setBindMatrix(matrix);
|
|
231
|
-
}
|
|
232
|
-
/**
|
|
233
|
-
* Gets the matrix used to store the final world transformation of the bone (ie. the matrix sent to shaders)
|
|
234
|
-
*/
|
|
235
|
-
getFinalMatrix() {
|
|
236
|
-
return this._finalMatrix;
|
|
237
|
-
}
|
|
238
|
-
/**
|
|
239
|
-
* Gets the matrix used to store the final world transformation of the bone (ie. the matrix sent to shaders)
|
|
240
|
-
* @deprecated Please use getFinalMatrix instead
|
|
241
|
-
*/
|
|
242
|
-
getWorldMatrix() {
|
|
243
|
-
return this.getFinalMatrix();
|
|
244
|
-
}
|
|
245
|
-
/**
|
|
246
|
-
* Sets the local matrix to the rest matrix
|
|
247
|
-
*/
|
|
248
|
-
returnToRest() {
|
|
249
|
-
var _a;
|
|
250
|
-
if (this._linkedTransformNode) {
|
|
251
|
-
const localScaling = TmpVectors.Vector3[0];
|
|
252
|
-
const localRotation = TmpVectors.Quaternion[0];
|
|
253
|
-
const localPosition = TmpVectors.Vector3[1];
|
|
254
|
-
this.getRestMatrix().decompose(localScaling, localRotation, localPosition);
|
|
255
|
-
this._linkedTransformNode.position.copyFrom(localPosition);
|
|
256
|
-
this._linkedTransformNode.rotationQuaternion = (_a = this._linkedTransformNode.rotationQuaternion) !== null && _a !== void 0 ? _a : Quaternion.Identity();
|
|
257
|
-
this._linkedTransformNode.rotationQuaternion.copyFrom(localRotation);
|
|
258
|
-
this._linkedTransformNode.scaling.copyFrom(localScaling);
|
|
259
|
-
}
|
|
260
|
-
else {
|
|
261
|
-
this._matrix = this._restMatrix;
|
|
262
|
-
}
|
|
263
|
-
}
|
|
264
|
-
/**
|
|
265
|
-
* Gets the inverse of the bind matrix, in world space (relative to the skeleton root)
|
|
266
|
-
* @returns the inverse bind matrix, in world space
|
|
267
|
-
*/
|
|
268
|
-
getAbsoluteInverseBindMatrix() {
|
|
269
|
-
return this._absoluteInverseBindMatrix;
|
|
270
|
-
}
|
|
271
|
-
/**
|
|
272
|
-
* Gets the inverse of the bind matrix, in world space (relative to the skeleton root)
|
|
273
|
-
* @returns the inverse bind matrix, in world space
|
|
274
|
-
* @deprecated Please use getAbsoluteInverseBindMatrix instead
|
|
275
|
-
*/
|
|
276
|
-
getInvertedAbsoluteTransform() {
|
|
277
|
-
return this.getAbsoluteInverseBindMatrix();
|
|
278
|
-
}
|
|
279
|
-
/**
|
|
280
|
-
* Gets the bone matrix, in world space (relative to the skeleton root)
|
|
281
|
-
* @returns the bone matrix, in world space
|
|
282
|
-
*/
|
|
283
|
-
getAbsoluteMatrix() {
|
|
284
|
-
return this._absoluteMatrix;
|
|
285
|
-
}
|
|
286
|
-
/**
|
|
287
|
-
* Gets the bone matrix, in world space (relative to the skeleton root)
|
|
288
|
-
* @returns the bone matrix, in world space
|
|
289
|
-
* @deprecated Please use getAbsoluteMatrix instead
|
|
290
|
-
*/
|
|
291
|
-
getAbsoluteTransform() {
|
|
292
|
-
return this._absoluteMatrix;
|
|
293
|
-
}
|
|
294
|
-
/**
|
|
295
|
-
* Links with the given transform node.
|
|
296
|
-
* The local matrix of this bone is overwritten by the transform of the node every frame.
|
|
297
|
-
* @param transformNode defines the transform node to link to
|
|
298
|
-
*/
|
|
299
|
-
linkTransformNode(transformNode) {
|
|
300
|
-
if (this._linkedTransformNode) {
|
|
301
|
-
this._skeleton._numBonesWithLinkedTransformNode--;
|
|
302
|
-
}
|
|
303
|
-
this._linkedTransformNode = transformNode;
|
|
304
|
-
if (this._linkedTransformNode) {
|
|
305
|
-
this._skeleton._numBonesWithLinkedTransformNode++;
|
|
306
|
-
}
|
|
307
|
-
}
|
|
308
|
-
// Properties (matches TransformNode properties)
|
|
309
|
-
/**
|
|
310
|
-
* Gets the node used to drive the bone's transformation
|
|
311
|
-
* @returns a transform node or null
|
|
312
|
-
*/
|
|
313
|
-
getTransformNode() {
|
|
314
|
-
return this._linkedTransformNode;
|
|
315
|
-
}
|
|
316
|
-
/** Gets or sets current position (in local space) */
|
|
317
|
-
get position() {
|
|
318
|
-
this._decompose();
|
|
319
|
-
return this._localPosition;
|
|
320
|
-
}
|
|
321
|
-
set position(newPosition) {
|
|
322
|
-
this._decompose();
|
|
323
|
-
this._localPosition.copyFrom(newPosition);
|
|
324
|
-
this._markAsDirtyAndCompose();
|
|
325
|
-
}
|
|
326
|
-
/** Gets or sets current rotation (in local space) */
|
|
327
|
-
get rotation() {
|
|
328
|
-
return this.getRotation();
|
|
329
|
-
}
|
|
330
|
-
set rotation(newRotation) {
|
|
331
|
-
this.setRotation(newRotation);
|
|
332
|
-
}
|
|
333
|
-
/** Gets or sets current rotation quaternion (in local space) */
|
|
334
|
-
get rotationQuaternion() {
|
|
335
|
-
this._decompose();
|
|
336
|
-
return this._localRotation;
|
|
337
|
-
}
|
|
338
|
-
set rotationQuaternion(newRotation) {
|
|
339
|
-
this.setRotationQuaternion(newRotation);
|
|
340
|
-
}
|
|
341
|
-
/** Gets or sets current scaling (in local space) */
|
|
342
|
-
get scaling() {
|
|
343
|
-
return this.getScale();
|
|
344
|
-
}
|
|
345
|
-
set scaling(newScaling) {
|
|
346
|
-
this.setScale(newScaling);
|
|
347
|
-
}
|
|
348
|
-
/**
|
|
349
|
-
* Gets the animation properties override
|
|
350
|
-
*/
|
|
351
|
-
get animationPropertiesOverride() {
|
|
352
|
-
return this._skeleton.animationPropertiesOverride;
|
|
353
|
-
}
|
|
354
|
-
// Methods
|
|
355
|
-
_decompose() {
|
|
356
|
-
if (!this._needToDecompose) {
|
|
357
|
-
return;
|
|
358
|
-
}
|
|
359
|
-
this._needToDecompose = false;
|
|
360
|
-
if (!this._localScaling) {
|
|
361
|
-
this._localScaling = Vector3.Zero();
|
|
362
|
-
this._localRotation = Quaternion.Zero();
|
|
363
|
-
this._localPosition = Vector3.Zero();
|
|
364
|
-
}
|
|
365
|
-
this._localMatrix.decompose(this._localScaling, this._localRotation, this._localPosition);
|
|
366
|
-
}
|
|
367
|
-
_compose() {
|
|
368
|
-
if (!this._needToCompose) {
|
|
369
|
-
return;
|
|
370
|
-
}
|
|
371
|
-
if (!this._localScaling) {
|
|
372
|
-
this._needToCompose = false;
|
|
373
|
-
return;
|
|
374
|
-
}
|
|
375
|
-
this._needToCompose = false;
|
|
376
|
-
Matrix.ComposeToRef(this._localScaling, this._localRotation, this._localPosition, this._localMatrix);
|
|
377
|
-
}
|
|
378
|
-
/**
|
|
379
|
-
* Update the bind (and optionally the local) matrix
|
|
380
|
-
* @param bindMatrix defines the new matrix to set to the bind/local matrix, in local space
|
|
381
|
-
* @param updateAbsoluteBindMatrices defines if the absolute bind and absolute inverse bind matrices must be recomputed (default: true)
|
|
382
|
-
* @param updateLocalMatrix defines if the local matrix should also be updated with the matrix passed in parameter (default: true)
|
|
383
|
-
*/
|
|
384
|
-
updateMatrix(bindMatrix, updateAbsoluteBindMatrices = true, updateLocalMatrix = true) {
|
|
385
|
-
this._bindMatrix.copyFrom(bindMatrix);
|
|
386
|
-
if (updateAbsoluteBindMatrices) {
|
|
387
|
-
this._updateAbsoluteBindMatrices();
|
|
388
|
-
}
|
|
389
|
-
if (updateLocalMatrix) {
|
|
390
|
-
this._matrix = bindMatrix;
|
|
391
|
-
}
|
|
392
|
-
else {
|
|
393
|
-
this.markAsDirty();
|
|
394
|
-
}
|
|
395
|
-
}
|
|
396
|
-
/**
|
|
397
|
-
* @internal
|
|
398
|
-
*/
|
|
399
|
-
_updateAbsoluteBindMatrices(bindMatrix, updateChildren = true) {
|
|
400
|
-
if (!bindMatrix) {
|
|
401
|
-
bindMatrix = this._bindMatrix;
|
|
402
|
-
}
|
|
403
|
-
if (this.parent) {
|
|
404
|
-
bindMatrix.multiplyToRef(this.parent._absoluteBindMatrix, this._absoluteBindMatrix);
|
|
405
|
-
}
|
|
406
|
-
else {
|
|
407
|
-
this._absoluteBindMatrix.copyFrom(bindMatrix);
|
|
408
|
-
}
|
|
409
|
-
this._absoluteBindMatrix.invertToRef(this._absoluteInverseBindMatrix);
|
|
410
|
-
if (updateChildren) {
|
|
411
|
-
for (let index = 0; index < this.children.length; index++) {
|
|
412
|
-
this.children[index]._updateAbsoluteBindMatrices();
|
|
413
|
-
}
|
|
414
|
-
}
|
|
415
|
-
this._scalingDeterminant = this._absoluteBindMatrix.determinant() < 0 ? -1 : 1;
|
|
416
|
-
}
|
|
417
|
-
/**
|
|
418
|
-
* Flag the bone as dirty (Forcing it to update everything)
|
|
419
|
-
* @returns this bone
|
|
420
|
-
*/
|
|
421
|
-
markAsDirty() {
|
|
422
|
-
this._currentRenderId++;
|
|
423
|
-
this._childUpdateId++;
|
|
424
|
-
this._skeleton._markAsDirty();
|
|
425
|
-
return this;
|
|
426
|
-
}
|
|
427
|
-
/** @internal */
|
|
428
|
-
_markAsDirtyAndCompose() {
|
|
429
|
-
this.markAsDirty();
|
|
430
|
-
this._needToCompose = true;
|
|
431
|
-
}
|
|
432
|
-
_markAsDirtyAndDecompose() {
|
|
433
|
-
this.markAsDirty();
|
|
434
|
-
this._needToDecompose = true;
|
|
435
|
-
}
|
|
436
|
-
_updatePosition(vec, space = Space.LOCAL, tNode, translationMode = true) {
|
|
437
|
-
const lm = this.getLocalMatrix();
|
|
438
|
-
if (space == Space.LOCAL) {
|
|
439
|
-
if (translationMode) {
|
|
440
|
-
lm.addAtIndex(12, vec.x);
|
|
441
|
-
lm.addAtIndex(13, vec.y);
|
|
442
|
-
lm.addAtIndex(14, vec.z);
|
|
443
|
-
}
|
|
444
|
-
else {
|
|
445
|
-
lm.setTranslationFromFloats(vec.x, vec.y, vec.z);
|
|
446
|
-
}
|
|
447
|
-
}
|
|
448
|
-
else {
|
|
449
|
-
let wm = null;
|
|
450
|
-
//tNode.getWorldMatrix() needs to be called before skeleton.computeAbsoluteMatrices()
|
|
451
|
-
if (tNode) {
|
|
452
|
-
wm = tNode.getWorldMatrix();
|
|
453
|
-
}
|
|
454
|
-
this._skeleton.computeAbsoluteMatrices();
|
|
455
|
-
const tmat = Bone._TmpMats[0];
|
|
456
|
-
const tvec = Bone._TmpVecs[0];
|
|
457
|
-
if (this.parent) {
|
|
458
|
-
if (tNode && wm) {
|
|
459
|
-
tmat.copyFrom(this.parent.getAbsoluteMatrix());
|
|
460
|
-
tmat.multiplyToRef(wm, tmat);
|
|
461
|
-
}
|
|
462
|
-
else {
|
|
463
|
-
tmat.copyFrom(this.parent.getAbsoluteMatrix());
|
|
464
|
-
}
|
|
465
|
-
}
|
|
466
|
-
else {
|
|
467
|
-
Matrix.IdentityToRef(tmat);
|
|
468
|
-
}
|
|
469
|
-
if (translationMode) {
|
|
470
|
-
tmat.setTranslationFromFloats(0, 0, 0);
|
|
471
|
-
}
|
|
472
|
-
tmat.invert();
|
|
473
|
-
Vector3.TransformCoordinatesToRef(vec, tmat, tvec);
|
|
474
|
-
if (translationMode) {
|
|
475
|
-
lm.addAtIndex(12, tvec.x);
|
|
476
|
-
lm.addAtIndex(13, tvec.y);
|
|
477
|
-
lm.addAtIndex(14, tvec.z);
|
|
478
|
-
}
|
|
479
|
-
else {
|
|
480
|
-
lm.setTranslationFromFloats(tvec.x, tvec.y, tvec.z);
|
|
481
|
-
}
|
|
482
|
-
}
|
|
483
|
-
this._markAsDirtyAndDecompose();
|
|
484
|
-
}
|
|
485
|
-
/**
|
|
486
|
-
* Translate the bone in local or world space
|
|
487
|
-
* @param vec The amount to translate the bone
|
|
488
|
-
* @param space The space that the translation is in (default: Space.LOCAL)
|
|
489
|
-
* @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD
|
|
490
|
-
*/
|
|
491
|
-
translate(vec, space = Space.LOCAL, tNode) {
|
|
492
|
-
this._updatePosition(vec, space, tNode, true);
|
|
493
|
-
}
|
|
494
|
-
/**
|
|
495
|
-
* Set the position of the bone in local or world space
|
|
496
|
-
* @param position The position to set the bone
|
|
497
|
-
* @param space The space that the position is in (default: Space.LOCAL)
|
|
498
|
-
* @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD
|
|
499
|
-
*/
|
|
500
|
-
setPosition(position, space = Space.LOCAL, tNode) {
|
|
501
|
-
this._updatePosition(position, space, tNode, false);
|
|
502
|
-
}
|
|
503
|
-
/**
|
|
504
|
-
* Set the absolute position of the bone (world space)
|
|
505
|
-
* @param position The position to set the bone
|
|
506
|
-
* @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD
|
|
507
|
-
*/
|
|
508
|
-
setAbsolutePosition(position, tNode) {
|
|
509
|
-
this.setPosition(position, Space.WORLD, tNode);
|
|
510
|
-
}
|
|
511
|
-
/**
|
|
512
|
-
* Scale the bone on the x, y and z axes (in local space)
|
|
513
|
-
* @param x The amount to scale the bone on the x axis
|
|
514
|
-
* @param y The amount to scale the bone on the y axis
|
|
515
|
-
* @param z The amount to scale the bone on the z axis
|
|
516
|
-
* @param scaleChildren sets this to true if children of the bone should be scaled as well (false by default)
|
|
517
|
-
*/
|
|
518
|
-
scale(x, y, z, scaleChildren = false) {
|
|
519
|
-
const locMat = this.getLocalMatrix();
|
|
520
|
-
// Apply new scaling on top of current local matrix
|
|
521
|
-
const scaleMat = Bone._TmpMats[0];
|
|
522
|
-
Matrix.ScalingToRef(x, y, z, scaleMat);
|
|
523
|
-
scaleMat.multiplyToRef(locMat, locMat);
|
|
524
|
-
// Invert scaling matrix and apply the inverse to all children
|
|
525
|
-
scaleMat.invert();
|
|
526
|
-
for (const child of this.children) {
|
|
527
|
-
const cm = child.getLocalMatrix();
|
|
528
|
-
cm.multiplyToRef(scaleMat, cm);
|
|
529
|
-
cm.multiplyAtIndex(12, x);
|
|
530
|
-
cm.multiplyAtIndex(13, y);
|
|
531
|
-
cm.multiplyAtIndex(14, z);
|
|
532
|
-
child._markAsDirtyAndDecompose();
|
|
533
|
-
}
|
|
534
|
-
this._markAsDirtyAndDecompose();
|
|
535
|
-
if (scaleChildren) {
|
|
536
|
-
for (const child of this.children) {
|
|
537
|
-
child.scale(x, y, z, scaleChildren);
|
|
538
|
-
}
|
|
539
|
-
}
|
|
540
|
-
}
|
|
541
|
-
/**
|
|
542
|
-
* Set the bone scaling in local space
|
|
543
|
-
* @param scale defines the scaling vector
|
|
544
|
-
*/
|
|
545
|
-
setScale(scale) {
|
|
546
|
-
this._decompose();
|
|
547
|
-
this._localScaling.copyFrom(scale);
|
|
548
|
-
this._markAsDirtyAndCompose();
|
|
549
|
-
}
|
|
550
|
-
/**
|
|
551
|
-
* Gets the current scaling in local space
|
|
552
|
-
* @returns the current scaling vector
|
|
553
|
-
*/
|
|
554
|
-
getScale() {
|
|
555
|
-
this._decompose();
|
|
556
|
-
return this._localScaling;
|
|
557
|
-
}
|
|
558
|
-
/**
|
|
559
|
-
* Gets the current scaling in local space and stores it in a target vector
|
|
560
|
-
* @param result defines the target vector
|
|
561
|
-
*/
|
|
562
|
-
getScaleToRef(result) {
|
|
563
|
-
this._decompose();
|
|
564
|
-
result.copyFrom(this._localScaling);
|
|
565
|
-
}
|
|
566
|
-
/**
|
|
567
|
-
* Set the yaw, pitch, and roll of the bone in local or world space
|
|
568
|
-
* @param yaw The rotation of the bone on the y axis
|
|
569
|
-
* @param pitch The rotation of the bone on the x axis
|
|
570
|
-
* @param roll The rotation of the bone on the z axis
|
|
571
|
-
* @param space The space that the axes of rotation are in
|
|
572
|
-
* @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD
|
|
573
|
-
*/
|
|
574
|
-
setYawPitchRoll(yaw, pitch, roll, space = Space.LOCAL, tNode) {
|
|
575
|
-
if (space === Space.LOCAL) {
|
|
576
|
-
const quat = Bone._TmpQuat;
|
|
577
|
-
Quaternion.RotationYawPitchRollToRef(yaw, pitch, roll, quat);
|
|
578
|
-
this.setRotationQuaternion(quat, space, tNode);
|
|
579
|
-
return;
|
|
580
|
-
}
|
|
581
|
-
const rotMatInv = Bone._TmpMats[0];
|
|
582
|
-
if (!this._getAbsoluteInverseMatrixUnscaledToRef(rotMatInv, tNode)) {
|
|
583
|
-
return;
|
|
584
|
-
}
|
|
585
|
-
const rotMat = Bone._TmpMats[1];
|
|
586
|
-
Matrix.RotationYawPitchRollToRef(yaw, pitch, roll, rotMat);
|
|
587
|
-
rotMatInv.multiplyToRef(rotMat, rotMat);
|
|
588
|
-
this._rotateWithMatrix(rotMat, space, tNode);
|
|
589
|
-
}
|
|
590
|
-
/**
|
|
591
|
-
* Add a rotation to the bone on an axis in local or world space
|
|
592
|
-
* @param axis The axis to rotate the bone on
|
|
593
|
-
* @param amount The amount to rotate the bone
|
|
594
|
-
* @param space The space that the axis is in
|
|
595
|
-
* @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD
|
|
596
|
-
*/
|
|
597
|
-
rotate(axis, amount, space = Space.LOCAL, tNode) {
|
|
598
|
-
const rmat = Bone._TmpMats[0];
|
|
599
|
-
rmat.setTranslationFromFloats(0, 0, 0);
|
|
600
|
-
Matrix.RotationAxisToRef(axis, amount, rmat);
|
|
601
|
-
this._rotateWithMatrix(rmat, space, tNode);
|
|
602
|
-
}
|
|
603
|
-
/**
|
|
604
|
-
* Set the rotation of the bone to a particular axis angle in local or world space
|
|
605
|
-
* @param axis The axis to rotate the bone on
|
|
606
|
-
* @param angle The angle that the bone should be rotated to
|
|
607
|
-
* @param space The space that the axis is in
|
|
608
|
-
* @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD
|
|
609
|
-
*/
|
|
610
|
-
setAxisAngle(axis, angle, space = Space.LOCAL, tNode) {
|
|
611
|
-
if (space === Space.LOCAL) {
|
|
612
|
-
const quat = Bone._TmpQuat;
|
|
613
|
-
Quaternion.RotationAxisToRef(axis, angle, quat);
|
|
614
|
-
this.setRotationQuaternion(quat, space, tNode);
|
|
615
|
-
return;
|
|
616
|
-
}
|
|
617
|
-
const rotMatInv = Bone._TmpMats[0];
|
|
618
|
-
if (!this._getAbsoluteInverseMatrixUnscaledToRef(rotMatInv, tNode)) {
|
|
619
|
-
return;
|
|
620
|
-
}
|
|
621
|
-
const rotMat = Bone._TmpMats[1];
|
|
622
|
-
Matrix.RotationAxisToRef(axis, angle, rotMat);
|
|
623
|
-
rotMatInv.multiplyToRef(rotMat, rotMat);
|
|
624
|
-
this._rotateWithMatrix(rotMat, space, tNode);
|
|
625
|
-
}
|
|
626
|
-
/**
|
|
627
|
-
* Set the euler rotation of the bone in local or world space
|
|
628
|
-
* @param rotation The euler rotation that the bone should be set to
|
|
629
|
-
* @param space The space that the rotation is in
|
|
630
|
-
* @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD
|
|
631
|
-
*/
|
|
632
|
-
setRotation(rotation, space = Space.LOCAL, tNode) {
|
|
633
|
-
this.setYawPitchRoll(rotation.y, rotation.x, rotation.z, space, tNode);
|
|
634
|
-
}
|
|
635
|
-
/**
|
|
636
|
-
* Set the quaternion rotation of the bone in local or world space
|
|
637
|
-
* @param quat The quaternion rotation that the bone should be set to
|
|
638
|
-
* @param space The space that the rotation is in
|
|
639
|
-
* @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD
|
|
640
|
-
*/
|
|
641
|
-
setRotationQuaternion(quat, space = Space.LOCAL, tNode) {
|
|
642
|
-
if (space === Space.LOCAL) {
|
|
643
|
-
this._decompose();
|
|
644
|
-
this._localRotation.copyFrom(quat);
|
|
645
|
-
this._markAsDirtyAndCompose();
|
|
646
|
-
return;
|
|
647
|
-
}
|
|
648
|
-
const rotMatInv = Bone._TmpMats[0];
|
|
649
|
-
if (!this._getAbsoluteInverseMatrixUnscaledToRef(rotMatInv, tNode)) {
|
|
650
|
-
return;
|
|
651
|
-
}
|
|
652
|
-
const rotMat = Bone._TmpMats[1];
|
|
653
|
-
Matrix.FromQuaternionToRef(quat, rotMat);
|
|
654
|
-
rotMatInv.multiplyToRef(rotMat, rotMat);
|
|
655
|
-
this._rotateWithMatrix(rotMat, space, tNode);
|
|
656
|
-
}
|
|
657
|
-
/**
 * Set the rotation matrix of the bone in local or world space
 * @param rotMat The rotation matrix that the bone should be set to
 * @param space The space that the rotation is in
 * @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD
 */
setRotationMatrix(rotMat, space = Space.LOCAL, tNode) {
    if (space === Space.LOCAL) {
        // Local space: convert to a quaternion and reuse the quaternion path.
        const quat = Bone._TmpQuat;
        Quaternion.FromRotationMatrixToRef(rotMat, quat);
        this.setRotationQuaternion(quat, space, tNode);
        return;
    }
    // World space: express the requested rotation relative to the bone's current absolute frame.
    const rotMatInv = Bone._TmpMats[0];
    if (!this._getAbsoluteInverseMatrixUnscaledToRef(rotMatInv, tNode)) {
        // Absolute matrix could not be inverted (e.g. zero scale) — bail out unchanged.
        return;
    }
    const rotMat2 = Bone._TmpMats[1];
    rotMat2.copyFrom(rotMat);
    rotMatInv.multiplyToRef(rotMat, rotMat2);
    this._rotateWithMatrix(rotMat2, space, tNode);
}
|
|
679
|
-
/**
 * Applies a rotation matrix to the bone's local matrix while preserving its local translation.
 * @param rmat The rotation matrix to apply
 * @param space The space the rotation is in (defaults to local)
 * @param tNode TransformNode whose world matrix is folded in; used only when space=Space.WORLD
 * @internal
 */
_rotateWithMatrix(rmat, space = Space.LOCAL, tNode) {
    const lmat = this.getLocalMatrix();
    // Remember the current local translation; it is restored after the rotation is applied.
    const lx = lmat.m[12];
    const ly = lmat.m[13];
    const lz = lmat.m[14];
    const parent = this.getParent();
    const parentScale = Bone._TmpMats[3];
    const parentScaleInv = Bone._TmpMats[4];
    if (parent && space == Space.WORLD) {
        // Build the parent frame (optionally composed with the node's world matrix) and its inverse.
        if (tNode) {
            parentScale.copyFrom(tNode.getWorldMatrix());
            parent.getAbsoluteMatrix().multiplyToRef(parentScale, parentScale);
        }
        else {
            parentScale.copyFrom(parent.getAbsoluteMatrix());
        }
        parentScaleInv.copyFrom(parentScale);
        parentScaleInv.invert();
        // Conjugate the rotation by the parent frame: local -> world, rotate, world -> local.
        lmat.multiplyToRef(parentScale, lmat);
        lmat.multiplyToRef(rmat, lmat);
        lmat.multiplyToRef(parentScaleInv, lmat);
    }
    else {
        if (space == Space.WORLD && tNode) {
            // No parent bone: conjugate only by the node's world matrix.
            parentScale.copyFrom(tNode.getWorldMatrix());
            parentScaleInv.copyFrom(parentScale);
            parentScaleInv.invert();
            lmat.multiplyToRef(parentScale, lmat);
            lmat.multiplyToRef(rmat, lmat);
            lmat.multiplyToRef(parentScaleInv, lmat);
        }
        else {
            // Local space with no extra frame: apply the rotation directly.
            lmat.multiplyToRef(rmat, lmat);
        }
    }
    // Restore the translation clobbered by the matrix products above.
    lmat.setTranslationFromFloats(lx, ly, lz);
    this.computeAbsoluteMatrices();
    this._markAsDirtyAndDecompose();
}
|
|
718
|
-
/**
 * Computes the inverse of the bone's absolute matrix (optionally composed with tNode's world
 * matrix), re-applies the node scaling, and writes the result to rotMatInv.
 * @param rotMatInv The matrix the result is written to
 * @param tNode Optional TransformNode whose world matrix is folded into the absolute matrix
 * @returns false when the matrix could not be inverted (e.g. zero scale), true otherwise
 * @internal
 */
_getAbsoluteInverseMatrixUnscaledToRef(rotMatInv, tNode) {
    const scaleMatrix = Bone._TmpMats[2];
    rotMatInv.copyFrom(this.getAbsoluteMatrix());
    if (tNode) {
        rotMatInv.multiplyToRef(tNode.getWorldMatrix(), rotMatInv);
        // Keep the node's scaling so it can be re-applied after inversion.
        Matrix.ScalingToRef(tNode.scaling.x, tNode.scaling.y, tNode.scaling.z, scaleMatrix);
    }
    else {
        Matrix.IdentityToRef(scaleMatrix);
    }
    rotMatInv.invert();
    if (isNaN(rotMatInv.m[0])) {
        // Matrix failed to invert.
        // This can happen if scale is zero for example.
        return false;
    }
    scaleMatrix.multiplyAtIndex(0, this._scalingDeterminant);
    rotMatInv.multiplyToRef(scaleMatrix, rotMatInv);
    return true;
}
|
|
738
|
-
/**
|
|
739
|
-
* Get the position of the bone in local or world space
|
|
740
|
-
* @param space The space that the returned position is in
|
|
741
|
-
* @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD
|
|
742
|
-
* @returns The position of the bone
|
|
743
|
-
*/
|
|
744
|
-
getPosition(space = Space.LOCAL, tNode = null) {
|
|
745
|
-
const pos = Vector3.Zero();
|
|
746
|
-
this.getPositionToRef(space, tNode, pos);
|
|
747
|
-
return pos;
|
|
748
|
-
}
|
|
749
|
-
/**
 * Copy the position of the bone to a vector3 in local or world space
 * @param space The space that the returned position is in
 * @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD
 * @param result The vector3 to copy the position to
 */
getPositionToRef(space = Space.LOCAL, tNode, result) {
    if (space == Space.LOCAL) {
        // Local position is the translation row of the local matrix.
        const lm = this.getLocalMatrix();
        result.x = lm.m[12];
        result.y = lm.m[13];
        result.z = lm.m[14];
    }
    else {
        let wm = null;
        //tNode.getWorldMatrix() needs to be called before skeleton.computeAbsoluteMatrices()
        if (tNode) {
            wm = tNode.getWorldMatrix();
        }
        this._skeleton.computeAbsoluteMatrices();
        let tmat = Bone._TmpMats[0];
        if (tNode && wm) {
            // Compose the absolute matrix with the node's world matrix into scratch storage.
            tmat.copyFrom(this.getAbsoluteMatrix());
            tmat.multiplyToRef(wm, tmat);
        }
        else {
            // No node: read the translation straight from the absolute matrix.
            tmat = this.getAbsoluteMatrix();
        }
        result.x = tmat.m[12];
        result.y = tmat.m[13];
        result.z = tmat.m[14];
    }
}
|
|
782
|
-
/**
|
|
783
|
-
* Get the absolute position of the bone (world space)
|
|
784
|
-
* @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD
|
|
785
|
-
* @returns The absolute position of the bone
|
|
786
|
-
*/
|
|
787
|
-
getAbsolutePosition(tNode = null) {
|
|
788
|
-
const pos = Vector3.Zero();
|
|
789
|
-
this.getPositionToRef(Space.WORLD, tNode, pos);
|
|
790
|
-
return pos;
|
|
791
|
-
}
|
|
792
|
-
/**
 * Copy the absolute position of the bone (world space) to the result param
 * @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD
 * @param result The vector3 to copy the absolute position to
 */
getAbsolutePositionToRef(tNode, result) {
    // Thin alias for getPositionToRef pinned to world space.
    this.getPositionToRef(Space.WORLD, tNode, result);
}
|
|
800
|
-
/**
|
|
801
|
-
* Compute the absolute matrices of this bone and its children
|
|
802
|
-
*/
|
|
803
|
-
computeAbsoluteMatrices() {
|
|
804
|
-
this._compose();
|
|
805
|
-
if (this.parent) {
|
|
806
|
-
this._localMatrix.multiplyToRef(this.parent._absoluteMatrix, this._absoluteMatrix);
|
|
807
|
-
}
|
|
808
|
-
else {
|
|
809
|
-
this._absoluteMatrix.copyFrom(this._localMatrix);
|
|
810
|
-
const poseMatrix = this._skeleton.getPoseMatrix();
|
|
811
|
-
if (poseMatrix) {
|
|
812
|
-
this._absoluteMatrix.multiplyToRef(poseMatrix, this._absoluteMatrix);
|
|
813
|
-
}
|
|
814
|
-
}
|
|
815
|
-
const children = this.children;
|
|
816
|
-
const len = children.length;
|
|
817
|
-
for (let i = 0; i < len; i++) {
|
|
818
|
-
children[i].computeAbsoluteMatrices();
|
|
819
|
-
}
|
|
820
|
-
}
|
|
821
|
-
/**
 * Compute the absolute matrices of this bone and its children
 * @deprecated Please use computeAbsoluteMatrices instead
 */
computeAbsoluteTransforms() {
    // Backward-compatible alias kept for callers of the pre-rename API.
    this.computeAbsoluteMatrices();
}
|
|
828
|
-
/**
|
|
829
|
-
* Get the world direction from an axis that is in the local space of the bone
|
|
830
|
-
* @param localAxis The local direction that is used to compute the world direction
|
|
831
|
-
* @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD
|
|
832
|
-
* @returns The world direction
|
|
833
|
-
*/
|
|
834
|
-
getDirection(localAxis, tNode = null) {
|
|
835
|
-
const result = Vector3.Zero();
|
|
836
|
-
this.getDirectionToRef(localAxis, tNode, result);
|
|
837
|
-
return result;
|
|
838
|
-
}
|
|
839
|
-
/**
 * Copy the world direction to a vector3 from an axis that is in the local space of the bone
 * @param localAxis The local direction that is used to compute the world direction
 * @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD
 * @param result The vector3 that the world direction will be copied to
 */
getDirectionToRef(localAxis, tNode = null, result) {
    let wm = null;
    //tNode.getWorldMatrix() needs to be called before skeleton.computeAbsoluteMatrices()
    if (tNode) {
        wm = tNode.getWorldMatrix();
    }
    this._skeleton.computeAbsoluteMatrices();
    const mat = Bone._TmpMats[0];
    mat.copyFrom(this.getAbsoluteMatrix());
    if (tNode && wm) {
        mat.multiplyToRef(wm, mat);
    }
    // Transform as a normal (no translation) and return a unit-length direction.
    Vector3.TransformNormalToRef(localAxis, mat, result);
    result.normalize();
}
|
|
860
|
-
/**
|
|
861
|
-
* Get the euler rotation of the bone in local or world space
|
|
862
|
-
* @param space The space that the rotation should be in
|
|
863
|
-
* @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD
|
|
864
|
-
* @returns The euler rotation
|
|
865
|
-
*/
|
|
866
|
-
getRotation(space = Space.LOCAL, tNode = null) {
|
|
867
|
-
const result = Vector3.Zero();
|
|
868
|
-
this.getRotationToRef(space, tNode, result);
|
|
869
|
-
return result;
|
|
870
|
-
}
|
|
871
|
-
/**
 * Copy the euler rotation of the bone to a vector3. The rotation can be in either local or world space
 * @param space The space that the rotation should be in
 * @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD
 * @param result The vector3 that the rotation should be copied to
 */
getRotationToRef(space = Space.LOCAL, tNode = null, result) {
    // Fetch the quaternion into shared scratch storage, then convert to Euler angles.
    const quat = Bone._TmpQuat;
    this.getRotationQuaternionToRef(space, tNode, quat);
    quat.toEulerAnglesToRef(result);
}
|
|
882
|
-
/**
|
|
883
|
-
* Get the quaternion rotation of the bone in either local or world space
|
|
884
|
-
* @param space The space that the rotation should be in
|
|
885
|
-
* @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD
|
|
886
|
-
* @returns The quaternion rotation
|
|
887
|
-
*/
|
|
888
|
-
getRotationQuaternion(space = Space.LOCAL, tNode = null) {
|
|
889
|
-
const result = Quaternion.Identity();
|
|
890
|
-
this.getRotationQuaternionToRef(space, tNode, result);
|
|
891
|
-
return result;
|
|
892
|
-
}
|
|
893
|
-
/**
 * Copy the quaternion rotation of the bone to a quaternion. The rotation can be in either local or world space
 * @param space The space that the rotation should be in
 * @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD
 * @param result The quaternion that the rotation should be copied to
 */
getRotationQuaternionToRef(space = Space.LOCAL, tNode = null, result) {
    if (space == Space.LOCAL) {
        // Local space: the decomposed local rotation is authoritative.
        this._decompose();
        result.copyFrom(this._localRotation);
    }
    else {
        const mat = Bone._TmpMats[0];
        const amat = this.getAbsoluteMatrix();
        if (tNode) {
            amat.multiplyToRef(tNode.getWorldMatrix(), mat);
        }
        else {
            mat.copyFrom(amat);
        }
        // Scale the first three elements by the scaling determinant — presumably to
        // undo a negative-scale (mirroring) sign before decomposing; confirm against
        // how _scalingDeterminant is maintained elsewhere in Bone.
        mat.multiplyAtIndex(0, this._scalingDeterminant);
        mat.multiplyAtIndex(1, this._scalingDeterminant);
        mat.multiplyAtIndex(2, this._scalingDeterminant);
        mat.decompose(undefined, result, undefined);
    }
}
|
|
919
|
-
/**
|
|
920
|
-
* Get the rotation matrix of the bone in local or world space
|
|
921
|
-
* @param space The space that the rotation should be in
|
|
922
|
-
* @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD
|
|
923
|
-
* @returns The rotation matrix
|
|
924
|
-
*/
|
|
925
|
-
getRotationMatrix(space = Space.LOCAL, tNode) {
|
|
926
|
-
const result = Matrix.Identity();
|
|
927
|
-
this.getRotationMatrixToRef(space, tNode, result);
|
|
928
|
-
return result;
|
|
929
|
-
}
|
|
930
|
-
/**
 * Copy the rotation matrix of the bone to a matrix. The rotation can be in either local or world space
 * @param space The space that the rotation should be in
 * @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD
 * @param result The matrix that the rotation should be copied to
 */
getRotationMatrixToRef(space = Space.LOCAL, tNode, result) {
    if (space == Space.LOCAL) {
        this.getLocalMatrix().getRotationMatrixToRef(result);
    }
    else {
        const mat = Bone._TmpMats[0];
        const amat = this.getAbsoluteMatrix();
        if (tNode) {
            amat.multiplyToRef(tNode.getWorldMatrix(), mat);
        }
        else {
            mat.copyFrom(amat);
        }
        // Scale the first three elements by the scaling determinant — presumably to
        // undo a negative-scale (mirroring) sign before extracting the rotation;
        // confirm against how _scalingDeterminant is maintained elsewhere in Bone.
        mat.multiplyAtIndex(0, this._scalingDeterminant);
        mat.multiplyAtIndex(1, this._scalingDeterminant);
        mat.multiplyAtIndex(2, this._scalingDeterminant);
        mat.getRotationMatrixToRef(result);
    }
}
|
|
955
|
-
/**
|
|
956
|
-
* Get the world position of a point that is in the local space of the bone
|
|
957
|
-
* @param position The local position
|
|
958
|
-
* @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD
|
|
959
|
-
* @returns The world position
|
|
960
|
-
*/
|
|
961
|
-
getAbsolutePositionFromLocal(position, tNode = null) {
|
|
962
|
-
const result = Vector3.Zero();
|
|
963
|
-
this.getAbsolutePositionFromLocalToRef(position, tNode, result);
|
|
964
|
-
return result;
|
|
965
|
-
}
|
|
966
|
-
/**
 * Get the world position of a point that is in the local space of the bone and copy it to the result param
 * @param position The local position
 * @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD
 * @param result The vector3 that the world position should be copied to
 */
getAbsolutePositionFromLocalToRef(position, tNode = null, result) {
    let wm = null;
    //tNode.getWorldMatrix() needs to be called before skeleton.computeAbsoluteMatrices()
    if (tNode) {
        wm = tNode.getWorldMatrix();
    }
    this._skeleton.computeAbsoluteMatrices();
    const tmat = Bone._TmpMats[0];
    tmat.copyFrom(this.getAbsoluteMatrix());
    if (tNode && wm) {
        tmat.multiplyToRef(wm, tmat);
    }
    // Transform as a coordinate (translation applied), local -> world.
    Vector3.TransformCoordinatesToRef(position, tmat, result);
}
|
|
986
|
-
/**
|
|
987
|
-
* Get the local position of a point that is in world space
|
|
988
|
-
* @param position The world position
|
|
989
|
-
* @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD
|
|
990
|
-
* @returns The local position
|
|
991
|
-
*/
|
|
992
|
-
getLocalPositionFromAbsolute(position, tNode = null) {
|
|
993
|
-
const result = Vector3.Zero();
|
|
994
|
-
this.getLocalPositionFromAbsoluteToRef(position, tNode, result);
|
|
995
|
-
return result;
|
|
996
|
-
}
|
|
997
|
-
/**
 * Get the local position of a point that is in world space and copy it to the result param
 * @param position The world position
 * @param tNode A TransformNode whose world matrix is to be applied to the calculated absolute matrix. In most cases, you'll want to pass the mesh associated with the skeleton from which this bone comes. Used only when space=Space.WORLD
 * @param result The vector3 that the local position should be copied to
 */
getLocalPositionFromAbsoluteToRef(position, tNode = null, result) {
    let wm = null;
    //tNode.getWorldMatrix() needs to be called before skeleton.computeAbsoluteMatrices()
    if (tNode) {
        wm = tNode.getWorldMatrix();
    }
    this._skeleton.computeAbsoluteMatrices();
    const tmat = Bone._TmpMats[0];
    tmat.copyFrom(this.getAbsoluteMatrix());
    if (tNode && wm) {
        tmat.multiplyToRef(wm, tmat);
    }
    // Invert the bone-to-world matrix so the coordinate transform maps world -> local.
    tmat.invert();
    Vector3.TransformCoordinatesToRef(position, tmat, result);
}
|
|
1018
|
-
/**
 * Set the current local matrix as the restMatrix for this bone.
 */
setCurrentPoseAsRest() {
    // Snapshot the current local matrix as the new rest pose.
    this.setRestMatrix(this.getLocalMatrix());
}
|
|
1024
|
-
}
|
|
1025
|
-
// Scratch objects shared by all Bone instances to avoid per-call allocations.
// Callers must not hold references to them across calls.
Bone._TmpVecs = ArrayTools.BuildArray(2, Vector3.Zero);
Bone._TmpQuat = Quaternion.Identity();
Bone._TmpMats = ArrayTools.BuildArray(5, Matrix.Identity);
|
|
1028
|
-
|
|
1029
|
-
// Do not edit.
|
|
1030
|
-
const name$s = "sharpenPixelShader";
|
|
1031
|
-
const shader$s = `varying vec2 vUV;
|
|
1032
|
-
void main(void)
|
|
1033
|
-
// Sideeffect
|
|
1034
|
-
ShaderStore.ShadersStore[name$s] = shader$s;
|
|
1035
|
-
|
|
1036
|
-
/**
 * The SharpenPostProcess applies a sharpen kernel to every pixel
 * See http://en.wikipedia.org/wiki/Kernel_(image_processing)
 */
class SharpenPostProcess extends PostProcess {
    /**
     * Gets a string identifying the name of the class
     * @returns "SharpenPostProcess" string
     */
    getClassName() {
        return "SharpenPostProcess";
    }
    /**
     * Creates a new instance ConvolutionPostProcess
     * @param name The name of the effect.
     * @param options The required width/height ratio to downsize to before computing the render pass.
     * @param camera The camera to apply the render pass to.
     * @param samplingMode The sampling mode to be used when computing the pass. (default: 0)
     * @param engine The engine which the post process will be applied. (default: current engine)
     * @param reusable If the post process can be reused on the same frame. (default: false)
     * @param textureType Type of textures used when performing the post process. (default: 0)
     * @param blockCompilation If compilation of the shader should not be done in the constructor. The updateEffect method can be used to compile the shader at a later time. (default: false)
     */
    constructor(name, options, camera, samplingMode, engine, reusable, textureType = 0, blockCompilation = false) {
        super(name, "sharpen", ["sharpnessAmounts", "screenSize"], null, options, camera, samplingMode, engine, reusable, null, textureType, undefined, null, blockCompilation);
        /**
         * How much of the original color should be applied. Setting this to 0 will display edge detection. (default: 1)
         */
        this.colorAmount = 1.0;
        /**
         * How much sharpness should be applied (default: 0.3)
         */
        this.edgeAmount = 0.3;
        // Push the current amounts into the effect on every application.
        this.onApply = (effect) => {
            effect.setFloat2("screenSize", this.width, this.height);
            effect.setFloat2("sharpnessAmounts", this.edgeAmount, this.colorAmount);
        };
    }
    /**
     * @internal
     */
    static _Parse(parsedPostProcess, targetCamera, scene, rootUrl) {
        return SerializationHelper.Parse(() => {
            // FIX: the constructor signature is (..., engine, reusable, textureType, blockCompilation).
            // The serialized values were previously passed as (textureType, reusable), which
            // swapped the two fields on deserialization.
            return new SharpenPostProcess(parsedPostProcess.name, parsedPostProcess.options, targetCamera, parsedPostProcess.renderTargetSamplingMode, scene.getEngine(), parsedPostProcess.reusable, parsedPostProcess.textureType);
        }, parsedPostProcess, scene, rootUrl);
    }
}
__decorate([
    serialize()
], SharpenPostProcess.prototype, "colorAmount", void 0);
__decorate([
    serialize()
], SharpenPostProcess.prototype, "edgeAmount", void 0);
RegisterClass("BABYLON.SharpenPostProcess", SharpenPostProcess);
|
|
1090
|
-
|
|
1091
|
-
// Do not edit.
|
|
1092
|
-
const name$r = "imageProcessingPixelShader";
|
|
1093
|
-
const shader$r = `varying vec2 vUV;
|
|
1094
|
-
#include<helperFunctions>
|
|
1095
|
-
#include<imageProcessingFunctions>
|
|
1096
|
-
#define CUSTOM_FRAGMENT_DEFINITIONS
|
|
1097
|
-
void main(void)
|
|
1098
|
-
#ifndef FROMLINEARSPACE
|
|
1099
|
-
result.rgb=toLinearSpace(result.rgb);
|
|
1100
|
-
result=applyImageProcessing(result);
|
|
1101
|
-
#ifdef FROMLINEARSPACE
|
|
1102
|
-
result=applyImageProcessing(result);
|
|
1103
|
-
#endif
|
|
1104
|
-
gl_FragColor=result;
|
|
1105
|
-
// Sideeffect
|
|
1106
|
-
ShaderStore.ShadersStore[name$r] = shader$r;
|
|
1107
|
-
|
|
1108
|
-
/**
|
|
1109
|
-
* ImageProcessingPostProcess
|
|
1110
|
-
* @see https://doc.babylonjs.com/features/featuresDeepDive/postProcesses/usePostProcesses#imageprocessing
|
|
1111
|
-
*/
|
|
1112
|
-
class ImageProcessingPostProcess extends PostProcess {
|
|
1113
|
-
/**
 * Gets the image processing configuration used either in this material.
 */
get imageProcessingConfiguration() {
    return this._imageProcessingConfiguration;
}
/**
 * Sets the Default image processing configuration used either in the this material.
 *
 * If sets to null, the scene one is in use.
 */
set imageProcessingConfiguration(value) {
    // We are almost sure it is applied by post process as
    // We are in the post process :-)
    value.applyByPostProcess = true;
    this._attachImageProcessingConfiguration(value);
}
|
|
1130
|
-
/**
 * Attaches a new image processing configuration to the PBR Material.
 * Falls back to the camera's scene configuration, the engine's last scene, the
 * last created scene, or a fresh configuration when none is supplied.
 * @param configuration The configuration to attach, or a falsy value to auto-pick one
 * @param doNotBuild When true, skips the parameter rebuild after attaching
 */
_attachImageProcessingConfiguration(configuration, doNotBuild = false) {
    if (configuration === this._imageProcessingConfiguration) {
        // Already attached — nothing to do.
        return;
    }
    // Detaches observer.
    if (this._imageProcessingConfiguration && this._imageProcessingObserver) {
        this._imageProcessingConfiguration.onUpdateParameters.remove(this._imageProcessingObserver);
    }
    // Pick the scene configuration if needed.
    if (!configuration) {
        let scene = null;
        const engine = this.getEngine();
        const camera = this.getCamera();
        if (camera) {
            scene = camera.getScene();
        }
        else if (engine && engine.scenes) {
            const scenes = engine.scenes;
            scene = scenes[scenes.length - 1];
        }
        else {
            scene = EngineStore.LastCreatedScene;
        }
        if (scene) {
            this._imageProcessingConfiguration = scene.imageProcessingConfiguration;
        }
        else {
            this._imageProcessingConfiguration = new ImageProcessingConfiguration();
        }
    }
    else {
        this._imageProcessingConfiguration = configuration;
    }
    // Attaches observer.
    if (this._imageProcessingConfiguration) {
        this._imageProcessingObserver = this._imageProcessingConfiguration.onUpdateParameters.add(() => {
            this._updateParameters();
        });
    }
    // Ensure the effect will be rebuilt.
    if (!doNotBuild) {
        this._updateParameters();
    }
}
|
|
1179
|
-
/**
 * If the post process is supported.
 */
get isSupported() {
    const effect = this.getEffect();
    // No effect yet counts as supported (it has not failed to compile).
    return !effect || effect.isSupported;
}
/**
 * Gets Color curves setup used in the effect if colorCurvesEnabled is set to true .
 */
get colorCurves() {
    return this.imageProcessingConfiguration.colorCurves;
}
/**
 * Sets Color curves setup used in the effect if colorCurvesEnabled is set to true .
 */
set colorCurves(value) {
    this.imageProcessingConfiguration.colorCurves = value;
}
/**
 * Gets whether the color curves effect is enabled.
 */
get colorCurvesEnabled() {
    return this.imageProcessingConfiguration.colorCurvesEnabled;
}
/**
 * Sets whether the color curves effect is enabled.
 */
set colorCurvesEnabled(value) {
    this.imageProcessingConfiguration.colorCurvesEnabled = value;
}
/**
 * Gets Color grading LUT texture used in the effect if colorGradingEnabled is set to true.
 */
get colorGradingTexture() {
    return this.imageProcessingConfiguration.colorGradingTexture;
}
/**
 * Sets Color grading LUT texture used in the effect if colorGradingEnabled is set to true.
 */
set colorGradingTexture(value) {
    this.imageProcessingConfiguration.colorGradingTexture = value;
}
/**
 * Gets whether the color grading effect is enabled.
 */
get colorGradingEnabled() {
    return this.imageProcessingConfiguration.colorGradingEnabled;
}
/**
 * Sets whether the color grading effect is enabled.
 */
set colorGradingEnabled(value) {
    this.imageProcessingConfiguration.colorGradingEnabled = value;
}
|
|
1234
|
-
/**
 * Gets exposure used in the effect.
 */
get exposure() {
    return this.imageProcessingConfiguration.exposure;
}
/**
 * Sets exposure used in the effect.
 */
set exposure(value) {
    this.imageProcessingConfiguration.exposure = value;
}
/**
 * Gets whether tonemapping is enabled or not.
 */
get toneMappingEnabled() {
    return this._imageProcessingConfiguration.toneMappingEnabled;
}
/**
 * Sets whether tonemapping is enabled or not
 */
set toneMappingEnabled(value) {
    this._imageProcessingConfiguration.toneMappingEnabled = value;
}
/**
 * Gets the type of tone mapping effect.
 */
get toneMappingType() {
    return this._imageProcessingConfiguration.toneMappingType;
}
/**
 * Sets the type of tone mapping effect.
 */
set toneMappingType(value) {
    this._imageProcessingConfiguration.toneMappingType = value;
}
/**
 * Gets contrast used in the effect.
 */
get contrast() {
    return this.imageProcessingConfiguration.contrast;
}
/**
 * Sets contrast used in the effect.
 */
set contrast(value) {
    this.imageProcessingConfiguration.contrast = value;
}
|
|
1282
|
-
/**
 * Gets Vignette stretch size.
 */
get vignetteStretch() {
    return this.imageProcessingConfiguration.vignetteStretch;
}
/**
 * Sets Vignette stretch size.
 */
set vignetteStretch(value) {
    this.imageProcessingConfiguration.vignetteStretch = value;
}
/**
 * Gets Vignette center X Offset.
 * @deprecated use vignetteCenterX instead
 */
get vignetteCentreX() {
    return this.imageProcessingConfiguration.vignetteCenterX;
}
/**
 * Sets Vignette center X Offset.
 * @deprecated use vignetteCenterX instead
 */
set vignetteCentreX(value) {
    this.imageProcessingConfiguration.vignetteCenterX = value;
}
/**
 * Gets Vignette center Y Offset.
 * @deprecated use vignetteCenterY instead
 */
get vignetteCentreY() {
    return this.imageProcessingConfiguration.vignetteCenterY;
}
/**
 * Sets Vignette center Y Offset.
 * @deprecated use vignetteCenterY instead
 */
set vignetteCentreY(value) {
    this.imageProcessingConfiguration.vignetteCenterY = value;
}
/**
 * Gets Vignette center Y Offset.
 */
get vignetteCenterY() {
    return this.imageProcessingConfiguration.vignetteCenterY;
}
/**
 * Sets Vignette center Y Offset.
 */
set vignetteCenterY(value) {
    this.imageProcessingConfiguration.vignetteCenterY = value;
}
/**
 * Gets Vignette center X Offset.
 */
get vignetteCenterX() {
    return this.imageProcessingConfiguration.vignetteCenterX;
}
/**
 * Sets Vignette center X Offset.
 */
set vignetteCenterX(value) {
    this.imageProcessingConfiguration.vignetteCenterX = value;
}
|
|
1340
|
-
/**
 * Gets Vignette weight or intensity of the vignette effect.
 */
get vignetteWeight() {
    return this.imageProcessingConfiguration.vignetteWeight;
}
/**
 * Sets Vignette weight or intensity of the vignette effect.
 */
set vignetteWeight(value) {
    this.imageProcessingConfiguration.vignetteWeight = value;
}
/**
 * Gets Color of the vignette applied on the screen through the chosen blend mode (vignetteBlendMode)
 * if vignetteEnabled is set to true.
 */
get vignetteColor() {
    return this.imageProcessingConfiguration.vignetteColor;
}
/**
 * Sets Color of the vignette applied on the screen through the chosen blend mode (vignetteBlendMode)
 * if vignetteEnabled is set to true.
 */
set vignetteColor(value) {
    this.imageProcessingConfiguration.vignetteColor = value;
}
/**
 * Gets Camera field of view used by the Vignette effect.
 */
get vignetteCameraFov() {
    return this.imageProcessingConfiguration.vignetteCameraFov;
}
/**
 * Sets Camera field of view used by the Vignette effect.
 */
set vignetteCameraFov(value) {
    this.imageProcessingConfiguration.vignetteCameraFov = value;
}
/**
 * Gets the vignette blend mode allowing different kind of effect.
 */
get vignetteBlendMode() {
    return this.imageProcessingConfiguration.vignetteBlendMode;
}
/**
 * Sets the vignette blend mode allowing different kind of effect.
 */
set vignetteBlendMode(value) {
    this.imageProcessingConfiguration.vignetteBlendMode = value;
}
/**
 * Gets whether the vignette effect is enabled.
 */
get vignetteEnabled() {
    return this.imageProcessingConfiguration.vignetteEnabled;
}
/**
 * Sets whether the vignette effect is enabled.
 */
set vignetteEnabled(value) {
    this.imageProcessingConfiguration.vignetteEnabled = value;
}
|
|
1402
|
-
/**
|
|
1403
|
-
* Gets intensity of the dithering effect.
|
|
1404
|
-
*/
|
|
1405
|
-
get ditheringIntensity() {
|
|
1406
|
-
return this.imageProcessingConfiguration.ditheringIntensity;
|
|
1407
|
-
}
|
|
1408
|
-
/**
|
|
1409
|
-
* Sets intensity of the dithering effect.
|
|
1410
|
-
*/
|
|
1411
|
-
set ditheringIntensity(value) {
|
|
1412
|
-
this.imageProcessingConfiguration.ditheringIntensity = value;
|
|
1413
|
-
}
|
|
1414
|
-
/**
|
|
1415
|
-
* Gets whether the dithering effect is enabled.
|
|
1416
|
-
*/
|
|
1417
|
-
get ditheringEnabled() {
|
|
1418
|
-
return this.imageProcessingConfiguration.ditheringEnabled;
|
|
1419
|
-
}
|
|
1420
|
-
/**
|
|
1421
|
-
* Sets whether the dithering effect is enabled.
|
|
1422
|
-
*/
|
|
1423
|
-
set ditheringEnabled(value) {
|
|
1424
|
-
this.imageProcessingConfiguration.ditheringEnabled = value;
|
|
1425
|
-
}
|
|
1426
|
-
/**
|
|
1427
|
-
* Gets whether the input of the processing is in Gamma or Linear Space.
|
|
1428
|
-
*/
|
|
1429
|
-
get fromLinearSpace() {
|
|
1430
|
-
return this._fromLinearSpace;
|
|
1431
|
-
}
|
|
1432
|
-
/**
|
|
1433
|
-
* Sets whether the input of the processing is in Gamma or Linear Space.
|
|
1434
|
-
*/
|
|
1435
|
-
set fromLinearSpace(value) {
|
|
1436
|
-
if (this._fromLinearSpace === value) {
|
|
1437
|
-
return;
|
|
1438
|
-
}
|
|
1439
|
-
this._fromLinearSpace = value;
|
|
1440
|
-
this._updateParameters();
|
|
1441
|
-
}
|
|
1442
|
-
constructor(name, options, camera = null, samplingMode, engine, reusable, textureType = 0, imageProcessingConfiguration) {
|
|
1443
|
-
super(name, "imageProcessing", [], [], options, camera, samplingMode, engine, reusable, null, textureType, "postprocess", null, true);
|
|
1444
|
-
this._fromLinearSpace = true;
|
|
1445
|
-
/**
|
|
1446
|
-
* Defines cache preventing GC.
|
|
1447
|
-
*/
|
|
1448
|
-
this._defines = {
|
|
1449
|
-
IMAGEPROCESSING: false,
|
|
1450
|
-
VIGNETTE: false,
|
|
1451
|
-
VIGNETTEBLENDMODEMULTIPLY: false,
|
|
1452
|
-
VIGNETTEBLENDMODEOPAQUE: false,
|
|
1453
|
-
TONEMAPPING: false,
|
|
1454
|
-
TONEMAPPING_ACES: false,
|
|
1455
|
-
CONTRAST: false,
|
|
1456
|
-
COLORCURVES: false,
|
|
1457
|
-
COLORGRADING: false,
|
|
1458
|
-
COLORGRADING3D: false,
|
|
1459
|
-
FROMLINEARSPACE: false,
|
|
1460
|
-
SAMPLER3DGREENDEPTH: false,
|
|
1461
|
-
SAMPLER3DBGRMAP: false,
|
|
1462
|
-
DITHER: false,
|
|
1463
|
-
IMAGEPROCESSINGPOSTPROCESS: false,
|
|
1464
|
-
EXPOSURE: false,
|
|
1465
|
-
SKIPFINALCOLORCLAMP: false,
|
|
1466
|
-
};
|
|
1467
|
-
// Setup the configuration as forced by the constructor. This would then not force the
|
|
1468
|
-
// scene materials output in linear space and let untouched the default forward pass.
|
|
1469
|
-
if (imageProcessingConfiguration) {
|
|
1470
|
-
imageProcessingConfiguration.applyByPostProcess = true;
|
|
1471
|
-
this._attachImageProcessingConfiguration(imageProcessingConfiguration, true);
|
|
1472
|
-
// This will cause the shader to be compiled
|
|
1473
|
-
this._updateParameters();
|
|
1474
|
-
}
|
|
1475
|
-
// Setup the default processing configuration to the scene.
|
|
1476
|
-
else {
|
|
1477
|
-
this._attachImageProcessingConfiguration(null, true);
|
|
1478
|
-
this.imageProcessingConfiguration.applyByPostProcess = true;
|
|
1479
|
-
}
|
|
1480
|
-
this.onApply = (effect) => {
|
|
1481
|
-
this.imageProcessingConfiguration.bind(effect, this.aspectRatio);
|
|
1482
|
-
};
|
|
1483
|
-
}
|
|
1484
|
-
/**
|
|
1485
|
-
* "ImageProcessingPostProcess"
|
|
1486
|
-
* @returns "ImageProcessingPostProcess"
|
|
1487
|
-
*/
|
|
1488
|
-
getClassName() {
|
|
1489
|
-
return "ImageProcessingPostProcess";
|
|
1490
|
-
}
|
|
1491
|
-
/**
|
|
1492
|
-
* @internal
|
|
1493
|
-
*/
|
|
1494
|
-
_updateParameters() {
|
|
1495
|
-
this._defines.FROMLINEARSPACE = this._fromLinearSpace;
|
|
1496
|
-
this.imageProcessingConfiguration.prepareDefines(this._defines, true);
|
|
1497
|
-
let defines = "";
|
|
1498
|
-
for (const define in this._defines) {
|
|
1499
|
-
if (this._defines[define]) {
|
|
1500
|
-
defines += `#define ${define};\r\n`;
|
|
1501
|
-
}
|
|
1502
|
-
}
|
|
1503
|
-
const samplers = ["textureSampler"];
|
|
1504
|
-
const uniforms = ["scale"];
|
|
1505
|
-
if (ImageProcessingConfiguration) {
|
|
1506
|
-
ImageProcessingConfiguration.PrepareSamplers(samplers, this._defines);
|
|
1507
|
-
ImageProcessingConfiguration.PrepareUniforms(uniforms, this._defines);
|
|
1508
|
-
}
|
|
1509
|
-
this.updateEffect(defines, uniforms, samplers);
|
|
1510
|
-
}
|
|
1511
|
-
dispose(camera) {
|
|
1512
|
-
super.dispose(camera);
|
|
1513
|
-
if (this._imageProcessingConfiguration && this._imageProcessingObserver) {
|
|
1514
|
-
this._imageProcessingConfiguration.onUpdateParameters.remove(this._imageProcessingObserver);
|
|
1515
|
-
}
|
|
1516
|
-
if (this._imageProcessingConfiguration) {
|
|
1517
|
-
this.imageProcessingConfiguration.applyByPostProcess = false;
|
|
1518
|
-
}
|
|
1519
|
-
}
|
|
1520
|
-
}
|
|
1521
|
-
__decorate([
|
|
1522
|
-
serialize()
|
|
1523
|
-
], ImageProcessingPostProcess.prototype, "_fromLinearSpace", void 0);
|
|
1524
|
-
|
|
1525
|
-
// Do not edit.
|
|
1526
|
-
const name$q = "chromaticAberrationPixelShader";
|
|
1527
|
-
const shader$q = `uniform sampler2D textureSampler;
|
|
1528
|
-
void main(void)
|
|
+
centered_screen_pos.y*centered_screen_pos.y;
|
|
|
1529
|
-
// Sideeffect
|
|
1530
|
-
ShaderStore.ShadersStore[name$q] = shader$q;
|
|
1531
|
-
|
|
1532
|
-
/**
|
|
1533
|
-
* The ChromaticAberrationPostProcess separates the rgb channels in an image to produce chromatic distortion around the edges of the screen
|
|
1534
|
-
*/
|
|
1535
|
-
class ChromaticAberrationPostProcess extends PostProcess {
|
|
1536
|
-
/**
|
|
1537
|
-
* Gets a string identifying the name of the class
|
|
1538
|
-
* @returns "ChromaticAberrationPostProcess" string
|
|
1539
|
-
*/
|
|
1540
|
-
getClassName() {
|
|
1541
|
-
return "ChromaticAberrationPostProcess";
|
|
1542
|
-
}
|
|
1543
|
-
/**
|
|
1544
|
-
* Creates a new instance ChromaticAberrationPostProcess
|
|
1545
|
-
* @param name The name of the effect.
|
|
1546
|
-
* @param screenWidth The width of the screen to apply the effect on.
|
|
1547
|
-
* @param screenHeight The height of the screen to apply the effect on.
|
|
1548
|
-
* @param options The required width/height ratio to downsize to before computing the render pass.
|
|
1549
|
-
* @param camera The camera to apply the render pass to.
|
|
1550
|
-
* @param samplingMode The sampling mode to be used when computing the pass. (default: 0)
|
|
1551
|
-
* @param engine The engine which the post process will be applied. (default: current engine)
|
|
1552
|
-
* @param reusable If the post process can be reused on the same frame. (default: false)
|
|
1553
|
-
* @param textureType Type of textures used when performing the post process. (default: 0)
|
|
1554
|
-
* @param blockCompilation If compilation of the shader should not be done in the constructor. The updateEffect method can be used to compile the shader at a later time. (default: false)
|
|
1555
|
-
*/
|
|
1556
|
-
constructor(name, screenWidth, screenHeight, options, camera, samplingMode, engine, reusable, textureType = 0, blockCompilation = false) {
|
|
1557
|
-
super(name, "chromaticAberration", ["chromatic_aberration", "screen_width", "screen_height", "direction", "radialIntensity", "centerPosition"], [], options, camera, samplingMode, engine, reusable, null, textureType, undefined, null, blockCompilation);
|
|
1558
|
-
/**
|
|
1559
|
-
* The amount of separation of rgb channels (default: 30)
|
|
1560
|
-
*/
|
|
1561
|
-
this.aberrationAmount = 30;
|
|
1562
|
-
/**
|
|
1563
|
-
* The amount the effect will increase for pixels closer to the edge of the screen. (default: 0)
|
|
1564
|
-
*/
|
|
1565
|
-
this.radialIntensity = 0;
|
|
1566
|
-
/**
|
|
1567
|
-
* The normalized direction in which the rgb channels should be separated. If set to 0,0 radial direction will be used. (default: Vector2(0.707,0.707))
|
|
1568
|
-
*/
|
|
1569
|
-
this.direction = new Vector2(0.707, 0.707);
|
|
1570
|
-
/**
|
|
1571
|
-
* The center position where the radialIntensity should be around. [0.5,0.5 is center of screen, 1,1 is top right corner] (default: Vector2(0.5 ,0.5))
|
|
1572
|
-
*/
|
|
1573
|
-
this.centerPosition = new Vector2(0.5, 0.5);
|
|
1574
|
-
this.screenWidth = screenWidth;
|
|
1575
|
-
this.screenHeight = screenHeight;
|
|
1576
|
-
this.onApplyObservable.add((effect) => {
|
|
1577
|
-
effect.setFloat("chromatic_aberration", this.aberrationAmount);
|
|
1578
|
-
effect.setFloat("screen_width", screenWidth);
|
|
1579
|
-
effect.setFloat("screen_height", screenHeight);
|
|
1580
|
-
effect.setFloat("radialIntensity", this.radialIntensity);
|
|
1581
|
-
effect.setFloat2("direction", this.direction.x, this.direction.y);
|
|
1582
|
-
effect.setFloat2("centerPosition", this.centerPosition.x, this.centerPosition.y);
|
|
1583
|
-
});
|
|
1584
|
-
}
|
|
1585
|
-
/**
|
|
1586
|
-
* @internal
|
|
1587
|
-
*/
|
|
1588
|
-
static _Parse(parsedPostProcess, targetCamera, scene, rootUrl) {
|
|
1589
|
-
return SerializationHelper.Parse(() => {
|
|
1590
|
-
return new ChromaticAberrationPostProcess(parsedPostProcess.name, parsedPostProcess.screenWidth, parsedPostProcess.screenHeight, parsedPostProcess.options, targetCamera, parsedPostProcess.renderTargetSamplingMode, scene.getEngine(), parsedPostProcess.reusable, parsedPostProcess.textureType, false);
|
|
1591
|
-
}, parsedPostProcess, scene, rootUrl);
|
|
1592
|
-
}
|
|
1593
|
-
}
|
|
1594
|
-
__decorate([
|
|
1595
|
-
serialize()
|
|
1596
|
-
], ChromaticAberrationPostProcess.prototype, "aberrationAmount", void 0);
|
|
1597
|
-
__decorate([
|
|
1598
|
-
serialize()
|
|
1599
|
-
], ChromaticAberrationPostProcess.prototype, "radialIntensity", void 0);
|
|
1600
|
-
__decorate([
|
|
1601
|
-
serialize()
|
|
1602
|
-
], ChromaticAberrationPostProcess.prototype, "direction", void 0);
|
|
1603
|
-
__decorate([
|
|
1604
|
-
serialize()
|
|
1605
|
-
], ChromaticAberrationPostProcess.prototype, "centerPosition", void 0);
|
|
1606
|
-
__decorate([
|
|
1607
|
-
serialize()
|
|
1608
|
-
], ChromaticAberrationPostProcess.prototype, "screenWidth", void 0);
|
|
1609
|
-
__decorate([
|
|
1610
|
-
serialize()
|
|
1611
|
-
], ChromaticAberrationPostProcess.prototype, "screenHeight", void 0);
|
|
1612
|
-
RegisterClass("BABYLON.ChromaticAberrationPostProcess", ChromaticAberrationPostProcess);
|
|
1613
|
-
|
|
1614
|
-
// Do not edit.
|
|
1615
|
-
const name$p = "grainPixelShader";
|
|
1616
|
-
const shader$p = `#include<helperFunctions>
|
|
1617
|
-
uniform sampler2D textureSampler;
|
|
1618
|
-
void main(void)
|
|
1619
|
-
// Sideeffect
|
|
1620
|
-
ShaderStore.ShadersStore[name$p] = shader$p;
|
|
1621
|
-
|
|
1622
|
-
/**
|
|
1623
|
-
* The GrainPostProcess adds noise to the image at mid luminance levels
|
|
1624
|
-
*/
|
|
1625
|
-
class GrainPostProcess extends PostProcess {
|
|
1626
|
-
/**
|
|
1627
|
-
* Gets a string identifying the name of the class
|
|
1628
|
-
* @returns "GrainPostProcess" string
|
|
1629
|
-
*/
|
|
1630
|
-
getClassName() {
|
|
1631
|
-
return "GrainPostProcess";
|
|
1632
|
-
}
|
|
1633
|
-
/**
|
|
1634
|
-
* Creates a new instance of @see GrainPostProcess
|
|
1635
|
-
* @param name The name of the effect.
|
|
1636
|
-
* @param options The required width/height ratio to downsize to before computing the render pass.
|
|
1637
|
-
* @param camera The camera to apply the render pass to.
|
|
1638
|
-
* @param samplingMode The sampling mode to be used when computing the pass. (default: 0)
|
|
1639
|
-
* @param engine The engine which the post process will be applied. (default: current engine)
|
|
1640
|
-
* @param reusable If the post process can be reused on the same frame. (default: false)
|
|
1641
|
-
* @param textureType Type of textures used when performing the post process. (default: 0)
|
|
1642
|
-
* @param blockCompilation If compilation of the shader should not be done in the constructor. The updateEffect method can be used to compile the shader at a later time. (default: false)
|
|
1643
|
-
*/
|
|
1644
|
-
constructor(name, options, camera, samplingMode, engine, reusable, textureType = 0, blockCompilation = false) {
|
|
1645
|
-
super(name, "grain", ["intensity", "animatedSeed"], [], options, camera, samplingMode, engine, reusable, null, textureType, undefined, null, blockCompilation);
|
|
1646
|
-
/**
|
|
1647
|
-
* The intensity of the grain added (default: 30)
|
|
1648
|
-
*/
|
|
1649
|
-
this.intensity = 30;
|
|
1650
|
-
/**
|
|
1651
|
-
* If the grain should be randomized on every frame
|
|
1652
|
-
*/
|
|
1653
|
-
this.animated = false;
|
|
1654
|
-
this.onApplyObservable.add((effect) => {
|
|
1655
|
-
effect.setFloat("intensity", this.intensity);
|
|
1656
|
-
effect.setFloat("animatedSeed", this.animated ? Math.random() + 1 : 1);
|
|
1657
|
-
});
|
|
1658
|
-
}
|
|
1659
|
-
/**
|
|
1660
|
-
* @internal
|
|
1661
|
-
*/
|
|
1662
|
-
static _Parse(parsedPostProcess, targetCamera, scene, rootUrl) {
|
|
1663
|
-
return SerializationHelper.Parse(() => {
|
|
1664
|
-
return new GrainPostProcess(parsedPostProcess.name, parsedPostProcess.options, targetCamera, parsedPostProcess.renderTargetSamplingMode, scene.getEngine(), parsedPostProcess.reusable);
|
|
1665
|
-
}, parsedPostProcess, scene, rootUrl);
|
|
1666
|
-
}
|
|
1667
|
-
}
|
|
1668
|
-
__decorate([
|
|
1669
|
-
serialize()
|
|
1670
|
-
], GrainPostProcess.prototype, "intensity", void 0);
|
|
1671
|
-
__decorate([
|
|
1672
|
-
serialize()
|
|
1673
|
-
], GrainPostProcess.prototype, "animated", void 0);
|
|
1674
|
-
RegisterClass("BABYLON.GrainPostProcess", GrainPostProcess);
|
|
1675
|
-
|
|
1676
|
-
// Do not edit.
|
|
1677
|
-
const name$o = "fxaaPixelShader";
|
|
1678
|
-
const shader$o = `#if defined(WEBGL2) || defined(WEBGPU) || defined(NATIVE)
|
|
1679
|
-
#define TEXTUREFUNC(s,c,l) texture2DLodEXT(s,c,l)
|
|
1680
|
-
#else
|
|
1681
|
-
#define TEXTUREFUNC(s,c,b) texture2D(s,c,b)
|
|
1682
|
-
#endif
|
|
1683
|
-
uniform sampler2D textureSampler;
|
|
1684
|
-
void main(){
|
|
1685
|
-
if(range<rangeMaxClamped)
|
|
1686
|
-
float lumaNW=FxaaLuma(TEXTUREFUNC(textureSampler,sampleCoordNW,0.0));
|
|
1687
|
-
if(range<rangeMaxClamped)
|
|
1688
|
-
gl_FragColor=TEXTUREFUNC(textureSampler,posM,0.0);
|
|
1689
|
-
}`;
|
|
1690
|
-
// Sideeffect
|
|
1691
|
-
ShaderStore.ShadersStore[name$o] = shader$o;
|
|
1692
|
-
|
|
1693
|
-
// Do not edit.
|
|
1694
|
-
const name$n = "fxaaVertexShader";
|
|
1695
|
-
const shader$n = `attribute vec2 position;
|
|
1696
|
-
void main(void) {
|
|
1697
|
-
vUV=(position*madd+madd);
|
|
1698
|
-
}`;
|
|
1699
|
-
// Sideeffect
|
|
1700
|
-
ShaderStore.ShadersStore[name$n] = shader$n;
|
|
1701
|
-
|
|
1702
|
-
/**
|
|
1703
|
-
* Fxaa post process
|
|
1704
|
-
* @see https://doc.babylonjs.com/features/featuresDeepDive/postProcesses/usePostProcesses#fxaa
|
|
1705
|
-
*/
|
|
1706
|
-
class FxaaPostProcess extends PostProcess {
|
|
1707
|
-
/**
|
|
1708
|
-
* Gets a string identifying the name of the class
|
|
1709
|
-
* @returns "FxaaPostProcess" string
|
|
1710
|
-
*/
|
|
1711
|
-
getClassName() {
|
|
1712
|
-
return "FxaaPostProcess";
|
|
1713
|
-
}
|
|
1714
|
-
constructor(name, options, camera = null, samplingMode, engine, reusable, textureType = 0) {
|
|
1715
|
-
super(name, "fxaa", ["texelSize"], null, options, camera, samplingMode || Texture.BILINEAR_SAMPLINGMODE, engine, reusable, null, textureType, "fxaa", undefined, true);
|
|
1716
|
-
const defines = this._getDefines();
|
|
1717
|
-
this.updateEffect(defines);
|
|
1718
|
-
this.onApplyObservable.add((effect) => {
|
|
1719
|
-
const texelSize = this.texelSize;
|
|
1720
|
-
effect.setFloat2("texelSize", texelSize.x, texelSize.y);
|
|
1721
|
-
});
|
|
1722
|
-
}
|
|
1723
|
-
_getDefines() {
|
|
1724
|
-
const engine = this.getEngine();
|
|
1725
|
-
if (!engine) {
|
|
1726
|
-
return null;
|
|
1727
|
-
}
|
|
1728
|
-
const glInfo = engine.getGlInfo();
|
|
1729
|
-
if (glInfo && glInfo.renderer && glInfo.renderer.toLowerCase().indexOf("mali") > -1) {
|
|
1730
|
-
return "#define MALI 1\n";
|
|
1731
|
-
}
|
|
1732
|
-
return null;
|
|
1733
|
-
}
|
|
1734
|
-
/**
|
|
1735
|
-
* @internal
|
|
1736
|
-
*/
|
|
1737
|
-
static _Parse(parsedPostProcess, targetCamera, scene, rootUrl) {
|
|
1738
|
-
return SerializationHelper.Parse(() => {
|
|
1739
|
-
return new FxaaPostProcess(parsedPostProcess.name, parsedPostProcess.options, targetCamera, parsedPostProcess.renderTargetSamplingMode, scene.getEngine(), parsedPostProcess.reusable);
|
|
1740
|
-
}, parsedPostProcess, scene, rootUrl);
|
|
1741
|
-
}
|
|
1742
|
-
}
|
|
1743
|
-
RegisterClass("BABYLON.FxaaPostProcess", FxaaPostProcess);
|
|
1744
|
-
|
|
1745
|
-
/**
|
|
1746
|
-
* PostProcessRenderPipeline
|
|
1747
|
-
* @see https://doc.babylonjs.com/features/featuresDeepDive/postProcesses/postProcessRenderPipeline
|
|
1748
|
-
*/
|
|
1749
|
-
class PostProcessRenderPipeline {
|
|
1750
|
-
/**
|
|
1751
|
-
* Gets pipeline name
|
|
1752
|
-
*/
|
|
1753
|
-
get name() {
|
|
1754
|
-
return this._name;
|
|
1755
|
-
}
|
|
1756
|
-
/** Gets the list of attached cameras */
|
|
1757
|
-
get cameras() {
|
|
1758
|
-
return this._cameras;
|
|
1759
|
-
}
|
|
1760
|
-
/**
|
|
1761
|
-
* Initializes a PostProcessRenderPipeline
|
|
1762
|
-
* @param _engine engine to add the pipeline to
|
|
1763
|
-
* @param name name of the pipeline
|
|
1764
|
-
*/
|
|
1765
|
-
constructor(_engine, name) {
|
|
1766
|
-
this._engine = _engine;
|
|
1767
|
-
this._name = name;
|
|
1768
|
-
this._renderEffects = {};
|
|
1769
|
-
this._renderEffectsForIsolatedPass = new Array();
|
|
1770
|
-
this._cameras = [];
|
|
1771
|
-
}
|
|
1772
|
-
/**
|
|
1773
|
-
* Gets the class name
|
|
1774
|
-
* @returns "PostProcessRenderPipeline"
|
|
1775
|
-
*/
|
|
1776
|
-
getClassName() {
|
|
1777
|
-
return "PostProcessRenderPipeline";
|
|
1778
|
-
}
|
|
1779
|
-
/**
|
|
1780
|
-
* If all the render effects in the pipeline are supported
|
|
1781
|
-
*/
|
|
1782
|
-
get isSupported() {
|
|
1783
|
-
for (const renderEffectName in this._renderEffects) {
|
|
1784
|
-
if (Object.prototype.hasOwnProperty.call(this._renderEffects, renderEffectName)) {
|
|
1785
|
-
if (!this._renderEffects[renderEffectName].isSupported) {
|
|
1786
|
-
return false;
|
|
1787
|
-
}
|
|
1788
|
-
}
|
|
1789
|
-
}
|
|
1790
|
-
return true;
|
|
1791
|
-
}
|
|
1792
|
-
/**
|
|
1793
|
-
* Adds an effect to the pipeline
|
|
1794
|
-
* @param renderEffect the effect to add
|
|
1795
|
-
*/
|
|
1796
|
-
addEffect(renderEffect) {
|
|
1797
|
-
this._renderEffects[renderEffect._name] = renderEffect;
|
|
1798
|
-
}
|
|
1799
|
-
// private
|
|
1800
|
-
/** @internal */
|
|
1801
|
-
_rebuild() { }
|
|
1802
|
-
/**
|
|
1803
|
-
* @internal
|
|
1804
|
-
*/
|
|
1805
|
-
_enableEffect(renderEffectName, cameras) {
|
|
1806
|
-
const renderEffects = this._renderEffects[renderEffectName];
|
|
1807
|
-
if (!renderEffects) {
|
|
1808
|
-
return;
|
|
1809
|
-
}
|
|
1810
|
-
renderEffects._enable(Tools.MakeArray(cameras || this._cameras));
|
|
1811
|
-
}
|
|
1812
|
-
/**
|
|
1813
|
-
* @internal
|
|
1814
|
-
*/
|
|
1815
|
-
_disableEffect(renderEffectName, cameras) {
|
|
1816
|
-
const renderEffects = this._renderEffects[renderEffectName];
|
|
1817
|
-
if (!renderEffects) {
|
|
1818
|
-
return;
|
|
1819
|
-
}
|
|
1820
|
-
renderEffects._disable(Tools.MakeArray(cameras || this._cameras));
|
|
1821
|
-
}
|
|
1822
|
-
/**
|
|
1823
|
-
* @internal
|
|
1824
|
-
*/
|
|
1825
|
-
_attachCameras(cameras, unique) {
|
|
1826
|
-
const cams = Tools.MakeArray(cameras || this._cameras);
|
|
1827
|
-
if (!cams) {
|
|
1828
|
-
return;
|
|
1829
|
-
}
|
|
1830
|
-
const indicesToDelete = [];
|
|
1831
|
-
let i;
|
|
1832
|
-
for (i = 0; i < cams.length; i++) {
|
|
1833
|
-
const camera = cams[i];
|
|
1834
|
-
if (!camera) {
|
|
1835
|
-
continue;
|
|
1836
|
-
}
|
|
1837
|
-
if (this._cameras.indexOf(camera) === -1) {
|
|
1838
|
-
this._cameras.push(camera);
|
|
1839
|
-
}
|
|
1840
|
-
else if (unique) {
|
|
1841
|
-
indicesToDelete.push(i);
|
|
1842
|
-
}
|
|
1843
|
-
}
|
|
1844
|
-
for (i = 0; i < indicesToDelete.length; i++) {
|
|
1845
|
-
cams.splice(indicesToDelete[i], 1);
|
|
1846
|
-
}
|
|
1847
|
-
for (const renderEffectName in this._renderEffects) {
|
|
1848
|
-
if (Object.prototype.hasOwnProperty.call(this._renderEffects, renderEffectName)) {
|
|
1849
|
-
this._renderEffects[renderEffectName]._attachCameras(cams);
|
|
1850
|
-
}
|
|
1851
|
-
}
|
|
1852
|
-
}
|
|
1853
|
-
/**
|
|
1854
|
-
* @internal
|
|
1855
|
-
*/
|
|
1856
|
-
_detachCameras(cameras) {
|
|
1857
|
-
const cams = Tools.MakeArray(cameras || this._cameras);
|
|
1858
|
-
if (!cams) {
|
|
1859
|
-
return;
|
|
1860
|
-
}
|
|
1861
|
-
for (const renderEffectName in this._renderEffects) {
|
|
1862
|
-
if (Object.prototype.hasOwnProperty.call(this._renderEffects, renderEffectName)) {
|
|
1863
|
-
this._renderEffects[renderEffectName]._detachCameras(cams);
|
|
1864
|
-
}
|
|
1865
|
-
}
|
|
1866
|
-
for (let i = 0; i < cams.length; i++) {
|
|
1867
|
-
this._cameras.splice(this._cameras.indexOf(cams[i]), 1);
|
|
1868
|
-
}
|
|
1869
|
-
}
|
|
1870
|
-
/** @internal */
|
|
1871
|
-
_update() {
|
|
1872
|
-
for (const renderEffectName in this._renderEffects) {
|
|
1873
|
-
if (Object.prototype.hasOwnProperty.call(this._renderEffects, renderEffectName)) {
|
|
1874
|
-
this._renderEffects[renderEffectName]._update();
|
|
1875
|
-
}
|
|
1876
|
-
}
|
|
1877
|
-
for (let i = 0; i < this._cameras.length; i++) {
|
|
1878
|
-
if (!this._cameras[i]) {
|
|
1879
|
-
continue;
|
|
1880
|
-
}
|
|
1881
|
-
const cameraName = this._cameras[i].name;
|
|
1882
|
-
if (this._renderEffectsForIsolatedPass[cameraName]) {
|
|
1883
|
-
this._renderEffectsForIsolatedPass[cameraName]._update();
|
|
1884
|
-
}
|
|
1885
|
-
}
|
|
1886
|
-
}
|
|
1887
|
-
/** @internal */
|
|
1888
|
-
_reset() {
|
|
1889
|
-
this._renderEffects = {};
|
|
1890
|
-
this._renderEffectsForIsolatedPass = new Array();
|
|
1891
|
-
}
|
|
1892
|
-
_enableMSAAOnFirstPostProcess(sampleCount) {
|
|
1893
|
-
if (!this._engine._features.supportMSAA) {
|
|
1894
|
-
return false;
|
|
1895
|
-
}
|
|
1896
|
-
// Set samples of the very first post process to 4 to enable native anti-aliasing in browsers that support webGL 2.0 (See: https://github.com/BabylonJS/Babylon.js/issues/3754)
|
|
1897
|
-
const effectKeys = Object.keys(this._renderEffects);
|
|
1898
|
-
if (effectKeys.length > 0) {
|
|
1899
|
-
const postProcesses = this._renderEffects[effectKeys[0]].getPostProcesses();
|
|
1900
|
-
if (postProcesses) {
|
|
1901
|
-
postProcesses[0].samples = sampleCount;
|
|
1902
|
-
}
|
|
1903
|
-
}
|
|
1904
|
-
return true;
|
|
1905
|
-
}
|
|
1906
|
-
/**
|
|
1907
|
-
* Sets the required values to the prepass renderer.
|
|
1908
|
-
* @param prePassRenderer defines the prepass renderer to setup.
|
|
1909
|
-
* @returns true if the pre pass is needed.
|
|
1910
|
-
*/
|
|
1911
|
-
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
|
1912
|
-
setPrePassRenderer(prePassRenderer) {
|
|
1913
|
-
// Do Nothing by default
|
|
1914
|
-
return false;
|
|
1915
|
-
}
|
|
1916
|
-
/**
|
|
1917
|
-
* Disposes of the pipeline
|
|
1918
|
-
*/
|
|
1919
|
-
dispose() {
|
|
1920
|
-
// Must be implemented by children
|
|
1921
|
-
}
|
|
1922
|
-
}
|
|
1923
|
-
__decorate([
|
|
1924
|
-
serialize()
|
|
1925
|
-
], PostProcessRenderPipeline.prototype, "_name", void 0);
|
|
1926
|
-
|
|
1927
|
-
/**
|
|
1928
|
-
* This represents a set of one or more post processes in Babylon.
|
|
1929
|
-
* A post process can be used to apply a shader to a texture after it is rendered.
|
|
1930
|
-
* @example https://doc.babylonjs.com/features/featuresDeepDive/postProcesses/postProcessRenderPipeline
|
|
1931
|
-
*/
|
|
1932
|
-
class PostProcessRenderEffect {
|
|
1933
|
-
/**
|
|
1934
|
-
* Instantiates a post process render effect.
|
|
1935
|
-
* A post process can be used to apply a shader to a texture after it is rendered.
|
|
1936
|
-
* @param engine The engine the effect is tied to
|
|
1937
|
-
* @param name The name of the effect
|
|
1938
|
-
* @param getPostProcesses A function that returns a set of post processes which the effect will run in order to be run.
|
|
1939
|
-
* @param singleInstance False if this post process can be run on multiple cameras. (default: true)
|
|
1940
|
-
*/
|
|
1941
|
-
constructor(engine, name, getPostProcesses, singleInstance) {
|
|
1942
|
-
this._name = name;
|
|
1943
|
-
this._singleInstance = singleInstance || true;
|
|
1944
|
-
this._getPostProcesses = getPostProcesses;
|
|
1945
|
-
this._cameras = {};
|
|
1946
|
-
this._indicesForCamera = {};
|
|
1947
|
-
this._postProcesses = {};
|
|
1948
|
-
}
|
|
1949
|
-
/**
|
|
1950
|
-
* Checks if all the post processes in the effect are supported.
|
|
1951
|
-
*/
|
|
1952
|
-
get isSupported() {
|
|
1953
|
-
for (const index in this._postProcesses) {
|
|
1954
|
-
if (Object.prototype.hasOwnProperty.call(this._postProcesses, index)) {
|
|
1955
|
-
const pps = this._postProcesses[index];
|
|
1956
|
-
for (let ppIndex = 0; ppIndex < pps.length; ppIndex++) {
|
|
1957
|
-
if (!pps[ppIndex].isSupported) {
|
|
1958
|
-
return false;
|
|
1959
|
-
}
|
|
1960
|
-
}
|
|
1961
|
-
}
|
|
1962
|
-
}
|
|
1963
|
-
return true;
|
|
1964
|
-
}
|
|
1965
|
-
/**
|
|
1966
|
-
* Updates the current state of the effect
|
|
1967
|
-
* @internal
|
|
1968
|
-
*/
|
|
1969
|
-
_update() { }
|
|
1970
|
-
/**
|
|
1971
|
-
* Attaches the effect on cameras
|
|
1972
|
-
* @param cameras The camera to attach to.
|
|
1973
|
-
* @internal
|
|
1974
|
-
*/
|
|
1975
|
-
_attachCameras(cameras) {
|
|
1976
|
-
let cameraKey;
|
|
1977
|
-
const cams = Tools.MakeArray(cameras || this._cameras);
|
|
1978
|
-
if (!cams) {
|
|
1979
|
-
return;
|
|
1980
|
-
}
|
|
1981
|
-
for (let i = 0; i < cams.length; i++) {
|
|
1982
|
-
const camera = cams[i];
|
|
1983
|
-
if (!camera) {
|
|
1984
|
-
continue;
|
|
1985
|
-
}
|
|
1986
|
-
const cameraName = camera.name;
|
|
1987
|
-
if (this._singleInstance) {
|
|
1988
|
-
cameraKey = 0;
|
|
1989
|
-
}
|
|
1990
|
-
else {
|
|
1991
|
-
cameraKey = cameraName;
|
|
1992
|
-
}
|
|
1993
|
-
if (!this._postProcesses[cameraKey]) {
|
|
1994
|
-
const postProcess = this._getPostProcesses();
|
|
1995
|
-
if (postProcess) {
|
|
1996
|
-
this._postProcesses[cameraKey] = Array.isArray(postProcess) ? postProcess : [postProcess];
|
|
1997
|
-
}
|
|
1998
|
-
}
|
|
1999
|
-
if (!this._indicesForCamera[cameraName]) {
|
|
2000
|
-
this._indicesForCamera[cameraName] = [];
|
|
2001
|
-
}
|
|
2002
|
-
this._postProcesses[cameraKey].forEach((postProcess) => {
|
|
2003
|
-
const index = camera.attachPostProcess(postProcess);
|
|
2004
|
-
this._indicesForCamera[cameraName].push(index);
|
|
2005
|
-
});
|
|
2006
|
-
if (!this._cameras[cameraName]) {
|
|
2007
|
-
this._cameras[cameraName] = camera;
|
|
2008
|
-
}
|
|
2009
|
-
}
|
|
2010
|
-
}
|
|
2011
|
-
/**
|
|
2012
|
-
* Detaches the effect on cameras
|
|
2013
|
-
* @param cameras The camera to detach from.
|
|
2014
|
-
* @internal
|
|
2015
|
-
*/
|
|
2016
|
-
_detachCameras(cameras) {
|
|
2017
|
-
const cams = Tools.MakeArray(cameras || this._cameras);
|
|
2018
|
-
if (!cams) {
|
|
2019
|
-
return;
|
|
2020
|
-
}
|
|
2021
|
-
for (let i = 0; i < cams.length; i++) {
|
|
2022
|
-
const camera = cams[i];
|
|
2023
|
-
const cameraName = camera.name;
|
|
2024
|
-
const postProcesses = this._postProcesses[this._singleInstance ? 0 : cameraName];
|
|
2025
|
-
if (postProcesses) {
|
|
2026
|
-
postProcesses.forEach((postProcess) => {
|
|
2027
|
-
camera.detachPostProcess(postProcess);
|
|
2028
|
-
});
|
|
2029
|
-
}
|
|
2030
|
-
if (this._cameras[cameraName]) {
|
|
2031
|
-
this._cameras[cameraName] = null;
|
|
2032
|
-
}
|
|
2033
|
-
}
|
|
2034
|
-
}
|
|
2035
|
-
/**
|
|
2036
|
-
* Enables the effect on given cameras
|
|
2037
|
-
* @param cameras The camera to enable.
|
|
2038
|
-
* @internal
|
|
2039
|
-
*/
|
|
2040
|
-
_enable(cameras) {
|
|
2041
|
-
const cams = Tools.MakeArray(cameras || this._cameras);
|
|
2042
|
-
if (!cams) {
|
|
2043
|
-
return;
|
|
2044
|
-
}
|
|
2045
|
-
for (let i = 0; i < cams.length; i++) {
|
|
2046
|
-
const camera = cams[i];
|
|
2047
|
-
const cameraName = camera.name;
|
|
2048
|
-
for (let j = 0; j < this._indicesForCamera[cameraName].length; j++) {
|
|
2049
|
-
if (camera._postProcesses[this._indicesForCamera[cameraName][j]] === undefined || camera._postProcesses[this._indicesForCamera[cameraName][j]] === null) {
|
|
2050
|
-
this._postProcesses[this._singleInstance ? 0 : cameraName].forEach((postProcess) => {
|
|
2051
|
-
cams[i].attachPostProcess(postProcess, this._indicesForCamera[cameraName][j]);
|
|
2052
|
-
});
|
|
2053
|
-
}
|
|
2054
|
-
}
|
|
2055
|
-
}
|
|
2056
|
-
}
|
|
2057
|
-
/**
|
|
2058
|
-
* Disables the effect on the given cameras
|
|
2059
|
-
* @param cameras The camera to disable.
|
|
2060
|
-
* @internal
|
|
2061
|
-
*/
|
|
2062
|
-
_disable(cameras) {
|
|
2063
|
-
const cams = Tools.MakeArray(cameras || this._cameras);
|
|
2064
|
-
if (!cams) {
|
|
2065
|
-
return;
|
|
2066
|
-
}
|
|
2067
|
-
for (let i = 0; i < cams.length; i++) {
|
|
2068
|
-
const camera = cams[i];
|
|
2069
|
-
const cameraName = camera.name;
|
|
2070
|
-
this._postProcesses[this._singleInstance ? 0 : cameraName].forEach((postProcess) => {
|
|
2071
|
-
camera.detachPostProcess(postProcess);
|
|
2072
|
-
});
|
|
2073
|
-
}
|
|
2074
|
-
}
|
|
2075
|
-
/**
|
|
2076
|
-
* Gets a list of the post processes contained in the effect.
|
|
2077
|
-
* @param camera The camera to get the post processes on.
|
|
2078
|
-
* @returns The list of the post processes in the effect.
|
|
2079
|
-
*/
|
|
2080
|
-
getPostProcesses(camera) {
|
|
2081
|
-
if (this._singleInstance) {
|
|
2082
|
-
return this._postProcesses[0];
|
|
2083
|
-
}
|
|
2084
|
-
else {
|
|
2085
|
-
if (!camera) {
|
|
2086
|
-
return null;
|
|
2087
|
-
}
|
|
2088
|
-
return this._postProcesses[camera.name];
|
|
2089
|
-
}
|
|
2090
|
-
}
|
|
2091
|
-
}
|
|
2092
|
-
|
|
2093
|
-
// Do not edit.
|
|
2094
|
-
const name$m = "circleOfConfusionPixelShader";
|
|
2095
|
-
const shader$m = `uniform sampler2D depthSampler;
|
|
2096
|
-
void main(void)
|
|
2097
|
-
float pixelDistance=(cameraMinMaxZ.x+cameraMinMaxZ.y*depth)*1000.0;
|
|
2098
|
-
float coc=abs(cocPrecalculation*((focusDistance-pixelDistance)/pixelDistance));
|
|
2099
|
-
// Sideeffect
|
|
2100
|
-
ShaderStore.ShadersStore[name$m] = shader$m;
|
|
2101
|
-
|
|
2102
|
-
/**
 * The CircleOfConfusionPostProcess computes the circle of confusion value for each pixel given required lens parameters. See https://en.wikipedia.org/wiki/Circle_of_confusion
 */
class CircleOfConfusionPostProcess extends PostProcess {
    /**
     * Gets a string identifying the name of the class
     * @returns "CircleOfConfusionPostProcess" string
     */
    getClassName() {
        return "CircleOfConfusionPostProcess";
    }
    /**
     * Creates a new instance CircleOfConfusionPostProcess
     * @param name The name of the effect.
     * @param depthTexture The depth texture of the scene to compute the circle of confusion. This must be set in order for this to function but may be set after initialization if needed.
     * @param options The required width/height ratio to downsize to before computing the render pass.
     * @param camera The camera to apply the render pass to.
     * @param samplingMode The sampling mode to be used when computing the pass. (default: 0)
     * @param engine The engine which the post process will be applied. (default: current engine)
     * @param reusable If the post process can be reused on the same frame. (default: false)
     * @param textureType Type of textures used when performing the post process. (default: 0)
     * @param blockCompilation If compilation of the shader should not be done in the constructor. The updateEffect method can be used to compile the shader at a later time. (default: false)
     */
    constructor(name, depthTexture, options, camera, samplingMode, engine, reusable, textureType = 0, blockCompilation = false) {
        // Uniforms: cameraMinMaxZ/focusDistance/cocPrecalculation; sampler: depthSampler (consumed by the "circleOfConfusion" shader).
        super(name, "circleOfConfusion", ["cameraMinMaxZ", "focusDistance", "cocPrecalculation"], ["depthSampler"], options, camera, samplingMode, engine, reusable, null, textureType, undefined, null, blockCompilation);
        /**
         * Max lens size in scene units/1000 (eg. millimeter). Standard cameras are 50mm. (default: 50) The diameter of the resulting aperture can be computed by lensSize/fStop.
         */
        this.lensSize = 50;
        /**
         * F-Stop of the effect's camera. The diameter of the resulting aperture can be computed by lensSize/fStop. (default: 1.4)
         */
        this.fStop = 1.4;
        /**
         * Distance away from the camera to focus on in scene units/1000 (eg. millimeter). (default: 2000)
         */
        this.focusDistance = 2000;
        /**
         * Focal length of the effect's camera in scene units/1000 (eg. millimeter). (default: 50)
         */
        this.focalLength = 50;
        // Transpiled field initializer followed by the constructor-argument assignment (order preserved from the TS source).
        this._depthTexture = null;
        this._depthTexture = depthTexture;
        // Push the lens uniforms and the depth sampler each time the post process is applied.
        this.onApplyObservable.add((effect) => {
            if (!this._depthTexture) {
                Logger.Warn("No depth texture set on CircleOfConfusionPostProcess");
                return;
            }
            effect.setTexture("depthSampler", this._depthTexture);
            // Circle of confusion calculation, See https://developer.nvidia.com/gpugems/GPUGems/gpugems_ch23.html
            const aperture = this.lensSize / this.fStop;
            const cocPrecalculation = (aperture * this.focalLength) / (this.focusDistance - this.focalLength); // * ((this.focusDistance - pixelDistance)/pixelDistance) [This part is done in shader]
            effect.setFloat("focusDistance", this.focusDistance);
            effect.setFloat("cocPrecalculation", cocPrecalculation);
            // NOTE(review): assumes the depth texture has an activeCamera assigned by apply time — confirm callers guarantee this.
            const activeCamera = this._depthTexture.activeCamera;
            effect.setFloat2("cameraMinMaxZ", activeCamera.minZ, activeCamera.maxZ - activeCamera.minZ);
        });
    }
    /**
     * Depth texture to be used to compute the circle of confusion. This must be set here or in the constructor in order for the post process to function.
     */
    set depthTexture(value) {
        this._depthTexture = value;
    }
}
|
|
2167
|
-
// Mark the lens parameters as serializable so pipeline (de)serialization round-trips them.
for (const member of ["lensSize", "fStop", "focusDistance", "focalLength"]) {
    __decorate([serialize()], CircleOfConfusionPostProcess.prototype, member, void 0);
}
RegisterClass("BABYLON.CircleOfConfusionPostProcess", CircleOfConfusionPostProcess);
|
|
2180
|
-
|
|
2181
|
-
/**
 * The DepthOfFieldBlurPostProcess applied a blur in a give direction.
 * This blur differs from the standard BlurPostProcess as it attempts to avoid blurring pixels
 * based on samples that have a large difference in distance than the center pixel.
 * See section 2.6.2 http://fileadmin.cs.lth.se/cs/education/edan35/lectures/12dof.pdf
 */
class DepthOfFieldBlurPostProcess extends BlurPostProcess {
    /**
     * Gets a string identifying the name of the class
     * @returns "DepthOfFieldBlurPostProcess" string
     */
    getClassName() {
        return "DepthOfFieldBlurPostProcess";
    }
    /**
     * Creates a new instance DepthOfFieldBlurPostProcess
     * @param name The name of the effect.
     * @param scene The scene the effect belongs to.
     * @param direction The direction the blur should be applied.
     * @param kernel The size of the kernel used to blur.
     * @param options The required width/height ratio to downsize to before computing the render pass.
     * @param camera The camera to apply the render pass to.
     * @param circleOfConfusion The circle of confusion + depth map to be used to avoid blurring across edges
     * @param imageToBlur The image to apply the blur to (default: Current rendered frame)
     * @param samplingMode The sampling mode to be used when computing the pass. (default: 0)
     * @param engine The engine which the post process will be applied. (default: current engine)
     * @param reusable If the post process can be reused on the same frame. (default: false)
     * @param textureType Type of textures used when performing the post process. (default: 0)
     * @param blockCompilation If compilation of the shader should not be done in the constructor. The updateEffect method can be used to compile the shader at a later time. (default: false)
     * @param textureFormat Format of textures used when performing the post process. (default: TEXTUREFORMAT_RGBA)
     */
    constructor(name, scene, direction, kernel, options, camera, circleOfConfusion, imageToBlur = null, samplingMode = Texture.BILINEAR_SAMPLINGMODE, engine, reusable, textureType = 0, blockCompilation = false, textureFormat = 5) {
        super(name, direction, kernel, options, camera,
        // eslint-disable-next-line @typescript-eslint/no-unused-vars
        // NOTE(review): the caller-provided samplingMode is deliberately overwritten with 2 before being forwarded — confirm this is intended (it makes the parameter effectively ignored).
        (samplingMode = 2), engine, reusable, textureType, `#define DOF 1\r\n`, blockCompilation, textureFormat);
        this.direction = direction;
        // Only bind the external texture sampler when an explicit source image was supplied.
        this.externalTextureSamplerBinding = !!imageToBlur;
        this.onApplyObservable.add((effect) => {
            if (imageToBlur != null) {
                effect.setTextureFromPostProcess("textureSampler", imageToBlur);
            }
            // The CoC map steers the blur so it does not bleed across large depth discontinuities.
            effect.setTextureFromPostProcessOutput("circleOfConfusionSampler", circleOfConfusion);
        });
    }
}
|
|
2226
|
-
// "direction" is serialized so the blur pass can be reconstructed from a saved pipeline.
const dofBlurProto = DepthOfFieldBlurPostProcess.prototype;
__decorate([serialize()], dofBlurProto, "direction", void 0);
RegisterClass("BABYLON.DepthOfFieldBlurPostProcess", DepthOfFieldBlurPostProcess);
|
|
2230
|
-
|
|
2231
|
-
// Do not edit.
|
|
2232
|
-
const name$l = "depthOfFieldMergePixelShader";
|
|
2233
|
-
const shader$l = `#if defined(WEBGL2) || defined(WEBGPU) || defined(NATIVE)
|
|
2234
|
-
#define TEXTUREFUNC(s,c,lod) texture2DLodEXT(s,c,lod)
|
|
2235
|
-
#else
|
|
2236
|
-
#define TEXTUREFUNC(s,c,bias) texture2D(s,c,bias)
|
|
2237
|
-
#endif
|
|
2238
|
-
uniform sampler2D textureSampler;
|
|
2239
|
-
uniform sampler2D blurStep1;
|
|
2240
|
-
#if BLUR_LEVEL>1
|
|
2241
|
-
uniform sampler2D blurStep2;
|
|
2242
|
-
#define CUSTOM_FRAGMENT_DEFINITIONS
|
|
2243
|
-
void main(void)
|
|
2244
|
-
vec4 original=TEXTUREFUNC(textureSampler,vUV,0.0);
|
|
2245
|
-
#if BLUR_LEVEL==1
|
|
2246
|
-
if(coc<0.5){
|
|
2247
|
-
#if BLUR_LEVEL==2
|
|
2248
|
-
if(coc<0.33){
|
|
2249
|
-
}
|
|
2250
|
-
// Sideeffect
|
|
2251
|
-
ShaderStore.ShadersStore[name$l] = shader$l;
|
|
2252
|
-
|
|
2253
|
-
/**
 * The DepthOfFieldMergePostProcess merges blurred images with the original based on the values of the circle of confusion.
 */
class DepthOfFieldMergePostProcess extends PostProcess {
    /**
     * Gets a string identifying the name of the class
     * @returns "DepthOfFieldMergePostProcess" string
     */
    getClassName() {
        return "DepthOfFieldMergePostProcess";
    }
    /**
     * Creates a new instance of DepthOfFieldMergePostProcess
     * @param name The name of the effect.
     * @param originalFromInput Post process which's input will be used for the merge.
     * @param circleOfConfusion Circle of confusion post process which's output will be used to blur each pixel.
     * @param _blurSteps Blur post processes from low to high which will be mixed with the original image.
     * @param options The required width/height ratio to downsize to before computing the render pass.
     * @param camera The camera to apply the render pass to.
     * @param samplingMode The sampling mode to be used when computing the pass. (default: 0)
     * @param engine The engine which the post process will be applied. (default: current engine)
     * @param reusable If the post process can be reused on the same frame. (default: false)
     * @param textureType Type of textures used when performing the post process. (default: 0)
     * @param blockCompilation If compilation of the shader should not be done in the constructor. The updateEffect method can be used to compile the shader at a later time. (default: false)
     */
    constructor(name, originalFromInput, circleOfConfusion, _blurSteps, options, camera, samplingMode, engine, reusable, textureType = 0, blockCompilation = false) {
        // blockCompilation is forced to true in the super call: the shader needs the BLUR_LEVEL
        // define (derived from _blurSteps below), so compilation is triggered manually afterwards.
        super(name, "depthOfFieldMerge", [], ["circleOfConfusionSampler", "blurStep0", "blurStep1", "blurStep2"], options, camera, samplingMode, engine, reusable, null, textureType, undefined, null, true);
        this._blurSteps = _blurSteps;
        this.externalTextureSamplerBinding = true;
        this.onApplyObservable.add((effect) => {
            effect.setTextureFromPostProcess("textureSampler", originalFromInput);
            effect.setTextureFromPostProcessOutput("circleOfConfusionSampler", circleOfConfusion);
            // Bind the blur pyramid in reverse order: blurStep0 receives the last (strongest) step.
            _blurSteps.forEach((step, index) => {
                effect.setTextureFromPostProcessOutput("blurStep" + (_blurSteps.length - index - 1), step);
            });
        });
        if (!blockCompilation) {
            this.updateEffect();
        }
    }
    /**
     * Updates the effect with the current post process compile time values and recompiles the shader.
     * @param defines Define statements that should be added at the beginning of the shader. (default: null)
     * @param uniforms Set of uniform variables that will be passed to the shader. (default: null)
     * @param samplers Set of Texture2D variables that will be passed to the shader. (default: null)
     * @param indexParameters The index parameters to be used for babylons include syntax "#include<kernelBlurVaryingDeclaration>[0..varyingCount]". (default: undefined) See usage in babylon.blurPostProcess.ts and kernelBlur.vertex.fx
     * @param onCompiled Called when the shader has been compiled.
     * @param onError Called if there is an error when compiling a shader.
     */
    updateEffect(defines = null, uniforms = null, samplers = null, indexParameters, onCompiled, onError) {
        // Only synthesize the BLUR_LEVEL define when the caller did not supply explicit defines.
        if (!defines) {
            defines = "";
            defines += "#define BLUR_LEVEL " + (this._blurSteps.length - 1) + "\n";
        }
        super.updateEffect(defines, uniforms, samplers, indexParameters, onCompiled, onError);
    }
}
|
|
2310
|
-
|
|
2311
|
-
/**
 * Specifies the level of max blur that should be applied when using the depth of field effect
 */
var DepthOfFieldEffectBlurLevel;
(function (levels) {
    // Build the TypeScript-style two-way enum mapping (name -> value and value -> name).
    const entries = [
        ["Low", 0], // Subtle blur
        ["Medium", 1], // Medium blur
        ["High", 2], // Large blur
    ];
    for (const [label, value] of entries) {
        levels[(levels[label] = value)] = label;
    }
})(DepthOfFieldEffectBlurLevel || (DepthOfFieldEffectBlurLevel = {}));
|
|
2329
|
-
/**
 * The depth of field effect applies a blur to objects that are closer or further from where the camera is focusing.
 */
class DepthOfFieldEffect extends PostProcessRenderEffect {
    /**
     * The focal the length of the camera used in the effect in scene units/1000 (eg. millimeter)
     */
    set focalLength(value) {
        this._circleOfConfusion.focalLength = value;
    }
    get focalLength() {
        return this._circleOfConfusion.focalLength;
    }
    /**
     * F-Stop of the effect's camera. The diameter of the resulting aperture can be computed by lensSize/fStop. (default: 1.4)
     */
    set fStop(value) {
        this._circleOfConfusion.fStop = value;
    }
    get fStop() {
        return this._circleOfConfusion.fStop;
    }
    /**
     * Distance away from the camera to focus on in scene units/1000 (eg. millimeter). (default: 2000)
     */
    set focusDistance(value) {
        this._circleOfConfusion.focusDistance = value;
    }
    get focusDistance() {
        return this._circleOfConfusion.focusDistance;
    }
    /**
     * Max lens size in scene units/1000 (eg. millimeter). Standard cameras are 50mm. (default: 50) The diameter of the resulting aperture can be computed by lensSize/fStop.
     */
    set lensSize(value) {
        this._circleOfConfusion.lensSize = value;
    }
    get lensSize() {
        return this._circleOfConfusion.lensSize;
    }
    /**
     * Creates a new instance DepthOfFieldEffect
     * @param scene The scene the effect belongs to.
     * @param depthTexture The depth texture of the scene to compute the circle of confusion.This must be set in order for this to function but may be set after initialization if needed.
     * @param blurLevel The quality/size of the blur pyramid (default: DepthOfFieldEffectBlurLevel.Low).
     * @param pipelineTextureType The type of texture to be used when performing the post processing.
     * @param blockCompilation If compilation of the shader should not be done in the constructor. The updateEffect method can be used to compile the shader at a later time. (default: false)
     */
    constructor(scene, depthTexture, blurLevel = DepthOfFieldEffectBlurLevel.Low, pipelineTextureType = 0, blockCompilation = false) {
        super(scene.getEngine(), "depth of field", () => {
            return this._effects;
        }, true);
        /**
         * @internal Internal post processes in depth of field effect
         */
        this._effects = [];
        // Use R-only formats if supported to store the circle of confusion values.
        // This should be more space and bandwidth efficient than using RGBA.
        const engine = scene.getEngine();
        // 6 vs 5 are engine texture-format constants (R-only vs RGBA presumably) — TODO confirm against the engine's TEXTUREFORMAT_* values.
        const circleOfConfusionTextureFormat = engine.isWebGPU || engine.webGLVersion > 1 ? 6 : 5;
        // Circle of confusion value for each pixel is used to determine how much to blur that pixel
        this._circleOfConfusion = new CircleOfConfusionPostProcess("circleOfConfusion", depthTexture, 1, null, Texture.BILINEAR_SAMPLINGMODE, engine, false, pipelineTextureType, blockCompilation);
        // Create a pyramid of blurred images (eg. fullSize 1/4 blur, half size 1/2 blur, quarter size 3/4 blur, eith size 4/4 blur)
        // Blur the image but do not blur on sharp far to near distance changes to avoid bleeding artifacts
        // See section 2.6.2 http://fileadmin.cs.lth.se/cs/education/edan35/lectures/12dof.pdf
        this._depthOfFieldBlurY = [];
        this._depthOfFieldBlurX = [];
        let blurCount = 1;
        let kernelSize = 15;
        switch (blurLevel) {
            case DepthOfFieldEffectBlurLevel.High: {
                blurCount = 3;
                kernelSize = 51;
                break;
            }
            case DepthOfFieldEffectBlurLevel.Medium: {
                blurCount = 2;
                kernelSize = 31;
                break;
            }
            default: {
                kernelSize = 15;
                blurCount = 1;
                break;
            }
        }
        // Spread the requested kernel across the pyramid levels.
        const adjustedKernelSize = kernelSize / Math.pow(2, blurCount - 1);
        let ratio = 1.0;
        for (let i = 0; i < blurCount; i++) {
            // NOTE: ordering matters — blurY uses the ratio from the previous iteration; ratio is
            // then reduced before constructing blurX and for the next pass. The first blurY reads
            // directly from the CoC output (hence the CoC texture format on i == 0).
            const blurY = new DepthOfFieldBlurPostProcess("vertical blur", scene, new Vector2(0, 1.0), adjustedKernelSize, ratio, null, this._circleOfConfusion, i == 0 ? this._circleOfConfusion : null, Texture.BILINEAR_SAMPLINGMODE, engine, false, pipelineTextureType, blockCompilation, i == 0 ? circleOfConfusionTextureFormat : 5);
            blurY.autoClear = false;
            ratio = 0.75 / Math.pow(2, i);
            const blurX = new DepthOfFieldBlurPostProcess("horizontal blur", scene, new Vector2(1.0, 0), adjustedKernelSize, ratio, null, this._circleOfConfusion, null, Texture.BILINEAR_SAMPLINGMODE, engine, false, pipelineTextureType, blockCompilation);
            blurX.autoClear = false;
            this._depthOfFieldBlurY.push(blurY);
            this._depthOfFieldBlurX.push(blurX);
        }
        // Set all post processes on the effect.
        this._effects = [this._circleOfConfusion];
        for (let i = 0; i < this._depthOfFieldBlurX.length; i++) {
            this._effects.push(this._depthOfFieldBlurY[i]);
            this._effects.push(this._depthOfFieldBlurX[i]);
        }
        // Merge blurred images with original image based on circleOfConfusion
        this._dofMerge = new DepthOfFieldMergePostProcess("dofMerge", this._circleOfConfusion, this._circleOfConfusion, this._depthOfFieldBlurX, ratio, null, Texture.BILINEAR_SAMPLINGMODE, engine, false, pipelineTextureType, blockCompilation);
        this._dofMerge.autoClear = false;
        this._effects.push(this._dofMerge);
    }
    /**
     * Get the current class name of the current effect
     * @returns "DepthOfFieldEffect"
     */
    getClassName() {
        return "DepthOfFieldEffect";
    }
    /**
     * Depth texture to be used to compute the circle of confusion. This must be set here or in the constructor in order for the post process to function.
     */
    set depthTexture(value) {
        this._circleOfConfusion.depthTexture = value;
    }
    /**
     * Disposes each of the internal effects for a given camera.
     * @param camera The camera to dispose the effect on.
     */
    disposeEffects(camera) {
        for (let effectIndex = 0; effectIndex < this._effects.length; effectIndex++) {
            this._effects[effectIndex].dispose(camera);
        }
    }
    /**
     * @internal Internal
     */
    _updateEffects() {
        for (let effectIndex = 0; effectIndex < this._effects.length; effectIndex++) {
            this._effects[effectIndex].updateEffect();
        }
    }
    /**
     * Internal
     * @returns if all the contained post processes are ready.
     * @internal
     */
    _isReady() {
        for (let effectIndex = 0; effectIndex < this._effects.length; effectIndex++) {
            if (!this._effects[effectIndex].isReady()) {
                return false;
            }
        }
        return true;
    }
}
|
|
2481
|
-
|
|
2482
|
-
// Do not edit.
|
|
2483
|
-
const name$k = "extractHighlightsPixelShader";
|
|
2484
|
-
const shader$k = `#include<helperFunctions>
|
|
2485
|
-
varying vec2 vUV;
|
|
2486
|
-
void main(void)
|
|
2487
|
-
// Sideeffect
|
|
2488
|
-
ShaderStore.ShadersStore[name$k] = shader$k;
|
|
2489
|
-
|
|
2490
|
-
/**
 * The extract highlights post process sets all pixels to black except pixels above the specified luminance threshold. Used as the first step for a bloom effect.
 */
class ExtractHighlightsPostProcess extends PostProcess {
    /**
     * Gets a string identifying the name of the class
     * @returns "ExtractHighlightsPostProcess" string
     */
    getClassName() {
        return "ExtractHighlightsPostProcess";
    }
    /**
     * Creates a new instance of ExtractHighlightsPostProcess
     * @param name The name of the effect.
     * @param options The required width/height ratio to downsize to before computing the render pass.
     * @param camera The camera to apply the render pass to.
     * @param samplingMode The sampling mode to be used when computing the pass. (default: 0)
     * @param engine The engine which the post process will be applied. (default: current engine)
     * @param reusable If the post process can be reused on the same frame. (default: false)
     * @param textureType Type of textures used when performing the post process. (default: 0)
     * @param blockCompilation If compilation of the shader should not be done in the constructor. (default: false)
     */
    constructor(name, options, camera, samplingMode, engine, reusable, textureType = 0, blockCompilation = false) {
        super(name, "extractHighlights", ["threshold", "exposure"], null, options, camera, samplingMode, engine, reusable, null, textureType, undefined, null, blockCompilation);
        /**
         * The luminance threshold, pixels below this value will be set to black.
         */
        this.threshold = 0.9;
        /** @internal */
        this._exposure = 1;
        /**
         * Post process which has the input texture to be used when performing highlight extraction
         * @internal
         */
        this._inputPostProcess = null;
        this.onApplyObservable.add((effect) => {
            // Re-evaluated on every apply so _inputPostProcess can be swapped at runtime.
            this.externalTextureSamplerBinding = !!this._inputPostProcess;
            if (this._inputPostProcess) {
                effect.setTextureFromPostProcess("textureSampler", this._inputPostProcess);
            }
            // The threshold is specified in linear space; convert it to gamma space for the shader.
            effect.setFloat("threshold", Math.pow(this.threshold, ToGammaSpace));
            effect.setFloat("exposure", this._exposure);
        });
    }
}
|
|
2524
|
-
// Persist the luminance threshold when the post process is serialized.
const extractHighlightsProto = ExtractHighlightsPostProcess.prototype;
__decorate([serialize()], extractHighlightsProto, "threshold", void 0);
RegisterClass("BABYLON.ExtractHighlightsPostProcess", ExtractHighlightsPostProcess);
|
|
2528
|
-
|
|
2529
|
-
// Do not edit.
|
|
2530
|
-
const name$j = "bloomMergePixelShader";
|
|
2531
|
-
const shader$j = `uniform sampler2D textureSampler;
|
|
2532
|
-
void main(void)
|
|
2533
|
-
// Sideeffect
|
|
2534
|
-
ShaderStore.ShadersStore[name$j] = shader$j;
|
|
2535
|
-
|
|
2536
|
-
/**
 * The BloomMergePostProcess merges blurred images with the original based on the values of the circle of confusion.
 */
class BloomMergePostProcess extends PostProcess {
    /**
     * Gets a string identifying the name of the class
     * @returns "BloomMergePostProcess" string
     */
    getClassName() {
        return "BloomMergePostProcess";
    }
    /**
     * Creates a new instance of @see BloomMergePostProcess
     * @param name The name of the effect.
     * @param originalFromInput Post process which's input will be used for the merge.
     * @param blurred Blurred highlights post process which's output will be used.
     * @param weight Weight of the bloom to be added to the original input.
     * @param options The required width/height ratio to downsize to before computing the render pass.
     * @param camera The camera to apply the render pass to.
     * @param samplingMode The sampling mode to be used when computing the pass. (default: 0)
     * @param engine The engine which the post process will be applied. (default: current engine)
     * @param reusable If the post process can be reused on the same frame. (default: false)
     * @param textureType Type of textures used when performing the post process. (default: 0)
     * @param blockCompilation If compilation of the shader should not be done in the constructor. The updateEffect method can be used to compile the shader at a later time. (default: false)
     */
    constructor(name, originalFromInput, blurred,
    /** Weight of the bloom to be added to the original input. */
    weight, options, camera, samplingMode, engine, reusable, textureType = 0, blockCompilation = false) {
        // blockCompilation is forced to true in the super call; compilation is triggered manually below.
        super(name, "bloomMerge", ["bloomWeight"], ["bloomBlur"], options, camera, samplingMode, engine, reusable, null, textureType, undefined, null, true);
        /** Weight of the bloom to be added to the original input. */
        this.weight = 1;
        this.weight = weight;
        this.externalTextureSamplerBinding = true;
        this.onApplyObservable.add((effect) => {
            effect.setTextureFromPostProcess("textureSampler", originalFromInput);
            effect.setTextureFromPostProcessOutput("bloomBlur", blurred);
            // Weight is read per-apply so it can be tuned live via the `weight` property.
            effect.setFloat("bloomWeight", this.weight);
        });
        if (!blockCompilation) {
            this.updateEffect();
        }
    }
}
|
|
2579
|
-
// Persist the bloom weight when the post process is serialized.
const bloomMergeProto = BloomMergePostProcess.prototype;
__decorate([serialize()], bloomMergeProto, "weight", void 0);
RegisterClass("BABYLON.BloomMergePostProcess", BloomMergePostProcess);
|
|
2583
|
-
|
|
2584
|
-
/**
 * The bloom effect spreads bright areas of an image to simulate artifacts seen in cameras
 */
class BloomEffect extends PostProcessRenderEffect {
    /**
     * The luminance threshold to find bright areas of the image to bloom.
     */
    get threshold() {
        return this._downscale.threshold;
    }
    set threshold(value) {
        this._downscale.threshold = value;
    }
    /**
     * The strength of the bloom.
     */
    get weight() {
        return this._merge.weight;
    }
    set weight(value) {
        this._merge.weight = value;
    }
    /**
     * Specifies the size of the bloom blur kernel, relative to the final output size
     */
    get kernel() {
        // Stored kernel is pre-scaled by _bloomScale; unscale it for callers.
        return this._blurX.kernel / this._bloomScale;
    }
    set kernel(value) {
        // Both blur passes share the same kernel, scaled to the bloom texture resolution.
        this._blurX.kernel = value * this._bloomScale;
        this._blurY.kernel = value * this._bloomScale;
    }
    /**
     * Creates a new instance of @see BloomEffect
     * @param scene The scene the effect belongs to.
     * @param _bloomScale The ratio of the blur texture to the input texture that should be used to compute the bloom.
     * @param bloomWeight The the strength of bloom.
     * @param bloomKernel The size of the kernel to be used when applying the blur.
     * @param pipelineTextureType The type of texture to be used when performing the post processing.
     * @param blockCompilation If compilation of the shader should not be done in the constructor. The updateEffect method can be used to compile the shader at a later time. (default: false)
     */
    constructor(scene, _bloomScale, bloomWeight, bloomKernel, pipelineTextureType = 0, blockCompilation = false) {
        super(scene.getEngine(), "bloom", () => {
            return this._effects;
        }, true);
        this._bloomScale = _bloomScale;
        /**
         * @internal Internal
         */
        this._effects = [];
        // Pipeline: extract highlights -> horizontal blur -> vertical blur -> merge with original.
        this._downscale = new ExtractHighlightsPostProcess("highlights", 1.0, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, pipelineTextureType, blockCompilation);
        this._blurX = new BlurPostProcess("horizontal blur", new Vector2(1.0, 0), 10.0, _bloomScale, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, pipelineTextureType, undefined, blockCompilation);
        this._blurX.alwaysForcePOT = true;
        this._blurX.autoClear = false;
        this._blurY = new BlurPostProcess("vertical blur", new Vector2(0, 1.0), 10.0, _bloomScale, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, pipelineTextureType, undefined, blockCompilation);
        this._blurY.alwaysForcePOT = true;
        this._blurY.autoClear = false;
        // Must run after the blur passes exist: the kernel setter writes to _blurX/_blurY.
        this.kernel = bloomKernel;
        this._effects = [this._downscale, this._blurX, this._blurY];
        this._merge = new BloomMergePostProcess("bloomMerge", this._downscale, this._blurY, bloomWeight, _bloomScale, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, pipelineTextureType, blockCompilation);
        this._merge.autoClear = false;
        this._effects.push(this._merge);
    }
    /**
     * Disposes each of the internal effects for a given camera.
     * @param camera The camera to dispose the effect on.
     */
    disposeEffects(camera) {
        for (let effectIndex = 0; effectIndex < this._effects.length; effectIndex++) {
            this._effects[effectIndex].dispose(camera);
        }
    }
    /**
     * @internal Internal
     */
    _updateEffects() {
        for (let effectIndex = 0; effectIndex < this._effects.length; effectIndex++) {
            this._effects[effectIndex].updateEffect();
        }
    }
    /**
     * Internal
     * @returns if all the contained post processes are ready.
     * @internal
     */
    _isReady() {
        for (let effectIndex = 0; effectIndex < this._effects.length; effectIndex++) {
            if (!this._effects[effectIndex].isReady()) {
                return false;
            }
        }
        return true;
    }
}
|
|
2678
|
-
|
|
2679
|
-
/**
 * PostProcessRenderPipelineManager class
 * @see https://doc.babylonjs.com/features/featuresDeepDive/postProcesses/postProcessRenderPipeline
 */
class PostProcessRenderPipelineManager {
    /**
     * Initializes a PostProcessRenderPipelineManager
     * @see https://doc.babylonjs.com/features/featuresDeepDive/postProcesses/postProcessRenderPipeline
     */
    constructor() {
        // Pipelines keyed by their _name.
        this._renderPipelines = {};
    }
    /**
     * Gets the list of supported render pipelines
     */
    get supportedPipelines() {
        const supported = [];
        for (const pipelineName of Object.keys(this._renderPipelines)) {
            const pipeline = this._renderPipelines[pipelineName];
            if (pipeline.isSupported) {
                supported.push(pipeline);
            }
        }
        return supported;
    }
    /**
     * Adds a pipeline to the manager
     * @param renderPipeline The pipeline to add
     */
    addPipeline(renderPipeline) {
        this._renderPipelines[renderPipeline._name] = renderPipeline;
    }
    /**
     * Remove the pipeline from the manager
     * @param renderPipelineName the name of the pipeline to remove
     */
    removePipeline(renderPipelineName) {
        delete this._renderPipelines[renderPipelineName];
    }
    /**
     * Attaches a camera to the pipeline
     * @param renderPipelineName The name of the pipeline to attach to
     * @param cameras the camera to attach
     * @param unique if the camera can be attached multiple times to the pipeline
     */
    attachCamerasToRenderPipeline(renderPipelineName, cameras, unique = false) {
        const pipeline = this._renderPipelines[renderPipelineName];
        if (pipeline) {
            pipeline._attachCameras(cameras, unique);
        }
    }
    /**
     * Detaches a camera from the pipeline
     * @param renderPipelineName The name of the pipeline to detach from
     * @param cameras the camera to detach
     */
    detachCamerasFromRenderPipeline(renderPipelineName, cameras) {
        const pipeline = this._renderPipelines[renderPipelineName];
        if (pipeline) {
            pipeline._detachCameras(cameras);
        }
    }
    /**
     * Enables an effect by name on a pipeline
     * @param renderPipelineName the name of the pipeline to enable the effect in
     * @param renderEffectName the name of the effect to enable
     * @param cameras the cameras that the effect should be enabled on
     */
    enableEffectInPipeline(renderPipelineName, renderEffectName, cameras) {
        const pipeline = this._renderPipelines[renderPipelineName];
        if (pipeline) {
            pipeline._enableEffect(renderEffectName, cameras);
        }
    }
    /**
     * Disables an effect by name on a pipeline
     * @param renderPipelineName the name of the pipeline to disable the effect in
     * @param renderEffectName the name of the effect to disable
     * @param cameras the cameras that the effect should be disabled on
     */
    disableEffectInPipeline(renderPipelineName, renderEffectName, cameras) {
        const pipeline = this._renderPipelines[renderPipelineName];
        if (pipeline) {
            pipeline._disableEffect(renderEffectName, cameras);
        }
    }
    /**
     * Updates the state of all contained render pipelines and disposes of any non supported pipelines
     */
    update() {
        // Object.keys snapshots the key list, so deleting entries mid-loop is safe.
        for (const pipelineName of Object.keys(this._renderPipelines)) {
            const pipeline = this._renderPipelines[pipelineName];
            if (pipeline.isSupported) {
                pipeline._update();
            }
            else {
                pipeline.dispose();
                delete this._renderPipelines[pipelineName];
            }
        }
    }
    /** @internal */
    _rebuild() {
        for (const pipelineName of Object.keys(this._renderPipelines)) {
            this._renderPipelines[pipelineName]._rebuild();
        }
    }
    /**
     * Disposes of the manager and pipelines
     */
    dispose() {
        for (const pipelineName of Object.keys(this._renderPipelines)) {
            this._renderPipelines[pipelineName].dispose();
        }
    }
}
|
|
2809
|
-
|
|
2810
|
-
// Lazily creates the scene's pipeline manager on first access, registering the
// scene component that drives per-frame pipeline updates if it is not present.
Object.defineProperty(Scene.prototype, "postProcessRenderPipelineManager", {
    get: function () {
        if (!this._postProcessRenderPipelineManager) {
            // Register the post process render pipeline manager component to the scene.
            let component = this._getComponent(SceneComponentConstants.NAME_POSTPROCESSRENDERPIPELINEMANAGER);
            if (!component) {
                component = new PostProcessRenderPipelineManagerSceneComponent(this);
                this._addComponent(component);
            }
            this._postProcessRenderPipelineManager = new PostProcessRenderPipelineManager();
        }
        return this._postProcessRenderPipelineManager;
    },
    enumerable: true,
    configurable: true,
});
|
|
2826
|
-
/**
 * Defines the Render Pipeline scene component responsible to rendering pipelines
 */
class PostProcessRenderPipelineManagerSceneComponent {
    /**
     * Creates a new instance of the component for the given scene
     * @param scene Defines the scene to register the component in
     */
    constructor(scene) {
        /**
         * The component name helpful to identify the component in the list of scene components.
         */
        this.name = SceneComponentConstants.NAME_POSTPROCESSRENDERPIPELINEMANAGER;
        this.scene = scene;
    }
    /**
     * Registers the component in a given scene
     */
    register() {
        this.scene._gatherRenderTargetsStage.registerStep(SceneComponentConstants.STEP_GATHERRENDERTARGETS_POSTPROCESSRENDERPIPELINEMANAGER, this, this._gatherRenderTargets);
    }
    /**
     * Rebuilds the elements related to this component in case of
     * context lost for instance.
     */
    rebuild() {
        const manager = this.scene._postProcessRenderPipelineManager;
        if (manager) {
            manager._rebuild();
        }
    }
    /**
     * Disposes the component and the associated resources
     */
    dispose() {
        const manager = this.scene._postProcessRenderPipelineManager;
        if (manager) {
            manager.dispose();
        }
    }
    // Scene step callback: drives the per-frame update of all pipelines.
    _gatherRenderTargets() {
        const manager = this.scene._postProcessRenderPipelineManager;
        if (manager) {
            manager.update();
        }
    }
}
|
|
2870
|
-
|
|
2871
|
-
/**
|
|
2872
|
-
* The default rendering pipeline can be added to a scene to apply common post processing effects such as anti-aliasing or depth of field.
|
|
2873
|
-
* See https://doc.babylonjs.com/features/featuresDeepDive/postProcesses/defaultRenderingPipeline
|
|
2874
|
-
*/
|
|
2875
|
-
class DefaultRenderingPipeline extends PostProcessRenderPipeline {
|
|
2876
|
-
/**
|
|
2877
|
-
* Enable or disable automatic building of the pipeline when effects are enabled and disabled.
|
|
2878
|
-
* If false, you will have to manually call prepare() to update the pipeline.
|
|
2879
|
-
*/
|
|
2880
|
-
get automaticBuild() {
|
|
2881
|
-
return this._buildAllowed;
|
|
2882
|
-
}
|
|
2883
|
-
set automaticBuild(value) {
|
|
2884
|
-
this._buildAllowed = value;
|
|
2885
|
-
}
|
|
2886
|
-
/**
|
|
2887
|
-
* Gets active scene
|
|
2888
|
-
*/
|
|
2889
|
-
get scene() {
|
|
2890
|
-
return this._scene;
|
|
2891
|
-
}
|
|
2892
|
-
/**
|
|
2893
|
-
* Enable or disable the sharpen process from the pipeline
|
|
2894
|
-
*/
|
|
2895
|
-
set sharpenEnabled(enabled) {
|
|
2896
|
-
if (this._sharpenEnabled === enabled) {
|
|
2897
|
-
return;
|
|
2898
|
-
}
|
|
2899
|
-
this._sharpenEnabled = enabled;
|
|
2900
|
-
this._buildPipeline();
|
|
2901
|
-
}
|
|
2902
|
-
get sharpenEnabled() {
|
|
2903
|
-
return this._sharpenEnabled;
|
|
2904
|
-
}
|
|
2905
|
-
/**
|
|
2906
|
-
* Specifies the size of the bloom blur kernel, relative to the final output size
|
|
2907
|
-
*/
|
|
2908
|
-
get bloomKernel() {
|
|
2909
|
-
return this._bloomKernel;
|
|
2910
|
-
}
|
|
2911
|
-
set bloomKernel(value) {
|
|
2912
|
-
this._bloomKernel = value;
|
|
2913
|
-
this.bloom.kernel = value / this._hardwareScaleLevel;
|
|
2914
|
-
}
|
|
2915
|
-
/**
|
|
2916
|
-
* The strength of the bloom.
|
|
2917
|
-
*/
|
|
2918
|
-
set bloomWeight(value) {
|
|
2919
|
-
if (this._bloomWeight === value) {
|
|
2920
|
-
return;
|
|
2921
|
-
}
|
|
2922
|
-
this.bloom.weight = value;
|
|
2923
|
-
this._bloomWeight = value;
|
|
2924
|
-
}
|
|
2925
|
-
get bloomWeight() {
|
|
2926
|
-
return this._bloomWeight;
|
|
2927
|
-
}
|
|
2928
|
-
/**
|
|
2929
|
-
* The luminance threshold to find bright areas of the image to bloom.
|
|
2930
|
-
*/
|
|
2931
|
-
set bloomThreshold(value) {
|
|
2932
|
-
if (this._bloomThreshold === value) {
|
|
2933
|
-
return;
|
|
2934
|
-
}
|
|
2935
|
-
this.bloom.threshold = value;
|
|
2936
|
-
this._bloomThreshold = value;
|
|
2937
|
-
}
|
|
2938
|
-
get bloomThreshold() {
|
|
2939
|
-
return this._bloomThreshold;
|
|
2940
|
-
}
|
|
2941
|
-
/**
|
|
2942
|
-
* The scale of the bloom, lower value will provide better performance.
|
|
2943
|
-
*/
|
|
2944
|
-
set bloomScale(value) {
|
|
2945
|
-
if (this._bloomScale === value) {
|
|
2946
|
-
return;
|
|
2947
|
-
}
|
|
2948
|
-
this._bloomScale = value;
|
|
2949
|
-
// recreate bloom and dispose old as this setting is not dynamic
|
|
2950
|
-
this._rebuildBloom();
|
|
2951
|
-
this._buildPipeline();
|
|
2952
|
-
}
|
|
2953
|
-
get bloomScale() {
|
|
2954
|
-
return this._bloomScale;
|
|
2955
|
-
}
|
|
2956
|
-
/**
|
|
2957
|
-
* Enable or disable the bloom from the pipeline
|
|
2958
|
-
*/
|
|
2959
|
-
set bloomEnabled(enabled) {
|
|
2960
|
-
if (this._bloomEnabled === enabled) {
|
|
2961
|
-
return;
|
|
2962
|
-
}
|
|
2963
|
-
this._bloomEnabled = enabled;
|
|
2964
|
-
this._buildPipeline();
|
|
2965
|
-
}
|
|
2966
|
-
get bloomEnabled() {
|
|
2967
|
-
return this._bloomEnabled;
|
|
2968
|
-
}
|
|
2969
|
-
_rebuildBloom() {
|
|
2970
|
-
// recreate bloom and dispose old as this setting is not dynamic
|
|
2971
|
-
const oldBloom = this.bloom;
|
|
2972
|
-
this.bloom = new BloomEffect(this._scene, this.bloomScale, this._bloomWeight, this.bloomKernel / this._hardwareScaleLevel, this._defaultPipelineTextureType, false);
|
|
2973
|
-
this.bloom.threshold = oldBloom.threshold;
|
|
2974
|
-
for (let i = 0; i < this._cameras.length; i++) {
|
|
2975
|
-
oldBloom.disposeEffects(this._cameras[i]);
|
|
2976
|
-
}
|
|
2977
|
-
}
|
|
2978
|
-
/**
|
|
2979
|
-
* If the depth of field is enabled.
|
|
2980
|
-
*/
|
|
2981
|
-
get depthOfFieldEnabled() {
|
|
2982
|
-
return this._depthOfFieldEnabled;
|
|
2983
|
-
}
|
|
2984
|
-
set depthOfFieldEnabled(enabled) {
|
|
2985
|
-
if (this._depthOfFieldEnabled === enabled) {
|
|
2986
|
-
return;
|
|
2987
|
-
}
|
|
2988
|
-
this._depthOfFieldEnabled = enabled;
|
|
2989
|
-
this._buildPipeline();
|
|
2990
|
-
}
|
|
2991
|
-
/**
|
|
2992
|
-
* Blur level of the depth of field effect. (Higher blur will effect performance)
|
|
2993
|
-
*/
|
|
2994
|
-
get depthOfFieldBlurLevel() {
|
|
2995
|
-
return this._depthOfFieldBlurLevel;
|
|
2996
|
-
}
|
|
2997
|
-
set depthOfFieldBlurLevel(value) {
|
|
2998
|
-
if (this._depthOfFieldBlurLevel === value) {
|
|
2999
|
-
return;
|
|
3000
|
-
}
|
|
3001
|
-
this._depthOfFieldBlurLevel = value;
|
|
3002
|
-
// recreate dof and dispose old as this setting is not dynamic
|
|
3003
|
-
const oldDof = this.depthOfField;
|
|
3004
|
-
this.depthOfField = new DepthOfFieldEffect(this._scene, null, this._depthOfFieldBlurLevel, this._defaultPipelineTextureType, false);
|
|
3005
|
-
this.depthOfField.focalLength = oldDof.focalLength;
|
|
3006
|
-
this.depthOfField.focusDistance = oldDof.focusDistance;
|
|
3007
|
-
this.depthOfField.fStop = oldDof.fStop;
|
|
3008
|
-
this.depthOfField.lensSize = oldDof.lensSize;
|
|
3009
|
-
for (let i = 0; i < this._cameras.length; i++) {
|
|
3010
|
-
oldDof.disposeEffects(this._cameras[i]);
|
|
3011
|
-
}
|
|
3012
|
-
this._buildPipeline();
|
|
3013
|
-
}
|
|
3014
|
-
/**
|
|
3015
|
-
* If the anti aliasing is enabled.
|
|
3016
|
-
*/
|
|
3017
|
-
set fxaaEnabled(enabled) {
|
|
3018
|
-
if (this._fxaaEnabled === enabled) {
|
|
3019
|
-
return;
|
|
3020
|
-
}
|
|
3021
|
-
this._fxaaEnabled = enabled;
|
|
3022
|
-
this._buildPipeline();
|
|
3023
|
-
}
|
|
3024
|
-
get fxaaEnabled() {
|
|
3025
|
-
return this._fxaaEnabled;
|
|
3026
|
-
}
|
|
3027
|
-
/**
|
|
3028
|
-
* MSAA sample count, setting this to 4 will provide 4x anti aliasing. (default: 1)
|
|
3029
|
-
*/
|
|
3030
|
-
set samples(sampleCount) {
|
|
3031
|
-
if (this._samples === sampleCount) {
|
|
3032
|
-
return;
|
|
3033
|
-
}
|
|
3034
|
-
this._samples = sampleCount;
|
|
3035
|
-
this._buildPipeline();
|
|
3036
|
-
}
|
|
3037
|
-
get samples() {
|
|
3038
|
-
return this._samples;
|
|
3039
|
-
}
|
|
3040
|
-
/**
|
|
3041
|
-
* If image processing is enabled.
|
|
3042
|
-
*/
|
|
3043
|
-
set imageProcessingEnabled(enabled) {
|
|
3044
|
-
if (this._imageProcessingEnabled === enabled) {
|
|
3045
|
-
return;
|
|
3046
|
-
}
|
|
3047
|
-
this._scene.imageProcessingConfiguration.isEnabled = enabled;
|
|
3048
|
-
}
|
|
3049
|
-
get imageProcessingEnabled() {
|
|
3050
|
-
return this._imageProcessingEnabled;
|
|
3051
|
-
}
|
|
3052
|
-
/**
|
|
3053
|
-
* If glow layer is enabled. (Adds a glow effect to emmissive materials)
|
|
3054
|
-
*/
|
|
3055
|
-
set glowLayerEnabled(enabled) {
|
|
3056
|
-
if (enabled && !this._glowLayer) {
|
|
3057
|
-
this._glowLayer = new GlowLayer("", this._scene);
|
|
3058
|
-
}
|
|
3059
|
-
else if (!enabled && this._glowLayer) {
|
|
3060
|
-
this._glowLayer.dispose();
|
|
3061
|
-
this._glowLayer = null;
|
|
3062
|
-
}
|
|
3063
|
-
}
|
|
3064
|
-
get glowLayerEnabled() {
|
|
3065
|
-
return this._glowLayer != null;
|
|
3066
|
-
}
|
|
3067
|
-
/**
|
|
3068
|
-
* Gets the glow layer (or null if not defined)
|
|
3069
|
-
*/
|
|
3070
|
-
get glowLayer() {
|
|
3071
|
-
return this._glowLayer;
|
|
3072
|
-
}
|
|
3073
|
-
/**
|
|
3074
|
-
* Enable or disable the chromaticAberration process from the pipeline
|
|
3075
|
-
*/
|
|
3076
|
-
set chromaticAberrationEnabled(enabled) {
|
|
3077
|
-
if (this._chromaticAberrationEnabled === enabled) {
|
|
3078
|
-
return;
|
|
3079
|
-
}
|
|
3080
|
-
this._chromaticAberrationEnabled = enabled;
|
|
3081
|
-
this._buildPipeline();
|
|
3082
|
-
}
|
|
3083
|
-
get chromaticAberrationEnabled() {
|
|
3084
|
-
return this._chromaticAberrationEnabled;
|
|
3085
|
-
}
|
|
3086
|
-
/**
|
|
3087
|
-
* Enable or disable the grain process from the pipeline
|
|
3088
|
-
*/
|
|
3089
|
-
set grainEnabled(enabled) {
|
|
3090
|
-
if (this._grainEnabled === enabled) {
|
|
3091
|
-
return;
|
|
3092
|
-
}
|
|
3093
|
-
this._grainEnabled = enabled;
|
|
3094
|
-
this._buildPipeline();
|
|
3095
|
-
}
|
|
3096
|
-
get grainEnabled() {
|
|
3097
|
-
return this._grainEnabled;
|
|
3098
|
-
}
|
|
3099
|
-
/**
 * Instantiates a DefaultRenderingPipeline.
 * @param name The rendering pipeline name (default: "")
 * @param hdr If high dynamic range textures should be used (default: true)
 * @param scene The scene linked to this pipeline (default: the last created scene)
 * @param cameras The array of cameras that the rendering pipeline will be attached to (default: scene.cameras)
 * @param automaticBuild If false, you will have to manually call prepare() to update the pipeline (default: true)
 */
constructor(name = "", hdr = true, scene = EngineStore.LastCreatedScene, cameras, automaticBuild = true) {
    super(scene.getEngine(), name);
    this._camerasToBeAttached = [];
    /**
     * ID of the sharpen post process,
     */
    this.SharpenPostProcessId = "SharpenPostProcessEffect";
    /**
     * @ignore
     * ID of the image processing post process;
     */
    this.ImageProcessingPostProcessId = "ImageProcessingPostProcessEffect";
    /**
     * @ignore
     * ID of the Fast Approximate Anti-Aliasing post process;
     */
    this.FxaaPostProcessId = "FxaaPostProcessEffect";
    /**
     * ID of the chromatic aberration post process,
     */
    this.ChromaticAberrationPostProcessId = "ChromaticAberrationPostProcessEffect";
    /**
     * ID of the grain post process
     */
    this.GrainPostProcessId = "GrainPostProcessEffect";
    /**
     * Glow post process which adds a glow to emissive areas of the image
     */
    this._glowLayer = null;
    /**
     * Animations which can be used to tweak settings over a period of time
     */
    this.animations = [];
    this._imageProcessingConfigurationObserver = null;
    // Values
    this._sharpenEnabled = false;
    this._bloomEnabled = false;
    this._depthOfFieldEnabled = false;
    this._depthOfFieldBlurLevel = DepthOfFieldEffectBlurLevel.Low;
    this._fxaaEnabled = false;
    this._imageProcessingEnabled = true;
    this._bloomScale = 0.5;
    this._chromaticAberrationEnabled = false;
    this._grainEnabled = false;
    this._buildAllowed = true;
    /**
     * This is triggered each time the pipeline has been built.
     */
    this.onBuildObservable = new Observable();
    this._resizeObserver = null;
    this._hardwareScaleLevel = 1.0;
    this._bloomKernel = 64;
    /**
     * Specifies the weight of the bloom in the final rendering
     */
    this._bloomWeight = 0.15;
    /**
     * Specifies the luma threshold for the area that will be blurred by the bloom
     */
    this._bloomThreshold = 0.9;
    this._samples = 1;
    this._hasCleared = false;
    this._prevPostProcess = null;
    this._prevPrevPostProcess = null;
    this._depthOfFieldSceneObserver = null;
    this._activeCameraChangedObserver = null;
    this._activeCamerasChangedObserver = null;
    // Copy the camera list so later mutations of scene.cameras don't leak in.
    this._cameras = cameras || scene.cameras;
    this._cameras = this._cameras.slice();
    this._camerasToBeAttached = this._cameras.slice();
    this._buildAllowed = automaticBuild;
    // Initialize
    this._scene = scene;
    const caps = this._scene.getEngine().getCaps();
    this._hdr = hdr && (caps.textureHalfFloatRender || caps.textureFloatRender);
    // Misc
    // Pick the widest float texture type the hardware supports for HDR output.
    if (this._hdr) {
        if (caps.textureHalfFloatRender) {
            this._defaultPipelineTextureType = 2;
        }
        else if (caps.textureFloatRender) {
            this._defaultPipelineTextureType = 1;
        }
    }
    else {
        this._defaultPipelineTextureType = 0;
    }
    // Attach
    scene.postProcessRenderPipelineManager.addPipeline(this);
    const engine = this._scene.getEngine();
    // Create post processes before hand so they can be modified before enabled.
    // Block compilation flag is set to true to avoid compilation prior to use, these will be updated on first use in build pipeline.
    this.sharpen = new SharpenPostProcess("sharpen", 1.0, null, Texture.BILINEAR_SAMPLINGMODE, engine, false, this._defaultPipelineTextureType, true);
    this._sharpenEffect = new PostProcessRenderEffect(engine, this.SharpenPostProcessId, () => {
        return this.sharpen;
    }, true);
    this.depthOfField = new DepthOfFieldEffect(this._scene, null, this._depthOfFieldBlurLevel, this._defaultPipelineTextureType, true);
    // To keep the bloom sizes consistent across different display densities, factor in the hardware scaling level.
    this._hardwareScaleLevel = engine.getHardwareScalingLevel();
    this._resizeObserver = engine.onResizeObservable.add(() => {
        this._hardwareScaleLevel = engine.getHardwareScalingLevel();
        // Re-applying the kernel recomputes bloom.kernel with the new scale level.
        this.bloomKernel = this._bloomKernel;
    });
    this.bloom = new BloomEffect(this._scene, this._bloomScale, this._bloomWeight, this.bloomKernel / this._hardwareScaleLevel, this._defaultPipelineTextureType, true);
    this.chromaticAberration = new ChromaticAberrationPostProcess("ChromaticAberration", engine.getRenderWidth(), engine.getRenderHeight(), 1.0, null, Texture.BILINEAR_SAMPLINGMODE, engine, false, this._defaultPipelineTextureType, true);
    this._chromaticAberrationEffect = new PostProcessRenderEffect(engine, this.ChromaticAberrationPostProcessId, () => {
        return this.chromaticAberration;
    }, true);
    this.grain = new GrainPostProcess("Grain", 1.0, null, Texture.BILINEAR_SAMPLINGMODE, engine, false, this._defaultPipelineTextureType, true);
    this._grainEffect = new PostProcessRenderEffect(engine, this.GrainPostProcessId, () => {
        return this.grain;
    }, true);
    // Keep _imageProcessingEnabled in sync with the scene configuration and
    // rebuild the pipeline when it flips.
    this._imageProcessingConfigurationObserver = this._scene.imageProcessingConfiguration.onUpdateParameters.add(() => {
        this.bloom._downscale._exposure = this._scene.imageProcessingConfiguration.exposure;
        if (this.imageProcessingEnabled !== this._scene.imageProcessingConfiguration.isEnabled) {
            this._imageProcessingEnabled = this._scene.imageProcessingConfiguration.isEnabled;
            // Avoid re-entrant problems by deferring the call to _buildPipeline because the call to _buildPipeline
            // at the end of the constructor could end up triggering imageProcessingConfiguration.onUpdateParameters!
            // Note that the pipeline could have been disposed before the deferred call was executed, but in that case
            // _buildAllowed will have been set to false, preventing _buildPipeline from being executed.
            Tools.SetImmediate(() => {
                this._buildPipeline();
            });
        }
    });
    this._buildPipeline();
}
|
|
3234
|
-
/**
|
|
3235
|
-
* Get the class name
|
|
3236
|
-
* @returns "DefaultRenderingPipeline"
|
|
3237
|
-
*/
|
|
3238
|
-
getClassName() {
|
|
3239
|
-
return "DefaultRenderingPipeline";
|
|
3240
|
-
}
|
|
3241
|
-
/**
|
|
3242
|
-
* Force the compilation of the entire pipeline.
|
|
3243
|
-
*/
|
|
3244
|
-
prepare() {
|
|
3245
|
-
const previousState = this._buildAllowed;
|
|
3246
|
-
this._buildAllowed = true;
|
|
3247
|
-
this._buildPipeline();
|
|
3248
|
-
this._buildAllowed = previousState;
|
|
3249
|
-
}
|
|
3250
|
-
// Configures a newly added post process so that only the FIRST process in the
// chain clears (the scene's own autoClear is turned off once the pipeline owns
// the clear), and optionally chains output-texture sharing with the
// process-before-last so render targets can be reused.
_setAutoClearAndTextureSharing(postProcess, skipTextureSharing = false) {
    if (this._hasCleared) {
        // A previous process already clears; this one must not.
        postProcess.autoClear = false;
    }
    else {
        postProcess.autoClear = true;
        this._scene.autoClear = false;
        this._hasCleared = true;
    }
    if (!skipTextureSharing) {
        if (this._prevPrevPostProcess) {
            // Reuse the output of the process two steps back (its texture is free again).
            postProcess.shareOutputWith(this._prevPrevPostProcess);
        }
        else {
            postProcess.useOwnOutput();
        }
        // Shift the two-deep history window.
        if (this._prevPostProcess) {
            this._prevPrevPostProcess = this._prevPostProcess;
        }
        this._prevPostProcess = postProcess;
    }
}
|
|
3272
|
-
// Tears down and rebuilds the full effect chain in its fixed order
// (depth of field, bloom, image processing, sharpen, grain, chromatic
// aberration, FXAA), then reattaches the cameras. No-op when automatic
// building is disabled.
_buildPipeline() {
    if (!this._buildAllowed) {
        return;
    }
    this._scene.autoClear = true;
    const engine = this._scene.getEngine();
    this._disposePostProcesses();
    if (this._cameras !== null) {
        this._scene.postProcessRenderPipelineManager.detachCamerasFromRenderPipeline(this._name, this._cameras);
        // get back cameras to be used to reattach pipeline
        this._cameras = this._camerasToBeAttached.slice();
    }
    this._reset();
    this._prevPostProcess = null;
    this._prevPrevPostProcess = null;
    this._hasCleared = false;
    if (this.depthOfFieldEnabled) {
        // Multi camera support: each camera needs its own depth renderer, and
        // the depth texture is swapped to the active camera's after RT rendering.
        if (this._cameras.length > 1) {
            for (const camera of this._cameras) {
                const depthRenderer = this._scene.enableDepthRenderer(camera);
                depthRenderer.useOnlyInActiveCamera = true;
            }
            this._depthOfFieldSceneObserver = this._scene.onAfterRenderTargetsRenderObservable.add((scene) => {
                if (this._cameras.indexOf(scene.activeCamera) > -1) {
                    this.depthOfField.depthTexture = scene.enableDepthRenderer(scene.activeCamera).getDepthMap();
                }
            });
        }
        else {
            this._scene.onAfterRenderTargetsRenderObservable.remove(this._depthOfFieldSceneObserver);
            const depthRenderer = this._scene.enableDepthRenderer(this._cameras[0]);
            this.depthOfField.depthTexture = depthRenderer.getDepthMap();
        }
        if (!this.depthOfField._isReady()) {
            this.depthOfField._updateEffects();
        }
        this.addEffect(this.depthOfField);
        this._setAutoClearAndTextureSharing(this.depthOfField._effects[0], true);
    }
    else {
        this._scene.onAfterRenderTargetsRenderObservable.remove(this._depthOfFieldSceneObserver);
    }
    if (this.bloomEnabled) {
        if (!this.bloom._isReady()) {
            this.bloom._updateEffects();
        }
        this.addEffect(this.bloom);
        this._setAutoClearAndTextureSharing(this.bloom._effects[0], true);
    }
    if (this._imageProcessingEnabled) {
        this.imageProcessing = new ImageProcessingPostProcess("imageProcessing", 1.0, null, Texture.BILINEAR_SAMPLINGMODE, engine, false, this._defaultPipelineTextureType, this.scene.imageProcessingConfiguration);
        if (this._hdr) {
            this.addEffect(new PostProcessRenderEffect(engine, this.ImageProcessingPostProcessId, () => {
                return this.imageProcessing;
            }, true));
            this._setAutoClearAndTextureSharing(this.imageProcessing);
        }
        else {
            // In LDR the image processing is applied by the materials directly.
            this._scene.imageProcessingConfiguration.applyByPostProcess = false;
        }
        if (!this._cameras || this._cameras.length === 0) {
            this._scene.imageProcessingConfiguration.applyByPostProcess = false;
        }
        if (!this.imageProcessing.getEffect()) {
            this.imageProcessing._updateParameters();
        }
    }
    if (this.sharpenEnabled) {
        if (!this.sharpen.isReady()) {
            this.sharpen.updateEffect();
        }
        this.addEffect(this._sharpenEffect);
        this._setAutoClearAndTextureSharing(this.sharpen);
    }
    if (this.grainEnabled) {
        if (!this.grain.isReady()) {
            this.grain.updateEffect();
        }
        this.addEffect(this._grainEffect);
        this._setAutoClearAndTextureSharing(this.grain);
    }
    if (this.chromaticAberrationEnabled) {
        if (!this.chromaticAberration.isReady()) {
            this.chromaticAberration.updateEffect();
        }
        this.addEffect(this._chromaticAberrationEffect);
        this._setAutoClearAndTextureSharing(this.chromaticAberration);
    }
    if (this.fxaaEnabled) {
        this.fxaa = new FxaaPostProcess("fxaa", 1.0, null, Texture.BILINEAR_SAMPLINGMODE, engine, false, this._defaultPipelineTextureType);
        this.addEffect(new PostProcessRenderEffect(engine, this.FxaaPostProcessId, () => {
            return this.fxaa;
        }, true));
        this._setAutoClearAndTextureSharing(this.fxaa, true);
    }
    if (this._cameras !== null) {
        this._scene.postProcessRenderPipelineManager.attachCamerasToRenderPipeline(this._name, this._cameras);
    }
    // In multicamera mode, the scene needs to autoclear in between cameras.
    if ((this._scene.activeCameras && this._scene.activeCameras.length > 1) || (this._scene.activeCamera && this._cameras.indexOf(this._scene.activeCamera) === -1)) {
        this._scene.autoClear = true;
    }
    // The active camera on the scene can be changed anytime
    if (!this._activeCameraChangedObserver) {
        this._activeCameraChangedObserver = this._scene.onActiveCameraChanged.add(() => {
            if (this._scene.activeCamera && this._cameras.indexOf(this._scene.activeCamera) === -1) {
                this._scene.autoClear = true;
            }
        });
    }
    if (!this._activeCamerasChangedObserver) {
        this._activeCamerasChangedObserver = this._scene.onActiveCamerasChanged.add(() => {
            if (this._scene.activeCameras && this._scene.activeCameras.length > 1) {
                this._scene.autoClear = true;
            }
        });
    }
    if (!this._enableMSAAOnFirstPostProcess(this.samples) && this.samples > 1) {
        Logger.Warn("MSAA failed to enable, MSAA is only supported in browsers that support webGL >= 2.0");
    }
    this.onBuildObservable.notifyObservers(this);
}
|
|
3395
|
-
// Disposes the per-camera post processes. imageProcessing and fxaa are
// recreated by every _buildPipeline call, so they are always disposed and
// nulled; the effects created once in the constructor are only disposed
// when disposeNonRecreated is true (i.e. on full pipeline disposal).
_disposePostProcesses(disposeNonRecreated = false) {
    for (let i = 0; i < this._cameras.length; i++) {
        const camera = this._cameras[i];
        if (this.imageProcessing) {
            this.imageProcessing.dispose(camera);
        }
        if (this.fxaa) {
            this.fxaa.dispose(camera);
        }
        // These are created in the constructor and should not be disposed on every pipeline change
        if (disposeNonRecreated) {
            if (this.sharpen) {
                this.sharpen.dispose(camera);
            }
            if (this.depthOfField) {
                this._scene.onAfterRenderTargetsRenderObservable.remove(this._depthOfFieldSceneObserver);
                this.depthOfField.disposeEffects(camera);
            }
            if (this.bloom) {
                this.bloom.disposeEffects(camera);
            }
            if (this.chromaticAberration) {
                this.chromaticAberration.dispose(camera);
            }
            if (this.grain) {
                this.grain.dispose(camera);
            }
            if (this._glowLayer) {
                this._glowLayer.dispose();
            }
        }
    }
    this.imageProcessing = null;
    this.fxaa = null;
    if (disposeNonRecreated) {
        this.sharpen = null;
        this._sharpenEffect = null;
        this.depthOfField = null;
        this.bloom = null;
        this.chromaticAberration = null;
        this._chromaticAberrationEffect = null;
        this.grain = null;
        this._grainEffect = null;
        this._glowLayer = null;
    }
}
|
|
3441
|
-
/**
|
|
3442
|
-
* Adds a camera to the pipeline
|
|
3443
|
-
* @param camera the camera to be added
|
|
3444
|
-
*/
|
|
3445
|
-
addCamera(camera) {
|
|
3446
|
-
this._camerasToBeAttached.push(camera);
|
|
3447
|
-
this._buildPipeline();
|
|
3448
|
-
}
|
|
3449
|
-
/**
|
|
3450
|
-
* Removes a camera from the pipeline
|
|
3451
|
-
* @param camera the camera to remove
|
|
3452
|
-
*/
|
|
3453
|
-
removeCamera(camera) {
|
|
3454
|
-
const index = this._camerasToBeAttached.indexOf(camera);
|
|
3455
|
-
this._camerasToBeAttached.splice(index, 1);
|
|
3456
|
-
this._buildPipeline();
|
|
3457
|
-
}
|
|
3458
|
-
/**
|
|
3459
|
-
* Dispose of the pipeline and stop all post processes
|
|
3460
|
-
*/
|
|
3461
|
-
dispose() {
|
|
3462
|
-
this._buildAllowed = false;
|
|
3463
|
-
this.onBuildObservable.clear();
|
|
3464
|
-
this._disposePostProcesses(true);
|
|
3465
|
-
this._scene.postProcessRenderPipelineManager.detachCamerasFromRenderPipeline(this._name, this._cameras);
|
|
3466
|
-
this._scene._postProcessRenderPipelineManager.removePipeline(this.name);
|
|
3467
|
-
this._scene.autoClear = true;
|
|
3468
|
-
if (this._resizeObserver) {
|
|
3469
|
-
this._scene.getEngine().onResizeObservable.remove(this._resizeObserver);
|
|
3470
|
-
this._resizeObserver = null;
|
|
3471
|
-
}
|
|
3472
|
-
this._scene.onActiveCameraChanged.remove(this._activeCameraChangedObserver);
|
|
3473
|
-
this._scene.onActiveCamerasChanged.remove(this._activeCamerasChangedObserver);
|
|
3474
|
-
this._scene.imageProcessingConfiguration.onUpdateParameters.remove(this._imageProcessingConfigurationObserver);
|
|
3475
|
-
super.dispose();
|
|
3476
|
-
}
|
|
3477
|
-
/**
|
|
3478
|
-
* Serialize the rendering pipeline (Used when exporting)
|
|
3479
|
-
* @returns the serialized object
|
|
3480
|
-
*/
|
|
3481
|
-
serialize() {
|
|
3482
|
-
const serializationObject = SerializationHelper.Serialize(this);
|
|
3483
|
-
serializationObject.customType = "DefaultRenderingPipeline";
|
|
3484
|
-
return serializationObject;
|
|
3485
|
-
}
|
|
3486
|
-
/**
|
|
3487
|
-
* Parse the serialized pipeline
|
|
3488
|
-
* @param source Source pipeline.
|
|
3489
|
-
* @param scene The scene to load the pipeline to.
|
|
3490
|
-
* @param rootUrl The URL of the serialized pipeline.
|
|
3491
|
-
* @returns An instantiated pipeline from the serialized object.
|
|
3492
|
-
*/
|
|
3493
|
-
static Parse(source, scene, rootUrl) {
|
|
3494
|
-
return SerializationHelper.Parse(() => new DefaultRenderingPipeline(source._name, source._name._hdr, scene), source, scene, rootUrl);
|
|
3495
|
-
}
|
|
3496
|
-
}
|
|
3497
|
-
__decorate([
|
|
3498
|
-
serialize()
|
|
3499
|
-
], DefaultRenderingPipeline.prototype, "sharpenEnabled", null);
|
|
3500
|
-
__decorate([
|
|
3501
|
-
serialize()
|
|
3502
|
-
], DefaultRenderingPipeline.prototype, "bloomKernel", null);
|
|
3503
|
-
__decorate([
|
|
3504
|
-
serialize()
|
|
3505
|
-
], DefaultRenderingPipeline.prototype, "_bloomWeight", void 0);
|
|
3506
|
-
__decorate([
|
|
3507
|
-
serialize()
|
|
3508
|
-
], DefaultRenderingPipeline.prototype, "_bloomThreshold", void 0);
|
|
3509
|
-
__decorate([
|
|
3510
|
-
serialize()
|
|
3511
|
-
], DefaultRenderingPipeline.prototype, "_hdr", void 0);
|
|
3512
|
-
__decorate([
|
|
3513
|
-
serialize()
|
|
3514
|
-
], DefaultRenderingPipeline.prototype, "bloomWeight", null);
|
|
3515
|
-
__decorate([
|
|
3516
|
-
serialize()
|
|
3517
|
-
], DefaultRenderingPipeline.prototype, "bloomThreshold", null);
|
|
3518
|
-
__decorate([
|
|
3519
|
-
serialize()
|
|
3520
|
-
], DefaultRenderingPipeline.prototype, "bloomScale", null);
|
|
3521
|
-
__decorate([
|
|
3522
|
-
serialize()
|
|
3523
|
-
], DefaultRenderingPipeline.prototype, "bloomEnabled", null);
|
|
3524
|
-
__decorate([
|
|
3525
|
-
serialize()
|
|
3526
|
-
], DefaultRenderingPipeline.prototype, "depthOfFieldEnabled", null);
|
|
3527
|
-
__decorate([
|
|
3528
|
-
serialize()
|
|
3529
|
-
], DefaultRenderingPipeline.prototype, "depthOfFieldBlurLevel", null);
|
|
3530
|
-
__decorate([
|
|
3531
|
-
serialize()
|
|
3532
|
-
], DefaultRenderingPipeline.prototype, "fxaaEnabled", null);
|
|
3533
|
-
__decorate([
|
|
3534
|
-
serialize()
|
|
3535
|
-
], DefaultRenderingPipeline.prototype, "samples", null);
|
|
3536
|
-
__decorate([
|
|
3537
|
-
serialize()
|
|
3538
|
-
], DefaultRenderingPipeline.prototype, "imageProcessingEnabled", null);
|
|
3539
|
-
__decorate([
|
|
3540
|
-
serialize()
|
|
3541
|
-
], DefaultRenderingPipeline.prototype, "glowLayerEnabled", null);
|
|
3542
|
-
__decorate([
|
|
3543
|
-
serialize()
|
|
3544
|
-
], DefaultRenderingPipeline.prototype, "chromaticAberrationEnabled", null);
|
|
3545
|
-
__decorate([
|
|
3546
|
-
serialize()
|
|
3547
|
-
], DefaultRenderingPipeline.prototype, "grainEnabled", null);
|
|
3548
|
-
RegisterClass("BABYLON.DefaultRenderingPipeline", DefaultRenderingPipeline);
|
|
3549
|
-
|
|
3550
|
-
// Do not edit.
|
|
3551
|
-
const name$i = "lensHighlightsPixelShader";
|
|
3552
|
-
const shader$i = `uniform sampler2D textureSampler;
|
|
3553
|
-
void main(void)
|
|
3554
|
-
blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(-0.84*w,0.43*h)));
|
|
3555
|
-
blurred+=highlightColor(texture2D(textureSampler,vUV+vec2(-0.85*w,0.36*h)));
|
|
3556
|
-
blurred/=39.0;
|
|
3557
|
-
// Sideeffect
|
|
3558
|
-
ShaderStore.ShadersStore[name$i] = shader$i;
|
|
3559
|
-
|
|
3560
|
-
// Do not edit.
|
|
3561
|
-
const name$h = "depthOfFieldPixelShader";
|
|
3562
|
-
const shader$h = `uniform sampler2D textureSampler;
|
|
3563
|
-
#define TWOPI 6.28318530
|
|
3564
|
-
#define inverse_focal_length 0.1
|
|
3565
|
-
vec2 centered_screen_pos;
|
|
3566
|
-
void main(void)
|
|
3567
|
-
// Sideeffect
|
|
3568
|
-
ShaderStore.ShadersStore[name$h] = shader$h;
|
|
3569
|
-
|
|
3570
|
-
/**
|
|
3571
|
-
* Contains all parameters needed for the prepass to perform
|
|
3572
|
-
* screen space subsurface scattering
|
|
3573
|
-
*/
|
|
3574
|
-
class SSAO2Configuration {
|
|
3575
|
-
constructor() {
|
|
3576
|
-
/**
|
|
3577
|
-
* Is subsurface enabled
|
|
3578
|
-
*/
|
|
3579
|
-
this.enabled = false;
|
|
3580
|
-
/**
|
|
3581
|
-
* Name of the configuration
|
|
3582
|
-
*/
|
|
3583
|
-
this.name = "ssao2";
|
|
3584
|
-
/**
|
|
3585
|
-
* Textures that should be present in the MRT for this effect to work
|
|
3586
|
-
*/
|
|
3587
|
-
this.texturesRequired = [6, 5];
|
|
3588
|
-
}
|
|
3589
|
-
}
|
|
3590
|
-
|
|
3591
|
-
// Do not edit.
|
|
3592
|
-
const name$g = "ssao2PixelShader";
|
|
3593
|
-
const shader$g = `precision highp float;
|
|
3594
|
-
float scales[16]=float[16](
|