@galacean/engine-core 1.6.0-beta.2 → 1.6.1
- package/dist/main.js +17 -8
- package/dist/main.js.map +1 -1
- package/dist/module.js +17 -8
- package/dist/module.js.map +1 -1
- package/package.json +3 -3
- package/types/Entity.d.ts +1 -0
package/dist/module.js
CHANGED
@@ -7740,6 +7740,8 @@ function dependentComponents(componentOrComponents, dependentMode) {
         rotation._onValueChanged = target._onRotationChanged;
         // @ts-ignore
         scale._onValueChanged = target._onScaleChanged;
+        // When cloning, other components may obtain properties such as `rotationQuaternion` in the constructor, local related dirty flags need to be corrected
+        target._setDirtyFlagTrue(2 | 64);
     };
     _proto._onLocalMatrixChanging = function _onLocalMatrixChanging() {
         this._setDirtyFlagFalse(64);
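Context for the `2 | 64` fix: the published bundle only exposes the raw bits, but they index into Transform's internal dirty-flag mask. A minimal sketch of the failure mode, with assumed flag names (`LocalQuat` and `LocalMatrix` are illustrative labels for bits 2 and 64, not the engine's actual identifiers):

```ts
// Hypothetical flag names -- the minified bundle only shows the raw bits 2 and 64.
enum TransformModifyFlags {
  LocalQuat = 0x2,    // assumed: local rotation quaternion needs recompute
  LocalMatrix = 0x40, // assumed: local matrix needs recompute
}

class TransformSketch {
  private _dirtyFlag = TransformModifyFlags.LocalQuat | TransformModifyFlags.LocalMatrix;
  private _rotationQuaternion = { x: 0, y: 0, z: 0, w: 1 };

  get rotationQuaternion() {
    // Reading the getter recomputes the value and clears the dirty bit.
    if (this._dirtyFlag & TransformModifyFlags.LocalQuat) {
      // ...recompute the quaternion from the euler rotation here...
      this._dirtyFlag &= ~TransformModifyFlags.LocalQuat;
    }
    return this._rotationQuaternion;
  }

  // If another component's constructor reads `rotationQuaternion` while a clone
  // target is still being populated, the bit is cleared against stale data.
  // Forcing it back on after the copy (the `2 | 64` call in the hunk above)
  // makes the next read recompute from the freshly cloned values.
  _setDirtyFlagTrue(flags: number): void {
    this._dirtyFlag |= flags;
  }
}
```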
@@ -10835,7 +10837,7 @@ var blitVs = "attribute vec4 POSITION_UV;\nvarying vec2 v_uv;\n\nvoid main() {\t

 var bilateralBlurFS = "#include <common>\n\nvarying vec2 v_uv;\nuniform sampler2D renderer_BlitTexture;\nuniform vec4 renderer_SourceScaleOffset; \nuniform float material_farPlaneOverEdgeDistance;\n#if SSAO_QUALITY == 0\n #define BLUR_SAMPLE_COUNT 3\n#elif SSAO_QUALITY == 1\n #define BLUR_SAMPLE_COUNT 6\n#elif SSAO_QUALITY == 2\n #define BLUR_SAMPLE_COUNT 12\n#endif\n\nuniform float material_kernel[12]; // Sample weights for bilateral blur\n\nfloat bilateralWeight(float depth, float sampleDepth) {\n float diff = (sampleDepth - depth) * material_farPlaneOverEdgeDistance;\n return max(0.0, 1.0 - diff * diff);\n}\n\nhighp float unpack(highp vec2 depth) {\n // depth here only has 8-bits of precision, but the unpacked depth is highp\n // this is equivalent to (x8 * 256 + y8) / 65535, which gives a value between 0 and 1\n return (depth.x * (256.0 / 257.0) + depth.y * (1.0 / 257.0));\n}\n\nvoid tap(const sampler2D saoTexture,\n inout float sum, inout float totalWeight, float weight, float depth, vec2 position) {\n vec4 data = texture2D(saoTexture, position);\n // bilateral sample\n float bilateral = weight * bilateralWeight(depth, unpack(data.gb));\n sum += data.r * bilateral;\n totalWeight += bilateral;\n}\n\nvoid main(){\n mediump vec4 data = texture2D(renderer_BlitTexture, v_uv);\n float depth = unpack(data.gb);\n\n // Weight of the center pixel from the Gaussian kernel (typically 1.0)\n float totalWeight = material_kernel[0];\n float sum = data.r * totalWeight;\n \n vec2 offset = renderer_SourceScaleOffset.zw;\n for (int i = 1; i < BLUR_SAMPLE_COUNT; i++) {\n float weight = material_kernel[i];\n tap(renderer_BlitTexture, sum, totalWeight, weight, depth, v_uv + offset);\n tap(renderer_BlitTexture, sum, totalWeight, weight, depth, v_uv - offset);\n offset += renderer_SourceScaleOffset.zw;\n }\n\n float ao = sum * (1.0 / totalWeight);\n\n // simple dithering helps a lot (assumes 8 bits target)\n // this is most useful with high quality/large blurs\n ao += ((interleavedGradientNoise(gl_FragCoord.xy) - 0.5) / 255.0);\n gl_FragColor = vec4(ao, data.gb, 1.0);\n\n}\n\n"; // eslint-disable-line

-var scalableAmbientOcclusionFS = "// Ambient Occlusion, largely inspired from:\n// \"The Alchemy Screen-Space Ambient Obscurance Algorithm\" by Morgan McGuire\n// \"Scalable Ambient Obscurance\" by Morgan McGuire, Michael Mara and David Luebke\n// https://research.nvidia.com/sites/default/files/pubs/2012-06_Scalable-Ambient-Obscurance/McGuire12SAO.pdf\n\n#include <common>\n\nvarying vec2 v_uv;\nuniform vec4 renderer_texelSize; // x: 1/width, y: 1/height, z: width, w: height\nuniform sampler2D renderer_BlitTexture; // Camera_DepthTexture\n\n// float inc = (1.0f / (SAMPLE_COUNT - 0.5f)) * SPIRAL_TURNS * 2.0 * PI\n// const vec2 angleIncCosSin = vec2(cos(inc), sin(inc))\n#if SSAO_QUALITY == 0\n #define SAMPLE_COUNT 7.0\n #define SPIRAL_TURNS 3.0\n const vec2 angleIncCosSin = vec2(-0.971148, 0.238227);\n#elif SSAO_QUALITY == 1\n #define SAMPLE_COUNT 11.0\n #define SPIRAL_TURNS 6.0\n const vec2 angleIncCosSin = vec2(-0.896127, -0.443780);\n#elif SSAO_QUALITY == 2\n #define SAMPLE_COUNT 16.0\n #define SPIRAL_TURNS 7.0\n const vec2 angleIncCosSin = vec2(-0.966846, 0.255311);\n#endif\n\nuniform float material_invRadiusSquared; // Inverse of the squared radius\nuniform float material_minHorizonAngleSineSquared; // Minimum horizon angle sine squared\nuniform float material_intensity; // Intensity of the ambient occlusion\nuniform float material_projectionScaleRadius;\nuniform float material_bias; // Bias to avoid self-occlusion\nuniform float material_peak2; // Peak value to avoid singularities\nuniform float material_power; // Exponent to convert occlusion to visibility\nuniform vec2 material_invProjScaleXY; //invProjection[0][0] * 2, invProjection[1][1] * 2\n\n\nvec3 computeViewSpacePosition(vec2 uv, float linearDepth, vec2 invProjScaleXY) {\n #ifdef CAMERA_ORTHOGRAPHIC\n return vec3((vec2(0.5) - uv) * invProjScaleXY , linearDepth);\n #else\n return vec3((vec2(0.5) - uv) * invProjScaleXY * linearDepth, linearDepth);\n #endif\n}\n\nfloat depthToViewZ(float depth) {\n return -remapDepthBufferEyeDepth(depth);\n}\n\n// reconstructing normal from depth buffer\n// https://atyuwen.github.io/posts/normal-reconstruction\n// https://wickedengine.net/2019/09/22/improved-normal-reconstruction-from-depth/\nvec3 computeViewSpaceNormal(vec2 uv, sampler2D depthTexture, float depth, vec3 viewPos, vec2 texel, vec2 invProjScaleXY) {\n vec3 normal = vec3(0.0);\n#if SSAO_QUALITY == 0 || SSAO_QUALITY == 1\n vec2 uvdx = uv + vec2(texel.x, 0.0);\n vec2 uvdy = uv + vec2(0.0, texel.y);\n\n float depthX = texture2D(depthTexture, uvdx).r;\n float depthY = texture2D(depthTexture, uvdy).r;\n\n vec3 px = computeViewSpacePosition(uvdx, depthToViewZ(depthX), invProjScaleXY);\n vec3 py = computeViewSpacePosition(uvdy, depthToViewZ(depthY), invProjScaleXY);\n\n vec3 dpdx = px - viewPos;\n vec3 dpdy = py - viewPos;\n\n normal = normalize(cross(dpdx, dpdy));\n\n#elif SSAO_QUALITY == 2\n vec2 dx = vec2(texel.x, 0.0);\n vec2 dy = vec2(0.0, texel.y);\n \n vec4 H;\n H.x = texture2D(depthTexture, uv - dx).r; // left\n H.y = texture2D(depthTexture, uv + dx).r; // right\n H.z = texture2D(depthTexture, uv - dx * 2.0).r; // left2\n H.w = texture2D(depthTexture, uv + dx * 2.0).r; // right2\n \n // Calculate horizontal edge weights\n vec2 horizontalEdgeWeights = abs((2.0 * H.xy - H.zw) - depth);\n\n vec3 pos_l = computeViewSpacePosition(uv - dx, depthToViewZ(H.x), invProjScaleXY);\n vec3 pos_r = computeViewSpacePosition(uv + dx, depthToViewZ(H.y), invProjScaleXY);\n vec3 dpdx = (horizontalEdgeWeights.x < horizontalEdgeWeights.y) ? (viewPos - pos_l) : (pos_r - viewPos);\n\n // Sample depths for vertical edge detection\n vec4 V;\n V.x = texture2D(depthTexture, uv - dy).r; // down\n V.y = texture2D(depthTexture, uv + dy).r; // up\n V.z = texture2D(depthTexture, uv - dy * 2.0).r; // down2\n V.w = texture2D(depthTexture, uv + dy * 2.0).r; // up2\n\n // Calculate vertical edge weights\n vec2 verticalEdgeWeights = abs((2.0 * V.xy - V.zw) - depth);\n vec3 pos_d = computeViewSpacePosition(uv - dy, depthToViewZ(V.x), invProjScaleXY);\n vec3 pos_u = computeViewSpacePosition(uv + dy, depthToViewZ(V.y), invProjScaleXY);\n vec3 dpdy = (verticalEdgeWeights.x < verticalEdgeWeights.y) ? (viewPos - pos_d) : (pos_u - viewPos);\n normal = normalize(cross(dpdx, dpdy));\n #endif\n return normal;\n\n}\n\nvec3 tapLocation(float i, const float noise) {\n float offset = ((2.0 * PI) * 2.4) * noise;\n float angle = ((i / SAMPLE_COUNT) * SPIRAL_TURNS) * (2.0 * PI) + offset;\n float radius = (i + noise + 0.5) / SAMPLE_COUNT;\n return vec3(cos(angle), sin(angle), radius * radius);\n}\n\nvec2 startPosition(const float noise) {\n float angle = ((2.0 * PI) * 2.4) * noise;\n return vec2(cos(angle), sin(angle));\n}\n\nmat2 tapAngleStep() {\n vec2 t = angleIncCosSin;\n return mat2(t.x, t.y, -t.y, t.x);\n}\n\nvec3 tapLocationFast(float i, vec2 p, const float noise) {\n float radius = (i + noise + 0.5) / SAMPLE_COUNT;\n return vec3(p, radius * radius);\n}\n\nvoid computeAmbientOcclusionSAO(inout float occlusion, float i, float ssDiskRadius, vec2 uv, vec3 originPosition, vec3 normal,\n vec2 tapPosition, float noise) {\n\n vec3 tap = tapLocationFast(i, tapPosition, noise);\n\n float ssRadius = max(1.0, tap.z * ssDiskRadius); // at least 1 pixel screen-space radius\n\n vec2 uvSamplePos = uv + vec2(ssRadius * tap.xy) * renderer_texelSize.xy;\n\n float occlusionDepth = texture2D(renderer_BlitTexture, uvSamplePos).r;\n float linearOcclusionDepth = depthToViewZ(occlusionDepth);\n // “p” is the position after spiral sampling\n vec3 p = computeViewSpacePosition(uvSamplePos, linearOcclusionDepth, material_invProjScaleXY);\n\n // now we have the sample, compute AO\n vec3 v = p - originPosition; // sample vector\n float vv = dot(v, v); // squared distance\n float vn = dot(v, normal); // distance * cos(v, normal)\n\n // discard samples that are outside of the radius, preventing distant geometry to\n // cast shadows -- there are many functions that work and choosing one is an artistic\n // decision.\n float weight = pow(max(0.0, 1.0 - vv * material_invRadiusSquared), 2.0);\n\n // discard samples that are too close to the horizon to reduce shadows cast by geometry\n // not sufficently tessellated. The goal is to discard samples that form an angle 'beta'\n // smaller than 'epsilon' with the horizon. We already have dot(v,n) which is equal to the\n // sin(beta) * |v|. So the test simplifies to vn^2 < vv * sin(epsilon)^2.\n weight *= step(vv * material_minHorizonAngleSineSquared, vn * vn);\n\n // Calculate the contribution of a single sampling point to Ambient Occlusion\n float sampleOcclusion = max(0.0, vn + (originPosition.z * material_bias)) / (vv + material_peak2);\n occlusion += weight * sampleOcclusion;\n}\n\nvoid scalableAmbientObscurance(vec2 uv, vec3 origin, vec3 normal, out float obscurance) {\n float noise = interleavedGradientNoise(gl_FragCoord.xy);\n vec2 tapPosition = startPosition(noise);\n mat2 angleStep = tapAngleStep();\n\n // Choose the screen-space sample radius\n // proportional to the projected area of the sphere\n float ssDiskRadius = -(material_projectionScaleRadius / origin.z);\n\n // Accumulate the occlusion amount of all sampling points\n obscurance = 0.0;\n for (float i = 0.0; i < SAMPLE_COUNT; i += 1.0) {\n computeAmbientOcclusionSAO(obscurance, i, ssDiskRadius, uv, origin, normal, tapPosition, noise);\n tapPosition = angleStep * tapPosition;\n }\n obscurance = sqrt(obscurance * material_intensity);\n}\n\nvec2 pack(highp float normalizedDepth) {\n highp float z = clamp(normalizedDepth, 0.0, 1.0);\n highp float t = floor(256.0 * z);\n mediump float hi = t * (1.0 / 256.0);\n mediump float lo = (256.0 * z) - t;\n return vec2(hi, lo);\n}\n\n\nvoid main(){\n float depth = texture2D(renderer_BlitTexture, v_uv).r;\n float z = depthToViewZ(depth);\n\n // Reconstruct view space position from depth\n vec3 positionVS = computeViewSpacePosition(v_uv, z, material_invProjScaleXY);\n\n // Compute normal\n vec3 normal = computeViewSpaceNormal(v_uv, renderer_BlitTexture, depth, positionVS, renderer_texelSize.xy, material_invProjScaleXY);\n\n float occlusion = 0.0;\n scalableAmbientObscurance(v_uv, positionVS, normal, occlusion);\n\n // Occlusion to visibility\n float aoVisibility = pow(clamp(1.0 - occlusion, 0.0, 1.0), material_power);\n\n gl_FragColor = vec4(aoVisibility, pack(-positionVS.z/camera_ProjectionParams.z), 1.0);\n}\n\n"; // eslint-disable-line
+var scalableAmbientOcclusionFS = "// Ambient Occlusion, largely inspired from:\n// \"The Alchemy Screen-Space Ambient Obscurance Algorithm\" by Morgan McGuire\n// \"Scalable Ambient Obscurance\" by Morgan McGuire, Michael Mara and David Luebke\n// https://research.nvidia.com/sites/default/files/pubs/2012-06_Scalable-Ambient-Obscurance/McGuire12SAO.pdf\n\n#include <common>\n\nvarying vec2 v_uv;\nuniform vec4 renderer_texelSize; // x: 1/width, y: 1/height, z: width, w: height\nuniform highp sampler2D renderer_BlitTexture; // Camera_DepthTexture\n\n// float inc = (1.0f / (SAMPLE_COUNT - 0.5f)) * SPIRAL_TURNS * 2.0 * PI\n// const vec2 angleIncCosSin = vec2(cos(inc), sin(inc))\n#if SSAO_QUALITY == 0\n #define SAMPLE_COUNT 7.0\n #define SPIRAL_TURNS 3.0\n const vec2 angleIncCosSin = vec2(-0.971148, 0.238227);\n#elif SSAO_QUALITY == 1\n #define SAMPLE_COUNT 11.0\n #define SPIRAL_TURNS 6.0\n const vec2 angleIncCosSin = vec2(-0.896127, -0.443780);\n#elif SSAO_QUALITY == 2\n #define SAMPLE_COUNT 16.0\n #define SPIRAL_TURNS 7.0\n const vec2 angleIncCosSin = vec2(-0.966846, 0.255311);\n#endif\n\nuniform float material_invRadiusSquared; // Inverse of the squared radius\nuniform float material_minHorizonAngleSineSquared; // Minimum horizon angle sine squared\nuniform float material_intensity; // Intensity of the ambient occlusion\nuniform float material_projectionScaleRadius;\nuniform float material_bias; // Bias to avoid self-occlusion\nuniform float material_peak2; // Peak value to avoid singularities\nuniform float material_power; // Exponent to convert occlusion to visibility\nuniform vec2 material_invProjScaleXY; //invProjection[0][0] * 2, invProjection[1][1] * 2\n\n\nvec3 computeViewSpacePosition(vec2 uv, float linearDepth, vec2 invProjScaleXY) {\n #ifdef CAMERA_ORTHOGRAPHIC\n return vec3((vec2(0.5) - uv) * invProjScaleXY , linearDepth);\n #else\n return vec3((vec2(0.5) - uv) * invProjScaleXY * linearDepth, linearDepth);\n #endif\n}\n\nfloat depthToViewZ(float depth) {\n return -remapDepthBufferEyeDepth(depth);\n}\n\n// reconstructing normal from depth buffer\n// https://atyuwen.github.io/posts/normal-reconstruction\n// https://wickedengine.net/2019/09/22/improved-normal-reconstruction-from-depth/\nvec3 computeViewSpaceNormal(vec2 uv, highp sampler2D depthTexture, float depth, vec3 viewPos, vec2 texel, vec2 invProjScaleXY) {\n vec3 normal = vec3(0.0);\n#if SSAO_QUALITY == 0 || SSAO_QUALITY == 1\n vec2 uvdx = uv + vec2(texel.x, 0.0);\n vec2 uvdy = uv + vec2(0.0, texel.y);\n\n float depthX = texture2D(depthTexture, uvdx).r;\n float depthY = texture2D(depthTexture, uvdy).r;\n\n vec3 px = computeViewSpacePosition(uvdx, depthToViewZ(depthX), invProjScaleXY);\n vec3 py = computeViewSpacePosition(uvdy, depthToViewZ(depthY), invProjScaleXY);\n\n vec3 dpdx = px - viewPos;\n vec3 dpdy = py - viewPos;\n\n normal = normalize(cross(dpdx, dpdy));\n\n#elif SSAO_QUALITY == 2\n vec2 dx = vec2(texel.x, 0.0);\n vec2 dy = vec2(0.0, texel.y);\n \n vec4 H;\n H.x = texture2D(depthTexture, uv - dx).r; // left\n H.y = texture2D(depthTexture, uv + dx).r; // right\n H.z = texture2D(depthTexture, uv - dx * 2.0).r; // left2\n H.w = texture2D(depthTexture, uv + dx * 2.0).r; // right2\n \n // Calculate horizontal edge weights\n vec2 horizontalEdgeWeights = abs((2.0 * H.xy - H.zw) - depth);\n\n vec3 pos_l = computeViewSpacePosition(uv - dx, depthToViewZ(H.x), invProjScaleXY);\n vec3 pos_r = computeViewSpacePosition(uv + dx, depthToViewZ(H.y), invProjScaleXY);\n vec3 dpdx = (horizontalEdgeWeights.x < horizontalEdgeWeights.y) ? (viewPos - pos_l) : (pos_r - viewPos);\n\n // Sample depths for vertical edge detection\n vec4 V;\n V.x = texture2D(depthTexture, uv - dy).r; // down\n V.y = texture2D(depthTexture, uv + dy).r; // up\n V.z = texture2D(depthTexture, uv - dy * 2.0).r; // down2\n V.w = texture2D(depthTexture, uv + dy * 2.0).r; // up2\n\n // Calculate vertical edge weights\n vec2 verticalEdgeWeights = abs((2.0 * V.xy - V.zw) - depth);\n vec3 pos_d = computeViewSpacePosition(uv - dy, depthToViewZ(V.x), invProjScaleXY);\n vec3 pos_u = computeViewSpacePosition(uv + dy, depthToViewZ(V.y), invProjScaleXY);\n vec3 dpdy = (verticalEdgeWeights.x < verticalEdgeWeights.y) ? (viewPos - pos_d) : (pos_u - viewPos);\n normal = normalize(cross(dpdx, dpdy));\n #endif\n return normal;\n\n}\n\nvec3 tapLocation(float i, const float noise) {\n float offset = ((2.0 * PI) * 2.4) * noise;\n float angle = ((i / SAMPLE_COUNT) * SPIRAL_TURNS) * (2.0 * PI) + offset;\n float radius = (i + noise + 0.5) / SAMPLE_COUNT;\n return vec3(cos(angle), sin(angle), radius * radius);\n}\n\nvec2 startPosition(const float noise) {\n float angle = ((2.0 * PI) * 2.4) * noise;\n return vec2(cos(angle), sin(angle));\n}\n\nmat2 tapAngleStep() {\n vec2 t = angleIncCosSin;\n return mat2(t.x, t.y, -t.y, t.x);\n}\n\nvec3 tapLocationFast(float i, vec2 p, const float noise) {\n float radius = (i + noise + 0.5) / SAMPLE_COUNT;\n return vec3(p, radius * radius);\n}\n\nvoid computeAmbientOcclusionSAO(inout float occlusion, float i, float ssDiskRadius, vec2 uv, vec3 originPosition, vec3 normal,\n vec2 tapPosition, float noise) {\n\n vec3 tap = tapLocationFast(i, tapPosition, noise);\n\n float ssRadius = max(1.0, tap.z * ssDiskRadius); // at least 1 pixel screen-space radius\n\n vec2 uvSamplePos = uv + vec2(ssRadius * tap.xy) * renderer_texelSize.xy;\n\n float occlusionDepth = texture2D(renderer_BlitTexture, uvSamplePos).r;\n float linearOcclusionDepth = depthToViewZ(occlusionDepth);\n // “p” is the position after spiral sampling\n vec3 p = computeViewSpacePosition(uvSamplePos, linearOcclusionDepth, material_invProjScaleXY);\n\n // now we have the sample, compute AO\n vec3 v = p - originPosition; // sample vector\n float vv = dot(v, v); // squared distance\n float vn = dot(v, normal); // distance * cos(v, normal)\n\n // discard samples that are outside of the radius, preventing distant geometry to\n // cast shadows -- there are many functions that work and choosing one is an artistic\n // decision.\n float weight = pow(max(0.0, 1.0 - vv * material_invRadiusSquared), 2.0);\n\n // discard samples that are too close to the horizon to reduce shadows cast by geometry\n // not sufficently tessellated. The goal is to discard samples that form an angle 'beta'\n // smaller than 'epsilon' with the horizon. We already have dot(v,n) which is equal to the\n // sin(beta) * |v|. So the test simplifies to vn^2 < vv * sin(epsilon)^2.\n weight *= step(vv * material_minHorizonAngleSineSquared, vn * vn);\n\n // Calculate the contribution of a single sampling point to Ambient Occlusion\n float sampleOcclusion = max(0.0, vn + (originPosition.z * material_bias)) / (vv + material_peak2);\n occlusion += weight * sampleOcclusion;\n}\n\nvoid scalableAmbientObscurance(vec2 uv, vec3 origin, vec3 normal, out float obscurance) {\n float noise = interleavedGradientNoise(gl_FragCoord.xy);\n vec2 tapPosition = startPosition(noise);\n mat2 angleStep = tapAngleStep();\n\n // Choose the screen-space sample radius\n // proportional to the projected area of the sphere\n float ssDiskRadius = -(material_projectionScaleRadius / origin.z);\n\n // Accumulate the occlusion amount of all sampling points\n obscurance = 0.0;\n for (float i = 0.0; i < SAMPLE_COUNT; i += 1.0) {\n computeAmbientOcclusionSAO(obscurance, i, ssDiskRadius, uv, origin, normal, tapPosition, noise);\n tapPosition = angleStep * tapPosition;\n }\n obscurance = sqrt(obscurance * material_intensity);\n}\n\nvec2 pack(highp float normalizedDepth) {\n highp float z = clamp(normalizedDepth, 0.0, 1.0);\n highp float t = floor(256.0 * z);\n mediump float hi = t * (1.0 / 256.0);\n mediump float lo = (256.0 * z) - t;\n return vec2(hi, lo);\n}\n\n\nvoid main(){\n float depth = texture2D(renderer_BlitTexture, v_uv).r;\n float z = depthToViewZ(depth);\n\n // Reconstruct view space position from depth\n vec3 positionVS = computeViewSpacePosition(v_uv, z, material_invProjScaleXY);\n\n // Compute normal\n vec3 normal = computeViewSpaceNormal(v_uv, renderer_BlitTexture, depth, positionVS, renderer_texelSize.xy, material_invProjScaleXY);\n\n float occlusion = 0.0;\n scalableAmbientObscurance(v_uv, positionVS, normal, occlusion);\n\n // Occlusion to visibility\n float aoVisibility = pow(clamp(1.0 - occlusion, 0.0, 1.0), material_power);\n\n gl_FragColor = vec4(aoVisibility, pack(-positionVS.z/camera_ProjectionParams.z), 1.0);\n}\n\n"; // eslint-disable-line

 /**
  * @internal
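The only change to `scalableAmbientOcclusionFS` is the `highp` qualifier on the depth sampler, both in the uniform declaration and in the `computeViewSpaceNormal` parameter list. The pass packs linear depth into two 8-bit color channels (`pack`/`unpack` in the shader source), so sampling at mediump can corrupt the low byte. A TypeScript mirror of the shader's round-trip math, as a sketch of why the precision matters:

```ts
// Mirror of the shader's pack()/unpack() pair, showing the 16-bit depth
// round trip (a sketch of the math, not an engine API).
function pack(normalizedDepth: number): [number, number] {
  const z = Math.min(Math.max(normalizedDepth, 0), 1);
  const t = Math.floor(256 * z);
  const hi = t / 256;     // top 8 bits, stored in one channel
  const lo = 256 * z - t; // bottom 8 bits, stored in the next channel
  return [hi, lo];
}

function unpack(hi: number, lo: number): number {
  // equivalent to (x8 * 256 + y8) / 65535, per the shader comment
  return hi * (256 / 257) + lo * (1 / 257);
}

// On GPUs where mediump floats carry only a ~10-bit mantissa, the
// intermediate 256.0 * z loses exactly the bits that end up in `lo`;
// highp preserves them.
console.log(unpack(...pack(0.5))); // ~0.498, i.e. 0.5 within 16-bit precision
```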
@@ -10848,6 +10850,7 @@ var scalableAmbientOcclusionFS = "// Ambient Occlusion, largely inspired from:\n
         var material = new Material(engine, Shader.find(ScalableAmbientObscurancePass.SHADER_NAME));
         material._addReferCount(1);
         _this._material = material;
+        _this.isSupported = _this.engine._hardwareRenderer.capability.isFragmentHighPrecision;
         return _this;
     }
     var _proto = ScalableAmbientObscurancePass.prototype;
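The new `isSupported` flag comes from the hardware renderer's capability table; this diff does not show how `isFragmentHighPrecision` is populated. The standard WebGL query behind a flag like this looks roughly as follows (a sketch, not the engine's exact code):

```ts
// Standard WebGL detection of highp float support in fragment shaders.
function detectFragmentHighPrecision(gl: WebGLRenderingContext): boolean {
  const format = gl.getShaderPrecisionFormat(gl.FRAGMENT_SHADER, gl.HIGH_FLOAT);
  // getShaderPrecisionFormat may return null; precision > 0 means highp is
  // genuinely available rather than silently downgraded to mediump.
  return format !== null && format.precision > 0;
}
```

This pairs with the shader change above: the SAO pass now requires a true highp fragment path, so it is skipped on hardware that would quantize the packed depth.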
@@ -15796,8 +15799,8 @@ CascadedShadowCasterPass._tempMatrix0 = new Matrix();
     }
     // Scalable ambient obscurance pass
     // Before opaque pass so materials can sample ambient occlusion in BRDF
-    if (ambientOcclusionEnabled && supportDepthTexture) {
-        var saoPass = this._saoPass;
+    var saoPass = this._saoPass;
+    if (ambientOcclusionEnabled && supportDepthTexture && saoPass.isSupported) {
         saoPass.onConfig(camera, this._depthOnlyPass.renderTarget);
         saoPass.onRender(context);
     } else {
@@ -21946,11 +21949,7 @@ var ComponentCloner = /*#__PURE__*/ function() {
         ], args));
         this._components.push(component);
         // @todo: temporary solution
-        if (_instanceof(component, Transform)) {
-            var transform = this._transform;
-            this._transform = component;
-            transform == null ? void 0 : transform.destroy();
-        }
+        if (_instanceof(component, Transform)) this._setTransform(component);
         component._setActive(true, ActiveChangeFlag.All);
         return component;
     };
@@ -22379,6 +22378,16 @@ var ComponentCloner = /*#__PURE__*/ function() {
             }
         }
     };
+    _proto._setTransform = function _setTransform(value) {
+        var _this__transform;
+        (_this__transform = this._transform) == null ? void 0 : _this__transform.destroy();
+        this._transform = value;
+        var children = this._children;
+        for(var i = 0, n = children.length; i < n; i++){
+            var _children_i_transform;
+            (_children_i_transform = children[i].transform) == null ? void 0 : _children_i_transform._parentChange();
+        }
+    };
     /**
      * @deprecated
      */ _proto.getInvModelMatrix = function getInvModelMatrix() {
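De-minified, the new `_setTransform` helper reads roughly as below (a reading aid, not the engine source; `_parentChange` is assumed to re-resolve a child's cached parent transform):

```ts
// Simplified shapes; the real Entity/Transform carry far more state.
interface TransformLike {
  destroy(): void;
  _parentChange(): void; // assumed: drops/re-resolves the cached parent transform
}

class EntitySketch {
  _transform: TransformLike | null = null;
  _children: EntitySketch[] = [];

  get transform(): TransformLike | null {
    return this._transform;
  }

  _setTransform(value: TransformLike): void {
    // Destroy the replaced transform instead of leaking it...
    this._transform?.destroy();
    this._transform = value;
    // ...and notify every child, since a child's transform may hold a cached
    // reference to the old (now destroyed) parent transform.
    const children = this._children;
    for (let i = 0, n = children.length; i < n; i++) {
      children[i].transform?._parentChange();
    }
  }
}
```

Centralizing this in one helper is what lets the `addComponent` path (hunk at 21946 above) replace its inline destroy-and-swap with a single call, and it adds the child notification that the old inline version lacked.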