@galacean/engine-core 1.6.0-beta.2 → 1.6.1

This diff shows the changes between the two publicly released versions of this package, as they appear in the public registry, and is provided for informational purposes only.
package/dist/main.js CHANGED
@@ -7744,6 +7744,8 @@ function dependentComponents(componentOrComponents, dependentMode) {
  rotation._onValueChanged = target._onRotationChanged;
  // @ts-ignore
  scale._onValueChanged = target._onScaleChanged;
+ // When cloning, other components may obtain properties such as `rotationQuaternion` in the constructor, local related dirty flags need to be corrected
+ target._setDirtyFlagTrue(2 | 64);
  };
  _proto._onLocalMatrixChanging = function _onLocalMatrixChanging() {
  this._setDirtyFlagFalse(64);
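
Note on this hunk: when an entity is cloned, the `_onValueChanged` listeners above are rebound, but components cloned alongside the transform may read derived properties such as `rotationQuaternion` from inside their own constructors before the caches have been recomputed; forcing the two local dirty bits back on makes those reads recompute instead of returning stale values. The meaning of bits 2 and 64 is not visible in this diff, so the sketch below only illustrates the general lazy dirty-flag pattern, with assumed names for those bits.

// Minimal sketch of the dirty-flag pattern this fix relies on. The bit
// meanings are assumptions; the engine's real assignments for 2 and 64
// are not shown in this diff.
var LOCAL_QUAT_DIRTY = 2;    // assumed: cached local rotationQuaternion is stale
var LOCAL_MATRIX_DIRTY = 64; // assumed: cached local matrix is stale

function Transform() {
  this._dirtyFlags = LOCAL_QUAT_DIRTY | LOCAL_MATRIX_DIRTY;
  this._rotationQuaternion = [0, 0, 0, 1];
}

Transform.prototype._setDirtyFlagTrue = function (flags) {
  this._dirtyFlags |= flags;
};

Transform.prototype.getRotationQuaternion = function () {
  if (this._dirtyFlags & LOCAL_QUAT_DIRTY) {
    // Recompute the quaternion from the Euler rotation here, then clear the bit.
    this._dirtyFlags &= ~LOCAL_QUAT_DIRTY;
  }
  return this._rotationQuaternion;
};

// A clone copies raw fields, so without target._setDirtyFlagTrue(2 | 64) the
// copy could serve a stale cached quaternion to a component that reads it
// while being constructed.
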
@@ -10839,7 +10841,7 @@ var blitVs = "attribute vec4 POSITION_UV;\nvarying vec2 v_uv;\n\nvoid main() {\t
 
  var bilateralBlurFS = "#include <common>\n\nvarying vec2 v_uv;\nuniform sampler2D renderer_BlitTexture;\nuniform vec4 renderer_SourceScaleOffset; \nuniform float material_farPlaneOverEdgeDistance;\n#if SSAO_QUALITY == 0\n #define BLUR_SAMPLE_COUNT 3\n#elif SSAO_QUALITY == 1\n #define BLUR_SAMPLE_COUNT 6\n#elif SSAO_QUALITY == 2\n #define BLUR_SAMPLE_COUNT 12\n#endif\n\nuniform float material_kernel[12]; // Sample weights for bilateral blur\n\nfloat bilateralWeight(float depth, float sampleDepth) {\n float diff = (sampleDepth - depth) * material_farPlaneOverEdgeDistance;\n return max(0.0, 1.0 - diff * diff);\n}\n\nhighp float unpack(highp vec2 depth) {\n // depth here only has 8-bits of precision, but the unpacked depth is highp\n // this is equivalent to (x8 * 256 + y8) / 65535, which gives a value between 0 and 1\n return (depth.x * (256.0 / 257.0) + depth.y * (1.0 / 257.0));\n}\n\nvoid tap(const sampler2D saoTexture,\n inout float sum, inout float totalWeight, float weight, float depth, vec2 position) {\n vec4 data = texture2D(saoTexture, position);\n // bilateral sample\n float bilateral = weight * bilateralWeight(depth, unpack(data.gb));\n sum += data.r * bilateral;\n totalWeight += bilateral;\n}\n\nvoid main(){\n mediump vec4 data = texture2D(renderer_BlitTexture, v_uv);\n float depth = unpack(data.gb);\n\n // Weight of the center pixel from the Gaussian kernel (typically 1.0)\n float totalWeight = material_kernel[0];\n float sum = data.r * totalWeight;\n \n vec2 offset = renderer_SourceScaleOffset.zw;\n for (int i = 1; i < BLUR_SAMPLE_COUNT; i++) {\n float weight = material_kernel[i];\n tap(renderer_BlitTexture, sum, totalWeight, weight, depth, v_uv + offset);\n tap(renderer_BlitTexture, sum, totalWeight, weight, depth, v_uv - offset);\n offset += renderer_SourceScaleOffset.zw;\n }\n\n float ao = sum * (1.0 / totalWeight);\n\n // simple dithering helps a lot (assumes 8 bits target)\n // this is most useful with high quality/large blurs\n ao += ((interleavedGradientNoise(gl_FragCoord.xy) - 0.5) / 255.0);\n gl_FragColor = vec4(ao, data.gb, 1.0);\n\n}\n\n"; // eslint-disable-line
 
- var scalableAmbientOcclusionFS = "// Ambient Occlusion, largely inspired from:\n// \"The Alchemy Screen-Space Ambient Obscurance Algorithm\" by Morgan McGuire\n// \"Scalable Ambient Obscurance\" by Morgan McGuire, Michael Mara and David Luebke\n// https://research.nvidia.com/sites/default/files/pubs/2012-06_Scalable-Ambient-Obscurance/McGuire12SAO.pdf\n\n#include <common>\n\nvarying vec2 v_uv;\nuniform vec4 renderer_texelSize; // x: 1/width, y: 1/height, z: width, w: height\nuniform sampler2D renderer_BlitTexture; // Camera_DepthTexture\n\n// float inc = (1.0f / (SAMPLE_COUNT - 0.5f)) * SPIRAL_TURNS * 2.0 * PI\n// const vec2 angleIncCosSin = vec2(cos(inc), sin(inc))\n#if SSAO_QUALITY == 0\n #define SAMPLE_COUNT 7.0\n #define SPIRAL_TURNS 3.0\n const vec2 angleIncCosSin = vec2(-0.971148, 0.238227);\n#elif SSAO_QUALITY == 1\n #define SAMPLE_COUNT 11.0\n #define SPIRAL_TURNS 6.0\n const vec2 angleIncCosSin = vec2(-0.896127, -0.443780);\n#elif SSAO_QUALITY == 2\n #define SAMPLE_COUNT 16.0\n #define SPIRAL_TURNS 7.0\n const vec2 angleIncCosSin = vec2(-0.966846, 0.255311);\n#endif\n\nuniform float material_invRadiusSquared; // Inverse of the squared radius\nuniform float material_minHorizonAngleSineSquared; // Minimum horizon angle sine squared\nuniform float material_intensity; // Intensity of the ambient occlusion\nuniform float material_projectionScaleRadius;\nuniform float material_bias; // Bias to avoid self-occlusion\nuniform float material_peak2; // Peak value to avoid singularities\nuniform float material_power; // Exponent to convert occlusion to visibility\nuniform vec2 material_invProjScaleXY; //invProjection[0][0] * 2, invProjection[1][1] * 2\n\n\nvec3 computeViewSpacePosition(vec2 uv, float linearDepth, vec2 invProjScaleXY) {\n #ifdef CAMERA_ORTHOGRAPHIC\n return vec3((vec2(0.5) - uv) * invProjScaleXY , linearDepth);\n #else\n return vec3((vec2(0.5) - uv) * invProjScaleXY * linearDepth, linearDepth);\n #endif\n}\n\nfloat depthToViewZ(float depth) {\n return -remapDepthBufferEyeDepth(depth);\n}\n\n// reconstructing normal from depth buffer\n// https://atyuwen.github.io/posts/normal-reconstruction\n// https://wickedengine.net/2019/09/22/improved-normal-reconstruction-from-depth/\nvec3 computeViewSpaceNormal(vec2 uv, sampler2D depthTexture, float depth, vec3 viewPos, vec2 texel, vec2 invProjScaleXY) {\n vec3 normal = vec3(0.0);\n#if SSAO_QUALITY == 0 || SSAO_QUALITY == 1\n vec2 uvdx = uv + vec2(texel.x, 0.0);\n vec2 uvdy = uv + vec2(0.0, texel.y);\n\n float depthX = texture2D(depthTexture, uvdx).r;\n float depthY = texture2D(depthTexture, uvdy).r;\n\n vec3 px = computeViewSpacePosition(uvdx, depthToViewZ(depthX), invProjScaleXY);\n vec3 py = computeViewSpacePosition(uvdy, depthToViewZ(depthY), invProjScaleXY);\n\n vec3 dpdx = px - viewPos;\n vec3 dpdy = py - viewPos;\n\n normal = normalize(cross(dpdx, dpdy));\n\n#elif SSAO_QUALITY == 2\n vec2 dx = vec2(texel.x, 0.0);\n vec2 dy = vec2(0.0, texel.y);\n \n vec4 H;\n H.x = texture2D(depthTexture, uv - dx).r; // left\n H.y = texture2D(depthTexture, uv + dx).r; // right\n H.z = texture2D(depthTexture, uv - dx * 2.0).r; // left2\n H.w = texture2D(depthTexture, uv + dx * 2.0).r; // right2\n \n // Calculate horizontal edge weights\n vec2 horizontalEdgeWeights = abs((2.0 * H.xy - H.zw) - depth);\n\n vec3 pos_l = computeViewSpacePosition(uv - dx, depthToViewZ(H.x), invProjScaleXY);\n vec3 pos_r = computeViewSpacePosition(uv + dx, depthToViewZ(H.y), invProjScaleXY);\n vec3 dpdx = (horizontalEdgeWeights.x < horizontalEdgeWeights.y) ? (viewPos - pos_l) : (pos_r - viewPos);\n\n // Sample depths for vertical edge detection\n vec4 V;\n V.x = texture2D(depthTexture, uv - dy).r; // down\n V.y = texture2D(depthTexture, uv + dy).r; // up\n V.z = texture2D(depthTexture, uv - dy * 2.0).r; // down2\n V.w = texture2D(depthTexture, uv + dy * 2.0).r; // up2\n\n // Calculate vertical edge weights\n vec2 verticalEdgeWeights = abs((2.0 * V.xy - V.zw) - depth);\n vec3 pos_d = computeViewSpacePosition(uv - dy, depthToViewZ(V.x), invProjScaleXY);\n vec3 pos_u = computeViewSpacePosition(uv + dy, depthToViewZ(V.y), invProjScaleXY);\n vec3 dpdy = (verticalEdgeWeights.x < verticalEdgeWeights.y) ? (viewPos - pos_d) : (pos_u - viewPos);\n normal = normalize(cross(dpdx, dpdy));\n #endif\n return normal;\n\n}\n\nvec3 tapLocation(float i, const float noise) {\n float offset = ((2.0 * PI) * 2.4) * noise;\n float angle = ((i / SAMPLE_COUNT) * SPIRAL_TURNS) * (2.0 * PI) + offset;\n float radius = (i + noise + 0.5) / SAMPLE_COUNT;\n return vec3(cos(angle), sin(angle), radius * radius);\n}\n\nvec2 startPosition(const float noise) {\n float angle = ((2.0 * PI) * 2.4) * noise;\n return vec2(cos(angle), sin(angle));\n}\n\nmat2 tapAngleStep() {\n vec2 t = angleIncCosSin;\n return mat2(t.x, t.y, -t.y, t.x);\n}\n\nvec3 tapLocationFast(float i, vec2 p, const float noise) {\n float radius = (i + noise + 0.5) / SAMPLE_COUNT;\n return vec3(p, radius * radius);\n}\n\nvoid computeAmbientOcclusionSAO(inout float occlusion, float i, float ssDiskRadius, vec2 uv, vec3 originPosition, vec3 normal,\n vec2 tapPosition, float noise) {\n\n vec3 tap = tapLocationFast(i, tapPosition, noise);\n\n float ssRadius = max(1.0, tap.z * ssDiskRadius); // at least 1 pixel screen-space radius\n\n vec2 uvSamplePos = uv + vec2(ssRadius * tap.xy) * renderer_texelSize.xy;\n\n float occlusionDepth = texture2D(renderer_BlitTexture, uvSamplePos).r;\n float linearOcclusionDepth = depthToViewZ(occlusionDepth);\n // “p” is the position after spiral sampling\n vec3 p = computeViewSpacePosition(uvSamplePos, linearOcclusionDepth, material_invProjScaleXY);\n\n // now we have the sample, compute AO\n vec3 v = p - originPosition; // sample vector\n float vv = dot(v, v); // squared distance\n float vn = dot(v, normal); // distance * cos(v, normal)\n\n // discard samples that are outside of the radius, preventing distant geometry to\n // cast shadows -- there are many functions that work and choosing one is an artistic\n // decision.\n float weight = pow(max(0.0, 1.0 - vv * material_invRadiusSquared), 2.0);\n\n // discard samples that are too close to the horizon to reduce shadows cast by geometry\n // not sufficently tessellated. The goal is to discard samples that form an angle 'beta'\n // smaller than 'epsilon' with the horizon. We already have dot(v,n) which is equal to the\n // sin(beta) * |v|. So the test simplifies to vn^2 < vv * sin(epsilon)^2.\n weight *= step(vv * material_minHorizonAngleSineSquared, vn * vn);\n\n // Calculate the contribution of a single sampling point to Ambient Occlusion\n float sampleOcclusion = max(0.0, vn + (originPosition.z * material_bias)) / (vv + material_peak2);\n occlusion += weight * sampleOcclusion;\n}\n\nvoid scalableAmbientObscurance(vec2 uv, vec3 origin, vec3 normal, out float obscurance) {\n float noise = interleavedGradientNoise(gl_FragCoord.xy);\n vec2 tapPosition = startPosition(noise);\n mat2 angleStep = tapAngleStep();\n\n // Choose the screen-space sample radius\n // proportional to the projected area of the sphere\n float ssDiskRadius = -(material_projectionScaleRadius / origin.z);\n\n // Accumulate the occlusion amount of all sampling points\n obscurance = 0.0;\n for (float i = 0.0; i < SAMPLE_COUNT; i += 1.0) {\n computeAmbientOcclusionSAO(obscurance, i, ssDiskRadius, uv, origin, normal, tapPosition, noise);\n tapPosition = angleStep * tapPosition;\n }\n obscurance = sqrt(obscurance * material_intensity);\n}\n\nvec2 pack(highp float normalizedDepth) {\n highp float z = clamp(normalizedDepth, 0.0, 1.0);\n highp float t = floor(256.0 * z);\n mediump float hi = t * (1.0 / 256.0);\n mediump float lo = (256.0 * z) - t;\n return vec2(hi, lo);\n}\n\n\nvoid main(){\n float depth = texture2D(renderer_BlitTexture, v_uv).r;\n float z = depthToViewZ(depth);\n\n // Reconstruct view space position from depth\n vec3 positionVS = computeViewSpacePosition(v_uv, z, material_invProjScaleXY);\n\n // Compute normal\n vec3 normal = computeViewSpaceNormal(v_uv, renderer_BlitTexture, depth, positionVS, renderer_texelSize.xy, material_invProjScaleXY);\n\n float occlusion = 0.0;\n scalableAmbientObscurance(v_uv, positionVS, normal, occlusion);\n\n // Occlusion to visibility\n float aoVisibility = pow(clamp(1.0 - occlusion, 0.0, 1.0), material_power);\n\n gl_FragColor = vec4(aoVisibility, pack(-positionVS.z/camera_ProjectionParams.z), 1.0);\n}\n\n"; // eslint-disable-line
+ var scalableAmbientOcclusionFS = "// Ambient Occlusion, largely inspired from:\n// \"The Alchemy Screen-Space Ambient Obscurance Algorithm\" by Morgan McGuire\n// \"Scalable Ambient Obscurance\" by Morgan McGuire, Michael Mara and David Luebke\n// https://research.nvidia.com/sites/default/files/pubs/2012-06_Scalable-Ambient-Obscurance/McGuire12SAO.pdf\n\n#include <common>\n\nvarying vec2 v_uv;\nuniform vec4 renderer_texelSize; // x: 1/width, y: 1/height, z: width, w: height\nuniform highp sampler2D renderer_BlitTexture; // Camera_DepthTexture\n\n// float inc = (1.0f / (SAMPLE_COUNT - 0.5f)) * SPIRAL_TURNS * 2.0 * PI\n// const vec2 angleIncCosSin = vec2(cos(inc), sin(inc))\n#if SSAO_QUALITY == 0\n #define SAMPLE_COUNT 7.0\n #define SPIRAL_TURNS 3.0\n const vec2 angleIncCosSin = vec2(-0.971148, 0.238227);\n#elif SSAO_QUALITY == 1\n #define SAMPLE_COUNT 11.0\n #define SPIRAL_TURNS 6.0\n const vec2 angleIncCosSin = vec2(-0.896127, -0.443780);\n#elif SSAO_QUALITY == 2\n #define SAMPLE_COUNT 16.0\n #define SPIRAL_TURNS 7.0\n const vec2 angleIncCosSin = vec2(-0.966846, 0.255311);\n#endif\n\nuniform float material_invRadiusSquared; // Inverse of the squared radius\nuniform float material_minHorizonAngleSineSquared; // Minimum horizon angle sine squared\nuniform float material_intensity; // Intensity of the ambient occlusion\nuniform float material_projectionScaleRadius;\nuniform float material_bias; // Bias to avoid self-occlusion\nuniform float material_peak2; // Peak value to avoid singularities\nuniform float material_power; // Exponent to convert occlusion to visibility\nuniform vec2 material_invProjScaleXY; //invProjection[0][0] * 2, invProjection[1][1] * 2\n\n\nvec3 computeViewSpacePosition(vec2 uv, float linearDepth, vec2 invProjScaleXY) {\n #ifdef CAMERA_ORTHOGRAPHIC\n return vec3((vec2(0.5) - uv) * invProjScaleXY , linearDepth);\n #else\n return vec3((vec2(0.5) - uv) * invProjScaleXY * linearDepth, linearDepth);\n #endif\n}\n\nfloat depthToViewZ(float depth) {\n return -remapDepthBufferEyeDepth(depth);\n}\n\n// reconstructing normal from depth buffer\n// https://atyuwen.github.io/posts/normal-reconstruction\n// https://wickedengine.net/2019/09/22/improved-normal-reconstruction-from-depth/\nvec3 computeViewSpaceNormal(vec2 uv, highp sampler2D depthTexture, float depth, vec3 viewPos, vec2 texel, vec2 invProjScaleXY) {\n vec3 normal = vec3(0.0);\n#if SSAO_QUALITY == 0 || SSAO_QUALITY == 1\n vec2 uvdx = uv + vec2(texel.x, 0.0);\n vec2 uvdy = uv + vec2(0.0, texel.y);\n\n float depthX = texture2D(depthTexture, uvdx).r;\n float depthY = texture2D(depthTexture, uvdy).r;\n\n vec3 px = computeViewSpacePosition(uvdx, depthToViewZ(depthX), invProjScaleXY);\n vec3 py = computeViewSpacePosition(uvdy, depthToViewZ(depthY), invProjScaleXY);\n\n vec3 dpdx = px - viewPos;\n vec3 dpdy = py - viewPos;\n\n normal = normalize(cross(dpdx, dpdy));\n\n#elif SSAO_QUALITY == 2\n vec2 dx = vec2(texel.x, 0.0);\n vec2 dy = vec2(0.0, texel.y);\n \n vec4 H;\n H.x = texture2D(depthTexture, uv - dx).r; // left\n H.y = texture2D(depthTexture, uv + dx).r; // right\n H.z = texture2D(depthTexture, uv - dx * 2.0).r; // left2\n H.w = texture2D(depthTexture, uv + dx * 2.0).r; // right2\n \n // Calculate horizontal edge weights\n vec2 horizontalEdgeWeights = abs((2.0 * H.xy - H.zw) - depth);\n\n vec3 pos_l = computeViewSpacePosition(uv - dx, depthToViewZ(H.x), invProjScaleXY);\n vec3 pos_r = computeViewSpacePosition(uv + dx, depthToViewZ(H.y), invProjScaleXY);\n vec3 dpdx = (horizontalEdgeWeights.x < horizontalEdgeWeights.y) ? (viewPos - pos_l) : (pos_r - viewPos);\n\n // Sample depths for vertical edge detection\n vec4 V;\n V.x = texture2D(depthTexture, uv - dy).r; // down\n V.y = texture2D(depthTexture, uv + dy).r; // up\n V.z = texture2D(depthTexture, uv - dy * 2.0).r; // down2\n V.w = texture2D(depthTexture, uv + dy * 2.0).r; // up2\n\n // Calculate vertical edge weights\n vec2 verticalEdgeWeights = abs((2.0 * V.xy - V.zw) - depth);\n vec3 pos_d = computeViewSpacePosition(uv - dy, depthToViewZ(V.x), invProjScaleXY);\n vec3 pos_u = computeViewSpacePosition(uv + dy, depthToViewZ(V.y), invProjScaleXY);\n vec3 dpdy = (verticalEdgeWeights.x < verticalEdgeWeights.y) ? (viewPos - pos_d) : (pos_u - viewPos);\n normal = normalize(cross(dpdx, dpdy));\n #endif\n return normal;\n\n}\n\nvec3 tapLocation(float i, const float noise) {\n float offset = ((2.0 * PI) * 2.4) * noise;\n float angle = ((i / SAMPLE_COUNT) * SPIRAL_TURNS) * (2.0 * PI) + offset;\n float radius = (i + noise + 0.5) / SAMPLE_COUNT;\n return vec3(cos(angle), sin(angle), radius * radius);\n}\n\nvec2 startPosition(const float noise) {\n float angle = ((2.0 * PI) * 2.4) * noise;\n return vec2(cos(angle), sin(angle));\n}\n\nmat2 tapAngleStep() {\n vec2 t = angleIncCosSin;\n return mat2(t.x, t.y, -t.y, t.x);\n}\n\nvec3 tapLocationFast(float i, vec2 p, const float noise) {\n float radius = (i + noise + 0.5) / SAMPLE_COUNT;\n return vec3(p, radius * radius);\n}\n\nvoid computeAmbientOcclusionSAO(inout float occlusion, float i, float ssDiskRadius, vec2 uv, vec3 originPosition, vec3 normal,\n vec2 tapPosition, float noise) {\n\n vec3 tap = tapLocationFast(i, tapPosition, noise);\n\n float ssRadius = max(1.0, tap.z * ssDiskRadius); // at least 1 pixel screen-space radius\n\n vec2 uvSamplePos = uv + vec2(ssRadius * tap.xy) * renderer_texelSize.xy;\n\n float occlusionDepth = texture2D(renderer_BlitTexture, uvSamplePos).r;\n float linearOcclusionDepth = depthToViewZ(occlusionDepth);\n // “p” is the position after spiral sampling\n vec3 p = computeViewSpacePosition(uvSamplePos, linearOcclusionDepth, material_invProjScaleXY);\n\n // now we have the sample, compute AO\n vec3 v = p - originPosition; // sample vector\n float vv = dot(v, v); // squared distance\n float vn = dot(v, normal); // distance * cos(v, normal)\n\n // discard samples that are outside of the radius, preventing distant geometry to\n // cast shadows -- there are many functions that work and choosing one is an artistic\n // decision.\n float weight = pow(max(0.0, 1.0 - vv * material_invRadiusSquared), 2.0);\n\n // discard samples that are too close to the horizon to reduce shadows cast by geometry\n // not sufficently tessellated. The goal is to discard samples that form an angle 'beta'\n // smaller than 'epsilon' with the horizon. We already have dot(v,n) which is equal to the\n // sin(beta) * |v|. So the test simplifies to vn^2 < vv * sin(epsilon)^2.\n weight *= step(vv * material_minHorizonAngleSineSquared, vn * vn);\n\n // Calculate the contribution of a single sampling point to Ambient Occlusion\n float sampleOcclusion = max(0.0, vn + (originPosition.z * material_bias)) / (vv + material_peak2);\n occlusion += weight * sampleOcclusion;\n}\n\nvoid scalableAmbientObscurance(vec2 uv, vec3 origin, vec3 normal, out float obscurance) {\n float noise = interleavedGradientNoise(gl_FragCoord.xy);\n vec2 tapPosition = startPosition(noise);\n mat2 angleStep = tapAngleStep();\n\n // Choose the screen-space sample radius\n // proportional to the projected area of the sphere\n float ssDiskRadius = -(material_projectionScaleRadius / origin.z);\n\n // Accumulate the occlusion amount of all sampling points\n obscurance = 0.0;\n for (float i = 0.0; i < SAMPLE_COUNT; i += 1.0) {\n computeAmbientOcclusionSAO(obscurance, i, ssDiskRadius, uv, origin, normal, tapPosition, noise);\n tapPosition = angleStep * tapPosition;\n }\n obscurance = sqrt(obscurance * material_intensity);\n}\n\nvec2 pack(highp float normalizedDepth) {\n highp float z = clamp(normalizedDepth, 0.0, 1.0);\n highp float t = floor(256.0 * z);\n mediump float hi = t * (1.0 / 256.0);\n mediump float lo = (256.0 * z) - t;\n return vec2(hi, lo);\n}\n\n\nvoid main(){\n float depth = texture2D(renderer_BlitTexture, v_uv).r;\n float z = depthToViewZ(depth);\n\n // Reconstruct view space position from depth\n vec3 positionVS = computeViewSpacePosition(v_uv, z, material_invProjScaleXY);\n\n // Compute normal\n vec3 normal = computeViewSpaceNormal(v_uv, renderer_BlitTexture, depth, positionVS, renderer_texelSize.xy, material_invProjScaleXY);\n\n float occlusion = 0.0;\n scalableAmbientObscurance(v_uv, positionVS, normal, occlusion);\n\n // Occlusion to visibility\n float aoVisibility = pow(clamp(1.0 - occlusion, 0.0, 1.0), material_power);\n\n gl_FragColor = vec4(aoVisibility, pack(-positionVS.z/camera_ProjectionParams.z), 1.0);\n}\n\n"; // eslint-disable-line
 
  /**
  * @internal
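
Note on this hunk: the removed and added shader strings are identical except for two precision qualifiers: the `renderer_BlitTexture` uniform and the `depthTexture` parameter of `computeViewSpaceNormal` gain `highp`. GLSL ES gives fragment-shader samplers a default precision of `lowp`, so without the qualifier the depth fetched for position and normal reconstruction is quantized before any SAO math runs. A small runnable check of that claim, using the first declaration copied verbatim from the two strings above:

var before = "uniform sampler2D renderer_BlitTexture; // Camera_DepthTexture";
var after = "uniform highp sampler2D renderer_BlitTexture; // Camera_DepthTexture";
// Stripping the added qualifier recovers the removed declaration exactly;
// the computeViewSpaceNormal signature changes the same way.
console.log(after.replace("highp ", "") === before); // true
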
@@ -10852,6 +10854,7 @@ var scalableAmbientOcclusionFS = "// Ambient Occlusion, largely inspired from:\n
  var material = new Material(engine, Shader.find(ScalableAmbientObscurancePass.SHADER_NAME));
  material._addReferCount(1);
  _this._material = material;
+ _this.isSupported = _this.engine._hardwareRenderer.capability.isFragmentHighPrecision;
  return _this;
  }
  var _proto = ScalableAmbientObscurancePass.prototype;
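
Note on this hunk: `isSupported` is derived from the hardware renderer's `isFragmentHighPrecision` capability, whose implementation is outside this diff. On WebGL, fragment-shader highp float support is conventionally probed with `getShaderPrecisionFormat`; the following sketch shows how such a capability could be derived, as an assumption rather than the engine's actual code:

// Hedged sketch: probing fragment-shader highp float support on WebGL.
// getShaderPrecisionFormat returns a WebGLShaderPrecisionFormat whose
// `precision` field is 0 when the requested format is unsupported.
function detectFragmentHighPrecision(gl) {
  var format = gl.getShaderPrecisionFormat(gl.FRAGMENT_SHADER, gl.HIGH_FLOAT);
  return format !== null && format.precision > 0;
}

// Usage, assuming a WebGL canvas context:
// var gl = document.createElement("canvas").getContext("webgl");
// var isFragmentHighPrecision = detectFragmentHighPrecision(gl);

The requirement matches the shader change above: the SSAO pass now reads depth through a `highp sampler2D`, which is only meaningful when the fragment stage supports high precision.
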
@@ -15800,8 +15803,8 @@ CascadedShadowCasterPass._tempMatrix0 = new engineMath.Matrix();
  }
  // Scalable ambient obscurance pass
  // Before opaque pass so materials can sample ambient occlusion in BRDF
- if (ambientOcclusionEnabled && supportDepthTexture) {
- var saoPass = this._saoPass;
+ var saoPass = this._saoPass;
+ if (ambientOcclusionEnabled && supportDepthTexture && saoPass.isSupported) {
  saoPass.onConfig(camera, this._depthOnlyPass.renderTarget);
  saoPass.onRender(context);
  } else {
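
Note on this hunk: two things change. `saoPass` is hoisted above the condition so the `else` branch (truncated by this hunk) can reference the same variable, and the new `saoPass.isSupported` term, assigned in the `ScalableAmbientObscurancePass` change above, skips the pass entirely on devices without fragment high precision instead of rendering with quantized depth.
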
@@ -21950,11 +21953,7 @@ var ComponentCloner = /*#__PURE__*/ function() {
  ], args));
  this._components.push(component);
  // @todo: temporary solution
- if (_instanceof(component, Transform)) {
- var transform = this._transform;
- this._transform = component;
- transform == null ? void 0 : transform.destroy();
- }
+ if (_instanceof(component, Transform)) this._setTransform(component);
  component._setActive(true, ActiveChangeFlag.All);
  return component;
  };
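
Note on this hunk: the five removed lines swapped in the new transform and destroyed the old one, but never told child entities about the change; `_setTransform`, added in the next hunk, performs the same swap and additionally notifies each child's transform via `_parentChange()` (see the sketch after the final hunk).
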
@@ -22383,6 +22382,16 @@ var ComponentCloner = /*#__PURE__*/ function() {
  }
  }
  };
+ _proto._setTransform = function _setTransform(value) {
+ var _this__transform;
+ (_this__transform = this._transform) == null ? void 0 : _this__transform.destroy();
+ this._transform = value;
+ var children = this._children;
+ for(var i = 0, n = children.length; i < n; i++){
+ var _children_i_transform;
+ (_children_i_transform = children[i].transform) == null ? void 0 : _children_i_transform._parentChange();
+ }
+ };
  /**
  * @deprecated
  */ _proto.getInvModelMatrix = function getInvModelMatrix() {
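
Read together with the previous hunk, the new `_setTransform` centralizes the clone-time transform swap and adds the missing child notification. Below is a readable equivalent of the transpiled helper, with minimal stubs so it runs standalone; `_parentChange`'s real body is not in this diff, and invalidating parent-dependent world caches is the assumed intent.

// Readable sketch of _setTransform. The stubbed _parentChange only marks a
// flag here; the engine's actual behavior is assumed, not shown in this diff.
function Transform() { this._worldDirty = false; }
Transform.prototype.destroy = function () {};
Transform.prototype._parentChange = function () { this._worldDirty = true; };

function Entity() {
  this._children = [];
  this._transform = null;
  this.transform = null;
}

Entity.prototype._setTransform = function (value) {
  if (this._transform) this._transform.destroy(); // release the transform being replaced
  this._transform = value;
  var children = this._children;
  for (var i = 0, n = children.length; i < n; i++) {
    var childTransform = children[i].transform;
    // Without this call, a child could keep state derived from the destroyed transform.
    if (childTransform) childTransform._parentChange();
  }
};
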