@galacean/engine 1.6.0-beta.2 → 1.6.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/browser.js CHANGED
@@ -12678,6 +12678,8 @@
  rotation._onValueChanged = target._onRotationChanged;
  // @ts-ignore
  scale._onValueChanged = target._onScaleChanged;
+ // When cloning, other components may read properties such as `rotationQuaternion` in their constructors, so the related local dirty flags need to be corrected
+ target._setDirtyFlagTrue(2 | 64);
  };
  _proto._onLocalMatrixChanging = function _onLocalMatrixChanging() {
  this._setDirtyFlagFalse(64);
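
The fix above works because Transform caches derived state behind a bitmask of dirty flags. A minimal sketch of that pattern, assuming flag semantics for the raw values 2 and 64 (the engine's real enum names may differ):

// Hypothetical flag names -- only the raw values 2 and 64 appear in the diff.
var TransformFlag = {
  LocalQuat: 2,    // assumed: cached local rotation quaternion is stale
  LocalMatrix: 64  // assumed: cached local matrix is stale
};

// _setDirtyFlagTrue presumably ORs the bits in, so the next read of
// rotationQuaternion / localMatrix recomputes from the cloned source values
// instead of reusing caches populated by sibling components mid-clone.
clonedTransform._dirtyFlag |= TransformFlag.LocalQuat | TransformFlag.LocalMatrix; // === 2 | 64
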
@@ -15746,7 +15748,7 @@
  return AmbientOcclusionQuality;
  }({});
  var bilateralBlurFS = "#include <common>\n\nvarying vec2 v_uv;\nuniform sampler2D renderer_BlitTexture;\nuniform vec4 renderer_SourceScaleOffset; \nuniform float material_farPlaneOverEdgeDistance;\n#if SSAO_QUALITY == 0\n #define BLUR_SAMPLE_COUNT 3\n#elif SSAO_QUALITY == 1\n #define BLUR_SAMPLE_COUNT 6\n#elif SSAO_QUALITY == 2\n #define BLUR_SAMPLE_COUNT 12\n#endif\n\nuniform float material_kernel[12]; // Sample weights for bilateral blur\n\nfloat bilateralWeight(float depth, float sampleDepth) {\n float diff = (sampleDepth - depth) * material_farPlaneOverEdgeDistance;\n return max(0.0, 1.0 - diff * diff);\n}\n\nhighp float unpack(highp vec2 depth) {\n // depth here only has 8-bits of precision, but the unpacked depth is highp\n // this is equivalent to (x8 * 256 + y8) / 65535, which gives a value between 0 and 1\n return (depth.x * (256.0 / 257.0) + depth.y * (1.0 / 257.0));\n}\n\nvoid tap(const sampler2D saoTexture,\n inout float sum, inout float totalWeight, float weight, float depth, vec2 position) {\n vec4 data = texture2D(saoTexture, position);\n // bilateral sample\n float bilateral = weight * bilateralWeight(depth, unpack(data.gb));\n sum += data.r * bilateral;\n totalWeight += bilateral;\n}\n\nvoid main(){\n mediump vec4 data = texture2D(renderer_BlitTexture, v_uv);\n float depth = unpack(data.gb);\n\n // Weight of the center pixel from the Gaussian kernel (typically 1.0)\n float totalWeight = material_kernel[0];\n float sum = data.r * totalWeight;\n \n vec2 offset = renderer_SourceScaleOffset.zw;\n for (int i = 1; i < BLUR_SAMPLE_COUNT; i++) {\n float weight = material_kernel[i];\n tap(renderer_BlitTexture, sum, totalWeight, weight, depth, v_uv + offset);\n tap(renderer_BlitTexture, sum, totalWeight, weight, depth, v_uv - offset);\n offset += renderer_SourceScaleOffset.zw;\n }\n\n float ao = sum * (1.0 / totalWeight);\n\n // simple dithering helps a lot (assumes 8 bits target)\n // this is most useful with high quality/large blurs\n ao += ((interleavedGradientNoise(gl_FragCoord.xy) - 0.5) / 255.0);\n gl_FragColor = vec4(ao, data.gb, 1.0);\n\n}\n\n"; // eslint-disable-line
- var scalableAmbientOcclusionFS = "// Ambient Occlusion, largely inspired from:\n// \"The Alchemy Screen-Space Ambient Obscurance Algorithm\" by Morgan McGuire\n// \"Scalable Ambient Obscurance\" by Morgan McGuire, Michael Mara and David Luebke\n// https://research.nvidia.com/sites/default/files/pubs/2012-06_Scalable-Ambient-Obscurance/McGuire12SAO.pdf\n\n#include <common>\n\nvarying vec2 v_uv;\nuniform vec4 renderer_texelSize; // x: 1/width, y: 1/height, z: width, w: height\nuniform sampler2D renderer_BlitTexture; // Camera_DepthTexture\n\n// float inc = (1.0f / (SAMPLE_COUNT - 0.5f)) * SPIRAL_TURNS * 2.0 * PI\n// const vec2 angleIncCosSin = vec2(cos(inc), sin(inc))\n#if SSAO_QUALITY == 0\n #define SAMPLE_COUNT 7.0\n #define SPIRAL_TURNS 3.0\n const vec2 angleIncCosSin = vec2(-0.971148, 0.238227);\n#elif SSAO_QUALITY == 1\n #define SAMPLE_COUNT 11.0\n #define SPIRAL_TURNS 6.0\n const vec2 angleIncCosSin = vec2(-0.896127, -0.443780);\n#elif SSAO_QUALITY == 2\n #define SAMPLE_COUNT 16.0\n #define SPIRAL_TURNS 7.0\n const vec2 angleIncCosSin = vec2(-0.966846, 0.255311);\n#endif\n\nuniform float material_invRadiusSquared; // Inverse of the squared radius\nuniform float material_minHorizonAngleSineSquared; // Minimum horizon angle sine squared\nuniform float material_intensity; // Intensity of the ambient occlusion\nuniform float material_projectionScaleRadius;\nuniform float material_bias; // Bias to avoid self-occlusion\nuniform float material_peak2; // Peak value to avoid singularities\nuniform float material_power; // Exponent to convert occlusion to visibility\nuniform vec2 material_invProjScaleXY; //invProjection[0][0] * 2, invProjection[1][1] * 2\n\n\nvec3 computeViewSpacePosition(vec2 uv, float linearDepth, vec2 invProjScaleXY) {\n #ifdef CAMERA_ORTHOGRAPHIC\n return vec3((vec2(0.5) - uv) * invProjScaleXY , linearDepth);\n #else\n return vec3((vec2(0.5) - uv) * invProjScaleXY * linearDepth, linearDepth);\n #endif\n}\n\nfloat depthToViewZ(float depth) {\n return -remapDepthBufferEyeDepth(depth);\n}\n\n// reconstructing normal from depth buffer\n// https://atyuwen.github.io/posts/normal-reconstruction\n// https://wickedengine.net/2019/09/22/improved-normal-reconstruction-from-depth/\nvec3 computeViewSpaceNormal(vec2 uv, sampler2D depthTexture, float depth, vec3 viewPos, vec2 texel, vec2 invProjScaleXY) {\n vec3 normal = vec3(0.0);\n#if SSAO_QUALITY == 0 || SSAO_QUALITY == 1\n vec2 uvdx = uv + vec2(texel.x, 0.0);\n vec2 uvdy = uv + vec2(0.0, texel.y);\n\n float depthX = texture2D(depthTexture, uvdx).r;\n float depthY = texture2D(depthTexture, uvdy).r;\n\n vec3 px = computeViewSpacePosition(uvdx, depthToViewZ(depthX), invProjScaleXY);\n vec3 py = computeViewSpacePosition(uvdy, depthToViewZ(depthY), invProjScaleXY);\n\n vec3 dpdx = px - viewPos;\n vec3 dpdy = py - viewPos;\n\n normal = normalize(cross(dpdx, dpdy));\n\n#elif SSAO_QUALITY == 2\n vec2 dx = vec2(texel.x, 0.0);\n vec2 dy = vec2(0.0, texel.y);\n \n vec4 H;\n H.x = texture2D(depthTexture, uv - dx).r; // left\n H.y = texture2D(depthTexture, uv + dx).r; // right\n H.z = texture2D(depthTexture, uv - dx * 2.0).r; // left2\n H.w = texture2D(depthTexture, uv + dx * 2.0).r; // right2\n \n // Calculate horizontal edge weights\n vec2 horizontalEdgeWeights = abs((2.0 * H.xy - H.zw) - depth);\n\n vec3 pos_l = computeViewSpacePosition(uv - dx, depthToViewZ(H.x), invProjScaleXY);\n vec3 pos_r = computeViewSpacePosition(uv + dx, depthToViewZ(H.y), invProjScaleXY);\n vec3 dpdx = (horizontalEdgeWeights.x < horizontalEdgeWeights.y) ? (viewPos - pos_l) : (pos_r - viewPos);\n\n // Sample depths for vertical edge detection\n vec4 V;\n V.x = texture2D(depthTexture, uv - dy).r; // down\n V.y = texture2D(depthTexture, uv + dy).r; // up\n V.z = texture2D(depthTexture, uv - dy * 2.0).r; // down2\n V.w = texture2D(depthTexture, uv + dy * 2.0).r; // up2\n\n // Calculate vertical edge weights\n vec2 verticalEdgeWeights = abs((2.0 * V.xy - V.zw) - depth);\n vec3 pos_d = computeViewSpacePosition(uv - dy, depthToViewZ(V.x), invProjScaleXY);\n vec3 pos_u = computeViewSpacePosition(uv + dy, depthToViewZ(V.y), invProjScaleXY);\n vec3 dpdy = (verticalEdgeWeights.x < verticalEdgeWeights.y) ? (viewPos - pos_d) : (pos_u - viewPos);\n normal = normalize(cross(dpdx, dpdy));\n #endif\n return normal;\n\n}\n\nvec3 tapLocation(float i, const float noise) {\n float offset = ((2.0 * PI) * 2.4) * noise;\n float angle = ((i / SAMPLE_COUNT) * SPIRAL_TURNS) * (2.0 * PI) + offset;\n float radius = (i + noise + 0.5) / SAMPLE_COUNT;\n return vec3(cos(angle), sin(angle), radius * radius);\n}\n\nvec2 startPosition(const float noise) {\n float angle = ((2.0 * PI) * 2.4) * noise;\n return vec2(cos(angle), sin(angle));\n}\n\nmat2 tapAngleStep() {\n vec2 t = angleIncCosSin;\n return mat2(t.x, t.y, -t.y, t.x);\n}\n\nvec3 tapLocationFast(float i, vec2 p, const float noise) {\n float radius = (i + noise + 0.5) / SAMPLE_COUNT;\n return vec3(p, radius * radius);\n}\n\nvoid computeAmbientOcclusionSAO(inout float occlusion, float i, float ssDiskRadius, vec2 uv, vec3 originPosition, vec3 normal,\n vec2 tapPosition, float noise) {\n\n vec3 tap = tapLocationFast(i, tapPosition, noise);\n\n float ssRadius = max(1.0, tap.z * ssDiskRadius); // at least 1 pixel screen-space radius\n\n vec2 uvSamplePos = uv + vec2(ssRadius * tap.xy) * renderer_texelSize.xy;\n\n float occlusionDepth = texture2D(renderer_BlitTexture, uvSamplePos).r;\n float linearOcclusionDepth = depthToViewZ(occlusionDepth);\n // “p” is the position after spiral sampling\n vec3 p = computeViewSpacePosition(uvSamplePos, linearOcclusionDepth, material_invProjScaleXY);\n\n // now we have the sample, compute AO\n vec3 v = p - originPosition; // sample vector\n float vv = dot(v, v); // squared distance\n float vn = dot(v, normal); // distance * cos(v, normal)\n\n // discard samples that are outside of the radius, preventing distant geometry from\n // casting shadows -- there are many functions that work and choosing one is an artistic\n // decision.\n float weight = pow(max(0.0, 1.0 - vv * material_invRadiusSquared), 2.0);\n\n // discard samples that are too close to the horizon to reduce shadows cast by geometry\n // not sufficiently tessellated. The goal is to discard samples that form an angle 'beta'\n // smaller than 'epsilon' with the horizon. We already have dot(v,n) which is equal to the\n // sin(beta) * |v|. So the test simplifies to vn^2 < vv * sin(epsilon)^2.\n weight *= step(vv * material_minHorizonAngleSineSquared, vn * vn);\n\n // Calculate the contribution of a single sampling point to Ambient Occlusion\n float sampleOcclusion = max(0.0, vn + (originPosition.z * material_bias)) / (vv + material_peak2);\n occlusion += weight * sampleOcclusion;\n}\n\nvoid scalableAmbientObscurance(vec2 uv, vec3 origin, vec3 normal, out float obscurance) {\n float noise = interleavedGradientNoise(gl_FragCoord.xy);\n vec2 tapPosition = startPosition(noise);\n mat2 angleStep = tapAngleStep();\n\n // Choose the screen-space sample radius\n // proportional to the projected area of the sphere\n float ssDiskRadius = -(material_projectionScaleRadius / origin.z);\n\n // Accumulate the occlusion amount of all sampling points\n obscurance = 0.0;\n for (float i = 0.0; i < SAMPLE_COUNT; i += 1.0) {\n computeAmbientOcclusionSAO(obscurance, i, ssDiskRadius, uv, origin, normal, tapPosition, noise);\n tapPosition = angleStep * tapPosition;\n }\n obscurance = sqrt(obscurance * material_intensity);\n}\n\nvec2 pack(highp float normalizedDepth) {\n highp float z = clamp(normalizedDepth, 0.0, 1.0);\n highp float t = floor(256.0 * z);\n mediump float hi = t * (1.0 / 256.0);\n mediump float lo = (256.0 * z) - t;\n return vec2(hi, lo);\n}\n\n\nvoid main(){\n float depth = texture2D(renderer_BlitTexture, v_uv).r;\n float z = depthToViewZ(depth);\n\n // Reconstruct view space position from depth\n vec3 positionVS = computeViewSpacePosition(v_uv, z, material_invProjScaleXY);\n\n // Compute normal\n vec3 normal = computeViewSpaceNormal(v_uv, renderer_BlitTexture, depth, positionVS, renderer_texelSize.xy, material_invProjScaleXY);\n\n float occlusion = 0.0;\n scalableAmbientObscurance(v_uv, positionVS, normal, occlusion);\n\n // Occlusion to visibility\n float aoVisibility = pow(clamp(1.0 - occlusion, 0.0, 1.0), material_power);\n\n gl_FragColor = vec4(aoVisibility, pack(-positionVS.z/camera_ProjectionParams.z), 1.0);\n}\n\n"; // eslint-disable-line
+ var scalableAmbientOcclusionFS = "// Ambient Occlusion, largely inspired from:\n// \"The Alchemy Screen-Space Ambient Obscurance Algorithm\" by Morgan McGuire\n// \"Scalable Ambient Obscurance\" by Morgan McGuire, Michael Mara and David Luebke\n// https://research.nvidia.com/sites/default/files/pubs/2012-06_Scalable-Ambient-Obscurance/McGuire12SAO.pdf\n\n#include <common>\n\nvarying vec2 v_uv;\nuniform vec4 renderer_texelSize; // x: 1/width, y: 1/height, z: width, w: height\nuniform highp sampler2D renderer_BlitTexture; // Camera_DepthTexture\n\n// float inc = (1.0f / (SAMPLE_COUNT - 0.5f)) * SPIRAL_TURNS * 2.0 * PI\n// const vec2 angleIncCosSin = vec2(cos(inc), sin(inc))\n#if SSAO_QUALITY == 0\n #define SAMPLE_COUNT 7.0\n #define SPIRAL_TURNS 3.0\n const vec2 angleIncCosSin = vec2(-0.971148, 0.238227);\n#elif SSAO_QUALITY == 1\n #define SAMPLE_COUNT 11.0\n #define SPIRAL_TURNS 6.0\n const vec2 angleIncCosSin = vec2(-0.896127, -0.443780);\n#elif SSAO_QUALITY == 2\n #define SAMPLE_COUNT 16.0\n #define SPIRAL_TURNS 7.0\n const vec2 angleIncCosSin = vec2(-0.966846, 0.255311);\n#endif\n\nuniform float material_invRadiusSquared; // Inverse of the squared radius\nuniform float material_minHorizonAngleSineSquared; // Minimum horizon angle sine squared\nuniform float material_intensity; // Intensity of the ambient occlusion\nuniform float material_projectionScaleRadius;\nuniform float material_bias; // Bias to avoid self-occlusion\nuniform float material_peak2; // Peak value to avoid singularities\nuniform float material_power; // Exponent to convert occlusion to visibility\nuniform vec2 material_invProjScaleXY; //invProjection[0][0] * 2, invProjection[1][1] * 2\n\n\nvec3 computeViewSpacePosition(vec2 uv, float linearDepth, vec2 invProjScaleXY) {\n #ifdef CAMERA_ORTHOGRAPHIC\n return vec3((vec2(0.5) - uv) * invProjScaleXY , linearDepth);\n #else\n return vec3((vec2(0.5) - uv) * invProjScaleXY * linearDepth, linearDepth);\n #endif\n}\n\nfloat depthToViewZ(float depth) {\n return -remapDepthBufferEyeDepth(depth);\n}\n\n// reconstructing normal from depth buffer\n// https://atyuwen.github.io/posts/normal-reconstruction\n// https://wickedengine.net/2019/09/22/improved-normal-reconstruction-from-depth/\nvec3 computeViewSpaceNormal(vec2 uv, highp sampler2D depthTexture, float depth, vec3 viewPos, vec2 texel, vec2 invProjScaleXY) {\n vec3 normal = vec3(0.0);\n#if SSAO_QUALITY == 0 || SSAO_QUALITY == 1\n vec2 uvdx = uv + vec2(texel.x, 0.0);\n vec2 uvdy = uv + vec2(0.0, texel.y);\n\n float depthX = texture2D(depthTexture, uvdx).r;\n float depthY = texture2D(depthTexture, uvdy).r;\n\n vec3 px = computeViewSpacePosition(uvdx, depthToViewZ(depthX), invProjScaleXY);\n vec3 py = computeViewSpacePosition(uvdy, depthToViewZ(depthY), invProjScaleXY);\n\n vec3 dpdx = px - viewPos;\n vec3 dpdy = py - viewPos;\n\n normal = normalize(cross(dpdx, dpdy));\n\n#elif SSAO_QUALITY == 2\n vec2 dx = vec2(texel.x, 0.0);\n vec2 dy = vec2(0.0, texel.y);\n \n vec4 H;\n H.x = texture2D(depthTexture, uv - dx).r; // left\n H.y = texture2D(depthTexture, uv + dx).r; // right\n H.z = texture2D(depthTexture, uv - dx * 2.0).r; // left2\n H.w = texture2D(depthTexture, uv + dx * 2.0).r; // right2\n \n // Calculate horizontal edge weights\n vec2 horizontalEdgeWeights = abs((2.0 * H.xy - H.zw) - depth);\n\n vec3 pos_l = computeViewSpacePosition(uv - dx, depthToViewZ(H.x), invProjScaleXY);\n vec3 pos_r = computeViewSpacePosition(uv + dx, depthToViewZ(H.y), invProjScaleXY);\n vec3 dpdx = (horizontalEdgeWeights.x < horizontalEdgeWeights.y) ? (viewPos - pos_l) : (pos_r - viewPos);\n\n // Sample depths for vertical edge detection\n vec4 V;\n V.x = texture2D(depthTexture, uv - dy).r; // down\n V.y = texture2D(depthTexture, uv + dy).r; // up\n V.z = texture2D(depthTexture, uv - dy * 2.0).r; // down2\n V.w = texture2D(depthTexture, uv + dy * 2.0).r; // up2\n\n // Calculate vertical edge weights\n vec2 verticalEdgeWeights = abs((2.0 * V.xy - V.zw) - depth);\n vec3 pos_d = computeViewSpacePosition(uv - dy, depthToViewZ(V.x), invProjScaleXY);\n vec3 pos_u = computeViewSpacePosition(uv + dy, depthToViewZ(V.y), invProjScaleXY);\n vec3 dpdy = (verticalEdgeWeights.x < verticalEdgeWeights.y) ? (viewPos - pos_d) : (pos_u - viewPos);\n normal = normalize(cross(dpdx, dpdy));\n #endif\n return normal;\n\n}\n\nvec3 tapLocation(float i, const float noise) {\n float offset = ((2.0 * PI) * 2.4) * noise;\n float angle = ((i / SAMPLE_COUNT) * SPIRAL_TURNS) * (2.0 * PI) + offset;\n float radius = (i + noise + 0.5) / SAMPLE_COUNT;\n return vec3(cos(angle), sin(angle), radius * radius);\n}\n\nvec2 startPosition(const float noise) {\n float angle = ((2.0 * PI) * 2.4) * noise;\n return vec2(cos(angle), sin(angle));\n}\n\nmat2 tapAngleStep() {\n vec2 t = angleIncCosSin;\n return mat2(t.x, t.y, -t.y, t.x);\n}\n\nvec3 tapLocationFast(float i, vec2 p, const float noise) {\n float radius = (i + noise + 0.5) / SAMPLE_COUNT;\n return vec3(p, radius * radius);\n}\n\nvoid computeAmbientOcclusionSAO(inout float occlusion, float i, float ssDiskRadius, vec2 uv, vec3 originPosition, vec3 normal,\n vec2 tapPosition, float noise) {\n\n vec3 tap = tapLocationFast(i, tapPosition, noise);\n\n float ssRadius = max(1.0, tap.z * ssDiskRadius); // at least 1 pixel screen-space radius\n\n vec2 uvSamplePos = uv + vec2(ssRadius * tap.xy) * renderer_texelSize.xy;\n\n float occlusionDepth = texture2D(renderer_BlitTexture, uvSamplePos).r;\n float linearOcclusionDepth = depthToViewZ(occlusionDepth);\n // “p” is the position after spiral sampling\n vec3 p = computeViewSpacePosition(uvSamplePos, linearOcclusionDepth, material_invProjScaleXY);\n\n // now we have the sample, compute AO\n vec3 v = p - originPosition; // sample vector\n float vv = dot(v, v); // squared distance\n float vn = dot(v, normal); // distance * cos(v, normal)\n\n // discard samples that are outside of the radius, preventing distant geometry from\n // casting shadows -- there are many functions that work and choosing one is an artistic\n // decision.\n float weight = pow(max(0.0, 1.0 - vv * material_invRadiusSquared), 2.0);\n\n // discard samples that are too close to the horizon to reduce shadows cast by geometry\n // not sufficiently tessellated. The goal is to discard samples that form an angle 'beta'\n // smaller than 'epsilon' with the horizon. We already have dot(v,n) which is equal to the\n // sin(beta) * |v|. So the test simplifies to vn^2 < vv * sin(epsilon)^2.\n weight *= step(vv * material_minHorizonAngleSineSquared, vn * vn);\n\n // Calculate the contribution of a single sampling point to Ambient Occlusion\n float sampleOcclusion = max(0.0, vn + (originPosition.z * material_bias)) / (vv + material_peak2);\n occlusion += weight * sampleOcclusion;\n}\n\nvoid scalableAmbientObscurance(vec2 uv, vec3 origin, vec3 normal, out float obscurance) {\n float noise = interleavedGradientNoise(gl_FragCoord.xy);\n vec2 tapPosition = startPosition(noise);\n mat2 angleStep = tapAngleStep();\n\n // Choose the screen-space sample radius\n // proportional to the projected area of the sphere\n float ssDiskRadius = -(material_projectionScaleRadius / origin.z);\n\n // Accumulate the occlusion amount of all sampling points\n obscurance = 0.0;\n for (float i = 0.0; i < SAMPLE_COUNT; i += 1.0) {\n computeAmbientOcclusionSAO(obscurance, i, ssDiskRadius, uv, origin, normal, tapPosition, noise);\n tapPosition = angleStep * tapPosition;\n }\n obscurance = sqrt(obscurance * material_intensity);\n}\n\nvec2 pack(highp float normalizedDepth) {\n highp float z = clamp(normalizedDepth, 0.0, 1.0);\n highp float t = floor(256.0 * z);\n mediump float hi = t * (1.0 / 256.0);\n mediump float lo = (256.0 * z) - t;\n return vec2(hi, lo);\n}\n\n\nvoid main(){\n float depth = texture2D(renderer_BlitTexture, v_uv).r;\n float z = depthToViewZ(depth);\n\n // Reconstruct view space position from depth\n vec3 positionVS = computeViewSpacePosition(v_uv, z, material_invProjScaleXY);\n\n // Compute normal\n vec3 normal = computeViewSpaceNormal(v_uv, renderer_BlitTexture, depth, positionVS, renderer_texelSize.xy, material_invProjScaleXY);\n\n float occlusion = 0.0;\n scalableAmbientObscurance(v_uv, positionVS, normal, occlusion);\n\n // Occlusion to visibility\n float aoVisibility = pow(clamp(1.0 - occlusion, 0.0, 1.0), material_power);\n\n gl_FragColor = vec4(aoVisibility, pack(-positionVS.z/camera_ProjectionParams.z), 1.0);\n}\n\n"; // eslint-disable-line
  /**
  * @internal
  * Scalable Ambient Obscurance render pass.
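
The only difference between the removed and added shader strings above is the explicit highp qualifier on the depth sampler (the renderer_BlitTexture uniform and the matching computeViewSpaceNormal parameter); the isSupported gate added below guards against devices that cannot honor it. Both shaders also ferry depth through two 8-bit channels via the pack()/unpack() pair visible in the strings; a quick JavaScript re-derivation of that round trip (mirroring the GLSL, not engine API):

// pack(): split a [0, 1] depth into a high byte and a low remainder.
function pack(z) {
  z = Math.min(Math.max(z, 0), 1);
  var t = Math.floor(256 * z);
  return [t / 256, 256 * z - t]; // [hi, lo]
}
// unpack(): recombine; the 257 factors compensate for the 8-bit quantization
// the pair undergoes when stored in an RGBA8 render target.
function unpack(hi, lo) {
  return hi * (256 / 257) + lo * (1 / 257);
}
// unpack(pack(z)) reduces algebraically to 256 * z / 257, a uniform rescale
// that is harmless here because the bilateral blur only compares depth
// differences (scaled by material_farPlaneOverEdgeDistance).
console.log(unpack.apply(null, pack(0.5))); // ~0.49805, i.e. 128 / 257
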
@@ -15758,6 +15760,7 @@
  var material = new Material(engine, Shader.find(ScalableAmbientObscurancePass.SHADER_NAME));
  material._addReferCount(1);
  _this._material = material;
+ _this.isSupported = _this.engine._hardwareRenderer.capability.isFragmentHighPrecision;
  return _this;
  }
  var _proto = ScalableAmbientObscurancePass.prototype;
@@ -20641,8 +20644,8 @@
  }
  // Scalable ambient obscurance pass
  // Before opaque pass so materials can sample ambient occlusion in BRDF
- if (ambientOcclusionEnabled && supportDepthTexture) {
- var saoPass = this._saoPass;
+ var saoPass = this._saoPass;
+ if (ambientOcclusionEnabled && supportDepthTexture && saoPass.isSupported) {
  saoPass.onConfig(camera, this._depthOnlyPass.renderTarget);
  saoPass.onRender(context);
  } else {
@@ -26747,11 +26750,7 @@
  ], args));
  this._components.push(component);
  // @todo: temporary solution
- if (_instanceof1$2(component, Transform)) {
- var transform = this._transform;
- this._transform = component;
- transform == null ? void 0 : transform.destroy();
- }
+ if (_instanceof1$2(component, Transform)) this._setTransform(component);
  component._setActive(true, ActiveChangeFlag.All);
  return component;
  };
@@ -27180,6 +27179,16 @@
  }
  }
  };
+ _proto._setTransform = function _setTransform(value) {
+ var _this__transform;
+ (_this__transform = this._transform) == null ? void 0 : _this__transform.destroy();
+ this._transform = value;
+ var children = this._children;
+ for(var i = 0, n = children.length; i < n; i++){
+ var _children_i_transform;
+ (_children_i_transform = children[i].transform) == null ? void 0 : _children_i_transform._parentChange();
+ }
+ };
  /**
  * @deprecated
  */ _proto.getInvModelMatrix = function getInvModelMatrix() {
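
De-minified, the helper added above reads roughly as follows (a reconstruction of the presumable TypeScript source, not copied from the repo). Unlike the inline swap it replaces in addComponent, it also notifies every child transform that its parent changed, which is the substance of the fix:

_setTransform(value) {
  this._transform?.destroy();   // retire the previous transform component
  this._transform = value;
  // Child transforms cache parent-derived state, so tell each one that its
  // parent transform was swapped out:
  const children = this._children;
  for (let i = 0, n = children.length; i < n; i++) {
    children[i].transform?._parentChange();
  }
}
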
@@ -42643,6 +42652,13 @@
  return this._maxAntiAliasing;
  }
  },
+ {
+ key: "isFragmentHighPrecision",
+ get: function get() {
+ var gl = this._rhi.gl;
+ return gl.getShaderPrecisionFormat(gl.FRAGMENT_SHADER, gl.HIGH_FLOAT).precision !== 0;
+ }
+ },
  {
  key: "rhi",
  get: function get() {
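
This getter is what feeds saoPass.isSupported in the earlier hunks. The same probe against a raw WebGL context, as a standalone sketch with the null check the WebGL spec allows (getShaderPrecisionFormat may return null on error):

var gl = document.createElement("canvas").getContext("webgl");
var fmt = gl.getShaderPrecisionFormat(gl.FRAGMENT_SHADER, gl.HIGH_FLOAT);
// precision === 0 means highp float is unsupported in fragment shaders
// (seen on some older mobile GPUs); the engine then skips the SAO pass.
var isFragmentHighPrecision = fmt !== null && fmt.precision !== 0;
console.log("fragment highp:", isFragmentHighPrecision);
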
@@ -50950,7 +50966,8 @@
  ._request(item.url, _extends({}, item, {
  type: "json"
  })).then(function(data) {
- var scene = new Scene(engine);
+ var _data_name;
+ var scene = new Scene(engine, (_data_name = data.name) != null ? _data_name : "");
  var context = new ParserContext(engine, ParserType.Scene, scene);
  var parser = new SceneParser(data, context, scene);
  parser._collectDependentAssets(data);
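
The _data_name temporary above is downleveled nullish coalescing; the presumable source is simply:

const scene = new Scene(engine, data.name ?? "");
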
@@ -51050,6 +51067,19 @@
  if (postProcessData) {
  Logger.warn("Post Process is not supported in scene yet, please add PostProcess component in entity instead.");
  }
+ // Ambient Occlusion
+ var ambientOcclusion = data.scene.ambientOcclusion;
+ if (ambientOcclusion) {
+ var sceneAmbientOcclusion = scene.ambientOcclusion;
+ sceneAmbientOcclusion.enabled = ambientOcclusion.enabledAmbientOcclusion;
+ sceneAmbientOcclusion.intensity = ambientOcclusion.intensity;
+ sceneAmbientOcclusion.radius = ambientOcclusion.radius;
+ sceneAmbientOcclusion.bias = ambientOcclusion.bias;
+ sceneAmbientOcclusion.power = ambientOcclusion.power;
+ sceneAmbientOcclusion.quality = ambientOcclusion.quality;
+ sceneAmbientOcclusion.bilateralThreshold = ambientOcclusion.bilateralThreshold;
+ sceneAmbientOcclusion.minHorizonAngle = ambientOcclusion.minHorizonAngle;
+ }
  return Promise.all(promises).then(function() {
  resolve(scene);
  });
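
For reference, the same fields the parser copies above are settable at runtime through scene.ambientOcclusion. A hedged usage sketch -- the property names come from the diff, while the values and the quality mapping are illustrative assumptions:

var ao = scene.ambientOcclusion;
ao.enabled = true;
ao.quality = 1;               // assumed to select the SSAO_QUALITY 0/1/2 shader branches
ao.radius = 0.3;              // illustrative world-space sample radius
ao.intensity = 1.0;
ao.bias = 0.0005;             // illustrative self-occlusion bias
ao.power = 1.0;               // occlusion-to-visibility exponent
ao.bilateralThreshold = 0.05; // illustrative edge threshold for the bilateral blur
ao.minHorizonAngle = 0.0;     // illustrative
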
@@ -51572,7 +51602,7 @@
  ], EXT_texture_webp);
 
  //@ts-ignore
- var version = "1.6.0-beta.2";
+ var version = "1.6.1";
  console.log("Galacean Engine Version: " + version);
  for(var key in CoreObjects){
  Loader.registerClass(key, CoreObjects[key]);