@spatialwalk/avatarkit 1.0.0-beta.32 → 1.0.0-beta.34
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +5 -0
- package/README.md +19 -0
- package/dist/{StreamingAudioPlayer-TiKq7LEP.js → StreamingAudioPlayer-D_anvcr1.js} +1 -1
- package/dist/core/AvatarView.d.ts +1 -0
- package/dist/{index-CFQb9r1N.js → index-D8QhzqfR.js} +551 -27
- package/dist/index.js +1 -1
- package/dist/renderer/renderer.d.ts +6 -1
- package/dist/renderer/webgl/webglRenderer.d.ts +16 -2
- package/dist/renderer/webgpu/webgpuRenderer.d.ts +15 -2
- package/package.json +1 -1
package/CHANGELOG.md
CHANGED
@@ -5,6 +5,11 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+## [1.0.0-beta.34] - 2025-12-16
+
+### ✨ New Features
+- **Avatar Transform API** - Added `setTransform` method to `AvatarView` for controlling avatar position and scale within the canvas. Supports normalized coordinates (-1 to 1) for position and scale factor.
+
 ## [1.0.0-beta.32] - 2025-12-16
 
 ### 🔄 Breaking Changes
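As a quick orientation for the feature described above, here is a small usage sketch. It assumes an existing `AvatarView` instance and its host canvas (declared below purely for illustration) and shows one way pixel offsets could be converted into the normalized (-1 to 1) coordinates that `setTransform` expects; the helper is not part of the SDK.

```typescript
// Illustrative only: map a pixel offset inside the canvas to the normalized
// (-1..1) coordinates used by setTransform, then apply it.
declare const avatarView: { setTransform(x: number, y: number, scale: number): void };
declare const canvas: HTMLCanvasElement;

function toNormalized(pixelX: number, pixelY: number, el: HTMLCanvasElement) {
  return {
    x: (pixelX / el.clientWidth) * 2 - 1,  // -1 = left edge, 1 = right edge
    y: 1 - (pixelY / el.clientHeight) * 2, // -1 = bottom edge, 1 = top edge
  };
}

const { x, y } = toNormalized(480, 270, canvas);
avatarView.setTransform(x, y, 1.5); // offset the avatar and scale it 1.5x
```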
package/README.md
CHANGED
@@ -292,6 +292,12 @@ const avatarView = new AvatarView(avatar, container)
 // Wait for first frame to render
 await avatarView.ready // Promise that resolves when the first frame is rendered
 
+// Set avatar transform (position and scale)
+avatarView.setTransform(x, y, scale)
+// - x: Horizontal offset in normalized coordinates (-1 to 1, where -1 = left edge, 0 = center, 1 = right edge)
+// - y: Vertical offset in normalized coordinates (-1 to 1, where -1 = bottom edge, 0 = center, 1 = top edge)
+// - scale: Scale factor (1.0 = original size, 2.0 = double size, 0.5 = half size)
+
 // Cleanup resources (must be called before switching characters)
 avatarView.dispose()
 ```
@@ -389,6 +395,19 @@ avatarView.avatarController.onConversationState = (state: ConversationState) =>
 avatarView.avatarController.onError = (error: Error) => {}
 ```
 
+#### Avatar Transform Methods
+
+```typescript
+// Set avatar transform (position and scale in canvas)
+avatarView.setTransform(x, y, scale)
+// - x: Horizontal offset in normalized coordinates (-1 to 1, where -1 = left edge, 0 = center, 1 = right edge)
+// - y: Vertical offset in normalized coordinates (-1 to 1, where -1 = bottom edge, 0 = center, 1 = top edge)
+// - scale: Scale factor (1.0 = original size, 2.0 = double size, 0.5 = half size)
+// Example:
+avatarView.setTransform(0, 0, 1.0) // Center, original size
+avatarView.setTransform(0.5, 0, 2.0) // Right half, double size
+```
+
 **Important Notes:**
 - `start()` and `close()` are only available in SDK mode
 - `yieldAudioData()` and `yieldFramesData()` are only available in Host mode
package/dist/{StreamingAudioPlayer-TiKq7LEP.js → StreamingAudioPlayer-D_anvcr1.js}
CHANGED
@@ -1,7 +1,7 @@
 var __defProp = Object.defineProperty;
 var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
 var __publicField = (obj, key, value) => __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
-import { A as APP_CONFIG, e as errorToMessage, l as logEvent, a as logger } from "./index-CFQb9r1N.js";
+import { A as APP_CONFIG, e as errorToMessage, l as logEvent, a as logger } from "./index-D8QhzqfR.js";
 class StreamingAudioPlayer {
 constructor(options) {
 __publicField(this, "audioContext", null);
package/dist/{index-CFQb9r1N.js → index-D8QhzqfR.js}
CHANGED
@@ -2977,7 +2977,7 @@ const _AnimationPlayer = class _AnimationPlayer {
 if (this.streamingPlayer) {
 return;
 }
-const { StreamingAudioPlayer } = await import("./StreamingAudioPlayer-TiKq7LEP.js");
+const { StreamingAudioPlayer } = await import("./StreamingAudioPlayer-D_anvcr1.js");
 this.streamingPlayer = new StreamingAudioPlayer({
 sampleRate: APP_CONFIG.audio.sampleRate,
 channelCount: 1,
@@ -4378,7 +4378,7 @@ class AvatarSDK {
 }
 __publicField(AvatarSDK, "_isInitialized", false);
 __publicField(AvatarSDK, "_configuration", null);
-__publicField(AvatarSDK, "_version", "1.0.0-beta.32");
+__publicField(AvatarSDK, "_version", "1.0.0-beta.34");
 __publicField(AvatarSDK, "_avatarCore", null);
 __publicField(AvatarSDK, "_dynamicSdkConfig", null);
 class EventEmitter {
@@ -6328,6 +6328,8 @@ function reorderPackedData(packedData, sortOrder) {
 }
const fragmentShaderSource = "#version 300 es\nprecision highp float;\n\nin vec2 v_relativePosition;\nin vec4 v_color;\n\nout vec4 fragColor;\n\nconst float BOUNDS_RADIUS = 3.0;\nconst float BOUNDS_RADIUS_SQUARED = BOUNDS_RADIUS * BOUNDS_RADIUS;\n\nfloat splatFragmentAlpha(vec2 relativePosition, float splatAlpha) {\n // ✅ 修复:完全匹配MetalSplatter的计算方式\n float negativeMagnitudeSquared = -dot(relativePosition, relativePosition);\n\n // 边界检查:超出椭圆边界的点被剔除\n if (negativeMagnitudeSquared < -BOUNDS_RADIUS_SQUARED) {\n return 0.0;\n }\n\n // ✅ 修复:高斯衰减,使用MetalSplatter的公式 exp(0.5 * negative)\n return exp(0.5 * negativeMagnitudeSquared) * splatAlpha;\n}\n\nvoid main() {\n float alpha = splatFragmentAlpha(v_relativePosition, v_color.a);\n\n // ✅ 优化:提前丢弃几乎透明的片段(提升性能和质量,对齐 Android SDK)\n if (alpha < 0.001) {\n discard;\n }\n\n // 预乘 alpha 输出(匹配 premultipliedAlpha: true)\n // 颜色值需要乘以 alpha,这样 WebGL 才能正确混合\n fragColor = vec4(v_color.rgb * alpha, alpha);\n}";
const vertexShaderSource = "#version 300 es\nprecision highp float;\n\n// 基础四边形顶点属性(共享4个顶点)\nlayout(location = 0) in vec2 a_quadVertex; // (-1,-1), (-1,1), (1,-1), (1,1)\n\n// 实例化属性(每个splat实例)\nlayout(location = 1) in vec3 a_position; // splat中心位置\nlayout(location = 2) in vec4 a_color; // RGBA颜色\nlayout(location = 3) in vec3 a_covA; // 协方差矩阵上三角\nlayout(location = 4) in vec3 a_covB; // 协方差矩阵下三角\n\n// Uniform变量\nuniform mat4 u_viewMatrix;\nuniform mat4 u_projectionMatrix;\nuniform vec2 u_screenSize;\nuniform int u_enableFrustumCulling;\n\n// 输出到片段着色器\nout vec2 v_relativePosition;\nout vec4 v_color;\n\n// 常量定义\nconst float BOUNDS_RADIUS = 3.0;\n\n/**\n * 计算2D协方差矩阵(复刻Metal版本)\n */\nvec3 calcCovariance2D(vec3 viewPos, vec3 cov3Da, vec3 cov3Db, mat4 viewMatrix, mat4 projectionMatrix, vec2 screenSize) {\n float invViewPosZ = 1.0 / viewPos.z;\n float invViewPosZSquared = invViewPosZ * invViewPosZ;\n\n // FOV限制\n float tanHalfFovX = 1.0 / projectionMatrix[0][0];\n float tanHalfFovY = 1.0 / projectionMatrix[1][1];\n float limX = 1.3 * tanHalfFovX;\n float limY = 1.3 * tanHalfFovY;\n\n viewPos.x = clamp(viewPos.x * invViewPosZ, -limX, limX) * viewPos.z;\n viewPos.y = clamp(viewPos.y * invViewPosZ, -limY, limY) * viewPos.z;\n\n // 焦距计算\n float focalX = screenSize.x * projectionMatrix[0][0] / 2.0;\n float focalY = screenSize.y * projectionMatrix[1][1] / 2.0;\n\n // 雅可比矩阵 J\n mat3 J = mat3(\n focalX * invViewPosZ, 0.0, 0.0,\n 0.0, focalY * invViewPosZ, 0.0,\n -(focalX * viewPos.x) * invViewPosZSquared, -(focalY * viewPos.y) * invViewPosZSquared, 0.0\n );\n\n // 视图变换矩阵 W (仅旋转部分) - 对齐 Android SDK,不使用转置\n mat3 W = mat3(viewMatrix[0].xyz, viewMatrix[1].xyz, viewMatrix[2].xyz);\n\n // 投影变换 T = J * W\n mat3 T = J * W;\n\n // 3D协方差矩阵 Vrk\n mat3 Vrk = mat3(\n cov3Da.x, cov3Da.y, cov3Da.z,\n cov3Da.y, cov3Db.x, cov3Db.y,\n cov3Da.z, cov3Db.y, cov3Db.z\n );\n\n // 2D协方差矩阵\n mat3 cov = T * Vrk * transpose(T);\n\n // 低通滤波器\n cov[0][0] += 0.3;\n cov[1][1] += 0.3;\n\n return vec3(cov[0][0], cov[0][1], cov[1][1]);\n}\n\n/**\n * 分解协方差矩阵\n */\nvoid decomposeCovariance(vec3 cov2D, out vec2 v1, out vec2 v2) {\n float a = cov2D.x;\n float b = cov2D.y;\n float d = cov2D.z;\n\n float det = a * d - b * b;\n float trace = a + d;\n\n float mean = 0.5 * trace;\n float dist = max(0.1, sqrt(mean * mean - det));\n\n // 特征值\n float lambda1 = mean + dist;\n float lambda2 = mean - dist;\n\n // 确保特征值为正\n lambda1 = max(lambda1, 0.01);\n lambda2 = max(lambda2, 0.01);\n\n // 特征向量 - 完全复刻MetalSplatter的算法\n vec2 eigenvector1;\n if (abs(b) < 1e-6) {\n eigenvector1 = (a > d) ? 
vec2(1.0, 0.0) : vec2(0.0, 1.0);\n } else {\n // ✅ 修复:使用MetalSplatter的公式 (b, d - lambda2) 而不是 (b, lambda1 - a)\n eigenvector1 = normalize(vec2(b, d - lambda2));\n }\n\n // ✅ 修复:正交特征向量,使用MetalSplatter的方向 (y, -x) 而不是 (-y, x)\n vec2 eigenvector2 = vec2(eigenvector1.y, -eigenvector1.x);\n\n v1 = eigenvector1 * sqrt(lambda1);\n v2 = eigenvector2 * sqrt(lambda2);\n}\n\nvoid main() {\n // 直接使用原始位置数据,不进行硬编码缩放\n vec3 scaledPosition = a_position;\n\n // 转换到视图空间\n vec4 viewPosition4 = u_viewMatrix * vec4(scaledPosition, 1.0);\n vec3 viewPosition3 = viewPosition4.xyz;\n\n // 计算2D协方差矩阵\n vec3 cov2D = calcCovariance2D(viewPosition3, a_covA, a_covB, u_viewMatrix, u_projectionMatrix, u_screenSize);\n\n // 分解协方差矩阵\n vec2 axis1, axis2;\n decomposeCovariance(cov2D, axis1, axis2);\n\n // 投影到屏幕空间\n vec4 projectedCenter = u_projectionMatrix * viewPosition4;\n\n // 视锥体剔除(对齐MetalSplatter边界,可调试禁用)\n if (u_enableFrustumCulling == 1) {\n float bounds = 1.2 * projectedCenter.w;\n if (projectedCenter.z < 0.0 ||\n projectedCenter.z > projectedCenter.w ||\n projectedCenter.x < -bounds ||\n projectedCenter.x > bounds ||\n projectedCenter.y < -bounds ||\n projectedCenter.y > bounds) {\n // 剔除到屏幕外\n gl_Position = vec4(1.0, 1.0, 0.0, 1.0);\n return;\n }\n }\n\n // 使用实例化的四边形顶点\n vec2 relativeCoord = a_quadVertex;\n\n // 计算椭圆变换后的相对位置(像素单位)\n vec2 ellipseRelativePos = relativeCoord.x * axis1 + relativeCoord.y * axis2;\n\n // 计算屏幕空间偏移\n vec2 screenSizeFloat = u_screenSize;\n vec2 projectedScreenDelta = ellipseRelativePos * 2.0 * BOUNDS_RADIUS / screenSizeFloat;\n\n // 最终顶点位置\n gl_Position = vec4(\n projectedCenter.x + projectedScreenDelta.x * projectedCenter.w,\n projectedCenter.y + projectedScreenDelta.y * projectedCenter.w,\n projectedCenter.z,\n projectedCenter.w\n );\n\n // 传递标准化坐标给片段着色器(椭圆内[-1,1]范围)\n v_relativePosition = relativeCoord * BOUNDS_RADIUS;\n v_color = a_color;\n}";
const blitVertexShaderSource = "#version 300 es\nprecision highp float;\n\n// 全屏四边形顶点(NDC坐标)\nin vec2 a_position;\nin vec2 a_texCoord;\n\nout vec2 v_texCoord;\n\nuniform vec2 u_offset; // 屏幕空间偏移(NDC坐标)\nuniform float u_scale; // 缩放因子\n\nvoid main() {\n // 以中心为基准的缩放和偏移\n // NDC 坐标范围是 [-1, 1],中心是 (0, 0)\n // 1. 先缩放(以中心为基准,所以直接缩放即可)\n // 2. 然后应用偏移\n vec2 pos = a_position * u_scale + u_offset;\n gl_Position = vec4(pos, 0.0, 1.0);\n // WebGL framebuffer 纹理坐标不需要翻转(framebuffer 的纹理坐标系统与屏幕坐标一致)\n v_texCoord = a_texCoord;\n}\n\n";
const blitFragmentShaderSource = "#version 300 es\nprecision highp float;\n\nin vec2 v_texCoord;\nout vec4 fragColor;\n\nuniform sampler2D u_texture;\n\nvoid main() {\n fragColor = texture(u_texture, v_texCoord);\n}\n\n";
 class WebGLRenderer {
 constructor(canvas, backgroundColor, alpha = true) {
 __publicField(this, "canvas");
@@ -6342,6 +6344,16 @@ class WebGLRenderer {
 __publicField(this, "splatCount");
 __publicField(this, "isInitialized");
 __publicField(this, "splatBufferSize");
+__publicField(this, "framebuffer", null);
+__publicField(this, "renderTexture", null);
+__publicField(this, "depthBuffer", null);
+__publicField(this, "framebufferWidth", 0);
+__publicField(this, "framebufferHeight", 0);
+__publicField(this, "blitShaderProgram", null);
+__publicField(this, "blitUniformLocations", { offset: null, scale: null, texture: null });
+__publicField(this, "blitAttributeLocations", { position: 0, texCoord: 0 });
+__publicField(this, "blitQuadBuffer", null);
+__publicField(this, "blitVAO", null);
 __publicField(this, "alpha");
 this.canvas = canvas;
 this.backgroundColor = backgroundColor || [0, 0, 0, 0];
@@ -6374,6 +6386,7 @@ class WebGLRenderer {
 this.setupShaderLocations();
 this.setupWebGLState();
 this.createBuffers();
+this.createBlitShader(gl);
 this.isInitialized = true;
 } catch (error) {
 logger.error(`WebGLRenderer initialize failed: ${error instanceof Error ? error.message : String(error)}`);
@@ -6521,22 +6534,28 @@ class WebGLRenderer {
 );
 gl.vertexAttribDivisor(this.attributeLocations.covB, 1);
 }
-render(viewMatrix, projectionMatrix, screenSize) {
+render(viewMatrix, projectionMatrix, screenSize, transform) {
 if (!this.isInitialized || this.splatCount === 0) {
 return;
 }
 const gl = this.gl;
 if (!gl)
 throw new Error("WebGL context not initialized");
-
-
-
-
-
-
-
-
-
+const [width, height] = screenSize;
+const needsTransform = transform && (transform.x !== 0 || transform.y !== 0 || transform.scale !== 1);
+if (needsTransform) {
+if (!this.framebuffer || this.framebufferWidth !== width || this.framebufferHeight !== height) {
+this.createFramebuffer(width, height);
+}
+gl.bindFramebuffer(gl.FRAMEBUFFER, this.framebuffer);
+this.render3DGS(gl, viewMatrix, projectionMatrix, screenSize, width, height);
+gl.bindFramebuffer(gl.FRAMEBUFFER, null);
+gl.viewport(0, 0, width, height);
+gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
+this.blitToScreen(transform);
+} else {
+this.render3DGS(gl, viewMatrix, projectionMatrix, screenSize, width, height);
+}
 }
 createShaderProgram(gl) {
 const vertexShader = gl.createShader(gl.VERTEX_SHADER);
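The hunk above only takes the offscreen framebuffer + blit path when the requested transform differs from the identity; otherwise it renders straight to the canvas as before. A minimal sketch of that gate, using the same `Transform` shape added to renderer.d.ts later in this diff:

```typescript
// Sketch of the gating logic shown above: treat {x: 0, y: 0, scale: 1} (or an
// absent transform) as the identity and skip the extra framebuffer pass.
interface Transform { x: number; y: number; scale: number }

function needsTransform(t?: Transform): boolean {
  return !!t && (t.x !== 0 || t.y !== 0 || t.scale !== 1);
}
```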
@@ -6593,6 +6612,174 @@ class WebGLRenderer {
|
|
|
6593
6612
|
);
|
|
6594
6613
|
}
|
|
6595
6614
|
}
|
|
6615
|
+
render3DGS(gl, viewMatrix, projectionMatrix, screenSize, width, height) {
|
|
6616
|
+
gl.viewport(0, 0, width, height);
|
|
6617
|
+
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
|
|
6618
|
+
gl.disable(gl.DEPTH_TEST);
|
|
6619
|
+
gl.depthMask(true);
|
|
6620
|
+
gl.enable(gl.BLEND);
|
|
6621
|
+
gl.blendFunc(gl.ONE, gl.ONE_MINUS_SRC_ALPHA);
|
|
6622
|
+
gl.useProgram(this.shaderProgram);
|
|
6623
|
+
gl.uniformMatrix4fv(this.uniformLocations.viewMatrix, false, viewMatrix);
|
|
6624
|
+
gl.uniformMatrix4fv(this.uniformLocations.projectionMatrix, false, projectionMatrix);
|
|
6625
|
+
gl.uniform2fv(this.uniformLocations.screenSize, screenSize);
|
|
6626
|
+
gl.uniform1i(this.uniformLocations.enableFrustumCulling, 1);
|
|
6627
|
+
this.setupVertexAttributes();
|
|
6628
|
+
gl.drawArraysInstanced(gl.TRIANGLE_STRIP, 0, 4, this.splatCount);
|
|
6629
|
+
}
|
|
6630
|
+
createFramebuffer(width, height) {
|
|
6631
|
+
const gl = this.gl;
|
|
6632
|
+
if (!gl)
|
|
6633
|
+
return;
|
|
6634
|
+
if (this.framebuffer) {
|
|
6635
|
+
gl.deleteFramebuffer(this.framebuffer);
|
|
6636
|
+
}
|
|
6637
|
+
if (this.renderTexture) {
|
|
6638
|
+
gl.deleteTexture(this.renderTexture);
|
|
6639
|
+
}
|
|
6640
|
+
if (this.depthBuffer) {
|
|
6641
|
+
gl.deleteRenderbuffer(this.depthBuffer);
|
|
6642
|
+
}
|
|
6643
|
+
this.renderTexture = gl.createTexture();
|
|
6644
|
+
gl.bindTexture(gl.TEXTURE_2D, this.renderTexture);
|
|
6645
|
+
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
|
|
6646
|
+
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
|
|
6647
|
+
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
|
|
6648
|
+
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
|
|
6649
|
+
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
|
|
6650
|
+
this.depthBuffer = gl.createRenderbuffer();
|
|
6651
|
+
gl.bindRenderbuffer(gl.RENDERBUFFER, this.depthBuffer);
|
|
6652
|
+
gl.renderbufferStorage(gl.RENDERBUFFER, gl.DEPTH_COMPONENT24, width, height);
|
|
6653
|
+
this.framebuffer = gl.createFramebuffer();
|
|
6654
|
+
gl.bindFramebuffer(gl.FRAMEBUFFER, this.framebuffer);
|
|
6655
|
+
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, this.renderTexture, 0);
|
|
6656
|
+
gl.framebufferRenderbuffer(gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT, gl.RENDERBUFFER, this.depthBuffer);
|
|
6657
|
+
const status = gl.checkFramebufferStatus(gl.FRAMEBUFFER);
|
|
6658
|
+
if (status !== gl.FRAMEBUFFER_COMPLETE) {
|
|
6659
|
+
const statusName = {
|
|
6660
|
+
[gl.FRAMEBUFFER_INCOMPLETE_ATTACHMENT]: "INCOMPLETE_ATTACHMENT",
|
|
6661
|
+
[gl.FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT]: "INCOMPLETE_MISSING_ATTACHMENT",
|
|
6662
|
+
[gl.FRAMEBUFFER_INCOMPLETE_DIMENSIONS]: "INCOMPLETE_DIMENSIONS",
|
|
6663
|
+
[gl.FRAMEBUFFER_UNSUPPORTED]: "UNSUPPORTED"
|
|
6664
|
+
}[status] || `UNKNOWN(${status})`;
|
|
6665
|
+
logger.error(`[WebGLRenderer] Framebuffer incomplete: ${statusName}`);
|
|
6666
|
+
throw new Error(`Framebuffer incomplete: ${statusName}`);
|
|
6667
|
+
}
|
|
6668
|
+
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
|
|
6669
|
+
this.framebufferWidth = width;
|
|
6670
|
+
this.framebufferHeight = height;
|
|
6671
|
+
}
|
|
6672
|
+
createBlitShader(gl) {
|
|
6673
|
+
const vertexShader = gl.createShader(gl.VERTEX_SHADER);
|
|
6674
|
+
if (!vertexShader)
|
|
6675
|
+
throw new Error("Failed to create blit vertex shader");
|
|
6676
|
+
gl.shaderSource(vertexShader, blitVertexShaderSource);
|
|
6677
|
+
gl.compileShader(vertexShader);
|
|
6678
|
+
if (!gl.getShaderParameter(vertexShader, gl.COMPILE_STATUS)) {
|
|
6679
|
+
const info = gl.getShaderInfoLog(vertexShader);
|
|
6680
|
+
gl.deleteShader(vertexShader);
|
|
6681
|
+
throw new Error(`Blit vertex shader compilation failed: ${info}`);
|
|
6682
|
+
}
|
|
6683
|
+
const fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
|
|
6684
|
+
if (!fragmentShader) {
|
|
6685
|
+
gl.deleteShader(vertexShader);
|
|
6686
|
+
throw new Error("Failed to create blit fragment shader");
|
|
6687
|
+
}
|
|
6688
|
+
gl.shaderSource(fragmentShader, blitFragmentShaderSource);
|
|
6689
|
+
gl.compileShader(fragmentShader);
|
|
6690
|
+
if (!gl.getShaderParameter(fragmentShader, gl.COMPILE_STATUS)) {
|
|
6691
|
+
const info = gl.getShaderInfoLog(fragmentShader);
|
|
6692
|
+
gl.deleteShader(vertexShader);
|
|
6693
|
+
gl.deleteShader(fragmentShader);
|
|
6694
|
+
throw new Error(`Blit fragment shader compilation failed: ${info}`);
|
|
6695
|
+
}
|
|
6696
|
+
this.blitShaderProgram = gl.createProgram();
|
|
6697
|
+
if (!this.blitShaderProgram) {
|
|
6698
|
+
gl.deleteShader(vertexShader);
|
|
6699
|
+
gl.deleteShader(fragmentShader);
|
|
6700
|
+
throw new Error("Failed to create blit shader program");
|
|
6701
|
+
}
|
|
6702
|
+
gl.attachShader(this.blitShaderProgram, vertexShader);
|
|
6703
|
+
gl.attachShader(this.blitShaderProgram, fragmentShader);
|
|
6704
|
+
gl.linkProgram(this.blitShaderProgram);
|
|
6705
|
+
if (!gl.getProgramParameter(this.blitShaderProgram, gl.LINK_STATUS)) {
|
|
6706
|
+
const info = gl.getProgramInfoLog(this.blitShaderProgram);
|
|
6707
|
+
gl.deleteShader(vertexShader);
|
|
6708
|
+
gl.deleteShader(fragmentShader);
|
|
6709
|
+
gl.deleteProgram(this.blitShaderProgram);
|
|
6710
|
+
this.blitShaderProgram = null;
|
|
6711
|
+
throw new Error(`Blit shader program linking failed: ${info}`);
|
|
6712
|
+
}
|
|
6713
|
+
gl.deleteShader(vertexShader);
|
|
6714
|
+
gl.deleteShader(fragmentShader);
|
|
6715
|
+
this.blitUniformLocations.offset = gl.getUniformLocation(this.blitShaderProgram, "u_offset");
|
|
6716
|
+
this.blitUniformLocations.scale = gl.getUniformLocation(this.blitShaderProgram, "u_scale");
|
|
6717
|
+
this.blitUniformLocations.texture = gl.getUniformLocation(this.blitShaderProgram, "u_texture");
|
|
6718
|
+
this.blitAttributeLocations.position = gl.getAttribLocation(this.blitShaderProgram, "a_position");
|
|
6719
|
+
this.blitAttributeLocations.texCoord = gl.getAttribLocation(this.blitShaderProgram, "a_texCoord");
|
|
6720
|
+
if (!this.blitUniformLocations.offset || !this.blitUniformLocations.scale || !this.blitUniformLocations.texture) {
|
|
6721
|
+
throw new Error("[WebGLRenderer] Failed to get blit shader uniform locations");
|
|
6722
|
+
}
|
|
6723
|
+
if (this.blitAttributeLocations.position === -1 || this.blitAttributeLocations.texCoord === -1) {
|
|
6724
|
+
throw new Error("[WebGLRenderer] Failed to get blit shader attribute locations");
|
|
6725
|
+
}
|
|
6726
|
+
this.blitVAO = gl.createVertexArray();
|
|
6727
|
+
gl.bindVertexArray(this.blitVAO);
|
|
6728
|
+
const quadData = new Float32Array([
|
|
6729
|
+
-1,
|
|
6730
|
+
-1,
|
|
6731
|
+
0,
|
|
6732
|
+
0,
|
|
6733
|
+
-1,
|
|
6734
|
+
1,
|
|
6735
|
+
0,
|
|
6736
|
+
1,
|
|
6737
|
+
1,
|
|
6738
|
+
-1,
|
|
6739
|
+
1,
|
|
6740
|
+
0,
|
|
6741
|
+
1,
|
|
6742
|
+
1,
|
|
6743
|
+
1,
|
|
6744
|
+
1
|
|
6745
|
+
]);
|
|
6746
|
+
this.blitQuadBuffer = gl.createBuffer();
|
|
6747
|
+
gl.bindBuffer(gl.ARRAY_BUFFER, this.blitQuadBuffer);
|
|
6748
|
+
gl.bufferData(gl.ARRAY_BUFFER, quadData, gl.STATIC_DRAW);
|
|
6749
|
+
gl.enableVertexAttribArray(this.blitAttributeLocations.position);
|
|
6750
|
+
gl.vertexAttribPointer(this.blitAttributeLocations.position, 2, gl.FLOAT, false, 16, 0);
|
|
6751
|
+
gl.enableVertexAttribArray(this.blitAttributeLocations.texCoord);
|
|
6752
|
+
gl.vertexAttribPointer(this.blitAttributeLocations.texCoord, 2, gl.FLOAT, false, 16, 8);
|
|
6753
|
+
gl.bindVertexArray(null);
|
|
6754
|
+
}
|
|
6755
|
+
blitToScreen(transform) {
|
|
6756
|
+
const gl = this.gl;
|
|
6757
|
+
if (!gl || !this.blitShaderProgram || !this.renderTexture || !this.blitVAO) {
|
|
6758
|
+
logger.error(`[WebGLRenderer] Blit failed: gl=${!!gl}, shader=${!!this.blitShaderProgram}, texture=${!!this.renderTexture}, VAO=${!!this.blitVAO}`);
|
|
6759
|
+
return;
|
|
6760
|
+
}
|
|
6761
|
+
gl.useProgram(this.blitShaderProgram);
|
|
6762
|
+
const wasBlendEnabled = gl.isEnabled(gl.BLEND);
|
|
6763
|
+
const wasDepthTestEnabled = gl.isEnabled(gl.DEPTH_TEST);
|
|
6764
|
+
gl.disable(gl.DEPTH_TEST);
|
|
6765
|
+
gl.disable(gl.BLEND);
|
|
6766
|
+
gl.bindVertexArray(this.blitVAO);
|
|
6767
|
+
gl.activeTexture(gl.TEXTURE0);
|
|
6768
|
+
gl.bindTexture(gl.TEXTURE_2D, this.renderTexture);
|
|
6769
|
+
gl.uniform1i(this.blitUniformLocations.texture, 0);
|
|
6770
|
+
const offsetXNDC = transform.x;
|
|
6771
|
+
const offsetYNDC = -transform.y;
|
|
6772
|
+
gl.uniform2f(this.blitUniformLocations.offset, offsetXNDC, offsetYNDC);
|
|
6773
|
+
gl.uniform1f(this.blitUniformLocations.scale, transform.scale);
|
|
6774
|
+
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
|
|
6775
|
+
gl.bindVertexArray(null);
|
|
6776
|
+
if (wasDepthTestEnabled) {
|
|
6777
|
+
gl.enable(gl.DEPTH_TEST);
|
|
6778
|
+
}
|
|
6779
|
+
if (wasBlendEnabled) {
|
|
6780
|
+
gl.enable(gl.BLEND);
|
|
6781
|
+
}
|
|
6782
|
+
}
|
|
6596
6783
|
dispose() {
|
|
6597
6784
|
if (!this.gl)
|
|
6598
6785
|
return;
|
|
@@ -6601,8 +6788,22 @@ class WebGLRenderer {
 gl.deleteBuffer(this.splatBuffer);
 if (this.quadVertexBuffer)
 gl.deleteBuffer(this.quadVertexBuffer);
+if (this.blitQuadBuffer)
+gl.deleteBuffer(this.blitQuadBuffer);
+if (this.framebuffer)
+gl.deleteFramebuffer(this.framebuffer);
+if (this.renderTexture)
+gl.deleteTexture(this.renderTexture);
+if (this.depthBuffer)
+gl.deleteRenderbuffer(this.depthBuffer);
+if (this.shaderProgram)
+gl.deleteProgram(this.shaderProgram);
+if (this.blitShaderProgram)
+gl.deleteProgram(this.blitShaderProgram);
 if (this.vertexArray)
 gl.deleteVertexArray(this.vertexArray);
+if (this.blitVAO)
+gl.deleteVertexArray(this.blitVAO);
 if (this.shaderProgram)
 gl.deleteProgram(this.shaderProgram);
 try {
@@ -6617,6 +6818,7 @@ class WebGLRenderer {
 }
 }
const renderShaderCode = "/**\n * WebGPU 3DGS 渲染着色器\n *\n * 实例化渲染:每个 splat 绘制一个四边形\n * 对应 WebGL 版本的 GLSL 着色器\n */\n\n// ============ Uniform Bindings ============\n\nstruct Uniforms {\n viewMatrix: mat4x4f,\n projectionMatrix: mat4x4f,\n screenSize: vec2f,\n enableFrustumCulling: u32,\n}\n\n@group(0) @binding(0) var<uniform> uniforms: Uniforms;\n\n// ============ Storage Buffer Bindings (间接索引渲染) ============\n\n@group(1) @binding(0) var<storage, read> sortIndices: array<u32>;\n@group(1) @binding(1) var<storage, read> splatData: array<f32>;\n\n// ============ Vertex Shader ============\n\nstruct VertexInput {\n // 共享四边形顶点 (per-vertex)\n @location(0) quadVertex: vec2f,\n}\n\nstruct VertexOutput {\n @builtin(position) position: vec4f,\n @location(0) relativePosition: vec2f,\n @location(1) color: vec4f,\n}\n\n// 常量定义\nconst BOUNDS_RADIUS: f32 = 3.0;\n\n/**\n * 计算2D协方差矩阵(复刻 WebGL 版本)\n */\nfn calcCovariance2D(\n viewPos: vec3f,\n cov3Da: vec3f,\n cov3Db: vec3f,\n viewMatrix: mat4x4f,\n projectionMatrix: mat4x4f,\n screenSize: vec2f\n) -> vec3f {\n let invViewPosZ = 1.0 / viewPos.z;\n let invViewPosZSquared = invViewPosZ * invViewPosZ;\n\n // FOV 限制\n let tanHalfFovX = 1.0 / projectionMatrix[0][0];\n let tanHalfFovY = 1.0 / projectionMatrix[1][1];\n let limX = 1.3 * tanHalfFovX;\n let limY = 1.3 * tanHalfFovY;\n\n var clampedViewPos = viewPos;\n clampedViewPos.x = clamp(viewPos.x * invViewPosZ, -limX, limX) * viewPos.z;\n clampedViewPos.y = clamp(viewPos.y * invViewPosZ, -limY, limY) * viewPos.z;\n\n // 焦距计算\n let focalX = screenSize.x * projectionMatrix[0][0] / 2.0;\n let focalY = screenSize.y * projectionMatrix[1][1] / 2.0;\n\n // 雅可比矩阵 J\n let J = mat3x3f(\n focalX * invViewPosZ, 0.0, -(focalX * clampedViewPos.x) * invViewPosZSquared,\n 0.0, focalY * invViewPosZ, -(focalY * clampedViewPos.y) * invViewPosZSquared,\n 0.0, 0.0, 0.0\n );\n\n // 视图变换矩阵 W (仅旋转部分) - 对齐 Android SDK,不使用转置\n let W = mat3x3f(\n viewMatrix[0].xyz,\n viewMatrix[1].xyz,\n viewMatrix[2].xyz\n );\n\n // 投影变换 T = J * W\n let T = J * W;\n\n // 3D 协方差矩阵 Vrk(对称矩阵)\n let Vrk = mat3x3f(\n cov3Da.x, cov3Da.y, cov3Da.z,\n cov3Da.y, cov3Db.x, cov3Db.y,\n cov3Da.z, cov3Db.y, cov3Db.z\n );\n\n // 2D 协方差矩阵: cov = T * Vrk * T^T\n let cov = T * Vrk * transpose(T);\n\n // 低通滤波器\n var result = vec3f(cov[0][0], cov[0][1], cov[1][1]);\n result.x += 0.3;\n result.z += 0.3;\n\n return result;\n}\n\n/**\n * 分解协方差矩阵\n */\nfn decomposeCovariance(cov2D: vec3f) -> array<vec2f, 2> {\n let a = cov2D.x;\n let b = cov2D.y;\n let d = cov2D.z;\n\n let det = a * d - b * b;\n let trace = a + d;\n\n let mean = 0.5 * trace;\n let dist = max(0.1, sqrt(mean * mean - det));\n\n // 特征值\n var lambda1 = mean + dist;\n var lambda2 = mean - dist;\n\n // 确保特征值为正\n lambda1 = max(lambda1, 0.01);\n lambda2 = max(lambda2, 0.01);\n\n // 特征向量(复刻 WebGL MetalSplatter 算法)\n var eigenvector1: vec2f;\n if (abs(b) < 1e-6) {\n eigenvector1 = select(vec2f(0.0, 1.0), vec2f(1.0, 0.0), a > d);\n } else {\n eigenvector1 = normalize(vec2f(b, d - lambda2));\n }\n\n // 正交特征向量\n let eigenvector2 = vec2f(eigenvector1.y, -eigenvector1.x);\n\n let v1 = eigenvector1 * sqrt(lambda1);\n let v2 = eigenvector2 * sqrt(lambda2);\n\n return array<vec2f, 2>(v1, v2);\n}\n\n@vertex\nfn vertexMain(\n input: VertexInput,\n @builtin(instance_index) instanceIndex: u32\n) -> VertexOutput {\n var output: VertexOutput;\n\n // 🚀 间接索引:通过排序索引读取实际数据\n let sortedIdx = sortIndices[instanceIndex];\n let dataOffset = sortedIdx * 13u;\n\n // 从 storage buffer 读取 splat 数据\n let position = vec3f(\n splatData[dataOffset + 
0u],\n splatData[dataOffset + 1u],\n splatData[dataOffset + 2u]\n );\n let color = vec4f(\n splatData[dataOffset + 3u],\n splatData[dataOffset + 4u],\n splatData[dataOffset + 5u],\n splatData[dataOffset + 6u]\n );\n let covA = vec3f(\n splatData[dataOffset + 7u],\n splatData[dataOffset + 8u],\n splatData[dataOffset + 9u]\n );\n let covB = vec3f(\n splatData[dataOffset + 10u],\n splatData[dataOffset + 11u],\n splatData[dataOffset + 12u]\n );\n\n // 转换到视图空间\n let viewPosition4 = uniforms.viewMatrix * vec4f(position, 1.0);\n let viewPosition3 = viewPosition4.xyz;\n\n // 计算 2D 协方差矩阵\n let cov2D = calcCovariance2D(\n viewPosition3,\n covA,\n covB,\n uniforms.viewMatrix,\n uniforms.projectionMatrix,\n uniforms.screenSize\n );\n\n // 分解协方差矩阵\n let axes = decomposeCovariance(cov2D);\n let axis1 = axes[0];\n let axis2 = axes[1];\n\n // 投影到屏幕空间\n let projectedCenter = uniforms.projectionMatrix * viewPosition4;\n\n // 视锥体剔除\n if (uniforms.enableFrustumCulling == 1u) {\n let bounds = 1.2 * projectedCenter.w;\n if (projectedCenter.z < 0.0 ||\n projectedCenter.z > projectedCenter.w ||\n projectedCenter.x < -bounds ||\n projectedCenter.x > bounds ||\n projectedCenter.y < -bounds ||\n projectedCenter.y > bounds) {\n // 剔除到屏幕外\n output.position = vec4f(2.0, 2.0, 0.0, 1.0);\n output.relativePosition = vec2f(0.0);\n output.color = vec4f(0.0);\n return output;\n }\n }\n\n // 使用实例化的四边形顶点\n let relativeCoord = input.quadVertex;\n\n // 计算椭圆变换后的相对位置(像素单位)\n let ellipseRelativePos = relativeCoord.x * axis1 + relativeCoord.y * axis2;\n\n // 计算屏幕空间偏移\n let projectedScreenDelta = ellipseRelativePos * 2.0 * BOUNDS_RADIUS / uniforms.screenSize;\n\n // 最终顶点位置\n output.position = vec4f(\n projectedCenter.x + projectedScreenDelta.x * projectedCenter.w,\n projectedCenter.y + projectedScreenDelta.y * projectedCenter.w,\n projectedCenter.z,\n projectedCenter.w\n );\n\n // 传递给 fragment shader\n output.relativePosition = relativeCoord * BOUNDS_RADIUS;\n output.color = color;\n\n return output;\n}\n\n// ============ Fragment Shader ============\n\nconst BOUNDS_RADIUS_SQUARED: f32 = BOUNDS_RADIUS * BOUNDS_RADIUS;\n\nfn splatFragmentAlpha(relativePosition: vec2f, splatAlpha: f32) -> f32 {\n // 复刻 WebGL MetalSplatter 计算方式\n let negativeMagnitudeSquared = -dot(relativePosition, relativePosition);\n\n // 边界检查:超出椭圆边界的点被剔除\n if (negativeMagnitudeSquared < -BOUNDS_RADIUS_SQUARED) {\n return 0.0;\n }\n\n // 高斯衰减\n return exp(0.5 * negativeMagnitudeSquared) * splatAlpha;\n}\n\n@fragment\nfn fragmentMain(input: VertexOutput) -> @location(0) vec4f {\n let alpha = splatFragmentAlpha(input.relativePosition, input.color.a);\n\n // ✅ 优化:提前丢弃几乎透明的片段(提升性能和质量,对齐 Android SDK)\n if (alpha < 0.001) {\n discard;\n }\n\n // 预乘 alpha 输出(匹配 alphaMode: 'premultiplied')\n return vec4f(input.color.rgb * alpha, alpha);\n}\n";
const blitShaderCode = "/**\n * WebGPU Blit Shader\n * 用于将 render texture 绘制到屏幕,应用 transform\n */\n\nstruct BlitUniforms {\n offset: vec2f, // 屏幕空间偏移(NDC坐标)\n scale: f32, // 缩放因子\n}\n\n@group(0) @binding(0) var<uniform> blitUniforms: BlitUniforms;\n@group(1) @binding(0) var texture: texture_2d<f32>;\n@group(1) @binding(1) var textureSampler: sampler;\n\nstruct VertexInput {\n @location(0) position: vec2f,\n @location(1) texCoord: vec2f,\n}\n\nstruct VertexOutput {\n @builtin(position) position: vec4f,\n @location(0) texCoord: vec2f,\n}\n\n@vertex\nfn vertexMain(input: VertexInput) -> VertexOutput {\n var output: VertexOutput;\n // 应用缩放和偏移\n let pos = input.position * blitUniforms.scale + blitUniforms.offset;\n output.position = vec4f(pos, 0.0, 1.0);\n // WebGPU framebuffer 纹理坐标需要翻转 Y 轴\n // framebuffer 的内容是从上到下存储的,但纹理坐标 (0,0) 在左上角,所以需要翻转\n output.texCoord = vec2f(input.texCoord.x, 1.0 - input.texCoord.y);\n return output;\n}\n\n@fragment\nfn fragmentMain(input: VertexOutput) -> @location(0) vec4f {\n return textureSample(texture, textureSampler, input.texCoord);\n}\n\n";
 class WebGPURenderer {
 constructor(canvas, backgroundColor, alpha = true) {
 __publicField(this, "canvas");
@@ -6624,6 +6826,7 @@ class WebGPURenderer {
 __publicField(this, "device", null);
 __publicField(this, "context", null);
 __publicField(this, "renderPipeline", null);
+__publicField(this, "renderTexturePipeline", null);
 __publicField(this, "quadVertexBuffer", null);
 __publicField(this, "uniformBuffer", null);
 __publicField(this, "uniformBindGroup", null);
@@ -6635,6 +6838,15 @@ class WebGPURenderer {
 __publicField(this, "splatCount", 0);
 __publicField(this, "presentationFormat", "bgra8unorm");
 __publicField(this, "alpha");
+__publicField(this, "renderTexture", null);
+__publicField(this, "renderTextureView", null);
+__publicField(this, "depthTexture", null);
+__publicField(this, "framebufferWidth", 0);
+__publicField(this, "framebufferHeight", 0);
+__publicField(this, "blitPipeline", null);
+__publicField(this, "blitUniformBuffer", null);
+__publicField(this, "blitQuadBuffer", null);
+__publicField(this, "blitSampler", null);
 this.canvas = canvas;
 this.backgroundColor = backgroundColor || [0, 0, 0, 0];
 this.alpha = alpha;
@@ -6660,6 +6872,7 @@ class WebGPURenderer {
 this.createUniformBuffer();
 this.createQuadVertexBuffer();
 await this.createRenderPipeline();
+await this.createBlitPipeline();
 }
 createUniformBuffer() {
 if (!this.device)
@@ -6787,6 +7000,182 @@ class WebGPURenderer {
|
|
|
6787
7000
|
}
|
|
6788
7001
|
]
|
|
6789
7002
|
});
|
|
7003
|
+
this.renderTexturePipeline = this.device.createRenderPipeline({
|
|
7004
|
+
label: "3DGS Render Texture Pipeline",
|
|
7005
|
+
layout: pipelineLayout,
|
|
7006
|
+
vertex: {
|
|
7007
|
+
module: shaderModule,
|
|
7008
|
+
entryPoint: "vertexMain",
|
|
7009
|
+
buffers: vertexBufferLayouts
|
|
7010
|
+
},
|
|
7011
|
+
fragment: {
|
|
7012
|
+
module: shaderModule,
|
|
7013
|
+
entryPoint: "fragmentMain",
|
|
7014
|
+
targets: [
|
|
7015
|
+
{
|
|
7016
|
+
format: "rgba16float",
|
|
7017
|
+
blend: {
|
|
7018
|
+
color: {
|
|
7019
|
+
srcFactor: "one",
|
|
7020
|
+
dstFactor: "one-minus-src-alpha",
|
|
7021
|
+
operation: "add"
|
|
7022
|
+
},
|
|
7023
|
+
alpha: {
|
|
7024
|
+
srcFactor: "one",
|
|
7025
|
+
dstFactor: "one-minus-src-alpha",
|
|
7026
|
+
operation: "add"
|
|
7027
|
+
}
|
|
7028
|
+
}
|
|
7029
|
+
}
|
|
7030
|
+
]
|
|
7031
|
+
},
|
|
7032
|
+
primitive: {
|
|
7033
|
+
topology: "triangle-strip",
|
|
7034
|
+
stripIndexFormat: void 0
|
|
7035
|
+
},
|
|
7036
|
+
depthStencil: {
|
|
7037
|
+
format: "depth24plus",
|
|
7038
|
+
depthWriteEnabled: false,
|
|
7039
|
+
depthCompare: "always"
|
|
7040
|
+
}
|
|
7041
|
+
});
|
|
7042
|
+
}
|
|
7043
|
+
async createBlitPipeline() {
|
|
7044
|
+
if (!this.device)
|
|
7045
|
+
return;
|
|
7046
|
+
const blitShaderModule = this.device.createShaderModule({
|
|
7047
|
+
label: "Blit Shader",
|
|
7048
|
+
code: blitShaderCode
|
|
7049
|
+
});
|
|
7050
|
+
this.blitUniformBuffer = this.device.createBuffer({
|
|
7051
|
+
label: "Blit Uniform Buffer",
|
|
7052
|
+
size: 16,
|
|
7053
|
+
usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST
|
|
7054
|
+
});
|
|
7055
|
+
const blitUniformBindGroupLayout = this.device.createBindGroupLayout({
|
|
7056
|
+
label: "Blit Uniform Bind Group Layout",
|
|
7057
|
+
entries: [
|
|
7058
|
+
{
|
|
7059
|
+
binding: 0,
|
|
7060
|
+
visibility: GPUShaderStage.VERTEX,
|
|
7061
|
+
buffer: { type: "uniform" }
|
|
7062
|
+
}
|
|
7063
|
+
]
|
|
7064
|
+
});
|
|
7065
|
+
const blitTextureBindGroupLayout = this.device.createBindGroupLayout({
|
|
7066
|
+
label: "Blit Texture Bind Group Layout",
|
|
7067
|
+
entries: [
|
|
7068
|
+
{
|
|
7069
|
+
binding: 0,
|
|
7070
|
+
visibility: GPUShaderStage.FRAGMENT,
|
|
7071
|
+
texture: {}
|
|
7072
|
+
},
|
|
7073
|
+
{
|
|
7074
|
+
binding: 1,
|
|
7075
|
+
visibility: GPUShaderStage.FRAGMENT,
|
|
7076
|
+
sampler: {}
|
|
7077
|
+
}
|
|
7078
|
+
]
|
|
7079
|
+
});
|
|
7080
|
+
const blitPipelineLayout = this.device.createPipelineLayout({
|
|
7081
|
+
label: "Blit Pipeline Layout",
|
|
7082
|
+
bindGroupLayouts: [blitUniformBindGroupLayout, blitTextureBindGroupLayout]
|
|
7083
|
+
});
|
|
7084
|
+
this.blitSampler = this.device.createSampler({
|
|
7085
|
+
label: "Blit Sampler",
|
|
7086
|
+
magFilter: "linear",
|
|
7087
|
+
minFilter: "linear"
|
|
7088
|
+
});
|
|
7089
|
+
const quadData = new Float32Array([
|
|
7090
|
+
-1,
|
|
7091
|
+
-1,
|
|
7092
|
+
0,
|
|
7093
|
+
0,
|
|
7094
|
+
-1,
|
|
7095
|
+
1,
|
|
7096
|
+
0,
|
|
7097
|
+
1,
|
|
7098
|
+
1,
|
|
7099
|
+
-1,
|
|
7100
|
+
1,
|
|
7101
|
+
0,
|
|
7102
|
+
1,
|
|
7103
|
+
1,
|
|
7104
|
+
1,
|
|
7105
|
+
1
|
|
7106
|
+
]);
|
|
7107
|
+
this.blitQuadBuffer = this.device.createBuffer({
|
|
7108
|
+
label: "Blit Quad Buffer",
|
|
7109
|
+
size: quadData.byteLength,
|
|
7110
|
+
usage: GPUBufferUsage.VERTEX,
|
|
7111
|
+
mappedAtCreation: true
|
|
7112
|
+
});
|
|
7113
|
+
new Float32Array(this.blitQuadBuffer.getMappedRange()).set(quadData);
|
|
7114
|
+
this.blitQuadBuffer.unmap();
|
|
7115
|
+
this.blitPipeline = this.device.createRenderPipeline({
|
|
7116
|
+
label: "Blit Pipeline",
|
|
7117
|
+
layout: blitPipelineLayout,
|
|
7118
|
+
vertex: {
|
|
7119
|
+
module: blitShaderModule,
|
|
7120
|
+
entryPoint: "vertexMain",
|
|
7121
|
+
buffers: [
|
|
7122
|
+
{
|
|
7123
|
+
arrayStride: 16,
|
|
7124
|
+
stepMode: "vertex",
|
|
7125
|
+
attributes: [
|
|
7126
|
+
{
|
|
7127
|
+
shaderLocation: 0,
|
|
7128
|
+
offset: 0,
|
|
7129
|
+
format: "float32x2"
|
|
7130
|
+
},
|
|
7131
|
+
{
|
|
7132
|
+
shaderLocation: 1,
|
|
7133
|
+
offset: 8,
|
|
7134
|
+
format: "float32x2"
|
|
7135
|
+
}
|
|
7136
|
+
]
|
|
7137
|
+
}
|
|
7138
|
+
]
|
|
7139
|
+
},
|
|
7140
|
+
fragment: {
|
|
7141
|
+
module: blitShaderModule,
|
|
7142
|
+
entryPoint: "fragmentMain",
|
|
7143
|
+
targets: [
|
|
7144
|
+
{
|
|
7145
|
+
format: this.presentationFormat,
|
|
7146
|
+
blend: void 0
|
|
7147
|
+
}
|
|
7148
|
+
]
|
|
7149
|
+
},
|
|
7150
|
+
primitive: {
|
|
7151
|
+
topology: "triangle-strip"
|
|
7152
|
+
}
|
|
7153
|
+
});
|
|
7154
|
+
}
|
|
7155
|
+
createRenderTexture(width, height) {
|
|
7156
|
+
if (!this.device)
|
|
7157
|
+
return;
|
|
7158
|
+
if (this.renderTexture) {
|
|
7159
|
+
this.renderTexture.destroy();
|
|
7160
|
+
}
|
|
7161
|
+
if (this.depthTexture) {
|
|
7162
|
+
this.depthTexture.destroy();
|
|
7163
|
+
}
|
|
7164
|
+
this.renderTexture = this.device.createTexture({
|
|
7165
|
+
label: "Render Texture",
|
|
7166
|
+
size: [width, height],
|
|
7167
|
+
format: "rgba16float",
|
|
7168
|
+
usage: GPUTextureUsage.RENDER_ATTACHMENT | GPUTextureUsage.TEXTURE_BINDING
|
|
7169
|
+
});
|
|
7170
|
+
this.renderTextureView = this.renderTexture.createView();
|
|
7171
|
+
this.depthTexture = this.device.createTexture({
|
|
7172
|
+
label: "Depth Texture",
|
|
7173
|
+
size: [width, height],
|
|
7174
|
+
format: "depth24plus",
|
|
7175
|
+
usage: GPUTextureUsage.RENDER_ATTACHMENT
|
|
7176
|
+
});
|
|
7177
|
+
this.framebufferWidth = width;
|
|
7178
|
+
this.framebufferHeight = height;
|
|
6790
7179
|
}
|
|
6791
7180
|
loadSplatsFromPackedData(packedData, pointCount, sortOrder) {
|
|
6792
7181
|
if (!this.device)
|
|
@@ -6854,18 +7243,119 @@ class WebGPURenderer {
|
|
|
6854
7243
|
}
|
|
6855
7244
|
}
|
|
6856
7245
|
}
|
|
6857
|
-
render(viewMatrix, projectionMatrix, screenSize) {
|
|
6858
|
-
if (!this.device || !this.context || !this.renderPipeline)
|
|
7246
|
+
render(viewMatrix, projectionMatrix, screenSize, transform) {
|
|
7247
|
+
if (!this.device || !this.context || !this.renderPipeline || !this.renderTexturePipeline)
|
|
6859
7248
|
return;
|
|
6860
7249
|
if (this.splatCount === 0 || !this.storageBindGroup)
|
|
6861
7250
|
return;
|
|
7251
|
+
const [width, height] = screenSize;
|
|
7252
|
+
const needsTransform = transform && (transform.x !== 0 || transform.y !== 0 || transform.scale !== 1);
|
|
6862
7253
|
this.updateUniforms(viewMatrix, projectionMatrix, screenSize);
|
|
6863
|
-
const textureView = this.context.getCurrentTexture().createView();
|
|
6864
7254
|
const commandEncoder = this.device.createCommandEncoder({
|
|
6865
7255
|
label: "Render Command Encoder"
|
|
6866
7256
|
});
|
|
6867
|
-
|
|
6868
|
-
|
|
7257
|
+
if (needsTransform) {
|
|
7258
|
+
if (!this.renderTexture || this.framebufferWidth !== width || this.framebufferHeight !== height) {
|
|
7259
|
+
this.createRenderTexture(width, height);
|
|
7260
|
+
}
|
|
7261
|
+
const renderPass = commandEncoder.beginRenderPass({
|
|
7262
|
+
label: "Render to Texture Pass",
|
|
7263
|
+
colorAttachments: [
|
|
7264
|
+
{
|
|
7265
|
+
view: this.renderTextureView,
|
|
7266
|
+
clearValue: {
|
|
7267
|
+
r: this.backgroundColor[0],
|
|
7268
|
+
g: this.backgroundColor[1],
|
|
7269
|
+
b: this.backgroundColor[2],
|
|
7270
|
+
a: this.backgroundColor[3]
|
|
7271
|
+
},
|
|
7272
|
+
loadOp: "clear",
|
|
7273
|
+
storeOp: "store"
|
|
7274
|
+
}
|
|
7275
|
+
],
|
|
7276
|
+
depthStencilAttachment: {
|
|
7277
|
+
view: this.depthTexture.createView(),
|
|
7278
|
+
depthLoadOp: "clear",
|
|
7279
|
+
depthStoreOp: "store",
|
|
7280
|
+
depthClearValue: 1
|
|
7281
|
+
}
|
|
7282
|
+
});
|
|
7283
|
+
renderPass.setPipeline(this.renderTexturePipeline);
|
|
7284
|
+
renderPass.setBindGroup(0, this.uniformBindGroup);
|
|
7285
|
+
renderPass.setBindGroup(1, this.storageBindGroup);
|
|
7286
|
+
renderPass.setVertexBuffer(0, this.quadVertexBuffer);
|
|
7287
|
+
renderPass.draw(4, this.splatCount);
|
|
7288
|
+
renderPass.end();
|
|
7289
|
+
this.blitToScreen(commandEncoder, transform);
|
|
7290
|
+
} else {
|
|
7291
|
+
const textureView = this.context.getCurrentTexture().createView();
|
|
7292
|
+
const renderPass = commandEncoder.beginRenderPass({
|
|
7293
|
+
label: "Render Pass",
|
|
7294
|
+
colorAttachments: [
|
|
7295
|
+
{
|
|
7296
|
+
view: textureView,
|
|
7297
|
+
clearValue: {
|
|
7298
|
+
r: this.backgroundColor[0],
|
|
7299
|
+
g: this.backgroundColor[1],
|
|
7300
|
+
b: this.backgroundColor[2],
|
|
7301
|
+
a: this.backgroundColor[3]
|
|
7302
|
+
},
|
|
7303
|
+
loadOp: "clear",
|
|
7304
|
+
storeOp: "store"
|
|
7305
|
+
}
|
|
7306
|
+
]
|
|
7307
|
+
});
|
|
7308
|
+
renderPass.setPipeline(this.renderPipeline);
|
|
7309
|
+
renderPass.setBindGroup(0, this.uniformBindGroup);
|
|
7310
|
+
renderPass.setBindGroup(1, this.storageBindGroup);
|
|
7311
|
+
renderPass.setVertexBuffer(0, this.quadVertexBuffer);
|
|
7312
|
+
renderPass.draw(4, this.splatCount);
|
|
7313
|
+
renderPass.end();
|
|
7314
|
+
}
|
|
7315
|
+
this.device.queue.submit([commandEncoder.finish()]);
|
|
7316
|
+
}
|
|
7317
|
+
blitToScreen(commandEncoder, transform) {
|
|
7318
|
+
if (!this.device || !this.blitPipeline || !this.renderTextureView || !this.blitQuadBuffer || !this.blitUniformBuffer || !this.blitSampler) {
|
|
7319
|
+
logger.error(`[WebGPURenderer] Blit failed: device=${!!this.device}, pipeline=${!!this.blitPipeline}, texture=${!!this.renderTextureView}, buffer=${!!this.blitQuadBuffer}, uniform=${!!this.blitUniformBuffer}, sampler=${!!this.blitSampler}`);
|
|
7320
|
+
return;
|
|
7321
|
+
}
|
|
7322
|
+
const offsetXNDC = transform.x;
|
|
7323
|
+
const offsetYNDC = -transform.y;
|
|
7324
|
+
const uniformData = new ArrayBuffer(16);
|
|
7325
|
+
const float32View = new Float32Array(uniformData);
|
|
7326
|
+
float32View[0] = offsetXNDC;
|
|
7327
|
+
float32View[1] = offsetYNDC;
|
|
7328
|
+
float32View[2] = transform.scale;
|
|
7329
|
+
this.device.queue.writeBuffer(this.blitUniformBuffer, 0, uniformData);
|
|
7330
|
+
const blitUniformBindGroupLayout = this.blitPipeline.getBindGroupLayout(0);
|
|
7331
|
+
const blitTextureBindGroupLayout = this.blitPipeline.getBindGroupLayout(1);
|
|
7332
|
+
const blitUniformBindGroup = this.device.createBindGroup({
|
|
7333
|
+
label: "Blit Uniform Bind Group",
|
|
7334
|
+
layout: blitUniformBindGroupLayout,
|
|
7335
|
+
entries: [
|
|
7336
|
+
{
|
|
7337
|
+
binding: 0,
|
|
7338
|
+
resource: { buffer: this.blitUniformBuffer }
|
|
7339
|
+
}
|
|
7340
|
+
]
|
|
7341
|
+
});
|
|
7342
|
+
const blitTextureBindGroup = this.device.createBindGroup({
|
|
7343
|
+
label: "Blit Texture Bind Group",
|
|
7344
|
+
layout: blitTextureBindGroupLayout,
|
|
7345
|
+
entries: [
|
|
7346
|
+
{
|
|
7347
|
+
binding: 0,
|
|
7348
|
+
resource: this.renderTextureView
|
|
7349
|
+
},
|
|
7350
|
+
{
|
|
7351
|
+
binding: 1,
|
|
7352
|
+
resource: this.blitSampler
|
|
7353
|
+
}
|
|
7354
|
+
]
|
|
7355
|
+
});
|
|
7356
|
+
const textureView = this.context.getCurrentTexture().createView();
|
|
7357
|
+
const blitPass = commandEncoder.beginRenderPass({
|
|
7358
|
+
label: "Blit Pass",
|
|
6869
7359
|
colorAttachments: [
|
|
6870
7360
|
{
|
|
6871
7361
|
view: textureView,
|
|
@@ -6880,13 +7370,12 @@ class WebGPURenderer {
 }
 ]
 });
-
-
-
-
-
-
-this.device.queue.submit([commandEncoder.finish()]);
+blitPass.setPipeline(this.blitPipeline);
+blitPass.setBindGroup(0, blitUniformBindGroup);
+blitPass.setBindGroup(1, blitTextureBindGroup);
+blitPass.setVertexBuffer(0, this.blitQuadBuffer);
+blitPass.draw(4);
+blitPass.end();
 }
 updateUniforms(viewMatrix, projectionMatrix, screenSize) {
 if (!this.device || !this.uniformBuffer)
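Bind group 0 in the blit pass above is fed by the 16-byte blit uniform buffer created earlier in this file (vec2f offset + f32 scale, with one unused float of padding). A small sketch of that packing, assuming WebGPU type declarations are available; `device` and `blitUniformBuffer` stand in for the renderer's own fields:

```typescript
// Sketch of the 16-byte blit uniform packing used by the pass above.
declare const device: GPUDevice;
declare const blitUniformBuffer: GPUBuffer;

function writeBlitUniforms(x: number, y: number, scale: number): void {
  const data = new Float32Array(4); // 16 bytes total; data[3] is padding
  data[0] = x;                      // offset.x in NDC
  data[1] = -y;                     // offset.y, y flipped as in blitToScreen
  data[2] = scale;                  // uniform scale factor
  device.queue.writeBuffer(blitUniformBuffer, 0, data);
}
```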
@@ -6905,21 +7394,33 @@ class WebGPURenderer {
 this.backgroundColor = backgroundColor;
 }
 dispose() {
-var _a, _b, _c, _d, _e;
+var _a, _b, _c, _d, _e, _f, _g, _h, _i;
 (_a = this.sortIndexBuffer) == null ? void 0 : _a.destroy();
 (_b = this.splatDataBuffer) == null ? void 0 : _b.destroy();
 (_c = this.quadVertexBuffer) == null ? void 0 : _c.destroy();
 (_d = this.uniformBuffer) == null ? void 0 : _d.destroy();
-(_e = this.device) == null ? void 0 : _e.destroy();
+(_e = this.renderTexture) == null ? void 0 : _e.destroy();
+(_f = this.depthTexture) == null ? void 0 : _f.destroy();
+(_g = this.blitUniformBuffer) == null ? void 0 : _g.destroy();
+(_h = this.blitQuadBuffer) == null ? void 0 : _h.destroy();
+(_i = this.device) == null ? void 0 : _i.destroy();
 this.sortIndexBuffer = null;
 this.splatDataBuffer = null;
 this.quadVertexBuffer = null;
 this.uniformBuffer = null;
 this.uniformBindGroup = null;
 this.storageBindGroup = null;
+this.renderTexture = null;
+this.renderTextureView = null;
+this.depthTexture = null;
+this.blitUniformBuffer = null;
+this.blitQuadBuffer = null;
+this.blitPipeline = null;
+this.blitSampler = null;
 this.device = null;
 this.context = null;
 this.renderPipeline = null;
+this.renderTexturePipeline = null;
 }
 }
 class RenderSystem {
@@ -6942,6 +7443,9 @@ class RenderSystem {
 __publicField(this, "originalPackedData", null);
 __publicField(this, "renderTime", 0);
 __publicField(this, "sortTime", 0);
+__publicField(this, "offsetX", 0);
+__publicField(this, "offsetY", 0);
+__publicField(this, "scale", 1);
 this.options = options;
 this.canvas = options.canvas;
 this.backgroundColor = options.backgroundColor || [0, 0, 0, 0];
@@ -7010,11 +7514,21 @@ class RenderSystem {
 this.renderer.render(
 this.viewMatrix,
 this.projectionMatrix,
-[this.canvas.width, this.canvas.height]
+[this.canvas.width, this.canvas.height],
+this.offsetX !== 0 || this.offsetY !== 0 || this.scale !== 1 ? { x: this.offsetX, y: this.offsetY, scale: this.scale } : void 0
 );
 const renderTime = performance.now() - startRender;
 this.renderTime = renderTime;
 }
+setTransform(x, y, scale = 1) {
+logger.log(`[RenderSystem] Setting transform: x=${x}, y=${y}, scale=${scale}`);
+this.offsetX = x;
+this.offsetY = y;
+this.scale = scale;
+}
+getTransform() {
+return { x: this.offsetX, y: this.offsetY, scale: this.scale };
+}
 updateCamera(params) {
 Object.assign(this.camera, params);
 this.updateCameraAspect();
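For reference, a sketch of how the transform stored by `setTransform` above reaches the blit stage: following the `blitToScreen` code elsewhere in this diff, the renderer only receives a transform when it differs from the identity, uses `x` as the NDC offset directly, negates `y`, and passes `scale` through unchanged.

```typescript
// Illustrative mapping from the stored transform to the blit uniforms,
// mirroring blitToScreen in this diff (offsetXNDC = x, offsetYNDC = -y).
interface Transform { x: number; y: number; scale: number }

function toBlitUniforms(t: Transform): { offset: [number, number]; scale: number } {
  return { offset: [t.x, -t.y], scale: t.scale };
}

// Example: setTransform(0.5, 0.25, 2) ends up as offset (0.5, -0.25), scale 2.
```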
@@ -7930,6 +8444,16 @@ class AvatarView {
 img.src = imageUrl;
 }
 }
+setTransform(x, y, scale) {
+if (!this.renderSystem) {
+throw new Error("Render system not initialized");
+}
+logger.log(`[AvatarView] Setting transform: x=${x}, y=${y}, scale=${scale}`);
+this.renderSystem.setTransform(x, y, scale);
+if (this.isInitialized && this.renderSystem) {
+this.renderSystem.renderFrame();
+}
+}
 }
 export {
 APP_CONFIG as A,
package/dist/index.js
CHANGED
package/dist/renderer/renderer.d.ts
CHANGED
@@ -1,7 +1,12 @@
+export interface Transform {
+x: number;
+y: number;
+scale: number;
+}
 export interface I3DGSRenderer {
 initialize: () => Promise<void>;
 loadSplatsFromPackedData: (packedData: Float32Array, pointCount: number, sortOrder?: Uint32Array) => void;
-render: (viewMatrix: Float32Array, projectionMatrix: Float32Array, screenSize: [number, number]) => void;
+render: (viewMatrix: Float32Array, projectionMatrix: Float32Array, screenSize: [number, number], transform?: Transform) => void;
 dispose: () => void;
 }
 export type RenderBackend = 'webgl' | 'webgpu';
package/dist/renderer/webgl/webglRenderer.d.ts
CHANGED
@@ -1,4 +1,4 @@
-import { I3DGSRenderer } from '../renderer';
+import { I3DGSRenderer, Transform } from '../renderer';
 export declare class WebGLRenderer implements I3DGSRenderer {
 private canvas;
 private backgroundColor;
@@ -12,6 +12,16 @@ export declare class WebGLRenderer implements I3DGSRenderer {
 private splatCount;
 private isInitialized;
 private splatBufferSize;
+private framebuffer;
+private renderTexture;
+private depthBuffer;
+private framebufferWidth;
+private framebufferHeight;
+private blitShaderProgram;
+private blitUniformLocations;
+private blitAttributeLocations;
+private blitQuadBuffer;
+private blitVAO;
 constructor(canvas: HTMLCanvasElement, backgroundColor?: [number, number, number, number], alpha?: boolean);
 private alpha;
 initialize(): Promise<void>;
@@ -22,8 +32,12 @@ export declare class WebGLRenderer implements I3DGSRenderer {
 loadSplatsFromPackedData(packedData: Float32Array, pointCount: number, _sortOrder?: Uint32Array): void;
 private uploadToGPU;
 private setupVertexAttributes;
-render(viewMatrix: Float32Array, projectionMatrix: Float32Array, screenSize: [number, number]): void;
+render(viewMatrix: Float32Array, projectionMatrix: Float32Array, screenSize: [number, number], transform?: Transform): void;
 private createShaderProgram;
 updateBackgroundColor(backgroundColor: [number, number, number, number]): void;
+private render3DGS;
+private createFramebuffer;
+private createBlitShader;
+private blitToScreen;
 dispose(): void;
 }
package/dist/renderer/webgpu/webgpuRenderer.d.ts
CHANGED
@@ -1,10 +1,11 @@
-import { I3DGSRenderer } from '../renderer';
+import { I3DGSRenderer, Transform } from '../renderer';
 export declare class WebGPURenderer implements I3DGSRenderer {
 private canvas;
 private backgroundColor;
 private device;
 private context;
 private renderPipeline;
+private renderTexturePipeline;
 private quadVertexBuffer;
 private uniformBuffer;
 private uniformBindGroup;
@@ -16,13 +17,25 @@ export declare class WebGPURenderer implements I3DGSRenderer {
 private splatCount;
 private presentationFormat;
 private alpha;
+private renderTexture;
+private renderTextureView;
+private depthTexture;
+private framebufferWidth;
+private framebufferHeight;
+private blitPipeline;
+private blitUniformBuffer;
+private blitQuadBuffer;
+private blitSampler;
 constructor(canvas: HTMLCanvasElement, backgroundColor?: [number, number, number, number], alpha?: boolean);
 initialize(): Promise<void>;
 private createUniformBuffer;
 private createQuadVertexBuffer;
 private createRenderPipeline;
+private createBlitPipeline;
+private createRenderTexture;
 loadSplatsFromPackedData(packedData: Float32Array, pointCount: number, sortOrder?: Uint32Array): void;
-render(viewMatrix: Float32Array, projectionMatrix: Float32Array, screenSize: [number, number]): void;
+render(viewMatrix: Float32Array, projectionMatrix: Float32Array, screenSize: [number, number], transform?: Transform): void;
+private blitToScreen;
 private updateUniforms;
 updateBackgroundColor(backgroundColor: [number, number, number, number]): void;
 dispose(): void;
package/package.json
CHANGED