reze-engine 0.3.6 → 0.3.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
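For orientation, here is a minimal usage sketch assembled from the public methods visible in this diff (constructor options, init, loadModel, loadAnimation, playAnimation, runRenderLoop, getStats, dispose). The canvas selector, file paths, and option values are illustrative and not part of the package.

  const engine = new Engine(document.querySelector("canvas"), {
    bloomIntensity: 0.12,
    rimLightIntensity: 0.45,
  });
  await engine.init(); // throws if the browser has no WebGPU support
  await engine.loadModel("/models/example/model.pmx"); // illustrative path
  await engine.loadAnimation("/motions/example.vmd"); // illustrative path
  engine.playAnimation();
  engine.runRenderLoop(() => {
    const { fps, frameTime } = engine.getStats();
    console.log(fps, frameTime);
  });
  // Later: engine.stopRenderLoop(); engine.dispose();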
@@ -0,0 +1,1575 @@
1
+ import { Camera } from "./camera";
2
+ import { Quat, Vec3 } from "reze-mmd";
3
+ import { Quat as QuatMath } from "./math";
4
+ import { PmxLoader } from "./pmx-loader";
5
+ import { Player } from "./player";
6
+ export class Engine {
7
+ constructor(canvas, options) {
8
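+ // 36 floats: view matrix (16) + projection matrix (16) + camera position (3) + 1 padding float, mirroring the CameraUniforms struct below.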
+ this.cameraMatrixData = new Float32Array(36);
9
+ this.cameraDistance = 26.6;
10
+ this.cameraTarget = new Vec3(0, 12.5, 0);
11
+ this.lightData = new Float32Array(4);
12
+ this.resizeObserver = null;
13
+ this.sampleCount = 4;
14
+ // Constants
15
+ this.STENCIL_EYE_VALUE = 1;
16
+ this.BLOOM_DOWNSCALE_FACTOR = 2;
17
+ // Ambient light settings
18
+ this.ambientColor = new Vec3(1.0, 1.0, 1.0);
19
+ // Bloom settings
20
+ this.bloomThreshold = Engine.DEFAULT_BLOOM_THRESHOLD;
21
+ this.bloomIntensity = Engine.DEFAULT_BLOOM_INTENSITY;
22
+ // Rim light settings
23
+ this.rimLightIntensity = Engine.DEFAULT_RIM_LIGHT_INTENSITY;
24
+ this.currentModel = null;
25
+ this.modelDir = "";
26
+ this.physics = null;
27
+ this.textureCache = new Map();
28
+ this.vertexBufferNeedsUpdate = false;
29
+ // Unified draw call list
30
+ this.drawCalls = [];
31
+ this.lastFpsUpdate = performance.now();
32
+ this.framesSinceLastUpdate = 0;
33
+ this.lastFrameTime = performance.now();
34
+ this.frameTimeSum = 0;
35
+ this.frameTimeCount = 0;
36
+ this.stats = {
37
+ fps: 0,
38
+ frameTime: 0,
39
+ };
40
+ this.animationFrameId = null;
41
+ this.renderLoopCallback = null;
42
+ this.player = new Player();
43
+ this.hasAnimation = false; // Set to true when loadAnimation is called
44
+ this.canvas = canvas;
45
+ if (options) {
46
+ this.ambientColor = options.ambientColor ?? new Vec3(1.0, 1.0, 1.0);
47
+ this.bloomIntensity = options.bloomIntensity ?? Engine.DEFAULT_BLOOM_INTENSITY;
48
+ this.rimLightIntensity = options.rimLightIntensity ?? Engine.DEFAULT_RIM_LIGHT_INTENSITY;
49
+ this.cameraDistance = options.cameraDistance ?? Engine.DEFAULT_CAMERA_DISTANCE;
50
+ this.cameraTarget = options.cameraTarget ?? Engine.DEFAULT_CAMERA_TARGET;
51
+ }
52
+ }
53
+ // Step 1: Get WebGPU device and context
54
+ async init() {
55
+ const adapter = await navigator.gpu?.requestAdapter();
56
+ const device = await adapter?.requestDevice();
57
+ if (!device) {
58
+ throw new Error("WebGPU is not supported in this browser.");
59
+ }
60
+ this.device = device;
61
+ const context = this.canvas.getContext("webgpu");
62
+ if (!context) {
63
+ throw new Error("Failed to get WebGPU context.");
64
+ }
65
+ this.context = context;
66
+ this.presentationFormat = navigator.gpu.getPreferredCanvasFormat();
67
+ this.context.configure({
68
+ device: this.device,
69
+ format: this.presentationFormat,
70
+ alphaMode: "premultiplied",
71
+ });
72
+ this.setupCamera();
73
+ this.setupLighting();
74
+ this.createPipelines();
75
+ this.createBloomPipelines();
76
+ this.setupResize();
77
+ }
78
+ createRenderPipeline(config) {
79
+ return this.device.createRenderPipeline({
80
+ label: config.label,
81
+ layout: config.layout,
82
+ vertex: {
83
+ module: config.shaderModule,
84
+ buffers: config.vertexBuffers,
85
+ },
86
+ fragment: config.fragmentTarget
87
+ ? {
88
+ module: config.shaderModule,
89
+ entryPoint: config.fragmentEntryPoint,
90
+ targets: [config.fragmentTarget],
91
+ }
92
+ : undefined,
93
+ primitive: { cullMode: config.cullMode ?? "none" },
94
+ depthStencil: config.depthStencil,
95
+ multisample: config.multisample ?? { count: this.sampleCount },
96
+ });
97
+ }
98
+ createPipelines() {
99
+ this.materialSampler = this.device.createSampler({
100
+ magFilter: "linear",
101
+ minFilter: "linear",
102
+ addressModeU: "repeat",
103
+ addressModeV: "repeat",
104
+ });
105
+ // Shared vertex buffer layouts
106
+ const fullVertexBuffers = [
107
+ {
108
+ arrayStride: 8 * 4,
109
+ attributes: [
110
+ { shaderLocation: 0, offset: 0, format: "float32x3" },
111
+ { shaderLocation: 1, offset: 3 * 4, format: "float32x3" },
112
+ { shaderLocation: 2, offset: 6 * 4, format: "float32x2" },
113
+ ],
114
+ },
115
+ {
116
+ arrayStride: 4 * 2,
117
+ attributes: [{ shaderLocation: 3, offset: 0, format: "uint16x4" }],
118
+ },
119
+ {
120
+ arrayStride: 4,
121
+ attributes: [{ shaderLocation: 4, offset: 0, format: "unorm8x4" }],
122
+ },
123
+ ];
124
+ const outlineVertexBuffers = [
125
+ {
126
+ arrayStride: 8 * 4,
127
+ attributes: [
128
+ { shaderLocation: 0, offset: 0, format: "float32x3" },
129
+ { shaderLocation: 1, offset: 3 * 4, format: "float32x3" },
130
+ ],
131
+ },
132
+ {
133
+ arrayStride: 4 * 2,
134
+ attributes: [{ shaderLocation: 3, offset: 0, format: "uint16x4" }],
135
+ },
136
+ {
137
+ arrayStride: 4,
138
+ attributes: [{ shaderLocation: 4, offset: 0, format: "unorm8x4" }],
139
+ },
140
+ ];
141
+ const depthOnlyVertexBuffers = [
142
+ {
143
+ arrayStride: 8 * 4,
144
+ attributes: [
145
+ { shaderLocation: 0, offset: 0, format: "float32x3" },
146
+ { shaderLocation: 1, offset: 3 * 4, format: "float32x3" },
147
+ ],
148
+ },
149
+ {
150
+ arrayStride: 4 * 2,
151
+ attributes: [{ shaderLocation: 3, offset: 0, format: "uint16x4" }],
152
+ },
153
+ {
154
+ arrayStride: 4,
155
+ attributes: [{ shaderLocation: 4, offset: 0, format: "unorm8x4" }],
156
+ },
157
+ ];
158
+ const standardBlend = {
159
+ format: this.presentationFormat,
160
+ blend: {
161
+ color: {
162
+ srcFactor: "src-alpha",
163
+ dstFactor: "one-minus-src-alpha",
164
+ operation: "add",
165
+ },
166
+ alpha: {
167
+ srcFactor: "one",
168
+ dstFactor: "one-minus-src-alpha",
169
+ operation: "add",
170
+ },
171
+ },
172
+ };
173
+ const shaderModule = this.device.createShaderModule({
174
+ label: "model shaders",
175
+ code: /* wgsl */ `
176
+ struct CameraUniforms {
177
+ view: mat4x4f,
178
+ projection: mat4x4f,
179
+ viewPos: vec3f,
180
+ _padding: f32,
181
+ };
182
+
183
+ struct LightUniforms {
184
+ ambientColor: vec3f,
185
+ };
186
+
187
+ struct MaterialUniforms {
188
+ alpha: f32,
189
+ alphaMultiplier: f32,
190
+ rimIntensity: f32,
191
+ _padding1: f32,
192
+ rimColor: vec3f,
193
+ isOverEyes: f32, // 1.0 if rendering over eyes, 0.0 otherwise
194
+ };
195
+
196
+ struct VertexOutput {
197
+ @builtin(position) position: vec4f,
198
+ @location(0) normal: vec3f,
199
+ @location(1) uv: vec2f,
200
+ @location(2) worldPos: vec3f,
201
+ };
202
+
203
+ @group(0) @binding(0) var<uniform> camera: CameraUniforms;
204
+ @group(0) @binding(1) var<uniform> light: LightUniforms;
205
+ @group(0) @binding(2) var diffuseTexture: texture_2d<f32>;
206
+ @group(0) @binding(3) var diffuseSampler: sampler;
207
+ @group(0) @binding(4) var<storage, read> skinMats: array<mat4x4f>;
208
+ @group(0) @binding(5) var<uniform> material: MaterialUniforms;
209
+
210
+ @vertex fn vs(
211
+ @location(0) position: vec3f,
212
+ @location(1) normal: vec3f,
213
+ @location(2) uv: vec2f,
214
+ @location(3) joints0: vec4<u32>,
215
+ @location(4) weights0: vec4<f32>
216
+ ) -> VertexOutput {
217
+ var output: VertexOutput;
218
+ let pos4 = vec4f(position, 1.0);
219
+
220
+ // Branchless weight normalization (avoids GPU branch divergence)
221
+ let weightSum = weights0.x + weights0.y + weights0.z + weights0.w;
222
+ let invWeightSum = select(1.0, 1.0 / weightSum, weightSum > 0.0001);
223
+ let normalizedWeights = select(vec4f(1.0, 0.0, 0.0, 0.0), weights0 * invWeightSum, weightSum > 0.0001);
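+ // Example: weights (0.5, 0.3, 0.0, 0.0) sum to 0.8 and rescale to (0.625, 0.375, 0.0, 0.0);
+ // an all-zero weight vector falls back to (1, 0, 0, 0) so the vertex rigidly follows joint 0.
+ // select(falseValue, trueValue, condition) evaluates both operands, so no branch is taken.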
224
+
225
+ var skinnedPos = vec4f(0.0, 0.0, 0.0, 0.0);
226
+ var skinnedNrm = vec3f(0.0, 0.0, 0.0);
227
+ for (var i = 0u; i < 4u; i++) {
228
+ let j = joints0[i];
229
+ let w = normalizedWeights[i];
230
+ let m = skinMats[j];
231
+ skinnedPos += (m * pos4) * w;
232
+ let r3 = mat3x3f(m[0].xyz, m[1].xyz, m[2].xyz);
233
+ skinnedNrm += (r3 * normal) * w;
234
+ }
235
+ let worldPos = skinnedPos.xyz;
236
+ output.position = camera.projection * camera.view * vec4f(worldPos, 1.0);
237
+ output.normal = normalize(skinnedNrm);
238
+ output.uv = uv;
239
+ output.worldPos = worldPos;
240
+ return output;
241
+ }
242
+
243
+ @fragment fn fs(input: VertexOutput) -> @location(0) vec4f {
244
+ // Early alpha test - discard before expensive calculations
245
+ var finalAlpha = material.alpha * material.alphaMultiplier;
246
+ if (material.isOverEyes > 0.5) {
247
+ finalAlpha *= 0.5; // Hair over eyes gets 50% alpha
248
+ }
249
+ if (finalAlpha < 0.001) {
250
+ discard;
251
+ }
252
+
253
+ let n = normalize(input.normal);
254
+ let albedo = textureSample(diffuseTexture, diffuseSampler, input.uv).rgb;
255
+
256
+ let lightAccum = light.ambientColor;
257
+
258
+ // Rim light calculation
259
+ let viewDir = normalize(camera.viewPos - input.worldPos);
260
+ var rimFactor = 1.0 - max(dot(n, viewDir), 0.0);
261
+ rimFactor = rimFactor * rimFactor; // Optimized: direct multiply instead of pow(x, 2.0)
262
+ let rimLight = material.rimColor * material.rimIntensity * rimFactor;
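+ // rimFactor peaks at grazing angles (dot(n, viewDir) near 0), so the rim term brightens silhouettes.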
263
+
264
+ let color = albedo * lightAccum + rimLight;
265
+
266
+ return vec4f(color, finalAlpha);
267
+ }
268
+ `,
269
+ });
270
+ // Create explicit bind group layout for all pipelines using the main shader
271
+ this.mainBindGroupLayout = this.device.createBindGroupLayout({
272
+ label: "main material bind group layout",
273
+ entries: [
274
+ { binding: 0, visibility: GPUShaderStage.VERTEX | GPUShaderStage.FRAGMENT, buffer: { type: "uniform" } }, // camera
275
+ { binding: 1, visibility: GPUShaderStage.FRAGMENT, buffer: { type: "uniform" } }, // light
276
+ { binding: 2, visibility: GPUShaderStage.FRAGMENT, texture: {} }, // diffuseTexture
277
+ { binding: 3, visibility: GPUShaderStage.FRAGMENT, sampler: {} }, // diffuseSampler
278
+ { binding: 4, visibility: GPUShaderStage.VERTEX, buffer: { type: "read-only-storage" } }, // skinMats
279
+ { binding: 5, visibility: GPUShaderStage.FRAGMENT, buffer: { type: "uniform" } }, // material
280
+ ],
281
+ });
282
+ const mainPipelineLayout = this.device.createPipelineLayout({
283
+ label: "main pipeline layout",
284
+ bindGroupLayouts: [this.mainBindGroupLayout],
285
+ });
286
+ this.modelPipeline = this.createRenderPipeline({
287
+ label: "model pipeline",
288
+ layout: mainPipelineLayout,
289
+ shaderModule,
290
+ vertexBuffers: fullVertexBuffers,
291
+ fragmentTarget: standardBlend,
292
+ cullMode: "none",
293
+ depthStencil: {
294
+ format: "depth24plus-stencil8",
295
+ depthWriteEnabled: true,
296
+ depthCompare: "less-equal",
297
+ },
298
+ });
299
+ // Create bind group layout for outline pipelines
300
+ this.outlineBindGroupLayout = this.device.createBindGroupLayout({
301
+ label: "outline bind group layout",
302
+ entries: [
303
+ { binding: 0, visibility: GPUShaderStage.VERTEX | GPUShaderStage.FRAGMENT, buffer: { type: "uniform" } }, // camera
304
+ { binding: 1, visibility: GPUShaderStage.VERTEX | GPUShaderStage.FRAGMENT, buffer: { type: "uniform" } }, // material
305
+ { binding: 2, visibility: GPUShaderStage.VERTEX, buffer: { type: "read-only-storage" } }, // skinMats
306
+ ],
307
+ });
308
+ const outlinePipelineLayout = this.device.createPipelineLayout({
309
+ label: "outline pipeline layout",
310
+ bindGroupLayouts: [this.outlineBindGroupLayout],
311
+ });
312
+ const outlineShaderModule = this.device.createShaderModule({
313
+ label: "outline shaders",
314
+ code: /* wgsl */ `
315
+ struct CameraUniforms {
316
+ view: mat4x4f,
317
+ projection: mat4x4f,
318
+ viewPos: vec3f,
319
+ _padding: f32,
320
+ };
321
+
322
+ struct MaterialUniforms {
323
+ edgeColor: vec4f,
324
+ edgeSize: f32,
325
+ isOverEyes: f32, // 1.0 if rendering over eyes, 0.0 otherwise (for hair outlines)
326
+ _padding1: f32,
327
+ _padding2: f32,
328
+ };
329
+
330
+ @group(0) @binding(0) var<uniform> camera: CameraUniforms;
331
+ @group(0) @binding(1) var<uniform> material: MaterialUniforms;
332
+ @group(0) @binding(2) var<storage, read> skinMats: array<mat4x4f>;
333
+
334
+ struct VertexOutput {
335
+ @builtin(position) position: vec4f,
336
+ };
337
+
338
+ @vertex fn vs(
339
+ @location(0) position: vec3f,
340
+ @location(1) normal: vec3f,
341
+ @location(3) joints0: vec4<u32>,
342
+ @location(4) weights0: vec4<f32>
343
+ ) -> VertexOutput {
344
+ var output: VertexOutput;
345
+ let pos4 = vec4f(position, 1.0);
346
+
347
+ // Branchless weight normalization (avoids GPU branch divergence)
348
+ let weightSum = weights0.x + weights0.y + weights0.z + weights0.w;
349
+ let invWeightSum = select(1.0, 1.0 / weightSum, weightSum > 0.0001);
350
+ let normalizedWeights = select(vec4f(1.0, 0.0, 0.0, 0.0), weights0 * invWeightSum, weightSum > 0.0001);
351
+
352
+ var skinnedPos = vec4f(0.0, 0.0, 0.0, 0.0);
353
+ var skinnedNrm = vec3f(0.0, 0.0, 0.0);
354
+ for (var i = 0u; i < 4u; i++) {
355
+ let j = joints0[i];
356
+ let w = normalizedWeights[i];
357
+ let m = skinMats[j];
358
+ skinnedPos += (m * pos4) * w;
359
+ let r3 = mat3x3f(m[0].xyz, m[1].xyz, m[2].xyz);
360
+ skinnedNrm += (r3 * normal) * w;
361
+ }
362
+ let worldPos = skinnedPos.xyz;
363
+ let worldNormal = normalize(skinnedNrm);
364
+
365
+ // MMD invert hull: expand vertices outward along normals
366
+ let scaleFactor = 0.01;
367
+ let expandedPos = worldPos + worldNormal * material.edgeSize * scaleFactor;
368
+ output.position = camera.projection * camera.view * vec4f(expandedPos, 1.0);
369
+ return output;
370
+ }
371
+
372
+ @fragment fn fs() -> @location(0) vec4f {
373
+ var color = material.edgeColor;
374
+
375
+ if (material.isOverEyes > 0.5) {
376
+ color.a *= 0.5; // Hair outlines over eyes get 50% alpha
377
+ }
378
+
379
+ return color;
380
+ }
381
+ `,
382
+ });
383
+ this.outlinePipeline = this.createRenderPipeline({
384
+ label: "outline pipeline",
385
+ layout: outlinePipelineLayout,
386
+ shaderModule: outlineShaderModule,
387
+ vertexBuffers: outlineVertexBuffers,
388
+ fragmentTarget: standardBlend,
389
+ cullMode: "back",
390
+ depthStencil: {
391
+ format: "depth24plus-stencil8",
392
+ depthWriteEnabled: true,
393
+ depthCompare: "less-equal",
394
+ },
395
+ });
396
+ // Hair outline pipeline
397
+ this.hairOutlinePipeline = this.createRenderPipeline({
398
+ label: "hair outline pipeline",
399
+ layout: outlinePipelineLayout,
400
+ shaderModule: outlineShaderModule,
401
+ vertexBuffers: outlineVertexBuffers,
402
+ fragmentTarget: standardBlend,
403
+ cullMode: "back",
404
+ depthStencil: {
405
+ format: "depth24plus-stencil8",
406
+ depthWriteEnabled: false,
407
+ depthCompare: "less-equal",
408
+ depthBias: -0.0001,
409
+ depthBiasSlopeScale: 0.0,
410
+ depthBiasClamp: 0.0,
411
+ },
412
+ });
413
+ // Eye overlay pipeline (renders after opaque, writes stencil)
414
+ this.eyePipeline = this.createRenderPipeline({
415
+ label: "eye overlay pipeline",
416
+ layout: mainPipelineLayout,
417
+ shaderModule,
418
+ vertexBuffers: fullVertexBuffers,
419
+ fragmentTarget: standardBlend,
420
+ cullMode: "front",
421
+ depthStencil: {
422
+ format: "depth24plus-stencil8",
423
+ depthWriteEnabled: true,
424
+ depthCompare: "less-equal",
425
+ depthBias: -0.00005,
426
+ depthBiasSlopeScale: 0.0,
427
+ depthBiasClamp: 0.0,
428
+ stencilFront: {
429
+ compare: "always",
430
+ failOp: "keep",
431
+ depthFailOp: "keep",
432
+ passOp: "replace",
433
+ },
434
+ stencilBack: {
435
+ compare: "always",
436
+ failOp: "keep",
437
+ depthFailOp: "keep",
438
+ passOp: "replace",
439
+ },
440
+ },
441
+ });
442
+ // Depth-only shader for hair pre-pass (reduces overdraw by early depth rejection)
443
+ const depthOnlyShaderModule = this.device.createShaderModule({
444
+ label: "depth only shader",
445
+ code: /* wgsl */ `
446
+ struct CameraUniforms {
447
+ view: mat4x4f,
448
+ projection: mat4x4f,
449
+ viewPos: vec3f,
450
+ _padding: f32,
451
+ };
452
+
453
+ @group(0) @binding(0) var<uniform> camera: CameraUniforms;
454
+ @group(0) @binding(4) var<storage, read> skinMats: array<mat4x4f>;
455
+
456
+ @vertex fn vs(
457
+ @location(0) position: vec3f,
458
+ @location(1) normal: vec3f,
459
+ @location(3) joints0: vec4<u32>,
460
+ @location(4) weights0: vec4<f32>
461
+ ) -> @builtin(position) vec4f {
462
+ let pos4 = vec4f(position, 1.0);
463
+
464
+ // Branchless weight normalization (avoids GPU branch divergence)
465
+ let weightSum = weights0.x + weights0.y + weights0.z + weights0.w;
466
+ let invWeightSum = select(1.0, 1.0 / weightSum, weightSum > 0.0001);
467
+ let normalizedWeights = select(vec4f(1.0, 0.0, 0.0, 0.0), weights0 * invWeightSum, weightSum > 0.0001);
468
+
469
+ var skinnedPos = vec4f(0.0, 0.0, 0.0, 0.0);
470
+ for (var i = 0u; i < 4u; i++) {
471
+ let j = joints0[i];
472
+ let w = normalizedWeights[i];
473
+ let m = skinMats[j];
474
+ skinnedPos += (m * pos4) * w;
475
+ }
476
+ let worldPos = skinnedPos.xyz;
477
+ let clipPos = camera.projection * camera.view * vec4f(worldPos, 1.0);
478
+ return clipPos;
479
+ }
480
+
481
+ @fragment fn fs() -> @location(0) vec4f {
482
+ return vec4f(0.0, 0.0, 0.0, 0.0); // Transparent - color writes disabled via writeMask
483
+ }
484
+ `,
485
+ });
486
+ // Hair depth pre-pass pipeline: depth-only with color writes disabled to eliminate overdraw
487
+ this.hairDepthPipeline = this.createRenderPipeline({
488
+ label: "hair depth pre-pass",
489
+ layout: mainPipelineLayout,
490
+ shaderModule: depthOnlyShaderModule,
491
+ vertexBuffers: depthOnlyVertexBuffers,
492
+ fragmentTarget: {
493
+ format: this.presentationFormat,
494
+ writeMask: 0,
495
+ },
496
+ fragmentEntryPoint: "fs",
497
+ cullMode: "front",
498
+ depthStencil: {
499
+ format: "depth24plus-stencil8",
500
+ depthWriteEnabled: true,
501
+ depthCompare: "less-equal",
502
+ depthBias: 0.0,
503
+ depthBiasSlopeScale: 0.0,
504
+ depthBiasClamp: 0.0,
505
+ },
506
+ });
507
+ // Hair pipelines for rendering over eyes vs non-eyes (only differ in stencil compare mode)
508
+ const createHairPipeline = (isOverEyes) => {
509
+ return this.createRenderPipeline({
510
+ label: `hair pipeline (${isOverEyes ? "over eyes" : "over non-eyes"})`,
511
+ layout: mainPipelineLayout,
512
+ shaderModule,
513
+ vertexBuffers: fullVertexBuffers,
514
+ fragmentTarget: standardBlend,
515
+ cullMode: "front",
516
+ depthStencil: {
517
+ format: "depth24plus-stencil8",
518
+ depthWriteEnabled: false,
519
+ depthCompare: "less-equal",
520
+ stencilFront: {
521
+ compare: isOverEyes ? "equal" : "not-equal",
522
+ failOp: "keep",
523
+ depthFailOp: "keep",
524
+ passOp: "keep",
525
+ },
526
+ stencilBack: {
527
+ compare: isOverEyes ? "equal" : "not-equal",
528
+ failOp: "keep",
529
+ depthFailOp: "keep",
530
+ passOp: "keep",
531
+ },
532
+ },
533
+ });
534
+ };
535
+ this.hairPipelineOverEyes = createHairPipeline(true);
536
+ this.hairPipelineOverNonEyes = createHairPipeline(false);
537
+ }
538
+ // Create bloom post-processing pipelines
539
+ createBloomPipelines() {
540
+ // Bloom extraction shader (extracts bright areas)
541
+ const bloomExtractShader = this.device.createShaderModule({
542
+ label: "bloom extract",
543
+ code: /* wgsl */ `
544
+ struct VertexOutput {
545
+ @builtin(position) position: vec4f,
546
+ @location(0) uv: vec2f,
547
+ };
548
+
549
+ @vertex fn vs(@builtin(vertex_index) vertexIndex: u32) -> VertexOutput {
550
+ var output: VertexOutput;
551
+ // Fullscreen pass: derive clip-space positions from the vertex index (oversized triangles that cover the screen)
552
+ let x = f32((vertexIndex << 1u) & 2u) * 2.0 - 1.0;
553
+ let y = f32(vertexIndex & 2u) * 2.0 - 1.0;
554
+ output.position = vec4f(x, y, 0.0, 1.0);
555
+ output.uv = vec2f(x * 0.5 + 0.5, 1.0 - (y * 0.5 + 0.5));
556
+ return output;
557
+ }
558
+
559
+ struct BloomExtractUniforms {
560
+ threshold: f32,
561
+ _padding1: f32,
562
+ _padding2: f32,
563
+ _padding3: f32,
564
+ _padding4: f32,
565
+ _padding5: f32,
566
+ _padding6: f32,
567
+ _padding7: f32,
568
+ };
569
+
570
+ @group(0) @binding(0) var inputTexture: texture_2d<f32>;
571
+ @group(0) @binding(1) var inputSampler: sampler;
572
+ @group(0) @binding(2) var<uniform> extractUniforms: BloomExtractUniforms;
573
+
574
+ @fragment fn fs(input: VertexOutput) -> @location(0) vec4f {
575
+ let color = textureSample(inputTexture, inputSampler, input.uv);
576
+ // Extract bright areas above threshold
577
+ let threshold = extractUniforms.threshold;
578
+ let bloom = max(vec3f(0.0), color.rgb - vec3f(threshold)) / max(0.001, 1.0 - threshold);
579
+ return vec4f(bloom, color.a);
580
+ }
581
+ `,
582
+ });
583
+ // Bloom blur shader (gaussian blur - can be used for both horizontal and vertical)
584
+ const bloomBlurShader = this.device.createShaderModule({
585
+ label: "bloom blur",
586
+ code: /* wgsl */ `
587
+ struct VertexOutput {
588
+ @builtin(position) position: vec4f,
589
+ @location(0) uv: vec2f,
590
+ };
591
+
592
+ @vertex fn vs(@builtin(vertex_index) vertexIndex: u32) -> VertexOutput {
593
+ var output: VertexOutput;
594
+ let x = f32((vertexIndex << 1u) & 2u) * 2.0 - 1.0;
595
+ let y = f32(vertexIndex & 2u) * 2.0 - 1.0;
596
+ output.position = vec4f(x, y, 0.0, 1.0);
597
+ output.uv = vec2f(x * 0.5 + 0.5, 1.0 - (y * 0.5 + 0.5));
598
+ return output;
599
+ }
600
+
601
+ struct BlurUniforms {
602
+ direction: vec2f,
603
+ _padding1: f32,
604
+ _padding2: f32,
605
+ _padding3: f32,
606
+ _padding4: f32,
607
+ _padding5: f32,
608
+ _padding6: f32,
609
+ };
610
+
611
+ @group(0) @binding(0) var inputTexture: texture_2d<f32>;
612
+ @group(0) @binding(1) var inputSampler: sampler;
613
+ @group(0) @binding(2) var<uniform> blurUniforms: BlurUniforms;
614
+
615
+ // 3-tap gaussian blur using bilinear filtering trick (40% fewer texture fetches!)
616
+ @fragment fn fs(input: VertexOutput) -> @location(0) vec4f {
617
+ let texelSize = 1.0 / vec2f(textureDimensions(inputTexture));
618
+
619
+ // Bilinear optimization: leverage hardware filtering to sample between pixels
620
+ // Original 5-tap: weights [0.06136, 0.24477, 0.38774, 0.24477, 0.06136] at offsets [-2, -1, 0, 1, 2]
621
+ // Optimized 3-tap: combine adjacent samples using weighted offsets
622
+ let weight0 = 0.38774; // Center sample
623
+ let weight1 = 0.24477 + 0.06136; // Combined outer samples = 0.30613
624
+ let offset1 = (0.24477 * 1.0 + 0.06136 * 2.0) / weight1; // Weighted position = 1.2
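+ // Sanity check: weight0 + 2 * weight1 = 0.38774 + 2 * 0.30613 = 1.0, so overall brightness is preserved.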
625
+
626
+ var result = textureSample(inputTexture, inputSampler, input.uv) * weight0;
627
+ let offsetVec = offset1 * texelSize * blurUniforms.direction;
628
+ result += textureSample(inputTexture, inputSampler, input.uv + offsetVec) * weight1;
629
+ result += textureSample(inputTexture, inputSampler, input.uv - offsetVec) * weight1;
630
+
631
+ return result;
632
+ }
633
+ `,
634
+ });
635
+ // Bloom composition shader (combines original scene with bloom)
636
+ const bloomComposeShader = this.device.createShaderModule({
637
+ label: "bloom compose",
638
+ code: /* wgsl */ `
639
+ struct VertexOutput {
640
+ @builtin(position) position: vec4f,
641
+ @location(0) uv: vec2f,
642
+ };
643
+
644
+ @vertex fn vs(@builtin(vertex_index) vertexIndex: u32) -> VertexOutput {
645
+ var output: VertexOutput;
646
+ let x = f32((vertexIndex << 1u) & 2u) * 2.0 - 1.0;
647
+ let y = f32(vertexIndex & 2u) * 2.0 - 1.0;
648
+ output.position = vec4f(x, y, 0.0, 1.0);
649
+ output.uv = vec2f(x * 0.5 + 0.5, 1.0 - (y * 0.5 + 0.5));
650
+ return output;
651
+ }
652
+
653
+ struct BloomComposeUniforms {
654
+ intensity: f32,
655
+ _padding1: f32,
656
+ _padding2: f32,
657
+ _padding3: f32,
658
+ _padding4: f32,
659
+ _padding5: f32,
660
+ _padding6: f32,
661
+ _padding7: f32,
662
+ };
663
+
664
+ @group(0) @binding(0) var sceneTexture: texture_2d<f32>;
665
+ @group(0) @binding(1) var sceneSampler: sampler;
666
+ @group(0) @binding(2) var bloomTexture: texture_2d<f32>;
667
+ @group(0) @binding(3) var bloomSampler: sampler;
668
+ @group(0) @binding(4) var<uniform> composeUniforms: BloomComposeUniforms;
669
+
670
+ @fragment fn fs(input: VertexOutput) -> @location(0) vec4f {
671
+ let scene = textureSample(sceneTexture, sceneSampler, input.uv);
672
+ let bloom = textureSample(bloomTexture, bloomSampler, input.uv);
673
+ // Additive blending with intensity control
674
+ let result = scene.rgb + bloom.rgb * composeUniforms.intensity;
675
+ return vec4f(result, scene.a);
676
+ }
677
+ `,
678
+ });
679
+ // Create separate uniform buffers for the horizontal and vertical blur directions.
+ // Both blur passes are recorded into a single command buffer, so they cannot share one
+ // direction buffer that is rewritten between passes; the second write would win for both.
+ const blurDirectionHBuffer = this.device.createBuffer({
+ label: "blur direction (horizontal)",
+ size: 32, // Matches the 32-byte padded BlurUniforms struct in the blur shader
+ usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
+ });
+ const blurDirectionVBuffer = this.device.createBuffer({
+ label: "blur direction (vertical)",
+ size: 32, // Matches the 32-byte padded BlurUniforms struct in the blur shader
+ usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
+ });
685
+ // Create uniform buffer for bloom intensity (32 bytes to match the padded WGSL struct)
686
+ const bloomIntensityBuffer = this.device.createBuffer({
687
+ label: "bloom intensity",
688
+ size: 32, // Matches the 32-byte padded BloomComposeUniforms struct in the shader
689
+ usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
690
+ });
691
+ // Create uniform buffer for bloom threshold (32 bytes to match the padded WGSL struct)
692
+ const bloomThresholdBuffer = this.device.createBuffer({
693
+ label: "bloom threshold",
694
+ size: 32, // Matches the 32-byte padded BloomExtractUniforms struct in the shader
695
+ usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
696
+ });
697
+ // Set default bloom values
698
+ const intensityData = new Float32Array(8); // f32 + 7 padding floats = 8 floats = 32 bytes
699
+ intensityData[0] = this.bloomIntensity;
700
+ this.device.queue.writeBuffer(bloomIntensityBuffer, 0, intensityData);
701
+ const thresholdData = new Float32Array(8); // f32 + 7 padding floats = 8 floats = 32 bytes
702
+ thresholdData[0] = this.bloomThreshold;
703
+ this.device.queue.writeBuffer(bloomThresholdBuffer, 0, thresholdData);
704
+ // Create linear sampler for post-processing
705
+ const linearSampler = this.device.createSampler({
706
+ magFilter: "linear",
707
+ minFilter: "linear",
708
+ addressModeU: "clamp-to-edge",
709
+ addressModeV: "clamp-to-edge",
710
+ });
711
+ // Bloom extraction pipeline
712
+ this.bloomExtractPipeline = this.device.createRenderPipeline({
713
+ label: "bloom extract",
714
+ layout: "auto",
715
+ vertex: {
716
+ module: bloomExtractShader,
717
+ entryPoint: "vs",
718
+ },
719
+ fragment: {
720
+ module: bloomExtractShader,
721
+ entryPoint: "fs",
722
+ targets: [{ format: this.presentationFormat }],
723
+ },
724
+ primitive: { topology: "triangle-list" },
725
+ });
726
+ // Bloom blur pipeline
727
+ this.bloomBlurPipeline = this.device.createRenderPipeline({
728
+ label: "bloom blur",
729
+ layout: "auto",
730
+ vertex: {
731
+ module: bloomBlurShader,
732
+ entryPoint: "vs",
733
+ },
734
+ fragment: {
735
+ module: bloomBlurShader,
736
+ entryPoint: "fs",
737
+ targets: [{ format: this.presentationFormat }],
738
+ },
739
+ primitive: { topology: "triangle-list" },
740
+ });
741
+ // Bloom composition pipeline
742
+ this.bloomComposePipeline = this.device.createRenderPipeline({
743
+ label: "bloom compose",
744
+ layout: "auto",
745
+ vertex: {
746
+ module: bloomComposeShader,
747
+ entryPoint: "vs",
748
+ },
749
+ fragment: {
750
+ module: bloomComposeShader,
751
+ entryPoint: "fs",
752
+ targets: [{ format: this.presentationFormat }],
753
+ },
754
+ primitive: { topology: "triangle-list" },
755
+ });
756
+ // Store buffers and sampler for later use
757
+ this.blurDirectionHBuffer = blurDirectionHBuffer;
+ this.blurDirectionVBuffer = blurDirectionVBuffer;
758
+ this.bloomIntensityBuffer = bloomIntensityBuffer;
759
+ this.bloomThresholdBuffer = bloomThresholdBuffer;
760
+ this.linearSampler = linearSampler;
761
+ }
762
+ setupBloom(width, height) {
763
+ const bloomWidth = Math.floor(width / this.BLOOM_DOWNSCALE_FACTOR);
764
+ const bloomHeight = Math.floor(height / this.BLOOM_DOWNSCALE_FACTOR);
765
+ this.bloomExtractTexture = this.device.createTexture({
766
+ label: "bloom extract",
767
+ size: [bloomWidth, bloomHeight],
768
+ format: this.presentationFormat,
769
+ usage: GPUTextureUsage.RENDER_ATTACHMENT | GPUTextureUsage.TEXTURE_BINDING,
770
+ });
771
+ this.bloomBlurTexture1 = this.device.createTexture({
772
+ label: "bloom blur 1",
773
+ size: [bloomWidth, bloomHeight],
774
+ format: this.presentationFormat,
775
+ usage: GPUTextureUsage.RENDER_ATTACHMENT | GPUTextureUsage.TEXTURE_BINDING,
776
+ });
777
+ this.bloomBlurTexture2 = this.device.createTexture({
778
+ label: "bloom blur 2",
779
+ size: [bloomWidth, bloomHeight],
780
+ format: this.presentationFormat,
781
+ usage: GPUTextureUsage.RENDER_ATTACHMENT | GPUTextureUsage.TEXTURE_BINDING,
782
+ });
783
+ // Create bloom bind groups
784
+ this.bloomExtractBindGroup = this.device.createBindGroup({
785
+ layout: this.bloomExtractPipeline.getBindGroupLayout(0),
786
+ entries: [
787
+ { binding: 0, resource: this.sceneRenderTexture.createView() },
788
+ { binding: 1, resource: this.linearSampler },
789
+ { binding: 2, resource: { buffer: this.bloomThresholdBuffer } },
790
+ ],
791
+ });
792
+ this.bloomBlurHBindGroup = this.device.createBindGroup({
793
+ layout: this.bloomBlurPipeline.getBindGroupLayout(0),
794
+ entries: [
795
+ { binding: 0, resource: this.bloomExtractTexture.createView() },
796
+ { binding: 1, resource: this.linearSampler },
797
+ { binding: 2, resource: { buffer: this.blurDirectionHBuffer } },
798
+ ],
799
+ });
800
+ this.bloomBlurVBindGroup = this.device.createBindGroup({
801
+ layout: this.bloomBlurPipeline.getBindGroupLayout(0),
802
+ entries: [
803
+ { binding: 0, resource: this.bloomBlurTexture1.createView() },
804
+ { binding: 1, resource: this.linearSampler },
805
+ { binding: 2, resource: { buffer: this.blurDirectionVBuffer } },
806
+ ],
807
+ });
808
+ this.bloomComposeBindGroup = this.device.createBindGroup({
809
+ layout: this.bloomComposePipeline.getBindGroupLayout(0),
810
+ entries: [
811
+ { binding: 0, resource: this.sceneRenderTexture.createView() },
812
+ { binding: 1, resource: this.linearSampler },
813
+ { binding: 2, resource: this.bloomBlurTexture2.createView() },
814
+ { binding: 3, resource: this.linearSampler },
815
+ { binding: 4, resource: { buffer: this.bloomIntensityBuffer } },
816
+ ],
817
+ });
818
+ }
819
+ // Step 3: Setup canvas resize handling
820
+ setupResize() {
821
+ this.resizeObserver = new ResizeObserver(() => this.handleResize());
822
+ this.resizeObserver.observe(this.canvas);
823
+ this.handleResize();
824
+ }
825
+ handleResize() {
826
+ const displayWidth = this.canvas.clientWidth;
827
+ const displayHeight = this.canvas.clientHeight;
828
+ const dpr = window.devicePixelRatio || 1;
829
+ const width = Math.floor(displayWidth * dpr);
830
+ const height = Math.floor(displayHeight * dpr);
831
+ if (!this.multisampleTexture || this.canvas.width !== width || this.canvas.height !== height) {
832
+ this.canvas.width = width;
833
+ this.canvas.height = height;
834
+ this.multisampleTexture = this.device.createTexture({
835
+ label: "multisample render target",
836
+ size: [width, height],
837
+ sampleCount: this.sampleCount,
838
+ format: this.presentationFormat,
839
+ usage: GPUTextureUsage.RENDER_ATTACHMENT,
840
+ });
841
+ this.depthTexture = this.device.createTexture({
842
+ label: "depth texture",
843
+ size: [width, height],
844
+ sampleCount: this.sampleCount,
845
+ format: "depth24plus-stencil8",
846
+ usage: GPUTextureUsage.RENDER_ATTACHMENT,
847
+ });
848
+ // Create scene render texture (non-multisampled for post-processing)
849
+ this.sceneRenderTexture = this.device.createTexture({
850
+ label: "scene render texture",
851
+ size: [width, height],
852
+ format: this.presentationFormat,
853
+ usage: GPUTextureUsage.RENDER_ATTACHMENT | GPUTextureUsage.TEXTURE_BINDING,
854
+ });
855
+ // Setup bloom textures and bind groups
856
+ this.setupBloom(width, height);
857
+ const depthTextureView = this.depthTexture.createView();
858
+ // Cache the scene render texture view (only recreate on resize)
859
+ this.sceneRenderTextureView = this.sceneRenderTexture.createView();
860
+ // Render scene to texture instead of directly to canvas
861
+ const colorAttachment = this.sampleCount > 1
862
+ ? {
863
+ view: this.multisampleTexture.createView(),
864
+ resolveTarget: this.sceneRenderTextureView,
865
+ clearValue: { r: 0, g: 0, b: 0, a: 0 },
866
+ loadOp: "clear",
867
+ storeOp: "store",
868
+ }
869
+ : {
870
+ view: this.sceneRenderTextureView,
871
+ clearValue: { r: 0, g: 0, b: 0, a: 0 },
872
+ loadOp: "clear",
873
+ storeOp: "store",
874
+ };
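+ // With sampleCount > 1 the scene is rasterized into the multisample texture and resolved into
+ // sceneRenderTexture; the bloom passes later sample that resolved texture as their input.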
875
+ this.renderPassDescriptor = {
876
+ label: "renderPass",
877
+ colorAttachments: [colorAttachment],
878
+ depthStencilAttachment: {
879
+ view: depthTextureView,
880
+ depthClearValue: 1.0,
881
+ depthLoadOp: "clear",
882
+ depthStoreOp: "store",
883
+ stencilClearValue: 0,
884
+ stencilLoadOp: "clear",
885
+ stencilStoreOp: "discard", // Discard stencil after frame to save bandwidth (we only use it during rendering)
886
+ },
887
+ };
888
+ this.camera.aspect = width / height;
889
+ }
890
+ }
891
+ // Step 4: Create camera and uniform buffer
892
+ setupCamera() {
893
+ this.cameraUniformBuffer = this.device.createBuffer({
894
+ label: "camera uniforms",
895
+ size: 40 * 4,
896
+ usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
897
+ });
898
+ this.camera = new Camera(Math.PI, Math.PI / 2.5, this.cameraDistance, this.cameraTarget);
899
+ this.camera.aspect = this.canvas.width / this.canvas.height;
900
+ this.camera.attachControl(this.canvas);
901
+ }
902
+ // Step 5: Create lighting buffers
903
+ setupLighting() {
904
+ this.lightUniformBuffer = this.device.createBuffer({
905
+ label: "light uniforms",
906
+ size: 4 * 4, // 4 floats: ambientColor vec3f (3) + padding (1)
907
+ usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
908
+ });
909
+ this.setAmbientColor(this.ambientColor);
910
+ this.device.queue.writeBuffer(this.lightUniformBuffer, 0, this.lightData);
911
+ }
912
+ setAmbientColor(color) {
913
+ // Layout: ambientColor (0-2), padding (3)
914
+ this.lightData[0] = color.x;
915
+ this.lightData[1] = color.y;
916
+ this.lightData[2] = color.z;
917
+ this.lightData[3] = 0.0; // Padding for vec3f alignment
918
+ }
919
+ async loadAnimation(url) {
920
+ await this.player.loadVmd(url);
921
+ this.hasAnimation = true;
922
+ // Show first frame (time 0) immediately
923
+ if (this.currentModel) {
924
+ const initialPose = this.player.getPoseAtTime(0);
925
+ this.resetBonesAndPhysics(initialPose, true);
926
+ }
927
+ }
928
+ playAnimation() {
929
+ if (!this.hasAnimation || !this.currentModel)
930
+ return;
931
+ const wasPaused = this.player.isPausedState;
932
+ const wasPlaying = this.player.isPlayingState;
933
+ // Only reset pose and physics if starting from beginning (not resuming)
934
+ if (!wasPlaying && !wasPaused) {
935
+ const initialPose = this.player.getPoseAtTime(0);
936
+ this.resetBonesAndPhysics(initialPose, true);
937
+ }
938
+ // Start playback (or resume if paused)
939
+ this.player.play();
940
+ }
941
+ stopAnimation() {
942
+ this.player.stop();
943
+ }
944
+ pauseAnimation() {
945
+ this.player.pause();
946
+ }
947
+ seekAnimation(time) {
948
+ if (!this.currentModel || !this.hasAnimation)
949
+ return;
950
+ this.player.seek(time);
951
+ // Immediately apply pose at seeked time (don't reset bones without keyframes)
952
+ const pose = this.player.getPoseAtTime(time);
953
+ this.resetBonesAndPhysics(pose, false);
954
+ }
955
+ getAnimationProgress() {
956
+ return this.player.getProgress();
957
+ }
958
+ /**
959
+ * Apply animation pose to model
960
+ */
961
+ applyPose(pose) {
962
+ if (!this.currentModel)
963
+ return;
964
+ // Apply bone rotations
965
+ if (pose.boneRotations.size > 0) {
966
+ const boneNames = Array.from(pose.boneRotations.keys());
967
+ const rotations = Array.from(pose.boneRotations.values()).map((quat) => new Quat(quat.x, quat.y, quat.z, quat.w));
968
+ this.rotateBones(boneNames, rotations, 0);
969
+ }
970
+ // Apply bone translations
971
+ if (pose.boneTranslations.size > 0) {
972
+ const boneNames = Array.from(pose.boneTranslations.keys());
973
+ const translations = Array.from(pose.boneTranslations.values()).map((vec3) => new Vec3(vec3.x, vec3.y, vec3.z));
974
+ this.moveBones(boneNames, translations, 0);
975
+ }
976
+ // Apply morph weights
977
+ for (const [morphName, weight] of pose.morphWeights.entries()) {
978
+ this.setMorphWeight(morphName, weight, 0);
979
+ }
980
+ }
981
+ /**
982
+ * Reset bones and physics to match a given pose
983
+ * @param pose The pose to apply
984
+ * @param resetBonesWithoutKeyframes If true, reset bones that don't have keyframes in the pose to identity
985
+ */
986
+ resetBonesAndPhysics(pose, resetBonesWithoutKeyframes = false) {
987
+ if (!this.currentModel)
988
+ return;
989
+ this.applyPose(pose);
990
+ // Reset bones without keyframes if requested (for initial animation setup)
991
+ if (resetBonesWithoutKeyframes) {
992
+ const skeleton = this.currentModel.getSkeleton();
993
+ const bonesWithPose = new Set(pose.boneRotations.keys());
994
+ const bonesToReset = [];
995
+ for (const bone of skeleton.bones) {
996
+ if (!bonesWithPose.has(bone.name)) {
997
+ bonesToReset.push(bone.name);
998
+ }
999
+ }
1000
+ if (bonesToReset.length > 0) {
1001
+ const identityQuat = new Quat(0, 0, 0, 1);
1002
+ const identityQuats = new Array(bonesToReset.length).fill(identityQuat);
1003
+ this.rotateBones(bonesToReset, identityQuats, 0);
1004
+ }
1005
+ }
1006
+ // Update model pose and physics
1007
+ this.currentModel.evaluatePose();
1008
+ // if (this.physics) {
1009
+ // const worldMats = this.currentModel.getBoneWorldMatrices()
1010
+ // this.physics.reset(worldMats, this.currentModel.getBoneInverseBindMatrices())
1011
+ // // Compute and upload skin matrices immediately
1012
+ // this.computeSkinMatrices()
1013
+ // }
1014
+ }
1015
+ getStats() {
1016
+ return { ...this.stats };
1017
+ }
1018
+ runRenderLoop(callback) {
1019
+ this.renderLoopCallback = callback || null;
1020
+ const loop = () => {
1021
+ this.render();
1022
+ if (this.renderLoopCallback) {
1023
+ this.renderLoopCallback();
1024
+ }
1025
+ this.animationFrameId = requestAnimationFrame(loop);
1026
+ };
1027
+ this.animationFrameId = requestAnimationFrame(loop);
1028
+ }
1029
+ stopRenderLoop() {
1030
+ if (this.animationFrameId !== null) {
1031
+ cancelAnimationFrame(this.animationFrameId);
1032
+ this.animationFrameId = null;
1033
+ }
1034
+ this.renderLoopCallback = null;
1035
+ }
1036
+ dispose() {
1037
+ this.stopRenderLoop();
1038
+ this.stopAnimation();
1039
+ if (this.camera)
1040
+ this.camera.detachControl();
1041
+ if (this.resizeObserver) {
1042
+ this.resizeObserver.disconnect();
1043
+ this.resizeObserver = null;
1044
+ }
1045
+ }
1046
+ // Step 6: Load PMX model file
1047
+ async loadModel(path) {
1048
+ const pathParts = path.split("/");
1049
+ pathParts.pop();
1050
+ const dir = pathParts.join("/") + "/";
1051
+ this.modelDir = dir;
1052
+ const model = await PmxLoader.load(path);
1053
+ // this.physics = new Physics(model.getRigidbodies(), model.getJoints())
1054
+ await this.setupModelBuffers(model);
1055
+ }
1056
+ rotateBones(bones, rotations, durationMs) {
1057
+ this.currentModel?.rotateBones(bones, rotations.map((quat) => new QuatMath(quat.x, quat.y, quat.z, quat.w)), durationMs);
1058
+ }
1059
+ // moveBones now takes relative translations (VMD-style) by default
1060
+ moveBones(bones, relativeTranslations, durationMs) {
1061
+ this.currentModel?.moveBones(bones, relativeTranslations, durationMs);
1062
+ }
1063
+ setMorphWeight(name, weight, durationMs) {
1064
+ if (!this.currentModel)
1065
+ return;
1066
+ this.currentModel.setMorphWeight(name, weight, durationMs);
1067
+ if (!durationMs || durationMs === 0) {
1068
+ this.vertexBufferNeedsUpdate = true;
1069
+ }
1070
+ }
1071
+ updateVertexBuffer() {
1072
+ if (!this.currentModel || !this.vertexBuffer)
1073
+ return;
1074
+ const vertices = this.currentModel.getVertices();
1075
+ if (!vertices || vertices.length === 0)
1076
+ return;
1077
+ this.device.queue.writeBuffer(this.vertexBuffer, 0, vertices);
1078
+ }
1079
+ // Step 7: Create vertex, index, and joint buffers
1080
+ async setupModelBuffers(model) {
1081
+ this.currentModel = model;
1082
+ const vertices = this.currentModel.getVertices();
1083
+ const skinning = this.currentModel.getSkinning();
1084
+ const boneCount = this.currentModel.getSkeleton().bones.length;
1085
+ this.vertexBuffer = this.device.createBuffer({
1086
+ label: "model vertex buffer",
1087
+ size: vertices.byteLength,
1088
+ usage: GPUBufferUsage.VERTEX | GPUBufferUsage.COPY_DST,
1089
+ });
1090
+ this.device.queue.writeBuffer(this.vertexBuffer, 0, new Float32Array(vertices));
1091
+ this.jointsBuffer = this.device.createBuffer({
1092
+ label: "joints buffer",
1093
+ size: skinning.joints.byteLength,
1094
+ usage: GPUBufferUsage.VERTEX | GPUBufferUsage.COPY_DST,
1095
+ });
1096
+ this.device.queue.writeBuffer(this.jointsBuffer, 0, skinning.joints.buffer, skinning.joints.byteOffset, skinning.joints.byteLength);
1097
+ this.weightsBuffer = this.device.createBuffer({
1098
+ label: "weights buffer",
1099
+ size: skinning.weights.byteLength,
1100
+ usage: GPUBufferUsage.VERTEX | GPUBufferUsage.COPY_DST,
1101
+ });
1102
+ this.device.queue.writeBuffer(this.weightsBuffer, 0, skinning.weights.buffer, skinning.weights.byteOffset, skinning.weights.byteLength);
1103
+ const matrixSize = boneCount * 16 * 4;
1104
+ this.skinMatrixBuffer = this.device.createBuffer({
1105
+ label: "skin matrices",
1106
+ size: Math.max(256, matrixSize),
1107
+ usage: GPUBufferUsage.STORAGE | GPUBufferUsage.VERTEX | GPUBufferUsage.COPY_DST,
1108
+ });
1109
+ const indices = this.currentModel.getIndices();
1110
+ if (indices) {
1111
+ this.indexBuffer = this.device.createBuffer({
1112
+ label: "model index buffer",
1113
+ size: indices.byteLength,
1114
+ usage: GPUBufferUsage.INDEX | GPUBufferUsage.COPY_DST,
1115
+ });
1116
+ this.device.queue.writeBuffer(this.indexBuffer, 0, new Uint32Array(indices));
1117
+ }
1118
+ else {
1119
+ throw new Error("Model has no index buffer");
1120
+ }
1121
+ await this.setupMaterials(this.currentModel);
1122
+ }
1123
+ async setupMaterials(model) {
1124
+ const materials = model.getMaterials();
1125
+ if (materials.length === 0) {
1126
+ throw new Error("Model has no materials");
1127
+ }
1128
+ const textures = model.getTextures();
1129
+ const loadTextureByIndex = async (texIndex) => {
1130
+ if (texIndex < 0 || texIndex >= textures.length) {
1131
+ return null;
1132
+ }
1133
+ const path = this.modelDir + textures[texIndex].path;
1134
+ const texture = await this.createTextureFromPath(path);
1135
+ return texture;
1136
+ };
1137
+ this.drawCalls = [];
1138
+ let currentIndexOffset = 0;
1139
+ for (const mat of materials) {
1140
+ const indexCount = mat.vertexCount;
1141
+ if (indexCount === 0)
1142
+ continue;
1143
+ const diffuseTexture = await loadTextureByIndex(mat.diffuseTextureIndex);
1144
+ if (!diffuseTexture)
1145
+ throw new Error(`Material "${mat.name}" has no diffuse texture`);
1146
+ const materialAlpha = mat.diffuse[3];
1147
+ const isTransparent = materialAlpha < 1.0 - Engine.TRANSPARENCY_EPSILON;
1148
+ const materialUniformBuffer = this.createMaterialUniformBuffer(mat.name, materialAlpha, 0.0);
1149
+ // Create bind groups using the shared bind group layout - All pipelines (main, eye, hair multiply, hair opaque) use the same shader and layout
1150
+ const bindGroup = this.device.createBindGroup({
1151
+ label: `material bind group: ${mat.name}`,
1152
+ layout: this.mainBindGroupLayout,
1153
+ entries: [
1154
+ { binding: 0, resource: { buffer: this.cameraUniformBuffer } },
1155
+ { binding: 1, resource: { buffer: this.lightUniformBuffer } },
1156
+ { binding: 2, resource: diffuseTexture.createView() },
1157
+ { binding: 3, resource: this.materialSampler },
1158
+ { binding: 4, resource: { buffer: this.skinMatrixBuffer } },
1159
+ { binding: 5, resource: { buffer: materialUniformBuffer } },
1160
+ ],
1161
+ });
1162
+ if (indexCount > 0) {
1163
+ if (mat.isEye) {
1164
+ this.drawCalls.push({ type: "eye", count: indexCount, firstIndex: currentIndexOffset, bindGroup });
1165
+ }
1166
+ else if (mat.isHair) {
1167
+ // Hair materials: create separate bind groups for over-eyes vs over-non-eyes
1168
+ const createHairBindGroup = (isOverEyes) => {
1169
+ const buffer = this.createMaterialUniformBuffer(`${mat.name} (${isOverEyes ? "over eyes" : "over non-eyes"})`, materialAlpha, isOverEyes ? 1.0 : 0.0);
1170
+ return this.device.createBindGroup({
1171
+ label: `material bind group (${isOverEyes ? "over eyes" : "over non-eyes"}): ${mat.name}`,
1172
+ layout: this.mainBindGroupLayout,
1173
+ entries: [
1174
+ { binding: 0, resource: { buffer: this.cameraUniformBuffer } },
1175
+ { binding: 1, resource: { buffer: this.lightUniformBuffer } },
1176
+ { binding: 2, resource: diffuseTexture.createView() },
1177
+ { binding: 3, resource: this.materialSampler },
1178
+ { binding: 4, resource: { buffer: this.skinMatrixBuffer } },
1179
+ { binding: 5, resource: { buffer: buffer } },
1180
+ ],
1181
+ });
1182
+ };
1183
+ const bindGroupOverEyes = createHairBindGroup(true);
1184
+ const bindGroupOverNonEyes = createHairBindGroup(false);
1185
+ this.drawCalls.push({
1186
+ type: "hair-over-eyes",
1187
+ count: indexCount,
1188
+ firstIndex: currentIndexOffset,
1189
+ bindGroup: bindGroupOverEyes,
1190
+ });
1191
+ this.drawCalls.push({
1192
+ type: "hair-over-non-eyes",
1193
+ count: indexCount,
1194
+ firstIndex: currentIndexOffset,
1195
+ bindGroup: bindGroupOverNonEyes,
1196
+ });
1197
+ }
1198
+ else if (isTransparent) {
1199
+ this.drawCalls.push({ type: "transparent", count: indexCount, firstIndex: currentIndexOffset, bindGroup });
1200
+ }
1201
+ else {
1202
+ this.drawCalls.push({ type: "opaque", count: indexCount, firstIndex: currentIndexOffset, bindGroup });
1203
+ }
1204
+ }
1205
+ // Edge flag is at bit 4 (0x10) in PMX format
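+ // (PMX material flag bits: 0x01 double-sided, 0x02 ground shadow, 0x04 cast self-shadow, 0x08 receive self-shadow, 0x10 edge.)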
1206
+ if ((mat.edgeFlag & 0x10) !== 0 && mat.edgeSize > 0) {
1207
+ const materialUniformData = new Float32Array([
1208
+ mat.edgeColor[0],
1209
+ mat.edgeColor[1],
1210
+ mat.edgeColor[2],
1211
+ mat.edgeColor[3],
1212
+ mat.edgeSize,
1213
+ 0,
1214
+ 0,
1215
+ 0,
1216
+ ]);
1217
+ const materialUniformBuffer = this.createUniformBuffer(`outline material uniform: ${mat.name}`, materialUniformData);
1218
+ const outlineBindGroup = this.device.createBindGroup({
1219
+ label: `outline bind group: ${mat.name}`,
1220
+ layout: this.outlineBindGroupLayout,
1221
+ entries: [
1222
+ { binding: 0, resource: { buffer: this.cameraUniformBuffer } },
1223
+ { binding: 1, resource: { buffer: materialUniformBuffer } },
1224
+ { binding: 2, resource: { buffer: this.skinMatrixBuffer } },
1225
+ ],
1226
+ });
1227
+ if (indexCount > 0) {
1228
+ const outlineType = mat.isEye
1229
+ ? "eye-outline"
1230
+ : mat.isHair
1231
+ ? "hair-outline"
1232
+ : isTransparent
1233
+ ? "transparent-outline"
1234
+ : "opaque-outline";
1235
+ this.drawCalls.push({
1236
+ type: outlineType,
1237
+ count: indexCount,
1238
+ firstIndex: currentIndexOffset,
1239
+ bindGroup: outlineBindGroup,
1240
+ });
1241
+ }
1242
+ }
1243
+ currentIndexOffset += indexCount;
1244
+ }
1245
+ }
1246
+ createMaterialUniformBuffer(label, alpha, isOverEyes) {
1247
+ const data = new Float32Array(8);
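+ // Field order mirrors the MaterialUniforms struct in the model shader:
+ // [alpha, alphaMultiplier, rimIntensity, _padding1, rimColor.r, rimColor.g, rimColor.b, isOverEyes]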
1248
+ data.set([alpha, 1.0, this.rimLightIntensity, 0.0, 1.0, 1.0, 1.0, isOverEyes]);
1249
+ return this.createUniformBuffer(`material uniform: ${label}`, data);
1250
+ }
1251
+ createUniformBuffer(label, data) {
1252
+ const buffer = this.device.createBuffer({
1253
+ label,
1254
+ size: data.byteLength,
1255
+ usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
1256
+ });
1257
+ this.device.queue.writeBuffer(buffer, 0, data);
1258
+ return buffer;
1259
+ }
1260
+ async createTextureFromPath(path) {
1261
+ const cached = this.textureCache.get(path);
1262
+ if (cached) {
1263
+ return cached;
1264
+ }
1265
+ try {
1266
+ const response = await fetch(path);
1267
+ if (!response.ok) {
1268
+ throw new Error(`HTTP ${response.status}: ${response.statusText}`);
1269
+ }
1270
+ const imageBitmap = await createImageBitmap(await response.blob(), {
1271
+ premultiplyAlpha: "none",
1272
+ colorSpaceConversion: "none",
1273
+ });
1274
+ const texture = this.device.createTexture({
1275
+ label: `texture: ${path}`,
1276
+ size: [imageBitmap.width, imageBitmap.height],
1277
+ format: "rgba8unorm",
1278
+ usage: GPUTextureUsage.TEXTURE_BINDING | GPUTextureUsage.COPY_DST | GPUTextureUsage.RENDER_ATTACHMENT,
1279
+ });
1280
+ this.device.queue.copyExternalImageToTexture({ source: imageBitmap }, { texture }, [
1281
+ imageBitmap.width,
1282
+ imageBitmap.height,
1283
+ ]);
1284
+ this.textureCache.set(path, texture);
1285
+ return texture;
1286
+ }
1287
+ catch {
1288
+ return null;
1289
+ }
1290
+ }
1291
+ // Helper: Render eyes with stencil writing (for post-alpha-eye effect)
1292
+ renderEyes(pass) {
1293
+ pass.setPipeline(this.eyePipeline);
1294
+ pass.setStencilReference(this.STENCIL_EYE_VALUE);
1295
+ for (const draw of this.drawCalls) {
1296
+ if (draw.type === "eye") {
1297
+ pass.setBindGroup(0, draw.bindGroup);
1298
+ pass.drawIndexed(draw.count, 1, draw.firstIndex, 0, 0);
1299
+ }
1300
+ }
1301
+ }
1302
+ // Helper: Render hair with post-alpha-eye effect (depth pre-pass + stencil-based shading + outlines)
1303
+ renderHair(pass) {
1304
+ // Hair depth pre-pass (reduces overdraw via early depth rejection)
1305
+ const hasHair = this.drawCalls.some((d) => d.type === "hair-over-eyes" || d.type === "hair-over-non-eyes");
1306
+ if (hasHair) {
1307
+ pass.setPipeline(this.hairDepthPipeline);
1308
+ for (const draw of this.drawCalls) {
1309
+ if (draw.type === "hair-over-eyes" || draw.type === "hair-over-non-eyes") {
1310
+ pass.setBindGroup(0, draw.bindGroup);
1311
+ pass.drawIndexed(draw.count, 1, draw.firstIndex, 0, 0);
1312
+ }
1313
+ }
1314
+ }
1315
+ // Hair shading (split by stencil for transparency over eyes)
1316
+ const hairOverEyes = this.drawCalls.filter((d) => d.type === "hair-over-eyes");
1317
+ if (hairOverEyes.length > 0) {
1318
+ pass.setPipeline(this.hairPipelineOverEyes);
1319
+ pass.setStencilReference(this.STENCIL_EYE_VALUE);
1320
+ for (const draw of hairOverEyes) {
1321
+ pass.setBindGroup(0, draw.bindGroup);
1322
+ pass.drawIndexed(draw.count, 1, draw.firstIndex, 0, 0);
1323
+ }
1324
+ }
1325
+ const hairOverNonEyes = this.drawCalls.filter((d) => d.type === "hair-over-non-eyes");
1326
+ if (hairOverNonEyes.length > 0) {
1327
+ pass.setPipeline(this.hairPipelineOverNonEyes);
1328
+ pass.setStencilReference(this.STENCIL_EYE_VALUE);
1329
+ for (const draw of hairOverNonEyes) {
1330
+ pass.setBindGroup(0, draw.bindGroup);
1331
+ pass.drawIndexed(draw.count, 1, draw.firstIndex, 0, 0);
1332
+ }
1333
+ }
1334
+ // Hair outlines
1335
+ const hairOutlines = this.drawCalls.filter((d) => d.type === "hair-outline");
1336
+ if (hairOutlines.length > 0) {
1337
+ pass.setPipeline(this.hairOutlinePipeline);
1338
+ for (const draw of hairOutlines) {
1339
+ pass.setBindGroup(0, draw.bindGroup);
1340
+ pass.drawIndexed(draw.count, 1, draw.firstIndex, 0, 0);
1341
+ }
1342
+ }
1343
+ }
1344
+ // Render strategy: 1) Opaque non-eye/hair 2) Eyes (stencil=1) 3) Hair (depth pre-pass + split by stencil) 4) Transparent 5) Bloom
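+ // How the stencil split works: renderEyes() writes STENCIL_EYE_VALUE into the stencil buffer for
+ // eye pixels; hairPipelineOverEyes passes only where the stencil equals that value (and the
+ // material's isOverEyes flag halves its alpha), while hairPipelineOverNonEyes covers the rest.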
1345
+ render() {
1346
+ if (this.multisampleTexture && this.camera && this.device) {
1347
+ const currentTime = performance.now();
1348
+ const deltaTime = this.lastFrameTime > 0 ? (currentTime - this.lastFrameTime) / 1000 : 0.016;
1349
+ this.lastFrameTime = currentTime;
1350
+ this.updateCameraUniforms();
1351
+ this.updateRenderTarget();
1352
+ // // Animate VMD animation if playing
1353
+ // if (this.hasAnimation && this.currentModel) {
1354
+ // const pose = this.player.update(currentTime)
1355
+ // if (pose) {
1356
+ // this.applyPose(pose)
1357
+ // }
1358
+ // }
1359
+ // // Update model pose first (this may update morph weights via tweens)
1360
+ // // We need to do this before creating the encoder to ensure vertex buffer is ready
1361
+ // if (this.currentModel) {
1362
+ // const hasActiveMorphTweens = this.currentModel.evaluatePose()
1363
+ // if (hasActiveMorphTweens) {
1364
+ // this.vertexBufferNeedsUpdate = true
1365
+ // }
1366
+ // }
1367
+ // // Update vertex buffer if morphs changed
1368
+ // if (this.vertexBufferNeedsUpdate) {
1369
+ // this.updateVertexBuffer()
1370
+ // this.vertexBufferNeedsUpdate = false
1371
+ // }
1372
+ // Update model pose (computes skin matrices on CPU)
1373
+ this.updateModelPose(deltaTime);
1374
+ // Use single encoder for render
1375
+ const encoder = this.device.createCommandEncoder();
1376
+ const pass = encoder.beginRenderPass(this.renderPassDescriptor);
1377
+ if (this.currentModel) {
1378
+ pass.setVertexBuffer(0, this.vertexBuffer);
1379
+ pass.setVertexBuffer(1, this.jointsBuffer);
1380
+ pass.setVertexBuffer(2, this.weightsBuffer);
1381
+ pass.setIndexBuffer(this.indexBuffer, "uint32");
1382
+ // Pass 1: Opaque
1383
+ pass.setPipeline(this.modelPipeline);
1384
+ for (const draw of this.drawCalls) {
1385
+ if (draw.type === "opaque") {
1386
+ pass.setBindGroup(0, draw.bindGroup);
1387
+ pass.drawIndexed(draw.count, 1, draw.firstIndex, 0, 0);
1388
+ }
1389
+ }
1390
+ // Pass 2: Eyes (writes stencil value for hair to test against)
1391
+ this.renderEyes(pass);
1392
+ this.drawOutlines(pass, false);
1393
+ // Pass 3: Hair rendering (depth pre-pass + shading + outlines)
1394
+ this.renderHair(pass);
1395
+ // Pass 4: Transparent
1396
+ pass.setPipeline(this.modelPipeline);
1397
+ for (const draw of this.drawCalls) {
1398
+ if (draw.type === "transparent") {
1399
+ pass.setBindGroup(0, draw.bindGroup);
1400
+ pass.drawIndexed(draw.count, 1, draw.firstIndex, 0, 0);
1401
+ }
1402
+ }
1403
+ this.drawOutlines(pass, true);
1404
+ }
1405
+ pass.end();
1406
+ this.device.queue.submit([encoder.finish()]);
1407
+ this.applyBloom();
1408
+ this.updateStats(performance.now() - currentTime);
1409
+ }
1410
+ }
1411
+ applyBloom() {
1412
+ if (!this.sceneRenderTexture || !this.bloomExtractTexture) {
1413
+ return;
1414
+ }
1415
+ // Update bloom parameters
1416
+ const thresholdData = new Float32Array(8);
1417
+ thresholdData[0] = this.bloomThreshold;
1418
+ this.device.queue.writeBuffer(this.bloomThresholdBuffer, 0, thresholdData);
1419
+ const intensityData = new Float32Array(8);
1420
+ intensityData[0] = this.bloomIntensity;
1421
+ this.device.queue.writeBuffer(this.bloomIntensityBuffer, 0, intensityData);
1422
+ const encoder = this.device.createCommandEncoder();
1423
+ // Extract bright areas
1424
+ const extractPass = encoder.beginRenderPass({
1425
+ label: "bloom extract",
1426
+ colorAttachments: [
1427
+ {
1428
+ view: this.bloomExtractTexture.createView(),
1429
+ clearValue: { r: 0, g: 0, b: 0, a: 0 },
1430
+ loadOp: "clear",
1431
+ storeOp: "store",
1432
+ },
1433
+ ],
1434
+ });
1435
+ extractPass.setPipeline(this.bloomExtractPipeline);
1436
+ extractPass.setBindGroup(0, this.bloomExtractBindGroup);
1437
+ extractPass.draw(6, 1, 0, 0);
1438
+ extractPass.end();
1439
+ // Horizontal blur
1440
+ const hBlurData = new Float32Array(4);
1441
+ hBlurData[0] = 1.0;
1442
+ hBlurData[1] = 0.0;
1443
+ this.device.queue.writeBuffer(this.blurDirectionHBuffer, 0, hBlurData);
1444
+ const blurHPass = encoder.beginRenderPass({
1445
+ label: "bloom blur horizontal",
1446
+ colorAttachments: [
1447
+ {
1448
+ view: this.bloomBlurTexture1.createView(),
1449
+ clearValue: { r: 0, g: 0, b: 0, a: 0 },
1450
+ loadOp: "clear",
1451
+ storeOp: "store",
1452
+ },
1453
+ ],
1454
+ });
1455
+ blurHPass.setPipeline(this.bloomBlurPipeline);
1456
+ blurHPass.setBindGroup(0, this.bloomBlurHBindGroup);
1457
+ blurHPass.draw(6, 1, 0, 0);
1458
+ blurHPass.end();
1459
+ // Vertical blur
1460
+ const vBlurData = new Float32Array(4);
1461
+ vBlurData[0] = 0.0;
1462
+ vBlurData[1] = 1.0;
1463
+ this.device.queue.writeBuffer(this.blurDirectionVBuffer, 0, vBlurData);
1464
+ const blurVPass = encoder.beginRenderPass({
1465
+ label: "bloom blur vertical",
1466
+ colorAttachments: [
1467
+ {
1468
+ view: this.bloomBlurTexture2.createView(),
1469
+ clearValue: { r: 0, g: 0, b: 0, a: 0 },
1470
+ loadOp: "clear",
1471
+ storeOp: "store",
1472
+ },
1473
+ ],
1474
+ });
1475
+ blurVPass.setPipeline(this.bloomBlurPipeline);
1476
+ blurVPass.setBindGroup(0, this.bloomBlurVBindGroup);
1477
+ blurVPass.draw(6, 1, 0, 0);
1478
+ blurVPass.end();
1479
+ // Compose to canvas
1480
+ const composePass = encoder.beginRenderPass({
1481
+ label: "bloom compose",
1482
+ colorAttachments: [
1483
+ {
1484
+ view: this.context.getCurrentTexture().createView(),
1485
+ clearValue: { r: 0, g: 0, b: 0, a: 0 },
1486
+ loadOp: "clear",
1487
+ storeOp: "store",
1488
+ },
1489
+ ],
1490
+ });
1491
+ composePass.setPipeline(this.bloomComposePipeline);
1492
+ composePass.setBindGroup(0, this.bloomComposeBindGroup);
1493
+ composePass.draw(6, 1, 0, 0);
1494
+ composePass.end();
1495
+ this.device.queue.submit([encoder.finish()]);
1496
+ }
1497
+ updateCameraUniforms() {
1498
+ const viewMatrix = this.camera.getViewMatrix();
1499
+ const projectionMatrix = this.camera.getProjectionMatrix();
1500
+ const cameraPos = this.camera.getPosition();
1501
+ this.cameraMatrixData.set(viewMatrix.values, 0);
1502
+ this.cameraMatrixData.set(projectionMatrix.values, 16);
1503
+ this.cameraMatrixData[32] = cameraPos.x;
1504
+ this.cameraMatrixData[33] = cameraPos.y;
1505
+ this.cameraMatrixData[34] = cameraPos.z;
1506
+ this.device.queue.writeBuffer(this.cameraUniformBuffer, 0, this.cameraMatrixData);
1507
+ }
1508
+ updateRenderTarget() {
1509
+ // Use cached view (only recreated on resize in handleResize)
1510
+ const colorAttachment = this.renderPassDescriptor.colorAttachments[0];
1511
+ if (this.sampleCount > 1) {
1512
+ colorAttachment.resolveTarget = this.sceneRenderTextureView;
1513
+ }
1514
+ else {
1515
+ colorAttachment.view = this.sceneRenderTextureView;
1516
+ }
1517
+ }
1518
+ updateModelPose(deltaTime) {
1519
+ // Physics is currently disabled, so this step just re-evaluates the pose on the CPU
1520
+ // and uploads fresh skin matrices; worldMats and deltaTime feed the commented-out physics step below.
1521
+ const worldMats = this.currentModel.getBoneWorldMatrices();
1522
+ // if (this.physics) {
1523
+ // this.physics.step(deltaTime, worldMats, this.currentModel!.getBoneInverseBindMatrices())
1524
+ // }
1525
+ this.currentModel?.evaluatePose();
1526
+ this.computeSkinMatrices();
1527
+ }
1528
+ computeSkinMatrices() {
1529
+ const skinMatrices = this.currentModel.getSkinMatrices();
1530
+ this.device.queue.writeBuffer(this.skinMatrixBuffer, 0, skinMatrices.buffer, skinMatrices.byteOffset, skinMatrices.byteLength);
1531
+ }
1532
+ drawOutlines(pass, transparent) {
1533
+ pass.setPipeline(this.outlinePipeline);
1534
+ const outlineType = transparent ? "transparent-outline" : "opaque-outline";
1535
+ for (const draw of this.drawCalls) {
1536
+ if (draw.type === outlineType) {
1537
+ pass.setBindGroup(0, draw.bindGroup);
1538
+ pass.drawIndexed(draw.count, 1, draw.firstIndex, 0, 0);
1539
+ }
1540
+ }
1541
+ }
1542
+ updateStats(frameTime) {
1543
+ // Simplified frame time tracking - rolling average with fixed window
1544
+ const maxSamples = 60;
1545
+ this.frameTimeSum += frameTime;
1546
+ this.frameTimeCount++;
1547
+ if (this.frameTimeCount > maxSamples) {
1548
+ // Maintain rolling window by subtracting oldest sample estimate
1549
+ const avg = this.frameTimeSum / maxSamples;
1550
+ this.frameTimeSum -= avg;
1551
+ this.frameTimeCount = maxSamples;
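+ // Once saturated this behaves like an exponential moving average with a 1/60 weight per new sample.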
1552
+ }
1553
+ this.stats.frameTime =
1554
+ Math.round((this.frameTimeSum / this.frameTimeCount) * Engine.STATS_FRAME_TIME_ROUNDING) /
1555
+ Engine.STATS_FRAME_TIME_ROUNDING;
1556
+ // FPS tracking
1557
+ const now = performance.now();
1558
+ this.framesSinceLastUpdate++;
1559
+ const elapsed = now - this.lastFpsUpdate;
1560
+ if (elapsed >= Engine.STATS_FPS_UPDATE_INTERVAL_MS) {
1561
+ this.stats.fps = Math.round((this.framesSinceLastUpdate / elapsed) * Engine.STATS_FPS_UPDATE_INTERVAL_MS);
1562
+ this.framesSinceLastUpdate = 0;
1563
+ this.lastFpsUpdate = now;
1564
+ }
1565
+ }
1566
+ }
1567
+ // Default values
1568
+ Engine.DEFAULT_BLOOM_THRESHOLD = 0.01;
1569
+ Engine.DEFAULT_BLOOM_INTENSITY = 0.12;
1570
+ Engine.DEFAULT_RIM_LIGHT_INTENSITY = 0.45;
1571
+ Engine.DEFAULT_CAMERA_DISTANCE = 26.6;
1572
+ Engine.DEFAULT_CAMERA_TARGET = new Vec3(0, 12.5, 0);
1573
+ Engine.TRANSPARENCY_EPSILON = 0.001;
1574
+ Engine.STATS_FPS_UPDATE_INTERVAL_MS = 1000;
1575
+ Engine.STATS_FRAME_TIME_ROUNDING = 100;
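Continuing the usage sketch above, the bone and morph API added here can be driven directly; the bone and morph names below follow common MMD conventions but are model-specific placeholders.

  import { Quat, Vec3 } from "reze-mmd";
  // Rotate the head bone 30 degrees around Y over 200 ms: quaternion (0, sin 15°, 0, cos 15°).
  engine.rotateBones(["頭"], [new Quat(0, Math.sin(Math.PI / 12), 0, Math.cos(Math.PI / 12))], 200);
  // moveBones applies relative, VMD-style translations.
  engine.moveBones(["センター"], [new Vec3(0, 0.5, 0)], 200);
  // A duration of 0 applies the morph immediately and flags the vertex buffer for re-upload.
  engine.setMorphWeight("笑い", 1.0, 0);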