reze-engine 0.3.6 → 0.3.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
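
For orientation before the listing, here is a minimal usage sketch of the public API this version exposes, based only on the constructor options and methods visible in the diff below. The entry-point import, canvas selector, and asset paths are illustrative assumptions rather than files shipped with the package; the option values shown are simply the engine's own defaults.

    // Usage sketch (assumes the package entry point re-exports Engine; paths are placeholders)
    import { Engine } from "reze-engine";
    import { Vec3 } from "reze-mmd";

    async function main() {
      const canvas = document.querySelector("#viewer");
      const engine = new Engine(canvas, {
        cameraDistance: 26.6,                // all options are optional; defaults shown
        cameraTarget: new Vec3(0, 12.5, 0),
        bloomIntensity: 0.12,
        rimLightIntensity: 0.45,
      });
      await engine.init();                             // requests the WebGPU device, builds pipelines
      await engine.loadModel("/models/example.pmx");   // textures resolve relative to the model directory
      await engine.loadAnimation("/motions/example.vmd");
      engine.playAnimation();
      engine.runRenderLoop(() => {
        // optional per-frame callback, e.g. read engine.getStats().fps
      });
      // engine.dispose() stops the loop, detaches controls, and disconnects the resize observer
    }
    main();
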
@@ -0,0 +1,1489 @@
1
+ import { Camera } from "./camera";
2
+ import { WasmModel, Vec3 } from "reze-mmd";
3
+ import { Player } from "./player";
4
+ export class Engine {
5
+ constructor(canvas, options) {
6
+ this.cameraMatrixData = new Float32Array(36);
7
+ this.cameraDistance = 26.6;
8
+ this.cameraTarget = new Vec3(0, 12.5, 0);
9
+ this.lightData = new Float32Array(4);
10
+ this.resizeObserver = null;
11
+ this.sampleCount = 4;
12
+ // Constants
13
+ this.STENCIL_EYE_VALUE = 1;
14
+ this.BLOOM_DOWNSCALE_FACTOR = 2;
15
+ // Ambient light settings
16
+ this.ambientColor = new Vec3(1.0, 1.0, 1.0);
17
+ // Bloom settings
18
+ this.bloomThreshold = Engine.DEFAULT_BLOOM_THRESHOLD;
19
+ this.bloomIntensity = Engine.DEFAULT_BLOOM_INTENSITY;
20
+ // Rim light settings
21
+ this.rimLightIntensity = Engine.DEFAULT_RIM_LIGHT_INTENSITY;
22
+ this.model = null;
23
+ this.modelDir = "";
24
+ this.physics = null;
25
+ this.textureCache = new Map();
26
+ this.vertexBufferNeedsUpdate = false;
27
+ // Unified draw call list
28
+ this.drawCalls = [];
29
+ this.lastFpsUpdate = performance.now();
30
+ this.framesSinceLastUpdate = 0;
31
+ this.lastFrameTime = performance.now();
32
+ this.frameTimeSum = 0;
33
+ this.frameTimeCount = 0;
34
+ this.stats = {
35
+ fps: 0,
36
+ frameTime: 0,
37
+ };
38
+ this.animationFrameId = null;
39
+ this.renderLoopCallback = null;
40
+ this.player = new Player();
41
+ this.hasAnimation = false; // Set to true when loadAnimation is called
42
+ this.canvas = canvas;
43
+ if (options) {
44
+ this.ambientColor = options.ambientColor ?? new Vec3(1.0, 1.0, 1.0);
45
+ this.bloomIntensity = options.bloomIntensity ?? Engine.DEFAULT_BLOOM_INTENSITY;
46
+ this.rimLightIntensity = options.rimLightIntensity ?? Engine.DEFAULT_RIM_LIGHT_INTENSITY;
47
+ this.cameraDistance = options.cameraDistance ?? Engine.DEFAULT_CAMERA_DISTANCE;
48
+ this.cameraTarget = options.cameraTarget ?? Engine.DEFAULT_CAMERA_TARGET;
49
+ }
50
+ }
51
+ // Step 1: Get WebGPU device and context
52
+ async init() {
53
+ const adapter = await navigator.gpu?.requestAdapter();
54
+ const device = await adapter?.requestDevice();
55
+ if (!device) {
56
+ throw new Error("WebGPU is not supported in this browser.");
57
+ }
58
+ this.device = device;
59
+ const context = this.canvas.getContext("webgpu");
60
+ if (!context) {
61
+ throw new Error("Failed to get WebGPU context.");
62
+ }
63
+ this.context = context;
64
+ this.presentationFormat = navigator.gpu.getPreferredCanvasFormat();
65
+ this.context.configure({
66
+ device: this.device,
67
+ format: this.presentationFormat,
68
+ alphaMode: "premultiplied",
69
+ });
70
+ this.setupCamera();
71
+ this.setupLighting();
72
+ this.createPipelines();
73
+ this.createBloomPipelines();
74
+ this.setupResize();
75
+ }
76
+ createRenderPipeline(config) {
77
+ return this.device.createRenderPipeline({
78
+ label: config.label,
79
+ layout: config.layout,
80
+ vertex: {
81
+ module: config.shaderModule,
82
+ buffers: config.vertexBuffers,
83
+ },
84
+ fragment: config.fragmentTarget
85
+ ? {
86
+ module: config.shaderModule,
87
+ entryPoint: config.fragmentEntryPoint,
88
+ targets: [config.fragmentTarget],
89
+ }
90
+ : undefined,
91
+ primitive: { cullMode: config.cullMode ?? "none" },
92
+ depthStencil: config.depthStencil,
93
+ multisample: config.multisample ?? { count: this.sampleCount },
94
+ });
95
+ }
96
+ createPipelines() {
97
+ this.materialSampler = this.device.createSampler({
98
+ magFilter: "linear",
99
+ minFilter: "linear",
100
+ addressModeU: "repeat",
101
+ addressModeV: "repeat",
102
+ });
103
+ // Shared vertex buffer layouts
104
+ const fullVertexBuffers = [
105
+ {
106
+ arrayStride: 8 * 4,
107
+ attributes: [
108
+ { shaderLocation: 0, offset: 0, format: "float32x3" },
109
+ { shaderLocation: 1, offset: 3 * 4, format: "float32x3" },
110
+ { shaderLocation: 2, offset: 6 * 4, format: "float32x2" },
111
+ ],
112
+ },
113
+ {
114
+ arrayStride: 4 * 2,
115
+ attributes: [{ shaderLocation: 3, offset: 0, format: "uint16x4" }],
116
+ },
117
+ {
118
+ arrayStride: 4,
119
+ attributes: [{ shaderLocation: 4, offset: 0, format: "unorm8x4" }],
120
+ },
121
+ ];
122
+ const outlineVertexBuffers = [
123
+ {
124
+ arrayStride: 8 * 4,
125
+ attributes: [
126
+ { shaderLocation: 0, offset: 0, format: "float32x3" },
127
+ { shaderLocation: 1, offset: 3 * 4, format: "float32x3" },
128
+ ],
129
+ },
130
+ {
131
+ arrayStride: 4 * 2,
132
+ attributes: [{ shaderLocation: 3, offset: 0, format: "uint16x4" }],
133
+ },
134
+ {
135
+ arrayStride: 4,
136
+ attributes: [{ shaderLocation: 4, offset: 0, format: "unorm8x4" }],
137
+ },
138
+ ];
139
+ const depthOnlyVertexBuffers = [
140
+ {
141
+ arrayStride: 8 * 4,
142
+ attributes: [
143
+ { shaderLocation: 0, offset: 0, format: "float32x3" },
144
+ { shaderLocation: 1, offset: 3 * 4, format: "float32x3" },
145
+ ],
146
+ },
147
+ {
148
+ arrayStride: 4 * 2,
149
+ attributes: [{ shaderLocation: 3, offset: 0, format: "uint16x4" }],
150
+ },
151
+ {
152
+ arrayStride: 4,
153
+ attributes: [{ shaderLocation: 4, offset: 0, format: "unorm8x4" }],
154
+ },
155
+ ];
156
+ const standardBlend = {
157
+ format: this.presentationFormat,
158
+ blend: {
159
+ color: {
160
+ srcFactor: "src-alpha",
161
+ dstFactor: "one-minus-src-alpha",
162
+ operation: "add",
163
+ },
164
+ alpha: {
165
+ srcFactor: "one",
166
+ dstFactor: "one-minus-src-alpha",
167
+ operation: "add",
168
+ },
169
+ },
170
+ };
171
+ const shaderModule = this.device.createShaderModule({
172
+ label: "model shaders",
173
+ code: /* wgsl */ `
174
+ struct CameraUniforms {
175
+ view: mat4x4f,
176
+ projection: mat4x4f,
177
+ viewPos: vec3f,
178
+ _padding: f32,
179
+ };
180
+
181
+ struct LightUniforms {
182
+ ambientColor: vec3f,
183
+ };
184
+
185
+ struct MaterialUniforms {
186
+ alpha: f32,
187
+ alphaMultiplier: f32,
188
+ rimIntensity: f32,
189
+ _padding1: f32,
190
+ rimColor: vec3f,
191
+ isOverEyes: f32, // 1.0 if rendering over eyes, 0.0 otherwise
192
+ };
193
+
194
+ struct VertexOutput {
195
+ @builtin(position) position: vec4f,
196
+ @location(0) normal: vec3f,
197
+ @location(1) uv: vec2f,
198
+ @location(2) worldPos: vec3f,
199
+ };
200
+
201
+ @group(0) @binding(0) var<uniform> camera: CameraUniforms;
202
+ @group(0) @binding(1) var<uniform> light: LightUniforms;
203
+ @group(0) @binding(2) var diffuseTexture: texture_2d<f32>;
204
+ @group(0) @binding(3) var diffuseSampler: sampler;
205
+ @group(0) @binding(4) var<storage, read> skinMats: array<mat4x4f>;
206
+ @group(0) @binding(5) var<uniform> material: MaterialUniforms;
207
+
208
+ @vertex fn vs(
209
+ @location(0) position: vec3f,
210
+ @location(1) normal: vec3f,
211
+ @location(2) uv: vec2f,
212
+ @location(3) joints0: vec4<u32>,
213
+ @location(4) weights0: vec4<f32>
214
+ ) -> VertexOutput {
215
+ var output: VertexOutput;
216
+ let pos4 = vec4f(position, 1.0);
217
+
218
+ // Branchless weight normalization (avoids GPU branch divergence)
219
+ let weightSum = weights0.x + weights0.y + weights0.z + weights0.w;
220
+ let invWeightSum = select(1.0, 1.0 / weightSum, weightSum > 0.0001);
221
+ let normalizedWeights = select(vec4f(1.0, 0.0, 0.0, 0.0), weights0 * invWeightSum, weightSum > 0.0001);
222
+
223
+ var skinnedPos = vec4f(0.0, 0.0, 0.0, 0.0);
224
+ var skinnedNrm = vec3f(0.0, 0.0, 0.0);
225
+ for (var i = 0u; i < 4u; i++) {
226
+ let j = joints0[i];
227
+ let w = normalizedWeights[i];
228
+ let m = skinMats[j];
229
+ skinnedPos += (m * pos4) * w;
230
+ let r3 = mat3x3f(m[0].xyz, m[1].xyz, m[2].xyz);
231
+ skinnedNrm += (r3 * normal) * w;
232
+ }
233
+ let worldPos = skinnedPos.xyz;
234
+ output.position = camera.projection * camera.view * vec4f(worldPos, 1.0);
235
+ output.normal = normalize(skinnedNrm);
236
+ output.uv = uv;
237
+ output.worldPos = worldPos;
238
+ return output;
239
+ }
240
+
241
+ @fragment fn fs(input: VertexOutput) -> @location(0) vec4f {
242
+ // Early alpha test - discard before expensive calculations
243
+ var finalAlpha = material.alpha * material.alphaMultiplier;
244
+ if (material.isOverEyes > 0.5) {
245
+ finalAlpha *= 0.5; // Hair over eyes gets 50% alpha
246
+ }
247
+ if (finalAlpha < 0.001) {
248
+ discard;
249
+ }
250
+
251
+ let n = normalize(input.normal);
252
+ let albedo = textureSample(diffuseTexture, diffuseSampler, input.uv).rgb;
253
+
254
+ let lightAccum = light.ambientColor;
255
+
256
+ // Rim light calculation
257
+ let viewDir = normalize(camera.viewPos - input.worldPos);
258
+ var rimFactor = 1.0 - max(dot(n, viewDir), 0.0);
259
+ rimFactor = rimFactor * rimFactor; // Optimized: direct multiply instead of pow(x, 2.0)
260
+ let rimLight = material.rimColor * material.rimIntensity * rimFactor;
261
+
262
+ let color = albedo * lightAccum + rimLight;
263
+
264
+ return vec4f(color, finalAlpha);
265
+ }
266
+ `,
267
+ });
268
+ // Create explicit bind group layout for all pipelines using the main shader
269
+ this.mainBindGroupLayout = this.device.createBindGroupLayout({
270
+ label: "main material bind group layout",
271
+ entries: [
272
+ { binding: 0, visibility: GPUShaderStage.VERTEX | GPUShaderStage.FRAGMENT, buffer: { type: "uniform" } }, // camera
273
+ { binding: 1, visibility: GPUShaderStage.FRAGMENT, buffer: { type: "uniform" } }, // light
274
+ { binding: 2, visibility: GPUShaderStage.FRAGMENT, texture: {} }, // diffuseTexture
275
+ { binding: 3, visibility: GPUShaderStage.FRAGMENT, sampler: {} }, // diffuseSampler
276
+ { binding: 4, visibility: GPUShaderStage.VERTEX, buffer: { type: "read-only-storage" } }, // skinMats
277
+ { binding: 5, visibility: GPUShaderStage.FRAGMENT, buffer: { type: "uniform" } }, // material
278
+ ],
279
+ });
280
+ const mainPipelineLayout = this.device.createPipelineLayout({
281
+ label: "main pipeline layout",
282
+ bindGroupLayouts: [this.mainBindGroupLayout],
283
+ });
284
+ this.modelPipeline = this.createRenderPipeline({
285
+ label: "model pipeline",
286
+ layout: mainPipelineLayout,
287
+ shaderModule,
288
+ vertexBuffers: fullVertexBuffers,
289
+ fragmentTarget: standardBlend,
290
+ cullMode: "none",
291
+ depthStencil: {
292
+ format: "depth24plus-stencil8",
293
+ depthWriteEnabled: true,
294
+ depthCompare: "less-equal",
295
+ },
296
+ });
297
+ // Create bind group layout for outline pipelines
298
+ this.outlineBindGroupLayout = this.device.createBindGroupLayout({
299
+ label: "outline bind group layout",
300
+ entries: [
301
+ { binding: 0, visibility: GPUShaderStage.VERTEX | GPUShaderStage.FRAGMENT, buffer: { type: "uniform" } }, // camera
302
+ { binding: 1, visibility: GPUShaderStage.VERTEX | GPUShaderStage.FRAGMENT, buffer: { type: "uniform" } }, // material
303
+ { binding: 2, visibility: GPUShaderStage.VERTEX, buffer: { type: "read-only-storage" } }, // skinMats
304
+ ],
305
+ });
306
+ const outlinePipelineLayout = this.device.createPipelineLayout({
307
+ label: "outline pipeline layout",
308
+ bindGroupLayouts: [this.outlineBindGroupLayout],
309
+ });
310
+ const outlineShaderModule = this.device.createShaderModule({
311
+ label: "outline shaders",
312
+ code: /* wgsl */ `
313
+ struct CameraUniforms {
314
+ view: mat4x4f,
315
+ projection: mat4x4f,
316
+ viewPos: vec3f,
317
+ _padding: f32,
318
+ };
319
+
320
+ struct MaterialUniforms {
321
+ edgeColor: vec4f,
322
+ edgeSize: f32,
323
+ isOverEyes: f32, // 1.0 if rendering over eyes, 0.0 otherwise (for hair outlines)
324
+ _padding1: f32,
325
+ _padding2: f32,
326
+ };
327
+
328
+ @group(0) @binding(0) var<uniform> camera: CameraUniforms;
329
+ @group(0) @binding(1) var<uniform> material: MaterialUniforms;
330
+ @group(0) @binding(2) var<storage, read> skinMats: array<mat4x4f>;
331
+
332
+ struct VertexOutput {
333
+ @builtin(position) position: vec4f,
334
+ };
335
+
336
+ @vertex fn vs(
337
+ @location(0) position: vec3f,
338
+ @location(1) normal: vec3f,
339
+ @location(3) joints0: vec4<u32>,
340
+ @location(4) weights0: vec4<f32>
341
+ ) -> VertexOutput {
342
+ var output: VertexOutput;
343
+ let pos4 = vec4f(position, 1.0);
344
+
345
+ // Branchless weight normalization (avoids GPU branch divergence)
346
+ let weightSum = weights0.x + weights0.y + weights0.z + weights0.w;
347
+ let invWeightSum = select(1.0, 1.0 / weightSum, weightSum > 0.0001);
348
+ let normalizedWeights = select(vec4f(1.0, 0.0, 0.0, 0.0), weights0 * invWeightSum, weightSum > 0.0001);
349
+
350
+ var skinnedPos = vec4f(0.0, 0.0, 0.0, 0.0);
351
+ var skinnedNrm = vec3f(0.0, 0.0, 0.0);
352
+ for (var i = 0u; i < 4u; i++) {
353
+ let j = joints0[i];
354
+ let w = normalizedWeights[i];
355
+ let m = skinMats[j];
356
+ skinnedPos += (m * pos4) * w;
357
+ let r3 = mat3x3f(m[0].xyz, m[1].xyz, m[2].xyz);
358
+ skinnedNrm += (r3 * normal) * w;
359
+ }
360
+ let worldPos = skinnedPos.xyz;
361
+ let worldNormal = normalize(skinnedNrm);
362
+
363
+ // MMD invert hull: expand vertices outward along normals
364
+ let scaleFactor = 0.01;
365
+ let expandedPos = worldPos + worldNormal * material.edgeSize * scaleFactor;
366
+ output.position = camera.projection * camera.view * vec4f(expandedPos, 1.0);
367
+ return output;
368
+ }
369
+
370
+ @fragment fn fs() -> @location(0) vec4f {
371
+ var color = material.edgeColor;
372
+
373
+ if (material.isOverEyes > 0.5) {
374
+ color.a *= 0.5; // Hair outlines over eyes get 50% alpha
375
+ }
376
+
377
+ return color;
378
+ }
379
+ `,
380
+ });
381
+ this.outlinePipeline = this.createRenderPipeline({
382
+ label: "outline pipeline",
383
+ layout: outlinePipelineLayout,
384
+ shaderModule: outlineShaderModule,
385
+ vertexBuffers: outlineVertexBuffers,
386
+ fragmentTarget: standardBlend,
387
+ cullMode: "back",
388
+ depthStencil: {
389
+ format: "depth24plus-stencil8",
390
+ depthWriteEnabled: true,
391
+ depthCompare: "less-equal",
392
+ },
393
+ });
394
+ // Hair outline pipeline
395
+ this.hairOutlinePipeline = this.createRenderPipeline({
396
+ label: "hair outline pipeline",
397
+ layout: outlinePipelineLayout,
398
+ shaderModule: outlineShaderModule,
399
+ vertexBuffers: outlineVertexBuffers,
400
+ fragmentTarget: standardBlend,
401
+ cullMode: "back",
402
+ depthStencil: {
403
+ format: "depth24plus-stencil8",
404
+ depthWriteEnabled: false,
405
+ depthCompare: "less-equal",
406
+ depthBias: -0.0001,
407
+ depthBiasSlopeScale: 0.0,
408
+ depthBiasClamp: 0.0,
409
+ },
410
+ });
411
+ // Eye overlay pipeline (renders after opaque, writes stencil)
412
+ this.eyePipeline = this.createRenderPipeline({
413
+ label: "eye overlay pipeline",
414
+ layout: mainPipelineLayout,
415
+ shaderModule,
416
+ vertexBuffers: fullVertexBuffers,
417
+ fragmentTarget: standardBlend,
418
+ cullMode: "front",
419
+ depthStencil: {
420
+ format: "depth24plus-stencil8",
421
+ depthWriteEnabled: true,
422
+ depthCompare: "less-equal",
423
+ depthBias: -0.00005,
424
+ depthBiasSlopeScale: 0.0,
425
+ depthBiasClamp: 0.0,
426
+ stencilFront: {
427
+ compare: "always",
428
+ failOp: "keep",
429
+ depthFailOp: "keep",
430
+ passOp: "replace",
431
+ },
432
+ stencilBack: {
433
+ compare: "always",
434
+ failOp: "keep",
435
+ depthFailOp: "keep",
436
+ passOp: "replace",
437
+ },
438
+ },
439
+ });
440
+ // Depth-only shader for hair pre-pass (reduces overdraw by early depth rejection)
441
+ const depthOnlyShaderModule = this.device.createShaderModule({
442
+ label: "depth only shader",
443
+ code: /* wgsl */ `
444
+ struct CameraUniforms {
445
+ view: mat4x4f,
446
+ projection: mat4x4f,
447
+ viewPos: vec3f,
448
+ _padding: f32,
449
+ };
450
+
451
+ @group(0) @binding(0) var<uniform> camera: CameraUniforms;
452
+ @group(0) @binding(4) var<storage, read> skinMats: array<mat4x4f>;
453
+
454
+ @vertex fn vs(
455
+ @location(0) position: vec3f,
456
+ @location(1) normal: vec3f,
457
+ @location(3) joints0: vec4<u32>,
458
+ @location(4) weights0: vec4<f32>
459
+ ) -> @builtin(position) vec4f {
460
+ let pos4 = vec4f(position, 1.0);
461
+
462
+ // Branchless weight normalization (avoids GPU branch divergence)
463
+ let weightSum = weights0.x + weights0.y + weights0.z + weights0.w;
464
+ let invWeightSum = select(1.0, 1.0 / weightSum, weightSum > 0.0001);
465
+ let normalizedWeights = select(vec4f(1.0, 0.0, 0.0, 0.0), weights0 * invWeightSum, weightSum > 0.0001);
466
+
467
+ var skinnedPos = vec4f(0.0, 0.0, 0.0, 0.0);
468
+ for (var i = 0u; i < 4u; i++) {
469
+ let j = joints0[i];
470
+ let w = normalizedWeights[i];
471
+ let m = skinMats[j];
472
+ skinnedPos += (m * pos4) * w;
473
+ }
474
+ let worldPos = skinnedPos.xyz;
475
+ let clipPos = camera.projection * camera.view * vec4f(worldPos, 1.0);
476
+ return clipPos;
477
+ }
478
+
479
+ @fragment fn fs() -> @location(0) vec4f {
480
+ return vec4f(0.0, 0.0, 0.0, 0.0); // Transparent - color writes disabled via writeMask
481
+ }
482
+ `,
483
+ });
484
+ // Hair depth pre-pass pipeline: depth-only with color writes disabled to eliminate overdraw
485
+ this.hairDepthPipeline = this.createRenderPipeline({
486
+ label: "hair depth pre-pass",
487
+ layout: mainPipelineLayout,
488
+ shaderModule: depthOnlyShaderModule,
489
+ vertexBuffers: depthOnlyVertexBuffers,
490
+ fragmentTarget: {
491
+ format: this.presentationFormat,
492
+ writeMask: 0,
493
+ },
494
+ fragmentEntryPoint: "fs",
495
+ cullMode: "front",
496
+ depthStencil: {
497
+ format: "depth24plus-stencil8",
498
+ depthWriteEnabled: true,
499
+ depthCompare: "less-equal",
500
+ depthBias: 0.0,
501
+ depthBiasSlopeScale: 0.0,
502
+ depthBiasClamp: 0.0,
503
+ },
504
+ });
505
+ // Hair pipelines for rendering over eyes vs non-eyes (only differ in stencil compare mode)
506
+ const createHairPipeline = (isOverEyes) => {
507
+ return this.createRenderPipeline({
508
+ label: `hair pipeline (${isOverEyes ? "over eyes" : "over non-eyes"})`,
509
+ layout: mainPipelineLayout,
510
+ shaderModule,
511
+ vertexBuffers: fullVertexBuffers,
512
+ fragmentTarget: standardBlend,
513
+ cullMode: "front",
514
+ depthStencil: {
515
+ format: "depth24plus-stencil8",
516
+ depthWriteEnabled: false,
517
+ depthCompare: "less-equal",
518
+ stencilFront: {
519
+ compare: isOverEyes ? "equal" : "not-equal",
520
+ failOp: "keep",
521
+ depthFailOp: "keep",
522
+ passOp: "keep",
523
+ },
524
+ stencilBack: {
525
+ compare: isOverEyes ? "equal" : "not-equal",
526
+ failOp: "keep",
527
+ depthFailOp: "keep",
528
+ passOp: "keep",
529
+ },
530
+ },
531
+ });
532
+ };
533
+ this.hairPipelineOverEyes = createHairPipeline(true);
534
+ this.hairPipelineOverNonEyes = createHairPipeline(false);
535
+ }
536
+ // Create bloom post-processing pipelines
537
+ createBloomPipelines() {
538
+ // Bloom extraction shader (extracts bright areas)
539
+ const bloomExtractShader = this.device.createShaderModule({
540
+ label: "bloom extract",
541
+ code: /* wgsl */ `
542
+ struct VertexOutput {
543
+ @builtin(position) position: vec4f,
544
+ @location(0) uv: vec2f,
545
+ };
546
+
547
+ @vertex fn vs(@builtin(vertex_index) vertexIndex: u32) -> VertexOutput {
548
+ var output: VertexOutput;
549
+ // Generate fullscreen quad from vertex index
550
+ let x = f32((vertexIndex << 1u) & 2u) * 2.0 - 1.0;
551
+ let y = f32(vertexIndex & 2u) * 2.0 - 1.0;
552
+ output.position = vec4f(x, y, 0.0, 1.0);
553
+ output.uv = vec2f(x * 0.5 + 0.5, 1.0 - (y * 0.5 + 0.5));
554
+ return output;
555
+ }
556
+
557
+ struct BloomExtractUniforms {
558
+ threshold: f32,
559
+ _padding1: f32,
560
+ _padding2: f32,
561
+ _padding3: f32,
562
+ _padding4: f32,
563
+ _padding5: f32,
564
+ _padding6: f32,
565
+ _padding7: f32,
566
+ };
567
+
568
+ @group(0) @binding(0) var inputTexture: texture_2d<f32>;
569
+ @group(0) @binding(1) var inputSampler: sampler;
570
+ @group(0) @binding(2) var<uniform> extractUniforms: BloomExtractUniforms;
571
+
572
+ @fragment fn fs(input: VertexOutput) -> @location(0) vec4f {
573
+ let color = textureSample(inputTexture, inputSampler, input.uv);
574
+ // Extract bright areas above threshold
575
+ let threshold = extractUniforms.threshold;
576
+ let bloom = max(vec3f(0.0), color.rgb - vec3f(threshold)) / max(0.001, 1.0 - threshold);
577
+ return vec4f(bloom, color.a);
578
+ }
579
+ `,
580
+ });
581
+ // Bloom blur shader (Gaussian blur - can be used for both horizontal and vertical passes)
582
+ const bloomBlurShader = this.device.createShaderModule({
583
+ label: "bloom blur",
584
+ code: /* wgsl */ `
585
+ struct VertexOutput {
586
+ @builtin(position) position: vec4f,
587
+ @location(0) uv: vec2f,
588
+ };
589
+
590
+ @vertex fn vs(@builtin(vertex_index) vertexIndex: u32) -> VertexOutput {
591
+ var output: VertexOutput;
592
+ let x = f32((vertexIndex << 1u) & 2u) * 2.0 - 1.0;
593
+ let y = f32(vertexIndex & 2u) * 2.0 - 1.0;
594
+ output.position = vec4f(x, y, 0.0, 1.0);
595
+ output.uv = vec2f(x * 0.5 + 0.5, 1.0 - (y * 0.5 + 0.5));
596
+ return output;
597
+ }
598
+
599
+ struct BlurUniforms {
600
+ direction: vec2f,
601
+ _padding1: f32,
602
+ _padding2: f32,
603
+ _padding3: f32,
604
+ _padding4: f32,
605
+ _padding5: f32,
606
+ _padding6: f32,
607
+ };
608
+
609
+ @group(0) @binding(0) var inputTexture: texture_2d<f32>;
610
+ @group(0) @binding(1) var inputSampler: sampler;
611
+ @group(0) @binding(2) var<uniform> blurUniforms: BlurUniforms;
612
+
613
+ // 3-tap Gaussian blur using the bilinear filtering trick (40% fewer texture fetches than 5-tap)
614
+ @fragment fn fs(input: VertexOutput) -> @location(0) vec4f {
615
+ let texelSize = 1.0 / vec2f(textureDimensions(inputTexture));
616
+
617
+ // Bilinear optimization: leverage hardware filtering to sample between pixels
618
+ // Original 5-tap: weights [0.06136, 0.24477, 0.38774, 0.24477, 0.06136] at offsets [-2, -1, 0, 1, 2]
619
+ // Optimized 3-tap: combine adjacent samples using weighted offsets
620
+ let weight0 = 0.38774; // Center sample
621
+ let weight1 = 0.24477 + 0.06136; // Combined outer samples = 0.30613
622
+ let offset1 = (0.24477 * 1.0 + 0.06136 * 2.0) / weight1; // Weighted position = 1.2
623
+
624
+ var result = textureSample(inputTexture, inputSampler, input.uv) * weight0;
625
+ let offsetVec = offset1 * texelSize * blurUniforms.direction;
626
+ result += textureSample(inputTexture, inputSampler, input.uv + offsetVec) * weight1;
627
+ result += textureSample(inputTexture, inputSampler, input.uv - offsetVec) * weight1;
628
+
629
+ return result;
630
+ }
631
+ `,
632
+ });
633
+ // Bloom composition shader (combines original scene with bloom)
634
+ const bloomComposeShader = this.device.createShaderModule({
635
+ label: "bloom compose",
636
+ code: /* wgsl */ `
637
+ struct VertexOutput {
638
+ @builtin(position) position: vec4f,
639
+ @location(0) uv: vec2f,
640
+ };
641
+
642
+ @vertex fn vs(@builtin(vertex_index) vertexIndex: u32) -> VertexOutput {
643
+ var output: VertexOutput;
644
+ let x = f32((vertexIndex << 1u) & 2u) * 2.0 - 1.0;
645
+ let y = f32(vertexIndex & 2u) * 2.0 - 1.0;
646
+ output.position = vec4f(x, y, 0.0, 1.0);
647
+ output.uv = vec2f(x * 0.5 + 0.5, 1.0 - (y * 0.5 + 0.5));
648
+ return output;
649
+ }
650
+
651
+ struct BloomComposeUniforms {
652
+ intensity: f32,
653
+ _padding1: f32,
654
+ _padding2: f32,
655
+ _padding3: f32,
656
+ _padding4: f32,
657
+ _padding5: f32,
658
+ _padding6: f32,
659
+ _padding7: f32,
660
+ };
661
+
662
+ @group(0) @binding(0) var sceneTexture: texture_2d<f32>;
663
+ @group(0) @binding(1) var sceneSampler: sampler;
664
+ @group(0) @binding(2) var bloomTexture: texture_2d<f32>;
665
+ @group(0) @binding(3) var bloomSampler: sampler;
666
+ @group(0) @binding(4) var<uniform> composeUniforms: BloomComposeUniforms;
667
+
668
+ @fragment fn fs(input: VertexOutput) -> @location(0) vec4f {
669
+ let scene = textureSample(sceneTexture, sceneSampler, input.uv);
670
+ let bloom = textureSample(bloomTexture, bloomSampler, input.uv);
671
+ // Additive blending with intensity control
672
+ let result = scene.rgb + bloom.rgb * composeUniforms.intensity;
673
+ return vec4f(result, scene.a);
674
+ }
675
+ `,
676
+ });
677
+ // Create uniform buffer for blur direction (32 bytes, padded to match the WGSL struct)
678
+ const blurDirectionBuffer = this.device.createBuffer({
679
+ label: "blur direction",
680
+ size: 32, // 8 floats to match the padded BlurUniforms struct in the blur shader
681
+ usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
682
+ });
683
+ // Create uniform buffer for bloom intensity (32 bytes, padded to match the WGSL struct)
684
+ const bloomIntensityBuffer = this.device.createBuffer({
685
+ label: "bloom intensity",
686
+ size: 32, // 8 floats to match the padded BloomComposeUniforms struct in the compose shader
687
+ usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
688
+ });
689
+ // Create uniform buffer for bloom threshold (32 bytes, padded to match the WGSL struct)
690
+ const bloomThresholdBuffer = this.device.createBuffer({
691
+ label: "bloom threshold",
692
+ size: 32, // 8 floats to match the padded BloomExtractUniforms struct in the extract shader
693
+ usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
694
+ });
695
+ // Set default bloom values
696
+ const intensityData = new Float32Array(8); // f32 + 7 padding floats = 8 floats = 32 bytes
697
+ intensityData[0] = this.bloomIntensity;
698
+ this.device.queue.writeBuffer(bloomIntensityBuffer, 0, intensityData);
699
+ const thresholdData = new Float32Array(8); // f32 + 7 padding floats = 8 floats = 32 bytes
700
+ thresholdData[0] = this.bloomThreshold;
701
+ this.device.queue.writeBuffer(bloomThresholdBuffer, 0, thresholdData);
702
+ // Create linear sampler for post-processing
703
+ const linearSampler = this.device.createSampler({
704
+ magFilter: "linear",
705
+ minFilter: "linear",
706
+ addressModeU: "clamp-to-edge",
707
+ addressModeV: "clamp-to-edge",
708
+ });
709
+ // Bloom extraction pipeline
710
+ this.bloomExtractPipeline = this.device.createRenderPipeline({
711
+ label: "bloom extract",
712
+ layout: "auto",
713
+ vertex: {
714
+ module: bloomExtractShader,
715
+ entryPoint: "vs",
716
+ },
717
+ fragment: {
718
+ module: bloomExtractShader,
719
+ entryPoint: "fs",
720
+ targets: [{ format: this.presentationFormat }],
721
+ },
722
+ primitive: { topology: "triangle-list" },
723
+ });
724
+ // Bloom blur pipeline
725
+ this.bloomBlurPipeline = this.device.createRenderPipeline({
726
+ label: "bloom blur",
727
+ layout: "auto",
728
+ vertex: {
729
+ module: bloomBlurShader,
730
+ entryPoint: "vs",
731
+ },
732
+ fragment: {
733
+ module: bloomBlurShader,
734
+ entryPoint: "fs",
735
+ targets: [{ format: this.presentationFormat }],
736
+ },
737
+ primitive: { topology: "triangle-list" },
738
+ });
739
+ // Bloom composition pipeline
740
+ this.bloomComposePipeline = this.device.createRenderPipeline({
741
+ label: "bloom compose",
742
+ layout: "auto",
743
+ vertex: {
744
+ module: bloomComposeShader,
745
+ entryPoint: "vs",
746
+ },
747
+ fragment: {
748
+ module: bloomComposeShader,
749
+ entryPoint: "fs",
750
+ targets: [{ format: this.presentationFormat }],
751
+ },
752
+ primitive: { topology: "triangle-list" },
753
+ });
754
+ // Store buffers and sampler for later use
755
+ this.blurDirectionBuffer = blurDirectionBuffer;
756
+ this.bloomIntensityBuffer = bloomIntensityBuffer;
757
+ this.bloomThresholdBuffer = bloomThresholdBuffer;
758
+ this.linearSampler = linearSampler;
759
+ }
760
+ setupBloom(width, height) {
761
+ const bloomWidth = Math.floor(width / this.BLOOM_DOWNSCALE_FACTOR);
762
+ const bloomHeight = Math.floor(height / this.BLOOM_DOWNSCALE_FACTOR);
763
+ this.bloomExtractTexture = this.device.createTexture({
764
+ label: "bloom extract",
765
+ size: [bloomWidth, bloomHeight],
766
+ format: this.presentationFormat,
767
+ usage: GPUTextureUsage.RENDER_ATTACHMENT | GPUTextureUsage.TEXTURE_BINDING,
768
+ });
769
+ this.bloomBlurTexture1 = this.device.createTexture({
770
+ label: "bloom blur 1",
771
+ size: [bloomWidth, bloomHeight],
772
+ format: this.presentationFormat,
773
+ usage: GPUTextureUsage.RENDER_ATTACHMENT | GPUTextureUsage.TEXTURE_BINDING,
774
+ });
775
+ this.bloomBlurTexture2 = this.device.createTexture({
776
+ label: "bloom blur 2",
777
+ size: [bloomWidth, bloomHeight],
778
+ format: this.presentationFormat,
779
+ usage: GPUTextureUsage.RENDER_ATTACHMENT | GPUTextureUsage.TEXTURE_BINDING,
780
+ });
781
+ // Create bloom bind groups
782
+ this.bloomExtractBindGroup = this.device.createBindGroup({
783
+ layout: this.bloomExtractPipeline.getBindGroupLayout(0),
784
+ entries: [
785
+ { binding: 0, resource: this.sceneRenderTexture.createView() },
786
+ { binding: 1, resource: this.linearSampler },
787
+ { binding: 2, resource: { buffer: this.bloomThresholdBuffer } },
788
+ ],
789
+ });
790
+ this.bloomBlurHBindGroup = this.device.createBindGroup({
791
+ layout: this.bloomBlurPipeline.getBindGroupLayout(0),
792
+ entries: [
793
+ { binding: 0, resource: this.bloomExtractTexture.createView() },
794
+ { binding: 1, resource: this.linearSampler },
795
+ { binding: 2, resource: { buffer: this.blurDirectionBuffer } },
796
+ ],
797
+ });
798
+ this.bloomBlurVBindGroup = this.device.createBindGroup({
799
+ layout: this.bloomBlurPipeline.getBindGroupLayout(0),
800
+ entries: [
801
+ { binding: 0, resource: this.bloomBlurTexture1.createView() },
802
+ { binding: 1, resource: this.linearSampler },
803
+ { binding: 2, resource: { buffer: this.blurDirectionBuffer } },
804
+ ],
805
+ });
806
+ this.bloomComposeBindGroup = this.device.createBindGroup({
807
+ layout: this.bloomComposePipeline.getBindGroupLayout(0),
808
+ entries: [
809
+ { binding: 0, resource: this.sceneRenderTexture.createView() },
810
+ { binding: 1, resource: this.linearSampler },
811
+ { binding: 2, resource: this.bloomBlurTexture2.createView() },
812
+ { binding: 3, resource: this.linearSampler },
813
+ { binding: 4, resource: { buffer: this.bloomIntensityBuffer } },
814
+ ],
815
+ });
816
+ }
817
+ // Step 3: Setup canvas resize handling
818
+ setupResize() {
819
+ this.resizeObserver = new ResizeObserver(() => this.handleResize());
820
+ this.resizeObserver.observe(this.canvas);
821
+ this.handleResize();
822
+ }
823
+ handleResize() {
824
+ const displayWidth = this.canvas.clientWidth;
825
+ const displayHeight = this.canvas.clientHeight;
826
+ const dpr = window.devicePixelRatio || 1;
827
+ const width = Math.floor(displayWidth * dpr);
828
+ const height = Math.floor(displayHeight * dpr);
829
+ if (!this.multisampleTexture || this.canvas.width !== width || this.canvas.height !== height) {
830
+ this.canvas.width = width;
831
+ this.canvas.height = height;
832
+ this.multisampleTexture = this.device.createTexture({
833
+ label: "multisample render target",
834
+ size: [width, height],
835
+ sampleCount: this.sampleCount,
836
+ format: this.presentationFormat,
837
+ usage: GPUTextureUsage.RENDER_ATTACHMENT,
838
+ });
839
+ this.depthTexture = this.device.createTexture({
840
+ label: "depth texture",
841
+ size: [width, height],
842
+ sampleCount: this.sampleCount,
843
+ format: "depth24plus-stencil8",
844
+ usage: GPUTextureUsage.RENDER_ATTACHMENT,
845
+ });
846
+ // Create scene render texture (non-multisampled for post-processing)
847
+ this.sceneRenderTexture = this.device.createTexture({
848
+ label: "scene render texture",
849
+ size: [width, height],
850
+ format: this.presentationFormat,
851
+ usage: GPUTextureUsage.RENDER_ATTACHMENT | GPUTextureUsage.TEXTURE_BINDING,
852
+ });
853
+ // Setup bloom textures and bind groups
854
+ this.setupBloom(width, height);
855
+ const depthTextureView = this.depthTexture.createView();
856
+ // Cache the scene render texture view (only recreate on resize)
857
+ this.sceneRenderTextureView = this.sceneRenderTexture.createView();
858
+ // Render scene to texture instead of directly to canvas
859
+ const colorAttachment = this.sampleCount > 1
860
+ ? {
861
+ view: this.multisampleTexture.createView(),
862
+ resolveTarget: this.sceneRenderTextureView,
863
+ clearValue: { r: 0, g: 0, b: 0, a: 0 },
864
+ loadOp: "clear",
865
+ storeOp: "store",
866
+ }
867
+ : {
868
+ view: this.sceneRenderTextureView,
869
+ clearValue: { r: 0, g: 0, b: 0, a: 0 },
870
+ loadOp: "clear",
871
+ storeOp: "store",
872
+ };
873
+ this.renderPassDescriptor = {
874
+ label: "renderPass",
875
+ colorAttachments: [colorAttachment],
876
+ depthStencilAttachment: {
877
+ view: depthTextureView,
878
+ depthClearValue: 1.0,
879
+ depthLoadOp: "clear",
880
+ depthStoreOp: "store",
881
+ stencilClearValue: 0,
882
+ stencilLoadOp: "clear",
883
+ stencilStoreOp: "discard", // Discard stencil after frame to save bandwidth (we only use it during rendering)
884
+ },
885
+ };
886
+ this.camera.aspect = width / height;
887
+ }
888
+ }
889
+ // Step 4: Create camera and uniform buffer
890
+ setupCamera() {
891
+ this.cameraUniformBuffer = this.device.createBuffer({
892
+ label: "camera uniforms",
893
+ size: 40 * 4,
894
+ usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
895
+ });
896
+ this.camera = new Camera(Math.PI, Math.PI / 2.5, this.cameraDistance, this.cameraTarget);
897
+ this.camera.aspect = this.canvas.width / this.canvas.height;
898
+ this.camera.attachControl(this.canvas);
899
+ }
900
+ // Step 5: Create lighting buffers
901
+ setupLighting() {
902
+ this.lightUniformBuffer = this.device.createBuffer({
903
+ label: "light uniforms",
904
+ size: 4 * 4, // 4 floats: ambientColor vec3f (3) + padding (1)
905
+ usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
906
+ });
907
+ this.setAmbientColor(this.ambientColor);
908
+ this.device.queue.writeBuffer(this.lightUniformBuffer, 0, this.lightData);
909
+ }
910
+ setAmbientColor(color) {
911
+ // Layout: ambientColor (0-2), padding (3)
912
+ this.lightData[0] = color.x;
913
+ this.lightData[1] = color.y;
914
+ this.lightData[2] = color.z;
915
+ this.lightData[3] = 0.0; // Padding for vec3f alignment
+ // Push the change to the GPU when the light buffer already exists, so runtime
+ // calls to setAmbientColor take effect immediately.
+ if (this.lightUniformBuffer) {
+ this.device.queue.writeBuffer(this.lightUniformBuffer, 0, this.lightData);
+ }
916
+ }
917
+ async loadAnimation(url) {
918
+ await this.player.loadVmd(url);
919
+ this.hasAnimation = true;
920
+ }
921
+ playAnimation() {
922
+ if (!this.hasAnimation || !this.model)
923
+ return;
924
+ // Start playback (or resume if paused)
925
+ this.player.play();
926
+ }
927
+ stopAnimation() {
928
+ this.player.stop();
929
+ }
930
+ pauseAnimation() {
931
+ this.player.pause();
932
+ }
933
+ seekAnimation(time) {
934
+ if (!this.model || !this.hasAnimation)
935
+ return;
936
+ this.player.seek(time);
937
+ }
938
+ getAnimationProgress() {
939
+ return this.player.getProgress();
940
+ }
941
+ getStats() {
942
+ return { ...this.stats };
943
+ }
944
+ runRenderLoop(callback) {
945
+ this.renderLoopCallback = callback || null;
946
+ const loop = () => {
947
+ this.render();
948
+ if (this.renderLoopCallback) {
949
+ this.renderLoopCallback();
950
+ }
951
+ this.animationFrameId = requestAnimationFrame(loop);
952
+ };
953
+ this.animationFrameId = requestAnimationFrame(loop);
954
+ }
955
+ stopRenderLoop() {
956
+ if (this.animationFrameId !== null) {
957
+ cancelAnimationFrame(this.animationFrameId);
958
+ this.animationFrameId = null;
959
+ }
960
+ this.renderLoopCallback = null;
961
+ }
962
+ dispose() {
963
+ this.stopRenderLoop();
964
+ this.stopAnimation();
965
+ if (this.camera)
966
+ this.camera.detachControl();
967
+ if (this.resizeObserver) {
968
+ this.resizeObserver.disconnect();
969
+ this.resizeObserver = null;
970
+ }
971
+ }
972
+ // Step 6: Load PMX model file
973
+ async loadModel(path) {
974
+ const pathParts = path.split("/");
975
+ pathParts.pop();
976
+ const dir = pathParts.join("/") + "/";
977
+ this.modelDir = dir;
978
+ const wasmModel = new WasmModel(await fetch(path)
979
+ .then((res) => res.arrayBuffer())
980
+ .then((arrayBuffer) => new Uint8Array(arrayBuffer)));
981
+ this.model = wasmModel;
982
+ await this.setupModelBuffers();
983
+ }
984
+ rotateBones(bones, rotations, durationMs) {
985
+ for (let i = 0; i < bones.length; i++) {
986
+ const bone = bones[i];
987
+ const rotation = rotations[i];
988
+ this.model?.rotateBone(bone, rotation);
989
+ }
990
+ }
991
+ // moveBones now takes relative translations (VMD-style) by default
992
+ moveBones(bones, relativeTranslations, durationMs) {
993
+ for (let i = 0; i < bones.length; i++) {
994
+ const bone = bones[i];
995
+ const translation = relativeTranslations[i];
996
+ this.model?.moveBone(bone, translation);
997
+ }
998
+ }
999
+ setMorphWeight(name, weight, durationMs) {
1000
+ if (!this.model)
1001
+ return;
1002
+ this.model.setMorphWeight(name, weight);
1003
+ if (!durationMs || durationMs === 0) {
1004
+ this.vertexBufferNeedsUpdate = true;
1005
+ }
1006
+ }
1007
+ updateVertexBuffer() {
1008
+ if (!this.model || !this.vertexBuffer)
1009
+ return;
1010
+ const vertices = this.model.getVertices();
1011
+ if (!vertices || vertices.length === 0)
1012
+ return;
1013
+ this.device.queue.writeBuffer(this.vertexBuffer, 0, new Float32Array(vertices));
1014
+ }
1015
+ // Step 7: Create vertex, index, and joint buffers
1016
+ async setupModelBuffers() {
1017
+ if (!this.model)
1018
+ return;
1019
+ const vertices = this.model.getVertices();
1020
+ const skinning = this.model.getSkinning();
1021
+ const boneCount = this.model.getBones().length;
1022
+ this.vertexBuffer = this.device.createBuffer({
1023
+ label: "model vertex buffer",
1024
+ size: vertices.byteLength,
1025
+ usage: GPUBufferUsage.VERTEX | GPUBufferUsage.COPY_DST,
1026
+ });
1027
+ this.device.queue.writeBuffer(this.vertexBuffer, 0, new Float32Array(vertices));
1028
+ this.jointsBuffer = this.device.createBuffer({
1029
+ label: "joints buffer",
1030
+ size: skinning.joints.byteLength,
1031
+ usage: GPUBufferUsage.VERTEX | GPUBufferUsage.COPY_DST,
1032
+ });
1033
+ this.device.queue.writeBuffer(this.jointsBuffer, 0, skinning.joints.buffer, skinning.joints.byteOffset, skinning.joints.byteLength);
1034
+ this.weightsBuffer = this.device.createBuffer({
1035
+ label: "weights buffer",
1036
+ size: skinning.weights.byteLength,
1037
+ usage: GPUBufferUsage.VERTEX | GPUBufferUsage.COPY_DST,
1038
+ });
1039
+ this.device.queue.writeBuffer(this.weightsBuffer, 0, skinning.weights.buffer, skinning.weights.byteOffset, skinning.weights.byteLength);
1040
+ const matrixSize = boneCount * 16 * 4;
1041
+ this.skinMatrixBuffer = this.device.createBuffer({
1042
+ label: "skin matrices",
1043
+ size: Math.max(256, matrixSize),
1044
+ usage: GPUBufferUsage.STORAGE | GPUBufferUsage.VERTEX | GPUBufferUsage.COPY_DST,
1045
+ });
1046
+ const indices = this.model.getIndices();
1047
+ if (indices) {
1048
+ this.indexBuffer = this.device.createBuffer({
1049
+ label: "model index buffer",
1050
+ size: indices.byteLength,
1051
+ usage: GPUBufferUsage.INDEX | GPUBufferUsage.COPY_DST,
1052
+ });
1053
+ this.device.queue.writeBuffer(this.indexBuffer, 0, new Uint32Array(indices));
1054
+ }
1055
+ else {
1056
+ throw new Error("Model has no index buffer");
1057
+ }
1058
+ await this.setupMaterials();
1059
+ }
1060
+ async setupMaterials() {
1061
+ if (!this.model)
1062
+ return;
1063
+ const materials = this.model.getMaterials();
1064
+ if (materials.length === 0) {
1065
+ throw new Error("Model has no materials");
1066
+ }
1067
+ const textures = this.model.getTextures();
1068
+ const loadTextureByIndex = async (texIndex) => {
1069
+ if (texIndex < 0 || texIndex >= textures.length) {
1070
+ return null;
1071
+ }
1072
+ const path = this.modelDir + textures[texIndex].path;
1073
+ const texture = await this.createTextureFromPath(path);
1074
+ return texture;
1075
+ };
1076
+ this.drawCalls = [];
1077
+ let currentIndexOffset = 0;
1078
+ for (const mat of materials) {
1079
+ const indexCount = mat.vertexCount;
1080
+ if (indexCount === 0)
1081
+ continue;
1082
+ const diffuseTexture = await loadTextureByIndex(mat.diffuseTextureIndex);
1083
+ if (!diffuseTexture)
1084
+ throw new Error(`Material "${mat.name}" has no diffuse texture`);
1085
+ const materialAlpha = mat.diffuse[3];
1086
+ const isTransparent = materialAlpha < 1.0 - Engine.TRANSPARENCY_EPSILON;
1087
+ const materialUniformBuffer = this.createMaterialUniformBuffer(mat.name, materialAlpha, 0.0);
1088
+ // Create bind groups using the shared bind group layout - all pipelines built on the main shader (model, eye, hair depth pre-pass, hair over eyes / over non-eyes) use the same layout
1089
+ const bindGroup = this.device.createBindGroup({
1090
+ label: `material bind group: ${mat.name}`,
1091
+ layout: this.mainBindGroupLayout,
1092
+ entries: [
1093
+ { binding: 0, resource: { buffer: this.cameraUniformBuffer } },
1094
+ { binding: 1, resource: { buffer: this.lightUniformBuffer } },
1095
+ { binding: 2, resource: diffuseTexture.createView() },
1096
+ { binding: 3, resource: this.materialSampler },
1097
+ { binding: 4, resource: { buffer: this.skinMatrixBuffer } },
1098
+ { binding: 5, resource: { buffer: materialUniformBuffer } },
1099
+ ],
1100
+ });
1101
+ if (indexCount > 0) {
1102
+ if (mat.isEye) {
1103
+ this.drawCalls.push({ type: "eye", count: indexCount, firstIndex: currentIndexOffset, bindGroup });
1104
+ }
1105
+ else if (mat.isHair) {
1106
+ // Hair materials: create separate bind groups for over-eyes vs over-non-eyes
1107
+ const createHairBindGroup = (isOverEyes) => {
1108
+ const buffer = this.createMaterialUniformBuffer(`${mat.name} (${isOverEyes ? "over eyes" : "over non-eyes"})`, materialAlpha, isOverEyes ? 1.0 : 0.0);
1109
+ return this.device.createBindGroup({
1110
+ label: `material bind group (${isOverEyes ? "over eyes" : "over non-eyes"}): ${mat.name}`,
1111
+ layout: this.mainBindGroupLayout,
1112
+ entries: [
1113
+ { binding: 0, resource: { buffer: this.cameraUniformBuffer } },
1114
+ { binding: 1, resource: { buffer: this.lightUniformBuffer } },
1115
+ { binding: 2, resource: diffuseTexture.createView() },
1116
+ { binding: 3, resource: this.materialSampler },
1117
+ { binding: 4, resource: { buffer: this.skinMatrixBuffer } },
1118
+ { binding: 5, resource: { buffer: buffer } },
1119
+ ],
1120
+ });
1121
+ };
1122
+ const bindGroupOverEyes = createHairBindGroup(true);
1123
+ const bindGroupOverNonEyes = createHairBindGroup(false);
1124
+ this.drawCalls.push({
1125
+ type: "hair-over-eyes",
1126
+ count: indexCount,
1127
+ firstIndex: currentIndexOffset,
1128
+ bindGroup: bindGroupOverEyes,
1129
+ });
1130
+ this.drawCalls.push({
1131
+ type: "hair-over-non-eyes",
1132
+ count: indexCount,
1133
+ firstIndex: currentIndexOffset,
1134
+ bindGroup: bindGroupOverNonEyes,
1135
+ });
1136
+ }
1137
+ else if (isTransparent) {
1138
+ this.drawCalls.push({ type: "transparent", count: indexCount, firstIndex: currentIndexOffset, bindGroup });
1139
+ }
1140
+ else {
1141
+ this.drawCalls.push({ type: "opaque", count: indexCount, firstIndex: currentIndexOffset, bindGroup });
1142
+ }
1143
+ }
1144
+ // Edge flag is at bit 4 (0x10) in PMX format
1145
+ if ((mat.edgeFlag & 0x10) !== 0 && mat.edgeSize > 0) {
1146
+ const materialUniformData = new Float32Array([
1147
+ mat.edgeColor[0],
1148
+ mat.edgeColor[1],
1149
+ mat.edgeColor[2],
1150
+ mat.edgeColor[3],
1151
+ mat.edgeSize,
1152
+ 0,
1153
+ 0,
1154
+ 0,
1155
+ ]);
1156
+ const materialUniformBuffer = this.createUniformBuffer(`outline material uniform: ${mat.name}`, materialUniformData);
1157
+ const outlineBindGroup = this.device.createBindGroup({
1158
+ label: `outline bind group: ${mat.name}`,
1159
+ layout: this.outlineBindGroupLayout,
1160
+ entries: [
1161
+ { binding: 0, resource: { buffer: this.cameraUniformBuffer } },
1162
+ { binding: 1, resource: { buffer: materialUniformBuffer } },
1163
+ { binding: 2, resource: { buffer: this.skinMatrixBuffer } },
1164
+ ],
1165
+ });
1166
+ if (indexCount > 0) {
1167
+ const outlineType = mat.isEye
1168
+ ? "eye-outline"
1169
+ : mat.isHair
1170
+ ? "hair-outline"
1171
+ : isTransparent
1172
+ ? "transparent-outline"
1173
+ : "opaque-outline";
1174
+ this.drawCalls.push({
1175
+ type: outlineType,
1176
+ count: indexCount,
1177
+ firstIndex: currentIndexOffset,
1178
+ bindGroup: outlineBindGroup,
1179
+ });
1180
+ }
1181
+ }
1182
+ currentIndexOffset += indexCount;
1183
+ }
1184
+ }
1185
+ createMaterialUniformBuffer(label, alpha, isOverEyes) {
1186
+ const data = new Float32Array(8);
1187
+ data.set([alpha, 1.0, this.rimLightIntensity, 0.0, 1.0, 1.0, 1.0, isOverEyes]);
1188
+ return this.createUniformBuffer(`material uniform: ${label}`, data);
1189
+ }
1190
+ createUniformBuffer(label, data) {
1191
+ const buffer = this.device.createBuffer({
1192
+ label,
1193
+ size: data.byteLength,
1194
+ usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
1195
+ });
1196
+ this.device.queue.writeBuffer(buffer, 0, data);
1197
+ return buffer;
1198
+ }
1199
+ async createTextureFromPath(path) {
1200
+ const cached = this.textureCache.get(path);
1201
+ if (cached) {
1202
+ return cached;
1203
+ }
1204
+ try {
1205
+ const response = await fetch(path);
1206
+ if (!response.ok) {
1207
+ throw new Error(`HTTP ${response.status}: ${response.statusText}`);
1208
+ }
1209
+ const imageBitmap = await createImageBitmap(await response.blob(), {
1210
+ premultiplyAlpha: "none",
1211
+ colorSpaceConversion: "none",
1212
+ });
1213
+ const texture = this.device.createTexture({
1214
+ label: `texture: ${path}`,
1215
+ size: [imageBitmap.width, imageBitmap.height],
1216
+ format: "rgba8unorm",
1217
+ usage: GPUTextureUsage.TEXTURE_BINDING | GPUTextureUsage.COPY_DST | GPUTextureUsage.RENDER_ATTACHMENT,
1218
+ });
1219
+ this.device.queue.copyExternalImageToTexture({ source: imageBitmap }, { texture }, [
1220
+ imageBitmap.width,
1221
+ imageBitmap.height,
1222
+ ]);
1223
+ this.textureCache.set(path, texture);
1224
+ return texture;
1225
+ }
1226
+ catch {
1227
+ return null;
1228
+ }
1229
+ }
1230
+ // Helper: Render eyes with stencil writing (for post-alpha-eye effect)
1231
+ renderEyes(pass) {
1232
+ pass.setPipeline(this.eyePipeline);
1233
+ pass.setStencilReference(this.STENCIL_EYE_VALUE);
1234
+ for (const draw of this.drawCalls) {
1235
+ if (draw.type === "eye") {
1236
+ pass.setBindGroup(0, draw.bindGroup);
1237
+ pass.drawIndexed(draw.count, 1, draw.firstIndex, 0, 0);
1238
+ }
1239
+ }
1240
+ }
1241
+ // Helper: Render hair with post-alpha-eye effect (depth pre-pass + stencil-based shading + outlines)
1242
+ renderHair(pass) {
1243
+ // Hair depth pre-pass (reduces overdraw via early depth rejection)
1244
+ const hasHair = this.drawCalls.some((d) => d.type === "hair-over-eyes" || d.type === "hair-over-non-eyes");
1245
+ if (hasHair) {
1246
+ pass.setPipeline(this.hairDepthPipeline);
1247
+ for (const draw of this.drawCalls) {
1248
+ if (draw.type === "hair-over-eyes" || draw.type === "hair-over-non-eyes") {
1249
+ pass.setBindGroup(0, draw.bindGroup);
1250
+ pass.drawIndexed(draw.count, 1, draw.firstIndex, 0, 0);
1251
+ }
1252
+ }
1253
+ }
1254
+ // Hair shading (split by stencil for transparency over eyes)
1255
+ const hairOverEyes = this.drawCalls.filter((d) => d.type === "hair-over-eyes");
1256
+ if (hairOverEyes.length > 0) {
1257
+ pass.setPipeline(this.hairPipelineOverEyes);
1258
+ pass.setStencilReference(this.STENCIL_EYE_VALUE);
1259
+ for (const draw of hairOverEyes) {
1260
+ pass.setBindGroup(0, draw.bindGroup);
1261
+ pass.drawIndexed(draw.count, 1, draw.firstIndex, 0, 0);
1262
+ }
1263
+ }
1264
+ const hairOverNonEyes = this.drawCalls.filter((d) => d.type === "hair-over-non-eyes");
1265
+ if (hairOverNonEyes.length > 0) {
1266
+ pass.setPipeline(this.hairPipelineOverNonEyes);
1267
+ pass.setStencilReference(this.STENCIL_EYE_VALUE);
1268
+ for (const draw of hairOverNonEyes) {
1269
+ pass.setBindGroup(0, draw.bindGroup);
1270
+ pass.drawIndexed(draw.count, 1, draw.firstIndex, 0, 0);
1271
+ }
1272
+ }
1273
+ // Hair outlines
1274
+ const hairOutlines = this.drawCalls.filter((d) => d.type === "hair-outline");
1275
+ if (hairOutlines.length > 0) {
1276
+ pass.setPipeline(this.hairOutlinePipeline);
1277
+ for (const draw of hairOutlines) {
1278
+ pass.setBindGroup(0, draw.bindGroup);
1279
+ pass.drawIndexed(draw.count, 1, draw.firstIndex, 0, 0);
1280
+ }
1281
+ }
1282
+ }
1283
+ // Render strategy: 1) Opaque non-eye/hair 2) Eyes (stencil=1) 3) Hair (depth pre-pass + split by stencil) 4) Transparent 5) Bloom
1284
+ render() {
1285
+ if (this.multisampleTexture && this.camera && this.device) {
1286
+ const currentTime = performance.now();
1287
+ const deltaTime = this.lastFrameTime > 0 ? (currentTime - this.lastFrameTime) / 1000 : 0.016;
1288
+ this.lastFrameTime = currentTime;
1289
+ this.updateCameraUniforms();
1290
+ this.updateRenderTarget();
1291
+ // Update vertex buffer if morphs changed
1292
+ if (this.vertexBufferNeedsUpdate) {
1293
+ this.updateVertexBuffer();
1294
+ this.vertexBufferNeedsUpdate = false;
1295
+ }
1296
+ // Use single encoder for render
1297
+ const encoder = this.device.createCommandEncoder();
1298
+ const pass = encoder.beginRenderPass(this.renderPassDescriptor);
1299
+ if (this.model) {
1300
+ this.model.update();
1301
+ this.computeSkinMatrices();
1302
+ pass.setVertexBuffer(0, this.vertexBuffer);
1303
+ pass.setVertexBuffer(1, this.jointsBuffer);
1304
+ pass.setVertexBuffer(2, this.weightsBuffer);
1305
+ pass.setIndexBuffer(this.indexBuffer, "uint32");
1306
+ // Pass 1: Opaque
1307
+ pass.setPipeline(this.modelPipeline);
1308
+ for (const draw of this.drawCalls) {
1309
+ if (draw.type === "opaque") {
1310
+ pass.setBindGroup(0, draw.bindGroup);
1311
+ pass.drawIndexed(draw.count, 1, draw.firstIndex, 0, 0);
1312
+ }
1313
+ }
1314
+ // Pass 2: Eyes (writes stencil value for hair to test against)
1315
+ this.renderEyes(pass);
1316
+ this.drawOutlines(pass, false);
1317
+ // Pass 3: Hair rendering (depth pre-pass + shading + outlines)
1318
+ this.renderHair(pass);
1319
+ // Pass 4: Transparent
1320
+ pass.setPipeline(this.modelPipeline);
1321
+ for (const draw of this.drawCalls) {
1322
+ if (draw.type === "transparent") {
1323
+ pass.setBindGroup(0, draw.bindGroup);
1324
+ pass.drawIndexed(draw.count, 1, draw.firstIndex, 0, 0);
1325
+ }
1326
+ }
1327
+ this.drawOutlines(pass, true);
1328
+ }
1329
+ pass.end();
1330
+ this.device.queue.submit([encoder.finish()]);
1331
+ this.applyBloom();
1332
+ this.updateStats(performance.now() - currentTime);
1333
+ }
1334
+ }
1335
+ applyBloom() {
1336
+ if (!this.sceneRenderTexture || !this.bloomExtractTexture) {
1337
+ return;
1338
+ }
1339
+ // Update bloom parameters
1340
+ const thresholdData = new Float32Array(8);
1341
+ thresholdData[0] = this.bloomThreshold;
1342
+ this.device.queue.writeBuffer(this.bloomThresholdBuffer, 0, thresholdData);
1343
+ const intensityData = new Float32Array(8);
1344
+ intensityData[0] = this.bloomIntensity;
1345
+ this.device.queue.writeBuffer(this.bloomIntensityBuffer, 0, intensityData);
1346
+ let encoder = this.device.createCommandEncoder();
1347
+ // Extract bright areas
1348
+ const extractPass = encoder.beginRenderPass({
1349
+ label: "bloom extract",
1350
+ colorAttachments: [
1351
+ {
1352
+ view: this.bloomExtractTexture.createView(),
1353
+ clearValue: { r: 0, g: 0, b: 0, a: 0 },
1354
+ loadOp: "clear",
1355
+ storeOp: "store",
1356
+ },
1357
+ ],
1358
+ });
1359
+ extractPass.setPipeline(this.bloomExtractPipeline);
1360
+ extractPass.setBindGroup(0, this.bloomExtractBindGroup);
1361
+ extractPass.draw(6, 1, 0, 0);
1362
+ extractPass.end();
1363
+ // Horizontal blur
1364
+ const hBlurData = new Float32Array(4);
1365
+ hBlurData[0] = 1.0;
1366
+ hBlurData[1] = 0.0;
1367
+ this.device.queue.writeBuffer(this.blurDirectionBuffer, 0, hBlurData);
1368
+ const blurHPass = encoder.beginRenderPass({
1369
+ label: "bloom blur horizontal",
1370
+ colorAttachments: [
1371
+ {
1372
+ view: this.bloomBlurTexture1.createView(),
1373
+ clearValue: { r: 0, g: 0, b: 0, a: 0 },
1374
+ loadOp: "clear",
1375
+ storeOp: "store",
1376
+ },
1377
+ ],
1378
+ });
1379
+ blurHPass.setPipeline(this.bloomBlurPipeline);
1380
+ blurHPass.setBindGroup(0, this.bloomBlurHBindGroup);
1381
+ blurHPass.draw(6, 1, 0, 0);
1382
+ blurHPass.end();
+ // Submit before updating the shared direction buffer: queue.writeBuffer takes effect
+ // immediately on the queue timeline, so recording both blur passes into a single
+ // encoder and submitting at the end would make both passes read the vertical direction.
+ this.device.queue.submit([encoder.finish()]);
+ encoder = this.device.createCommandEncoder();
1383
+ // Vertical blur
1384
+ const vBlurData = new Float32Array(4);
1385
+ vBlurData[0] = 0.0;
1386
+ vBlurData[1] = 1.0;
1387
+ this.device.queue.writeBuffer(this.blurDirectionBuffer, 0, vBlurData);
1388
+ const blurVPass = encoder.beginRenderPass({
1389
+ label: "bloom blur vertical",
1390
+ colorAttachments: [
1391
+ {
1392
+ view: this.bloomBlurTexture2.createView(),
1393
+ clearValue: { r: 0, g: 0, b: 0, a: 0 },
1394
+ loadOp: "clear",
1395
+ storeOp: "store",
1396
+ },
1397
+ ],
1398
+ });
1399
+ blurVPass.setPipeline(this.bloomBlurPipeline);
1400
+ blurVPass.setBindGroup(0, this.bloomBlurVBindGroup);
1401
+ blurVPass.draw(6, 1, 0, 0);
1402
+ blurVPass.end();
1403
+ // Compose to canvas
1404
+ const composePass = encoder.beginRenderPass({
1405
+ label: "bloom compose",
1406
+ colorAttachments: [
1407
+ {
1408
+ view: this.context.getCurrentTexture().createView(),
1409
+ clearValue: { r: 0, g: 0, b: 0, a: 0 },
1410
+ loadOp: "clear",
1411
+ storeOp: "store",
1412
+ },
1413
+ ],
1414
+ });
1415
+ composePass.setPipeline(this.bloomComposePipeline);
1416
+ composePass.setBindGroup(0, this.bloomComposeBindGroup);
1417
+ composePass.draw(6, 1, 0, 0);
1418
+ composePass.end();
1419
+ this.device.queue.submit([encoder.finish()]);
1420
+ }
1421
+ updateCameraUniforms() {
1422
+ const viewMatrix = this.camera.getViewMatrix();
1423
+ const projectionMatrix = this.camera.getProjectionMatrix();
1424
+ const cameraPos = this.camera.getPosition();
1425
+ this.cameraMatrixData.set(viewMatrix.values, 0);
1426
+ this.cameraMatrixData.set(projectionMatrix.values, 16);
1427
+ this.cameraMatrixData[32] = cameraPos.x;
1428
+ this.cameraMatrixData[33] = cameraPos.y;
1429
+ this.cameraMatrixData[34] = cameraPos.z;
1430
+ this.device.queue.writeBuffer(this.cameraUniformBuffer, 0, this.cameraMatrixData);
1431
+ }
1432
+ updateRenderTarget() {
1433
+ // Use cached view (only recreated on resize in handleResize)
1434
+ const colorAttachment = this.renderPassDescriptor.colorAttachments[0];
1435
+ if (this.sampleCount > 1) {
1436
+ colorAttachment.resolveTarget = this.sceneRenderTextureView;
1437
+ }
1438
+ else {
1439
+ colorAttachment.view = this.sceneRenderTextureView;
1440
+ }
1441
+ }
1442
+ computeSkinMatrices() {
1443
+ const skinMatrices = this.model.getSkinMatrices();
1444
+ this.device.queue.writeBuffer(this.skinMatrixBuffer, 0, skinMatrices.buffer, skinMatrices.byteOffset, skinMatrices.byteLength);
1445
+ }
1446
+ drawOutlines(pass, transparent) {
1447
+ pass.setPipeline(this.outlinePipeline);
1448
+ const outlineType = transparent ? "transparent-outline" : "opaque-outline";
1449
+ for (const draw of this.drawCalls) {
1450
+ if (draw.type === outlineType) {
1451
+ pass.setBindGroup(0, draw.bindGroup);
1452
+ pass.drawIndexed(draw.count, 1, draw.firstIndex, 0, 0);
1453
+ }
1454
+ }
1455
+ }
1456
+ updateStats(frameTime) {
1457
+ // Simplified frame time tracking - rolling average with fixed window
1458
+ const maxSamples = 60;
1459
+ this.frameTimeSum += frameTime;
1460
+ this.frameTimeCount++;
1461
+ if (this.frameTimeCount > maxSamples) {
1462
+ // Maintain rolling window by subtracting oldest sample estimate
1463
+ const avg = this.frameTimeSum / maxSamples;
1464
+ this.frameTimeSum -= avg;
1465
+ this.frameTimeCount = maxSamples;
1466
+ }
1467
+ this.stats.frameTime =
1468
+ Math.round((this.frameTimeSum / this.frameTimeCount) * Engine.STATS_FRAME_TIME_ROUNDING) /
1469
+ Engine.STATS_FRAME_TIME_ROUNDING;
1470
+ // FPS tracking
1471
+ const now = performance.now();
1472
+ this.framesSinceLastUpdate++;
1473
+ const elapsed = now - this.lastFpsUpdate;
1474
+ if (elapsed >= Engine.STATS_FPS_UPDATE_INTERVAL_MS) {
1475
+ this.stats.fps = Math.round((this.framesSinceLastUpdate / elapsed) * Engine.STATS_FPS_UPDATE_INTERVAL_MS);
1476
+ this.framesSinceLastUpdate = 0;
1477
+ this.lastFpsUpdate = now;
1478
+ }
1479
+ }
1480
+ }
1481
+ // Default values
1482
+ Engine.DEFAULT_BLOOM_THRESHOLD = 0.01;
1483
+ Engine.DEFAULT_BLOOM_INTENSITY = 0.12;
1484
+ Engine.DEFAULT_RIM_LIGHT_INTENSITY = 0.45;
1485
+ Engine.DEFAULT_CAMERA_DISTANCE = 26.6;
1486
+ Engine.DEFAULT_CAMERA_TARGET = new Vec3(0, 12.5, 0);
1487
+ Engine.TRANSPARENCY_EPSILON = 0.001;
1488
+ Engine.STATS_FPS_UPDATE_INTERVAL_MS = 1000;
1489
+ Engine.STATS_FRAME_TIME_ROUNDING = 100;
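
The 3-tap blur in the bloom shader is the standard bilinear reduction of the 5-tap Gaussian kernel quoted in its comments. The sketch below (plain JavaScript, not part of the package code) reproduces that arithmetic to show where the combined 0.30613 weight and the roughly 1.2-texel offset come from.

    // 5-tap kernel from the shader comments:
    // offsets [-2, -1, 0, 1, 2], weights [0.06136, 0.24477, 0.38774, 0.24477, 0.06136]
    const wCenter = 0.38774;            // center tap is fetched directly
    const wOuter  = 0.24477 + 0.06136;  // = 0.30613: the two outer taps merge into one bilinear fetch
    // Placing that fetch at the weight-averaged offset makes the hardware's linear filter
    // mix texels 1 and 2 in the original proportions:
    const offset  = (0.24477 * 1 + 0.06136 * 2) / wOuter; // ~1.2004 texels
    console.log(wCenter + 2 * wOuter);  // ~1.0, so overall brightness is preserved
    console.log(offset);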