matrix-engine-wgpu 1.2.11 → 1.2.13

This diff shows the changes between publicly released versions of this package as they appear in their respective public registries. It is provided for informational purposes only.
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "matrix-engine-wgpu",
- "version": "1.2.11",
+ "version": "1.2.13",
  "description": "+HOTFIX raycast, webGPU powered pwa application. Crazy fast rendering with AmmoJS physics support. Simple raycaster hit object added.",
  "main": "index.js",
  "files": [
@@ -8,598 +8,603 @@ import {degToRad, genName, LOG_FUNNY_SMALL} from './utils';
  // import {checkingProcedure, checkingRay, touchCoordinate} from './raycast';

  export default class MEMeshObj {
- constructor(canvas, device, context, o) {
- if(typeof o.name === 'undefined') o.name = genName(9);
- if(typeof o.raycast === 'undefined') {
- this.raycast = {
- enabled: false,
- radius: 2
- };
- } else {
- this.raycast = o.raycast;
- }
-
- this.name = o.name;
- this.done = false;
- this.device = device;
- this.context = context;
- this.entityArgPass = o.entityArgPass;
-
- // Mesh stuff
- this.mesh = o.mesh;
- this.mesh.uvs = this.mesh.textures;
- console.log(`%c Mesh loaded: ${o.name}`, LOG_FUNNY_SMALL);
-
- this.inputHandler = createInputHandler(window, canvas);
- this.cameras = o.cameras;
-
- this.mainCameraParams = {
- type: o.mainCameraParams.type,
- responseCoef: o.mainCameraParams.responseCoef
- }
-
- // touchCoordinate.enabled = true;
-
- this.lastFrameMS = 0;
- this.texturesPaths = [];
- o.texturesPaths.forEach((t) => {this.texturesPaths.push(t)})
-
- this.presentationFormat = navigator.gpu.getPreferredCanvasFormat();
-
- this.position = new Position(o.position.x, o.position.y, o.position.z);
- this.rotation = new Rotation(o.rotation.x, o.rotation.y, o.rotation.z);
- this.rotation.rotationSpeed.x = o.rotationSpeed.x;
- this.rotation.rotationSpeed.y = o.rotationSpeed.y;
- this.rotation.rotationSpeed.z = o.rotationSpeed.z;
- this.scale = o.scale;
-
- this.runProgram = () => {
- return new Promise(async (resolve) => {
- this.shadowDepthTextureSize = 1024;
- const aspect = canvas.width / canvas.height;
- this.projectionMatrix = mat4.perspective((2 * Math.PI) / 5, aspect, 1, 2000.0);
- this.modelViewProjectionMatrix = mat4.create();
- // console.log('cube added texturesPaths: ', this.texturesPaths)
- this.loadTex0(this.texturesPaths, device).then(() => {
- // console.log('loaded tex buffer for mesh:', this.texture0)
- resolve()
- })
- })
- }
-
- this.runProgram().then(() => {
- const aspect = canvas.width / canvas.height;
- const presentationFormat = navigator.gpu.getPreferredCanvasFormat();
- this.context.configure({
- device: this.device,
- format: presentationFormat,
- alphaMode: 'premultiplied',
- });
-
- // Create the model vertex buffer.
- this.vertexBuffer = this.device.createBuffer({
- size: this.mesh.vertices.length * Float32Array.BYTES_PER_ELEMENT,
- usage: GPUBufferUsage.VERTEX,
- mappedAtCreation: true,
- });
- {
- // const mapping = new Float32Array(this.vertexBuffer.getMappedRange());
- // // for(let i = 0;i < this.mesh.vertices.length;++i) {
- // // mapping.set(this.mesh.vertices[i], 6 * i);
- // // mapping.set(this.mesh.normals[i], 6 * i + 3);
- // // }
- // this.vertexBuffer.unmap();
- new Float32Array(this.vertexBuffer.getMappedRange()).set(this.mesh.vertices);
- this.vertexBuffer.unmap();
- }
-
- // NIDZA TEST SECOUND BUFFER
- // Create the model vertex buffer.
- this.vertexNormalsBuffer = this.device.createBuffer({
- size: this.mesh.vertexNormals.length * Float32Array.BYTES_PER_ELEMENT,
- usage: GPUBufferUsage.VERTEX,
- mappedAtCreation: true,
- });
- {
- new Float32Array(this.vertexNormalsBuffer.getMappedRange()).set(this.mesh.vertexNormals);
- this.vertexNormalsBuffer.unmap();
- }
-
- this.vertexTexCoordsBuffer = this.device.createBuffer({
- size: this.mesh.textures.length * Float32Array.BYTES_PER_ELEMENT,
- usage: GPUBufferUsage.VERTEX,
- mappedAtCreation: true,
- });
- {
- new Float32Array(this.vertexTexCoordsBuffer.getMappedRange()).set(this.mesh.textures);
- this.vertexTexCoordsBuffer.unmap();
- }
-
- // Create the model index buffer.
- this.indexCount = this.mesh.indices.length;
- this.indexBuffer = this.device.createBuffer({
- size: this.indexCount * Uint16Array.BYTES_PER_ELEMENT,
- usage: GPUBufferUsage.INDEX,
- mappedAtCreation: true,
- });
- {
- // const mapping = new Uint16Array(this.indexBuffer.getMappedRange());
- // for(let i = 0;i < this.mesh.indices.length;++i) {
- // mapping.set(this.mesh.indices[i], i);
- // }
- new Uint16Array(this.indexBuffer.getMappedRange()).set(this.mesh.indices);
- this.indexBuffer.unmap();
- }
-
- // Create the depth texture for rendering/sampling the shadow map.
- this.shadowDepthTexture = this.device.createTexture({
- size: [this.shadowDepthTextureSize, this.shadowDepthTextureSize, 1],
- usage: GPUTextureUsage.RENDER_ATTACHMENT | GPUTextureUsage.TEXTURE_BINDING,
- format: 'depth32float',
- });
- this.shadowDepthTextureView = this.shadowDepthTexture.createView();
-
- // Create some common descriptors used for both the shadow pipeline
- // and the color rendering pipeline.
- this.vertexBuffers = [
- {
- arrayStride: Float32Array.BYTES_PER_ELEMENT * 3,
- attributes: [
- {
- // position
- shaderLocation: 0,
- offset: 0,
- format: "float32x3",
- }
- ],
- },
- {
- arrayStride: Float32Array.BYTES_PER_ELEMENT * 3,
- attributes: [
- {
- // normal
- shaderLocation: 1,
- offset: 0,
- format: "float32x3",
- },
- ],
- },
- {
- arrayStride: Float32Array.BYTES_PER_ELEMENT * 2,
- attributes: [
- {
- // uvs
- shaderLocation: 2,
- offset: 0,
- format: "float32x2",
- },
- ],
- },
- ];
-
- const primitive = {
- topology: 'triangle-list',
- cullMode: 'back',
- };
-
- this.uniformBufferBindGroupLayout = this.device.createBindGroupLayout({
- entries: [
- {
- binding: 0,
- visibility: GPUShaderStage.VERTEX,
- buffer: {
- type: 'uniform',
- },
- },
- ],
- });
-
- this.shadowPipeline = this.device.createRenderPipeline({
- layout: this.device.createPipelineLayout({
- bindGroupLayouts: [
- this.uniformBufferBindGroupLayout,
- this.uniformBufferBindGroupLayout,
- ],
- }),
- vertex: {
- module: this.device.createShaderModule({
- code: vertexShadowWGSL,
- }),
- buffers: this.vertexBuffers,
- },
- depthStencil: {
- depthWriteEnabled: true,
- depthCompare: 'less',
- format: 'depth32float',
- },
- primitive,
- });
-
- // Create a bind group layout which holds the scene uniforms and
- // the texture+sampler for depth. We create it manually because the WebPU
- // implementation doesn't infer this from the shader (yet).
- this.bglForRender = this.device.createBindGroupLayout({
- entries: [
- {
- binding: 0,
- visibility: GPUShaderStage.VERTEX | GPUShaderStage.FRAGMENT,
- buffer: {
- type: 'uniform',
- },
- },
- {
- binding: 1,
- visibility: GPUShaderStage.VERTEX | GPUShaderStage.FRAGMENT,
- texture: {
- sampleType: 'depth',
- },
- },
- {
- binding: 2,
- visibility: GPUShaderStage.VERTEX | GPUShaderStage.FRAGMENT,
- sampler: {
- type: 'comparison',
- },
- },
- {
- binding: 3,
- visibility: GPUShaderStage.VERTEX | GPUShaderStage.FRAGMENT,
- texture: {
- sampleType: 'float',
- }
- },
- {
- binding: 4,
- visibility: GPUShaderStage.VERTEX | GPUShaderStage.FRAGMENT,
- sampler: {
- type: 'filtering',
- }
- }
- ]
- });
-
- this.pipeline = this.device.createRenderPipeline({
- layout: this.device.createPipelineLayout({
- bindGroupLayouts: [this.bglForRender, this.uniformBufferBindGroupLayout],
- }),
- vertex: {
- module: this.device.createShaderModule({
- code: vertexWGSL,
- }),
- buffers: this.vertexBuffers,
- },
- fragment: {
- module: this.device.createShaderModule({
- code: fragmentWGSL,
- }),
- targets: [
- {
- format: presentationFormat,
- },
- ],
- constants: {
- shadowDepthTextureSize: this.shadowDepthTextureSize,
- },
- },
- depthStencil: {
- depthWriteEnabled: true,
- depthCompare: 'less',
- format: 'depth24plus-stencil8',
- },
- primitive,
- });
-
- const depthTexture = this.device.createTexture({
- size: [canvas.width, canvas.height],
- format: 'depth24plus-stencil8',
- usage: GPUTextureUsage.RENDER_ATTACHMENT,
- });
-
- this.renderPassDescriptor = {
- colorAttachments: [
- {
- // view is acquired and set in render loop.
- view: undefined,
- clearValue: {r: 0.5, g: 0.5, b: 0.5, a: 1.0},
- loadOp: 'load',
- storeOp: 'store',
- },
- ],
- depthStencilAttachment: {
- view: depthTexture.createView(),
- depthClearValue: 1.0,
- depthLoadOp: 'clear',
- depthStoreOp: 'store',
- stencilClearValue: 0,
- stencilLoadOp: 'clear',
- stencilStoreOp: 'store',
- },
- };
-
- this.modelUniformBuffer = this.device.createBuffer({
- size: 4 * 16, // 4x4 matrix
- usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
- });
-
- this.sceneUniformBuffer = this.device.createBuffer({
- // Two 4x4 viewProj matrices,
- // one for the camera and one for the light.
- // Then a vec3 for the light position.
- // Rounded to the nearest multiple of 16.
- size: 2 * 4 * 16 + 4 * 4,
- usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
- });
-
- this.sceneBindGroupForShadow = this.device.createBindGroup({
- layout: this.uniformBufferBindGroupLayout,
- entries: [
- {
- binding: 0,
- resource: {
- buffer: this.sceneUniformBuffer,
- },
- },
- ],
- });
-
- this.sceneBindGroupForRender = this.device.createBindGroup({
- layout: this.bglForRender,
- entries: [
- {
- binding: 0,
- resource: {
- buffer: this.sceneUniformBuffer,
- },
- },
- {
- binding: 1,
- resource: this.shadowDepthTextureView,
- },
- {
- binding: 2,
- resource: this.device.createSampler({
- compare: 'less',
- }),
- },
- {
- binding: 3,
- resource: this.texture0.createView(),
- },
- {
- binding: 4,
- resource: this.sampler,
- },
- ],
- });
-
- this.modelBindGroup = this.device.createBindGroup({
- layout: this.uniformBufferBindGroupLayout,
- entries: [
- {
- binding: 0,
- resource: {
- buffer: this.modelUniformBuffer,
- },
- },
- ],
- });
-
- // Rotates the camera around the origin based on time.
- this.getTransformationMatrix = (pos) => {
- const now = Date.now();
- const deltaTime = (now - this.lastFrameMS) / this.mainCameraParams.responseCoef;
- this.lastFrameMS = now;
- // const this.viewMatrix = mat4.identity()
- const camera = this.cameras[this.mainCameraParams.type];
- this.viewMatrix = camera.update(deltaTime, this.inputHandler());
- mat4.translate(this.viewMatrix, vec3.fromValues(pos.x, pos.y, pos.z), this.viewMatrix);
- mat4.rotate(
- this.viewMatrix,
- vec3.fromValues(this.rotation.axis.x, this.rotation.axis.y, this.rotation.axis.z),
- degToRad(this.rotation.angle), this.viewMatrix)
-
- // console.info('angle: ', this.rotation.angle, ' axis ' , this.rotation.axis.x, ' , ', this.rotation.axis.y, ' , ', this.rotation.axis.z)
- mat4.multiply(this.projectionMatrix, this.viewMatrix, this.modelViewProjectionMatrix);
- return this.modelViewProjectionMatrix;
- }
-
- this.upVector = vec3.fromValues(0, 1, 0);
- this.origin = vec3.fromValues(0, 0, 0);
-
- this.lightPosition = vec3.fromValues(0, 0, 0);
- this.lightViewMatrix = mat4.lookAt(this.lightPosition, this.origin, this.upVector);
- const lightProjectionMatrix = mat4.create();
-
- var myLMargin = 100;
- {
- const left = -myLMargin;
- const right = myLMargin;
- const bottom = -myLMargin;
- const top = myLMargin;
- const near = -200;
- const far = 300;
- mat4.ortho(left, right, bottom, top, near, far, lightProjectionMatrix);
- // test
- // mat4.ortho(right, left, top, bottom, near, far, lightProjectionMatrix);
- }
-
- this.lightViewProjMatrix = mat4.multiply(
- lightProjectionMatrix,
- this.lightViewMatrix
- );
-
- // looks like affect on transformations for now const 0
- const modelMatrix = mat4.translation([0, 0, 0]);
- // The camera/light aren't moving, so write them into buffers now.
- {
- const lightMatrixData = this.lightViewProjMatrix; // as Float32Array;
- this.device.queue.writeBuffer(
- this.sceneUniformBuffer,
- 0,
- lightMatrixData.buffer,
- lightMatrixData.byteOffset,
- lightMatrixData.byteLength
- );
-
- const lightData = this.lightPosition;
- this.device.queue.writeBuffer(
- this.sceneUniformBuffer,
- 128,
- lightData.buffer,
- lightData.byteOffset,
- lightData.byteLength
- );
-
- const modelData = modelMatrix;
- this.device.queue.writeBuffer(
- this.modelUniformBuffer,
- 0,
- modelData.buffer,
- modelData.byteOffset,
- modelData.byteLength
- );
- }
-
- this.shadowPassDescriptor = {
- colorAttachments: [],
- depthStencilAttachment: {
- view: this.shadowDepthTextureView,
- depthClearValue: 1.0,
- depthLoadOp: 'clear',
- depthStoreOp: 'store',
- },
- };
-
- this.done = true;
- })
- }
-
- updateLightsTest = (position) => {
- console.log('Update light position.', position)
- this.lightPosition = vec3.fromValues(position[0], position[1], position[2]);
- this.lightViewMatrix = mat4.lookAt(this.lightPosition, this.origin, this.upVector);
-
- const lightProjectionMatrix = mat4.create();
- {
- const left = -80;
- const right = 80;
- const bottom = -80;
- const top = 80;
- const near = -200;
- const far = 300;
- mat4.ortho(left, right, bottom, top, near, far, lightProjectionMatrix);
- }
-
- this.lightViewProjMatrix = mat4.multiply(
- lightProjectionMatrix,
- this.lightViewMatrix
- );
-
- // looks like affect on transformations for now const 0
- const modelMatrix = mat4.translation([0, 0, 0]);
- // The camera/light aren't moving, so write them into buffers now.
- {
- const lightMatrixData = this.lightViewProjMatrix; // as Float32Array;
- this.device.queue.writeBuffer(
- this.sceneUniformBuffer,
- 0, // 0 ori
- lightMatrixData.buffer,
- lightMatrixData.byteOffset,
- lightMatrixData.byteLength
- );
-
- const lightData = this.lightPosition;
- this.device.queue.writeBuffer(
- this.sceneUniformBuffer,
- 256,
- lightData.buffer,
- lightData.byteOffset,
- lightData.byteLength
- );
-
- const modelData = modelMatrix;
- this.device.queue.writeBuffer(
- this.modelUniformBuffer,
- 0,
- modelData.buffer,
- modelData.byteOffset,
- modelData.byteLength
- );
- }
-
- this.shadowPassDescriptor = {
- colorAttachments: [],
- depthStencilAttachment: {
- view: this.shadowDepthTextureView,
- depthClearValue: 1.0, // ori 1.0
- depthLoadOp: 'clear',
- depthStoreOp: 'store',
- },
- };
-
- ///////////////////////
- }
-
- async loadTex0(texturesPaths, device) {
-
- this.sampler = device.createSampler({
- magFilter: 'linear',
- minFilter: 'linear',
- });
-
- return new Promise(async (resolve) => {
- const response = await fetch(texturesPaths[0]);
- const imageBitmap = await createImageBitmap(await response.blob());
- this.texture0 = device.createTexture({
- size: [imageBitmap.width, imageBitmap.height, 1],
- format: 'rgba8unorm',
- usage:
- GPUTextureUsage.TEXTURE_BINDING |
- GPUTextureUsage.COPY_DST |
- GPUTextureUsage.RENDER_ATTACHMENT,
- });
-
- device.queue.copyExternalImageToTexture(
- {source: imageBitmap},
- {texture: this.texture0},
- [imageBitmap.width, imageBitmap.height]
- );
- resolve()
- })
- }
-
- draw = (commandEncoder) => {
- if(this.done == false) return;
- const transformationMatrix = this.getTransformationMatrix(this.position);
- this.device.queue.writeBuffer(
- this.sceneUniformBuffer,
- 64,
- transformationMatrix.buffer,
- transformationMatrix.byteOffset,
- transformationMatrix.byteLength
- );
- this.renderPassDescriptor.colorAttachments[0].view = this.context
- .getCurrentTexture()
- .createView();
- }
-
- drawElements = (renderPass) => {
- renderPass.setBindGroup(0, this.sceneBindGroupForRender);
- renderPass.setBindGroup(1, this.modelBindGroup);
- renderPass.setVertexBuffer(0, this.vertexBuffer);
- renderPass.setVertexBuffer(1, this.vertexNormalsBuffer);
- renderPass.setVertexBuffer(2, this.vertexTexCoordsBuffer);
- renderPass.setIndexBuffer(this.indexBuffer, 'uint16');
- renderPass.drawIndexed(this.indexCount);
- }
-
- drawShadows = (shadowPass) => {
- shadowPass.setBindGroup(0, this.sceneBindGroupForShadow);
- shadowPass.setBindGroup(1, this.modelBindGroup);
- shadowPass.setVertexBuffer(0, this.vertexBuffer);
- shadowPass.setVertexBuffer(1, this.vertexNormalsBuffer);
- shadowPass.setVertexBuffer(2, this.vertexTexCoordsBuffer);
- shadowPass.setIndexBuffer(this.indexBuffer, 'uint16');
- shadowPass.drawIndexed(this.indexCount);
- }
+ constructor(canvas, device, context, o) {
+ if(typeof o.name === 'undefined') o.name = genName(9);
+ if(typeof o.raycast === 'undefined') {
+ this.raycast = {
+ enabled: false,
+ radius: 2
+ };
+ } else {
+ this.raycast = o.raycast;
+ }
+
+ this.name = o.name;
+ this.done = false;
+ this.device = device;
+ this.context = context;
+ this.entityArgPass = o.entityArgPass;
+
+ // Mesh stuff
+ this.mesh = o.mesh;
+ this.mesh.uvs = this.mesh.textures;
+ console.log(`%c Mesh loaded: ${o.name}`, LOG_FUNNY_SMALL);
+
+ this.inputHandler = createInputHandler(window, canvas);
+ this.cameras = o.cameras;
+
+ this.mainCameraParams = {
+ type: o.mainCameraParams.type,
+ responseCoef: o.mainCameraParams.responseCoef
+ }
+
+ // touchCoordinate.enabled = true;
+
+ this.lastFrameMS = 0;
+ this.texturesPaths = [];
+ o.texturesPaths.forEach((t) => {this.texturesPaths.push(t)})
+
+ this.presentationFormat = navigator.gpu.getPreferredCanvasFormat();
+
+ this.position = new Position(o.position.x, o.position.y, o.position.z);
+ this.rotation = new Rotation(o.rotation.x, o.rotation.y, o.rotation.z);
+ this.rotation.rotationSpeed.x = o.rotationSpeed.x;
+ this.rotation.rotationSpeed.y = o.rotationSpeed.y;
+ this.rotation.rotationSpeed.z = o.rotationSpeed.z;
+ this.scale = o.scale;
+
+ this.runProgram = () => {
+ return new Promise(async (resolve) => {
+ this.shadowDepthTextureSize = 1024;
+ const aspect = canvas.width / canvas.height;
+ this.projectionMatrix = mat4.perspective((2 * Math.PI) / 5, aspect, 1, 2000.0);
+ this.modelViewProjectionMatrix = mat4.create();
+ // console.log('cube added texturesPaths: ', this.texturesPaths)
+ this.loadTex0(this.texturesPaths, device).then(() => {
+ // console.log('loaded tex buffer for mesh:', this.texture0)
+ resolve()
+ })
+ })
+ }
+
+ this.runProgram().then(() => {
+ const aspect = canvas.width / canvas.height;
+ const presentationFormat = navigator.gpu.getPreferredCanvasFormat();
+ this.context.configure({
+ device: this.device,
+ format: presentationFormat,
+ alphaMode: 'premultiplied',
+ });
+
+ // Create the model vertex buffer.
+ this.vertexBuffer = this.device.createBuffer({
+ size: this.mesh.vertices.length * Float32Array.BYTES_PER_ELEMENT,
+ usage: GPUBufferUsage.VERTEX,
+ mappedAtCreation: true,
+ });
+ {
+ // const mapping = new Float32Array(this.vertexBuffer.getMappedRange());
+ // // for(let i = 0;i < this.mesh.vertices.length;++i) {
+ // // mapping.set(this.mesh.vertices[i], 6 * i);
+ // // mapping.set(this.mesh.normals[i], 6 * i + 3);
+ // // }
+ // this.vertexBuffer.unmap();
+ new Float32Array(this.vertexBuffer.getMappedRange()).set(this.mesh.vertices);
+ this.vertexBuffer.unmap();
+ }
+
+ // NIDZA TEST SECOUND BUFFER
+ // Create the model vertex buffer.
+ this.vertexNormalsBuffer = this.device.createBuffer({
+ size: this.mesh.vertexNormals.length * Float32Array.BYTES_PER_ELEMENT,
+ usage: GPUBufferUsage.VERTEX,
+ mappedAtCreation: true,
+ });
+ {
+ new Float32Array(this.vertexNormalsBuffer.getMappedRange()).set(this.mesh.vertexNormals);
+ this.vertexNormalsBuffer.unmap();
+ }
+
+ this.vertexTexCoordsBuffer = this.device.createBuffer({
+ size: this.mesh.textures.length * Float32Array.BYTES_PER_ELEMENT,
+ usage: GPUBufferUsage.VERTEX,
+ mappedAtCreation: true,
+ });
+ {
+ new Float32Array(this.vertexTexCoordsBuffer.getMappedRange()).set(this.mesh.textures);
+ this.vertexTexCoordsBuffer.unmap();
+ }
+
+ // Create the model index buffer.
+ this.indexCount = this.mesh.indices.length;
+ this.indexBuffer = this.device.createBuffer({
+ size: this.indexCount * Uint16Array.BYTES_PER_ELEMENT,
+ usage: GPUBufferUsage.INDEX,
+ mappedAtCreation: true,
+ });
+ {
+ // const mapping = new Uint16Array(this.indexBuffer.getMappedRange());
+ // for(let i = 0;i < this.mesh.indices.length;++i) {
+ // mapping.set(this.mesh.indices[i], i);
+ // }
+ new Uint16Array(this.indexBuffer.getMappedRange()).set(this.mesh.indices);
+ this.indexBuffer.unmap();
+ }
+
+ // Create the depth texture for rendering/sampling the shadow map.
+ this.shadowDepthTexture = this.device.createTexture({
+ size: [this.shadowDepthTextureSize, this.shadowDepthTextureSize, 1],
+ usage: GPUTextureUsage.RENDER_ATTACHMENT | GPUTextureUsage.TEXTURE_BINDING,
+ format: 'depth32float',
+ });
+ this.shadowDepthTextureView = this.shadowDepthTexture.createView();
+
+ // Create some common descriptors used for both the shadow pipeline
+ // and the color rendering pipeline.
+ this.vertexBuffers = [
+ {
+ arrayStride: Float32Array.BYTES_PER_ELEMENT * 3,
+ attributes: [
+ {
+ // position
+ shaderLocation: 0,
+ offset: 0,
+ format: "float32x3",
+ }
+ ],
+ },
+ {
+ arrayStride: Float32Array.BYTES_PER_ELEMENT * 3,
+ attributes: [
+ {
+ // normal
+ shaderLocation: 1,
+ offset: 0,
+ format: "float32x3",
+ },
+ ],
+ },
+ {
+ arrayStride: Float32Array.BYTES_PER_ELEMENT * 2,
+ attributes: [
+ {
+ // uvs
+ shaderLocation: 2,
+ offset: 0,
+ format: "float32x2",
+ },
+ ],
+ },
+ ];
+
+ const primitive = {
+ topology: 'triangle-list',
+ cullMode: 'back',
+ };
+
+ this.uniformBufferBindGroupLayout = this.device.createBindGroupLayout({
+ entries: [
+ {
+ binding: 0,
+ visibility: GPUShaderStage.VERTEX,
+ buffer: {
+ type: 'uniform',
+ },
+ },
+ ],
+ });
+
+ this.shadowPipeline = this.device.createRenderPipeline({
+ layout: this.device.createPipelineLayout({
+ bindGroupLayouts: [
+ this.uniformBufferBindGroupLayout,
+ this.uniformBufferBindGroupLayout,
+ ],
+ }),
+ vertex: {
+ module: this.device.createShaderModule({
+ code: vertexShadowWGSL,
+ }),
+ buffers: this.vertexBuffers,
+ },
+ depthStencil: {
+ depthWriteEnabled: true,
+ depthCompare: 'less',
+ format: 'depth32float',
+ },
+ primitive,
+ });
+
+ // Create a bind group layout which holds the scene uniforms and
+ // the texture+sampler for depth. We create it manually because the WebPU
+ // implementation doesn't infer this from the shader (yet).
+ this.bglForRender = this.device.createBindGroupLayout({
+ entries: [
+ {
+ binding: 0,
+ visibility: GPUShaderStage.VERTEX | GPUShaderStage.FRAGMENT,
+ buffer: {
+ type: 'uniform',
+ },
+ },
+ {
+ binding: 1,
+ visibility: GPUShaderStage.VERTEX | GPUShaderStage.FRAGMENT,
+ texture: {
+ sampleType: 'depth',
+ },
+ },
+ {
+ binding: 2,
+ visibility: GPUShaderStage.VERTEX | GPUShaderStage.FRAGMENT,
+ sampler: {
+ type: 'comparison',
+ },
+ },
+ {
+ binding: 3,
+ visibility: GPUShaderStage.VERTEX | GPUShaderStage.FRAGMENT,
+ texture: {
+ sampleType: 'float',
+ }
+ },
+ {
+ binding: 4,
+ visibility: GPUShaderStage.VERTEX | GPUShaderStage.FRAGMENT,
+ sampler: {
+ type: 'filtering',
+ }
+ }
+ ]
+ });
+
+ this.pipeline = this.device.createRenderPipeline({
+ layout: this.device.createPipelineLayout({
+ bindGroupLayouts: [this.bglForRender, this.uniformBufferBindGroupLayout],
+ }),
+ vertex: {
+ module: this.device.createShaderModule({
+ code: vertexWGSL,
+ }),
+ buffers: this.vertexBuffers,
+ },
+ fragment: {
+ module: this.device.createShaderModule({
+ code: fragmentWGSL,
+ }),
+ targets: [
+ {
+ format: presentationFormat,
+ },
+ ],
+ constants: {
+ shadowDepthTextureSize: this.shadowDepthTextureSize,
+ },
+ },
+ depthStencil: {
+ depthWriteEnabled: true,
+ depthCompare: 'less',
+ format: 'depth24plus-stencil8',
+ },
+ primitive,
+ });
+
+ const depthTexture = this.device.createTexture({
+ size: [canvas.width, canvas.height, 1],
+ format: 'depth24plus-stencil8',
+ usage: GPUTextureUsage.RENDER_ATTACHMENT,
+ });
+
+ this.renderPassDescriptor = {
+ colorAttachments: [
+ {
+ // view is acquired and set in render loop.
+ view: undefined,
+ clearValue: {r: 0.5, g: 0.5, b: 0.5, a: 1.0},
+ loadOp: 'clear',
+ storeOp: 'store',
+ },
+ ],
+ depthStencilAttachment: {
+ view: depthTexture.createView(),
+ depthClearValue: 1.0,
+ depthLoadOp: 'clear',
+ depthStoreOp: 'store',
+ stencilClearValue: 0,
+ stencilLoadOp: 'clear',
+ stencilStoreOp: 'store',
+ },
+ };
+
+ this.modelUniformBuffer = this.device.createBuffer({
+ size: 4 * 16, // 4x4 matrix
+ usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
+ });
+
+ this.sceneUniformBuffer = this.device.createBuffer({
+ // Two 4x4 viewProj matrices,
+ // one for the camera and one for the light.
+ // Then a vec3 for the light position.
+ // Rounded to the nearest multiple of 16.
+ // size: 2 * 4 * 16 + 4 * 4,
+ size: 160,
+ usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
+ });
+
+ this.sceneBindGroupForShadow = this.device.createBindGroup({
+ layout: this.uniformBufferBindGroupLayout,
+ entries: [
+ {
+ binding: 0,
+ resource: {
+ buffer: this.sceneUniformBuffer,
+ },
+ },
+ ],
+ });
+
+ this.sceneBindGroupForRender = this.device.createBindGroup({
+ layout: this.bglForRender,
+ entries: [
+ {
+ binding: 0,
+ resource: {
+ buffer: this.sceneUniformBuffer,
+ },
+ },
+ {
+ binding: 1,
+ resource: this.shadowDepthTextureView,
+ },
+ {
+ binding: 2,
+ resource: this.device.createSampler({
+ compare: 'less',
+ }),
+ },
+ {
+ binding: 3,
+ resource: this.texture0.createView(),
+ },
+ {
+ binding: 4,
+ resource: this.sampler,
+ },
+ ],
+ });
+
+ this.modelBindGroup = this.device.createBindGroup({
+ layout: this.uniformBufferBindGroupLayout,
+ entries: [
+ {
+ binding: 0,
+ resource: {
+ buffer: this.modelUniformBuffer,
+ },
+ },
+ ],
+ });
+
+ // Rotates the camera around the origin based on time.
+ this.getTransformationMatrix = (pos) => {
+ const now = Date.now();
+ const deltaTime = (now - this.lastFrameMS) / this.mainCameraParams.responseCoef;
+ this.lastFrameMS = now;
+ // const this.viewMatrix = mat4.identity()
+ const camera = this.cameras[this.mainCameraParams.type];
+ this.viewMatrix = camera.update(deltaTime, this.inputHandler());
+ mat4.translate(this.viewMatrix, vec3.fromValues(pos.x, pos.y, pos.z), this.viewMatrix);
+ mat4.rotate(
+ this.viewMatrix,
+ vec3.fromValues(this.rotation.axis.x, this.rotation.axis.y, this.rotation.axis.z),
+ degToRad(this.rotation.angle), this.viewMatrix)
+ // console.info('this: ', this)
+ mat4.rotateX(this.viewMatrix, Math.PI * this.rotation.getRotX(), this.viewMatrix);
+ mat4.rotateY(this.viewMatrix, Math.PI * this.rotation.getRotY(), this.viewMatrix);
+ mat4.rotateZ(this.viewMatrix, Math.PI * this.rotation.getRotZ(), this.viewMatrix);
+ // console.info('angle: ', this.rotation.angle, ' axis ' , this.rotation.axis.x, ' , ', this.rotation.axis.y, ' , ', this.rotation.axis.z)
+ mat4.multiply(this.projectionMatrix, this.viewMatrix, this.modelViewProjectionMatrix);
+ return this.modelViewProjectionMatrix;
+ }
+
+ this.upVector = vec3.fromValues(0, 1, 0);
+ this.origin = vec3.fromValues(0, 0, 0);
+
+ this.lightPosition = vec3.fromValues(0, 0, 0);
+ this.lightViewMatrix = mat4.lookAt(this.lightPosition, this.origin, this.upVector);
+ const lightProjectionMatrix = mat4.create();
+
+ var myLMargin = 100;
+ {
+ const left = -myLMargin;
+ const right = myLMargin;
+ const bottom = -myLMargin;
+ const top = myLMargin;
+ const near = -200;
+ const far = 300;
+ mat4.ortho(left, right, bottom, top, near, far, lightProjectionMatrix);
+ // test
+ // mat4.ortho(right, left, top, bottom, near, far, lightProjectionMatrix);
+ }
+
+ this.lightViewProjMatrix = mat4.multiply(
+ lightProjectionMatrix,
+ this.lightViewMatrix
+ );
+
+ // looks like affect on transformations for now const 0
+ const modelMatrix = mat4.translation([0, 0, 0]);
+ // The camera/light aren't moving, so write them into buffers now.
+ {
+ const lightMatrixData = this.lightViewProjMatrix; // as Float32Array;
+ this.device.queue.writeBuffer(
+ this.sceneUniformBuffer,
+ 0,
+ lightMatrixData.buffer,
+ lightMatrixData.byteOffset,
+ lightMatrixData.byteLength
+ );
+
+ const lightData = this.lightPosition;
+ this.device.queue.writeBuffer(
+ this.sceneUniformBuffer,
+ 128,
+ lightData.buffer,
+ lightData.byteOffset,
+ lightData.byteLength
+ );
+
+ const modelData = modelMatrix;
+ this.device.queue.writeBuffer(
+ this.modelUniformBuffer,
+ 0,
+ modelData.buffer,
+ modelData.byteOffset,
+ modelData.byteLength
+ );
+ }
+
+ this.shadowPassDescriptor = {
+ colorAttachments: [],
+ depthStencilAttachment: {
+ view: this.shadowDepthTextureView,
+ depthClearValue: 1.0,
+ depthLoadOp: 'clear',
+ depthStoreOp: 'store',
+ },
+ };
+
+ this.done = true;
+ })
+ }
+
+ updateLightsTest = (position) => {
+ console.log('Update light position.', position)
+ this.lightPosition = vec3.fromValues(position[0], position[1], position[2]);
+ this.lightViewMatrix = mat4.lookAt(this.lightPosition, this.origin, this.upVector);
+
+ const lightProjectionMatrix = mat4.create();
+ {
+ const left = -80;
+ const right = 80;
+ const bottom = -80;
+ const top = 80;
+ const near = -200;
+ const far = 300;
+ mat4.ortho(left, right, bottom, top, near, far, lightProjectionMatrix);
+ }
+
+ this.lightViewProjMatrix = mat4.multiply(
+ lightProjectionMatrix,
+ this.lightViewMatrix
+ );
+
+ // looks like affect on transformations for now const 0
+ const modelMatrix = mat4.translation([0, 0, 0]);
+ // The camera/light aren't moving, so write them into buffers now.
+ {
+ const lightMatrixData = this.lightViewProjMatrix; // as Float32Array;
+ this.device.queue.writeBuffer(
+ this.sceneUniformBuffer,
+ 0, // 0 ori
+ lightMatrixData.buffer,
+ lightMatrixData.byteOffset,
+ lightMatrixData.byteLength
+ );
+
+ const lightData = this.lightPosition;
+ this.device.queue.writeBuffer(
+ this.sceneUniformBuffer,
+ 256,
+ lightData.buffer,
+ lightData.byteOffset,
+ lightData.byteLength
+ );
+
+ const modelData = modelMatrix;
+ this.device.queue.writeBuffer(
+ this.modelUniformBuffer,
+ 0,
+ modelData.buffer,
+ modelData.byteOffset,
+ modelData.byteLength
+ );
+ }
+
+ this.shadowPassDescriptor = {
+ colorAttachments: [],
+ depthStencilAttachment: {
+ view: this.shadowDepthTextureView,
+ depthClearValue: 1.0, // ori 1.0
+ depthLoadOp: 'clear',
+ depthStoreOp: 'store',
+ },
+ };
+
+ ///////////////////////
+ }
+
+ async loadTex0(texturesPaths, device) {
+
+ this.sampler = device.createSampler({
+ magFilter: 'linear',
+ minFilter: 'linear',
+ });
+
+ return new Promise(async (resolve) => {
+ const response = await fetch(texturesPaths[0]);
+ const imageBitmap = await createImageBitmap(await response.blob());
+ this.texture0 = device.createTexture({
+ size: [imageBitmap.width, imageBitmap.height, 1],
+ format: 'rgba8unorm',
+ usage:
+ GPUTextureUsage.TEXTURE_BINDING |
+ GPUTextureUsage.COPY_DST |
+ GPUTextureUsage.RENDER_ATTACHMENT,
+ });
+
+ device.queue.copyExternalImageToTexture(
+ {source: imageBitmap},
+ {texture: this.texture0},
+ [imageBitmap.width, imageBitmap.height]
+ );
+ resolve()
+ })
+ }
+
+ draw = (commandEncoder) => {
+ if(this.done == false) return;
+ // console.log('test draw for meshObj !')
+ const transformationMatrix = this.getTransformationMatrix(this.position);
+ this.device.queue.writeBuffer(
+ this.sceneUniformBuffer,
+ 64,
+ transformationMatrix.buffer,
+ transformationMatrix.byteOffset,
+ transformationMatrix.byteLength
+ );
+ this.renderPassDescriptor.colorAttachments[0].view = this.context
+ .getCurrentTexture()
+ .createView();
+ }
+
+ drawElements = (renderPass) => {
+ renderPass.setBindGroup(0, this.sceneBindGroupForRender);
+ renderPass.setBindGroup(1, this.modelBindGroup);
+ renderPass.setVertexBuffer(0, this.vertexBuffer);
+ renderPass.setVertexBuffer(1, this.vertexNormalsBuffer);
+ renderPass.setVertexBuffer(2, this.vertexTexCoordsBuffer);
+ renderPass.setIndexBuffer(this.indexBuffer, 'uint16');
+ renderPass.drawIndexed(this.indexCount);
+ }
+
+ drawShadows = (shadowPass) => {
+ shadowPass.setBindGroup(0, this.sceneBindGroupForShadow);
+ shadowPass.setBindGroup(1, this.modelBindGroup);
+ shadowPass.setVertexBuffer(0, this.vertexBuffer);
+ shadowPass.setVertexBuffer(1, this.vertexNormalsBuffer);
+ shadowPass.setVertexBuffer(2, this.vertexTexCoordsBuffer);
+ shadowPass.setIndexBuffer(this.indexBuffer, 'uint16');
+ shadowPass.drawIndexed(this.indexCount);
+ }
  }
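
Note on the sceneUniformBuffer change in the hunk above: with lightPos now declared as vec4f in the shaders, the scene uniform data packs as two 4x4 matrices (64 bytes each, at offsets 0 and 64) followed by a 16-byte vec4 at offset 128, i.e. 144 bytes in total; the new size: 160 rounds that up with spare room. A minimal sketch of that layout, assuming a GPUDevice named device is in scope; the SCENE_LAYOUT helper is illustrative and not part of the package:

// Sketch only: byte layout behind the writeBuffer offsets MEMeshObj uses (0, 64, 128).
const SCENE_LAYOUT = {
  lightViewProjMatrix:  {offset: 0,   byteLength: 4 * 16}, // mat4x4f, 64 bytes
  cameraViewProjMatrix: {offset: 64,  byteLength: 4 * 16}, // mat4x4f, 64 bytes
  lightPos:             {offset: 128, byteLength: 4 * 4},  // vec4f, 16 bytes
};
// Packed end of the struct: 128 + 16 = 144 bytes; the allocation keeps some headroom.
const sceneUniformBuffer = device.createBuffer({
  size: 160, // matches the new value in the diff
  usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
});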
@@ -220,7 +220,7 @@ export default class MatrixAmmo {
  }

  detectCollision() {
- console.log('override this')
+ // console.log('override this')
  return;

  this.lastRoll = '';
@@ -3,7 +3,8 @@ export let fragmentWGSL = `override shadowDepthTextureSize: f32 = 1024.0;
  struct Scene {
  lightViewProjMatrix : mat4x4f,
  cameraViewProjMatrix : mat4x4f,
- lightPos : vec3f,
+ lightPos : vec4f,
+ // padding: f32, // 👈 fix alignment
  }

  @group(0) @binding(0) var<uniform> scene : Scene;
@@ -39,7 +40,7 @@ fn main(input : FragmentInput) -> @location(0) vec4f {
  }
  }
  visibility /= 9.0;
- let lambertFactor = max(dot(normalize(scene.lightPos - input.fragPos), normalize(input.fragNorm)), 0.0);
+ let lambertFactor = max(dot(normalize(scene.lightPos.xyz - input.fragPos), normalize(input.fragNorm)), 0.0);
  let lightingFactor = min(ambientFactor + visibility * lambertFactor, 1.0);
  let textureColor = textureSample(meshTexture, meshSampler, input.uv);

@@ -1,7 +1,8 @@
  export let vertexWGSL = `struct Scene {
  lightViewProjMatrix: mat4x4f,
  cameraViewProjMatrix: mat4x4f,
- lightPos: vec3f,
+ lightPos: vec4f,
+ // padding: f32, // 👈 fix alignment
  }

  struct Model {
@@ -1,7 +1,8 @@
  export let vertexShadowWGSL = `struct Scene {
  lightViewProjMatrix: mat4x4f,
  cameraViewProjMatrix: mat4x4f,
- lightPos: vec3f,
+ lightPos: vec4f,
+ // padding: f32, // 👈 fix alignment
  }

  struct Model {
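
The same lightPos change is repeated in all three embedded shader sources (fragmentWGSL, vertexWGSL, vertexShadowWGSL), so their Scene layouts stay identical. The commented-out padding line points at the underlying WGSL rule: in a uniform buffer a vec3f has an alignment of 16 but a size of 12, so a trailing vec3f already implies 4 implicit padding bytes; declaring the member as vec4f just makes the 144-byte layout explicit and keeps it in step with the host-side offsets. A small annotated restatement of the struct, written as a JS template string like the package's shader modules (the export name and offset comments are illustrative only):

// Sketch: the shared Scene uniform struct with its byte offsets spelled out.
export let sceneStructWGSL = `struct Scene {
  lightViewProjMatrix : mat4x4f,  // offset   0, size 64
  cameraViewProjMatrix : mat4x4f, // offset  64, size 64
  lightPos : vec4f,               // offset 128, size 16 (vec3f would be size 12 + 4 bytes implicit padding)
}`; // total struct size: 144 bytes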