matrix-engine-wgpu 1.3.8 → 1.3.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.js +2 -1
- package/package.json +1 -1
- package/readme.md +103 -81
- package/src/engine/engine.js +4 -0
- package/src/engine/materials.js +200 -0
- package/src/engine/mesh-obj.js +87 -152
- package/src/shaders/fragment.video.wgsl.js +51 -0
- package/src/shaders/vertex.wgsl.js +2 -2
- package/src/world.js +6 -3
package/index.js
CHANGED
@@ -4,7 +4,7 @@
  * npm import/export
  */
 // import {degToRad, radToDeg} from "./utils";
-import {downloadMeshes} from "./src/engine/loader-obj.js";
+import {downloadMeshes, makeObjSeqArg} from "./src/engine/loader-obj.js";
 import MatrixEngineWGPU from "./src/world.js";
 import {
   addRaycastsAABBListener,
@@ -36,5 +36,6 @@ export {
   rayIntersectsAABB,
   computeAABB,
   computeWorldVertsAndAABB,
+  makeObjSeqArg,
   about
 }
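
With this change `makeObjSeqArg` is re-exported from the package root alongside the existing helpers. A minimal consumer-side sketch (it assumes the npm package is installed and resolved by a bundler; the helper's exact parameters are not part of this diff):

```js
// Sketch: importing the newly re-exported helper from the package root
// instead of the deep "./src/engine/loader-obj.js" path.
import {makeObjSeqArg, computeAABB} from "matrix-engine-wgpu";

// makeObjSeqArg's signature is not shown in this diff; see src/engine/loader-obj.js
// and the readme's "Load OBJ Sequence Animation" example for how it is used.
console.log(typeof makeObjSeqArg, typeof computeAABB); // "function" "function"
```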
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "matrix-engine-wgpu",
-  "version": "1.3.8",
+  "version": "1.3.11",
   "description": "obj sequence anim +HOTFIX raycast, webGPU powered pwa application. Crazy fast rendering with AmmoJS physics support. Simple raycaster hit object added.",
   "main": "index.js",
   "files": [
package/readme.md
CHANGED
@@ -27,10 +27,10 @@ Published on npm as: **`matrix-engine-wgpu`**
 
 ## Goals
 
-
-
-
-
+- ✔️ Support for 3D objects and scene transformations
+- 🎯 Replicate matrix-engine (WebGL) features
+- 📦 Based on the `shadowMapping` sample from [webgpu-samples](https://webgpu.github.io/webgpu-samples/?sample=shadowMapping)
+- ✔️ Ammo.js physics integration (basic cube)
 
 ---
 
@@ -38,17 +38,21 @@ Published on npm as: **`matrix-engine-wgpu`**
 
 ### Scene Management
 
-
+- Canvas is dynamically created in JavaScript—no `<canvas>` element needed in HTML.
 
-
+- Access the main scene objects:
 
 ```js
 app.mainRenderBundle[0];
 ```
+or
+```js
+app.getSceneObjectByName("Sphere1");
+```
 
-
+- Add meshes with `.addMeshObj()`, supporting `.obj` loading, unlit textures, cubes, spheres, etc.
 
-
+- Cleanly destroy the scene:
 
 ```js
 app.destroyProgram();
@@ -72,8 +76,8 @@ mainCameraParams: {
 ### Object Position
 
 Best way for access physics body object:
-
-
+app.matrixAmmo.getBodyByName(name)
+also app.matrixAmmo.getNameByBody
 
 Control object position:
 
@@ -152,14 +156,21 @@ The raycast returns:
 Manual raycast example:
 
 ```js
-window.addEventListener(
-let canvas = document.querySelector(
+window.addEventListener("click", event => {
+  let canvas = document.querySelector("canvas");
   let camera = app.cameras.WASD;
-  const {
+  const {rayOrigin, rayDirection} = getRayFromMouse(event, canvas, camera);
 
   for (const object of app.mainRenderBundle) {
-    if (
-
+    if (
+      rayIntersectsSphere(
+        rayOrigin,
+        rayDirection,
+        object.position,
+        object.raycast.radius
+      )
+    ) {
+      console.log("Object clicked:", object.name);
     }
   }
 });
@@ -168,85 +179,93 @@ window.addEventListener('click', (event) => {
 Automatic raycast listener:
 
 ```js
-addRaycastListener();
+addRaycastListener();
 
-window.addEventListener(
-console.log(
+window.addEventListener("ray.hit.event", event => {
+  console.log("Ray hit:", event.detail.hitObject);
 });
 ```
+
 Engine also exports (box):
-
-
-
-
+
+- addRaycastsAABBListener
+- rayIntersectsAABB,
+- computeAABB,
+- computeWorldVertsAndAABB,
+
 ---
 
 ### How to Load `.obj` Models
 
 ```js
 import MatrixEngineWGPU from "./src/world.js";
-import {
-
-export let application = new MatrixEngineWGPU({
-  useSingleRenderPass: true,
-  canvasSize: 'fullscreen',
-  mainCameraParams: {
-    type: 'WASD',
-    responseCoef: 1000
-  }
-}, () => {
-  addEventListener('AmmoReady', () => {
-    downloadMeshes({
-      welcomeText: "./res/meshes/blender/piramyd.obj",
-      armor: "./res/meshes/obj/armor.obj",
-      sphere: "./res/meshes/blender/sphere.obj",
-      cube: "./res/meshes/blender/cube.obj",
-    }, onLoadObj);
-  });
+import {downloadMeshes} from "./src/engine/loader-obj.js";
 
-… (17 removed lines, content not captured in this diff view)
+export let application = new MatrixEngineWGPU(
+  {
+    useSingleRenderPass: true,
+    canvasSize: "fullscreen",
+    mainCameraParams: {
+      type: "WASD",
+      responseCoef: 1000,
+    },
+  },
+  () => {
+    addEventListener("AmmoReady", () => {
+      downloadMeshes(
+        {
+          welcomeText: "./res/meshes/blender/piramyd.obj",
+          armor: "./res/meshes/obj/armor.obj",
+          sphere: "./res/meshes/blender/sphere.obj",
+          cube: "./res/meshes/blender/cube.obj",
+        },
+        onLoadObj
+      );
     });
 
-
-
-
-
-      texturesPaths: ['./res/meshes/blender/cube.png'],
-      name: 'SpherePhysics',
-      mesh: meshes.sphere,
-      physics: {
-        enabled: true,
-        geometry: "Sphere"
+    function onLoadObj(meshes) {
+      application.myLoadedMeshes = meshes;
+      for (const key in meshes) {
+        console.log(`%c Loaded obj: ${key} `, LOG_MATRIX);
       }
-
+
+      application.addMeshObj({
+        position: {x: 0, y: 2, z: -10},
+        rotation: {x: 0, y: 0, z: 0},
+        rotationSpeed: {x: 0, y: 0, z: 0},
+        texturesPaths: ["./res/meshes/blender/cube.png"],
+        name: "CubePhysics",
+        mesh: meshes.cube,
+        physics: {
+          enabled: true,
+          geometry: "Cube",
+        },
+      });
+
+      application.addMeshObj({
+        position: {x: 0, y: 2, z: -10},
+        rotation: {x: 0, y: 0, z: 0},
+        rotationSpeed: {x: 0, y: 0, z: 0},
+        texturesPaths: ["./res/meshes/blender/cube.png"],
+        name: "SpherePhysics",
+        mesh: meshes.sphere,
+        physics: {
+          enabled: true,
+          geometry: "Sphere",
+        },
+      });
+    }
   }
-
+);
 
 window.app = application;
 ```
 
-
 ### 🔁 Load OBJ Sequence Animation
 
 This example shows how to load and animate a sequence of .obj files to simulate mesh-based animation (e.g. walking character).
 
-
+````js
 import MatrixEngineWGPU from "../src/world.js";
 import { downloadMeshes, makeObjSeqArg } from "../src/engine/loader-obj.js";
 import { LOG_MATRIX } from "../src/engine/utils.js";
@@ -315,15 +334,18 @@ export var loadObjsSequence = function () {
 If this happen less then 15 times (Loading procces) then it is ok probably...
 ```warn
 Draw func (err):TypeError: Failed to execute 'beginRenderPass' on 'GPUCommandEncoder': The provided value is not of type 'GPURenderPassDescriptor'.
-
+````
 
 ## @Note
+
 I act according to the fact that there is only one canvas element on the page.
 
 ## About URLParams
+
 Buildin Url Param check for multiLang.
+
 ```js
-
+urlQuery.lang;
 ```
 
 ---
 
@@ -367,12 +389,12 @@ This is static file storage.
 
 ## Live Demos & Dev Links
 
-
-
+- [Jamb WebGPU Demo (WIP)](https://maximumroulette.com/apps/webgpu/)
+- [CodePen Demo](https://codepen.io/zlatnaspirala/pen/VwNKMar?editors=0011)
   → Uses `empty.js` build from:
   [https://maximumroulette.com/apps/megpu/empty.js](https://maximumroulette.com/apps/megpu/empty.js)
-
-
+- [CodeSandbox Implementation](https://codesandbox.io/p/github/zlatnaspirala/matrix-engine-wgpu/main?file=%2Fpackage.json%3A14%2C16)
+- 📘 Learning Resource: [WebGPU Ray Tracing](https://maierfelix.github.io/2020-01-13-webgpu-ray-tracing/)
 
 ---
 
@@ -386,13 +408,13 @@ You may use, modify, and sell projects based on this code — just keep this not
 
 ### Attribution & Credits
 
-
+- Engine design and scene structure inspired by:
   [WebGPU Samples](https://webgpu.github.io/webgpu-samples/?sample=shadowMapping)
-
+- OBJ Loader adapted from:
   [http://math.hws.edu/graphicsbook/source/webgl/cube-camera.html](http://math.hws.edu/graphicsbook/source/webgl/cube-camera.html)
-
+- Dice roll sound `roll1.wav` sourced from:
   [https://wavbvkery.com/dice-rolling-sound/](https://wavbvkery.com/dice-rolling-sound/)
-
+- Raycasting logic assisted by ChatGPT
 
 ---
 
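The readme names `app.matrixAmmo.getBodyByName(name)` and `app.matrixAmmo.getNameByBody` only in prose. A minimal sketch of how the scene-side and physics-side lookups pair up (object names taken from the readme example above; the `getNameByBody` signature is assumed, not confirmed by this diff):

```js
// Scene object (MEMeshObj) by name vs. Ammo.js rigid body by name.
let sphere = app.getSceneObjectByName("SpherePhysics");   // render-side object
let body = app.matrixAmmo.getBodyByName("SpherePhysics"); // physics-side body

console.log("scene position:", sphere.position);                     // engine position object
console.log("body belongs to:", app.matrixAmmo.getNameByBody(body)); // assumed signature
```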
package/src/engine/engine.js
CHANGED
@@ -60,6 +60,7 @@ class CameraBase {
   get right() {
     return this.right_;
   }
+
   // Assigns `vec` to the first 3 elements of column vector 0 of the camera matrix
   set right(vec) {
     vec3.copy(vec, this.right_);
@@ -69,6 +70,7 @@ class CameraBase {
   get up() {
     return this.up_;
   }
+
   // Assigns `vec` to the first 3 elements of column vector 1 of the camera matrix \ Vec3
   set up(vec) {
     vec3.copy(vec, this.up_);
@@ -78,6 +80,7 @@ class CameraBase {
   get back() {
     return this.back_;
   }
+
   // Assigns `vec` to the first 3 elements of column vector 2 of the camera matrix
   set back(vec) {
     vec3.copy(vec, this.back_);
@@ -87,6 +90,7 @@ class CameraBase {
   get position() {
     return this.position_;
   }
+
   // Assigns `vec` to the first 3 elements of column vector 3 of the camera matrix
   set position(vec) {
     vec3.copy(vec, this.position_);
package/src/engine/materials.js
ADDED
@@ -0,0 +1,200 @@
+/**
+ * @description
+ * Created for matrix-engine-wgpu project.
+ * MeshObj class estends Materials.
+ * @author Nikola Lukic
+ * @email zlatnaspirala@gmail.com
+ */
+
+export default class Materials {
+  constructor(device) {
+    this.device = device;
+    this.isVideo = false;
+    // For shadow comparison
+    this.compareSampler = this.device.createSampler({compare: 'less'});
+    // For image textures (standard sampler)
+    this.imageSampler = this.device.createSampler({
+      magFilter: 'linear',
+      minFilter: 'linear',
+    });
+    // For external video textures (needs to be filtering sampler too!)
+    this.videoSampler = this.device.createSampler({
+      magFilter: 'linear',
+      minFilter: 'linear',
+    });
+  }
+
+  async loadTex0(texturesPaths) {
+    this.sampler = this.device.createSampler({
+      magFilter: 'linear',
+      minFilter: 'linear',
+    });
+    return new Promise(async (resolve) => {
+      const response = await fetch(texturesPaths[0]);
+      const imageBitmap = await createImageBitmap(await response.blob());
+      this.texture0 = this.device.createTexture({
+        size: [imageBitmap.width, imageBitmap.height, 1], // REMOVED 1
+        format: 'rgba8unorm',
+        usage:
+          GPUTextureUsage.TEXTURE_BINDING |
+          GPUTextureUsage.COPY_DST |
+          GPUTextureUsage.RENDER_ATTACHMENT,
+      });
+      this.device.queue.copyExternalImageToTexture(
+        {source: imageBitmap},
+        {texture: this.texture0},
+        [imageBitmap.width, imageBitmap.height]
+      );
+      resolve()
+    })
+  }
+
+  async loadVideoTexture(arg) {
+    this.isVideo = true;
+    if(arg.type === 'video') {
+      this.video = document.createElement('video');
+      this.video.src = arg.src || 'res/videos/tunel.mp4';
+      this.video.crossOrigin = 'anonymous';
+      this.video.autoplay = true;
+      this.video.loop = true;
+      document.body.append(this.video);
+      this.video.style.display = 'none';
+      await this.video.play();
+    } else if(arg.type === 'videoElement') {
+      this.video = arg.el;
+      await this.video.play();
+    }
+
+    this.sampler = this.device.createSampler({
+      magFilter: 'linear',
+      minFilter: 'linear',
+    });
+
+    // ✅ Now
+    // includes externalTexture type
+    this.createLayoutForRender();
+    this.setupPipeline();
+    setTimeout(() => this.createBindGroupForRender(), 1500);
+  }
+
+  updateVideoTexture() {
+    if(!this.video || this.video.readyState < 2) return;
+    this.externalTexture = this.device.importExternalTexture({source: this.video});
+    this.createBindGroupForRender();
+  }
+
+  createBindGroupForRender() {
+    const textureResource = this.isVideo
+      ? this.externalTexture // must be set via updateVideoTexture
+      : this.texture0.createView();
+    // Log all bindings to debug
+    if(!textureResource || !this.sceneUniformBuffer || !this.shadowDepthTextureView || !this.sampler) {
+      console.warn("❗Missing res skipping...");
+      return;
+    }
+
+    if(this.isVideo == true) {
+      this.sceneBindGroupForRender = this.device.createBindGroup({
+        layout: this.bglForRender,
+        entries: [
+          {
+            binding: 0,
+            resource: {buffer: this.sceneUniformBuffer},
+          },
+          {
+            binding: 1,
+            resource: this.shadowDepthTextureView,
+          },
+          {
+            binding: 2,
+            resource: this.compareSampler,
+          },
+          {
+            binding: 3,
+            resource: textureResource,
+          },
+          {
+            binding: 4,
+            resource: this.videoSampler,
+          },
+        ],
+      });
+    } else {
+      this.sceneBindGroupForRender = this.device.createBindGroup({
+        layout: this.bglForRender,
+        entries: [
+          {
+            binding: 0,
+            resource: {buffer: this.sceneUniformBuffer},
+          },
+          {
+            binding: 1,
+            resource: this.shadowDepthTextureView,
+          },
+          {
+            binding: 2,
+            resource: this.compareSampler,
+          },
+          {
+            binding: 3,
+            resource: textureResource,
+          },
+          {
+            binding: 4,
+            resource: this.imageSampler,
+          },
+        ],
+      });
+    }
+  }
+
+  createLayoutForRender() {
+    this.bglForRender = this.device.createBindGroupLayout({
+      entries: [
+        {
+          binding: 0,
+          visibility: GPUShaderStage.VERTEX | GPUShaderStage.FRAGMENT,
+          buffer: {type: 'uniform'},
+        },
+        {
+          binding: 1,
+          visibility: GPUShaderStage.FRAGMENT,
+          texture: {sampleType: 'depth'},
+        },
+        {
+          binding: 2,
+          visibility: GPUShaderStage.FRAGMENT,
+          sampler: {type: 'comparison'},
+        },
+        ...(this.isVideo
+          ? [ // VIDEO
+              {
+                binding: 3,
+                visibility: GPUShaderStage.FRAGMENT,
+                externalTexture: {},
+              },
+              {
+                binding: 4,
+                visibility: GPUShaderStage.FRAGMENT,
+                sampler: {type: 'filtering'}, // for video sampling
+              },
+            ]
+          : [ // IMAGE
+              {
+                binding: 3,
+                visibility: GPUShaderStage.FRAGMENT,
+                texture: {
+                  sampleType: 'float',
+                  viewDimension: '2d',
+                },
+              },
+              {
+                binding: 4,
+                visibility: GPUShaderStage.FRAGMENT,
+                sampler: {type: 'filtering'},
+              },
+            ])
+      ],
+    });
+  }
+}
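A note on why `updateVideoTexture()` re-imports on every draw: a `GPUExternalTexture` returned by `device.importExternalTexture()` is only valid for the frame in which it was imported, so the bind group referencing it has to be rebuilt each frame. A minimal standalone sketch of that pattern in plain WebGPU, independent of the engine classes (`makeBindGroup` and `render` are hypothetical callbacks):

```js
// Per-frame external-texture pattern used by Materials.updateVideoTexture().
function drawVideoFrame(device, video, makeBindGroup, render) {
  if (video.readyState < 2) return;                                // same guard as updateVideoTexture()
  const external = device.importExternalTexture({source: video});  // valid for this frame only
  const bindGroup = makeBindGroup(external);                       // rebuild the bind group around it
  render(bindGroup);                                               // encode and submit this frame's pass
}
```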
package/src/engine/mesh-obj.js
CHANGED
@@ -5,10 +5,12 @@ import {vertexShadowWGSL} from '../shaders/vertexShadow.wgsl';
 import {fragmentWGSL} from '../shaders/fragment.wgsl';
 import {vertexWGSL} from '../shaders/vertex.wgsl';
 import {degToRad, genName, LOG_FUNNY_SMALL} from './utils';
-
+import Materials from './materials';
+import {fragmentVideoWGSL} from '../shaders/fragment.video.wgsl';
 
-export default class MEMeshObj {
+export default class MEMeshObj extends Materials {
   constructor(canvas, device, context, o) {
+    super(device);
     if(typeof o.name === 'undefined') o.name = genName(9);
     if(typeof o.raycast === 'undefined') {
       this.raycast = {
@@ -28,6 +30,8 @@ export default class MEMeshObj {
     // comes from engine not from args
     this.clearColor = "red";
 
+    this.video = null;
+
     // Mesh stuff - for single mesh or t-posed (fiktive-first in loading order)
     this.mesh = o.mesh;
     this.mesh.uvs = this.mesh.textures;
@@ -73,7 +77,7 @@ export default class MEMeshObj {
     this.projectionMatrix = mat4.perspective((2 * Math.PI) / 5, aspect, 1, 2000.0);
     this.modelViewProjectionMatrix = mat4.create();
     // console.log('cube added texturesPaths: ', this.texturesPaths)
-    this.loadTex0(this.texturesPaths
+    this.loadTex0(this.texturesPaths).then(() => {
       // console.log('loaded tex buffer for mesh:', this.texture0)
       resolve()
     })
@@ -82,10 +86,10 @@ export default class MEMeshObj {
 
     this.runProgram().then(() => {
       const aspect = canvas.width / canvas.height;
-      const presentationFormat = navigator.gpu.getPreferredCanvasFormat();
+      // const presentationFormat = navigator.gpu.getPreferredCanvasFormat();
       this.context.configure({
        device: this.device,
-        format: presentationFormat,
+        format: this.presentationFormat,
        alphaMode: 'premultiplied',
      });
 
@@ -189,7 +193,7 @@ export default class MEMeshObj {
       },
     ];
 
-
+    this.primitive = {
       topology: 'triangle-list',
       // cullMode: 'back', // ORI
       cullMode: 'none', // ORI
@@ -225,82 +229,45 @@ export default class MEMeshObj {
        depthCompare: 'less',
        format: 'depth32float',
      },
-      primitive,
+      primitive: this.primitive,
    });
 
    // Create a bind group layout which holds the scene uniforms and
    // the texture+sampler for depth. We create it manually because the WebPU
    // implementation doesn't infer this from the shader (yet).
-    this.
-    … (32 removed lines, content not captured in this diff view)
-          sampler: {
-            type: 'filtering',
-          }
-        }
-      ]
-    });
-
-    this.pipeline = this.device.createRenderPipeline({
-      layout: this.device.createPipelineLayout({
-        bindGroupLayouts: [this.bglForRender, this.uniformBufferBindGroupLayout],
-      }),
-      vertex: {
-        module: this.device.createShaderModule({
-          code: vertexWGSL,
-        }),
-        buffers: this.vertexBuffers,
-      },
-      fragment: {
-        module: this.device.createShaderModule({
-          code: fragmentWGSL,
-        }),
-        targets: [
-          {
-            format: presentationFormat,
-          },
-        ],
-        constants: {
-          shadowDepthTextureSize: this.shadowDepthTextureSize,
-        },
-      },
-      depthStencil: {
-        depthWriteEnabled: true,
-        depthCompare: 'less',
-        format: 'depth24plus-stencil8',
-      },
-      primitive,
-    });
+    this.createLayoutForRender()
+
+    this.setupPipeline();
+    // this.pipeline = this.device.createRenderPipeline({
+    //   layout: this.device.createPipelineLayout({
+    //     bindGroupLayouts: [this.bglForRender, this.uniformBufferBindGroupLayout],
+    //   }),
+    //   vertex: {
+    //     module: this.device.createShaderModule({
+    //       code: vertexWGSL,
+    //     }),
+    //     buffers: this.vertexBuffers,
+    //   },
+    //   fragment: {
+    //     module: this.device.createShaderModule({
+    //       code: fragmentWGSL,
+    //     }),
+    //     targets: [
+    //       {
+    //         format: presentationFormat,
+    //       },
+    //     ],
+    //     constants: {
+    //       shadowDepthTextureSize: this.shadowDepthTextureSize,
+    //     },
+    //   },
+    //   depthStencil: {
+    //     depthWriteEnabled: true,
+    //     depthCompare: 'less',
+    //     format: 'depth24plus-stencil8',
+    //   },
+    //   primitive,
+    // });
 
     const depthTexture = this.device.createTexture({
       size: [canvas.width, canvas.height],
@@ -355,35 +322,8 @@ export default class MEMeshObj {
       ],
     });
 
-
-
-      entries: [
-        {
-          binding: 0,
-          resource: {
-            buffer: this.sceneUniformBuffer,
-          },
-        },
-        {
-          binding: 1,
-          resource: this.shadowDepthTextureView,
-        },
-        {
-          binding: 2,
-          resource: this.device.createSampler({
-            compare: 'less',
-          }),
-        },
-        {
-          binding: 3,
-          resource: this.texture0.createView(),
-        },
-        {
-          binding: 4,
-          resource: this.sampler,
-        },
-      ],
-    });
+    // --------------------------
+    this.createBindGroupForRender();
 
     this.modelBindGroup = this.device.createBindGroup({
       layout: this.uniformBufferBindGroupLayout,
@@ -429,13 +369,13 @@ export default class MEMeshObj {
     }
 
     this.getModelMatrix = (pos) => {
-      let modelMatrix =
+      let modelMatrix = mat4.identity();
       mat4.translate(modelMatrix, [pos.x, pos.y, pos.z], modelMatrix);
       if(this.itIsPhysicsBody) {
         mat4.rotate(modelMatrix,
           [this.rotation.axis.x, this.rotation.axis.y, this.rotation.axis.z],
-
-
+          degToRad(this.rotation.angle),
+          modelMatrix
         );
       } else {
         mat4.rotateX(modelMatrix, this.rotation.getRotX(), modelMatrix);
@@ -589,59 +529,54 @@ export default class MEMeshObj {
     ///////////////////////
   }
 
-
-
-
-
-
+  setupPipeline = () => {
+    this.pipeline = this.device.createRenderPipeline({
+      layout: this.device.createPipelineLayout({
+        bindGroupLayouts: [this.bglForRender, this.uniformBufferBindGroupLayout],
+      }),
+      vertex: {
+        entryPoint: 'main', // ✅ Add this
+        module: this.device.createShaderModule({
+          code: vertexWGSL,
+        }),
+        buffers: this.vertexBuffers,
+      },
+      fragment: {
+        entryPoint: 'main', // ✅ Add this
+        module: this.device.createShaderModule({
+          code: (this.isVideo == true ? fragmentVideoWGSL : fragmentWGSL),
+        }),
+        targets: [
+          {
+            format: this.presentationFormat,
+          },
+        ],
+        constants: {
+          shadowDepthTextureSize: this.shadowDepthTextureSize,
+        },
+      },
+      depthStencil: {
+        depthWriteEnabled: true,
+        depthCompare: 'less',
+        format: 'depth24plus-stencil8',
+      },
+      primitive: this.primitive,
     });
-
-    return new Promise(async (resolve) => {
-      const response = await fetch(texturesPaths[0]);
-
-      // const blob = await response.blob();
-      // if(!blob.type.startsWith('image/')) {
-      //   console.error("Unexpected texture response type:", blob.type);
-      //   return;
-      // }
-
-      // const imageBitmap = await createImageBitmap(blob);
-      const imageBitmap = await createImageBitmap(await response.blob());
-      this.texture0 = device.createTexture({
-        size: [imageBitmap.width, imageBitmap.height, 1], // REMOVED 1
-        format: 'rgba8unorm',
-        usage:
-          GPUTextureUsage.TEXTURE_BINDING |
-          GPUTextureUsage.COPY_DST |
-          GPUTextureUsage.RENDER_ATTACHMENT,
-      });
-
-      device.queue.copyExternalImageToTexture(
-        {source: imageBitmap},
-        {texture: this.texture0},
-        [imageBitmap.width, imageBitmap.height]
-      );
-      resolve()
-    })
   }
 
-  draw = (
+  draw = () => {
     if(this.done == false) return;
     const transformationMatrix = this.getTransformationMatrix(this.position);
-
-    this.device.queue.writeBuffer(
-      this.sceneUniformBuffer,
-      64,
-      transformationMatrix.buffer,
-      transformationMatrix.byteOffset,
-      transformationMatrix.byteLength
-    );
+    this.device.queue.writeBuffer(this.sceneUniformBuffer, 64, transformationMatrix.buffer, transformationMatrix.byteOffset, transformationMatrix.byteLength);
     this.renderPassDescriptor.colorAttachments[0].view = this.context
       .getCurrentTexture()
       .createView();
   }
 
   drawElements = (renderPass) => {
+    if(this.isVideo) {
+      this.updateVideoTexture();
+    }
     renderPass.setBindGroup(0, this.sceneBindGroupForRender);
     renderPass.setBindGroup(1, this.modelBindGroup);
     renderPass.setVertexBuffer(0, this.vertexBuffer);
@@ -651,7 +586,7 @@ export default class MEMeshObj {
     renderPass.drawIndexed(this.indexCount);
   }
 
-  // test
+  // test
   createGPUBuffer(dataArray, usage) {
     if(!dataArray || typeof dataArray.length !== 'number') {
       throw new Error('Invalid data array passed to createGPUBuffer');
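The changes above make `MEMeshObj` inherit the video path from `Materials` and pick `fragmentVideoWGSL` when `isVideo` is set. A hedged sketch of opting a mesh into that path, using only the argument shapes visible in `loadVideoTexture()` ({type: 'video', src} or {type: 'videoElement', el}); how this is wired through `addMeshObj` options, if at all, is not shown in this diff:

```js
// Switch an existing mesh to the external-video texture path.
const mesh = app.getSceneObjectByName("CubePhysics"); // MEMeshObj now extends Materials
mesh.loadVideoTexture({type: "video", src: "res/videos/tunel.mp4"}).then(() => {
  // From here on: mesh.isVideo is true, setupPipeline() compiled fragmentVideoWGSL,
  // and drawElements() refreshes the external texture via updateVideoTexture().
  console.log("video texture active on", mesh.name);
});
```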
package/src/shaders/fragment.video.wgsl.js
ADDED
@@ -0,0 +1,51 @@
+export let fragmentVideoWGSL = `override shadowDepthTextureSize: f32 = 1024.0;
+
+struct Scene {
+  lightViewProjMatrix : mat4x4f,
+  cameraViewProjMatrix : mat4x4f,
+  lightPos : vec3f,
+}
+
+@group(0) @binding(0) var<uniform> scene : Scene;
+@group(0) @binding(1) var shadowMap: texture_depth_2d;
+@group(0) @binding(2) var shadowSampler: sampler_comparison;
+@group(0) @binding(3) var meshTexture: texture_external;
+@group(0) @binding(4) var meshSampler: sampler;
+
+// ❌ No binding(4) here!
+
+struct FragmentInput {
+  @location(0) shadowPos : vec3f,
+  @location(1) fragPos : vec3f,
+  @location(2) fragNorm : vec3f,
+  @location(3) uv : vec2f,
+}
+
+const albedo = vec3f(0.9);
+const ambientFactor = 0.7;
+
+@fragment
+fn main(input : FragmentInput) -> @location(0) vec4f {
+  // Shadow filtering
+  var visibility = 0.0;
+  let oneOverShadowDepthTextureSize = 1.0 / shadowDepthTextureSize;
+  for (var y = -1; y <= 1; y++) {
+    for (var x = -1; x <= 1; x++) {
+      let offset = vec2f(vec2(x, y)) * oneOverShadowDepthTextureSize;
+      visibility += textureSampleCompare(
+        shadowMap, shadowSampler,
+        input.shadowPos.xy + offset, input.shadowPos.z - 0.007
+      );
+    }
+  }
+  visibility /= 9.0;
+
+  let lambertFactor = max(dot(normalize(scene.lightPos - input.fragPos), normalize(input.fragNorm)), 0.0);
+  let lightingFactor = min(ambientFactor + visibility * lambertFactor, 1.0);
+
+  // ✅ Correct way to sample video texture
+  let textureColor = textureSampleBaseClampToEdge(meshTexture, meshSampler, input.uv);
+
+  return vec4(textureColor.rgb * lightingFactor * albedo, 1.0);
+}
+`;
package/src/world.js
CHANGED
|
@@ -84,7 +84,6 @@ export default class MatrixEngineWGPU {
|
|
|
84
84
|
WASD: new WASDCamera({position: initialCameraPosition}),
|
|
85
85
|
};
|
|
86
86
|
|
|
87
|
-
//
|
|
88
87
|
this.label = new MultiLang()
|
|
89
88
|
if(urlQuery.lang != null) {
|
|
90
89
|
this.label.loadMultilang(urlQuery.lang).then((r) => {
|
|
@@ -135,6 +134,10 @@ export default class MatrixEngineWGPU {
|
|
|
135
134
|
this.run(callback)
|
|
136
135
|
};
|
|
137
136
|
|
|
137
|
+
getSceneObjectByName(name) {
|
|
138
|
+
return this.mainRenderBundle.find((sceneObject) => sceneObject.name === name )
|
|
139
|
+
}
|
|
140
|
+
|
|
138
141
|
// Not in use for now
|
|
139
142
|
addCube = (o) => {
|
|
140
143
|
if(typeof o === 'undefined') {
|
|
@@ -322,7 +325,7 @@ export default class MatrixEngineWGPU {
|
|
|
322
325
|
}
|
|
323
326
|
|
|
324
327
|
destroyProgram = () => {
|
|
325
|
-
this.mainRenderBundle =
|
|
328
|
+
this.mainRenderBundle = [];
|
|
326
329
|
this.canvas.remove();
|
|
327
330
|
}
|
|
328
331
|
|
|
@@ -337,7 +340,7 @@ export default class MatrixEngineWGPU {
|
|
|
337
340
|
meItem.position.update();
|
|
338
341
|
})
|
|
339
342
|
|
|
340
|
-
this.matrixAmmo.updatePhysics()
|
|
343
|
+
if (this.matrixAmmo) this.matrixAmmo.updatePhysics();
|
|
341
344
|
|
|
342
345
|
this.mainRenderBundle.forEach((meItem, index) => {
|
|
343
346
|
meItem.draw(commandEncoder);
|