matrix-engine-wgpu 1.3.9 → 1.3.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "matrix-engine-wgpu",
- "version": "1.3.9",
+ "version": "1.3.12",
  "description": "obj sequence anim +HOTFIX raycast, webGPU powered pwa application. Crazy fast rendering with AmmoJS physics support. Simple raycaster hit object added.",
  "main": "index.js",
  "files": [
package/readme.md CHANGED
@@ -27,10 +27,10 @@ Published on npm as: **`matrix-engine-wgpu`**

  ## Goals

- * ✔️ Support for 3D objects and scene transformations
- * 🎯 Replicate matrix-engine (WebGL) features
- * 📦 Based on the `shadowMapping` sample from [webgpu-samples](https://webgpu.github.io/webgpu-samples/?sample=shadowMapping)
- * ✔️ Ammo.js physics integration (basic cube)
+ - ✔️ Support for 3D objects and scene transformations
+ - 🎯 Replicate matrix-engine (WebGL) features
+ - 📦 Based on the `shadowMapping` sample from [webgpu-samples](https://webgpu.github.io/webgpu-samples/?sample=shadowMapping)
+ - ✔️ Ammo.js physics integration (basic cube)

  ---

@@ -38,17 +38,21 @@ Published on npm as: **`matrix-engine-wgpu`**

  ### Scene Management

- * Canvas is dynamically created in JavaScript—no `<canvas>` element needed in HTML.
+ - Canvas is dynamically created in JavaScript—no `<canvas>` element needed in HTML.

- * Access the main scene objects:
+ - Access the main scene objects:

  ```js
  app.mainRenderBundle[0];
  ```
+ or
+ ```js
+ app.getSceneObjectByName("Sphere1");
+ ```
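
The lookup added here is a plain `Array.prototype.find` over `mainRenderBundle` (see the `src/world.js` hunk at the end of this diff), so it returns `undefined` when nothing matches. A minimal usage sketch, assuming a scene object named "Sphere1" exists:

```js
// Minimal sketch: assumes a mesh was registered under the name "Sphere1".
// getSceneObjectByName returns undefined when no object matches.
let sphere = app.getSceneObjectByName("Sphere1");
if (sphere) sphere.position.y = 5;
```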

- * Add meshes with `.addMeshObj()`, supporting `.obj` loading, unlit textures, cubes, spheres, etc.
+ - Add meshes with `.addMeshObj()`, supporting `.obj` loading, unlit textures, cubes, spheres, etc.

- * Cleanly destroy the scene:
+ - Cleanly destroy the scene:

  ```js
  app.destroyProgram();
@@ -72,8 +76,8 @@ mainCameraParams: {
  ### Object Position

  Best way to access a physics body object:
- app.matrixAmmo.getBodyByName(name)
- also app.matrixAmmo.getNameByBody
+ app.matrixAmmo.getBodyByName(name)
+ also app.matrixAmmo.getNameByBody
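
A short sketch of the pair in use; the body name "CubePhysics" matches the physics example later in this readme:

```js
// Sketch: fetch the Ammo body for a named mesh, then reverse the lookup.
// Assumes a mesh named "CubePhysics" was added with physics.enabled = true.
let body = app.matrixAmmo.getBodyByName("CubePhysics");
let name = app.matrixAmmo.getNameByBody(body);
console.log(name); // "CubePhysics"
```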

  Control object position:

@@ -128,7 +132,7 @@ app.mainRenderBundle[0].rotation.rotationSpeed.y = 0;

  ---

- ### Camera Example
+ ### 3D Camera Example

  Manipulate WASD camera:
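
The camera snippet itself falls outside this hunk's context; a hedged sketch of moving the built-in WASD camera (`app.cameras.WASD` is created in `src/world.js`, and the `position` setter comes from `CameraBase`, shown further below):

```js
// Hedged sketch: reposition the WASD camera. The position setter copies
// the vector into column 3 of the camera matrix (see CameraBase below).
let cam = app.cameras.WASD;
cam.position = [0, 2, 10];
```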
@@ -152,14 +156,21 @@ The raycast returns:
  Manual raycast example:

  ```js
- window.addEventListener('click', (event) => {
- let canvas = document.querySelector('canvas');
+ window.addEventListener("click", event => {
+ let canvas = document.querySelector("canvas");
  let camera = app.cameras.WASD;
- const { rayOrigin, rayDirection } = getRayFromMouse(event, canvas, camera);
+ const {rayOrigin, rayDirection} = getRayFromMouse(event, canvas, camera);

  for (const object of app.mainRenderBundle) {
- if (rayIntersectsSphere(rayOrigin, rayDirection, object.position, object.raycast.radius)) {
- console.log('Object clicked:', object.name);
+ if (
+ rayIntersectsSphere(
+ rayOrigin,
+ rayDirection,
+ object.position,
+ object.raycast.radius
+ )
+ ) {
+ console.log("Object clicked:", object.name);
  }
  }
  });
@@ -168,80 +179,88 @@ window.addEventListener('click', (event) => {
  Automatic raycast listener:

  ```js
- addRaycastListener();
+ addRaycastListener();

- window.addEventListener('ray.hit.event', (event) => {
- console.log('Ray hit:', event.detail.hitObject);
+ window.addEventListener("ray.hit.event", event => {
+ console.log("Ray hit:", event.detail.hitObject);
  });
  ```
+
  The engine also exports AABB (box) helpers:
- - addRaycastsAABBListener
- - rayIntersectsAABB,
- - computeAABB,
- - computeWorldVertsAndAABB,
+
+ - addRaycastsAABBListener
+ - rayIntersectsAABB,
+ - computeAABB,
+ - computeWorldVertsAndAABB,
+
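
A hedged sketch of wiring the box variant, assuming `addRaycastsAABBListener` mirrors the sphere-based listener above and dispatches the same `ray.hit.event` (the payload shape is an assumption):

```js
// Hedged sketch: assumes the AABB listener reuses the ray.hit.event
// CustomEvent with the hit object exposed on event.detail.
addRaycastsAABBListener();

window.addEventListener("ray.hit.event", event => {
  console.log("AABB ray hit:", event.detail.hitObject);
});
```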
  ---

  ### How to Load `.obj` Models

  ```js
  import MatrixEngineWGPU from "./src/world.js";
- import { downloadMeshes } from './src/engine/loader-obj.js';
-
- export let application = new MatrixEngineWGPU({
- useSingleRenderPass: true,
- canvasSize: 'fullscreen',
- mainCameraParams: {
- type: 'WASD',
- responseCoef: 1000
- }
- }, () => {
- addEventListener('AmmoReady', () => {
- downloadMeshes({
- welcomeText: "./res/meshes/blender/piramyd.obj",
- armor: "./res/meshes/obj/armor.obj",
- sphere: "./res/meshes/blender/sphere.obj",
- cube: "./res/meshes/blender/cube.obj",
- }, onLoadObj);
- });
-
- function onLoadObj(meshes) {
- application.myLoadedMeshes = meshes;
- for (const key in meshes) {
- console.log(`%c Loaded obj: ${key} `, LOG_MATRIX);
- }
+ import {downloadMeshes} from "./src/engine/loader-obj.js";

- application.addMeshObj({
- position: {x: 0, y: 2, z: -10},
- rotation: {x: 0, y: 0, z: 0},
- rotationSpeed: {x: 0, y: 0, z: 0},
- texturesPaths: ['./res/meshes/blender/cube.png'],
- name: 'CubePhysics',
- mesh: meshes.cube,
- physics: {
- enabled: true,
- geometry: "Cube"
- }
+ export let application = new MatrixEngineWGPU(
+ {
+ useSingleRenderPass: true,
+ canvasSize: "fullscreen",
+ mainCameraParams: {
+ type: "WASD",
+ responseCoef: 1000,
+ },
+ },
+ () => {
+ addEventListener("AmmoReady", () => {
+ downloadMeshes(
+ {
+ welcomeText: "./res/meshes/blender/piramyd.obj",
+ armor: "./res/meshes/obj/armor.obj",
+ sphere: "./res/meshes/blender/sphere.obj",
+ cube: "./res/meshes/blender/cube.obj",
+ },
+ onLoadObj
+ );
  });

- application.addMeshObj({
- position: {x: 0, y: 2, z: -10},
- rotation: {x: 0, y: 0, z: 0},
- rotationSpeed: {x: 0, y: 0, z: 0},
- texturesPaths: ['./res/meshes/blender/cube.png'],
- name: 'SpherePhysics',
- mesh: meshes.sphere,
- physics: {
- enabled: true,
- geometry: "Sphere"
+ function onLoadObj(meshes) {
+ application.myLoadedMeshes = meshes;
+ for (const key in meshes) {
+ console.log(`%c Loaded obj: ${key} `, LOG_MATRIX);
  }
- });
+
+ application.addMeshObj({
+ position: {x: 0, y: 2, z: -10},
+ rotation: {x: 0, y: 0, z: 0},
+ rotationSpeed: {x: 0, y: 0, z: 0},
+ texturesPaths: ["./res/meshes/blender/cube.png"],
+ name: "CubePhysics",
+ mesh: meshes.cube,
+ physics: {
+ enabled: true,
+ geometry: "Cube",
+ },
+ });
+
+ application.addMeshObj({
+ position: {x: 0, y: 2, z: -10},
+ rotation: {x: 0, y: 0, z: 0},
+ rotationSpeed: {x: 0, y: 0, z: 0},
+ texturesPaths: ["./res/meshes/blender/cube.png"],
+ name: "SpherePhysics",
+ mesh: meshes.sphere,
+ physics: {
+ enabled: true,
+ geometry: "Sphere",
+ },
+ });
+ }
  }
- });
+ );

  window.app = application;
  ```

-
  ### 🔁 Load OBJ Sequence Animation

  This example shows how to load and animate a sequence of .obj files to simulate mesh-based animation (e.g. a walking character).
@@ -308,22 +327,38 @@ export var loadObjsSequence = function () {

  window.app = loadObjFile;
  };
+ ```
+
+ ### 📽️ Video textures
+
+ ```js
+ TEST.loadVideoTexture({
+ type: 'video', // 'video' | 'camera'; not tested yet: 'canvas2d', 'canvas2d-inline'
+ src: 'res/videos/tunel.mp4'
+ });
+ ```
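
A sketch of the webcam variant, based on the `loadVideoTexture` implementation added later in this diff (`TEST` stands for any mesh object):

```js
// Sketch: 'camera' requests a webcam stream via getUserMedia
// (1280x720 ideal) and feeds it to the mesh as an external texture.
TEST.loadVideoTexture({
  type: 'camera'
});
```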

- ### 📽️ Preview
+ <pre>
+ | Scenario                       | Best Approach                      |
+ | ------------------------------ | ---------------------------------- |
+ | Dynamic 2D canvas animation    | `canvas.captureStream()` → `video` |
+ | Static canvas snapshot         | `createImageBitmap(canvas)`        |
+ | Replaying real video or webcam | Direct `video` element             |
+ </pre>
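
The first table row corresponds to the engine's `canvas2d-inline` mode, which wraps exactly that `captureStream()` path; a sketch based on the materials implementation below (`canvaInlineProgram` is called every animation frame):

```js
// Sketch: drive the texture from a procedurally drawn 2D canvas. The engine
// calls canvas.captureStream() and plays the stream in a hidden <video>.
TEST.loadVideoTexture({
  type: 'canvas2d-inline',
  width: 256,
  height: 256,
  canvaInlineProgram: (ctx, canvas) => {
    ctx.fillStyle = `hsl(${(Date.now() / 20) % 360}, 80%, 50%)`;
    ctx.fillRect(0, 0, canvas.width, canvas.height);
  }
});
```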

- ## @Note
+ ### Note
  If this happens fewer than 15 times during the loading process, it is probably fine...
- ```warn
+
+ ```json
  Draw func (err):TypeError: Failed to execute 'beginRenderPass' on 'GPUCommandEncoder': The provided value is not of type 'GPURenderPassDescriptor'.
  ```

- ## @Note
- I act according to the fact that there is only one canvas element on the page.
-
  ## About URLParams
+
  Built-in URL param check for multiLang.
+
  ```js
- urlQuery.lang
+ urlQuery.lang;
  ```
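
A short sketch of what the engine does with it, mirroring the `src/world.js` hunk near the end of this diff:

```js
// Sketch: with ?lang=en in the page URL, urlQuery.lang reads "en" and the
// engine loads the matching multilang labels (see src/world.js below).
if (urlQuery.lang != null) {
  console.log('Requested language:', urlQuery.lang);
}
```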

  ---
@@ -367,12 +402,12 @@ This is static file storage.

  ## Live Demos & Dev Links

- * [Jamb WebGPU Demo (WIP)](https://maximumroulette.com/apps/webgpu/)
- * [CodePen Demo](https://codepen.io/zlatnaspirala/pen/VwNKMar?editors=0011)
+ - [Jamb WebGPU Demo (WIP)](https://maximumroulette.com/apps/webgpu/)
+ - [CodePen Demo](https://codepen.io/zlatnaspirala/pen/VwNKMar?editors=0011)
  → Uses `empty.js` build from:
  [https://maximumroulette.com/apps/megpu/empty.js](https://maximumroulette.com/apps/megpu/empty.js)
- * [CodeSandbox Implementation](https://codesandbox.io/p/github/zlatnaspirala/matrix-engine-wgpu/main?file=%2Fpackage.json%3A14%2C16)
- * 📘 Learning Resource: [WebGPU Ray Tracing](https://maierfelix.github.io/2020-01-13-webgpu-ray-tracing/)
+ - [CodeSandbox Implementation](https://codesandbox.io/p/github/zlatnaspirala/matrix-engine-wgpu/main?file=%2Fpackage.json%3A14%2C16)
+ - 📘 Learning Resource: [WebGPU Ray Tracing](https://maierfelix.github.io/2020-01-13-webgpu-ray-tracing/)

  ---

@@ -386,13 +421,13 @@ You may use, modify, and sell projects based on this code — just keep this not

  ### Attribution & Credits

- * Engine design and scene structure inspired by:
+ - Engine design and scene structure inspired by:
  [WebGPU Samples](https://webgpu.github.io/webgpu-samples/?sample=shadowMapping)
- * OBJ Loader adapted from:
+ - OBJ Loader adapted from:
  [http://math.hws.edu/graphicsbook/source/webgl/cube-camera.html](http://math.hws.edu/graphicsbook/source/webgl/cube-camera.html)
- * Dice roll sound `roll1.wav` sourced from:
+ - Dice roll sound `roll1.wav` sourced from:
  [https://wavbvkery.com/dice-rolling-sound/](https://wavbvkery.com/dice-rolling-sound/)
- * Raycasting logic assisted by ChatGPT
+ - Raycasting logic assisted by ChatGPT

  ---

@@ -60,6 +60,7 @@ class CameraBase {
  get right() {
  return this.right_;
  }
+
  // Assigns `vec` to the first 3 elements of column vector 0 of the camera matrix
  set right(vec) {
  vec3.copy(vec, this.right_);
@@ -69,6 +70,7 @@ class CameraBase {
  get up() {
  return this.up_;
  }
+
  // Assigns `vec` to the first 3 elements of column vector 1 of the camera matrix (Vec3)
  set up(vec) {
  vec3.copy(vec, this.up_);
@@ -78,6 +80,7 @@ class CameraBase {
  get back() {
  return this.back_;
  }
+
  // Assigns `vec` to the first 3 elements of column vector 2 of the camera matrix
  set back(vec) {
  vec3.copy(vec, this.back_);
@@ -87,6 +90,7 @@ class CameraBase {
  get position() {
  return this.position_;
  }
+
  // Assigns `vec` to the first 3 elements of column vector 3 of the camera matrix
  set position(vec) {
  vec3.copy(vec, this.position_);
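
Taken together, these accessors treat the camera matrix as four named columns; a sketch of the layout they imply (the literal vectors are only illustrative):

```js
// Sketch: the 4x4 camera matrix columns the setters above write into.
// column 0: right, column 1: up, column 2: back, column 3: position
camera.right = [1, 0, 0];
camera.up = [0, 1, 0];
camera.back = [0, 0, 1];
camera.position = [0, 2, 10];
```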
@@ -0,0 +1,273 @@
+ /**
+ * @description
+ * Created for matrix-engine-wgpu project.
+ * MeshObj class extends Materials.
+ * @author Nikola Lukic
+ * @email zlatnaspirala@gmail.com
+ */
+
+ export default class Materials {
+ constructor(device) {
+ this.device = device;
+ this.isVideo = false;
+ // For shadow comparison
+ this.compareSampler = this.device.createSampler({compare: 'less'});
+ // For image textures (standard sampler)
+ this.imageSampler = this.device.createSampler({
+ magFilter: 'linear',
+ minFilter: 'linear',
+ });
+ // For external video textures (needs to be a filtering sampler too!)
+ this.videoSampler = this.device.createSampler({
+ magFilter: 'linear',
+ minFilter: 'linear',
+ });
+ }
+
+ async loadTex0(texturesPaths) {
+ this.sampler = this.device.createSampler({
+ magFilter: 'linear',
+ minFilter: 'linear',
+ });
+ return new Promise(async (resolve) => {
+ const response = await fetch(texturesPaths[0]);
+ const imageBitmap = await createImageBitmap(await response.blob());
+ this.texture0 = this.device.createTexture({
+ size: [imageBitmap.width, imageBitmap.height, 1],
+ format: 'rgba8unorm',
+ usage:
+ GPUTextureUsage.TEXTURE_BINDING |
+ GPUTextureUsage.COPY_DST |
+ GPUTextureUsage.RENDER_ATTACHMENT,
+ });
+ this.device.queue.copyExternalImageToTexture(
+ {source: imageBitmap},
+ {texture: this.texture0},
+ [imageBitmap.width, imageBitmap.height]
+ );
+ resolve()
+ })
+ }
+
+ async loadVideoTexture(arg) {
+ this.isVideo = true;
+ if(arg.type === 'video') {
+ this.video = document.createElement('video');
+ this.video.src = arg.src || 'res/videos/tunel.mp4';
+ this.video.crossOrigin = 'anonymous';
+ this.video.autoplay = true;
+ this.video.loop = true;
+ document.body.append(this.video);
+ this.video.style.display = 'none';
+ await this.video.play();
+ } else if(arg.type === 'videoElement') {
+ this.video = arg.el;
+ await this.video.play();
+ } else if(arg.type === 'camera') {
+ this.video = document.createElement('video');
+ this.video.autoplay = true;
+ this.video.muted = true;
+ this.video.playsInline = true;
+ this.video.style.display = 'none';
+ document.body.append(this.video);
+
+ try {
+ const stream = await (navigator.mediaDevices?.getUserMedia?.({
+ video: {
+ width: {ideal: 1280},
+ height: {ideal: 720},
+ },
+ audio: false
+ }));
+
+ this.video.srcObject = stream;
+ await this.video.play();
+ } catch(err) {
+ console.error("❌ Failed to access camera:", err);
+ return;
+ }
+ } else if(arg.type === 'canvas2d') {
+ // Existing canvas (arg.el) — assume it's actively drawing
+ this.video = document.createElement('video');
+ this.video.autoplay = true;
+ this.video.muted = true;
+ this.video.playsInline = true;
+ this.video.style.display = 'none';
+ document.body.append(this.video);
+
+ // Create stream from existing canvas
+ const stream = arg.el.captureStream?.() || arg.el.mozCaptureStream?.();
+ if(!stream) {
+ console.error('❌ Cannot capture stream from canvas2d');
+ return;
+ }
+
+ this.video.srcObject = stream;
+ await this.video.play();
+
+ } else if(arg.type === 'canvas2d-inline') {
+ // Miniature inline-drawn canvas created dynamically
+ const canvas = document.createElement('canvas');
+ canvas.width = arg.width || 256;
+ canvas.height = arg.height || 256;
+ const ctx = canvas.getContext('2d');
+
+ if(typeof arg.canvaInlineProgram === 'function') {
+ // Start drawing loop
+ const drawLoop = () => {
+ arg.canvaInlineProgram(ctx, canvas);
+ requestAnimationFrame(drawLoop);
+ };
+ drawLoop();
+ }
+
+ this.video = document.createElement('video');
+ this.video.autoplay = true;
+ this.video.muted = true;
+ this.video.playsInline = true;
+ this.video.style.display = 'none';
+ document.body.append(this.video);
+
+ const stream = canvas.captureStream?.() || canvas.mozCaptureStream?.();
+ if(!stream) {
+ console.error('❌ Cannot capture stream from inline canvas');
+ return;
+ }
+
+ this.video.srcObject = stream;
+ await this.video.play();
+ }
+
+ this.sampler = this.device.createSampler({
+ magFilter: 'linear',
+ minFilter: 'linear',
+ });
+
+ // ✅ Now includes externalTexture type
+ this.createLayoutForRender();
+ this.setupPipeline();
+ setTimeout(() => this.createBindGroupForRender(), 1500);
+ }
+
+ updateVideoTexture() {
+ if(!this.video || this.video.readyState < 2) return;
+ this.externalTexture = this.device.importExternalTexture({source: this.video});
+ this.createBindGroupForRender();
+ }
+
+ createBindGroupForRender() {
+ const textureResource = this.isVideo
+ ? this.externalTexture // must be set via updateVideoTexture
+ : this.texture0.createView();
+ // Skip until all required resources exist
+ if(!textureResource || !this.sceneUniformBuffer || !this.shadowDepthTextureView || !this.sampler) {
+ console.warn("❗ Missing resource, skipping...");
+ return;
+ }
+
+ if(this.isVideo == true) {
+ this.sceneBindGroupForRender = this.device.createBindGroup({
+ layout: this.bglForRender,
+ entries: [
+ {
+ binding: 0,
+ resource: {buffer: this.sceneUniformBuffer},
+ },
+ {
+ binding: 1,
+ resource: this.shadowDepthTextureView,
+ },
+ {
+ binding: 2,
+ resource: this.compareSampler,
+ },
+ {
+ binding: 3,
+ resource: textureResource,
+ },
+ {
+ binding: 4,
+ resource: this.videoSampler,
+ },
+ ],
+ });
+ } else {
+ this.sceneBindGroupForRender = this.device.createBindGroup({
+ layout: this.bglForRender,
+ entries: [
+ {
+ binding: 0,
+ resource: {buffer: this.sceneUniformBuffer},
+ },
+ {
+ binding: 1,
+ resource: this.shadowDepthTextureView,
+ },
+ {
+ binding: 2,
+ resource: this.compareSampler,
+ },
+ {
+ binding: 3,
+ resource: textureResource,
+ },
+ {
+ binding: 4,
+ resource: this.imageSampler,
+ },
+ ],
+ });
+ }
+ }
+
+ createLayoutForRender() {
+ this.bglForRender = this.device.createBindGroupLayout({
+ entries: [
+ {
+ binding: 0,
+ visibility: GPUShaderStage.VERTEX | GPUShaderStage.FRAGMENT,
+ buffer: {type: 'uniform'},
+ },
+ {
+ binding: 1,
+ visibility: GPUShaderStage.FRAGMENT,
+ texture: {sampleType: 'depth'},
+ },
+ {
+ binding: 2,
+ visibility: GPUShaderStage.FRAGMENT,
+ sampler: {type: 'comparison'},
+ },
+ ...(this.isVideo
+ ? [ // VIDEO
+ {
+ binding: 3,
+ visibility: GPUShaderStage.FRAGMENT,
+ externalTexture: {},
+ },
+ {
+ binding: 4,
+ visibility: GPUShaderStage.FRAGMENT,
+ sampler: {type: 'filtering'}, // for video sampling
+ },
+ ]
+ : [ // IMAGE
+ {
+ binding: 3,
+ visibility: GPUShaderStage.FRAGMENT,
+ texture: {
+ sampleType: 'float',
+ viewDimension: '2d',
+ },
+ },
+ {
+ binding: 4,
+ visibility: GPUShaderStage.FRAGMENT,
+ sampler: {type: 'filtering'},
+ },
+ ])
+ ],
+ });
+ }
+ }
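
One detail worth noting: a `GPUExternalTexture` returned by `importExternalTexture` is only valid for the frame in which it was created, which is why `updateVideoTexture()` re-imports the video and rebuilds the bind group, and why `drawElements` (in the mesh hunk below) calls it on every frame. A condensed sketch of that per-frame flow, with the loop scaffolding assumed:

```js
// Sketch: per-frame handling of a video-textured mesh. The external
// texture expires after each frame, so it is re-imported on every draw.
function drawMesh(mesh, renderPass) {
  if (mesh.isVideo) mesh.updateVideoTexture();
  renderPass.setBindGroup(0, mesh.sceneBindGroupForRender);
  renderPass.drawIndexed(mesh.indexCount);
}
```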
@@ -5,10 +5,12 @@ import {vertexShadowWGSL} from '../shaders/vertexShadow.wgsl';
  import {fragmentWGSL} from '../shaders/fragment.wgsl';
  import {vertexWGSL} from '../shaders/vertex.wgsl';
  import {degToRad, genName, LOG_FUNNY_SMALL} from './utils';
- // import {checkingProcedure, checkingRay, touchCoordinate} from './raycast';
+ import Materials from './materials';
+ import {fragmentVideoWGSL} from '../shaders/fragment.video.wgsl';

- export default class MEMeshObj {
+ export default class MEMeshObj extends Materials {
  constructor(canvas, device, context, o) {
+ super(device);
  if(typeof o.name === 'undefined') o.name = genName(9);
  if(typeof o.raycast === 'undefined') {
  this.raycast = {
@@ -28,6 +30,8 @@ export default class MEMeshObj {
  // comes from engine not from args
  this.clearColor = "red";

+ this.video = null;
+
  // Mesh stuff - for single mesh or t-posed (fictive-first in loading order)
  this.mesh = o.mesh;
  this.mesh.uvs = this.mesh.textures;
@@ -73,7 +77,7 @@ export default class MEMeshObj {
  this.projectionMatrix = mat4.perspective((2 * Math.PI) / 5, aspect, 1, 2000.0);
  this.modelViewProjectionMatrix = mat4.create();
  // console.log('cube added texturesPaths: ', this.texturesPaths)
- this.loadTex0(this.texturesPaths, device).then(() => {
+ this.loadTex0(this.texturesPaths).then(() => {
  // console.log('loaded tex buffer for mesh:', this.texture0)
  resolve()
  })
@@ -82,10 +86,10 @@

  this.runProgram().then(() => {
  const aspect = canvas.width / canvas.height;
- const presentationFormat = navigator.gpu.getPreferredCanvasFormat();
+ // const presentationFormat = navigator.gpu.getPreferredCanvasFormat();
  this.context.configure({
  device: this.device,
- format: presentationFormat,
+ format: this.presentationFormat,
  alphaMode: 'premultiplied',
  });

@@ -189,7 +193,7 @@ export default class MEMeshObj {
  },
  ];

- const primitive = {
+ this.primitive = {
  topology: 'triangle-list',
  // cullMode: 'back', // ORI
  cullMode: 'none', // ORI
@@ -225,82 +229,45 @@ export default class MEMeshObj {
  depthCompare: 'less',
  format: 'depth32float',
  },
- primitive,
+ primitive: this.primitive,
  });

  // Create a bind group layout which holds the scene uniforms and
  // the texture+sampler for depth. We create it manually because the WebGPU
  // implementation doesn't infer this from the shader (yet).
- this.bglForRender = this.device.createBindGroupLayout({
- entries: [
- {
- binding: 0,
- visibility: GPUShaderStage.VERTEX | GPUShaderStage.FRAGMENT,
- buffer: {
- type: 'uniform',
- },
- },
- {
- binding: 1,
- visibility: GPUShaderStage.VERTEX | GPUShaderStage.FRAGMENT,
- texture: {
- sampleType: 'depth',
- },
- },
- {
- binding: 2,
- visibility: GPUShaderStage.VERTEX | GPUShaderStage.FRAGMENT,
- sampler: {
- type: 'comparison',
- },
- },
- {
- binding: 3,
- visibility: GPUShaderStage.VERTEX | GPUShaderStage.FRAGMENT,
- texture: {
- sampleType: 'float',
- }
- },
- {
- binding: 4,
- visibility: GPUShaderStage.VERTEX | GPUShaderStage.FRAGMENT,
- sampler: {
- type: 'filtering',
- }
- }
- ]
- });
-
- this.pipeline = this.device.createRenderPipeline({
- layout: this.device.createPipelineLayout({
- bindGroupLayouts: [this.bglForRender, this.uniformBufferBindGroupLayout],
- }),
- vertex: {
- module: this.device.createShaderModule({
- code: vertexWGSL,
- }),
- buffers: this.vertexBuffers,
- },
- fragment: {
- module: this.device.createShaderModule({
- code: fragmentWGSL,
- }),
- targets: [
- {
- format: presentationFormat,
- },
- ],
- constants: {
- shadowDepthTextureSize: this.shadowDepthTextureSize,
- },
- },
- depthStencil: {
- depthWriteEnabled: true,
- depthCompare: 'less',
- format: 'depth24plus-stencil8',
- },
- primitive,
- });
+ this.createLayoutForRender()
+
+ this.setupPipeline();
+ // this.pipeline = this.device.createRenderPipeline({
+ // layout: this.device.createPipelineLayout({
+ // bindGroupLayouts: [this.bglForRender, this.uniformBufferBindGroupLayout],
+ // }),
+ // vertex: {
+ // module: this.device.createShaderModule({
+ // code: vertexWGSL,
+ // }),
+ // buffers: this.vertexBuffers,
+ // },
+ // fragment: {
+ // module: this.device.createShaderModule({
+ // code: fragmentWGSL,
+ // }),
+ // targets: [
+ // {
+ // format: presentationFormat,
+ // },
+ // ],
+ // constants: {
+ // shadowDepthTextureSize: this.shadowDepthTextureSize,
+ // },
+ // },
+ // depthStencil: {
+ // depthWriteEnabled: true,
+ // depthCompare: 'less',
+ // format: 'depth24plus-stencil8',
+ // },
+ // primitive,
+ // });

  const depthTexture = this.device.createTexture({
  size: [canvas.width, canvas.height],
@@ -355,35 +322,8 @@ export default class MEMeshObj {
  ],
  });

- this.sceneBindGroupForRender = this.device.createBindGroup({
- layout: this.bglForRender,
- entries: [
- {
- binding: 0,
- resource: {
- buffer: this.sceneUniformBuffer,
- },
- },
- {
- binding: 1,
- resource: this.shadowDepthTextureView,
- },
- {
- binding: 2,
- resource: this.device.createSampler({
- compare: 'less',
- }),
- },
- {
- binding: 3,
- resource: this.texture0.createView(),
- },
- {
- binding: 4,
- resource: this.sampler,
- },
- ],
- });
+ // --------------------------
+ this.createBindGroupForRender();

  this.modelBindGroup = this.device.createBindGroup({
  layout: this.uniformBufferBindGroupLayout,
@@ -429,13 +369,13 @@ export default class MEMeshObj {
  }

  this.getModelMatrix = (pos) => {
- let modelMatrix = mat4.identity();
+ let modelMatrix = mat4.identity();
  mat4.translate(modelMatrix, [pos.x, pos.y, pos.z], modelMatrix);
  if(this.itIsPhysicsBody) {
  mat4.rotate(modelMatrix,
  [this.rotation.axis.x, this.rotation.axis.y, this.rotation.axis.z],
- degToRad(this.rotation.angle),
- modelMatrix
+ degToRad(this.rotation.angle),
+ modelMatrix
  );
  } else {
  mat4.rotateX(modelMatrix, this.rotation.getRotX(), modelMatrix);
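
For reference, a condensed sketch of the composition this hunk touches, using the same `mat4` calls (the literal values are illustrative, and the `wgpu-matrix` import path is assumed from the API in use):

```js
// Sketch: model matrix = translation, then rotation. Physics bodies use a
// single axis-angle rotation; other objects use per-axis Euler rotations.
import {mat4} from 'wgpu-matrix';

let m = mat4.identity();
mat4.translate(m, [0, 2, -10], m);         // position
mat4.rotate(m, [0, 1, 0], Math.PI / 4, m); // physics branch: axis + radians
```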
@@ -589,59 +529,54 @@ export default class MEMeshObj {
  ///////////////////////
  }

- async loadTex0(texturesPaths, device) {
-
- this.sampler = device.createSampler({
- magFilter: 'linear',
- minFilter: 'linear',
+ setupPipeline = () => {
+ this.pipeline = this.device.createRenderPipeline({
+ layout: this.device.createPipelineLayout({
+ bindGroupLayouts: [this.bglForRender, this.uniformBufferBindGroupLayout],
+ }),
+ vertex: {
+ entryPoint: 'main', // ✅ Add this
+ module: this.device.createShaderModule({
+ code: vertexWGSL,
+ }),
+ buffers: this.vertexBuffers,
+ },
+ fragment: {
+ entryPoint: 'main', // ✅ Add this
+ module: this.device.createShaderModule({
+ code: (this.isVideo == true ? fragmentVideoWGSL : fragmentWGSL),
+ }),
+ targets: [
+ {
+ format: this.presentationFormat,
+ },
+ ],
+ constants: {
+ shadowDepthTextureSize: this.shadowDepthTextureSize,
+ },
+ },
+ depthStencil: {
+ depthWriteEnabled: true,
+ depthCompare: 'less',
+ format: 'depth24plus-stencil8',
+ },
+ primitive: this.primitive,
  });
-
- return new Promise(async (resolve) => {
- const response = await fetch(texturesPaths[0]);
-
- // const blob = await response.blob();
- // if(!blob.type.startsWith('image/')) {
- // console.error("Unexpected texture response type:", blob.type);
- // return;
- // }
-
- // const imageBitmap = await createImageBitmap(blob);
- const imageBitmap = await createImageBitmap(await response.blob());
- this.texture0 = device.createTexture({
- size: [imageBitmap.width, imageBitmap.height, 1], // REMOVED 1
- format: 'rgba8unorm',
- usage:
- GPUTextureUsage.TEXTURE_BINDING |
- GPUTextureUsage.COPY_DST |
- GPUTextureUsage.RENDER_ATTACHMENT,
- });
-
- device.queue.copyExternalImageToTexture(
- {source: imageBitmap},
- {texture: this.texture0},
- [imageBitmap.width, imageBitmap.height]
- );
- resolve()
- })
  }

- draw = (commandEncoder) => {
+ draw = () => {
  if(this.done == false) return;
  const transformationMatrix = this.getTransformationMatrix(this.position);
-
- this.device.queue.writeBuffer(
- this.sceneUniformBuffer,
- 64,
- transformationMatrix.buffer,
- transformationMatrix.byteOffset,
- transformationMatrix.byteLength
- );
+ this.device.queue.writeBuffer(this.sceneUniformBuffer, 64, transformationMatrix.buffer, transformationMatrix.byteOffset, transformationMatrix.byteLength);
  this.renderPassDescriptor.colorAttachments[0].view = this.context
  .getCurrentTexture()
  .createView();
  }

  drawElements = (renderPass) => {
+ if(this.isVideo) {
+ this.updateVideoTexture();
+ }
  renderPass.setBindGroup(0, this.sceneBindGroupForRender);
  renderPass.setBindGroup(1, this.modelBindGroup);
  renderPass.setVertexBuffer(0, this.vertexBuffer);
@@ -651,7 +586,7 @@ export default class MEMeshObj {
  renderPass.drawIndexed(this.indexCount);
  }

- // test
+ // test
  createGPUBuffer(dataArray, usage) {
  if(!dataArray || typeof dataArray.length !== 'number') {
  throw new Error('Invalid data array passed to createGPUBuffer');
@@ -0,0 +1,51 @@
+ export let fragmentVideoWGSL = `override shadowDepthTextureSize: f32 = 1024.0;
+
+ struct Scene {
+ lightViewProjMatrix : mat4x4f,
+ cameraViewProjMatrix : mat4x4f,
+ lightPos : vec3f,
+ }
+
+ @group(0) @binding(0) var<uniform> scene : Scene;
+ @group(0) @binding(1) var shadowMap: texture_depth_2d;
+ @group(0) @binding(2) var shadowSampler: sampler_comparison;
+ @group(0) @binding(3) var meshTexture: texture_external;
+ @group(0) @binding(4) var meshSampler: sampler;
+
+ struct FragmentInput {
+ @location(0) shadowPos : vec3f,
+ @location(1) fragPos : vec3f,
+ @location(2) fragNorm : vec3f,
+ @location(3) uv : vec2f,
+ }
+
+ const albedo = vec3f(0.9);
+ const ambientFactor = 0.7;
+
+ @fragment
+ fn main(input : FragmentInput) -> @location(0) vec4f {
+ // Shadow filtering: 3x3 PCF around the shadow-map texel
+ var visibility = 0.0;
+ let oneOverShadowDepthTextureSize = 1.0 / shadowDepthTextureSize;
+ for (var y = -1; y <= 1; y++) {
+ for (var x = -1; x <= 1; x++) {
+ let offset = vec2f(vec2(x, y)) * oneOverShadowDepthTextureSize;
+ visibility += textureSampleCompare(
+ shadowMap, shadowSampler,
+ input.shadowPos.xy + offset, input.shadowPos.z - 0.007
+ );
+ }
+ }
+ visibility /= 9.0;
+
+ let lambertFactor = max(dot(normalize(scene.lightPos - input.fragPos), normalize(input.fragNorm)), 0.0);
+ let lightingFactor = min(ambientFactor + visibility * lambertFactor, 1.0);
+
+ // ✅ External textures must be sampled with textureSampleBaseClampToEdge
+ let textureColor = textureSampleBaseClampToEdge(meshTexture, meshSampler, input.uv);
+
+ return vec4(textureColor.rgb * lightingFactor * albedo, 1.0);
+ }
+ `;
@@ -16,7 +16,7 @@ struct VertexOutput {
  @location(1) fragPos: vec3f,
  @location(2) fragNorm: vec3f,
  @location(3) uv : vec2f,
-
+
  @builtin(position) Position: vec4f,
  }

@@ -46,4 +46,4 @@ fn main(

  return output;
  }
- `
+ `;
package/src/world.js CHANGED
@@ -84,7 +84,6 @@ export default class MatrixEngineWGPU {
  WASD: new WASDCamera({position: initialCameraPosition}),
  };

- //
  this.label = new MultiLang()
  if(urlQuery.lang != null) {
  this.label.loadMultilang(urlQuery.lang).then((r) => {
@@ -135,6 +134,10 @@ export default class MatrixEngineWGPU {
  this.run(callback)
  };

+ getSceneObjectByName(name) {
+ return this.mainRenderBundle.find((sceneObject) => sceneObject.name === name)
+ }
+
  // Not in use for now
  addCube = (o) => {
  if(typeof o === 'undefined') {
@@ -322,7 +325,7 @@ export default class MatrixEngineWGPU {
  }

  destroyProgram = () => {
- this.mainRenderBundle = undefined;
+ this.mainRenderBundle = [];
  this.canvas.remove();
  }

@@ -337,7 +340,7 @@ export default class MatrixEngineWGPU {
  meItem.position.update();
  })

- this.matrixAmmo.updatePhysics()
+ if (this.matrixAmmo) this.matrixAmmo.updatePhysics();

  this.mainRenderBundle.forEach((meItem, index) => {
  meItem.draw(commandEncoder);