@kitware/vtk.js 24.5.2 → 24.5.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (30)
  1. package/Common/DataModel/DataSetAttributes/FieldData.d.ts +3 -1
  2. package/Rendering/Core/RenderWindowInteractor.d.ts +123 -109
  3. package/Rendering/Core/ScalarBarActor.js +2 -2
  4. package/Rendering/OpenGL/OrderIndependentTranslucentPass.js +5 -1
  5. package/Rendering/OpenGL/RenderWindow.d.ts +1 -1
  6. package/Rendering/OpenGL/VolumeMapper.js +1 -1
  7. package/Rendering/SceneGraph/ViewNode.js +28 -2
  8. package/Rendering/WebGPU/BufferManager.js +83 -14
  9. package/Rendering/WebGPU/CellArrayMapper.js +591 -0
  10. package/Rendering/WebGPU/Device.js +97 -57
  11. package/Rendering/WebGPU/FullScreenQuad.js +4 -6
  12. package/Rendering/WebGPU/Glyph3DMapper.js +62 -27
  13. package/Rendering/WebGPU/ImageMapper.js +23 -64
  14. package/Rendering/WebGPU/OrderIndependentTranslucentPass.js +4 -6
  15. package/Rendering/WebGPU/Pipeline.js +12 -0
  16. package/Rendering/WebGPU/PolyDataMapper.js +49 -623
  17. package/Rendering/WebGPU/RenderEncoder.js +34 -0
  18. package/Rendering/WebGPU/Renderer.js +4 -62
  19. package/Rendering/WebGPU/ShaderDescription.js +6 -6
  20. package/Rendering/WebGPU/{MapperHelper.js → SimpleMapper.js} +64 -38
  21. package/Rendering/WebGPU/SphereMapper.js +66 -64
  22. package/Rendering/WebGPU/StickMapper.js +73 -72
  23. package/Rendering/WebGPU/StorageBuffer.js +2 -3
  24. package/Rendering/WebGPU/Texture.js +0 -2
  25. package/Rendering/WebGPU/TextureManager.js +37 -7
  26. package/Rendering/WebGPU/UniformBuffer.js +1 -2
  27. package/Rendering/WebGPU/Volume.js +1 -14
  28. package/Rendering/WebGPU/VolumePass.js +16 -22
  29. package/Rendering/WebGPU/VolumePassFSQ.js +19 -29
  30. package/package.json +1 -1
@@ -1,11 +1,10 @@
1
1
  import { newInstance as newInstance$1, vtkErrorMacro as vtkErrorMacro$1 } from '../../macros.js';
2
- import vtkWebGPUPolyDataMapper from './PolyDataMapper.js';
2
+ import vtkWebGPUCellArrayMapper from './CellArrayMapper.js';
3
3
  import vtkWebGPUBufferManager from './BufferManager.js';
4
4
  import vtkWebGPUShaderCache from './ShaderCache.js';
5
5
  import { registerOverride } from './ViewNodeFactory.js';
6
6
 
7
- var BufferUsage = vtkWebGPUBufferManager.BufferUsage,
8
- PrimitiveTypes = vtkWebGPUBufferManager.PrimitiveTypes;
7
+ var BufferUsage = vtkWebGPUBufferManager.BufferUsage;
9
8
  var vtkErrorMacro = vtkErrorMacro$1; // Vertices
10
9
  // 013 - 032 - 324 - 453
11
10
  //
@@ -34,6 +33,21 @@ var vtkWebGPUStickMapperVS = "\n//VTK::Renderer::Dec\n\n//VTK::Mapper::Dec\n\n//
34
33
  function vtkWebGPUStickMapper(publicAPI, model) {
35
34
  // Set our className
36
35
  model.classHierarchy.push('vtkWebGPUStickMapper');
36
+ var cellMapperBuildPass = publicAPI.buildPass;
37
+
38
+ publicAPI.buildPass = function (prepass) {
39
+ if (prepass) {
40
+ if (!model.renderable.getStatic()) {
41
+ model.renderable.update();
42
+ }
43
+
44
+ var poly = model.renderable.getInputData();
45
+ publicAPI.setCellArray(poly.getVerts());
46
+ publicAPI.setCurrentInput(poly);
47
+ }
48
+
49
+ cellMapperBuildPass(prepass);
50
+ };
37
51
 
38
52
  publicAPI.replaceShaderNormal = function (hash, pipeline, vertexInput) {
39
53
  var vDesc = pipeline.getShaderDescription('vertex');
@@ -45,7 +59,7 @@ function vtkWebGPUStickMapper(publicAPI, model) {
45
59
  vDesc.addBuiltinInput('u32', '@builtin(vertex_index) vertexIndex');
46
60
  var fDesc = pipeline.getShaderDescription('fragment');
47
61
  fDesc.addBuiltinOutput('f32', '@builtin(frag_depth) fragDepth');
48
- var stickFrag = "\n // compute the eye position and unit direction\n var vertexVC: vec4<f32>;\n var EyePos: vec3<f32>;\n var EyeDir: vec3<f32>;\n\n if (rendererUBO.cameraParallel != 0u)\n {\n EyePos = vec3<f32>(input.vertexVC.x, input.vertexVC.y, input.vertexVC.z + 3.0*input.radiusVC);\n EyeDir = vec3<f32>(0.0, 0.0, -1.0);\n }\n else\n {\n EyeDir = input.vertexVC.xyz;\n EyePos = vec3<f32>(0.0,0.0,0.0);\n var lengthED: f32 = length(EyeDir);\n EyeDir = normalize(EyeDir);\n // we adjust the EyePos to be closer if it is too far away\n // to prevent floating point precision noise\n if (lengthED > input.radiusVC*3.0)\n {\n EyePos = input.vertexVC.xyz - EyeDir*3.0*input.radiusVC;\n }\n }\n // translate to Sphere center\n EyePos = EyePos - input.centerVC;\n\n // rotate to new basis\n // base1, base2, orientVC\n var base1: vec3<f32>;\n if (abs(input.orientVC.z) < 0.99)\n {\n base1 = normalize(cross(input.orientVC,vec3<f32>(0.0,0.0,1.0)));\n }\n else\n {\n base1 = normalize(cross(input.orientVC,vec3<f32>(0.0,1.0,0.0)));\n }\n var base2: vec3<f32> = cross(input.orientVC,base1);\n EyePos = vec3<f32>(dot(EyePos,base1),dot(EyePos,base2),dot(EyePos,input.orientVC));\n EyeDir = vec3<f32>(dot(EyeDir,base1),dot(EyeDir,base2),dot(EyeDir,input.orientVC));\n\n // scale to radius 1.0\n EyePos = EyePos * (1.0 / input.radiusVC);\n\n // find the intersection\n var a: f32 = EyeDir.x*EyeDir.x + EyeDir.y*EyeDir.y;\n var b: f32 = 2.0*(EyePos.x*EyeDir.x + EyePos.y*EyeDir.y);\n var c: f32 = EyePos.x*EyePos.x + EyePos.y*EyePos.y - 1.0;\n var d: f32 = b*b - 4.0*a*c;\n var normal: vec3<f32> = vec3<f32>(0.0,0.0,1.0);\n if (d < 0.0) { discard; }\n else\n {\n var t: f32 = (-b - sqrt(d))*(0.5 / a);\n var tz: f32 = EyePos.z + t*EyeDir.z;\n var iPoint: vec3<f32> = EyePos + t*EyeDir;\n if (abs(iPoint.z)*input.radiusVC > input.lengthVC*0.5)\n {\n // test for end cap\n var t2: f32 = (-b + sqrt(d))*(0.5 / a);\n var tz2: f32 = EyePos.z + t2*EyeDir.z;\n if (tz2*input.radiusVC > input.lengthVC*0.5 || 
tz*input.radiusVC < -0.5*input.lengthVC) { discard; }\n else\n {\n normal = input.orientVC;\n var t3: f32 = (input.lengthVC*0.5/input.radiusVC - EyePos.z)/EyeDir.z;\n iPoint = EyePos + t3*EyeDir;\n vertexVC = vec4<f32>(input.radiusVC*(iPoint.x*base1 + iPoint.y*base2 + iPoint.z*input.orientVC) + input.centerVC, 1.0);\n }\n }\n else\n {\n // The normal is the iPoint.xy rotated back into VC\n normal = iPoint.x*base1 + iPoint.y*base2;\n // rescale rerotate and translate\n vertexVC = vec4<f32>(input.radiusVC*(normal + iPoint.z*input.orientVC) + input.centerVC, 1.0);\n }\n // compute the pixel's depth\n var pos: vec4<f32> = rendererUBO.VCPCMatrix * vertexVC;\n output.fragDepth = pos.z / pos.w;\n }\n ";
62
+ var stickFrag = "\n // compute the eye position and unit direction\n var vertexVC: vec4<f32>;\n var EyePos: vec3<f32>;\n var EyeDir: vec3<f32>;\n\n if (rendererUBO.cameraParallel != 0u)\n {\n EyePos = vec3<f32>(input.vertexVC.x, input.vertexVC.y, input.vertexVC.z + 3.0*input.radiusVC);\n EyeDir = vec3<f32>(0.0, 0.0, -1.0);\n }\n else\n {\n EyeDir = input.vertexVC.xyz;\n EyePos = vec3<f32>(0.0,0.0,0.0);\n var lengthED: f32 = length(EyeDir);\n EyeDir = normalize(EyeDir);\n // we adjust the EyePos to be closer if it is too far away\n // to prevent floating point precision noise\n if (lengthED > input.radiusVC*3.0)\n {\n EyePos = input.vertexVC.xyz - EyeDir*3.0*input.radiusVC;\n }\n }\n // translate to Sphere center\n EyePos = EyePos - input.centerVC;\n\n // rotate to new basis\n // base1, base2, orientVC\n var base1: vec3<f32>;\n if (abs(input.orientVC.z) < 0.99)\n {\n base1 = normalize(cross(input.orientVC,vec3<f32>(0.0,0.0,1.0)));\n }\n else\n {\n base1 = normalize(cross(input.orientVC,vec3<f32>(0.0,1.0,0.0)));\n }\n var base2: vec3<f32> = cross(input.orientVC,base1);\n EyePos = vec3<f32>(dot(EyePos,base1),dot(EyePos,base2),dot(EyePos,input.orientVC));\n EyeDir = vec3<f32>(dot(EyeDir,base1),dot(EyeDir,base2),dot(EyeDir,input.orientVC));\n\n // scale to radius 1.0\n EyePos = EyePos * (1.0 / input.radiusVC);\n\n // find the intersection\n var a: f32 = EyeDir.x*EyeDir.x + EyeDir.y*EyeDir.y;\n var b: f32 = 2.0*(EyePos.x*EyeDir.x + EyePos.y*EyeDir.y);\n var c: f32 = EyePos.x*EyePos.x + EyePos.y*EyePos.y - 1.0;\n var d: f32 = b*b - 4.0*a*c;\n var normal: vec3<f32> = vec3<f32>(0.0,0.0,1.0);\n if (d < 0.0) { discard; }\n else\n {\n var t: f32 = (-b - sqrt(d))*(0.5 / a);\n var tz: f32 = EyePos.z + t*EyeDir.z;\n var iPoint: vec3<f32> = EyePos + t*EyeDir;\n if (abs(iPoint.z)*input.radiusVC > input.lengthVC*0.5)\n {\n // test for end cap\n var t2: f32 = (-b + sqrt(d))*(0.5 / a);\n var tz2: f32 = EyePos.z + t2*EyeDir.z;\n if (tz2*input.radiusVC > input.lengthVC*0.5 || 
tz*input.radiusVC < -0.5*input.lengthVC) { discard; }\n else\n {\n normal = input.orientVC;\n var t3: f32 = (input.lengthVC*0.5/input.radiusVC - EyePos.z)/EyeDir.z;\n iPoint = EyePos + t3*EyeDir;\n vertexVC = vec4<f32>(input.radiusVC*(iPoint.x*base1 + iPoint.y*base2 + iPoint.z*input.orientVC) + input.centerVC, 1.0);\n }\n }\n else\n {\n // The normal is the iPoint.xy rotated back into VC\n normal = iPoint.x*base1 + iPoint.y*base2;\n // rescale rerotate and translate\n vertexVC = vec4<f32>(input.radiusVC*(normal + iPoint.z*input.orientVC) + input.centerVC, 1.0);\n }\n }\n // compute the pixel's depth\n var pos: vec4<f32> = rendererUBO.VCPCMatrix * vertexVC;\n output.fragDepth = pos.z / pos.w;\n ";
49
63
  var code = fDesc.getCode();
50
64
  code = vtkWebGPUShaderCache.substitute(code, '//VTK::Normal::Impl', [stickFrag]).result;
51
65
  fDesc.setCode(code);
@@ -62,42 +76,36 @@ function vtkWebGPUStickMapper(publicAPI, model) {
62
76
  // or vertex input changes/ bind groups/ etc
63
77
 
64
78
 
65
- publicAPI.computePipelineHash = function (vertexInput) {
66
- var pipelineHash = 'stm';
79
+ publicAPI.computePipelineHash = function () {
80
+ model.pipelineHash = 'stm';
67
81
 
68
- if (vertexInput.hasAttribute("colorVI")) {
69
- pipelineHash += "c";
82
+ if (model.vertexInput.hasAttribute("colorVI")) {
83
+ model.pipelineHash += "c";
70
84
  }
71
85
 
72
- pipelineHash += model.renderEncoder.getPipelineHash();
73
- return pipelineHash;
74
- }; // was originally buildIBOs() but not using IBOs right now
75
-
86
+ model.pipelineHash += model.renderEncoder.getPipelineHash();
87
+ };
76
88
 
77
- publicAPI.buildPrimitives = function () {
89
+ publicAPI.updateBuffers = function () {
78
90
  var poly = model.currentInput;
79
- var device = model.WebGPURenderWindow.getDevice();
80
- model.renderable.mapScalars(poly, 1.0); // handle triangles
81
-
82
- var i = PrimitiveTypes.Triangles;
91
+ model.renderable.mapScalars(poly, 1.0);
92
+ var device = model.device;
83
93
  var points = poly.getPoints();
84
94
  var pointData = poly.getPointData();
85
95
  var numPoints = points.getNumberOfPoints();
86
96
  var pointArray = points.getData();
87
- var primHelper = model.primitives[i];
88
- primHelper.setNumberOfInstances(numPoints);
89
- primHelper.setNumberOfVertices(12);
90
- var vertexInput = model.primitives[i].getVertexInput();
91
- var buffRequest = {
92
- owner: points,
93
- hash: 'stm',
94
- time: points.getMTime(),
95
- usage: BufferUsage.RawVertex,
96
- format: 'float32x3'
97
- };
98
-
99
- if (!device.getBufferManager().hasBuffer(buffRequest)) {
100
- // xyz v1 v2 v3
97
+ publicAPI.setNumberOfInstances(numPoints);
98
+ publicAPI.setNumberOfVertices(12);
99
+ var vertexInput = model.vertexInput;
100
+ var hash = "stm".concat(points.getMTime(), "float32x3");
101
+
102
+ if (!device.getBufferManager().hasBuffer(hash)) {
103
+ var buffRequest = {
104
+ hash: hash,
105
+ usage: BufferUsage.RawVertex,
106
+ format: 'float32x3'
107
+ }; // xyz v1 v2 v3
108
+
101
109
  var tmpVBO = new Float32Array(numPoints * 3);
102
110
  var pointIdx = 0;
103
111
  var vboIdx = 0;
@@ -124,15 +132,15 @@ function vtkWebGPUStickMapper(publicAPI, model) {
124
132
  var defaultRadius = model.renderable.getRadius();
125
133
 
126
134
  if (scales || defaultRadius !== model._lastRadius) {
127
- buffRequest = {
128
- owner: scales,
129
- hash: 'stm',
130
- time: scales ? pointData.getArray(model.renderable.getScaleArray()).getMTime() : 0,
131
- usage: BufferUsage.RawVertex,
132
- format: 'float32'
133
- };
135
+ hash = "stm".concat(scales ? pointData.getArray(model.renderable.getScaleArray()).getMTime() : defaultRadius, "float32");
136
+
137
+ if (!device.getBufferManager().hasBuffer(hash)) {
138
+ var _buffRequest = {
139
+ hash: hash,
140
+ usage: BufferUsage.RawVertex,
141
+ format: 'float32'
142
+ };
134
143
 
135
- if (!device.getBufferManager().hasBuffer(buffRequest)) {
136
144
  var _tmpVBO = new Float32Array(numPoints);
137
145
 
138
146
  var _vboIdx = 0;
@@ -147,9 +155,9 @@ function vtkWebGPUStickMapper(publicAPI, model) {
147
155
  _tmpVBO[_vboIdx++] = radius;
148
156
  }
149
157
 
150
- buffRequest.nativeArray = _tmpVBO;
158
+ _buffRequest.nativeArray = _tmpVBO;
151
159
 
152
- var _buff = device.getBufferManager().getBuffer(buffRequest);
160
+ var _buff = device.getBufferManager().getBuffer(_buffRequest);
153
161
 
154
162
  vertexInput.addBuffer(_buff, ['radiusMC'], 'instance');
155
163
  }
@@ -165,16 +173,15 @@ function vtkWebGPUStickMapper(publicAPI, model) {
165
173
  vtkErrorMacro(['Error setting orientationArray.\n', 'You have to specify the stick orientation']);
166
174
  }
167
175
 
168
- buffRequest = {
169
- owner: orientationArray,
170
- hash: 'stm',
171
- time: pointData.getArray(model.renderable.getOrientationArray()).getMTime(),
172
- usage: BufferUsage.RawVertex,
173
- format: 'float32x3'
174
- };
176
+ hash = "stm".concat(pointData.getArray(model.renderable.getOrientationArray()).getMTime(), "float32x3");
177
+
178
+ if (!device.getBufferManager().hasBuffer(hash)) {
179
+ var _buffRequest2 = {
180
+ hash: hash,
181
+ usage: BufferUsage.RawVertex,
182
+ format: 'float32x3'
183
+ }; // xyz v1 v2 v3
175
184
 
176
- if (!device.getBufferManager().hasBuffer(buffRequest)) {
177
- // xyz v1 v2 v3
178
185
  var _tmpVBO2 = new Float32Array(numPoints * 3);
179
186
 
180
187
  var _pointIdx = 0;
@@ -193,14 +200,13 @@ function vtkWebGPUStickMapper(publicAPI, model) {
193
200
  _tmpVBO2[_vboIdx2++] = orientationArray[_pointIdx + 2] * length;
194
201
  }
195
202
 
196
- buffRequest.nativeArray = _tmpVBO2;
203
+ _buffRequest2.nativeArray = _tmpVBO2;
197
204
 
198
- var _buff2 = device.getBufferManager().getBuffer(buffRequest);
205
+ var _buff2 = device.getBufferManager().getBuffer(_buffRequest2);
199
206
 
200
207
  vertexInput.addBuffer(_buff2, ['orientMC'], 'instance');
201
- }
208
+ } // deal with colors but only if modified
202
209
 
203
- model.renderable.mapScalars(poly, 1.0); // deal with colors but only if modified
204
210
 
205
211
  var haveColors = false;
206
212
 
@@ -208,15 +214,13 @@ function vtkWebGPUStickMapper(publicAPI, model) {
208
214
  var c = model.renderable.getColorMapColors();
209
215
 
210
216
  if (c) {
211
- buffRequest = {
212
- owner: c,
213
- hash: 'stm',
214
- time: c.getMTime(),
215
- usage: BufferUsage.RawVertex,
216
- format: 'unorm8x4'
217
- };
217
+ hash = "stm".concat(c.getMTime(), "unorm8x4");
218
218
 
219
- if (!device.getBufferManager().hasBuffer(buffRequest)) {
219
+ if (!device.getBufferManager().hasBuffer(hash)) {
220
+ var _buffRequest3 = {
221
+ usage: BufferUsage.RawVertex,
222
+ format: 'unorm8x4'
223
+ };
220
224
  var colorComponents = c.getNumberOfComponents();
221
225
 
222
226
  if (colorComponents !== 4) {
@@ -236,9 +240,9 @@ function vtkWebGPUStickMapper(publicAPI, model) {
236
240
  _tmpVBO3[_vboIdx3++] = colorData[colorIdx + 3];
237
241
  }
238
242
 
239
- buffRequest.nativeArray = _tmpVBO3;
243
+ _buffRequest3.nativeArray = _tmpVBO3;
240
244
 
241
- var _buff3 = device.getBufferManager().getBuffer(buffRequest);
245
+ var _buff3 = device.getBufferManager().getBuffer(_buffRequest3);
242
246
 
243
247
  vertexInput.addBuffer(_buff3, ['colorVI'], 'instance');
244
248
  }
@@ -251,11 +255,8 @@ function vtkWebGPUStickMapper(publicAPI, model) {
251
255
  vertexInput.removeBufferIfPresent('colorVI');
252
256
  }
253
257
 
254
- primHelper.setPipelineHash(publicAPI.computePipelineHash(vertexInput));
255
- primHelper.setWebGPURenderer(model.WebGPURenderer);
256
- primHelper.setTopology('triangle-list');
257
- primHelper.build(model.renderEncoder, device);
258
- primHelper.registerToDraw();
258
+ publicAPI.setTopology('triangle-list');
259
+ publicAPI.updateUBO();
259
260
  };
260
261
  } // ----------------------------------------------------------------------------
261
262
  // Object factory
@@ -268,11 +269,11 @@ function extend(publicAPI, model) {
268
269
  var initialValues = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
269
270
  Object.assign(model, DEFAULT_VALUES, initialValues); // Inheritance
270
271
 
271
- vtkWebGPUPolyDataMapper.extend(publicAPI, model, initialValues);
272
- model.primitives[PrimitiveTypes.Triangles].setVertexShaderTemplate(vtkWebGPUStickMapperVS); // Object methods
272
+ vtkWebGPUCellArrayMapper.extend(publicAPI, model, initialValues);
273
+ publicAPI.setVertexShaderTemplate(vtkWebGPUStickMapperVS); // Object methods
273
274
 
274
275
  vtkWebGPUStickMapper(publicAPI, model);
275
- var sr = model.primitives[PrimitiveTypes.Triangles].getShaderReplacements();
276
+ var sr = model.shaderReplacements;
276
277
  sr.set('replaceShaderPosition', publicAPI.replaceShaderPosition);
277
278
  sr.set('replaceShaderNormal', publicAPI.replaceShaderNormal);
278
279
  } // ----------------------------------------------------------------------------
@@ -40,7 +40,6 @@ function vtkWebGPUStorageBuffer(publicAPI, model) {
40
40
  if (!model._buffer) {
41
41
  var req = {
42
42
  nativeArray: model.Float32Array,
43
- time: 0,
44
43
  usage: BufferUsage.Storage,
45
44
  label: model.label
46
45
  };
@@ -176,10 +175,10 @@ function vtkWebGPUStorageBuffer(publicAPI, model) {
176
175
 
177
176
  for (var i = 0; i < model.bufferEntries.length; i++) {
178
177
  var entry = model.bufferEntries[i];
179
- lines.push(" ".concat(entry.name, ": ").concat(entry.type, ";"));
178
+ lines.push(" ".concat(entry.name, ": ").concat(entry.type, ","));
180
179
  }
181
180
 
182
- lines.push("\n};\nstruct ".concat(model.label, "Struct\n{\n values: array<").concat(model.label, "StructEntry>;\n};\n@binding(").concat(binding, ") @group(").concat(group, ") var<storage, read> ").concat(model.label, ": ").concat(model.label, "Struct;\n"));
181
+ lines.push("\n};\nstruct ".concat(model.label, "Struct\n{\n values: array<").concat(model.label, "StructEntry>,\n};\n@binding(").concat(binding, ") @group(").concat(group, ") var<storage, read> ").concat(model.label, ": ").concat(model.label, "Struct;\n"));
183
182
  return lines.join('\n');
184
183
  };
185
184
 
@@ -95,7 +95,6 @@ function vtkWebGPUTexture(publicAPI, model) {
95
95
 
96
96
  if (req.dataArray) {
97
97
  buffRequest.dataArray = req.dataArray;
98
- buffRequest.time = req.dataArray.getMTime();
99
98
  }
100
99
 
101
100
  buffRequest.nativeArray = req.nativeArray; // bytesPerRow must be a multiple of 256 so we might need to rebuild
@@ -144,7 +143,6 @@ function vtkWebGPUTexture(publicAPI, model) {
144
143
 
145
144
  var _buffRequest = {
146
145
  nativeArray: imageData.data,
147
- time: 0,
148
146
 
149
147
  /* eslint-disable no-undef */
150
148
  usage: BufferUsage.Texture,
@@ -119,17 +119,47 @@ function vtkWebGPUTextureManager(publicAPI, model) {
119
119
 
120
120
  publicAPI.getTexture = function (req) {
121
121
  // if we have a source the get/create/cache the texture
122
- if (req.owner) {
123
- // fill out the req time and format based on imageData/image
124
- _fillRequest(req); // if a matching texture already exists then return it
125
-
126
-
127
- var hash = req.time + req.format;
128
- return model.device.getCachedObject(req.owner, hash, _createTexture, req);
122
+ if (req.hash) {
123
+ // if a matching texture already exists then return it
124
+ return model.device.getCachedObject(req.hash, _createTexture, req);
129
125
  }
130
126
 
131
127
  return _createTexture(req);
132
128
  };
129
+
130
+ publicAPI.getTextureForImageData = function (imgData) {
131
+ var treq = {
132
+ time: imgData.getMTime()
133
+ };
134
+ treq.imageData = imgData; // fill out the req time and format based on imageData/image
135
+
136
+ _fillRequest(treq);
137
+
138
+ treq.hash = treq.time + treq.format;
139
+ return model.device.getTextureManager().getTexture(treq);
140
+ };
141
+
142
+ publicAPI.getTextureForVTKTexture = function (srcTexture) {
143
+ var treq = {
144
+ time: srcTexture.getMTime()
145
+ };
146
+
147
+ if (srcTexture.getInputData()) {
148
+ treq.imageData = srcTexture.getInputData();
149
+ } else if (srcTexture.getImage()) {
150
+ treq.image = srcTexture.getImage();
151
+ } else if (srcTexture.getJsImageData()) {
152
+ treq.jsImageData = srcTexture.getJsImageData();
153
+ } else if (srcTexture.getCanvas()) {
154
+ treq.canvas = srcTexture.getCanvas();
155
+ } // fill out the req time and format based on imageData/image
156
+
157
+
158
+ _fillRequest(treq);
159
+
160
+ treq.hash = treq.time + treq.format;
161
+ return model.device.getTextureManager().getTexture(treq);
162
+ };
133
163
  } // ----------------------------------------------------------------------------
134
164
  // Object factory
135
165
  // ----------------------------------------------------------------------------
@@ -206,7 +206,6 @@ function vtkWebGPUUniformBuffer(publicAPI, model) {
206
206
  if (!model.UBO) {
207
207
  var req = {
208
208
  nativeArray: model.Float32Array,
209
- time: 0,
210
209
  usage: BufferUsage.UniformArray,
211
210
  label: model.label
212
211
  };
@@ -306,7 +305,7 @@ function vtkWebGPUUniformBuffer(publicAPI, model) {
306
305
 
307
306
  for (var i = 0; i < model.bufferEntries.length; i++) {
308
307
  var entry = model.bufferEntries[i];
309
- lines.push(" ".concat(entry.name, ": ").concat(entry.type, ";"));
308
+ lines.push(" ".concat(entry.name, ": ").concat(entry.type, ","));
310
309
  }
311
310
 
312
311
  lines.push("};\n@binding(".concat(binding, ") @group(").concat(group, ") var<uniform> ").concat(model.label, ": ").concat(model.label, "Struct;"));
@@ -23,10 +23,7 @@ function vtkWebGPUVolume(publicAPI, model) {
23
23
  model.propID = model.WebGPURenderWindow.getUniquePropID();
24
24
  }
25
25
 
26
- publicAPI.prepareNodes();
27
- model.renderable.getMapper().update(); // publicAPI.addMissingNode(model.renderable.getMapper());
28
-
29
- publicAPI.removeUnusedNodes();
26
+ model.renderable.getMapper().update();
30
27
  }
31
28
  };
32
29
 
@@ -81,16 +78,6 @@ function vtkWebGPUVolume(publicAPI, model) {
81
78
  }
82
79
  };
83
80
 
84
- publicAPI.traverseVolumePass = function (renderPass) {
85
- if (!model.renderable || !model.renderable.getNestedVisibility() || model.WebGPURenderer.getSelector() && !model.renderable.getNestedPickable()) {
86
- return;
87
- }
88
-
89
- publicAPI.apply(renderPass, true);
90
- model.children[0].traverse(renderPass);
91
- publicAPI.apply(renderPass, false);
92
- };
93
-
94
81
  publicAPI.getKeyMatrices = function (wgpuRen) {
95
82
  // has the actor or stabilization center changed?
96
83
  if (Math.max(model.renderable.getMTime(), wgpuRen.getStabilizedTime()) > model.keyMatricesTime.getMTime()) {
@@ -4,7 +4,7 @@ import vtkPolyData from '../../Common/DataModel/PolyData.js';
4
4
  import vtkProperty from '../Core/Property.js';
5
5
  import vtkRenderPass from '../SceneGraph/RenderPass.js';
6
6
  import vtkWebGPUBufferManager from './BufferManager.js';
7
- import vtkWebGPUMapperHelper from './MapperHelper.js';
7
+ import vtkWebGPUSimpleMapper from './SimpleMapper.js';
8
8
  import vtkWebGPURenderEncoder from './RenderEncoder.js';
9
9
  import vtkWebGPUShaderCache from './ShaderCache.js';
10
10
  import vtkWebGPUTexture from './Texture.js';
@@ -150,16 +150,6 @@ function vtkWebGPUVolumePass(publicAPI, model) {
150
150
  // final composite
151
151
 
152
152
 
153
- model._copyEncoder.setColorTextureView(0, model.colorTextureView);
154
-
155
- model._copyEncoder.attachTextureViews();
156
-
157
- renNode.setRenderEncoder(model._copyEncoder);
158
-
159
- model._copyEncoder.begin(viewNode.getCommandEncoder());
160
-
161
- renNode.scissorAndViewport(model._copyEncoder);
162
-
163
153
  model._volumeCopyQuad.setWebGPURenderer(renNode);
164
154
 
165
155
  if (model._useSmallViewport) {
@@ -174,7 +164,15 @@ function vtkWebGPUVolumePass(publicAPI, model) {
174
164
 
175
165
  model._copyUBO.sendIfNeeded(device);
176
166
 
177
- model._volumeCopyQuad.render(model._copyEncoder, viewNode.getDevice());
167
+ model._copyEncoder.setColorTextureView(0, model.colorTextureView);
168
+
169
+ model._copyEncoder.attachTextureViews();
170
+
171
+ model._copyEncoder.begin(viewNode.getCommandEncoder());
172
+
173
+ renNode.scissorAndViewport(model._copyEncoder);
174
+
175
+ model._volumeCopyQuad.prepareAndDraw(model._copyEncoder);
178
176
 
179
177
  model._copyEncoder.end();
180
178
  }; // unsubscribe from our listeners
@@ -235,7 +233,6 @@ function vtkWebGPUVolumePass(publicAPI, model) {
235
233
  publicAPI.rayCastPass = function (viewNode, renNode, volumes) {
236
234
  var encoder = model._firstGroup ? model._clearEncoder : model._mergeEncoder;
237
235
  encoder.attachTextureViews();
238
- renNode.setRenderEncoder(encoder);
239
236
  encoder.begin(viewNode.getCommandEncoder());
240
237
 
241
238
  var width = model._colorTextureView.getTexture().getWidth();
@@ -252,7 +249,7 @@ function vtkWebGPUVolumePass(publicAPI, model) {
252
249
  encoder.getHandle().setScissorRect(0, 0, width, height);
253
250
  model.fullScreenQuad.setWebGPURenderer(renNode);
254
251
  model.fullScreenQuad.setVolumes(volumes);
255
- model.fullScreenQuad.render(encoder, viewNode.getDevice());
252
+ model.fullScreenQuad.prepareAndDraw(encoder);
256
253
  encoder.end();
257
254
  };
258
255
 
@@ -263,11 +260,9 @@ function vtkWebGPUVolumePass(publicAPI, model) {
263
260
 
264
261
  var points = pd.getPoints();
265
262
  var buffRequest = {
266
- owner: points,
267
263
  usage: BufferUsage.PointArray,
268
264
  format: 'float32x4',
269
- time: Math.max(points.getMTime(), cells.getMTime()),
270
- hash: 'vp',
265
+ hash: "vp".concat(points.getMTime()).concat(cells.getMTime()),
271
266
  dataArray: points,
272
267
  cells: cells,
273
268
  primitiveType: PrimitiveTypes.Triangles,
@@ -348,8 +343,7 @@ function vtkWebGPUVolumePass(publicAPI, model) {
348
343
  };
349
344
 
350
345
  publicAPI.drawDepthRange = function (renNode, viewNode) {
351
- var device = viewNode.getDevice(); // copy current depth buffer to
352
-
346
+ // copy current depth buffer to
353
347
  model._depthRangeTexture.resizeToMatch(model.colorTextureView.getTexture());
354
348
 
355
349
  model._depthRangeTexture2.resizeToMatch(model.colorTextureView.getTexture());
@@ -362,9 +356,9 @@ function vtkWebGPUVolumePass(publicAPI, model) {
362
356
 
363
357
  model._mapper.setWebGPURenderer(renNode);
364
358
 
365
- model._mapper.build(model._depthRangeEncoder, device);
359
+ model._mapper.prepareToDraw(model._depthRangeEncoder);
366
360
 
367
- model._mapper.registerToDraw();
361
+ model._mapper.registerDrawCallback(model._depthRangeEncoder);
368
362
 
369
363
  renNode.volumeDepthRangePass(false);
370
364
  };
@@ -652,7 +646,7 @@ function extend(publicAPI, model) {
652
646
 
653
647
  vtkRenderPass.extend(publicAPI, model, initialValues);
654
648
  model._lastScale = 2.0;
655
- model._mapper = vtkWebGPUMapperHelper.newInstance();
649
+ model._mapper = vtkWebGPUSimpleMapper.newInstance();
656
650
 
657
651
  model._mapper.setFragmentShaderTemplate(DepthBoundsFS);
658
652
 
@@ -423,8 +423,11 @@ function vtkWebGPUVolumePassFSQ(publicAPI, model) {
423
423
  model.componentSSBO.send(device);
424
424
  };
425
425
 
426
- publicAPI.updateBuffers = function (device) {
427
- // compute the min step size
426
+ var superClassUpdateBuffers = publicAPI.updateBuffers;
427
+
428
+ publicAPI.updateBuffers = function () {
429
+ superClassUpdateBuffers(); // compute the min step size
430
+
428
431
  var sampleDist = model.volumes[0].getRenderable().getMapper().getSampleDistance();
429
432
 
430
433
  for (var i = 0; i < model.volumes.length; i++) {
@@ -440,7 +443,7 @@ function vtkWebGPUVolumePassFSQ(publicAPI, model) {
440
443
  if (model.sampleDist !== sampleDist) {
441
444
  model.sampleDist = sampleDist;
442
445
  model.UBO.setValue('SampleDistance', sampleDist);
443
- model.UBO.sendIfNeeded(device);
446
+ model.UBO.sendIfNeeded(model.device);
444
447
  } // add in 3d volume textures
445
448
 
446
449
 
@@ -452,11 +455,7 @@ function vtkWebGPUVolumePassFSQ(publicAPI, model) {
452
455
 
453
456
  var image = _volMapr4.getInputData();
454
457
 
455
- var treq = {
456
- imageData: image,
457
- owner: image.getPointData().getScalars()
458
- };
459
- var newTex = device.getTextureManager().getTexture(treq);
458
+ var newTex = model.device.getTextureManager().getTextureForImageData(image);
460
459
 
461
460
  if (!model.textureViews[vidx + 4] || model.textureViews[vidx + 4].getTexture() !== newTex) {
462
461
  var tview = newTex.createView("volTexture".concat(vidx));
@@ -473,8 +472,18 @@ function vtkWebGPUVolumePassFSQ(publicAPI, model) {
473
472
  }
474
473
 
475
474
  model.lastVolumeLength = model.volumes.length;
476
- publicAPI.updateLUTImage(device);
477
- publicAPI.updateSSBO(device);
475
+ publicAPI.updateLUTImage(model.device);
476
+ publicAPI.updateSSBO(model.device);
477
+
478
+ if (!model.clampSampler) {
479
+ model.clampSampler = vtkWebGPUSampler.newInstance({
480
+ label: 'clampSampler'
481
+ });
482
+ model.clampSampler.create(model.device, {
483
+ minFilter: 'linear',
484
+ magFilter: 'linear'
485
+ });
486
+ }
478
487
  };
479
488
 
480
489
  publicAPI.computePipelineHash = function () {
@@ -503,25 +512,6 @@ function vtkWebGPUVolumePassFSQ(publicAPI, model) {
503
512
  }
504
513
  };
505
514
 
506
- var superclassBuild = publicAPI.build;
507
-
508
- publicAPI.build = function (renderEncoder, device) {
509
- publicAPI.computePipelineHash();
510
- publicAPI.updateBuffers(device);
511
-
512
- if (!model.clampSampler) {
513
- model.clampSampler = vtkWebGPUSampler.newInstance({
514
- label: 'clampSampler'
515
- });
516
- model.clampSampler.create(device, {
517
- minFilter: 'linear',
518
- magFilter: 'linear'
519
- });
520
- }
521
-
522
- superclassBuild(renderEncoder, device);
523
- };
524
-
525
515
  var superclassGetBindables = publicAPI.getBindables;
526
516
 
527
517
  publicAPI.getBindables = function () {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@kitware/vtk.js",
3
- "version": "24.5.2",
3
+ "version": "24.5.5",
4
4
  "description": "Visualization Toolkit for the Web",
5
5
  "keywords": [
6
6
  "3d",