@kitware/vtk.js 19.7.3 → 19.8.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -48,8 +48,8 @@ In general VTK tries to be as portable as possible; the specific configurations
48
48
 
49
49
  vtk.js supports the following development environments:
50
50
 
51
- - Node 12+
52
- - NPM 6+
51
+ - Node 14+
52
+ - NPM 7+
53
53
 
54
54
  and we use [@babel/preset-env](https://www.npmjs.com/package/@babel/preset-env) with the [defaults](https://github.com/Kitware/vtk-js/blob/master/.browserslistrc) set of [browsers target](https://browserl.ist/?q=defaults).
55
55
  But when built from source this could be adjusted to support any browser as long they provide WebGL.
@@ -1,6 +1,6 @@
1
1
  import macro from '../../macros.js';
2
2
 
3
- var DEFAULT_VIEW_API = navigator.gpu ? 'WebGPU' : 'WebGL';
3
+ var DEFAULT_VIEW_API = 'WebGL';
4
4
  var VIEW_CONSTRUCTORS = Object.create(null); // ----------------------------------------------------------------------------
5
5
  // static methods
6
6
  // ----------------------------------------------------------------------------
@@ -20,6 +20,9 @@ import '../WebGPU/Actor.js';
20
20
  import '../WebGPU/PolyDataMapper.js';
21
21
  import '../WebGPU/Texture.js';
22
22
  import '../WebGPU/Glyph3DMapper.js';
23
+ import '../WebGPU/ImageMapper.js';
24
+ import '../WebGPU/ImageSlice.js';
25
+ import '../WebGPU/Volume.js';
23
26
  import '../WebGPU/PixelSpaceCallbackMapper.js';
24
27
  import '../WebGPU/SphereMapper.js';
25
28
  import '../WebGPU/StickMapper.js';
@@ -7,5 +7,7 @@ import '../OpenGL/VolumeMapper.js';
7
7
  import '../OpenGL/PixelSpaceCallbackMapper.js';
8
8
  import '../WebGPU/Camera.js';
9
9
  import '../WebGPU/Renderer.js';
10
+ import '../WebGPU/ImageMapper.js';
11
+ import '../WebGPU/ImageSlice.js';
10
12
  import '../WebGPU/Volume.js';
11
13
  import '../WebGPU/PixelSpaceCallbackMapper.js';
@@ -0,0 +1,421 @@
1
+ import Constants from '../Core/ImageMapper/Constants.js';
2
+ import { newInstance as newInstance$1, obj } from '../../macros.js';
3
+ import vtkWebGPUShaderCache from './ShaderCache.js';
4
+ import vtkWebGPUStorageBuffer from './StorageBuffer.js';
5
+ import vtkWebGPUFullScreenQuad from './FullScreenQuad.js';
6
+ import vtkWebGPUUniformBuffer from './UniformBuffer.js';
7
+ import vtkWebGPUSampler from './Sampler.js';
8
+ import vtkViewNode from '../SceneGraph/ViewNode.js';
9
+ import { InterpolationType } from '../Core/ImageProperty/Constants.js';
10
+ import { registerOverride } from './ViewNodeFactory.js';
11
+ import { i as identity, t as translate, j as transpose, g as invert, m as multiply, s as scale } from '../../vendor/gl-matrix/esm/mat4.js';
12
+ import { t as transformMat4, s as subtract } from '../../vendor/gl-matrix/esm/vec4.js';
13
+
14
+ var SlicingMode = Constants.SlicingMode;
15
+ var imgFragTemplate = "\n//VTK::Renderer::Dec\n\n//VTK::Mapper::Dec\n\n//VTK::TCoord::Dec\n\n//VTK::Image::Dec\n\n//VTK::RenderEncoder::Dec\n\n//VTK::IOStructs::Dec\n\n[[stage(fragment)]]\nfn main(\n//VTK::IOStructs::Input\n)\n//VTK::IOStructs::Output\n{\n var output: fragmentOutput;\n\n //VTK::Image::Sample\n computedColor.g = computedColor.r;\n computedColor.b = computedColor.r;\n computedColor.a = 1.0;\n\n // var computedColor: vec4<f32> = vec4<f32>(1.0,0.7, 0.5, 1.0);\n\n//VTK::RenderEncoder::Impl\n\n return output;\n}\n"; // ----------------------------------------------------------------------------
16
+ // helper methods
17
+ // ----------------------------------------------------------------------------
18
+
19
+ function computeFnToString(property, fn, numberOfComponents) {
20
+ var pwfun = fn.apply(property);
21
+
22
+ if (pwfun) {
23
+ var iComps = property.getIndependentComponents();
24
+ return "".concat(property.getMTime(), "-").concat(iComps, "-").concat(numberOfComponents);
25
+ }
26
+
27
+ return '0';
28
+ } // ----------------------------------------------------------------------------
29
+ // vtkWebGPUImageMapper methods
30
+ // ----------------------------------------------------------------------------
31
+
32
+
33
+ var tmpMat4 = new Float64Array(16);
34
+ var tmp2Mat4 = new Float64Array(16);
35
+ var tmp3Mat4 = new Float64Array(16);
36
+ var ptsArray1 = new Float64Array(4);
37
+ var ptsArray2 = new Float64Array(4);
38
+
39
+ function vtkWebGPUImageMapper(publicAPI, model) {
40
+ // Set our className
41
+ model.classHierarchy.push('vtkWebGPUImageMapper');
42
+
43
+ publicAPI.buildPass = function (prepass) {
44
+ if (prepass) {
45
+ model.WebGPUImageSlice = publicAPI.getFirstAncestorOfType('vtkWebGPUImageSlice');
46
+ model.WebGPURenderer = model.WebGPUImageSlice.getFirstAncestorOfType('vtkWebGPURenderer');
47
+ model.WebGPURenderWindow = model.WebGPURenderer.getParent();
48
+ model.device = model.WebGPURenderWindow.getDevice();
49
+ var ren = model.WebGPURenderer.getRenderable(); // is slice set by the camera
50
+
51
+ if (model.renderable.getSliceAtFocalPoint()) {
52
+ model.renderable.setSliceFromCamera(ren.getActiveCamera());
53
+ }
54
+ }
55
+ }; // Renders myself
56
+
57
+
58
+ publicAPI.translucentPass = function (prepass) {
59
+ if (prepass) {
60
+ publicAPI.render();
61
+ }
62
+ };
63
+
64
+ publicAPI.opaquePass = function (prepass) {
65
+ if (prepass) {
66
+ publicAPI.render();
67
+ }
68
+ };
69
+
70
+ publicAPI.render = function () {
71
+ model.renderable.update();
72
+ model.currentInput = model.renderable.getInputData();
73
+ model.renderEncoder = model.WebGPURenderer.getRenderEncoder();
74
+ publicAPI.build(model.renderEncoder, model.device); // update descriptor sets
75
+
76
+ publicAPI.updateUBO(model.device);
77
+ };
78
+
79
+ publicAPI.computePipelineHash = function () {
80
+ var ext = model.currentInput.getExtent();
81
+
82
+ if (ext[0] === ext[1] || ext[2] === ext[3] || ext[4] === ext[5]) {
83
+ model.dimensions = 2;
84
+ model.pipelineHash = 'img2';
85
+ } else {
86
+ model.dimensions = 3;
87
+ model.pipelineHash = 'img3';
88
+ }
89
+ };
90
+
91
+ publicAPI.updateUBO = function (device) {
92
+ var utime = model.UBO.getSendTime();
93
+ var actor = model.WebGPUImageSlice.getRenderable();
94
+ var volMapr = actor.getMapper();
95
+
96
+ if (publicAPI.getMTime() > utime || model.renderable.getMTime() > utime || actor.getProperty().getMTime() > utime) {
97
+ // compute the SCTCMatrix
98
+ var image = volMapr.getInputData();
99
+ var center = model.WebGPURenderer.getStabilizedCenterByReference();
100
+ identity(tmpMat4);
101
+ translate(tmpMat4, tmpMat4, center); // tmpMat4 is now SC->World
102
+
103
+ var mcwcmat = actor.getMatrix();
104
+ transpose(tmp2Mat4, mcwcmat);
105
+ invert(tmp2Mat4, tmp2Mat4); // tmp2Mat4 is now world to model
106
+
107
+ multiply(tmpMat4, tmp2Mat4, tmpMat4); // tmpMat4 is now SC->Model
108
+ // the method on the data is world to index but the volume is in
109
+ // model coordinates so really in this context it is model to index
110
+
111
+ var modelToIndex = image.getWorldToIndex();
112
+ multiply(tmpMat4, modelToIndex, tmpMat4); // tmpMat4 is now SC -> Index, save this as we need it later
113
+
114
+ invert(tmp3Mat4, tmpMat4);
115
+ var dims = image.getDimensions();
116
+ identity(tmp2Mat4);
117
+ scale(tmp2Mat4, tmp2Mat4, [1.0 / dims[0], 1.0 / dims[1], 1.0 / dims[2]]);
118
+ multiply(tmpMat4, tmp2Mat4, tmpMat4); // tmpMat4 is now SC -> Tcoord
119
+
120
+ model.UBO.setArray('SCTCMatrix', tmpMat4); // need to compute the plane here in world coordinates
121
+ // then pass that down in the UBO
122
+
123
+ var ext = model.currentInput.getExtent(); // Find what IJK axis and what direction to slice along
124
+
125
+ var _model$renderable$get = model.renderable.getClosestIJKAxis(),
126
+ ijkMode = _model$renderable$get.ijkMode; // Find the IJK slice
127
+
128
+
129
+ var nSlice = model.renderable.getSlice();
130
+
131
+ if (ijkMode !== model.renderable.getSlicingMode()) {
132
+ // If not IJK slicing, get the IJK slice from the XYZ position/slice
133
+ nSlice = model.renderable.getSliceAtPosition(nSlice);
134
+ }
135
+
136
+ var axis0 = 2;
137
+ var axis1 = 0;
138
+ var axis2 = 1;
139
+
140
+ if (ijkMode === SlicingMode.I) {
141
+ axis0 = 0;
142
+ axis1 = 1;
143
+ axis2 = 2;
144
+ } else if (ijkMode === SlicingMode.J) {
145
+ axis0 = 1;
146
+ axis1 = 2;
147
+ axis2 = 0;
148
+ }
149
+
150
+ ptsArray1[axis0] = nSlice;
151
+ ptsArray1[axis1] = ext[axis1 * 2];
152
+ ptsArray1[axis2] = ext[axis2 * 2];
153
+ ptsArray1[3] = 1.0;
154
+ transformMat4(ptsArray1, ptsArray1, tmp3Mat4);
155
+ model.UBO.setArray('Origin', ptsArray1);
156
+ ptsArray2[axis0] = nSlice;
157
+ ptsArray2[axis1] = ext[axis1 * 2 + 1];
158
+ ptsArray2[axis2] = ext[axis2 * 2];
159
+ ptsArray2[3] = 1.0;
160
+ transformMat4(ptsArray2, ptsArray2, tmp3Mat4);
161
+ subtract(ptsArray2, ptsArray2, ptsArray1);
162
+ ptsArray2[3] = 1.0;
163
+ model.UBO.setArray('Axis1', ptsArray2);
164
+ ptsArray2[axis0] = nSlice;
165
+ ptsArray2[axis1] = ext[axis1 * 2];
166
+ ptsArray2[axis2] = ext[axis2 * 2 + 1];
167
+ ptsArray2[3] = 1.0;
168
+ transformMat4(ptsArray2, ptsArray2, tmp3Mat4);
169
+ subtract(ptsArray2, ptsArray2, ptsArray1);
170
+ ptsArray2[3] = 1.0;
171
+ model.UBO.setArray('Axis2', ptsArray2); // three levels of shift scale combined into one
172
+ // for performance in the fragment shader
173
+
174
+ var cScale = [1, 1, 1, 1];
175
+ var cShift = [0, 0, 0, 0];
176
+ var tView = model.helper.getTextureViews()[0];
177
+ var tScale = tView.getTexture().getScale();
178
+ var numComp = tView.getTexture().getNumberOfComponents();
179
+ var iComps = false; // todo handle independent?
180
+
181
+ for (var i = 0; i < numComp; i++) {
182
+ var cw = actor.getProperty().getColorWindow();
183
+ var cl = actor.getProperty().getColorLevel();
184
+ var target = iComps ? i : 0;
185
+ var cfun = actor.getProperty().getRGBTransferFunction(target);
186
+
187
+ if (cfun) {
188
+ var cRange = cfun.getRange();
189
+ cw = cRange[1] - cRange[0];
190
+ cl = 0.5 * (cRange[1] + cRange[0]);
191
+ }
192
+
193
+ cScale[i] = tScale / cw;
194
+ cShift[i] = -cl / cw + 0.5;
195
+ }
196
+
197
+ model.UBO.setArray('cScale', cScale);
198
+ model.UBO.setArray('cShift', cShift);
199
+ model.UBO.sendIfNeeded(device);
200
+ }
201
+ };
202
+
203
+ publicAPI.updateLUTImage = function (device) {
204
+ var actorProperty = model.WebGPUImageSlice.getRenderable().getProperty();
205
+ var tView = model.helper.getTextureViews()[0];
206
+ tView.getTexture().getNumberOfComponents();
207
+
208
+ var numIComps = 1;
209
+ var cfunToString = computeFnToString(actorProperty, actorProperty.getRGBTransferFunction, numIComps);
210
+
211
+ if (model.colorTextureString !== cfunToString) {
212
+ model.numRows = numIComps;
213
+ var colorArray = new Uint8Array(model.numRows * 2 * model.rowLength * 4);
214
+ var cfun = actorProperty.getRGBTransferFunction();
215
+
216
+ if (cfun) {
217
+ var tmpTable = new Float32Array(model.rowLength * 3);
218
+
219
+ for (var c = 0; c < numIComps; c++) {
220
+ cfun = actorProperty.getRGBTransferFunction(c);
221
+ var cRange = cfun.getRange();
222
+ cfun.getTable(cRange[0], cRange[1], model.rowLength, tmpTable, 1);
223
+
224
+ {
225
+ for (var _i = 0; _i < model.rowLength; _i++) {
226
+ colorArray[c * model.rowLength * 8 + _i * 4] = 255.0 * tmpTable[_i * 3];
227
+ colorArray[c * model.rowLength * 8 + _i * 4 + 1] = 255.0 * tmpTable[_i * 3 + 1];
228
+ colorArray[c * model.rowLength * 8 + _i * 4 + 2] = 255.0 * tmpTable[_i * 3 + 2];
229
+ colorArray[c * model.rowLength * 8 + _i * 4 + 3] = 255.0;
230
+ }
231
+ }
232
+ }
233
+ } else {
234
+ for (var _i2 = 0; _i2 < model.rowLength; ++_i2) {
235
+ var grey = 255.0 * _i2 / (model.rowLength - 1);
236
+ colorArray[_i2 * 4] = grey;
237
+ colorArray[_i2 * 4 + 1] = grey;
238
+ colorArray[_i2 * 4 + 2] = grey;
239
+ colorArray[_i2 * 4 + 3] = 255.0;
240
+
241
+ for (var _j = 0; _j < 4; _j++) {
242
+ colorArray[_i2 * 4 + model.rowLength * 4 + _j] = colorArray[_i2 * 4 + _j];
243
+ }
244
+ }
245
+ }
246
+
247
+ {
248
+ var treq = {
249
+ nativeArray: colorArray,
250
+ width: model.rowLength,
251
+ height: model.numRows * 2,
252
+ depth: 1,
253
+ format: 'rgba8unorm'
254
+ };
255
+ var newTex = device.getTextureManager().getTexture(treq);
256
+ var tview = newTex.createView();
257
+ tview.setName('tfunTexture');
258
+ var tViews = model.helper.getTextureViews();
259
+ tViews[1] = tview;
260
+ }
261
+ model.colorTextureString = cfunToString;
262
+ }
263
+ };
264
+
265
+ publicAPI.updateBuffers = function (device) {
266
+ var treq = {
267
+ imageData: model.currentInput,
268
+ source: model.currentInput
269
+ };
270
+ var newTex = device.getTextureManager().getTexture(treq);
271
+ var tViews = model.helper.getTextureViews();
272
+
273
+ if (!tViews[0] || tViews[0].getTexture() !== newTex) {
274
+ var tview = newTex.createView();
275
+ tview.setName("imgTexture");
276
+ tViews[0] = tview;
277
+ }
278
+
279
+ publicAPI.updateLUTImage(device);
280
+ };
281
+
282
+ publicAPI.build = function (renderEncoder, device) {
283
+ publicAPI.computePipelineHash();
284
+ model.helper.setPipelineHash(model.pipelineHash);
285
+ publicAPI.updateBuffers(device); // set interpolation on the texture based on property setting
286
+
287
+ var actorProperty = model.WebGPUImageSlice.getRenderable().getProperty();
288
+ var iType = actorProperty.getInterpolationType() === InterpolationType.NEAREST ? 'nearest' : 'linear';
289
+
290
+ if (!model.clampSampler || iType !== model.clampSampler.getOptions().minFilter) {
291
+ model.clampSampler = vtkWebGPUSampler.newInstance();
292
+ model.clampSampler.setName('clampSampler');
293
+ model.clampSampler.create(device, {
294
+ minFilter: iType,
295
+ magFilter: iType
296
+ });
297
+ }
298
+
299
+ model.helper.setAdditionalBindables(publicAPI.getBindables());
300
+ model.helper.setWebGPURenderer(model.WebGPURenderer);
301
+ model.helper.build(renderEncoder, device);
302
+ model.helper.registerToDraw();
303
+ };
304
+
305
+ publicAPI.getBindables = function () {
306
+ var bindables = []; // bindables.push(model.componentSSBO);
307
+
308
+ bindables.push(model.clampSampler);
309
+ return bindables;
310
+ };
311
+
312
+ var sr = model.helper.getShaderReplacements();
313
+
314
+ publicAPI.replaceShaderPosition = function (hash, pipeline, vertexInput) {
315
+ var vDesc = pipeline.getShaderDescription('vertex');
316
+ vDesc.addBuiltinOutput('vec4<f32>', '[[builtin(position)]] Position');
317
+ var code = vDesc.getCode();
318
+ var lines = ['var pos: vec4<f32> = mapperUBO.Origin +', ' (vertexBC.x * 0.5 + 0.5) * mapperUBO.Axis1 + (vertexBC.y * 0.5 + 0.5) * mapperUBO.Axis2;', 'pos.w = 1.0;'];
319
+
320
+ if (model.dimensions === 2) {
321
+ lines.push('var tcoord : vec2<f32> = (mapperUBO.SCTCMatrix * pos).xy;');
322
+ } else {
323
+ lines.push('var tcoord : vec3<f32> = (mapperUBO.SCTCMatrix * pos).xyz;');
324
+ }
325
+
326
+ lines.push('output.tcoordVS = tcoord;', 'output.Position = rendererUBO.SCPCMatrix * pos;');
327
+ code = vtkWebGPUShaderCache.substitute(code, '//VTK::Position::Impl', lines).result;
328
+ vDesc.setCode(code);
329
+ };
330
+
331
+ sr.set('replaceShaderPosition', publicAPI.replaceShaderPosition);
332
+
333
+ publicAPI.replaceShaderTCoord = function (hash, pipeline, vertexInput) {
334
+ var vDesc = pipeline.getShaderDescription('vertex');
335
+
336
+ if (model.dimensions === 2) {
337
+ vDesc.addOutput('vec2<f32>', 'tcoordVS');
338
+ } else {
339
+ vDesc.addOutput('vec3<f32>', 'tcoordVS');
340
+ }
341
+ };
342
+
343
+ sr.set('replaceShaderTCoord', publicAPI.replaceShaderTCoord);
344
+
345
+ publicAPI.replaceShaderImage = function (hash, pipeline, vertexInput) {
346
+ var fDesc = pipeline.getShaderDescription('fragment');
347
+ var code = fDesc.getCode();
348
+
349
+ if (model.dimensions === 3) {
350
+ code = vtkWebGPUShaderCache.substitute(code, '//VTK::Image::Sample', [" var computedColor: vec4<f32> =", " textureSampleLevel(imgTexture, clampSampler, input.tcoordVS, 0.0);", "//VTK::Image::Sample"]).result;
351
+ } else {
352
+ code = vtkWebGPUShaderCache.substitute(code, '//VTK::Image::Sample', [" var computedColor: vec4<f32> =", " textureSampleLevel(imgTexture, clampSampler, input.tcoordVS, 0.0);", "//VTK::Image::Sample"]).result;
353
+ }
354
+
355
+ code = vtkWebGPUShaderCache.substitute(code, '//VTK::Image::Sample', [" var coord: vec2<f32> =", " vec2<f32>(computedColor.r * mapperUBO.cScale.r + mapperUBO.cShift.r, 0.5);", " computedColor = textureSampleLevel(tfunTexture, clampSampler, coord, 0.0);"]).result;
356
+ fDesc.setCode(code);
357
+ };
358
+
359
+ sr.set('replaceShaderImage', publicAPI.replaceShaderImage);
360
+ } // ----------------------------------------------------------------------------
361
+ // Object factory
362
+ // ----------------------------------------------------------------------------
363
+
364
+
365
+ var DEFAULT_VALUES = {
366
+ rowLength: 1024 // VBOBuildTime: 0,
367
+ // VBOBuildString: null,
368
+ // webGPUTexture: null,
369
+ // tris: null,
370
+ // imagemat: null,
371
+ // imagematinv: null,
372
+ // colorTexture: null,
373
+ // pwfTexture: null,
374
+ // lastHaveSeenDepthRequest: false,
375
+ // haveSeenDepthRequest: false,
376
+ // lastTextureComponents: 0,
377
+
378
+ }; // ----------------------------------------------------------------------------
379
+
380
+ function extend(publicAPI, model) {
381
+ var initialValues = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
382
+ Object.assign(model, DEFAULT_VALUES, initialValues); // Inheritance
383
+
384
+ vtkViewNode.extend(publicAPI, model, initialValues);
385
+ model.helper = vtkWebGPUFullScreenQuad.newInstance();
386
+ model.helper.setFragmentShaderTemplate(imgFragTemplate);
387
+ model.UBO = vtkWebGPUUniformBuffer.newInstance();
388
+ model.UBO.setName('mapperUBO');
389
+ model.UBO.addEntry('SCTCMatrix', 'mat4x4<f32>');
390
+ model.UBO.addEntry('Origin', 'vec4<f32>');
391
+ model.UBO.addEntry('Axis2', 'vec4<f32>');
392
+ model.UBO.addEntry('Axis1', 'vec4<f32>');
393
+ model.UBO.addEntry('cScale', 'vec4<f32>');
394
+ model.UBO.addEntry('cShift', 'vec4<f32>');
395
+ model.helper.setUBO(model.UBO);
396
+ model.SSBO = vtkWebGPUStorageBuffer.newInstance();
397
+ model.SSBO.setName('volumeSSBO');
398
+ model.componentSSBO = vtkWebGPUStorageBuffer.newInstance();
399
+ model.componentSSBO.setName('componentSSBO');
400
+ model.lutBuildTime = {};
401
+ obj(model.lutBuildTime, {
402
+ mtime: 0
403
+ });
404
+ model.imagemat = identity(new Float64Array(16));
405
+ model.imagematinv = identity(new Float64Array(16));
406
+ model.VBOBuildTime = {};
407
+ obj(model.VBOBuildTime); // Object methods
408
+
409
+ vtkWebGPUImageMapper(publicAPI, model);
410
+ } // ----------------------------------------------------------------------------
411
+
412
+ var newInstance = newInstance$1(extend, 'vtkWebGPUImageMapper'); // ----------------------------------------------------------------------------
413
+
414
+ var index = {
415
+ newInstance: newInstance,
416
+ extend: extend
417
+ }; // Register ourself to WebGPU backend if imported
418
+
419
+ registerOverride('vtkImageMapper', newInstance);
420
+
421
+ export { index as default, extend, newInstance };
@@ -0,0 +1,162 @@
1
+ import { newInstance as newInstance$1, obj, get } from '../../macros.js';
2
+ import vtkViewNode from '../SceneGraph/ViewNode.js';
3
+ import { registerOverride } from './ViewNodeFactory.js';
4
+ import { i as identity, j as transpose, d as copy, g as invert, t as translate } from '../../vendor/gl-matrix/esm/mat4.js';
5
+
6
+ // vtkWebGPUImageSlice methods
7
+ // ----------------------------------------------------------------------------
8
+
9
+ function vtkWebGPUImageSlice(publicAPI, model) {
10
+ // Set our className
11
+ model.classHierarchy.push('vtkWebGPUImageSlice'); // Builds myself.
12
+
13
+ publicAPI.buildPass = function (prepass) {
14
+ if (!model.renderable || !model.renderable.getVisibility()) {
15
+ return;
16
+ }
17
+
18
+ if (prepass) {
19
+ if (!model.renderable) {
20
+ return;
21
+ }
22
+
23
+ model.WebGPURenderer = publicAPI.getFirstAncestorOfType('vtkWebGPURenderer');
24
+ model.WebGPURenderWindow = model.WebGPURenderer.getFirstAncestorOfType('vtkWebGPURenderWindow');
25
+
26
+ if (model.propID === undefined) {
27
+ model.propID = model.WebGPURenderWindow.getUniquePropID();
28
+ }
29
+
30
+ publicAPI.prepareNodes();
31
+ publicAPI.addMissingNode(model.renderable.getMapper());
32
+ publicAPI.removeUnusedNodes();
33
+ }
34
+ }; // we draw textures, then mapper, then post pass textures
35
+
36
+
37
+ publicAPI.traverseOpaquePass = function (renderPass) {
38
+ if (!model.renderable || !model.renderable.getNestedVisibility() || !model.renderable.getIsOpaque() || model.WebGPURenderer.getSelector() && !model.renderable.getNestedPickable()) {
39
+ return;
40
+ }
41
+
42
+ publicAPI.apply(renderPass, true);
43
+ model.children.forEach(function (child) {
44
+ child.traverse(renderPass);
45
+ });
46
+ publicAPI.apply(renderPass, false);
47
+ };
48
+
49
+ publicAPI.traverseTranslucentPass = function (renderPass) {
50
+ if (!model.renderable || !model.renderable.getNestedVisibility() || model.renderable.getIsOpaque() || model.WebGPURenderer.getSelector() && !model.renderable.getNestedPickable()) {
51
+ return;
52
+ }
53
+
54
+ publicAPI.apply(renderPass, true);
55
+ model.children.forEach(function (child) {
56
+ child.traverse(renderPass);
57
+ });
58
+ publicAPI.apply(renderPass, false);
59
+ };
60
+
61
+ publicAPI.queryPass = function (prepass, renderPass) {
62
+ if (prepass) {
63
+ if (!model.renderable || !model.renderable.getVisibility()) {
64
+ return;
65
+ }
66
+
67
+ if (model.renderable.getIsOpaque()) {
68
+ renderPass.incrementOpaqueActorCount();
69
+ } else {
70
+ renderPass.incrementTranslucentActorCount();
71
+ }
72
+ }
73
+ };
74
+
75
+ publicAPI.getBufferShift = function (wgpuRen) {
76
+ publicAPI.getKeyMatrices(wgpuRen);
77
+ return model.bufferShift;
78
+ };
79
+
80
+ publicAPI.getKeyMatrices = function (wgpuRen) {
81
+ // has the actor or stabilization center changed?
82
+ if (Math.max(model.renderable.getMTime(), wgpuRen.getStabilizedTime()) > model.keyMatricesTime.getMTime()) {
83
+ model.renderable.computeMatrix();
84
+ var mcwc = model.renderable.getMatrix(); // compute the net shift
85
+
86
+ var center = wgpuRen.getStabilizedCenterByReference();
87
+ model.bufferShift[0] = mcwc[3] - center[0];
88
+ model.bufferShift[1] = mcwc[7] - center[1];
89
+ model.bufferShift[2] = mcwc[11] - center[2];
90
+ transpose(model.keyMatrices.bcwc, mcwc);
91
+
92
+ if (model.renderable.getIsIdentity()) {
93
+ identity(model.keyMatrices.normalMatrix);
94
+ } else {
95
+ // we use bcwc BEFORE the translate below (just to get transposed mcwc)
96
+ copy(model.keyMatrices.normalMatrix, model.keyMatrices.bcwc); // zero out translation
97
+
98
+ model.keyMatrices.normalMatrix[3] = 0.0;
99
+ model.keyMatrices.normalMatrix[7] = 0.0;
100
+ model.keyMatrices.normalMatrix[11] = 0.0;
101
+ invert(model.keyMatrices.normalMatrix, model.keyMatrices.normalMatrix);
102
+ transpose(model.keyMatrices.normalMatrix, model.keyMatrices.normalMatrix);
103
+ } // only need the buffer shift to get to world
104
+
105
+
106
+ translate(model.keyMatrices.bcwc, model.keyMatrices.bcwc, [-model.bufferShift[0], -model.bufferShift[1], -model.bufferShift[2]]); // to get to stabilized we also need the center
107
+
108
+ translate(model.keyMatrices.bcsc, model.keyMatrices.bcwc, [-center[0], -center[1], -center[2]]);
109
+ model.keyMatricesTime.modified();
110
+ }
111
+
112
+ return model.keyMatrices;
113
+ };
114
+ } // ----------------------------------------------------------------------------
115
+ // Object factory
116
+ // ----------------------------------------------------------------------------
117
+
118
+
119
+ var DEFAULT_VALUES = {
120
+ bufferShift: undefined,
121
+ keyMatrixTime: null,
122
+ keyMatrices: null,
123
+ propID: undefined
124
+ }; // ----------------------------------------------------------------------------
125
+
126
+ function extend(publicAPI, model) {
127
+ var initialValues = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
128
+ Object.assign(model, DEFAULT_VALUES, initialValues); // Inheritance
129
+
130
+ vtkViewNode.extend(publicAPI, model, initialValues);
131
+ model.keyMatricesTime = {};
132
+ obj(model.keyMatricesTime, {
133
+ mtime: 0
134
+ });
135
+ model.keyMatrices = {
136
+ normalMatrix: new Float64Array(16),
137
+ bcwc: new Float64Array(16),
138
+ bcsc: new Float64Array(16)
139
+ };
140
+ model.keyMatrixTime = {};
141
+ obj(model.keyMatrixTime, {
142
+ mtime: 0
143
+ });
144
+ model.keyMatrices = {
145
+ mcwc: identity(new Float64Array(16))
146
+ };
147
+ model.bufferShift = [0, 0, 0, 0];
148
+ get(publicAPI, model, ['propID', 'keyMatricesTime']); // Object methods
149
+
150
+ vtkWebGPUImageSlice(publicAPI, model);
151
+ } // ----------------------------------------------------------------------------
152
+
153
+ var newInstance = newInstance$1(extend, 'vtkWebGPUImageSlice'); // ----------------------------------------------------------------------------
154
+
155
+ var index = {
156
+ newInstance: newInstance,
157
+ extend: extend
158
+ }; // Register ourself to WebGPU backend if imported
159
+
160
+ registerOverride('vtkImageSlice', newInstance);
161
+
162
+ export { index as default, extend, newInstance };
@@ -1,3 +1,4 @@
1
+ import _toConsumableArray from '@babel/runtime/helpers/toConsumableArray';
1
2
  import macro from '../../macros.js';
2
3
  import vtkWebGPUBindGroup from './BindGroup.js';
3
4
  import vtkWebGPUPipeline from './Pipeline.js';
@@ -153,7 +154,7 @@ function vtkWebGPUMapperHelper(publicAPI, model) {
153
154
  };
154
155
 
155
156
  publicAPI.getBindables = function () {
156
- var bindables = [];
157
+ var bindables = _toConsumableArray(model.additionalBindables);
157
158
 
158
159
  if (model.UBO) {
159
160
  bindables.push(model.UBO);
@@ -205,6 +206,7 @@ function vtkWebGPUMapperHelper(publicAPI, model) {
205
206
 
206
207
 
207
208
  var DEFAULT_VALUES = {
209
+ additionalBindables: undefined,
208
210
  bindGroup: null,
209
211
  device: null,
210
212
  fragmentShaderTemplate: null,
@@ -229,12 +231,13 @@ function extend(publicAPI, model) {
229
231
  model.vertexInput = vtkWebGPUVertexInput.newInstance();
230
232
  model.bindGroup = vtkWebGPUBindGroup.newInstance();
231
233
  model.bindGroup.setName('mapperBG');
234
+ model.additionalBindables = [];
232
235
  model.fragmentShaderTemplate = model.fragmentShaderTemplate || vtkWebGPUMapperHelperFS;
233
236
  model.vertexShaderTemplate = model.vertexShaderTemplate || vtkWebGPUMapperHelperVS;
234
237
  model.shaderReplacements = new Map(); // Build VTK API
235
238
 
236
239
  macro.get(publicAPI, model, ['vertexInput']);
237
- macro.setGet(publicAPI, model, ['device', 'fragmentShaderTemplate', 'interpolate', 'numberOfInstances', 'numberOfVertices', 'pipelineHash', 'shaderReplacements', 'SSBO', 'textureViews', 'topology', 'UBO', 'vertexShaderTemplate', 'WebGPURenderer']); // Object methods
240
+ macro.setGet(publicAPI, model, ['additionalBindables', 'device', 'fragmentShaderTemplate', 'interpolate', 'numberOfInstances', 'numberOfVertices', 'pipelineHash', 'shaderReplacements', 'SSBO', 'textureViews', 'topology', 'UBO', 'vertexShaderTemplate', 'WebGPURenderer']); // Object methods
238
241
 
239
242
  vtkWebGPUMapperHelper(publicAPI, model);
240
243
  } // ----------------------------------------------------------------------------
@@ -4,6 +4,9 @@ import '../Actor.js';
4
4
  import '../PolyDataMapper.js';
5
5
  import '../Texture.js';
6
6
  import '../Glyph3DMapper.js';
7
+ import '../ImageMapper.js';
8
+ import '../ImageSlice.js';
9
+ import '../Volume.js';
7
10
  import '../PixelSpaceCallbackMapper.js';
8
11
  import '../SphereMapper.js';
9
12
  import '../StickMapper.js';
@@ -1,4 +1,6 @@
1
1
  import '../Camera.js';
2
2
  import '../Renderer.js';
3
+ import '../ImageMapper.js';
4
+ import '../ImageSlice.js';
3
5
  import '../Volume.js';
4
6
  import '../PixelSpaceCallbackMapper.js';
@@ -12,10 +12,9 @@ function vtkWebGPUSampler(publicAPI, model) {
12
12
  publicAPI.create = function (device) {
13
13
  var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
14
14
  model.device = device;
15
- model.handle = model.device.getHandle().createSampler({
16
- magFilter: options.magFilter ? options.magFilter : 'nearest',
17
- minFilter: options.minFilter ? options.minFilter : 'nearest'
18
- });
15
+ model.options.magFilter = options.magFilter ? options.magFilter : 'nearest';
16
+ model.options.minFilter = options.minFilter ? options.minFilter : 'nearest';
17
+ model.handle = model.device.getHandle().createSampler(model.options);
19
18
  model.bindGroupTime.modified();
20
19
  };
21
20
 
@@ -38,7 +37,8 @@ function vtkWebGPUSampler(publicAPI, model) {
38
37
  var DEFAULT_VALUES = {
39
38
  device: null,
40
39
  handle: null,
41
- name: null
40
+ name: null,
41
+ options: null
42
42
  }; // ----------------------------------------------------------------------------
43
43
 
44
44
  function extend(publicAPI, model) {
@@ -46,6 +46,7 @@ function extend(publicAPI, model) {
46
46
  Object.assign(model, DEFAULT_VALUES, initialValues); // Object methods
47
47
 
48
48
  macro.obj(publicAPI, model);
49
+ model.options = {};
49
50
  model.bindGroupLayoutEntry = {
50
51
  /* eslint-disable no-undef */
51
52
  visibility: GPUShaderStage.VERTEX | GPUShaderStage.FRAGMENT,
@@ -58,7 +59,7 @@ function extend(publicAPI, model) {
58
59
  macro.obj(model.bindGroupTime, {
59
60
  mtime: 0
60
61
  });
61
- macro.get(publicAPI, model, ['bindGroupTime', 'handle']);
62
+ macro.get(publicAPI, model, ['bindGroupTime', 'handle', 'options']);
62
63
  macro.setGet(publicAPI, model, ['bindGroupLayoutEntry', 'device', 'name']);
63
64
  vtkWebGPUSampler(publicAPI, model);
64
65
  } // ----------------------------------------------------------------------------
@@ -147,6 +147,20 @@ function vtkWebGPUTexture(publicAPI, model) {
147
147
  }, [model.width, model.height, model.depth]);
148
148
  model.device.submitCommandEncoder(cmdEnc);
149
149
  model.ready = true;
150
+ }; // when data is pulled out of this texture what scale must be applied to
151
+ // get back to the original source data. For formats such as r8unorm we
152
+ // have to multiply by 255.0, for formats such as r16float it is 1.0
153
+
154
+
155
+ publicAPI.getScale = function () {
156
+ var tDetails = vtkWebGPUTypes.getDetailsFromTextureFormat(model.format);
157
+ var halfFloat = tDetails.elementSize === 2 && tDetails.sampleType === 'float';
158
+ return halfFloat ? 1.0 : 255.0;
159
+ };
160
+
161
+ publicAPI.getNumberOfComponents = function () {
162
+ var tDetails = vtkWebGPUTypes.getDetailsFromTextureFormat(model.format);
163
+ return tDetails.numComponents;
150
164
  };
151
165
 
152
166
  publicAPI.resizeToMatch = function (tex) {
@@ -258,10 +258,7 @@ function vtkWebGPUVolumePassFSQ(publicAPI, model) {
258
258
  identity(tmpMat4);
259
259
  translate(tmpMat4, tmpMat4, center); // tmpMat4 is now SC->World
260
260
 
261
- var _vol = model.volumes[vidx];
262
-
263
- var mcwcmat = _vol.getRenderable().getMatrix();
264
-
261
+ var mcwcmat = actor.getMatrix();
265
262
  transpose(tmp2Mat4, mcwcmat);
266
263
  invert(tmp2Mat4, tmp2Mat4); // tmp2Mat4 is now world to model
267
264
 
@@ -295,10 +292,7 @@ function vtkWebGPUVolumePassFSQ(publicAPI, model) {
295
292
  spacingArray[vidx * 4 + 2] = spacing[2];
296
293
  spacingArray[vidx * 4 + 3] = 1.0; // handle filteringMode
297
294
 
298
- var tformat = model.textureViews[vidx + 4].getTexture().getFormat();
299
- var tDetails = vtkWebGPUTypes.getDetailsFromTextureFormat(tformat);
300
- var halfFloat = tDetails.elementSize === 2 && tDetails.sampleType === 'float';
301
- var tScale = halfFloat ? 1.0 : 255.0;
295
+ var tScale = model.textureViews[vidx + 4].getTexture().getScale();
302
296
 
303
297
  var ipScalarRange = _volMapr2.getIpScalarRange();
304
298
 
@@ -346,18 +340,16 @@ function vtkWebGPUVolumePassFSQ(publicAPI, model) {
346
340
  var iComps = vprop.getIndependentComponents(); // const numIComps = iComps ? numComp : 1;
347
341
  // half float?
348
342
 
349
- var _tformat = model.textureViews[_vidx2 + 4].getTexture().getFormat();
350
-
351
- var _tDetails = vtkWebGPUTypes.getDetailsFromTextureFormat(_tformat);
352
-
353
- var _halfFloat = _tDetails.elementSize === 2 && _tDetails.sampleType === 'float';
343
+ var tformat = model.textureViews[_vidx2 + 4].getTexture().getFormat();
354
344
 
345
+ var tDetails = vtkWebGPUTypes.getDetailsFromTextureFormat(tformat);
346
+ var halfFloat = tDetails.elementSize === 2 && tDetails.sampleType === 'float';
355
347
  var volInfo = {
356
348
  scale: [255.0],
357
349
  offset: [0.0]
358
350
  };
359
351
 
360
- if (_halfFloat) {
352
+ if (halfFloat) {
361
353
  volInfo.scale[0] = 1.0;
362
354
  } // three levels of shift scale combined into one
363
355
  // for performance in the fragment shader
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@kitware/vtk.js",
3
- "version": "19.7.3",
3
+ "version": "19.8.1",
4
4
  "description": "Visualization Toolkit for the Web",
5
5
  "keywords": [
6
6
  "3d",
@@ -41,6 +41,22 @@ function fromValues(x, y, z, w) {
41
41
  out[3] = w;
42
42
  return out;
43
43
  }
44
+ /**
45
+ * Subtracts vector b from vector a
46
+ *
47
+ * @param {vec4} out the receiving vector
48
+ * @param {ReadonlyVec4} a the first operand
49
+ * @param {ReadonlyVec4} b the second operand
50
+ * @returns {vec4} out
51
+ */
52
+
53
+ function subtract(out, a, b) {
54
+ out[0] = a[0] - b[0];
55
+ out[1] = a[1] - b[1];
56
+ out[2] = a[2] - b[2];
57
+ out[3] = a[3] - b[3];
58
+ return out;
59
+ }
44
60
  /**
45
61
  * Normalize a vec4
46
62
  *
@@ -134,4 +150,4 @@ function transformMat4(out, a, m) {
134
150
  };
135
151
  })();
136
152
 
137
- export { fromValues as f, normalize as n, transformMat4 as t };
153
+ export { fromValues as f, normalize as n, subtract as s, transformMat4 as t };