@kitware/vtk.js 25.3.0 → 25.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -48,8 +48,8 @@ In general VTK tries to be as portable as possible; the specific configurations
48
48
 
49
49
  vtk.js supports the following development environments:
50
50
 
51
- - Node 12+
52
- - NPM 6+
51
+ - Node 14+
52
+ - NPM 7+
53
53
 
54
54
  and we use [@babel/preset-env](https://www.npmjs.com/package/@babel/preset-env) with the [defaults](https://github.com/Kitware/vtk-js/blob/master/.browserslistrc) set of [browsers target](https://browserl.ist/?q=defaults).
55
55
  But when built from source this could be adjusted to support any browser as long they provide WebGL.
@@ -35,6 +35,10 @@ export interface IRendererInitialValues extends IViewportInitialValues {
35
35
  occlusionRatio?: number;
36
36
  maximumNumberOfPeels?: number;
37
37
  texturedBackground?: boolean;
38
+ environmentTexture?: vtkTexture;
39
+ environmentTextureDiffuseStrength?: number;
40
+ environmentTextureSpecularStrength?: number;
41
+ useEnvironmentTextureAsBackground?: boolean;
38
42
  pass?: number;
39
43
  }
40
44
 
@@ -115,7 +119,25 @@ export interface vtkRenderer extends vtkViewport {
115
119
  *
116
120
  * @default null
117
121
  */
118
- getBackgroundTexture(): vtkTexture;
122
+ getEnvironmentTexture(): vtkTexture;
123
+
124
+ /**
125
+ * Returns the diffuse strength of the set environment texture.
126
+ * @default 1
127
+ */
128
+ getEnvironmentTextureDiffuseStrength(): number;
129
+
130
+ /**
131
+ * Returns the specular strength of the set environment texture.
132
+ * @default 1
133
+ */
134
+ getEnvironmentTextureSpecularStrength(): number;
135
+
136
+ /**
137
+ * Gets whether or not the environment texture is being used as the background for the view.
138
+ * @default false
139
+ */
140
+ getUseEnvironmentTextureAsBackground(): boolean;
119
141
 
120
142
  /**
121
143
  *
@@ -347,9 +369,27 @@ export interface vtkRenderer extends vtkViewport {
347
369
 
348
370
  /**
349
371
  *
350
- * @param {vtkTexture} backgroundTexture
372
+ * @param {vtkTexture} environmentTexture
373
+ */
374
+ setEnvironmentTexture(environmentTexture: vtkTexture): boolean;
375
+
376
+ /**
377
+ * Sets the diffuse strength of the set environment texture.
378
+ * @param {number} diffuseStrength the new diffuse strength.
351
379
  */
352
- setBackgroundTexture(backgroundTexture: vtkTexture): boolean;
380
+ setEnvironmentTextureDiffuseStrength(diffuseStrength: number): boolean;
381
+
382
+ /**
383
+ * Sets the specular strength of the set environment texture.
384
+ * @param {number} specularStrength the new specular strength.
385
+ */
386
+ setEnvironmentTextureSpecularStrength(specularStrength: number): boolean;
387
+
388
+ /**
389
+ * Sets whether or not to use the environment texture as the background for the view.
390
+ * @param {boolean} textureAsBackground
391
+ */
392
+ setUseEnvironmentTextureAsBackground(textureAsBackground: boolean): boolean;
353
393
 
354
394
  /**
355
395
  *
@@ -593,6 +593,10 @@ var DEFAULT_VALUES = {
593
593
  delegate: null,
594
594
  texturedBackground: false,
595
595
  backgroundTexture: null,
596
+ environmentTexture: null,
597
+ environmentTextureDiffuseStrength: 1,
598
+ environmentTextureSpecularStrength: 1,
599
+ useEnvironmentTextureAsBackground: false,
596
600
  pass: 0
597
601
  }; // ----------------------------------------------------------------------------
598
602
 
@@ -611,7 +615,7 @@ function extend(publicAPI, model) {
611
615
  if (model.background.length === 3) model.background.push(1); // Build VTK API
612
616
 
613
617
  get(publicAPI, model, ['_renderWindow', 'allocatedRenderTime', 'timeFactor', 'lastRenderTimeInSeconds', 'numberOfPropsRendered', 'lastRenderingUsedDepthPeeling', 'selector']);
614
- setGet(publicAPI, model, ['twoSidedLighting', 'lightFollowCamera', 'automaticLightCreation', 'erase', 'draw', 'nearClippingPlaneTolerance', 'clippingRangeExpansion', 'backingStore', 'interactive', 'layer', 'preserveColorBuffer', 'preserveDepthBuffer', 'useDepthPeeling', 'occlusionRatio', 'maximumNumberOfPeels', 'delegate', 'backgroundTexture', 'texturedBackground', 'useShadows', 'pass']);
618
+ setGet(publicAPI, model, ['twoSidedLighting', 'lightFollowCamera', 'automaticLightCreation', 'erase', 'draw', 'nearClippingPlaneTolerance', 'clippingRangeExpansion', 'backingStore', 'interactive', 'layer', 'preserveColorBuffer', 'preserveDepthBuffer', 'useDepthPeeling', 'occlusionRatio', 'maximumNumberOfPeels', 'delegate', 'backgroundTexture', 'texturedBackground', 'environmentTexture', 'environmentTextureDiffuseStrength', 'environmentTextureSpecularStrength', 'useEnvironmentTextureAsBackground', 'useShadows', 'pass']);
615
619
  getArray(publicAPI, model, ['actors', 'volumes', 'lights']);
616
620
  setGetArray(publicAPI, model, ['background'], 4, 1.0);
617
621
  moveToProtected(publicAPI, model, ['renderWindow']); // Object methods
@@ -5,6 +5,7 @@ export interface ITextureInitialValues {
5
5
  interpolate?: boolean;
6
6
  edgeClamp?: boolean;
7
7
  imageLoaded?: boolean;
8
+ mipLevel?: number;
8
9
  }
9
10
 
10
11
  export interface vtkTexture extends vtkAlgorithm {
@@ -34,6 +35,11 @@ export interface vtkTexture extends vtkAlgorithm {
34
35
  */
35
36
  getImageLoaded(): boolean;
36
37
 
38
+ /**
39
+ *
40
+ */
41
+ getMipLevel(): number;
42
+
37
43
  /**
38
44
  *
39
45
  * @param repeat
@@ -62,6 +68,11 @@ export interface vtkTexture extends vtkAlgorithm {
62
68
  * @default null
63
69
  */
64
70
  setImage(image: any): void;
71
+
72
+ /**
73
+ * @param level
74
+ */
75
+ setMipLevel(level: number): boolean;
65
76
  }
66
77
 
67
78
  /**
@@ -79,6 +90,17 @@ export function extend(publicAPI: object, model: object, initialValues?: ITextur
79
90
  */
80
91
  export function newInstance(initialValues?: ITextureInitialValues): vtkTexture;
81
92
 
93
+ /**
94
+ * Method used to create mipmaps from given texture data. Works best with textures that have a
95
+ * width and a height that are powers of two.
96
+ *
97
+ * @param nativeArray the array of data to create mipmaps from.
98
+ * @param width the width of the data
99
+ * @param height the height of the data
100
+ * @param level the level to which additional mipmaps are generated.
101
+ */
102
+ export function generateMipmaps(nativeArray: any, width: number, height: number, level: number): Array<Uint8ClampedArray>;
103
+
82
104
  /**
83
105
  * vtkTexture is an image algorithm that handles loading and binding of texture maps.
84
106
  * It obtains its data from an input image data dataset type.
@@ -1,5 +1,10 @@
1
+ import _defineProperty from '@babel/runtime/helpers/defineProperty';
2
+ import _toConsumableArray from '@babel/runtime/helpers/toConsumableArray';
1
3
  import macro from '../../macros.js';
2
4
 
5
+ function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; }
6
+
7
+ function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; }
3
8
  // vtkTexture methods
4
9
  // ----------------------------------------------------------------------------
5
10
 
@@ -103,19 +108,160 @@ function vtkTexture(publicAPI, model) {
103
108
  var dimensionality = (width > 1) + (height > 1) + (depth > 1);
104
109
  return dimensionality;
105
110
  };
106
- } // ----------------------------------------------------------------------------
111
+
112
+ publicAPI.getInputAsJsImageData = function () {
113
+ if (!model.imageLoaded || publicAPI.getInputData()) return null;
114
+
115
+ if (model.jsImageData) {
116
+ return model.jsImageData();
117
+ }
118
+
119
+ if (model.canvas) {
120
+ var context = model.canvas.getContext('2d');
121
+ var imageData = context.getImageData(0, 0, model.canvas.width, model.canvas.height);
122
+ return imageData;
123
+ }
124
+
125
+ if (model.image) {
126
+ var canvas = document.createElement('canvas');
127
+ canvas.width = model.image.width;
128
+ canvas.height = model.image.height;
129
+
130
+ var _context = canvas.getContext('2d');
131
+
132
+ _context.translate(0, canvas.height);
133
+
134
+ _context.scale(1, -1);
135
+
136
+ _context.drawImage(model.image, 0, 0, model.image.width, model.image.height);
137
+
138
+ var _imageData = _context.getImageData(0, 0, canvas.width, canvas.height);
139
+
140
+ return _imageData;
141
+ }
142
+
143
+ return null;
144
+ };
145
+ } // Use nativeArray instead of self
146
+
147
+
148
+ var generateMipmaps = function generateMipmaps(nativeArray, width, height, level) {
149
+ // TODO: FIX UNEVEN TEXTURE MIP GENERATION:
150
+ // When textures don't have standard ratios, higher mip levels
151
+ // result in their color channels getting messed up and shifting
152
+ // 3x3 gaussian kernel
153
+ var g3m = [1, 2, 1]; // eslint-disable-line
154
+
155
+ var g3w = 4; // eslint-disable-line
156
+
157
+ var kernel = g3m;
158
+ var kernelWeight = g3w;
159
+ var hs = nativeArray.length / (width * height); // TODO: support for textures with depth more than 1
160
+
161
+ var currentWidth = width;
162
+ var currentHeight = height;
163
+ var imageData = nativeArray;
164
+ var maps = [imageData];
165
+
166
+ for (var i = 0; i < level; i++) {
167
+ var oldData = _toConsumableArray(imageData);
168
+
169
+ currentWidth /= 2;
170
+ currentHeight /= 2;
171
+ imageData = new Uint8ClampedArray(currentWidth * currentHeight * hs);
172
+ var vs = hs * currentWidth; // Scale down
173
+
174
+ var shift = 0;
175
+
176
+ for (var p = 0; p < imageData.length; p += hs) {
177
+ if (p % vs === 0) {
178
+ shift += 2 * hs * currentWidth;
179
+ }
180
+
181
+ for (var c = 0; c < hs; c++) {
182
+ var sample = oldData[shift + c];
183
+ sample += oldData[shift + hs + c];
184
+ sample += oldData[shift - 2 * vs + c];
185
+ sample += oldData[shift - 2 * vs + hs + c];
186
+ sample /= 4;
187
+ imageData[p + c] = sample;
188
+ }
189
+
190
+ shift += 2 * hs;
191
+ } // Horizontal Pass
192
+
193
+
194
+ var dataCopy = _toConsumableArray(imageData);
195
+
196
+ for (var _p = 0; _p < imageData.length; _p += hs) {
197
+ for (var _c = 0; _c < hs; _c++) {
198
+ var x = -(kernel.length - 1) / 2;
199
+ var kw = kernelWeight;
200
+ var value = 0.0;
201
+
202
+ for (var k = 0; k < kernel.length; k++) {
203
+ var index = _p + _c + x * hs;
204
+ var lineShift = index % vs - (_p + _c) % vs;
205
+ if (lineShift > hs) index += vs;
206
+ if (lineShift < -hs) index -= vs;
207
+
208
+ if (dataCopy[index]) {
209
+ value += dataCopy[index] * kernel[k];
210
+ } else {
211
+ kw -= kernel[k];
212
+ }
213
+
214
+ x += 1;
215
+ }
216
+
217
+ imageData[_p + _c] = value / kw;
218
+ }
219
+ } // Vertical Pass
220
+
221
+
222
+ dataCopy = _toConsumableArray(imageData);
223
+
224
+ for (var _p2 = 0; _p2 < imageData.length; _p2 += hs) {
225
+ for (var _c2 = 0; _c2 < hs; _c2++) {
226
+ var _x = -(kernel.length - 1) / 2;
227
+
228
+ var _kw = kernelWeight;
229
+ var _value = 0.0;
230
+
231
+ for (var _k = 0; _k < kernel.length; _k++) {
232
+ var _index = _p2 + _c2 + _x * vs;
233
+
234
+ if (dataCopy[_index]) {
235
+ _value += dataCopy[_index] * kernel[_k];
236
+ } else {
237
+ _kw -= kernel[_k];
238
+ }
239
+
240
+ _x += 1;
241
+ }
242
+
243
+ imageData[_p2 + _c2] = _value / _kw;
244
+ }
245
+ }
246
+
247
+ maps.push(imageData);
248
+ }
249
+
250
+ return maps;
251
+ }; // ----------------------------------------------------------------------------
107
252
  // Object factory
108
253
  // ----------------------------------------------------------------------------
109
254
 
110
255
 
111
256
  var DEFAULT_VALUES = {
112
- repeat: false,
113
- interpolate: false,
114
- edgeClamp: false,
115
257
  image: null,
116
258
  canvas: null,
259
+ jsImageData: null,
117
260
  imageLoaded: false,
118
- jsImageData: null
261
+ repeat: false,
262
+ interpolate: false,
263
+ edgeClamp: false,
264
+ mipLevel: 0
119
265
  }; // ----------------------------------------------------------------------------
120
266
 
121
267
  function extend(publicAPI, model) {
@@ -125,15 +271,18 @@ function extend(publicAPI, model) {
125
271
  macro.obj(publicAPI, model);
126
272
  macro.algo(publicAPI, model, 6, 0);
127
273
  macro.get(publicAPI, model, ['canvas', 'image', 'jsImageData', 'imageLoaded']);
128
- macro.setGet(publicAPI, model, ['repeat', 'edgeClamp', 'interpolate']);
274
+ macro.setGet(publicAPI, model, ['repeat', 'edgeClamp', 'interpolate', 'mipLevel']);
129
275
  vtkTexture(publicAPI, model);
130
276
  } // ----------------------------------------------------------------------------
131
277
 
132
- var newInstance = macro.newInstance(extend, 'vtkTexture'); // ----------------------------------------------------------------------------
278
+ var newInstance = macro.newInstance(extend, 'vtkTexture');
279
+ var STATIC = {
280
+ generateMipmaps: generateMipmaps
281
+ }; // ----------------------------------------------------------------------------
133
282
 
134
- var vtkTexture$1 = {
283
+ var vtkTexture$1 = _objectSpread({
135
284
  newInstance: newInstance,
136
285
  extend: extend
137
- };
286
+ }, STATIC);
138
287
 
139
- export { vtkTexture$1 as default, extend, newInstance };
288
+ export { STATIC, vtkTexture$1 as default, extend, newInstance };
@@ -18,7 +18,7 @@ var ScalarMode = vtkMapper.ScalarMode;
18
18
  var CoordinateSystem = vtkProp.CoordinateSystem;
19
19
  var DisplayLocation = vtkProperty2D.DisplayLocation;
20
20
  var vtkWebGPUPolyDataVS = "\n//VTK::Renderer::Dec\n\n//VTK::Color::Dec\n\n//VTK::Normal::Dec\n\n//VTK::TCoord::Dec\n\n//VTK::Select::Dec\n\n//VTK::Mapper::Dec\n\n//VTK::IOStructs::Dec\n\n@vertex\nfn main(\n//VTK::IOStructs::Input\n)\n//VTK::IOStructs::Output\n{\n var output : vertexOutput;\n\n var vertex: vec4<f32> = vertexBC;\n\n //VTK::Color::Impl\n\n //VTK::Normal::Impl\n\n //VTK::TCoord::Impl\n\n //VTK::Select::Impl\n\n //VTK::Position::Impl\n\n return output;\n}\n";
21
- var vtkWebGPUPolyDataFS = "\nstruct PBRData {\n diffuse: vec3<f32>,\n specular: vec3<f32>,\n}\n\n// Dot product with the max already in it\nfn mdot(a: vec3<f32>, b: vec3<f32>) -> f32 {\n return max(0.0, dot(a, b));\n}\n\n// Lambertian diffuse model\nfn lambertDiffuse(base: vec3<f32>, N: vec3<f32>, L: vec3<f32>) -> vec3<f32> {\n var pi: f32 = 3.14159265359; \n var NdotL: f32 = mdot(N, L);\n NdotL = pow(NdotL, 1.5);\n return (base/pi)*NdotL;\n}\n\n// Yasuhiro Fujii improvement on the Oren-Nayar model\n// https://mimosa-pudica.net/improved-oren-nayar.html\n// p is surface color, o is roughness\nfn fujiiOrenNayar(p: vec3<f32>, o: f32, N: vec3<f32>, L: vec3<f32>, V: vec3<f32>) -> vec3<f32> {\n var invpi: f32 = 0.31830988618; // 1/pi\n\n var o2 = o*o;\n var NdotL: f32 = mdot(N, L);\n NdotL = pow(NdotL, 1.5); // Less physically accurate, but hides the \"seams\" between lights better\n\n var NdotV: f32 = mdot(N, V);\n var LdotV: f32 = mdot(L, V);\n\n var s: f32 = LdotV - NdotL*NdotV;\n var t: f32 = mix(1, max(NdotL, NdotV), step(0, s)); // Mix with step is the equivalent of an if statement\n var A: vec3<f32> = 0.5*(o2 / (o2 + 0.33)) + 0.17*p*(o2 / (o2 + 0.13));\n A = invpi*(1 - A);\n var B: f32 = 0.45*(o2 / (o2 + 0.09));\n B = invpi*B;\n\n return p*NdotL*(A + B*(s/t));\n}\n\n// Fresnel portion of BRDF (IOR only, simplified)\nfn schlickFresnelIOR(V: vec3<f32>, N: vec3<f32>, ior: f32, k: f32) -> f32 {\n var NdotV: f32 = mdot(V, N);\n // var R0: f32 = pow((ior - 1.0) / (ior + 1.0), 2); // 1.0 is about the ior of air, and it is assumed that light will be traveling through air\n var F0: f32 = (pow((ior - 1.0), 2) + k*k) / (pow((ior + 1.0), 2) + k*k); // This takes into account the roughness, whic the other one does not\n return F0 + (1 - F0) * pow((1-NdotV), 5); \n}\n\n// Fresnel portion of BRDF (Color ior, better)\nfn schlickFresnelRGB(V: vec3<f32>, N: vec3<f32>, F0: vec3<f32>) -> vec3<f32> {\n var NdotV: f32 = mdot(V, N);\n return F0 + (1 - F0) * pow((1-NdotV), 5); 
\n}\n\n// Normal portion of BRDF\n// https://learnopengl.com/PBR/Theory\n// Trowbridge-Reitz GGX functions: normal, halfway, roughness^2\nfn trGGX(N: vec3<f32>, H: vec3<f32>, a: f32) -> f32 {\n var pi: f32 = 3.14159265359; \n\n var a2: f32 = a*a;\n var NdotH = mdot(N, H);\n var NdotH2 = NdotH*NdotH;\n \n var denom: f32 = NdotH2 * (a2 - 1.0) + 1.0;\n\n return a2 / max((pi*denom*denom), 0.000001);\n}\n\n// A VERY bad approximation of anisotropy. Real anisotropic calculations require tangent and bitangent\nfn anisotrophicTrGGX(N: vec3<f32>, H: vec3<f32>, O: vec3<f32>, s: f32, a: f32) -> f32 {\n var Op: vec3<f32> = (rendererUBO.WCVCNormals * vec4<f32>(normalize(O) * s, 0.)).xyz;\n\n var ggx1: f32 = trGGX(N + Op*s, H, a);\n var ggx2: f32 = trGGX(N - Op*s, H, a);\n return (0.5 * ggx1 + 0.5 * ggx2);\n}\n\n// Geometry portion of BRDF\nfn schlickGGX(N: vec3<f32>, X: vec3<f32>, k: f32) -> f32 {\n var NdotX = mdot(N, X);\n return NdotX / max(0.000001, (NdotX*(1-k) + k));\n}\n\nfn smithSurfaceRoughness(N: vec3<f32>, V: vec3<f32>, L: vec3<f32>, k: f32) -> f32 {\n var ggx1: f32 = max(0.01, schlickGGX(N, V, k)); // Prevents void zones at the cost of some accuracy\n var ggx2: f32 = schlickGGX(N, L, k);\n return ggx1*ggx2;\n}\n\n// BRDF Combination\nfn cookTorrance(D: f32, F: f32, G: f32, N: vec3<f32>, V: vec3<f32>, L: vec3<f32>) -> f32 {\n var num: f32 = D*F*G;\n var denom: f32 = 4*mdot(V, N)*mdot(L, N);\n\n return num / max(denom, 0.000001);\n}\n\n// Different lighting calculations for different light sources\nfn calcDirectionalLight(N: vec3<f32>, V: vec3<f32>, ior: f32, roughness: f32, metallic: f32, direction: vec3<f32>, color: vec3<f32>, base: vec3<f32>) -> PBRData { \n var L: vec3<f32> = normalize(direction); // Light Vector\n var H: vec3<f32> = normalize(L + V); // Halfway Vector\n\n var alpha = roughness*roughness;\n var k: f32 = alpha*alpha / 2;\n\n var D: f32 = trGGX(N, H, alpha); // Distribution\n // var F: f32 = schlickFresnelIOR(V, N, ior, k); // Fresnel\n var G: f32 = 
smithSurfaceRoughness(N, V, L, k); // Geometry\n\n var brdf: f32 = cookTorrance(D, 1, G, N, V, L); // Fresnel term is replaced with 1 because it is added later\n var incoming: vec3<f32> = color;\n var angle: f32 = mdot(L, N);\n angle = pow(angle, 1.5);\n\n var specular: vec3<f32> = brdf*incoming*angle;\n // Oren-Nayar gives a clay-like effect when fully rough which some people may not want, so it might be better to give a separate\n // control property for the diffuse vs specular roughness\n var diffuse: vec3<f32> = incoming*fujiiOrenNayar(base, roughness, N, L, V); \n // Stores the specular and diffuse separately to allow for finer post processing\n // Could also be done (propably more properly) with a struct\n var out = PBRData(diffuse, specular);\n \n return out; // Returns angle along with color of light so the final color can be multiplied by angle as well (creates black areas)\n}\n\n// TODO: find some way to reduce the number of arguments going in here\nfn calcPointLight(N: vec3<f32>, V: vec3<f32>, fragPos: vec3<f32>, ior: f32, roughness: f32, metallic: f32, position: vec3<f32>, color: vec3<f32>, base: vec3<f32>) -> PBRData {\n var L: vec3<f32> = normalize(position - fragPos); // Light Vector\n var H: vec3<f32> = normalize(L + V); // Halfway Vector\n var dist = distance(position, fragPos);\n\n var alpha = roughness*roughness;\n var k: f32 = alpha*alpha / 2; // could also be pow(alpha + 1.0, 2) / 8\n\n var D: f32 = trGGX(N, H, alpha); // Distribution\n // var F: f32 = schlickFresnelIOR(V, N, ior, k); // Fresnel\n var G: f32 = smithSurfaceRoughness(N, V, L, k); // Geometry\n\n var brdf: f32 = cookTorrance(D, 1, G, N, V, L); \n var incoming: vec3<f32> = color * (1. 
/ (dist*dist));\n var angle: f32 = mdot(L, N);\n angle = pow(angle, 1.5); // Smoothing factor makes it less accurate, but reduces ugly \"seams\" bewteen light sources\n\n var specular: vec3<f32> = brdf*incoming*angle;\n var diffuse: vec3<f32> = incoming*fujiiOrenNayar(base, roughness, N, L, V);\n\n // Stores the specular and diffuse separately to allow for finer post processing\n // Could also be done (propably more properly) with a struct\n var out = PBRData(diffuse, specular);\n \n return out; // Returns angle along with color of light so the final color can be multiplied by angle as well (creates black areas)\n}\n\n// For a reason unknown to me, spheres dont seem to behave propperly with head-on spot lights\nfn calcSpotLight(N: vec3<f32>, V: vec3<f32>, fragPos: vec3<f32>, ior: f32, roughness: f32, metallic: f32, position: vec3<f32>, direction: vec3<f32>, cones: vec2<f32>, color: vec3<f32>, base: vec3<f32>) -> PBRData {\n var L: vec3<f32> = normalize(position - fragPos);\n var H: vec3<f32> = normalize(L + V); // Halfway Vector\n var dist = distance(position, fragPos);\n\n var alpha = roughness*roughness;\n var k: f32 = alpha*alpha / 2; // could also be pow(alpha + 1.0, 2) / 8\n\n var D: f32 = trGGX(N, H, alpha); // Distribution\n // var F: f32 = schlickFresnelIOR(V, N, ior, k); // Fresnel\n var G: f32 = smithSurfaceRoughness(N, V, L, k); // Geometry\n\n var brdf: f32 = cookTorrance(D, 1, G, N, V, L); \n \n // Cones.x is the inner phi and cones.y is the outer phi\n var theta: f32 = mdot(normalize(direction), L);\n var epsilon: f32 = cones.x - cones.y;\n var intensity: f32 = (theta - cones.y) / epsilon;\n intensity = clamp(intensity, 0.0, 1.0);\n intensity /= dist*dist;\n\n var incoming: vec3<f32> = color * intensity;\n\n var angle: f32 = mdot(L, N);\n angle = pow(angle, 1.5); // Smoothing factor makes it less accurate, but reduces ugly \"seams\" bewteen light sources\n\n var specular: vec3<f32> = brdf*incoming*angle;\n var diffuse: vec3<f32> = 
incoming*fujiiOrenNayar(base, roughness, N, L, V);\n\n // Stores the specular and diffuse separately to allow for finer post processing\n // Could also be done (propably more properly) with a struct\n var out = PBRData(diffuse, specular);\n \n return out; // Returns angle along with color of light so the final color can be multiplied by angle as well (creates black areas)\n}\n\n// Environment mapping stuff\n// Takes in a vector and converts it to an equivalent coordinate in a rectilinear texture. Should be replaced with cubemaps at some point\nfn vecToRectCoord(dir: vec3<f32>) -> vec2<f32> {\n var tau: f32 = 6.28318530718;\n var out: vec2<f32> = vec2<f32>(0.);\n\n out.x = atan2(dir.z, dir.x) / tau;\n out.x += 0.5;\n\n out.y = (dir.y * .5) + .5;\n\n return out;\n}\n\n//VTK::Renderer::Dec\n\n//VTK::Color::Dec\n\n//VTK::TCoord::Dec\n\n// optional surface normal declaration\n//VTK::Normal::Dec\n\n//VTK::Select::Dec\n\n//VTK::RenderEncoder::Dec\n\n//VTK::Mapper::Dec\n\n//VTK::IOStructs::Dec\n\n@fragment\nfn main(\n//VTK::IOStructs::Input\n)\n//VTK::IOStructs::Output\n{\n var output : fragmentOutput;\n\n // Temporary ambient, diffuse, and opacity\n var ambientColor: vec4<f32> = mapperUBO.AmbientColor;\n var diffuseColor: vec4<f32> = mapperUBO.DiffuseColor;\n var opacity: f32 = mapperUBO.Opacity;\n\n // This should be declared somewhere else\n var _diffuseMap: vec4<f32> = vec4<f32>(1);\n var _roughnessMap: vec4<f32> = vec4<f32>(1);\n var _metallicMap: vec4<f32> = vec4<f32>(1);\n var _normalMap: vec4<f32> = vec4<f32>(0, 0, 1, 0); // normal map was setting off the normal vector detection in fragment\n var _ambientOcclusionMap: vec4<f32> = vec4<f32>(0);\n var _emissionMap: vec4<f32> = vec4<f32>(0);\n\n //VTK::Color::Impl\n\n //VTK::TCoord::Impl\n\n //VTK::Normal::Impl\n\n var computedColor: vec4<f32> = vec4<f32>(diffuseColor.rgb, 1.);\n\n //VTK::Light::Impl\n\n //VTK::Select::Impl\n\n if (computedColor.a == 0.0) { discard; };\n\n //VTK::Position::Impl\n\n 
//VTK::RenderEncoder::Impl\n\n return output;\n}\n";
21
+ var vtkWebGPUPolyDataFS = "\nstruct PBRData {\n diffuse: vec3<f32>,\n specular: vec3<f32>,\n}\n\n// Dot product with the max already in it\nfn mdot(a: vec3<f32>, b: vec3<f32>) -> f32 {\n return max(0.0, dot(a, b));\n}\n// Dot product with a max in it that does not allow for negative values\n// Physically based rendering is accurate as long as normals are accurate,\n// however this is pretty often not the case. In order to prevent negative\n// values from ruining light calculations and creating zones of zero light,\n// this remapping is used, which smoothly clamps the dot product between\n// zero and one while still maintaining a good amount of accuracy.\nfn cdot(a: vec3<f32>, b: vec3<f32>) -> f32 {\n var d: f32 = max(0.0, dot(a, b));\n d = pow((d + 1) / 2.0, 2.6);\n return d;\n}\n\n// Lambertian diffuse model\nfn lambertDiffuse(base: vec3<f32>, N: vec3<f32>, L: vec3<f32>) -> vec3<f32> {\n var pi: f32 = 3.14159265359; \n var NdotL: f32 = mdot(N, L);\n NdotL = pow(NdotL, 1.5);\n return (base/pi)*NdotL;\n}\n\n// Yasuhiro Fujii improvement on the Oren-Nayar model\n// https://mimosa-pudica.net/improved-oren-nayar.html\n// p is surface color, o is roughness\nfn fujiiOrenNayar(p: vec3<f32>, o: f32, N: vec3<f32>, L: vec3<f32>, V: vec3<f32>) -> vec3<f32> {\n var invpi: f32 = 0.31830988618; // 1/pi\n\n var o2 = o*o;\n var NdotL: f32 = mdot(N, L);\n NdotL = pow(NdotL, 1.5); // Less physically accurate, but hides the \"seams\" between lights better\n\n var NdotV: f32 = mdot(N, V);\n var LdotV: f32 = mdot(L, V);\n\n var s: f32 = LdotV - NdotL*NdotV;\n var t: f32 = mix(1, max(NdotL, NdotV), step(0, s)); // Mix with step is the equivalent of an if statement\n var A: vec3<f32> = 0.5*(o2 / (o2 + 0.33)) + 0.17*p*(o2 / (o2 + 0.13));\n A = invpi*(1 - A);\n var B: f32 = 0.45*(o2 / (o2 + 0.09));\n B = invpi*B;\n\n return p*NdotL*(A + B*(s/t));\n}\n\n// Fresnel portion of BRDF (IOR only, simplified)\nfn schlickFresnelIOR(V: vec3<f32>, N: vec3<f32>, ior: f32, k: f32) -> f32 {\n var 
NdotV: f32 = mdot(V, N);\n var F0: f32 = (pow((ior - 1.0), 2) + k*k) / (pow((ior + 1.0), 2) + k*k); // This takes into account the roughness, which the other one does not\n return F0 + (1 - F0) * pow((1-NdotV), 5); \n}\n\n// Fresnel portion of BRDF (Color ior, better)\nfn schlickFresnelRGB(V: vec3<f32>, N: vec3<f32>, F0: vec3<f32>) -> vec3<f32> {\n var NdotV: f32 = mdot(V, N);\n return F0 + (1 - F0) * pow((1-NdotV), 5); \n}\n\n// Normal portion of BRDF\n// https://learnopengl.com/PBR/Theory\n// Trowbridge-Reitz GGX functions: normal, halfway, roughness^2\nfn trGGX(N: vec3<f32>, H: vec3<f32>, a: f32) -> f32 {\n var pi: f32 = 3.14159265359; \n\n var a2: f32 = a*a;\n var NdotH = mdot(N, H);\n var NdotH2 = NdotH*NdotH;\n \n var denom: f32 = NdotH2 * (a2 - 1.0) + 1.0;\n\n return a2 / max((pi*denom*denom), 0.000001);\n}\n\n// A VERY bad approximation of anisotropy. Real anisotropic calculations require tangent and bitangent\nfn anisotrophicTrGGX(N: vec3<f32>, H: vec3<f32>, O: vec3<f32>, s: f32, a: f32) -> f32 {\n var Op: vec3<f32> = (rendererUBO.WCVCNormals * vec4<f32>(normalize(O) * s, 0.)).xyz;\n\n var ggx1: f32 = trGGX(N + Op*s, H, a);\n var ggx2: f32 = trGGX(N - Op*s, H, a);\n return (0.5 * ggx1 + 0.5 * ggx2);\n}\n\n// Geometry portion of BRDF\nfn schlickGGX(N: vec3<f32>, X: vec3<f32>, k: f32) -> f32 {\n var NdotX = cdot(N, X);\n return NdotX / max(0.000001, (NdotX*(1-k) + k));\n}\n\nfn smithSurfaceRoughness(N: vec3<f32>, V: vec3<f32>, L: vec3<f32>, k: f32) -> f32 {\n var ggx1: f32 = min(1, schlickGGX(N, V, k));\n var ggx2: f32 = min(1, schlickGGX(N, L, k));\n return ggx1*ggx2;\n}\n\n// BRDF Combination\nfn cookTorrance(D: f32, F: f32, G: f32, N: vec3<f32>, V: vec3<f32>, L: vec3<f32>) -> f32 {\n var num: f32 = D*F*G;\n var denom: f32 = 4*cdot(V, N)*cdot(L, N);\n\n return num / max(denom, 0.000001);\n}\n\n// Different lighting calculations for different light sources\nfn calcDirectionalLight(N: vec3<f32>, V: vec3<f32>, ior: f32, roughness: f32, metallic: f32, 
direction: vec3<f32>, color: vec3<f32>, base: vec3<f32>) -> PBRData { \n var L: vec3<f32> = normalize(direction); // Light Vector\n var H: vec3<f32> = normalize(L + V); // Halfway Vector\n\n var alpha = roughness*roughness;\n var k: f32 = alpha*alpha / 2;\n\n var D: f32 = trGGX(N, H, alpha); // Distribution\n // var F: f32 = schlickFresnelIOR(V, N, ior, k); // Fresnel\n var G: f32 = smithSurfaceRoughness(N, V, L, k); // Geometry\n\n var brdf: f32 = cookTorrance(D, 1, G, N, V, L); // Fresnel term is replaced with 1 because it is added later\n var incoming: vec3<f32> = color;\n var angle: f32 = mdot(L, N);\n angle = pow(angle, 1.5);\n\n var specular: vec3<f32> = brdf*incoming*angle;\n // Oren-Nayar gives a clay-like effect when fully rough which some people may not want, so it might be better to give a separate\n // control property for the diffuse vs specular roughness\n var diffuse: vec3<f32> = incoming*fujiiOrenNayar(base, roughness, N, L, V); \n // Stores the specular and diffuse separately to allow for finer post processing\n var out = PBRData(diffuse, specular);\n \n return out; // Returns angle along with color of light so the final color can be multiplied by angle as well (creates black areas)\n}\n\n// TODO: find some way to reduce the number of arguments going in here\nfn calcPointLight(N: vec3<f32>, V: vec3<f32>, fragPos: vec3<f32>, ior: f32, roughness: f32, metallic: f32, position: vec3<f32>, color: vec3<f32>, base: vec3<f32>) -> PBRData {\n var L: vec3<f32> = normalize(position - fragPos); // Light Vector\n var H: vec3<f32> = normalize(L + V); // Halfway Vector\n var dist = distance(position, fragPos);\n\n var alpha = roughness*roughness;\n var k: f32 = alpha*alpha / 2; // could also be pow(alpha + 1.0, 2) / 8\n\n var D: f32 = trGGX(N, H, alpha); // Distribution\n // var F: f32 = schlickFresnelIOR(V, N, ior, k); // Fresnel\n var G: f32 = smithSurfaceRoughness(N, V, L, k); // Geometry\n\n var brdf: f32 = cookTorrance(D, 1, G, N, V, L); \n var incoming: 
vec3<f32> = color * (1. / (dist*dist));\n var angle: f32 = mdot(L, N);\n angle = pow(angle, 1.5); // Smoothing factor makes it less accurate, but reduces ugly \"seams\" bewteen light sources\n\n var specular: vec3<f32> = brdf*incoming*angle;\n var diffuse: vec3<f32> = incoming*fujiiOrenNayar(base, roughness, N, L, V);\n\n // Stores the specular and diffuse separately to allow for finer post processing\n // Could also be done (propably more properly) with a struct\n var out = PBRData(diffuse, specular);\n \n return out; // Returns angle along with color of light so the final color can be multiplied by angle as well (creates black areas)\n}\n\n// For a reason unknown to me, spheres dont seem to behave propperly with head-on spot lights\nfn calcSpotLight(N: vec3<f32>, V: vec3<f32>, fragPos: vec3<f32>, ior: f32, roughness: f32, metallic: f32, position: vec3<f32>, direction: vec3<f32>, cones: vec2<f32>, color: vec3<f32>, base: vec3<f32>) -> PBRData {\n var L: vec3<f32> = normalize(position - fragPos);\n var H: vec3<f32> = normalize(L + V); // Halfway Vector\n var dist = distance(position, fragPos);\n\n var alpha = roughness*roughness;\n var k: f32 = alpha*alpha / 2; // could also be pow(alpha + 1.0, 2) / 8\n\n var D: f32 = trGGX(N, H, alpha); // Distribution\n // var F: f32 = schlickFresnelIOR(V, N, ior, k); // Fresnel\n var G: f32 = smithSurfaceRoughness(N, V, L, k); // Geometry\n\n var brdf: f32 = cookTorrance(D, 1, G, N, V, L); \n \n // Cones.x is the inner phi and cones.y is the outer phi\n var theta: f32 = mdot(normalize(direction), L);\n var epsilon: f32 = cones.x - cones.y;\n var intensity: f32 = (theta - cones.y) / epsilon;\n intensity = clamp(intensity, 0.0, 1.0);\n intensity /= dist*dist;\n\n var incoming: vec3<f32> = color * intensity;\n\n var angle: f32 = mdot(L, N);\n angle = pow(angle, 1.5); // Smoothing factor makes it less accurate, but reduces ugly \"seams\" bewteen light sources\n\n var specular: vec3<f32> = brdf*incoming*angle;\n var diffuse: 
vec3<f32> = incoming*fujiiOrenNayar(base, roughness, N, L, V);\n\n // Stores the specular and diffuse separately to allow for finer post processing\n // Could also be done (propably more properly) with a struct\n var out = PBRData(diffuse, specular);\n \n return out; // Returns angle along with color of light so the final color can be multiplied by angle as well (creates black areas)\n}\n\n// Environment mapping stuff\n// Takes in a vector and converts it to an equivalent coordinate in a rectilinear texture. Should be replaced with cubemaps at some point\nfn vecToRectCoord(dir: vec3<f32>) -> vec2<f32> {\n var tau: f32 = 6.28318530718;\n var pi: f32 = 3.14159265359;\n var out: vec2<f32> = vec2<f32>(0.0);\n\n out.x = atan2(dir.z, dir.x) / tau;\n out.x += 0.5;\n\n var phix: f32 = length(vec2(dir.x, dir.z));\n out.y = atan2(dir.y, phix) / pi + 0.5;\n\n return out;\n}\n\n//VTK::Renderer::Dec\n\n//VTK::Color::Dec\n\n//VTK::TCoord::Dec\n\n// optional surface normal declaration\n//VTK::Normal::Dec\n\n//VTK::Select::Dec\n\n//VTK::RenderEncoder::Dec\n\n//VTK::Mapper::Dec\n\n//VTK::IOStructs::Dec\n\n@fragment\nfn main(\n//VTK::IOStructs::Input\n)\n//VTK::IOStructs::Output\n{\n var output : fragmentOutput;\n\n // Temporary ambient, diffuse, and opacity\n var ambientColor: vec4<f32> = mapperUBO.AmbientColor;\n var diffuseColor: vec4<f32> = mapperUBO.DiffuseColor;\n var opacity: f32 = mapperUBO.Opacity;\n\n // This should be declared somewhere else\n var _diffuseMap: vec4<f32> = vec4<f32>(1);\n var _roughnessMap: vec4<f32> = vec4<f32>(1);\n var _metallicMap: vec4<f32> = vec4<f32>(1);\n var _normalMap: vec4<f32> = vec4<f32>(0, 0, 1, 0); // normal map was setting off the normal vector detection in fragment\n var _ambientOcclusionMap: vec4<f32> = vec4<f32>(1);\n var _emissionMap: vec4<f32> = vec4<f32>(0);\n\n //VTK::Color::Impl\n\n //VTK::TCoord::Impl\n\n //VTK::Normal::Impl\n\n var computedColor: vec4<f32> = vec4<f32>(diffuseColor.rgb, 1.0);\n\n //VTK::Light::Impl\n\n 
//VTK::Select::Impl\n\n if (computedColor.a == 0.0) { discard; };\n\n //VTK::Position::Impl\n\n //VTK::RenderEncoder::Impl\n\n return output;\n}\n";
22
22
 
23
23
  function isEdges(hash) {
24
24
  // edge pipelines have "edge" in them
@@ -209,7 +209,7 @@ function vtkWebGPUCellArrayMapper(publicAPI, model) {
209
209
  code = fDesc.getCode();
210
210
 
211
211
  if (actor.getProperty().getNormalTexture()) {
212
- code = vtkWebGPUShaderCache.substitute(code, '//VTK::Normal::Impl', [' var normal: vec3<f32> = input.normalVC;', ' if (!input.frontFacing) { normal = -normal; }', ' var tangent: vec3<f32> = input.tangentVC;', ' var bitangent: vec3<f32> = input.bitangentVC;', ' var TCVCMatrix: mat3x3<f32> = mat3x3<f32>(', ' tangent.x, bitangent.x, normal.x,', ' tangent.y, bitangent.y, normal.y,', ' tangent.z, bitangent.z, normal.z,', ' );', ' normal = TCVCMatrix * (_normalMap.xyz * 2 - 1);', ' normal = mix(input.normalVC, normal, mapperUBO.NormalStrength);', ' normal = normalize(normal);']).result;
212
+ code = vtkWebGPUShaderCache.substitute(code, '//VTK::Normal::Impl', [' var normal: vec3<f32> = input.normalVC;', ' if (!input.frontFacing) { normal = -normal; }', ' var tangent: vec3<f32> = input.tangentVC;', ' var bitangent: vec3<f32> = input.bitangentVC;', ' var TCVCMatrix: mat3x3<f32> = mat3x3<f32>(', ' tangent.x, bitangent.x, normal.x,', ' tangent.y, bitangent.y, normal.y,', ' tangent.z, bitangent.z, normal.z,', ' );', ' var mappedNormal: vec3<f32> = TCVCMatrix * (_normalMap.xyz * 2 - 1);', ' normal = mix(normal, mappedNormal, mapperUBO.NormalStrength);', ' normal = normalize(normal);']).result;
213
213
  } else {
214
214
  code = vtkWebGPUShaderCache.substitute(code, '//VTK::Normal::Impl', [' var normal: vec3<f32> = input.normalVC;', ' if (!input.frontFacing) { normal = -normal; }', ' normal = normalize(normal);']).result;
215
215
  }
@@ -225,18 +225,30 @@ function vtkWebGPUCellArrayMapper(publicAPI, model) {
225
225
  if (hash.includes('sel')) return;
226
226
  var vDesc = pipeline.getShaderDescription('vertex');
227
227
  if (!vDesc.hasOutput('vertexVC')) vDesc.addOutput('vec4<f32>', 'vertexVC');
228
+ var renderer = model.WebGPURenderer.getRenderable();
228
229
  var fDesc = pipeline.getShaderDescription('fragment');
229
230
  var code = fDesc.getCode(); // Code that runs if the fragment shader includes normals
230
231
 
231
232
  if (code.includes('var normal:') && model.useRendererMatrix && !isEdges(hash) && !model.is2D && !hash.includes('sel')) {
232
- code = vtkWebGPUShaderCache.substitute(code, '//VTK::Light::Impl', [// Constants
233
+ var _renderer$getEnvironm;
234
+
235
+ var lightingCode = [// Constants
233
236
  ' var pi: f32 = 3.14159265359;', // Vectors needed for light calculations
234
237
  ' var fragPos: vec3<f32> = vec3<f32>(input.vertexVC.xyz);', ' var V: vec3<f32> = mix(normalize(-fragPos), vec3<f32>(0, 0, 1), f32(rendererUBO.cameraParallel)); // View Vector', // Values needed for light calculations
235
238
  ' var baseColor: vec3<f32> = _diffuseMap.rgb * diffuseColor.rgb;', ' var roughness: f32 = max(0.000001, mapperUBO.Roughness * _roughnessMap.r);', // Need to have a different way of sampling greyscale values aside from .r
236
239
  ' var metallic: f32 = mapperUBO.Metallic * _metallicMap.r;', ' var alpha: f32 = roughness*roughness;', ' var ior: f32 = mapperUBO.BaseIOR;', ' var k: f32 = alpha*alpha / 2;', // Split diffuse and specular components
237
240
  ' var diffuse: vec3<f32> = vec3<f32>(0.);', ' var specular: vec3<f32> = vec3<f32>(0.);', ' var emission: vec3<f32> = _emissionMap.rgb * mapperUBO.Emission;', // Summing diffuse and specular components of directional lights
238
241
  ' {', ' var i: i32 = 0;', ' loop {', ' if !(i < rendererUBO.LightCount) { break; }', ' switch (i32(rendererLightSSBO.values[i].LightData.x)) {', ' // Point Light', ' case 0 {', ' var color: vec3<f32> = rendererLightSSBO.values[i].LightColor.rgb * rendererLightSSBO.values[i].LightColor.w;', ' var pos: vec3<f32> = (rendererLightSSBO.values[i].LightPos).xyz;', ' var calculated: PBRData = calcPointLight(normal, V, fragPos, ior, roughness, metallic, pos, color, baseColor);', ' diffuse += max(vec3<f32>(0), calculated.diffuse);', ' specular += max(vec3<f32>(0), calculated.specular);', ' }', ' // Directional light', ' case 1 {', ' var dir: vec3<f32> = (rendererUBO.WCVCNormals * vec4<f32>(normalize(rendererLightSSBO.values[i].LightDir.xyz), 0.)).xyz;', ' dir = normalize(dir);', ' var color: vec3<f32> = rendererLightSSBO.values[i].LightColor.rgb * rendererLightSSBO.values[i].LightColor.w;', ' var calculated: PBRData = calcDirectionalLight(normal, V, ior, roughness, metallic, dir, color, baseColor); // diffuseColor.rgb needs to be fixed with a more dynamic diffuse color', ' diffuse += max(vec3<f32>(0), calculated.diffuse);', ' specular += max(vec3<f32>(0), calculated.specular);', ' }', ' // Spot Light', ' case 2 {', ' var color: vec3<f32> = rendererLightSSBO.values[i].LightColor.rgb * rendererLightSSBO.values[i].LightColor.w;', ' var pos: vec3<f32> = (rendererLightSSBO.values[i].LightPos).xyz;', ' var dir: vec3<f32> = (rendererUBO.WCVCNormals * vec4<f32>(normalize(rendererLightSSBO.values[i].LightDir.xyz), 0.)).xyz;', ' dir = normalize(dir);', ' var cones: vec2<f32> = vec2<f32>(rendererLightSSBO.values[i].LightData.y, rendererLightSSBO.values[i].LightData.z);', ' var calculated: PBRData = calcSpotLight(normal, V, fragPos, ior, roughness, metallic, pos, dir, cones, color, baseColor);', ' diffuse += max(vec3<f32>(0), calculated.diffuse);', ' specular += max(vec3<f32>(0), calculated.specular);', ' }', ' default { continue; }', ' }', ' continuing { i++; }', ' }', ' }', // Final 
variables for combining specular and diffuse
239
- ' var fresnel: f32 = schlickFresnelIOR(V, normal, ior, k); // Fresnel', ' fresnel = min(1, fresnel);', ' // This could be controlled with its own variable (that isnt base color) for better artistic control', ' var fresnelMetallic: vec3<f32> = schlickFresnelRGB(V, normal, baseColor); // Fresnel for metal, takes color into account', ' var kS: vec3<f32> = mix(vec3<f32>(fresnel), fresnelMetallic, metallic);', ' kS = min(vec3<f32>(1), kS);', ' var kD: vec3<f32> = (1.0 - kS) * (1.0 - metallic);', ' var PBR: vec3<f32> = mapperUBO.DiffuseIntensity*kD*diffuse + kS*specular;', ' PBR += emission;', ' computedColor = vec4<f32>(PBR, mapperUBO.Opacity);']).result;
242
+ ' var fresnel: f32 = schlickFresnelIOR(V, normal, ior, k); // Fresnel', ' fresnel = min(1, fresnel);', ' // This could be controlled with its own variable (that isnt base color) for better artistic control', ' var fresnelMetallic: vec3<f32> = schlickFresnelRGB(V, normal, baseColor); // Fresnel for metal, takes color into account', ' var kS: vec3<f32> = mix(vec3<f32>(fresnel), fresnelMetallic, metallic);', ' kS = min(vec3<f32>(1), kS);', ' var kD: vec3<f32> = (1.0 - kS) * (1.0 - metallic);', ' var PBR: vec3<f32> = mapperUBO.DiffuseIntensity*kD*diffuse + kS*specular;', ' PBR += emission;', ' computedColor = vec4<f32>(PBR, mapperUBO.Opacity);'];
243
+
244
+ if ((_renderer$getEnvironm = renderer.getEnvironmentTexture()) !== null && _renderer$getEnvironm !== void 0 && _renderer$getEnvironm.getImageLoaded()) {
245
+ lightingCode.push(' // To get diffuse IBL, the texture is sampled with normals in worldspace', ' var diffuseIBLCoords: vec3<f32> = (transpose(rendererUBO.WCVCNormals) * vec4<f32>(normal, 1.)).xyz;', ' var diffuseCoords: vec2<f32> = vecToRectCoord(diffuseIBLCoords);', ' // To get specular IBL, the texture is sampled as the worldspace reflection between the normal and view vectors', ' // Reflections are first calculated in viewspace, then converted to worldspace to sample the environment', ' var VreflN: vec3<f32> = normalize(reflect(-V, normal));', ' var reflectionIBLCoords = (transpose(rendererUBO.WCVCNormals) * vec4<f32>(VreflN, 1.)).xyz;', ' var specularCoords: vec2<f32> = vecToRectCoord(reflectionIBLCoords);', ' var diffuseIBL = textureSampleLevel(EnvironmentTexture, EnvironmentTextureSampler, diffuseCoords, rendererUBO.MaxEnvironmentMipLevel);', // Level multiplier should be set by UBO
246
+ ' var level = roughness * rendererUBO.MaxEnvironmentMipLevel;', ' var specularIBL = textureSampleLevel(EnvironmentTexture, EnvironmentTextureSampler, specularCoords, level);', // Manual mip smoothing since not all formats support smooth level sampling
247
+ ' var specularIBLContribution: vec3<f32> = specularIBL.rgb*rendererUBO.BackgroundSpecularStrength;', ' computedColor += vec4<f32>(specularIBLContribution*kS, 0);', ' var diffuseIBLContribution: vec3<f32> = diffuseIBL.rgb*rendererUBO.BackgroundDiffuseStrength;', ' diffuseIBLContribution *= baseColor * _ambientOcclusionMap.rgb;', // Multipy by baseColor may be changed
248
+ ' computedColor += vec4<f32>(diffuseIBLContribution*kD, 0);');
249
+ }
250
+
251
+ code = vtkWebGPUShaderCache.substitute(code, '//VTK::Light::Impl', lightingCode).result;
240
252
  fDesc.setCode(code); // If theres no normals, just set the specular color to be flat
241
253
  } else {
242
254
  code = vtkWebGPUShaderCache.substitute(code, '//VTK::Light::Impl', [' var diffuse: vec3<f32> = diffuseColor.rgb;', ' var specular: vec3<f32> = mapperUBO.SpecularColor.rgb * mapperUBO.SpecularColor.a;', ' computedColor = vec4<f32>(diffuse * _diffuseMap.rgb, mapperUBO.Opacity);']).result;
@@ -287,8 +299,7 @@ function vtkWebGPUCellArrayMapper(publicAPI, model) {
287
299
  var numComp = vtkWebGPUTypes.getNumberOfComponentsFromBufferFormat(tcoords.getArrayInformation()[0].format);
288
300
  var code = vDesc.getCode();
289
301
  vDesc.addOutput("vec".concat(numComp, "<f32>"), 'tcoordVS');
290
- code = vtkWebGPUShaderCache.substitute(code, '//VTK::TCoord::Impl', [' output.tcoordVS = tcoord;' // Ensure that UV coordinates are always between 0-1
291
- ]).result;
302
+ code = vtkWebGPUShaderCache.substitute(code, '//VTK::TCoord::Impl', [' output.tcoordVS = tcoord;']).result;
292
303
  vDesc.setCode(code);
293
304
  var fDesc = pipeline.getShaderDescription('fragment');
294
305
  code = fDesc.getCode();
@@ -568,7 +579,7 @@ function vtkWebGPUCellArrayMapper(publicAPI, model) {
568
579
  };
569
580
 
570
581
  publicAPI.updateTextures = function () {
571
- var _model$renderable$get2, _model$renderable2, _actor$getProperty$ge14, _actor$getProperty8, _actor$getProperty$ge15, _actor$getProperty9, _actor$getProperty$ge16, _actor$getProperty10, _actor$getProperty$ge17, _actor$getProperty11, _actor$getProperty$ge18, _actor$getProperty12, _actor$getProperty$ge19, _actor$getProperty13, _renderer$getBackgrou;
582
+ var _model$renderable$get2, _model$renderable2, _actor$getProperty$ge14, _actor$getProperty8, _actor$getProperty$ge15, _actor$getProperty9, _actor$getProperty$ge16, _actor$getProperty10, _actor$getProperty$ge17, _actor$getProperty11, _actor$getProperty$ge18, _actor$getProperty12, _actor$getProperty$ge19, _actor$getProperty13, _renderer$getEnvironm2;
572
583
 
573
584
  // we keep track of new and used textures so
574
585
  // that we can clean up any unused textures so we don't hold onto them
@@ -634,8 +645,8 @@ function vtkWebGPUCellArrayMapper(publicAPI, model) {
634
645
  textures.push(_pair7);
635
646
  }
636
647
 
637
- if ((_renderer$getBackgrou = renderer.getBackgroundTexture) !== null && _renderer$getBackgrou !== void 0 && _renderer$getBackgrou.call(renderer)) {
638
- var _pair8 = ['Background', renderer.getBackgroundTexture()];
648
+ if ((_renderer$getEnvironm2 = renderer.getEnvironmentTexture) !== null && _renderer$getEnvironm2 !== void 0 && _renderer$getEnvironm2.call(renderer)) {
649
+ var _pair8 = ['Environment', renderer.getEnvironmentTexture()];
639
650
  textures.push(_pair8);
640
651
  }
641
652
 
@@ -671,10 +682,29 @@ function vtkWebGPUCellArrayMapper(publicAPI, model) {
671
682
  model.textures.push(newTex);
672
683
  model.textureViews.push(tview);
673
684
  var interpolate = srcTexture.getInterpolate() ? 'linear' : 'nearest';
674
- tview.addSampler(model.device, {
675
- minFilter: interpolate,
676
- magFilter: interpolate
677
- });
685
+ var addressMode = null;
686
+ if (!addressMode && srcTexture.getEdgeClamp() && srcTexture.getRepeat()) addressMode = 'mirror-repeat';
687
+ if (!addressMode && srcTexture.getEdgeClamp()) addressMode = 'clamp-to-edge';
688
+ if (!addressMode && srcTexture.getRepeat()) addressMode = 'repeat';
689
+
690
+ if (textureName !== 'Environment') {
691
+ tview.addSampler(model.device, {
692
+ addressModeU: addressMode,
693
+ addressModeV: addressMode,
694
+ addressModeW: addressMode,
695
+ minFilter: interpolate,
696
+ magFilter: interpolate
697
+ });
698
+ } else {
699
+ tview.addSampler(model.device, {
700
+ addressModeU: 'repeat',
701
+ addressModeV: 'clamp-to-edge',
702
+ addressModeW: 'repeat',
703
+ minFilter: interpolate,
704
+ magFilter: interpolate,
705
+ mipmapFilter: 'linear'
706
+ });
707
+ }
678
708
  }
679
709
  }
680
710
  } // remove unused textures
@@ -12,8 +12,9 @@ function vtkWebGPUFullScreenQuad(publicAPI, model) {
12
12
  publicAPI.replaceShaderPosition = function (hash, pipeline, vertexInput) {
13
13
  var vDesc = pipeline.getShaderDescription('vertex');
14
14
  vDesc.addBuiltinOutput('vec4<f32>', '@builtin(position) Position');
15
+ vDesc.addOutput('vec4<f32>', 'vertexVC');
15
16
  var code = vDesc.getCode();
16
- code = vtkWebGPUShaderCache.substitute(code, '//VTK::Position::Impl', ['output.tcoordVS = vec2<f32>(vertexBC.x * 0.5 + 0.5, 1.0 - vertexBC.y * 0.5 - 0.5);', 'output.Position = vec4<f32>(vertexBC, 1.0);']).result;
17
+ code = vtkWebGPUShaderCache.substitute(code, '//VTK::Position::Impl', ['output.tcoordVS = vec2<f32>(vertexBC.x * 0.5 + 0.5, 1.0 - vertexBC.y * 0.5 - 0.5);', 'output.Position = vec4<f32>(vertexBC, 1.0);', 'output.vertexVC = vec4<f32>(vertexBC, 1);']).result;
17
18
  vDesc.setCode(code);
18
19
  };
19
20
 
@@ -9,7 +9,12 @@ import vtkWebGPUUniformBuffer from './UniformBuffer.js';
9
9
  import { registerOverride } from './ViewNodeFactory.js';
10
10
 
11
11
  var vtkDebugMacro = vtkDebugMacro$1;
12
- var clearFragTemplate = "\n//VTK::Renderer::Dec\n\n//VTK::Mapper::Dec\n\n//VTK::TCoord::Dec\n\n//VTK::RenderEncoder::Dec\n\n//VTK::IOStructs::Dec\n\n@fragment\nfn main(\n//VTK::IOStructs::Input\n)\n//VTK::IOStructs::Output\n{\n var output: fragmentOutput;\n\n var computedColor: vec4<f32> = mapperUBO.BackgroundColor;\n\n //VTK::RenderEncoder::Impl\n return output;\n}\n"; // Light type index gives either 0, 1, or 2 which indicates what type of light there is.
12
+ var clearFragColorTemplate = "\n//VTK::Renderer::Dec\n\n//VTK::Mapper::Dec\n\n//VTK::TCoord::Dec\n\n//VTK::RenderEncoder::Dec\n\n//VTK::IOStructs::Dec\n\n@fragment\nfn main(\n//VTK::IOStructs::Input\n)\n//VTK::IOStructs::Output\n{\n var output: fragmentOutput;\n\n var computedColor: vec4<f32> = mapperUBO.BackgroundColor;\n\n //VTK::RenderEncoder::Impl\n return output;\n}\n";
13
+ var clearFragTextureTemplate = "\nfn vecToRectCoord(dir: vec3<f32>) -> vec2<f32> {\n var tau: f32 = 6.28318530718;\n var pi: f32 = 3.14159265359;\n var out: vec2<f32> = vec2<f32>(0.0);\n\n out.x = atan2(dir.z, dir.x) / tau;\n out.x += 0.5;\n\n var phix: f32 = length(vec2(dir.x, dir.z));\n out.y = atan2(dir.y, phix) / pi + 0.5;\n\n return out;\n}\n\n//VTK::Renderer::Dec\n\n//VTK::Mapper::Dec\n\n//VTK::TCoord::Dec\n\n//VTK::RenderEncoder::Dec\n\n//VTK::IOStructs::Dec\n\n@fragment\nfn main(\n//VTK::IOStructs::Input\n)\n//VTK::IOStructs::Output\n{\n var output: fragmentOutput;\n\n var tcoord: vec4<f32> = vec4<f32>(input.vertexVC.xy, -1, 1);\n var V: vec4<f32> = normalize(mapperUBO.FSQMatrix * tcoord); // vec2<f32>((input.tcoordVS.x - 0.5) * 2, -(input.tcoordVS.y - 0.5) * 2);\n // textureSampleLevel gets rid of some ugly artifacts\n var background = textureSampleLevel(EnvironmentTexture, EnvironmentTextureSampler, vecToRectCoord(V.xyz), 0);\n var computedColor: vec4<f32> = vec4<f32>(background.rgb, 1);\n\n //VTK::RenderEncoder::Impl\n return output;\n}\n";
14
+
15
+ var _fsqClearMat4 = new Float64Array(16);
16
+
17
+ var _tNormalMat4 = new Float64Array(16); // Light type index gives either 0, 1, or 2 which indicates what type of light there is.
13
18
  // While technically, there are only spot and directional lights, within the CellArrayMapper
14
19
  // there is a third, positional light. It is technically just a variant of a spot light with
15
20
  // a cone angle of 90 or above, however certain calculations can be skipped if it is treated
@@ -19,6 +24,7 @@ var clearFragTemplate = "\n//VTK::Renderer::Dec\n\n//VTK::Mapper::Dec\n\n//VTK::
19
24
  // 1 -> directional light
20
25
  // 2 -> spot light
21
26
 
27
+
22
28
  function getLightTypeIndex(light) {
23
29
  if (light.getPositional()) {
24
30
  if (light.getConeAngle() >= 90) {
@@ -124,6 +130,8 @@ function vtkWebGPURenderer(publicAPI, model) {
124
130
  var utime = model.UBO.getSendTime();
125
131
 
126
132
  if (model._parent.getMTime() > utime || publicAPI.getMTime() > utime || model.camera.getMTime() > utime || model.renderable.getMTime() > utime) {
133
+ var _model$renderable$get;
134
+
127
135
  var keyMats = model.webgpuCamera.getKeyMatrices(publicAPI);
128
136
  model.UBO.setArray('WCVCMatrix', keyMats.wcvc);
129
137
  model.UBO.setArray('SCPCMatrix', keyMats.scpc);
@@ -132,6 +140,9 @@ function vtkWebGPURenderer(publicAPI, model) {
132
140
  model.UBO.setArray('VCPCMatrix', keyMats.vcpc);
133
141
  model.UBO.setArray('WCVCNormals', keyMats.normalMatrix);
134
142
  model.UBO.setValue('LightCount', model.renderable.getLights().length);
143
+ model.UBO.setValue('MaxEnvironmentMipLevel', (_model$renderable$get = model.renderable.getEnvironmentTexture()) === null || _model$renderable$get === void 0 ? void 0 : _model$renderable$get.getMipLevel());
144
+ model.UBO.setValue('BackgroundDiffuseStrength', model.renderable.getEnvironmentTextureDiffuseStrength());
145
+ model.UBO.setValue('BackgroundSpecularStrength', model.renderable.getEnvironmentTextureSpecularStrength());
135
146
  var tsize = publicAPI.getYInvertedTiledSizeAndOrigin();
136
147
  model.UBO.setArray('viewportSize', [tsize.usize, tsize.vsize]);
137
148
  model.UBO.setValue('cameraParallel', model.camera.getParallelProjection());
@@ -162,8 +173,7 @@ function vtkWebGPURenderer(publicAPI, model) {
162
173
 
163
174
  var viewCoordinatePosition = lights[_i].getPosition();
164
175
 
165
- vec3.transformMat4(viewCoordinatePosition, viewCoordinatePosition, keyMats.wcvc); // console.log(viewCoordinatePosition);
166
- // viewCoordinatePosition
176
+ vec3.transformMat4(viewCoordinatePosition, viewCoordinatePosition, keyMats.wcvc); // viewCoordinatePosition
167
177
 
168
178
  lightPosArray[offset] = viewCoordinatePosition[0];
169
179
  lightPosArray[offset + 1] = viewCoordinatePosition[1];
@@ -240,26 +250,85 @@ function vtkWebGPURenderer(publicAPI, model) {
240
250
  };
241
251
 
242
252
  publicAPI.clear = function () {
253
+ var _model$backgroundTex;
254
+
243
255
  if (model.renderable.getTransparent() || model.suppressClear) {
244
256
  return;
245
257
  }
246
258
 
247
- var device = model._parent.getDevice();
259
+ var device = model._parent.getDevice(); // Normal Solid Color
260
+
248
261
 
249
262
  if (!model.clearFSQ) {
250
263
  model.clearFSQ = vtkWebGPUFullScreenQuad.newInstance();
251
264
  model.clearFSQ.setDevice(device);
252
265
  model.clearFSQ.setPipelineHash('clearfsq');
253
- model.clearFSQ.setFragmentShaderTemplate(clearFragTemplate);
266
+ model.clearFSQ.setFragmentShaderTemplate(clearFragColorTemplate);
254
267
  var ubo = vtkWebGPUUniformBuffer.newInstance({
255
268
  label: 'mapperUBO'
256
269
  });
270
+ ubo.addEntry('FSQMatrix', 'mat4x4<f32>');
257
271
  ubo.addEntry('BackgroundColor', 'vec4<f32>');
258
272
  model.clearFSQ.setUBO(ubo);
273
+ model.backgroundTex = model.renderable.getEnvironmentTexture();
274
+ } // Textured Background
275
+
276
+
277
+ if (model.clearFSQ.getPipelineHash() !== 'clearfsqwithtexture' && model.renderable.getUseEnvironmentTextureAsBackground() && (_model$backgroundTex = model.backgroundTex) !== null && _model$backgroundTex !== void 0 && _model$backgroundTex.getImageLoaded()) {
278
+ model.clearFSQ.setFragmentShaderTemplate(clearFragTextureTemplate);
279
+
280
+ var _ubo = vtkWebGPUUniformBuffer.newInstance({
281
+ label: 'mapperUBO'
282
+ });
283
+
284
+ _ubo.addEntry('FSQMatrix', 'mat4x4<f32>');
285
+
286
+ _ubo.addEntry('BackgroundColor', 'vec4<f32>');
287
+
288
+ model.clearFSQ.setUBO(_ubo);
289
+ var environmentTextureHash = device.getTextureManager().getTextureForVTKTexture(model.backgroundTex);
290
+
291
+ if (environmentTextureHash.getReady()) {
292
+ var tview = environmentTextureHash.createView("EnvironmentTexture");
293
+ model.clearFSQ.setTextureViews([tview]);
294
+ model.backgroundTexLoaded = true;
295
+ var interpolate = model.backgroundTex.getInterpolate() ? 'linear' : 'nearest';
296
+ tview.addSampler(device, {
297
+ addressModeU: 'repeat',
298
+ addressModeV: 'clamp-to-edge',
299
+ addressModeW: 'repeat',
300
+ minFilter: interpolate,
301
+ magFilter: interpolate,
302
+ mipmapFilter: 'linear'
303
+ });
304
+ }
305
+
306
+ model.clearFSQ.setPipelineHash('clearfsqwithtexture');
307
+ } else if (model.clearFSQ.getPipelineHash() === 'clearfsqwithtexture' && !model.renderable.getUseEnvironmentTextureAsBackground()) {
308
+ // In case the mode is changed at runtime
309
+ model.clearFSQ = vtkWebGPUFullScreenQuad.newInstance();
310
+ model.clearFSQ.setDevice(device);
311
+ model.clearFSQ.setPipelineHash('clearfsq');
312
+ model.clearFSQ.setFragmentShaderTemplate(clearFragColorTemplate);
313
+
314
+ var _ubo2 = vtkWebGPUUniformBuffer.newInstance({
315
+ label: 'mapperUBO'
316
+ });
317
+
318
+ _ubo2.addEntry('FSQMatrix', 'mat4x4<f32>');
319
+
320
+ _ubo2.addEntry('BackgroundColor', 'vec4<f32>');
321
+
322
+ model.clearFSQ.setUBO(_ubo2);
259
323
  }
260
324
 
325
+ var keyMats = model.webgpuCamera.getKeyMatrices(publicAPI);
261
326
  var background = model.renderable.getBackgroundByReference();
262
327
  model.clearFSQ.getUBO().setArray('BackgroundColor', background);
328
+ mat4.transpose(_tNormalMat4, keyMats.normalMatrix);
329
+ mat4.mul(_fsqClearMat4, keyMats.scvc, keyMats.pcsc);
330
+ mat4.mul(_fsqClearMat4, _tNormalMat4, _fsqClearMat4);
331
+ model.clearFSQ.getUBO().setArray('FSQMatrix', _fsqClearMat4);
263
332
  model.clearFSQ.getUBO().sendIfNeeded(device);
264
333
  model.clearFSQ.prepareAndDraw(model.renderEncoder);
265
334
  };
@@ -395,6 +464,9 @@ function extend(publicAPI, model) {
395
464
  model.UBO.addEntry('WCVCNormals', 'mat4x4<f32>');
396
465
  model.UBO.addEntry('viewportSize', 'vec2<f32>');
397
466
  model.UBO.addEntry('LightCount', 'i32');
467
+ model.UBO.addEntry('MaxEnvironmentMipLevel', 'f32');
468
+ model.UBO.addEntry('BackgroundDiffuseStrength', 'f32');
469
+ model.UBO.addEntry('BackgroundSpecularStrength', 'f32');
398
470
  model.UBO.addEntry('cameraParallel', 'u32'); // SSBO (Light data)
399
471
 
400
472
  model.SSBO = vtkWebGPUStorageBuffer.newInstance({
@@ -12,8 +12,12 @@ function vtkWebGPUSampler(publicAPI, model) {
12
12
  publicAPI.create = function (device) {
13
13
  var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
14
14
  model.device = device;
15
+ model.options.addressModeU = options.addressModeU ? options.addressModeU : 'clamp-to-edge';
16
+ model.options.addressModeV = options.addressModeV ? options.addressModeV : 'clamp-to-edge';
17
+ model.options.addressModeW = options.addressModeW ? options.addressModeW : 'clamp-to-edge';
15
18
  model.options.magFilter = options.magFilter ? options.magFilter : 'nearest';
16
19
  model.options.minFilter = options.minFilter ? options.minFilter : 'nearest';
20
+ model.options.mipmapFilter = options.mipmapFilter ? options.mipmapFilter : 'nearest';
17
21
  model.options.label = model.label;
18
22
  model.handle = model.device.getHandle().createSampler(model.options);
19
23
  model.bindGroupTime.modified();
@@ -3,6 +3,7 @@ import HalfFloat from '../../Common/Core/HalfFloat.js';
3
3
  import vtkWebGPUBufferManager from './BufferManager.js';
4
4
  import vtkWebGPUTextureView from './TextureView.js';
5
5
  import vtkWebGPUTypes from './Types.js';
6
+ import vtkTexture from '../Core/Texture.js';
6
7
 
7
8
  var BufferUsage = vtkWebGPUBufferManager.BufferUsage; // ----------------------------------------------------------------------------
8
9
  // Global methods
@@ -22,6 +23,7 @@ function vtkWebGPUTexture(publicAPI, model) {
22
23
  model.depth = options.depth ? options.depth : 1;
23
24
  var dimension = model.depth === 1 ? '2d' : '3d';
24
25
  model.format = options.format ? options.format : 'rgba8unorm';
26
+ model.mipLevel = options.mipLevel ? options.mipLevel : 0;
25
27
  /* eslint-disable no-undef */
26
28
 
27
29
  /* eslint-disable no-bitwise */
@@ -37,7 +39,8 @@ function vtkWebGPUTexture(publicAPI, model) {
37
39
  // 'rgba8unorm',
38
40
  usage: model.usage,
39
41
  label: model.label,
40
- dimension: dimension
42
+ dimension: dimension,
43
+ mipLevelCount: model.mipLevel + 1
41
44
  });
42
45
  };
43
46
 
@@ -56,10 +59,11 @@ function vtkWebGPUTexture(publicAPI, model) {
56
59
  /* eslint-enable no-undef */
57
60
 
58
61
  /* eslint-enable no-bitwise */
59
- }; // set the data
60
-
62
+ };
61
63
 
62
64
  publicAPI.writeImageData = function (req) {
65
+ var nativeArray = [];
66
+
63
67
  if (req.canvas) {
64
68
  model.device.getHandle().queue.copyExternalImageToTexture({
65
69
  source: req.canvas,
@@ -84,36 +88,23 @@ function vtkWebGPUTexture(publicAPI, model) {
84
88
  var tDetails = vtkWebGPUTypes.getDetailsFromTextureFormat(model.format);
85
89
  var bufferBytesPerRow = model.width * tDetails.stride;
86
90
 
87
- if (req.nativeArray) {
88
- // create and write the buffer
89
- var buffRequest = {
90
- /* eslint-disable no-undef */
91
- usage: BufferUsage.Texture
92
- /* eslint-enable no-undef */
93
-
94
- };
95
-
96
- if (req.dataArray) {
97
- buffRequest.dataArray = req.dataArray;
98
- }
99
-
100
- buffRequest.nativeArray = req.nativeArray; // bytesPerRow must be a multiple of 256 so we might need to rebuild
91
+ var fixAll = function fixAll(arr, height, depth) {
92
+ // bytesPerRow must be a multiple of 256 so we might need to rebuild
101
93
  // the data here before passing to the buffer. e.g. if it is unorm8x4 then
102
94
  // we need to have width be a multiple of 64
103
-
104
- var inWidthInBytes = req.nativeArray.length / (model.height * model.depth) * req.nativeArray.BYTES_PER_ELEMENT; // is this a half float texture?
95
+ var inWidthInBytes = arr.length / (height * depth) * arr.BYTES_PER_ELEMENT; // is this a half float texture?
105
96
 
106
97
  var halfFloat = tDetails.elementSize === 2 && tDetails.sampleType === 'float'; // if we need to copy the data
107
98
 
108
99
  if (halfFloat || inWidthInBytes % 256) {
109
- var inArray = req.nativeArray;
100
+ var inArray = arr;
110
101
  var inWidth = inWidthInBytes / inArray.BYTES_PER_ELEMENT;
111
102
  var outBytesPerElement = tDetails.elementSize;
112
103
  var outWidthInBytes = 256 * Math.floor((inWidth * outBytesPerElement + 255) / 256);
113
104
  var outWidth = outWidthInBytes / outBytesPerElement;
114
- var outArray = macro.newTypedArray(halfFloat ? 'Uint16Array' : inArray.constructor.name, outWidth * model.height * model.depth);
105
+ var outArray = macro.newTypedArray(halfFloat ? 'Uint16Array' : inArray.constructor.name, outWidth * height * depth);
115
106
 
116
- for (var v = 0; v < model.height * model.depth; v++) {
107
+ for (var v = 0; v < height * depth; v++) {
117
108
  if (halfFloat) {
118
109
  for (var i = 0; i < inWidth; i++) {
119
110
  outArray[v * outWidth + i] = HalfFloat.toHalf(inArray[v * inWidth + i]);
@@ -123,12 +114,14 @@ function vtkWebGPUTexture(publicAPI, model) {
123
114
  }
124
115
  }
125
116
 
126
- buffRequest.nativeArray = outArray;
127
- bufferBytesPerRow = outWidthInBytes;
117
+ return [outArray, outWidthInBytes];
128
118
  }
129
119
 
130
- var buff = model.device.getBufferManager().getBuffer(buffRequest);
131
- model.buffer = buff;
120
+ return [arr, inWidthInBytes];
121
+ };
122
+
123
+ if (req.nativeArray) {
124
+ nativeArray = req.nativeArray;
132
125
  }
133
126
 
134
127
  if (req.image) {
@@ -139,35 +132,74 @@ function vtkWebGPUTexture(publicAPI, model) {
139
132
  ctx.translate(0, canvas.height);
140
133
  ctx.scale(1, -1);
141
134
  ctx.drawImage(req.image, 0, 0, req.image.width, req.image.height, 0, 0, canvas.width, canvas.height);
142
- var imageData = ctx.getImageData(0, 0, req.image.width, req.image.height); // create and write the buffer
135
+ var imageData = ctx.getImageData(0, 0, req.image.width, req.image.height);
136
+ nativeArray = imageData.data;
137
+ }
143
138
 
139
+ var cmdEnc = model.device.createCommandEncoder();
140
+
141
+ if (publicAPI.getDimensionality() !== 3) {
142
+ // Non-3D, supports mipmaps
143
+ var mips = vtkTexture.generateMipmaps(nativeArray, model.width, model.height, model.mipLevel);
144
+ var currentWidth = model.width;
145
+ var currentHeight = model.height;
146
+
147
+ for (var m = 0; m <= model.mipLevel; m++) {
148
+ var fix = fixAll(mips[m], currentHeight, 1);
149
+ bufferBytesPerRow = fix[1];
150
+ var buffRequest = {
151
+ dataArray: req.dataArray ? req.dataArray : null,
152
+ nativeArray: fix[0],
153
+
154
+ /* eslint-disable no-undef */
155
+ usage: BufferUsage.Texture
156
+ /* eslint-enable no-undef */
157
+
158
+ };
159
+ var buff = model.device.getBufferManager().getBuffer(buffRequest);
160
+ cmdEnc.copyBufferToTexture({
161
+ buffer: buff.getHandle(),
162
+ offset: 0,
163
+ bytesPerRow: bufferBytesPerRow,
164
+ rowsPerImage: currentHeight
165
+ }, {
166
+ texture: model.handle,
167
+ mipLevel: m
168
+ }, [currentWidth, currentHeight, 1]);
169
+ currentWidth /= 2;
170
+ currentHeight /= 2;
171
+ }
172
+
173
+ model.device.submitCommandEncoder(cmdEnc);
174
+ model.ready = true;
175
+ } else {
176
+ // 3D, no mipmaps
177
+ var _fix = fixAll(nativeArray, model.height, model.depth);
178
+
179
+ bufferBytesPerRow = _fix[1];
144
180
  var _buffRequest = {
145
- nativeArray: imageData.data,
181
+ dataArray: req.dataArray ? req.dataArray : null,
146
182
 
147
183
  /* eslint-disable no-undef */
148
- usage: BufferUsage.Texture,
149
-
184
+ usage: BufferUsage.Texture
150
185
  /* eslint-enable no-undef */
151
- format: 'unorm8x4'
186
+
152
187
  };
188
+ _buffRequest.nativeArray = _fix[0];
153
189
 
154
190
  var _buff = model.device.getBufferManager().getBuffer(_buffRequest);
155
191
 
156
- model.buffer = _buff;
157
- } // get a buffer for the image
158
-
159
-
160
- var cmdEnc = model.device.createCommandEncoder();
161
- cmdEnc.copyBufferToTexture({
162
- buffer: model.buffer.getHandle(),
163
- offset: 0,
164
- bytesPerRow: bufferBytesPerRow,
165
- rowsPerImage: model.height
166
- }, {
167
- texture: model.handle
168
- }, [model.width, model.height, model.depth]);
169
- model.device.submitCommandEncoder(cmdEnc);
170
- model.ready = true;
192
+ cmdEnc.copyBufferToTexture({
193
+ buffer: _buff.getHandle(),
194
+ offset: 0,
195
+ bytesPerRow: bufferBytesPerRow,
196
+ rowsPerImage: model.height
197
+ }, {
198
+ texture: model.handle
199
+ }, [model.width, model.height, model.depth]);
200
+ model.device.submitCommandEncoder(cmdEnc);
201
+ model.ready = true;
202
+ }
171
203
  }; // when data is pulled out of this texture what scale must be applied to
172
204
  // get back to the original source data. For formats such as r8unorm we
173
205
  // have to multiply by 255.0, for formats such as r16float it is 1.0
@@ -105,7 +105,8 @@ function vtkWebGPUTextureManager(publicAPI, model) {
105
105
  height: req.height,
106
106
  depth: req.depth,
107
107
  format: req.format,
108
- usage: req.usage
108
+ usage: req.usage,
109
+ mipLevel: req.mipLevel
109
110
  }); // fill the texture if we have data
110
111
 
111
112
  if (req.nativeArray || req.image || req.canvas) {
@@ -135,7 +136,7 @@ function vtkWebGPUTextureManager(publicAPI, model) {
135
136
 
136
137
  _fillRequest(treq);
137
138
 
138
- treq.hash = treq.time + treq.format;
139
+ treq.hash = treq.time + treq.format + treq.mipLevel;
139
140
  return model.device.getTextureManager().getTexture(treq);
140
141
  };
141
142
 
@@ -157,7 +158,8 @@ function vtkWebGPUTextureManager(publicAPI, model) {
157
158
 
158
159
  _fillRequest(treq);
159
160
 
160
- treq.hash = treq.time + treq.format;
161
+ treq.mipLevel = srcTexture.getMipLevel();
162
+ treq.hash = treq.time + treq.format + treq.mipLevel;
161
163
  return model.device.getTextureManager().getTexture(treq);
162
164
  };
163
165
  } // ----------------------------------------------------------------------------
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@kitware/vtk.js",
3
- "version": "25.3.0",
3
+ "version": "25.4.0",
4
4
  "description": "Visualization Toolkit for the Web",
5
5
  "keywords": [
6
6
  "3d",
@@ -96,7 +96,7 @@
96
96
  "kw-doc": "3.1.2",
97
97
  "lodash": "4.17.21",
98
98
  "magic-string": "0.26.2",
99
- "moment": "2.29.3",
99
+ "moment": "2.29.4",
100
100
  "patch-package": "6.4.7",
101
101
  "pixelmatch": "5.3.0",
102
102
  "postcss-loader": "6.2.1",