@vitessce/all 3.7.1 → 3.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{index-DqDDC59y.js → OrbitControls-Cz9DCZ2j.js} +270 -10111
- package/dist/{ReactNeuroglancer-CNzkD33k.js → ReactNeuroglancer-CbIsUGfp.js} +1 -1
- package/dist/{deflate-D7qntYPm.js → deflate-BZqpc-jk.js} +1 -1
- package/dist/{higlass-B5Yf-THY.js → higlass-DEBJRTUn.js} +1 -1
- package/dist/index-BVFjJIoQ.js +3547 -0
- package/dist/index-CpB0CdZl.js +9854 -0
- package/dist/{index-Ci947y1K.js → index-eNWork9t.js} +266 -451
- package/dist/index.js +8 -8
- package/dist/{jpeg-23ExE8ad.js → jpeg-Sl7J-6mO.js} +1 -1
- package/dist/{lerc-BPNFjya5.js → lerc-z_QbiC-f.js} +1 -1
- package/dist/{lzw-CR9bmrUA.js → lzw-C8NEd7Y4.js} +1 -1
- package/dist/{packbits-yEwcRYhH.js → packbits-BRJrVgsM.js} +1 -1
- package/dist/{raw-Day99mnL.js → raw-CRoXFKXe.js} +1 -1
- package/dist/{troika-three-text.esm-orFZa6y_.js → troika-three-text.esm-B0ld0YMM.js} +1 -1
- package/dist/{webimage-CmvCEz7r.js → webimage-C6WknL6k.js} +1 -1
- package/dist-tsc/base-plugins.d.ts.map +1 -1
- package/dist-tsc/base-plugins.js +1 -0
- package/package.json +34 -33
- package/src/base-plugins.ts +1 -0
package/dist/index-BVFjJIoQ.js (new file)
@@ -0,0 +1,3547 @@
+import { aw as log, ax as isEqual, ar as Data3DTexture, as as RedFormat, ay as UnsignedByteType, l as LinearFilter, az as RedIntegerFormat, aA as UnsignedIntType, m as NearestFilter, e as Vector3, V as Vector2, ag as Vector4, ae as UniformsUtils, aq as CoordinationType, aB as WebGLMultipleRenderTargets, aC as RGBAFormat, av as Scene, W as OrthographicCamera, ad as ShaderMaterial, z as Mesh, aD as PlaneGeometry, ao as jsxRuntimeExports, aE as GLSL3, am as BackSide } from "./index-eNWork9t.js";
+import { useRef, useState, useMemo, useEffect, useCallback } from "react";
+import { u as useThree, a as useFrame, O as OrbitControls, C as Canvas } from "./OrbitControls-Cz9DCZ2j.js";
+const LogLevel = {
+  INFO: "info",
+  WARN: "warn",
+  ERROR: "error",
+  DEBUG: "debug",
+  TRACE: "trace"
+  // default value
+};
+const OrderedLogLevels = [
+  LogLevel.TRACE,
+  LogLevel.DEBUG,
+  LogLevel.INFO,
+  LogLevel.WARN,
+  LogLevel.ERROR
+];
+function getLogLevel() {
+  return log.getLevel();
+}
+function atLeastLogLevel(someLevel) {
+  const currLevel = getLogLevel();
+  const numericTarget = OrderedLogLevels.indexOf(someLevel);
+  return currLevel <= numericTarget;
+}
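For orientation, a minimal sketch (not part of the bundle) of how this gating behaves, assuming `log` is a loglevel-style logger whose numeric levels run TRACE = 0 through ERROR = 4:

// Illustrative only -- assumes loglevel-style numeric levels
// (TRACE=0, DEBUG=1, INFO=2, WARN=3, ERROR=4).
log.setLevel("debug");           // current numeric level: 1
atLeastLogLevel(LogLevel.TRACE); // 1 <= 0 -> false
atLeastLogLevel(LogLevel.DEBUG); // 1 <= 1 -> true, so logWithColor$2 below prints
atLeastLogLevel(LogLevel.INFO);  // 1 <= 2 -> true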
+const BRICK_SIZE = 32;
+const BRICK_CACHE_SIZE_X = 64;
+const BRICK_CACHE_SIZE_Y = 64;
+const BRICK_CACHE_SIZE_Z = 4;
+const TOTAL_NUM_BRICKS = BRICK_CACHE_SIZE_X * BRICK_CACHE_SIZE_Y * BRICK_CACHE_SIZE_Z;
+const BRICK_CACHE_SIZE_VOXELS_X = BRICK_CACHE_SIZE_X * BRICK_SIZE;
+const BRICK_CACHE_SIZE_VOXELS_Y = BRICK_CACHE_SIZE_Y * BRICK_SIZE;
+const BRICK_CACHE_SIZE_VOXELS_Z = BRICK_CACHE_SIZE_Z * BRICK_SIZE;
+const INIT_STATUS = {
+  NOT_STARTED: "not_started",
+  IN_PROGRESS: "in_progress",
+  COMPLETE: "complete",
+  FAILED: "failed"
+};
+function logWithColor$2(message) {
+  if (atLeastLogLevel(LogLevel.DEBUG)) {
+    console.warn(`%cDM: ${message}`, "background: blue; color: white; padding: 2px; border-radius: 3px;");
+  }
+}
+function _resolutionStatsToShapes(multiResolutionStats) {
+  return multiResolutionStats.map((stats) => {
+    const { dims } = stats;
+    const shape = [
+      // Other spatial-accelerated code assumes TCZYX dimension order
+      dims.t,
+      // TODO: standardize lowercase/uppercase dim names at store-level
+      dims.c,
+      // TODO: handle case when dimension(s) are missing
+      dims.z,
+      dims.y,
+      dims.x
+    ];
+    return shape;
+  });
+}
+function _resolutionStatsToBrickLayout(multiResolutionStats) {
+  return multiResolutionStats.map((stats) => {
+    const shape = [
+      stats.depth,
+      // z
+      stats.height,
+      // y
+      stats.width
+      // x
+    ];
+    return [
+      Math.ceil((shape[0] || 1) / BRICK_SIZE),
+      Math.ceil((shape[1] || 1) / BRICK_SIZE),
+      Math.ceil((shape[2] || 1) / BRICK_SIZE)
+    ];
+  });
+}
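As a worked example (illustrative, not from the bundle), the brick layout for a level-0 volume of 795 × 1024 × 1024 voxels, the size referenced in the constructor comments further down, comes out as follows with 32-voxel bricks:

// Stats objects shaped the way _resolutionStatsToBrickLayout reads them
// (only depth/height/width are used); level 1 assumes 2x downsampling.
const exampleStats = [
  { depth: 795, height: 1024, width: 1024 }, // level 0
  { depth: 398, height: 512, width: 512 },   // level 1
];
_resolutionStatsToBrickLayout(exampleStats);
// -> [[25, 32, 32], [13, 16, 16]]
//    because Math.ceil(795 / 32) === 25, Math.ceil(1024 / 32) === 32, and so on.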
+function _initMRMCPT(zarrStoreBrickLayout, channelsZarrMappingsLength) {
+  log.debug("_initMRMCPT", zarrStoreBrickLayout, channelsZarrMappingsLength);
+  const PT = {
+    channelOffsets: [
+      [0, 0, 1],
+      [0, 1, 0],
+      [0, 1, 1],
+      [1, 0, 0],
+      [1, 0, 1],
+      [1, 1, 0],
+      [1, 1, 1]
+    ],
+    anchors: [],
+    offsets: [],
+    xExtent: 0,
+    // includes the offset inclusive
+    yExtent: 0,
+    // includes the offset inclusive
+    zExtent: 0,
+    // includes the offset inclusive
+    z0Extent: 0,
+    // l0 z extent
+    zTotal: 0
+    // original z extent plus the l0 z extent times the channel count
+  };
+  PT.xExtent = 1;
+  PT.yExtent = 1;
+  PT.zExtent = 1;
+  const l0z = zarrStoreBrickLayout[0][0];
+  PT.z0Extent = l0z;
+  PT.lowestDataRes = zarrStoreBrickLayout.length - 1;
+  for (let i = zarrStoreBrickLayout.length - 1; i > 0; i--) {
+    PT.anchors.push([
+      PT.xExtent,
+      PT.yExtent,
+      PT.zExtent
+    ]);
+    PT.xExtent += zarrStoreBrickLayout[i][2];
+    PT.yExtent += zarrStoreBrickLayout[i][1];
+    PT.zExtent += zarrStoreBrickLayout[i][0];
+  }
+  PT.anchors.push([0, 0, PT.zExtent]);
+  PT.anchors.reverse();
+  PT.zTotal = PT.zExtent + channelsZarrMappingsLength * l0z;
+  const brickCacheData = new Uint8Array(BRICK_CACHE_SIZE_VOXELS_X * BRICK_CACHE_SIZE_VOXELS_Y * BRICK_CACHE_SIZE_VOXELS_Z);
+  brickCacheData.fill(0);
+  const pageTableData = new Uint32Array(PT.xExtent * PT.yExtent * PT.zTotal);
+  pageTableData.fill(0);
+  const bcTHREE = new Data3DTexture(brickCacheData, BRICK_CACHE_SIZE_VOXELS_X, BRICK_CACHE_SIZE_VOXELS_Y, BRICK_CACHE_SIZE_VOXELS_Z);
+  bcTHREE.format = RedFormat;
+  bcTHREE.type = UnsignedByteType;
+  bcTHREE.internalFormat = "R8";
+  bcTHREE.minFilter = LinearFilter;
+  bcTHREE.magFilter = LinearFilter;
+  bcTHREE.generateMipmaps = false;
+  bcTHREE.needsUpdate = true;
+  const ptTHREE = new Data3DTexture(pageTableData, PT.xExtent, PT.yExtent, PT.zTotal);
+  ptTHREE.format = RedIntegerFormat;
+  ptTHREE.type = UnsignedIntType;
+  ptTHREE.internalFormat = "R32UI";
+  ptTHREE.minFilter = NearestFilter;
+  ptTHREE.magFilter = NearestFilter;
+  ptTHREE.generateMipmaps = false;
+  ptTHREE.needsUpdate = true;
+  log.debug("_initMRMCPT", PT, ptTHREE, bcTHREE);
+  return {
+    PT,
+    ptTHREE,
+    bcTHREE
+  };
+}
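To make the page-table geometry concrete, a sketch (illustrative only) of what `_initMRMCPT` computes for an assumed six-level brick layout matching the `[[25, 32, 32], [13, 16, 16], ..., [1, 1, 1]]` pattern in the constructor comments, with two mapped channels. The coarser levels are packed around a one-brick origin, and level 0 gets its own block of z-slices per channel appended beyond `zExtent`:

// Intermediate levels are assumed; note that calling this also allocates the
// full 2048 x 2048 x 128 (512 MiB) brick cache and the Uint32 page table.
const layout = [[25, 32, 32], [13, 16, 16], [7, 8, 8], [4, 4, 4], [2, 2, 2], [1, 1, 1]];
const { PT } = _initMRMCPT(layout, 2); // two mapped channels
// PT.xExtent === 32, PT.yExtent === 32, PT.zExtent === 28
// PT.z0Extent === 25, PT.zTotal === 28 + 2 * 25 === 78
// PT.anchors === [[0, 0, 28], [16, 16, 15], [8, 8, 8], [4, 4, 4], [2, 2, 2], [1, 1, 1]]
// Page table: 32 * 32 * 78 Uint32 entries (~312 KiB).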
+function _packPT(min, max, bcX, bcY, bcZ) {
+  const clamp7 = (v) => Math.max(0, Math.min(127, Math.floor(v / 2)));
+  return (1 << 31 | 1 << 30 | clamp7(min) << 23 | clamp7(max) << 16 | (bcX & 63) << 10 | (bcY & 63) << 4 | bcZ & 15) >>> 0;
+}
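The 32-bit page-table entry packs two flag bits, 7-bit min/max statistics (stored as value ÷ 2), and the brick-cache slot coordinates. A hypothetical inverse (not in the bundle; the flag-bit names are guesses based on how `_evictBrick` later writes bit 31 as 0):

// Hypothetical helper mirroring _packPT's bit layout.
function _unpackPT(ptVal) {
  return {
    resident: ptVal >>> 31 & 1,     // set by _packPT, written as 0 on eviction
    hasStats: ptVal >>> 30 & 1,
    min: (ptVal >>> 23 & 127) * 2,  // 7-bit, stored as value / 2
    max: (ptVal >>> 16 & 127) * 2,
    bcX: ptVal >>> 10 & 63,         // brick-cache slot coordinates
    bcY: ptVal >>> 4 & 63,
    bcZ: ptVal & 15,
  };
}
// _unpackPT(_packPT(10, 200, 5, 6, 3))
// -> { resident: 1, hasStats: 1, min: 10, max: 200, bcX: 5, bcY: 6, bcZ: 3 }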
+function _ptToZarr(ptx, pty, ptz, ptInfo) {
+  const { PT_zExtent, PT_z0Extent, PT_anchors } = ptInfo;
+  let channel = -1;
+  let resolution = -1;
+  let x = -1;
+  let y = -1;
+  let z = -1;
+  if (ptz >= PT_zExtent) {
+    resolution = 0;
+    x = ptx;
+    y = pty;
+    z = (ptz - PT_zExtent) % PT_z0Extent;
+    channel = Math.floor((ptz - PT_zExtent) / PT_z0Extent);
+  } else {
+    for (let i = 1; i < PT_anchors.length; i++) {
+      if (ptx < PT_anchors[i][0] && pty < PT_anchors[i][1] && ptz < PT_anchors[i][2]) ;
+      else {
+        resolution = i;
+        const channelMask = [0, 0, 0];
+        if (ptx >= PT_anchors[i][0]) {
+          channelMask[0] = 1;
+        }
+        if (pty >= PT_anchors[i][1]) {
+          channelMask[1] = 1;
+        }
+        if (ptz >= PT_anchors[i][2]) {
+          channelMask[2] = 1;
+        }
+        const binaryChannel = channelMask[0] << 2 | channelMask[1] << 1 | channelMask[2];
+        channel = Math.max(1, Math.min(7, binaryChannel)) - 1;
+        const thisOffset = channelMask.map((v, j) => v * PT_anchors[i][j]);
+        x = ptx - thisOffset[0];
+        y = pty - thisOffset[1];
+        z = ptz - thisOffset[2];
+        break;
+      }
+    }
+  }
+  return {
+    channel,
+    resolution,
+    x,
+    y,
+    z
+  };
+}
+function _requestBufferToRequestObjects(buffer, k) {
+  const counts = /* @__PURE__ */ new Map();
+  for (let i = 0; i < buffer.length; i += 4) {
+    const r = buffer[i];
+    const g = buffer[i + 1];
+    const b = buffer[i + 2];
+    const a = buffer[i + 3];
+    if ((r | g | b | a) === 0) {
+      continue;
+    }
+    const packed = (r << 24 | g << 16 | b << 8 | a) >>> 0;
+    counts.set(packed, (counts.get(packed) || 0) + 1);
+  }
+  const requests = [...counts.entries()].sort((a, b) => b[1] - a[1]).slice(0, k).map(([packed]) => ({
+    x: packed >> 22 & 1023,
+    y: packed >> 12 & 1023,
+    z: packed & 4095
+  }));
+  return { requests, origRequestCount: counts.size };
+}
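The request buffer arrives as RGBA8 pixels read back from the `gRequest` render target; each non-zero pixel is a page-table coordinate packed as 10 bits X, 10 bits Y, 12 bits Z (see the GLSL comments further down). A hedged JS mirror of that packing, just to show the round trip:

// Hypothetical packer mirroring the shader-side layout.
function packRequest(x, y, z) {
  const packed = ((x & 1023) << 22 | (y & 1023) << 12 | z & 4095) >>> 0;
  return [packed >>> 24 & 255, packed >>> 16 & 255, packed >>> 8 & 255, packed & 255];
}
const buffer = Uint8Array.from([...packRequest(3, 17, 40), ...packRequest(3, 17, 40)]);
_requestBufferToRequestObjects(buffer, 40);
// -> { requests: [{ x: 3, y: 17, z: 40 }], origRequestCount: 1 }
//    (duplicates are tallied, and only the k most-requested bricks survive)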
+class VolumeDataManager {
+  constructor(glParam) {
+    logWithColor$2("CLASS INITIALIZING");
+    const gl = glParam.getContext?.() || glParam;
+    const renderer = glParam;
+    if (gl.domElement && gl.getContext) {
+      this.gl = gl.getContext();
+    } else if (gl.isWebGLRenderer) {
+      this.gl = gl.getContext();
+    } else {
+      this.gl = gl;
+    }
+    this.renderer = renderer;
+    if (!this.gl || typeof this.gl.getParameter !== "function") {
+      log.debug("Unable to get WebGL context, using mock context");
+      this.gl = {
+        getParameter: (param) => {
+          const defaults = {
+            MAX_TEXTURE_SIZE: 4096,
+            MAX_3D_TEXTURE_SIZE: 256,
+            MAX_RENDERBUFFER_SIZE: 4096,
+            MAX_UNIFORM_BUFFER_BINDINGS: 16
+          };
+          return defaults[param] || 0;
+        },
+        MAX_TEXTURE_SIZE: "MAX_TEXTURE_SIZE",
+        MAX_3D_TEXTURE_SIZE: "MAX_3D_TEXTURE_SIZE",
+        MAX_RENDERBUFFER_SIZE: "MAX_RENDERBUFFER_SIZE",
+        MAX_UNIFORM_BUFFER_BINDINGS: "MAX_UNIFORM_BUFFER_BINDINGS"
+      };
+    }
+    log.debug("GL CONSTANTS");
+    log.debug(this.gl);
+    log.debug(this.gl.TEXTURE0);
+    log.debug(this.gl.textures);
+    log.debug("RENDERER");
+    log.debug(this.renderer);
+    this.deviceLimits = {
+      maxTextureSize: this.gl.getParameter(this.gl.MAX_TEXTURE_SIZE),
+      max3DTextureSize: this.gl.getParameter(this.gl.MAX_3D_TEXTURE_SIZE),
+      maxRenderbufferSize: this.gl.getParameter(this.gl.MAX_RENDERBUFFER_SIZE),
+      maxUniformBufferBindings: this.gl.getParameter(this.gl.MAX_UNIFORM_BUFFER_BINDINGS)
+    };
+    this.zarrStore = {
+      resolutions: null,
+      // 6 (the number of resolutions aka. pyramid levels in the file)
+      chunkSize: [],
+      // [32, 32, 32]
+      shapes: [],
+      // [[795, 1024, 1024], ..., [64, 64, 64], [32, 32, 32]]
+      arrays: [],
+      // [array0, array1, array2, array3, array4, array5]
+      dtype: "",
+      // 'uint8'
+      physicalSizeTotal: [],
+      // [795 x 0.0688, 1024 x 0.03417, 1024 x 0.03417]
+      physicalSizeVoxel: [],
+      // [0.0688, 0.03417, 0.03417]
+      brickLayout: [],
+      // [[25, 32, 32],[13, 16, 16], ..., [2,2,2],[1,1,1]]
+      // store: '', // ref to this.store
+      // group: '', // ref to this.group
+      channelCount: 1,
+      // MAX 7 TODO: get from zarr metadata
+      scales: [],
+      // downsample ratios, [x,y,z] per resolution level
+      lowestDataRes: 0
+      // lowest resolution level with data
+    };
+    this.ptTHREE = null;
+    this.bcTHREE = null;
+    this.channels = {
+      maxChannels: 7,
+      // lower when dataset has fewer, dictates page table size
+      zarrMappings: [],
+      // stores the zarr channel index for every one of the up to 7 channels
+      colorMappings: [],
+      // stores the PT slot for every color
+      downsampleMin: [],
+      // stores the downsample min for every one of the up to 7 channels
+      downsampleMax: []
+      // stores the downsample max for every one of the up to 7 channels
+    };
+    this.PT = {
+      channelOffsets: [
+        [0, 0, 1],
+        [0, 1, 0],
+        [0, 1, 1],
+        [1, 0, 0],
+        [1, 0, 1],
+        [1, 1, 0],
+        [1, 1, 1]
+      ],
+      anchors: [],
+      offsets: [],
+      xExtent: 0,
+      // includes the offset inclusive
+      yExtent: 0,
+      // includes the offset inclusive
+      zExtent: 0,
+      // includes the offset inclusive
+      z0Extent: 0,
+      // l0 z extent
+      zTotal: 0
+      // original z extent plus the l0 z extent times the channel count
+    };
+    this.bricksEverLoaded = /* @__PURE__ */ new Set();
+    this.isBusy = false;
+    this.BCTimeStamps = new Array(TOTAL_NUM_BRICKS).fill(0);
+    this.BCMinMax = new Array(TOTAL_NUM_BRICKS).fill([0, 0]);
+    this.BCFull = false;
+    this.BCUnusedIndex = 0;
+    this.bc2pt = new Array(TOTAL_NUM_BRICKS).fill(null);
+    this.LRUStack = [];
+    this.triggerUsage = true;
+    this.triggerRequest = false;
+    this.timeStamp = 0;
+    this.k = 40;
+    this.noNewRequests = false;
+    this.needsBailout = false;
+    this.initStatus = INIT_STATUS.NOT_STARTED;
+    this.initError = null;
+    logWithColor$2("VolumeDataManager constructor complete");
+  }
|
344
|
+
initImages(images, imageLayerScopes) {
|
345
|
+
logWithColor$2("INIT IMAGES");
|
346
|
+
this.images = images;
|
347
|
+
this.imageLayerScopes = imageLayerScopes;
|
348
|
+
}
|
349
|
+
/**
|
350
|
+
* Initialize the VolumeDataManager with Zarr store details and device limits
|
351
|
+
* This should be called ONCE at website initialization
|
352
|
+
* TODO(mark): merge this with the constructor?
|
353
|
+
* @returns {Promise<Object>} Object with Zarr store details and device limits
|
354
|
+
*/
|
355
|
+
async init(config) {
|
356
|
+
logWithColor$2("INIT()");
|
357
|
+
if (this.initStatus !== INIT_STATUS.NOT_STARTED) {
|
358
|
+
log.debug("VolumeDataManager init() was called more than once!");
|
359
|
+
if (this.initStatus === INIT_STATUS.COMPLETE) {
|
360
|
+
return {
|
361
|
+
success: true,
|
362
|
+
deviceLimits: this.deviceLimits,
|
363
|
+
zarrStore: this.zarrStore,
|
364
|
+
// physicalScale: this.physicalScale,
|
365
|
+
physicalSizeTotal: this.zarrStore.physicalSizeTotal,
|
366
|
+
physicalSizeVoxel: this.zarrStore.physicalSizeVoxel,
|
367
|
+
error: null
|
368
|
+
};
|
369
|
+
}
|
370
|
+
if (this.initStatus === INIT_STATUS.FAILED) {
|
371
|
+
return {
|
372
|
+
success: false,
|
373
|
+
error: this.initError || "Unknown initialization error"
|
374
|
+
};
|
375
|
+
}
|
376
|
+
return {
|
377
|
+
success: false,
|
378
|
+
pending: true,
|
379
|
+
error: "Initialization in progress"
|
380
|
+
};
|
381
|
+
}
|
382
|
+
this.initStatus = INIT_STATUS.IN_PROGRESS;
|
383
|
+
logWithColor$2("INIT() IN PROGRESS");
|
384
|
+
try {
|
385
|
+
const imageWrapper = this.images?.[this.imageLayerScopes?.[0]]?.image?.instance;
|
386
|
+
this.ngffMetadata = imageWrapper.vivLoader.metadata;
|
387
|
+
log.debug("ngffMetadata", this.ngffMetadata);
|
388
|
+
if (!imageWrapper || imageWrapper.getType() !== "ome-zarr") {
|
389
|
+
throw new Error("Invalid imageWrapper or not an OME-Zarr image");
|
390
|
+
}
|
391
|
+
const multiResolutionStats = imageWrapper.getMultiResolutionStats();
|
392
|
+
const shapes = _resolutionStatsToShapes(multiResolutionStats);
|
393
|
+
const resolutions = multiResolutionStats.length;
|
394
|
+
this.zarrStore.resolutions = resolutions;
|
395
|
+
const vivData = imageWrapper.getData();
|
396
|
+
if (!Array.isArray(vivData) || vivData.length < 1) {
|
397
|
+
throw new Error("Not a multiresolution loader");
|
398
|
+
}
|
399
|
+
if (!isEqual(vivData[0].labels, ["t", "c", "z", "y", "x"])) {
|
400
|
+
throw new Error("Expected OME-Zarr data with dimensions [t, c, z, y, x]");
|
401
|
+
}
|
402
|
+
log.debug("vivData", vivData);
|
403
|
+
const arrays = vivData.map((resolutionData) => resolutionData._data);
|
404
|
+
const scales = new Array(resolutions).fill(null);
|
405
|
+
if (arrays.length > 0) {
|
406
|
+
const array0 = arrays[0];
|
407
|
+
this.zarrStore = {
|
408
|
+
resolutions,
|
409
|
+
chunkSize: array0.chunks,
|
410
|
+
shapes,
|
411
|
+
arrays,
|
412
|
+
dtype: array0.dtype,
|
413
|
+
physicalSizeTotal: [],
|
414
|
+
// Will be populated if metadata exists
|
415
|
+
physicalSizeVoxel: [],
|
416
|
+
// Will be populated if metadata exists
|
417
|
+
brickLayout: [],
|
418
|
+
// Calculate from shapes and chunk sizes
|
419
|
+
// store: this.store,
|
420
|
+
// group: this.group,
|
421
|
+
channelCount: shapes[0][1],
|
422
|
+
scales
|
423
|
+
};
|
424
|
+
this.channels.colorMappings = new Array(Math.min(this.zarrStore.channelCount, 7)).fill(-1);
|
425
|
+
this.channels.zarrMappings = new Array(Math.min(this.zarrStore.channelCount, 7)).fill(void 0);
|
426
|
+
this.channels.downsampleMin = new Array(Math.min(this.zarrStore.channelCount, 7)).fill(void 0);
|
427
|
+
this.channels.downsampleMax = new Array(Math.min(this.zarrStore.channelCount, 7)).fill(void 0);
|
428
|
+
if (array0.meta && array0.meta.physicalSizes) {
|
429
|
+
const { x, y, z } = array0.meta.physicalSizes;
|
430
|
+
const zSize = z?.size || 1;
|
431
|
+
const ySize = y?.size || 1;
|
432
|
+
const xSize = x?.size || 1;
|
433
|
+
this.zarrStore.physicalSizeVoxel = [zSize, ySize, xSize];
|
434
|
+
if (array0.shape && array0.shape.length >= 5) {
|
435
|
+
this.zarrStore.physicalSizeTotal = [
|
436
|
+
(array0.shape[2] || 1) * zSize,
|
437
|
+
(array0.shape[3] || 1) * ySize,
|
438
|
+
(array0.shape[4] || 1) * xSize
|
439
|
+
];
|
440
|
+
}
|
441
|
+
} else {
|
442
|
+
this.zarrStore.physicalSizeVoxel = [1, 1, 1];
|
443
|
+
this.zarrStore.physicalSizeTotal = [
|
444
|
+
array0.shape[2] || 1,
|
445
|
+
array0.shape[3] || 1,
|
446
|
+
array0.shape[4] || 1
|
447
|
+
];
|
448
|
+
}
|
449
|
+
const { multiscales } = this.ngffMetadata;
|
450
|
+
if (!multiscales) {
|
451
|
+
throw new Error("Expected multiscales metadata in group.attrs");
|
452
|
+
}
|
453
|
+
if (multiscales?.[0]?.datasets?.[0]?.coordinateTransformations) {
|
454
|
+
for (let i = 0; i < resolutions; i++) {
|
455
|
+
if (multiscales?.[0]?.datasets?.[i]?.coordinateTransformations?.[0]?.scale) {
|
456
|
+
const { scale } = multiscales[0].datasets[i].coordinateTransformations[0];
|
457
|
+
scales[i] = [scale[4], scale[3], scale[2]];
|
458
|
+
}
|
459
|
+
}
|
460
|
+
} else {
|
461
|
+
log.error("no coordinateTransformations available, assuming downsampling ratio of 2 per dimension");
|
462
|
+
for (let i = 0; i < resolutions; i++) {
|
463
|
+
const scale = 2 ** i;
|
464
|
+
scales[i] = [scale, scale, scale];
|
465
|
+
}
|
466
|
+
}
|
467
|
+
this.zarrStore.scales = scales;
|
468
|
+
const { coordinateTransformations } = this.ngffMetadata;
|
469
|
+
if (coordinateTransformations?.[0]?.scale) {
|
470
|
+
const { scale } = coordinateTransformations[0];
|
471
|
+
const scaleLength = scale.length;
|
472
|
+
if (scaleLength >= 3) {
|
473
|
+
const zScale = scale[scaleLength - 3];
|
474
|
+
const yScale = scale[scaleLength - 2];
|
475
|
+
const xScale = scale[scaleLength - 1];
|
476
|
+
this.zarrStore.physicalSizeVoxel = [zScale, yScale, xScale];
|
477
|
+
if (array0.shape && array0.shape.length >= 5) {
|
478
|
+
this.zarrStore.physicalSizeTotal = [
|
479
|
+
(array0.shape[2] || 1) * zScale,
|
480
|
+
(array0.shape[3] || 1) * yScale,
|
481
|
+
(array0.shape[4] || 1) * xScale
|
482
|
+
];
|
483
|
+
}
|
484
|
+
}
|
485
|
+
}
|
486
|
+
this.zarrStore.brickLayout = _resolutionStatsToBrickLayout(imageWrapper.getMultiResolutionStats());
|
487
|
+
log.debug("config", config);
|
488
|
+
const { omero } = this.ngffMetadata || {};
|
489
|
+
if (!omero) {
|
490
|
+
throw new Error("Expected omero metadata in ngffMetadata");
|
491
|
+
}
|
492
|
+
log.debug("omero", omero);
|
493
|
+
Object.keys(config).forEach((key, i) => {
|
494
|
+
const configChannel = config[key].spatialTargetC;
|
495
|
+
this.channels.zarrMappings[i] = configChannel;
|
496
|
+
this.channels.colorMappings[i] = i;
|
497
|
+
this.channels.downsampleMin[i] = omero?.channels?.[configChannel]?.window?.min || 0;
|
498
|
+
this.channels.downsampleMax[i] = omero?.channels?.[configChannel]?.window?.max || 65535;
|
499
|
+
});
|
500
|
+
log.debug("zarrMappings after init", this.channels.zarrMappings);
|
501
|
+
log.debug("colorMappings after init", this.channels.colorMappings);
|
502
|
+
log.debug("downsampleMin after init", this.channels.downsampleMin);
|
503
|
+
log.debug("downsampleMax after init", this.channels.downsampleMax);
|
504
|
+
this.initMRMCPT();
|
505
|
+
}
|
506
|
+
this.initStatus = INIT_STATUS.COMPLETE;
|
507
|
+
logWithColor$2("INIT() COMPLETE");
|
508
|
+
return {
|
509
|
+
success: true,
|
510
|
+
deviceLimits: this.deviceLimits,
|
511
|
+
zarrStore: this.zarrStore,
|
512
|
+
// physicalScale: this.physicalScale,
|
513
|
+
physicalSizeTotal: this.zarrStore.physicalSizeTotal,
|
514
|
+
physicalSizeVoxel: this.zarrStore.physicalSizeVoxel,
|
515
|
+
error: null
|
516
|
+
};
|
517
|
+
} catch (error) {
|
518
|
+
logWithColor$2("INIT() FAILED");
|
519
|
+
log.error("Error initializing VolumeDataManager:", error);
|
520
|
+
this.initStatus = INIT_STATUS.FAILED;
|
521
|
+
this.initError = error.message || "Unknown error";
|
522
|
+
return {
|
523
|
+
success: false,
|
524
|
+
error: this.initError
|
525
|
+
};
|
526
|
+
}
|
527
|
+
}
|
528
|
+
/**
|
529
|
+
* Initialize the BrickCache and PageTable
|
530
|
+
* MRMCPT: multi-resolution multi-channel page table
|
531
|
+
*
|
532
|
+
* Depends on:
|
533
|
+
* - zarrStore.brickLayout
|
534
|
+
* - zarrMappings.length (zarrMappings: the zarr channel index for every one of the up to 7 channels)
|
535
|
+
* -
|
536
|
+
*/
|
537
|
+
initMRMCPT() {
|
538
|
+
logWithColor$2("initMRMCPT");
|
539
|
+
const { PT, ptTHREE, bcTHREE } = _initMRMCPT(this.zarrStore.brickLayout, this.channels.zarrMappings.length);
|
540
|
+
this.PT = PT;
|
541
|
+
this.ptTHREE = ptTHREE;
|
542
|
+
this.bcTHREE = bcTHREE;
|
543
|
+
logWithColor$2("initMRMCPT() COMPLETE");
|
544
|
+
}
|
545
|
+
/*
|
546
|
+
testTexture() {
|
547
|
+
log.debug('testTexture pt', this.ptTHREE);
|
548
|
+
log.debug('testTexture bc', this.bcTHREE);
|
549
|
+
}
|
550
|
+
*/
|
551
|
+
async initTexture() {
|
552
|
+
const requests = [
|
553
|
+
{ x: 0, y: 0, z: 1 }
|
554
|
+
];
|
555
|
+
await this.handleBrickRequests(requests);
|
556
|
+
}
|
557
|
+
updateChannels(channelProps) {
|
558
|
+
logWithColor$2("updateChannels");
|
559
|
+
log.debug("channelProps", channelProps);
|
560
|
+
log.debug("this.channels.zarrMappings", this.channels.zarrMappings);
|
561
|
+
log.debug("this.channels.colorMappings", this.channels.colorMappings);
|
562
|
+
log.debug("this.channels.downsampleMin", this.channels.downsampleMin);
|
563
|
+
log.debug("this.channels.downsampleMax", this.channels.downsampleMax);
|
564
|
+
if (this.channels.zarrMappings.length === 0) {
|
565
|
+
log.debug("channels not initialized yet");
|
566
|
+
return;
|
567
|
+
}
|
568
|
+
const requestedZarrChannels = Object.values(channelProps).map((channelData) => channelData.spatialTargetC).filter((targetC) => targetC !== void 0);
|
569
|
+
const currentZarrChannels = this.channels.zarrMappings.filter((mapping) => mapping !== void 0);
|
570
|
+
const requestedSorted = [...new Set(requestedZarrChannels)].sort((a, b) => a - b);
|
571
|
+
const currentSorted = [...new Set(currentZarrChannels)].sort((a, b) => a - b);
|
572
|
+
if (requestedSorted.length === currentSorted.length && requestedSorted.every((val, index) => val === currentSorted[index])) {
|
573
|
+
log.debug("Channel mappings unchanged, skipping update");
|
574
|
+
}
|
575
|
+
log.debug("Channel mappings changed:", {
|
576
|
+
current: currentSorted,
|
577
|
+
requested: requestedSorted
|
578
|
+
});
|
579
|
+
Object.entries(channelProps).forEach(([uiChannelKey, channelData]) => {
|
580
|
+
const targetZarrChannel = channelData.spatialTargetC;
|
581
|
+
log.debug(`UI channel "${uiChannelKey}" wants zarr channel ${targetZarrChannel}`);
|
582
|
+
const existingSlotIndex = this.channels.zarrMappings.indexOf(targetZarrChannel);
|
583
|
+
if (existingSlotIndex === -1) {
|
584
|
+
const nextFreeSlot = this.channels.zarrMappings.findIndex((slot) => slot === void 0);
|
585
|
+
if (nextFreeSlot !== -1) {
|
586
|
+
this.channels.zarrMappings[nextFreeSlot] = targetZarrChannel;
|
587
|
+
log.debug("channelData", channelData);
|
588
|
+
log.debug("this.ngffMetadata?.omero?.channels", this.ngffMetadata?.omero?.channels);
|
589
|
+
log.debug("targetZarrChannel", targetZarrChannel);
|
590
|
+
this.channels.downsampleMin[nextFreeSlot] = this.ngffMetadata?.omero?.channels?.[targetZarrChannel]?.window?.min || 0;
|
591
|
+
this.channels.downsampleMax[nextFreeSlot] = this.ngffMetadata?.omero?.channels?.[targetZarrChannel]?.window?.max || 65535;
|
592
|
+
log.debug(`Mapped zarr channel ${targetZarrChannel} to slot ${nextFreeSlot}`);
|
593
|
+
log.debug("channels", this.channels);
|
594
|
+
} else {
|
595
|
+
log.debug("No free slots found, looking for unused mapped channels");
|
596
|
+
const currentlyMapped = this.channels.zarrMappings.filter((mapping) => mapping !== void 0);
|
597
|
+
const stillRequested = requestedZarrChannels;
|
598
|
+
const unusedMappedChannels = currentlyMapped.filter((mappedChannel) => !stillRequested.includes(mappedChannel));
|
599
|
+
log.debug("Currently mapped:", currentlyMapped);
|
600
|
+
log.debug("Still requested:", stillRequested);
|
601
|
+
log.debug("Unused mapped channels:", unusedMappedChannels);
|
602
|
+
if (unusedMappedChannels.length > 0) {
|
603
|
+
const slotToReuse = this.channels.zarrMappings.findIndex((mapping) => unusedMappedChannels.includes(mapping));
|
604
|
+
if (slotToReuse !== -1) {
|
605
|
+
const oldZarrChannel = this.channels.zarrMappings[slotToReuse];
|
606
|
+
this.channels.zarrMappings[slotToReuse] = targetZarrChannel;
|
607
|
+
this.channels.downsampleMin[slotToReuse] = this.ngffMetadata?.omero?.channels?.[targetZarrChannel]?.window?.min || 0;
|
608
|
+
this.channels.downsampleMax[slotToReuse] = this.ngffMetadata?.omero?.channels?.[targetZarrChannel]?.window?.max || 65535;
|
609
|
+
log.debug(`Reused slot ${slotToReuse}: ${oldZarrChannel} -> ${targetZarrChannel}`);
|
610
|
+
this._purgeChannel(slotToReuse);
|
611
|
+
} else {
|
612
|
+
log.error("Could not find slot to reuse - this should not happen");
|
613
|
+
}
|
614
|
+
} else {
|
615
|
+
log.error("All slots are full and all mapped channels are still in use");
|
616
|
+
}
|
617
|
+
}
|
618
|
+
} else {
|
619
|
+
log.debug(`Zarr channel ${targetZarrChannel} already mapped to slot ${existingSlotIndex}`);
|
620
|
+
}
|
621
|
+
});
|
622
|
+
const newColorMappings = Object.values(channelProps).map((ch) => {
|
623
|
+
const slot = this.channels.zarrMappings.indexOf(ch.spatialTargetC);
|
624
|
+
return slot !== -1 ? slot : -1;
|
625
|
+
});
|
626
|
+
while (newColorMappings.length < 7) {
|
627
|
+
newColorMappings.push(-1);
|
628
|
+
}
|
629
|
+
log.debug("newColorMappings", newColorMappings);
|
630
|
+
this.channels.colorMappings = newColorMappings;
|
631
|
+
log.debug("updatedChannels", this.channels);
|
632
|
+
}
|
633
|
+
/**
|
634
|
+
* Try to load a resolution level
|
635
|
+
* @param {number} resolutionIndex - The resolution level to load
|
636
|
+
* @param {Array} arrays - Array to store the loaded arrays
|
637
|
+
* @returns {Promise} Promise resolving when the resolution is loaded or rejected
|
638
|
+
*/
|
639
|
+
/*
|
640
|
+
async tryLoadResolution(resolutionIndex, arrays) {
|
641
|
+
logWithColor('tryLoadResolution');
|
642
|
+
log.debug(resolutionIndex, arrays);
|
643
|
+
try {
|
644
|
+
const array = await zarrita.open(this.group.resolve(String(resolutionIndex)));
|
645
|
+
// Create new arrays to avoid modifying parameters directly
|
646
|
+
const newArrays = [...arrays];
|
647
|
+
newArrays[resolutionIndex] = array;
|
648
|
+
// Update the original arrays
|
649
|
+
Object.assign(arrays, newArrays);
|
650
|
+
logWithColor('tryLoadResolution() COMPLETE');
|
651
|
+
return { success: true, level: resolutionIndex };
|
652
|
+
} catch (err) {
|
653
|
+
log.error(`Failed to load resolution ${resolutionIndex}:`, err);
|
654
|
+
return { success: false, level: resolutionIndex, error: err.message };
|
655
|
+
}
|
656
|
+
}
|
657
|
+
*/
|
658
|
+
/**
|
659
|
+
* Get physical dimensions
|
660
|
+
* @returns {Array} Physical dimensions [X, Y, Z]
|
661
|
+
*/
|
662
|
+
getPhysicalDimensionsXYZ() {
|
663
|
+
log.debug("getPhysicalDimensionsXYZ");
|
664
|
+
log.debug("this.zarrStore.physicalSizeTotal", this.zarrStore.physicalSizeTotal);
|
665
|
+
const out = [
|
666
|
+
this.zarrStore.physicalSizeTotal[2],
|
667
|
+
this.zarrStore.physicalSizeTotal[1],
|
668
|
+
this.zarrStore.physicalSizeTotal[0]
|
669
|
+
];
|
670
|
+
log.debug("out", out);
|
671
|
+
return out;
|
672
|
+
}
|
673
|
+
/**
|
674
|
+
* Get the maximum resolution
|
675
|
+
* @returns {number} Maximum resolution
|
676
|
+
*/
|
677
|
+
getMaxResolutionXYZ() {
|
678
|
+
log.debug("getMaxResolutionXYZ");
|
679
|
+
log.debug("this.zarrStore.shapes", this.zarrStore.shapes);
|
680
|
+
const out = [
|
681
|
+
this.zarrStore.shapes[0][4],
|
682
|
+
this.zarrStore.shapes[0][3],
|
683
|
+
this.zarrStore.shapes[0][2]
|
684
|
+
];
|
685
|
+
log.debug("out", out);
|
686
|
+
return out;
|
687
|
+
}
|
688
|
+
getOriginalScaleXYZ() {
|
689
|
+
logWithColor$2("getOriginalScaleXYZ");
|
690
|
+
log.debug("this.zarrStore.physicalSizeVoxel", this.zarrStore.physicalSizeVoxel);
|
691
|
+
const out = [
|
692
|
+
this.zarrStore.physicalSizeVoxel[2],
|
693
|
+
this.zarrStore.physicalSizeVoxel[1],
|
694
|
+
this.zarrStore.physicalSizeVoxel[0]
|
695
|
+
];
|
696
|
+
log.debug("out", out);
|
697
|
+
return out;
|
698
|
+
}
|
699
|
+
getNormalizedScaleXYZ() {
|
700
|
+
log.debug("getNormalizedScaleXYZ");
|
701
|
+
const out = [
|
702
|
+
1,
|
703
|
+
this.zarrStore.physicalSizeVoxel[1] / this.zarrStore.physicalSizeVoxel[2],
|
704
|
+
this.zarrStore.physicalSizeVoxel[0] / this.zarrStore.physicalSizeVoxel[0]
|
705
|
+
];
|
706
|
+
log.debug("out", out);
|
707
|
+
return out;
|
708
|
+
}
|
709
|
+
getBoxDimensionsXYZ() {
|
710
|
+
log.debug("getBoxDimensionsXYZ");
|
711
|
+
log.debug("this.zarrStore.shapes", this.zarrStore.shapes);
|
712
|
+
const out = [
|
713
|
+
1,
|
714
|
+
this.zarrStore.shapes[0][3] / this.zarrStore.shapes[0][4],
|
715
|
+
this.zarrStore.shapes[0][2] / this.zarrStore.shapes[0][4]
|
716
|
+
];
|
717
|
+
log.debug("out", out);
|
718
|
+
return out;
|
719
|
+
}
|
720
|
+
/**
|
721
|
+
* Load a specific Zarr chunk based on [t,c,z,y,x] coordinates
|
722
|
+
* @param {number} t - Time point (default 0)
|
723
|
+
* @param {number} c - Channel (default 0)
|
724
|
+
* @param {number} z - Z coordinate
|
725
|
+
* @param {number} y - Y coordinate
|
726
|
+
* @param {number} x - X coordinate
|
727
|
+
* @param {number} resolution - Resolution level
|
728
|
+
* @returns {Promise<Uint8Array>} 32x32x32 chunk data
|
729
|
+
*/
|
730
|
+
async loadZarrChunk(t = 0, c = 0, z, y, x, resolution) {
|
731
|
+
if (!this.zarrStore || !this.zarrStore.arrays[resolution]) {
|
732
|
+
throw new Error("Zarr store or resolution not initialized");
|
733
|
+
}
|
734
|
+
const array = this.zarrStore.arrays[resolution];
|
735
|
+
const chunkEntry = await array.getChunk([t, c, z, y, x]);
|
736
|
+
if (!chunkEntry) {
|
737
|
+
throw new Error(`No chunk found at coordinates [${t},${c},${z},${y},${x}]`);
|
738
|
+
}
|
739
|
+
if (chunkEntry.data.length !== BRICK_SIZE * BRICK_SIZE * BRICK_SIZE) {
|
740
|
+
throw new Error(`Unexpected chunk size: ${chunkEntry.data.length}`);
|
741
|
+
}
|
742
|
+
return chunkEntry.data;
|
743
|
+
}
|
744
|
+
async processRequestData(buffer) {
|
745
|
+
if (this.isBusy) {
|
746
|
+
log.debug("processRequestData: already busy, skipping");
|
747
|
+
return;
|
748
|
+
}
|
749
|
+
this.isBusy = true;
|
750
|
+
this.triggerRequest = false;
|
751
|
+
const { requests, origRequestCount } = _requestBufferToRequestObjects(buffer, this.k);
|
752
|
+
if (requests.length === 0) {
|
753
|
+
this.noNewRequests = true;
|
754
|
+
}
|
755
|
+
log.debug(`processRequestData: handling ${requests.length} requests of ${origRequestCount}`);
|
756
|
+
await this.handleBrickRequests(requests);
|
757
|
+
this.triggerUsage = true;
|
758
|
+
this.isBusy = false;
|
759
|
+
}
|
760
|
+
async processUsageData(buffer) {
|
761
|
+
if (this.isBusy) {
|
762
|
+
log.debug("processUsageData: already busy, skipping");
|
763
|
+
this.needsBailout = true;
|
764
|
+
return;
|
765
|
+
}
|
766
|
+
this.isBusy = true;
|
767
|
+
this.triggerUsage = false;
|
768
|
+
const now = ++this.timeStamp;
|
769
|
+
const usedBricks = /* @__PURE__ */ new Set();
|
770
|
+
for (let i = 0; i < buffer.length; i += 4) {
|
771
|
+
const x = buffer[i];
|
772
|
+
const y = buffer[i + 1];
|
773
|
+
const z = buffer[i + 2];
|
774
|
+
if ((x | y | z) === 0) {
|
775
|
+
continue;
|
776
|
+
}
|
777
|
+
const bcIndex = z * BRICK_CACHE_SIZE_X * BRICK_CACHE_SIZE_Y + y * BRICK_CACHE_SIZE_X + x;
|
778
|
+
if (bcIndex < this.BCTimeStamps.length) {
|
779
|
+
usedBricks.add(bcIndex);
|
780
|
+
}
|
781
|
+
}
|
782
|
+
Array.from(usedBricks).forEach((bcIndex) => {
|
783
|
+
this.BCTimeStamps[bcIndex] = now;
|
784
|
+
});
|
785
|
+
if (this.BCFull)
|
786
|
+
this._buildLRU();
|
787
|
+
this.triggerRequest = true;
|
788
|
+
this.isBusy = false;
|
789
|
+
}
|
790
|
+
// Helper method to update PT entries for evicted bricks
|
791
|
+
_evictBrick(bcIndex) {
|
792
|
+
const pt = this.bc2pt[bcIndex];
|
793
|
+
if (!pt)
|
794
|
+
return;
|
795
|
+
const [min, max] = this.BCMinMax[bcIndex] || [0, 0];
|
796
|
+
const ptVal = (0 << 31 | 1 << 30 | Math.min(127, min >> 1) << 23 | Math.min(127, max >> 1) << 16) >>> 0;
|
797
|
+
this._updatePTEntry(pt.x, pt.y, pt.z, ptVal);
|
798
|
+
this.bc2pt[bcIndex] = null;
|
799
|
+
}
|
800
|
+
_purgeChannel(ptChannelIndex) {
|
801
|
+
log.debug("purging channel", ptChannelIndex);
|
802
|
+
log.debug("corresponding zarr channel", this.channels.zarrMappings[ptChannelIndex]);
|
803
|
+
if (!this.ptTHREE) {
|
804
|
+
log.error("pagetable texture not initialized");
|
805
|
+
return;
|
806
|
+
}
|
807
|
+
this.channels.downsampleMin[ptChannelIndex] = void 0;
|
808
|
+
this.channels.downsampleMax[ptChannelIndex] = void 0;
|
809
|
+
this.channels.zarrMappings[ptChannelIndex] = void 0;
|
810
|
+
const channelMask = this.PT.channelOffsets[ptChannelIndex];
|
811
|
+
log.debug("channelMask", channelMask);
|
812
|
+
log.error("TODO: not tested yet");
|
813
|
+
const { gl } = this;
|
814
|
+
const texPT = this.renderer.properties.get(this.ptTHREE).__webglTexture;
|
815
|
+
gl.activeTexture(gl.TEXTURE0);
|
816
|
+
gl.bindTexture(gl.TEXTURE_3D, texPT);
|
817
|
+
for (let r = 0; r < this.zarrStore.resolutions; r++) {
|
818
|
+
const anchor = [
|
819
|
+
this.PT.anchors[r][2] * channelMask[2],
|
820
|
+
this.PT.anchors[r][1] * channelMask[1],
|
821
|
+
this.PT.anchors[r][0] * channelMask[0]
|
822
|
+
];
|
823
|
+
log.debug("anchor", anchor);
|
824
|
+
const extents = this.zarrStore.brickLayout[r];
|
825
|
+
const size = extents[0] * extents[1] * extents[2];
|
826
|
+
log.debug("extents", extents);
|
827
|
+
log.debug("size", size);
|
828
|
+
gl.texSubImage3D(gl.TEXTURE_3D, 0, anchor[0], anchor[1], anchor[2], extents[0], extents[1], extents[2], gl.RED_INTEGER, gl.UNSIGNED_INT, new Uint32Array(size));
|
829
|
+
}
|
830
|
+
gl.bindTexture(gl.TEXTURE_3D, null);
|
831
|
+
}
|
832
|
+
// Update a PT entry
|
833
|
+
_updatePTEntry(ptX, ptY, ptZ, ptVal) {
|
834
|
+
if (!this.ptTHREE)
|
835
|
+
return;
|
836
|
+
const { gl } = this;
|
837
|
+
const texPT = this.renderer.properties.get(this.ptTHREE).__webglTexture;
|
838
|
+
gl.activeTexture(gl.TEXTURE0);
|
839
|
+
gl.bindTexture(gl.TEXTURE_3D, texPT);
|
840
|
+
gl.texSubImage3D(gl.TEXTURE_3D, 0, ptX, ptY, ptZ, 1, 1, 1, gl.RED_INTEGER, gl.UNSIGNED_INT, new Uint32Array([ptVal]));
|
841
|
+
gl.bindTexture(gl.TEXTURE_3D, null);
|
842
|
+
}
|
843
|
+
/* ------------------------------------------------------------- *
|
844
|
+
* 2. Allocate the next n free bricks in the brick cache *
|
845
|
+
* ------------------------------------------------------------- */
|
846
|
+
/**
|
847
|
+
*
|
848
|
+
* @param {number} n The number of slots to allocate
|
849
|
+
* @returns {{ bcIndex, x, y, z }[]} Array of brick cache coordinates for the allocated slots.
|
850
|
+
*/
|
851
|
+
_allocateBCSlots(n) {
|
852
|
+
let slots = [];
|
853
|
+
const total = BRICK_CACHE_SIZE_X * BRICK_CACHE_SIZE_Y * BRICK_CACHE_SIZE_Z;
|
854
|
+
if (!this.BCFull && this.BCUnusedIndex + n > total) {
|
855
|
+
this.BCFull = true;
|
856
|
+
log.debug("BRICK CACHE FULL");
|
857
|
+
}
|
858
|
+
if (!this.BCFull) {
|
859
|
+
for (let i = 0; i < n; ++i) {
|
860
|
+
const bcIndex = (this.BCUnusedIndex + i) % total;
|
861
|
+
const z = Math.floor(bcIndex / (BRICK_CACHE_SIZE_X * BRICK_CACHE_SIZE_Y));
|
862
|
+
const rem = bcIndex - z * BRICK_CACHE_SIZE_X * BRICK_CACHE_SIZE_Y;
|
863
|
+
const y = Math.floor(rem / BRICK_CACHE_SIZE_X);
|
864
|
+
const x = rem % BRICK_CACHE_SIZE_X;
|
865
|
+
slots.push({ bcIndex, x, y, z });
|
866
|
+
}
|
867
|
+
this.BCUnusedIndex += n;
|
868
|
+
} else {
|
869
|
+
if (this.LRUStack.length < n)
|
870
|
+
this._buildLRU();
|
871
|
+
slots = this.LRUStack.splice(0, n).map((bcIndex) => {
|
872
|
+
this._evictBrick(bcIndex);
|
873
|
+
const z = Math.floor(bcIndex / (BRICK_CACHE_SIZE_X * BRICK_CACHE_SIZE_Y));
|
874
|
+
const rem = bcIndex - z * BRICK_CACHE_SIZE_X * BRICK_CACHE_SIZE_Y;
|
875
|
+
const y = Math.floor(rem / BRICK_CACHE_SIZE_X);
|
876
|
+
const x = rem % BRICK_CACHE_SIZE_X;
|
877
|
+
return { bcIndex, x, y, z };
|
878
|
+
});
|
879
|
+
}
|
880
|
+
return slots;
|
881
|
+
}
|
882
|
+
/* ------------------------------------------------------------- *
|
883
|
+
* 4. Upload one brick + PT entry *
|
884
|
+
* ------------------------------------------------------------- */
|
885
|
+
async _uploadBrick(ptCoord, bcSlot) {
|
886
|
+
if (ptCoord.x >= this.PT.xExtent || ptCoord.y >= this.PT.yExtent || ptCoord.z >= this.PT.zTotal || ptCoord.x < 0 || ptCoord.y < 0 || ptCoord.z < 0) {
|
887
|
+
log.error("this.PT", this.PT);
|
888
|
+
log.error("ptCoord out of bounds", ptCoord);
|
889
|
+
return;
|
890
|
+
}
|
891
|
+
const { channel, resolution, x, y, z } = _ptToZarr(ptCoord.x, ptCoord.y, ptCoord.z, { PT_zExtent: this.PT.zExtent, PT_z0Extent: this.PT.z0Extent, PT_anchors: this.PT.anchors });
|
892
|
+
const zarrChannel = this.channels.zarrMappings[channel];
|
893
|
+
if (zarrChannel === void 0 || zarrChannel === -1) {
|
894
|
+
log.error("zarrChannel is undefined or -1", zarrChannel);
|
895
|
+
return;
|
896
|
+
}
|
897
|
+
log.debug("starting to load zarr chunk", { resolution, z, y, x, zarrChannel });
|
898
|
+
let chunk = await this.loadZarrChunk(0, zarrChannel, z, y, x, resolution);
|
899
|
+
if (chunk instanceof Uint16Array) {
|
900
|
+
log.debug("chunk is Uint16Array, converting to Uint8Array");
|
901
|
+
if (this.channels.downsampleMin[channel] === void 0) {
|
902
|
+
const channelId = this.channels.zarrMappings[channel];
|
903
|
+
log.debug("channelId was not found in this.channels.downsampleMin[channel]", channelId);
|
904
|
+
this.channels.downsampleMin[channel] = this.ngffMetadata?.omero?.channels?.[channelId]?.window?.min || 0;
|
905
|
+
this.channels.downsampleMax[channel] = this.ngffMetadata?.omero?.channels?.[channelId]?.window?.max || 65535;
|
906
|
+
log.debug("this.channels.downsampleMin[channel]", this.channels.downsampleMin[channel]);
|
907
|
+
log.debug("this.channels.downsampleMax[channel]", this.channels.downsampleMax[channel]);
|
908
|
+
}
|
909
|
+
const uint8Chunk = new Uint8Array(chunk.length);
|
910
|
+
for (let i = 0; i < chunk.length; i++) {
|
911
|
+
uint8Chunk[i] = Math.floor((chunk[i] - this.channels.downsampleMin[channel]) / (this.channels.downsampleMax[channel] - this.channels.downsampleMin[channel]) * 255);
|
912
|
+
}
|
913
|
+
chunk = uint8Chunk;
|
914
|
+
}
|
915
|
+
if (!(chunk instanceof Uint8Array)) {
|
916
|
+
throw new Error(`Unsupported chunk type: ${chunk.constructor.name}. Expected Uint8Array.`);
|
917
|
+
}
|
918
|
+
let min = 255;
|
919
|
+
let max = 0;
|
920
|
+
for (let i = 0; i < chunk.length; ++i) {
|
921
|
+
const v = chunk[i];
|
922
|
+
if (v < min)
|
923
|
+
min = v;
|
924
|
+
if (v > max)
|
925
|
+
max = v;
|
926
|
+
}
|
927
|
+
const { gl } = this;
|
928
|
+
const texBC = this.renderer.properties.get(this.bcTHREE).__webglTexture;
|
929
|
+
gl.activeTexture(gl.TEXTURE2);
|
930
|
+
gl.bindTexture(gl.TEXTURE_3D, texBC);
|
931
|
+
gl.pixelStorei(gl.UNPACK_ALIGNMENT, 1);
|
932
|
+
gl.texSubImage3D(gl.TEXTURE_3D, 0, bcSlot.x * BRICK_SIZE, bcSlot.y * BRICK_SIZE, bcSlot.z * BRICK_SIZE, BRICK_SIZE, BRICK_SIZE, BRICK_SIZE, gl.RED, gl.UNSIGNED_BYTE, chunk);
|
933
|
+
gl.pixelStorei(gl.UNPACK_ALIGNMENT, 4);
|
934
|
+
gl.bindTexture(gl.TEXTURE_3D, null);
|
935
|
+
const error = gl.getError();
|
936
|
+
if (error !== gl.NO_ERROR) {
|
937
|
+
log.error("WebGL error during brick upload:", error, chunk);
|
938
|
+
}
|
939
|
+
if (channel >= this.channels.zarrMappings.length) {
|
940
|
+
log.debug("channel is out of bounds", channel);
|
941
|
+
min = 255;
|
942
|
+
max = 255;
|
943
|
+
}
|
944
|
+
const ptVal = _packPT(min, max, bcSlot.x, bcSlot.y, bcSlot.z);
|
945
|
+
const texPT = this.renderer.properties.get(this.ptTHREE).__webglTexture;
|
946
|
+
gl.activeTexture(gl.TEXTURE0);
|
947
|
+
gl.bindTexture(gl.TEXTURE_3D, texPT);
|
948
|
+
gl.texSubImage3D(gl.TEXTURE_3D, 0, ptCoord.x, ptCoord.y, ptCoord.z, 1, 1, 1, gl.RED_INTEGER, gl.UNSIGNED_INT, new Uint32Array([ptVal]));
|
949
|
+
gl.bindTexture(gl.TEXTURE_3D, null);
|
950
|
+
const error2 = gl.getError();
|
951
|
+
if (error2 !== gl.NO_ERROR) {
|
952
|
+
log.error("WebGL error during pagetable upload:", error2, chunk);
|
953
|
+
}
|
954
|
+
this.BCTimeStamps[bcSlot.bcIndex] = this.timeStamp;
|
955
|
+
this.BCMinMax[bcSlot.bcIndex] = [min, max];
|
956
|
+
this.bc2pt[bcSlot.bcIndex] = ptCoord;
|
957
|
+
}
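The Uint16-to-Uint8 conversion above windows each voxel with the per-channel `downsampleMin`/`downsampleMax` taken from the OMERO metadata. A small worked example (illustrative, assuming a window of min 100 / max 4100):

const min = 100, max = 4100;
const toUint8 = (v) => Math.floor((v - min) / (max - min) * 255);
toUint8(100);  // 0
toUint8(2100); // 127
toUint8(4100); // 255
// Values outside [min, max] are not clamped before the Uint8Array write,
// so out-of-window voxels wrap modulo 256.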
|
958
|
+
/* ------------------------------------------------------------- *
|
959
|
+
* 5. Public: handle a batch of PT requests (array of {x,y,z}) *
|
960
|
+
* ------------------------------------------------------------- */
|
961
|
+
async handleBrickRequests(ptRequests) {
|
962
|
+
if (ptRequests.length === 0)
|
963
|
+
return;
|
964
|
+
const slots = this._allocateBCSlots(ptRequests.length);
|
965
|
+
log.debug("handleBrickRequests: starting for loop");
|
966
|
+
for (let i = 0; i < ptRequests.length; ++i) {
|
967
|
+
log.debug("uploading brick", ptRequests[i], slots[i]);
|
968
|
+
await this._uploadBrick(ptRequests[i], slots[i]);
|
969
|
+
const rlength = this.bricksEverLoaded.size;
|
970
|
+
this.bricksEverLoaded.add(`${ptRequests[i].x},${ptRequests[i].y},${ptRequests[i].z}`);
|
971
|
+
if (rlength === this.bricksEverLoaded.size) {
|
972
|
+
log.debug("DUPLICATE BRICK LOADED", ptRequests[i]);
|
973
|
+
}
|
974
|
+
if (this.needsBailout) {
|
975
|
+
log.debug("Bailing out of handleBrickRequests early due to needsBailout flag");
|
976
|
+
this.needsBailout = false;
|
977
|
+
break;
|
978
|
+
}
|
979
|
+
}
|
980
|
+
log.debug("this.bricksEverLoaded", this.bricksEverLoaded);
|
981
|
+
}
|
982
|
+
/* --------------------------------------------------------- *
|
983
|
+
* Rebuild the LRUStack with the k least-recently-used bricks *
|
984
|
+
* --------------------------------------------------------- */
|
985
|
+
_buildLRU() {
|
986
|
+
const brickIndicesWithTimes = this.BCTimeStamps.map((time, index) => ({ index, time }));
|
987
|
+
this.LRUStack = brickIndicesWithTimes.sort((a, b) => a.time - b.time).slice(0, this.k).map((item) => item.index);
|
988
|
+
}
|
989
|
+
}
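Putting the class together, a hedged sketch (not from the package) of the lifecycle its methods and flags imply: construct with the Three.js renderer, register the images, initialize once, then alternate usage and request processing as the render loop reads back the `gUsage` / `gRequest` targets. All names other than the VolumeDataManager members are hypothetical:

const dm = new VolumeDataManager(threeRenderer);
dm.initImages(images, imageLayerScopes);
await dm.init(channelCoordination);
await dm.initTexture(); // seeds the brick cache with an initial brick
function onReadback(usagePixels, requestPixels) {
  if (dm.triggerUsage) dm.processUsageData(usagePixels);            // refresh LRU timestamps
  else if (dm.triggerRequest) dm.processRequestData(requestPixels); // upload requested bricks
}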
|
990
|
+
const volumeVertexShader = `//
|
991
|
+
// Output: Unnormalized ray direction from camera to each vertex
|
992
|
+
// Used by fragment shader for ray marching through the volume
|
993
|
+
out vec3 rayDirUnnorm;
|
994
|
+
|
995
|
+
// Output: Camera position transformed into volume's local coordinate system
|
996
|
+
// Used to calculate ray origins in the fragment shader
|
997
|
+
out vec3 cameraCorrected;
|
998
|
+
|
999
|
+
// Volume scale uniform (likely for anisotropic voxels)
|
1000
|
+
uniform vec3 u_vol_scale;
|
1001
|
+
|
1002
|
+
// Volume size uniform
|
1003
|
+
uniform vec3 u_size;
|
1004
|
+
|
1005
|
+
// Output: Vertex positions normalized to [0,1] range within volume bounds
|
1006
|
+
// Standard coordinate system for volume sampling
|
1007
|
+
varying vec3 worldSpaceCoords;
|
1008
|
+
|
1009
|
+
// Output: Texture coordinates for sampling volume data
|
1010
|
+
varying vec2 vUv;
|
1011
|
+
|
1012
|
+
// Output: Final clip-space position (stored for fragment shader access)
|
1013
|
+
varying vec4 glPosition;
|
1014
|
+
|
1015
|
+
// Volume bounding box size uniform
|
1016
|
+
uniform highp vec3 boxSize;
|
1017
|
+
|
1018
|
+
void main()
|
1019
|
+
{
|
1020
|
+
// Transform vertex positions from [-0.5, 0.5] range to [0, 1] range
|
1021
|
+
// This is the standard coordinate system for volume sampling
|
1022
|
+
//
|
1023
|
+
// Mathematical transformation:
|
1024
|
+
// worldSpaceCoords = (position / boxSize) + 0.5
|
1025
|
+
//
|
1026
|
+
// Example:
|
1027
|
+
// position = (-0.5, -0.5, -0.5) → worldSpaceCoords = (0, 0, 0)
|
1028
|
+
// position = ( 0.0, 0.0, 0.0) → worldSpaceCoords = (0.5, 0.5, 0.5)
|
1029
|
+
// position = ( 0.5, 0.5, 0.5) → worldSpaceCoords = (1, 1, 1)
|
1030
|
+
worldSpaceCoords = position / boxSize + vec3(0.5, 0.5, 0.5); //move it from [-0.5;0.5] to [0,1]
|
1031
|
+
|
1032
|
+
// Transform camera position into volume's local coordinate system
|
1033
|
+
// This gives us the ray origin in volume space
|
1034
|
+
cameraCorrected = (inverse(modelMatrix) * vec4(cameraPosition, 1.)).xyz;
|
1035
|
+
|
1036
|
+
// Calculate unnormalized ray direction from camera to each vertex
|
1037
|
+
// Used by fragment shader for ray marching through the volume
|
1038
|
+
rayDirUnnorm = position - cameraCorrected;
|
1039
|
+
|
1040
|
+
// Apply standard MVP transformation to get clip-space coordinates
|
1041
|
+
gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
|
1042
|
+
|
1043
|
+
// Store clip-space position for fragment shader access
|
1044
|
+
glPosition = gl_Position;
|
1045
|
+
|
1046
|
+
// Pass through texture coordinates for volume sampling
|
1047
|
+
vUv = uv;
|
1048
|
+
}
|
1049
|
+
`;
|
1050
|
+
const volumeFragmentShader = `//
|
1051
|
+
// #include <packing>
|
1052
|
+
precision highp float;
|
1053
|
+
precision highp int;
|
1054
|
+
precision highp sampler3D;
|
1055
|
+
precision highp usampler3D;
|
1056
|
+
|
1057
|
+
// ========================================
|
1058
|
+
// INPUT VARIABLES (from vertex shader)
|
1059
|
+
// ========================================
|
1060
|
+
// Unnormalized ray direction from camera
|
1061
|
+
in vec3 rayDirUnnorm;
|
1062
|
+
// Camera position in world space
|
1063
|
+
in vec3 cameraCorrected;
|
1064
|
+
|
1065
|
+
// ========================================
|
1066
|
+
// TEXTURE SAMPLERS
|
1067
|
+
// ========================================
|
1068
|
+
// 3D texture containing cached brick data (2048x2048x128)
|
1069
|
+
// (2048*2048*128)/(32*32*32) = 16,384 bricks can be stored?
|
1070
|
+
uniform sampler3D brickCacheTex;
|
1071
|
+
// 3D texture containing page table entries (brick metadata)
|
1072
|
+
uniform usampler3D pageTableTex;
|
1073
|
+
|
1074
|
+
// ========================================
|
1075
|
+
// RENDERING PARAMETERS/CONSTANTS
|
1076
|
+
// ========================================
|
1077
|
+
// Rendering style: 0=MIP, 1=MinIP, 2=standard volume rendering
|
1078
|
+
uniform int u_renderstyle;
|
1079
|
+
// Global opacity multiplier for volume rendering
|
1080
|
+
uniform float opacity;
|
1081
|
+
|
1082
|
+
// ========================================
|
1083
|
+
// CONTRAST LIMITS (per channel)
|
1084
|
+
// per channel min/max values for value normalization
|
1085
|
+
// ========================================
|
1086
|
+
uniform vec2 clim0;
|
1087
|
+
uniform vec2 clim1;
|
1088
|
+
uniform vec2 clim2;
|
1089
|
+
uniform vec2 clim3;
|
1090
|
+
uniform vec2 clim4;
|
1091
|
+
uniform vec2 clim5;
|
1092
|
+
uniform vec2 clim6;
|
1093
|
+
|
1094
|
+
// ========================================
|
1095
|
+
// CLIPPING PLANES
|
1096
|
+
// e.g., for X-axis clipping: (min_x, max_x) or (-1, -1) if disabled
|
1097
|
+
// ========================================
|
1098
|
+
uniform vec2 xClip;
|
1099
|
+
uniform vec2 yClip;
|
1100
|
+
uniform vec2 zClip;
|
1101
|
+
|
1102
|
+
// ========================================
|
1103
|
+
// CHANNEL COLORS AND OPACITIES
|
1104
|
+
// rgb -- color values, a -- visibility (boolean)
|
1105
|
+
// ========================================
|
1106
|
+
uniform vec4 color0;
|
1107
|
+
uniform vec4 color1;
|
1108
|
+
uniform vec4 color2;
|
1109
|
+
uniform vec4 color3;
|
1110
|
+
uniform vec4 color4;
|
1111
|
+
uniform vec4 color5;
|
1112
|
+
uniform vec4 color6;
|
1113
|
+
|
1114
|
+
// maps colors to physical spaces
|
1115
|
+
uniform int channelMapping[7];
|
1116
|
+
|
1117
|
+
// ========================================
|
1118
|
+
// VOLUME AND RESOLUTION PARAMETERS
|
1119
|
+
// ========================================
|
1120
|
+
// Volume bounding box size in world space
|
1121
|
+
uniform highp vec3 boxSize;
|
1122
|
+
// Rendering resolution level (affects step size)
|
1123
|
+
// stepsize, correlates with resolution
|
1124
|
+
uniform int renderRes;
|
1125
|
+
// Volume dimensions in voxels (x, y, z)
|
1126
|
+
// resolution 0 voxel extents
|
1127
|
+
uniform uvec3 voxelExtents;
|
1128
|
+
// Global resolution range: (min_res, max_res)
|
1129
|
+
// global range of requested resolutions
|
1130
|
+
uniform ivec2 resGlobal;
|
1131
|
+
// Maximum number of active channels
|
1132
|
+
// max number of channels (relevant for the cache statistics)
|
1133
|
+
// between 1 and 7
|
1134
|
+
uniform int maxChannels;
|
1135
|
+
|
1136
|
+
// ========================================
|
1137
|
+
// PER-CHANNEL RESOLUTION RANGES
|
1138
|
+
// per color channel resolution range
|
1139
|
+
// Each channel can have different available resolution levels
|
1140
|
+
// e.g., for Channel 0: (min_res, max_res)
|
1141
|
+
// ========================================
|
1142
|
+
uniform ivec2 res0;
|
1143
|
+
uniform ivec2 res1;
|
1144
|
+
uniform ivec2 res2;
|
1145
|
+
uniform ivec2 res3;
|
1146
|
+
uniform ivec2 res4;
|
1147
|
+
uniform ivec2 res5;
|
1148
|
+
uniform ivec2 res6;
|
1149
|
+
// Channel 7: unused
|
1150
|
+
uniform ivec2 res7;
|
1151
|
+
|
1152
|
+
// ========================================
|
1153
|
+
// LEVEL-OF-DETAIL PARAMETERS
|
1154
|
+
// controls how fast we decrease the resolution
|
1155
|
+
// ========================================
|
1156
|
+
// LOD factor for distance-based resolution selection
|
1157
|
+
uniform float lodFactor;
|
1158
|
+
|
1159
|
+
// ========================================
|
1160
|
+
// ANCHOR POINTS (per resolution level)
|
1161
|
+
// per resolution anchor point for pagetable
|
1162
|
+
// ========================================
|
1163
|
+
// Anchor points define the origin of page table for each resolution level
|
1164
|
+
// Resolution 0 anchor point (highest detail)
|
1165
|
+
uniform uvec3 anchor0;
|
1166
|
+
uniform uvec3 anchor1;
|
1167
|
+
uniform uvec3 anchor2;
|
1168
|
+
uniform uvec3 anchor3;
|
1169
|
+
uniform uvec3 anchor4;
|
1170
|
+
uniform uvec3 anchor5;
|
1171
|
+
uniform uvec3 anchor6;
|
1172
|
+
uniform uvec3 anchor7;
|
1173
|
+
uniform uvec3 anchor8;
|
1174
|
+
uniform uvec3 anchor9;
|
1175
|
+
// Resolution 9 anchor point (lowest detail)
|
1176
|
+
|
1177
|
+
// ========================================
|
1178
|
+
// SCALE FACTORS (per resolution level)
|
1179
|
+
// per resolution downsample factor
|
1180
|
+
// ========================================
|
1181
|
+
// Scale factors determine voxel size at each resolution level
|
1182
|
+
// Resolution 0 scale factors (should be 1,1,1)
|
1183
|
+
uniform vec3 scale0;
|
1184
|
+
uniform vec3 scale1;
|
1185
|
+
uniform vec3 scale2;
|
1186
|
+
uniform vec3 scale3;
|
1187
|
+
uniform vec3 scale4;
|
1188
|
+
uniform vec3 scale5;
|
1189
|
+
uniform vec3 scale6;
|
1190
|
+
uniform vec3 scale7;
|
1191
|
+
uniform vec3 scale8;
|
1192
|
+
uniform vec3 scale9;
|
1193
|
+
// Resolution 9 scale factors
|
1194
|
+
|
1195
|
+
// ========================================
|
1196
|
+
// VARYING VARIABLES (unused but required)
|
1197
|
+
// ========================================
|
1198
|
+
// Fragment position (unused)
|
1199
|
+
varying vec4 glPosition;
|
1200
|
+
// World space coordinates (used for depth only)
|
1201
|
+
varying vec3 worldSpaceCoords;
|
1202
|
+
|
1203
|
+
// ========================================
|
1204
|
+
// OUTPUT VARIABLES (multiple render targets)
|
1205
|
+
// output buffers
|
1206
|
+
// ========================================
|
1207
|
+
// Final rendered color (sRGB)
|
1208
|
+
layout(location = 0) out vec4 gColor;
|
1209
|
+
// Brick loading requests (packed coordinates)
|
1210
|
+
layout(location = 1) out vec4 gRequest;
|
1211
|
+
// Brick usage tracking (for cache management)
|
1212
|
+
layout(location = 2) out vec4 gUsage;
|
1213
|
+
|
1214
|
+
// ========================================
|
1215
|
+
// CONSTANTS
|
1216
|
+
// ========================================
|
1217
|
+
// Size of each brick in voxels (32x32x32)
|
1218
|
+
const float BRICK_SIZE = 32.0;
|
1219
|
+
// Brick cache texture width
|
1220
|
+
const float BRICK_CACHE_SIZE_X = 2048.0;
|
1221
|
+
// Brick cache texture height
|
1222
|
+
const float BRICK_CACHE_SIZE_Y = 2048.0;
|
1223
|
+
// Brick cache texture depth
|
1224
|
+
const float BRICK_CACHE_SIZE_Z = 128.0;
|
1225
|
+
// Number of bricks in X (64)
|
1226
|
+
const float BRICK_CACHE_BRICKS_X = BRICK_CACHE_SIZE_X / BRICK_SIZE;
|
1227
|
+
// Number of bricks in Y (64)
|
1228
|
+
const float BRICK_CACHE_BRICKS_Y = BRICK_CACHE_SIZE_Y / BRICK_SIZE;
|
1229
|
+
// Number of bricks in Z (4)
|
1230
|
+
const float BRICK_CACHE_BRICKS_Z = BRICK_CACHE_SIZE_Z / BRICK_SIZE;
|
1231
|
+
|
1232
|
+
// ========================================
|
1233
|
+
// RAY-VOLUME INTERSECTION
|
1234
|
+
// calculating the intersection of the ray with the bounding box
|
1235
|
+
// ========================================
|
1236
|
+
// Calculates the intersection of a ray with the volume's bounding box
|
1237
|
+
// Returns (entry_time, exit_time) for the ray-box intersection
|
1238
|
+
// Handles clipping planes by adjusting the bounding box
|
1239
|
+
//
|
1240
|
+
// Parameters:
|
1241
|
+
// orig - vec3: Ray origin point in world space
|
1242
|
+
// dir - vec3: Ray direction vector (should be normalized)
|
1243
|
+
//
|
1244
|
+
// Returns:
|
1245
|
+
// vec2: (entry_time, exit_time) where:
|
1246
|
+
// - entry_time: Distance along ray to enter the volume
|
1247
|
+
// - exit_time: Distance along ray to exit the volume
|
1248
|
+
// - If no intersection: entry_time > exit_time
|
1249
|
+
vec2 intersect_hit(vec3 orig, vec3 dir) {
|
1250
|
+
// Start with full volume bounds
|
1251
|
+
vec3 boxMin = vec3(-0.5) * boxSize;
|
1252
|
+
vec3 boxMax = vec3(0.5) * boxSize;
|
1253
|
+
|
1254
|
+
// Apply clipping planes if they're active (xClip.x > -1.0 means active)
|
1255
|
+
if (xClip.x > -1.0) {
|
1256
|
+
boxMin.x = xClip.x - (boxSize.x / 2.0);
|
1257
|
+
if (xClip.y < boxSize.x)
|
1258
|
+
boxMax.x = xClip.y - (boxSize.x / 2.0);
|
1259
|
+
}
|
1260
|
+
if (yClip.x > -1.0) {
|
1261
|
+
boxMin.y = yClip.x - (boxSize.y / 2.0);
|
1262
|
+
if (yClip.y < boxSize.y)
|
1263
|
+
boxMax.y = yClip.y - (boxSize.y / 2.0);
|
1264
|
+
}
|
1265
|
+
if (zClip.x > -1.0) {
|
1266
|
+
boxMin.z = zClip.x - (boxSize.z / 2.0);
|
1267
|
+
if (zClip.y < boxSize.z)
|
1268
|
+
boxMax.z = zClip.y - (boxSize.z / 2.0);
|
1269
|
+
}
|
1270
|
+
|
1271
|
+
// Standard ray-box intersection algorithm
|
1272
|
+
vec3 invDir = 1.0 / dir;
|
1273
|
+
vec3 tmin0 = (boxMin - orig) * invDir;
|
1274
|
+
vec3 tmax0 = (boxMax - orig) * invDir;
|
1275
|
+
vec3 tmin = min(tmin0, tmax0);
|
1276
|
+
vec3 tmax = max(tmin0, tmax0);
|
1277
|
+
float t0 = max(tmin.x, max(tmin.y, tmin.z)); // Entry time
|
1278
|
+
float t1 = min(tmax.x, min(tmax.y, tmax.z)); // Exit time
|
1279
|
+
return vec2(t0, t1);
|
1280
|
+
}
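// Illustrative worked example (all values assumed for the example only, not from the dataset):
// with boxSize = (1,1,1), no clipping, orig = (-1,-1,-1) and dir = normalize(vec3(1.0)):
//   boxMin = (-0.5,-0.5,-0.5), boxMax = (0.5,0.5,0.5), invDir ~= (1.732, 1.732, 1.732)
//   tmin = (0.866, 0.866, 0.866), tmax = (2.598, 2.598, 2.598)
//   returns vec2(0.866, 2.598): the ray enters the box after ~0.87 units and exits after ~2.60 units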
|
1281
|
+
|
1282
|
+
// ========================================
|
1283
|
+
// UTILITY FUNCTIONS
|
1284
|
+
// ========================================
|
1285
|
+
|
1286
|
+
// Pseudo-random number generator for jittered sampling
|
1287
|
+
// random number generator based on the uv coordinate
|
1288
|
+
// Author @patriciogv - 2015
|
1289
|
+
// http://patriciogonzalezvivo.com
|
1290
|
+
//
|
1291
|
+
// Parameters:
|
1292
|
+
// None (uses gl_FragCoord.xy as input)
|
1293
|
+
//
|
1294
|
+
// Returns:
|
1295
|
+
// float: Random value between 0.0 and 1.0 based on fragment coordinates
|
1296
|
+
float random() {
|
1297
|
+
return fract(sin(dot(gl_FragCoord.xy, vec2(12.9898,78.233)))* 43758.5453123);
|
1298
|
+
}
|
1299
|
+
|
1300
|
+
// Convert from linear RGB to sRGB color space
|
1301
|
+
// Implements the standard sRGB transfer function for gamma correction
|
1302
|
+
//
|
1303
|
+
// Parameters:
|
1304
|
+
// x - float: Linear RGB value between 0.0 and 1.0
|
1305
|
+
//
|
1306
|
+
// Returns:
|
1307
|
+
// float: sRGB value between 0.0 and 1.0
|
1308
|
+
float linear_to_srgb(float x) {
|
1309
|
+
if (x <= 0.0031308f) {
|
1310
|
+
return 12.92f * x;
|
1311
|
+
}
|
1312
|
+
return 1.055f * pow(x, 1.f / 2.4f) - 0.055f;
|
1313
|
+
}
|
1314
|
+
|
1315
|
+
// Convert from linear RGB to sRGB color space (vector version)
|
1316
|
+
// Applies sRGB conversion to each RGB component while preserving alpha
|
1317
|
+
//
|
1318
|
+
// Parameters:
|
1319
|
+
// x - vec4: Linear RGBA color with components between 0.0 and 1.0
|
1320
|
+
//
|
1321
|
+
// Returns:
|
1322
|
+
// vec4: sRGB RGBA color with components between 0.0 and 1.0
|
1323
|
+
vec4 linear_to_srgb(vec4 x) {
|
1324
|
+
return vec4(linear_to_srgb(x.r), linear_to_srgb(x.g), linear_to_srgb(x.b), x.a);
|
1325
|
+
}
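// Illustrative example (computed here, not part of the original source): a linear value of 0.5
// maps to 1.055 * pow(0.5, 1.0/2.4) - 0.055 ~= 0.7354, while values at or below 0.0031308 stay
// on the linear segment (e.g. 0.002 -> 12.92 * 0.002 = 0.02584).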
|
1326
|
+
|
1327
|
+
// ========================================
|
1328
|
+
// PAGE TABLE COORDINATE PACKING
|
1329
|
+
// transform the pagetable coordinate into a RGBA8 value
|
1330
|
+
// ========================================
|
1331
|
+
// Packs 3D page table coordinates into RGBA8 texture format
|
1332
|
+
// Uses 10 bits for X, 10 bits for Y, 12 bits for Z
|
1333
|
+
//
|
1334
|
+
// Parameters:
|
1335
|
+
// coord - uvec3: 3D coordinates to pack (X, Y, Z components)
|
1336
|
+
// - X coordinate: 10-bit unsigned integer (0-1023)
|
1337
|
+
// - Y coordinate: 10-bit unsigned integer (0-1023)
|
1338
|
+
// - Z coordinate: 12-bit unsigned integer (0-4095)
|
1339
|
+
//
|
1340
|
+
// Returns:
|
1341
|
+
// vec4: RGBA8 encoded coordinates with components between 0.0 and 1.0
|
1342
|
+
// - R: Upper 8 bits of packed 32-bit value
|
1343
|
+
// - G: Middle-upper 8 bits of packed 32-bit value
|
1344
|
+
// - B: Middle-lower 8 bits of packed 32-bit value
|
1345
|
+
// - A: Lower 8 bits of packed 32-bit value
|
1346
|
+
vec4 packPTCoordToRGBA8(uvec3 coord) {
|
1347
|
+
|
1348
|
+
uint x = coord.x & 0x3FFu; // 10 bits for X coordinate
|
1349
|
+
uint y = coord.y & 0x3FFu; // 10 bits for Y coordinate
|
1350
|
+
uint z = coord.z & 0xFFFu; // 12 bits for Z coordinate
|
1351
|
+
|
1352
|
+
// Pack into 32-bit integer
|
1353
|
+
uint packed =
|
1354
|
+
(x << 22u) |
|
1355
|
+
(y << 12u) |
|
1356
|
+
(z);
|
1357
|
+
|
1358
|
+
// Decompose into RGBA8 format
|
1359
|
+
return vec4(
|
1360
|
+
float((packed >> 24u) & 0xFFu) / 255.0,
|
1361
|
+
float((packed >> 16u) & 0xFFu) / 255.0,
|
1362
|
+
float((packed >> 8u) & 0xFFu) / 255.0,
|
1363
|
+
float(packed & 0xFFu) / 255.0
|
1364
|
+
);
|
1365
|
+
}
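// Illustrative worked example (coordinate chosen for the example only):
//   coord  = uvec3(3u, 5u, 70u)
//   packed = (3u << 22u) | (5u << 12u) | 70u = 12582912u + 20480u + 70u = 12603462u
//   bytes  = (0, 192, 80, 70) -> returns vec4(0.0, 192.0/255.0, 80.0/255.0, 70.0/255.0)
// The consumer of gRequest can reassemble the 32-bit value from these four bytes and re-apply
// the 10/10/12-bit masks to recover the page table coordinate.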
|
1366
|
+
|
1367
|
+
// ========================================
|
1368
|
+
// RESOLUTION AND ANCHOR POINT ACCESSORS
|
1369
|
+
// ========================================
|
1370
|
+
|
1371
|
+
// Get anchor point for a specific resolution level
|
1372
|
+
// Anchor points define the origin of the page table for each resolution
|
1373
|
+
//
|
1374
|
+
// Parameters:
|
1375
|
+
// index - int: Resolution level index (0-9, where 0 is highest resolution)
|
1376
|
+
//
|
1377
|
+
// Returns:
|
1378
|
+
// uvec3: 3D anchor point coordinates in the page table, or (-1, -1, -1) if invalid
|
1379
|
+
uvec3 getAnchorPoint(int index) {
|
1380
|
+
if (index == 0) return anchor0;
|
1381
|
+
if (index == 1) return anchor1;
|
1382
|
+
if (index == 2) return anchor2;
|
1383
|
+
if (index == 3) return anchor3;
|
1384
|
+
if (index == 4) return anchor4;
|
1385
|
+
if (index == 5) return anchor5;
|
1386
|
+
if (index == 6) return anchor6;
|
1387
|
+
if (index == 7) return anchor7;
|
1388
|
+
if (index == 8) return anchor8;
|
1389
|
+
if (index == 9) return anchor9;
|
1390
|
+
return uvec3(-1, -1, -1);
|
1391
|
+
}
|
1392
|
+
|
1393
|
+
// Find the lowest available resolution level
|
1394
|
+
// Returns the largest resolution index (lowest level of detail) whose anchor point is non-zero
|
1395
|
+
//
|
1396
|
+
// Parameters:
|
1397
|
+
// None
|
1398
|
+
//
|
1399
|
+
// Returns:
|
1400
|
+
// int: Highest resolution level index (0-9) that has valid anchor point data
|
1401
|
+
// Returns 9 when every level has a non-zero anchor point
|
1402
|
+
int getLowestRes() {
|
1403
|
+
for (int i = 0; i < 10; i++) {
|
1404
|
+
if (getAnchorPoint(i) == uvec3(0,0,0)) {
|
1405
|
+
return i - 1;
|
1406
|
+
}
|
1407
|
+
}
|
1408
|
+
return 9;
|
1409
|
+
}
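// Illustrative example of the scan above (anchor values assumed for the example only):
// if anchor0..anchor4 are non-zero and anchor5 == uvec3(0,0,0), the loop stops at i = 5 and
// returns 4; if every anchor0..anchor9 is non-zero, the fallback value 9 is returned.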
|
1410
|
+
|
1411
|
+
// Get the downsample factor for a resolution level
|
1412
|
+
// Scale factors determine the voxel size at each resolution
|
1413
|
+
//
|
1414
|
+
// Parameters:
|
1415
|
+
// index - int: Resolution level index (0-9, where 0 is highest resolution)
|
1416
|
+
//
|
1417
|
+
// Returns:
|
1418
|
+
// vec3: Scale factors (x, y, z) for the resolution level, or (-1, -1, -1) if invalid
|
1419
|
+
// Higher scale factors indicate larger voxels (lower resolution)
|
1420
|
+
vec3 getScale(int index) {
|
1421
|
+
if (index == 0) return scale0;
|
1422
|
+
if (index == 1) return scale1;
|
1423
|
+
if (index == 2) return scale2;
|
1424
|
+
if (index == 3) return scale3;
|
1425
|
+
if (index == 4) return scale4;
|
1426
|
+
if (index == 5) return scale5;
|
1427
|
+
if (index == 6) return scale6;
|
1428
|
+
if (index == 7) return scale7;
|
1429
|
+
if (index == 8) return scale8;
|
1430
|
+
if (index == 9) return scale9;
|
1431
|
+
return vec3(-1.0, -1.0, -1.0);
|
1432
|
+
}
|
1433
|
+
|
1434
|
+
// Get the resolution range for a color channel
|
1435
|
+
// Returns (min_res, max_res) for the channel
|
1436
|
+
//
|
1437
|
+
// Parameters:
|
1438
|
+
// index - int: Channel index (0-6)
|
1439
|
+
//
|
1440
|
+
// Returns:
|
1441
|
+
// ivec2: Resolution range as (min_resolution_level, max_resolution_level)
|
1442
|
+
// Returns (-1, -1) if channel index is invalid
|
1443
|
+
ivec2 getRes(int index) {
|
1444
|
+
if (index == 0) return res0;
|
1445
|
+
if (index == 1) return res1;
|
1446
|
+
if (index == 2) return res2;
|
1447
|
+
if (index == 3) return res3;
|
1448
|
+
if (index == 4) return res4;
|
1449
|
+
if (index == 5) return res5;
|
1450
|
+
if (index == 6) return res6;
|
1451
|
+
return ivec2(-1, -1);
|
1452
|
+
}
|
1453
|
+
|
1454
|
+
// Get the min/max values (contrast limits) for a color channel
|
1455
|
+
// Returns (min_value, max_value) for normalization
|
1456
|
+
//
|
1457
|
+
// Parameters:
|
1458
|
+
// index - int: Channel index (0-6)
|
1459
|
+
//
|
1460
|
+
// Returns:
|
1461
|
+
// vec2: Contrast limits as (min_value, max_value) for data normalization
|
1462
|
+
// Returns (-1.0, -1.0) if channel index is invalid
|
1463
|
+
vec2 getClim(int index) {
|
1464
|
+
if (index == 0) return clim0;
|
1465
|
+
if (index == 1) return clim1;
|
1466
|
+
if (index == 2) return clim2;
|
1467
|
+
if (index == 3) return clim3;
|
1468
|
+
if (index == 4) return clim4;
|
1469
|
+
if (index == 5) return clim5;
|
1470
|
+
if (index == 6) return clim6;
|
1471
|
+
return vec2(-1.0, -1.0);
|
1472
|
+
}
|
1473
|
+
|
1474
|
+
// ========================================
|
1475
|
+
// COORDINATE TRANSFORMATIONS
|
1476
|
+
// ========================================
|
1477
|
+
|
1478
|
+
// Convert normalized coordinates (0-1) to voxel coordinates.
|
1479
|
+
// get the voxel coordinate in the specified resolution from the normalized coordinate
|
1480
|
+
//
|
1481
|
+
// Parameters:
|
1482
|
+
// normalized - vec3: Normalized coordinates in range [0,1] for each axis
|
1483
|
+
// res - int: Resolution level (0=highest detail, 9=lowest detail)
|
1484
|
+
//
|
1485
|
+
// Returns:
|
1486
|
+
// vec3: Voxel coordinates in the volume space at the specified resolution
|
1487
|
+
vec3 getVoxelFromNormalized(vec3 normalized, int res) {
|
1488
|
+
vec3 extents = (vec3(voxelExtents) / getScale(res)); // Voxel extents at this resolution
|
1489
|
+
vec3 voxel = normalized * extents;
|
1490
|
+
return voxel;
|
1491
|
+
}
|
1492
|
+
|
1493
|
+
// Convert voxel coordinates to normalized coordinates (0-1)
|
1494
|
+
// get the normalized coordinate based on the voxel coordinate in the specified resolution
|
1495
|
+
//
|
1496
|
+
// Parameters:
|
1497
|
+
// voxel - vec3: Voxel coordinates in the volume space
|
1498
|
+
// res - int: Resolution level (0=highest detail, 9=lowest detail)
|
1499
|
+
//
|
1500
|
+
// Returns:
|
1501
|
+
// vec3: Normalized coordinates in range [0,1] for each axis
|
1502
|
+
vec3 getNormalizedFromVoxel(vec3 voxel, int res) {
|
1503
|
+
vec3 extents = (vec3(voxelExtents) / getScale(res)); // Voxel extents at this resolution
|
1504
|
+
vec3 normalized = voxel / extents;
|
1505
|
+
return normalized;
|
1506
|
+
}
|
1507
|
+
|
1508
|
+
// Convert normalized coordinates to brick coordinates.
|
1509
|
+
// get the brick coordinate in the specified resolution based on the normalized coordinate
|
1510
|
+
// needed for pagetable calculations
|
1511
|
+
//
|
1512
|
+
// Parameters:
|
1513
|
+
// normalized - vec3: Normalized coordinates in range [0,1] for each axis
|
1514
|
+
// res - int: Resolution level (0=highest detail, 9=lowest detail)
|
1515
|
+
//
|
1516
|
+
// Returns:
|
1517
|
+
// vec3: Brick coordinates (each brick is 32x32x32 voxels)
|
1518
|
+
vec3 getBrickFromNormalized(vec3 normalized, int res) {
|
1519
|
+
vec3 voxel = getVoxelFromNormalized(normalized, res);
|
1520
|
+
vec3 brick = floor(voxel / 32.0); // Each brick is 32x32x32 voxels
|
1521
|
+
return brick;
|
1522
|
+
}
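// Illustrative worked example (voxelExtents and scale values assumed for the example only):
//   voxelExtents = (1024, 1024, 256), getScale(1) = (2, 2, 2) -> extents = (512, 512, 128)
//   normalized = (0.5, 0.25, 0.75) -> voxel = (256, 128, 96) -> brick = floor(voxel / 32.0) = (8, 4, 3)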
|
1523
|
+
|
1524
|
+
// Convert voxel coordinates to brick coordinates.
|
1525
|
+
// get the brick coordinate in the specified resolution based on the voxel coordinate
|
1526
|
+
//
|
1527
|
+
// Parameters:
|
1528
|
+
// voxel - vec3: Voxel coordinates in the volume space
|
1529
|
+
// res - int: Resolution level (0=highest detail, 9=lowest detail)
|
1530
|
+
//
|
1531
|
+
// Returns:
|
1532
|
+
// vec3: Brick coordinates (each brick is 32x32x32 voxels)
|
1533
|
+
vec3 getBrickFromVoxel(vec3 voxel, int res) {
|
1534
|
+
vec3 brick = floor(voxel / 32.0); // Each brick is 32x32x32 voxels
|
1535
|
+
return brick;
|
1536
|
+
}
|
1537
|
+
|
1538
|
+
// ========================================
|
1539
|
+
// CHANNEL-SPECIFIC ACCESSORS
|
1540
|
+
// ========================================
|
1541
|
+
|
1542
|
+
// Get channel offset in page table.
|
1543
|
+
// get the vector for the specified channel slot in the pagetable
|
1544
|
+
// Different channels are stored at different Z-offsets in the page table
|
1545
|
+
//
|
1546
|
+
// Parameters:
|
1547
|
+
// index - int: Channel index (0-6)
|
1548
|
+
//
|
1549
|
+
// Returns:
|
1550
|
+
// uvec3: 3D offset coordinates for the channel in the page table
|
1551
|
+
uvec3 getChannelOffset(int index) {
|
1552
|
+
if (index == 0) return uvec3(0, 0, 1);
|
1553
|
+
if (index == 1) return uvec3(0, 1, 0);
|
1554
|
+
if (index == 2) return uvec3(0, 1, 1);
|
1555
|
+
if (index == 3) return uvec3(1, 0, 0);
|
1556
|
+
if (index == 4) return uvec3(1, 0, 1);
|
1557
|
+
if (index == 5) return uvec3(1, 1, 0);
|
1558
|
+
if (index == 6) return uvec3(1, 1, 1);
|
1559
|
+
return uvec3(0, 0, 0);
|
1560
|
+
}
|
1561
|
+
|
1562
|
+
// Get color for a channel.
|
1563
|
+
// get the color per color channel
|
1564
|
+
//
|
1565
|
+
// Parameters:
|
1566
|
+
// index - int: Channel index (0-6)
|
1567
|
+
//
|
1568
|
+
// Returns:
|
1569
|
+
// vec3: RGB color values for the specified channel
|
1570
|
+
vec3 getChannelColor(int index) {
|
1571
|
+
if (index == 0) return color0.xyz;
|
1572
|
+
if (index == 1) return color1.xyz;
|
1573
|
+
if (index == 2) return color2.xyz;
|
1574
|
+
if (index == 3) return color3.xyz;
|
1575
|
+
if (index == 4) return color4.xyz;
|
1576
|
+
if (index == 5) return color5.xyz;
|
1577
|
+
if (index == 6) return color6.xyz;
|
1578
|
+
return vec3(0.0, 0.0, 0.0);
|
1579
|
+
}
|
1580
|
+
|
1581
|
+
// Get opacity for a channel
|
1582
|
+
// get the opacity (used as visibility) per color channel
|
1583
|
+
//
|
1584
|
+
// Parameters:
|
1585
|
+
// index - int: Channel index (0-6)
|
1586
|
+
//
|
1587
|
+
// Returns:
|
1588
|
+
// float: Opacity value (0.0-1.0) for the specified channel
|
1589
|
+
float getChannelOpacity(int index) {
|
1590
|
+
if (index == 0) return color0.w;
|
1591
|
+
if (index == 1) return color1.w;
|
1592
|
+
if (index == 2) return color2.w;
|
1593
|
+
if (index == 3) return color3.w;
|
1594
|
+
if (index == 4) return color4.w;
|
1595
|
+
if (index == 5) return color5.w;
|
1596
|
+
if (index == 6) return color6.w;
|
1597
|
+
return 0.0;
|
1598
|
+
}
|
1599
|
+
|
1600
|
+
// ========================================
|
1601
|
+
// PAGE TABLE DECODING
|
1602
|
+
// ========================================
|
1603
|
+
|
1604
|
+
/**
|
1605
|
+
* retrieving the brick based on:
|
1606
|
+
* location -- normalized coordinate
|
1607
|
+
* targetRes -- target resolution
|
1608
|
+
* channel -- physical channel slot
|
1609
|
+
* rnd -- random number for jittering requests
|
1610
|
+
* query -- whether to query the brick (we don't query for interblock interpolation)
|
1611
|
+
* colorIndex -- color index for querying the min max values
|
1612
|
+
*
|
1613
|
+
* returns:
|
1614
|
+
* w >= 0 -- xyz contains brick cache coordinate, w stores resolution
|
1615
|
+
* w == -1 -- not resident in any resolution, should be treated as empty
|
1616
|
+
* w == -2 -- empty (with respect to current transfer function)
|
1617
|
+
* w == -3 -- constant full (with respect to current transfer function)
|
1618
|
+
* w == -4 -- constant value within range, x stores that value
|
1619
|
+
*
|
1620
|
+
* bit layout:
|
1621
|
+
* [1] 31 | 0 — flag resident
|
1622
|
+
* [1] 30 | 1 — flag init
|
1623
|
+
* [7] 23…29 | 2…8 — min → 128
|
1624
|
+
* [7] 16…22 | 9…15 — max → 128
|
1625
|
+
* [6] 10…15 | 16…21 — x offset in brick cache → max 64
|
1626
|
+
* [6] 4…9 | 22…27 — y offset in brick cache → max 64
|
1627
|
+
* [4] 0…3 | 28…31 — z offset in brick cache → max 16, effectively 4
|
1628
|
+
*/
|
1629
|
+
|
1630
|
+
/*
|
1631
|
+
Page table entry format (32 bits):
|
1632
|
+
[31] | 0 — flag resident (1=loaded in cache)
|
1633
|
+
[30] | 1 — flag init (1=initialized)
|
1634
|
+
[29:23] | 2…8 — min value (7 bits) → 128 levels
|
1635
|
+
[22:16] | 9…15 — max value (7 bits) → 128 levels
|
1636
|
+
[15:10] | 16…21 — x offset in brick cache (6 bits) → 64 bricks
|
1637
|
+
[9:4] | 22…27 — y offset in brick cache (6 bits) → 64 bricks
|
1638
|
+
[3:0] | 28…31 — z offset in brick cache (4 bits) → 16 bricks
|
1639
|
+
*/
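// Illustrative decode of one entry (value chosen for the example only):
//   ptEntry  = 0xC5643223u
//   resident = (ptEntry >> 31u) & 1u = 1,  init = (ptEntry >> 30u) & 1u = 1
//   min      = ((ptEntry >> 23u) & 0x7Fu) / 127.0 = 10.0 / 127.0 ~= 0.079
//   max      = ((ptEntry >> 16u) & 0x7Fu) / 127.0 = 100.0 / 127.0 ~= 0.787
//   brick cache offset = ((ptEntry >> 10u) & 0x3Fu, (ptEntry >> 4u) & 0x3Fu, ptEntry & 0xFu) = (12, 34, 3)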
|
1640
|
+
|
1641
|
+
// Query page table to find brick location and status
|
1642
|
+
// Searches for a brick at the specified location across multiple resolution levels
|
1643
|
+
// and returns its cache coordinates and status information
|
1644
|
+
//
|
1645
|
+
// Parameters:
|
1646
|
+
// location - vec3: Normalized coordinates (0-1) within the volume
|
1647
|
+
// targetRes - int: Target resolution level to start searching from
|
1648
|
+
// channel - int: Channel index (0-6) to query
|
1649
|
+
// rnd - float: Random value (0-1) used for brick loading request selection
|
1650
|
+
// query - bool: Whether to allow brick loading requests (true) or just query (false)
|
1651
|
+
// colorIndex - int: Color channel index (0-6), used to look up the contrast limits (clim)
|
1652
|
+
//
|
1653
|
+
// Returns:
|
1654
|
+
// ivec4: (x_offset, y_offset, z_offset, status) where:
|
1655
|
+
// - x_offset, y_offset, z_offset: Brick cache coordinates if found
|
1656
|
+
// - status: Resolution level (>=0) if found, or status code:
|
1657
|
+
// * -1: Not found at any resolution level
|
1658
|
+
// * -2: Empty brick (all values below threshold)
|
1659
|
+
// * -3: Constant full brick (all values above threshold)
|
1660
|
+
// * -4: Constant value brick (uniform value)
|
1661
|
+
// add maxres here
|
1662
|
+
ivec4 getBrickLocation(vec3 location, int targetRes, int channel, float rnd, bool query, int colorIndex) {
|
1663
|
+
|
1664
|
+
// min max for current color
|
1665
|
+
vec2 clim = getClim(colorIndex);
|
1666
|
+
|
1667
|
+
// resolution ranges, TODO: connect this back to color
|
1668
|
+
int channelMin = getRes(channel).x;
|
1669
|
+
int channelMax = getRes(channel).y;
|
1670
|
+
|
1671
|
+
// Clamp resolution to channel's available range
|
1672
|
+
int currentRes = clamp(targetRes, channelMin, channelMax);
|
1673
|
+
currentRes = clamp(currentRes, resGlobal.x, resGlobal.y);
|
1674
|
+
int lowestRes = clamp(resGlobal.y, channelMin, channelMax);
|
1675
|
+
|
1676
|
+
// Determine if this channel should request brick loading.
|
1677
|
+
// request the current channel based on probability
|
1678
|
+
bool requestChannel = false;
|
1679
|
+
if (int(floor(rnd * float(maxChannels))) == colorIndex) {
|
1680
|
+
requestChannel = true;
|
1681
|
+
}
|
1682
|
+
|
1683
|
+
// Try progressively lower resolutions until we find data.
|
1684
|
+
// loop through resolutions
|
1685
|
+
while (currentRes <= lowestRes) {
|
1686
|
+
|
1687
|
+
// Calculate page table coordinates for this brick
|
1688
|
+
uvec3 anchorPoint = getAnchorPoint(currentRes);
|
1689
|
+
vec3 brickLocation = getBrickFromNormalized(location, currentRes);
|
1690
|
+
uvec3 channelOffset = getChannelOffset(channel);
|
1691
|
+
vec3 coordinate = floor(vec3(anchorPoint * channelOffset)) + brickLocation;
|
1692
|
+
|
1693
|
+
// Special handling for resolution 0 (highest detail)
|
1694
|
+
if (currentRes == 0) {
|
1695
|
+
int zExtent = int(ceil(float(voxelExtents.z) / 32.0));
|
1696
|
+
coordinate = vec3(anchorPoint) + vec3(0.0, 0.0, zExtent * channel) + brickLocation;
|
1697
|
+
}
|
1698
|
+
|
1699
|
+
// Query the page table.
|
1700
|
+
// get PT entry
|
1701
|
+
uint ptEntry = texelFetch(pageTableTex, ivec3(coordinate), 0).r;
|
1702
|
+
|
1703
|
+
// Check if brick is initialized.
|
1704
|
+
// check if the PT entry is initialized
|
1705
|
+
uint isInit = (ptEntry >> 30u) & 1u;
|
1706
|
+
if (isInit == 0u) {
|
1707
|
+
currentRes++;
|
1708
|
+
// Request brick loading if needed
|
1709
|
+
if (requestChannel == true && (gRequest.a + gRequest.b + gRequest.g + gRequest.r == 0.0) && query == true) {
|
1710
|
+
gRequest = packPTCoordToRGBA8(uvec3(coordinate));
|
1711
|
+
}
|
1712
|
+
continue;
|
1713
|
+
}
|
1714
|
+
|
1715
|
+
// Extract min/max values from page table entry.
|
1716
|
+
// get the min max values of the brick
|
1717
|
+
uint umin = ((ptEntry >> 23u) & 0x7Fu);
|
1718
|
+
uint umax = ((ptEntry >> 16u) & 0x7Fu);
|
1719
|
+
float min = float(int(umin)) / 127.0;
|
1720
|
+
float max = float(int(umax)) / 127.0;
|
1721
|
+
|
1722
|
+
// Check if brick is empty (all values below threshold).
|
1723
|
+
// exit early if brick is constant
|
1724
|
+
if (float(max) <= clim.x) {
|
1725
|
+
return ivec4(0,0,0,-2);
|
1726
|
+
// EMPTY
|
1727
|
+
} else if (float(min) >= clim.y) {
|
1728
|
+
return ivec4(0,0,0,-3); // CONSTANT FULL
|
1729
|
+
} else if ((umax - umin) < 2u) {
|
1730
|
+
return ivec4(min,0,0,-4); // CONSTANT OTHER VALUE
|
1731
|
+
}
|
1732
|
+
|
1733
|
+
// Check if brick is resident in cache.
|
1734
|
+
// return brick cache location if resident
|
1735
|
+
// continue to next resolution if not resident
|
1736
|
+
uint isResident = (ptEntry >> 31u) & 1u;
|
1737
|
+
if (isResident == 0u) {
|
1738
|
+
currentRes++;
|
1739
|
+
// Request brick loading if needed
|
1740
|
+
if (requestChannel == true && (gRequest.a + gRequest.b + gRequest.g + gRequest.r == 0.0) && query == true) {
|
1741
|
+
gRequest = packPTCoordToRGBA8(uvec3(coordinate));
|
1742
|
+
}
|
1743
|
+
continue;
|
1744
|
+
} else {
|
1745
|
+
// Extract brick cache coordinates
|
1746
|
+
uint xBrickCache = (ptEntry >> 10u) & 0x3Fu;
|
1747
|
+
uint yBrickCache = (ptEntry >> 4u) & 0x3Fu;
|
1748
|
+
uint zBrickCache = ptEntry & 0xFu;
|
1749
|
+
uvec3 brickCacheCoord = uvec3(xBrickCache, yBrickCache, zBrickCache);
|
1750
|
+
|
1751
|
+
return ivec4(brickCacheCoord, currentRes);
|
1752
|
+
}
|
1753
|
+
}
|
1754
|
+
|
1755
|
+
// not resident in any resolution, should be treated as empty
|
1756
|
+
return ivec4(0,0,0,-1); // Not found
|
1757
|
+
}
|
1758
|
+
|
1759
|
+
// Request brick loading for a specific location and resolution.
|
1760
|
+
// Initiates a request to load a brick from disk into the brick cache.
|
1761
|
+
// set the brick request for the specified slot channel
|
1762
|
+
//
|
1763
|
+
// Parameters:
|
1764
|
+
// location - vec3: Normalized world space coordinates (0.0 to 1.0) where the brick is needed
|
1765
|
+
// targetRes - int: Target resolution level (0-9, where 0 is highest resolution)
|
1766
|
+
// channel - int: Channel index (0-6) for multi-channel datasets
|
1767
|
+
// rnd - float: Random value between 0.0 and 1.0 used for load balancing across channels
|
1768
|
+
//
|
1769
|
+
// Returns:
|
1770
|
+
// void: No return value, but sets gRequest output variable if conditions are met
|
1771
|
+
void setBrickRequest(vec3 location, int targetRes, int channel, float rnd) {
|
1772
|
+
uvec3 anchorPoint = getAnchorPoint(targetRes);
|
1773
|
+
vec3 brickLocation = getBrickFromNormalized(location, targetRes);
|
1774
|
+
uvec3 channelOffset = getChannelOffset(channel);
|
1775
|
+
vec3 coordinate = floor(vec3(anchorPoint * channelOffset)) + brickLocation;
|
1776
|
+
|
1777
|
+
// Special handling for resolution 0
|
1778
|
+
if (targetRes == 0) {
|
1779
|
+
int zExtent = int(ceil(float(voxelExtents.z) / 32.0));
|
1780
|
+
coordinate = vec3(anchorPoint) + vec3(0.0, 0.0, zExtent * channel) + brickLocation;
|
1781
|
+
}
|
1782
|
+
|
1783
|
+
// Pack coordinates and set request
|
1784
|
+
if (int(floor(rnd * float(maxChannels))) == channel) {
|
1785
|
+
gRequest = packPTCoordToRGBA8(uvec3(coordinate));
|
1786
|
+
}
|
1787
|
+
}
|
1788
|
+
|
1789
|
+
// Track brick usage for cache management.
|
1790
|
+
// Records which brick in the cache is being accessed for LRU (Least Recently Used) eviction.
|
1791
|
+
// set the usage for the specified brick
|
1792
|
+
//
|
1793
|
+
// Parameters:
|
1794
|
+
// brickCacheOffset - ivec3: 3D coordinates of the brick within the brick cache texture
|
1795
|
+
// t_hit_min_os - float: Ray entry time in object space (start of ray-volume intersection)
|
1796
|
+
// t_hit_max_os - float: Ray exit time in object space (end of ray-volume intersection)
|
1797
|
+
// t_os - float: Current sampling position along the ray in object space
|
1798
|
+
// rnd - float: Random value between 0.0 and 1.0 used for probabilistic usage tracking
|
1799
|
+
//
|
1800
|
+
// Returns:
|
1801
|
+
// void: No return value, but sets gUsage output variable to track cache access patterns
|
1802
|
+
void setUsage(ivec3 brickCacheOffset, float t_hit_min_os, float t_hit_max_os, float t_os, float rnd) {
|
1803
|
+
float normalized_t_os = (t_os - t_hit_min_os) / (t_hit_max_os - t_hit_min_os); // Normalize to 0-1
|
1804
|
+
if (normalized_t_os <= rnd || gUsage == vec4(0.0, 0.0, 0.0, 0.0)) {
|
1805
|
+
gUsage = vec4(vec3(brickCacheOffset) / 255.0, 1.0);
|
1806
|
+
}
|
1807
|
+
}
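// Illustrative example (numbers assumed for the example only): with t_hit_min_os = 0.10,
// t_hit_max_os = 0.50 and t_os = 0.20, normalized_t_os = 0.25; a draw of rnd = 0.4 satisfies
// 0.25 <= 0.4, so gUsage is written as vec4(brickCacheOffset / 255.0, 1.0) for this fragment.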
|
1808
|
+
|
1809
|
+
// ========================================
|
1810
|
+
// UTILITY FUNCTIONS
|
1811
|
+
// ========================================
|
1812
|
+
|
1813
|
+
// Get maximum component of a 3D vector.
|
1814
|
+
// get the max value of a vec3
|
1815
|
+
//
|
1816
|
+
// Parameters:
|
1817
|
+
// v - vec3: Input 3D vector
|
1818
|
+
//
|
1819
|
+
// Returns:
|
1820
|
+
// float: The maximum value among the x, y, and z components
|
1821
|
+
float vec3_max(vec3 v) {
|
1822
|
+
return max(v.x, max(v.y, v.z));
|
1823
|
+
}
|
1824
|
+
|
1825
|
+
// Get minimum component of a 3D vector.
|
1826
|
+
// get the min value of a vec3
|
1827
|
+
//
|
1828
|
+
// Parameters:
|
1829
|
+
// v - vec3: Input 3D vector
|
1830
|
+
//
|
1831
|
+
// Returns:
|
1832
|
+
// float: The minimum value among the x, y, and z components
|
1833
|
+
float vec3_min(vec3 v) {
|
1834
|
+
return min(v.x, min(v.y, v.z));
|
1835
|
+
}
|
1836
|
+
|
1837
|
+
// Calculate level-of-detail based on distance.
|
1838
|
+
// Uses logarithmic scaling to determine appropriate resolution level.
|
1839
|
+
// get the LOD based on the distance to the camera
|
1840
|
+
//
|
1841
|
+
// Parameters:
|
1842
|
+
// distance - float: Distance from camera to sampling point
|
1843
|
+
// highestRes - int: Highest available resolution level (typically 0)
|
1844
|
+
// lowestRes - int: Lowest available resolution level (typically 9)
|
1845
|
+
// lodFactor - float: Scaling factor that controls LOD sensitivity
|
1846
|
+
//
|
1847
|
+
// Returns:
|
1848
|
+
// int: Resolution level index (0-9) where 0 is highest resolution
|
1849
|
+
int getLOD(float distance, int highestRes, int lowestRes, float lodFactor) {
|
1850
|
+
int lod = int(log2(distance * lodFactor));
|
1851
|
+
return clamp(lod, highestRes, lowestRes);
|
1852
|
+
}
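// Illustrative example (numbers assumed for the example only): with lodFactor = 2.0, a sample
// 3 units from the camera gives int(log2(3.0 * 2.0)) = int(2.585) = 2 (clamped to
// [highestRes, lowestRes]); doubling the distance to 6 units raises the result to 3, one level coarser.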
|
1853
|
+
|
1854
|
+
// Calculate step size for ray marching at a given resolution.
|
1855
|
+
// Determines optimal sampling step size based on voxel dimensions and ray direction.
|
1856
|
+
// get the voxel step in object space
|
1857
|
+
//
|
1858
|
+
// Parameters:
|
1859
|
+
// res - int: Resolution level index (0-9, where 0 is highest resolution)
|
1860
|
+
// osDir - vec3: Ray direction vector in object space (should be normalized)
|
1861
|
+
//
|
1862
|
+
// Returns:
|
1863
|
+
// float: Optimal step size in object space units for stable ray marching
|
1864
|
+
float voxelStepOS(int res, vec3 osDir) {
|
1865
|
+
vec3 voxelSize = getScale(res) / vec3(voxelExtents);
|
1866
|
+
vec3 dt_vec = voxelSize / abs(osDir);
|
1867
|
+
return min(dt_vec.x, min(dt_vec.y, dt_vec.z));
|
1868
|
+
}
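// Illustrative worked example (voxelExtents and scale values assumed for the example only):
//   voxelExtents = (1024, 1024, 1024), getScale(2) = (4, 4, 4) -> voxelSize = (1/256, 1/256, 1/256)
//   osDir = normalize(vec3(1.0)) ~= (0.577, 0.577, 0.577) -> dt_vec ~= (0.00677, 0.00677, 0.00677)
// so one step of dt along osDir advances ~0.0039 (about one res-2 voxel) on each axis.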
|
1869
|
+
|
1870
|
+
// ========================================
|
1871
|
+
// INTERPOLATION FUNCTIONS
|
1872
|
+
// ========================================
|
1873
|
+
|
1874
|
+
// Linear interpolation between two values.
|
1875
|
+
// Performs smooth interpolation between two scalar values.
|
1876
|
+
//
|
1877
|
+
// Parameters:
|
1878
|
+
// v0 - float: First value to interpolate from
|
1879
|
+
// v1 - float: Second value to interpolate to
|
1880
|
+
// fx - float: Interpolation factor between 0.0 and 1.0
|
1881
|
+
//
|
1882
|
+
// Returns:
|
1883
|
+
// float: Interpolated value between v0 and v1 based on fx
|
1884
|
+
float lerp(float v0, float v1, float fx) {
|
1885
|
+
return mix(v0, v1, fx); // (1-fx)·v0 + fx·v1
|
1886
|
+
}
|
1887
|
+
|
1888
|
+
// Bilinear interpolation between four values
|
1889
|
+
// Performs 2D interpolation using four corner values arranged in a square
|
1890
|
+
//
|
1891
|
+
// Parameters:
|
1892
|
+
// v00 - float: Bottom-left corner value
|
1893
|
+
// v10 - float: Bottom-right corner value
|
1894
|
+
// v01 - float: Top-left corner value
|
1895
|
+
// v11 - float: Top-right corner value
|
1896
|
+
// f - vec2: 2D interpolation factors (x, y) between 0.0 and 1.0
|
1897
|
+
//
|
1898
|
+
// Returns:
|
1899
|
+
// float: Interpolated value from the four corner values
|
1900
|
+
float bilerp(float v00, float v10, float v01, float v11, vec2 f) {
|
1901
|
+
float c0 = mix(v00, v10, f.x); // Interpolate in X on bottom row
|
1902
|
+
float c1 = mix(v01, v11, f.x); // Interpolate in X on top row
|
1903
|
+
return mix(c0, c1, f.y); // Now interpolate those in Y
|
1904
|
+
}
|
1905
|
+
|
1906
|
+
// Trilinear interpolation between eight values
|
1907
|
+
// Performs 3D interpolation using eight corner values arranged in a cube
|
1908
|
+
//
|
1909
|
+
// Parameters:
|
1910
|
+
// v000 - float: Bottom-left-back corner value
|
1911
|
+
// v100 - float: Bottom-right-back corner value
|
1912
|
+
// v010 - float: Bottom-left-front corner value
|
1913
|
+
// v110 - float: Bottom-right-front corner value
|
1914
|
+
// v001 - float: Top-left-back corner value
|
1915
|
+
// v101 - float: Top-right-back corner value
|
1916
|
+
// v011 - float: Top-left-front corner value
|
1917
|
+
// v111 - float: Top-right-front corner value
|
1918
|
+
// f - vec3: 3D interpolation factors (x, y, z) between 0.0 and 1.0
|
1919
|
+
//
|
1920
|
+
// Returns:
|
1921
|
+
// float: Interpolated value from the eight corner values
|
1922
|
+
float trilerp(
|
1923
|
+
float v000, float v100, float v010, float v110,
|
1924
|
+
float v001, float v101, float v011, float v111,
|
1925
|
+
vec3 f) { // f = fract(coord)
|
1926
|
+
// Interpolate along X for each of the four bottom-face voxels
|
1927
|
+
float c00 = mix(v000, v100, f.x);
|
1928
|
+
float c10 = mix(v010, v110, f.x);
|
1929
|
+
float c01 = mix(v001, v101, f.x);
|
1930
|
+
float c11 = mix(v011, v111, f.x);
|
1931
|
+
|
1932
|
+
// Interpolate those along Y
|
1933
|
+
float c0 = mix(c00, c10, f.y);
|
1934
|
+
float c1 = mix(c01, c11, f.y);
|
1935
|
+
|
1936
|
+
// Final interpolation along Z
|
1937
|
+
return mix(c0, c1, f.z);
|
1938
|
+
}
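// Illustrative example (corner values chosen for the example only): with the four "+X" corners
// (v100, v110, v101, v111) set to 1.0, the four others set to 0.0 and f = (0.25, 0.9, 0.1),
// every X mix yields 0.25, so the Y and Z mixes leave it unchanged and trilerp returns 0.25.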
|
1939
|
+
|
1940
|
+
// ========================================
|
1941
|
+
// BRICK CACHE SAMPLING
|
1942
|
+
// ========================================
|
1943
|
+
|
1944
|
+
// Sample a value from the brick cache texture.
|
1945
|
+
// Converts brick coordinates and voxel position to texture coordinates for sampling.
|
1946
|
+
// sample the brick cache based on the brick cache coordinate and the in-brick coordinate
|
1947
|
+
//
|
1948
|
+
// Parameters:
|
1949
|
+
// brickCacheCoord - vec3: 3D coordinates of the brick within the brick cache
|
1950
|
+
// voxelInBrick - vec3: 3D coordinates of the voxel within the brick (0-31 in each dimension)
|
1951
|
+
//
|
1952
|
+
// Returns:
|
1953
|
+
// float: Sampled voxel value from the brick cache texture (typically normalized 0.0-1.0)
|
1954
|
+
float sampleBrick(vec3 brickCacheCoord, vec3 voxelInBrick) {
|
1955
|
+
vec3 brickCacheCoordNormalized = vec3(
|
1956
|
+
(float(brickCacheCoord.x) * BRICK_SIZE + float(voxelInBrick.x)) / BRICK_CACHE_SIZE_X,
|
1957
|
+
(float(brickCacheCoord.y) * BRICK_SIZE + float(voxelInBrick.y)) / BRICK_CACHE_SIZE_Y,
|
1958
|
+
(float(brickCacheCoord.z) * BRICK_SIZE + float(voxelInBrick.z)) / BRICK_CACHE_SIZE_Z
|
1959
|
+
);
|
1960
|
+
return texture(brickCacheTex, brickCacheCoordNormalized).r;
|
1961
|
+
}
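// Illustrative worked example (coordinates chosen for the example only):
//   brickCacheCoord = (3, 5, 1), voxelInBrick = (16.5, 8.5, 24.5)
//   normalized = ((3*32 + 16.5) / 2048, (5*32 + 8.5) / 2048, (1*32 + 24.5) / 128)
//              ~= (0.05493, 0.08228, 0.44141)
// i.e. the lookup lands inside brick (3, 5, 1) of the 2048 x 2048 x 128 brick cache texture.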
|
1962
|
+
|
1963
|
+
/**
|
1964
|
+
* main render loop
|
1965
|
+
*/
|
1966
|
+
void main(void) {
|
1967
|
+
|
1968
|
+
// ========================================
|
1969
|
+
// INITIALIZATION
|
1970
|
+
// ========================================
|
1971
|
+
|
1972
|
+
// Initialize all render targets (multiple output textures)
|
1973
|
+
gRequest = vec4(0,0,0,0); // Brick loading requests
|
1974
|
+
gUsage = vec4(0,0,0,0); // Brick usage tracking
|
1975
|
+
gColor = vec4(0.0, 0.0, 0.0, 0.0); // Final color output
|
1976
|
+
|
1977
|
+
// out color sums up our accumulated value before writing it into the gColor buffer
|
1978
|
+
vec4 outColor = vec4(0.0, 0.0, 0.0, 0.0); // Accumulated color
|
1979
|
+
|
1980
|
+
// Generate random number for jittered sampling (reduces artifacts)
|
1981
|
+
float rnd = random();
|
1982
|
+
|
1983
|
+
// Get the lowest available resolution level
|
1984
|
+
int lowestDataRes = getLowestRes();
|
1985
|
+
|
1986
|
+
// ========================================
|
1987
|
+
// RAY-VOLUME INTERSECTION
|
1988
|
+
// ========================================
|
1989
|
+
|
1990
|
+
// Normalize the view ray direction
|
1991
|
+
vec3 ws_rayDir = normalize(rayDirUnnorm);
|
1992
|
+
|
1993
|
+
// Calculate intersection with volume bounding box
|
1994
|
+
// Returns (entry_time, exit_time) for the ray-box intersection
|
1995
|
+
vec2 t_hit = intersect_hit(cameraCorrected, ws_rayDir);
|
1996
|
+
if (t_hit.x >= t_hit.y) { discard; } // Ray misses volume entirely
|
1997
|
+
|
1998
|
+
t_hit.x = max(t_hit.x, 0.0); // Clamp entry to 0 (no negative distances)
|
1999
|
+
float t = t_hit.x;
|
2000
|
+
|
2001
|
+
// Calculate distance from camera for LOD selection
|
2002
|
+
float distance = abs((cameraCorrected / boxSize).z + (ws_rayDir / boxSize).z * t );
|
2003
|
+
|
2004
|
+
// ========================================
|
2005
|
+
// COORDINATE SPACE CONVERSION
|
2006
|
+
// ========================================
|
2007
|
+
|
2008
|
+
// Convert from world space to object space (normalized 0-1 coordinates)
|
2009
|
+
float ws2os = length(ws_rayDir / boxSize); // Scale factor for conversion
|
2010
|
+
float t_hit_min_os = t_hit.x * ws2os; // Entry point in object space
|
2011
|
+
float t_hit_max_os = t_hit.y * ws2os; // Exit point in object space
|
2012
|
+
float t_os = t_hit_min_os; // Current position in object space
|
2013
|
+
|
2014
|
+
// Calculate effective LOD factor based on volume size.
|
2015
|
+
// voxel edge is the max extent of the volume
|
2016
|
+
float voxelEdge = float(max(voxelExtents.x, max(voxelExtents.y, voxelExtents.z)));
|
2017
|
+
|
2018
|
+
// calculate LOD factor based on the voxel edge
|
2019
|
+
float lodFactorEffective = lodFactor * voxelEdge / 256.0;
|
2020
|
+
|
2021
|
+
// ========================================
|
2022
|
+
// RESOLUTION AND SAMPLING SETUP
|
2023
|
+
// ========================================
|
2024
|
+
|
2025
|
+
// Determine target resolution based on distance (LOD)
|
2026
|
+
int targetRes = getLOD(t, 0, 9, lodFactorEffective);
|
2027
|
+
|
2028
|
+
// Set adaptive stepping resolution
|
2029
|
+
int stepResAdaptive = renderRes;
|
2030
|
+
int stepResEffective = clamp(stepResAdaptive, 0, lowestDataRes);
|
2031
|
+
|
2032
|
+
// Convert ray to object space coordinates
|
2033
|
+
vec3 os_rayDir = normalize(ws_rayDir / boxSize);
|
2034
|
+
vec3 os_rayOrigin = cameraCorrected / boxSize + vec3(0.5);
|
2035
|
+
|
2036
|
+
// Calculate step size based on current resolution
|
2037
|
+
float dt = voxelStepOS(stepResEffective, os_rayDir);
|
2038
|
+
|
2039
|
+
// ========================================
|
2040
|
+
// SAMPLING POSITION INITIALIZATION
|
2041
|
+
// ========================================
|
2042
|
+
|
2043
|
+
// Convert to normalized sampling coordinates (0-1 range)
|
2044
|
+
vec3 p = cameraCorrected + t_hit.x * ws_rayDir;
|
2045
|
+
p = p / boxSize + vec3(0.5); // Transform to 0-1 range
|
2046
|
+
|
2047
|
+
// Calculate step vector in normalized space
|
2048
|
+
vec3 dp = (os_rayDir * dt);
|
2049
|
+
|
2050
|
+
// Apply jittered sampling to reduce artifacts
|
2051
|
+
p += dp * (rnd);
|
2052
|
+
// Avoid boundary issues
|
2053
|
+
p = clamp(p, 0.0 + 0.0000028, 1.0 - 0.0000028);
|
2054
|
+
|
2055
|
+
// ========================================
|
2056
|
+
// RENDERING VARIABLES
|
2057
|
+
// ========================================
|
2058
|
+
|
2059
|
+
// Color accumulation for front-to-back compositing.
|
2060
|
+
// color accumulation variables, calculated per 'slice'
|
2061
|
+
vec3 rgbCombo = vec3(0.0);
|
2062
|
+
float total = 0.0;
|
2063
|
+
|
2064
|
+
// For alpha blending.
|
2065
|
+
// alpha accumulation variable runs globally
|
2066
|
+
float alphaMultiplicator = 1.0;
|
2067
|
+
|
2068
|
+
// Request tracking (for brick loading).
|
2069
|
+
// if we have a request for a brick which is not visible in lower
|
2070
|
+
// resolutions, we can overwrite it once
|
2071
|
+
bool overWrittenRequest = false;
|
2072
|
+
|
2073
|
+
// Current state tracking
|
2074
|
+
vec3 currentTargetResPTCoord = vec3(0,0,0);
|
2075
|
+
int currentLOD = targetRes;
|
2076
|
+
|
2077
|
+
// ========================================
|
2078
|
+
// CHANNEL-SPECIFIC CONSTANTS
|
2079
|
+
// ========================================
|
2080
|
+
|
2081
|
+
// Pre-compute channel properties for efficiency.
|
2082
|
+
// constants per color channel
|
2083
|
+
vec3 [] c_color = vec3[7](getChannelColor(0), getChannelColor(1), getChannelColor(2), getChannelColor(3), getChannelColor(4), getChannelColor(5), getChannelColor(6));
|
2084
|
+
float [] c_opacity = float[7](getChannelOpacity(0), getChannelOpacity(1), getChannelOpacity(2), getChannelOpacity(3), getChannelOpacity(4), getChannelOpacity(5), getChannelOpacity(6));
|
2085
|
+
// resolution ranges (currently) per color channel
|
2086
|
+
// TODO: figure out how to hook it up with frontend
|
2087
|
+
int [] c_res_min = int[7](getRes(0).x, getRes(1).x, getRes(2).x, getRes(3).x, getRes(4).x, getRes(5).x, getRes(6).x);
|
2088
|
+
int [] c_res_max = int[7](getRes(0).y, getRes(1).y, getRes(2).y, getRes(3).y, getRes(4).y, getRes(5).y, getRes(6).y);
|
2089
|
+
|
2090
|
+
// ========================================
|
2091
|
+
// PER-CHANNEL STATE ARRAYS
|
2092
|
+
// ========================================
|
2093
|
+
|
2094
|
+
// Current state for each channel.
|
2095
|
+
// current state variables per color channel
|
2096
|
+
|
2097
|
+
// current resolution
|
2098
|
+
int [] c_res_current = int[7](0,0,0,0,0,0,0);
|
2099
|
+
// current value
|
2100
|
+
float [] c_val_current = float[7](0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0);
|
2101
|
+
// current brick cache coordinate
|
2102
|
+
vec3 [] c_brickCacheCoord_current = vec3[7](vec3(0.0), vec3(0.0), vec3(0.0), vec3(0.0), vec3(0.0), vec3(0.0), vec3(0.0));
|
2103
|
+
// current voxel in current resolution
|
2104
|
+
vec3 [] c_voxel_current = vec3[7](vec3(0.0), vec3(0.0), vec3(0.0), vec3(0.0), vec3(0.0), vec3(0.0), vec3(0.0));
|
2105
|
+
// current pagetable coordinate
|
2106
|
+
vec3 [] c_ptCoord_current = vec3[7](vec3(0.0), vec3(0.0), vec3(0.0), vec3(0.0), vec3(0.0), vec3(0.0), vec3(0.0));
|
2107
|
+
// current render mode -- 0: empty (add 0), 1: constant (add current val), 2: voxel (query new voxel)
|
2108
|
+
// upon a change of the PT coordinate we re-query anyway
|
2109
|
+
int [] c_renderMode_current = int[7](-1, -1, -1, -1, -1, -1, -1);
|
2110
|
+
|
2111
|
+
// Adjacent brick caching for interpolation
|
2112
|
+
// current pagetable coordinate of the adjacent bricks in X, Y, Z or diagonal direction
|
2113
|
+
vec3 [] c_PT_X_adjacent = vec3[7](vec3(-1.0), vec3(-1.0), vec3(-1.0), vec3(-1.0), vec3(-1.0), vec3(-1.0), vec3(-1.0));
|
2114
|
+
vec3 [] c_PT_Y_adjacent = vec3[7](vec3(-1.0), vec3(-1.0), vec3(-1.0), vec3(-1.0), vec3(-1.0), vec3(-1.0), vec3(-1.0));
|
2115
|
+
vec3 [] c_PT_Z_adjacent = vec3[7](vec3(-1.0), vec3(-1.0), vec3(-1.0), vec3(-1.0), vec3(-1.0), vec3(-1.0), vec3(-1.0));
|
2116
|
+
vec3 [] c_PT_XYZ_adjacent = vec3[7](vec3(-1.0), vec3(-1.0), vec3(-1.0), vec3(-1.0), vec3(-1.0), vec3(-1.0), vec3(-1.0));
|
2117
|
+
// corresponding brick coordinates of the adjacent bricks in X, Y, Z or diagonal direction
|
2118
|
+
vec4 [] c_brick_X_adjacent = vec4[7](vec4(-1.0), vec4(-1.0), vec4(-1.0), vec4(-1.0), vec4(-1.0), vec4(-1.0), vec4(-1.0));
|
2119
|
+
vec4 [] c_brick_Y_adjacent = vec4[7](vec4(-1.0), vec4(-1.0), vec4(-1.0), vec4(-1.0), vec4(-1.0), vec4(-1.0), vec4(-1.0));
|
2120
|
+
vec4 [] c_brick_Z_adjacent = vec4[7](vec4(-1.0), vec4(-1.0), vec4(-1.0), vec4(-1.0), vec4(-1.0), vec4(-1.0), vec4(-1.0));
|
2121
|
+
vec4 [] c_brick_XYZ_adjacent = vec4[7](vec4(-1.0), vec4(-1.0), vec4(-1.0), vec4(-1.0), vec4(-1.0), vec4(-1.0), vec4(-1.0));
|
2122
|
+
|
2123
|
+
// Min/max tracking for MIP/MinIP rendering.
|
2124
|
+
// min and max values of the current color
|
2125
|
+
// used for minimum/maximum intensity projection
|
2126
|
+
float [] c_minVal = float[7](-1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0);
|
2127
|
+
float [] c_maxVal = float[7](0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0);
|
2128
|
+
|
2129
|
+
// Per-resolution coordinate tracking
|
2130
|
+
vec3 [] r_ptCoord = vec3[10](vec3(-1.0), vec3(-1.0), vec3(-1.0), vec3(-1.0), vec3(-1.0), vec3(-1.0), vec3(-1.0), vec3(-1.0), vec3(-1.0), vec3(-1.0));
|
2131
|
+
vec3 [] r_voxel = vec3[10](vec3(-1.0), vec3(-1.0), vec3(-1.0), vec3(-1.0), vec3(-1.0), vec3(-1.0), vec3(-1.0), vec3(-1.0), vec3(-1.0), vec3(-1.0));
|
2132
|
+
vec3 [] r_prevPTCoord = vec3[10](vec3(-1.0), vec3(-1.0), vec3(-1.0), vec3(-1.0), vec3(-1.0), vec3(-1.0), vec3(-1.0), vec3(-1.0), vec3(-1.0), vec3(-1.0));
|
2133
|
+
vec3 [] r_prevVoxel = vec3[10](vec3(-1.0), vec3(-1.0), vec3(-1.0), vec3(-1.0), vec3(-1.0), vec3(-1.0), vec3(-1.0), vec3(-1.0), vec3(-1.0), vec3(-1.0));
|
2134
|
+
|
2135
|
+
// resolution changed flag
|
2136
|
+
bool resolutionChanged = false;
|
2137
|
+
// number of repetitions (for debugging purposes)
|
2138
|
+
int reps = 0;
|
2139
|
+
|
2140
|
+
// ========================================
|
2141
|
+
// MAIN RAY-MARCHING LOOP
|
2142
|
+
// ========================================
|
2143
|
+
|
2144
|
+
// while we are 'in' the volume.
|
2145
|
+
// Continue marching until we exit the volume or reach maximum opacity.
|
2146
|
+
while (t_os < t_hit_max_os && t_os >= t_hit_min_os
|
2147
|
+
&& vec3_max(p) < 1.0 && vec3_min(p) >= 0.0
|
2148
|
+
) {
|
2149
|
+
|
2150
|
+
// Reset per-sample accumulation.
|
2151
|
+
// initialize slice values
|
2152
|
+
vec3 rgbCombo = vec3(0.0);
|
2153
|
+
float total = 0.0;
|
2154
|
+
|
2155
|
+
// Update target resolution based on current distance (with jitter).
|
2156
|
+
// calculate target resolution based on distance and lod factor
|
2157
|
+
targetRes = getLOD(t, 0, 9, lodFactorEffective * (0.999 + 0.002 * rnd));
|
2158
|
+
|
2159
|
+
// ========================================
|
2160
|
+
// RESOLUTION CHANGE HANDLING
|
2161
|
+
// ========================================
|
2162
|
+
|
2163
|
+
// if the target resolution changed, update the current resolution and step size
|
2164
|
+
if (targetRes != currentLOD) {
|
2165
|
+
currentLOD = targetRes;
|
2166
|
+
stepResAdaptive++;
|
2167
|
+
stepResEffective = clamp(stepResAdaptive, 0, lowestDataRes);
|
2168
|
+
|
2169
|
+
// Adjust sampling position for new resolution
|
2170
|
+
p -= dp * rnd;
|
2171
|
+
dt = voxelStepOS(stepResEffective, os_rayDir);
|
2172
|
+
dp = os_rayDir * dt;
|
2173
|
+
p += dp * rnd;
|
2174
|
+
resolutionChanged = true;
|
2175
|
+
|
2176
|
+
// Check bounds after resolution change
|
2177
|
+
if (p.x < 0.0 || p.x >= 1.0 || p.y < 0.0 || p.y >= 1.0 || p.z < 0.0 || p.z >= 1.0) {
|
2178
|
+
break;
|
2179
|
+
}
|
2180
|
+
} else {
|
2181
|
+
resolutionChanged = false;
|
2182
|
+
}
|
2183
|
+
|
2184
|
+
// ========================================
|
2185
|
+
// UPDATE COORDINATES FOR ALL RESOLUTIONS
|
2186
|
+
// ========================================
|
2187
|
+
|
2188
|
+
// Calculate page table coordinates and voxel positions for all resolution levels
|
2189
|
+
for (int r = 0; r < 10; r++) {
|
2190
|
+
r_prevPTCoord[r] = r_ptCoord[r];
|
2191
|
+
r_prevVoxel[r] = r_voxel[r];
|
2192
|
+
r_ptCoord[r] = getBrickFromNormalized(p, r);
|
2193
|
+
r_voxel[r] = getVoxelFromNormalized(p, r);
|
2194
|
+
}
|
2195
|
+
|
2196
|
+
// ========================================
|
2197
|
+
// RENDER MODE DEFINITIONS
|
2198
|
+
// ========================================
|
2199
|
+
// 0: empty brick (no data)
|
2200
|
+
// 1: constant brick (uniform value)
|
2201
|
+
// 2: voxel brick (variable data)
|
2202
|
+
|
2203
|
+
// initialize the per channel slice values
|
2204
|
+
vec3 sliceColor = vec3(0.0);
|
2205
|
+
float sliceAlpha = 0.0;
|
2206
|
+
|
2207
|
+
// ========================================
|
2208
|
+
// MULTI-CHANNEL SAMPLING
|
2209
|
+
// ========================================
|
2210
|
+
|
2211
|
+
// Process each channel independently.
|
2212
|
+
// iterate over up to 7 channels by color
|
2213
|
+
for (int c = 0; c < 7; c++) {
|
2214
|
+
// Skip channels with zero opacity.
|
2215
|
+
// skip if opacity is 0 or if color is not mapped to a physical slot
|
2216
|
+
if (c_opacity[c] <= 0.000001) {
|
2217
|
+
continue;
|
2218
|
+
} else if (channelMapping[c] == -1) {
|
2219
|
+
continue;
|
2220
|
+
}
|
2221
|
+
|
2222
|
+
// physical slot in pagetable
|
2223
|
+
int slot = channelMapping[c];
|
2224
|
+
|
2225
|
+
// keep track of status
|
2226
|
+
bool newBrick = false;
|
2227
|
+
bool newVoxel = false;
|
2228
|
+
// best possible resolution
|
2229
|
+
int bestRes = clamp(targetRes, c_res_min[c], c_res_max[c]);
|
2230
|
+
|
2231
|
+
// Check if we need to load a new brick at a better resolution.
|
2232
|
+
// check if any better resolution could be available; if so, we need to re-query the brick
|
2233
|
+
bool betterResChanged = false;
|
2234
|
+
for (int r = bestRes; r <= c_res_current[c]; r++ ) {
|
2235
|
+
if (r_ptCoord[r] != r_prevPTCoord[r]) {
|
2236
|
+
betterResChanged = true;
|
2237
|
+
break;
|
2238
|
+
}
|
2239
|
+
}
|
2240
|
+
|
2241
|
+
// Determine if we need to load new brick data.
|
2242
|
+
// check if we need to re-query the brick / voxel or reuse past 'val'
|
2243
|
+
if (r_ptCoord[bestRes] != r_prevPTCoord[bestRes]
|
2244
|
+
|| c_renderMode_current[c] == -1
|
2245
|
+
|| resolutionChanged == true
|
2246
|
+
|| betterResChanged
|
2247
|
+
) {
|
2248
|
+
newBrick = true;
|
2249
|
+
newVoxel = true;
|
2250
|
+
} else if (c_renderMode_current[c] == 2) {
|
2251
|
+
newVoxel = true;
|
2252
|
+
} else if (c_renderMode_current[c] == 0) {
|
2253
|
+
continue; // Skip empty bricks
|
2254
|
+
}
|
2255
|
+
|
2256
|
+
// ========================================
|
2257
|
+
// BRICK LOADING AND CACHING
|
2258
|
+
// ========================================
|
2259
|
+
|
2260
|
+
// check if a new brick is available in the best possible resolution
|
2261
|
+
if (newBrick) {
|
2262
|
+
// Query page table for brick location and status
|
2263
|
+
ivec4 brickCacheInfo = getBrickLocation(p, bestRes, slot, rnd, true, c);
|
2264
|
+
// check information about the newly queried brick
|
2265
|
+
// if the returned res is not the best res, the brick may be the same as the previous one
|
2266
|
+
if (brickCacheInfo.w == -1 || brickCacheInfo.w == -2) {
|
2267
|
+
// Empty brick - no data available.
|
2268
|
+
// we can skip the rest of the loop
|
2269
|
+
c_val_current[c] = 0.0;
|
2270
|
+
c_renderMode_current[c] = 0;
|
2271
|
+
c_minVal[c] = 0.0;
|
2272
|
+
continue;
|
2273
|
+
} else if (brickCacheInfo.w == -3) {
|
2274
|
+
// Solid brick - constant maximum value.
|
2275
|
+
// we set the value and do not need to query a voxel
|
2276
|
+
c_val_current[c] = 1.0;
|
2277
|
+
c_renderMode_current[c] = 1;
|
2278
|
+
c_maxVal[c] = 1.0;
|
2279
|
+
newVoxel = false;
|
2280
|
+
} else if (brickCacheInfo.w == -4) {
|
2281
|
+
// Constant brick - uniform value.
|
2282
|
+
// static value -- we set the value and do not need to query a voxel
|
2283
|
+
float val = float(brickCacheInfo.x);
|
2284
|
+
c_val_current[c] = max(0.0, (val - getClim(c).x) / (getClim(c).y - getClim(c).x));
|
2285
|
+
c_renderMode_current[c] = 1;
|
2286
|
+
newVoxel = false;
|
2287
|
+
} else if (brickCacheInfo.w >= 0) {
|
2288
|
+
// Voxel brick - variable data, load from cache.
|
2289
|
+
// new brick -- we set the coordinate and resolution and need to query a voxel
|
2290
|
+
c_res_current[c] = brickCacheInfo.w;
|
2291
|
+
c_ptCoord_current[c] = r_ptCoord[c_res_current[c]];
|
2292
|
+
c_brickCacheCoord_current[c] = vec3(brickCacheInfo.xyz);
|
2293
|
+
c_renderMode_current[c] = 2;
|
2294
|
+
newVoxel = true;
|
2295
|
+
|
2296
|
+
// Track brick usage for cache management.
|
2297
|
+
// we set the usage of the brick based on the channel and the relative distance into the cube
|
2298
|
+
if (int(floor(rnd * float(maxChannels))) == c) {
|
2299
|
+
setUsage(brickCacheInfo.xyz, t_hit_min_os, t_hit_max_os, t_os, rnd);
|
2300
|
+
}
|
2301
|
+
}
|
2302
|
+
}
|
2303
|
+
|
2304
|
+
// ========================================
|
2305
|
+
// VOXEL SAMPLING WITH INTERPOLATION
|
2306
|
+
// ========================================
|
2307
|
+
|
2308
|
+
// we need to query a new voxel e.g. sample the brick cache
|
2309
|
+
if (newVoxel) {
|
2310
|
+
c_voxel_current[c] = r_voxel[c_res_current[c]];
|
2311
|
+
|
2312
|
+
// we clamp the coordinate to be inside the brick and sample the volume
|
2313
|
+
reps++;
|
2314
|
+
|
2315
|
+
// Calculate position within the brick (0-31 range)
|
2316
|
+
vec3 voxelInBrick = mod(c_voxel_current[c], 32.0);
|
2317
|
+
vec3 clampedVoxelInBrick = clamp(voxelInBrick, 0.5, 31.5);
|
2318
|
+
// Sample the brick cache texture
|
2319
|
+
float val = sampleBrick(c_brickCacheCoord_current[c].xyz, clampedVoxelInBrick);
|
2320
|
+
|
2321
|
+
// ========================================
|
2322
|
+
// HIGH-QUALITY INTERPOLATION (renderRes == 0)
|
2323
|
+
// ========================================
|
2324
|
+
|
2325
|
+
// interblock interpolation
|
2326
|
+
if (renderRes == 0) {
|
2327
|
+
// determine which axes we need to interpolate across
|
2328
|
+
|
2329
|
+
// Check if we're near brick boundaries (need interpolation)
|
2330
|
+
bvec3 clampedMin = lessThan(voxelInBrick, clampedVoxelInBrick);
|
2331
|
+
bvec3 clampedMax = greaterThan(voxelInBrick, clampedVoxelInBrick);
|
2332
|
+
bvec3 clamped = bvec3(clampedMin.x || clampedMax.x, clampedMin.y || clampedMax.y, clampedMin.z || clampedMax.z);
|
2333
|
+
vec3 diff = voxelInBrick - clampedVoxelInBrick;
|
2334
|
+
|
2335
|
+
if (any(clampedMin) || any(clampedMax)) {
|
2336
|
+
int boundaryAxes = int(clamped.x) + int(clamped.y) + int(clamped.z);
|
2337
|
+
float f = 0.0;
|
2338
|
+
|
2339
|
+
if (boundaryAxes == 1) {
|
2340
|
+
// Linear interpolation across one boundary
|
2341
|
+
vec3 otherGlobalVoxelPos = vec3(0,0,0);
|
2342
|
+
vec3 otherP = vec3(0,0,0);
|
2343
|
+
float otherVoxelVal = 0.0;
|
2344
|
+
|
2345
|
+
// Determine which axis we're interpolating across
|
2346
|
+
if (clampedMin.x) {
|
2347
|
+
otherGlobalVoxelPos = c_voxel_current[c] - vec3(1.0, 0.0, 0.0);
|
2348
|
+
otherP = getNormalizedFromVoxel(otherGlobalVoxelPos, c_res_current[c]);
|
2349
|
+
f = abs(diff.x);
|
2350
|
+
} else if (clampedMax.x) {
|
2351
|
+
otherGlobalVoxelPos = c_voxel_current[c] + vec3(1.0, 0.0, 0.0);
|
2352
|
+
otherP = getNormalizedFromVoxel(otherGlobalVoxelPos, c_res_current[c]);
|
2353
|
+
f = abs(diff.x);
|
2354
|
+
} else if (clampedMin.y) {
|
2355
|
+
otherGlobalVoxelPos = c_voxel_current[c] - vec3(0.0, 1.0, 0.0);
|
2356
|
+
otherP = getNormalizedFromVoxel(otherGlobalVoxelPos, c_res_current[c]);
|
2357
|
+
f = abs(diff.y);
|
2358
|
+
} else if (clampedMax.y) {
|
2359
|
+
otherGlobalVoxelPos = c_voxel_current[c] + vec3(0.0, 1.0, 0.0);
|
2360
|
+
otherP = getNormalizedFromVoxel(otherGlobalVoxelPos, c_res_current[c]);
|
2361
|
+
f = abs(diff.y);
|
2362
|
+
} else if (clampedMin.z) {
|
2363
|
+
otherGlobalVoxelPos = c_voxel_current[c] - vec3(0.0, 0.0, 1.0);
|
2364
|
+
otherP = getNormalizedFromVoxel(otherGlobalVoxelPos, c_res_current[c]);
|
2365
|
+
f = abs(diff.z);
|
2366
|
+
} else if (clampedMax.z) {
|
2367
|
+
otherGlobalVoxelPos = c_voxel_current[c] + vec3(0.0, 0.0, 1.0);
|
2368
|
+
otherP = getNormalizedFromVoxel(otherGlobalVoxelPos, c_res_current[c]);
|
2369
|
+
f = abs(diff.z);
|
2370
|
+
}
|
2371
|
+
|
2372
|
+
// Sample the neighboring voxel
|
2373
|
+
vec3 otherPTcoord = getBrickFromNormalized(otherP, c_res_current[c]);
|
2374
|
+
otherPTcoord = getBrickFromVoxel(otherGlobalVoxelPos, c_res_current[c]);
|
2375
|
+
vec3 otherVoxelInBrick = mod(otherGlobalVoxelPos, 32.0);
|
2376
|
+
otherVoxelInBrick -= diff;
|
2377
|
+
|
2378
|
+
// Check if neighbor is outside volume bounds
|
2379
|
+
if (otherP.x < 0.0 || otherP.x >= 1.0 || otherP.y < 0.0 || otherP.y >= 1.0 || otherP.z < 0.0 || otherP.z >= 1.0) {
|
2380
|
+
otherVoxelVal = val;
|
2381
|
+
} else if (otherPTcoord == c_PT_XYZ_adjacent[c].xyz && c_brick_XYZ_adjacent[c].w >= 0.0) {
|
2382
|
+
// Use cached adjacent brick
|
2383
|
+
otherVoxelVal = sampleBrick(c_brick_XYZ_adjacent[c].xyz, otherVoxelInBrick);
|
2384
|
+
} else {
|
2385
|
+
// Load new adjacent brick
|
2386
|
+
ivec4 otherBrickCacheInfo = ivec4(-1);
|
2387
|
+
if (otherPTcoord == c_PT_XYZ_adjacent[c].xyz) {
|
2388
|
+
otherBrickCacheInfo = ivec4(c_brick_XYZ_adjacent[c]);
|
2389
|
+
} else {
|
2390
|
+
otherBrickCacheInfo = getBrickLocation(otherP, c_res_current[c], slot, rnd, false, c);
|
2391
|
+
}
|
2392
|
+
if (otherBrickCacheInfo.w == -1 || otherBrickCacheInfo.w == -2) {
|
2393
|
+
otherVoxelVal = val;
|
2394
|
+
} else if (otherBrickCacheInfo.w == -3) {
|
2395
|
+
otherVoxelVal = 1.0;
|
2396
|
+
} else if (otherBrickCacheInfo.w == -4) {
|
2397
|
+
otherVoxelVal = float(otherBrickCacheInfo.x);
|
2398
|
+
} else {
|
2399
|
+
// TODO: we do not recalculate the voxelInBrick based on the resolution
|
2400
|
+
otherVoxelVal = sampleBrick(vec3(otherBrickCacheInfo.xyz), otherVoxelInBrick);
|
2401
|
+
}
|
2402
|
+
c_PT_XYZ_adjacent[c] = getBrickFromVoxel(otherGlobalVoxelPos, c_res_current[c]);
|
2403
|
+
c_brick_XYZ_adjacent[c] = vec4(otherBrickCacheInfo);
|
2404
|
+
}
|
2405
|
+
|
2406
|
+
// Perform linear interpolation
|
2407
|
+
float originalVal = val;
|
2408
|
+
val = lerp(originalVal, otherVoxelVal, f);
|
2409
|
+
} else if (boundaryAxes == 2) {
|
2410
|
+
// Bilinear interpolation across two boundaries
|
2411
|
+
vec3 offA = vec3(0.0);
|
2412
|
+
vec3 offB = vec3(0.0);
|
2413
|
+
vec2 f = vec2(0.0);
|
2414
|
+
|
2415
|
+
// Determine which two axes we're interpolating across
|
2416
|
+
if (clamped.x && clamped.y) {
|
2417
|
+
offA.x = clampedMin.x ? -1.0 : 1.0;
|
2418
|
+
offB.y = clampedMin.y ? -1.0 : 1.0;
|
2419
|
+
f = vec2(abs(diff.x), abs(diff.y));
|
2420
|
+
} else if (clamped.x && clamped.z) {
|
2421
|
+
offA.x = clampedMin.x ? -1.0 : 1.0;
|
2422
|
+
offB.z = clampedMin.z ? -1.0 : 1.0;
|
2423
|
+
f = vec2(abs(diff.x), abs(diff.z));
|
2424
|
+
} else if (clamped.y && clamped.z) {
|
2425
|
+
offA.y = clampedMin.y ? -1.0 : 1.0;
|
2426
|
+
offB.z = clampedMin.z ? -1.0 : 1.0;
|
2427
|
+
f = vec2(abs(diff.y), abs(diff.z));
|
2428
|
+
}
|
2429
|
+
|
2430
|
+
// Macro for sampling at offset positions
|
2431
|
+
#define SAMPLE_AT_OFFSET(OFF, DEST) { vec3 otherGlobalVoxelPos = c_voxel_current[c] + (OFF); vec3 otherP = getNormalizedFromVoxel( otherGlobalVoxelPos, c_res_current[c]); if ( any(lessThan(otherP, vec3(0.0))) || any(greaterThanEqual(otherP, vec3(1.0))) ) { DEST = val; } else { vec3 otherPTcoord = getBrickFromNormalized( otherP, c_res_current[c]); vec3 otherVoxelInBrick = mod(otherGlobalVoxelPos, 32.0) - diff; bool matched = false; if (otherPTcoord == c_PT_X_adjacent[c].xyz && c_brick_X_adjacent[c].w >= 0.0) { DEST = sampleBrick(c_brick_X_adjacent[c].xyz, otherVoxelInBrick); matched = true; } else if (otherPTcoord == c_PT_Y_adjacent[c].xyz && c_brick_Y_adjacent[c].w >= 0.0) { DEST = sampleBrick(c_brick_Y_adjacent[c].xyz, otherVoxelInBrick); matched = true; } else if (otherPTcoord == c_PT_Z_adjacent[c].xyz && c_brick_Z_adjacent[c].w >= 0.0) { DEST = sampleBrick(c_brick_Z_adjacent[c].xyz, otherVoxelInBrick); matched = true; } else if (otherPTcoord == c_PT_XYZ_adjacent[c].xyz && c_brick_XYZ_adjacent[c].w >= 0.0) { DEST = sampleBrick(c_brick_XYZ_adjacent[c].xyz, otherVoxelInBrick); matched = true; } ivec4 info = ivec4(-1); if (otherPTcoord == c_PT_X_adjacent[c].xyz) { info = ivec4(c_brick_X_adjacent[c]); } else if (otherPTcoord == c_PT_Y_adjacent[c].xyz) { info = ivec4(c_brick_Y_adjacent[c]); } else if (otherPTcoord == c_PT_Z_adjacent[c].xyz) { info = ivec4(c_brick_Z_adjacent[c]); } else if (otherPTcoord == c_PT_XYZ_adjacent[c].xyz) { info = ivec4(c_brick_XYZ_adjacent[c]); } else { info = getBrickLocation(otherP, c_res_current[c], slot, rnd, false, c); } if (!matched) { if (info.w == -1 || info.w == -2) { DEST = val; } else if (info.w == -3) { DEST = 1.0; } else if (info.w == -4) { DEST = float(info.x); } else { DEST = sampleBrick(vec3(info.xyz), otherVoxelInBrick); } if (abs((OFF).x) > 0.5 && abs((OFF).y) < 0.5 && abs((OFF).z) < 0.5) { c_PT_X_adjacent[c] = otherPTcoord; c_brick_X_adjacent[c] = vec4(info); } else if (abs((OFF).y) > 0.5 && abs((OFF).x) < 0.5 && abs((OFF).z) < 0.5) { c_PT_Y_adjacent[c] = otherPTcoord; c_brick_Y_adjacent[c] = vec4(info); } else if (abs((OFF).z) > 0.5 && abs((OFF).x) < 0.5 && abs((OFF).y) < 0.5) { c_PT_Z_adjacent[c] = otherPTcoord; c_brick_Z_adjacent[c] = vec4(info); } else { c_PT_XYZ_adjacent[c] = otherPTcoord; c_brick_XYZ_adjacent[c] = vec4(info); } } } }
|
2432
|
+
|
2433
|
+
// Sample the four corners for bilinear interpolation
|
2434
|
+
float v00 = val;
|
2435
|
+
float v10; float v01; float v11;
|
2436
|
+
SAMPLE_AT_OFFSET(offA, v10);
|
2437
|
+
SAMPLE_AT_OFFSET(offB, v01);
|
2438
|
+
SAMPLE_AT_OFFSET(offA + offB, v11);
|
2439
|
+
|
2440
|
+
val = bilerp(v00, v10, v01, v11, f);
|
2441
|
+
|
2442
|
+
#undef SAMPLE_AT_OFFSET
|
2443
|
+
|
2444
|
+
} else if (boundaryAxes == 3) {
|
2445
|
+
// Trilinear interpolation across all three boundaries
|
2446
|
+
|
2447
|
+
vec3 offA = vec3(0.0);
|
2448
|
+
vec3 offB = vec3(0.0);
|
2449
|
+
vec3 offC = vec3(0.0);
|
2450
|
+
vec3 f = vec3(0.0);
|
2451
|
+
|
2452
|
+
offA.x = clampedMin.x ? -1.0 : 1.0;
|
2453
|
+
offB.y = clampedMin.y ? -1.0 : 1.0;
|
2454
|
+
offC.z = clampedMin.z ? -1.0 : 1.0;
|
2455
|
+
|
2456
|
+
f = vec3(abs(diff.x), abs(diff.y), abs(diff.z));
|
2457
|
+
|
2458
|
+
// Macro for sampling at offset positions
|
2459
|
+
#define SAMPLE_AT_OFFSET(OFF, DEST) { vec3 otherGlobalVoxelPos = c_voxel_current[c] + (OFF); vec3 otherP = getNormalizedFromVoxel(otherGlobalVoxelPos, c_res_current[c]); if (any(lessThan(otherP, vec3(0.0))) || any(greaterThanEqual(otherP, vec3(1.0)))) { DEST = val; } else { vec3 otherPTcoord = getBrickFromNormalized(otherP, c_res_current[c]); vec3 otherVoxelInBrick = mod(otherGlobalVoxelPos, 32.0) - diff; if (otherPTcoord == c_PT_X_adjacent[c] && c_brick_X_adjacent[c].w >= 0.0) { DEST = sampleBrick(c_brick_X_adjacent[c].xyz, otherVoxelInBrick); } else if (otherPTcoord == c_PT_Y_adjacent[c].xyz && c_brick_Y_adjacent[c].w >= 0.0) { DEST = sampleBrick(c_brick_Y_adjacent[c].xyz, otherVoxelInBrick); } else if (otherPTcoord == c_PT_Z_adjacent[c].xyz && c_brick_Z_adjacent[c].w >= 0.0) { DEST = sampleBrick(c_brick_Z_adjacent[c].xyz, otherVoxelInBrick); } else if (otherPTcoord == c_PT_XYZ_adjacent[c].xyz && c_brick_XYZ_adjacent[c].w >= 0.0) { DEST = sampleBrick(c_brick_XYZ_adjacent[c].xyz, otherVoxelInBrick); } else { ivec4 otherBrickCacheInfo = getBrickLocation(otherP, c_res_current[c], slot, rnd, false, c); vec3 otherVoxelInBrick = mod(otherGlobalVoxelPos, 32.0) - diff; if (otherBrickCacheInfo.w == -1 || otherBrickCacheInfo.w == -2) { DEST = val; } else if (otherBrickCacheInfo.w == -3) { DEST = 1.0; } else if (otherBrickCacheInfo.w == -4) { DEST = float(otherBrickCacheInfo.x); } else { DEST = sampleBrick(vec3(otherBrickCacheInfo.xyz), otherVoxelInBrick); } } } }
|
2460
|
+
|
2461
|
+
// Sample all eight corners for trilinear interpolation
|
2462
|
+
float v000 = val;
|
2463
|
+
float v100; float v010; float v001; float v110; float v101; float v011; float v111;
|
2464
|
+
SAMPLE_AT_OFFSET(offA, v100);
|
2465
|
+
SAMPLE_AT_OFFSET(offB, v010);
|
2466
|
+
SAMPLE_AT_OFFSET(offC, v001);
|
2467
|
+
SAMPLE_AT_OFFSET(offA + offB, v110);
|
2468
|
+
SAMPLE_AT_OFFSET(offA + offC, v101);
|
2469
|
+
SAMPLE_AT_OFFSET(offB + offC, v011);
|
2470
|
+
SAMPLE_AT_OFFSET(offA + offB + offC, v111);
|
2471
|
+
|
2472
|
+
val = trilerp(v000, v100, v010, v001, v110, v101, v011, v111, f);
|
2473
|
+
|
2474
|
+
#undef SAMPLE_AT_OFFSET
|
2475
|
+
|
2476
|
+
}
|
2477
|
+
|
2478
|
+
} else {
|
2479
|
+
// No boundary interpolation needed - clear adjacent brick cache.
|
2480
|
+
// no adjacent bricks -> reset the adjacent trackers
|
2481
|
+
c_PT_X_adjacent[c] = c_PT_Y_adjacent[c] = c_PT_Z_adjacent[c] = c_PT_XYZ_adjacent[c] = vec3(-1.0);
|
2482
|
+
c_brick_X_adjacent[c] = c_brick_Y_adjacent[c] = c_brick_Z_adjacent[c] = c_brick_XYZ_adjacent[c] = vec4(-1.0);
|
2483
|
+
}
|
2484
|
+
}
|
2485
|
+
|
2486
|
+
// ========================================
|
2487
|
+
// VALUE NORMALIZATION AND TRACKING
|
2488
|
+
// ========================================
|
2489
|
+
|
2490
|
+
// Normalize value to 0-1 range using channel-specific contrast limits.
|
2491
|
+
// we normalize the (accumulated) value to the range of the color channel
|
2492
|
+
c_val_current[c] = max(0.0, (val - getClim(c).x) / (getClim(c).y - getClim(c).x));
|
2493
|
+
|
2494
|
+
// Track min/max values for MIP/MinIP rendering.
|
2495
|
+
// update the min and max values for the min/max projection
|
2496
|
+
if (c_minVal[c] == -1.0) {
|
2497
|
+
c_minVal[c] = c_val_current[c];
|
2498
|
+
} else {
|
2499
|
+
c_minVal[c] = min(c_minVal[c], c_val_current[c]);
|
2500
|
+
}
|
2501
|
+
c_maxVal[c] = max(c_maxVal[c], c_val_current[c]);
|
2502
|
+
|
2503
|
+
}
|
2504
|
+
|
2505
|
+
// ========================================
|
2506
|
+
// BRICK REQUEST GENERATION
|
2507
|
+
// ========================================
|
2508
|
+
|
2509
|
+
// Request higher resolution bricks if we're using lower resolution than optimal.
|
2510
|
+
// potentially overwrite brick request
|
2511
|
+
if (!overWrittenRequest
|
2512
|
+
&& c_res_current[c] != bestRes
|
2513
|
+
&& c_val_current[c] > 0.0
|
2514
|
+
&& c_renderMode_current[c] == 2
|
2515
|
+
&& int(floor(rnd * float(maxChannels))) == c) {
|
2516
|
+
setBrickRequest(p, bestRes, slot, rnd);
|
2517
|
+
overWrittenRequest = true;
|
2518
|
+
}
|
2519
|
+
|
2520
|
+
// ========================================
|
2521
|
+
// CHANNEL COMPOSITING
|
2522
|
+
// ========================================
|
2523
|
+
|
2524
|
+
// Accumulate this channel's contribution.
|
2525
|
+
// sum up the values onto the slice values
|
2526
|
+
total += c_val_current[c];
|
2527
|
+
rgbCombo += c_val_current[c] * c_color[c];
|
2528
|
+
|
2529
|
+
}
|
2530
|
+
|
2531
|
+
// ========================================
|
2532
|
+
// FRONT-TO-BACK COMPOSITING
|
2533
|
+
// ========================================
|
2534
|
+
|
2535
|
+
// Clamp total intensity and calculate alpha.
|
2536
|
+
// add the calculated slice to the total color
|
2537
|
+
total = clamp(total, 0.0, 1.0);
|
2538
|
+
sliceAlpha = total * opacity * dt * 32.0; // Scale by step size and brick size
|
2539
|
+
sliceColor = rgbCombo;
|
2540
|
+
|
2541
|
+
// Front-to-back alpha blending
|
2542
|
+
outColor.rgb += sliceAlpha * alphaMultiplicator * sliceColor;
|
2543
|
+
outColor.a += sliceAlpha * alphaMultiplicator;
|
2544
|
+
alphaMultiplicator *= (1.0 - sliceAlpha);
|
2545
|
+
|
2546
|
+
// Early termination for opaque regions (standard rendering only).
|
2547
|
+
// check if we can exit early
|
2548
|
+
if (outColor.a > 0.99 && u_renderstyle == 0) { break; }
|
2549
|
+
|
2550
|
+
// ========================================
|
2551
|
+
// ADVANCE RAY POSITION
|
2552
|
+
// ========================================
|
2553
|
+
|
2554
|
+
// Move to next sample position
|
2555
|
+
t += dt;
|
2556
|
+
p += dp;
|
2557
|
+
t_os += dt;
|
2558
|
+
}
|
2559
|
+
|
2560
|
+
// ========================================
|
2561
|
+
// RENDERING STYLE POST-PROCESSING
|
2562
|
+
// ========================================
|
2563
|
+
|
2564
|
+
if (u_renderstyle == 1) {
|
2565
|
+
// Minimum Intensity Projection (MinIP)
|
2566
|
+
// Shows the minimum value encountered along each ray
|
2567
|
+
outColor = vec4(0.0);
|
2568
|
+
for (int c = 0; c < 7; c++) {
|
2569
|
+
if (c_color[c] != vec3(0.0, 0.0, 0.0)) {
|
2570
|
+
outColor.rgb += c_minVal[c] * c_color[c];
|
2571
|
+
outColor.a += c_minVal[c];
|
2572
|
+
}
|
2573
|
+
}
|
2574
|
+
} else if (u_renderstyle == 0) {
|
2575
|
+
// Maximum Intensity Projection (MIP)
|
2576
|
+
// Shows the maximum value encountered along each ray
|
2577
|
+
outColor = vec4(0.0);
|
2578
|
+
for (int c = 0; c < 7; c++) {
|
2579
|
+
if (c_color[c] != vec3(0.0, 0.0, 0.0)) {
|
2580
|
+
outColor.rgb += c_maxVal[c] * c_color[c];
|
2581
|
+
}
|
2582
|
+
}
|
2583
|
+
outColor.a = 1.0;
|
2584
|
+
}
|
2585
|
+
|
2586
|
+
// ========================================
|
2587
|
+
// FINAL OUTPUT
|
2588
|
+
// ========================================
|
2589
|
+
|
2590
|
+
// Convert from linear to sRGB color space and set all render targets
|
2591
|
+
gColor = vec4(linear_to_srgb(outColor.r),
|
2592
|
+
linear_to_srgb(outColor.g),
|
2593
|
+
linear_to_srgb(outColor.b),
|
2594
|
+
outColor.a);
|
2595
|
+
|
2596
|
+
}
|
2597
|
+
`;
|
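The fragment shader above falls back to linear, bilinear, or trilinear interpolation depending on how many axes of the sample sit on a brick boundary (the boundaryAxes == 1 / 2 / 3 branches). The blending math those branches rely on reduces to the sketch below; the helper names mirror the lerp/bilerp/trilerp calls in the shader, but this standalone JavaScript version is illustrative only and is not part of the bundled file.

// Illustrative JavaScript equivalents of the shader's blending helpers.
// lerp: blend two samples by fraction f in [0, 1].
function lerp(a, b, f) {
  return a * (1 - f) + b * f;
}
// bilerp: blend four corner samples by the 2D fraction [fx, fy].
function bilerp(v00, v10, v01, v11, [fx, fy]) {
  return lerp(lerp(v00, v10, fx), lerp(v01, v11, fx), fy);
}
// trilerp: blend eight corner samples by the 3D fraction [fx, fy, fz].
function trilerp(v000, v100, v010, v001, v110, v101, v011, v111, [fx, fy, fz]) {
  const near = bilerp(v000, v100, v010, v110, [fx, fy]); // z = 0 face
  const far = bilerp(v001, v101, v011, v111, [fx, fy]);  // z = 1 face
  return lerp(near, far, fz);
}
// Example: a sample halfway across a boundary on both x and y.
bilerp(0.2, 0.8, 0.4, 1.0, [0.5, 0.5]); // 0.6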
2598
|
+
const VolumeShader = {
|
2599
|
+
uniforms: {
|
2600
|
+
u_size: { value: new Vector3(1, 1, 1) },
|
2601
|
+
clim0: { value: new Vector2(0.2, 0.8) },
|
2602
|
+
clim1: { value: new Vector2(0.2, 0.8) },
|
2603
|
+
clim2: { value: new Vector2(0.2, 0.8) },
|
2604
|
+
clim3: { value: new Vector2(0.2, 0.8) },
|
2605
|
+
clim4: { value: new Vector2(0.2, 0.8) },
|
2606
|
+
clim5: { value: new Vector2(0.2, 0.8) },
|
2607
|
+
clim6: { value: new Vector2(0.2, 0.8) },
|
2608
|
+
clim7: { value: new Vector2(0.2, 0.8) },
|
2609
|
+
xClip: { value: new Vector2(0, 1e6) },
|
2610
|
+
yClip: { value: new Vector2(0, 1e6) },
|
2611
|
+
zClip: { value: new Vector2(0, 1e6) },
|
2612
|
+
u_window_size: { value: new Vector2(1, 1) },
|
2613
|
+
u_vol_scale: { value: new Vector3(1, 1, 1) },
|
2614
|
+
u_renderstyle: { value: 2 },
|
2615
|
+
brickCacheTex: { type: "sampler3D", value: null },
|
2616
|
+
pageTableTex: { type: "usampler3D", value: null },
|
2617
|
+
color0: { value: new Vector4(0, 0, 0) },
|
2618
|
+
color1: { value: new Vector4(0, 0, 0) },
|
2619
|
+
color2: { value: new Vector4(0, 0, 0) },
|
2620
|
+
color3: { value: new Vector4(0, 0, 0) },
|
2621
|
+
color4: { value: new Vector4(0, 0, 0) },
|
2622
|
+
color5: { value: new Vector4(0, 0, 0) },
|
2623
|
+
color6: { value: new Vector4(0, 0, 0) },
|
2624
|
+
channelMapping: {
|
2625
|
+
value: [-1, -1, -1, -1, -1, -1, -1]
|
2626
|
+
},
|
2627
|
+
resGlobal: { value: new Vector2(0, 9) },
|
2628
|
+
res0: { value: new Vector2(0, 9) },
|
2629
|
+
res1: { value: new Vector2(0, 9) },
|
2630
|
+
res2: { value: new Vector2(0, 9) },
|
2631
|
+
res3: { value: new Vector2(0, 9) },
|
2632
|
+
res4: { value: new Vector2(0, 9) },
|
2633
|
+
res5: { value: new Vector2(0, 9) },
|
2634
|
+
res6: { value: new Vector2(0, 9) },
|
2635
|
+
maxChannels: { value: 0 },
|
2636
|
+
lodFactor: { value: 1 },
|
2637
|
+
near: { value: 0.1 },
|
2638
|
+
far: { value: 1e4 },
|
2639
|
+
opacity: { value: 1 },
|
2640
|
+
volumeCount: { value: 0 },
|
2641
|
+
boxSize: { value: new Vector3(1, 1, 1) },
|
2642
|
+
renderRes: { value: 1e3 },
|
2643
|
+
voxelExtents: { value: new Vector3(1, 1, 1) },
|
2644
|
+
anchor0: { value: new Vector3(0, 0, 0) },
|
2645
|
+
anchor1: { value: new Vector3(0, 0, 0) },
|
2646
|
+
anchor2: { value: new Vector3(0, 0, 0) },
|
2647
|
+
anchor3: { value: new Vector3(0, 0, 0) },
|
2648
|
+
anchor4: { value: new Vector3(0, 0, 0) },
|
2649
|
+
anchor5: { value: new Vector3(0, 0, 0) },
|
2650
|
+
anchor6: { value: new Vector3(0, 0, 0) },
|
2651
|
+
anchor7: { value: new Vector3(0, 0, 0) },
|
2652
|
+
anchor8: { value: new Vector3(0, 0, 0) },
|
2653
|
+
anchor9: { value: new Vector3(0, 0, 0) },
|
2654
|
+
scale0: { value: new Vector3(1, 1, 1) },
|
2655
|
+
scale1: { value: new Vector3(2, 2, 2) },
|
2656
|
+
scale2: { value: new Vector3(4, 4, 4) },
|
2657
|
+
scale3: { value: new Vector3(8, 8, 8) },
|
2658
|
+
scale4: { value: new Vector3(16, 16, 16) },
|
2659
|
+
scale5: { value: new Vector3(32, 32, 32) },
|
2660
|
+
scale6: { value: new Vector3(64, 64, 64) },
|
2661
|
+
scale7: { value: new Vector3(128, 128, 128) },
|
2662
|
+
scale8: { value: new Vector3(256, 256, 256) },
|
2663
|
+
scale9: { value: new Vector3(512, 512, 512) }
|
2664
|
+
},
|
2665
|
+
vertexShader: volumeVertexShader,
|
2666
|
+
fragmentShader: volumeFragmentShader
|
2667
|
+
};
|
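VolumeShader is a plain descriptor object: nothing consumes it until its uniforms are cloned and paired with the two shader strings. VolumeRenderManager.initializeShader below does exactly that via UniformsUtils.clone, and the react-three-fiber shaderMaterial at the bottom of this file passes the result on. The equivalent direct three.js construction looks roughly like this (a sketch, not code from the bundle):

import { ShaderMaterial, UniformsUtils, BackSide, GLSL3 } from "three";

// Clone so per-instance value updates do not mutate the shared template object.
const uniforms = UniformsUtils.clone(VolumeShader.uniforms);
uniforms.opacity.value = 0.8;

const material = new ShaderMaterial({
  uniforms,
  vertexShader: VolumeShader.vertexShader,
  fragmentShader: VolumeShader.fragmentShader,
  glslVersion: GLSL3, // matches glslVersion={GLSL3} in the JSX below
  side: BackSide,     // matches side={BackSide} in the JSX below
  transparent: false,
});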
2668
|
+
function logWithColor$1(message) {
|
2669
|
+
if (atLeastLogLevel(LogLevel.DEBUG)) {
|
2670
|
+
console.warn(`%cRM: ${message}`, "background: orange; color: white; padding: 2px; border-radius: 3px;");
|
2671
|
+
}
|
2672
|
+
}
|
2673
|
+
function normalizeValue(value, minMax) {
|
2674
|
+
return value / minMax[1];
|
2675
|
+
}
|
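Note that normalizeValue divides by the upper bound only, so the lower bound of minMax is effectively ignored and contrast limits are mapped onto 0..1 relative to the detected maximum range. For example:

// With an 8-bit range, a window of [50, 200] becomes roughly [0.196, 0.784].
normalizeValue(50, [0, 255]);  // ≈ 0.196
normalizeValue(200, [0, 255]); // ≈ 0.784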
2676
|
+
class VolumeRenderManager {
|
2677
|
+
constructor() {
|
2678
|
+
logWithColor$1("Initializing VolumeRenderManager");
|
2679
|
+
this.uniforms = null;
|
2680
|
+
this.shader = null;
|
2681
|
+
this.meshScale = [1, 1, 1];
|
2682
|
+
this.geometrySize = [1, 1, 1];
|
2683
|
+
this.boxSize = [1, 1, 1];
|
2684
|
+
this.zarrInit = false;
|
2685
|
+
this.channelsVisible = [];
|
2686
|
+
this.channelTargetC = [];
|
2687
|
+
this.zarrStoreNumResolutions = null;
|
2688
|
+
this.channelMaxResolutionIndex = [];
|
2689
|
+
this.colors = [];
|
2690
|
+
this.contrastLimits = [];
|
2691
|
+
this.layerTransparency = 1;
|
2692
|
+
this.xSlice = new Vector2(-1, 1e5);
|
2693
|
+
this.ySlice = new Vector2(-1, 1e5);
|
2694
|
+
this.zSlice = new Vector2(-1, 1e5);
|
2695
|
+
this.originalScale = [1, 1, 1];
|
2696
|
+
this.physicalDimensions = [1, 1, 1];
|
2697
|
+
this.maxResolution = [1, 1, 1];
|
2698
|
+
this.maxRange = 255;
|
2699
|
+
this.maxRangeSet = false;
|
2700
|
+
this.initializeShader();
|
2701
|
+
}
|
2702
|
+
/**
|
2703
|
+
* Initialize shader and uniform objects
|
2704
|
+
*/
|
2705
|
+
initializeShader() {
|
2706
|
+
logWithColor$1("Initializing shader");
|
2707
|
+
this.shader = VolumeShader;
|
2708
|
+
this.uniforms = UniformsUtils.clone(this.shader.uniforms);
|
2709
|
+
}
|
2710
|
+
/**
|
2711
|
+
* Extract rendering settings from coordination props
|
2712
|
+
* @param {Object} props - Component props
|
2713
|
+
* @returns {Object} Extracted rendering settings
|
2714
|
+
*/
|
2715
|
+
extractRenderingSettingsFromProps(props) {
|
2716
|
+
logWithColor$1("Extracting rendering settings from props");
|
2717
|
+
const { images = {}, imageLayerScopes = [], imageLayerCoordination = [{}], imageChannelScopesByLayer = {}, imageChannelCoordination = [{}], spatialRenderingMode } = props;
|
2718
|
+
const layerScope = imageLayerScopes[0];
|
2719
|
+
if (!layerScope) {
|
2720
|
+
return {
|
2721
|
+
valid: false
|
2722
|
+
};
|
2723
|
+
}
|
2724
|
+
const channelScopes = imageChannelScopesByLayer[layerScope];
|
2725
|
+
const layerCoordination = imageLayerCoordination[0][layerScope];
|
2726
|
+
const channelCoordination = imageChannelCoordination[0][layerScope];
|
2727
|
+
const data = images[layerScope]?.image?.instance?.getData();
|
2728
|
+
if (!data) {
|
2729
|
+
return {
|
2730
|
+
valid: false
|
2731
|
+
};
|
2732
|
+
}
|
2733
|
+
if (!channelCoordination[channelScopes?.[0]][CoordinationType.SPATIAL_CHANNEL_WINDOW]) {
|
2734
|
+
return {
|
2735
|
+
valid: false
|
2736
|
+
};
|
2737
|
+
}
|
2738
|
+
const imageWrapperInstance = images[layerScope].image.instance;
|
2739
|
+
const is3dMode = spatialRenderingMode === "3D";
|
2740
|
+
const isRgb = layerCoordination[CoordinationType.PHOTOMETRIC_INTERPRETATION] === "RGB";
|
2741
|
+
const visible = layerCoordination[CoordinationType.SPATIAL_LAYER_VISIBLE];
|
2742
|
+
const layerTransparency = layerCoordination[CoordinationType.SPATIAL_LAYER_OPACITY];
|
2743
|
+
const colors = isRgb ? [
|
2744
|
+
[255, 0, 0],
|
2745
|
+
[0, 255, 0],
|
2746
|
+
[0, 0, 255]
|
2747
|
+
] : channelScopes.map((cScope) => channelCoordination[cScope][CoordinationType.SPATIAL_CHANNEL_COLOR]);
|
2748
|
+
const contrastLimits = isRgb ? [
|
2749
|
+
[0, 255],
|
2750
|
+
[0, 255],
|
2751
|
+
[0, 255]
|
2752
|
+
] : channelScopes.map((cScope) => channelCoordination[cScope][CoordinationType.SPATIAL_CHANNEL_WINDOW] || [0, 255]);
|
2753
|
+
if (!this.maxRangeSet) {
|
2754
|
+
this.maxRange = Math.max(...contrastLimits.map((limit) => limit[1]));
|
2755
|
+
this.maxRangeSet = true;
|
2756
|
+
}
|
2757
|
+
const channelsVisible = isRgb ? [
|
2758
|
+
visible && true,
|
2759
|
+
visible && true,
|
2760
|
+
visible && true
|
2761
|
+
] : channelScopes.map((cScope) => visible && channelCoordination[cScope][CoordinationType.SPATIAL_CHANNEL_VISIBLE]);
|
2762
|
+
const channelTargetC = isRgb ? [
|
2763
|
+
visible && true,
|
2764
|
+
visible && true,
|
2765
|
+
visible && true
|
2766
|
+
] : channelScopes.map((cScope) => visible && imageWrapperInstance.getChannelIndex(channelCoordination[cScope][CoordinationType.SPATIAL_TARGET_C]));
|
2767
|
+
const channelMaxResolutionIndex = isRgb ? [
|
2768
|
+
visible && null,
|
2769
|
+
visible && null,
|
2770
|
+
visible && null
|
2771
|
+
] : channelScopes.map((cScope) => channelCoordination[cScope][CoordinationType.SPATIAL_MAX_RESOLUTION]);
|
2772
|
+
let xSlice = layerCoordination[CoordinationType.SPATIAL_SLICE_X];
|
2773
|
+
let ySlice = layerCoordination[CoordinationType.SPATIAL_SLICE_Y];
|
2774
|
+
let zSlice = layerCoordination[CoordinationType.SPATIAL_SLICE_Z];
|
2775
|
+
xSlice = xSlice !== null ? xSlice : new Vector2(-1, 1e5);
|
2776
|
+
ySlice = ySlice !== null ? ySlice : new Vector2(-1, 1e5);
|
2777
|
+
zSlice = zSlice !== null ? zSlice : new Vector2(-1, 1e5);
|
2778
|
+
const allChannels = images[layerScope].image.loaders[0].channels;
|
2779
|
+
return {
|
2780
|
+
valid: true,
|
2781
|
+
channelsVisible,
|
2782
|
+
allChannels,
|
2783
|
+
channelTargetC,
|
2784
|
+
channelMaxResolutionIndex,
|
2785
|
+
data,
|
2786
|
+
colors,
|
2787
|
+
contrastLimits,
|
2788
|
+
is3dMode,
|
2789
|
+
layerTransparency,
|
2790
|
+
xSlice,
|
2791
|
+
ySlice,
|
2792
|
+
zSlice
|
2793
|
+
};
|
2794
|
+
}
|
2795
|
+
/**
|
2796
|
+
* Update the render settings from the given props
|
2797
|
+
* @param {Object} props - Component props
|
2798
|
+
* @returns {boolean} True if settings were successfully updated
|
2799
|
+
*/
|
2800
|
+
updateFromProps(props) {
|
2801
|
+
logWithColor$1("Updating from props");
|
2802
|
+
const settings = this.extractRenderingSettingsFromProps(props);
|
2803
|
+
if (!settings.valid) {
|
2804
|
+
return false;
|
2805
|
+
}
|
2806
|
+
this.channelsVisible = settings.channelsVisible;
|
2807
|
+
this.channelTargetC = settings.channelTargetC;
|
2808
|
+
this.channelMaxResolutionIndex = settings.channelMaxResolutionIndex;
|
2809
|
+
this.colors = settings.colors;
|
2810
|
+
this.contrastLimits = settings.contrastLimits;
|
2811
|
+
this.renderingMode = settings.renderingMode;
|
2812
|
+
this.layerTransparency = settings.layerTransparency;
|
2813
|
+
this.xSlice = settings.xSlice;
|
2814
|
+
this.ySlice = settings.ySlice;
|
2815
|
+
this.zSlice = settings.zSlice;
|
2816
|
+
return true;
|
2817
|
+
}
|
2818
|
+
/**
|
2819
|
+
* Update the rendering based on the current volume data from the data manager
|
2820
|
+
* @param {object} volumeDataManagerProps - Params derived from the volume data manager
|
2821
|
+
* @returns {Object|null} Updated rendering settings or null if rendering is not possible
|
2822
|
+
*/
|
2823
|
+
updateRendering({ zarrStoreShapes, originalScaleXYZ, physicalDimensionsXYZ, maxResolutionXYZ, boxDimensionsXYZ, normalizedScaleXYZ, bcTHREE, ptTHREE }) {
|
2824
|
+
logWithColor$1("Updating rendering");
|
2825
|
+
this.channelTargetC.findIndex((channel, idx) => this.channelsVisible[idx]);
|
2826
|
+
if (!Array.isArray(zarrStoreShapes) || zarrStoreShapes.length === 0) {
|
2827
|
+
return null;
|
2828
|
+
}
|
2829
|
+
const shape = zarrStoreShapes[0];
|
2830
|
+
const dimensions = {
|
2831
|
+
xLength: shape[4] || 1,
|
2832
|
+
yLength: shape[3] || 1,
|
2833
|
+
zLength: shape[2] || 1
|
2834
|
+
};
|
2835
|
+
const texturesList = [];
|
2836
|
+
const colorsSave = [];
|
2837
|
+
const contrastLimitsList = [];
|
2838
|
+
this.channelTargetC.forEach((channel, id) => {
|
2839
|
+
if (this.channelsVisible[id] || true) {
|
2840
|
+
const max = this.maxRange ? this.maxRange : 255;
|
2841
|
+
const minMax = [0, max];
|
2842
|
+
colorsSave.push([
|
2843
|
+
this.colors[id][0] / 255,
|
2844
|
+
this.colors[id][1] / 255,
|
2845
|
+
this.colors[id][2] / 255,
|
2846
|
+
this.channelsVisible[id] ? 1 : 0
|
2847
|
+
]);
|
2848
|
+
log.debug("colorsSave", colorsSave);
|
2849
|
+
if (this.contrastLimits[id][0] === 0 && this.contrastLimits[id][1] === 255) {
|
2850
|
+
contrastLimitsList.push([
|
2851
|
+
normalizeValue(minMax[0], minMax),
|
2852
|
+
normalizeValue(minMax[1], minMax)
|
2853
|
+
]);
|
2854
|
+
} else {
|
2855
|
+
contrastLimitsList.push([
|
2856
|
+
normalizeValue(this.contrastLimits[id][0], minMax),
|
2857
|
+
normalizeValue(this.contrastLimits[id][1], minMax)
|
2858
|
+
]);
|
2859
|
+
}
|
2860
|
+
}
|
2861
|
+
});
|
2862
|
+
if (!this.zarrInit) {
|
2863
|
+
this.originalScale = originalScaleXYZ;
|
2864
|
+
this.physicalDimensions = physicalDimensionsXYZ;
|
2865
|
+
this.maxResolution = maxResolutionXYZ;
|
2866
|
+
const scaledResolution = boxDimensionsXYZ;
|
2867
|
+
this.normalizedScale = normalizedScaleXYZ;
|
2868
|
+
this.meshScale = [
|
2869
|
+
this.originalScale[0] / this.originalScale[0],
|
2870
|
+
this.originalScale[1] / this.originalScale[0],
|
2871
|
+
this.originalScale[2] / this.originalScale[0]
|
2872
|
+
];
|
2873
|
+
this.geometrySize = scaledResolution;
|
2874
|
+
this.boxSize = scaledResolution;
|
2875
|
+
log.debug("this.boxSize", this.boxSize);
|
2876
|
+
log.debug("this.geometrySize", this.geometrySize);
|
2877
|
+
log.debug("this.meshScale", this.meshScale);
|
2878
|
+
log.debug("this.originalScale", this.originalScale);
|
2879
|
+
log.debug("this.physicalDimensions", this.physicalDimensions);
|
2880
|
+
log.debug("this.maxResolution", this.maxResolution);
|
2881
|
+
log.debug("scaledResolution", scaledResolution);
|
2882
|
+
this.zarrInit = true;
|
2883
|
+
}
|
2884
|
+
this.updateUniforms(
|
2885
|
+
texturesList,
|
2886
|
+
dimensions,
|
2887
|
+
// Pass dimensions object instead of volume
|
2888
|
+
this.renderingMode,
|
2889
|
+
contrastLimitsList,
|
2890
|
+
colorsSave,
|
2891
|
+
this.layerTransparency,
|
2892
|
+
this.xSlice,
|
2893
|
+
this.ySlice,
|
2894
|
+
this.zSlice,
|
2895
|
+
bcTHREE,
|
2896
|
+
ptTHREE
|
2897
|
+
);
|
2898
|
+
return {
|
2899
|
+
uniforms: this.uniforms,
|
2900
|
+
shader: this.shader,
|
2901
|
+
meshScale: this.meshScale,
|
2902
|
+
geometrySize: this.geometrySize,
|
2903
|
+
boxSize: this.boxSize
|
2904
|
+
};
|
2905
|
+
}
|
2906
|
+
/**
|
2907
|
+
* Update shader uniforms with current rendering values
|
2908
|
+
* @param {Array} textures - List of 3D textures
|
2909
|
+
* @param {Object} dimensions - Dimensions object
|
2910
|
+
* @param {number} renderstyle - Rendering mode value
|
2911
|
+
* @param {Array} contrastLimits - List of contrast limits for each channel
|
2912
|
+
* @param {Array} colors - List of colors for each channel
|
2913
|
+
* @param {number} layerTransparency - Overall transparency value
|
2914
|
+
* @param {Vector2} xSlice - X clipping plane
|
2915
|
+
* @param {Vector2} ySlice - Y clipping plane
|
2916
|
+
* @param {Vector2} zSlice - Z clipping plane
|
2917
|
+
*/
|
2918
|
+
updateUniforms(textures, dimensions, renderstyle, contrastLimits, colors, layerTransparency, xSlice, ySlice, zSlice, brickCacheTexture, pageTableTexture) {
|
2919
|
+
logWithColor$1("Updating uniforms");
|
2920
|
+
this.uniforms.boxSize.value.set(this.boxSize[0], this.boxSize[1], this.boxSize[2]);
|
2921
|
+
this.uniforms.brickCacheTex.value = brickCacheTexture;
|
2922
|
+
this.uniforms.pageTableTex.value = pageTableTexture;
|
2923
|
+
this.uniforms.near.value = 0.1;
|
2924
|
+
this.uniforms.far.value = 3e3;
|
2925
|
+
this.uniforms.opacity.value = layerTransparency;
|
2926
|
+
this.uniforms.volumeCount.value = textures.length;
|
2927
|
+
this.uniforms.u_size.value.set(dimensions.xLength, dimensions.yLength, dimensions.zLength);
|
2928
|
+
this.uniforms.u_window_size.value.set(0, 0);
|
2929
|
+
this.uniforms.u_vol_scale.value.set(1 / dimensions.xLength, 1 / dimensions.yLength, 1 / dimensions.zLength * 2);
|
2930
|
+
this.uniforms.clim0.value.set(contrastLimits.length > 0 ? contrastLimits[0][0] : null, contrastLimits.length > 0 ? contrastLimits[0][1] : null);
|
2931
|
+
this.uniforms.clim1.value.set(contrastLimits.length > 1 ? contrastLimits[1][0] : null, contrastLimits.length > 1 ? contrastLimits[1][1] : null);
|
2932
|
+
this.uniforms.clim2.value.set(contrastLimits.length > 2 ? contrastLimits[2][0] : null, contrastLimits.length > 2 ? contrastLimits[2][1] : null);
|
2933
|
+
this.uniforms.clim3.value.set(contrastLimits.length > 3 ? contrastLimits[3][0] : null, contrastLimits.length > 3 ? contrastLimits[3][1] : null);
|
2934
|
+
this.uniforms.clim4.value.set(contrastLimits.length > 4 ? contrastLimits[4][0] : null, contrastLimits.length > 4 ? contrastLimits[4][1] : null);
|
2935
|
+
this.uniforms.clim5.value.set(contrastLimits.length > 5 ? contrastLimits[5][0] : null, contrastLimits.length > 5 ? contrastLimits[5][1] : null);
|
2936
|
+
this.uniforms.clim6.value.set(contrastLimits.length > 6 ? contrastLimits[6][0] : null, contrastLimits.length > 6 ? contrastLimits[6][1] : null);
|
2937
|
+
this.uniforms.xClip.value.set(xSlice[0] * (1 / this.maxResolution[0]) * this.boxSize[0], xSlice[1] * (1 / this.maxResolution[0]) * this.boxSize[0]);
|
2938
|
+
this.uniforms.yClip.value.set(ySlice[0] * (1 / this.maxResolution[1]) * this.boxSize[1], ySlice[1] * (1 / this.maxResolution[1]) * this.boxSize[1]);
|
2939
|
+
this.uniforms.zClip.value.set(zSlice[0] * (1 / this.maxResolution[2]) * this.boxSize[2], zSlice[1] * (1 / this.maxResolution[2]) * this.boxSize[2]);
|
2940
|
+
this.uniforms.color0.value.set(colors.length > 0 ? colors[0][0] : null, colors.length > 0 ? colors[0][1] : null, colors.length > 0 ? colors[0][2] : null, colors.length > 0 ? colors[0][3] : null);
|
2941
|
+
this.uniforms.color1.value.set(colors.length > 1 ? colors[1][0] : null, colors.length > 1 ? colors[1][1] : null, colors.length > 1 ? colors[1][2] : null, colors.length > 1 ? colors[1][3] : null);
|
2942
|
+
this.uniforms.color2.value.set(colors.length > 2 ? colors[2][0] : null, colors.length > 2 ? colors[2][1] : null, colors.length > 2 ? colors[2][2] : null, colors.length > 2 ? colors[2][3] : null);
|
2943
|
+
this.uniforms.color3.value.set(colors.length > 3 ? colors[3][0] : null, colors.length > 3 ? colors[3][1] : null, colors.length > 3 ? colors[3][2] : null, colors.length > 3 ? colors[3][3] : null);
|
2944
|
+
this.uniforms.color4.value.set(colors.length > 4 ? colors[4][0] : null, colors.length > 4 ? colors[4][1] : null, colors.length > 4 ? colors[4][2] : null, colors.length > 4 ? colors[4][3] : null);
|
2945
|
+
this.uniforms.color5.value.set(colors.length > 5 ? colors[5][0] : null, colors.length > 5 ? colors[5][1] : null, colors.length > 5 ? colors[5][2] : null, colors.length > 5 ? colors[5][3] : null);
|
2946
|
+
this.uniforms.color6.value.set(colors.length > 6 ? colors[6][0] : null, colors.length > 6 ? colors[6][1] : null, colors.length > 6 ? colors[6][2] : null, colors.length > 6 ? colors[6][3] : null);
|
2947
|
+
for (let i = 0; i < 7; i++) {
|
2948
|
+
if (typeof this.channelMaxResolutionIndex[i] === "number") {
|
2949
|
+
this.uniforms[`res${i}`].value.set(Math.max(1, this.channelMaxResolutionIndex[i]), this.zarrStoreNumResolutions - 1);
|
2950
|
+
}
|
2951
|
+
}
|
2952
|
+
}
|
2953
|
+
/**
|
2954
|
+
* Sets the processing render target
|
2955
|
+
* @param {WebGLMultipleRenderTargets} mrt - Multiple render targets object with 3 attachments
|
2956
|
+
*/
|
2957
|
+
/*
|
2958
|
+
setProcessingTargets(mrt) {
|
2959
|
+
logWithColor('setting processing targets');
|
2960
|
+
this.mrt = mrt;
|
2961
|
+
}
|
2962
|
+
*/
|
2963
|
+
setChannelMapping(channelMapping) {
|
2964
|
+
logWithColor$1("setting channel mapping");
|
2965
|
+
log.debug("channelMapping", channelMapping);
|
2966
|
+
this.uniforms.channelMapping.value = channelMapping;
|
2967
|
+
}
|
2968
|
+
// Only called on initialization of the
|
2969
|
+
// VolumeDataManager and VolumeRenderManager for a particular image.
|
2970
|
+
setZarrUniforms(zarrStore, PT) {
|
2971
|
+
logWithColor$1("setting zarr uniforms");
|
2972
|
+
log.debug("zarrStore", zarrStore);
|
2973
|
+
log.debug("PT", PT);
|
2974
|
+
for (let i = 0; i <= 9; i++) {
|
2975
|
+
if (PT.anchors && PT.anchors[i]) {
|
2976
|
+
this.uniforms[`anchor${i}`].value.set(PT.anchors[i][0] || 0, PT.anchors[i][1] || 0, PT.anchors[i][2] || 0);
|
2977
|
+
} else {
|
2978
|
+
log.debug("anchor", i, "does not exist");
|
2979
|
+
this.uniforms[`anchor${i}`].value.set(0, 0, 0);
|
2980
|
+
}
|
2981
|
+
if (zarrStore.scales && zarrStore.scales[i]) {
|
2982
|
+
this.uniforms[`scale${i}`].value.set(zarrStore.scales[i][0] || 1, zarrStore.scales[i][1] || 1, zarrStore.scales[i][2] || 1);
|
2983
|
+
} else {
|
2984
|
+
log.debug("scale", i, "does not exist");
|
2985
|
+
}
|
2986
|
+
}
|
2987
|
+
log.debug("zarrStore.brickLayout", zarrStore.brickLayout);
|
2988
|
+
this.zarrStoreNumResolutions = zarrStore.brickLayout.length;
|
2989
|
+
for (let i = 0; i < 7; i++) {
|
2990
|
+
this.uniforms[`res${i}`].value.set(
|
2991
|
+
// TODO(mark) make this dependent on the per-channel maxResolution slider value.
|
2992
|
+
1,
|
2993
|
+
zarrStore.brickLayout.length - 1
|
2994
|
+
);
|
2995
|
+
}
|
2996
|
+
this.uniforms.resGlobal.value.set(1, zarrStore.brickLayout.length - 1);
|
2997
|
+
this.uniforms.voxelExtents.value.set(zarrStore.shapes[0][4], zarrStore.shapes[0][3], zarrStore.shapes[0][2]);
|
2998
|
+
this.uniforms.maxChannels.value = Math.min(zarrStore.channelCount, 7);
|
2999
|
+
log.debug("this.channelsVisible", this.channelsVisible);
|
3000
|
+
log.debug("zarrStore.shapes[0]", zarrStore.shapes[0]);
|
3001
|
+
log.debug("PT", PT);
|
3002
|
+
log.debug("uniforms", this.uniforms);
|
3003
|
+
}
|
3004
|
+
}
|
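Inside updateUniforms, the slice ranges arriving in voxel units are rescaled into box space before being written to xClip/yClip/zClip: each bound is multiplied by 1 / maxResolution and by the box size for that axis. A small worked example, with a 512-voxel axis mapped onto a box 1.0 units long (the numbers are illustrative, not taken from a real dataset):

// Mirrors: xClip = xSlice * (1 / maxResolution[0]) * boxSize[0]
function toClipRange(slice, maxResolutionAxis, boxSizeAxis) {
  return [
    slice[0] * (1 / maxResolutionAxis) * boxSizeAxis,
    slice[1] * (1 / maxResolutionAxis) * boxSizeAxis,
  ];
}

// Keep only voxels 128..384 of a 512-voxel-wide axis rendered into a unit-length box.
toClipRange([128, 384], 512, 1.0); // [0.25, 0.75]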
3005
|
+
const gaussianVertexShader = `//
|
3006
|
+
varying vec2 vUv;
|
3007
|
+
void main() {
|
3008
|
+
vUv = uv;
|
3009
|
+
gl_Position = vec4(position, 1.0);
|
3010
|
+
}
|
3011
|
+
`;
|
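gaussianVertexShader writes the vertex position straight into clip space, so a 2x2 plane covers the whole viewport regardless of the camera; this is the screen quad that VolumeView builds further down for the blur pass. A condensed sketch of that pairing, shown here outside React (blurMaterial is assumed to be a ShaderMaterial built from the two shaders above and below):

import { Scene, OrthographicCamera, PlaneGeometry, Mesh } from "three";

// A 2x2 plane spans clip space exactly when positions pass through unchanged.
const screenScene = new Scene();
const screenCamera = new OrthographicCamera(-1, 1, 1, -1, 0.1, 10);
screenCamera.position.z = 1;
const screenQuad = new Mesh(new PlaneGeometry(2, 2), blurMaterial);
screenScene.add(screenQuad);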
3012
|
+
const gaussianFragmentShader = `//
|
3013
|
+
// Input texture to blur
|
3014
|
+
uniform sampler2D tDiffuse;
|
3015
|
+
// Resolution of the texture (width, height)
|
3016
|
+
uniform vec2 resolution;
|
3017
|
+
// Blur strength: 1=no blur, 2-3=3x3 kernel, 4-5=5x5 kernel, 6+=7x7 kernel
|
3018
|
+
uniform int gaussian;
|
3019
|
+
// Texture coordinates for current pixel
|
3020
|
+
varying vec2 vUv;
|
3021
|
+
|
3022
|
+
/**
|
3023
|
+
* No blur - returns the original pixel color
|
3024
|
+
*/
|
3025
|
+
vec4 noGaussian() {
|
3026
|
+
vec4 color = texture2D(tDiffuse, vUv);
|
3027
|
+
return color;
|
3028
|
+
}
|
3029
|
+
|
3030
|
+
/**
|
3031
|
+
* Applies 3x3 Gaussian blur kernel
|
3032
|
+
* Samples 9 pixels in a 3x3 grid around the current pixel
|
3033
|
+
* Weights are based on 2D Gaussian distribution
|
3034
|
+
*/
|
3035
|
+
vec4 gaussian3(vec2 texel) {
|
3036
|
+
vec4 color = vec4(0.0);
|
3037
|
+
|
3038
|
+
// Top row: weights [0.0625, 0.125, 0.0625]
|
3039
|
+
color += texture2D(tDiffuse, vUv + texel * vec2(-1.0, -1.0)) * 0.0625;
|
3040
|
+
color += texture2D(tDiffuse, vUv + texel * vec2( 0.0, -1.0)) * 0.125;
|
3041
|
+
color += texture2D(tDiffuse, vUv + texel * vec2( 1.0, -1.0)) * 0.0625;
|
3042
|
+
|
3043
|
+
// Middle row: weights [0.125, 0.25, 0.125] (center pixel gets highest weight)
|
3044
|
+
color += texture2D(tDiffuse, vUv + texel * vec2(-1.0, 0.0)) * 0.125;
|
3045
|
+
color += texture2D(tDiffuse, vUv + texel * vec2( 0.0, 0.0)) * 0.25;
|
3046
|
+
color += texture2D(tDiffuse, vUv + texel * vec2( 1.0, 0.0)) * 0.125;
|
3047
|
+
|
3048
|
+
// Bottom row: weights [0.0625, 0.125, 0.0625]
|
3049
|
+
color += texture2D(tDiffuse, vUv + texel * vec2(-1.0, 1.0)) * 0.0625;
|
3050
|
+
color += texture2D(tDiffuse, vUv + texel * vec2( 0.0, 1.0)) * 0.125;
|
3051
|
+
color += texture2D(tDiffuse, vUv + texel * vec2( 1.0, 1.0)) * 0.0625;
|
3052
|
+
|
3053
|
+
return color;
|
3054
|
+
}
|
3055
|
+
|
3056
|
+
/**
|
3057
|
+
* Applies 5x5 Gaussian blur kernel
|
3058
|
+
* Samples 25 pixels in a 5x5 grid around the current pixel
|
3059
|
+
* Weights sum to 1.0 (273/273) for proper normalization
|
3060
|
+
*/
|
3061
|
+
vec4 gaussian5(vec2 texel) {
|
3062
|
+
vec4 color = vec4(0.0);
|
3063
|
+
|
3064
|
+
// Row 1: weights [1/273, 4/273, 7/273, 4/273, 1/273]
|
3065
|
+
color += texture2D(tDiffuse, vUv + texel * vec2(-2.0, -2.0)) * 1.0/273.0;
|
3066
|
+
color += texture2D(tDiffuse, vUv + texel * vec2(-1.0, -2.0)) * 4.0/273.0;
|
3067
|
+
color += texture2D(tDiffuse, vUv + texel * vec2( 0.0, -2.0)) * 7.0/273.0;
|
3068
|
+
color += texture2D(tDiffuse, vUv + texel * vec2( 1.0, -2.0)) * 4.0/273.0;
|
3069
|
+
color += texture2D(tDiffuse, vUv + texel * vec2( 2.0, -2.0)) * 1.0/273.0;
|
3070
|
+
|
3071
|
+
// Row 2: weights [4/273, 16/273, 26/273, 16/273, 4/273]
|
3072
|
+
color += texture2D(tDiffuse, vUv + texel * vec2(-2.0, -1.0)) * 4.0/273.0;
|
3073
|
+
color += texture2D(tDiffuse, vUv + texel * vec2(-1.0, -1.0)) * 16.0/273.0;
|
3074
|
+
color += texture2D(tDiffuse, vUv + texel * vec2( 0.0, -1.0)) * 26.0/273.0;
|
3075
|
+
color += texture2D(tDiffuse, vUv + texel * vec2( 1.0, -1.0)) * 16.0/273.0;
|
3076
|
+
color += texture2D(tDiffuse, vUv + texel * vec2( 2.0, -1.0)) * 4.0/273.0;
|
3077
|
+
|
3078
|
+
// Row 3 (center): weights [7/273, 26/273, 41/273, 26/273, 7/273]
|
3079
|
+
color += texture2D(tDiffuse, vUv + texel * vec2(-2.0, 0.0)) * 7.0/273.0;
|
3080
|
+
color += texture2D(tDiffuse, vUv + texel * vec2(-1.0, 0.0)) * 26.0/273.0;
|
3081
|
+
color += texture2D(tDiffuse, vUv + texel * vec2( 0.0, 0.0)) * 41.0/273.0; // Center pixel
|
3082
|
+
color += texture2D(tDiffuse, vUv + texel * vec2( 1.0, 0.0)) * 26.0/273.0;
|
3083
|
+
color += texture2D(tDiffuse, vUv + texel * vec2( 2.0, 0.0)) * 7.0/273.0;
|
3084
|
+
|
3085
|
+
// Row 4: weights [4/273, 16/273, 26/273, 16/273, 4/273]
|
3086
|
+
color += texture2D(tDiffuse, vUv + texel * vec2(-2.0, 1.0)) * 4.0/273.0;
|
3087
|
+
color += texture2D(tDiffuse, vUv + texel * vec2(-1.0, 1.0)) * 16.0/273.0;
|
3088
|
+
color += texture2D(tDiffuse, vUv + texel * vec2( 0.0, 1.0)) * 26.0/273.0;
|
3089
|
+
color += texture2D(tDiffuse, vUv + texel * vec2( 1.0, 1.0)) * 16.0/273.0;
|
3090
|
+
color += texture2D(tDiffuse, vUv + texel * vec2( 2.0, 1.0)) * 4.0/273.0;
|
3091
|
+
|
3092
|
+
// Row 5: weights [1/273, 4/273, 7/273, 4/273, 1/273]
|
3093
|
+
color += texture2D(tDiffuse, vUv + texel * vec2(-2.0, 2.0)) * 1.0/273.0;
|
3094
|
+
color += texture2D(tDiffuse, vUv + texel * vec2(-1.0, 2.0)) * 4.0/273.0;
|
3095
|
+
color += texture2D(tDiffuse, vUv + texel * vec2( 0.0, 2.0)) * 7.0/273.0;
|
3096
|
+
color += texture2D(tDiffuse, vUv + texel * vec2( 1.0, 2.0)) * 4.0/273.0;
|
3097
|
+
color += texture2D(tDiffuse, vUv + texel * vec2( 2.0, 2.0)) * 1.0/273.0;
|
3098
|
+
|
3099
|
+
return color;
|
3100
|
+
}
|
3101
|
+
|
3102
|
+
/**
|
3103
|
+
* Applies 7x7 Gaussian blur kernel
|
3104
|
+
* Samples 49 pixels in a 7x7 grid around the current pixel
|
3105
|
+
* Weights sum to 1.0 (1003/1003) for proper normalization
|
3106
|
+
* Creates the strongest blur effect
|
3107
|
+
*/
|
3108
|
+
vec4 gaussian7(vec2 texel) {
|
3109
|
+
vec4 color = vec4(0.0);
|
3110
|
+
|
3111
|
+
// Row 1: weights [0, 0, 1/1003, 2/1003, 1/1003, 0, 0]
|
3112
|
+
color += texture2D(tDiffuse, vUv + texel * vec2(-1.0, -3.0)) * 1.0/1003.0;
|
3113
|
+
color += texture2D(tDiffuse, vUv + texel * vec2( 0.0, -3.0)) * 2.0/1003.0;
|
3114
|
+
color += texture2D(tDiffuse, vUv + texel * vec2( 1.0, -3.0)) * 1.0/1003.0;
|
3115
|
+
|
3116
|
+
// Row 2: weights [0, 3/1003, 13/1003, 22/1003, 13/1003, 3/1003, 0]
|
3117
|
+
color += texture2D(tDiffuse, vUv + texel * vec2(-2.0, -2.0)) * 3.0/1003.0;
|
3118
|
+
color += texture2D(tDiffuse, vUv + texel * vec2(-1.0, -2.0)) * 13.0/1003.0;
|
3119
|
+
color += texture2D(tDiffuse, vUv + texel * vec2( 0.0, -2.0)) * 22.0/1003.0;
|
3120
|
+
color += texture2D(tDiffuse, vUv + texel * vec2( 1.0, -2.0)) * 13.0/1003.0;
|
3121
|
+
color += texture2D(tDiffuse, vUv + texel * vec2( 2.0, -2.0)) * 3.0/1003.0;
|
3122
|
+
|
3123
|
+
// Row 3: weights [1/1003, 13/1003, 59/1003, 97/1003, 59/1003, 13/1003, 1/1003]
|
3124
|
+
color += texture2D(tDiffuse, vUv + texel * vec2(-3.0, -1.0)) * 1.0/1003.0;
|
3125
|
+
color += texture2D(tDiffuse, vUv + texel * vec2(-2.0, -1.0)) * 13.0/1003.0;
|
3126
|
+
color += texture2D(tDiffuse, vUv + texel * vec2(-1.0, -1.0)) * 59.0/1003.0;
|
3127
|
+
color += texture2D(tDiffuse, vUv + texel * vec2( 0.0, -1.0)) * 97.0/1003.0;
|
3128
|
+
color += texture2D(tDiffuse, vUv + texel * vec2( 1.0, -1.0)) * 59.0/1003.0;
|
3129
|
+
color += texture2D(tDiffuse, vUv + texel * vec2( 2.0, -1.0)) * 13.0/1003.0;
|
3130
|
+
color += texture2D(tDiffuse, vUv + texel * vec2( 3.0, -1.0)) * 1.0/1003.0;
|
3131
|
+
|
3132
|
+
// Row 4 (center): weights [2/1003, 22/1003, 97/1003, 159/1003, 97/1003, 22/1003, 2/1003]
|
3133
|
+
color += texture2D(tDiffuse, vUv + texel * vec2(-3.0, 0.0)) * 2.0/1003.0;
|
3134
|
+
color += texture2D(tDiffuse, vUv + texel * vec2(-2.0, 0.0)) * 22.0/1003.0;
|
3135
|
+
color += texture2D(tDiffuse, vUv + texel * vec2(-1.0, 0.0)) * 97.0/1003.0;
|
3136
|
+
color += texture2D(tDiffuse, vUv + texel * vec2( 0.0, 0.0)) * 159.0/1003.0; // Center pixel
|
3137
|
+
color += texture2D(tDiffuse, vUv + texel * vec2( 1.0, 0.0)) * 97.0/1003.0;
|
3138
|
+
color += texture2D(tDiffuse, vUv + texel * vec2( 2.0, 0.0)) * 22.0/1003.0;
|
3139
|
+
color += texture2D(tDiffuse, vUv + texel * vec2( 3.0, 0.0)) * 2.0/1003.0;
|
3140
|
+
|
3141
|
+
// Row 5: weights [1/1003, 13/1003, 59/1003, 97/1003, 59/1003, 13/1003, 1/1003]
|
3142
|
+
color += texture2D(tDiffuse, vUv + texel * vec2(-3.0, 1.0)) * 1.0/1003.0;
|
3143
|
+
color += texture2D(tDiffuse, vUv + texel * vec2(-2.0, 1.0)) * 13.0/1003.0;
|
3144
|
+
color += texture2D(tDiffuse, vUv + texel * vec2(-1.0, 1.0)) * 59.0/1003.0;
|
3145
|
+
color += texture2D(tDiffuse, vUv + texel * vec2( 0.0, 1.0)) * 97.0/1003.0;
|
3146
|
+
color += texture2D(tDiffuse, vUv + texel * vec2( 1.0, 1.0)) * 59.0/1003.0;
|
3147
|
+
color += texture2D(tDiffuse, vUv + texel * vec2( 2.0, 1.0)) * 13.0/1003.0;
|
3148
|
+
color += texture2D(tDiffuse, vUv + texel * vec2( 3.0, 1.0)) * 1.0/1003.0;
|
3149
|
+
|
3150
|
+
// Row 6: weights [0, 3/1003, 13/1003, 22/1003, 13/1003, 3/1003, 0]
|
3151
|
+
color += texture2D(tDiffuse, vUv + texel * vec2(-2.0, 2.0)) * 3.0/1003.0;
|
3152
|
+
color += texture2D(tDiffuse, vUv + texel * vec2(-1.0, 2.0)) * 13.0/1003.0;
|
3153
|
+
color += texture2D(tDiffuse, vUv + texel * vec2( 0.0, 2.0)) * 22.0/1003.0;
|
3154
|
+
color += texture2D(tDiffuse, vUv + texel * vec2( 1.0, 2.0)) * 13.0/1003.0;
|
3155
|
+
color += texture2D(tDiffuse, vUv + texel * vec2( 2.0, 2.0)) * 3.0/1003.0;
|
3156
|
+
|
3157
|
+
// Row 7: weights [0, 0, 1/1003, 2/1003, 1/1003, 0, 0]
|
3158
|
+
color += texture2D(tDiffuse, vUv + texel * vec2(-1.0, 3.0)) * 1.0/1003.0;
|
3159
|
+
color += texture2D(tDiffuse, vUv + texel * vec2( 0.0, 3.0)) * 2.0/1003.0;
|
3160
|
+
color += texture2D(tDiffuse, vUv + texel * vec2( 1.0, 3.0)) * 1.0/1003.0;
|
3161
|
+
|
3162
|
+
return color;
|
3163
|
+
}
|
3164
|
+
|
3165
|
+
/**
|
3166
|
+
* Main fragment shader function
|
3167
|
+
* Determines which blur kernel to apply based on the 'gaussian' uniform
|
3168
|
+
*/
|
3169
|
+
void main() {
|
3170
|
+
// Calculate the size of one texel (pixel) in texture coordinates
|
3171
|
+
vec2 texel = 1.0 / resolution;
|
3172
|
+
|
3173
|
+
// Initialize output color (this line appears to be unused/debug code)
|
3174
|
+
vec4 color = vec4(1.0, 0.0, 0.0, 0.0);
|
3175
|
+
|
3176
|
+
// TODO: remove these variables since not used
|
3177
|
+
bool left = vUv.x < 0.5;
|
3178
|
+
bool top = vUv.y < 0.5;
|
3179
|
+
|
3180
|
+
if (gaussian > 1 && gaussian <= 3) {
|
3181
|
+
// Apply 3x3 Gaussian blur for values 2-3
|
3182
|
+
color = gaussian3(texel);
|
3183
|
+
} else if (gaussian > 3 && gaussian <= 5) {
|
3184
|
+
// Apply 5x5 Gaussian blur for values 4-5
|
3185
|
+
color = gaussian5(texel);
|
3186
|
+
} else if (gaussian > 5) {
|
3187
|
+
// Apply 7x7 Gaussian blur for values 6 and above
|
3188
|
+
color = gaussian7(texel);
|
3189
|
+
} else {
|
3190
|
+
// No blur for value 1 or less
|
3191
|
+
color = noGaussian();
|
3192
|
+
}
|
3193
|
+
|
3194
|
+
// Output the final blurred color
|
3195
|
+
gl_FragColor = color;
|
3196
|
+
}
|
3197
|
+
`;
|
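The hard-coded 3x3 weights above (0.0625, 0.125, 0.25) are simply the binomial kernel [1 2 1] taken as an outer product with itself and divided by 16, so the taps sum to 1 and the blur preserves overall brightness; the 5x5 and 7x7 variants are normalized the same way by 273 and 1003. A quick check in JavaScript (illustrative, not part of the bundle):

// Outer product of [1, 2, 1] with itself, normalized by 16.
const row = [1, 2, 1];
const kernel3 = row.map((a) => row.map((b) => (a * b) / 16));
// -> [[0.0625, 0.125, 0.0625], [0.125, 0.25, 0.125], [0.0625, 0.125, 0.0625]]

const sum = kernel3.flat().reduce((acc, w) => acc + w, 0); // 1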
3198
|
+
function framebufferFor(renderer, rt) {
|
3199
|
+
const p = renderer.properties?.get(rt);
|
3200
|
+
return p?.framebuffer || p?.__webglFramebuffer || rt.__webglFramebuffer;
|
3201
|
+
}
|
3202
|
+
function logWithColor(msg) {
|
3203
|
+
if (atLeastLogLevel(LogLevel.DEBUG)) {
|
3204
|
+
console.warn(`%cV: ${msg}`, "background: deeppink; color: white; padding: 2px; border-radius: 3px;");
|
3205
|
+
}
|
3206
|
+
}
|
3207
|
+
function performGeometryPass(_gl, _camera, _scene, { mrtRef }) {
|
3208
|
+
_gl.setRenderTarget(mrtRef.current);
|
3209
|
+
_gl.clear(true, true, true);
|
3210
|
+
_gl.render(_scene, _camera);
|
3211
|
+
}
|
3212
|
+
function performBlitPass(_gl, { screenSceneRef, screenCameraRef }) {
|
3213
|
+
_gl.setRenderTarget(null);
|
3214
|
+
_gl.clear(true, true, true);
|
3215
|
+
_gl.render(screenSceneRef.current, screenCameraRef.current);
|
3216
|
+
}
|
3217
|
+
function handleRequests(_gl, { frameRef, dataManager, mrtRef, bufRequest, bufUsage }) {
|
3218
|
+
const ctx = _gl.getContext();
|
3219
|
+
const f = frameRef.current;
|
3220
|
+
if (dataManager.noNewRequests === true && f % 100 === 0 && f < 500) {
|
3221
|
+
dataManager.noNewRequests = false;
|
3222
|
+
}
|
3223
|
+
if (dataManager.triggerRequest === true && dataManager.noNewRequests === false) {
|
3224
|
+
ctx.bindFramebuffer(ctx.READ_FRAMEBUFFER, framebufferFor(_gl, mrtRef.current));
|
3225
|
+
ctx.readBuffer(ctx.COLOR_ATTACHMENT1);
|
3226
|
+
ctx.readPixels(0, 0, mrtRef.current.width, mrtRef.current.height, ctx.RGBA, ctx.UNSIGNED_BYTE, bufRequest.current);
|
3227
|
+
dataManager.processRequestData(bufRequest.current);
|
3228
|
+
} else if (dataManager.triggerUsage === true && dataManager.noNewRequests === false) {
|
3229
|
+
ctx.bindFramebuffer(ctx.READ_FRAMEBUFFER, framebufferFor(_gl, mrtRef.current));
|
3230
|
+
ctx.readBuffer(ctx.COLOR_ATTACHMENT2);
|
3231
|
+
ctx.readPixels(0, 0, mrtRef.current.width, mrtRef.current.height, ctx.RGBA, ctx.UNSIGNED_BYTE, bufUsage.current);
|
3232
|
+
dataManager.processUsageData(bufUsage.current);
|
3233
|
+
}
|
3234
|
+
}
|
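handleRequests pulls the request and usage attachments back to the CPU by binding the MRT's framebuffer for reading, selecting the attachment with readBuffer, and then calling readPixels into a preallocated buffer. Stripped of the data-manager bookkeeping, the WebGL2 pattern is just this (a sketch; gl2, renderer, and mrt are assumed to already exist):

// Read one color attachment of a framebuffer into a preallocated RGBA8 buffer.
function readAttachment(gl2, framebuffer, attachmentIndex, width, height, out) {
  gl2.bindFramebuffer(gl2.READ_FRAMEBUFFER, framebuffer);
  gl2.readBuffer(gl2.COLOR_ATTACHMENT0 + attachmentIndex); // attachment enums are sequential
  gl2.readPixels(0, 0, width, height, gl2.RGBA, gl2.UNSIGNED_BYTE, out);
}

// Attachment 1 carries brick requests, attachment 2 carries usage data (see above).
const bufRequest = new Uint8Array(mrt.width * mrt.height * 4);
readAttachment(gl2, framebufferFor(renderer, mrt), 1, mrt.width, mrt.height, bufRequest);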
3235
|
+
function handleAdaptiveQuality(clock, params) {
|
3236
|
+
const { invalidate, isInteracting, dataManager, spatialRenderingModeChanging, meshRef, stillRef, screenQuadRef, lastSampleRef, frameRef, lastFrameCountRef } = params;
|
3237
|
+
if (screenQuadRef.current) {
|
3238
|
+
if (!stillRef.current) {
|
3239
|
+
screenQuadRef.current.material.uniforms.gaussian.value = 7;
|
3240
|
+
} else {
|
3241
|
+
screenQuadRef.current.material.uniforms.gaussian.value = 0;
|
3242
|
+
}
|
3243
|
+
}
|
3244
|
+
if (isInteracting) {
|
3245
|
+
dataManager.noNewRequests = false;
|
3246
|
+
dataManager.triggerUsage = true;
|
3247
|
+
return;
|
3248
|
+
}
|
3249
|
+
if (spatialRenderingModeChanging)
|
3250
|
+
return;
|
3251
|
+
const meshRefUniforms = meshRef.current?.material?.uniforms;
|
3252
|
+
const renderSpeed = meshRefUniforms?.renderRes?.value ?? dataManager?.PT?.lowestDataRes;
|
3253
|
+
if (dataManager.noNewRequests) {
|
3254
|
+
if (renderSpeed !== 0) {
|
3255
|
+
if (meshRefUniforms) {
|
3256
|
+
meshRefUniforms.renderRes.value = 0;
|
3257
|
+
}
|
3258
|
+
log.debug("Adaptive Quality: No new requests. Setting renderSpeed to 0 (best quality).");
|
3259
|
+
stillRef.current = false;
|
3260
|
+
screenQuadRef.current.material.uniforms.gaussian.value = 7;
|
3261
|
+
invalidate();
|
3262
|
+
} else if (!stillRef.current) {
|
3263
|
+
log.debug("Adaptive Quality: No new requests and already at best quality. Setting stillRef to true.");
|
3264
|
+
stillRef.current = true;
|
3265
|
+
screenQuadRef.current.material.uniforms.gaussian.value = 0;
|
3266
|
+
}
|
3267
|
+
return;
|
3268
|
+
}
|
3269
|
+
if (stillRef.current) {
|
3270
|
+
stillRef.current = false;
|
3271
|
+
invalidate();
|
3272
|
+
}
|
3273
|
+
const t = clock.getElapsedTime();
|
3274
|
+
if (t - lastSampleRef.current < 1) {
|
3275
|
+
return;
|
3276
|
+
}
|
3277
|
+
const timeElapsedDuringSample = t - lastSampleRef.current;
|
3278
|
+
const framesRenderedDuringSample = frameRef.current - lastFrameCountRef.current;
|
3279
|
+
let fps = 0;
|
3280
|
+
if (timeElapsedDuringSample > 0) {
|
3281
|
+
fps = framesRenderedDuringSample / timeElapsedDuringSample;
|
3282
|
+
}
|
3283
|
+
lastSampleRef.current = t;
|
3284
|
+
lastFrameCountRef.current = frameRef.current;
|
3285
|
+
const upscale = fps > 100 && renderSpeed > 0;
|
3286
|
+
const downscale = fps < 30 && renderSpeed < dataManager.PT.lowestDataRes;
|
3287
|
+
if (upscale || downscale) {
|
3288
|
+
const newSpeed = renderSpeed + (downscale ? 1 : -1);
|
3289
|
+
if (meshRefUniforms) {
|
3290
|
+
meshRefUniforms.renderRes.value = newSpeed;
|
3291
|
+
}
|
3292
|
+
invalidate();
|
3293
|
+
}
|
3294
|
+
}
|
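The adaptive-quality pass samples the frame rate roughly once per second and nudges renderRes one level at a time: above 100 fps it steps toward full quality (0), below 30 fps it steps toward the coarsest level the page table allows. The decision itself reduces to a small pure function (a sketch using the same thresholds as above):

// Returns the next renderRes value, or null when no change is needed.
// Lower values mean higher quality; lowestDataRes is the coarsest allowed level.
function nextRenderRes(fps, renderRes, lowestDataRes) {
  if (fps > 100 && renderRes > 0) return renderRes - 1;            // speed to spare: sharpen
  if (fps < 30 && renderRes < lowestDataRes) return renderRes + 1; // struggling: coarsen
  return null;
}

nextRenderRes(120, 2, 4); // 1
nextRenderRes(24, 2, 4);  // 3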
3295
|
+
function VolumeView(props) {
|
3296
|
+
const {
|
3297
|
+
images,
|
3298
|
+
imageLayerScopes,
|
3299
|
+
imageLayerCoordination,
|
3300
|
+
imageChannelScopesByLayer,
|
3301
|
+
imageChannelCoordination,
|
3302
|
+
// onInitComplete,
|
3303
|
+
spatialRenderingMode,
|
3304
|
+
spatialRenderingModeChanging
|
3305
|
+
} = props;
|
3306
|
+
const {
|
3307
|
+
gl
|
3308
|
+
// scene,
|
3309
|
+
// camera
|
3310
|
+
} = useThree();
|
3311
|
+
const invalidate = useThree((state) => state.invalidate);
|
3312
|
+
const orbitRef = useRef(null);
|
3313
|
+
const meshRef = useRef(null);
|
3314
|
+
const bufRequest = useRef(null);
|
3315
|
+
const bufUsage = useRef(null);
|
3316
|
+
const mrtRef = useRef(null);
|
3317
|
+
const [renderState, setRenderState] = useState({
|
3318
|
+
uniforms: null,
|
3319
|
+
shader: null,
|
3320
|
+
meshScale: [1, 1, 1],
|
3321
|
+
geometrySize: [1, 1, 1]
|
3322
|
+
});
|
3323
|
+
const [is3D, setIs3D] = useState(false);
|
3324
|
+
const screenSceneRef = useRef(null);
|
3325
|
+
const screenCameraRef = useRef(null);
|
3326
|
+
const screenQuadRef = useRef(null);
|
3327
|
+
const [isInteracting, setIsInteracting] = useState(false);
|
3328
|
+
const interactionTimeoutRef = useRef(null);
|
3329
|
+
const stillRef = useRef(false);
|
3330
|
+
const frameRef = useRef(0);
|
3331
|
+
const lastSampleRef = useRef(0);
|
3332
|
+
const lastFrameCountRef = useRef(0);
|
3333
|
+
const dataManager = useMemo(() => new VolumeDataManager(gl), [gl]);
|
3334
|
+
const renderManager = useMemo(() => new VolumeRenderManager(), []);
|
3335
|
+
useEffect(() => {
|
3336
|
+
logWithColor("useEffect MRT target matching canvas");
|
3337
|
+
const { width, height } = gl.domElement;
|
3338
|
+
const mrt = new WebGLMultipleRenderTargets(width, height, 3);
|
3339
|
+
mrt.texture.forEach((tex) => {
|
3340
|
+
tex.format = RGBAFormat;
|
3341
|
+
tex.type = UnsignedByteType;
|
3342
|
+
tex.minFilter = NearestFilter;
|
3343
|
+
tex.magFilter = NearestFilter;
|
3344
|
+
tex.generateMipmaps = false;
|
3345
|
+
});
|
3346
|
+
const screenScene = new Scene();
|
3347
|
+
const screenCamera = new OrthographicCamera(-1, 1, 1, -1, 0.1, 10);
|
3348
|
+
screenCamera.position.z = 1;
|
3349
|
+
const screenMaterial = new ShaderMaterial({
|
3350
|
+
uniforms: {
|
3351
|
+
// Bind the first render target texture as the input of the gaussian blur shader.
|
3352
|
+
tDiffuse: { value: mrt.texture[0] },
|
3353
|
+
resolution: { value: new Vector2(width, height) },
|
3354
|
+
gaussian: { value: 7 }
|
3355
|
+
},
|
3356
|
+
vertexShader: gaussianVertexShader,
|
3357
|
+
fragmentShader: gaussianFragmentShader,
|
3358
|
+
transparent: true
|
3359
|
+
});
|
3360
|
+
const screenQuad = new Mesh(new PlaneGeometry(2, 2), screenMaterial);
|
3361
|
+
screenScene.add(screenQuad);
|
3362
|
+
screenSceneRef.current = screenScene;
|
3363
|
+
screenCameraRef.current = screenCamera;
|
3364
|
+
screenQuadRef.current = screenQuad;
|
3365
|
+
bufRequest.current = new Uint8Array(width * height * 4);
|
3366
|
+
bufUsage.current = new Uint8Array(width * height * 4);
|
3367
|
+
mrtRef.current = mrt;
|
3368
|
+
return () => {
|
3369
|
+
mrt.dispose();
|
3370
|
+
screenMaterial.dispose();
|
3371
|
+
screenQuad.geometry.dispose();
|
3372
|
+
};
|
3373
|
+
}, [gl]);
|
3374
|
+
const firstImageLayerScope = imageLayerScopes?.[0];
|
3375
|
+
const firstImage = images?.[firstImageLayerScope];
|
3376
|
+
imageChannelScopesByLayer?.[firstImageLayerScope];
|
3377
|
+
const firstImageLayerChannelCoordination = imageChannelCoordination?.[0]?.[firstImageLayerScope];
|
3378
|
+
useEffect(() => {
|
3379
|
+
logWithColor("useEffect INIT");
|
3380
|
+
if (!dataManager || !renderManager) {
|
3381
|
+
log.debug("dataManager or renderManager not initialized yet");
|
3382
|
+
return;
|
3383
|
+
}
|
3384
|
+
if (!firstImage) {
|
3385
|
+
log.debug("no first image layer yet");
|
3386
|
+
return;
|
3387
|
+
}
|
3388
|
+
if (!firstImageLayerChannelCoordination) {
|
3389
|
+
log.debug("no firstImageLayerChannelCoordination yet");
|
3390
|
+
return;
|
3391
|
+
}
|
3392
|
+
(async () => {
|
3393
|
+
dataManager.initImages(images, imageLayerScopes);
|
3394
|
+
await dataManager.init(firstImageLayerChannelCoordination);
|
3395
|
+
renderManager.setZarrUniforms(dataManager.zarrStore, dataManager.PT);
|
3396
|
+
renderManager.setChannelMapping(dataManager.channels.colorMappings);
|
3397
|
+
log.debug("rm.uniforms", renderManager.uniforms);
|
3398
|
+
})();
|
3399
|
+
}, [
|
3400
|
+
dataManager,
|
3401
|
+
renderManager,
|
3402
|
+
images,
|
3403
|
+
imageLayerScopes
|
3404
|
+
]);
|
3405
|
+
useEffect(() => {
|
3406
|
+
logWithColor("useEffect spatialRenderingMode");
|
3407
|
+
const on3D = spatialRenderingMode === "3D";
|
3408
|
+
setIs3D(on3D);
|
3409
|
+
if (on3D && dataManager && renderManager) {
|
3410
|
+
const propsForRenderManager = {
|
3411
|
+
images,
|
3412
|
+
imageLayerScopes,
|
3413
|
+
imageLayerCoordination,
|
3414
|
+
imageChannelScopesByLayer,
|
3415
|
+
imageChannelCoordination,
|
3416
|
+
spatialRenderingMode
|
3417
|
+
};
|
3418
|
+
if (renderManager.updateFromProps(propsForRenderManager)) {
|
3419
|
+
const { zarrInit } = renderManager;
|
3420
|
+
if (!zarrInit) {
|
3421
|
+
dataManager.ptTHREE.needsUpdate = false;
|
3422
|
+
dataManager.bcTHREE.needsUpdate = false;
|
3423
|
+
dataManager.renderer.initTexture(dataManager.bcTHREE);
|
3424
|
+
dataManager.renderer.initTexture(dataManager.ptTHREE);
|
3425
|
+
dataManager.initTexture();
|
3426
|
+
}
|
3427
|
+
const nextRenderState = renderManager.updateRendering({
|
3428
|
+
zarrStoreShapes: dataManager.zarrStore.shapes,
|
3429
|
+
originalScaleXYZ: dataManager.getOriginalScaleXYZ(),
|
3430
|
+
physicalDimensionsXYZ: dataManager.getPhysicalDimensionsXYZ(),
|
3431
|
+
maxResolutionXYZ: dataManager.getMaxResolutionXYZ(),
|
3432
|
+
boxDimensionsXYZ: dataManager.getBoxDimensionsXYZ(),
|
3433
|
+
normalizedScaleXYZ: dataManager.getNormalizedScaleXYZ(),
|
3434
|
+
bcTHREE: dataManager.bcTHREE,
|
3435
|
+
ptTHREE: dataManager.ptTHREE
|
3436
|
+
});
|
3437
|
+
if (nextRenderState) {
|
3438
|
+
setRenderState(nextRenderState);
|
3439
|
+
}
|
3440
|
+
}
|
3441
|
+
}
|
3442
|
+
}, [
|
3443
|
+
dataManager,
|
3444
|
+
renderManager,
|
3445
|
+
images,
|
3446
|
+
imageLayerScopes,
|
3447
|
+
imageLayerCoordination,
|
3448
|
+
imageChannelScopesByLayer,
|
3449
|
+
imageChannelCoordination,
|
3450
|
+
spatialRenderingMode
|
3451
|
+
]);
|
3452
|
+
useEffect(() => {
|
3453
|
+
logWithColor("useEffect isInteracting");
|
3454
|
+
if (isInteracting) {
|
3455
|
+
const meshRefUniforms = meshRef.current?.material?.uniforms;
|
3456
|
+
if (meshRefUniforms) {
|
3457
|
+
meshRefUniforms.renderRes.value = dataManager.PT.lowestDataRes;
|
3458
|
+
}
|
3459
|
+
stillRef.current = false;
|
3460
|
+
invalidate();
|
3461
|
+
}
|
3462
|
+
}, [invalidate, isInteracting]);
|
3463
|
+
const RENDER_PRIORITY = 1;
|
3464
|
+
useFrame((state, delta, xrFrame) => {
|
3465
|
+
if (!mrtRef.current || !dataManager || !renderManager)
|
3466
|
+
return;
|
3467
|
+
const { gl: frameGl, camera: frameCamera, scene: frameScene, clock } = state;
|
3468
|
+
if (!stillRef.current) {
|
3469
|
+
performGeometryPass(frameGl, frameCamera, frameScene, { mrtRef });
|
3470
|
+
}
|
3471
|
+
performBlitPass(frameGl, { screenSceneRef, screenCameraRef });
|
3472
|
+
handleRequests(frameGl, { frameRef, dataManager, mrtRef, bufRequest, bufUsage });
|
3473
|
+
handleAdaptiveQuality(clock, {
|
3474
|
+
invalidate,
|
3475
|
+
isInteracting,
|
3476
|
+
dataManager,
|
3477
|
+
spatialRenderingModeChanging,
|
3478
|
+
meshRef,
|
3479
|
+
stillRef,
|
3480
|
+
screenQuadRef,
|
3481
|
+
lastSampleRef,
|
3482
|
+
frameRef,
|
3483
|
+
lastFrameCountRef
|
3484
|
+
});
|
3485
|
+
frameRef.current += 1;
|
3486
|
+
}, RENDER_PRIORITY);
|
3487
|
+
const onOrbitControlsStart = useCallback((e) => {
|
3488
|
+
setIsInteracting(true);
|
3489
|
+
}, []);
|
3490
|
+
const onOrbitControlsEnd = useCallback((e) => {
|
3491
|
+
clearTimeout(interactionTimeoutRef.current);
|
3492
|
+
interactionTimeoutRef.current = setTimeout(() => {
|
3493
|
+
setIsInteracting(false);
|
3494
|
+
}, 300);
|
3495
|
+
}, []);
|
3496
|
+
useEffect(() => {
|
3497
|
+
logWithColor("useEffect firstImageLayerChannelCoordination");
|
3498
|
+
setIsInteracting(true);
|
3499
|
+
log.debug("something about channels changed");
|
3500
|
+
dataManager.updateChannels(firstImageLayerChannelCoordination);
|
3501
|
+
renderManager.setChannelMapping(dataManager.channels.colorMappings);
|
3502
|
+
clearTimeout(interactionTimeoutRef.current);
|
3503
|
+
interactionTimeoutRef.current = setTimeout(() => {
|
3504
|
+
setIsInteracting(false);
|
3505
|
+
}, 300);
|
3506
|
+
}, [firstImageLayerChannelCoordination]);
|
3507
|
+
if (!is3D || !dataManager || !renderManager)
|
3508
|
+
return null;
|
3509
|
+
if (!renderState.shader) {
|
3510
|
+
return jsxRuntimeExports.jsxs("group", { children: [jsxRuntimeExports.jsxs("mesh", { children: [jsxRuntimeExports.jsx("boxGeometry", { args: [1, 1, 1] }), jsxRuntimeExports.jsx("meshBasicMaterial", { color: "#444", wireframe: true })] }), jsxRuntimeExports.jsx(OrbitControls, { ref: orbitRef })] });
|
3511
|
+
}
|
3512
|
+
return jsxRuntimeExports.jsxs("group", { children: [jsxRuntimeExports.jsx(
|
3513
|
+
OrbitControls,
|
3514
|
+
{
|
3515
|
+
// ref={mainOrbitControlsRef}
|
3516
|
+
enableDamping: false,
|
3517
|
+
onStart: onOrbitControlsStart,
|
3518
|
+
onEnd: onOrbitControlsEnd
|
3519
|
+
}
|
3520
|
+
), jsxRuntimeExports.jsxs("mesh", { ref: meshRef, scale: renderState.meshScale, children: [jsxRuntimeExports.jsx("boxGeometry", { args: renderState.geometrySize }), jsxRuntimeExports.jsx("shaderMaterial", { uniforms: renderState.uniforms, vertexShader: renderState.shader.vertexShader, fragmentShader: renderState.shader.fragmentShader, side: BackSide, transparent: false, glslVersion: GLSL3 })] })] });
|
3521
|
+
}
|
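Each useFrame tick in VolumeView runs three steps in order: render the volume into the MRT (skipped while the image is marked still), blit attachment 0 through the blur quad to the screen, then occasionally read attachments 1 and 2 back for brick requests and usage tracking before the adaptive-quality check. Condensed, the per-frame flow is roughly the sketch below, where refs bundles the refs and the data manager that VolumeView creates:

// Condensed from the useFrame callback above; illustrative only.
function renderTick(gl, camera, scene, refs) {
  if (!refs.stillRef.current) {
    performGeometryPass(gl, camera, scene, { mrtRef: refs.mrtRef }); // volume -> MRT
  }
  performBlitPass(gl, {
    screenSceneRef: refs.screenSceneRef,
    screenCameraRef: refs.screenCameraRef,
  }); // MRT[0] -> screen through the blur quad
  handleRequests(gl, refs); // MRT[1]/MRT[2] -> CPU when the data manager asks
}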
3522
|
+
function SpatialWrapper(props) {
|
3523
|
+
return jsxRuntimeExports.jsx(Canvas, { frameloop: "always", style: {
|
3524
|
+
position: "absolute",
|
3525
|
+
top: 0,
|
3526
|
+
left: 0,
|
3527
|
+
width: "100%",
|
3528
|
+
height: "100%",
|
3529
|
+
padding: 0,
|
3530
|
+
margin: 0
|
3531
|
+
// backgroundColor: 'white',
|
3532
|
+
}, camera: {
|
3533
|
+
fov: 50,
|
3534
|
+
up: [0, 1, 0],
|
3535
|
+
position: [0, 0, 4],
|
3536
|
+
near: 0.01,
|
3537
|
+
far: 15
|
3538
|
+
}, gl: {
|
3539
|
+
antialias: true,
|
3540
|
+
logarithmicDepthBuffer: false,
|
3541
|
+
preserveDrawingBuffer: false,
|
3542
|
+
autoClear: false
|
3543
|
+
}, children: jsxRuntimeExports.jsx(VolumeView, { ...props }) });
|
3544
|
+
}
|
3545
|
+
export {
|
3546
|
+
SpatialWrapper
|
3547
|
+
};
|
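SpatialWrapper is the chunk's only export; it hosts VolumeView inside a react-three-fiber Canvas and forwards every prop unchanged. A hypothetical mount is shown below: the prop names match the destructuring in VolumeView, but the values would normally be supplied by the surrounding Vitessce spatial view rather than written by hand, and this chunk is loaded internally by the package rather than imported directly.

// Placeholder props; in practice these come from Vitessce's coordination system.
<SpatialWrapper
  images={images}
  imageLayerScopes={imageLayerScopes}
  imageLayerCoordination={imageLayerCoordination}
  imageChannelScopesByLayer={imageChannelScopesByLayer}
  imageChannelCoordination={imageChannelCoordination}
  spatialRenderingMode="3D"
  spatialRenderingModeChanging={false}
/>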