@aics/vole-core 3.12.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE.txt +26 -0
- package/README.md +119 -0
- package/es/Atlas2DSlice.js +224 -0
- package/es/Channel.js +264 -0
- package/es/FileSaver.js +31 -0
- package/es/FusedChannelData.js +192 -0
- package/es/Histogram.js +250 -0
- package/es/ImageInfo.js +127 -0
- package/es/Light.js +74 -0
- package/es/Lut.js +500 -0
- package/es/MarchingCubes.js +507 -0
- package/es/MeshVolume.js +334 -0
- package/es/NaiveSurfaceNets.js +251 -0
- package/es/PathTracedVolume.js +482 -0
- package/es/RayMarchedAtlasVolume.js +250 -0
- package/es/RenderToBuffer.js +31 -0
- package/es/ThreeJsPanel.js +633 -0
- package/es/Timing.js +28 -0
- package/es/TrackballControls.js +538 -0
- package/es/View3d.js +848 -0
- package/es/Volume.js +352 -0
- package/es/VolumeCache.js +161 -0
- package/es/VolumeDims.js +16 -0
- package/es/VolumeDrawable.js +702 -0
- package/es/VolumeMaker.js +101 -0
- package/es/VolumeRenderImpl.js +1 -0
- package/es/VolumeRenderSettings.js +203 -0
- package/es/constants/basicShaders.js +29 -0
- package/es/constants/colors.js +59 -0
- package/es/constants/denoiseShader.js +43 -0
- package/es/constants/lights.js +42 -0
- package/es/constants/materials.js +85 -0
- package/es/constants/pathtraceOutputShader.js +13 -0
- package/es/constants/scaleBarSVG.js +21 -0
- package/es/constants/time.js +34 -0
- package/es/constants/volumePTshader.js +153 -0
- package/es/constants/volumeRayMarchShader.js +123 -0
- package/es/constants/volumeSliceShader.js +115 -0
- package/es/index.js +21 -0
- package/es/loaders/IVolumeLoader.js +131 -0
- package/es/loaders/JsonImageInfoLoader.js +255 -0
- package/es/loaders/OmeZarrLoader.js +495 -0
- package/es/loaders/OpenCellLoader.js +65 -0
- package/es/loaders/RawArrayLoader.js +89 -0
- package/es/loaders/TiffLoader.js +219 -0
- package/es/loaders/VolumeLoadError.js +44 -0
- package/es/loaders/VolumeLoaderUtils.js +221 -0
- package/es/loaders/index.js +40 -0
- package/es/loaders/zarr_utils/ChunkPrefetchIterator.js +143 -0
- package/es/loaders/zarr_utils/WrappedStore.js +51 -0
- package/es/loaders/zarr_utils/types.js +24 -0
- package/es/loaders/zarr_utils/utils.js +225 -0
- package/es/loaders/zarr_utils/validation.js +49 -0
- package/es/test/ChunkPrefetchIterator.test.js +208 -0
- package/es/test/RequestQueue.test.js +442 -0
- package/es/test/SubscribableRequestQueue.test.js +244 -0
- package/es/test/VolumeCache.test.js +118 -0
- package/es/test/VolumeRenderSettings.test.js +71 -0
- package/es/test/lut.test.js +671 -0
- package/es/test/num_utils.test.js +140 -0
- package/es/test/volume.test.js +98 -0
- package/es/test/zarr_utils.test.js +358 -0
- package/es/types/Atlas2DSlice.d.ts +41 -0
- package/es/types/Channel.d.ts +44 -0
- package/es/types/FileSaver.d.ts +6 -0
- package/es/types/FusedChannelData.d.ts +26 -0
- package/es/types/Histogram.d.ts +57 -0
- package/es/types/ImageInfo.d.ts +87 -0
- package/es/types/Light.d.ts +27 -0
- package/es/types/Lut.d.ts +67 -0
- package/es/types/MarchingCubes.d.ts +53 -0
- package/es/types/MeshVolume.d.ts +40 -0
- package/es/types/NaiveSurfaceNets.d.ts +11 -0
- package/es/types/PathTracedVolume.d.ts +65 -0
- package/es/types/RayMarchedAtlasVolume.d.ts +41 -0
- package/es/types/RenderToBuffer.d.ts +17 -0
- package/es/types/ThreeJsPanel.d.ts +107 -0
- package/es/types/Timing.d.ts +11 -0
- package/es/types/TrackballControls.d.ts +51 -0
- package/es/types/View3d.d.ts +357 -0
- package/es/types/Volume.d.ts +152 -0
- package/es/types/VolumeCache.d.ts +43 -0
- package/es/types/VolumeDims.d.ts +28 -0
- package/es/types/VolumeDrawable.d.ts +108 -0
- package/es/types/VolumeMaker.d.ts +49 -0
- package/es/types/VolumeRenderImpl.d.ts +22 -0
- package/es/types/VolumeRenderSettings.d.ts +98 -0
- package/es/types/constants/basicShaders.d.ts +4 -0
- package/es/types/constants/colors.d.ts +2 -0
- package/es/types/constants/denoiseShader.d.ts +40 -0
- package/es/types/constants/lights.d.ts +38 -0
- package/es/types/constants/materials.d.ts +20 -0
- package/es/types/constants/pathtraceOutputShader.d.ts +11 -0
- package/es/types/constants/scaleBarSVG.d.ts +2 -0
- package/es/types/constants/time.d.ts +19 -0
- package/es/types/constants/volumePTshader.d.ts +137 -0
- package/es/types/constants/volumeRayMarchShader.d.ts +117 -0
- package/es/types/constants/volumeSliceShader.d.ts +109 -0
- package/es/types/glsl.d.js +0 -0
- package/es/types/index.d.ts +28 -0
- package/es/types/loaders/IVolumeLoader.d.ts +113 -0
- package/es/types/loaders/JsonImageInfoLoader.d.ts +80 -0
- package/es/types/loaders/OmeZarrLoader.d.ts +87 -0
- package/es/types/loaders/OpenCellLoader.d.ts +9 -0
- package/es/types/loaders/RawArrayLoader.d.ts +33 -0
- package/es/types/loaders/TiffLoader.d.ts +45 -0
- package/es/types/loaders/VolumeLoadError.d.ts +18 -0
- package/es/types/loaders/VolumeLoaderUtils.d.ts +38 -0
- package/es/types/loaders/index.d.ts +22 -0
- package/es/types/loaders/zarr_utils/ChunkPrefetchIterator.d.ts +22 -0
- package/es/types/loaders/zarr_utils/WrappedStore.d.ts +24 -0
- package/es/types/loaders/zarr_utils/types.d.ts +94 -0
- package/es/types/loaders/zarr_utils/utils.d.ts +23 -0
- package/es/types/loaders/zarr_utils/validation.d.ts +7 -0
- package/es/types/test/ChunkPrefetchIterator.test.d.ts +1 -0
- package/es/types/test/RequestQueue.test.d.ts +1 -0
- package/es/types/test/SubscribableRequestQueue.test.d.ts +1 -0
- package/es/types/test/VolumeCache.test.d.ts +1 -0
- package/es/types/test/VolumeRenderSettings.test.d.ts +1 -0
- package/es/types/test/lut.test.d.ts +1 -0
- package/es/types/test/num_utils.test.d.ts +1 -0
- package/es/types/test/volume.test.d.ts +1 -0
- package/es/types/test/zarr_utils.test.d.ts +1 -0
- package/es/types/types.d.ts +115 -0
- package/es/types/utils/RequestQueue.d.ts +112 -0
- package/es/types/utils/SubscribableRequestQueue.d.ts +52 -0
- package/es/types/utils/num_utils.d.ts +43 -0
- package/es/types/workers/VolumeLoaderContext.d.ts +106 -0
- package/es/types/workers/types.d.ts +101 -0
- package/es/types/workers/util.d.ts +3 -0
- package/es/types.js +75 -0
- package/es/typings.d.js +0 -0
- package/es/utils/RequestQueue.js +267 -0
- package/es/utils/SubscribableRequestQueue.js +187 -0
- package/es/utils/num_utils.js +231 -0
- package/es/workers/FetchTiffWorker.js +153 -0
- package/es/workers/VolumeLoadWorker.js +129 -0
- package/es/workers/VolumeLoaderContext.js +271 -0
- package/es/workers/types.js +41 -0
- package/es/workers/util.js +8 -0
- package/package.json +83 -0
|
@@ -0,0 +1,482 @@
|
|
|
1
|
+
import { DataTexture, Data3DTexture, FloatType, Matrix4, Mesh, NormalBlending, PlaneGeometry, Quaternion, RGBAFormat, ShaderMaterial, UnsignedByteType, Vector3, WebGLRenderTarget, LinearFilter, NearestFilter } from "three";
|
|
2
|
+
import { renderToBufferVertShader, copyImageFragShader } from "./constants/basicShaders.js";
|
|
3
|
+
import { denoiseFragmentShaderSrc, denoiseShaderUniforms } from "./constants/denoiseShader.js";
|
|
4
|
+
import { pathtraceOutputFragmentShaderSrc, pathtraceOutputShaderUniforms } from "./constants/pathtraceOutputShader.js";
|
|
5
|
+
import { pathTracingFragmentShaderSrc, pathTracingUniforms } from "./constants/volumePTshader.js";
|
|
6
|
+
import { LUT_ARRAY_LENGTH } from "./Lut.js";
|
|
7
|
+
import { FUSE_DISABLED_RGB_COLOR, isOrthographicCamera } from "./types.js";
|
|
8
|
+
import { VolumeRenderSettings, SettingsFlags } from "./VolumeRenderSettings.js";
|
|
9
|
+
import RenderToBuffer from "./RenderToBuffer.js";
|
|
10
|
+
export default class PathTracedVolume {
  // Uniform sets for the three shader passes. The volume texture fuses at most
  // 4 channels (RGBA), so channel-indexed uniforms have 4 or fewer elements.
  pathTracingUniforms = pathTracingUniforms();
  denoiseShaderUniforms = denoiseShaderUniforms();
  screenOutputShaderUniforms = pathtraceOutputShaderUniforms();

  /**
   * Creates a new PathTracedVolume.
   * @param volume The volume that this renderer should render data from.
   * @param settings Optional settings object. If set, updates the renderer with
   * the given settings. Otherwise, uses the default VolumeRenderSettings.
   */
  constructor(volume, settings = new VolumeRenderSettings(volume)) {
    this.volume = volume;
    // Indices of the (up to 4) channels currently fused into the volume
    // texture; -1 marks an unused slot.
    this.viewChannels = [-1, -1, -1, -1];

    // Create the volume texture: one byte per fused-channel slot per voxel.
    const {
      x: sx,
      y: sy,
      z: sz
    } = volume.imageInfo.subregionSize;
    const data = new Uint8Array(sx * sy * sz * 4).fill(0);
    // defaults to rgba and unsignedbytetype so dont need to supply format this time.
    this.volumeTexture = new Data3DTexture(data, sx, sy, sz);
    this.volumeTexture.minFilter = this.volumeTexture.magFilter = LinearFilter;
    this.volumeTexture.generateMipmaps = false;
    this.volumeTexture.needsUpdate = true;

    // Create the LUT texture: 4 rows (one per fused channel) of 256 RGBA
    // entries, initially an empty (all-ones) array.
    const lutData = new Uint8Array(LUT_ARRAY_LENGTH * 4).fill(1);
    const lut0 = new DataTexture(lutData, 256, 4, RGBAFormat, UnsignedByteType);
    lut0.minFilter = lut0.magFilter = LinearFilter;
    lut0.needsUpdate = true;
    this.pathTracingUniforms.gLutTexture.value = lut0;

    // Progressive-accumulation state: sampleCounter counts accumulated
    // samples since the last reset; frameCounter counts all rendered frames.
    this.cameraIsMoving = false;
    this.sampleCounter = 0;
    this.frameCounter = 0;

    // Float render targets for progressive accumulation. Created at a dummy
    // 2x2 size; the real size is applied in updateSettings (SAMPLING flag).
    this.pathTracingRenderTarget = new WebGLRenderTarget(2, 2, {
      minFilter: NearestFilter,
      magFilter: NearestFilter,
      format: RGBAFormat,
      type: FloatType,
      depthBuffer: false,
      stencilBuffer: false,
      generateMipmaps: false
    });
    this.screenTextureRenderTarget = new WebGLRenderTarget(2, 2, {
      minFilter: NearestFilter,
      magFilter: NearestFilter,
      format: RGBAFormat,
      type: FloatType,
      depthBuffer: false,
      stencilBuffer: false,
      generateMipmaps: false
    });

    // initialize texture.
    this.pathTracingUniforms.volumeTexture.value = this.volumeTexture;
    this.pathTracingUniforms.tPreviousTexture.value = this.screenTextureRenderTarget.texture;
    this.pathTracingRenderToBuffer = new RenderToBuffer(pathTracingFragmentShaderSrc, this.pathTracingUniforms);
    this.screenTextureRenderToBuffer = new RenderToBuffer(copyImageFragShader, {
      image: {
        value: this.pathTracingRenderTarget.texture
      }
    });

    // Full-screen quad used to present the accumulated image.
    this.screenOutputGeometry = new PlaneGeometry(2, 2);
    this.screenOutputMaterial = new ShaderMaterial({
      uniforms: this.screenOutputShaderUniforms,
      vertexShader: renderToBufferVertShader,
      fragmentShader: pathtraceOutputFragmentShaderSrc,
      depthWrite: false,
      depthTest: false,
      blending: NormalBlending,
      transparent: true
    });
    // NOTE: this.denoiseShaderUniforms is already created by the class field
    // initializer above; the previous redundant re-allocation here was removed.
    this.screenOutputDenoiseMaterial = new ShaderMaterial({
      uniforms: this.denoiseShaderUniforms,
      vertexShader: renderToBufferVertShader,
      fragmentShader: denoiseFragmentShaderSrc,
      depthWrite: false,
      depthTest: false,
      blending: NormalBlending,
      transparent: true
    });
    this.screenOutputMaterial.uniforms.tTexture0.value = this.pathTracingRenderTarget.texture;
    this.screenOutputDenoiseMaterial.uniforms.tTexture0.value = this.pathTracingRenderTarget.texture;
    this.screenOutputMesh = new Mesh(this.screenOutputGeometry, this.screenOutputMaterial);

    // Gradient step: one voxel along the largest axis, in normalized texture space.
    this.gradientDelta = 1.0 / Math.max(sx, Math.max(sy, sz));
    const invGradientDelta = 1.0 / this.gradientDelta; // a voxel count...

    this.pathTracingUniforms.gGradientDeltaX.value = new Vector3(this.gradientDelta, 0, 0);
    this.pathTracingUniforms.gGradientDeltaY.value = new Vector3(0, this.gradientDelta, 0);
    this.pathTracingUniforms.gGradientDeltaZ.value = new Vector3(0, 0, this.gradientDelta);
    // can this be a per-x,y,z value?
    this.pathTracingUniforms.gInvGradientDelta.value = invGradientDelta; // a voxel count
    this.pathTracingUniforms.gGradientFactor.value = 50.0; // related to voxel counts also

    // Update settings (updateSettings also assigns this.settings internally).
    this.updateSettings(settings);
    this.settings = settings; // turns off ts initialization warning

    // bounds will go from 0 to physicalSize
    const physicalSize = this.getNormVolumeSize();
    this.pathTracingUniforms.gInvAaBbMax.value = new Vector3(1.0 / physicalSize.x, 1.0 / physicalSize.y, 1.0 / physicalSize.z).divide(volume.normRegionSize);
    this.updateLightsSecondary();
  }

  /** Releases the GPU-side volume texture. Do not use this object after calling cleanup! */
  cleanup() {
    this.volumeTexture.dispose();
  }

  /**
   * Registers a callback invoked with the current sample count as progressive
   * rendering advances (and with 0 when progress resets).
   */
  setRenderUpdateListener(callback) {
    this.renderUpdateListener = callback;
  }

  /** Resets progressive accumulation, notifying the render-update listener if progress is lost. */
  resetProgress() {
    if (this.sampleCounter !== 0 && this.renderUpdateListener) {
      this.renderUpdateListener(0);
    }
    this.sampleCounter = 0;
  }

  /** Returns the volume's normalized physical size scaled by the current settings' scale. */
  getNormVolumeSize() {
    return this.volume.normPhysicalSize.clone().multiply(this.settings.scale);
  }

  /**
   * If the new settings have changed, applies all changes to this volume renderer.
   * @param newSettings The settings object to apply.
   * @param dirtyFlags Bitmask of SettingsFlags selecting which groups to apply;
   * defaults to SettingsFlags.ALL when omitted.
   */
  updateSettings(newSettings, dirtyFlags) {
    if (dirtyFlags === undefined) {
      dirtyFlags = SettingsFlags.ALL;
    }
    this.settings = newSettings;

    // Update resolution
    if (dirtyFlags & SettingsFlags.SAMPLING) {
      const resolution = this.settings.resolution.clone();
      const dpr = window.devicePixelRatio ? window.devicePixelRatio : 1.0;
      const nx = Math.floor(resolution.x * this.settings.pixelSamplingRate / dpr);
      const ny = Math.floor(resolution.y * this.settings.pixelSamplingRate / dpr);
      this.pathTracingUniforms.uResolution.value.x = nx;
      this.pathTracingUniforms.uResolution.value.y = ny;
      this.pathTracingRenderTarget.setSize(nx, ny);
      this.screenTextureRenderTarget.setSize(nx, ny);

      // update ray step size
      this.pathTracingUniforms.gStepSize.value = this.settings.primaryRayStepSize * this.gradientDelta;
      this.pathTracingUniforms.gStepSizeShadow.value = this.settings.secondaryRayStepSize * this.gradientDelta;
    }
    if (dirtyFlags & SettingsFlags.TRANSFORM) {
      this.pathTracingUniforms.flipVolume.value = this.settings.flipAxes;
    }
    if (dirtyFlags & SettingsFlags.MATERIAL) {
      this.pathTracingUniforms.gDensityScale.value = this.settings.density * 150.0;
      this.updateMaterial();
    }

    // update bounds, clamping the clip box to the loaded subregion
    if (dirtyFlags & SettingsFlags.ROI) {
      const {
        normRegionSize,
        normRegionOffset
      } = this.volume;
      const {
        bmin,
        bmax
      } = this.settings.bounds;
      const scaledSize = this.getNormVolumeSize();
      const sizeMin = normRegionOffset.clone().subScalar(0.5).multiply(scaledSize);
      const sizeMax = normRegionOffset.clone().add(normRegionSize).subScalar(0.5).multiply(scaledSize);
      const clipMin = bmin.clone().multiply(scaledSize);
      this.pathTracingUniforms.gClippedAaBbMin.value = clipMin.clamp(sizeMin, sizeMax);
      const clipMax = bmax.clone().multiply(scaledSize);
      this.pathTracingUniforms.gClippedAaBbMax.value = clipMax.clamp(sizeMin, sizeMax);
      this.pathTracingUniforms.gVolCenter.value = this.volume.getContentCenter().multiply(this.settings.scale);
    }
    if (dirtyFlags & SettingsFlags.CAMERA) {
      this.updateExposure(this.settings.brightness);
    }
    if (dirtyFlags & SettingsFlags.MASK_ALPHA) {
      // Update channel and alpha mask if they have changed
      this.updateVolumeData4();
    }
    if (dirtyFlags & SettingsFlags.VIEW) {
      this.pathTracingUniforms.gCamera.value.mIsOrtho = this.settings.isOrtho ? 1 : 0;
    }
    if (dirtyFlags & SettingsFlags.SAMPLING) {
      this.volumeTexture.minFilter = this.volumeTexture.magFilter = newSettings.useInterpolation ? LinearFilter : NearestFilter;
      this.volumeTexture.needsUpdate = true;
    }
    this.resetProgress();
  }

  /** Re-applies region-of-interest related settings after the volume's dimensions changed. */
  updateVolumeDimensions() {
    this.updateSettings(this.settings, SettingsFlags.ROI);
  }

  /**
   * Renders one progressive path-tracing pass.
   * @param renderer The WebGL renderer to render with.
   * @param camera The camera to render from (perspective or orthographic).
   */
  doRender(renderer, camera) {
    if (!this.volumeTexture) {
      return;
    }
    if (this.cameraIsMoving) {
      // discard accumulated samples while the camera is in motion
      this.resetProgress();
      this.frameCounter += 1.0;
    } else {
      this.sampleCounter += 1.0;
      this.frameCounter += 1.0;
      if (this.renderUpdateListener) {
        this.renderUpdateListener(this.sampleCounter);
      }
    }
    this.pathTracingUniforms.uSampleCounter.value = this.sampleCounter;
    this.pathTracingUniforms.uFrameCounter.value = this.frameCounter;

    // CAMERA
    // force the camera to update its world matrix.
    camera.updateMatrixWorld(true);

    // rotate lights with camera, as if we are tumbling the volume with a fixed camera and world lighting.
    // this code is analogous to this threejs code from View3d.preRender:
    // this.scene.getObjectByName('lightContainer').rotation.setFromRotationMatrix(this.canvas3d.camera.matrixWorld);
    const mycamxform = camera.matrixWorld.clone();
    mycamxform.setPosition(new Vector3(0, 0, 0));
    this.updateLightsSecondary(mycamxform);
    let mydir = new Vector3();
    mydir = camera.getWorldDirection(mydir);
    const myup = new Vector3().copy(camera.up);
    // don't rotate this vector. we are using translation as the pivot point of the object, and THEN rotating.
    const mypos = new Vector3().copy(camera.position);

    // apply volume translation and rotation:
    // rotate camera.up, camera.direction, and camera position by inverse of volume's modelview
    const m = new Matrix4().makeRotationFromQuaternion(new Quaternion().setFromEuler(this.settings.rotation).invert());
    mypos.sub(this.settings.translation);
    mypos.applyMatrix4(m);
    myup.applyMatrix4(m);
    mydir.applyMatrix4(m);
    this.pathTracingUniforms.gCamera.value.mIsOrtho = isOrthographicCamera(camera) ? 1 : 0;
    this.pathTracingUniforms.gCamera.value.mFrom.copy(mypos);
    this.pathTracingUniforms.gCamera.value.mN.copy(mydir);
    this.pathTracingUniforms.gCamera.value.mU.crossVectors(this.pathTracingUniforms.gCamera.value.mN, myup).normalize();
    this.pathTracingUniforms.gCamera.value.mV.crossVectors(this.pathTracingUniforms.gCamera.value.mU, this.pathTracingUniforms.gCamera.value.mN).normalize();

    // the choice of y = scale/aspect or x = scale*aspect is made here to match up with the other raymarch volume
    const fScale = isOrthographicCamera(camera) ? Math.abs(camera.top) / camera.zoom : Math.tan(0.5 * camera.fov * Math.PI / 180.0);
    const aspect = this.pathTracingUniforms.uResolution.value.x / this.pathTracingUniforms.uResolution.value.y;
    this.pathTracingUniforms.gCamera.value.mScreen.set(-fScale * aspect, fScale * aspect,
    // the "0" Y pixel will be at +Scale.
    fScale, -fScale);
    const scr = this.pathTracingUniforms.gCamera.value.mScreen;
    this.pathTracingUniforms.gCamera.value.mInvScreen.set(
    // the amount to increment for each pixel
    (scr.y - scr.x) / this.pathTracingUniforms.uResolution.value.x, (scr.w - scr.z) / this.pathTracingUniforms.uResolution.value.y);

    // Blend toward the denoised output in early accumulation, then switch to
    // the plain output material once enough samples are accumulated.
    const denoiseLerpC = 0.33 * (Math.max(this.sampleCounter - 1, 1.0) * 0.035);
    if (denoiseLerpC > 0.0 && denoiseLerpC < 1.0) {
      this.screenOutputDenoiseMaterial.uniforms.gDenoiseLerpC.value = denoiseLerpC;
      this.screenOutputMesh.material = this.screenOutputDenoiseMaterial;
    } else {
      this.screenOutputMesh.material = this.screenOutputMaterial;
    }
    this.screenOutputDenoiseMaterial.uniforms.gDenoisePixelSize.value.x = this.pathTracingUniforms.uResolution.value.x;
    this.screenOutputDenoiseMaterial.uniforms.gDenoisePixelSize.value.y = this.pathTracingUniforms.uResolution.value.y;

    // RENDERING in 3 steps

    // STEP 1
    // Perform PathTracing and Render(save) into pathTracingRenderTarget

    // This is currently rendered as a fullscreen quad with no camera transform in the vertex shader!
    // It is also composited with screenTextureRenderTarget's texture.
    // (Read previous screenTextureRenderTarget to use as a new starting point to blend with)
    this.pathTracingRenderToBuffer.render(renderer, this.pathTracingRenderTarget);

    // STEP 2
    // Render(copy) the final pathTracingScene output(above) into screenTextureRenderTarget
    // This will be used as a new starting point for Step 1 above
    this.screenTextureRenderToBuffer.render(renderer, this.screenTextureRenderTarget);

    // STEP 3
    // Render full screen quad with generated pathTracingRenderTarget in STEP 1 above.
    // After the image is gamma corrected, it will be shown on the screen as the final accumulated output
    // DMT - this step is handled by the threeJsPanel.
    // tell the threejs panel to use the quadCamera to render this scene.

    // renderer.render( this.screenOutputScene, this.quadCamera );
    renderer.setRenderTarget(null);
  }

  /** Returns the screen-space mesh that presents this renderer's output. */
  get3dObject() {
    return this.screenOutputMesh;
  }

  //////////////////////////////////////////
  //////////////////////////////////////////

  /** Called when interactive camera controls begin; pauses sample accumulation. */
  onStartControls() {
    this.cameraIsMoving = true;
  }

  /** Called while camera controls change; intentionally a no-op (accumulation already paused). */
  onChangeControls() {
    // this.cameraIsMoving = true;
  }

  /** Called when camera controls end; resumes accumulation from scratch. */
  onEndControls() {
    this.cameraIsMoving = false;
    this.resetProgress();
  }

  /** Restarts accumulation after an external viewpoint change. */
  viewpointMoved() {
    this.resetProgress();
  }

  /**
   * Selects up to 4 enabled, loaded channels to fuse into the volume texture.
   * Refreshes volume data, LUTs, and material only if the selection changed.
   */
  updateActiveChannels(channelColors, channelData) {
    const ch = [-1, -1, -1, -1];
    let activeChannel = 0;
    const NC = this.volume.imageInfo.numChannels;
    const maxch = 4;
    for (let i = 0; i < NC && activeChannel < maxch; ++i) {
      // check that channel is not disabled and is loaded
      if (channelColors[i].rgbColor !== FUSE_DISABLED_RGB_COLOR && channelData[i].loaded) {
        ch[activeChannel] = i;
        activeChannel++;
      }
    }
    const unchanged = ch.every((elem, index) => elem === this.viewChannels[index], this);
    if (unchanged) {
      return;
    }
    this.pathTracingUniforms.gNChannels.value = activeChannel;
    this.viewChannels = ch;
    // update volume data according to channels selected.
    this.updateVolumeData4();
    this.resetProgress();
    this.updateLuts(channelColors, channelData);
    this.updateMaterial();
  }

  /**
   * Rebuilds the RGBA volume texture from the currently selected view channels,
   * applying the binary alpha mask channel (if configured) per channel slot.
   */
  updateVolumeData4() {
    const {
      x: sx,
      y: sy,
      z: sz
    } = this.volume.imageInfo.subregionSize;
    const data = new Uint8Array(sx * sy * sz * 4);
    data.fill(0);
    for (let i = 0; i < 4; ++i) {
      const ch = this.viewChannels[i];
      if (ch === -1) {
        continue;
      }
      const volumeChannel = this.volume.getChannel(ch);
      for (let iz = 0; iz < sz; ++iz) {
        for (let iy = 0; iy < sy; ++iy) {
          for (let ix = 0; ix < sx; ++ix) {
            // TODO expand to 16-bpp raw intensities?
            data[i + ix * 4 + iy * 4 * sx + iz * 4 * sx * sy] = 255 * volumeChannel.normalizeRaw(volumeChannel.getIntensity(ix, iy, iz));
          }
        }
      }
      if (this.settings.maskChannelIndex !== -1 && this.settings.maskAlpha < 1.0) {
        const maskChannel = this.volume.getChannel(this.settings.maskChannelIndex);
        // const maskMax = maskChannel.getHistogram().dataMax;
        let maskVal = 1.0;
        const maskAlpha = this.settings.maskAlpha;
        for (let iz = 0; iz < sz; ++iz) {
          for (let iy = 0; iy < sy; ++iy) {
            for (let ix = 0; ix < sx; ++ix) {
              // nonbinary masking
              // maskVal = maskChannel.getIntensity(ix,iy,iz) * maskAlpha / maskMax;

              // binary masking
              maskVal = maskChannel.getIntensity(ix, iy, iz) > 0 ? 1.0 : maskAlpha;
              data[i + ix * 4 + iy * 4 * sx + iz * 4 * sx * sy] *= maskVal;
            }
          }
        }
      }
    }
    // defaults to rgba and unsignedbytetype so dont need to supply format this time.
    this.volumeTexture.image.data.set(data);
    this.volumeTexture.needsUpdate = true;
  }

  /** Copies each active channel's combined LUT and intensity range into the shader uniforms. */
  updateLuts(channelColors, channelData) {
    for (let i = 0; i < this.pathTracingUniforms.gNChannels.value; ++i) {
      const channel = this.viewChannels[i];
      const combinedLut = channelData[channel].combineLuts(channelColors[channel].rgbColor);
      this.pathTracingUniforms.gLutTexture.value.image.data.set(combinedLut, i * LUT_ARRAY_LENGTH);

      // TODO expand to 16-bpp raw intensities?
      this.pathTracingUniforms.gIntensityMax.value.setComponent(i, this.volume.channels[channel].histogram.getMax() / 255.0);
      this.pathTracingUniforms.gIntensityMin.value.setComponent(i, this.volume.channels[channel].histogram.getMin() / 255.0);
    }
    this.pathTracingUniforms.gLutTexture.value.needsUpdate = true;
    this.resetProgress();
  }

  // image is a material interface that supports per-channel color, spec,
  // emissive, glossiness
  /** Applies per-channel material settings (LUT-blended diffuse, specular, emissive, glossiness). */
  updateMaterial() {
    for (let c = 0; c < this.viewChannels.length; ++c) {
      const i = this.viewChannels[c];
      if (i > -1) {
        // diffuse color is actually blended into the LUT now.
        const channelData = this.volume.getChannel(i);
        const combinedLut = channelData.combineLuts(this.settings.diffuse[i]);
        this.pathTracingUniforms.gLutTexture.value.image.data.set(combinedLut, c * LUT_ARRAY_LENGTH);
        this.pathTracingUniforms.gLutTexture.value.needsUpdate = true;
        this.pathTracingUniforms.gDiffuse.value[c] = new Vector3(1.0, 1.0, 1.0);
        this.pathTracingUniforms.gSpecular.value[c] = new Vector3().fromArray(this.settings.specular[i]).multiplyScalar(1.0 / 255.0);
        this.pathTracingUniforms.gEmissive.value[c] = new Vector3().fromArray(this.settings.emissive[i]).multiplyScalar(1.0 / 255.0);
        this.pathTracingUniforms.gGlossiness.value[c] = this.settings.glossiness[i];
      }
    }
    this.resetProgress();
  }

  /** Sets the BRDF/shading model used by the path tracer and restarts accumulation. */
  updateShadingMethod(brdf) {
    this.pathTracingUniforms.gShadingType.value = brdf;
    this.resetProgress();
  }

  /** Toggles visualization of the light sources themselves and restarts accumulation. */
  updateShowLights(showlights) {
    this.pathTracingUniforms.uShowLights.value = showlights;
    this.resetProgress();
  }

  /**
   * Sets display exposure from a 0..1 brightness value, mapped to the
   * shader's inverse-exposure uniform.
   */
  updateExposure(e) {
    // 1.0 causes division by zero.
    if (e > 0.99999) {
      e = 0.99999;
    }
    this.screenOutputMaterial.uniforms.gInvExposure.value = 1.0 / (1.0 - e) - 1.0;
    this.screenOutputDenoiseMaterial.uniforms.gInvExposure.value = 1.0 / (1.0 - e) - 1.0;
    this.resetProgress();
  }

  /**
   * Updates depth-of-field camera parameters and restarts accumulation.
   * NOTE(review): `fov` is accepted for interface compatibility but unused here —
   * field of view is derived from the live camera in doRender; confirm before removing.
   */
  updateCamera(fov, focalDistance, apertureSize) {
    this.pathTracingUniforms.gCamera.value.mApertureSize = apertureSize;
    this.pathTracingUniforms.gCamera.value.mFocalDistance = focalDistance;
    this.resetProgress();
  }

  /** Copies sky-sphere (state[0]) and area-light (state[1]) parameters into the light uniforms. */
  updateLights(state) {
    // 0th light in state array is sphere light
    this.pathTracingUniforms.gLights.value[0].mColorTop = new Vector3().copy(state[0].mColorTop);
    this.pathTracingUniforms.gLights.value[0].mColorMiddle = new Vector3().copy(state[0].mColorMiddle);
    this.pathTracingUniforms.gLights.value[0].mColorBottom = new Vector3().copy(state[0].mColorBottom);

    // 1st light in state array is area light
    this.pathTracingUniforms.gLights.value[1].mColor = new Vector3().copy(state[1].mColor);
    this.pathTracingUniforms.gLights.value[1].mTheta = state[1].mTheta;
    this.pathTracingUniforms.gLights.value[1].mPhi = state[1].mPhi;
    this.pathTracingUniforms.gLights.value[1].mDistance = state[1].mDistance;
    this.pathTracingUniforms.gLights.value[1].mWidth = state[1].mWidth;
    this.pathTracingUniforms.gLights.value[1].mHeight = state[1].mHeight;
    this.updateLightsSecondary();
    this.resetProgress();
  }

  /**
   * Recomputes derived light data about the volume's bounding-box center.
   * @param cameraMatrix Optional rotation-only camera matrix so lights follow the camera.
   */
  updateLightsSecondary(cameraMatrix) {
    // (removed stray console.log debug output — this runs once per frame via doRender)
    const physicalSize = this.getNormVolumeSize();
    const bbctr = new Vector3(physicalSize.x * 0.5, physicalSize.y * 0.5, physicalSize.z * 0.5);
    for (let i = 0; i < 2; ++i) {
      const lt = this.pathTracingUniforms.gLights.value[i];
      lt.update(bbctr, cameraMatrix);
    }
  }

  // 0..1 ranges as input
  /** Sets the axis-aligned clip region (normalized 0..1 per axis) and restarts accumulation. */
  updateClipRegion(xmin, xmax, ymin, ymax, zmin, zmax) {
    this.settings.bounds = {
      bmin: new Vector3(xmin - 0.5, ymin - 0.5, zmin - 0.5),
      bmax: new Vector3(xmax - 0.5, ymax - 0.5, zmax - 0.5)
    };
    const physicalSize = this.getNormVolumeSize();
    this.pathTracingUniforms.gClippedAaBbMin.value = new Vector3(xmin * physicalSize.x - 0.5 * physicalSize.x, ymin * physicalSize.y - 0.5 * physicalSize.y, zmin * physicalSize.z - 0.5 * physicalSize.z);
    this.pathTracingUniforms.gClippedAaBbMax.value = new Vector3(xmax * physicalSize.x - 0.5 * physicalSize.x, ymax * physicalSize.y - 0.5 * physicalSize.y, zmax * physicalSize.z - 0.5 * physicalSize.z);
    this.resetProgress();
  }

  /** Z-slicing is not applicable to the path-traced renderer; always reports success. */
  setZSlice(_slice) {
    return true;
  }
}
|