@aics/vole-core 3.12.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE.txt +26 -0
- package/README.md +119 -0
- package/es/Atlas2DSlice.js +224 -0
- package/es/Channel.js +264 -0
- package/es/FileSaver.js +31 -0
- package/es/FusedChannelData.js +192 -0
- package/es/Histogram.js +250 -0
- package/es/ImageInfo.js +127 -0
- package/es/Light.js +74 -0
- package/es/Lut.js +500 -0
- package/es/MarchingCubes.js +507 -0
- package/es/MeshVolume.js +334 -0
- package/es/NaiveSurfaceNets.js +251 -0
- package/es/PathTracedVolume.js +482 -0
- package/es/RayMarchedAtlasVolume.js +250 -0
- package/es/RenderToBuffer.js +31 -0
- package/es/ThreeJsPanel.js +633 -0
- package/es/Timing.js +28 -0
- package/es/TrackballControls.js +538 -0
- package/es/View3d.js +848 -0
- package/es/Volume.js +352 -0
- package/es/VolumeCache.js +161 -0
- package/es/VolumeDims.js +16 -0
- package/es/VolumeDrawable.js +702 -0
- package/es/VolumeMaker.js +101 -0
- package/es/VolumeRenderImpl.js +1 -0
- package/es/VolumeRenderSettings.js +203 -0
- package/es/constants/basicShaders.js +29 -0
- package/es/constants/colors.js +59 -0
- package/es/constants/denoiseShader.js +43 -0
- package/es/constants/lights.js +42 -0
- package/es/constants/materials.js +85 -0
- package/es/constants/pathtraceOutputShader.js +13 -0
- package/es/constants/scaleBarSVG.js +21 -0
- package/es/constants/time.js +34 -0
- package/es/constants/volumePTshader.js +153 -0
- package/es/constants/volumeRayMarchShader.js +123 -0
- package/es/constants/volumeSliceShader.js +115 -0
- package/es/index.js +21 -0
- package/es/loaders/IVolumeLoader.js +131 -0
- package/es/loaders/JsonImageInfoLoader.js +255 -0
- package/es/loaders/OmeZarrLoader.js +495 -0
- package/es/loaders/OpenCellLoader.js +65 -0
- package/es/loaders/RawArrayLoader.js +89 -0
- package/es/loaders/TiffLoader.js +219 -0
- package/es/loaders/VolumeLoadError.js +44 -0
- package/es/loaders/VolumeLoaderUtils.js +221 -0
- package/es/loaders/index.js +40 -0
- package/es/loaders/zarr_utils/ChunkPrefetchIterator.js +143 -0
- package/es/loaders/zarr_utils/WrappedStore.js +51 -0
- package/es/loaders/zarr_utils/types.js +24 -0
- package/es/loaders/zarr_utils/utils.js +225 -0
- package/es/loaders/zarr_utils/validation.js +49 -0
- package/es/test/ChunkPrefetchIterator.test.js +208 -0
- package/es/test/RequestQueue.test.js +442 -0
- package/es/test/SubscribableRequestQueue.test.js +244 -0
- package/es/test/VolumeCache.test.js +118 -0
- package/es/test/VolumeRenderSettings.test.js +71 -0
- package/es/test/lut.test.js +671 -0
- package/es/test/num_utils.test.js +140 -0
- package/es/test/volume.test.js +98 -0
- package/es/test/zarr_utils.test.js +358 -0
- package/es/types/Atlas2DSlice.d.ts +41 -0
- package/es/types/Channel.d.ts +44 -0
- package/es/types/FileSaver.d.ts +6 -0
- package/es/types/FusedChannelData.d.ts +26 -0
- package/es/types/Histogram.d.ts +57 -0
- package/es/types/ImageInfo.d.ts +87 -0
- package/es/types/Light.d.ts +27 -0
- package/es/types/Lut.d.ts +67 -0
- package/es/types/MarchingCubes.d.ts +53 -0
- package/es/types/MeshVolume.d.ts +40 -0
- package/es/types/NaiveSurfaceNets.d.ts +11 -0
- package/es/types/PathTracedVolume.d.ts +65 -0
- package/es/types/RayMarchedAtlasVolume.d.ts +41 -0
- package/es/types/RenderToBuffer.d.ts +17 -0
- package/es/types/ThreeJsPanel.d.ts +107 -0
- package/es/types/Timing.d.ts +11 -0
- package/es/types/TrackballControls.d.ts +51 -0
- package/es/types/View3d.d.ts +357 -0
- package/es/types/Volume.d.ts +152 -0
- package/es/types/VolumeCache.d.ts +43 -0
- package/es/types/VolumeDims.d.ts +28 -0
- package/es/types/VolumeDrawable.d.ts +108 -0
- package/es/types/VolumeMaker.d.ts +49 -0
- package/es/types/VolumeRenderImpl.d.ts +22 -0
- package/es/types/VolumeRenderSettings.d.ts +98 -0
- package/es/types/constants/basicShaders.d.ts +4 -0
- package/es/types/constants/colors.d.ts +2 -0
- package/es/types/constants/denoiseShader.d.ts +40 -0
- package/es/types/constants/lights.d.ts +38 -0
- package/es/types/constants/materials.d.ts +20 -0
- package/es/types/constants/pathtraceOutputShader.d.ts +11 -0
- package/es/types/constants/scaleBarSVG.d.ts +2 -0
- package/es/types/constants/time.d.ts +19 -0
- package/es/types/constants/volumePTshader.d.ts +137 -0
- package/es/types/constants/volumeRayMarchShader.d.ts +117 -0
- package/es/types/constants/volumeSliceShader.d.ts +109 -0
- package/es/types/glsl.d.js +0 -0
- package/es/types/index.d.ts +28 -0
- package/es/types/loaders/IVolumeLoader.d.ts +113 -0
- package/es/types/loaders/JsonImageInfoLoader.d.ts +80 -0
- package/es/types/loaders/OmeZarrLoader.d.ts +87 -0
- package/es/types/loaders/OpenCellLoader.d.ts +9 -0
- package/es/types/loaders/RawArrayLoader.d.ts +33 -0
- package/es/types/loaders/TiffLoader.d.ts +45 -0
- package/es/types/loaders/VolumeLoadError.d.ts +18 -0
- package/es/types/loaders/VolumeLoaderUtils.d.ts +38 -0
- package/es/types/loaders/index.d.ts +22 -0
- package/es/types/loaders/zarr_utils/ChunkPrefetchIterator.d.ts +22 -0
- package/es/types/loaders/zarr_utils/WrappedStore.d.ts +24 -0
- package/es/types/loaders/zarr_utils/types.d.ts +94 -0
- package/es/types/loaders/zarr_utils/utils.d.ts +23 -0
- package/es/types/loaders/zarr_utils/validation.d.ts +7 -0
- package/es/types/test/ChunkPrefetchIterator.test.d.ts +1 -0
- package/es/types/test/RequestQueue.test.d.ts +1 -0
- package/es/types/test/SubscribableRequestQueue.test.d.ts +1 -0
- package/es/types/test/VolumeCache.test.d.ts +1 -0
- package/es/types/test/VolumeRenderSettings.test.d.ts +1 -0
- package/es/types/test/lut.test.d.ts +1 -0
- package/es/types/test/num_utils.test.d.ts +1 -0
- package/es/types/test/volume.test.d.ts +1 -0
- package/es/types/test/zarr_utils.test.d.ts +1 -0
- package/es/types/types.d.ts +115 -0
- package/es/types/utils/RequestQueue.d.ts +112 -0
- package/es/types/utils/SubscribableRequestQueue.d.ts +52 -0
- package/es/types/utils/num_utils.d.ts +43 -0
- package/es/types/workers/VolumeLoaderContext.d.ts +106 -0
- package/es/types/workers/types.d.ts +101 -0
- package/es/types/workers/util.d.ts +3 -0
- package/es/types.js +75 -0
- package/es/typings.d.js +0 -0
- package/es/utils/RequestQueue.js +267 -0
- package/es/utils/SubscribableRequestQueue.js +187 -0
- package/es/utils/num_utils.js +231 -0
- package/es/workers/FetchTiffWorker.js +153 -0
- package/es/workers/VolumeLoadWorker.js +129 -0
- package/es/workers/VolumeLoaderContext.js +271 -0
- package/es/workers/types.js +41 -0
- package/es/workers/util.js +8 -0
- package/package.json +83 -0
|
@@ -0,0 +1,140 @@
|
|
|
1
|
+
import { expect } from "chai";
|
|
2
|
+
import { it } from "mocha";
|
|
3
|
+
import { constrainToAxis, formatNumber, getTimestamp } from "../utils/num_utils";
|
|
4
|
+
import { Axis } from "../VolumeRenderSettings.js";
|
|
5
|
+
// Unit tests for the number/time formatting helpers in utils/num_utils.
describe("num_utils", () => {
  // formatNumber(value, sigFigs?, sciSigFigs?) renders a number either as a
  // plain decimal string or in unicode scientific notation (e.g. "1.23×10⁴").
  describe("formatNumber", () => {
    it("stringifies integers with 4 or fewer digits", () => {
      expect(formatNumber(4)).to.equal("4");
      expect(formatNumber(-876)).to.equal("-876");
      expect(formatNumber(1234)).to.equal("1234");
      expect(formatNumber(9999)).to.equal("9999");
      expect(formatNumber(-9999)).to.equal("-9999");
    });
    it("rounds decimals to 5 significant figures", () => {
      expect(formatNumber(123.4567)).to.equal("123.46");
      expect(formatNumber(-0.123456)).to.equal("-0.12346");
    });
    it("formats integers with 5 or more digits in scientific notation", () => {
      expect(formatNumber(10000)).to.equal("1.00×10⁴");
      expect(formatNumber(12345)).to.equal("1.23×10⁴");
      expect(formatNumber(99999)).to.equal("1.00×10⁵");
      expect(formatNumber(-12345)).to.equal("-1.23×10⁴");
      expect(formatNumber(123456789012345)).to.equal("1.23×10¹⁴");
    });
    it("formats decimals below hundredths in scientific notation", () => {
      // 0.01 is exactly on the boundary and stays decimal.
      expect(formatNumber(0.01)).to.equal("0.01");
      expect(formatNumber(0.001)).to.equal("1.00×10⁻³");
      expect(formatNumber(0.0009876)).to.equal("9.88×10⁻⁴");
      expect(formatNumber(0.00000000000000123456789012345)).to.equal("1.23×10⁻¹⁵");
    });
    it("does not format zero in scientific notation", () => {
      expect(formatNumber(0)).to.equal("0");
    });
    it("formats numbers which would round to 10000 in scientific notation", () => {
      // Rounding to 5 sig figs would produce "10000"; that case switches to scientific.
      expect(formatNumber(9999.9)).to.equal("9999.9");
      expect(formatNumber(9999.99)).to.equal("1.00×10⁴");
    });
    it("does not account for numbers which round up to 0.01 in scientific notation", () => {
      // Known limitation: a value that rounds up to exactly 0.01 still comes
      // out in scientific notation.
      expect(formatNumber(0.00999)).to.equal("9.99×10⁻³");
      expect(formatNumber(0.009999)).to.equal("1.00×10⁻²"); // aw shucks
    });
    it("rounds off decimals to the specified number of significant figures", () => {
      expect(formatNumber(123.4567, 3)).to.equal("123");
      expect(formatNumber(123.4567, 4)).to.equal("123.5");
      expect(formatNumber(123.4567, 6)).to.equal("123.457");
      // Fewer sig figs than integer digits: the integer part is kept as-is.
      expect(formatNumber(123.4567, 2)).to.equal("123"); // oh well
    });
    it("rounds numbers in scientific notation to the specified number of significant figures", () => {
      // Third argument controls sig figs for the scientific-notation mantissa.
      expect(formatNumber(12345, 3, 3)).to.equal("1.23×10⁴");
      expect(formatNumber(12345, 4, 4)).to.equal("1.235×10⁴");
      expect(formatNumber(12345, 6, 6)).to.equal("1.23450×10⁴");
    });
    it("rounds numbers in scientific notation to two fewer significant figures than decimals if unspecified", () => {
      expect(formatNumber(12345, 5)).to.equal("1.23×10⁴");
      expect(formatNumber(12345, 6)).to.equal("1.235×10⁴");
      expect(formatNumber(12345, 8)).to.equal("1.23450×10⁴");
    });
  });
  // getTimestamp(time, total, unit) renders "current / total unit", promoting
  // to larger units (s, m:s, h:m:s, d:h:m:s) as the total grows.
  describe("getTimestamp", () => {
    it("shows only milliseconds if total time units < 1000 ms", () => {
      expect(getTimestamp(0, 999, "ms")).to.equal("0 / 999 ms");
      expect(getTimestamp(41, 999, "ms")).to.equal("41 / 999 ms");
      expect(getTimestamp(-50, 999, "ms")).to.equal("-50 / 999 ms");
      expect(getTimestamp(999, 999, "ms")).to.equal("999 / 999 ms");
    });
    it("ignores decimal places in milliseconds", () => {
      // Fractional ms are truncated, not rounded (9.9 -> 9).
      expect(getTimestamp(0.4, 100, "ms")).to.equal("0 / 100 ms");
      expect(getTimestamp(9.9, 100, "ms")).to.equal("9 / 100 ms");
    });
    it("does not convert unrecognized time units", () => {
      // Unknown units are passed through verbatim with no unit promotion.
      expect(getTimestamp(0, 999, "foo")).to.equal("0 / 999 foo");
      expect(getTimestamp(41, 999, "bar")).to.equal("41 / 999 bar");
      expect(getTimestamp(999, 999, "baz")).to.equal("999 / 999 baz");
    });
    it("shows seconds if total time is > 1000 ms", () => {
      expect(getTimestamp(0, 1000, "ms")).to.equal("0.000 / 1.000 s");
      expect(getTimestamp(999, 1000, "ms")).to.equal("0.999 / 1.000 s");
      expect(getTimestamp(1, 4500, "ms")).to.equal("0.001 / 4.500 s");
    });
    it("does not show past three decimals for seconds", () => {
      expect(getTimestamp(9.9, 1000, "ms")).to.equal("0.009 / 1.000 s");
    });
    it("ignores milliseconds when unit is seconds", () => {
      expect(getTimestamp(0, 59, "s")).to.equal("0 / 59 s");
      expect(getTimestamp(0.54, 59, "s")).to.equal("0 / 59 s");
      expect(getTimestamp(12, 59, "s")).to.equal("12 / 59 s");
    });
    it("converts from seconds to minutes", () => {
      expect(getTimestamp(0, 60, "s")).to.equal("0:00 / 1:00 m:s");
    });
    it("converts from seconds to other units", () => {
      const minutesInSec = 60;
      const hoursInSec = 60 * minutesInSec;
      const daysInSec = 24 * hoursInSec;
      let time = 1 * hoursInSec + 23 * minutesInSec + 45;
      let total = 2 * hoursInSec + 30 * minutesInSec + 0;
      expect(getTimestamp(time, total, "s")).to.equal("1:23:45 / 2:30:00 h:m:s");
      time = 15 * hoursInSec + 0 * minutesInSec + 1;
      total = 17 * hoursInSec + 0 * minutesInSec + 0;
      expect(getTimestamp(time, total, "s")).to.equal("15:00:01 / 17:00:00 h:m:s");
      time = 0;
      total = 23 * hoursInSec + 59 * minutesInSec + 59;
      expect(getTimestamp(time, total, "s")).to.equal("0:00:00 / 23:59:59 h:m:s");
      // Crossing into days adds a fourth field.
      time = 1 * daysInSec + 0 * hoursInSec + 0 * minutesInSec + 0;
      total = 2 * daysInSec + 0 * hoursInSec + 0 * minutesInSec + 0;
      expect(getTimestamp(time, total, "s")).to.equal("1:00:00:00 / 2:00:00:00 d:h:m:s");
      time = 1 * daysInSec + 23 * hoursInSec + 59 * minutesInSec + 59;
      total = 2 * daysInSec + 0 * hoursInSec + 0 * minutesInSec + 0;
      expect(getTimestamp(time, total, "s")).to.equal("1:23:59:59 / 2:00:00:00 d:h:m:s");
    });
    it("can show full d:hh:mm:ss.sss timestamps", () => {
      const secondsInMs = 1000;
      const minutesInMs = 60 * secondsInMs;
      const hoursInMs = 60 * minutesInMs;
      const daysInMs = 24 * hoursInMs;
      const time = 1 * daysInMs + 17 * hoursInMs + 23 * minutesInMs + 45 * secondsInMs + 678;
      const total = 2 * daysInMs;
      expect(getTimestamp(time, total, "ms")).to.equal("1:17:23:45.678 / 2:00:00:00.000 d:h:m:s");
    });
    it("ignores smaller units when time defined in hours", () => {
      expect(getTimestamp(0, 123 * 24, "h")).to.equal("0:00 / 123:00 d:h");
      expect(getTimestamp(23, 123 * 24, "h")).to.equal("0:23 / 123:00 d:h");
      expect(getTimestamp(24 + 5, 123 * 24, "h")).to.equal("1:05 / 123:00 d:h");
    });
  });
  // constrainToAxis(src, target, axis) keeps `src`'s component along `axis`
  // and replaces the other two components with `target`'s.
  describe("constrainToAxis", () => {
    it("constrains to the X, Y, Z axis", () => {
      const src = [1, 2, 3];
      const target = [4, 5, 6];
      expect(constrainToAxis(src, target, Axis.X)).to.eql([1, 5, 6]);
      expect(constrainToAxis(src, target, Axis.Y)).to.eql([4, 2, 6]);
      expect(constrainToAxis(src, target, Axis.Z)).to.eql([4, 5, 3]);
    });
    it("does nothing if Axis.None is specified", () => {
      const src = [1, 2, 3];
      const target = [4, 5, 6];
      expect(constrainToAxis(src, target, Axis.NONE)).to.eql([1, 2, 3]);
    });
  });
});
|
|
@@ -0,0 +1,98 @@
|
|
|
1
|
+
import { expect } from "chai";
|
|
2
|
+
import Volume from "../Volume";
|
|
3
|
+
import VolumeMaker from "../VolumeMaker";
|
|
4
|
+
import { LUT_ARRAY_LENGTH } from "../Lut";
|
|
5
|
+
import { CImageInfo } from "../ImageInfo";
|
|
6
|
+
import { getDataRange } from "../utils/num_utils";
|
|
7
|
+
|
|
8
|
+
// PREPARE SOME TEST DATA TO TRY TO DISPLAY A VOLUME.
|
|
9
|
+
const testimgdata = {
  name: "AICS-10_5_5",
  // Atlas tile grid: 7 columns x 10 rows (70 tiles, enough for the 65 z slices).
  atlasTileDims: [7, 10],
  // Loaded subregion extent as [x, y, z] voxel counts.
  subregionSize: [204, 292, 65],
  subregionOffset: [0, 0, 0],
  // Matches the C dimension (9) of multiscaleLevelDims[0].shape below.
  combinedNumChannels: 9,
  channelNames: ["DRAQ5", "EGFP", "Hoechst 33258", "TL Brightfield", "SEG_STRUCT", "SEG_Memb", "SEG_DNA", "CON_Memb", "CON_DNA"],
  multiscaleLevel: 0,
  multiscaleLevelDims: [{
    // Shape in TCZYX order: 1 time, 9 channels, 65 z, 292 y, 204 x.
    shape: [1, 9, 65, 292, 204],
    // original volume had 0.065 um pixels in x and y, 0.29 um pixels in z, and 65x494x306 voxels
    // Spacing follows the same TCZYX ordering; Y and X spacing are scaled up
    // to compensate for downsampling from 494x306 to 292x204.
    spacing: [1, 1, 0.29, 0.065 * 494 / 292, 0.065 * 306 / 204],
    spaceUnit: "",
    timeUnit: "",
    dataType: "uint8"
  }],
  // Identity world-space transform.
  transform: {
    translation: [0, 0, 0],
    rotation: [0, 0, 0],
    scale: [1, 1, 1]
  }
};
|
|
31
|
+
/**
 * Asserts that a freshly constructed `Volume` is consistent with the image
 * metadata it was built from: physical size, channel counts, and the
 * normalization of `normPhysicalSize`.
 */
function checkVolumeConstruction(v, imgdata) {
  expect(v).to.be.a("Object");
  // A brand-new volume has no channel data loaded yet.
  expect(v.isLoaded()).to.not.be.ok;
  const info = new CImageInfo(imgdata);
  const expectedSize = info.originalSize.clone().multiply(info.physicalPixelSize);
  for (const axis of ["x", "y", "z"]) {
    expect(v.physicalSize[axis]).to.equal(expectedSize[axis]);
  }
  expect(v.channelNames.length).to.equal(imgdata.combinedNumChannels);
  expect(v.channels.length).to.equal(imgdata.combinedNumChannels);
  // The longest axis of the normalized physical size must be exactly 1.
  const largestAxis = Math.max(v.normPhysicalSize.x, v.normPhysicalSize.y, v.normPhysicalSize.z);
  expect(largestAxis).to.equal(1.0);
}
|
|
47
|
+
/**
 * Asserts that channel `index` of a volume was populated correctly: it is
 * loaded, named per `imgdata`, and its atlas image and LUT have the
 * expected sizes.
 */
function checkChannelDataConstruction(c, index, imgdata) {
  expect(c.loaded).to.be.true;
  expect(c.name).to.equal(imgdata.channelNames[index]);
  // Atlas pixel dimensions are tile-grid dims times per-tile (subregion) size.
  const [tileCols, tileRows] = imgdata.atlasTileDims;
  const expectedWidth = tileCols * imgdata.subregionSize[0];
  const expectedHeight = tileRows * imgdata.subregionSize[1];
  expect(c.imgData.width).to.equal(expectedWidth);
  expect(c.imgData.height).to.equal(expectedHeight);
  expect(c.imgData.data).to.be.a("Uint8Array");
  expect(c.imgData.data.length).to.equal(expectedWidth * expectedHeight);
  expect(c.lut.lut).to.be.a("Uint8Array");
  expect(c.lut.lut.length).to.equal(LUT_ARRAY_LENGTH);
}
|
|
59
|
+
// Construction and data-loading tests for `Volume`, driven by the static
// `testimgdata` fixture defined above.
describe("test volume", () => {
  describe("creation", () => {
    // Shared across the `it` blocks below; the second block mutates it by
    // loading channel data, so ordering matters.
    const v = new Volume(testimgdata);
    it("is created", () => {
      checkVolumeConstruction(v, testimgdata);
    });
    it("loaded channel data", () => {
      const size = v.imageInfo.subregionSize;
      // Channel 0: synthetic cone; channel 1: synthetic sphere.
      const conedata = VolumeMaker.createCone(size.x, size.y, size.z, size.x / 8, size.z);
      v.setChannelDataFromVolume(0, conedata, getDataRange(conedata));
      const c0 = v.getChannel(0);
      checkChannelDataConstruction(c0, 0, testimgdata);
      const spheredata = VolumeMaker.createSphere(size.x, size.y, size.z, size.z / 4);
      v.setChannelDataFromVolume(1, spheredata, getDataRange(spheredata));
      const c1 = v.getChannel(1);
      checkChannelDataConstruction(c1, 1, testimgdata);
      // The sphere is centered in the volume: max intensity at the center,
      // zero at the corner.
      expect(v.getIntensity(1, Math.floor(size.x / 2), Math.floor(size.y / 2), Math.floor(size.z / 2))).to.equal(255);
      expect(v.getIntensity(1, 0, 0, 0)).to.equal(0);
    });
  });
  describe("property validation", () => {
    it("has a correct value for normalizedPhysicalSize", () => {
      // `Volume` formerly derived a `scale` property by a different means than `normPhysicalSize`, but depended
      // on `scale` and `normPhysicalSize` being equal. With `scale` gone, this test ensures the equality stays.
      const v = new Volume(testimgdata);
      const {
        originalSize,
        physicalPixelSize
      } = v.imageInfo;
      // Expected normalization: each axis' physical extent divided by the
      // largest axis' physical extent.
      const sizemax = Math.max(originalSize.x * physicalPixelSize.x, originalSize.y * physicalPixelSize.y, originalSize.z * physicalPixelSize.z);
      const sx = physicalPixelSize.x * originalSize.x / sizemax;
      const sy = physicalPixelSize.y * originalSize.y / sizemax;
      const sz = physicalPixelSize.z * originalSize.z / sizemax;
      const EPSILON = 0.000000001;
      expect(v.normPhysicalSize.x).to.be.closeTo(sx, EPSILON);
      expect(v.normPhysicalSize.y).to.be.closeTo(sy, EPSILON);
      expect(v.normPhysicalSize.z).to.be.closeTo(sz, EPSILON);
    });
  });
});
|
|
@@ -0,0 +1,358 @@
|
|
|
1
|
+
import { expect } from "chai";
|
|
2
|
+
import * as zarr from "@zarrita/core";
|
|
3
|
+
import WrappedStore from "../loaders/zarr_utils/WrappedStore";
|
|
4
|
+
import { getDimensionCount, getScale, getSourceChannelNames, matchSourceScaleLevels, orderByDimension, orderByTCZYX, remapAxesToTCZYX } from "../loaders/zarr_utils/utils";
|
|
5
|
+
|
|
6
|
+
/** Contains only the data required to produce a mock `ZarrSourceMeta` which is useful for testing */
|
|
7
|
+
|
|
8
|
+
/**
 * Builds a minimal OMERO-style metadata object with `numChannels` channels.
 * Labels default to "channel N" unless `channelNames` supplies them, in
 * which case `numChannels` is ignored and one channel is made per name.
 */
const createMockOmeroMetadata = (numChannels, channelNames) => {
  const labels = channelNames ?? Array.from({
    length: numChannels
  }, (_, i) => `channel ${i}`);
  const channels = labels.map(label => ({
    label,
    active: true,
    coefficient: 1,
    color: "ffffffff",
    family: "linear",
    inverted: false,
    window: {
      end: 1,
      max: 1,
      min: 0,
      start: 0
    }
  }));
  return {
    id: 0,
    name: "0",
    version: "0.0",
    channels
  };
};
|
|
29
|
+
/**
 * Builds minimal OME multiscale metadata with fixed TCZYX axes and one
 * dataset per entry in `scales`. Dataset paths come from `paths` when
 * provided, otherwise default to the stringified dataset index.
 */
const createMockMultiscaleMetadata = (scales, paths) => {
  const axes = ["t", "c", "z", "y", "x"].map(name => ({
    name
  }));
  const datasets = scales.map((scale, idx) => {
    const path = (paths && paths[idx]) ?? `${idx}`;
    return {
      path,
      coordinateTransformations: [{
        type: "scale",
        scale
      }]
    };
  });
  return {
    name: "0",
    version: "0.0",
    axes,
    datasets
  };
};
|
|
51
|
+
// Stand-in zarr store whose reads always miss; lets `zarr.create` run
// without any real backing storage.
class MockStore {
  // Matches the store `get` interface shape; every lookup returns undefined.
  get(_key, _opts) {
    return undefined;
  }
}
|
|
56
|
+
// Single shared store instance backing every mock zarr array below.
const MOCK_STORE = new WrappedStore(new MockStore());

/**
 * Creates one mock uint8 zarr array per entry in `shapes`, each with a
 * single chunk spanning the whole array. Resolves to the arrays in order.
 */
const createMockArrays = shapes =>
  Promise.all(
    shapes.map(shape =>
      // eslint-disable-next-line @typescript-eslint/naming-convention
      zarr.create(MOCK_STORE, {
        shape,
        chunk_shape: shape,
        data_type: "uint8"
      })
    )
  );
|
|
66
|
+
/**
 * Assembles a single mock zarr source: real (empty) zarr arrays for the
 * given level `shapes`, plus matching multiscale and omero metadata.
 * Axes are always in canonical TCZYX order.
 */
const createOneMockSource = async (shapes, scales, channelOffset, paths, names) => {
  const scaleLevels = await createMockArrays(shapes);
  // Channel count comes from the C dimension of the largest level's shape.
  const numChannels = shapes[0][1];
  return {
    scaleLevels,
    multiscaleMetadata: createMockMultiscaleMetadata(scales, paths),
    omeroMetadata: createMockOmeroMetadata(numChannels, names),
    axesTCZYX: [0, 1, 2, 3, 4],
    channelOffset
  };
};
|
|
73
|
+
/**
 * Builds an array of mock sources from the given specs. Each source's
 * `channelOffset` is the running total of channel counts of the sources
 * before it, mirroring how multiple zarr sources are combined.
 */
const createMockSources = specs => {
  let nextChannelOffset = 0;
  const sourcePromises = specs.map(spec => {
    // Default every level's scale to the identity when the spec omits scales.
    const levelScales =
      spec.scales ??
      Array.from({
        length: spec.shapes.length
      }, () => [1, 1, 1, 1, 1]);
    const sourcePromise = createOneMockSource(spec.shapes, levelScales, nextChannelOffset, spec.paths);
    nextChannelOffset += spec.shapes[0][1];
    return sourcePromise;
  });
  return Promise.all(sourcePromises);
};
|
|
91
|
+
/**
 * Builds two independent but identical source arrays from the same specs:
 * one for the code under test to mutate, one to keep as a reference.
 */
const createTwoMockSourceArrs = specs =>
  Promise.all([createMockSources(specs), createMockSources(specs)]);
|
|
94
|
+
|
|
95
|
+
/**
 * Field-by-field deep comparison of two source arrays. Chai's deep equality
 * doesn't seem to work for zarr arrays, so scale levels are compared
 * manually by path, shape, and chunks.
 */
const expectSourcesEqual = (aArr, bArr) => {
  expect(aArr.length).to.equal(bArr.length);
  aArr.forEach((a, i) => {
    const b = bArr[i];
    expect(a.multiscaleMetadata).to.deep.equal(b.multiscaleMetadata);
    expect(a.omeroMetadata).to.deep.equal(b.omeroMetadata);
    expect(a.axesTCZYX).to.deep.equal(b.axesTCZYX);
    expect(a.channelOffset).to.equal(b.channelOffset);
    expect(a.scaleLevels.length).to.equal(b.scaleLevels.length);
    a.scaleLevels.forEach((aLevel, j) => {
      const bLevel = b.scaleLevels[j];
      expect(aLevel.path).to.equal(bLevel.path);
      expect(aLevel.shape).to.deep.equal(bLevel.shape);
      expect(aLevel.chunks).to.deep.equal(bLevel.chunks);
    });
  });
};
|
|
113
|
+
describe("zarr_utils", () => {
|
|
114
|
+
describe("getSourceChannelNames", () => {
|
|
115
|
+
it("extracts a list of channel labels from the given source", async () => {
|
|
116
|
+
const names = ["foo", "bar", "baz"];
|
|
117
|
+
const source = await createOneMockSource([[1, 3, 1, 1, 1]], [[1, 1, 1, 1, 1]], 0, ["1", "2", "3"], names);
|
|
118
|
+
expect(getSourceChannelNames(source)).to.deep.equal(names);
|
|
119
|
+
});
|
|
120
|
+
it("does not resolve channel name collisions", async () => {
|
|
121
|
+
const names = ["foo", "bar", "foo"];
|
|
122
|
+
const source = await createOneMockSource([[1, 3, 1, 1, 1]], [[1, 1, 1, 1, 1]], 0, ["1", "2", "3"], names);
|
|
123
|
+
expect(getSourceChannelNames(source)).to.deep.equal(names);
|
|
124
|
+
});
|
|
125
|
+
it('applies default names of the form "Channel N" for missing labels', async () => {
|
|
126
|
+
const names = ["foo", "bar", undefined];
|
|
127
|
+
const source = await createOneMockSource([[1, 3, 1, 1, 1]], [[1, 1, 1, 1, 1]], 0, ["1", "2", "3"], names);
|
|
128
|
+
expect(getSourceChannelNames(source)).to.deep.equal(["foo", "bar", "Channel 2"]);
|
|
129
|
+
});
|
|
130
|
+
it("applies default names when `omeroMetadata` is missing entirely", async () => {
|
|
131
|
+
const source = await createOneMockSource([[1, 3, 1, 1, 1]], [[1, 1, 1, 1, 1]], 0);
|
|
132
|
+
delete source.omeroMetadata;
|
|
133
|
+
expect(getSourceChannelNames(source)).to.deep.equal(["Channel 0", "Channel 1", "Channel 2"]);
|
|
134
|
+
});
|
|
135
|
+
it("applies `channelOffset` to default names", async () => {
|
|
136
|
+
const source = await createOneMockSource([[1, 3, 1, 1, 1]], [[1, 1, 1, 1, 1]], 3);
|
|
137
|
+
delete source.omeroMetadata;
|
|
138
|
+
expect(getSourceChannelNames(source)).to.deep.equal(["Channel 3", "Channel 4", "Channel 5"]);
|
|
139
|
+
});
|
|
140
|
+
});
|
|
141
|
+
describe("getDimensionCount", () => {
|
|
142
|
+
it("returns 5 when all 5 dimension indices are positive", () => {
|
|
143
|
+
expect(getDimensionCount([1, 1, 1, 1, 1])).to.equal(5);
|
|
144
|
+
});
|
|
145
|
+
it("recognizes when T, C, or Z is missing", () => {
|
|
146
|
+
for (let dim = 0; dim < 3; dim++) {
|
|
147
|
+
const tczyx = [1, 1, 1, 1, 1];
|
|
148
|
+
tczyx[dim] = -1;
|
|
149
|
+
expect(getDimensionCount(tczyx)).to.equal(4);
|
|
150
|
+
}
|
|
151
|
+
});
|
|
152
|
+
it("returns 2 when all of T, C, and Z are missing", () => {
|
|
153
|
+
expect(getDimensionCount([-1, -1, -1, 1, 1])).to.equal(2);
|
|
154
|
+
});
|
|
155
|
+
});
|
|
156
|
+
describe("remapAxesToTCZYX", () => {
|
|
157
|
+
it("produces an array of indices in T, C, Z, Y, X order", () => {
|
|
158
|
+
const axes = [{
|
|
159
|
+
name: "t"
|
|
160
|
+
}, {
|
|
161
|
+
name: "c"
|
|
162
|
+
}, {
|
|
163
|
+
name: "x"
|
|
164
|
+
}, {
|
|
165
|
+
name: "y"
|
|
166
|
+
}, {
|
|
167
|
+
name: "z"
|
|
168
|
+
}];
|
|
169
|
+
expect(remapAxesToTCZYX(axes)).to.deep.equal([0, 1, 4, 3, 2]);
|
|
170
|
+
});
|
|
171
|
+
it("defaults to -1 for missing T, C, or Z axes", () => {
|
|
172
|
+
const axes = [{
|
|
173
|
+
name: "x"
|
|
174
|
+
}, {
|
|
175
|
+
name: "y"
|
|
176
|
+
}];
|
|
177
|
+
expect(remapAxesToTCZYX(axes)).to.deep.equal([-1, -1, -1, 1, 0]);
|
|
178
|
+
});
|
|
179
|
+
it("throws an error if it encounters an unrecognized (not t, c, z, y, or x) axis name", () => {
|
|
180
|
+
const axes = [{
|
|
181
|
+
name: "t"
|
|
182
|
+
}, {
|
|
183
|
+
name: "c"
|
|
184
|
+
}, {
|
|
185
|
+
name: "x"
|
|
186
|
+
}, {
|
|
187
|
+
name: "y"
|
|
188
|
+
}, {
|
|
189
|
+
name: "foo"
|
|
190
|
+
}];
|
|
191
|
+
expect(() => remapAxesToTCZYX(axes)).to.throw("Unrecognized axis");
|
|
192
|
+
});
|
|
193
|
+
});
|
|
194
|
+
// TODO: `pickLevelToLoad`
|
|
195
|
+
|
|
196
|
+
const VALS_TCZYX = [1, 2, 3, 4, 5];
|
|
197
|
+
describe("orderByDimension", () => {
|
|
198
|
+
it("orders an array in dimension order based on the given indices", () => {
|
|
199
|
+
const order = [3, 1, 4, 0, 2];
|
|
200
|
+
expect(orderByDimension(VALS_TCZYX, order)).to.deep.equal([4, 2, 5, 1, 3]);
|
|
201
|
+
});
|
|
202
|
+
it("excludes the T, C, or Z dimension if its index is negative", () => {
|
|
203
|
+
expect(orderByDimension(VALS_TCZYX, [-1, 0, 1, 3, 2])).to.deep.equal([2, 3, 5, 4]);
|
|
204
|
+
expect(orderByDimension(VALS_TCZYX, [0, -1, 1, 3, 2])).to.deep.equal([1, 3, 5, 4]);
|
|
205
|
+
expect(orderByDimension(VALS_TCZYX, [0, 1, -1, 3, 2])).to.deep.equal([1, 2, 5, 4]);
|
|
206
|
+
});
|
|
207
|
+
it("throws an error if an axis index is out of bounds", () => {
|
|
208
|
+
// Out of bounds for full-size array
|
|
209
|
+
expect(() => orderByDimension(VALS_TCZYX, [0, 1, 2, 3, 5])).to.throw("Unexpected axis index");
|
|
210
|
+
// Out of bounds for smaller array
|
|
211
|
+
expect(() => orderByDimension(VALS_TCZYX, [-1, -1, 0, 3, 1])).to.throw("Unexpected axis index");
|
|
212
|
+
});
|
|
213
|
+
});
|
|
214
|
+
const VALS_DIM = [16, 8, 4, 2, 1];
|
|
215
|
+
describe("orderByTCZYX", () => {
|
|
216
|
+
it("orders an array TCZYX based on the given indices", () => {
|
|
217
|
+
const order = [3, 1, 4, 0, 2];
|
|
218
|
+
expect(orderByTCZYX(VALS_DIM, order, 0)).to.deep.equal([2, 8, 1, 16, 4]);
|
|
219
|
+
});
|
|
220
|
+
it("fills in missing dimensions with a default value", () => {
|
|
221
|
+
const order = [3, 1, 4, -1, 2];
|
|
222
|
+
expect(orderByTCZYX(VALS_DIM, order, 3)).to.deep.equal([2, 8, 1, 3, 4]);
|
|
223
|
+
});
|
|
224
|
+
it("throws an error if an axis index is out of bounds", () => {
|
|
225
|
+
// Out of bounds for full-size array
|
|
226
|
+
expect(() => orderByTCZYX(VALS_DIM, [0, 1, 2, 5, 3], 0)).to.throw("Unexpected axis index");
|
|
227
|
+
// Out of bounds for smaller array
|
|
228
|
+
expect(() => orderByTCZYX(VALS_DIM.slice(2), [-1, 0, 1, 3, 2], 0)).to.throw("Unexpected axis index");
|
|
229
|
+
});
|
|
230
|
+
});
|
|
231
|
+
const MOCK_DATASET = {
|
|
232
|
+
path: "0",
|
|
233
|
+
coordinateTransformations: [{
|
|
234
|
+
type: "translation",
|
|
235
|
+
translation: [0, 0, 0, 0, 0]
|
|
236
|
+
}, {
|
|
237
|
+
type: "scale",
|
|
238
|
+
scale: [1, 2, 3, 4, 5]
|
|
239
|
+
}, {
|
|
240
|
+
type: "identity"
|
|
241
|
+
}]
|
|
242
|
+
};
|
|
243
|
+
describe("getScale", () => {
|
|
244
|
+
it("returns the scale transformation for a given dataset", () => {
|
|
245
|
+
expect(getScale(MOCK_DATASET, [0, 1, 2, 3, 4])).to.deep.equal([1, 2, 3, 4, 5]);
|
|
246
|
+
});
|
|
247
|
+
it("orders the scale transformation in TCZYX order", () => {
|
|
248
|
+
expect(getScale(MOCK_DATASET, [3, 1, 4, 0, 2])).to.deep.equal([4, 2, 5, 1, 3]);
|
|
249
|
+
});
|
|
250
|
+
it('defaults to `[1, 1, 1, 1, 1]` if no coordinate transformation of type "scale" is found', () => {
|
|
251
|
+
const dataset = {
|
|
252
|
+
...MOCK_DATASET,
|
|
253
|
+
coordinateTransformations: MOCK_DATASET.coordinateTransformations.slice(0, 1)
|
|
254
|
+
};
|
|
255
|
+
expect(getScale(dataset, [0, 1, 2, 3, 4])).to.deep.equal([1, 1, 1, 1, 1]);
|
|
256
|
+
});
|
|
257
|
+
it("defaults to `[1, 1, 1, 1, 1]` if no coordinate transformations are present at all", () => {
|
|
258
|
+
expect(getScale({
|
|
259
|
+
path: "0"
|
|
260
|
+
}, [0, 1, 2, 3, 4])).to.deep.equal([1, 1, 1, 1, 1]);
|
|
261
|
+
});
|
|
262
|
+
});
|
|
263
|
+
describe("matchSourceScaleLevels", () => {
|
|
264
|
+
it("does nothing if passed only one source scale level", async () => {
|
|
265
|
+
const [testSource, refSource] = await createTwoMockSourceArrs([{
|
|
266
|
+
shapes: [[5, 5, 5, 5, 5]]
|
|
267
|
+
}]);
|
|
268
|
+
matchSourceScaleLevels(testSource);
|
|
269
|
+
expectSourcesEqual(testSource, refSource);
|
|
270
|
+
});
|
|
271
|
+
it("does nothing if all source scale levels are the same", async () => {
|
|
272
|
+
const spec = {
|
|
273
|
+
shapes: [[1, 1, 10, 10, 10], [1, 1, 5, 5, 5]]
|
|
274
|
+
};
|
|
275
|
+
const [testSource, refSource] = await createTwoMockSourceArrs([spec, spec]);
|
|
276
|
+
matchSourceScaleLevels(testSource);
|
|
277
|
+
expectSourcesEqual(testSource, refSource);
|
|
278
|
+
});
|
|
279
|
+
it("trims source scale levels which are outside the range of any other sources", async () => {
|
|
280
|
+
const baseSpec = {
|
|
281
|
+
shapes: [[1, 1, 10, 10, 10], [1, 1, 5, 5, 5]]
|
|
282
|
+
};
|
|
283
|
+
|
|
284
|
+
// Has all the same scale levels as `baseSpec`, plus one smaller
|
|
285
|
+
const specSmaller = {
|
|
286
|
+
shapes: [...baseSpec.shapes.slice(), [1, 1, 2, 2, 2]]
|
|
287
|
+
};
|
|
288
|
+
// Has all the same scale levels as `baseSpec`, plus one larger
|
|
289
|
+
const specLarger = {
|
|
290
|
+
shapes: [[1, 1, 20, 20, 20], ...baseSpec.shapes.slice()]
|
|
291
|
+
};
|
|
292
|
+
const refSourceSmaller = await createMockSources([baseSpec, baseSpec]);
|
|
293
|
+
// The largest source will have path "0", so we have to shift the paths to match
|
|
294
|
+
const refSourceLarger = await createMockSources([baseSpec, {
|
|
295
|
+
...baseSpec,
|
|
296
|
+
paths: ["1", "2"]
|
|
297
|
+
}]);
|
|
298
|
+
const testSourceSmaller = await createMockSources([baseSpec, specSmaller]);
|
|
299
|
+
const testSourceLarger = await createMockSources([baseSpec, specLarger]);
|
|
300
|
+
matchSourceScaleLevels(testSourceSmaller);
|
|
301
|
+
matchSourceScaleLevels(testSourceLarger);
|
|
302
|
+
expectSourcesEqual(testSourceSmaller, refSourceSmaller);
|
|
303
|
+
expectSourcesEqual(testSourceLarger, refSourceLarger);
|
|
304
|
+
});
|
|
305
|
+
it("handles unmatched scale levels within the range of other sources", async () => {
|
|
306
|
+
// The only level shapes that all three of these sources have in common are [1, 1, 6, 6, 6] and [1, 1, 2, 2, 2]
|
|
307
|
+
const shapes1 = [[1, 1, 6, 6, 6], [1, 1, 5, 5, 5], [1, 1, 3, 3, 3], [1, 1, 2, 2, 2]];
|
|
308
|
+
const shapes2 = [[1, 1, 6, 6, 6], [1, 1, 5, 5, 5], [1, 1, 4, 4, 4], [1, 1, 2, 2, 2], [1, 1, 1, 1, 1]];
|
|
309
|
+
const shapes3 = [[1, 1, 7, 7, 7], [1, 1, 6, 6, 6], [1, 1, 4, 4, 4], [1, 1, 2, 2, 2]];
|
|
310
|
+
const testSources = await createMockSources([{
|
|
311
|
+
shapes: shapes1
|
|
312
|
+
}, {
|
|
313
|
+
shapes: shapes2
|
|
314
|
+
}, {
|
|
315
|
+
shapes: shapes3
|
|
316
|
+
}]);
|
|
317
|
+
matchSourceScaleLevels(testSources);
|
|
318
|
+
const shapes = [[1, 1, 6, 6, 6], [1, 1, 2, 2, 2]];
|
|
319
|
+
const refSources = await createMockSources([{
|
|
320
|
+
shapes,
|
|
321
|
+
paths: ["0", "3"]
|
|
322
|
+
}, {
|
|
323
|
+
shapes,
|
|
324
|
+
paths: ["0", "3"]
|
|
325
|
+
}, {
|
|
326
|
+
shapes,
|
|
327
|
+
paths: ["1", "3"]
|
|
328
|
+
}]);
|
|
329
|
+
expectSourcesEqual(testSources, refSources);
|
|
330
|
+
});
|
|
331
|
+
it("throws an error if the size of two scale levels are mismatched", async () => {
|
|
332
|
+
const sources = await createMockSources([{
|
|
333
|
+
shapes: [[1, 1, 2, 1, 1]]
|
|
334
|
+
}, {
|
|
335
|
+
shapes: [[1, 1, 1, 2, 1]]
|
|
336
|
+
}]);
|
|
337
|
+
expect(() => matchSourceScaleLevels(sources)).to.throw("Incompatible zarr arrays: pixel dimensions are mismatched");
|
|
338
|
+
});
|
|
339
|
+
it("Does not throw an error if two scale levels of the same size have different scale transformations", async () => {
|
|
340
|
+
const sources = await createMockSources([{
|
|
341
|
+
shapes: [[1, 1, 1, 1, 1]],
|
|
342
|
+
scales: [[1, 1, 2, 2, 2]]
|
|
343
|
+
}, {
|
|
344
|
+
shapes: [[1, 1, 1, 1, 1]],
|
|
345
|
+
scales: [[1, 1, 1, 1, 1]]
|
|
346
|
+
}]);
|
|
347
|
+
expect(() => matchSourceScaleLevels(sources)).to.not.throw("Incompatible zarr arrays: scale levels of equal size have different scale transformations");
|
|
348
|
+
});
|
|
349
|
+
it("Does not throw an error if two scale levels of the same size have a different number of timesteps", async () => {
|
|
350
|
+
const sources = await createMockSources([{
|
|
351
|
+
shapes: [[1, 1, 1, 1, 1]]
|
|
352
|
+
}, {
|
|
353
|
+
shapes: [[2, 1, 1, 1, 1]]
|
|
354
|
+
}]);
|
|
355
|
+
expect(() => matchSourceScaleLevels(sources)).to.not.throw("Incompatible zarr arrays: different numbers of timesteps");
|
|
356
|
+
});
|
|
357
|
+
});
|
|
358
|
+
});
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
import { BufferGeometry, Group, Material, Mesh, OrthographicCamera, PerspectiveCamera, WebGLRenderer } from "three";
|
|
2
|
+
import { Channel, Volume } from ".";
|
|
3
|
+
import type { VolumeRenderImpl } from "./VolumeRenderImpl.js";
|
|
4
|
+
import { SettingsFlags, VolumeRenderSettings } from "./VolumeRenderSettings.js";
|
|
5
|
+
import type { FuseChannel } from "./types.js";
|
|
6
|
+
/**
|
|
7
|
+
* Creates a plane that renders a 2D XY slice of volume atlas data.
|
|
8
|
+
*/
|
|
9
|
+
export default class Atlas2DSlice implements VolumeRenderImpl {
|
|
10
|
+
private settings;
|
|
11
|
+
volume: Volume;
|
|
12
|
+
private geometry;
|
|
13
|
+
protected geometryMesh: Mesh<BufferGeometry, Material>;
|
|
14
|
+
private geometryTransformNode;
|
|
15
|
+
private boxHelper;
|
|
16
|
+
private uniforms;
|
|
17
|
+
private channelData;
|
|
18
|
+
private sliceUpdateWaiting;
|
|
19
|
+
/**
|
|
20
|
+
* Creates a new Atlas2DSlice.
|
|
21
|
+
* @param volume The volume that this renderer should render data from.
|
|
22
|
+
* @param settings Optional settings object. If set, updates the renderer with
|
|
23
|
+
* the given settings. Otherwise, uses the default VolumeRenderSettings.
|
|
24
|
+
*/
|
|
25
|
+
constructor(volume: Volume, settings?: VolumeRenderSettings);
|
|
26
|
+
/**
|
|
27
|
+
* Syncs `this.settings.zSlice` with the corresponding shader uniform, or defers syncing until the slice is loaded.
|
|
28
|
+
* @returns a boolean indicating whether the slice is out of bounds of the volume entirely.
|
|
29
|
+
*/
|
|
30
|
+
private updateSlice;
|
|
31
|
+
updateVolumeDimensions(): void;
|
|
32
|
+
updateSettings(newSettings: VolumeRenderSettings, dirtyFlags?: number | SettingsFlags): void;
|
|
33
|
+
private createGeometry;
|
|
34
|
+
cleanup(): void;
|
|
35
|
+
viewpointMoved(): void;
|
|
36
|
+
doRender(renderer: WebGLRenderer, camera: PerspectiveCamera | OrthographicCamera): void;
|
|
37
|
+
get3dObject(): Group;
|
|
38
|
+
updateActiveChannels(channelcolors: FuseChannel[], channeldata: Channel[]): void;
|
|
39
|
+
private setUniform;
|
|
40
|
+
setRenderUpdateListener(_listener?: ((iteration: number) => void) | undefined): void;
|
|
41
|
+
}
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
import { DataTexture, Vector3 } from "three";
|
|
2
|
+
import Histogram from "./Histogram.js";
|
|
3
|
+
import { Lut } from "./Lut.js";
|
|
4
|
+
import { TypedArray, NumberType } from "./types.js";
|
|
5
|
+
interface ChannelImageData {
|
|
6
|
+
/** Returns the one-dimensional array containing the data in RGBA order, as integers in the range 0 to 255. */
|
|
7
|
+
readonly data: TypedArray<NumberType>;
|
|
8
|
+
/** Returns the actual dimensions of the data in the ImageData object, in pixels. */
|
|
9
|
+
readonly height: number;
|
|
10
|
+
/** Returns the actual dimensions of the data in the ImageData object, in pixels. */
|
|
11
|
+
readonly width: number;
|
|
12
|
+
}
|
|
13
|
+
export default class Channel {
|
|
14
|
+
loaded: boolean;
|
|
15
|
+
dtype: NumberType;
|
|
16
|
+
imgData: ChannelImageData;
|
|
17
|
+
volumeData: TypedArray<NumberType>;
|
|
18
|
+
name: string;
|
|
19
|
+
histogram: Histogram;
|
|
20
|
+
lut: Lut;
|
|
21
|
+
colorPalette: Uint8Array;
|
|
22
|
+
colorPaletteAlpha: number;
|
|
23
|
+
dims: [number, number, number];
|
|
24
|
+
dataTexture: DataTexture;
|
|
25
|
+
lutTexture: DataTexture;
|
|
26
|
+
rawMin: number;
|
|
27
|
+
rawMax: number;
|
|
28
|
+
constructor(name: string);
|
|
29
|
+
combineLuts(rgbColor: [number, number, number] | number, out?: Uint8Array): Uint8Array;
|
|
30
|
+
setRawDataRange(min: number, max: number): void;
|
|
31
|
+
getHistogram(): Histogram;
|
|
32
|
+
getIntensity(x: number, y: number, z: number): number;
|
|
33
|
+
normalizeRaw(val: number): number;
|
|
34
|
+
getIntensityFromAtlas(x: number, y: number, z: number): number;
|
|
35
|
+
private rebuildDataTexture;
|
|
36
|
+
setFromAtlas(bitsArray: TypedArray<NumberType>, w: number, h: number, dtype: NumberType, rawMin: number, rawMax: number, subregionSize: Vector3): void;
|
|
37
|
+
private unpackFromAtlas;
|
|
38
|
+
setFromVolumeData(bitsArray: TypedArray<NumberType>, vx: number, vy: number, vz: number, ax: number, ay: number, rawMin: number, rawMax: number, dtype: NumberType): void;
|
|
39
|
+
private packToAtlas;
|
|
40
|
+
setLut(lut: Lut): void;
|
|
41
|
+
setColorPalette(palette: Uint8Array): void;
|
|
42
|
+
setColorPaletteAlpha(alpha: number): void;
|
|
43
|
+
}
|
|
44
|
+
export {};
|