@aics/vole-core 3.12.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE.txt +26 -0
- package/README.md +119 -0
- package/es/Atlas2DSlice.js +224 -0
- package/es/Channel.js +264 -0
- package/es/FileSaver.js +31 -0
- package/es/FusedChannelData.js +192 -0
- package/es/Histogram.js +250 -0
- package/es/ImageInfo.js +127 -0
- package/es/Light.js +74 -0
- package/es/Lut.js +500 -0
- package/es/MarchingCubes.js +507 -0
- package/es/MeshVolume.js +334 -0
- package/es/NaiveSurfaceNets.js +251 -0
- package/es/PathTracedVolume.js +482 -0
- package/es/RayMarchedAtlasVolume.js +250 -0
- package/es/RenderToBuffer.js +31 -0
- package/es/ThreeJsPanel.js +633 -0
- package/es/Timing.js +28 -0
- package/es/TrackballControls.js +538 -0
- package/es/View3d.js +848 -0
- package/es/Volume.js +352 -0
- package/es/VolumeCache.js +161 -0
- package/es/VolumeDims.js +16 -0
- package/es/VolumeDrawable.js +702 -0
- package/es/VolumeMaker.js +101 -0
- package/es/VolumeRenderImpl.js +1 -0
- package/es/VolumeRenderSettings.js +203 -0
- package/es/constants/basicShaders.js +29 -0
- package/es/constants/colors.js +59 -0
- package/es/constants/denoiseShader.js +43 -0
- package/es/constants/lights.js +42 -0
- package/es/constants/materials.js +85 -0
- package/es/constants/pathtraceOutputShader.js +13 -0
- package/es/constants/scaleBarSVG.js +21 -0
- package/es/constants/time.js +34 -0
- package/es/constants/volumePTshader.js +153 -0
- package/es/constants/volumeRayMarchShader.js +123 -0
- package/es/constants/volumeSliceShader.js +115 -0
- package/es/index.js +21 -0
- package/es/loaders/IVolumeLoader.js +131 -0
- package/es/loaders/JsonImageInfoLoader.js +255 -0
- package/es/loaders/OmeZarrLoader.js +495 -0
- package/es/loaders/OpenCellLoader.js +65 -0
- package/es/loaders/RawArrayLoader.js +89 -0
- package/es/loaders/TiffLoader.js +219 -0
- package/es/loaders/VolumeLoadError.js +44 -0
- package/es/loaders/VolumeLoaderUtils.js +221 -0
- package/es/loaders/index.js +40 -0
- package/es/loaders/zarr_utils/ChunkPrefetchIterator.js +143 -0
- package/es/loaders/zarr_utils/WrappedStore.js +51 -0
- package/es/loaders/zarr_utils/types.js +24 -0
- package/es/loaders/zarr_utils/utils.js +225 -0
- package/es/loaders/zarr_utils/validation.js +49 -0
- package/es/test/ChunkPrefetchIterator.test.js +208 -0
- package/es/test/RequestQueue.test.js +442 -0
- package/es/test/SubscribableRequestQueue.test.js +244 -0
- package/es/test/VolumeCache.test.js +118 -0
- package/es/test/VolumeRenderSettings.test.js +71 -0
- package/es/test/lut.test.js +671 -0
- package/es/test/num_utils.test.js +140 -0
- package/es/test/volume.test.js +98 -0
- package/es/test/zarr_utils.test.js +358 -0
- package/es/types/Atlas2DSlice.d.ts +41 -0
- package/es/types/Channel.d.ts +44 -0
- package/es/types/FileSaver.d.ts +6 -0
- package/es/types/FusedChannelData.d.ts +26 -0
- package/es/types/Histogram.d.ts +57 -0
- package/es/types/ImageInfo.d.ts +87 -0
- package/es/types/Light.d.ts +27 -0
- package/es/types/Lut.d.ts +67 -0
- package/es/types/MarchingCubes.d.ts +53 -0
- package/es/types/MeshVolume.d.ts +40 -0
- package/es/types/NaiveSurfaceNets.d.ts +11 -0
- package/es/types/PathTracedVolume.d.ts +65 -0
- package/es/types/RayMarchedAtlasVolume.d.ts +41 -0
- package/es/types/RenderToBuffer.d.ts +17 -0
- package/es/types/ThreeJsPanel.d.ts +107 -0
- package/es/types/Timing.d.ts +11 -0
- package/es/types/TrackballControls.d.ts +51 -0
- package/es/types/View3d.d.ts +357 -0
- package/es/types/Volume.d.ts +152 -0
- package/es/types/VolumeCache.d.ts +43 -0
- package/es/types/VolumeDims.d.ts +28 -0
- package/es/types/VolumeDrawable.d.ts +108 -0
- package/es/types/VolumeMaker.d.ts +49 -0
- package/es/types/VolumeRenderImpl.d.ts +22 -0
- package/es/types/VolumeRenderSettings.d.ts +98 -0
- package/es/types/constants/basicShaders.d.ts +4 -0
- package/es/types/constants/colors.d.ts +2 -0
- package/es/types/constants/denoiseShader.d.ts +40 -0
- package/es/types/constants/lights.d.ts +38 -0
- package/es/types/constants/materials.d.ts +20 -0
- package/es/types/constants/pathtraceOutputShader.d.ts +11 -0
- package/es/types/constants/scaleBarSVG.d.ts +2 -0
- package/es/types/constants/time.d.ts +19 -0
- package/es/types/constants/volumePTshader.d.ts +137 -0
- package/es/types/constants/volumeRayMarchShader.d.ts +117 -0
- package/es/types/constants/volumeSliceShader.d.ts +109 -0
- package/es/types/glsl.d.js +0 -0
- package/es/types/index.d.ts +28 -0
- package/es/types/loaders/IVolumeLoader.d.ts +113 -0
- package/es/types/loaders/JsonImageInfoLoader.d.ts +80 -0
- package/es/types/loaders/OmeZarrLoader.d.ts +87 -0
- package/es/types/loaders/OpenCellLoader.d.ts +9 -0
- package/es/types/loaders/RawArrayLoader.d.ts +33 -0
- package/es/types/loaders/TiffLoader.d.ts +45 -0
- package/es/types/loaders/VolumeLoadError.d.ts +18 -0
- package/es/types/loaders/VolumeLoaderUtils.d.ts +38 -0
- package/es/types/loaders/index.d.ts +22 -0
- package/es/types/loaders/zarr_utils/ChunkPrefetchIterator.d.ts +22 -0
- package/es/types/loaders/zarr_utils/WrappedStore.d.ts +24 -0
- package/es/types/loaders/zarr_utils/types.d.ts +94 -0
- package/es/types/loaders/zarr_utils/utils.d.ts +23 -0
- package/es/types/loaders/zarr_utils/validation.d.ts +7 -0
- package/es/types/test/ChunkPrefetchIterator.test.d.ts +1 -0
- package/es/types/test/RequestQueue.test.d.ts +1 -0
- package/es/types/test/SubscribableRequestQueue.test.d.ts +1 -0
- package/es/types/test/VolumeCache.test.d.ts +1 -0
- package/es/types/test/VolumeRenderSettings.test.d.ts +1 -0
- package/es/types/test/lut.test.d.ts +1 -0
- package/es/types/test/num_utils.test.d.ts +1 -0
- package/es/types/test/volume.test.d.ts +1 -0
- package/es/types/test/zarr_utils.test.d.ts +1 -0
- package/es/types/types.d.ts +115 -0
- package/es/types/utils/RequestQueue.d.ts +112 -0
- package/es/types/utils/SubscribableRequestQueue.d.ts +52 -0
- package/es/types/utils/num_utils.d.ts +43 -0
- package/es/types/workers/VolumeLoaderContext.d.ts +106 -0
- package/es/types/workers/types.d.ts +101 -0
- package/es/types/workers/util.d.ts +3 -0
- package/es/types.js +75 -0
- package/es/typings.d.js +0 -0
- package/es/utils/RequestQueue.js +267 -0
- package/es/utils/SubscribableRequestQueue.js +187 -0
- package/es/utils/num_utils.js +231 -0
- package/es/workers/FetchTiffWorker.js +153 -0
- package/es/workers/VolumeLoadWorker.js +129 -0
- package/es/workers/VolumeLoaderContext.js +271 -0
- package/es/workers/types.js +41 -0
- package/es/workers/util.js +8 -0
- package/package.json +83 -0

package/es/loaders/zarr_utils/WrappedStore.js
@@ -0,0 +1,51 @@
+/**
+ * `Readable` is zarrita's minimal abstraction for any source of data.
+ * `WrappedStore` wraps another `Readable` and adds (optional) connections to `VolumeCache` and `RequestQueue`.
+ */
+class WrappedStore {
+  constructor(baseStore, cache, queue) {
+    this.baseStore = baseStore;
+    this.cache = cache;
+    this.queue = queue;
+  }
+
+  // Dummy implementation to make this class easier to use in tests
+  set(_key, _value) {
+    return Promise.resolve();
+  }
+
+  async getAndCache(key, cacheKey, opts) {
+    const result = await this.baseStore.get(key, opts);
+    if (this.cache && result) {
+      this.cache.insert(cacheKey, result);
+    }
+    return result;
+  }
+
+  async get(key, opts) {
+    const ZARR_EXTS = [".zarray", ".zgroup", ".zattrs", "zarr.json"];
+    if (!this.cache || ZARR_EXTS.some(s => key.endsWith(s))) {
+      return this.baseStore.get(key, opts?.options);
+    }
+    if (opts?.reportKey) {
+      opts.reportKey(key, opts.subscriber);
+    }
+    let keyPrefix = this.baseStore.url ?? "";
+    if (keyPrefix !== "" && !(keyPrefix instanceof URL) && !keyPrefix.endsWith("/")) {
+      keyPrefix += "/";
+    }
+    const fullKey = keyPrefix + key.slice(1);
+
+    // Check the cache
+    const cacheResult = this.cache.get(fullKey);
+    if (cacheResult) {
+      return new Uint8Array(cacheResult);
+    }
+
+    // Not in cache; load the chunk and cache it
+    if (this.queue && opts) {
+      return this.queue.addRequest(fullKey, opts.subscriber, () => this.getAndCache(key, fullKey, opts?.options), opts.isPrefetch);
+    } else {
+      // Should we ever hit this code? We should always have a request queue.
+      return this.getAndCache(key, fullKey, opts?.options);
+    }
+  }
+}
+export default WrappedStore;
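
Because `WrappedStore` implements zarrita's minimal `Readable` contract (`get`, plus the dummy `set` above), it can stand in for a plain store anywhere zarrita accepts one. A minimal wiring sketch, assuming zarrita as the consumer; the URL and import path are illustrative, not taken from the diff:

    import * as zarr from "zarrita";
    import WrappedStore from "./WrappedStore.js"; // relative path assumed

    // With no cache or queue the wrapper is a passthrough to the base store;
    // pass VolumeCache and RequestQueue instances to enable caching/scheduling.
    const baseStore = new zarr.FetchStore("https://example.com/data.ome.zarr"); // hypothetical URL
    const store = new WrappedStore(baseStore, undefined, undefined);
    const root = zarr.root(store);
    const group = await zarr.open(root, { kind: "group" });
    const level0 = await zarr.open(root.resolve("0"), { kind: "array" });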
package/es/loaders/zarr_utils/types.js
@@ -0,0 +1,24 @@
+/**
+ * Directions in which to move outward from the loaded set of chunks while prefetching.
+ *
+ * Ordered in pairs of opposite directions both because that's a sensible order in which to prefetch for our purposes,
+ * and because it lets us treat the least significant bit as the sign. So `direction >> 1` gives the index of the
+ * direction in TZYX-ordered arrays, and `direction & 1` gives the sign of the direction (e.g. positive vs negative Z).
+ */
+export let PrefetchDirection = /*#__PURE__*/function (PrefetchDirection) {
+  PrefetchDirection[PrefetchDirection["T_MINUS"] = 0] = "T_MINUS";
+  PrefetchDirection[PrefetchDirection["T_PLUS"] = 1] = "T_PLUS";
+  PrefetchDirection[PrefetchDirection["Z_MINUS"] = 2] = "Z_MINUS";
+  PrefetchDirection[PrefetchDirection["Z_PLUS"] = 3] = "Z_PLUS";
+  PrefetchDirection[PrefetchDirection["Y_MINUS"] = 4] = "Y_MINUS";
+  PrefetchDirection[PrefetchDirection["Y_PLUS"] = 5] = "Y_PLUS";
+  PrefetchDirection[PrefetchDirection["X_MINUS"] = 6] = "X_MINUS";
+  PrefetchDirection[PrefetchDirection["X_PLUS"] = 7] = "X_PLUS";
+  return PrefetchDirection;
+}({});
+
+/** https://ngff.openmicroscopy.org/latest/#multiscale-md */
+
+/** https://ngff.openmicroscopy.org/latest/#omero-md */
+
+/** A record with everything we need to access and use a single remote source of multiscale OME-Zarr data. */
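
The bit layout described in the doc comment can be exercised directly. A small demo using only names defined above (the relative import path is assumed):

    import { PrefetchDirection } from "./types.js"; // path assumed

    const dir = PrefetchDirection.Y_PLUS;  // 5 = 0b101
    const axis = dir >> 1;                 // 2 -> the Y entry of a TZYX-ordered array
    const positive = (dir & 1) === 1;      // true -> step toward +Y
    const step = [0, 0, 0, 0];             // [T, Z, Y, X]
    step[axis] = positive ? 1 : -1;        // [0, 0, 1, 0]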
package/es/loaders/zarr_utils/utils.js
@@ -0,0 +1,225 @@
+import { VolumeLoadErrorType, VolumeLoadError } from "../VolumeLoadError.js";
+
+/** Extracts channel names from a `ZarrSource`. Handles missing `omeroMetadata`. Does *not* resolve name collisions. */
+export function getSourceChannelNames(src) {
+  if (src.omeroMetadata?.channels) {
+    return src.omeroMetadata.channels.map(({ label }, idx) => label ?? `Channel ${idx + src.channelOffset}`);
+  }
+  const length = src.scaleLevels[0].shape[src.axesTCZYX[1]];
+  return Array.from({ length }, (_, idx) => `Channel ${idx + src.channelOffset}`);
+}
+
+/** Turns `axesTCZYX` into the number of dimensions in the array */
+export const getDimensionCount = ([t, c, z]) => 2 + Number(t > -1) + Number(c > -1) + Number(z > -1);
+
+export function remapAxesToTCZYX(axes) {
+  const axesTCZYX = [-1, -1, -1, -1, -1];
+  const axisNames = ["t", "c", "z", "y", "x"];
+  axes.forEach((axis, idx) => {
+    const axisIdx = axisNames.indexOf(axis.name);
+    if (axisIdx > -1) {
+      axesTCZYX[axisIdx] = idx;
+    } else {
+      throw new VolumeLoadError(`Unrecognized axis in zarr: ${axis.name}`, { type: VolumeLoadErrorType.INVALID_METADATA });
+    }
+  });
+
+  // it is possible that Z might not exist, but we require X and Y at least.
+  const noXAxis = axesTCZYX[4] === -1;
+  if (noXAxis || axesTCZYX[3] === -1) {
+    throw new VolumeLoadError(`Did not find ${noXAxis ? "an X" : "a Y"} axis in zarr`, { type: VolumeLoadErrorType.INVALID_METADATA });
+  }
+  return axesTCZYX;
+}
+
+/** Reorder an array of values [T, C, Z, Y, X] to the given dimension order */
+export function orderByDimension(valsTCZYX, orderTCZYX) {
+  const specLen = getDimensionCount(orderTCZYX);
+  const result = Array(specLen);
+  orderTCZYX.forEach((val, idx) => {
+    if (val >= 0) {
+      if (val >= specLen) {
+        throw new VolumeLoadError(`Unexpected axis index in zarr: ${val}`, { type: VolumeLoadErrorType.INVALID_METADATA });
+      }
+      result[val] = valsTCZYX[idx];
+    }
+  });
+  return result;
+}
+
+/** Reorder an array of values in the given dimension order to [T, C, Z, Y, X] */
+export function orderByTCZYX(valsDimension, orderTCZYX, defaultValue) {
+  const result = [defaultValue, defaultValue, defaultValue, defaultValue, defaultValue];
+  orderTCZYX.forEach((val, idx) => {
+    if (val >= 0) {
+      if (val >= valsDimension.length) {
+        throw new VolumeLoadError(`Unexpected axis index in zarr: ${val}`, { type: VolumeLoadErrorType.INVALID_METADATA });
+      }
+      result[idx] = valsDimension[val];
+    }
+  });
+  return result;
+}
+
+/** Select the scale transform from an OME metadata object with coordinate transforms, and return it in TCZYX order */
+export function getScale(dataset, orderTCZYX) {
+  const transforms = dataset.coordinateTransformations;
+  if (transforms === undefined) {
+    console.warn("WARNING: OMEZarrLoader: no coordinate transformations for scale level.");
+    return [1, 1, 1, 1, 1];
+  }
+
+  // this assumes we'll never encounter the "path" variant
+  const isScaleTransform = t => t.type === "scale";
+
+  // there can be any number of coordinateTransformations,
+  // but there must be only one of type "scale".
+  const scaleTransform = transforms.find(isScaleTransform);
+  if (!scaleTransform) {
+    console.warn(`WARNING: OMEZarrLoader: no coordinate transformation of type "scale" for scale level.`);
+    return [1, 1, 1, 1, 1];
+  }
+  const scale = scaleTransform.scale.slice();
+  return orderByTCZYX(scale, orderTCZYX, 1);
+}
+
+/**
+ * Defines a partial order of zarr arrays based on their size. Specifically:
+ * - If array size x, y, z are all equal, the arrays are equal
+ * - otherwise, if all xyz of `a` are less than or equal to those of `b`, `a` is less than `b` (and vice versa)
+ * - if some xyz is less and some is greater, the arrays are uncomparable
+ */
+function compareZarrArraySize(aArr, aTCZYX, bArr, bTCZYX) {
+  const aZ = aTCZYX[2] > -1 ? aArr.shape[aTCZYX[2]] : 1;
+  const bZ = bTCZYX[2] > -1 ? bArr.shape[bTCZYX[2]] : 1;
+  const diffZ = aZ - bZ;
+  const diffY = aArr.shape[aTCZYX[3]] - bArr.shape[bTCZYX[3]];
+  const diffX = aArr.shape[aTCZYX[4]] - bArr.shape[bTCZYX[4]];
+  if (diffZ === 0 && diffY === 0 && diffX === 0) {
+    return 0;
+  } else if (diffZ <= 0 && diffY <= 0 && diffX <= 0) {
+    return -1;
+  } else if (diffZ >= 0 && diffY >= 0 && diffX >= 0) {
+    return 1;
+  } else {
+    return undefined;
+  }
+}
+
+const EPSILON = 0.00001;
+const aboutEquals = (a, b) => Math.abs(a - b) < EPSILON;
+
+function scaleTransformsAreEqual(aSrc, aLevel, bSrc, bLevel) {
+  const aScale = getScale(aSrc.multiscaleMetadata.datasets[aLevel], aSrc.axesTCZYX);
+  const bScale = getScale(bSrc.multiscaleMetadata.datasets[bLevel], bSrc.axesTCZYX);
+  return aboutEquals(aScale[2], bScale[2]) && aboutEquals(aScale[3], bScale[3]) && aboutEquals(aScale[4], bScale[4]);
+}
+
+/**
+ * Ensures that all scale levels in `sources` are matched up by size. More precisely: enforces that, for any scale
+ * level `i`, the size of zarr array `s[i]` is equal for every source `s`. We accomplish this by removing any arrays
+ * (and their associated OME dataset metadata) which don't match up in all sources.
+ *
+ * Note that this function modifies the input `sources` array rather than returning a new value.
+ *
+ * Assumes all sources have scale levels ordered by size from largest to smallest. (This should always be true for
+ * compliant OME-Zarr data.)
+ */
+export function matchSourceScaleLevels(sources) {
+  if (sources.length < 2) {
+    return;
+  }
+
+  // Save matching scale levels and metadata here
+  const matchedLevels = Array.from({ length: sources.length }, () => []);
+  const matchedMetas = Array.from({ length: sources.length }, () => []);
+
+  // Start as many index counters as we have sources
+  const scaleIndexes = new Array(sources.length).fill(0);
+  while (scaleIndexes.every((val, idx) => val < sources[idx].scaleLevels.length)) {
+    // First pass: find the smallest source / determine if all sources are equal
+    let allEqual = true;
+    let smallestIdx = 0;
+    let smallestSrc = sources[0];
+    let smallestArr = smallestSrc.scaleLevels[scaleIndexes[0]];
+    for (let currentIdx = 1; currentIdx < sources.length; currentIdx++) {
+      const currentSrc = sources[currentIdx];
+      const currentArr = currentSrc.scaleLevels[scaleIndexes[currentIdx]];
+      const ordering = compareZarrArraySize(smallestArr, smallestSrc.axesTCZYX, currentArr, currentSrc.axesTCZYX);
+      if (!ordering) {
+        // Arrays are equal, or they are uncomparable
+        if (ordering === undefined) {
+          throw new VolumeLoadError("Incompatible zarr arrays: pixel dimensions are mismatched", { type: VolumeLoadErrorType.INVALID_MULTI_SOURCE_ZARR });
+        }
+
+        // Now we know the arrays are equal, but they may still be invalid to match up because...
+        // ...they have different scale transformations
+        if (!scaleTransformsAreEqual(smallestSrc, scaleIndexes[smallestIdx], currentSrc, scaleIndexes[currentIdx])) {
+          // today we are going to treat this as a warning.
+          // For our implementation it is enough that the xyz pixel ranges are the same.
+          // Ideally scale*arraysize=physical size is really the quantity that should be equal, for combining two volume data sets as channels.
+          console.warn("Incompatible zarr arrays: scale levels of equal size have different scale transformations");
+        }
+
+        // ...they have different numbers of timesteps
+        const largestT = smallestSrc.axesTCZYX[0] > -1 ? smallestArr.shape[smallestSrc.axesTCZYX[0]] : 1;
+        const currentT = currentSrc.axesTCZYX[0] > -1 ? currentArr.shape[currentSrc.axesTCZYX[0]] : 1;
+        if (largestT !== currentT) {
+          // we also treat this as a warning.
+          // In OmeZarrLoader we will take the minimum T size of all sources
+          console.warn(`Incompatible zarr arrays: different numbers of timesteps: ${largestT} vs ${currentT}`);
+        }
+      } else {
+        allEqual = false;
+        if (ordering > 0) {
+          smallestIdx = currentIdx;
+          smallestSrc = currentSrc;
+          smallestArr = currentArr;
+        }
+      }
+    }
+    if (allEqual) {
+      // We've found a matching set of scale levels! Save it and increment all indexes
+      for (let i = 0; i < scaleIndexes.length; i++) {
+        const currentSrc = sources[i];
+        const matchedScaleLevel = scaleIndexes[i];
+        matchedLevels[i].push(currentSrc.scaleLevels[matchedScaleLevel]);
+        matchedMetas[i].push(currentSrc.multiscaleMetadata.datasets[matchedScaleLevel]);
+        scaleIndexes[i] += 1;
+      }
+    } else {
+      // Increment the indexes of the sources which are larger than the smallest
+      for (const [idx, srcIdx] of scaleIndexes.entries()) {
+        const currentSrc = sources[idx];
+        const currentArr = currentSrc.scaleLevels[srcIdx];
+        const ordering = compareZarrArraySize(smallestArr, smallestSrc.axesTCZYX, currentArr, currentSrc.axesTCZYX);
+        if (ordering !== 0) {
+          scaleIndexes[idx] += 1;
+        }
+      }
+    }
+  }
+  if (sources[0].scaleLevels.length === 0) {
+    throw new VolumeLoadError("Incompatible zarr arrays: no sets of scale levels found that matched in all sources", { type: VolumeLoadErrorType.INVALID_MULTI_SOURCE_ZARR });
+  }
+  for (let i = 0; i < sources.length; i++) {
+    sources[i].scaleLevels = matchedLevels[i];
+    sources[i].multiscaleMetadata.datasets = matchedMetas[i];
+  }
+}
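
A worked example of the reordering helpers above (illustrative, not part of the package): an OME-Zarr with axes [t, z, y, x] and no channel axis yields `orderTCZYX = [0, -1, 1, 2, 3]`; `orderByDimension` then drops the missing C entry, and `orderByTCZYX` restores it with a default. The import path is assumed:

    import { remapAxesToTCZYX, orderByDimension, orderByTCZYX } from "./utils.js"; // path assumed

    const axes = [{ name: "t" }, { name: "z" }, { name: "y" }, { name: "x" }];
    const orderTCZYX = remapAxesToTCZYX(axes);            // [0, -1, 1, 2, 3]

    // TCZYX values -> the array's own dimension order (4D here, since C is absent)
    orderByDimension([10, 1, 64, 256, 256], orderTCZYX);  // [10, 64, 256, 256]

    // dimension order -> TCZYX, filling the missing C with the default value 1
    orderByTCZYX([10, 64, 256, 256], orderTCZYX, 1);      // [10, 1, 64, 256, 256]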
package/es/loaders/zarr_utils/validation.js
@@ -0,0 +1,49 @@
+import { VolumeLoadError, VolumeLoadErrorType } from "../VolumeLoadError.js";
+
+function isObjectWithProp(obj, prop) {
+  return typeof obj === "object" && obj !== null && prop in obj;
+}
+
+function assertMetadataHasProp(obj, prop, name = "zarr") {
+  if (!isObjectWithProp(obj, prop)) {
+    throw new VolumeLoadError(`${name} metadata is missing required entry "${prop}"`, { type: VolumeLoadErrorType.INVALID_METADATA });
+  }
+}
+
+function assertPropIsArray(obj, prop, name = "zarr") {
+  if (!Array.isArray(obj[prop])) {
+    throw new VolumeLoadError(`${name} metadata entry "${prop}" is not an array`, { type: VolumeLoadErrorType.INVALID_METADATA });
+  }
+}
+
+/**
+ * Validates that the `OMEZarrMetadata` record `data` has the minimal amount of data required to open a volume. Since
+ * we only ever open one multiscale, we only validate the multiscale metadata record at index `multiscaleIdx` here.
+ * `name` is used in error messages to identify the source of the metadata.
+ */
+export function validateOMEZarrMetadata(data, multiscaleIdx = 0, name = "zarr") {
+  // data is an object with a key "multiscales", which is an array
+  assertMetadataHasProp(data, "multiscales", name);
+  assertPropIsArray(data, "multiscales", name);
+
+  // check that a multiscale metadata entry exists at `multiscaleIdx`
+  const multiscaleMeta = data.multiscales[multiscaleIdx];
+  if (!multiscaleMeta) {
+    throw new VolumeLoadError(`${name} metadata does not have requested multiscale level ${multiscaleIdx}`, { type: VolumeLoadErrorType.INVALID_METADATA });
+  }
+  const multiscaleMetaName = isObjectWithProp(multiscaleMeta, "name") ? ` ("${multiscaleMeta.name}")` : "";
+  const multiscaleName = `${name} multiscale ${multiscaleIdx}${multiscaleMetaName}`;
+
+  // multiscale has a key "axes", which is an array. Each axis has a "name".
+  assertMetadataHasProp(multiscaleMeta, "axes", multiscaleName);
+  assertPropIsArray(multiscaleMeta, "axes", multiscaleName);
+  multiscaleMeta.axes.forEach((axis, i) => assertMetadataHasProp(axis, "name", `${multiscaleName} axis ${i}`));
+
+  // multiscale has a key "datasets", which is an array. Each dataset has a "path".
+  assertMetadataHasProp(multiscaleMeta, "datasets", name);
+  assertPropIsArray(multiscaleMeta, "datasets", name);
+  multiscaleMeta.datasets.forEach((data, i) => assertMetadataHasProp(data, "path", `${multiscaleName} dataset ${i}`));
+}
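
A minimal sketch of calling the validator on fetched OME-Zarr attributes; the metadata object shape follows the NGFF spec linked earlier, and the import path is assumed:

    import { validateOMEZarrMetadata } from "./validation.js"; // path assumed

    const attrs = {
      multiscales: [{
        name: "example",
        axes: [{ name: "t" }, { name: "z" }, { name: "y" }, { name: "x" }],
        datasets: [{ path: "0" }, { path: "1" }],
      }],
    };
    validateOMEZarrMetadata(attrs, 0, "demo");  // passes
    validateOMEZarrMetadata({}, 0, "demo");     // throws VolumeLoadError (INVALID_METADATA)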
package/es/test/ChunkPrefetchIterator.test.js
@@ -0,0 +1,208 @@
+import { expect } from "chai";
+
+import ChunkPrefetchIterator from "../loaders/zarr_utils/ChunkPrefetchIterator";
+import { PrefetchDirection } from "../loaders/zarr_utils/types";
+
+const EXPECTED_3X3X3X3 = [
+  [0, 0, 1, 1, 1], // T-
+  [2, 0, 1, 1, 1], // T+
+  [1, 0, 0, 1, 1], // Z-
+  [1, 0, 2, 1, 1], // Z+
+  [1, 0, 1, 0, 1], // Y-
+  [1, 0, 1, 2, 1], // Y+
+  [1, 0, 1, 1, 0], // X-
+  [1, 0, 1, 1, 2], // X+
+];
+
+// move from the middle of a 3x3x3x3 cube to the middle of a 5x5x5x5 cube
+const EXPECTED_5X5X5X5_1 = EXPECTED_3X3X3X3.map(([t, c, z, y, x]) => [t + 1, c, z + 1, y + 1, x + 1]);
+// offset = 2!
+const EXPECTED_5X5X5X5_2 = [
+  [0, 0, 2, 2, 2], // T--
+  [4, 0, 2, 2, 2], // T++
+  [2, 0, 0, 2, 2], // Z--
+  [2, 0, 4, 2, 2], // Z++
+  [2, 0, 2, 0, 2], // Y--
+  [2, 0, 2, 4, 2], // Y++
+  [2, 0, 2, 2, 0], // X--
+  [2, 0, 2, 2, 4], // X++
+];
+
+function validate(iter, expected) {
+  expect([...iter]).to.deep.equal(expected);
+}
+
+describe("ChunkPrefetchIterator", () => {
+  it("iterates outward in TZYX order, negative then positive", () => {
+    // 3x3x3x3, with one chunk in the center
+    const iterator = new ChunkPrefetchIterator([[1, 0, 1, 1, 1]], [1, 1, 1, 1], [[3, 1, 3, 3, 3]]);
+    validate(iterator, EXPECTED_3X3X3X3);
+  });
+
+  it("finds the borders of a set of multiple chunks and iterates outward from them", () => {
+    // 4x4x4, with a 2x2x2 set of chunks in the center
+    const fetchedChunks = [
+      [1, 0, 1, 1, 1], [1, 0, 1, 1, 2], [1, 0, 1, 2, 1], [1, 0, 1, 2, 2],
+      [1, 0, 2, 1, 1], [1, 0, 2, 1, 2], [1, 0, 2, 2, 1], [1, 0, 2, 2, 2],
+    ];
+    const iterator = new ChunkPrefetchIterator(fetchedChunks, [1, 1, 1, 1], [[3, 1, 4, 4, 4]]);
+    const expected = [
+      ...fetchedChunks.map(([_t, c, z, y, x]) => [0, c, z, y, x]), // T-
+      ...fetchedChunks.map(([_t, c, z, y, x]) => [2, c, z, y, x]), // T+
+      ...fetchedChunks.filter(([_t, _c, z, _y, _x]) => z === 1).map(([t, c, _z, y, x]) => [t, c, 0, y, x]), // Z-
+      ...fetchedChunks.filter(([_t, _c, z, _y, _x]) => z === 2).map(([t, c, _z, y, x]) => [t, c, 3, y, x]), // Z+
+      ...fetchedChunks.filter(([_t, _c, _z, y, _x]) => y === 1).map(([t, c, z, _y, x]) => [t, c, z, 0, x]), // Y-
+      ...fetchedChunks.filter(([_t, _c, _z, y, _x]) => y === 2).map(([t, c, z, _y, x]) => [t, c, z, 3, x]), // Y+
+      ...fetchedChunks.filter(([_t, _c, _z, _y, x]) => x === 1).map(([t, c, z, y, _x]) => [t, c, z, y, 0]), // X-
+      ...fetchedChunks.filter(([_t, _c, _z, _y, x]) => x === 2).map(([t, c, z, y, _x]) => [t, c, z, y, 3]), // X+
+    ];
+    validate(iterator, expected);
+  });
+
+  it("iterates through the same offset in all dimensions before increasing the offset", () => {
+    // 5x5x5, with one chunk in the center
+    const iterator = new ChunkPrefetchIterator([[2, 0, 2, 2, 2]], [2, 2, 2, 2], [[5, 1, 5, 5, 5]]);
+    const expected = [
+      ...EXPECTED_5X5X5X5_1, // offset = 1
+      ...EXPECTED_5X5X5X5_2, // offset = 2!
+    ];
+    validate(iterator, expected);
+  });
+
+  it("stops at the max offset in each dimension", () => {
+    // 5x5x5, with one chunk in the center
+    const iterator = new ChunkPrefetchIterator([[2, 0, 2, 2, 2]], [1, 1, 1, 1], [[5, 1, 5, 5, 5]]);
+    validate(iterator, EXPECTED_5X5X5X5_1); // never reaches offset = 2, as it does above
+  });
+
+  it("stops at the borders of the zarr", () => {
+    // 3x3x3x3, with one chunk in the center
+    const iterator = new ChunkPrefetchIterator([[1, 0, 1, 1, 1]], [2, 2, 2, 2], [[3, 1, 3, 3, 3]]);
+    validate(iterator, EXPECTED_3X3X3X3);
+  });
+
+  it("does not iterate in dimensions which are entirely covered by the fetched set", () => {
+    // 3x3x3x3, with a 1x1x3x3 slice covering all of x and y
+    const fetchedChunks = [
+      [1, 0, 1, 0, 0], [1, 0, 1, 0, 1], [1, 0, 1, 0, 2],
+      [1, 0, 1, 1, 0], [1, 0, 1, 1, 1], [1, 0, 1, 1, 2],
+      [1, 0, 1, 2, 0], [1, 0, 1, 2, 1], [1, 0, 1, 2, 2],
+    ];
+    const iterator = new ChunkPrefetchIterator(fetchedChunks, [1, 1, 1, 1], [[3, 1, 3, 3, 3]]);
+    const expected = [
+      ...fetchedChunks.map(([_t, c, z, y, x]) => [0, c, z, y, x]), // T-
+      ...fetchedChunks.map(([_t, c, z, y, x]) => [2, c, z, y, x]), // T+
+      ...fetchedChunks.map(([t, c, _z, y, x]) => [t, c, 0, y, x]), // Z-
+      ...fetchedChunks.map(([t, c, _z, y, x]) => [t, c, 2, y, x]), // Z+
+      // skips x and y
+    ];
+    validate(iterator, expected);
+  });
+
+  it("does not iterate in dimensions where the max offset is 0", () => {
+    // 3x3x3x3, with one chunk in the center
+    const iterator = new ChunkPrefetchIterator([[1, 0, 1, 1, 1]], [1, 0, 1, 0], [[3, 1, 3, 3, 3]]);
+    const expected = [
+      [0, 0, 1, 1, 1], // T-
+      [2, 0, 1, 1, 1], // T+
+      // skips z
+      [1, 0, 1, 0, 1], // Y-
+      [1, 0, 1, 2, 1], // Y+
+      // skips x
+    ];
+    validate(iterator, expected);
+  });
+
+  it("yields chunks in all prioritized directions first", () => {
+    // 5x5x5, with one chunk in the center
+    const iterator = new ChunkPrefetchIterator(
+      [[2, 0, 2, 2, 2]],
+      [2, 2, 2, 2],
+      [[5, 1, 5, 5, 5]],
+      [PrefetchDirection.T_PLUS, PrefetchDirection.Y_MINUS]
+    );
+    const expected = [
+      [3, 0, 2, 2, 2], // T+
+      [2, 0, 2, 1, 2], // Y-
+      [4, 0, 2, 2, 2], // T++
+      [2, 0, 2, 0, 2], // Y--
+      ...EXPECTED_5X5X5X5_1.filter(([t, _c, _z, y, _x]) => t <= 2 && y >= 2),
+      ...EXPECTED_5X5X5X5_2.filter(([t, _c, _z, y, _x]) => t <= 2 && y >= 2),
+    ];
+    validate(iterator, expected);
+  });
+
+  it("continues iterating in other dimensions when some reach their limits", () => {
+    // final boss: 4x4x6 volume with off-center fetched set
+    // t has a max offset of 2, but is already at its maximum of 2 and never iterates in positive direction
+    // z has a max offset of 2, but stops early in negative direction at 0
+    // y has a max offset of 2, but is already covered in the negative direction by chunks in the fetched set
+    // x has a max offset of 1, which stops iteration before it gets to either edge
+    const fetchedChunks = [[2, 0, 1, 0, 2], [2, 0, 1, 0, 3], [2, 0, 1, 1, 2], [2, 0, 1, 1, 3]];
+    const iterator = new ChunkPrefetchIterator(fetchedChunks, [2, 2, 2, 1], [[3, 1, 4, 4, 6]]);
+
+    // prettier-ignore
+    const expected = [
+      ...fetchedChunks.map(([_t, c, z, y, x]) => [1, c, z, y, x]), // T-
+      // skip t+: already at max t
+      ...fetchedChunks.map(([t, c, _z, y, x]) => [t, c, 0, y, x]), // Z-
+      ...fetchedChunks.map(([t, c, _z, y, x]) => [t, c, 2, y, x]), // Z+
+      // skip y-: already covered by fetched chunks
+      [2, 0, 1, 2, 2], [2, 0, 1, 2, 3], // Y+
+      [2, 0, 1, 0, 1], [2, 0, 1, 1, 1], // X-
+      [2, 0, 1, 0, 4], [2, 0, 1, 1, 4], // X+
+      ...fetchedChunks.map(([_t, c, z, y, x]) => [0, c, z, y, x]), // T--
+      // skip t++: still at max t
+      // skip z--: already reached z = 0 above
+      ...fetchedChunks.map(([t, c, _z, y, x]) => [t, c, 3, y, x]), // Z++
+      // skip y-: still already covered by fetched chunks
+      [2, 0, 1, 3, 2], [2, 0, 1, 3, 3], // Y+
+      // skip x: already at max offset in x
+    ];
+    validate(iterator, expected);
+  });
+
+  it("correctly handles sources with differing chunk dimensions", () => {
+    const allChannels = (x, y, ch = [0, 1, 2, 3]) => ch.map(c => [0, c, 0, y, x]);
+    const iterator = new ChunkPrefetchIterator(
+      allChannels(1, 2),
+      [0, 0, 2, 2],
+      [[0, 1, 0, 4, 3], [0, 2, 0, 5, 2], [0, 1, 0, 3, 4]]
+    );
+    const expected = [
+      ...allChannels(1, 1), // Y-
+      ...allChannels(1, 3, [0, 1, 2]), // Y+: channel 3 is maxed out
+      ...allChannels(0, 2), // X-
+      ...allChannels(2, 2, [0, 3]), // X+: channels 1 and 2 are maxed out
+      ...allChannels(1, 0), // Y--
+      ...allChannels(1, 4, [1, 2]), // Y++: channels 0 and 3 are maxed out
+      // skip X--
+      [0, 3, 0, 2, 3], // X++: all channels but channel 3 are maxed out
+    ];
+    validate(iterator, expected);
+  });
+});
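
Read together, the tests above give the iterator's contract: construct it with the TCZYX coordinates of already-fetched chunks, per-dimension TZYX offset limits, the TCZYX chunk-grid shape of each source, and optionally a set of prioritized directions, then iterate to receive prefetch candidates in priority order. A sketch of driving it; `fetchChunk` is a hypothetical callback, and the import paths are assumed:

    import ChunkPrefetchIterator from "./ChunkPrefetchIterator.js"; // path assumed
    import { PrefetchDirection } from "./types.js";

    const iterator = new ChunkPrefetchIterator(
      [[1, 0, 1, 1, 1]],          // TCZYX coordinates of chunks already fetched
      [1, 1, 1, 1],               // max prefetch offset per T/Z/Y/X dimension
      [[3, 1, 3, 3, 3]],          // TCZYX chunk-grid shape of each source
      [PrefetchDirection.T_PLUS]  // optional: directions to exhaust first
    );
    for (const chunk of iterator) {
      fetchChunk(chunk, /* isPrefetch */ true); // `fetchChunk` is hypothetical
    }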