@aics/vole-core 3.12.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE.txt +26 -0
- package/README.md +119 -0
- package/es/Atlas2DSlice.js +224 -0
- package/es/Channel.js +264 -0
- package/es/FileSaver.js +31 -0
- package/es/FusedChannelData.js +192 -0
- package/es/Histogram.js +250 -0
- package/es/ImageInfo.js +127 -0
- package/es/Light.js +74 -0
- package/es/Lut.js +500 -0
- package/es/MarchingCubes.js +507 -0
- package/es/MeshVolume.js +334 -0
- package/es/NaiveSurfaceNets.js +251 -0
- package/es/PathTracedVolume.js +482 -0
- package/es/RayMarchedAtlasVolume.js +250 -0
- package/es/RenderToBuffer.js +31 -0
- package/es/ThreeJsPanel.js +633 -0
- package/es/Timing.js +28 -0
- package/es/TrackballControls.js +538 -0
- package/es/View3d.js +848 -0
- package/es/Volume.js +352 -0
- package/es/VolumeCache.js +161 -0
- package/es/VolumeDims.js +16 -0
- package/es/VolumeDrawable.js +702 -0
- package/es/VolumeMaker.js +101 -0
- package/es/VolumeRenderImpl.js +1 -0
- package/es/VolumeRenderSettings.js +203 -0
- package/es/constants/basicShaders.js +29 -0
- package/es/constants/colors.js +59 -0
- package/es/constants/denoiseShader.js +43 -0
- package/es/constants/lights.js +42 -0
- package/es/constants/materials.js +85 -0
- package/es/constants/pathtraceOutputShader.js +13 -0
- package/es/constants/scaleBarSVG.js +21 -0
- package/es/constants/time.js +34 -0
- package/es/constants/volumePTshader.js +153 -0
- package/es/constants/volumeRayMarchShader.js +123 -0
- package/es/constants/volumeSliceShader.js +115 -0
- package/es/index.js +21 -0
- package/es/loaders/IVolumeLoader.js +131 -0
- package/es/loaders/JsonImageInfoLoader.js +255 -0
- package/es/loaders/OmeZarrLoader.js +495 -0
- package/es/loaders/OpenCellLoader.js +65 -0
- package/es/loaders/RawArrayLoader.js +89 -0
- package/es/loaders/TiffLoader.js +219 -0
- package/es/loaders/VolumeLoadError.js +44 -0
- package/es/loaders/VolumeLoaderUtils.js +221 -0
- package/es/loaders/index.js +40 -0
- package/es/loaders/zarr_utils/ChunkPrefetchIterator.js +143 -0
- package/es/loaders/zarr_utils/WrappedStore.js +51 -0
- package/es/loaders/zarr_utils/types.js +24 -0
- package/es/loaders/zarr_utils/utils.js +225 -0
- package/es/loaders/zarr_utils/validation.js +49 -0
- package/es/test/ChunkPrefetchIterator.test.js +208 -0
- package/es/test/RequestQueue.test.js +442 -0
- package/es/test/SubscribableRequestQueue.test.js +244 -0
- package/es/test/VolumeCache.test.js +118 -0
- package/es/test/VolumeRenderSettings.test.js +71 -0
- package/es/test/lut.test.js +671 -0
- package/es/test/num_utils.test.js +140 -0
- package/es/test/volume.test.js +98 -0
- package/es/test/zarr_utils.test.js +358 -0
- package/es/types/Atlas2DSlice.d.ts +41 -0
- package/es/types/Channel.d.ts +44 -0
- package/es/types/FileSaver.d.ts +6 -0
- package/es/types/FusedChannelData.d.ts +26 -0
- package/es/types/Histogram.d.ts +57 -0
- package/es/types/ImageInfo.d.ts +87 -0
- package/es/types/Light.d.ts +27 -0
- package/es/types/Lut.d.ts +67 -0
- package/es/types/MarchingCubes.d.ts +53 -0
- package/es/types/MeshVolume.d.ts +40 -0
- package/es/types/NaiveSurfaceNets.d.ts +11 -0
- package/es/types/PathTracedVolume.d.ts +65 -0
- package/es/types/RayMarchedAtlasVolume.d.ts +41 -0
- package/es/types/RenderToBuffer.d.ts +17 -0
- package/es/types/ThreeJsPanel.d.ts +107 -0
- package/es/types/Timing.d.ts +11 -0
- package/es/types/TrackballControls.d.ts +51 -0
- package/es/types/View3d.d.ts +357 -0
- package/es/types/Volume.d.ts +152 -0
- package/es/types/VolumeCache.d.ts +43 -0
- package/es/types/VolumeDims.d.ts +28 -0
- package/es/types/VolumeDrawable.d.ts +108 -0
- package/es/types/VolumeMaker.d.ts +49 -0
- package/es/types/VolumeRenderImpl.d.ts +22 -0
- package/es/types/VolumeRenderSettings.d.ts +98 -0
- package/es/types/constants/basicShaders.d.ts +4 -0
- package/es/types/constants/colors.d.ts +2 -0
- package/es/types/constants/denoiseShader.d.ts +40 -0
- package/es/types/constants/lights.d.ts +38 -0
- package/es/types/constants/materials.d.ts +20 -0
- package/es/types/constants/pathtraceOutputShader.d.ts +11 -0
- package/es/types/constants/scaleBarSVG.d.ts +2 -0
- package/es/types/constants/time.d.ts +19 -0
- package/es/types/constants/volumePTshader.d.ts +137 -0
- package/es/types/constants/volumeRayMarchShader.d.ts +117 -0
- package/es/types/constants/volumeSliceShader.d.ts +109 -0
- package/es/types/glsl.d.js +0 -0
- package/es/types/index.d.ts +28 -0
- package/es/types/loaders/IVolumeLoader.d.ts +113 -0
- package/es/types/loaders/JsonImageInfoLoader.d.ts +80 -0
- package/es/types/loaders/OmeZarrLoader.d.ts +87 -0
- package/es/types/loaders/OpenCellLoader.d.ts +9 -0
- package/es/types/loaders/RawArrayLoader.d.ts +33 -0
- package/es/types/loaders/TiffLoader.d.ts +45 -0
- package/es/types/loaders/VolumeLoadError.d.ts +18 -0
- package/es/types/loaders/VolumeLoaderUtils.d.ts +38 -0
- package/es/types/loaders/index.d.ts +22 -0
- package/es/types/loaders/zarr_utils/ChunkPrefetchIterator.d.ts +22 -0
- package/es/types/loaders/zarr_utils/WrappedStore.d.ts +24 -0
- package/es/types/loaders/zarr_utils/types.d.ts +94 -0
- package/es/types/loaders/zarr_utils/utils.d.ts +23 -0
- package/es/types/loaders/zarr_utils/validation.d.ts +7 -0
- package/es/types/test/ChunkPrefetchIterator.test.d.ts +1 -0
- package/es/types/test/RequestQueue.test.d.ts +1 -0
- package/es/types/test/SubscribableRequestQueue.test.d.ts +1 -0
- package/es/types/test/VolumeCache.test.d.ts +1 -0
- package/es/types/test/VolumeRenderSettings.test.d.ts +1 -0
- package/es/types/test/lut.test.d.ts +1 -0
- package/es/types/test/num_utils.test.d.ts +1 -0
- package/es/types/test/volume.test.d.ts +1 -0
- package/es/types/test/zarr_utils.test.d.ts +1 -0
- package/es/types/types.d.ts +115 -0
- package/es/types/utils/RequestQueue.d.ts +112 -0
- package/es/types/utils/SubscribableRequestQueue.d.ts +52 -0
- package/es/types/utils/num_utils.d.ts +43 -0
- package/es/types/workers/VolumeLoaderContext.d.ts +106 -0
- package/es/types/workers/types.d.ts +101 -0
- package/es/types/workers/util.d.ts +3 -0
- package/es/types.js +75 -0
- package/es/typings.d.js +0 -0
- package/es/utils/RequestQueue.js +267 -0
- package/es/utils/SubscribableRequestQueue.js +187 -0
- package/es/utils/num_utils.js +231 -0
- package/es/workers/FetchTiffWorker.js +153 -0
- package/es/workers/VolumeLoadWorker.js +129 -0
- package/es/workers/VolumeLoaderContext.js +271 -0
- package/es/workers/types.js +41 -0
- package/es/workers/util.js +8 -0
- package/package.json +83 -0

package/es/loaders/OmeZarrLoader.js
@@ -0,0 +1,495 @@
import { Box3, Vector3 } from "three";
import * as zarr from "@zarrita/core";
import { get as zarrGet, slice } from "@zarrita/indexing";
// Importing `FetchStore` from its home subpackage (@zarrita/storage) causes errors.
// Getting it from the top-level package means we don't get its type. This is also a bug, but it's more acceptable.
import { FetchStore } from "zarrita";
import SubscribableRequestQueue from "../utils/SubscribableRequestQueue.js";
import { ThreadableVolumeLoader } from "./IVolumeLoader.js";
import { composeSubregion, computePackedAtlasDims, convertSubregionToPixels, pickLevelToLoad, unitNameToSymbol } from "./VolumeLoaderUtils.js";
import ChunkPrefetchIterator from "./zarr_utils/ChunkPrefetchIterator.js";
import WrappedStore from "./zarr_utils/WrappedStore.js";
import { getDimensionCount, getScale, getSourceChannelNames, matchSourceScaleLevels, orderByDimension, orderByTCZYX, remapAxesToTCZYX } from "./zarr_utils/utils.js";
import { VolumeLoadError, VolumeLoadErrorType, wrapVolumeLoadError } from "./VolumeLoadError.js";
import { validateOMEZarrMetadata } from "./zarr_utils/validation.js";
const CHUNK_REQUEST_CANCEL_REASON = "chunk request cancelled";

// returns the converted data and the original min and max values
function convertChannel(channelData, dtype) {
  // get min and max
  // TODO FIXME Histogram will also compute min and max!
  let min = channelData[0];
  let max = channelData[0];
  for (let i = 0; i < channelData.length; i++) {
    const val = channelData[i];
    if (val < min) {
      min = val;
    }
    if (val > max) {
      max = val;
    }
  }
  if (dtype === "float64") {
    // convert to float32
    const f32 = new Float32Array(channelData.length);
    for (let i = 0; i < channelData.length; i++) {
      f32[i] = channelData[i];
    }
    dtype = "float32";
    channelData = f32;
  }
  return {
    data: channelData,
    dtype,
    min,
    max
  };
}
const DEFAULT_FETCH_OPTIONS = {
  maxPrefetchDistance: [5, 5, 5, 5],
  maxPrefetchChunks: 30
};
class OMEZarrLoader extends ThreadableVolumeLoader {
  /** The ID of the subscriber responsible for "actual loads" (non-prefetch requests) */

  /** The ID of the subscriber responsible for prefetches, so that requests can be cancelled and reissued */

  // TODO: this property should definitely be owned by `Volume` if this loader is ever used by multiple volumes.
  // This may cause errors or incorrect results otherwise!

  syncChannels = false;
  constructor(
  /**
   * Array of records, each containing the objects and metadata we need to load from one source of multiscale zarr
   * data. See documentation on `ZarrSource` for more.
   */
  sources, /** Handle to a `SubscribableRequestQueue` for smart concurrency management and request cancelling/reissuing. */
  requestQueue, /** Options to configure (pre)fetching behavior. */
  fetchOptions = DEFAULT_FETCH_OPTIONS, /** Direction(s) to prioritize when prefetching. Stored separate from `fetchOptions` since it may be mutated. */
  priorityDirections = []) {
    super();
    this.sources = sources;
    this.requestQueue = requestQueue;
    this.fetchOptions = fetchOptions;
    this.priorityDirections = priorityDirections;
  }

  /**
   * Creates a new `OMEZarrLoader`.
   *
   * @param urls The URL(s) of the OME-Zarr data to load. If `urls` is an array, the loader will attempt to find scale
   * levels with exactly the same size in every source. If matching level(s) are available, the loader will produce a
   * volume containing all channels from every provided zarr in the order they appear in `urls`. If no matching sets
   * of scale levels are available, creation fails.
   * @param scenes The scene(s) to load from each URL. If `urls` is an array, `scenes` may either be an array of values
   * corresponding to each URL, or a single value to apply to all URLs. Default 0.
   * @param cache A cache to use for storing fetched data. If not provided, a new cache will be created.
   * @param queue A queue to use for managing requests. If not provided, a new queue will be created.
   * @param fetchOptions Options to configure (pre)fetching behavior.
   */
  static async createLoader(urls, scenes = 0, cache, queue, fetchOptions) {
    // Setup queue and store, get basic metadata
    if (!queue) {
      queue = new SubscribableRequestQueue(fetchOptions?.concurrencyLimit, fetchOptions?.prefetchConcurrencyLimit);
    }
    const urlsArr = Array.isArray(urls) ? urls : [urls];
    const scenesArr = Array.isArray(scenes) ? scenes : [scenes];

    // Create one `ZarrSource` per URL
    const sourceProms = urlsArr.map(async (url, i) => {
      const store = new WrappedStore(new FetchStore(url), cache, queue);
      const root = zarr.root(store);
      const group = await zarr.open(root, {
        kind: "group"
      }).catch(wrapVolumeLoadError(`Failed to open OME-Zarr data at ${url}`, VolumeLoadErrorType.NOT_FOUND));

      // Pick scene (multiscale)
      let scene = scenesArr[Math.min(i, scenesArr.length - 1)];
      if (scene > group.attrs.multiscales?.length) {
        console.warn(`WARNING: OMEZarrLoader: scene ${scene} is invalid. Using scene 0.`);
        scene = 0;
      }
      validateOMEZarrMetadata(group.attrs, scene, urlsArr.length > 1 ? `Zarr source ${i}` : "Zarr");
      const {
        multiscales,
        omero
      } = group.attrs;
      const multiscaleMetadata = multiscales[scene];

      // Open all scale levels of multiscale
      const lvlProms = multiscaleMetadata.datasets.map(({
        path
      }) => zarr.open(root.resolve(path), {
        kind: "array"
      }).catch(wrapVolumeLoadError(`Failed to open scale level ${path} of OME-Zarr data at ${url}`, VolumeLoadErrorType.NOT_FOUND)));
      const scaleLevels = await Promise.all(lvlProms);
      const axesTCZYX = remapAxesToTCZYX(multiscaleMetadata.axes);
      return {
        scaleLevels,
        multiscaleMetadata,
        omeroMetadata: omero,
        axesTCZYX,
        channelOffset: 0
      };
    });
    const sources = await Promise.all(sourceProms);

    // Set `channelOffset`s so we can match channel indices to sources
    let channelCount = 0;
    for (const s of sources) {
      s.channelOffset = channelCount;
      channelCount += s.omeroMetadata?.channels.length ?? s.scaleLevels[0].shape[s.axesTCZYX[1]];
    }
    // Ensure the sizes of all sources' scale levels are matched up. See this function's docs for more.
    matchSourceScaleLevels(sources);
    // TODO: if `matchSourceScaleLevels` returned successfully, every one of these sources' `multiscaleMetadata` is the
    // same in every field we care about, so we only ever use the first source's `multiscaleMetadata` after this point.
    // Should we only store one `OMEMultiscale` record total, rather than one per source?
    const priorityDirs = fetchOptions?.priorityDirections ? fetchOptions.priorityDirections.slice() : undefined;
    return new OMEZarrLoader(sources, queue, fetchOptions, priorityDirs);
  }
  getUnitSymbols() {
    const source = this.sources[0];
    // Assume all spatial axes in all sources have the same units - we have no means of storing per-axis unit symbols
    const xi = source.axesTCZYX[4];
    const spaceUnitName = source.multiscaleMetadata.axes[xi].unit;
    const spaceUnitSymbol = unitNameToSymbol(spaceUnitName) || spaceUnitName || "";
    const ti = source.axesTCZYX[0];
    const timeUnitName = ti > -1 ? source.multiscaleMetadata.axes[ti].unit : undefined;
    const timeUnitSymbol = unitNameToSymbol(timeUnitName) || timeUnitName || "";
    return [spaceUnitSymbol, timeUnitSymbol];
  }
  getLevelShapesZYX() {
    const source = this.sources[0];
    const [z, y, x] = source.axesTCZYX.slice(-3);
    return source.scaleLevels.map(({
      shape
    }) => [z === -1 ? 1 : shape[z], shape[y], shape[x]]);
  }
  getScale(level) {
    return getScale(this.sources[0].multiscaleMetadata.datasets[level], this.sources[0].axesTCZYX);
  }
  orderByDimension(valsTCZYX, sourceIdx = 0) {
    return orderByDimension(valsTCZYX, this.sources[sourceIdx].axesTCZYX);
  }
  orderByTCZYX(valsDimension, defaultValue, sourceIdx = 0) {
    return orderByTCZYX(valsDimension, this.sources[sourceIdx].axesTCZYX, defaultValue);
  }

  /**
   * Converts a volume channel index to the index of its zarr source and its channel index within that zarr.
   * e.g., if the loader has 2 sources, the first with 3 channels and the second with 2, then `matchChannelToSource(4)`
   * returns `[1, 1]` (the second channel of the second source).
   */
  matchChannelToSource(absoluteChannelIndex) {
    const lastSrcIdx = this.sources.length - 1;
    const lastSrc = this.sources[lastSrcIdx];
    const lastSrcNumChannels = lastSrc.scaleLevels[0].shape[lastSrc.axesTCZYX[1]];
    const maxChannelIndex = lastSrc.channelOffset + lastSrcNumChannels;
    if (absoluteChannelIndex > maxChannelIndex) {
      throw new VolumeLoadError(`Volume channel index ${absoluteChannelIndex} out of range (${maxChannelIndex} channels available)`, {
        type: VolumeLoadErrorType.INVALID_METADATA
      });
    }
    const firstGreaterIdx = this.sources.findIndex(src => src.channelOffset > absoluteChannelIndex);
    const sourceIndex = firstGreaterIdx === -1 ? lastSrcIdx : firstGreaterIdx - 1;
    const channelIndexInSource = absoluteChannelIndex - this.sources[sourceIndex].channelOffset;
    return {
      sourceIndex,
      channelIndexInSource
    };
  }

  /**
   * Change which directions to prioritize when prefetching. All chunks will be prefetched in these directions before
   * any chunks are prefetched in any other directions.
   */
  setPrefetchPriority(directions) {
    this.priorityDirections = directions;
  }
  syncMultichannelLoading(sync) {
    this.syncChannels = sync;
  }
  updateFetchOptions(options) {
    this.fetchOptions = {
      ...this.fetchOptions,
      ...options
    };
  }
  loadDims(loadSpec) {
    const [spaceUnit, timeUnit] = this.getUnitSymbols();
    // Compute subregion size so we can factor that in
    const maxExtent = this.maxExtent ?? new Box3(new Vector3(0, 0, 0), new Vector3(1, 1, 1));
    const subregion = composeSubregion(loadSpec.subregion, maxExtent);
    const regionSize = subregion.getSize(new Vector3());
    const regionArr = [1, 1, regionSize.z, regionSize.y, regionSize.x];
    const result = this.sources[0].scaleLevels.map((level, i) => {
      const scale = this.getScale(i);
      const dims = {
        spaceUnit: spaceUnit,
        timeUnit: timeUnit,
        shape: this.orderByTCZYX(level.shape, 1).map((val, idx) => Math.max(Math.ceil(val * regionArr[idx]), 1)),
        spacing: this.orderByTCZYX(scale, 1),
        dataType: level.dtype
      };
      return dims;
    });
    return Promise.resolve(result);
  }
  createImageInfo(loadSpec) {
    // We ensured most info (dims, chunks, etc.) matched between sources earlier, so we can just use the first source.
    const source0 = this.sources[0];
    const [t,, z, y, x] = source0.axesTCZYX;
    const hasT = t > -1;
    const hasZ = z > -1;
    const levelToLoad = pickLevelToLoad(loadSpec, this.getLevelShapesZYX());
    const shapeLv = source0.scaleLevels[levelToLoad].shape;
    const [spatialUnit, timeUnit] = this.getUnitSymbols();

    // Now we care about other sources: # of channels is the `channelOffset` of the last source plus its # of channels
    const sourceLast = this.sources[this.sources.length - 1];
    const cLast = sourceLast.axesTCZYX[1];
    const lastHasC = cLast > -1;
    const numChannels = sourceLast.channelOffset + (lastHasC ? sourceLast.scaleLevels[levelToLoad].shape[cLast] : 1);
    // we need to make sure that the corresponding matched shapes
    // use the min size of T
    let times = 1;
    if (hasT) {
      times = shapeLv[t];
      for (let i = 0; i < this.sources.length; i++) {
        const shape = this.sources[i].scaleLevels[levelToLoad].shape;
        const tindex = this.sources[i].axesTCZYX[0];
        if (shape[tindex] < times) {
          console.warn("The number of time points is not consistent across sources: ", shape[tindex], times);
          times = shape[tindex];
        }
      }
    }
    if (!this.maxExtent) {
      this.maxExtent = loadSpec.subregion.clone();
    }

    // from source 0:
    const pxDimsLv = convertSubregionToPixels(loadSpec.subregion, new Vector3(shapeLv[x], shapeLv[y], hasZ ? shapeLv[z] : 1));
    const pxSizeLv = pxDimsLv.getSize(new Vector3());
    const atlasTileDims = computePackedAtlasDims(pxSizeLv.z, pxSizeLv.x, pxSizeLv.y);

    // Channel names is the other place where we have to check every source
    // Track which channel names we've seen so far, so that we can rename them to avoid name collisions
    const channelNamesMap = new Map();
    const channelNames = this.sources.flatMap(src => {
      const sourceChannelNames = getSourceChannelNames(src);

      // Resolve name collisions
      return sourceChannelNames.map(channelName => {
        const numMatchingChannels = channelNamesMap.get(channelName);
        if (numMatchingChannels !== undefined) {
          // If e.g. we've seen channel "Membrane" once before, rename this one to "Membrane (1)"
          channelNamesMap.set(channelName, numMatchingChannels + 1);
          return `${channelName} (${numMatchingChannels})`;
        } else {
          channelNamesMap.set(channelName, 1);
          return channelName;
        }
      });
    });
    const alldims = source0.scaleLevels.map((level, i) => {
      const dims = {
        spaceUnit: spatialUnit,
        timeUnit: timeUnit,
        shape: this.orderByTCZYX(level.shape, 1),
        spacing: this.getScale(i),
        dataType: level.dtype
      };
      return dims;
    });
    const imgdata = {
      name: source0.omeroMetadata?.name || "Volume",
      atlasTileDims: [atlasTileDims.x, atlasTileDims.y],
      subregionSize: [pxSizeLv.x, pxSizeLv.y, pxSizeLv.z],
      subregionOffset: [0, 0, 0],
      combinedNumChannels: numChannels,
      channelNames,
      multiscaleLevel: levelToLoad,
      multiscaleLevelDims: alldims,
      transform: {
        translation: [0, 0, 0],
        rotation: [0, 0, 0],
        scale: [1, 1, 1]
      }
    };

    // The `LoadSpec` passed in at this stage should represent the subset which this loader loads, not that
    // which the volume contains. The volume contains the full extent of the subset recognized by this loader.
    const fullExtentLoadSpec = {
      ...loadSpec,
      subregion: new Box3(new Vector3(0, 0, 0), new Vector3(1, 1, 1))
    };
    return Promise.resolve({
      imageInfo: imgdata,
      loadSpec: fullExtentLoadSpec
    });
  }
  async prefetchChunk(scaleLevel, coords, subscriber) {
    const {
      store,
      path
    } = scaleLevel;
    const separator = path.endsWith("/") ? "" : "/";
    const key = path + separator + this.orderByDimension(coords).join("/");
    // Calling `get` and doing nothing with the result still triggers a cache check, fetch, and insertion
    await store.get(key, {
      subscriber,
      isPrefetch: true
    }).catch(wrapVolumeLoadError(`Unable to prefetch chunk with key ${key}`, VolumeLoadErrorType.LOAD_DATA_FAILED, CHUNK_REQUEST_CANCEL_REASON));
  }

  /** Reads a list of chunk keys requested by a `loadVolumeData` call and sets up appropriate prefetch requests. */
  beginPrefetch(keys, scaleLevel) {
    // Convert keys to arrays of coords
    const chunkCoords = keys.map(({
      sourceIdx,
      key
    }) => {
      const numDims = getDimensionCount(this.sources[sourceIdx].axesTCZYX);
      const coordsInDimensionOrder = key.trim().split("/").slice(-numDims).filter(s => s !== "").map(s => parseInt(s, 10));
      const sourceCoords = this.orderByTCZYX(coordsInDimensionOrder, 0, sourceIdx);
      // Convert source channel index to absolute channel index for `ChunkPrefetchIterator`'s benefit
      // (we match chunk coordinates output from `ChunkPrefetchIterator` back to sources below)
      sourceCoords[1] += this.sources[sourceIdx].channelOffset;
      return sourceCoords;
    });

    // Get number of chunks per dimension in every source array
    const chunkDimsTCZYX = this.sources.map(src => {
      const level = src.scaleLevels[scaleLevel];
      const chunkDimsUnordered = level.shape.map((dim, idx) => Math.ceil(dim / level.chunks[idx]));
      return this.orderByTCZYX(chunkDimsUnordered, 1);
    });
    // `ChunkPrefetchIterator` yields chunk coordinates in order of roughly how likely they are to be loaded next
    const prefetchIterator = new ChunkPrefetchIterator(chunkCoords, this.fetchOptions.maxPrefetchDistance, chunkDimsTCZYX, this.priorityDirections, this.fetchOptions.onlyPriorityDirections);
    const subscriber = this.requestQueue.addSubscriber();
    let prefetchCount = 0;
    for (const chunk of prefetchIterator) {
      if (prefetchCount >= this.fetchOptions.maxPrefetchChunks) {
        break;
      }
      // Match absolute channel coordinate back to source index and channel index
      const {
        sourceIndex,
        channelIndexInSource
      } = this.matchChannelToSource(chunk[1]);
      const sourceScaleLevel = this.sources[sourceIndex].scaleLevels[scaleLevel];
      chunk[1] = channelIndexInSource;
      this.prefetchChunk(sourceScaleLevel, chunk, subscriber);
      prefetchCount++;
    }

    // Clear out old prefetch requests (requests which also cover this new prefetch will be preserved)
    if (this.prefetchSubscriber !== undefined) {
      this.requestQueue.removeSubscriber(this.prefetchSubscriber, CHUNK_REQUEST_CANCEL_REASON);
    }
    this.prefetchSubscriber = subscriber;
  }
  updateImageInfoForLoad(imageInfo, loadSpec) {
    // Apply `this.maxExtent` to subregion, if it exists
    const maxExtent = this.maxExtent ?? new Box3(new Vector3(0, 0, 0), new Vector3(1, 1, 1));
    const subregion = composeSubregion(loadSpec.subregion, maxExtent);

    // Pick the level to load based on the subregion size
    const multiscaleLevel = pickLevelToLoad({
      ...loadSpec,
      subregion
    }, this.getLevelShapesZYX());
    const array0Shape = this.sources[0].scaleLevels[multiscaleLevel].shape;

    // Convert subregion to volume voxels
    const [z, y, x] = this.sources[0].axesTCZYX.slice(2);
    const regionPx = convertSubregionToPixels(subregion, new Vector3(array0Shape[x], array0Shape[y], z === -1 ? 1 : array0Shape[z]));

    // Derive other image info properties from subregion and level to load
    const subregionSize = regionPx.getSize(new Vector3());
    const atlasTileDims = computePackedAtlasDims(subregionSize.z, subregionSize.x, subregionSize.y);
    return {
      ...imageInfo,
      atlasTileDims: [atlasTileDims.x, atlasTileDims.y],
      subregionSize: [subregionSize.x, subregionSize.y, subregionSize.z],
      subregionOffset: [regionPx.min.x, regionPx.min.y, regionPx.min.z],
      multiscaleLevel
    };
  }
  async loadRawChannelData(imageInfo, loadSpec, onUpdateMetadata, onData) {
    // This seemingly useless line keeps a stable local copy of `syncChannels` which the async closures below capture
    // so that changes to `this.syncChannels` don't affect the behavior of loads in progress.
    const syncChannels = this.syncChannels;
    const updatedImageInfo = this.updateImageInfoForLoad(imageInfo, loadSpec);
    onUpdateMetadata(updatedImageInfo);
    const {
      combinedNumChannels,
      multiscaleLevel
    } = updatedImageInfo;
    const channelIndexes = loadSpec.channels ?? Array.from({
      length: combinedNumChannels
    }, (_, i) => i);
    const subscriber = this.requestQueue.addSubscriber();

    // Prefetch housekeeping: we want to save keys involved in this load to prefetch later
    const keys = [];
    const reportKeyBase = (sourceIdx, key, sub) => {
      if (sub === subscriber) {
        keys.push({
          sourceIdx,
          key
        });
      }
    };
    const resultChannelIndices = [];
    const resultChannelData = [];
    const resultChannelDtype = [];
    const resultChannelRanges = [];
    const channelPromises = channelIndexes.map(async ch => {
      // Build slice spec
      const min = new Vector3(...updatedImageInfo.subregionOffset);
      const max = min.clone().add(new Vector3(...updatedImageInfo.subregionSize));
      const {
        sourceIndex: sourceIdx,
        channelIndexInSource: sourceCh
      } = this.matchChannelToSource(ch);
      const unorderedSpec = [loadSpec.time, sourceCh, slice(min.z, max.z), slice(min.y, max.y), slice(min.x, max.x)];
      const level = this.sources[sourceIdx].scaleLevels[multiscaleLevel];
      const sliceSpec = this.orderByDimension(unorderedSpec, sourceIdx);
      const reportKey = (key, sub) => reportKeyBase(sourceIdx, key, sub);
      const result = await zarrGet(level, sliceSpec, {
        opts: {
          subscriber,
          reportKey
        }
      }).catch(wrapVolumeLoadError("Could not load OME-Zarr volume data", VolumeLoadErrorType.LOAD_DATA_FAILED, CHUNK_REQUEST_CANCEL_REASON));
      if (result?.data === undefined) {
        return;
      }
      const converted = convertChannel(result.data, level.dtype);
      if (syncChannels) {
        resultChannelDtype.push(converted.dtype);
        resultChannelData.push(converted.data);
        resultChannelIndices.push(ch);
        resultChannelRanges.push([converted.min, converted.max]);
      } else {
        onData([ch], [converted.dtype], [converted.data], [[converted.min, converted.max]]);
      }
    });

    // Cancel any in-flight requests from previous loads that aren't useful to this one
    if (this.loadSubscriber !== undefined) {
      this.requestQueue.removeSubscriber(this.loadSubscriber, CHUNK_REQUEST_CANCEL_REASON);
    }
    this.loadSubscriber = subscriber;
    this.beginPrefetch(keys, multiscaleLevel);
    await Promise.all(channelPromises);
    if (syncChannels) {
      onData(resultChannelIndices, resultChannelDtype, resultChannelData, resultChannelRanges);
    }
    this.requestQueue.removeSubscriber(subscriber, CHUNK_REQUEST_CANCEL_REASON);
  }
}
export { OMEZarrLoader };
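For orientation, a minimal usage sketch for the loader above (not part of the package). It assumes the top-level "@aics/vole-core" entry point re-exports `OMEZarrLoader`; the OME-Zarr URL is hypothetical, and only methods defined in the file above are called.

// Sketch only: URL and import path are assumptions.
import { Box3, Vector3 } from "three";
import { OMEZarrLoader } from "@aics/vole-core";

async function openExampleZarr() {
  // Pass an array of URLs to merge channels from several zarrs with matching scale levels.
  const loader = await OMEZarrLoader.createLoader(
    "https://example.org/data/sample.ome.zarr", // hypothetical URL
    0, // scene index
    undefined, // optional cache
    undefined, // optional queue (created internally when omitted)
    { maxPrefetchDistance: [2, 2, 2, 2], maxPrefetchChunks: 20 }
  );
  // Deliver all requested channels through a single onData callback per load.
  loader.syncMultichannelLoading(true);
  // One dims record per scale level; `subregion` is a normalized Box3 covering the full volume.
  const dims = await loader.loadDims({ subregion: new Box3(new Vector3(0, 0, 0), new Vector3(1, 1, 1)) });
  console.log(dims.map(d => d.shape)); // [T, C, Z, Y, X] per level
}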
package/es/loaders/OpenCellLoader.js
@@ -0,0 +1,65 @@
import { ThreadableVolumeLoader, LoadSpec } from "./IVolumeLoader.js";
import { computeAtlasSize } from "../ImageInfo.js";
import { JsonImageInfoLoader } from "./JsonImageInfoLoader.js";
import { getDataRange } from "../utils/num_utils.js";
class OpenCellLoader extends ThreadableVolumeLoader {
  async loadDims(_) {
    const d = {
      shape: [1, 2, 27, 600, 600],
      spacing: [1, 1, 2, 1, 1],
      spaceUnit: "",
      // unknown unit.
      dataType: "uint8",
      timeUnit: ""
    };
    return [d];
  }
  async createImageInfo(_loadSpec) {
    const numChannels = 2;

    // we know these are standardized to 600x600, two channels, one channel per jpg.
    const chnames = ["DNA", "Structure"];
    const imgdata = {
      name: "TEST",
      atlasTileDims: [27, 1],
      subregionSize: [600, 600, 27],
      subregionOffset: [0, 0, 0],
      combinedNumChannels: numChannels,
      channelNames: chnames,
      multiscaleLevel: 0,
      multiscaleLevelDims: [{
        shape: [1, numChannels, 27, 600, 600],
        spacing: [1, 1, 2, 1, 1],
        spaceUnit: "µm",
        timeUnit: "",
        dataType: "uint8"
      }],
      transform: {
        translation: [0, 0, 0],
        rotation: [0, 0, 0],
        scale: [1, 1, 1]
      }
    };

    // This loader uses no fields from `LoadSpec`. Initialize volume with defaults.
    return {
      imageInfo: imgdata,
      loadSpec: new LoadSpec()
    };
  }
  loadRawChannelData(imageInfo, _loadSpec, _onUpdateMetadata, onData) {
    // HQTILE or LQTILE
    // make a json metadata dict for the two channels:
    const urls = [{
      name: "https://opencell.czbiohub.org/data/opencell-microscopy/roi/czML0383-P0007/czML0383-P0007-A02-PML0308-S13_ROI-0424-0025-0600-0600-LQTILE-CH405.jpg",
      channels: [0]
    }, {
      name: "https://opencell.czbiohub.org/data/opencell-microscopy/roi/czML0383-P0007/czML0383-P0007-A02-PML0308-S13_ROI-0424-0025-0600-0600-LQTILE-CH488.jpg",
      channels: [1]
    }];
    const [w, h] = computeAtlasSize(imageInfo);
    // all data coming from this loader is natively 8-bit
    return JsonImageInfoLoader.loadVolumeAtlasData(urls, (ch, dtype, data) => onData(ch, dtype, data, data.map(getDataRange), [w, h]));
  }
}
export { OpenCellLoader };
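A similar sketch for the loader above (not part of the package); it assumes `OpenCellLoader` is re-exported from the top-level entry point. The loader ignores its `LoadSpec` arguments, so empty objects are passed.

// Sketch only: import path is an assumption.
import { OpenCellLoader } from "@aics/vole-core";

async function loadOpenCellExample() {
  const loader = new OpenCellLoader();
  const { imageInfo } = await loader.createImageInfo({});
  // Two 600x600x27 uint8 channels ("DNA", "Structure") fetched as JPEG atlas tiles.
  await loader.loadRawChannelData(imageInfo, {}, () => {}, (channels, dtypes, data, ranges) => {
    console.log("received channels", channels, ranges);
  });
}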
package/es/loaders/RawArrayLoader.js
@@ -0,0 +1,89 @@
import { Box3, Vector3 } from "three";
import { ThreadableVolumeLoader } from "./IVolumeLoader.js";
import { computePackedAtlasDims } from "./VolumeLoaderUtils.js";
import { getDataRange } from "../utils/num_utils.js";

// this is the form in which a 4D numpy array arrives as converted
// by jupyterlab into a js object.
// This loader does not yet support multiple time samples.

// minimal metadata for visualization

const convertImageInfo = json => {
  const atlasTileDims = computePackedAtlasDims(json.sizeZ, json.sizeX, json.sizeY);
  return {
    name: json.name,
    // assumption: the data is already sized to fit in our viewer's preferred
    // memory footprint (a tiled atlas texture as of this writing)
    atlasTileDims: [atlasTileDims.x, atlasTileDims.y],
    subregionSize: [json.sizeX, json.sizeY, json.sizeZ],
    subregionOffset: [0, 0, 0],
    combinedNumChannels: json.sizeC,
    channelNames: json.channelNames,
    channelColors: undefined,
    multiscaleLevel: 0,
    multiscaleLevelDims: [{
      shape: [1, json.sizeC, json.sizeZ, json.sizeY, json.sizeX],
      spacing: [1, 1, json.physicalPixelSize[2], json.physicalPixelSize[1], json.physicalPixelSize[0]],
      spaceUnit: json.spatialUnit || "μm",
      timeUnit: "s",
      dataType: "uint8"
    }],
    transform: {
      translation: [0, 0, 0],
      rotation: [0, 0, 0],
      scale: [1, 1, 1]
    },
    userData: json.userData
  };
};
class RawArrayLoader extends ThreadableVolumeLoader {
  constructor(rawData, rawDataInfo) {
    super();
    this.jsonInfo = rawDataInfo;
    this.data = rawData;
    // check consistent dims
    if (this.data.shape[0] !== this.jsonInfo.sizeC || this.data.shape[1] !== this.jsonInfo.sizeZ || this.data.shape[2] !== this.jsonInfo.sizeY || this.data.shape[3] !== this.jsonInfo.sizeX) {
      throw new Error("RawArrayLoader: data shape does not match metadata");
    }
  }
  async loadDims(_loadSpec) {
    const jsonInfo = this.jsonInfo;
    const d = {
      shape: [1, jsonInfo.sizeC, jsonInfo.sizeZ, jsonInfo.sizeY, jsonInfo.sizeX],
      spacing: [1, 1, jsonInfo.physicalPixelSize[2], jsonInfo.physicalPixelSize[1], jsonInfo.physicalPixelSize[0]],
      spaceUnit: jsonInfo.spatialUnit || "μm",
      dataType: "uint8",
      timeUnit: "s" // time unit not specified
    };
    return [d];
  }
  async createImageInfo(loadSpec) {
    return {
      imageInfo: convertImageInfo(this.jsonInfo),
      loadSpec
    };
  }
  loadRawChannelData(imageInfo, loadSpec, onUpdateMetadata, onData) {
    const requestedChannels = loadSpec.channels;
    const adjustedLoadSpec = {
      ...loadSpec,
      // `subregion` and `multiscaleLevel` are unused by this loader
      subregion: new Box3(new Vector3(0, 0, 0), new Vector3(1, 1, 1)),
      multiscaleLevel: 0
    };
    onUpdateMetadata(undefined, adjustedLoadSpec);
    for (let chindex = 0; chindex < imageInfo.combinedNumChannels; ++chindex) {
      if (requestedChannels && requestedChannels.length > 0 && !requestedChannels.includes(chindex)) {
        continue;
      }
      const volSizeBytes = this.data.shape[3] * this.data.shape[2] * this.data.shape[1]; // x*y*z pixels * 1 byte/pixel
      const channelData = new Uint8Array(this.data.buffer.buffer, chindex * volSizeBytes, volSizeBytes);
      const range = getDataRange(channelData);
      // all data coming from this loader is natively 8-bit
      onData([chindex], ["uint8"], [channelData], [range]);
    }
    return Promise.resolve();
  }
}
export { RawArrayLoader };
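A sketch of the in-memory inputs the loader above expects (not part of the package). Field names on `rawData` and `rawDataInfo` follow the constructor's shape check and `convertImageInfo`; the top-level re-export is an assumption.

// Sketch only: import path and sample sizes are assumptions.
import { RawArrayLoader } from "@aics/vole-core";

const sizeC = 2, sizeZ = 4, sizeY = 8, sizeX = 8;
const rawData = {
  shape: [sizeC, sizeZ, sizeY, sizeX], // CZYX, one uint8 sample per voxel
  buffer: new Uint8Array(sizeC * sizeZ * sizeY * sizeX) // filled with real data elsewhere
};
const rawDataInfo = {
  name: "in-memory volume",
  sizeX, sizeY, sizeZ, sizeC,
  physicalPixelSize: [1, 1, 2], // [x, y, z] spacing
  spatialUnit: "μm",
  channelNames: ["ch0", "ch1"]
};
const loader = new RawArrayLoader(rawData, rawDataInfo);
// `createImageInfo` followed by `loadRawChannelData` then delivers each requested channel as a Uint8Array view.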