@aics/vole-core 3.13.0 → 3.13.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +5 -2
- package/es/View3d.js +14 -3
- package/es/Volume.js +1 -1
- package/es/VolumeCache.js +10 -3
- package/es/loaders/JsonImageInfoLoader.js +2 -1
- package/es/loaders/OmeZarrLoader.js +30 -31
- package/es/loaders/VolumeLoadError.js +1 -1
- package/es/loaders/zarr_utils/ChunkPrefetchIterator.js +7 -0
- package/es/loaders/zarr_utils/validation.js +18 -7
- package/es/loaders/zarr_utils/wrapArray.js +39 -0
- package/es/types/View3d.d.ts +6 -2
- package/es/types/VolumeCache.d.ts +5 -2
- package/es/types/loaders/zarr_utils/types.d.ts +17 -12
- package/es/types/loaders/zarr_utils/validation.d.ts +14 -2
- package/es/types/loaders/zarr_utils/wrapArray.d.ts +7 -0
- package/package.json +3 -3
- package/es/loaders/zarr_utils/WrappedStore.js +0 -51
- package/es/types/loaders/zarr_utils/WrappedStore.d.ts +0 -24
package/README.md
CHANGED
@@ -1,6 +1,9 @@
 # Vol-E core
 
-
+
+
+
+**Vol-E core** is a WebGL canvas-based volume viewer. It can display multichannel volume data with high channel counts. The viewer is optimized for OME-Zarr files, and can prefetch and cache Zarr chunks in browser memory for performance.
 
 The Vol-E core package exposes several key modules:
 
@@ -61,7 +64,7 @@ loader.loadVolumeData(volume);
 
 ## React example
 
-See [vole-app](https://github.com/allen-cell-animated/vole-app) for a complete application that wraps
+See [vole-app](https://github.com/allen-cell-animated/vole-app) for a complete application that wraps Vol-E core in a React component.
 
 ## Acknowledgements
 
package/es/View3d.js
CHANGED
@@ -96,6 +96,9 @@ export class View3d {
   getDOMElement() {
     return this.canvas3d.containerdiv;
   }
+  getCanvasDOMElement() {
+    return this.canvas3d.renderer.domElement;
+  }
   getCameraState() {
     return this.canvas3d.getCameraState();
   }
@@ -106,9 +109,16 @@
 
   /**
    * Force a redraw.
+   * @param synchronous If true, the redraw will be done synchronously. If false (default), the
+   * redraw will be done asynchronously via `requestAnimationFrame`. Redraws should be done async
+   * whenever possible for the best performance.
    */
-  redraw() {
-
+  redraw(synchronous = false) {
+    if (synchronous) {
+      this.canvas3d.onAnimationLoop();
+    } else {
+      this.canvas3d.redraw();
+    }
   }
   unsetImage() {
     if (this.image) {
@@ -206,10 +216,11 @@
   }
   setTime(volume, time, onChannelLoaded) {
     const timeClamped = Math.max(0, Math.min(time, volume.imageInfo.times - 1));
-    volume.updateRequiredData({
+    const loadPromise = volume.updateRequiredData({
       time: timeClamped
     }, onChannelLoaded);
     this.updateTimestepIndicator(volume);
+    return loadPromise;
   }
 
   /**
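The two behavioral changes above are easiest to see from calling code. Below is a minimal, hypothetical sketch (not taken from the package) that assumes `View3d` and `Volume` are importable from the package root; it awaits the promise `setTime` now returns and opts into a synchronous redraw.

```ts
import { View3d, Volume } from "@aics/vole-core"; // assumed root exports

// Jump to a timepoint, wait for its channel data, then force an immediate frame.
async function showTimepoint(view3d: View3d, volume: Volume, t: number): Promise<void> {
  // setTime now returns the load promise from volume.updateRequiredData(...)
  await view3d.setTime(volume, t);
  // true = synchronous redraw; the default (false) still defers to requestAnimationFrame,
  // which the JSDoc above recommends for normal rendering.
  view3d.redraw(true);
}
```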
package/es/Volume.js
CHANGED
package/es/VolumeCache.js
CHANGED
@@ -1,3 +1,9 @@
+export const isChunk = data => data.data !== undefined;
+const chunkSize = ({
+  data
+}) => Array.isArray(data) ? data.length : data.byteLength;
+const dataSize = data => data.byteLength ?? chunkSize(data);
+
 /** Default: 250MB. Should be large enough to be useful but safe for most any computer that can run the app */
 const CACHE_MAX_SIZE_DEFAULT = 250_000_000;
 export default class VolumeCache {
@@ -31,7 +37,7 @@
   */
  removeEntryFromStore(entry) {
    this.entries.delete(entry.key);
-    this.currentSize -= entry.data
+    this.currentSize -= dataSize(entry.data);
  }
 
  /**
@@ -98,7 +104,8 @@
   * @returns {boolean} a boolean indicating whether the insertion succeeded.
   */
  insert(key, data) {
-
+    const size = dataSize(data);
+    if (size > this.maxSize) {
      console.error("VolumeCache: attempt to insert a single entry larger than the cache");
      return false;
    }
@@ -120,7 +127,7 @@
    };
    this.addEntryAsFirst(newEntry);
    this.entries.set(key, newEntry);
-    this.currentSize +=
+    this.currentSize += size;
 
    // Evict until size is within limit
    while (this.currentSize > this.maxSize) {
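With this change the cache holds either a raw `ArrayBuffer` (as the JSON atlas loader stores) or a zarrita `Chunk`, whose payload lives in its `data` array. A standalone sketch of the same size-accounting logic, written here with explicit types for illustration rather than imported from the package:

```ts
import type { Chunk, DataType } from "zarrita";

type CacheData = ArrayBuffer | Chunk<DataType>;

// A zarrita Chunk carries its payload in `.data`; a bare ArrayBuffer does not.
const isChunk = (data: CacheData): data is Chunk<DataType> =>
  (data as Chunk<DataType>).data !== undefined;

// Some dtypes yield a plain array rather than a typed array, so fall back to `.length` there.
const chunkSize = ({ data }: Chunk<DataType>): number =>
  Array.isArray(data) ? data.length : data.byteLength;

// Size used for the cache's currentSize bookkeeping, whichever shape the entry has.
const dataSize = (data: CacheData): number =>
  isChunk(data) ? chunkSize(data) : data.byteLength;
```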
package/es/loaders/JsonImageInfoLoader.js
CHANGED
@@ -1,6 +1,7 @@
 import { Box3, Vector3 } from "three";
 import { ThreadableVolumeLoader } from "./IVolumeLoader.js";
 import { computeAtlasSize } from "../ImageInfo.js";
+import { isChunk } from "../VolumeCache.js";
 import { getDataRange } from "../utils/num_utils.js";
 
 /* eslint-disable @typescript-eslint/naming-convention */
@@ -169,7 +170,7 @@ class JsonImageInfoLoader extends ThreadableVolumeLoader {
      for (let j = 0; j < Math.min(image.channels.length, 4); ++j) {
        const chindex = image.channels[j];
        const cacheResult = cache?.get(`${image.name}/${chindex}`);
-        if (cacheResult) {
+        if (cacheResult && !isChunk(cacheResult)) {
          // all data coming from this loader is natively 8-bit
          const channelData = new Uint8Array(cacheResult);
          if (syncChannels) {
package/es/loaders/OmeZarrLoader.js
CHANGED
@@ -1,17 +1,16 @@
 import { Box3, Vector3 } from "three";
-import * as zarr from "
-
-
-
-import { FetchStore } from "zarrita";
+import * as zarr from "zarrita";
+const {
+  slice
+} = zarr;
 import SubscribableRequestQueue from "../utils/SubscribableRequestQueue.js";
 import { ThreadableVolumeLoader } from "./IVolumeLoader.js";
 import { composeSubregion, computePackedAtlasDims, convertSubregionToPixels, pickLevelToLoad, unitNameToSymbol } from "./VolumeLoaderUtils.js";
 import ChunkPrefetchIterator from "./zarr_utils/ChunkPrefetchIterator.js";
-import
-import { getDimensionCount, getScale, getSourceChannelNames, matchSourceScaleLevels, orderByDimension, orderByTCZYX, remapAxesToTCZYX } from "./zarr_utils/utils.js";
+import { getScale, getSourceChannelNames, matchSourceScaleLevels, orderByDimension, orderByTCZYX, remapAxesToTCZYX } from "./zarr_utils/utils.js";
 import { VolumeLoadError, VolumeLoadErrorType, wrapVolumeLoadError } from "./VolumeLoadError.js";
-import
+import wrapArray from "./zarr_utils/wrapArray.js";
+import { assertMetadataHasMultiscales, toOMEZarrMetaV4, validateOMEZarrMetadata } from "./zarr_utils/validation.js";
 const CHUNK_REQUEST_CANCEL_REASON = "chunk request cancelled";
 
 // returns the converted data and the original min and max values
@@ -97,23 +96,26 @@ class OMEZarrLoader extends ThreadableVolumeLoader {
 
    // Create one `ZarrSource` per URL
    const sourceProms = urlsArr.map(async (url, i) => {
-      const store = new
+      const store = new zarr.FetchStore(url);
      const root = zarr.root(store);
      const group = await zarr.open(root, {
        kind: "group"
      }).catch(wrapVolumeLoadError(`Failed to open OME-Zarr data at ${url}`, VolumeLoadErrorType.NOT_FOUND));
+      const sourceName = urlsArr.length > 1 ? `Zarr source ${i}` : "Zarr";
+      const meta = toOMEZarrMetaV4(group.attrs);
+      assertMetadataHasMultiscales(meta, sourceName);
 
      // Pick scene (multiscale)
      let scene = scenesArr[Math.min(i, scenesArr.length - 1)];
-      if (scene >
+      if (scene > meta.multiscales?.length) {
        console.warn(`WARNING: OMEZarrLoader: scene ${scene} is invalid. Using scene 0.`);
        scene = 0;
      }
-      validateOMEZarrMetadata(
+      validateOMEZarrMetadata(meta, scene, sourceName);
      const {
        multiscales,
        omero
-      } =
+      } = meta;
      const multiscaleMetadata = multiscales[scene];
 
      // Open all scale levels of multiscale
@@ -121,7 +123,7 @@ class OMEZarrLoader extends ThreadableVolumeLoader {
        path
      }) => zarr.open(root.resolve(path), {
        kind: "array"
-      }).catch(wrapVolumeLoadError(`Failed to open scale level ${path} of OME-Zarr data at ${url}`, VolumeLoadErrorType.NOT_FOUND)));
+      }).then(array => wrapArray(array, url, cache, queue)).catch(wrapVolumeLoadError(`Failed to open scale level ${path} of OME-Zarr data at ${url}`, VolumeLoadErrorType.NOT_FOUND)));
      const scaleLevels = await Promise.all(lvlProms);
      const axesTCZYX = remapAxesToTCZYX(multiscaleMetadata.axes);
      return {
@@ -330,30 +332,25 @@ class OMEZarrLoader extends ThreadableVolumeLoader {
      loadSpec: fullExtentLoadSpec
    });
  }
-
-  const {
-    store,
-    path
-  } = scaleLevel;
-  const separator = path.endsWith("/") ? "" : "/";
-  const key = path + separator + this.orderByDimension(coords).join("/");
+  prefetchChunk(scaleLevel, coords, subscriber) {
    // Calling `get` and doing nothing with the result still triggers a cache check, fetch, and insertion
-
+    scaleLevel.getChunk(this.orderByDimension(coords), {
      subscriber,
      isPrefetch: true
-    }).catch(wrapVolumeLoadError(`Unable to prefetch chunk with
+    }).catch(wrapVolumeLoadError(`Unable to prefetch chunk with coords ${coords.join(", ")}`, VolumeLoadErrorType.LOAD_DATA_FAILED, CHUNK_REQUEST_CANCEL_REASON));
  }
 
  /** Reads a list of chunk keys requested by a `loadVolumeData` call and sets up appropriate prefetch requests. */
  beginPrefetch(keys, scaleLevel) {
    // Convert keys to arrays of coords
+    if (keys.length === 0) {
+      return;
+    }
    const chunkCoords = keys.map(({
      sourceIdx,
-
+      coords
    }) => {
-      const
-      const coordsInDimensionOrder = key.trim().split("/").slice(-numDims).filter(s => s !== "").map(s => parseInt(s, 10));
-      const sourceCoords = this.orderByTCZYX(coordsInDimensionOrder, 0, sourceIdx);
+      const sourceCoords = this.orderByTCZYX(coords, 0, sourceIdx);
      // Convert source channel index to absolute channel index for `ChunkPrefetchIterator`'s benefit
      // (we match chunk coordinates output from `ChunkPrefetchIterator` back to sources below)
      sourceCoords[1] += this.sources[sourceIdx].channelOffset;
@@ -366,6 +363,7 @@ class OMEZarrLoader extends ThreadableVolumeLoader {
      const chunkDimsUnordered = level.shape.map((dim, idx) => Math.ceil(dim / level.chunks[idx]));
      return this.orderByTCZYX(chunkDimsUnordered, 1);
    });
+
    // `ChunkPrefetchIterator` yields chunk coordinates in order of roughly how likely they are to be loaded next
    const prefetchIterator = new ChunkPrefetchIterator(chunkCoords, this.fetchOptions.maxPrefetchDistance, chunkDimsTCZYX, this.priorityDirections, this.fetchOptions.onlyPriorityDirections);
    const subscriber = this.requestQueue.addSubscriber();
@@ -435,11 +433,11 @@ class OMEZarrLoader extends ThreadableVolumeLoader {
 
    // Prefetch housekeeping: we want to save keys involved in this load to prefetch later
    const keys = [];
-    const
+    const reportChunkBase = (sourceIdx, coords, sub) => {
      if (sub === subscriber) {
        keys.push({
          sourceIdx,
-
+          coords
        });
      }
    };
@@ -458,11 +456,12 @@ class OMEZarrLoader extends ThreadableVolumeLoader {
      const unorderedSpec = [loadSpec.time, sourceCh, slice(min.z, max.z), slice(min.y, max.y), slice(min.x, max.x)];
      const level = this.sources[sourceIdx].scaleLevels[multiscaleLevel];
      const sliceSpec = this.orderByDimension(unorderedSpec, sourceIdx);
-      const
-
+      const reportChunk = (coords, sub) => reportChunkBase(sourceIdx, coords, sub);
+      console.log(level);
+      const result = await zarr.get(level, sliceSpec, {
        opts: {
          subscriber,
-
+          reportChunk
        }
      }).catch(wrapVolumeLoadError("Could not load OME-Zarr volume data", VolumeLoadErrorType.LOAD_DATA_FAILED, CHUNK_REQUEST_CANCEL_REASON));
      if (result?.data === undefined) {
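Taken together, these loader changes drop the deleted `WrappedStore` in favor of normalizing and validating metadata up front and wrapping each opened scale-level array. A rough, hypothetical sketch of the new per-URL open flow (error wrapping, multi-source handling, and the optional cache/queue are omitted; `toOMEZarrMetaV4`, `assertMetadataHasMultiscales`, `validateOMEZarrMetadata`, and `wrapArray` are the internal `zarr_utils` modules from this diff and are assumed to be in scope):

```ts
import * as zarr from "zarrita";

// Sketch only: mirrors the flow above for a single URL. wrapArray's cache and queue
// parameters are optional, so they are left out here.
async function openZarrSource(url: string, scene = 0) {
  const store = new zarr.FetchStore(url);
  const root = zarr.root(store);
  const group = await zarr.open(root, { kind: "group" });

  // Flatten v0.5 metadata (nested under `ome`) to the v0.4 shape, then validate
  // only the multiscale we intend to open.
  const meta = toOMEZarrMetaV4(group.attrs);
  assertMetadataHasMultiscales(meta, "Zarr");
  validateOMEZarrMetadata(meta, scene, "Zarr");

  // Open each scale level and wrap it so getChunk can report, queue, and cache chunks
  // (this replaces the store-level WrappedStore removed in this release).
  const multiscale = meta.multiscales[scene];
  const scaleLevels = await Promise.all(
    // datasets: [{ path }] per the OME-NGFF multiscales layout
    multiscale.datasets.map(({ path }) =>
      zarr.open(root.resolve(path), { kind: "array" }).then((arr) => wrapArray(arr, url))
    )
  );
  return { multiscale, scaleLevels };
}
```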
package/es/loaders/VolumeLoadError.js
CHANGED
@@ -1,5 +1,5 @@
 import { errorConstructors } from "serialize-error";
-import { NodeNotFoundError, KeyError } from "
+import { NodeNotFoundError, KeyError } from "zarrita";
 // geotiff doesn't export its error types...
 
 /** Groups possible load errors into a few broad categories which we can give similar guidance to the user about. */
package/es/loaders/zarr_utils/ChunkPrefetchIterator.js
CHANGED
@@ -34,6 +34,13 @@ export default class ChunkPrefetchIterator {
      updateMinMax(chunk[4], extrema[3]);
    }
 
+    // Bail out if we have any non-finite values in the extrema (the iterator will be empty)
+    if (extrema.flat().some(val => !Number.isFinite(val))) {
+      this.directionStates = [];
+      this.priorityDirectionStates = [];
+      return;
+    }
+
    // Create `PrefetchDirectionState`s for each direction
    this.directionStates = [];
    this.priorityDirectionStates = [];
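The guard above matters because the extrema start at ±Infinity and only become finite once at least one requested chunk has been folded in. A small illustration of the condition (the min/max pair layout below is an assumption based on the `extrema[3]` indexing above, not code from the package):

```ts
// Freshly initialized extrema: no chunks have been seen yet, so every bound is infinite.
const extrema: [number, number][] = [
  [Infinity, -Infinity], // T
  [Infinity, -Infinity], // Z
  [Infinity, -Infinity], // Y
  [Infinity, -Infinity], // X
];

// With no finite bounds there is nothing to prefetch around, so the iterator clears its
// direction states and yields no chunk coordinates instead of producing Infinity-sized ranges.
const nothingToPrefetch = extrema.flat().some((val) => !Number.isFinite(val)); // true
```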
package/es/loaders/zarr_utils/validation.js
CHANGED
@@ -1,4 +1,11 @@
 import { VolumeLoadError, VolumeLoadErrorType } from "../VolumeLoadError.js";
+/**
+ * If `meta` is the top-level metadata of a zarr node formatted according to the OME-Zarr spec version 0.5, returns
+ * the object formatted according to v0.4 of the spec. For our purposes this just means flattening out the `ome` key.
+ *
+ * Return type is `unknown` because this does no actual validation; use `validateOMEZarrMetadata` for that.
+ */
+export const toOMEZarrMetaV4 = meta => meta.ome ?? meta;
 function isObjectWithProp(obj, prop) {
   return typeof obj === "object" && obj !== null && prop in obj;
 }
@@ -17,18 +24,22 @@ function assertPropIsArray(obj, prop, name = "zarr") {
   }
 }
 
+/** Intermediate stage of validation, before we've picked a single multiscale to validate */
+
+export function assertMetadataHasMultiscales(meta, name = "zarr") {
+  // data is an object with a key "multiscales", which is a non-empty array
+  assertMetadataHasProp(meta, "multiscales", name);
+  assertPropIsArray(meta, "multiscales", name);
+}
+
 /**
- * Validates that the `OMEZarrMetadata` record `
+ * Validates that the `OMEZarrMetadata` record `meta` has the minimal amount of data required to open a volume. Since
  * we only ever open one multiscale, we only validate the multiscale metadata record at index `multiscaleIdx` here.
  * `name` is used in error messages to identify the source of the metadata.
  */
-export function validateOMEZarrMetadata(
-// data is an object with a key "multiscales", which is an array
-assertMetadataHasProp(data, "multiscales", name);
-assertPropIsArray(data, "multiscales", name);
-
+export function validateOMEZarrMetadata(meta, multiscaleIdx = 0, name = "zarr") {
   // check that a multiscale metadata entry exists at `multiscaleIdx`
-const multiscaleMeta =
+  const multiscaleMeta = meta.multiscales[multiscaleIdx];
   if (!multiscaleMeta) {
     throw new VolumeLoadError(`${name} metadata does not have requested multiscale level ${multiscaleIdx}`, {
       type: VolumeLoadErrorType.INVALID_METADATA
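A small, hypothetical usage sketch of the new helpers. The metadata objects below are made up for illustration, and whether such minimal metadata passes every remaining check isn't visible in this diff:

```ts
// OME-Zarr v0.5 nests its metadata under an "ome" key; v0.4 puts "multiscales" at the top level.
const v05Attrs = { ome: { version: "0.5", multiscales: [{ axes: [], datasets: [{ path: "0" }] }] } };
const v04Attrs = { multiscales: [{ axes: [], datasets: [{ path: "0" }] }] };

// Both normalize to the same v0.4-shaped object; no validation happens yet.
const metaA = toOMEZarrMetaV4(v05Attrs); // -> v05Attrs.ome
const metaB = toOMEZarrMetaV4(v04Attrs); // -> v04Attrs, unchanged

// Assert the broad shape before picking a multiscale, then validate only the one we'll open.
assertMetadataHasMultiscales(metaA, "example zarr"); // throws VolumeLoadError if "multiscales" is missing
validateOMEZarrMetadata(metaA, 0, "example zarr");   // throws VolumeLoadError if multiscale 0 is unusable
```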
package/es/loaders/zarr_utils/wrapArray.js
ADDED
@@ -0,0 +1,39 @@
+import { isChunk } from "../../VolumeCache.js";
+export default function wrapArray(array, basePath, cache, queue) {
+  const path = basePath.endsWith("/") ? basePath.slice(0, -1) : basePath;
+  const keyBase = path + array.path + (array.path.endsWith("/") ? "" : "/");
+  const getChunk = async (coords, opts) => {
+    if (opts?.subscriber && opts.reportChunk) {
+      opts.reportChunk(coords, opts.subscriber);
+    }
+    const fullKey = keyBase + coords.join(",");
+    const cacheResult = cache?.get(fullKey);
+    if (cacheResult && isChunk(cacheResult)) {
+      return cacheResult;
+    }
+    let result;
+    if (queue && opts?.subscriber) {
+      result = await queue.addRequest(fullKey, opts?.subscriber, () => array.getChunk(coords, opts), opts.isPrefetch);
+    } else {
+      result = await array.getChunk(coords, opts);
+    }
+    cache?.insert(fullKey, result);
+    return result;
+  };
+  return new Proxy(array, {
+    get: (target, prop) => {
+      if (prop === "getChunk") {
+        return getChunk;
+      }
+
+      // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Proxy#no_private_property_forwarding
+      const value = target[prop];
+      if (value instanceof Function) {
+        return function (...args) {
+          return value.apply(target, args);
+        };
+      }
+      return value;
+    }
+  });
+}
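In effect, the Proxy swaps in a caching, queue-aware `getChunk` while leaving every other property of the zarrita array untouched. A hypothetical sketch of how a wrapped scale level behaves (the array's store type is loosened to `any`, and typing the subscriber id as a number is an assumption about `SubscribableRequestQueue.addSubscriber`):

```ts
import type * as zarr from "zarrita";

// `wrapped` is an array returned by wrapArray(...); `subscriber` comes from the loader's
// request queue. Both are passed in as parameters to keep this sketch self-contained.
async function fetchChunkTwice(
  wrapped: zarr.Array<zarr.NumberDataType, any>,
  subscriber: number
) {
  const coords = [0, 0, 0, 0, 0]; // TCZYX chunk coordinates (5-D data assumed)

  // First call: reportChunk (if provided) would be invoked, the fetch is routed through
  // the request queue because a subscriber is present, and the resulting Chunk is cached
  // under a key like "<base path><array path>/0,0,0,0,0".
  await wrapped.getChunk(coords, { subscriber, isPrefetch: true });

  // A second call with the same coords is served straight from the VolumeCache.
  return wrapped.getChunk(coords, { subscriber, isPrefetch: false });
}
```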
package/es/types/View3d.d.ts
CHANGED
@@ -49,12 +49,16 @@ export declare class View3d {
     */
    capture(dataurlcallback: (name: string) => void): void;
    getDOMElement(): HTMLDivElement;
+    getCanvasDOMElement(): HTMLCanvasElement;
    getCameraState(): CameraState;
    setCameraState(transform: Partial<CameraState>): void;
    /**
     * Force a redraw.
+     * @param synchronous If true, the redraw will be done synchronously. If false (default), the
+     * redraw will be done asynchronously via `requestAnimationFrame`. Redraws should be done async
+     * whenever possible for the best performance.
     */
-    redraw(): void;
+    redraw(synchronous?: boolean): void;
    unsetImage(): VolumeDrawable | undefined;
    /**
     * Add a new volume image to the viewer. (The viewer currently only supports a single image at a time - adding repeatedly, without removing in between, is a potential resource leak)
@@ -92,7 +96,7 @@ export declare class View3d {
    onVolumeChannelAdded(volume: Volume, newChannelIndex: number): void;
    onVolumeLoadError(volume: Volume, error: unknown): void;
    setLoadErrorHandler(handler: ((volume: Volume, error: unknown) => void) | undefined): void;
-    setTime(volume: Volume, time: number, onChannelLoaded?: PerChannelCallback): void
+    setTime(volume: Volume, time: number, onChannelLoaded?: PerChannelCallback): Promise<void>;
    /**
     * Nudge the scale level loaded into this volume off the one chosen by the loader.
     * E.g. a bias of `1` will load 1 scale level lower than "ideal."
package/es/types/VolumeCache.d.ts
CHANGED
@@ -1,3 +1,6 @@
+import { Chunk, DataType } from "zarrita";
+export type CacheData = ArrayBuffer | Chunk<DataType>;
+export declare const isChunk: (data: CacheData) => data is Chunk<DataType>;
 export default class VolumeCache {
    private entries;
    readonly maxSize: number;
@@ -31,11 +34,11 @@ export default class VolumeCache {
     * Adds a new entry to the cache.
     * @returns {boolean} a boolean indicating whether the insertion succeeded.
     */
-    insert(key: string, data:
+    insert(key: string, data: CacheData): boolean;
    /** Internal implementation of `get`. Returns all entry metadata, not just the raw data. */
    private getEntry;
    /** Attempts to get a single entry from the cache. */
-    get(key: string):
+    get(key: string): CacheData | undefined;
    /** Clears all cache entries whose keys begin with the specified prefix. */
    clearWithPrefix(prefix: string): void;
    /** Clears all data from the cache. */
package/es/types/loaders/zarr_utils/types.d.ts
CHANGED
@@ -1,5 +1,5 @@
-import * as zarr from "
-import
+import * as zarr from "zarrita";
+import { AsyncReadable } from "@zarrita/storage";
 import type SubscribableRequestQueue from "../../utils/SubscribableRequestQueue.js";
 export type TCZYX<T> = [T, T, T, T, T];
 export type SubscriberId = ReturnType<SubscribableRequestQueue["addSubscriber"]>;
@@ -53,16 +53,16 @@ export type OMEMultiscale = {
 };
 /** https://ngff.openmicroscopy.org/latest/#omero-md */
 export type OmeroTransitionalMetadata = {
-    id
-    name
-    version
+    id?: number;
+    name?: string;
+    version?: string;
    channels: {
-        active
-        coefficient
+        active?: boolean;
+        coefficient?: number;
        color: string;
-        family
-        inverted
-        label
+        family?: string;
+        inverted?: boolean;
+        label?: string;
        window: {
            end: number;
            max: number;
@@ -73,9 +73,14 @@ export type OmeroTransitionalMetadata = {
 };
 export type OMEZarrMetadata = {
    multiscales: OMEMultiscale[];
-    omero
+    omero?: OmeroTransitionalMetadata;
+};
+export type WrappedArrayOpts = {
+    subscriber?: SubscriberId;
+    reportChunk?: (coords: number[], subscriber: SubscriberId) => void;
+    isPrefetch?: boolean;
 };
-export type NumericZarrArray = zarr.Array<zarr.NumberDataType,
+export type NumericZarrArray = zarr.Array<zarr.NumberDataType, AsyncReadable<RequestInit & WrappedArrayOpts>>;
 /** A record with everything we need to access and use a single remote source of multiscale OME-Zarr data. */
 export type ZarrSource = {
    /** Representations of each scale level in this zarr. We pick one and pass it to zarrita to load data. */
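Two small value-level sketches against the updated types (assuming `OMEZarrMetadata` and `WrappedArrayOpts` are imported from this module; `multiscales` is left empty purely for brevity):

```ts
// Types from this module; the exact public import path isn't established by this diff.

// Only `multiscales` is required on OMEZarrMetadata now that `omero` is optional.
const minimalMeta: OMEZarrMetadata = { multiscales: [] };

// WrappedArrayOpts is the extra per-request data a wrapped array's getChunk accepts
// alongside the usual RequestInit fetch options; every field is optional.
const prefetchOpts: WrappedArrayOpts = {
  isPrefetch: true,
  reportChunk: (coords, sub) => console.log("prefetching chunk", coords, "for subscriber", sub),
};
```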
package/es/types/loaders/zarr_utils/validation.d.ts
CHANGED
@@ -1,7 +1,19 @@
 import { OMEZarrMetadata } from "./types.js";
 /**
- *
+ * If `meta` is the top-level metadata of a zarr node formatted according to the OME-Zarr spec version 0.5, returns
+ * the object formatted according to v0.4 of the spec. For our purposes this just means flattening out the `ome` key.
+ *
+ * Return type is `unknown` because this does no actual validation; use `validateOMEZarrMetadata` for that.
+ */
+export declare const toOMEZarrMetaV4: (meta: unknown) => unknown;
+/** Intermediate stage of validation, before we've picked a single multiscale to validate */
+export type MultiscaleRecord = {
+    multiscales: unknown[];
+};
+export declare function assertMetadataHasMultiscales(meta: unknown, name?: string): asserts meta is MultiscaleRecord;
+/**
+ * Validates that the `OMEZarrMetadata` record `meta` has the minimal amount of data required to open a volume. Since
 * we only ever open one multiscale, we only validate the multiscale metadata record at index `multiscaleIdx` here.
 * `name` is used in error messages to identify the source of the metadata.
 */
-export declare function validateOMEZarrMetadata(
+export declare function validateOMEZarrMetadata(meta: MultiscaleRecord, multiscaleIdx?: number, name?: string): asserts meta is OMEZarrMetadata;
package/es/types/loaders/zarr_utils/wrapArray.d.ts
ADDED
@@ -0,0 +1,7 @@
+import type { Array as ZarrArray, AsyncReadable, DataType } from "zarrita";
+import VolumeCache from "../../VolumeCache.js";
+import type { WrappedArrayOpts } from "./types.js";
+import SubscribableRequestQueue from "../../utils/SubscribableRequestQueue.js";
+type AsyncReadableExt<Opts> = AsyncReadable<Opts & WrappedArrayOpts>;
+export default function wrapArray<T extends DataType, Opts = unknown, Store extends AsyncReadable<Opts> = AsyncReadable<Opts>>(array: ZarrArray<T, Store>, basePath: string, cache?: VolumeCache, queue?: SubscribableRequestQueue): ZarrArray<T, AsyncReadableExt<Opts>>;
+export {};
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
  "name": "@aics/vole-core",
-  "version": "3.13.0",
+  "version": "3.13.1",
  "description": "volume renderer for 3d, 4d, or 5d imaging data with OME-Zarr support",
  "main": "es/index.js",
  "type": "module",
@@ -39,7 +39,7 @@
    "three": "^0.171.0",
    "throttled-queue": "^2.1.4",
    "tweakpane": "^3.1.9",
-    "zarrita": "^0.
+    "zarrita": "^0.4.0"
  },
  "devDependencies": {
    "@babel/cli": "^7.25.6",
@@ -74,8 +74,8 @@
    "typescript": "^4.3.5",
    "url-loader": "^4.1.1",
    "vite": "^6.0.6",
-    "vitest": "^2.1.8",
    "vite-plugin-glsl": "^1.3.1",
+    "vitest": "^3.0.8",
    "webpack": "^5.69.1",
    "webpack-cli": "^4.9.2",
    "webpack-dev-server": "^4.7.4"
package/es/loaders/zarr_utils/WrappedStore.js
REMOVED
@@ -1,51 +0,0 @@
-/**
- * `Readable` is zarrita's minimal abstraction for any source of data.
- * `WrappedStore` wraps another `Readable` and adds (optional) connections to `VolumeCache` and `RequestQueue`.
- */
-class WrappedStore {
-  constructor(baseStore, cache, queue) {
-    this.baseStore = baseStore;
-    this.cache = cache;
-    this.queue = queue;
-  }
-  // Dummy implementation to make this class easier to use in tests
-  set(_key, _value) {
-    return Promise.resolve();
-  }
-  async getAndCache(key, cacheKey, opts) {
-    const result = await this.baseStore.get(key, opts);
-    if (this.cache && result) {
-      this.cache.insert(cacheKey, result);
-    }
-    return result;
-  }
-  async get(key, opts) {
-    const ZARR_EXTS = [".zarray", ".zgroup", ".zattrs", "zarr.json"];
-    if (!this.cache || ZARR_EXTS.some(s => key.endsWith(s))) {
-      return this.baseStore.get(key, opts?.options);
-    }
-    if (opts?.reportKey) {
-      opts.reportKey(key, opts.subscriber);
-    }
-    let keyPrefix = this.baseStore.url ?? "";
-    if (keyPrefix !== "" && !(keyPrefix instanceof URL) && !keyPrefix.endsWith("/")) {
-      keyPrefix += "/";
-    }
-    const fullKey = keyPrefix + key.slice(1);
-
-    // Check the cache
-    const cacheResult = this.cache.get(fullKey);
-    if (cacheResult) {
-      return new Uint8Array(cacheResult);
-    }
-
-    // Not in cache; load the chunk and cache it
-    if (this.queue && opts) {
-      return this.queue.addRequest(fullKey, opts.subscriber, () => this.getAndCache(key, fullKey, opts?.options), opts.isPrefetch);
-    } else {
-      // Should we ever hit this code? We should always have a request queue.
-      return this.getAndCache(key, fullKey, opts?.options);
-    }
-  }
-}
-export default WrappedStore;
package/es/types/loaders/zarr_utils/WrappedStore.d.ts
REMOVED
@@ -1,24 +0,0 @@
-import { AbsolutePath, AsyncMutable, Readable } from "@zarrita/storage";
-import SubscribableRequestQueue from "../../utils/SubscribableRequestQueue";
-import VolumeCache from "../../VolumeCache";
-import { SubscriberId } from "./types";
-type WrappedStoreOpts<Opts> = {
-    options?: Opts;
-    subscriber: SubscriberId;
-    reportKey?: (key: string, subscriber: SubscriberId) => void;
-    isPrefetch?: boolean;
-};
-/**
- * `Readable` is zarrita's minimal abstraction for any source of data.
- * `WrappedStore` wraps another `Readable` and adds (optional) connections to `VolumeCache` and `RequestQueue`.
- */
-declare class WrappedStore<Opts, S extends Readable<Opts> = Readable<Opts>> implements AsyncMutable<WrappedStoreOpts<Opts>> {
-    private baseStore;
-    private cache?;
-    private queue?;
-    constructor(baseStore: S, cache?: VolumeCache | undefined, queue?: SubscribableRequestQueue | undefined);
-    set(_key: AbsolutePath, _value: Uint8Array): Promise<void>;
-    private getAndCache;
-    get(key: AbsolutePath, opts?: WrappedStoreOpts<Opts> | undefined): Promise<Uint8Array | undefined>;
-}
-export default WrappedStore;