@aics/vole-core 3.12.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE.txt +26 -0
- package/README.md +119 -0
- package/es/Atlas2DSlice.js +224 -0
- package/es/Channel.js +264 -0
- package/es/FileSaver.js +31 -0
- package/es/FusedChannelData.js +192 -0
- package/es/Histogram.js +250 -0
- package/es/ImageInfo.js +127 -0
- package/es/Light.js +74 -0
- package/es/Lut.js +500 -0
- package/es/MarchingCubes.js +507 -0
- package/es/MeshVolume.js +334 -0
- package/es/NaiveSurfaceNets.js +251 -0
- package/es/PathTracedVolume.js +482 -0
- package/es/RayMarchedAtlasVolume.js +250 -0
- package/es/RenderToBuffer.js +31 -0
- package/es/ThreeJsPanel.js +633 -0
- package/es/Timing.js +28 -0
- package/es/TrackballControls.js +538 -0
- package/es/View3d.js +848 -0
- package/es/Volume.js +352 -0
- package/es/VolumeCache.js +161 -0
- package/es/VolumeDims.js +16 -0
- package/es/VolumeDrawable.js +702 -0
- package/es/VolumeMaker.js +101 -0
- package/es/VolumeRenderImpl.js +1 -0
- package/es/VolumeRenderSettings.js +203 -0
- package/es/constants/basicShaders.js +29 -0
- package/es/constants/colors.js +59 -0
- package/es/constants/denoiseShader.js +43 -0
- package/es/constants/lights.js +42 -0
- package/es/constants/materials.js +85 -0
- package/es/constants/pathtraceOutputShader.js +13 -0
- package/es/constants/scaleBarSVG.js +21 -0
- package/es/constants/time.js +34 -0
- package/es/constants/volumePTshader.js +153 -0
- package/es/constants/volumeRayMarchShader.js +123 -0
- package/es/constants/volumeSliceShader.js +115 -0
- package/es/index.js +21 -0
- package/es/loaders/IVolumeLoader.js +131 -0
- package/es/loaders/JsonImageInfoLoader.js +255 -0
- package/es/loaders/OmeZarrLoader.js +495 -0
- package/es/loaders/OpenCellLoader.js +65 -0
- package/es/loaders/RawArrayLoader.js +89 -0
- package/es/loaders/TiffLoader.js +219 -0
- package/es/loaders/VolumeLoadError.js +44 -0
- package/es/loaders/VolumeLoaderUtils.js +221 -0
- package/es/loaders/index.js +40 -0
- package/es/loaders/zarr_utils/ChunkPrefetchIterator.js +143 -0
- package/es/loaders/zarr_utils/WrappedStore.js +51 -0
- package/es/loaders/zarr_utils/types.js +24 -0
- package/es/loaders/zarr_utils/utils.js +225 -0
- package/es/loaders/zarr_utils/validation.js +49 -0
- package/es/test/ChunkPrefetchIterator.test.js +208 -0
- package/es/test/RequestQueue.test.js +442 -0
- package/es/test/SubscribableRequestQueue.test.js +244 -0
- package/es/test/VolumeCache.test.js +118 -0
- package/es/test/VolumeRenderSettings.test.js +71 -0
- package/es/test/lut.test.js +671 -0
- package/es/test/num_utils.test.js +140 -0
- package/es/test/volume.test.js +98 -0
- package/es/test/zarr_utils.test.js +358 -0
- package/es/types/Atlas2DSlice.d.ts +41 -0
- package/es/types/Channel.d.ts +44 -0
- package/es/types/FileSaver.d.ts +6 -0
- package/es/types/FusedChannelData.d.ts +26 -0
- package/es/types/Histogram.d.ts +57 -0
- package/es/types/ImageInfo.d.ts +87 -0
- package/es/types/Light.d.ts +27 -0
- package/es/types/Lut.d.ts +67 -0
- package/es/types/MarchingCubes.d.ts +53 -0
- package/es/types/MeshVolume.d.ts +40 -0
- package/es/types/NaiveSurfaceNets.d.ts +11 -0
- package/es/types/PathTracedVolume.d.ts +65 -0
- package/es/types/RayMarchedAtlasVolume.d.ts +41 -0
- package/es/types/RenderToBuffer.d.ts +17 -0
- package/es/types/ThreeJsPanel.d.ts +107 -0
- package/es/types/Timing.d.ts +11 -0
- package/es/types/TrackballControls.d.ts +51 -0
- package/es/types/View3d.d.ts +357 -0
- package/es/types/Volume.d.ts +152 -0
- package/es/types/VolumeCache.d.ts +43 -0
- package/es/types/VolumeDims.d.ts +28 -0
- package/es/types/VolumeDrawable.d.ts +108 -0
- package/es/types/VolumeMaker.d.ts +49 -0
- package/es/types/VolumeRenderImpl.d.ts +22 -0
- package/es/types/VolumeRenderSettings.d.ts +98 -0
- package/es/types/constants/basicShaders.d.ts +4 -0
- package/es/types/constants/colors.d.ts +2 -0
- package/es/types/constants/denoiseShader.d.ts +40 -0
- package/es/types/constants/lights.d.ts +38 -0
- package/es/types/constants/materials.d.ts +20 -0
- package/es/types/constants/pathtraceOutputShader.d.ts +11 -0
- package/es/types/constants/scaleBarSVG.d.ts +2 -0
- package/es/types/constants/time.d.ts +19 -0
- package/es/types/constants/volumePTshader.d.ts +137 -0
- package/es/types/constants/volumeRayMarchShader.d.ts +117 -0
- package/es/types/constants/volumeSliceShader.d.ts +109 -0
- package/es/types/glsl.d.js +0 -0
- package/es/types/index.d.ts +28 -0
- package/es/types/loaders/IVolumeLoader.d.ts +113 -0
- package/es/types/loaders/JsonImageInfoLoader.d.ts +80 -0
- package/es/types/loaders/OmeZarrLoader.d.ts +87 -0
- package/es/types/loaders/OpenCellLoader.d.ts +9 -0
- package/es/types/loaders/RawArrayLoader.d.ts +33 -0
- package/es/types/loaders/TiffLoader.d.ts +45 -0
- package/es/types/loaders/VolumeLoadError.d.ts +18 -0
- package/es/types/loaders/VolumeLoaderUtils.d.ts +38 -0
- package/es/types/loaders/index.d.ts +22 -0
- package/es/types/loaders/zarr_utils/ChunkPrefetchIterator.d.ts +22 -0
- package/es/types/loaders/zarr_utils/WrappedStore.d.ts +24 -0
- package/es/types/loaders/zarr_utils/types.d.ts +94 -0
- package/es/types/loaders/zarr_utils/utils.d.ts +23 -0
- package/es/types/loaders/zarr_utils/validation.d.ts +7 -0
- package/es/types/test/ChunkPrefetchIterator.test.d.ts +1 -0
- package/es/types/test/RequestQueue.test.d.ts +1 -0
- package/es/types/test/SubscribableRequestQueue.test.d.ts +1 -0
- package/es/types/test/VolumeCache.test.d.ts +1 -0
- package/es/types/test/VolumeRenderSettings.test.d.ts +1 -0
- package/es/types/test/lut.test.d.ts +1 -0
- package/es/types/test/num_utils.test.d.ts +1 -0
- package/es/types/test/volume.test.d.ts +1 -0
- package/es/types/test/zarr_utils.test.d.ts +1 -0
- package/es/types/types.d.ts +115 -0
- package/es/types/utils/RequestQueue.d.ts +112 -0
- package/es/types/utils/SubscribableRequestQueue.d.ts +52 -0
- package/es/types/utils/num_utils.d.ts +43 -0
- package/es/types/workers/VolumeLoaderContext.d.ts +106 -0
- package/es/types/workers/types.d.ts +101 -0
- package/es/types/workers/util.d.ts +3 -0
- package/es/types.js +75 -0
- package/es/typings.d.js +0 -0
- package/es/utils/RequestQueue.js +267 -0
- package/es/utils/SubscribableRequestQueue.js +187 -0
- package/es/utils/num_utils.js +231 -0
- package/es/workers/FetchTiffWorker.js +153 -0
- package/es/workers/VolumeLoadWorker.js +129 -0
- package/es/workers/VolumeLoaderContext.js +271 -0
- package/es/workers/types.js +41 -0
- package/es/workers/util.js +8 -0
- package/package.json +83 -0
|
@@ -0,0 +1,219 @@
|
|
|
1
|
+
import { fromUrl } from "geotiff";
|
|
2
|
+
import { deserializeError } from "serialize-error";
|
|
3
|
+
import { ThreadableVolumeLoader, LoadSpec } from "./IVolumeLoader.js";
|
|
4
|
+
import { computePackedAtlasDims, MAX_ATLAS_EDGE } from "./VolumeLoaderUtils.js";
|
|
5
|
+
import { VolumeLoadError, VolumeLoadErrorType, wrapVolumeLoadError } from "./VolumeLoadError.js";
|
|
6
|
+
import { CImageInfo } from "../ImageInfo.js";
|
|
7
|
+
/**
 * Cleans up an OME-XML string extracted from a TIFF ImageDescription tag.
 * Some writers pad the tag with trailing NUL (U+0000) characters; NUL is not
 * whitespace, so `trim()` alone does not remove it.
 *
 * Fix: the original regex `[\u0000]$` (even with the `g` flag) matches at most
 * one character at the end of the string, so multiple trailing NULs survived.
 * `\u0000+$` strips them all.
 *
 * @param {string} xml - raw ImageDescription contents
 * @returns {string} the XML with surrounding whitespace and all trailing NULs removed
 */
function prepareXML(xml) {
  // trim trailing unicode zeros
  // eslint-disable-next-line no-control-regex
  const trailingNuls = /\u0000+$/;
  return xml.trim().replace(trailingNuls, "").trim();
}
|
|
13
|
+
/**
 * Parses an OME-XML document and returns its root <OME> element.
 *
 * Fix: `DOMParser.parseFromString` does not throw on malformed XML — it returns
 * a document containing a <parsererror> node — so the original try/catch could
 * never fire on bad input, and a missing <OME> element was returned as
 * `undefined`, deferring the crash to the caller. We now check the lookup
 * result explicitly and raise a descriptive error for both cases.
 *
 * @param {string} xml - prepared OME-XML string
 * @returns {Element} the root <OME> element
 * @throws {VolumeLoadError} INVALID_METADATA when no OME element can be found
 */
function getOME(xml) {
  const parser = new DOMParser();
  let omeEl;
  try {
    const xmlDoc = parser.parseFromString(xml, "text/xml");
    omeEl = xmlDoc.getElementsByTagName("OME")[0];
  } catch (e) {
    // Kept for safety (e.g. environments where DOMParser itself throws).
    throw new VolumeLoadError("Could not find OME metadata in TIFF file", {
      type: VolumeLoadErrorType.INVALID_METADATA,
      cause: e
    });
  }
  if (omeEl === undefined) {
    // Covers both malformed XML (parsererror document) and XML with no <OME> root.
    throw new VolumeLoadError("Could not find OME metadata in TIFF file", {
      type: VolumeLoadErrorType.INVALID_METADATA
    });
  }
  return omeEl;
}
|
|
25
|
+
/**
 * Plain data holder for dimension metadata parsed from an OME-TIFF <Pixels>
 * element. Spatial sizes default to 0/1 and are overwritten by `getOMEDims`.
 */
class OMEDims {
  constructor() {
    this.sizex = 0;
    this.sizey = 0;
    this.sizez = 1;
    this.sizec = 1;
    this.sizet = 1;
    this.unit = "";
    this.pixeltype = "";
    this.dimensionorder = "";
    this.pixelsizex = 1;
    this.pixelsizey = 1;
    this.pixelsizez = 1;
    this.channelnames = [];
  }
}
|
|
39
|
+
/**
 * Maps an OME pixel type string to the internal dtype name.
 * Unknown types fall back to "uint8" with a console warning.
 *
 * Fix: the original looked the type up in an object literal
 * (`mapping[omepixeltype]`), which performs a prototype-chain lookup — inputs
 * like "constructor" or "toString" would return a function instead of hitting
 * the `undefined` fallback. A switch over the known names avoids that.
 *
 * @param {string} omepixeltype - value of the Pixels "Type" attribute
 * @returns {string} one of "uint8" | "uint16" | "uint32" | "int8" | "int16" | "int32" | "float32"
 */
function getDtype(omepixeltype) {
  switch (omepixeltype) {
    case "uint8":
    case "uint16":
    case "uint32":
    case "int8":
    case "int16":
    case "int32":
      // These names are identical in both vocabularies.
      return omepixeltype;
    case "float":
      return "float32";
    default:
      console.warn(`Unsupported OME pixel type ${omepixeltype}; defaulting to uint8`);
      return "uint8";
  }
}
|
|
56
|
+
/**
 * Reads a required attribute from an XML element.
 *
 * @param {Element} el - element to read from
 * @param {string} attr - attribute name
 * @returns {string} the attribute value
 * @throws {VolumeLoadError} INVALID_METADATA when the attribute is absent
 */
function getAttributeOrError(el, attr) {
  const value = el.getAttribute(attr);
  if (value !== null) {
    return value;
  }
  throw new VolumeLoadError(`Missing attribute ${attr} in OME-TIFF metadata`, {
    type: VolumeLoadErrorType.INVALID_METADATA
  });
}
|
|
65
|
+
/**
 * Parses an OME-XML <Image> element into an `OMEDims` record.
 * SizeX/SizeY are required (throw via `getAttributeOrError` when absent);
 * all other attributes fall back to defaults.
 *
 * NOTE(review): assumes a <Pixels> child exists — `pixelsEl` would be
 * `undefined` and crash on the first attribute access otherwise; confirm the
 * caller only passes validated OME documents.
 *
 * @param {Element} imageEl - the <Image> element from the OME document
 * @returns {OMEDims} parsed dimension info
 * @throws {VolumeLoadError} INVALID_METADATA when SizeX or SizeY is missing
 */
function getOMEDims(imageEl) {
  const dims = new OMEDims();
  const pixelsEl = imageEl.getElementsByTagName("Pixels")[0];
  dims.sizex = Number(getAttributeOrError(pixelsEl, "SizeX"));
  dims.sizey = Number(getAttributeOrError(pixelsEl, "SizeY"));
  // For optional attributes `getAttribute` returns null, and Number(null) === 0,
  // which overrides the OMEDims field defaults of 1 — TODO confirm intended.
  dims.sizez = Number(pixelsEl.getAttribute("SizeZ"));
  dims.sizec = Number(pixelsEl.getAttribute("SizeC"));
  dims.sizet = Number(pixelsEl.getAttribute("SizeT"));
  dims.unit = pixelsEl.getAttribute("PhysicalSizeXUnit") || "";
  dims.pixeltype = pixelsEl.getAttribute("Type") || "";
  dims.dimensionorder = pixelsEl.getAttribute("DimensionOrder") || "XYZCT";
  dims.pixelsizex = Number(pixelsEl.getAttribute("PhysicalSizeX"));
  dims.pixelsizey = Number(pixelsEl.getAttribute("PhysicalSizeY"));
  dims.pixelsizez = Number(pixelsEl.getAttribute("PhysicalSizeZ"));
  // Channel display names: prefer Name, then ID, then a generated "Channel<i>".
  const channelsEls = pixelsEl.getElementsByTagName("Channel");
  for (let i = 0; i < channelsEls.length; ++i) {
    const name = channelsEls[i].getAttribute("Name");
    const id = channelsEls[i].getAttribute("ID");
    dims.channelnames.push(name ? name : id ? id : "Channel" + i);
  }
  return dims;
}
|
|
87
|
+
/** Byte width of one sample: 1 for uint8, 2 for uint16, and 4 for everything else. */
const getBytesPerSample = type => {
  if (type === "uint8") {
    return 1;
  }
  if (type === "uint16") {
    return 2;
  }
  return 4;
};
|
|
88
|
+
|
|
89
|
+
// Despite the class `TiffLoader` extends, this loader is not threadable, since geotiff internally uses features that
// aren't available on workers. It uses its own specialized workers anyways.
/**
 * Loads OME-TIFF volumes over HTTP using geotiff. Dimension metadata is read
 * from the OME-XML embedded in the first image directory's ImageDescription.
 */
class TiffLoader extends ThreadableVolumeLoader {
  /** @param {string} url - URL of the OME-TIFF file to load. */
  constructor(url) {
    super();
    this.url = url;
  }

  /**
   * Fetches OME dimension metadata from the file, caching it on `this.dims`
   * so subsequent calls are free.
   *
   * NOTE(review): two concurrent first calls would each fetch the file;
   * caching the promise instead of the result would avoid that — confirm
   * whether callers serialize access.
   *
   * @returns {Promise<OMEDims>} parsed dimension info
   * @throws {VolumeLoadError} NOT_FOUND when the file/image can't be opened;
   *   INVALID_METADATA when the embedded OME-XML is missing or malformed.
   */
  async loadOmeDims() {
    if (!this.dims) {
      const tiff = await fromUrl(this.url, {
        allowFullFile: true
      }).catch(wrapVolumeLoadError(`Could not open TIFF file at ${this.url}`, VolumeLoadErrorType.NOT_FOUND));
      // DO NOT DO THIS, ITS SLOW
      // const imagecount = await tiff.getImageCount();
      // read the FIRST image
      const image = await tiff.getImage().catch(wrapVolumeLoadError("Failed to open TIFF image", VolumeLoadErrorType.NOT_FOUND));
      const tiffimgdesc = prepareXML(image.getFileDirectory().ImageDescription);
      const omeEl = getOME(tiffimgdesc);
      const image0El = omeEl.getElementsByTagName("Image")[0];
      this.dims = getOMEDims(image0El);
    }
    return this.dims;
  }

  /**
   * Describes the single "scale level" this loader exposes: tile sizes are
   * chosen so all z-slices pack into one atlas of at most MAX_ATLAS_EDGE per edge.
   *
   * @param {LoadSpec} _loadSpec - unused; this loader ignores the load spec.
   * @returns {Promise<object[]>} a one-element array of TCZYX dimension records
   */
  async loadDims(_loadSpec) {
    const dims = await this.loadOmeDims();
    const atlasDims = computePackedAtlasDims(dims.sizez, dims.sizex, dims.sizey);
    // fit tiles to max of 2048x2048?
    const targetSize = MAX_ATLAS_EDGE;
    const tilesizex = Math.floor(targetSize / atlasDims.x);
    const tilesizey = Math.floor(targetSize / atlasDims.y);
    const d = {
      shape: [dims.sizet, dims.sizec, dims.sizez, tilesizey, tilesizex],
      // Spacing is scaled by (original size / tile size) so the downsampled
      // tile still spans the original physical extent.
      spacing: [1, 1, dims.pixelsizez, dims.pixelsizey * dims.sizey / tilesizey, dims.pixelsizex * dims.sizex / tilesizex],
      spaceUnit: dims.unit ? dims.unit : "micron",
      dataType: getDtype(dims.pixeltype),
      timeUnit: "s"
    };
    return [d];
  }

  /**
   * Builds the `ImageInfo` record for this TIFF: atlas layout, per-channel
   * names, and a single multiscale level whose tiles fit in MAX_ATLAS_EDGE².
   *
   * @param {LoadSpec} _loadSpec - unused; this loader ignores the load spec.
   * @returns {Promise<{imageInfo: object, loadSpec: LoadSpec}>} image info plus a default LoadSpec
   */
  async createImageInfo(_loadSpec) {
    const dims = await this.loadOmeDims();
    // compare with sizex, sizey
    //const width = image.getWidth();
    //const height = image.getHeight();

    // TODO allow user setting of this downsampling info?
    // TODO allow ROI selection: range of x,y,z,c for a given t
    const atlasDims = computePackedAtlasDims(dims.sizez, dims.sizex, dims.sizey);
    // fit tiles to max of 2048x2048?
    const targetSize = MAX_ATLAS_EDGE;
    const tilesizex = Math.floor(targetSize / atlasDims.x);
    const tilesizey = Math.floor(targetSize / atlasDims.y);

    // load tiff and check metadata

    const imgdata = {
      name: "TEST",
      atlasTileDims: [atlasDims.x, atlasDims.y],
      subregionSize: [tilesizex, tilesizey, dims.sizez],
      subregionOffset: [0, 0, 0],
      combinedNumChannels: dims.sizec,
      channelNames: dims.channelnames,
      multiscaleLevel: 0,
      multiscaleLevelDims: [{
        shape: [dims.sizet, dims.sizec, dims.sizez, tilesizey, tilesizex],
        spacing: [1, 1, dims.pixelsizez, dims.pixelsizey * dims.sizey / tilesizey, dims.pixelsizex * dims.sizex / tilesizex],
        spaceUnit: dims.unit || "",
        timeUnit: "",
        dataType: getDtype(dims.pixeltype)
      }],
      transform: {
        translation: [0, 0, 0],
        rotation: [0, 0, 0],
        scale: [1, 1, 1]
      }
    };

    // This loader uses no fields from `LoadSpec`. Initialize volume with defaults.
    return {
      imageInfo: imgdata,
      loadSpec: new LoadSpec()
    };
  }

  /**
   * Loads every channel's voxel data, one dedicated `FetchTiffWorker` per
   * channel, delivering each channel to `onData` as it arrives.
   *
   * @param {object} imageInfo - raw image info (as produced by `createImageInfo`)
   * @param {LoadSpec} _loadSpec - unused
   * @param {Function} _onUpdateMetadata - unused
   * @param {Function} onData - callback invoked per channel with ([channel], [dtype], [data], [range])
   * @returns {Promise<void>} resolves once every channel has loaded; rejects on the first worker error
   */
  async loadRawChannelData(imageInfo, _loadSpec, _onUpdateMetadata, onData) {
    const dims = await this.loadOmeDims();

    // get some size info.
    const cimageinfo = new CImageInfo(imageInfo);
    const volumeSize = cimageinfo.volumeSize;
    const channelProms = [];
    // do each channel on a worker?
    for (let channel = 0; channel < imageInfo.combinedNumChannels; ++channel) {
      const thisChannelProm = new Promise((resolve, reject) => {
        const params = {
          channel: channel,
          // these are target xy sizes for the in-memory volume data
          // they may or may not be the same size as original xy sizes
          tilesizex: volumeSize.x,
          tilesizey: volumeSize.y,
          sizec: imageInfo.combinedNumChannels,
          sizez: volumeSize.z,
          dimensionOrder: dims.dimensionorder,
          bytesPerSample: getBytesPerSample(dims.pixeltype),
          url: this.url
        };
        const worker = new Worker(new URL("../workers/FetchTiffWorker", import.meta.url));
        worker.onmessage = e => {
          if (e.data.isError) {
            // NOTE(review): the worker is not terminated on this error path —
            // confirm whether it should be to avoid leaking workers.
            reject(deserializeError(e.data.error));
            return;
          }
          const {
            data,
            dtype,
            channel,
            range
          } = e.data;
          onData([channel], [dtype], [data], [range]);
          worker.terminate();
          resolve();
        };
        worker.postMessage(params);
      });
      channelProms.push(thisChannelProm);
    }

    // waiting for all channels to load allows errors to propagate to the caller via this promise
    await Promise.all(channelProms);
  }
}
|
|
219
|
+
export { TiffLoader };
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
import { errorConstructors } from "serialize-error";
|
|
2
|
+
import { NodeNotFoundError, KeyError } from "@zarrita/core";
|
|
3
|
+
// geotiff doesn't export its error types...
|
|
4
|
+
|
|
5
|
+
/** Groups possible load errors into a few broad categories which we can give similar guidance to the user about. */
export let VolumeLoadErrorType = {
  UNKNOWN: "unknown",
  NOT_FOUND: "not_found",
  TOO_LARGE: "too_large",
  LOAD_DATA_FAILED: "load_data_failed",
  INVALID_METADATA: "invalid_metadata",
  INVALID_MULTI_SOURCE_ZARR: "invalid_multi_source_zarr"
};
|
|
15
|
+
/**
 * Error thrown by volume loaders. Carries a `VolumeLoadErrorType` category in
 * `type` so callers can give the user targeted guidance.
 */
export class VolumeLoadError extends Error {
  /**
   * @param {string} message - human-readable description
   * @param {{type?: string, cause?: unknown}} [options] - optional category and underlying cause
   */
  constructor(message, options) {
    super(message, options);
    this.name = "VolumeLoadError";
    // Fall back to UNKNOWN when no category (or no options at all) was given.
    const category = options === null || options === undefined ? undefined : options.type;
    this.type = category ?? VolumeLoadErrorType.UNKNOWN;
  }
}
|
|
22
|
+
|
|
23
|
+
// serialize-error only ever calls an error constructor with zero arguments. The required `ErrorConstructor`
// type is a bit too restrictive - as long as the constructor can be called with no arguments it's fine.
// Registering these lets errors serialized across worker boundaries deserialize
// back into their original classes instead of plain `Error`s.
errorConstructors.set("NodeNotFoundError", NodeNotFoundError);
errorConstructors.set("KeyError", KeyError);
errorConstructors.set("VolumeLoadError", VolumeLoadError);
|
|
28
|
+
|
|
29
|
+
/** Curried function to re-throw an error wrapped in a `VolumeLoadError` with the given `message` and `type`. */
export function wrapVolumeLoadError(message = "Unknown error occurred while loading volume data", type = VolumeLoadErrorType.UNKNOWN, ignore) {
  return function (e) {
    // A sentinel value the caller wants passed through untouched (resolves the
    // promise with it instead of rethrowing).
    if (ignore !== undefined && e === ignore) {
      return e;
    }
    // Already categorized — don't double-wrap.
    if (e instanceof VolumeLoadError) {
      throw e;
    }
    console.log(`Error loading volume data: ${e}`);
    throw new VolumeLoadError(message, {
      type,
      cause: e
    });
  };
}
|
|
@@ -0,0 +1,221 @@
|
|
|
1
|
+
import { Box3, Vector2, Vector3 } from "three";
|
|
2
|
+
import { CImageInfo } from "../ImageInfo.js";
|
|
3
|
+
/** Maximum edge length, in pixels, of a packed texture atlas. */
export const MAX_ATLAS_EDGE = 4096;

// Map from units to their symbols
const UNIT_SYMBOLS = {
  angstrom: "Å",
  day: "d",
  foot: "ft",
  hour: "h",
  inch: "in",
  meter: "m",
  micron: "μm",
  mile: "mi",
  minute: "min",
  parsec: "pc",
  second: "s",
  yard: "yd"
};

// Units which may take SI prefixes (e.g. micro-, tera-)
const SI_UNITS = ["meter", "second"];

// SI prefixes which abbreviate in nonstandard ways
// (all other prefixes abbreviate to their first letter; see `unitNameToSymbol`)
const SI_PREFIX_ABBVS = {
  micro: "μ",
  deca: "da"
};
|
|
29
|
+
|
|
30
|
+
/** Converts a full spatial or temporal unit name supported by OME-Zarr to its unit symbol */
// (see https://ngff.openmicroscopy.org/latest/#axes-md)
export function unitNameToSymbol(unitName) {
  if (unitName === undefined) {
    return null;
  }
  // Exact match against the known-symbol table.
  const direct = UNIT_SYMBOLS[unitName];
  if (direct) {
    return direct;
  }
  // Otherwise try to split off an SI prefix from a prefixable base unit.
  const baseUnit = SI_UNITS.find(siUnit => unitName.endsWith(siUnit));
  if (!baseUnit) {
    return null;
  }
  const prefix = unitName.substring(0, unitName.length - baseUnit.length);
  const special = SI_PREFIX_ABBVS[prefix];
  if (special) {
    // "special" SI prefix
    return special + UNIT_SYMBOLS[baseUnit];
  }
  // almost all SI prefixes are abbreviated by first letter, capitalized if prefix ends with "a"
  const firstLetter = prefix.endsWith("a") ? prefix[0].toUpperCase() : prefix[0];
  return firstLetter + UNIT_SYMBOLS[baseUnit];
}
|
|
54
|
+
|
|
55
|
+
// We want to find the most "square" packing of z tw by th tiles.
// Compute number of rows and columns.
// Starting from a single row of z columns, columns are removed one at a time
// (rows grow via ceil to keep all z tiles) while the candidate layout is wider
// than tall; the last layout with width/height ratio > 1 is returned.
// NOTE(review): the result is `Vector2(nrows, ncols)` — rows land in `.x` and
// columns in `.y`; confirm callers expect this ordering.
export function computePackedAtlasDims(z, tw, th) {
  let nextrows = 1;
  let nextcols = z;
  // Aspect ratio (total width / total height) of the candidate layout.
  let ratio = nextcols * tw / (nextrows * th);
  let nrows = nextrows;
  let ncols = nextcols;
  while (ratio > 1) {
    // Remember the current (still wider-than-tall) layout before shrinking.
    nrows = nextrows;
    ncols = nextcols;
    nextcols -= 1;
    nextrows = Math.ceil(z / nextcols);
    ratio = nextcols * tw / (nextrows * th);
  }
  return new Vector2(nrows, ncols);
}
|
|
72
|
+
/**
 * Returns true when `z` tiles of size (x, y) can be packed into a square atlas
 * whose edges are at most `maxAtlasEdge` pixels long.
 *
 * @param {[number, number, number]} spatialDimZYX - volume dimensions in ZYX order
 * @param {number} [maxAtlasEdge] - atlas edge limit in pixels
 * @returns {boolean} whether all z slices fit
 */
function doesSpatialDimensionFitInAtlas(spatialDimZYX, maxAtlasEdge = MAX_ATLAS_EDGE) {
  const [z, y, x] = spatialDimZYX;
  // How many whole tiles fit along each atlas edge.
  const tilesAcross = Math.floor(maxAtlasEdge / x);
  const tilesDown = Math.floor(maxAtlasEdge / y);
  return tilesAcross * tilesDown >= z;
}
|
|
81
|
+
|
|
82
|
+
/** Picks the largest scale level that can fit into a texture atlas with edges no longer than `maxAtlasEdge`. */
export function estimateLevelForAtlas(spatialDimsZYX, maxAtlasEdge = MAX_ATLAS_EDGE) {
  // With zero or one levels there is nothing to choose between.
  if (spatialDimsZYX.length <= 1) {
    return 0;
  }
  // Levels are ordered largest-first; the first that fits is the best one.
  const firstFit = spatialDimsZYX.findIndex(dims => doesSpatialDimensionFitInAtlas(dims, maxAtlasEdge));
  return firstFit === -1 ? undefined : firstFit;
}
|
|
95
|
+
/** Ceiling clamped to a minimum of 1, so scaled dimensions never collapse to 0. */
const maxCeil = val => Math.max(Math.ceil(val), 1);
/** Scales a [z, y, x] triple by a Vector3-like `size`, rounding up with a floor of 1. */
const scaleDims = (size, dims) => {
  const [z, y, x] = dims;
  return [maxCeil(size.z * z), maxCeil(size.y * y), maxCeil(size.x * x)];
};
|
|
97
|
+
/** Scales `dims` ([z, y, x]) by the fractional extents of `subregion` (a Box3 in the 0-1 range). */
export function scaleDimsToSubregion(subregion, dims) {
  return scaleDims(subregion.getSize(new Vector3()), dims);
}
|
|
101
|
+
/** Applies the `subregion` scaling of `scaleDimsToSubregion` to every entry of `dims`, computing the subregion size once. */
export function scaleMultipleDimsToSubregion(subregion, dims) {
  const fraction = subregion.getSize(new Vector3());
  const scaled = [];
  for (const dim of dims) {
    scaled.push(scaleDims(fraction, dim));
  }
  return scaled;
}
|
|
105
|
+
|
|
106
|
+
/**
 * Picks the best scale level to load based on scale level dimensions and a `LoadSpec`. This calls
 * `estimateLevelForAtlas`, then accounts for `LoadSpec`'s scale level picking properties:
 * - `multiscaleLevel` imposes a minimum scale level (or *maximum* resolution level) to load
 * - `maxAtlasEdge` sets the maximum size of the texture atlas that may be produced by a load
 * - `scaleLevelBias` offsets the scale level index after the optimal level is picked based on `maxAtlasEdge`
 *
 * This function assumes that `spatialDimsZYX` has already been appropriately scaled to match `loadSpec`'s `subregion`.
 */
export function pickLevelToLoadUnscaled(loadSpec, spatialDimsZYX) {
  // An explicitly requested level bypasses all fitting heuristics.
  if (loadSpec.useExplicitLevel && loadSpec.multiscaleLevel !== undefined) {
    // clamp to actual allowed level range
    return Math.max(0, Math.min(spatialDimsZYX.length - 1, loadSpec.multiscaleLevel));
  }
  // `undefined` here means no level fits within `maxAtlasEdge`.
  let levelToLoad = estimateLevelForAtlas(spatialDimsZYX, loadSpec.maxAtlasEdge);
  // Check here for whether levelToLoad is within max atlas size?
  if (levelToLoad !== undefined) {
    // Apply the bias, enforce the minimum level, then clamp to the valid range.
    levelToLoad = Math.max(levelToLoad + (loadSpec.scaleLevelBias ?? 0), loadSpec.multiscaleLevel ?? 0);
    levelToLoad = Math.max(0, Math.min(spatialDimsZYX.length - 1, levelToLoad));
    // The bias/minimum may have pushed us to a level that no longer fits;
    // only return if the adjusted level still packs into the atlas.
    if (doesSpatialDimensionFitInAtlas(spatialDimsZYX[levelToLoad], loadSpec.maxAtlasEdge)) {
      return levelToLoad;
    }
  }

  // Level to load could not be loaded due to atlas size constraints.
  if (levelToLoad === undefined) {
    // No optimal level exists so choose the smallest level to report out
    levelToLoad = spatialDimsZYX.length - 1;
  }
  // Report the oversized selection but return it anyway; the caller decides what to do.
  const smallestDims = spatialDimsZYX[levelToLoad];
  console.error(`Volume is too large; no multiscale level found that fits in preferred memory footprint. Selected level ${levelToLoad} has dimensions `, smallestDims, `. Max atlas edge allowed is ${loadSpec.maxAtlasEdge}.`);
  console.log("All available levels: ", spatialDimsZYX);
  return levelToLoad;
}
|
|
140
|
+
|
|
141
|
+
/**
 * Picks the best scale level to load based on scale level dimensions and a `LoadSpec`. This calls
 * `estimateLevelForAtlas` and accounts for all properties of `LoadSpec` considered by
 * `pickLevelToLoadUnscaled`, and additionally scales the dimensions of the scale levels to account for the
 * `LoadSpec`'s `subregion` property.
 */
export function pickLevelToLoad(loadSpec, spatialDimsZYX) {
  return pickLevelToLoadUnscaled(loadSpec, scaleMultipleDimsToSubregion(loadSpec.subregion, spatialDimsZYX));
}
|
|
151
|
+
|
|
152
|
+
/** Given the size of a volume in pixels, convert a `Box3` in the 0-1 range to pixels */
export function convertSubregionToPixels(region, size) {
  // Snap outward: floor the min corner, ceil the max corner.
  const min = region.min.clone().multiply(size).floor();
  const max = region.max.clone().multiply(size).ceil();

  // ensure it's always valid to specify the same number at both ends and get a single slice
  for (const axis of ["x", "y", "z"]) {
    if (min[axis] === max[axis] && min[axis] < size[axis]) {
      max[axis] += 1;
    }
  }
  return new Box3(min, max);
}
|
|
169
|
+
|
|
170
|
+
/**
 * Return the subset of `container` specified by `region`, assuming that `region` contains fractional values (between 0
 * and 1). i.e. if `container`'s range on the X axis is 0-4 and `region`'s is 0.25-0.5, the result will have range 1-2.
 */
export function composeSubregion(region, container) {
  const containerSize = container.getSize(new Vector3());
  // Map a fractional corner into the container's coordinate space.
  const toContainerSpace = corner => corner.clone().multiply(containerSize).add(container.min);
  return new Box3(toContainerSpace(region.min), toContainerSpace(region.max));
}
|
|
180
|
+
/** Returns true when `obj` has no own enumerable string-keyed properties. */
function isEmpty(obj) {
  return Object.keys(obj).length === 0;
}
|
|
188
|
+
|
|
189
|
+
// currently everything needed can come from the imageInfo
// but in the future each IVolumeLoader could have a completely separate implementation.
/**
 * Builds a human-readable metadata record (dimensions, physical sizes,
 * channel/time counts, optional user data) from a raw image info object.
 *
 * @param {object} rawImageInfo - plain image info record (wrapped in `CImageInfo` internally)
 * @returns {object} display-oriented metadata keyed by label
 */
export function buildDefaultMetadata(rawImageInfo) {
  // wrap
  const imageInfo = new CImageInfo(rawImageInfo);
  const physicalSize = imageInfo.volumeSize.clone().multiply(imageInfo.physicalPixelSize);
  const metadata = {};
  metadata["Dimensions"] = {
    ...imageInfo.subregionSize
  };
  metadata["Original dimensions"] = {
    ...imageInfo.originalSize
  };
  // Values below are number + unit string concatenations, e.g. "1.5μm".
  metadata["Physical size"] = {
    x: physicalSize.x + imageInfo.spatialUnit,
    y: physicalSize.y + imageInfo.spatialUnit,
    z: physicalSize.z + imageInfo.spatialUnit
  };
  metadata["Physical size per pixel"] = {
    x: imageInfo.physicalPixelSize.x + imageInfo.spatialUnit,
    y: imageInfo.physicalPixelSize.y + imageInfo.spatialUnit,
    z: imageInfo.physicalPixelSize.z + imageInfo.spatialUnit
  };
  metadata["Multiresolution levels"] = rawImageInfo.multiscaleLevelDims;
  // TODO decide???? combined or not?
  metadata["Channels"] = rawImageInfo.combinedNumChannels; //imageInfo.numChannels;
  // NOTE(review): `|| 1` treats a frame count of 0 as 1 — presumably intended; confirm.
  metadata["Time series frames"] = imageInfo.times || 1;
  // don't add User data if it's empty
  if (rawImageInfo.userData && !isEmpty(rawImageInfo.userData)) {
    metadata["User data"] = rawImageInfo.userData;
  }
  return metadata;
}
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
import { OMEZarrLoader } from "./OmeZarrLoader.js";
|
|
2
|
+
import { JsonImageInfoLoader } from "./JsonImageInfoLoader.js";
|
|
3
|
+
import { RawArrayLoader } from "./RawArrayLoader.js";
|
|
4
|
+
import { TiffLoader } from "./TiffLoader.js";
|
|
5
|
+
export { PrefetchDirection } from "./zarr_utils/types.js";
|
|
6
|
+
/** Volume container formats that `createVolumeLoader` can dispatch on. */
export let VolumeFileFormat = {
  ZARR: "zarr",
  JSON: "json",
  TIFF: "tiff",
  DATA: "data"
};
|
|
13
|
+
|
|
14
|
+
// superset of all necessary loader options
|
|
15
|
+
|
|
16
|
+
/**
 * Infers a `VolumeFileFormat` from a file path's extension.
 *
 * Fix/generalization: extension matching is now case-insensitive, so paths
 * like "IMAGE.TIFF" are recognized as TIFF instead of falling through to ZARR.
 * Anything without a .json/.tif/.tiff extension is still assumed to be an
 * OME-Zarr store (zarr "paths" are directories with no telltale extension).
 *
 * @param {string} path - file path or URL
 * @returns {string} one of the `VolumeFileFormat` values
 */
export function pathToFileType(path) {
  const normalized = path.toLowerCase();
  if (normalized.endsWith(".json")) {
    return VolumeFileFormat.JSON;
  }
  if (normalized.endsWith(".tif") || normalized.endsWith(".tiff")) {
    return VolumeFileFormat.TIFF;
  }
  return VolumeFileFormat.ZARR;
}
|
|
24
|
+
/**
 * Constructs the appropriate volume loader for `path`, using `options.fileType`
 * when given and otherwise inferring the format from the path's extension.
 *
 * @param {string | string[]} path - one path, or several (multi-source is only meaningful for zarr)
 * @param {object} [options] - loader options (fileType, cache, queue, scene, fetchOptions, rawArrayOptions)
 * @returns {Promise<object>} the constructed loader
 * @throws {Error} when DATA format is requested without `rawArrayOptions`
 */
export async function createVolumeLoader(path, options) {
  // Non-zarr loaders take a single path; use the first entry of an array.
  const pathString = Array.isArray(path) ? path[0] : path;
  const fileType = options?.fileType || pathToFileType(pathString);
  switch (fileType) {
    case VolumeFileFormat.ZARR:
      // Zarr is the only loader that accepts the full (possibly multi-source) path value.
      return await OMEZarrLoader.createLoader(path, options?.scene, options?.cache, options?.queue, options?.fetchOptions);
    case VolumeFileFormat.JSON:
      return new JsonImageInfoLoader(path, options?.cache);
    case VolumeFileFormat.TIFF:
      return new TiffLoader(pathString);
    case VolumeFileFormat.DATA:
      if (!options?.rawArrayOptions) {
        throw new Error("Must provide RawArrayOptions for RawArrayLoader");
      }
      return new RawArrayLoader(options?.rawArrayOptions.data, options?.rawArrayOptions.metadata);
  }
  // NOTE(review): an unrecognized `options.fileType` falls through all cases and
  // resolves to `undefined` — confirm whether this should throw instead.
}
|
|
@@ -0,0 +1,143 @@
|
|
|
1
|
+
/** True when every element of `arr` strictly equals the first (vacuously true for an empty array). */
const allEqual = arr => {
  for (const value of arr) {
    if (value !== arr[0]) {
      return false;
    }
  }
  return true;
};
|
|
2
|
+
/** Appends `n` copies of `val` to `arr` in place. */
const pushN = (arr, val, n) => {
  let remaining = n;
  while (remaining > 0) {
    arr.push(val);
    remaining -= 1;
  }
};
|
|
7
|
+
/**
 * Maps a signed prefetch direction (±T, ±Z, ±Y, ±X encoded as 0..7, sign in
 * the low bit) to its axis index in TCZYX order.
 */
const directionToIndex = dir => {
  const tzyxAxis = dir >> 1; // drop the sign bit to get the axis in TZYX order
  // T stays at index 0; Z/Y/X shift up by one to skip the C axis (index 1) in TCZYX.
  return tzyxAxis === 0 ? 0 : tzyxAxis + 1;
};
|
|
11
|
+
/**
 * Widens the `[min, max]` pair in place so that it includes `val`.
 * (Comparison-based so a NaN `val` leaves the pair untouched.)
 */
function updateMinMax(val, minmax) {
  const [lo, hi] = minmax;
  if (val < lo) {
    minmax[0] = val;
  }
  if (val > hi) {
    minmax[1] = val;
  }
}
|
|
19
|
+
|
|
20
|
+
/**
 * Since the user is most likely to want nearby data (in space or time) first, we should prefetch those chunks first.
 *
 * Given a list of just-loaded chunks and some bounds, `ChunkPrefetchIterator` iterates evenly outwards in T/Z/Y/X.
 */
// NOTE: Assumes `chunks` form a rectangular prism! Will create gaps otherwise! (in practice they always should)
export default class ChunkPrefetchIterator {
  /**
   * @param chunks - TCZYX chunk coordinates that were just fetched; prefetching radiates
   *   outward from the bounding box of this set.
   * @param tzyxMaxPrefetchOffset - per-dimension (TZYX) cap on how many chunks beyond the
   *   fetched set to prefetch in each direction.
   * @param tczyxChunksPerSource - per-source chunk counts in TCZYX order; index 1 (C) is
   *   read as the source's channel count when expanding per-source ends to per-channel ends.
   * @param priorityDirections - optional array of direction codes (see loop below) whose
   *   chunks are yielded before all other directions.
   * @param onlyPriorityDirections - when true, directions not listed in `priorityDirections`
   *   are dropped entirely instead of being yielded last.
   */
  constructor(chunks, tzyxMaxPrefetchOffset, tczyxChunksPerSource, priorityDirections, onlyPriorityDirections = false) {
    // Get min and max chunk coordinates for T/Z/Y/X
    const extrema = [[Infinity, -Infinity], [Infinity, -Infinity], [Infinity, -Infinity], [Infinity, -Infinity]];
    for (const chunk of chunks) {
      // chunk is TCZYX; index 1 (C) is skipped — channel is not a spatial/temporal direction
      updateMinMax(chunk[0], extrema[0]);
      updateMinMax(chunk[2], extrema[1]);
      updateMinMax(chunk[3], extrema[2]);
      updateMinMax(chunk[4], extrema[3]);
    }

    // Create `PrefetchDirectionState`s for each direction
    this.directionStates = [];
    this.priorityDirectionStates = [];

    // iterating like this: `direction` is the index in the flattened extrema entries.
    // Since extrema is TZYX-ordered and each entry is [min, max], the codes run
    // -T, +T, -Z, +Z, -Y, +Y, -X, +X in order: even codes are the negative (min)
    // direction, odd codes (direction & 1) the positive (max) direction.
    for (const [direction, start] of extrema.flat().entries()) {
      const dimension = direction >> 1; // shave off sign bit to get index in TZYX
      const tczyxIndex = dimension + Number(dimension !== 0); // convert TZYX -> TCZYX by skipping c (index 1)
      let end;
      if (direction & 1) {
        // Positive direction - end is either the max coordinate in the fetched set plus the max offset in this
        // dimension, or the max chunk coordinate in this dimension, whichever comes first
        const endsPerSource = tczyxChunksPerSource.map(chunkDims => {
          return Math.min(start + tzyxMaxPrefetchOffset[dimension], chunkDims[tczyxIndex] - 1);
        });

        // Save some time: if all sources have the same end, we can just store that
        if (allEqual(endsPerSource)) {
          end = endsPerSource[0];
        } else {
          // Otherwise, expand our ends per source array to ends per channel
          // (each source contributes tczyxChunksPerSource[i][1] channels), so
          // `end` can later be indexed by absolute channel: end[chunk[1]].
          end = [];
          for (const [i, sourceEnd] of endsPerSource.entries()) {
            pushN(end, sourceEnd, tczyxChunksPerSource[i][1]);
          }
        }
        // end = Math.min(start + tzyxMaxPrefetchOffset[dimension], tczyxChunksPerDimension[dimension] - 1);
      } else {
        // Negative direction - end is either the min coordinate in the fetched set minus the max offset in this
        // dimension, or 0, whichever comes first
        end = Math.max(start - tzyxMaxPrefetchOffset[dimension], 0);
      }
      // `end` is a scalar bound, or (positive directions only) a per-channel array of bounds.
      const directionState = {
        direction,
        start,
        end,
        chunks: []
      };
      if (priorityDirections && priorityDirections.includes(direction)) {
        this.priorityDirectionStates.push(directionState);
      } else {
        // we have an option setting that can let us ignore non-priority directions
        if (!onlyPriorityDirections) {
          this.directionStates.push(directionState);
        }
      }
    }

    // Fill each `PrefetchDirectionState` with chunks at the border of the fetched set
    // (chunks whose coordinate along the direction's axis equals the direction's start extremum)
    for (const chunk of chunks) {
      for (const dir of this.directionStates) {
        if (chunk[directionToIndex(dir.direction)] === dir.start) {
          dir.chunks.push(chunk);
        }
      }
      for (const dir of this.priorityDirectionStates) {
        if (chunk[directionToIndex(dir.direction)] === dir.start) {
          dir.chunks.push(chunk);
        }
      }
    }
  }

  /**
   * Yields prefetch chunk coordinates for the given directions, stepping every remaining
   * direction outward together one offset at a time (breadth-first), until each direction
   * has passed its end bound and is filtered out.
   */
  static *iterateDirections(directions) {
    let offset = 1;
    while (directions.length > 0) {
      // Remove directions in which we have reached the end (or, if per-channel ends, the end for all channels)
      directions = directions.filter(dir => {
        const end = Array.isArray(dir.end) ? Math.max(...dir.end) : dir.end;
        if (dir.direction & 1) {
          return dir.start + offset <= end;
        } else {
          return dir.start - offset >= end;
        }
      });

      // Yield chunks one chunk farther out in every remaining direction
      for (const dir of directions) {
        // Odd direction codes step positive along the axis, even codes step negative
        const offsetDir = offset * (dir.direction & 1 ? 1 : -1);
        for (const chunk of dir.chunks) {
          // Skip this chunk if this channel has a specific per-channel end and we've reached it
          if (Array.isArray(dir.end) && chunk[directionToIndex(dir.direction)] + offsetDir > dir.end[chunk[1]]) {
            continue;
          }
          // Copy the border chunk and shift it `offset` steps along the direction's axis
          const newChunk = chunk.slice();
          newChunk[directionToIndex(dir.direction)] += offsetDir;
          yield newChunk;
        }
      }
      offset += 1;
    }
  }

  /** Iterate all prefetch chunks: priority directions are fully exhausted first, then the rest. */
  *[Symbol.iterator]() {
    // Yield all chunks in priority direction(s) first, if any
    if (this.priorityDirectionStates.length > 0) {
      for (const chunk of ChunkPrefetchIterator.iterateDirections(this.priorityDirectionStates)) {
        yield chunk;
      }
    }

    // Then yield all chunks in other directions
    for (const chunk of ChunkPrefetchIterator.iterateDirections(this.directionStates)) {
      yield chunk;
    }
  }
}
|