@fideus-labs/ngff-zarr 0.2.0 → 0.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/esm/browser-mod.d.ts +14 -0
- package/esm/browser-mod.d.ts.map +1 -0
- package/esm/browser-mod.js +23 -0
- package/esm/methods/itkwasm-browser.d.ts +6 -0
- package/esm/methods/itkwasm-browser.d.ts.map +1 -0
- package/esm/methods/itkwasm-browser.js +462 -0
- package/esm/methods/itkwasm-node.d.ts +6 -0
- package/esm/methods/itkwasm-node.d.ts.map +1 -0
- package/esm/methods/itkwasm-node.js +462 -0
- package/esm/methods/itkwasm-shared.d.ts +68 -0
- package/esm/methods/itkwasm-shared.d.ts.map +1 -0
- package/esm/methods/itkwasm-shared.js +489 -0
- package/esm/methods/itkwasm.d.ts +11 -3
- package/esm/methods/itkwasm.d.ts.map +1 -1
- package/esm/methods/itkwasm.js +11 -952
- package/package.json +27 -3
- package/script/browser-mod.d.ts +14 -0
- package/script/browser-mod.d.ts.map +1 -0
- package/script/browser-mod.js +48 -0
- package/script/methods/itkwasm-browser.d.ts +6 -0
- package/script/methods/itkwasm-browser.d.ts.map +1 -0
- package/script/methods/itkwasm-browser.js +488 -0
- package/script/methods/itkwasm-node.d.ts +6 -0
- package/script/methods/itkwasm-node.d.ts.map +1 -0
- package/script/methods/itkwasm-node.js +488 -0
- package/script/methods/itkwasm-shared.d.ts +68 -0
- package/script/methods/itkwasm-shared.d.ts.map +1 -0
- package/script/methods/itkwasm-shared.js +524 -0
- package/script/methods/itkwasm.d.ts +11 -3
- package/script/methods/itkwasm.d.ts.map +1 -1
- package/script/methods/itkwasm.js +14 -977
package/esm/methods/itkwasm.js
CHANGED
@@ -1,958 +1,17 @@
 // SPDX-FileCopyrightText: Copyright (c) Fideus Labs LLC
 // SPDX-License-Identifier: MIT
-import { downsampleBinShrinkNode as downsampleBinShrink, downsampleLabelImageNode as downsampleLabelImage, downsampleNode as downsample, } from "@itk-wasm/downsample";
-import * as zarr from "zarrita";
-import { NgffImage } from "../types/ngff_image.js";
-const SPATIAL_DIMS = ["x", "y", "z"];
 /**
- *
- * This ensures exact target sizes when downsampling incrementally.
- */
-function calculateIncrementalFactor(previousSize, targetSize) {
-    if (targetSize <= 0) {
-        return 1;
-    }
-    // Start with the theoretical factor
-    let factor = Math.floor(Math.ceil(previousSize / (targetSize + 0.5)));
-    // Verify this gives us the right size
-    let actualSize = Math.floor(previousSize / factor);
-    if (actualSize !== targetSize) {
-        // Adjust factor to get exact target
-        factor = Math.max(1, Math.floor(previousSize / targetSize));
-        actualSize = Math.floor(previousSize / factor);
-        // If still not exact, try ceil
-        if (actualSize !== targetSize) {
-            factor = Math.max(1, Math.ceil(previousSize / targetSize));
-        }
-    }
-    return Math.max(1, factor);
-}
-/**
- * Convert dimension scale factors to ITK-Wasm format
- * This computes the incremental scale factor relative to the previous scale,
- * not the absolute scale factor from the original image.
+ * ITK-Wasm downsampling support for multiscale generation
  *
- *
- *
- *
- */
-function dimScaleFactors(dims, scaleFactor, previousDimFactors, originalImage, previousImage) {
-    const dimFactors = {};
-    if (typeof scaleFactor === "number") {
-        if (originalImage !== undefined && previousImage !== undefined) {
-            // Calculate target size: floor(original_size / scale_factor)
-            // Then calculate incremental factor from previous size to target size
-            for (const dim of dims) {
-                if (SPATIAL_DIMS.includes(dim)) {
-                    const dimIndex = originalImage.dims.indexOf(dim);
-                    const originalSize = originalImage.data.shape[dimIndex];
-                    const targetSize = Math.floor(originalSize / scaleFactor);
-                    const prevDimIndex = previousImage.dims.indexOf(dim);
-                    const previousSize = previousImage.data.shape[prevDimIndex];
-                    dimFactors[dim] = calculateIncrementalFactor(previousSize, targetSize);
-                }
-                else {
-                    dimFactors[dim] = 1;
-                }
-            }
-        }
-        else {
-            // Fallback to old behavior when images not provided
-            for (const dim of dims) {
-                if (SPATIAL_DIMS.includes(dim)) {
-                    // Divide by previous factor to get incremental scaling
-                    // Use Math.floor to truncate (matching Python's int() behavior)
-                    const incrementalFactor = scaleFactor /
-                        (previousDimFactors[dim] || 1);
-                    dimFactors[dim] = Math.max(1, Math.floor(incrementalFactor));
-                }
-                else {
-                    dimFactors[dim] = previousDimFactors[dim] || 1;
-                }
-            }
-        }
-    }
-    else {
-        if (originalImage !== undefined && previousImage !== undefined) {
-            for (const dim in scaleFactor) {
-                const dimIndex = originalImage.dims.indexOf(dim);
-                const originalSize = originalImage.data.shape[dimIndex];
-                const targetSize = Math.floor(originalSize / scaleFactor[dim]);
-                const prevDimIndex = previousImage.dims.indexOf(dim);
-                const previousSize = previousImage.data.shape[prevDimIndex];
-                dimFactors[dim] = calculateIncrementalFactor(previousSize, targetSize);
-            }
-        }
-        else {
-            // Fallback to old behavior when images not provided
-            for (const dim in scaleFactor) {
-                // Divide by previous factor to get incremental scaling
-                // Use Math.floor to truncate (matching Python's int() behavior)
-                const incrementalFactor = scaleFactor[dim] /
-                    (previousDimFactors[dim] || 1);
-                dimFactors[dim] = Math.max(1, Math.floor(incrementalFactor));
-            }
-        }
-        // Add dims not in scale_factor with factor of 1
-        for (const dim of dims) {
-            if (!(dim in dimFactors)) {
-                dimFactors[dim] = 1;
-            }
-        }
-    }
-    return dimFactors;
-}
-/**
- * Update previous dimension factors
- */
-function updatePreviousDimFactors(scaleFactor, spatialDims, previousDimFactors) {
-    const updated = { ...previousDimFactors };
-    if (typeof scaleFactor === "number") {
-        for (const dim of spatialDims) {
-            updated[dim] = scaleFactor;
-        }
-    }
-    else {
-        for (const dim in scaleFactor) {
-            updated[dim] = scaleFactor[dim];
-        }
-    }
-    return updated;
-}
-/**
- * Compute next scale metadata
- */
-function nextScaleMetadata(image, dimFactors, spatialDims) {
-    const translation = {};
-    const scale = {};
-    for (const dim of image.dims) {
-        if (spatialDims.includes(dim)) {
-            const factor = dimFactors[dim];
-            scale[dim] = image.scale[dim] * factor;
-            // Add offset to account for pixel center shift when downsampling
-            translation[dim] = image.translation[dim] +
-                0.5 * (factor - 1) * image.scale[dim];
-        }
-        else {
-            scale[dim] = image.scale[dim];
-            translation[dim] = image.translation[dim];
-        }
-    }
-    return [translation, scale];
-}
-/**
- * Convert zarr array to ITK-Wasm Image format
- * If isVector is true, ensures "c" dimension is last by transposing if needed
- */
-async function zarrToItkImage(array, dims, isVector = false) {
-    // Read the full array data
-    const result = await zarr.get(array);
-    // Ensure we have the data
-    if (!result.data || result.data.length === 0) {
-        throw new Error("Zarr array data is empty");
-    }
-    let data;
-    let shape = result.shape;
-    let _finalDims = dims;
-    // If vector image, ensure "c" is last dimension
-    if (isVector) {
-        const cIndex = dims.indexOf("c");
-        if (cIndex !== -1 && cIndex !== dims.length - 1) {
-            // Need to transpose to move "c" to the end
-            const permutation = dims.map((_, i) => i).filter((i) => i !== cIndex);
-            permutation.push(cIndex);
-            // Reorder dims
-            _finalDims = permutation.map((i) => dims[i]);
-            // Reorder shape
-            shape = permutation.map((i) => result.shape[i]);
-            // Transpose the data
-            data = transposeArray(result.data, result.shape, permutation, getItkComponentType(result.data));
-        }
-        else {
-            // "c" already at end or not present, just copy data
-            data = copyTypedArray(result.data);
-        }
-    }
-    else {
-        // Not a vector image, just copy data
-        data = copyTypedArray(result.data);
-    }
-    // For vector images, the last dimension is the component count, not a spatial dimension
-    const spatialShape = isVector ? shape.slice(0, -1) : shape;
-    const components = isVector ? shape[shape.length - 1] : 1;
-    // ITK expects size in physical space order [x, y, z], but spatialShape is in array order [z, y, x]
-    // So we need to reverse it
-    const itkSize = [...spatialShape].reverse();
-    // Create ITK-Wasm image
-    const itkImage = {
-        imageType: {
-            dimension: spatialShape.length,
-            componentType: getItkComponentType(data),
-            pixelType: isVector ? "VariableLengthVector" : "Scalar",
-            components,
-        },
-        name: "image",
-        origin: spatialShape.map(() => 0),
-        spacing: spatialShape.map(() => 1),
-        direction: createIdentityMatrix(spatialShape.length),
-        size: itkSize,
-        data: data,
-        metadata: new Map(),
-    };
-    return itkImage;
-}
-/**
- * Copy typed array to appropriate type
- */
-function copyTypedArray(data) {
-    if (data instanceof Float32Array) {
-        return new Float32Array(data);
-    }
-    else if (data instanceof Float64Array) {
-        return new Float64Array(data);
-    }
-    else if (data instanceof Uint8Array) {
-        return new Uint8Array(data);
-    }
-    else if (data instanceof Int8Array) {
-        return new Int8Array(data);
-    }
-    else if (data instanceof Uint16Array) {
-        return new Uint16Array(data);
-    }
-    else if (data instanceof Int16Array) {
-        return new Int16Array(data);
-    }
-    else if (data instanceof Uint32Array) {
-        return new Uint32Array(data);
-    }
-    else if (data instanceof Int32Array) {
-        return new Int32Array(data);
-    }
-    else {
-        // Convert to Float32Array as fallback
-        return new Float32Array(data);
-    }
-}
-/**
- * Transpose array data according to permutation
- */
-function transposeArray(data, shape, permutation, componentType) {
-    const typedData = data;
-    // Create output array of same type
-    let output;
-    const totalSize = typedData.length;
-    switch (componentType) {
-        case "uint8":
-            output = new Uint8Array(totalSize);
-            break;
-        case "int8":
-            output = new Int8Array(totalSize);
-            break;
-        case "uint16":
-            output = new Uint16Array(totalSize);
-            break;
-        case "int16":
-            output = new Int16Array(totalSize);
-            break;
-        case "uint32":
-            output = new Uint32Array(totalSize);
-            break;
-        case "int32":
-            output = new Int32Array(totalSize);
-            break;
-        case "float64":
-            output = new Float64Array(totalSize);
-            break;
-        case "float32":
-        default:
-            output = new Float32Array(totalSize);
-            break;
-    }
-    // Calculate strides for source
-    const sourceStride = calculateStride(shape);
-    // Calculate new shape after permutation
-    const newShape = permutation.map((i) => shape[i]);
-    const targetStride = calculateStride(newShape);
-    // Perform transpose
-    const indices = new Array(shape.length).fill(0);
-    for (let i = 0; i < totalSize; i++) {
-        // Calculate source index from multi-dimensional indices
-        let sourceIdx = 0;
-        for (let j = 0; j < shape.length; j++) {
-            sourceIdx += indices[j] * sourceStride[j];
-        }
-        // Calculate target index with permuted dimensions
-        let targetIdx = 0;
-        for (let j = 0; j < permutation.length; j++) {
-            targetIdx += indices[permutation[j]] * targetStride[j];
-        }
-        output[targetIdx] = typedData[sourceIdx];
-        // Increment indices
-        for (let j = shape.length - 1; j >= 0; j--) {
-            indices[j]++;
-            if (indices[j] < shape[j])
-                break;
-            indices[j] = 0;
-        }
-    }
-    return output;
-}
-/**
- * Get ITK component type from typed array
- */
-function getItkComponentType(data) {
-    if (data instanceof Uint8Array)
-        return "uint8";
-    if (data instanceof Int8Array)
-        return "int8";
-    if (data instanceof Uint16Array)
-        return "uint16";
-    if (data instanceof Int16Array)
-        return "int16";
-    if (data instanceof Uint32Array)
-        return "uint32";
-    if (data instanceof Int32Array)
-        return "int32";
-    if (data instanceof Float64Array)
-        return "float64";
-    return "float32";
-}
-/**
- * Create identity matrix for ITK direction
- */
-function createIdentityMatrix(dimension) {
-    const matrix = new Float64Array(dimension * dimension);
-    for (let i = 0; i < dimension * dimension; i++) {
-        matrix[i] = i % (dimension + 1) === 0 ? 1 : 0;
-    }
-    return matrix;
-}
-/**
- * Convert ITK-Wasm Image back to zarr array
- * Uses the provided store instead of creating a new one
- *
- * Important: ITK-Wasm stores size in physical space order [x, y, z], but data in
- * column-major order (x contiguous). This column-major layout with size [x, y, z]
- * is equivalent to C-order (row-major) with shape [z, y, x]. We reverse the size
- * to get the zarr shape and use C-order strides for that reversed shape.
+ * This module provides conditional exports for browser and Node environments.
+ * The actual implementation is delegated to environment-specific modules:
+ * - itkwasm-browser.ts: Uses WebWorker-based functions for browser environments
+ * - itkwasm-node.ts: Uses native WASM functions for Node/Deno environments
  *
- *
- *
- *
- * @param chunkShape - The chunk shape (in spatial dimension order, will be adjusted for components)
- * @param targetDims - The target dimension order (e.g., ["c", "z", "y", "x"])
- */
-async function itkImageToZarr(itkImage, store, path, chunkShape, targetDims) {
-    const root = zarr.root(store);
-    if (!itkImage.data) {
-        throw new Error("ITK image data is null or undefined");
-    }
-    // Determine data type - support all ITK TypedArray types
-    let dataType;
-    if (itkImage.data instanceof Uint8Array) {
-        dataType = "uint8";
-    }
-    else if (itkImage.data instanceof Int8Array) {
-        dataType = "int8";
-    }
-    else if (itkImage.data instanceof Uint16Array) {
-        dataType = "uint16";
-    }
-    else if (itkImage.data instanceof Int16Array) {
-        dataType = "int16";
-    }
-    else if (itkImage.data instanceof Uint32Array) {
-        dataType = "uint32";
-    }
-    else if (itkImage.data instanceof Int32Array) {
-        dataType = "int32";
-    }
-    else if (itkImage.data instanceof Float32Array) {
-        dataType = "float32";
-    }
-    else if (itkImage.data instanceof Float64Array) {
-        dataType = "float64";
-    }
-    else {
-        throw new Error(`Unsupported data type: ${itkImage.data.constructor.name}`);
-    }
-    // ITK stores size/spacing/origin in physical space order [x, y, z],
-    // but the data buffer is in C-order (row-major) which means [z, y, x] indexing.
-    // We need to reverse the size to match the data layout, just like we do for spacing/origin.
-    const shape = [...itkImage.size].reverse();
-    // For vector images, the components are stored in the data but not in the size
-    // The actual data length includes components
-    const components = itkImage.imageType.components || 1;
-    const isVector = components > 1;
-    // Validate data length matches expected shape (including components for vector images)
-    const spatialElements = shape.reduce((a, b) => a * b, 1);
-    const expectedLength = spatialElements * components;
-    if (itkImage.data.length !== expectedLength) {
-        console.error(`[ERROR] Data length mismatch in itkImageToZarr:`);
-        console.error(` ITK image size (physical order):`, itkImage.size);
-        console.error(` Shape (reversed):`, shape);
-        console.error(` Components:`, components);
-        console.error(` Expected data length:`, expectedLength);
-        console.error(` Actual data length:`, itkImage.data.length);
-        throw new Error(`Data length (${itkImage.data.length}) doesn't match expected shape ${shape} with ${components} components (${expectedLength} elements)`);
-    }
-    // Determine the final shape and whether we need to transpose
-    // ITK image data has shape [...spatialDimsReversed, components] (with c at end)
-    // If targetDims is provided, we need to match that order
-    let zarrShape;
-    let zarrChunkShape;
-    let finalData = itkImage.data;
-    if (isVector && targetDims) {
-        // Find where "c" should be in targetDims
-        const cIndex = targetDims.indexOf("c");
-        if (cIndex === -1) {
-            throw new Error("Vector image but 'c' not found in targetDims");
-        }
-        // Current shape is [z, y, x, c] (spatial reversed + c at end)
-        // Target shape should match targetDims order
-        const currentShape = [...shape, components];
-        // Build target shape based on targetDims
-        zarrShape = new Array(targetDims.length);
-        const spatialDims = shape.slice(); // [z, y, x]
-        let spatialIdx = 0;
-        for (let i = 0; i < targetDims.length; i++) {
-            if (targetDims[i] === "c") {
-                zarrShape[i] = components;
-            }
-            else {
-                zarrShape[i] = spatialDims[spatialIdx++];
-            }
-        }
-        // If c is not at the end, we need to transpose
-        if (cIndex !== targetDims.length - 1) {
-            // Build permutation: where does each target dim come from in current shape?
-            const permutation = [];
-            spatialIdx = 0;
-            for (let i = 0; i < targetDims.length; i++) {
-                if (targetDims[i] === "c") {
-                    permutation.push(currentShape.length - 1); // c is at end of current
-                }
-                else {
-                    permutation.push(spatialIdx++);
-                }
-            }
-            // Transpose the data
-            finalData = transposeArray(itkImage.data, currentShape, permutation, getItkComponentType(itkImage.data));
-        }
-        // Chunk shape should match zarrShape
-        zarrChunkShape = new Array(zarrShape.length);
-        spatialIdx = 0;
-        for (let i = 0; i < targetDims.length; i++) {
-            if (targetDims[i] === "c") {
-                zarrChunkShape[i] = components;
-            }
-            else {
-                zarrChunkShape[i] = chunkShape[spatialIdx++];
-            }
-        }
-    }
-    else {
-        // No targetDims or not a vector - use default behavior
-        zarrShape = isVector ? [...shape, components] : shape;
-        zarrChunkShape = isVector ? [...chunkShape, components] : chunkShape;
-    }
-    // Chunk shape should match the dimensionality of zarrShape
-    if (zarrChunkShape.length !== zarrShape.length) {
-        throw new Error(`chunkShape length (${zarrChunkShape.length}) must match shape length (${zarrShape.length})`);
-    }
-    const array = await zarr.create(root.resolve(path), {
-        shape: zarrShape,
-        chunk_shape: zarrChunkShape,
-        data_type: dataType,
-        fill_value: 0,
-    });
-    // Write data - preserve the actual data type, don't cast to Float32Array
-    // Shape and stride should match the ITK image size order
-    // Use null for each dimension to select the entire array
-    const selection = zarrShape.map(() => null);
-    await zarr.set(array, selection, {
-        data: finalData,
-        shape: zarrShape,
-        stride: calculateStride(zarrShape),
-    });
-    return array;
-}
-/**
- * Calculate stride for array
- */
-function calculateStride(shape) {
-    const stride = new Array(shape.length);
-    stride[shape.length - 1] = 1;
-    for (let i = shape.length - 2; i >= 0; i--) {
-        stride[i] = stride[i + 1] * shape[i + 1];
-    }
-    return stride;
-}
-/**
- * Perform Gaussian downsampling using ITK-Wasm
- */
-async function downsampleGaussian(image, dimFactors, spatialDims) {
-    // Handle time dimension by processing each time slice independently
-    if (image.dims.includes("t")) {
-        const tDimIndex = image.dims.indexOf("t");
-        const tSize = image.data.shape[tDimIndex];
-        const newDims = image.dims.filter((dim) => dim !== "t");
-        // Downsample each time slice
-        const downsampledSlices = [];
-        for (let t = 0; t < tSize; t++) {
-            // Extract time slice
-            const selection = new Array(image.data.shape.length).fill(null);
-            selection[tDimIndex] = t;
-            const sliceData = await zarr.get(image.data, selection);
-            // Create temporary zarr array for this slice
-            const sliceStore = new Map();
-            const sliceRoot = zarr.root(sliceStore);
-            const sliceShape = image.data.shape.filter((_, i) => i !== tDimIndex);
-            const sliceChunkShape = sliceShape.map((s) => Math.min(s, 256));
-            const sliceArray = await zarr.create(sliceRoot.resolve("slice"), {
-                shape: sliceShape,
-                chunk_shape: sliceChunkShape,
-                data_type: image.data.dtype,
-                fill_value: 0,
-            });
-            const fullSelection = new Array(sliceShape.length).fill(null);
-            await zarr.set(sliceArray, fullSelection, sliceData);
-            // Create NgffImage for this slice (without 't' dimension)
-            const sliceImage = new NgffImage({
-                data: sliceArray,
-                dims: newDims,
-                scale: Object.fromEntries(Object.entries(image.scale).filter(([dim]) => dim !== "t")),
-                translation: Object.fromEntries(Object.entries(image.translation).filter(([dim]) => dim !== "t")),
-                name: image.name,
-                axesUnits: image.axesUnits
-                    ? Object.fromEntries(Object.entries(image.axesUnits).filter(([dim]) => dim !== "t"))
-                    : undefined,
-                computedCallbacks: image.computedCallbacks,
-            });
-            // Recursively downsample this slice (without 't', so no infinite loop)
-            const downsampledSlice = await downsampleGaussian(sliceImage, dimFactors, spatialDims);
-            downsampledSlices.push(downsampledSlice.data);
-        }
-        // Combine downsampled slices back into a single array with 't' dimension
-        const firstSlice = downsampledSlices[0];
-        const combinedShape = [...image.data.shape];
-        combinedShape[tDimIndex] = tSize;
-        // Update spatial dimensions based on downsampled size
-        for (let i = 0; i < image.dims.length; i++) {
-            if (i !== tDimIndex) {
-                const sliceIndex = i < tDimIndex ? i : i - 1;
-                combinedShape[i] = firstSlice.shape[sliceIndex];
-            }
-        }
-        // Create combined array
-        const combinedStore = new Map();
-        const combinedRoot = zarr.root(combinedStore);
-        const combinedArray = await zarr.create(combinedRoot.resolve("combined"), {
-            shape: combinedShape,
-            chunk_shape: combinedShape.map((s) => Math.min(s, 256)),
-            data_type: image.data.dtype,
-            fill_value: 0,
-        });
-        // Copy each downsampled slice into the combined array
-        for (let t = 0; t < tSize; t++) {
-            const sliceData = await zarr.get(downsampledSlices[t]);
-            const targetSelection = new Array(combinedShape.length).fill(null);
-            targetSelection[tDimIndex] = t;
-            await zarr.set(combinedArray, targetSelection, sliceData);
-        }
-        // Compute new metadata (time dimension unchanged, spatial dimensions downsampled)
-        const [translation, scale] = nextScaleMetadata(image, dimFactors, spatialDims);
-        return new NgffImage({
-            data: combinedArray,
-            dims: image.dims,
-            scale: { ...image.scale, ...scale },
-            translation: { ...image.translation, ...translation },
-            name: image.name,
-            axesUnits: image.axesUnits,
-            computedCallbacks: image.computedCallbacks,
-        });
-    }
-    const isVector = image.dims.includes("c");
-    // Convert to ITK-Wasm format
-    const itkImage = await zarrToItkImage(image.data, image.dims, isVector);
-    // Prepare shrink factors - need to be for ALL dimensions in ITK order (reversed)
-    const shrinkFactors = [];
-    for (let i = image.dims.length - 1; i >= 0; i--) {
-        const dim = image.dims[i];
-        if (SPATIAL_DIMS.includes(dim)) {
-            shrinkFactors.push(dimFactors[dim] || 1);
-        }
-    }
-    // Use all zeros for cropRadius
-    const cropRadius = new Array(shrinkFactors.length).fill(0);
-    // Perform downsampling
-    const { downsampled } = await downsample(itkImage, {
-        shrinkFactors,
-        cropRadius: cropRadius,
-    });
-    // Compute new metadata
-    const [translation, scale] = nextScaleMetadata(image, dimFactors, spatialDims);
-    // Convert back to zarr array in a new in-memory store
-    // Each downsampled image gets its own store - toNgffZarr will handle copying to target
-    const store = new Map();
-    // Chunk shape needs to be in zarr order (reversed from ITK order)
-    const chunkShape = downsampled.size.map((s) => Math.min(s, 256)).reverse();
-    const array = await itkImageToZarr(downsampled, store, "image", chunkShape, image.dims);
-    return new NgffImage({
-        data: array,
-        dims: image.dims,
-        scale,
-        translation,
-        name: image.name,
-        axesUnits: image.axesUnits,
-        computedCallbacks: image.computedCallbacks,
-    });
-}
-/**
- * Perform bin shrink downsampling using ITK-Wasm
- */
-async function downsampleBinShrinkImpl(image, dimFactors, spatialDims) {
-    // Handle time dimension by processing each time slice independently
-    if (image.dims.includes("t")) {
-        const tDimIndex = image.dims.indexOf("t");
-        const tSize = image.data.shape[tDimIndex];
-        const newDims = image.dims.filter((dim) => dim !== "t");
-        // Downsample each time slice
-        const downsampledSlices = [];
-        for (let t = 0; t < tSize; t++) {
-            // Extract time slice
-            const selection = new Array(image.data.shape.length).fill(null);
-            selection[tDimIndex] = t;
-            const sliceData = await zarr.get(image.data, selection);
-            // Create temporary zarr array for this slice
-            const sliceStore = new Map();
-            const sliceRoot = zarr.root(sliceStore);
-            const sliceShape = image.data.shape.filter((_, i) => i !== tDimIndex);
-            const sliceChunkShape = sliceShape.map((s) => Math.min(s, 256));
-            const sliceArray = await zarr.create(sliceRoot.resolve("slice"), {
-                shape: sliceShape,
-                chunk_shape: sliceChunkShape,
-                data_type: image.data.dtype,
-                fill_value: 0,
-            });
-            const fullSelection = new Array(sliceShape.length).fill(null);
-            await zarr.set(sliceArray, fullSelection, sliceData);
-            // Create NgffImage for this slice (without 't' dimension)
-            const sliceImage = new NgffImage({
-                data: sliceArray,
-                dims: newDims,
-                scale: Object.fromEntries(Object.entries(image.scale).filter(([dim]) => dim !== "t")),
-                translation: Object.fromEntries(Object.entries(image.translation).filter(([dim]) => dim !== "t")),
-                name: image.name,
-                axesUnits: image.axesUnits
-                    ? Object.fromEntries(Object.entries(image.axesUnits).filter(([dim]) => dim !== "t"))
-                    : undefined,
-                computedCallbacks: image.computedCallbacks,
-            });
-            // Recursively downsample this slice
-            const downsampledSlice = await downsampleBinShrinkImpl(sliceImage, dimFactors, spatialDims);
-            downsampledSlices.push(downsampledSlice.data);
-        }
-        // Combine downsampled slices back into a single array with 't' dimension
-        const firstSlice = downsampledSlices[0];
-        const combinedShape = [...image.data.shape];
-        combinedShape[tDimIndex] = tSize;
-        // Update spatial dimensions based on downsampled size
-        for (let i = 0; i < image.dims.length; i++) {
-            if (i !== tDimIndex) {
-                const sliceIndex = i < tDimIndex ? i : i - 1;
-                combinedShape[i] = firstSlice.shape[sliceIndex];
-            }
-        }
-        // Create combined array
-        const combinedStore = new Map();
-        const combinedRoot = zarr.root(combinedStore);
-        const combinedArray = await zarr.create(combinedRoot.resolve("combined"), {
-            shape: combinedShape,
-            chunk_shape: combinedShape.map((s) => Math.min(s, 256)),
-            data_type: image.data.dtype,
-            fill_value: 0,
-        });
-        // Copy each downsampled slice into the combined array
-        for (let t = 0; t < tSize; t++) {
-            const sliceData = await zarr.get(downsampledSlices[t]);
-            const targetSelection = new Array(combinedShape.length).fill(null);
-            targetSelection[tDimIndex] = t;
-            await zarr.set(combinedArray, targetSelection, sliceData);
-        }
-        // Compute new metadata
-        const [translation, scale] = nextScaleMetadata(image, dimFactors, spatialDims);
-        return new NgffImage({
-            data: combinedArray,
-            dims: image.dims,
-            scale: { ...image.scale, ...scale },
-            translation: { ...image.translation, ...translation },
-            name: image.name,
-            axesUnits: image.axesUnits,
-            computedCallbacks: image.computedCallbacks,
-        });
-    }
-    const isVector = image.dims.includes("c");
-    // Convert to ITK-Wasm format
-    const itkImage = await zarrToItkImage(image.data, image.dims, isVector);
-    // Prepare shrink factors - only for spatial dimensions in ITK order (reversed)
-    // ITK bin shrink does not expect shrink factors for non-spatial dimensions like 'c'
-    const shrinkFactors = [];
-    for (let i = image.dims.length - 1; i >= 0; i--) {
-        const dim = image.dims[i];
-        if (SPATIAL_DIMS.includes(dim)) {
-            shrinkFactors.push(dimFactors[dim] || 1);
-        }
-    }
-    // Perform downsampling
-    const { downsampled } = await downsampleBinShrink(itkImage, {
-        shrinkFactors,
-    });
-    // Compute new metadata
-    const [translation, scale] = nextScaleMetadata(image, dimFactors, spatialDims);
-    // Convert back to zarr array in a new in-memory store
-    // Each downsampled image gets its own store - toNgffZarr will handle copying to target
-    const store = new Map();
-    // Chunk shape needs to be in zarr order (reversed from ITK order)
-    const chunkShape = downsampled.size.map((s) => Math.min(s, 256)).reverse();
-    const array = await itkImageToZarr(downsampled, store, "image", chunkShape, image.dims);
-    return new NgffImage({
-        data: array,
-        dims: image.dims,
-        scale,
-        translation,
-        name: image.name,
-        axesUnits: image.axesUnits,
-        computedCallbacks: image.computedCallbacks,
-    });
-}
-/**
- * Perform label image downsampling using ITK-Wasm
- */
-async function downsampleLabelImageImpl(image, dimFactors, spatialDims) {
-    // Handle time dimension by processing each time slice independently
-    if (image.dims.includes("t")) {
-        const tDimIndex = image.dims.indexOf("t");
-        const tSize = image.data.shape[tDimIndex];
-        const newDims = image.dims.filter((dim) => dim !== "t");
-        // Downsample each time slice
-        const downsampledSlices = [];
-        for (let t = 0; t < tSize; t++) {
-            // Extract time slice
-            const selection = new Array(image.data.shape.length).fill(null);
-            selection[tDimIndex] = t;
-            const sliceData = await zarr.get(image.data, selection);
-            // Create temporary zarr array for this slice
-            const sliceStore = new Map();
-            const sliceRoot = zarr.root(sliceStore);
-            const sliceShape = image.data.shape.filter((_, i) => i !== tDimIndex);
-            const sliceChunkShape = sliceShape.map((s) => Math.min(s, 256));
-            const sliceArray = await zarr.create(sliceRoot.resolve("slice"), {
-                shape: sliceShape,
-                chunk_shape: sliceChunkShape,
-                data_type: image.data.dtype,
-                fill_value: 0,
-            });
-            const fullSelection = new Array(sliceShape.length).fill(null);
-            await zarr.set(sliceArray, fullSelection, sliceData);
-            // Create NgffImage for this slice (without 't' dimension)
-            const sliceImage = new NgffImage({
-                data: sliceArray,
-                dims: newDims,
-                scale: Object.fromEntries(Object.entries(image.scale).filter(([dim]) => dim !== "t")),
-                translation: Object.fromEntries(Object.entries(image.translation).filter(([dim]) => dim !== "t")),
-                name: image.name,
-                axesUnits: image.axesUnits
-                    ? Object.fromEntries(Object.entries(image.axesUnits).filter(([dim]) => dim !== "t"))
-                    : undefined,
-                computedCallbacks: image.computedCallbacks,
-            });
-            // Recursively downsample this slice
-            const downsampledSlice = await downsampleLabelImageImpl(sliceImage, dimFactors, spatialDims);
-            downsampledSlices.push(downsampledSlice.data);
-        }
-        // Combine downsampled slices back into a single array with 't' dimension
-        const firstSlice = downsampledSlices[0];
-        const combinedShape = [...image.data.shape];
-        combinedShape[tDimIndex] = tSize;
-        // Update spatial dimensions based on downsampled size
-        for (let i = 0; i < image.dims.length; i++) {
-            if (i !== tDimIndex) {
-                const sliceIndex = i < tDimIndex ? i : i - 1;
-                combinedShape[i] = firstSlice.shape[sliceIndex];
-            }
-        }
-        // Create combined array
-        const combinedStore = new Map();
-        const combinedRoot = zarr.root(combinedStore);
-        const combinedArray = await zarr.create(combinedRoot.resolve("combined"), {
-            shape: combinedShape,
-            chunk_shape: combinedShape.map((s) => Math.min(s, 256)),
-            data_type: image.data.dtype,
-            fill_value: 0,
-        });
-        // Copy each downsampled slice into the combined array
-        for (let t = 0; t < tSize; t++) {
-            const sliceData = await zarr.get(downsampledSlices[t]);
-            const targetSelection = new Array(combinedShape.length).fill(null);
-            targetSelection[tDimIndex] = t;
-            await zarr.set(combinedArray, targetSelection, sliceData);
-        }
-        // Compute new metadata
-        const [translation, scale] = nextScaleMetadata(image, dimFactors, spatialDims);
-        return new NgffImage({
-            data: combinedArray,
-            dims: image.dims,
-            scale: { ...image.scale, ...scale },
-            translation: { ...image.translation, ...translation },
-            name: image.name,
-            axesUnits: image.axesUnits,
-            computedCallbacks: image.computedCallbacks,
-        });
-    }
-    const isVector = image.dims.includes("c");
-    // Convert to ITK-Wasm format
-    const itkImage = await zarrToItkImage(image.data, image.dims, isVector);
-    // Prepare shrink factors - need to be for ALL dimensions in ITK order (reversed)
-    const shrinkFactors = [];
-    for (let i = image.dims.length - 1; i >= 0; i--) {
-        const dim = image.dims[i];
-        if (SPATIAL_DIMS.includes(dim)) {
-            shrinkFactors.push(dimFactors[dim] || 1);
-        }
-        else {
-            shrinkFactors.push(1); // Non-spatial dimensions don't shrink
-        }
-    }
-    // Use all zeros for cropRadius
-    const cropRadius = new Array(shrinkFactors.length).fill(0);
-    // Perform downsampling
-    const { downsampled } = await downsampleLabelImage(itkImage, {
-        shrinkFactors,
-        cropRadius: cropRadius,
-    });
-    // Compute new metadata
-    const [translation, scale] = nextScaleMetadata(image, dimFactors, spatialDims);
-    // Convert back to zarr array in a new in-memory store
-    // Each downsampled image gets its own store - toNgffZarr will handle copying to target
-    const store = new Map();
-    // Chunk shape needs to be in zarr order (reversed from ITK order)
-    const chunkShape = downsampled.size.map((s) => Math.min(s, 256)).reverse();
-    const array = await itkImageToZarr(downsampled, store, "image", chunkShape, image.dims);
-    return new NgffImage({
-        data: array,
-        dims: image.dims,
-        scale,
-        translation,
-        name: image.name,
-        axesUnits: image.axesUnits,
-        computedCallbacks: image.computedCallbacks,
-    });
-}
-/**
- * Main downsampling function for ITK-Wasm
+ * For Deno runtime, we default to the node implementation.
+ * For browser bundlers, they should use conditional exports in package.json
+ * to resolve to the browser implementation.
  */
-
-
-
-    const spatialDims = dims.filter((dim) => SPATIAL_DIMS.includes(dim));
-    // Two strategies:
-    // 1. gaussian / label_image: hybrid absolute scale factors (each element is absolute from original)
-    //    using dimScaleFactors to choose incremental vs from-original for exact sizes.
-    // 2. bin_shrink: treat provided scaleFactors sequence as incremental factors applied successively.
-    let previousImage = ngffImage;
-    let previousDimFactors = {};
-    for (const dim of dims)
-        previousDimFactors[dim] = 1;
-    for (let i = 0; i < scaleFactors.length; i++) {
-        const scaleFactor = scaleFactors[i];
-        let sourceImage;
-        let sourceDimFactors;
-        if (smoothing === "bin_shrink") {
-            // Purely incremental: scaleFactor is the shrink for this step
-            sourceImage = previousImage; // always from previous
-            sourceDimFactors = {};
-            if (typeof scaleFactor === "number") {
-                for (const dim of spatialDims)
-                    sourceDimFactors[dim] = scaleFactor;
-            }
-            else {
-                for (const dim of spatialDims) {
-                    sourceDimFactors[dim] = scaleFactor[dim] || 1;
-                }
-            }
-            // Non-spatial dims factor 1
-            for (const dim of dims) {
-                if (!(dim in sourceDimFactors))
-                    sourceDimFactors[dim] = 1;
-            }
-        }
-        else {
-            // Hybrid absolute strategy
-            const dimFactors = dimScaleFactors(dims, scaleFactor, previousDimFactors, ngffImage, previousImage);
-            // Decide if we can be incremental
-            let canDownsampleIncrementally = true;
-            for (const dim of Object.keys(dimFactors)) {
-                const dimIndex = ngffImage.dims.indexOf(dim);
-                if (dimIndex >= 0) {
-                    const originalSize = ngffImage.data.shape[dimIndex];
-                    const targetSize = Math.floor(originalSize /
-                        (typeof scaleFactor === "number"
-                            ? scaleFactor
-                            : scaleFactor[dim]));
-                    const prevDimIndex = previousImage.dims.indexOf(dim);
-                    const previousSize = previousImage.data.shape[prevDimIndex];
-                    if (Math.floor(previousSize / dimFactors[dim]) !== targetSize) {
-                        canDownsampleIncrementally = false;
-                        break;
-                    }
-                }
-            }
-            if (canDownsampleIncrementally) {
-                sourceImage = previousImage;
-                sourceDimFactors = dimFactors;
-            }
-            else {
-                sourceImage = ngffImage;
-                const originalDimFactors = {};
-                for (const dim of dims)
-                    originalDimFactors[dim] = 1;
-                sourceDimFactors = dimScaleFactors(dims, scaleFactor, originalDimFactors);
-            }
-        }
-        let downsampled;
-        if (smoothing === "gaussian") {
-            downsampled = await downsampleGaussian(sourceImage, sourceDimFactors, spatialDims);
-        }
-        else if (smoothing === "bin_shrink") {
-            downsampled = await downsampleBinShrinkImpl(sourceImage, sourceDimFactors, spatialDims);
-        }
-        else if (smoothing === "label_image") {
-            downsampled = await downsampleLabelImageImpl(sourceImage, sourceDimFactors, spatialDims);
-        }
-        else {
-            throw new Error(`Unknown smoothing method: ${smoothing}`);
-        }
-        multiscales.push(downsampled);
-        // Update for next iteration
-        previousImage = downsampled;
-        if (smoothing === "bin_shrink") {
-            // Accumulate cumulative factors (multiply) for bin_shrink to reflect total shrink so far
-            if (typeof scaleFactor === "number") {
-                for (const dim of spatialDims) {
-                    previousDimFactors[dim] *= scaleFactor;
-                }
-            }
-            else {
-                for (const dim of spatialDims) {
-                    previousDimFactors[dim] *= scaleFactor[dim] || 1;
-                }
-            }
-        }
-        else {
-            previousDimFactors = updatePreviousDimFactors(scaleFactor, spatialDims, previousDimFactors);
-        }
-    }
-    return multiscales;
-}
+// Default to Node implementation for Deno and Node.js environments
+// Browser bundlers should use conditional exports to get itkwasm-browser.ts
+export { downsampleItkWasm } from "./itkwasm-node.js";
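The rewritten itkwasm.js above routes browser bundlers to a WebWorker-based build and Node/Deno to native WASM through conditional exports in package.json (its +27/-3 change is listed above but not expanded here). The contents of the new itkwasm-browser.js and itkwasm-node.js files are likewise not shown in this diff. The sketch below is only an illustration of the convention this pattern relies on, namely that ITK-Wasm packages export each pipeline both as a WebWorker-backed function and as a *Node variant; treat the file names and import choices as assumptions, not the package's actual source.

// Sketch, not the package's source. The removed itkwasm.js imported the
// Node-native variants:
//   import { downsampleNode as downsample, ... } from "@itk-wasm/downsample";
// After the split, itkwasm-node.js would keep those imports, while
// itkwasm-browser.js would use the WebWorker-backed counterparts that
// @itk-wasm/downsample exports under the unsuffixed names:
import {
    downsample,           // counterpart of downsampleNode
    downsampleBinShrink,  // counterpart of downsampleBinShrinkNode
    downsampleLabelImage, // counterpart of downsampleLabelImageNode
} from "@itk-wasm/downsample";

A "browser" condition in the package.json "exports" map, alongside "node" or "default" entries, is the standard mechanism that lets bundlers pick the browser entry point while Node and Deno resolve to the re-export of itkwasm-node.js shown at the end of the diff.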