@fideus-labs/ngff-zarr 0.1.0 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. package/README.md +1 -0
  2. package/esm/io/itk_image_to_ngff_image.d.ts +5 -0
  3. package/esm/io/itk_image_to_ngff_image.d.ts.map +1 -1
  4. package/esm/io/itk_image_to_ngff_image.js +20 -20
  5. package/esm/io/ngff_image_to_itk_image.d.ts.map +1 -1
  6. package/esm/io/ngff_image_to_itk_image.js +2 -0
  7. package/esm/io/to_multiscales.js +1 -1
  8. package/esm/io/to_ngff_zarr.js +16 -0
  9. package/esm/methods/itkwasm.d.ts.map +1 -1
  10. package/esm/methods/itkwasm.js +568 -426
  11. package/esm/schemas/coordinate_systems.d.ts +159 -552
  12. package/esm/schemas/coordinate_systems.d.ts.map +1 -1
  13. package/esm/schemas/coordinate_systems.js +0 -1
  14. package/esm/schemas/ome_zarr.d.ts +105 -69
  15. package/esm/schemas/ome_zarr.d.ts.map +1 -1
  16. package/esm/schemas/rfc4.d.ts +26 -131
  17. package/esm/schemas/rfc4.d.ts.map +1 -1
  18. package/esm/schemas/units.d.ts +70 -5
  19. package/esm/schemas/units.d.ts.map +1 -1
  20. package/esm/schemas/units.js +2 -15
  21. package/esm/schemas/zarr_metadata.d.ts +13 -300
  22. package/esm/schemas/zarr_metadata.d.ts.map +1 -1
  23. package/package.json +1 -1
  24. package/script/io/itk_image_to_ngff_image.d.ts +5 -0
  25. package/script/io/itk_image_to_ngff_image.d.ts.map +1 -1
  26. package/script/io/itk_image_to_ngff_image.js +20 -20
  27. package/script/io/ngff_image_to_itk_image.d.ts.map +1 -1
  28. package/script/io/ngff_image_to_itk_image.js +2 -0
  29. package/script/io/to_multiscales.js +1 -1
  30. package/script/io/to_ngff_zarr.js +16 -0
  31. package/script/methods/itkwasm.d.ts.map +1 -1
  32. package/script/methods/itkwasm.js +567 -425
  33. package/script/schemas/coordinate_systems.d.ts +159 -552
  34. package/script/schemas/coordinate_systems.d.ts.map +1 -1
  35. package/script/schemas/coordinate_systems.js +0 -1
  36. package/script/schemas/ome_zarr.d.ts +105 -69
  37. package/script/schemas/ome_zarr.d.ts.map +1 -1
  38. package/script/schemas/rfc4.d.ts +26 -131
  39. package/script/schemas/rfc4.d.ts.map +1 -1
  40. package/script/schemas/units.d.ts +70 -5
  41. package/script/schemas/units.d.ts.map +1 -1
  42. package/script/schemas/units.js +2 -15
  43. package/script/schemas/zarr_metadata.d.ts +13 -300
  44. package/script/schemas/zarr_metadata.d.ts.map +1 -1
@@ -1,31 +1,102 @@
1
1
  // SPDX-FileCopyrightText: Copyright (c) Fideus Labs LLC
2
2
  // SPDX-License-Identifier: MIT
3
- import { downsampleBinShrinkNode as downsampleBinShrink, downsampleLabelImageNode as downsampleLabelImage, downsampleNode as downsample, gaussianKernelRadiusNode as gaussianKernelRadius, } from "@itk-wasm/downsample";
3
+ import { downsampleBinShrinkNode as downsampleBinShrink, downsampleLabelImageNode as downsampleLabelImage, downsampleNode as downsample, } from "@itk-wasm/downsample";
4
4
  import * as zarr from "zarrita";
5
5
  import { NgffImage } from "../types/ngff_image.js";
6
6
  const SPATIAL_DIMS = ["x", "y", "z"];
7
/**
 * Calculate the incremental shrink factor needed to reach the target size
 * from the previous scale's size, so that incremental downsampling lands on
 * exact target sizes (e.g. exact 2x, 3x, 4x reductions of the original).
 *
 * @param {number} previousSize - Size of the dimension at the previous scale.
 * @param {number} targetSize - Desired size of the dimension at the next scale.
 * @returns {number} An integer shrink factor >= 1.
 */
function calculateIncrementalFactor(previousSize, targetSize) {
    // A non-positive target cannot be reached by shrinking; do not shrink.
    if (targetSize <= 0) {
        return 1;
    }
    // Start with the theoretical factor. Math.ceil already yields an integer,
    // so the redundant Math.floor(Math.ceil(...)) wrapper has been dropped.
    let factor = Math.ceil(previousSize / (targetSize + 0.5));
    // Verify this factor actually produces the requested size.
    let actualSize = Math.floor(previousSize / factor);
    if (actualSize !== targetSize) {
        // Adjust to hit the exact target: first try rounding the ratio down...
        factor = Math.max(1, Math.floor(previousSize / targetSize));
        actualSize = Math.floor(previousSize / factor);
        // ...and if flooring still misses, fall back to rounding it up.
        if (actualSize !== targetSize) {
            factor = Math.max(1, Math.ceil(previousSize / targetSize));
        }
    }
    return Math.max(1, factor);
}
7
30
  /**
8
31
  * Convert dimension scale factors to ITK-Wasm format
32
+ * This computes the incremental scale factor relative to the previous scale,
33
+ * not the absolute scale factor from the original image.
34
+ *
35
+ * When originalImage and previousImage are provided, calculates the exact
36
+ * incremental factor needed to reach the target size from the previous size.
37
+ * This ensures we get exact 1x, 2x, 3x, 4x sizes even with incremental downsampling.
9
38
  */
10
- function dimScaleFactors(dims, scaleFactor, previousDimFactors) {
39
+ function dimScaleFactors(dims, scaleFactor, previousDimFactors, originalImage, previousImage) {
11
40
  const dimFactors = {};
12
41
  if (typeof scaleFactor === "number") {
13
- for (const dim of dims) {
14
- if (SPATIAL_DIMS.includes(dim)) {
15
- dimFactors[dim] = scaleFactor;
42
+ if (originalImage !== undefined && previousImage !== undefined) {
43
+ // Calculate target size: floor(original_size / scale_factor)
44
+ // Then calculate incremental factor from previous size to target size
45
+ for (const dim of dims) {
46
+ if (SPATIAL_DIMS.includes(dim)) {
47
+ const dimIndex = originalImage.dims.indexOf(dim);
48
+ const originalSize = originalImage.data.shape[dimIndex];
49
+ const targetSize = Math.floor(originalSize / scaleFactor);
50
+ const prevDimIndex = previousImage.dims.indexOf(dim);
51
+ const previousSize = previousImage.data.shape[prevDimIndex];
52
+ dimFactors[dim] = calculateIncrementalFactor(previousSize, targetSize);
53
+ }
54
+ else {
55
+ dimFactors[dim] = 1;
56
+ }
16
57
  }
17
- else {
18
- dimFactors[dim] = previousDimFactors[dim] || 1;
58
+ }
59
+ else {
60
+ // Fallback to old behavior when images not provided
61
+ for (const dim of dims) {
62
+ if (SPATIAL_DIMS.includes(dim)) {
63
+ // Divide by previous factor to get incremental scaling
64
+ // Use Math.floor to truncate (matching Python's int() behavior)
65
+ const incrementalFactor = scaleFactor /
66
+ (previousDimFactors[dim] || 1);
67
+ dimFactors[dim] = Math.max(1, Math.floor(incrementalFactor));
68
+ }
69
+ else {
70
+ dimFactors[dim] = previousDimFactors[dim] || 1;
71
+ }
19
72
  }
20
73
  }
21
74
  }
22
75
  else {
23
- for (const dim of dims) {
24
- if (dim in scaleFactor) {
25
- dimFactors[dim] = scaleFactor[dim];
76
+ if (originalImage !== undefined && previousImage !== undefined) {
77
+ for (const dim in scaleFactor) {
78
+ const dimIndex = originalImage.dims.indexOf(dim);
79
+ const originalSize = originalImage.data.shape[dimIndex];
80
+ const targetSize = Math.floor(originalSize / scaleFactor[dim]);
81
+ const prevDimIndex = previousImage.dims.indexOf(dim);
82
+ const previousSize = previousImage.data.shape[prevDimIndex];
83
+ dimFactors[dim] = calculateIncrementalFactor(previousSize, targetSize);
26
84
  }
27
- else {
28
- dimFactors[dim] = previousDimFactors[dim] || 1;
85
+ }
86
+ else {
87
+ // Fallback to old behavior when images not provided
88
+ for (const dim in scaleFactor) {
89
+ // Divide by previous factor to get incremental scaling
90
+ // Use Math.floor to truncate (matching Python's int() behavior)
91
+ const incrementalFactor = scaleFactor[dim] /
92
+ (previousDimFactors[dim] || 1);
93
+ dimFactors[dim] = Math.max(1, Math.floor(incrementalFactor));
94
+ }
95
+ }
96
+ // Add dims not in scale_factor with factor of 1
97
+ for (const dim of dims) {
98
+ if (!(dim in dimFactors)) {
99
+ dimFactors[dim] = 1;
29
100
  }
30
101
  }
31
102
  }
@@ -42,10 +113,8 @@ function updatePreviousDimFactors(scaleFactor, spatialDims, previousDimFactors)
42
113
  }
43
114
  }
44
115
  else {
45
- for (const dim of spatialDims) {
46
- if (dim in scaleFactor) {
47
- updated[dim] = scaleFactor[dim];
48
- }
116
+ for (const dim in scaleFactor) {
117
+ updated[dim] = scaleFactor[dim];
49
118
  }
50
119
  }
51
120
  return updated;
@@ -60,38 +129,17 @@ function nextScaleMetadata(image, dimFactors, spatialDims) {
60
129
  if (spatialDims.includes(dim)) {
61
130
  const factor = dimFactors[dim];
62
131
  scale[dim] = image.scale[dim] * factor;
132
+ // Add offset to account for pixel center shift when downsampling
63
133
  translation[dim] = image.translation[dim] +
64
134
  0.5 * (factor - 1) * image.scale[dim];
65
135
  }
66
136
  else {
67
- // Only copy non-spatial dimensions if they exist in the source
68
- if (dim in image.scale) {
69
- scale[dim] = image.scale[dim];
70
- }
71
- if (dim in image.translation) {
72
- translation[dim] = image.translation[dim];
73
- }
137
+ scale[dim] = image.scale[dim];
138
+ translation[dim] = image.translation[dim];
74
139
  }
75
140
  }
76
141
  return [translation, scale];
77
142
  }
78
- /**
79
- * Compute Gaussian kernel sigma values in pixel units for downsampling.
80
- *
81
- * Formula: sigma = sqrt((k^2 - 1^2)/(2*sqrt(2*ln(2)))^2)
82
- *
83
- * Reference:
84
- * - https://discourse.itk.org/t/resampling-to-isotropic-signal-processing-theory/1403/16
85
- * - https://doi.org/10.1007/978-3-319-24571-3_81
86
- * - http://discovery.ucl.ac.uk/1469251/1/scale-factor-point-5.pdf
87
- *
88
- * @param shrinkFactors - Shrink ratio along each axis
89
- * @returns Standard deviation of Gaussian kernel along each axis
90
- */
91
- function computeSigma(shrinkFactors) {
92
- const denominator = Math.pow(2 * Math.sqrt(2 * Math.log(2)), 2);
93
- return shrinkFactors.map((factor) => Math.sqrt((factor * factor - 1) / denominator));
94
- }
95
143
  /**
96
144
  * Convert zarr array to ITK-Wasm Image format
97
145
  * If isVector is true, ensures "c" dimension is last by transposing if needed
@@ -132,6 +180,9 @@ async function zarrToItkImage(array, dims, isVector = false) {
132
180
  // For vector images, the last dimension is the component count, not a spatial dimension
133
181
  const spatialShape = isVector ? shape.slice(0, -1) : shape;
134
182
  const components = isVector ? shape[shape.length - 1] : 1;
183
+ // ITK expects size in physical space order [x, y, z], but spatialShape is in array order [z, y, x]
184
+ // So we need to reverse it
185
+ const itkSize = [...spatialShape].reverse();
135
186
  // Create ITK-Wasm image
136
187
  const itkImage = {
137
188
  imageType: {
@@ -144,8 +195,8 @@ async function zarrToItkImage(array, dims, isVector = false) {
144
195
  origin: spatialShape.map(() => 0),
145
196
  spacing: spatialShape.map(() => 1),
146
197
  direction: createIdentityMatrix(spatialShape.length),
147
- size: spatialShape,
148
- data,
198
+ size: itkSize,
199
+ data: data,
149
200
  metadata: new Map(),
150
201
  };
151
202
  return itkImage;
@@ -178,12 +229,6 @@ function copyTypedArray(data) {
178
229
  else if (data instanceof Int32Array) {
179
230
  return new Int32Array(data);
180
231
  }
181
- else if (data instanceof BigInt64Array) {
182
- return new BigInt64Array(data);
183
- }
184
- else if (data instanceof BigUint64Array) {
185
- return new BigUint64Array(data);
186
- }
187
232
  else {
188
233
  // Convert to Float32Array as fallback
189
234
  return new Float32Array(data);
@@ -204,23 +249,17 @@ function transposeArray(data, shape, permutation, componentType) {
204
249
  case "int8":
205
250
  output = new Int8Array(totalSize);
206
251
  break;
207
- case "int16":
208
- output = new Int16Array(totalSize);
209
- break;
210
252
  case "uint16":
211
253
  output = new Uint16Array(totalSize);
212
254
  break;
213
- case "int32":
214
- output = new Int32Array(totalSize);
255
+ case "int16":
256
+ output = new Int16Array(totalSize);
215
257
  break;
216
258
  case "uint32":
217
259
  output = new Uint32Array(totalSize);
218
260
  break;
219
- case "int64":
220
- output = new BigInt64Array(totalSize);
221
- break;
222
- case "uint64":
223
- output = new BigUint64Array(totalSize);
261
+ case "int32":
262
+ output = new Int32Array(totalSize);
224
263
  break;
225
264
  case "float64":
226
265
  output = new Float64Array(totalSize);
@@ -275,10 +314,6 @@ function getItkComponentType(data) {
275
314
  return "uint32";
276
315
  if (data instanceof Int32Array)
277
316
  return "int32";
278
- if (data instanceof BigUint64Array)
279
- return "uint64";
280
- if (data instanceof BigInt64Array)
281
- return "int64";
282
317
  if (data instanceof Float64Array)
283
318
  return "float64";
284
319
  return "float32";
@@ -295,12 +330,25 @@ function createIdentityMatrix(dimension) {
295
330
  }
296
331
  /**
297
332
  * Convert ITK-Wasm Image back to zarr array
333
+ * Uses the provided store instead of creating a new one
334
+ *
335
+ * Important: ITK-Wasm stores size in physical space order [x, y, z], but data in
336
+ * column-major order (x contiguous). This column-major layout with size [x, y, z]
337
+ * is equivalent to C-order (row-major) with shape [z, y, x]. We reverse the size
338
+ * to get the zarr shape and use C-order strides for that reversed shape.
339
+ *
340
+ * @param itkImage - The ITK-Wasm image to convert
341
+ * @param store - The zarr store to write to
342
+ * @param path - The path within the store
343
+ * @param chunkShape - The chunk shape (in spatial dimension order, will be adjusted for components)
344
+ * @param targetDims - The target dimension order (e.g., ["c", "z", "y", "x"])
298
345
  */
299
- async function itkImageToZarr(itkImage, path, chunkShape) {
300
- // Use in-memory store
301
- const store = new Map();
346
+ async function itkImageToZarr(itkImage, store, path, chunkShape, targetDims) {
302
347
  const root = zarr.root(store);
303
- // Determine data type
348
+ if (!itkImage.data) {
349
+ throw new Error("ITK image data is null or undefined");
350
+ }
351
+ // Determine data type - support all ITK TypedArray types
304
352
  let dataType;
305
353
  if (itkImage.data instanceof Uint8Array) {
306
354
  dataType = "uint8";
@@ -308,44 +356,125 @@ async function itkImageToZarr(itkImage, path, chunkShape) {
308
356
  else if (itkImage.data instanceof Int8Array) {
309
357
  dataType = "int8";
310
358
  }
311
- else if (itkImage.data instanceof Int16Array) {
312
- dataType = "int16";
313
- }
314
359
  else if (itkImage.data instanceof Uint16Array) {
315
360
  dataType = "uint16";
316
361
  }
317
- else if (itkImage.data instanceof Int32Array) {
318
- dataType = "int32";
362
+ else if (itkImage.data instanceof Int16Array) {
363
+ dataType = "int16";
319
364
  }
320
365
  else if (itkImage.data instanceof Uint32Array) {
321
366
  dataType = "uint32";
322
367
  }
323
- else if (itkImage.data instanceof BigInt64Array) {
324
- dataType = "int64";
368
+ else if (itkImage.data instanceof Int32Array) {
369
+ dataType = "int32";
325
370
  }
326
- else if (itkImage.data instanceof BigUint64Array) {
327
- dataType = "uint64";
371
+ else if (itkImage.data instanceof Float32Array) {
372
+ dataType = "float32";
328
373
  }
329
374
  else if (itkImage.data instanceof Float64Array) {
330
375
  dataType = "float64";
331
376
  }
332
- else if (itkImage.data instanceof Float32Array) {
333
- dataType = "float32";
377
+ else {
378
+ throw new Error(`Unsupported data type: ${itkImage.data.constructor.name}`);
379
+ }
380
+ // ITK stores size/spacing/origin in physical space order [x, y, z],
381
+ // but the data buffer is in C-order (row-major) which means [z, y, x] indexing.
382
+ // We need to reverse the size to match the data layout, just like we do for spacing/origin.
383
+ const shape = [...itkImage.size].reverse();
384
+ // For vector images, the components are stored in the data but not in the size
385
+ // The actual data length includes components
386
+ const components = itkImage.imageType.components || 1;
387
+ const isVector = components > 1;
388
+ // Validate data length matches expected shape (including components for vector images)
389
+ const spatialElements = shape.reduce((a, b) => a * b, 1);
390
+ const expectedLength = spatialElements * components;
391
+ if (itkImage.data.length !== expectedLength) {
392
+ console.error(`[ERROR] Data length mismatch in itkImageToZarr:`);
393
+ console.error(` ITK image size (physical order):`, itkImage.size);
394
+ console.error(` Shape (reversed):`, shape);
395
+ console.error(` Components:`, components);
396
+ console.error(` Expected data length:`, expectedLength);
397
+ console.error(` Actual data length:`, itkImage.data.length);
398
+ throw new Error(`Data length (${itkImage.data.length}) doesn't match expected shape ${shape} with ${components} components (${expectedLength} elements)`);
399
+ }
400
+ // Determine the final shape and whether we need to transpose
401
+ // ITK image data has shape [...spatialDimsReversed, components] (with c at end)
402
+ // If targetDims is provided, we need to match that order
403
+ let zarrShape;
404
+ let zarrChunkShape;
405
+ let finalData = itkImage.data;
406
+ if (isVector && targetDims) {
407
+ // Find where "c" should be in targetDims
408
+ const cIndex = targetDims.indexOf("c");
409
+ if (cIndex === -1) {
410
+ throw new Error("Vector image but 'c' not found in targetDims");
411
+ }
412
+ // Current shape is [z, y, x, c] (spatial reversed + c at end)
413
+ // Target shape should match targetDims order
414
+ const currentShape = [...shape, components];
415
+ // Build target shape based on targetDims
416
+ zarrShape = new Array(targetDims.length);
417
+ const spatialDims = shape.slice(); // [z, y, x]
418
+ let spatialIdx = 0;
419
+ for (let i = 0; i < targetDims.length; i++) {
420
+ if (targetDims[i] === "c") {
421
+ zarrShape[i] = components;
422
+ }
423
+ else {
424
+ zarrShape[i] = spatialDims[spatialIdx++];
425
+ }
426
+ }
427
+ // If c is not at the end, we need to transpose
428
+ if (cIndex !== targetDims.length - 1) {
429
+ // Build permutation: where does each target dim come from in current shape?
430
+ const permutation = [];
431
+ spatialIdx = 0;
432
+ for (let i = 0; i < targetDims.length; i++) {
433
+ if (targetDims[i] === "c") {
434
+ permutation.push(currentShape.length - 1); // c is at end of current
435
+ }
436
+ else {
437
+ permutation.push(spatialIdx++);
438
+ }
439
+ }
440
+ // Transpose the data
441
+ finalData = transposeArray(itkImage.data, currentShape, permutation, getItkComponentType(itkImage.data));
442
+ }
443
+ // Chunk shape should match zarrShape
444
+ zarrChunkShape = new Array(zarrShape.length);
445
+ spatialIdx = 0;
446
+ for (let i = 0; i < targetDims.length; i++) {
447
+ if (targetDims[i] === "c") {
448
+ zarrChunkShape[i] = components;
449
+ }
450
+ else {
451
+ zarrChunkShape[i] = chunkShape[spatialIdx++];
452
+ }
453
+ }
334
454
  }
335
455
  else {
336
- dataType = "float32";
456
+ // No targetDims or not a vector - use default behavior
457
+ zarrShape = isVector ? [...shape, components] : shape;
458
+ zarrChunkShape = isVector ? [...chunkShape, components] : chunkShape;
459
+ }
460
+ // Chunk shape should match the dimensionality of zarrShape
461
+ if (zarrChunkShape.length !== zarrShape.length) {
462
+ throw new Error(`chunkShape length (${zarrChunkShape.length}) must match shape length (${zarrShape.length})`);
337
463
  }
338
464
  const array = await zarr.create(root.resolve(path), {
339
- shape: itkImage.size,
340
- chunk_shape: chunkShape,
465
+ shape: zarrShape,
466
+ chunk_shape: zarrChunkShape,
341
467
  data_type: dataType,
342
468
  fill_value: 0,
343
469
  });
344
- // Write data
345
- await zarr.set(array, [], {
346
- data: itkImage.data,
347
- shape: itkImage.size,
348
- stride: calculateStride(itkImage.size),
470
+ // Write data - preserve the actual data type, don't cast to Float32Array
471
+ // Shape and stride should match the ITK image size order
472
+ // Use null for each dimension to select the entire array
473
+ const selection = zarrShape.map(() => null);
474
+ await zarr.set(array, selection, {
475
+ data: finalData,
476
+ shape: zarrShape,
477
+ stride: calculateStride(zarrShape),
349
478
  });
350
479
  return array;
351
480
  }
@@ -361,316 +490,115 @@ function calculateStride(shape) {
361
490
  return stride;
362
491
  }
363
492
  /**
364
- * Process channel-first data by downsampling each channel separately
493
+ * Perform Gaussian downsampling using ITK-Wasm
365
494
  */
366
- async function downsampleChannelFirst(image, dimFactors, spatialDims, smoothing) {
367
- // Get the channel index and count
368
- const cIndex = image.dims.indexOf("c");
369
- const result = await zarr.get(image.data);
370
- const channelCount = result.shape[cIndex];
371
- // Process each channel separately
372
- const downsampledChannels = [];
373
- for (let channelIdx = 0; channelIdx < channelCount; channelIdx++) {
374
- // Extract single channel data
375
- const channelSlice = extractChannel(result, cIndex, channelIdx);
376
- // Create temporary zarr array for this channel
377
- const store = new Map();
378
- const root = zarr.root(store);
379
- const channelDims = image.dims.filter((d) => d !== "c");
380
- const channelShape = result.shape.filter((_, i) => i !== cIndex);
381
- const chunkShape = channelShape.map((s) => Math.min(s, 256));
382
- const channelArray = await zarr.create(root.resolve("channel"), {
383
- shape: channelShape,
384
- chunk_shape: chunkShape,
385
- data_type: getItkComponentType(result.data),
386
- fill_value: 0,
387
- });
388
- await zarr.set(channelArray, [], {
389
- data: channelSlice,
390
- shape: channelShape,
391
- stride: calculateStride(channelShape),
392
- });
393
- // Create NgffImage for this channel (unused but kept for potential future use)
394
- // const _channelImage = new NgffImage({
395
- // data: channelArray,
396
- // dims: channelDims,
397
- // scale: Object.fromEntries(
398
- // Object.entries(image.scale).filter(([k]) => k !== "c")
399
- // ),
400
- // translation: Object.fromEntries(
401
- // Object.entries(image.translation).filter(([k]) => k !== "c")
402
- // ),
403
- // name: image.name,
404
- // axesUnits: image.axesUnits,
405
- // computedCallbacks: image.computedCallbacks,
406
- // });
407
- // Downsample this channel
408
- const itkImage = await zarrToItkImage(channelArray, channelDims, false);
409
- const shrinkFactors = [];
410
- for (let i = 0; i < channelDims.length; i++) {
411
- const dim = channelDims[i];
412
- if (SPATIAL_DIMS.includes(dim)) {
413
- shrinkFactors.push(dimFactors[dim] || 1);
414
- }
415
- else {
416
- shrinkFactors.push(1); // Non-spatial dimensions don't shrink
417
- }
418
- }
419
- let downsampled;
420
- if (smoothing === "gaussian") {
421
- const blockSize = itkImage.size.slice().reverse();
422
- const sigma = computeSigma(shrinkFactors);
423
- const { radius: _radius } = await gaussianKernelRadius({
424
- size: blockSize,
425
- sigma,
426
- });
427
- const result = await downsample(itkImage, {
428
- shrinkFactors,
429
- cropRadius: shrinkFactors.map(() => 0),
430
- });
431
- downsampled = result.downsampled;
432
- }
433
- else if (smoothing === "bin_shrink") {
434
- const result = await downsampleBinShrink(itkImage, {
435
- shrinkFactors,
436
- });
437
- downsampled = result.downsampled;
438
- }
439
- else if (smoothing === "label_image") {
440
- const blockSize = itkImage.size.slice().reverse();
441
- const sigma = computeSigma(shrinkFactors);
442
- const { radius: _radius } = await gaussianKernelRadius({
443
- size: blockSize,
444
- sigma,
495
+ async function downsampleGaussian(image, dimFactors, spatialDims) {
496
+ // Handle time dimension by processing each time slice independently
497
+ if (image.dims.includes("t")) {
498
+ const tDimIndex = image.dims.indexOf("t");
499
+ const tSize = image.data.shape[tDimIndex];
500
+ const newDims = image.dims.filter((dim) => dim !== "t");
501
+ // Downsample each time slice
502
+ const downsampledSlices = [];
503
+ for (let t = 0; t < tSize; t++) {
504
+ // Extract time slice
505
+ const selection = new Array(image.data.shape.length).fill(null);
506
+ selection[tDimIndex] = t;
507
+ const sliceData = await zarr.get(image.data, selection);
508
+ // Create temporary zarr array for this slice
509
+ const sliceStore = new Map();
510
+ const sliceRoot = zarr.root(sliceStore);
511
+ const sliceShape = image.data.shape.filter((_, i) => i !== tDimIndex);
512
+ const sliceChunkShape = sliceShape.map((s) => Math.min(s, 256));
513
+ const sliceArray = await zarr.create(sliceRoot.resolve("slice"), {
514
+ shape: sliceShape,
515
+ chunk_shape: sliceChunkShape,
516
+ data_type: image.data.dtype,
517
+ fill_value: 0,
445
518
  });
446
- const result = await downsampleLabelImage(itkImage, {
447
- shrinkFactors,
448
- cropRadius: shrinkFactors.map(() => 0),
519
+ const fullSelection = new Array(sliceShape.length).fill(null);
520
+ await zarr.set(sliceArray, fullSelection, sliceData);
521
+ // Create NgffImage for this slice (without 't' dimension)
522
+ const sliceImage = new NgffImage({
523
+ data: sliceArray,
524
+ dims: newDims,
525
+ scale: Object.fromEntries(Object.entries(image.scale).filter(([dim]) => dim !== "t")),
526
+ translation: Object.fromEntries(Object.entries(image.translation).filter(([dim]) => dim !== "t")),
527
+ name: image.name,
528
+ axesUnits: image.axesUnits
529
+ ? Object.fromEntries(Object.entries(image.axesUnits).filter(([dim]) => dim !== "t"))
530
+ : undefined,
531
+ computedCallbacks: image.computedCallbacks,
449
532
  });
450
- downsampled = result.downsampled;
451
- }
452
- else {
453
- throw new Error(`Unknown smoothing method: ${smoothing}`);
533
+ // Recursively downsample this slice (without 't', so no infinite loop)
534
+ const downsampledSlice = await downsampleGaussian(sliceImage, dimFactors, spatialDims);
535
+ downsampledSlices.push(downsampledSlice.data);
454
536
  }
455
- // Convert back to zarr array
456
- const downsampledChunkShape = downsampled.size.map((s) => Math.min(s, 256));
457
- const downsampledArray = await itkImageToZarr(downsampled, "downsampled_channel", downsampledChunkShape);
458
- downsampledChannels.push(downsampledArray);
459
- }
460
- // Combine all channels back together
461
- const combinedArray = await combineChannels(downsampledChannels, cIndex, image.dims);
462
- // Compute new metadata
463
- const [translation, scale] = nextScaleMetadata(image, dimFactors, spatialDims);
464
- return new NgffImage({
465
- data: combinedArray,
466
- dims: image.dims,
467
- scale,
468
- translation,
469
- name: image.name,
470
- axesUnits: image.axesUnits,
471
- computedCallbacks: image.computedCallbacks,
472
- });
473
- }
474
- /**
475
- * Extract a single channel from the data
476
- */
477
- function extractChannel(result, cIndex, channelIdx) {
478
- const typedData = result.data;
479
- const shape = result.shape;
480
- // Calculate output size (all dims except channel)
481
- const outputSize = shape.reduce((acc, s, i) => (i === cIndex ? acc : acc * s), 1);
482
- let output;
483
- if (typedData instanceof Uint8Array) {
484
- output = new Uint8Array(outputSize);
485
- }
486
- else if (typedData instanceof Int8Array) {
487
- output = new Int8Array(outputSize);
488
- }
489
- else if (typedData instanceof Int16Array) {
490
- output = new Int16Array(outputSize);
491
- }
492
- else if (typedData instanceof Uint16Array) {
493
- output = new Uint16Array(outputSize);
494
- }
495
- else if (typedData instanceof Int32Array) {
496
- output = new Int32Array(outputSize);
497
- }
498
- else if (typedData instanceof Uint32Array) {
499
- output = new Uint32Array(outputSize);
500
- }
501
- else if (typedData instanceof BigInt64Array) {
502
- output = new BigInt64Array(outputSize);
503
- }
504
- else if (typedData instanceof BigUint64Array) {
505
- output = new BigUint64Array(outputSize);
506
- }
507
- else if (typedData instanceof Float64Array) {
508
- output = new Float64Array(outputSize);
509
- }
510
- else {
511
- output = new Float32Array(outputSize);
512
- }
513
- // Calculate strides
514
- const stride = calculateStride(shape);
515
- const outputShape = shape.filter((_, i) => i !== cIndex);
516
- const _outputStride = calculateStride(outputShape);
517
- // Extract channel
518
- const indices = new Array(shape.length).fill(0);
519
- let outputIdx = 0;
520
- for (let i = 0; i < outputSize; i++) {
521
- // Set channel index
522
- indices[cIndex] = channelIdx;
523
- // Calculate source index
524
- let sourceIdx = 0;
525
- for (let j = 0; j < shape.length; j++) {
526
- sourceIdx += indices[j] * stride[j];
527
- }
528
- output[outputIdx++] = typedData[sourceIdx];
529
- // Increment indices (skip channel dimension)
530
- for (let j = shape.length - 1; j >= 0; j--) {
531
- if (j === cIndex)
532
- continue;
533
- indices[j]++;
534
- if (indices[j] < shape[j])
535
- break;
536
- indices[j] = 0;
537
- }
538
- }
539
- return output;
540
- }
541
- /**
542
- * Combine multiple channel arrays back into a single multi-channel array
543
- */
544
- async function combineChannels(channels, cIndex, _originalDims) {
545
- // Read all channel data
546
- const channelData = await Promise.all(channels.map((c) => zarr.get(c)));
547
- // Determine combined shape
548
- const firstChannel = channelData[0];
549
- const channelShape = firstChannel.shape;
550
- const combinedShape = [...channelShape];
551
- combinedShape.splice(cIndex, 0, channels.length);
552
- // Create combined array
553
- const store = new Map();
554
- const root = zarr.root(store);
555
- const chunkShape = combinedShape.map((s) => Math.min(s, 256));
556
- const dataType = getItkComponentType(firstChannel.data);
557
- const combinedArray = await zarr.create(root.resolve("combined"), {
558
- shape: combinedShape,
559
- chunk_shape: chunkShape,
560
- data_type: dataType,
561
- fill_value: 0,
562
- });
563
- // Combine all channels
564
- const totalSize = combinedShape.reduce((acc, s) => acc * s, 1);
565
- let combined;
566
- if (dataType === "uint8") {
567
- combined = new Uint8Array(totalSize);
568
- }
569
- else if (dataType === "int8") {
570
- combined = new Int8Array(totalSize);
571
- }
572
- else if (dataType === "int16") {
573
- combined = new Int16Array(totalSize);
574
- }
575
- else if (dataType === "uint16") {
576
- combined = new Uint16Array(totalSize);
577
- }
578
- else if (dataType === "int32") {
579
- combined = new Int32Array(totalSize);
580
- }
581
- else if (dataType === "uint32") {
582
- combined = new Uint32Array(totalSize);
583
- }
584
- else if (dataType === "int64") {
585
- combined = new BigInt64Array(totalSize);
586
- }
587
- else if (dataType === "uint64") {
588
- combined = new BigUint64Array(totalSize);
589
- }
590
- else if (dataType === "float64") {
591
- combined = new Float64Array(totalSize);
592
- }
593
- else {
594
- combined = new Float32Array(totalSize);
595
- }
596
- const stride = calculateStride(combinedShape);
597
- const _channelStride = calculateStride(channelShape);
598
- // Copy each channel's data
599
- for (let c = 0; c < channels.length; c++) {
600
- const channelTypedData = channelData[c].data;
601
- const indices = new Array(combinedShape.length).fill(0);
602
- for (let i = 0; i < channelTypedData.length; i++) {
603
- // Set channel index
604
- indices[cIndex] = c;
605
- // Calculate target index in combined array
606
- let targetIdx = 0;
607
- for (let j = 0; j < combinedShape.length; j++) {
608
- targetIdx += indices[j] * stride[j];
609
- }
610
- combined[targetIdx] = channelTypedData[i];
611
- // Increment indices (skip channel dimension)
612
- for (let j = combinedShape.length - 1; j >= 0; j--) {
613
- if (j === cIndex)
614
- continue;
615
- indices[j]++;
616
- if (indices[j] < combinedShape[j])
617
- break;
618
- indices[j] = 0;
537
+ // Combine downsampled slices back into a single array with 't' dimension
538
+ const firstSlice = downsampledSlices[0];
539
+ const combinedShape = [...image.data.shape];
540
+ combinedShape[tDimIndex] = tSize;
541
+ // Update spatial dimensions based on downsampled size
542
+ for (let i = 0; i < image.dims.length; i++) {
543
+ if (i !== tDimIndex) {
544
+ const sliceIndex = i < tDimIndex ? i : i - 1;
545
+ combinedShape[i] = firstSlice.shape[sliceIndex];
619
546
  }
620
547
  }
548
+ // Create combined array
549
+ const combinedStore = new Map();
550
+ const combinedRoot = zarr.root(combinedStore);
551
+ const combinedArray = await zarr.create(combinedRoot.resolve("combined"), {
552
+ shape: combinedShape,
553
+ chunk_shape: combinedShape.map((s) => Math.min(s, 256)),
554
+ data_type: image.data.dtype,
555
+ fill_value: 0,
556
+ });
557
+ // Copy each downsampled slice into the combined array
558
+ for (let t = 0; t < tSize; t++) {
559
+ const sliceData = await zarr.get(downsampledSlices[t]);
560
+ const targetSelection = new Array(combinedShape.length).fill(null);
561
+ targetSelection[tDimIndex] = t;
562
+ await zarr.set(combinedArray, targetSelection, sliceData);
563
+ }
564
+ // Compute new metadata (time dimension unchanged, spatial dimensions downsampled)
565
+ const [translation, scale] = nextScaleMetadata(image, dimFactors, spatialDims);
566
+ return new NgffImage({
567
+ data: combinedArray,
568
+ dims: image.dims,
569
+ scale: { ...image.scale, ...scale },
570
+ translation: { ...image.translation, ...translation },
571
+ name: image.name,
572
+ axesUnits: image.axesUnits,
573
+ computedCallbacks: image.computedCallbacks,
574
+ });
621
575
  }
622
- // Write combined data
623
- await zarr.set(combinedArray, [], {
624
- data: combined,
625
- shape: combinedShape,
626
- stride,
627
- });
628
- return combinedArray;
629
- }
630
- /**
631
- * Perform Gaussian downsampling using ITK-Wasm
632
- */
633
- async function downsampleGaussian(image, dimFactors, spatialDims) {
634
- const cIndex = image.dims.indexOf("c");
635
- const isVector = cIndex === image.dims.length - 1;
636
- const isChannelFirst = cIndex !== -1 && cIndex < image.dims.length - 1 &&
637
- !isVector;
638
- // If channel is first (before spatial dims), process each channel separately
639
- if (isChannelFirst) {
640
- return await downsampleChannelFirst(image, dimFactors, spatialDims, "gaussian");
641
- }
576
+ const isVector = image.dims.includes("c");
642
577
  // Convert to ITK-Wasm format
643
578
  const itkImage = await zarrToItkImage(image.data, image.dims, isVector);
644
- // Prepare shrink factors - need to be for spatial dimensions only
645
- // For vector images, the last dimension (c) is NOT a spatial dimension in the ITK image
579
+ // Prepare shrink factors - need to be for ALL dimensions in ITK order (reversed)
646
580
  const shrinkFactors = [];
647
- const effectiveDims = isVector ? image.dims.slice(0, -1) : image.dims;
648
- for (let i = 0; i < effectiveDims.length; i++) {
649
- const dim = effectiveDims[i];
581
+ for (let i = image.dims.length - 1; i >= 0; i--) {
582
+ const dim = image.dims[i];
650
583
  if (SPATIAL_DIMS.includes(dim)) {
651
584
  shrinkFactors.push(dimFactors[dim] || 1);
652
585
  }
653
- else {
654
- shrinkFactors.push(1); // Non-spatial dimensions don't shrink
655
- }
656
586
  }
657
- // Compute kernel radius - sigma should also be for ALL dimensions
658
- const blockSize = itkImage.size.slice().reverse();
659
- const sigma = computeSigma(shrinkFactors);
660
- const { radius: _radius } = await gaussianKernelRadius({
661
- size: blockSize,
662
- sigma,
663
- });
587
+ // Use all zeros for cropRadius
588
+ const cropRadius = new Array(shrinkFactors.length).fill(0);
664
589
  // Perform downsampling
665
590
  const { downsampled } = await downsample(itkImage, {
666
591
  shrinkFactors,
667
- cropRadius: shrinkFactors.map(() => 0),
592
+ cropRadius: cropRadius,
668
593
  });
669
594
  // Compute new metadata
670
595
  const [translation, scale] = nextScaleMetadata(image, dimFactors, spatialDims);
671
- // Convert back to zarr array
672
- const chunkShape = downsampled.size.map((s) => Math.min(s, 256));
673
- const array = await itkImageToZarr(downsampled, "downsampled", chunkShape);
596
+ // Convert back to zarr array in a new in-memory store
597
+ // Each downsampled image gets its own store - toNgffZarr will handle copying to target
598
+ const store = new Map();
599
+ // Chunk shape needs to be in zarr order (reversed from ITK order)
600
+ const chunkShape = downsampled.size.map((s) => Math.min(s, 256)).reverse();
601
+ const array = await itkImageToZarr(downsampled, store, "image", chunkShape, image.dims);
674
602
  return new NgffImage({
675
603
  data: array,
676
604
  dims: image.dims,
@@ -685,28 +613,97 @@ async function downsampleGaussian(image, dimFactors, spatialDims) {
685
613
  * Perform bin shrink downsampling using ITK-Wasm
686
614
  */
687
615
  async function downsampleBinShrinkImpl(image, dimFactors, spatialDims) {
688
- const cIndex = image.dims.indexOf("c");
689
- const isVector = cIndex === image.dims.length - 1;
690
- const isChannelFirst = cIndex !== -1 && cIndex < image.dims.length - 1 &&
691
- !isVector;
692
- // If channel is first (before spatial dims), process each channel separately
693
- if (isChannelFirst) {
694
- return await downsampleChannelFirst(image, dimFactors, spatialDims, "bin_shrink");
616
+ // Handle time dimension by processing each time slice independently
617
+ if (image.dims.includes("t")) {
618
+ const tDimIndex = image.dims.indexOf("t");
619
+ const tSize = image.data.shape[tDimIndex];
620
+ const newDims = image.dims.filter((dim) => dim !== "t");
621
+ // Downsample each time slice
622
+ const downsampledSlices = [];
623
+ for (let t = 0; t < tSize; t++) {
624
+ // Extract time slice
625
+ const selection = new Array(image.data.shape.length).fill(null);
626
+ selection[tDimIndex] = t;
627
+ const sliceData = await zarr.get(image.data, selection);
628
+ // Create temporary zarr array for this slice
629
+ const sliceStore = new Map();
630
+ const sliceRoot = zarr.root(sliceStore);
631
+ const sliceShape = image.data.shape.filter((_, i) => i !== tDimIndex);
632
+ const sliceChunkShape = sliceShape.map((s) => Math.min(s, 256));
633
+ const sliceArray = await zarr.create(sliceRoot.resolve("slice"), {
634
+ shape: sliceShape,
635
+ chunk_shape: sliceChunkShape,
636
+ data_type: image.data.dtype,
637
+ fill_value: 0,
638
+ });
639
+ const fullSelection = new Array(sliceShape.length).fill(null);
640
+ await zarr.set(sliceArray, fullSelection, sliceData);
641
+ // Create NgffImage for this slice (without 't' dimension)
642
+ const sliceImage = new NgffImage({
643
+ data: sliceArray,
644
+ dims: newDims,
645
+ scale: Object.fromEntries(Object.entries(image.scale).filter(([dim]) => dim !== "t")),
646
+ translation: Object.fromEntries(Object.entries(image.translation).filter(([dim]) => dim !== "t")),
647
+ name: image.name,
648
+ axesUnits: image.axesUnits
649
+ ? Object.fromEntries(Object.entries(image.axesUnits).filter(([dim]) => dim !== "t"))
650
+ : undefined,
651
+ computedCallbacks: image.computedCallbacks,
652
+ });
653
+ // Recursively downsample this slice
654
+ const downsampledSlice = await downsampleBinShrinkImpl(sliceImage, dimFactors, spatialDims);
655
+ downsampledSlices.push(downsampledSlice.data);
656
+ }
657
+ // Combine downsampled slices back into a single array with 't' dimension
658
+ const firstSlice = downsampledSlices[0];
659
+ const combinedShape = [...image.data.shape];
660
+ combinedShape[tDimIndex] = tSize;
661
+ // Update spatial dimensions based on downsampled size
662
+ for (let i = 0; i < image.dims.length; i++) {
663
+ if (i !== tDimIndex) {
664
+ const sliceIndex = i < tDimIndex ? i : i - 1;
665
+ combinedShape[i] = firstSlice.shape[sliceIndex];
666
+ }
667
+ }
668
+ // Create combined array
669
+ const combinedStore = new Map();
670
+ const combinedRoot = zarr.root(combinedStore);
671
+ const combinedArray = await zarr.create(combinedRoot.resolve("combined"), {
672
+ shape: combinedShape,
673
+ chunk_shape: combinedShape.map((s) => Math.min(s, 256)),
674
+ data_type: image.data.dtype,
675
+ fill_value: 0,
676
+ });
677
+ // Copy each downsampled slice into the combined array
678
+ for (let t = 0; t < tSize; t++) {
679
+ const sliceData = await zarr.get(downsampledSlices[t]);
680
+ const targetSelection = new Array(combinedShape.length).fill(null);
681
+ targetSelection[tDimIndex] = t;
682
+ await zarr.set(combinedArray, targetSelection, sliceData);
683
+ }
684
+ // Compute new metadata
685
+ const [translation, scale] = nextScaleMetadata(image, dimFactors, spatialDims);
686
+ return new NgffImage({
687
+ data: combinedArray,
688
+ dims: image.dims,
689
+ scale: { ...image.scale, ...scale },
690
+ translation: { ...image.translation, ...translation },
691
+ name: image.name,
692
+ axesUnits: image.axesUnits,
693
+ computedCallbacks: image.computedCallbacks,
694
+ });
695
695
  }
696
+ const isVector = image.dims.includes("c");
696
697
  // Convert to ITK-Wasm format
697
698
  const itkImage = await zarrToItkImage(image.data, image.dims, isVector);
698
- // Prepare shrink factors - need to be for spatial dimensions only
699
- // For vector images, the last dimension (c) is NOT a spatial dimension in the ITK image
699
+ // Prepare shrink factors - only for spatial dimensions in ITK order (reversed)
700
+ // ITK bin shrink does not expect shrink factors for non-spatial dimensions like 'c'
700
701
  const shrinkFactors = [];
701
- const effectiveDims = isVector ? image.dims.slice(0, -1) : image.dims;
702
- for (let i = 0; i < effectiveDims.length; i++) {
703
- const dim = effectiveDims[i];
702
+ for (let i = image.dims.length - 1; i >= 0; i--) {
703
+ const dim = image.dims[i];
704
704
  if (SPATIAL_DIMS.includes(dim)) {
705
705
  shrinkFactors.push(dimFactors[dim] || 1);
706
706
  }
707
- else {
708
- shrinkFactors.push(1); // Non-spatial dimensions don't shrink
709
- }
710
707
  }
711
708
  // Perform downsampling
712
709
  const { downsampled } = await downsampleBinShrink(itkImage, {
@@ -714,9 +711,12 @@ async function downsampleBinShrinkImpl(image, dimFactors, spatialDims) {
714
711
  });
715
712
  // Compute new metadata
716
713
  const [translation, scale] = nextScaleMetadata(image, dimFactors, spatialDims);
717
- // Convert back to zarr array
718
- const chunkShape = downsampled.size.map((s) => Math.min(s, 256));
719
- const array = await itkImageToZarr(downsampled, "downsampled", chunkShape);
714
+ // Convert back to zarr array in a new in-memory store
715
+ // Each downsampled image gets its own store - toNgffZarr will handle copying to target
716
+ const store = new Map();
717
+ // Chunk shape needs to be in zarr order (reversed from ITK order)
718
+ const chunkShape = downsampled.size.map((s) => Math.min(s, 256)).reverse();
719
+ const array = await itkImageToZarr(downsampled, store, "image", chunkShape, image.dims);
720
720
  return new NgffImage({
721
721
  data: array,
722
722
  dims: image.dims,
@@ -731,22 +731,93 @@ async function downsampleBinShrinkImpl(image, dimFactors, spatialDims) {
731
731
  * Perform label image downsampling using ITK-Wasm
732
732
  */
733
733
  async function downsampleLabelImageImpl(image, dimFactors, spatialDims) {
734
- const cIndex = image.dims.indexOf("c");
735
- const isVector = cIndex === image.dims.length - 1;
736
- const isChannelFirst = cIndex !== -1 && cIndex < image.dims.length - 1 &&
737
- !isVector;
738
- // If channel is first (before spatial dims), process each channel separately
739
- if (isChannelFirst) {
740
- return await downsampleChannelFirst(image, dimFactors, spatialDims, "label_image");
734
+ // Handle time dimension by processing each time slice independently
735
+ if (image.dims.includes("t")) {
736
+ const tDimIndex = image.dims.indexOf("t");
737
+ const tSize = image.data.shape[tDimIndex];
738
+ const newDims = image.dims.filter((dim) => dim !== "t");
739
+ // Downsample each time slice
740
+ const downsampledSlices = [];
741
+ for (let t = 0; t < tSize; t++) {
742
+ // Extract time slice
743
+ const selection = new Array(image.data.shape.length).fill(null);
744
+ selection[tDimIndex] = t;
745
+ const sliceData = await zarr.get(image.data, selection);
746
+ // Create temporary zarr array for this slice
747
+ const sliceStore = new Map();
748
+ const sliceRoot = zarr.root(sliceStore);
749
+ const sliceShape = image.data.shape.filter((_, i) => i !== tDimIndex);
750
+ const sliceChunkShape = sliceShape.map((s) => Math.min(s, 256));
751
+ const sliceArray = await zarr.create(sliceRoot.resolve("slice"), {
752
+ shape: sliceShape,
753
+ chunk_shape: sliceChunkShape,
754
+ data_type: image.data.dtype,
755
+ fill_value: 0,
756
+ });
757
+ const fullSelection = new Array(sliceShape.length).fill(null);
758
+ await zarr.set(sliceArray, fullSelection, sliceData);
759
+ // Create NgffImage for this slice (without 't' dimension)
760
+ const sliceImage = new NgffImage({
761
+ data: sliceArray,
762
+ dims: newDims,
763
+ scale: Object.fromEntries(Object.entries(image.scale).filter(([dim]) => dim !== "t")),
764
+ translation: Object.fromEntries(Object.entries(image.translation).filter(([dim]) => dim !== "t")),
765
+ name: image.name,
766
+ axesUnits: image.axesUnits
767
+ ? Object.fromEntries(Object.entries(image.axesUnits).filter(([dim]) => dim !== "t"))
768
+ : undefined,
769
+ computedCallbacks: image.computedCallbacks,
770
+ });
771
+ // Recursively downsample this slice
772
+ const downsampledSlice = await downsampleLabelImageImpl(sliceImage, dimFactors, spatialDims);
773
+ downsampledSlices.push(downsampledSlice.data);
774
+ }
775
+ // Combine downsampled slices back into a single array with 't' dimension
776
+ const firstSlice = downsampledSlices[0];
777
+ const combinedShape = [...image.data.shape];
778
+ combinedShape[tDimIndex] = tSize;
779
+ // Update spatial dimensions based on downsampled size
780
+ for (let i = 0; i < image.dims.length; i++) {
781
+ if (i !== tDimIndex) {
782
+ const sliceIndex = i < tDimIndex ? i : i - 1;
783
+ combinedShape[i] = firstSlice.shape[sliceIndex];
784
+ }
785
+ }
786
+ // Create combined array
787
+ const combinedStore = new Map();
788
+ const combinedRoot = zarr.root(combinedStore);
789
+ const combinedArray = await zarr.create(combinedRoot.resolve("combined"), {
790
+ shape: combinedShape,
791
+ chunk_shape: combinedShape.map((s) => Math.min(s, 256)),
792
+ data_type: image.data.dtype,
793
+ fill_value: 0,
794
+ });
795
+ // Copy each downsampled slice into the combined array
796
+ for (let t = 0; t < tSize; t++) {
797
+ const sliceData = await zarr.get(downsampledSlices[t]);
798
+ const targetSelection = new Array(combinedShape.length).fill(null);
799
+ targetSelection[tDimIndex] = t;
800
+ await zarr.set(combinedArray, targetSelection, sliceData);
801
+ }
802
+ // Compute new metadata
803
+ const [translation, scale] = nextScaleMetadata(image, dimFactors, spatialDims);
804
+ return new NgffImage({
805
+ data: combinedArray,
806
+ dims: image.dims,
807
+ scale: { ...image.scale, ...scale },
808
+ translation: { ...image.translation, ...translation },
809
+ name: image.name,
810
+ axesUnits: image.axesUnits,
811
+ computedCallbacks: image.computedCallbacks,
812
+ });
741
813
  }
814
+ const isVector = image.dims.includes("c");
742
815
  // Convert to ITK-Wasm format
743
816
  const itkImage = await zarrToItkImage(image.data, image.dims, isVector);
744
- // Prepare shrink factors - need to be for spatial dimensions only
745
- // For vector images, the last dimension (c) is NOT a spatial dimension in the ITK image
817
+ // Prepare shrink factors - need to be for ALL dimensions in ITK order (reversed)
746
818
  const shrinkFactors = [];
747
- const effectiveDims = isVector ? image.dims.slice(0, -1) : image.dims;
748
- for (let i = 0; i < effectiveDims.length; i++) {
749
- const dim = effectiveDims[i];
819
+ for (let i = image.dims.length - 1; i >= 0; i--) {
820
+ const dim = image.dims[i];
750
821
  if (SPATIAL_DIMS.includes(dim)) {
751
822
  shrinkFactors.push(dimFactors[dim] || 1);
752
823
  }
@@ -754,23 +825,21 @@ async function downsampleLabelImageImpl(image, dimFactors, spatialDims) {
754
825
  shrinkFactors.push(1); // Non-spatial dimensions don't shrink
755
826
  }
756
827
  }
757
- // Compute kernel radius
758
- const blockSize = itkImage.size.slice().reverse();
759
- const sigma = computeSigma(shrinkFactors);
760
- const { radius: _radius } = await gaussianKernelRadius({
761
- size: blockSize,
762
- sigma,
763
- });
828
+ // Use all zeros for cropRadius
829
+ const cropRadius = new Array(shrinkFactors.length).fill(0);
764
830
  // Perform downsampling
765
831
  const { downsampled } = await downsampleLabelImage(itkImage, {
766
832
  shrinkFactors,
767
- cropRadius: shrinkFactors.map(() => 0),
833
+ cropRadius: cropRadius,
768
834
  });
769
835
  // Compute new metadata
770
836
  const [translation, scale] = nextScaleMetadata(image, dimFactors, spatialDims);
771
- // Convert back to zarr array
772
- const chunkShape = downsampled.size.map((s) => Math.min(s, 256));
773
- const array = await itkImageToZarr(downsampled, "downsampled", chunkShape);
837
+ // Convert back to zarr array in a new in-memory store
838
+ // Each downsampled image gets its own store - toNgffZarr will handle copying to target
839
+ const store = new Map();
840
+ // Chunk shape needs to be in zarr order (reversed from ITK order)
841
+ const chunkShape = downsampled.size.map((s) => Math.min(s, 256)).reverse();
842
+ const array = await itkImageToZarr(downsampled, store, "image", chunkShape, image.dims);
774
843
  return new NgffImage({
775
844
  data: array,
776
845
  dims: image.dims,
@@ -786,31 +855,104 @@ async function downsampleLabelImageImpl(image, dimFactors, spatialDims) {
786
855
  */
787
856
  export async function downsampleItkWasm(ngffImage, scaleFactors, smoothing) {
788
857
  const multiscales = [ngffImage];
789
- let previousImage = ngffImage;
790
858
  const dims = ngffImage.dims;
859
+ const spatialDims = dims.filter((dim) => SPATIAL_DIMS.includes(dim));
860
+ // Two strategies:
861
+ // 1. gaussian / label_image: hybrid absolute scale factors (each element is absolute from original)
862
+ // using dimScaleFactors to choose incremental vs from-original for exact sizes.
863
+ // 2. bin_shrink: treat provided scaleFactors sequence as incremental factors applied successively.
864
+ let previousImage = ngffImage;
791
865
  let previousDimFactors = {};
792
- for (const dim of dims) {
866
+ for (const dim of dims)
793
867
  previousDimFactors[dim] = 1;
794
- }
795
- const spatialDims = dims.filter((dim) => SPATIAL_DIMS.includes(dim));
796
- for (const scaleFactor of scaleFactors) {
797
- const dimFactors = dimScaleFactors(dims, scaleFactor, previousDimFactors);
798
- previousDimFactors = updatePreviousDimFactors(scaleFactor, spatialDims, previousDimFactors);
868
+ for (let i = 0; i < scaleFactors.length; i++) {
869
+ const scaleFactor = scaleFactors[i];
870
+ let sourceImage;
871
+ let sourceDimFactors;
872
+ if (smoothing === "bin_shrink") {
873
+ // Purely incremental: scaleFactor is the shrink for this step
874
+ sourceImage = previousImage; // always from previous
875
+ sourceDimFactors = {};
876
+ if (typeof scaleFactor === "number") {
877
+ for (const dim of spatialDims)
878
+ sourceDimFactors[dim] = scaleFactor;
879
+ }
880
+ else {
881
+ for (const dim of spatialDims) {
882
+ sourceDimFactors[dim] = scaleFactor[dim] || 1;
883
+ }
884
+ }
885
+ // Non-spatial dims factor 1
886
+ for (const dim of dims) {
887
+ if (!(dim in sourceDimFactors))
888
+ sourceDimFactors[dim] = 1;
889
+ }
890
+ }
891
+ else {
892
+ // Hybrid absolute strategy
893
+ const dimFactors = dimScaleFactors(dims, scaleFactor, previousDimFactors, ngffImage, previousImage);
894
+ // Decide if we can be incremental
895
+ let canDownsampleIncrementally = true;
896
+ for (const dim of Object.keys(dimFactors)) {
897
+ const dimIndex = ngffImage.dims.indexOf(dim);
898
+ if (dimIndex >= 0) {
899
+ const originalSize = ngffImage.data.shape[dimIndex];
900
+ const targetSize = Math.floor(originalSize /
901
+ (typeof scaleFactor === "number"
902
+ ? scaleFactor
903
+ : scaleFactor[dim]));
904
+ const prevDimIndex = previousImage.dims.indexOf(dim);
905
+ const previousSize = previousImage.data.shape[prevDimIndex];
906
+ if (Math.floor(previousSize / dimFactors[dim]) !== targetSize) {
907
+ canDownsampleIncrementally = false;
908
+ break;
909
+ }
910
+ }
911
+ }
912
+ if (canDownsampleIncrementally) {
913
+ sourceImage = previousImage;
914
+ sourceDimFactors = dimFactors;
915
+ }
916
+ else {
917
+ sourceImage = ngffImage;
918
+ const originalDimFactors = {};
919
+ for (const dim of dims)
920
+ originalDimFactors[dim] = 1;
921
+ sourceDimFactors = dimScaleFactors(dims, scaleFactor, originalDimFactors);
922
+ }
923
+ }
799
924
  let downsampled;
800
925
  if (smoothing === "gaussian") {
801
- downsampled = await downsampleGaussian(previousImage, dimFactors, spatialDims);
926
+ downsampled = await downsampleGaussian(sourceImage, sourceDimFactors, spatialDims);
802
927
  }
803
928
  else if (smoothing === "bin_shrink") {
804
- downsampled = await downsampleBinShrinkImpl(previousImage, dimFactors, spatialDims);
929
+ downsampled = await downsampleBinShrinkImpl(sourceImage, sourceDimFactors, spatialDims);
805
930
  }
806
931
  else if (smoothing === "label_image") {
807
- downsampled = await downsampleLabelImageImpl(previousImage, dimFactors, spatialDims);
932
+ downsampled = await downsampleLabelImageImpl(sourceImage, sourceDimFactors, spatialDims);
808
933
  }
809
934
  else {
810
935
  throw new Error(`Unknown smoothing method: ${smoothing}`);
811
936
  }
812
937
  multiscales.push(downsampled);
938
+ // Update for next iteration
813
939
  previousImage = downsampled;
940
+ if (smoothing === "bin_shrink") {
941
+ // Accumulate cumulative factors (multiply) for bin_shrink to reflect total shrink so far
942
+ if (typeof scaleFactor === "number") {
943
+ for (const dim of spatialDims) {
944
+ previousDimFactors[dim] *= scaleFactor;
945
+ }
946
+ }
947
+ else {
948
+ for (const dim of spatialDims) {
949
+ previousDimFactors[dim] *= scaleFactor[dim] || 1;
950
+ }
951
+ }
952
+ }
953
+ else {
954
+ previousDimFactors = updatePreviousDimFactors(scaleFactor, spatialDims, previousDimFactors);
955
+ }
814
956
  }
815
957
  return multiscales;
816
958
  }