@fideus-labs/ngff-zarr 0.1.0 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. package/README.md +1 -0
  2. package/esm/io/itk_image_to_ngff_image.d.ts +5 -0
  3. package/esm/io/itk_image_to_ngff_image.d.ts.map +1 -1
  4. package/esm/io/itk_image_to_ngff_image.js +20 -20
  5. package/esm/io/ngff_image_to_itk_image.d.ts.map +1 -1
  6. package/esm/io/ngff_image_to_itk_image.js +2 -0
  7. package/esm/io/to_multiscales.js +1 -1
  8. package/esm/io/to_ngff_zarr.js +16 -0
  9. package/esm/methods/itkwasm.d.ts.map +1 -1
  10. package/esm/methods/itkwasm.js +568 -426
  11. package/esm/schemas/coordinate_systems.d.ts +159 -552
  12. package/esm/schemas/coordinate_systems.d.ts.map +1 -1
  13. package/esm/schemas/coordinate_systems.js +0 -1
  14. package/esm/schemas/ome_zarr.d.ts +105 -69
  15. package/esm/schemas/ome_zarr.d.ts.map +1 -1
  16. package/esm/schemas/rfc4.d.ts +26 -131
  17. package/esm/schemas/rfc4.d.ts.map +1 -1
  18. package/esm/schemas/units.d.ts +70 -5
  19. package/esm/schemas/units.d.ts.map +1 -1
  20. package/esm/schemas/units.js +2 -15
  21. package/esm/schemas/zarr_metadata.d.ts +13 -300
  22. package/esm/schemas/zarr_metadata.d.ts.map +1 -1
  23. package/package.json +1 -1
  24. package/script/io/itk_image_to_ngff_image.d.ts +5 -0
  25. package/script/io/itk_image_to_ngff_image.d.ts.map +1 -1
  26. package/script/io/itk_image_to_ngff_image.js +20 -20
  27. package/script/io/ngff_image_to_itk_image.d.ts.map +1 -1
  28. package/script/io/ngff_image_to_itk_image.js +2 -0
  29. package/script/io/to_multiscales.js +1 -1
  30. package/script/io/to_ngff_zarr.js +16 -0
  31. package/script/methods/itkwasm.d.ts.map +1 -1
  32. package/script/methods/itkwasm.js +567 -425
  33. package/script/schemas/coordinate_systems.d.ts +159 -552
  34. package/script/schemas/coordinate_systems.d.ts.map +1 -1
  35. package/script/schemas/coordinate_systems.js +0 -1
  36. package/script/schemas/ome_zarr.d.ts +105 -69
  37. package/script/schemas/ome_zarr.d.ts.map +1 -1
  38. package/script/schemas/rfc4.d.ts +26 -131
  39. package/script/schemas/rfc4.d.ts.map +1 -1
  40. package/script/schemas/units.d.ts +70 -5
  41. package/script/schemas/units.d.ts.map +1 -1
  42. package/script/schemas/units.js +2 -15
  43. package/script/schemas/zarr_metadata.d.ts +13 -300
  44. package/script/schemas/zarr_metadata.d.ts.map +1 -1
@@ -30,28 +30,99 @@ const downsample_1 = require("@itk-wasm/downsample");
  const zarr = __importStar(require("zarrita"));
  const ngff_image_js_1 = require("../types/ngff_image.js");
  const SPATIAL_DIMS = ["x", "y", "z"];
+ /**
+ * Calculate the incremental factor needed to reach the target size from the previous size.
+ * This ensures exact target sizes when downsampling incrementally.
+ */
+ function calculateIncrementalFactor(previousSize, targetSize) {
+ if (targetSize <= 0) {
+ return 1;
+ }
+ // Start with the theoretical factor
+ let factor = Math.floor(Math.ceil(previousSize / (targetSize + 0.5)));
+ // Verify this gives us the right size
+ let actualSize = Math.floor(previousSize / factor);
+ if (actualSize !== targetSize) {
+ // Adjust factor to get exact target
+ factor = Math.max(1, Math.floor(previousSize / targetSize));
+ actualSize = Math.floor(previousSize / factor);
+ // If still not exact, try ceil
+ if (actualSize !== targetSize) {
+ factor = Math.max(1, Math.ceil(previousSize / targetSize));
+ }
+ }
+ return Math.max(1, factor);
+ }
  /**
  * Convert dimension scale factors to ITK-Wasm format
+ * This computes the incremental scale factor relative to the previous scale,
+ * not the absolute scale factor from the original image.
+ *
+ * When originalImage and previousImage are provided, calculates the exact
+ * incremental factor needed to reach the target size from the previous size.
+ * This ensures we get exact 1x, 2x, 3x, 4x sizes even with incremental downsampling.
  */
- function dimScaleFactors(dims, scaleFactor, previousDimFactors) {
+ function dimScaleFactors(dims, scaleFactor, previousDimFactors, originalImage, previousImage) {
  const dimFactors = {};
  if (typeof scaleFactor === "number") {
- for (const dim of dims) {
- if (SPATIAL_DIMS.includes(dim)) {
- dimFactors[dim] = scaleFactor;
+ if (originalImage !== undefined && previousImage !== undefined) {
+ // Calculate target size: floor(original_size / scale_factor)
+ // Then calculate incremental factor from previous size to target size
+ for (const dim of dims) {
+ if (SPATIAL_DIMS.includes(dim)) {
+ const dimIndex = originalImage.dims.indexOf(dim);
+ const originalSize = originalImage.data.shape[dimIndex];
+ const targetSize = Math.floor(originalSize / scaleFactor);
+ const prevDimIndex = previousImage.dims.indexOf(dim);
+ const previousSize = previousImage.data.shape[prevDimIndex];
+ dimFactors[dim] = calculateIncrementalFactor(previousSize, targetSize);
+ }
+ else {
+ dimFactors[dim] = 1;
+ }
  }
- else {
- dimFactors[dim] = previousDimFactors[dim] || 1;
+ }
+ else {
+ // Fallback to old behavior when images not provided
+ for (const dim of dims) {
+ if (SPATIAL_DIMS.includes(dim)) {
+ // Divide by previous factor to get incremental scaling
+ // Use Math.floor to truncate (matching Python's int() behavior)
+ const incrementalFactor = scaleFactor /
+ (previousDimFactors[dim] || 1);
+ dimFactors[dim] = Math.max(1, Math.floor(incrementalFactor));
+ }
+ else {
+ dimFactors[dim] = previousDimFactors[dim] || 1;
+ }
  }
  }
  }
  else {
- for (const dim of dims) {
- if (dim in scaleFactor) {
- dimFactors[dim] = scaleFactor[dim];
+ if (originalImage !== undefined && previousImage !== undefined) {
+ for (const dim in scaleFactor) {
+ const dimIndex = originalImage.dims.indexOf(dim);
+ const originalSize = originalImage.data.shape[dimIndex];
+ const targetSize = Math.floor(originalSize / scaleFactor[dim]);
+ const prevDimIndex = previousImage.dims.indexOf(dim);
+ const previousSize = previousImage.data.shape[prevDimIndex];
+ dimFactors[dim] = calculateIncrementalFactor(previousSize, targetSize);
  }
- else {
- dimFactors[dim] = previousDimFactors[dim] || 1;
+ }
+ else {
+ // Fallback to old behavior when images not provided
+ for (const dim in scaleFactor) {
+ // Divide by previous factor to get incremental scaling
+ // Use Math.floor to truncate (matching Python's int() behavior)
+ const incrementalFactor = scaleFactor[dim] /
+ (previousDimFactors[dim] || 1);
+ dimFactors[dim] = Math.max(1, Math.floor(incrementalFactor));
+ }
+ }
+ // Add dims not in scale_factor with factor of 1
+ for (const dim of dims) {
+ if (!(dim in dimFactors)) {
+ dimFactors[dim] = 1;
  }
  }
  }
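The hunk above introduces calculateIncrementalFactor and the new dimScaleFactors signature. Below is a small, self-contained TypeScript sketch of that incremental-factor arithmetic with hypothetical sizes; it re-states the logic for illustration and is not part of the published package.

    // Illustrative re-statement of the incremental-factor logic (hypothetical sizes).
    function incrementalFactor(previousSize: number, targetSize: number): number {
      if (targetSize <= 0) return 1;
      // First guess, then fall back to floor/ceil so that
      // Math.floor(previousSize / factor) hits the target exactly when possible.
      let factor = Math.ceil(previousSize / (targetSize + 0.5));
      if (Math.floor(previousSize / factor) !== targetSize) {
        factor = Math.max(1, Math.floor(previousSize / targetSize));
        if (Math.floor(previousSize / factor) !== targetSize) {
          factor = Math.max(1, Math.ceil(previousSize / targetSize));
        }
      }
      return Math.max(1, factor);
    }
    // Original x size 100 with absolute scale factors [2, 4]:
    // level 1 target = floor(100 / 2) = 50 -> incremental factor 2 from size 100
    // level 2 target = floor(100 / 4) = 25 -> incremental factor 2 from size 50
    console.log(incrementalFactor(100, 50)); // 2
    console.log(incrementalFactor(50, 25));  // 2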
@@ -68,10 +139,8 @@ function updatePreviousDimFactors(scaleFactor, spatialDims, previousDimFactors)
  }
  }
  else {
- for (const dim of spatialDims) {
- if (dim in scaleFactor) {
- updated[dim] = scaleFactor[dim];
- }
+ for (const dim in scaleFactor) {
+ updated[dim] = scaleFactor[dim];
  }
  }
  return updated;
@@ -86,38 +155,17 @@ function nextScaleMetadata(image, dimFactors, spatialDims) {
  if (spatialDims.includes(dim)) {
  const factor = dimFactors[dim];
  scale[dim] = image.scale[dim] * factor;
+ // Add offset to account for pixel center shift when downsampling
  translation[dim] = image.translation[dim] +
  0.5 * (factor - 1) * image.scale[dim];
  }
  else {
- // Only copy non-spatial dimensions if they exist in the source
- if (dim in image.scale) {
- scale[dim] = image.scale[dim];
- }
- if (dim in image.translation) {
- translation[dim] = image.translation[dim];
- }
+ scale[dim] = image.scale[dim];
+ translation[dim] = image.translation[dim];
  }
  }
  return [translation, scale];
  }
- /**
- * Compute Gaussian kernel sigma values in pixel units for downsampling.
- *
- * Formula: sigma = sqrt((k^2 - 1^2)/(2*sqrt(2*ln(2)))^2)
- *
- * Reference:
- * - https://discourse.itk.org/t/resampling-to-isotropic-signal-processing-theory/1403/16
- * - https://doi.org/10.1007/978-3-319-24571-3_81
- * - http://discovery.ucl.ac.uk/1469251/1/scale-factor-point-5.pdf
- *
- * @param shrinkFactors - Shrink ratio along each axis
- * @returns Standard deviation of Gaussian kernel along each axis
- */
- function computeSigma(shrinkFactors) {
- const denominator = Math.pow(2 * Math.sqrt(2 * Math.log(2)), 2);
- return shrinkFactors.map((factor) => Math.sqrt((factor * factor - 1) / denominator));
- }
  /**
  * Convert zarr array to ITK-Wasm Image format
  * If isVector is true, ensures "c" dimension is last by transposing if needed
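The hunk above touches nextScaleMetadata, which adds a pixel-center offset when downsampling. A short TypeScript sketch of that arithmetic with made-up scale and translation values (illustrative only):

    // Hypothetical x-axis metadata before downsampling by a factor of 2.
    const factor = 2;
    const oldScale = 1.0;        // spacing between pixel centers
    const oldTranslation = 0.0;  // position of the first pixel center
    const newScale = oldScale * factor;                                    // 2.0
    const newTranslation = oldTranslation + 0.5 * (factor - 1) * oldScale; // 0.5
    // The first coarse pixel covers the original pixels centered at 0.0 and 1.0,
    // so its center sits at 0.5 -- exactly the offset term added above.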
@@ -158,6 +206,9 @@ async function zarrToItkImage(array, dims, isVector = false) {
  // For vector images, the last dimension is the component count, not a spatial dimension
  const spatialShape = isVector ? shape.slice(0, -1) : shape;
  const components = isVector ? shape[shape.length - 1] : 1;
+ // ITK expects size in physical space order [x, y, z], but spatialShape is in array order [z, y, x]
+ // So we need to reverse it
+ const itkSize = [...spatialShape].reverse();
  // Create ITK-Wasm image
  const itkImage = {
  imageType: {
@@ -170,8 +221,8 @@ async function zarrToItkImage(array, dims, isVector = false) {
  origin: spatialShape.map(() => 0),
  spacing: spatialShape.map(() => 1),
  direction: createIdentityMatrix(spatialShape.length),
- size: spatialShape,
- data,
+ size: itkSize,
+ data: data,
  metadata: new Map(),
  };
  return itkImage;
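A quick sketch of the axis-order reversal used when building the ITK-Wasm image above, with a hypothetical [z, y, x] zarr shape (illustrative only):

    // zarr / NGFF shapes list the slowest-varying axis first: [z, y, x].
    // ITK-Wasm size is in physical order: [x, y, z].
    const spatialShape = [16, 512, 1024];        // hypothetical zarr order [z, y, x]
    const itkSize = [...spatialShape].reverse(); // [1024, 512, 16] = [x, y, z]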
@@ -204,12 +255,6 @@ function copyTypedArray(data) {
  else if (data instanceof Int32Array) {
  return new Int32Array(data);
  }
- else if (data instanceof BigInt64Array) {
- return new BigInt64Array(data);
- }
- else if (data instanceof BigUint64Array) {
- return new BigUint64Array(data);
- }
  else {
  // Convert to Float32Array as fallback
  return new Float32Array(data);
@@ -230,23 +275,17 @@ function transposeArray(data, shape, permutation, componentType) {
  case "int8":
  output = new Int8Array(totalSize);
  break;
- case "int16":
- output = new Int16Array(totalSize);
- break;
  case "uint16":
  output = new Uint16Array(totalSize);
  break;
- case "int32":
- output = new Int32Array(totalSize);
+ case "int16":
+ output = new Int16Array(totalSize);
  break;
  case "uint32":
  output = new Uint32Array(totalSize);
  break;
- case "int64":
- output = new BigInt64Array(totalSize);
- break;
- case "uint64":
- output = new BigUint64Array(totalSize);
+ case "int32":
+ output = new Int32Array(totalSize);
  break;
  case "float64":
  output = new Float64Array(totalSize);
@@ -301,10 +340,6 @@ function getItkComponentType(data) {
  return "uint32";
  if (data instanceof Int32Array)
  return "int32";
- if (data instanceof BigUint64Array)
- return "uint64";
- if (data instanceof BigInt64Array)
- return "int64";
  if (data instanceof Float64Array)
  return "float64";
  return "float32";
@@ -321,12 +356,25 @@ function createIdentityMatrix(dimension) {
  }
  /**
  * Convert ITK-Wasm Image back to zarr array
+ * Uses the provided store instead of creating a new one
+ *
+ * Important: ITK-Wasm stores size in physical space order [x, y, z], but data in
+ * column-major order (x contiguous). This column-major layout with size [x, y, z]
+ * is equivalent to C-order (row-major) with shape [z, y, x]. We reverse the size
+ * to get the zarr shape and use C-order strides for that reversed shape.
+ *
+ * @param itkImage - The ITK-Wasm image to convert
+ * @param store - The zarr store to write to
+ * @param path - The path within the store
+ * @param chunkShape - The chunk shape (in spatial dimension order, will be adjusted for components)
+ * @param targetDims - The target dimension order (e.g., ["c", "z", "y", "x"])
  */
- async function itkImageToZarr(itkImage, path, chunkShape) {
- // Use in-memory store
- const store = new Map();
+ async function itkImageToZarr(itkImage, store, path, chunkShape, targetDims) {
  const root = zarr.root(store);
- // Determine data type
+ if (!itkImage.data) {
+ throw new Error("ITK image data is null or undefined");
+ }
+ // Determine data type - support all ITK TypedArray types
  let dataType;
  if (itkImage.data instanceof Uint8Array) {
  dataType = "uint8";
@@ -334,44 +382,125 @@ async function itkImageToZarr(itkImage, path, chunkShape) {
  else if (itkImage.data instanceof Int8Array) {
  dataType = "int8";
  }
- else if (itkImage.data instanceof Int16Array) {
- dataType = "int16";
- }
  else if (itkImage.data instanceof Uint16Array) {
  dataType = "uint16";
  }
- else if (itkImage.data instanceof Int32Array) {
- dataType = "int32";
+ else if (itkImage.data instanceof Int16Array) {
+ dataType = "int16";
  }
  else if (itkImage.data instanceof Uint32Array) {
  dataType = "uint32";
  }
- else if (itkImage.data instanceof BigInt64Array) {
- dataType = "int64";
+ else if (itkImage.data instanceof Int32Array) {
+ dataType = "int32";
  }
- else if (itkImage.data instanceof BigUint64Array) {
- dataType = "uint64";
+ else if (itkImage.data instanceof Float32Array) {
+ dataType = "float32";
  }
  else if (itkImage.data instanceof Float64Array) {
  dataType = "float64";
  }
- else if (itkImage.data instanceof Float32Array) {
- dataType = "float32";
+ else {
+ throw new Error(`Unsupported data type: ${itkImage.data.constructor.name}`);
+ }
+ // ITK stores size/spacing/origin in physical space order [x, y, z],
+ // but the data buffer is in C-order (row-major) which means [z, y, x] indexing.
+ // We need to reverse the size to match the data layout, just like we do for spacing/origin.
+ const shape = [...itkImage.size].reverse();
+ // For vector images, the components are stored in the data but not in the size
+ // The actual data length includes components
+ const components = itkImage.imageType.components || 1;
+ const isVector = components > 1;
+ // Validate data length matches expected shape (including components for vector images)
+ const spatialElements = shape.reduce((a, b) => a * b, 1);
+ const expectedLength = spatialElements * components;
+ if (itkImage.data.length !== expectedLength) {
+ console.error(`[ERROR] Data length mismatch in itkImageToZarr:`);
+ console.error(` ITK image size (physical order):`, itkImage.size);
+ console.error(` Shape (reversed):`, shape);
+ console.error(` Components:`, components);
+ console.error(` Expected data length:`, expectedLength);
+ console.error(` Actual data length:`, itkImage.data.length);
+ throw new Error(`Data length (${itkImage.data.length}) doesn't match expected shape ${shape} with ${components} components (${expectedLength} elements)`);
+ }
+ // Determine the final shape and whether we need to transpose
+ // ITK image data has shape [...spatialDimsReversed, components] (with c at end)
+ // If targetDims is provided, we need to match that order
+ let zarrShape;
+ let zarrChunkShape;
+ let finalData = itkImage.data;
+ if (isVector && targetDims) {
+ // Find where "c" should be in targetDims
+ const cIndex = targetDims.indexOf("c");
+ if (cIndex === -1) {
+ throw new Error("Vector image but 'c' not found in targetDims");
+ }
+ // Current shape is [z, y, x, c] (spatial reversed + c at end)
+ // Target shape should match targetDims order
+ const currentShape = [...shape, components];
+ // Build target shape based on targetDims
+ zarrShape = new Array(targetDims.length);
+ const spatialDims = shape.slice(); // [z, y, x]
+ let spatialIdx = 0;
+ for (let i = 0; i < targetDims.length; i++) {
+ if (targetDims[i] === "c") {
+ zarrShape[i] = components;
+ }
+ else {
+ zarrShape[i] = spatialDims[spatialIdx++];
+ }
+ }
+ // If c is not at the end, we need to transpose
+ if (cIndex !== targetDims.length - 1) {
+ // Build permutation: where does each target dim come from in current shape?
+ const permutation = [];
+ spatialIdx = 0;
+ for (let i = 0; i < targetDims.length; i++) {
+ if (targetDims[i] === "c") {
+ permutation.push(currentShape.length - 1); // c is at end of current
+ }
+ else {
+ permutation.push(spatialIdx++);
+ }
+ }
+ // Transpose the data
+ finalData = transposeArray(itkImage.data, currentShape, permutation, getItkComponentType(itkImage.data));
+ }
+ // Chunk shape should match zarrShape
+ zarrChunkShape = new Array(zarrShape.length);
+ spatialIdx = 0;
+ for (let i = 0; i < targetDims.length; i++) {
+ if (targetDims[i] === "c") {
+ zarrChunkShape[i] = components;
+ }
+ else {
+ zarrChunkShape[i] = chunkShape[spatialIdx++];
+ }
+ }
  }
  else {
- dataType = "float32";
+ // No targetDims or not a vector - use default behavior
+ zarrShape = isVector ? [...shape, components] : shape;
+ zarrChunkShape = isVector ? [...chunkShape, components] : chunkShape;
+ }
+ // Chunk shape should match the dimensionality of zarrShape
+ if (zarrChunkShape.length !== zarrShape.length) {
+ throw new Error(`chunkShape length (${zarrChunkShape.length}) must match shape length (${zarrShape.length})`);
  }
  const array = await zarr.create(root.resolve(path), {
- shape: itkImage.size,
- chunk_shape: chunkShape,
+ shape: zarrShape,
+ chunk_shape: zarrChunkShape,
  data_type: dataType,
  fill_value: 0,
  });
- // Write data
- await zarr.set(array, [], {
- data: itkImage.data,
- shape: itkImage.size,
- stride: calculateStride(itkImage.size),
+ // Write data - preserve the actual data type, don't cast to Float32Array
+ // Shape and stride should match the ITK image size order
+ // Use null for each dimension to select the entire array
+ const selection = zarrShape.map(() => null);
+ await zarr.set(array, selection, {
+ data: finalData,
+ shape: zarrShape,
+ stride: calculateStride(zarrShape),
  });
  return array;
  }
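The write in the hunk above relies on C-order strides for the reversed shape. A standalone TypeScript sketch of that stride arithmetic with a hypothetical shape; it mirrors what the package's calculateStride helper appears to compute and is illustrative only.

    // Row-major (C-order) strides: the last axis is contiguous.
    function cOrderStride(shape: number[]): number[] {
      const stride = new Array(shape.length).fill(1);
      for (let i = shape.length - 2; i >= 0; i--) {
        stride[i] = stride[i + 1] * shape[i + 1];
      }
      return stride;
    }
    console.log(cOrderStride([16, 512, 1024])); // [524288, 1024, 1]
    // x is contiguous (stride 1), which matches ITK-Wasm's x-fastest buffer
    // layout once the size is viewed as the reversed zarr shape [z, y, x].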
@@ -387,316 +516,115 @@ function calculateStride(shape) {
  return stride;
  }
  /**
- * Process channel-first data by downsampling each channel separately
+ * Perform Gaussian downsampling using ITK-Wasm
  */
- async function downsampleChannelFirst(image, dimFactors, spatialDims, smoothing) {
- // Get the channel index and count
- const cIndex = image.dims.indexOf("c");
- const result = await zarr.get(image.data);
- const channelCount = result.shape[cIndex];
- // Process each channel separately
- const downsampledChannels = [];
- for (let channelIdx = 0; channelIdx < channelCount; channelIdx++) {
- // Extract single channel data
- const channelSlice = extractChannel(result, cIndex, channelIdx);
- // Create temporary zarr array for this channel
- const store = new Map();
- const root = zarr.root(store);
- const channelDims = image.dims.filter((d) => d !== "c");
- const channelShape = result.shape.filter((_, i) => i !== cIndex);
- const chunkShape = channelShape.map((s) => Math.min(s, 256));
- const channelArray = await zarr.create(root.resolve("channel"), {
- shape: channelShape,
- chunk_shape: chunkShape,
- data_type: getItkComponentType(result.data),
- fill_value: 0,
- });
- await zarr.set(channelArray, [], {
- data: channelSlice,
- shape: channelShape,
- stride: calculateStride(channelShape),
- });
- // Create NgffImage for this channel (unused but kept for potential future use)
- // const _channelImage = new NgffImage({
- // data: channelArray,
- // dims: channelDims,
- // scale: Object.fromEntries(
- // Object.entries(image.scale).filter(([k]) => k !== "c")
- // ),
- // translation: Object.fromEntries(
- // Object.entries(image.translation).filter(([k]) => k !== "c")
- // ),
- // name: image.name,
- // axesUnits: image.axesUnits,
- // computedCallbacks: image.computedCallbacks,
- // });
- // Downsample this channel
- const itkImage = await zarrToItkImage(channelArray, channelDims, false);
- const shrinkFactors = [];
- for (let i = 0; i < channelDims.length; i++) {
- const dim = channelDims[i];
- if (SPATIAL_DIMS.includes(dim)) {
- shrinkFactors.push(dimFactors[dim] || 1);
- }
- else {
- shrinkFactors.push(1); // Non-spatial dimensions don't shrink
- }
- }
- let downsampled;
- if (smoothing === "gaussian") {
- const blockSize = itkImage.size.slice().reverse();
- const sigma = computeSigma(shrinkFactors);
- const { radius: _radius } = await (0, downsample_1.gaussianKernelRadiusNode)({
- size: blockSize,
- sigma,
- });
- const result = await (0, downsample_1.downsampleNode)(itkImage, {
- shrinkFactors,
- cropRadius: shrinkFactors.map(() => 0),
- });
- downsampled = result.downsampled;
- }
- else if (smoothing === "bin_shrink") {
- const result = await (0, downsample_1.downsampleBinShrinkNode)(itkImage, {
- shrinkFactors,
- });
- downsampled = result.downsampled;
- }
- else if (smoothing === "label_image") {
- const blockSize = itkImage.size.slice().reverse();
- const sigma = computeSigma(shrinkFactors);
- const { radius: _radius } = await (0, downsample_1.gaussianKernelRadiusNode)({
- size: blockSize,
- sigma,
+ async function downsampleGaussian(image, dimFactors, spatialDims) {
+ // Handle time dimension by processing each time slice independently
+ if (image.dims.includes("t")) {
+ const tDimIndex = image.dims.indexOf("t");
+ const tSize = image.data.shape[tDimIndex];
+ const newDims = image.dims.filter((dim) => dim !== "t");
+ // Downsample each time slice
+ const downsampledSlices = [];
+ for (let t = 0; t < tSize; t++) {
+ // Extract time slice
+ const selection = new Array(image.data.shape.length).fill(null);
+ selection[tDimIndex] = t;
+ const sliceData = await zarr.get(image.data, selection);
+ // Create temporary zarr array for this slice
+ const sliceStore = new Map();
+ const sliceRoot = zarr.root(sliceStore);
+ const sliceShape = image.data.shape.filter((_, i) => i !== tDimIndex);
+ const sliceChunkShape = sliceShape.map((s) => Math.min(s, 256));
+ const sliceArray = await zarr.create(sliceRoot.resolve("slice"), {
+ shape: sliceShape,
+ chunk_shape: sliceChunkShape,
+ data_type: image.data.dtype,
+ fill_value: 0,
  });
- const result = await (0, downsample_1.downsampleLabelImageNode)(itkImage, {
- shrinkFactors,
- cropRadius: shrinkFactors.map(() => 0),
+ const fullSelection = new Array(sliceShape.length).fill(null);
+ await zarr.set(sliceArray, fullSelection, sliceData);
+ // Create NgffImage for this slice (without 't' dimension)
+ const sliceImage = new ngff_image_js_1.NgffImage({
+ data: sliceArray,
+ dims: newDims,
+ scale: Object.fromEntries(Object.entries(image.scale).filter(([dim]) => dim !== "t")),
+ translation: Object.fromEntries(Object.entries(image.translation).filter(([dim]) => dim !== "t")),
+ name: image.name,
+ axesUnits: image.axesUnits
+ ? Object.fromEntries(Object.entries(image.axesUnits).filter(([dim]) => dim !== "t"))
+ : undefined,
+ computedCallbacks: image.computedCallbacks,
  });
- downsampled = result.downsampled;
- }
- else {
- throw new Error(`Unknown smoothing method: ${smoothing}`);
+ // Recursively downsample this slice (without 't', so no infinite loop)
+ const downsampledSlice = await downsampleGaussian(sliceImage, dimFactors, spatialDims);
+ downsampledSlices.push(downsampledSlice.data);
  }
- // Convert back to zarr array
- const downsampledChunkShape = downsampled.size.map((s) => Math.min(s, 256));
- const downsampledArray = await itkImageToZarr(downsampled, "downsampled_channel", downsampledChunkShape);
- downsampledChannels.push(downsampledArray);
- }
- // Combine all channels back together
- const combinedArray = await combineChannels(downsampledChannels, cIndex, image.dims);
- // Compute new metadata
- const [translation, scale] = nextScaleMetadata(image, dimFactors, spatialDims);
- return new ngff_image_js_1.NgffImage({
- data: combinedArray,
- dims: image.dims,
- scale,
- translation,
- name: image.name,
- axesUnits: image.axesUnits,
- computedCallbacks: image.computedCallbacks,
- });
- }
- /**
- * Extract a single channel from the data
- */
- function extractChannel(result, cIndex, channelIdx) {
- const typedData = result.data;
- const shape = result.shape;
- // Calculate output size (all dims except channel)
- const outputSize = shape.reduce((acc, s, i) => (i === cIndex ? acc : acc * s), 1);
- let output;
- if (typedData instanceof Uint8Array) {
- output = new Uint8Array(outputSize);
- }
- else if (typedData instanceof Int8Array) {
- output = new Int8Array(outputSize);
- }
- else if (typedData instanceof Int16Array) {
- output = new Int16Array(outputSize);
- }
- else if (typedData instanceof Uint16Array) {
- output = new Uint16Array(outputSize);
- }
- else if (typedData instanceof Int32Array) {
- output = new Int32Array(outputSize);
- }
- else if (typedData instanceof Uint32Array) {
- output = new Uint32Array(outputSize);
- }
- else if (typedData instanceof BigInt64Array) {
- output = new BigInt64Array(outputSize);
- }
- else if (typedData instanceof BigUint64Array) {
- output = new BigUint64Array(outputSize);
- }
- else if (typedData instanceof Float64Array) {
- output = new Float64Array(outputSize);
- }
- else {
- output = new Float32Array(outputSize);
- }
- // Calculate strides
- const stride = calculateStride(shape);
- const outputShape = shape.filter((_, i) => i !== cIndex);
- const _outputStride = calculateStride(outputShape);
- // Extract channel
- const indices = new Array(shape.length).fill(0);
- let outputIdx = 0;
- for (let i = 0; i < outputSize; i++) {
- // Set channel index
- indices[cIndex] = channelIdx;
- // Calculate source index
- let sourceIdx = 0;
- for (let j = 0; j < shape.length; j++) {
- sourceIdx += indices[j] * stride[j];
- }
- output[outputIdx++] = typedData[sourceIdx];
- // Increment indices (skip channel dimension)
- for (let j = shape.length - 1; j >= 0; j--) {
- if (j === cIndex)
- continue;
- indices[j]++;
- if (indices[j] < shape[j])
- break;
- indices[j] = 0;
- }
- }
- return output;
- }
- /**
- * Combine multiple channel arrays back into a single multi-channel array
- */
- async function combineChannels(channels, cIndex, _originalDims) {
- // Read all channel data
- const channelData = await Promise.all(channels.map((c) => zarr.get(c)));
- // Determine combined shape
- const firstChannel = channelData[0];
- const channelShape = firstChannel.shape;
- const combinedShape = [...channelShape];
- combinedShape.splice(cIndex, 0, channels.length);
- // Create combined array
- const store = new Map();
- const root = zarr.root(store);
- const chunkShape = combinedShape.map((s) => Math.min(s, 256));
- const dataType = getItkComponentType(firstChannel.data);
- const combinedArray = await zarr.create(root.resolve("combined"), {
- shape: combinedShape,
- chunk_shape: chunkShape,
- data_type: dataType,
- fill_value: 0,
- });
- // Combine all channels
- const totalSize = combinedShape.reduce((acc, s) => acc * s, 1);
- let combined;
- if (dataType === "uint8") {
- combined = new Uint8Array(totalSize);
- }
- else if (dataType === "int8") {
- combined = new Int8Array(totalSize);
- }
- else if (dataType === "int16") {
- combined = new Int16Array(totalSize);
- }
- else if (dataType === "uint16") {
- combined = new Uint16Array(totalSize);
- }
- else if (dataType === "int32") {
- combined = new Int32Array(totalSize);
- }
- else if (dataType === "uint32") {
- combined = new Uint32Array(totalSize);
- }
- else if (dataType === "int64") {
- combined = new BigInt64Array(totalSize);
- }
- else if (dataType === "uint64") {
- combined = new BigUint64Array(totalSize);
- }
- else if (dataType === "float64") {
- combined = new Float64Array(totalSize);
- }
- else {
- combined = new Float32Array(totalSize);
- }
- const stride = calculateStride(combinedShape);
- const _channelStride = calculateStride(channelShape);
- // Copy each channel's data
- for (let c = 0; c < channels.length; c++) {
- const channelTypedData = channelData[c].data;
- const indices = new Array(combinedShape.length).fill(0);
- for (let i = 0; i < channelTypedData.length; i++) {
- // Set channel index
- indices[cIndex] = c;
- // Calculate target index in combined array
- let targetIdx = 0;
- for (let j = 0; j < combinedShape.length; j++) {
- targetIdx += indices[j] * stride[j];
- }
- combined[targetIdx] = channelTypedData[i];
- // Increment indices (skip channel dimension)
- for (let j = combinedShape.length - 1; j >= 0; j--) {
- if (j === cIndex)
- continue;
- indices[j]++;
- if (indices[j] < combinedShape[j])
- break;
- indices[j] = 0;
+ // Combine downsampled slices back into a single array with 't' dimension
+ const firstSlice = downsampledSlices[0];
+ const combinedShape = [...image.data.shape];
+ combinedShape[tDimIndex] = tSize;
+ // Update spatial dimensions based on downsampled size
+ for (let i = 0; i < image.dims.length; i++) {
+ if (i !== tDimIndex) {
+ const sliceIndex = i < tDimIndex ? i : i - 1;
+ combinedShape[i] = firstSlice.shape[sliceIndex];
  }
  }
+ // Create combined array
+ const combinedStore = new Map();
+ const combinedRoot = zarr.root(combinedStore);
+ const combinedArray = await zarr.create(combinedRoot.resolve("combined"), {
+ shape: combinedShape,
+ chunk_shape: combinedShape.map((s) => Math.min(s, 256)),
+ data_type: image.data.dtype,
+ fill_value: 0,
+ });
+ // Copy each downsampled slice into the combined array
+ for (let t = 0; t < tSize; t++) {
+ const sliceData = await zarr.get(downsampledSlices[t]);
+ const targetSelection = new Array(combinedShape.length).fill(null);
+ targetSelection[tDimIndex] = t;
+ await zarr.set(combinedArray, targetSelection, sliceData);
+ }
+ // Compute new metadata (time dimension unchanged, spatial dimensions downsampled)
+ const [translation, scale] = nextScaleMetadata(image, dimFactors, spatialDims);
+ return new ngff_image_js_1.NgffImage({
+ data: combinedArray,
+ dims: image.dims,
+ scale: { ...image.scale, ...scale },
+ translation: { ...image.translation, ...translation },
+ name: image.name,
+ axesUnits: image.axesUnits,
+ computedCallbacks: image.computedCallbacks,
+ });
  }
- // Write combined data
- await zarr.set(combinedArray, [], {
- data: combined,
- shape: combinedShape,
- stride,
- });
- return combinedArray;
- }
- /**
- * Perform Gaussian downsampling using ITK-Wasm
- */
- async function downsampleGaussian(image, dimFactors, spatialDims) {
- const cIndex = image.dims.indexOf("c");
- const isVector = cIndex === image.dims.length - 1;
- const isChannelFirst = cIndex !== -1 && cIndex < image.dims.length - 1 &&
- !isVector;
- // If channel is first (before spatial dims), process each channel separately
- if (isChannelFirst) {
- return await downsampleChannelFirst(image, dimFactors, spatialDims, "gaussian");
- }
+ const isVector = image.dims.includes("c");
  // Convert to ITK-Wasm format
  const itkImage = await zarrToItkImage(image.data, image.dims, isVector);
- // Prepare shrink factors - need to be for spatial dimensions only
- // For vector images, the last dimension (c) is NOT a spatial dimension in the ITK image
+ // Prepare shrink factors - need to be for ALL dimensions in ITK order (reversed)
  const shrinkFactors = [];
- const effectiveDims = isVector ? image.dims.slice(0, -1) : image.dims;
- for (let i = 0; i < effectiveDims.length; i++) {
- const dim = effectiveDims[i];
+ for (let i = image.dims.length - 1; i >= 0; i--) {
+ const dim = image.dims[i];
  if (SPATIAL_DIMS.includes(dim)) {
  shrinkFactors.push(dimFactors[dim] || 1);
  }
- else {
- shrinkFactors.push(1); // Non-spatial dimensions don't shrink
- }
  }
- // Compute kernel radius - sigma should also be for ALL dimensions
- const blockSize = itkImage.size.slice().reverse();
- const sigma = computeSigma(shrinkFactors);
- const { radius: _radius } = await (0, downsample_1.gaussianKernelRadiusNode)({
- size: blockSize,
- sigma,
- });
+ // Use all zeros for cropRadius
+ const cropRadius = new Array(shrinkFactors.length).fill(0);
  // Perform downsampling
  const { downsampled } = await (0, downsample_1.downsampleNode)(itkImage, {
  shrinkFactors,
- cropRadius: shrinkFactors.map(() => 0),
+ cropRadius: cropRadius,
  });
  // Compute new metadata
  const [translation, scale] = nextScaleMetadata(image, dimFactors, spatialDims);
- // Convert back to zarr array
- const chunkShape = downsampled.size.map((s) => Math.min(s, 256));
- const array = await itkImageToZarr(downsampled, "downsampled", chunkShape);
+ // Convert back to zarr array in a new in-memory store
+ // Each downsampled image gets its own store - toNgffZarr will handle copying to target
+ const store = new Map();
+ // Chunk shape needs to be in zarr order (reversed from ITK order)
+ const chunkShape = downsampled.size.map((s) => Math.min(s, 256)).reverse();
+ const array = await itkImageToZarr(downsampled, store, "image", chunkShape, image.dims);
  return new ngff_image_js_1.NgffImage({
  data: array,
  dims: image.dims,
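The Gaussian path in the hunk above now builds its shrink factors by walking image.dims in reverse and keeping only spatial axes. A short TypeScript sketch of that ordering with hypothetical dims and factors (illustrative only):

    const dims = ["c", "z", "y", "x"];                        // hypothetical NGFF order
    const dimFactors: Record<string, number> = { x: 2, y: 2, z: 1 };
    const SPATIAL = ["x", "y", "z"];
    const shrinkFactors: number[] = [];
    for (let i = dims.length - 1; i >= 0; i--) {
      if (SPATIAL.includes(dims[i])) {
        shrinkFactors.push(dimFactors[dims[i]] ?? 1);
      }
    }
    console.log(shrinkFactors); // [2, 2, 1], i.e. x-fastest [x, y, z] order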
@@ -711,28 +639,97 @@ async function downsampleGaussian(image, dimFactors, spatialDims) {
  * Perform bin shrink downsampling using ITK-Wasm
  */
  async function downsampleBinShrinkImpl(image, dimFactors, spatialDims) {
- const cIndex = image.dims.indexOf("c");
- const isVector = cIndex === image.dims.length - 1;
- const isChannelFirst = cIndex !== -1 && cIndex < image.dims.length - 1 &&
- !isVector;
- // If channel is first (before spatial dims), process each channel separately
- if (isChannelFirst) {
- return await downsampleChannelFirst(image, dimFactors, spatialDims, "bin_shrink");
+ // Handle time dimension by processing each time slice independently
+ if (image.dims.includes("t")) {
+ const tDimIndex = image.dims.indexOf("t");
+ const tSize = image.data.shape[tDimIndex];
+ const newDims = image.dims.filter((dim) => dim !== "t");
+ // Downsample each time slice
+ const downsampledSlices = [];
+ for (let t = 0; t < tSize; t++) {
+ // Extract time slice
+ const selection = new Array(image.data.shape.length).fill(null);
+ selection[tDimIndex] = t;
+ const sliceData = await zarr.get(image.data, selection);
+ // Create temporary zarr array for this slice
+ const sliceStore = new Map();
+ const sliceRoot = zarr.root(sliceStore);
+ const sliceShape = image.data.shape.filter((_, i) => i !== tDimIndex);
+ const sliceChunkShape = sliceShape.map((s) => Math.min(s, 256));
+ const sliceArray = await zarr.create(sliceRoot.resolve("slice"), {
+ shape: sliceShape,
+ chunk_shape: sliceChunkShape,
+ data_type: image.data.dtype,
+ fill_value: 0,
+ });
+ const fullSelection = new Array(sliceShape.length).fill(null);
+ await zarr.set(sliceArray, fullSelection, sliceData);
+ // Create NgffImage for this slice (without 't' dimension)
+ const sliceImage = new ngff_image_js_1.NgffImage({
+ data: sliceArray,
+ dims: newDims,
+ scale: Object.fromEntries(Object.entries(image.scale).filter(([dim]) => dim !== "t")),
+ translation: Object.fromEntries(Object.entries(image.translation).filter(([dim]) => dim !== "t")),
+ name: image.name,
+ axesUnits: image.axesUnits
+ ? Object.fromEntries(Object.entries(image.axesUnits).filter(([dim]) => dim !== "t"))
+ : undefined,
+ computedCallbacks: image.computedCallbacks,
+ });
+ // Recursively downsample this slice
+ const downsampledSlice = await downsampleBinShrinkImpl(sliceImage, dimFactors, spatialDims);
+ downsampledSlices.push(downsampledSlice.data);
+ }
+ // Combine downsampled slices back into a single array with 't' dimension
+ const firstSlice = downsampledSlices[0];
+ const combinedShape = [...image.data.shape];
+ combinedShape[tDimIndex] = tSize;
+ // Update spatial dimensions based on downsampled size
+ for (let i = 0; i < image.dims.length; i++) {
+ if (i !== tDimIndex) {
+ const sliceIndex = i < tDimIndex ? i : i - 1;
+ combinedShape[i] = firstSlice.shape[sliceIndex];
+ }
+ }
+ // Create combined array
+ const combinedStore = new Map();
+ const combinedRoot = zarr.root(combinedStore);
+ const combinedArray = await zarr.create(combinedRoot.resolve("combined"), {
+ shape: combinedShape,
+ chunk_shape: combinedShape.map((s) => Math.min(s, 256)),
+ data_type: image.data.dtype,
+ fill_value: 0,
+ });
+ // Copy each downsampled slice into the combined array
+ for (let t = 0; t < tSize; t++) {
+ const sliceData = await zarr.get(downsampledSlices[t]);
+ const targetSelection = new Array(combinedShape.length).fill(null);
+ targetSelection[tDimIndex] = t;
+ await zarr.set(combinedArray, targetSelection, sliceData);
+ }
+ // Compute new metadata
+ const [translation, scale] = nextScaleMetadata(image, dimFactors, spatialDims);
+ return new ngff_image_js_1.NgffImage({
+ data: combinedArray,
+ dims: image.dims,
+ scale: { ...image.scale, ...scale },
+ translation: { ...image.translation, ...translation },
+ name: image.name,
+ axesUnits: image.axesUnits,
+ computedCallbacks: image.computedCallbacks,
+ });
  }
+ const isVector = image.dims.includes("c");
  // Convert to ITK-Wasm format
  const itkImage = await zarrToItkImage(image.data, image.dims, isVector);
- // Prepare shrink factors - need to be for spatial dimensions only
- // For vector images, the last dimension (c) is NOT a spatial dimension in the ITK image
+ // Prepare shrink factors - only for spatial dimensions in ITK order (reversed)
+ // ITK bin shrink does not expect shrink factors for non-spatial dimensions like 'c'
  const shrinkFactors = [];
- const effectiveDims = isVector ? image.dims.slice(0, -1) : image.dims;
- for (let i = 0; i < effectiveDims.length; i++) {
- const dim = effectiveDims[i];
+ for (let i = image.dims.length - 1; i >= 0; i--) {
+ const dim = image.dims[i];
  if (SPATIAL_DIMS.includes(dim)) {
  shrinkFactors.push(dimFactors[dim] || 1);
  }
- else {
- shrinkFactors.push(1); // Non-spatial dimensions don't shrink
- }
  }
  // Perform downsampling
  const { downsampled } = await (0, downsample_1.downsampleBinShrinkNode)(itkImage, {
@@ -740,9 +737,12 @@ async function downsampleBinShrinkImpl(image, dimFactors, spatialDims) {
  });
  // Compute new metadata
  const [translation, scale] = nextScaleMetadata(image, dimFactors, spatialDims);
- // Convert back to zarr array
- const chunkShape = downsampled.size.map((s) => Math.min(s, 256));
- const array = await itkImageToZarr(downsampled, "downsampled", chunkShape);
+ // Convert back to zarr array in a new in-memory store
+ // Each downsampled image gets its own store - toNgffZarr will handle copying to target
+ const store = new Map();
+ // Chunk shape needs to be in zarr order (reversed from ITK order)
+ const chunkShape = downsampled.size.map((s) => Math.min(s, 256)).reverse();
+ const array = await itkImageToZarr(downsampled, store, "image", chunkShape, image.dims);
  return new ngff_image_js_1.NgffImage({
  data: array,
  dims: image.dims,
@@ -757,22 +757,93 @@ async function downsampleBinShrinkImpl(image, dimFactors, spatialDims) {
  * Perform label image downsampling using ITK-Wasm
  */
  async function downsampleLabelImageImpl(image, dimFactors, spatialDims) {
- const cIndex = image.dims.indexOf("c");
- const isVector = cIndex === image.dims.length - 1;
- const isChannelFirst = cIndex !== -1 && cIndex < image.dims.length - 1 &&
- !isVector;
- // If channel is first (before spatial dims), process each channel separately
- if (isChannelFirst) {
- return await downsampleChannelFirst(image, dimFactors, spatialDims, "label_image");
+ // Handle time dimension by processing each time slice independently
+ if (image.dims.includes("t")) {
+ const tDimIndex = image.dims.indexOf("t");
+ const tSize = image.data.shape[tDimIndex];
+ const newDims = image.dims.filter((dim) => dim !== "t");
+ // Downsample each time slice
+ const downsampledSlices = [];
+ for (let t = 0; t < tSize; t++) {
+ // Extract time slice
+ const selection = new Array(image.data.shape.length).fill(null);
+ selection[tDimIndex] = t;
+ const sliceData = await zarr.get(image.data, selection);
+ // Create temporary zarr array for this slice
+ const sliceStore = new Map();
+ const sliceRoot = zarr.root(sliceStore);
+ const sliceShape = image.data.shape.filter((_, i) => i !== tDimIndex);
+ const sliceChunkShape = sliceShape.map((s) => Math.min(s, 256));
+ const sliceArray = await zarr.create(sliceRoot.resolve("slice"), {
+ shape: sliceShape,
+ chunk_shape: sliceChunkShape,
+ data_type: image.data.dtype,
+ fill_value: 0,
+ });
+ const fullSelection = new Array(sliceShape.length).fill(null);
+ await zarr.set(sliceArray, fullSelection, sliceData);
+ // Create NgffImage for this slice (without 't' dimension)
+ const sliceImage = new ngff_image_js_1.NgffImage({
+ data: sliceArray,
+ dims: newDims,
+ scale: Object.fromEntries(Object.entries(image.scale).filter(([dim]) => dim !== "t")),
+ translation: Object.fromEntries(Object.entries(image.translation).filter(([dim]) => dim !== "t")),
+ name: image.name,
+ axesUnits: image.axesUnits
+ ? Object.fromEntries(Object.entries(image.axesUnits).filter(([dim]) => dim !== "t"))
+ : undefined,
+ computedCallbacks: image.computedCallbacks,
+ });
+ // Recursively downsample this slice
+ const downsampledSlice = await downsampleLabelImageImpl(sliceImage, dimFactors, spatialDims);
+ downsampledSlices.push(downsampledSlice.data);
+ }
+ // Combine downsampled slices back into a single array with 't' dimension
+ const firstSlice = downsampledSlices[0];
+ const combinedShape = [...image.data.shape];
+ combinedShape[tDimIndex] = tSize;
+ // Update spatial dimensions based on downsampled size
+ for (let i = 0; i < image.dims.length; i++) {
+ if (i !== tDimIndex) {
+ const sliceIndex = i < tDimIndex ? i : i - 1;
+ combinedShape[i] = firstSlice.shape[sliceIndex];
+ }
+ }
+ // Create combined array
+ const combinedStore = new Map();
+ const combinedRoot = zarr.root(combinedStore);
+ const combinedArray = await zarr.create(combinedRoot.resolve("combined"), {
+ shape: combinedShape,
+ chunk_shape: combinedShape.map((s) => Math.min(s, 256)),
+ data_type: image.data.dtype,
+ fill_value: 0,
+ });
+ // Copy each downsampled slice into the combined array
+ for (let t = 0; t < tSize; t++) {
+ const sliceData = await zarr.get(downsampledSlices[t]);
+ const targetSelection = new Array(combinedShape.length).fill(null);
+ targetSelection[tDimIndex] = t;
+ await zarr.set(combinedArray, targetSelection, sliceData);
+ }
+ // Compute new metadata
+ const [translation, scale] = nextScaleMetadata(image, dimFactors, spatialDims);
+ return new ngff_image_js_1.NgffImage({
+ data: combinedArray,
+ dims: image.dims,
+ scale: { ...image.scale, ...scale },
+ translation: { ...image.translation, ...translation },
+ name: image.name,
+ axesUnits: image.axesUnits,
+ computedCallbacks: image.computedCallbacks,
+ });
  }
+ const isVector = image.dims.includes("c");
  // Convert to ITK-Wasm format
  const itkImage = await zarrToItkImage(image.data, image.dims, isVector);
- // Prepare shrink factors - need to be for spatial dimensions only
- // For vector images, the last dimension (c) is NOT a spatial dimension in the ITK image
+ // Prepare shrink factors - need to be for ALL dimensions in ITK order (reversed)
  const shrinkFactors = [];
- const effectiveDims = isVector ? image.dims.slice(0, -1) : image.dims;
- for (let i = 0; i < effectiveDims.length; i++) {
- const dim = effectiveDims[i];
+ for (let i = image.dims.length - 1; i >= 0; i--) {
+ const dim = image.dims[i];
  if (SPATIAL_DIMS.includes(dim)) {
  shrinkFactors.push(dimFactors[dim] || 1);
  }
  }
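The time-slice handling added across the three downsample implementations above leans on zarrita selections, where null keeps a whole axis and an integer indexes (and drops) it. A minimal TypeScript sketch, assuming a hypothetical 4-D t/z/y/x array (illustrative only; it mirrors the get/set pattern the package code itself uses):

    import * as zarr from "zarrita";

    async function timeSliceRoundTrip() {
      const store = new Map<string, Uint8Array>();
      const root = zarr.root(store);
      const array = await zarr.create(root.resolve("image"), {
        shape: [3, 4, 8, 8],       // hypothetical [t, z, y, x]
        chunk_shape: [1, 4, 8, 8],
        data_type: "uint8",
        fill_value: 0,
      });
      const t = 1;
      const selection = [t, null, null, null];        // pick one time point, keep z/y/x
      const slice = await zarr.get(array, selection);  // result shape [4, 8, 8], t axis dropped
      // ...downsample the slice here...
      await zarr.set(array, selection, slice);         // write it back at the same t
    }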
@@ -780,23 +851,21 @@ async function downsampleLabelImageImpl(image, dimFactors, spatialDims) {
  shrinkFactors.push(1); // Non-spatial dimensions don't shrink
  }
  }
- // Compute kernel radius
- const blockSize = itkImage.size.slice().reverse();
- const sigma = computeSigma(shrinkFactors);
- const { radius: _radius } = await (0, downsample_1.gaussianKernelRadiusNode)({
- size: blockSize,
- sigma,
- });
+ // Use all zeros for cropRadius
+ const cropRadius = new Array(shrinkFactors.length).fill(0);
  // Perform downsampling
  const { downsampled } = await (0, downsample_1.downsampleLabelImageNode)(itkImage, {
  shrinkFactors,
- cropRadius: shrinkFactors.map(() => 0),
+ cropRadius: cropRadius,
  });
  // Compute new metadata
  const [translation, scale] = nextScaleMetadata(image, dimFactors, spatialDims);
- // Convert back to zarr array
- const chunkShape = downsampled.size.map((s) => Math.min(s, 256));
- const array = await itkImageToZarr(downsampled, "downsampled", chunkShape);
+ // Convert back to zarr array in a new in-memory store
+ // Each downsampled image gets its own store - toNgffZarr will handle copying to target
+ const store = new Map();
+ // Chunk shape needs to be in zarr order (reversed from ITK order)
+ const chunkShape = downsampled.size.map((s) => Math.min(s, 256)).reverse();
+ const array = await itkImageToZarr(downsampled, store, "image", chunkShape, image.dims);
  return new ngff_image_js_1.NgffImage({
  data: array,
  dims: image.dims,
@@ -812,31 +881,104 @@ async function downsampleLabelImageImpl(image, dimFactors, spatialDims) {
  */
  async function downsampleItkWasm(ngffImage, scaleFactors, smoothing) {
  const multiscales = [ngffImage];
- let previousImage = ngffImage;
  const dims = ngffImage.dims;
+ const spatialDims = dims.filter((dim) => SPATIAL_DIMS.includes(dim));
+ // Two strategies:
+ // 1. gaussian / label_image: hybrid absolute scale factors (each element is absolute from original)
+ // using dimScaleFactors to choose incremental vs from-original for exact sizes.
+ // 2. bin_shrink: treat provided scaleFactors sequence as incremental factors applied successively.
+ let previousImage = ngffImage;
  let previousDimFactors = {};
- for (const dim of dims) {
+ for (const dim of dims)
  previousDimFactors[dim] = 1;
- }
- const spatialDims = dims.filter((dim) => SPATIAL_DIMS.includes(dim));
- for (const scaleFactor of scaleFactors) {
- const dimFactors = dimScaleFactors(dims, scaleFactor, previousDimFactors);
- previousDimFactors = updatePreviousDimFactors(scaleFactor, spatialDims, previousDimFactors);
+ for (let i = 0; i < scaleFactors.length; i++) {
+ const scaleFactor = scaleFactors[i];
+ let sourceImage;
+ let sourceDimFactors;
+ if (smoothing === "bin_shrink") {
+ // Purely incremental: scaleFactor is the shrink for this step
+ sourceImage = previousImage; // always from previous
+ sourceDimFactors = {};
+ if (typeof scaleFactor === "number") {
+ for (const dim of spatialDims)
+ sourceDimFactors[dim] = scaleFactor;
+ }
+ else {
+ for (const dim of spatialDims) {
+ sourceDimFactors[dim] = scaleFactor[dim] || 1;
+ }
+ }
+ // Non-spatial dims factor 1
+ for (const dim of dims) {
+ if (!(dim in sourceDimFactors))
+ sourceDimFactors[dim] = 1;
+ }
+ }
+ else {
+ // Hybrid absolute strategy
+ const dimFactors = dimScaleFactors(dims, scaleFactor, previousDimFactors, ngffImage, previousImage);
+ // Decide if we can be incremental
+ let canDownsampleIncrementally = true;
+ for (const dim of Object.keys(dimFactors)) {
+ const dimIndex = ngffImage.dims.indexOf(dim);
+ if (dimIndex >= 0) {
+ const originalSize = ngffImage.data.shape[dimIndex];
+ const targetSize = Math.floor(originalSize /
+ (typeof scaleFactor === "number"
+ ? scaleFactor
+ : scaleFactor[dim]));
+ const prevDimIndex = previousImage.dims.indexOf(dim);
+ const previousSize = previousImage.data.shape[prevDimIndex];
+ if (Math.floor(previousSize / dimFactors[dim]) !== targetSize) {
+ canDownsampleIncrementally = false;
+ break;
+ }
+ }
+ }
+ if (canDownsampleIncrementally) {
+ sourceImage = previousImage;
+ sourceDimFactors = dimFactors;
+ }
+ else {
+ sourceImage = ngffImage;
+ const originalDimFactors = {};
+ for (const dim of dims)
+ originalDimFactors[dim] = 1;
+ sourceDimFactors = dimScaleFactors(dims, scaleFactor, originalDimFactors);
+ }
+ }
  let downsampled;
  if (smoothing === "gaussian") {
- downsampled = await downsampleGaussian(previousImage, dimFactors, spatialDims);
+ downsampled = await downsampleGaussian(sourceImage, sourceDimFactors, spatialDims);
  }
  else if (smoothing === "bin_shrink") {
- downsampled = await downsampleBinShrinkImpl(previousImage, dimFactors, spatialDims);
+ downsampled = await downsampleBinShrinkImpl(sourceImage, sourceDimFactors, spatialDims);
  }
  else if (smoothing === "label_image") {
- downsampled = await downsampleLabelImageImpl(previousImage, dimFactors, spatialDims);
+ downsampled = await downsampleLabelImageImpl(sourceImage, sourceDimFactors, spatialDims);
  }
  else {
  throw new Error(`Unknown smoothing method: ${smoothing}`);
  }
  multiscales.push(downsampled);
+ // Update for next iteration
  previousImage = downsampled;
+ if (smoothing === "bin_shrink") {
+ // Accumulate cumulative factors (multiply) for bin_shrink to reflect total shrink so far
+ if (typeof scaleFactor === "number") {
+ for (const dim of spatialDims) {
+ previousDimFactors[dim] *= scaleFactor;
+ }
+ }
+ else {
+ for (const dim of spatialDims) {
+ previousDimFactors[dim] *= scaleFactor[dim] || 1;
+ }
+ }
+ }
+ else {
+ previousDimFactors = updatePreviousDimFactors(scaleFactor, spatialDims, previousDimFactors);
+ }
  }
  return multiscales;
  }
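A worked example of the two strategies in downsampleItkWasm above, using a hypothetical original x size of 100 and scale factors [2, 3] (illustrative only, in TypeScript):

    const original = 100;
    // gaussian / label_image treat each factor as absolute from the original:
    const level1 = Math.floor(original / 2); // 50 -- reachable from 100 with an integer
                                             //       factor of 2, so level 1 downsamples
                                             //       the previous image
    const level2 = Math.floor(original / 3); // 33 -- no integer factor maps 50 to 33
                                             //       (floor(50/2)=25, floor(50/1)=50),
                                             //       so level 2 falls back to the original
    // bin_shrink treats the factors as successive shrinks of the previous level:
    const bin1 = Math.floor(original / 2);   // 50
    const bin2 = Math.floor(bin1 / 3);       // 16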