@aics/vole-core 4.1.0 → 4.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/es/ImageInfo.js CHANGED
@@ -6,7 +6,7 @@ export function defaultImageInfo() {
  atlasTileDims: [1, 1],
  subregionSize: [1, 1, 1],
  subregionOffset: [0, 0, 0],
- combinedNumChannels: 1,
+ numChannelsPerSource: [1],
  channelNames: ["0"],
  channelColors: [[255, 255, 255]],
  multiscaleLevel: 0,
@@ -34,7 +34,12 @@ export class CImageInfo {
 
  /** Number of channels in the image */
  get numChannels() {
- return this.imageInfo.combinedNumChannels;
+ return this.imageInfo.numChannelsPerSource.reduce((a, b) => a + b, 0);
+ }
+
+ /** Number of channels per source, ordered by source index */
+ get numChannelsPerSource() {
+ return this.imageInfo.numChannelsPerSource;
  }
 
  /** XYZ size of the *original* (not downsampled) volume, in pixels */
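Migration note: `ImageInfo.combinedNumChannels` is replaced by `numChannelsPerSource`, an array holding each source's channel count in source order. The combined count is now the sum of that array; the `CImageInfo.numChannels` getter computes it as shown above. A minimal sketch for callers that previously read `combinedNumChannels` directly (the `imageInfo` variable here is assumed to be a raw `ImageInfo` object):

```js
// 4.1.0 (old): const totalChannels = imageInfo.combinedNumChannels;
// 4.2.0 (new): sum the per-source channel counts.
const totalChannels = imageInfo.numChannelsPerSource.reduce((a, b) => a + b, 0);
```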
@@ -45,7 +45,9 @@ export default class VolumeDrawable {
  this.viewMode = Axis.NONE; // 3D mode
 
  this.channelColors = this.volume.channelColorsDefault.slice();
- this.channelOptions = new Array(this.volume.imageInfo.numChannels).fill({});
+ this.channelOptions = Array.from({
+ length: this.volume.imageInfo.numChannels
+ }, () => ({}));
  this.fusion = this.channelColors.map((col, index) => {
  let rgbColor;
  // take copy of original channel color
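The `channelOptions` change above is a behavior fix as well as a restyle: `new Array(n).fill({})` puts the same object into every slot, so editing one channel's options would affect all of them, whereas `Array.from({ length: n }, () => ({}))` allocates a distinct object per channel. A standalone illustration (not code from the package):

```js
const shared = new Array(3).fill({});
shared[0].enabled = true;
console.log(shared[1].enabled); // true: every slot references the same object

const distinct = Array.from({ length: 3 }, () => ({}));
distinct[0].enabled = true;
console.log(distinct[1].enabled); // undefined: each slot is its own object
```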
@@ -29,7 +29,7 @@ const convertImageInfo = json => {
  atlasTileDims: [json.cols, json.rows],
  subregionSize: [json.tile_width, json.tile_height, json.tiles],
  subregionOffset: [0, 0, 0],
- combinedNumChannels: json.channels,
+ numChannelsPerSource: json.images.map(image => image.channels.length),
  channelNames: json.channel_names,
  channelColors: json.channel_colors,
  multiscaleLevel: 0,
@@ -247,12 +247,14 @@ class OMEZarrLoader extends ThreadableVolumeLoader {
  const levelToLoad = pickLevelToLoad(loadSpec, this.getLevelShapesZYX());
  const shapeLv = source0.scaleLevels[levelToLoad].shape;
  const [spatialUnit, timeUnit] = this.getUnitSymbols();
+ const numChannelsPerSource = [];
+ for (let i = 0; i < this.sources.length; i++) {
+ const source = this.sources[i];
+ const cIndex = source.axesTCZYX[1];
+ const sourceChannelCount = cIndex > -1 ? source.scaleLevels[levelToLoad].shape[cIndex] : 1;
+ numChannelsPerSource.push(sourceChannelCount);
+ }
 
- // Now we care about other sources: # of channels is the `channelOffset` of the last source plus its # of channels
- const sourceLast = this.sources[this.sources.length - 1];
- const cLast = sourceLast.axesTCZYX[1];
- const lastHasC = cLast > -1;
- const numChannels = sourceLast.channelOffset + (lastHasC ? sourceLast.scaleLevels[levelToLoad].shape[cLast] : 1);
  // we need to make sure that the corresponding matched shapes
  // use the min size of T
  let times = 1;
@@ -310,7 +312,7 @@ class OMEZarrLoader extends ThreadableVolumeLoader {
  atlasTileDims: [atlasTileDims.x, atlasTileDims.y],
  subregionSize: [pxSizeLv.x, pxSizeLv.y, pxSizeLv.z],
  subregionOffset: [0, 0, 0],
- combinedNumChannels: numChannels,
+ numChannelsPerSource,
  channelNames,
  multiscaleLevel: levelToLoad,
  multiscaleLevelDims: alldims,
@@ -423,9 +425,10 @@ class OMEZarrLoader extends ThreadableVolumeLoader {
  const updatedImageInfo = this.updateImageInfoForLoad(imageInfo, loadSpec);
  onUpdateMetadata(updatedImageInfo);
  const {
- combinedNumChannels,
+ numChannelsPerSource,
  multiscaleLevel
  } = updatedImageInfo;
+ const combinedNumChannels = numChannelsPerSource.reduce((a, b) => a + b, 0);
  const channelIndexes = loadSpec.channels ?? Array.from({
  length: combinedNumChannels
  }, (_, i) => i);
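With channel counts now tracked per source, code that works with a combined channel index sums `numChannelsPerSource` (as the loader does above) and may also need to know which source a given index falls in. The helper below is purely illustrative, not part of @aics/vole-core, and only demonstrates the arithmetic implied by the new field:

```js
// Hypothetical helper: map a combined channel index to
// [sourceIndex, channelWithinSource] using the per-source counts.
function resolveChannel(numChannelsPerSource, combinedIndex) {
  let remaining = combinedIndex;
  for (let source = 0; source < numChannelsPerSource.length; source++) {
    if (remaining < numChannelsPerSource[source]) {
      return [source, remaining];
    }
    remaining -= numChannelsPerSource[source];
  }
  throw new Error(`channel index ${combinedIndex} out of range`);
}

resolveChannel([3, 2], 4); // => [1, 1] (second source, its second channel)
```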
@@ -24,7 +24,7 @@ class OpenCellLoader extends ThreadableVolumeLoader {
  atlasTileDims: [27, 1],
  subregionSize: [600, 600, 27],
  subregionOffset: [0, 0, 0],
- combinedNumChannels: numChannels,
+ numChannelsPerSource: [numChannels],
  channelNames: chnames,
  multiscaleLevel: 0,
  multiscaleLevelDims: [{
@@ -37,7 +37,7 @@ const convertImageInfo = (json, dtype) => {
  atlasTileDims: [atlasTileDims.x, atlasTileDims.y],
  subregionSize: [json.sizeX, json.sizeY, json.sizeZ],
  subregionOffset: [0, 0, 0],
- combinedNumChannels: json.sizeC,
+ numChannelsPerSource: [json.sizeC],
  channelNames: json.channelNames,
  channelColors: undefined,
  multiscaleLevel: 0,
@@ -92,7 +92,8 @@ class RawArrayLoader extends ThreadableVolumeLoader {
  multiscaleLevel: 0
  };
  onUpdateMetadata(undefined, adjustedLoadSpec);
- for (let chindex = 0; chindex < imageInfo.combinedNumChannels; ++chindex) {
+ const totalChannels = imageInfo.numChannelsPerSource.reduce((a, b) => a + b, 0);
+ for (let chindex = 0; chindex < totalChannels; ++chindex) {
  if (requestedChannels && requestedChannels.length > 0 && !requestedChannels.includes(chindex)) {
  continue;
  }
@@ -157,13 +157,13 @@ class TiffLoader extends ThreadableVolumeLoader {
  const tilesizey = Math.floor(targetSize / atlasDims.y);
 
  // load tiff and check metadata
-
+ const numChannelsPerSource = this.url.length > 1 ? Array(this.url.length).fill(1) : [dims.sizec];
  const imgdata = {
  name: "TEST",
  atlasTileDims: [atlasDims.x, atlasDims.y],
  subregionSize: [tilesizex, tilesizey, dims.sizez],
  subregionOffset: [0, 0, 0],
- combinedNumChannels: dims.sizec,
+ numChannelsPerSource,
  channelNames: dims.channelnames,
  multiscaleLevel: 0,
  multiscaleLevelDims: [{
@@ -194,41 +194,44 @@ class TiffLoader extends ThreadableVolumeLoader {
  const volumeSize = cimageinfo.volumeSize;
  const channelProms = [];
  // do each channel on a worker?
- for (let channel = 0; channel < imageInfo.combinedNumChannels; ++channel) {
- const thisChannelProm = new Promise((resolve, reject) => {
- const params = {
- channel: channel,
- // these are target xy sizes for the in-memory volume data
- // they may or may not be the same size as original xy sizes
- tilesizex: volumeSize.x,
- tilesizey: volumeSize.y,
- sizec: imageInfo.combinedNumChannels,
- sizez: volumeSize.z,
- dimensionOrder: dims.dimensionorder,
- bytesPerSample: getBytesPerSample(dims.pixeltype),
- url: this.url.length > 1 ? this.url[channel] : this.url[0] // if multiple urls, use the channel index to select the right one
- };
- const worker = new Worker(new URL("../workers/FetchTiffWorker", import.meta.url), {
- type: "module"
+ for (let source = 0; source < imageInfo.numChannelsPerSource.length; ++source) {
+ const numChannels = imageInfo.numChannelsPerSource[source];
+ for (let channel = 0; channel < numChannels; ++channel) {
+ const thisChannelProm = new Promise((resolve, reject) => {
+ const params = {
+ channel: channel,
+ // these are target xy sizes for the in-memory volume data
+ // they may or may not be the same size as original xy sizes
+ tilesizex: volumeSize.x,
+ tilesizey: volumeSize.y,
+ sizec: numChannels,
+ sizez: volumeSize.z,
+ dimensionOrder: dims.dimensionorder,
+ bytesPerSample: getBytesPerSample(dims.pixeltype),
+ url: this.url[source]
+ };
+ const worker = new Worker(new URL("../workers/FetchTiffWorker", import.meta.url), {
+ type: "module"
+ });
+ worker.onmessage = e => {
+ if (e.data.isError) {
+ reject(deserializeError(e.data.error));
+ return;
+ }
+ const {
+ data,
+ dtype,
+ channel,
+ range
+ } = e.data;
+ onData([channel], [dtype], [data], [range]);
+ worker.terminate();
+ resolve();
+ };
+ worker.postMessage(params);
  });
- worker.onmessage = e => {
- if (e.data.isError) {
- reject(deserializeError(e.data.error));
- return;
- }
- const {
- data,
- dtype,
- channel,
- range
- } = e.data;
- onData([channel], [dtype], [data], [range]);
- worker.terminate();
- resolve();
- };
- worker.postMessage(params);
- });
- channelProms.push(thisChannelProm);
+ channelProms.push(thisChannelProm);
+ }
  }
 
  // waiting for all channels to load allows errors to propagate to the caller via this promise
@@ -211,7 +211,8 @@ export function buildDefaultMetadata(rawImageInfo) {
  };
  metadata["Multiresolution levels"] = rawImageInfo.multiscaleLevelDims;
  // TODO decide???? combined or not?
- metadata["Channels"] = rawImageInfo.combinedNumChannels; //imageInfo.numChannels;
+ const totalChannels = imageInfo.numChannelsPerSource.reduce((a, b) => a + b, 0);
+ metadata["Channels"] = totalChannels;
  metadata["Time series frames"] = imageInfo.times || 1;
  // don't add User data if it's empty
  if (rawImageInfo.userData && !isEmpty(rawImageInfo.userData)) {
@@ -3,20 +3,17 @@ import { Vector3, Vector2 } from "three";
  export type ImageInfo = Readonly<{
  name: string;
  /**
- * XY dimensions of the texture atlas used by `RayMarchedAtlasVolume` and `Atlas2DSlice`, in number of z-slice
- * tiles (not pixels). Chosen by the loader to lay out the 3D volume in the squarest possible 2D texture atlas.
+ * XY dimensions of the texture atlas used by `RayMarchedAtlasVolume` and
+ * `Atlas2DSlice`, in number of z-slice tiles (not pixels). Chosen by the
+ * loader to lay out the 3D volume in the squarest possible 2D texture atlas.
  */
  atlasTileDims: [number, number];
  /** Size of the currently loaded subregion, in pixels, in XYZ order */
  subregionSize: [number, number, number];
  /** Offset of the loaded subregion into the total volume, in pixels, in XYZ order */
  subregionOffset: [number, number, number];
- /** Number of channels in the image, accounting for convergence of multiple sources.
- * Because of multiple sources, which is not accounted for in ImageInfo,
- * that this could be different than the number of channels in the multiscaleLevelDims.
- * NOTE Currently there is one ImageInfo per Volume, not per source.
- */
- combinedNumChannels: number;
+ /** The number of channels in each source, in source order. */
+ numChannelsPerSource: number[];
  /** The names of each channel */
  channelNames: string[];
  /** Optional overrides to default channel colors, in 0-255 range, RGB order */
@@ -26,9 +23,9 @@ export type ImageInfo = Readonly<{
  /** The scale level from which this image was loaded, between `0` and `numMultiscaleLevels-1` */
  multiscaleLevel: number;
  /**
- * An *optional* transform which may be supplied by image metadata. It is *not* applied by
- * default, but may be read and fed to `View3d` methods: `setVolumeTransform`,
- * `setVolumeRotation`, `setVolumeScale`.
+ * An *optional* transform which may be supplied by image metadata. It is
+ * *not* applied by default, but may be read and fed to `View3d` methods:
+ * `setVolumeTransform`, `setVolumeRotation`, `setVolumeScale`.
  */
  transform: {
  /** Translation of the volume from the center of space, in volume voxels in XYZ order */
@@ -48,6 +45,8 @@ export declare class CImageInfo {
  get currentLevelDims(): VolumeDims;
  /** Number of channels in the image */
  get numChannels(): number;
+ /** Number of channels per source, ordered by source index */
+ get numChannelsPerSource(): number[];
  /** XYZ size of the *original* (not downsampled) volume, in pixels */
  get originalSize(): Vector3;
  /** Size of the volume, in pixels */
@@ -96,28 +96,26 @@ export interface FuseChannel {
  }
  /** If `FuseChannel.rgbColor` is this value, it is disabled from fusion. */
  export declare const FUSE_DISABLED_RGB_COLOR = 0;
- /**
- * Provide options to control the visual appearance of a Volume
- * @typedef {Object} VolumeChannelDisplayOptions
- * @property {boolean} enabled array of boolean per channel
- * @property {Array.<number>} color array of rgb per channel
- * @property {Array.<number>} specularColor array of rgb per channel
- * @property {Array.<number>} emissiveColor array of rgb per channel
- * @property {number} glossiness array of float per channel
- * @property {boolean} isosurfaceEnabled array of boolean per channel
- * @property {number} isovalue array of number per channel
- * @property {number} isosurfaceOpacity array of number per channel
- * @example let options = {
- };
- */
  export interface VolumeChannelDisplayOptions {
+ /** Whether the channel's volume data should be rendered for this channel. */
  enabled?: boolean;
+ /** RGB color array, with values in the range of [0, 255]. */
  color?: [number, number, number];
+ /** RGB color array for specular (highlight) color, with values in the range of [0, 255]. */
  specularColor?: [number, number, number];
+ /** RGB color array for emissive (glow) color, with values in the range of [0, 255]. */
  emissiveColor?: [number, number, number];
+ /** Exponent factor controlling the glossiness ("shininess") of the material. 0 is default. */
  glossiness?: number;
+ /** Whether the isosurface mesh should be rendered for this channel. */
  isosurfaceEnabled?: boolean;
+ /**
+ * Isovalue used to calculate the isosurface mesh, in a [0, 255] range.
+ * Isosurface is found at the set of all boundaries between voxels whose
+ * intensities span across this isovalue.
+ */
  isovalue?: number;
+ /** Opacity of the isosurface, in a [0, 1] range. */
  isosurfaceOpacity?: number;
  }
  export declare enum RenderMode {
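The fields of `VolumeChannelDisplayOptions` are now documented inline in the type declarations. For reference, an options object matching the documented shape might look like the sketch below (illustrative values only, not taken from the package):

```js
const channelDisplayOptions = {
  enabled: true,                  // render this channel's volume data
  color: [255, 128, 0],           // RGB in [0, 255]
  specularColor: [255, 255, 255], // specular (highlight) RGB in [0, 255]
  emissiveColor: [0, 0, 0],       // emissive (glow) RGB in [0, 255]
  glossiness: 0,                  // 0 is the default shininess
  isosurfaceEnabled: false,       // no isosurface mesh for this channel
  isovalue: 128,                  // isosurface threshold in [0, 255]
  isosurfaceOpacity: 1.0,         // opacity in [0, 1]
};
```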
package/es/types.js CHANGED
@@ -18,22 +18,6 @@ export function isFloatTypeArray(array) {
  }
  /** If `FuseChannel.rgbColor` is this value, it is disabled from fusion. */
  export const FUSE_DISABLED_RGB_COLOR = 0;
-
- /**
- * Provide options to control the visual appearance of a Volume
- * @typedef {Object} VolumeChannelDisplayOptions
- * @property {boolean} enabled array of boolean per channel
- * @property {Array.<number>} color array of rgb per channel
- * @property {Array.<number>} specularColor array of rgb per channel
- * @property {Array.<number>} emissiveColor array of rgb per channel
- * @property {number} glossiness array of float per channel
- * @property {boolean} isosurfaceEnabled array of boolean per channel
- * @property {number} isovalue array of number per channel
- * @property {number} isosurfaceOpacity array of number per channel
- * @example let options = {
- };
- */
-
  export let RenderMode = /*#__PURE__*/function (RenderMode) {
  RenderMode[RenderMode["RAYMARCH"] = 0] = "RAYMARCH";
  RenderMode[RenderMode["PATHTRACE"] = 1] = "PATHTRACE";
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@aics/vole-core",
- "version": "4.1.0",
+ "version": "4.2.0",
  "description": "volume renderer for 3d, 4d, or 5d imaging data with OME-Zarr support",
  "main": "es/index.js",
  "type": "module",