@vitessce/neuroglancer 3.9.5 → 3.9.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. package/dist/{ReactNeuroglancer-BCg93QGV.js → ReactNeuroglancer-pv4bM8Yp.js} +43 -26
  2. package/dist/index-BEPd2Tds.js +37856 -0
  3. package/dist/index.js +1 -1
  4. package/dist-tsc/Neuroglancer.d.ts +0 -2
  5. package/dist-tsc/Neuroglancer.d.ts.map +1 -1
  6. package/dist-tsc/Neuroglancer.js +26 -26
  7. package/dist-tsc/NeuroglancerSubscriber.d.ts.map +1 -1
  8. package/dist-tsc/NeuroglancerSubscriber.js +219 -53
  9. package/dist-tsc/ReactNeuroglancer.d.ts +2 -2
  10. package/dist-tsc/ReactNeuroglancer.d.ts.map +1 -1
  11. package/dist-tsc/ReactNeuroglancer.js +31 -28
  12. package/dist-tsc/data-hook-ng-utils.d.ts +18 -20
  13. package/dist-tsc/data-hook-ng-utils.d.ts.map +1 -1
  14. package/dist-tsc/data-hook-ng-utils.js +136 -68
  15. package/dist-tsc/shader-utils.d.ts +126 -0
  16. package/dist-tsc/shader-utils.d.ts.map +1 -0
  17. package/dist-tsc/shader-utils.js +547 -0
  18. package/dist-tsc/shader-utils.test.d.ts +2 -0
  19. package/dist-tsc/shader-utils.test.d.ts.map +1 -0
  20. package/dist-tsc/shader-utils.test.js +364 -0
  21. package/dist-tsc/use-memo-custom-comparison.d.ts +14 -0
  22. package/dist-tsc/use-memo-custom-comparison.d.ts.map +1 -0
  23. package/dist-tsc/use-memo-custom-comparison.js +150 -0
  24. package/package.json +9 -8
  25. package/src/Neuroglancer.js +31 -26
  26. package/src/NeuroglancerSubscriber.js +361 -81
  27. package/src/README.md +28 -0
  28. package/src/ReactNeuroglancer.js +34 -27
  29. package/src/data-hook-ng-utils.js +178 -78
  30. package/src/shader-utils.js +653 -0
  31. package/src/shader-utils.test.js +432 -0
  32. package/src/use-memo-custom-comparison.js +190 -0
  33. package/dist/index-Wdrc02VW.js +0 -32390
  34. package/dist-tsc/data-hook-ng-utils.test.d.ts +0 -2
  35. package/dist-tsc/data-hook-ng-utils.test.d.ts.map +0 -1
  36. package/dist-tsc/data-hook-ng-utils.test.js +0 -35
  37. package/src/data-hook-ng-utils.test.js +0 -52
@@ -1,21 +1,41 @@
1
+ /* eslint-disable max-len */
1
2
  /* eslint-disable no-unused-vars */
2
- import React, { useCallback, useMemo, useRef, useEffect, useState } from 'react';
3
+ import React, { useCallback, useMemo, useRef, useEffect, useState, useReducer } from 'react';
3
4
  import {
4
5
  TitleInfo,
6
+ useReady,
7
+ useInitialCoordination,
5
8
  useCoordination,
6
- useObsSetsData,
7
- useLoaders,
8
- useObsEmbeddingData,
9
9
  useCoordinationScopes,
10
+ useCoordinationScopesBy,
11
+ useComplexCoordination,
12
+ useMultiCoordinationScopesNonNull,
13
+ useMultiCoordinationScopesSecondaryNonNull,
14
+ useComplexCoordinationSecondary,
15
+ useLoaders,
16
+ useMergeCoordination,
17
+ useMultiObsPoints,
18
+ usePointMultiObsFeatureMatrixIndices,
19
+ useMultiObsSegmentations,
20
+ useSegmentationMultiFeatureSelection,
21
+ useSegmentationMultiObsFeatureMatrixIndices,
22
+ useSegmentationMultiObsSets,
23
+ useGridItemSize,
10
24
  } from '@vitessce/vit-s';
11
25
  import {
12
26
  ViewHelpMapping,
13
27
  ViewType,
28
+ CoordinationType,
14
29
  COMPONENT_COORDINATION_TYPES,
15
30
  } from '@vitessce/constants-internal';
16
31
  import { mergeObsSets, getCellColors, setObsSelection } from '@vitessce/sets-utils';
32
+ import { MultiLegend } from '@vitessce/legend';
17
33
  import { NeuroglancerComp } from './Neuroglancer.js';
18
34
  import { useNeuroglancerViewerState } from './data-hook-ng-utils.js';
35
+ import {
36
+ useMemoCustomComparison,
37
+ customIsEqualForCellColors,
38
+ } from './use-memo-custom-comparison.js';
19
39
  import { useStyles } from './styles.js';
20
40
  import {
21
41
  quaternionToEuler,
@@ -37,6 +57,8 @@ const ROTATION_EPS = 1e-3;
37
57
  const TARGET_EPS = 0.5;
38
58
  const NG_ROT_COOLDOWN_MS = 120;
39
59
 
60
+ const GUIDE_URL = 'https://vitessce.io/docs/ng-guide/';
61
+
40
62
  const LAST_INTERACTION_SOURCE = {
41
63
  vitessce: 'vitessce',
42
64
  neuroglancer: 'neuroglancer',
@@ -50,17 +72,31 @@ function rgbToHex(rgb) {
50
72
 
51
73
  export function NeuroglancerSubscriber(props) {
52
74
  const {
75
+ uuid,
53
76
  coordinationScopes: coordinationScopesRaw,
77
+ coordinationScopesBy: coordinationScopesByRaw,
54
78
  closeButtonVisible,
55
79
  downloadButtonVisible,
56
80
  removeGridComponent,
57
81
  theme,
58
- title = 'Neuroglancer',
82
+ title = 'Spatial',
83
+ subtitle = 'Powered by Neuroglancer',
59
84
  helpText = ViewHelpMapping.NEUROGLANCER,
85
+ // Note: this is a temporary mechanism
86
+ // to pass an initial NG camera state.
87
+ // Ideally, all camera state should be passed via
88
+ // the existing spatialZoom, spatialTargetX, spatialRotationOrbit, etc,
89
+ // and then NeuroglancerSubscriber should internally convert
90
+ // to NG-compatible values, which would eliminate the need for this.
91
+ initialNgCameraState,
60
92
  } = props;
61
93
 
62
94
  const loaders = useLoaders();
95
+ const mergeCoordination = useMergeCoordination();
96
+
97
+ // Account for possible meta-coordination.
63
98
  const coordinationScopes = useCoordinationScopes(coordinationScopesRaw);
99
+ const coordinationScopesBy = useCoordinationScopesBy(coordinationScopes, coordinationScopesByRaw);
64
100
 
65
101
  const [{
66
102
  dataset,
@@ -90,32 +126,265 @@ export function NeuroglancerSubscriber(props) {
90
126
  // setSpatialRotationZ: setRotationZ,
91
127
  setSpatialRotationOrbit: setRotationOrbit,
92
128
  setSpatialZoom: setZoom,
93
- }] = useCoordination(COMPONENT_COORDINATION_TYPES[ViewType.NEUROGLANCER], coordinationScopes);
129
+ }] = useCoordination(
130
+ COMPONENT_COORDINATION_TYPES[ViewType.NEUROGLANCER],
131
+ coordinationScopes,
132
+ );
94
133
 
134
+ const [ngWidth, ngHeight, containerRef] = useGridItemSize();
95
135
 
96
- // console.log("NG Subs Render orbit", spatialRotationX, spatialRotationY, spatialRotationOrbit);
136
+ const [
137
+ segmentationLayerScopes,
138
+ segmentationChannelScopesByLayer,
139
+ ] = useMultiCoordinationScopesSecondaryNonNull(
140
+ CoordinationType.SEGMENTATION_CHANNEL,
141
+ CoordinationType.SEGMENTATION_LAYER,
142
+ coordinationScopes,
143
+ coordinationScopesBy,
144
+ );
97
145
 
98
- const { classes } = useStyles();
146
+ const pointLayerScopes = useMultiCoordinationScopesNonNull(
147
+ CoordinationType.POINT_LAYER,
148
+ coordinationScopes,
149
+ );
150
+
151
+ // Object keys are coordination scope names for spatialSegmentationLayer.
152
+ const segmentationLayerCoordination = useComplexCoordination(
153
+ [
154
+ CoordinationType.FILE_UID,
155
+ CoordinationType.SEGMENTATION_CHANNEL,
156
+ CoordinationType.SPATIAL_LAYER_VISIBLE,
157
+ CoordinationType.SPATIAL_LAYER_OPACITY,
158
+ ],
159
+ coordinationScopes,
160
+ coordinationScopesBy,
161
+ CoordinationType.SEGMENTATION_LAYER,
162
+ );
99
163
 
100
- const [{ obsSets: cellSets }] = useObsSetsData(
101
- loaders, dataset, false,
102
- { setObsSetSelection: setCellSetSelection, setObsSetColor: setCellSetColor },
103
- { cellSetSelection, obsSetColor: cellSetColor },
104
- { obsType },
164
+ // Object keys are coordination scope names for spatialSegmentationChannel.
165
+ const segmentationChannelCoordination = useComplexCoordinationSecondary(
166
+ [
167
+ CoordinationType.OBS_TYPE,
168
+ CoordinationType.SPATIAL_TARGET_C,
169
+ CoordinationType.SPATIAL_CHANNEL_VISIBLE,
170
+ CoordinationType.SPATIAL_CHANNEL_OPACITY,
171
+ CoordinationType.SPATIAL_CHANNEL_COLOR,
172
+ CoordinationType.SPATIAL_SEGMENTATION_FILLED,
173
+ CoordinationType.SPATIAL_SEGMENTATION_STROKE_WIDTH,
174
+ CoordinationType.OBS_COLOR_ENCODING,
175
+ CoordinationType.FEATURE_SELECTION,
176
+ CoordinationType.FEATURE_AGGREGATION_STRATEGY,
177
+ CoordinationType.FEATURE_VALUE_COLORMAP,
178
+ CoordinationType.FEATURE_VALUE_COLORMAP_RANGE,
179
+ CoordinationType.OBS_SET_COLOR,
180
+ CoordinationType.OBS_SET_SELECTION,
181
+ CoordinationType.ADDITIONAL_OBS_SETS,
182
+ CoordinationType.OBS_HIGHLIGHT,
183
+ CoordinationType.TOOLTIPS_VISIBLE,
184
+ CoordinationType.TOOLTIP_CROSSHAIRS_VISIBLE,
185
+ CoordinationType.LEGEND_VISIBLE,
186
+ ],
187
+ coordinationScopes,
188
+ coordinationScopesBy,
189
+ CoordinationType.SEGMENTATION_LAYER,
190
+ CoordinationType.SEGMENTATION_CHANNEL,
105
191
  );
106
192
 
107
- const [{ obsIndex }] = useObsEmbeddingData(
108
- loaders, dataset, true, {}, {},
109
- { obsType, embeddingType: mapping },
193
+ // Point layer
194
+ const pointLayerCoordination = useComplexCoordination(
195
+ [
196
+ CoordinationType.OBS_TYPE,
197
+ CoordinationType.SPATIAL_LAYER_VISIBLE,
198
+ CoordinationType.SPATIAL_LAYER_OPACITY,
199
+ CoordinationType.OBS_COLOR_ENCODING,
200
+ CoordinationType.FEATURE_COLOR,
201
+ CoordinationType.FEATURE_FILTER_MODE,
202
+ CoordinationType.FEATURE_SELECTION,
203
+ CoordinationType.FEATURE_VALUE_COLORMAP,
204
+ CoordinationType.FEATURE_VALUE_COLORMAP_RANGE,
205
+ CoordinationType.SPATIAL_LAYER_COLOR,
206
+ CoordinationType.OBS_HIGHLIGHT,
207
+ CoordinationType.TOOLTIPS_VISIBLE,
208
+ CoordinationType.TOOLTIP_CROSSHAIRS_VISIBLE,
209
+ CoordinationType.LEGEND_VISIBLE,
210
+ ],
211
+ coordinationScopes,
212
+ coordinationScopesBy,
213
+ CoordinationType.POINT_LAYER,
110
214
  );
111
215
 
112
- const [initalViewerState] = useNeuroglancerViewerState(
113
- loaders, dataset, false,
114
- undefined, undefined,
115
- { obsType: 'cell' },
216
+ // Points data
217
+ const [obsPointsData, obsPointsDataStatus, obsPointsUrls, obsPointsErrors] = useMultiObsPoints(
218
+ coordinationScopes, coordinationScopesBy, loaders, dataset,
219
+ mergeCoordination, uuid,
116
220
  );
117
221
 
118
- const latestViewerStateRef = useRef(initalViewerState);
222
+ const [pointMultiIndicesData, pointMultiIndicesDataStatus, pointMultiIndicesDataErrors] = usePointMultiObsFeatureMatrixIndices(
223
+ coordinationScopes, coordinationScopesBy, loaders, dataset,
224
+ );
225
+
226
+
227
+ // Segmentations data
228
+ const [obsSegmentationsData, obsSegmentationsDataStatus, obsSegmentationsUrls, obsSegmentationsDataErrors] = useMultiObsSegmentations(
229
+ coordinationScopes, coordinationScopesBy, loaders, dataset,
230
+ mergeCoordination, uuid,
231
+ );
232
+
233
+ const [obsSegmentationsSetsData, obsSegmentationsSetsDataStatus, obsSegmentationsSetsDataErrors] = useSegmentationMultiObsSets(
234
+ coordinationScopes, coordinationScopesBy, loaders, dataset,
235
+ );
236
+
237
+ const [
238
+ segmentationMultiExpressionData,
239
+ segmentationMultiLoadedFeatureSelection,
240
+ segmentationMultiExpressionExtents,
241
+ segmentationMultiExpressionNormData,
242
+ segmentationMultiFeatureSelectionStatus,
243
+ segmentationMultiFeatureSelectionErrors,
244
+ ] = useSegmentationMultiFeatureSelection(
245
+ coordinationScopes, coordinationScopesBy, loaders, dataset,
246
+ );
247
+
248
+ const [segmentationMultiIndicesData, segmentationMultiIndicesDataStatus, segmentationMultiIndicesDataErrors] = useSegmentationMultiObsFeatureMatrixIndices(
249
+ coordinationScopes, coordinationScopesBy, loaders, dataset,
250
+ );
251
+
252
+ const errors = [
253
+ ...obsPointsErrors,
254
+ ...obsSegmentationsDataErrors,
255
+ ...obsSegmentationsSetsDataErrors,
256
+ ...pointMultiIndicesDataErrors,
257
+ ...segmentationMultiFeatureSelectionErrors,
258
+ ...segmentationMultiIndicesDataErrors,
259
+ ];
260
+
261
+ const isReady = useReady([
262
+ // Points
263
+ obsPointsDataStatus,
264
+ pointMultiIndicesDataStatus,
265
+ // Segmentations
266
+ obsSegmentationsDataStatus,
267
+ obsSegmentationsSetsDataStatus,
268
+ segmentationMultiFeatureSelectionStatus,
269
+ segmentationMultiIndicesDataStatus,
270
+ ]);
271
+
272
+ // console.log("NG Subs Render orbit", spatialRotationX, spatialRotationY, spatialRotationOrbit);
273
+
274
+ const { classes } = useStyles();
275
+
276
+ const segmentationColorMapping = useMemoCustomComparison(() => {
277
+ // TODO: ultimately, segmentationColorMapping becomes cellColorMapping, and makes its way into the viewerState.
278
+ // It may make sense to merge the multiple useMemoCustomComparisons upstream of derivedViewerState into one.
279
+ // This would complicate the comparison function, but the multiple separate useMemos are not really necessary.
280
+ const result = {};
281
+ segmentationLayerScopes?.forEach((layerScope) => {
282
+ result[layerScope] = {};
283
+ segmentationChannelScopesByLayer?.[layerScope]?.forEach((channelScope) => {
284
+ const { obsSets: layerSets, obsIndex: layerIndex } = obsSegmentationsSetsData
285
+ ?.[layerScope]?.[channelScope] || {};
286
+ const {
287
+ obsSetColor,
288
+ obsColorEncoding,
289
+ obsSetSelection,
290
+ additionalObsSets,
291
+ spatialChannelColor,
292
+ } = segmentationChannelCoordination[0][layerScope][channelScope];
293
+
294
+ if (obsColorEncoding === 'spatialChannelColor') {
295
+ // All segments get the same static channel color
296
+ if (layerIndex && spatialChannelColor) {
297
+ const hex = rgbToHex(spatialChannelColor);
298
+ const ngCellColors = {};
299
+
300
+ if (obsSetSelection?.length > 0) {
301
+ // Only color the segments belonging to selected sets.
302
+ const mergedCellSets = mergeObsSets(layerSets, additionalObsSets);
303
+ const selectedIds = new Set();
304
+ obsSetSelection.forEach((setPath) => {
305
+ const rootNode = mergedCellSets?.tree?.find(n => n.name === setPath[0]);
306
+ const leafNode = setPath.length > 1
307
+ ? rootNode?.children?.find(n => n.name === setPath[1])
308
+ : rootNode;
309
+ leafNode?.set?.forEach(([id]) => selectedIds.add(String(id)));
310
+ });
311
+ layerIndex.forEach((id) => {
312
+ if (selectedIds.has(String(id))) {
313
+ ngCellColors[id] = hex;
314
+ }
315
+ });
316
+ }
317
+ result[layerScope][channelScope] = ngCellColors;
318
+ }
319
+ } else if (layerSets && layerIndex) {
320
+ const mergedCellSets = mergeObsSets(layerSets, additionalObsSets);
321
+ const cellColors = getCellColors({
322
+ cellSets: mergedCellSets,
323
+ cellSetSelection: obsSetSelection,
324
+ cellSetColor: obsSetColor,
325
+ obsIndex: layerIndex,
326
+ theme,
327
+ });
328
+ // Convert the list of colors to an object of hex strings, which NG requires.
329
+ const ngCellColors = {};
330
+ cellColors.forEach((color, i) => {
331
+ ngCellColors[i] = rgbToHex(color);
332
+ });
333
+ result[layerScope][channelScope] = ngCellColors;
334
+ }
335
+ });
336
+ });
337
+ return result;
338
+ }, {
339
+ // The dependencies for the comparison,
340
+ // used by the custom equality function.
341
+ segmentationLayerScopes,
342
+ segmentationChannelScopesByLayer,
343
+ obsSegmentationsSetsData,
344
+ segmentationChannelCoordination,
345
+ theme,
346
+ }, customIsEqualForCellColors);
347
+
348
+
349
+ // Obtain the Neuroglancer viewerState object.
350
+ const initalViewerState = useNeuroglancerViewerState(
351
+ theme,
352
+ segmentationLayerScopes,
353
+ segmentationChannelScopesByLayer,
354
+ segmentationLayerCoordination,
355
+ segmentationChannelCoordination,
356
+ obsSegmentationsUrls,
357
+ obsSegmentationsData,
358
+ pointLayerScopes,
359
+ pointLayerCoordination,
360
+ obsPointsUrls,
361
+ obsPointsData,
362
+ pointMultiIndicesData,
363
+ );
364
+
365
+
366
+ const [latestViewerStateIteration, incrementLatestViewerStateIteration] = useReducer(x => x + 1, 0);
367
+ const latestViewerStateRef = useRef({
368
+ ...initalViewerState,
369
+ ...(initialNgCameraState ?? {}),
370
+ });
371
+
372
+ useEffect(() => {
373
+ const prevNgCameraState = {
374
+ position: latestViewerStateRef.current.position,
375
+ projectionOrientation: latestViewerStateRef.current.projectionOrientation,
376
+ projectionScale: latestViewerStateRef.current.projectionScale,
377
+ };
378
+ latestViewerStateRef.current = {
379
+ ...initalViewerState,
380
+ ...prevNgCameraState,
381
+ };
382
+ // Force a re-render by incrementing a piece of state.
383
+ // This works because we have made latestViewerStateIteration
384
+ // a dependency for derivedViewerState, triggering the useMemo downstream.
385
+ incrementLatestViewerStateIteration();
386
+ }, [initalViewerState]);
387
+
119
388
  const initialRotationPushedRef = useRef(false);
120
389
 
121
390
  const ngRotPushAtRef = useRef(0);
@@ -147,19 +416,6 @@ export function NeuroglancerSubscriber(props) {
147
416
  ty: spatialTargetY,
148
417
  });
149
418
 
150
- const mergedCellSets = useMemo(() => mergeObsSets(
151
- cellSets, additionalCellSets,
152
- ), [cellSets, additionalCellSets]);
153
-
154
- const cellColors = useMemo(() => getCellColors({
155
- cellSets: mergedCellSets,
156
- cellSetSelection,
157
- cellSetColor,
158
- obsIndex,
159
- theme,
160
- }), [mergedCellSets, theme,
161
- cellSetColor, cellSetSelection, obsIndex]);
162
-
163
419
  /*
164
420
  * handleStateUpdate - Interactions from NG to Vitessce are pushed here
165
421
  */
@@ -278,6 +534,8 @@ export function NeuroglancerSubscriber(props) {
278
534
  }, []);
279
535
 
280
536
  const onSegmentClick = useCallback((value) => {
537
+ // Note: this callback is no longer called by the child component.
538
+ // Reference: https://github.com/vitessce/vitessce/pull/2439
281
539
  if (value) {
282
540
  const id = String(value);
283
541
  const selectedCellIds = [id];
@@ -286,6 +544,10 @@ export function NeuroglancerSubscriber(props) {
286
544
  if (alreadySelectedId) {
287
545
  return;
288
546
  }
547
+ // TODO: update this now that we are using layer/channel-based organization of segmentations.
548
+ // There is no more "top-level" obsSets coordination; it is only on a per-layer basis.
549
+ // We should probably just assume the first segmentation layer/channel when updating the logic,
550
+ // since it is not clear how we would determine which layer/channel to update if there are multiple.
289
551
  setObsSelection(
290
552
  selectedCellIds, additionalCellSets, cellSetColor,
291
553
  setCellSetSelection, setAdditionalCellSets, setCellSetColor,
@@ -298,37 +560,28 @@ export function NeuroglancerSubscriber(props) {
298
560
  setCellColorEncoding, setCellSetColor, setCellSetSelection,
299
561
  ]);
300
562
 
301
- const batchedUpdateTimeoutRef = useRef(null);
302
- const [batchedCellColors, setBatchedCellColors] = useState(cellColors);
563
+ // Get the ultimate cellColorMapping for each layer to pass to NeuroglancerComp as a prop.
303
564
 
304
- useEffect(() => {
305
- if (batchedUpdateTimeoutRef.current) {
306
- clearTimeout(batchedUpdateTimeoutRef.current);
307
- }
308
- batchedUpdateTimeoutRef.current = setTimeout(() => {
309
- setBatchedCellColors(cellColors);
310
- }, 100);
311
-
312
- // TODO: look into deferredValue from React
313
- // startTransition(() => {
314
- // setBatchedCellColors(cellColors);
315
- // });
316
- }, [cellColors]);
317
- // TODO use a ref if slow - see prev commits
318
- const cellColorMapping = useMemo(() => {
319
- const colorMapping = {};
320
- batchedCellColors.forEach((color, cell) => {
321
- colorMapping[cell] = rgbToHex(color);
565
+ const cellColorMappingByLayer = useMemo(() => {
566
+ const result = {};
567
+ segmentationLayerScopes?.forEach((layerScope) => {
568
+ const channelScope = segmentationChannelScopesByLayer?.[layerScope]?.[0];
569
+ result[layerScope] = segmentationColorMapping?.[layerScope]?.[channelScope] ?? {};
322
570
  });
323
- return colorMapping;
324
- }, [batchedCellColors]);
325
-
326
-
571
+ return result;
572
+ }, [segmentationColorMapping, segmentationLayerScopes, segmentationChannelScopesByLayer]);
573
+
574
+ // TODO: try to simplify using useMemoCustomComparison?
575
+ // This would allow us to refactor a lot of the checking-for-changes logic into a comparison function,
576
+ // simplify some of the manual bookkeeping like with prevCoordsRef and lastInteractionSource,
577
+ // and would allow us to potentially remove usage of some refs (e.g., latestViewerStateRef)
578
+ // by relying on the memoization to prevent unnecessary updates.
327
579
  const derivedViewerState = useMemo(() => {
328
580
  const { current } = latestViewerStateRef;
329
- const nextSegments = Object.keys(cellColorMapping);
330
- const prevLayer = current?.layers?.[0] || {};
331
- const prevSegments = prevLayer.segments || [];
581
+ if (current.layers.length <= 0) {
582
+ return current;
583
+ }
584
+
332
585
  const { projectionScale, projectionOrientation, position } = current;
333
586
 
334
587
  // Did Vitessce coords change vs the *previous* render?
@@ -465,20 +718,23 @@ export function NeuroglancerSubscriber(props) {
465
718
  lastInteractionSource.current = null;
466
719
  }
467
720
 
468
- const newLayer0 = {
469
- ...prevLayer,
470
- segments: nextSegments,
471
- segmentColors: cellColorMapping,
472
- };
473
-
721
+ const updatedLayers = current?.layers?.map((layer, idx) => {
722
+ const layerScope = segmentationLayerScopes?.[idx];
723
+ const layerColorMapping = cellColorMappingByLayer?.[layerScope] ?? {};
724
+ const layerSegments = Object.keys(layerColorMapping);
725
+ return {
726
+ ...layer,
727
+ segments: layerSegments,
728
+ segmentColors: layerColorMapping,
729
+ };
730
+ }) ?? [];
474
731
 
475
732
  const updated = {
476
733
  ...current,
477
734
  projectionScale: nextProjectionScale,
478
735
  projectionOrientation: nextOrientation,
479
736
  position: nextPosition,
480
- layers: prevSegments.length === 0 ? [newLayer0, ...(current?.layers?.slice(1)
481
- || [])] : current?.layers,
737
+ layers: updatedLayers,
482
738
  };
483
739
 
484
740
  latestViewerStateRef.current = updated;
@@ -494,38 +750,62 @@ export function NeuroglancerSubscriber(props) {
494
750
  };
495
751
 
496
752
  return updated;
497
- }, [cellColorMapping, spatialZoom, spatialRotationX, spatialRotationY,
498
- spatialRotationZ, spatialTargetX, spatialTargetY]);
753
+ }, [cellColorMappingByLayer, spatialZoom, spatialRotationX, spatialRotationY,
754
+ spatialRotationZ, spatialTargetX, spatialTargetY, initalViewerState,
755
+ latestViewerStateIteration]);
499
756
 
500
757
  const onSegmentHighlight = useCallback((obsId) => {
501
758
  setCellHighlight(String(obsId));
502
- }, [obsIndex, setCellHighlight]);
759
+ }, [setCellHighlight]);
503
760
 
504
761
  // TODO: if all cells are deselected, a black view is shown, rather we want to show empty NG view?
505
762
  // if (!cellColorMapping || Object.keys(cellColorMapping).length === 0) {
506
763
  // return;
507
764
  // }
508
765
 
766
+ const hasLayers = derivedViewerState?.layers?.length > 0;
767
+ // console.log(derivedViewerState);
768
+
509
769
  return (
770
+
510
771
  <TitleInfo
511
772
  title={title}
773
+ info={subtitle}
512
774
  helpText={helpText}
513
775
  isSpatial
514
776
  theme={theme}
515
777
  closeButtonVisible={closeButtonVisible}
516
778
  downloadButtonVisible={downloadButtonVisible}
517
779
  removeGridComponent={removeGridComponent}
518
- isReady
780
+ isReady={isReady}
781
+ errors={errors}
519
782
  withPadding={false}
783
+ guideUrl={GUIDE_URL}
520
784
  >
521
- <NeuroglancerComp
522
- classes={classes}
523
- onSegmentClick={onSegmentClick}
524
- onSelectHoveredCoords={onSegmentHighlight}
525
- viewerState={derivedViewerState}
526
- cellColorMapping={cellColorMapping}
527
- setViewerState={handleStateUpdate}
528
- />
785
+ {hasLayers ? (
786
+ <div style={{ position: 'relative', width: '100%', height: '100%' }} ref={containerRef}>
787
+ <div style={{ position: 'absolute', top: 0, right: 0, zIndex: 50 }}>
788
+ <MultiLegend
789
+ theme="dark"
790
+ maxHeight={ngHeight}
791
+ segmentationLayerScopes={segmentationLayerScopes}
792
+ segmentationLayerCoordination={segmentationLayerCoordination}
793
+ segmentationChannelScopesByLayer={segmentationChannelScopesByLayer}
794
+ segmentationChannelCoordination={segmentationChannelCoordination}
795
+ />
796
+ </div>
797
+
798
+ <NeuroglancerComp
799
+ classes={classes}
800
+ onSegmentClick={onSegmentClick}
801
+ onSelectHoveredCoords={onSegmentHighlight}
802
+ viewerState={derivedViewerState}
803
+ cellColorMapping={cellColorMappingByLayer}
804
+ setViewerState={handleStateUpdate}
805
+ />
806
+ </div>
807
+ ) : null}
529
808
  </TitleInfo>
809
+
530
810
  );
531
811
  }
package/src/README.md ADDED
@@ -0,0 +1,28 @@
1
+ # Neuroglancer view
2
+
3
+ This view is powered by neuroglancer.
4
+ Here, we provide developer-facing documentation on working with Neuroglancer and its `viewerState`:
5
+
6
+ ## viewerState
7
+
8
+ ### Camera position (zoom, translation, rotation)
9
+
10
+ The following properties in the viewerState control the camera:
11
+
12
+ - `viewerState.position`
13
+ - `viewerState.projectionScale`
14
+ - `viewerState.projectionOrientation`
15
+
16
+ The complete viewerState schema is available in the [Neuroglancer documentation](https://neuroglancer-docs.web.app/json/api/index.html).
17
+
18
+ ## Mesh format
19
+
20
+ See the Neuroglancer documentation to learn about the [precomputed multi-resolution mesh format](https://github.com/google/neuroglancer/blob/master/src/datasource/precomputed/meshes.md#multi-resolution-mesh-format).
21
+
22
+ ## Points format
23
+
24
+
25
+
26
+ ## Converting a SpatialData object to the Neuroglancer data formats
27
+
28
+ Use [tissue-map-tools](https://github.com/hms-dbmi/tissue-map-tools) to convert data from a SpatialData object to the mesh and point formats that are compatible with Neuroglancer.