@vitessce/neuroglancer 3.8.1 → 3.8.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -2,7 +2,7 @@ var __defProp = Object.defineProperty;
2
2
  var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
3
3
  var __publicField = (obj, key, value) => __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
4
4
  import React__default from "react";
5
- import { g as getDefaultExportFromCjs, c as commonjsGlobal, d as diffCameraState } from "./index-O9LG3z3b.js";
5
+ import { g as getDefaultExportFromCjs, c as commonjsGlobal, d as diffCameraState } from "./index-CuSU8uy8.js";
6
6
  var es6_object_assign = {};
7
7
  var _global = { exports: {} };
8
8
  var hasRequired_global;
@@ -166,6 +166,9 @@ const FileType$1 = {
166
166
  OBS_SEGMENTATIONS_OME_TIFF: "obsSegmentations.ome-tiff",
167
167
  // GLB
168
168
  OBS_SEGMENTATIONS_GLB: "obsSegmentations.glb",
169
+ // Neuroglancer
170
+ // Precomputed (mesh) format
171
+ OBS_SEGMENTATIONS_NG_PRECOMPUTED: "obsSegmentations.ng-precomputed",
169
172
  // New file types to support old file types:
170
173
  // - cells.json
171
174
  OBS_EMBEDDING_CELLS_JSON: "obsEmbedding.cells.json",
@@ -393,6 +396,7 @@ const ViewHelpMapping = {
393
396
  [FileType$1.OBS_LABELS_MUDATA_ZARR]: DataType$1.OBS_LABELS,
394
397
  [FileType$1.FEATURE_LABELS_MUDATA_ZARR]: DataType$1.FEATURE_LABELS,
395
398
  [FileType$1.OBS_SEGMENTATIONS_GLB]: DataType$1.OBS_SEGMENTATIONS,
399
+ [FileType$1.OBS_SEGMENTATIONS_NG_PRECOMPUTED]: DataType$1.OBS_SEGMENTATIONS,
396
400
  [FileType$1.IMAGE_SPATIALDATA_ZARR]: DataType$1.IMAGE,
397
401
  [FileType$1.LABELS_SPATIALDATA_ZARR]: DataType$1.OBS_SEGMENTATIONS,
398
402
  [FileType$1.SHAPES_SPATIALDATA_ZARR]: DataType$1.OBS_SEGMENTATIONS,
@@ -10436,6 +10440,18 @@ z.object({
10436
10440
  sceneScaleZ: z.number(),
10437
10441
  materialSide: z.enum(["front", "back"])
10438
10442
  }).partial().nullable();
10443
+ z.object({
10444
+ // TODO: Should this explicitly specify sharded vs. unsharded?
10445
+ // Or can/should that be inferred from the data?
10446
+ dimensionX: z.number(),
10447
+ dimensionY: z.number(),
10448
+ dimensionZ: z.number(),
10449
+ dimensionUnit: z.enum(["nm", "um", "µm", "mm", "cm", "m"]),
10450
+ // TODO: should the following be passed via coordination types instead?
10451
+ projectionScale: z.number(),
10452
+ position: z.array(z.number()).length(3),
10453
+ projectionOrientation: z.array(z.number()).length(4)
10454
+ }).partial().nullable();
10439
10455
  z.object({
10440
10456
  obsIndex: z.string(),
10441
10457
  obsEmbedding: z.array(z.string()).length(2)
@@ -28173,7 +28189,7 @@ function NeuroglancerGlobalStyles(props) {
28173
28189
  }
28174
28190
  ));
28175
28191
  }
28176
- const LazyReactNeuroglancer = React__default.lazy(() => import("./ReactNeuroglancer-DXStdU0p.js"));
28192
+ const LazyReactNeuroglancer = React__default.lazy(() => import("./ReactNeuroglancer-CexklRkL.js"));
28177
28193
  function createWorker() {
28178
28194
  return new WorkerFactory();
28179
28195
  }
@@ -28262,6 +28278,118 @@ class NeuroglancerComp extends PureComponent {
28262
28278
  ))));
28263
28279
  }
28264
28280
  }
28281
+ const DEFAULT_NG_PROPS = {
28282
+ layout: "3d",
28283
+ position: [0, 0, 0],
28284
+ projectionOrientation: [0, 0, 0, 1],
28285
+ projectionScale: 1024,
28286
+ crossSectionScale: 1
28287
+ };
28288
+ function toPrecomputedSource(url) {
28289
+ if (!url) {
28290
+ return void 0;
28291
+ }
28292
+ return `precomputed://${url}`;
28293
+ }
28294
+ const UNIT_TO_NM = {
28295
+ nm: 1,
28296
+ um: 1e3,
28297
+ µm: 1e3,
28298
+ mm: 1e6,
28299
+ cm: 1e7,
28300
+ m: 1e9
28301
+ };
28302
+ function isInNanometerRange(value, unit, minNm = 1, maxNm = 100) {
28303
+ const n = typeof value === "number" ? value : Number(value);
28304
+ if (!Number.isFinite(n)) return false;
28305
+ const factor = unit && UNIT_TO_NM[unit];
28306
+ if (!factor) return false;
28307
+ const nm = n * factor;
28308
+ return nm >= minNm && nm <= maxNm;
28309
+ }
28310
+ function normalizeDimensionsToNanometers(opts) {
28311
+ const { dimensionUnit, dimensionX, dimensionY, dimensionZ } = opts;
28312
+ if (!dimensionUnit || !dimensionX || !dimensionY || !dimensionZ) {
28313
+ console.warn("Missing dimension info");
28314
+ }
28315
+ const xNm = isInNanometerRange(dimensionX, dimensionUnit);
28316
+ const yNm = isInNanometerRange(dimensionY, dimensionUnit);
28317
+ const zNm = isInNanometerRange(dimensionZ, dimensionUnit);
28318
+ if (!xNm || !yNm || !zNm) {
28319
+ console.warn("Dimension was converted to nm units");
28320
+ }
28321
+ return {
28322
+ x: xNm ? [dimensionX, dimensionUnit] : [1, "nm"],
28323
+ y: yNm ? [dimensionY, dimensionUnit] : [1, "nm"],
28324
+ z: zNm ? [dimensionZ, dimensionUnit] : [1, "nm"]
28325
+ };
28326
+ }
28327
+ function extractDataTypeEntities(loaders, dataset, dataType) {
28328
+ var _a;
28329
+ const datasetEntry = loaders == null ? void 0 : loaders[dataset];
28330
+ const internMap = (_a = datasetEntry == null ? void 0 : datasetEntry.loaders) == null ? void 0 : _a[dataType];
28331
+ if (!internMap || typeof internMap.entries !== "function") return [];
28332
+ return Array.from(internMap.entries()).map(([key, loader]) => {
28333
+ var _a2, _b;
28334
+ const url = (loader == null ? void 0 : loader.url) ?? ((_a2 = loader == null ? void 0 : loader.dataSource) == null ? void 0 : _a2.url) ?? void 0;
28335
+ const fileUid = (key == null ? void 0 : key.fileUid) ?? ((_b = loader == null ? void 0 : loader.coordinationValues) == null ? void 0 : _b.fileUid) ?? void 0;
28336
+ const {
28337
+ position: position2,
28338
+ projectionOrientation,
28339
+ projectionScale,
28340
+ crossSectionScale
28341
+ } = (loader == null ? void 0 : loader.options) ?? {};
28342
+ const isPrecomputed = loader == null ? void 0 : loader.fileType.includes("precomputed");
28343
+ if (!isPrecomputed) {
28344
+ console.warn("Filetype needs to be precomputed");
28345
+ }
28346
+ return {
28347
+ key,
28348
+ type: "segmentation",
28349
+ fileUid,
28350
+ layout: DEFAULT_NG_PROPS.layout,
28351
+ url,
28352
+ source: toPrecomputedSource(url),
28353
+ name: fileUid ?? (key == null ? void 0 : key.name) ?? "segmentation",
28354
+ // For precomputed: nm is the unit used
28355
+ dimensions: normalizeDimensionsToNanometers(loader == null ? void 0 : loader.options),
28356
+ // If not provided, no error, but difficult to see the data
28357
+ position: Array.isArray(position2) && position2.length === 3 ? position2 : DEFAULT_NG_PROPS.position,
28358
+ // If not provided, will have a default orientation
28359
+ projectionOrientation: Array.isArray(projectionOrientation) && projectionOrientation.length === 4 ? projectionOrientation : DEFAULT_NG_PROPS.projectionOrientation,
28360
+ projectionScale: Number.isFinite(projectionScale) ? projectionScale : DEFAULT_NG_PROPS.projectionScale,
28361
+ crossSectionScale: Number.isFinite(crossSectionScale) ? crossSectionScale : DEFAULT_NG_PROPS.crossSectionScale
28362
+ };
28363
+ });
28364
+ }
28365
+ function useExtractOptionsForNg(loaders, dataset, dataType) {
28366
+ const extractedEntities = useMemo(
28367
+ () => extractDataTypeEntities(loaders, dataset, dataType),
28368
+ [loaders, dataset, dataType]
28369
+ );
28370
+ const layers = useMemo(() => extractedEntities.filter((t) => t.source).map((t) => ({
28371
+ type: t.type,
28372
+ source: t.source,
28373
+ segments: [],
28374
+ name: t.name || "segmentation"
28375
+ })), [extractedEntities]);
28376
+ const viewerState = useMemo(() => {
28377
+ var _a, _b, _c, _d, _e;
28378
+ return {
28379
+ dimensions: (_a = extractedEntities[0]) == null ? void 0 : _a.dimensions,
28380
+ position: (_b = extractedEntities[0]) == null ? void 0 : _b.position,
28381
+ crossSectionScale: (_c = extractedEntities[0]) == null ? void 0 : _c.crossSectionScale,
28382
+ projectionOrientation: (_d = extractedEntities[0]) == null ? void 0 : _d.projectionOrientation,
28383
+ projectionScale: (_e = extractedEntities[0]) == null ? void 0 : _e.projectionScale,
28384
+ layers,
28385
+ layout: extractedEntities[0].layout
28386
+ };
28387
+ });
28388
+ return [viewerState];
28389
+ }
28390
+ function useNeuroglancerViewerState(loaders, dataset, isRequired, coordinationSetters, initialCoordinationValues, matchOn) {
28391
+ return useExtractOptionsForNg(loaders, dataset, DataType$1.OBS_SEGMENTATIONS);
28392
+ }
28265
28393
  /**
28266
28394
  * @license
28267
28395
  * Copyright 2010-2023 Three.js Authors
@@ -30060,10 +30188,6 @@ const multiplyQuat = (a, b) => {
30060
30188
  const conjQuat = (q) => [-q[0], -q[1], -q[2], q[3]];
30061
30189
  const rad2deg = (r) => r * 180 / Math.PI;
30062
30190
  const deg2rad = (d) => d * Math.PI / 180;
30063
- function isValidState(viewerState) {
30064
- const { projectionScale, projectionOrientation, position: position2, dimensions } = viewerState || {};
30065
- return dimensions !== void 0 && typeof projectionScale === "number" && Array.isArray(projectionOrientation) && projectionOrientation.length === 4 && Array.isArray(position2) && position2.length === 3;
30066
- }
30067
30191
  function valueGreaterThanEpsilon(a, b, epsilon) {
30068
30192
  if (Array.isArray(a) && Array.isArray(b) && a.length === b.length) {
30069
30193
  return a.some((val, i) => Math.abs(val - b[i]) > epsilon);
@@ -30075,7 +30199,6 @@ function valueGreaterThanEpsilon(a, b, epsilon) {
30075
30199
  }
30076
30200
  const nearEq = (a, b, epsilon) => Number.isFinite(a) && Number.isFinite(b) ? Math.abs(a - b) <= epsilon : a === b;
30077
30201
  function diffCameraState(prev2, next2) {
30078
- if (!isValidState(next2)) return { changed: false, scale: false, pos: false, rot: false };
30079
30202
  const eps = EPSILON_KEYS_MAPPING_NG;
30080
30203
  const scale = valueGreaterThanEpsilon(
30081
30204
  prev2 == null ? void 0 : prev2.projectionScale,
@@ -30137,8 +30260,7 @@ function NeuroglancerSubscriber(props) {
30137
30260
  removeGridComponent,
30138
30261
  theme,
30139
30262
  title = "Neuroglancer",
30140
- helpText = ViewHelpMapping.NEUROGLANCER,
30141
- viewerState: initialViewerState
30263
+ helpText = ViewHelpMapping.NEUROGLANCER
30142
30264
  } = props;
30143
30265
  const loaders = useLoaders();
30144
30266
  const coordinationScopes = useCoordinationScopes(coordinationScopesRaw);
@@ -30171,8 +30293,6 @@ function NeuroglancerSubscriber(props) {
30171
30293
  setSpatialRotationOrbit: setRotationOrbit,
30172
30294
  setSpatialZoom: setZoom
30173
30295
  }] = useCoordination(COMPONENT_COORDINATION_TYPES[ViewType$1.NEUROGLANCER], coordinationScopes);
30174
- const latestViewerStateRef = useRef(initialViewerState);
30175
- const initialRotationPushedRef = useRef(false);
30176
30296
  const { classes } = useStyles();
30177
30297
  const [{ obsSets: cellSets }] = useObsSetsData(
30178
30298
  loaders,
@@ -30190,6 +30310,12 @@ function NeuroglancerSubscriber(props) {
30190
30310
  {},
30191
30311
  { obsType, embeddingType: mapping }
30192
30312
  );
30313
+ const [initalViewerState] = useNeuroglancerViewerState(
30314
+ loaders,
30315
+ dataset
30316
+ );
30317
+ const latestViewerStateRef = useRef(initalViewerState);
30318
+ const initialRotationPushedRef = useRef(false);
30193
30319
  const ngRotPushAtRef = useRef(0);
30194
30320
  const lastInteractionSource = useRef(null);
30195
30321
  const applyNgUpdateTimeoutRef = useRef(null);
@@ -30238,7 +30364,7 @@ function NeuroglancerSubscriber(props) {
30238
30364
  if (!Number.isFinite(projectionScale) || projectionScale <= 0) return;
30239
30365
  const zRef = Number.isFinite(spatialZoom) ? spatialZoom : 0;
30240
30366
  initialRenderCalibratorRef.current = makeVitNgZoomCalibrator(projectionScale, zRef);
30241
- const [px = 0, py = 0, pz = 0] = Array.isArray(position2) ? position2 : [0, 0, 0];
30367
+ const [px = 0, py = 0, pz = 0] = position2;
30242
30368
  const tX = Number.isFinite(spatialTargetX) ? spatialTargetX : 0;
30243
30369
  const tY = Number.isFinite(spatialTargetY) ? spatialTargetY : 0;
30244
30370
  translationOffsetRef.current = [px - tX, py - tY, pz];
package/dist/index.js CHANGED
@@ -1,4 +1,4 @@
1
- import { N } from "./index-O9LG3z3b.js";
1
+ import { N } from "./index-CuSU8uy8.js";
2
2
  export {
3
3
  N as NeuroglancerSubscriber
4
4
  };
@@ -1 +1 @@
1
- {"version":3,"file":"NeuroglancerSubscriber.d.ts","sourceRoot":"","sources":["../src/NeuroglancerSubscriber.js"],"names":[],"mappings":"AAiDA,gEA2dC"}
1
+ {"version":3,"file":"NeuroglancerSubscriber.d.ts","sourceRoot":"","sources":["../src/NeuroglancerSubscriber.js"],"names":[],"mappings":"AAkDA,gEAgeC"}
@@ -5,6 +5,7 @@ import { TitleInfo, useCoordination, useObsSetsData, useLoaders, useObsEmbedding
5
5
  import { ViewHelpMapping, ViewType, COMPONENT_COORDINATION_TYPES, } from '@vitessce/constants-internal';
6
6
  import { mergeObsSets, getCellColors, setObsSelection } from '@vitessce/sets-utils';
7
7
  import { NeuroglancerComp } from './Neuroglancer.js';
8
+ import { useNeuroglancerViewerState } from './data-hook-ng-utils.js';
8
9
  import { useStyles } from './styles.js';
9
10
  import { quaternionToEuler, eulerToQuaternion, valueGreaterThanEpsilon, nearEq, makeVitNgZoomCalibrator, conjQuat, multiplyQuat, rad2deg, deg2rad, Q_Y_UP, } from './utils.js';
10
11
  const VITESSCE_INTERACTION_DELAY = 50;
@@ -23,7 +24,7 @@ function rgbToHex(rgb) {
23
24
  : `#${rgb.map(c => c.toString(16).padStart(2, '0')).join('')}`);
24
25
  }
25
26
  export function NeuroglancerSubscriber(props) {
26
- const { coordinationScopes: coordinationScopesRaw, closeButtonVisible, downloadButtonVisible, removeGridComponent, theme, title = 'Neuroglancer', helpText = ViewHelpMapping.NEUROGLANCER, viewerState: initialViewerState, } = props;
27
+ const { coordinationScopes: coordinationScopesRaw, closeButtonVisible, downloadButtonVisible, removeGridComponent, theme, title = 'Neuroglancer', helpText = ViewHelpMapping.NEUROGLANCER, } = props;
27
28
  const loaders = useLoaders();
28
29
  const coordinationScopes = useCoordinationScopes(coordinationScopesRaw);
29
30
  const [{ dataset, obsType, spatialZoom, spatialTargetX, spatialTargetY, spatialRotationX, spatialRotationY, spatialRotationZ, spatialRotationOrbit,
@@ -32,12 +33,13 @@ export function NeuroglancerSubscriber(props) {
32
33
  // setSpatialRotationY: setRotationY,
33
34
  // setSpatialRotationZ: setRotationZ,
34
35
  setSpatialRotationOrbit: setRotationOrbit, setSpatialZoom: setZoom, }] = useCoordination(COMPONENT_COORDINATION_TYPES[ViewType.NEUROGLANCER], coordinationScopes);
35
- const latestViewerStateRef = useRef(initialViewerState);
36
- const initialRotationPushedRef = useRef(false);
37
36
  // console.log("NG Subs Render orbit", spatialRotationX, spatialRotationY, spatialRotationOrbit);
38
37
  const { classes } = useStyles();
39
38
  const [{ obsSets: cellSets }] = useObsSetsData(loaders, dataset, false, { setObsSetSelection: setCellSetSelection, setObsSetColor: setCellSetColor }, { cellSetSelection, obsSetColor: cellSetColor }, { obsType });
40
39
  const [{ obsIndex }] = useObsEmbeddingData(loaders, dataset, true, {}, {}, { obsType, embeddingType: mapping });
40
+ const [initalViewerState] = useNeuroglancerViewerState(loaders, dataset, false, undefined, undefined, { obsType: 'cell' });
41
+ const latestViewerStateRef = useRef(initalViewerState);
42
+ const initialRotationPushedRef = useRef(false);
41
43
  const ngRotPushAtRef = useRef(0);
42
44
  const lastInteractionSource = useRef(null);
43
45
  const applyNgUpdateTimeoutRef = useRef(null);
@@ -88,7 +90,7 @@ export function NeuroglancerSubscriber(props) {
88
90
  // anchor to current Vitessce zoom
89
91
  const zRef = Number.isFinite(spatialZoom) ? spatialZoom : 0;
90
92
  initialRenderCalibratorRef.current = makeVitNgZoomCalibrator(projectionScale, zRef);
91
- const [px = 0, py = 0, pz = 0] = Array.isArray(position) ? position : [0, 0, 0];
93
+ const [px = 0, py = 0, pz = 0] = position;
92
94
  const tX = Number.isFinite(spatialTargetX) ? spatialTargetX : 0;
93
95
  const tY = Number.isFinite(spatialTargetY) ? spatialTargetY : 0;
94
96
  // TODO: translation off in the first render - turn pz to 0 if z-axis needs to be avoided
@@ -0,0 +1,40 @@
1
+ export function extractDataTypeEntities(loaders: any, dataset: any, dataType: any): {
2
+ key: any;
3
+ type: string;
4
+ fileUid: any;
5
+ layout: string;
6
+ url: any;
7
+ source: string | undefined;
8
+ name: any;
9
+ dimensions: {
10
+ x: [number, "nm"];
11
+ y: [number, "nm"];
12
+ z: [number, "nm"];
13
+ };
14
+ position: any[];
15
+ projectionOrientation: any[];
16
+ projectionScale: any;
17
+ crossSectionScale: any;
18
+ }[];
19
+ export function useExtractOptionsForNg(loaders: any, dataset: any, dataType: any): any[];
20
+ /**
21
+ * Get the parameters for NG's viewerstate.
22
+ * @param {object} loaders The object mapping
23
+ * datasets and data types to loader instances.
24
+ * @param {string} dataset The key for a dataset,
25
+ * used to identify which loader to use.
26
+ * @returns {array} [viewerstate] where
27
+ * viewerState is an object. ref=> (https://neuroglancer-docs.web.app/json/api/index.html#json-Layer.name).
28
+ */
29
+ /**
30
+ * @returns [viewerState]
31
+ */
32
+ export function useNeuroglancerViewerState(loaders: any, dataset: any, isRequired: any, coordinationSetters: any, initialCoordinationValues: any, matchOn: any): any[];
33
+ export namespace DEFAULT_NG_PROPS {
34
+ let layout: string;
35
+ let position: number[];
36
+ let projectionOrientation: number[];
37
+ let projectionScale: number;
38
+ let crossSectionScale: number;
39
+ }
40
+ //# sourceMappingURL=data-hook-ng-utils.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"data-hook-ng-utils.d.ts","sourceRoot":"","sources":["../src/data-hook-ng-utils.js"],"names":[],"mappings":"AAgEA;;;;;;;;;WArBmB,CAAC,MAAM,EAAC,IAAI,CAAC;WAAI,CAAC,MAAM,EAAC,IAAI,CAAC;WAAI,CAAC,MAAM,EAAC,IAAI,CAAC;;;;;;IA6DjE;AAED,yFAyBC;AAGD;;;;;;;;GAQG;AACH;;GAEG;AACH,uKAKC"}
@@ -0,0 +1,130 @@
1
+ import { useMemo } from 'react';
2
+ import { DataType } from '@vitessce/constants-internal';
3
+ export const DEFAULT_NG_PROPS = {
4
+ layout: '3d',
5
+ position: [0, 0, 0],
6
+ projectionOrientation: [0, 0, 0, 1],
7
+ projectionScale: 1024,
8
+ crossSectionScale: 1,
9
+ };
10
+ function toPrecomputedSource(url) {
11
+ if (!url) {
12
+ return undefined;
13
+ }
14
+ return `precomputed://${url}`;
15
+ }
16
+ const UNIT_TO_NM = {
17
+ nm: 1,
18
+ um: 1e3,
19
+ µm: 1e3,
20
+ mm: 1e6,
21
+ cm: 1e7,
22
+ m: 1e9,
23
+ };
24
+ function isInNanometerRange(value, unit, minNm = 1, maxNm = 100) {
25
+ const n = typeof value === 'number' ? value : Number(value);
26
+ if (!Number.isFinite(n))
27
+ return false;
28
+ const factor = unit && UNIT_TO_NM[unit];
29
+ if (!factor)
30
+ return false;
31
+ const nm = n * factor;
32
+ return nm >= minNm && nm <= maxNm;
33
+ }
34
+ /**
35
+ * Normalize dimensionX/Y/Z to nanometers.
36
+ * @param {object} opts
37
+ * @returns {{ x:[number,'nm'], y:[number,'nm'], z:[number,'nm'] }}
38
+ */
39
+ function normalizeDimensionsToNanometers(opts) {
40
+ const { dimensionUnit, dimensionX, dimensionY, dimensionZ } = opts;
41
+ if (!dimensionUnit || !dimensionX || !dimensionY || !dimensionZ) {
42
+ console.warn('Missing dimension info');
43
+ }
44
+ const xNm = isInNanometerRange(dimensionX, dimensionUnit);
45
+ const yNm = isInNanometerRange(dimensionY, dimensionUnit);
46
+ const zNm = isInNanometerRange(dimensionZ, dimensionUnit);
47
+ if (!xNm || !yNm || !zNm) {
48
+ console.warn('Dimension was converted to nm units');
49
+ }
50
+ return {
51
+ x: xNm ? [dimensionX, dimensionUnit] : [1, 'nm'],
52
+ y: yNm ? [dimensionY, dimensionUnit] : [1, 'nm'],
53
+ z: zNm ? [dimensionZ, dimensionUnit] : [1, 'nm'],
54
+ };
55
+ }
56
+ export function extractDataTypeEntities(loaders, dataset, dataType) {
57
+ const datasetEntry = loaders?.[dataset];
58
+ const internMap = datasetEntry?.loaders?.[dataType];
59
+ if (!internMap || typeof internMap.entries !== 'function')
60
+ return [];
61
+ return Array.from(internMap.entries()).map(([key, loader]) => {
62
+ const url = loader?.url ?? loader?.dataSource?.url ?? undefined;
63
+ const fileUid = key?.fileUid
64
+ ?? loader?.coordinationValues?.fileUid
65
+ ?? undefined;
66
+ const { position, projectionOrientation, projectionScale, crossSectionScale } = loader?.options ?? {};
67
+ const isPrecomputed = loader?.fileType.includes('precomputed');
68
+ if (!isPrecomputed) {
69
+ console.warn('Filetype needs to be precomputed');
70
+ }
71
+ return {
72
+ key,
73
+ type: 'segmentation',
74
+ fileUid,
75
+ layout: DEFAULT_NG_PROPS.layout,
76
+ url,
77
+ source: toPrecomputedSource(url),
78
+ name: fileUid ?? key?.name ?? 'segmentation',
79
+ // For precomputed: nm is the unit used
80
+ dimensions: normalizeDimensionsToNanometers(loader?.options),
81
+ // If not provided, no error, but difficult to see the data
82
+ position: Array.isArray(position) && position.length === 3
83
+ ? position : DEFAULT_NG_PROPS.position,
84
+ // If not provided, will have a default orientation
85
+ projectionOrientation: Array.isArray(projectionOrientation)
86
+ && projectionOrientation.length === 4
87
+ ? projectionOrientation : DEFAULT_NG_PROPS.projectionOrientation,
88
+ projectionScale: Number.isFinite(projectionScale)
89
+ ? projectionScale : DEFAULT_NG_PROPS.projectionScale,
90
+ crossSectionScale: Number.isFinite(crossSectionScale)
91
+ ? crossSectionScale : DEFAULT_NG_PROPS.crossSectionScale,
92
+ };
93
+ });
94
+ }
95
+ export function useExtractOptionsForNg(loaders, dataset, dataType) {
96
+ const extractedEntities = useMemo(() => extractDataTypeEntities(loaders, dataset, dataType), [loaders, dataset, dataType]);
97
+ const layers = useMemo(() => extractedEntities
98
+ .filter(t => t.source)
99
+ .map(t => ({
100
+ type: t.type,
101
+ source: t.source,
102
+ segments: [],
103
+ name: t.name || 'segmentation',
104
+ })), [extractedEntities]);
105
+ const viewerState = useMemo(() => ({
106
+ dimensions: extractedEntities[0]?.dimensions,
107
+ position: extractedEntities[0]?.position,
108
+ crossSectionScale: extractedEntities[0]?.crossSectionScale,
109
+ projectionOrientation: extractedEntities[0]?.projectionOrientation,
110
+ projectionScale: extractedEntities[0]?.projectionScale,
111
+ layers,
112
+ layout: extractedEntities[0].layout,
113
+ }));
114
+ return [viewerState];
115
+ }
116
+ /**
117
+ * Get the parameters for NG's viewerstate.
118
+ * @param {object} loaders The object mapping
119
+ * datasets and data types to loader instances.
120
+ * @param {string} dataset The key for a dataset,
121
+ * used to identify which loader to use.
122
+ * @returns {array} [viewerstate] where
123
+ * viewerState is an object. ref=> (https://neuroglancer-docs.web.app/json/api/index.html#json-Layer.name).
124
+ */
125
+ /**
126
+ * @returns [viewerState]
127
+ */
128
+ export function useNeuroglancerViewerState(loaders, dataset, isRequired, coordinationSetters, initialCoordinationValues, matchOn) {
129
+ return useExtractOptionsForNg(loaders, dataset, DataType.OBS_SEGMENTATIONS, matchOn);
130
+ }
@@ -0,0 +1,2 @@
1
+ export {};
2
+ //# sourceMappingURL=data-hook-ng-utils.test.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"data-hook-ng-utils.test.d.ts","sourceRoot":"","sources":["../src/data-hook-ng-utils.test.js"],"names":[],"mappings":""}
@@ -0,0 +1,35 @@
1
+ import { describe, it, expect } from 'vitest';
2
+ import { extractDataTypeEntities, DEFAULT_NG_PROPS, } from './data-hook-ng-utils.js';
3
+ describe('extractDataTypeEntities (minimal tests)', () => {
4
+ it('returns empty array when internMap is missing or invalid', () => {
5
+ expect(extractDataTypeEntities({}, 'A', 'obsSegmentations')).toEqual([]);
6
+ expect(extractDataTypeEntities({ A: { loaders: {} } }, 'A', 'obsSegmentations')).toEqual([]);
7
+ expect(extractDataTypeEntities({ A: { loaders: { obsSegmentations: {} } } }, 'A', 'obsSegmentations')).toEqual([]);
8
+ });
9
+ it('builds an entity for a precomputed loader and applies sane defaults', () => {
10
+ const key = { fileUid: 'melanoma-meshes' };
11
+ const loader = {
12
+ fileType: 'obsSegmentations.ng-precomputed',
13
+ url: 'https://www.example.com/example/example_meshes',
14
+ options: { projectionScale: 2048 },
15
+ };
16
+ const internMap = new Map([[key, loader]]);
17
+ const loaders = { A: { loaders: { obsSegmentations: internMap } } };
18
+ const out = extractDataTypeEntities(loaders, 'A', 'obsSegmentations');
19
+ expect(out).toHaveLength(1);
20
+ const e = out[0];
21
+ expect(e.key).toBe(key);
22
+ expect(e.type).toBe('segmentation');
23
+ expect(e.fileUid).toBe('melanoma-meshes');
24
+ expect(e.layout).toBe(DEFAULT_NG_PROPS.layout);
25
+ // URL + source prefixing
26
+ expect(e.url).toBe(loader.url);
27
+ expect(e.source).toBe('precomputed://https://www.example.com/example/example_meshes');
28
+ expect(e.dimensions).toEqual({ x: [1, 'nm'], y: [1, 'nm'], z: [1, 'nm'] });
29
+ // camera defaults + single override
30
+ expect(e.position).toEqual(DEFAULT_NG_PROPS.position);
31
+ expect(e.projectionOrientation).toEqual(DEFAULT_NG_PROPS.projectionOrientation);
32
+ expect(e.projectionScale).toBe(2048);
33
+ expect(e.crossSectionScale).toBe(DEFAULT_NG_PROPS.crossSectionScale);
34
+ });
35
+ });
@@ -1,3 +1,8 @@
1
+ /**
2
+ * Is this a valid viewerState object?
3
+ * @param {object} viewerState
4
+ * @returns {boolean}
5
+ */
1
6
  /**
2
7
  * Returns true if the difference is greater than the epsilon for that key.
3
8
  * @param {array | number} a Previous viewerState key, i.e., position.
@@ -1 +1 @@
1
- {"version":3,"file":"utils.d.ts","sourceRoot":"","sources":["../src/utils.js"],"names":[],"mappings":"AA8DA;;;;;GAKG;AAEH,2CALW,KAAK,GAAG,MAAM,KACd,KAAK,GAAG,MAAM,qCAYxB;AAMD;;;;;GAKG;AAEH,gDALW,MAAM,aACN,MAAM,WAYhB;AAID;;;;;EAgBC;AAID,6EAUC;AAID,8EAIC;AAID;;;;EAWC;;;;;;AA3ID,mCAAoC;AAEpC,8BAAmC;AAG5B,uDASN;AAEM,wCAAmD;AAGnD,mDAA4F;AAG5F,wCAAsC;AACtC,wCAAsC;AAwCtC,8DAEN"}
1
+ {"version":3,"file":"utils.d.ts","sourceRoot":"","sources":["../src/utils.js"],"names":[],"mappings":"AA6CA;;;;GAIG;AAcH;;;;;GAKG;AAEH,2CALW,KAAK,GAAG,MAAM,KACd,KAAK,GAAG,MAAM,qCAYxB;AAMD;;;;;GAKG;AAEH,gDALW,MAAM,aACN,MAAM,WAYhB;AAID;;;;;EAgBC;AAID,6EAUC;AAID,8EAIC;AAID;;;;EAWC;;;;;;AA5ID,mCAAoC;AAEpC,8BAAmC;AAG5B,uDASN;AAEM,wCAAmD;AAGnD,mDAA4F;AAG5F,wCAAsC;AACtC,wCAAsC;AAyCtC,8DAEN"}
package/dist-tsc/utils.js CHANGED
@@ -33,15 +33,18 @@ export const deg2rad = d => d * Math.PI / 180;
33
33
  * @param {object} viewerState
34
34
  * @returns {boolean}
35
35
  */
36
- function isValidState(viewerState) {
37
- const { projectionScale, projectionOrientation, position, dimensions } = viewerState || {};
38
- return (dimensions !== undefined
39
- && typeof projectionScale === 'number'
40
- && Array.isArray(projectionOrientation)
41
- && projectionOrientation.length === 4
42
- && Array.isArray(position)
43
- && position.length === 3);
44
- }
36
+ // function isValidState(viewerState) {
37
+ // const { projectionScale, projectionOrientation, position, dimensions } = viewerState || {};
38
+ // console.log ("valid", projectionScale, projectionOrientation, position, dimensions)
39
+ // return (
40
+ // dimensions !== undefined
41
+ // && typeof projectionScale === 'number'
42
+ // && Array.isArray(projectionOrientation)
43
+ // && projectionOrientation.length === 4
44
+ // && Array.isArray(position)
45
+ // && position.length === 3
46
+ // );
47
+ // }
45
48
  /**
46
49
  * Returns true if the difference is greater than the epsilon for that key.
47
50
  * @param {array | number} a Previous viewerState key, i.e., position.
@@ -65,16 +68,14 @@ export const nearEq = (a, b, epsilon) => (Number.isFinite(a) && Number.isFinite(
65
68
  * @returns {Boolean} True if any key has changed
66
69
  */
67
70
  export function didCameraStateChange(prevState, nextState) {
68
- if (!isValidState(nextState))
69
- return false;
71
+ // if (!isValidState(nextState)) return false;
70
72
  return Object.entries(EPSILON_KEYS_MAPPING_NG)
71
73
  .some(([key, eps]) => valueGreaterThanEpsilon(prevState?.[key], nextState?.[key], eps));
72
74
  }
73
75
  // To see if any and which cameraState has changed
74
76
  // adjust for coupled zoom+position changes
75
77
  export function diffCameraState(prev, next) {
76
- if (!isValidState(next))
77
- return { changed: false, scale: false, pos: false, rot: false };
78
+ // if (!isValidState(next)) return { changed: false, scale: false, pos: false, rot: false };
78
79
  const eps = EPSILON_KEYS_MAPPING_NG;
79
80
  const scale = valueGreaterThanEpsilon(prev?.projectionScale, next?.projectionScale, eps.projectionScale);
80
81
  const posHard = valueGreaterThanEpsilon(prev?.position, next?.position, eps.position);
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@vitessce/neuroglancer",
3
- "version": "3.8.1",
3
+ "version": "3.8.3",
4
4
  "author": "Gehlenborg Lab",
5
5
  "homepage": "http://vitessce.io",
6
6
  "repository": {
@@ -20,13 +20,13 @@
20
20
  "lodash-es": "^4.17.21",
21
21
  "three": "^0.154.0",
22
22
  "react": "18.3.1",
23
- "@vitessce/neuroglancer-workers": "3.8.1",
24
- "@vitessce/styles": "3.8.1",
25
- "@vitessce/constants-internal": "3.8.1",
26
- "@vitessce/vit-s": "3.8.1",
27
- "@vitessce/sets-utils": "3.8.1",
28
- "@vitessce/tooltip": "3.8.1",
29
- "@vitessce/utils": "3.8.1"
23
+ "@vitessce/neuroglancer-workers": "3.8.3",
24
+ "@vitessce/styles": "3.8.3",
25
+ "@vitessce/constants-internal": "3.8.3",
26
+ "@vitessce/vit-s": "3.8.3",
27
+ "@vitessce/sets-utils": "3.8.3",
28
+ "@vitessce/utils": "3.8.3",
29
+ "@vitessce/tooltip": "3.8.3"
30
30
  },
31
31
  "devDependencies": {
32
32
  "@testing-library/jest-dom": "^6.6.3",
@@ -15,6 +15,7 @@ import {
15
15
  } from '@vitessce/constants-internal';
16
16
  import { mergeObsSets, getCellColors, setObsSelection } from '@vitessce/sets-utils';
17
17
  import { NeuroglancerComp } from './Neuroglancer.js';
18
+ import { useNeuroglancerViewerState } from './data-hook-ng-utils.js';
18
19
  import { useStyles } from './styles.js';
19
20
  import {
20
21
  quaternionToEuler,
@@ -56,7 +57,6 @@ export function NeuroglancerSubscriber(props) {
56
57
  theme,
57
58
  title = 'Neuroglancer',
58
59
  helpText = ViewHelpMapping.NEUROGLANCER,
59
- viewerState: initialViewerState,
60
60
  } = props;
61
61
 
62
62
  const loaders = useLoaders();
@@ -93,9 +93,6 @@ export function NeuroglancerSubscriber(props) {
93
93
  }] = useCoordination(COMPONENT_COORDINATION_TYPES[ViewType.NEUROGLANCER], coordinationScopes);
94
94
 
95
95
 
96
- const latestViewerStateRef = useRef(initialViewerState);
97
- const initialRotationPushedRef = useRef(false);
98
-
99
96
  // console.log("NG Subs Render orbit", spatialRotationX, spatialRotationY, spatialRotationOrbit);
100
97
 
101
98
  const { classes } = useStyles();
@@ -112,6 +109,15 @@ export function NeuroglancerSubscriber(props) {
112
109
  { obsType, embeddingType: mapping },
113
110
  );
114
111
 
112
+ const [initalViewerState] = useNeuroglancerViewerState(
113
+ loaders, dataset, false,
114
+ undefined, undefined,
115
+ { obsType: 'cell' },
116
+ );
117
+
118
+ const latestViewerStateRef = useRef(initalViewerState);
119
+ const initialRotationPushedRef = useRef(false);
120
+
115
121
  const ngRotPushAtRef = useRef(0);
116
122
  const lastInteractionSource = useRef(null);
117
123
  const applyNgUpdateTimeoutRef = useRef(null);
@@ -170,7 +176,7 @@ export function NeuroglancerSubscriber(props) {
170
176
  const zRef = Number.isFinite(spatialZoom) ? spatialZoom : 0;
171
177
  initialRenderCalibratorRef.current = makeVitNgZoomCalibrator(projectionScale, zRef);
172
178
 
173
- const [px = 0, py = 0, pz = 0] = Array.isArray(position) ? position : [0, 0, 0];
179
+ const [px = 0, py = 0, pz = 0] = position;
174
180
  const tX = Number.isFinite(spatialTargetX) ? spatialTargetX : 0;
175
181
  const tY = Number.isFinite(spatialTargetY) ? spatialTargetY : 0;
176
182
  // TODO: translation off in the first render - turn pz to 0 if z-axis needs to be avoided
@@ -0,0 +1,152 @@
1
+ import { useMemo } from 'react';
2
+ import { DataType } from '@vitessce/constants-internal';
3
+
4
+
5
// Fallback Neuroglancer viewer-state values, applied whenever a loader's
// options do not supply the corresponding property.
export const DEFAULT_NG_PROPS = {
  // Render in the 3D (perspective) layout by default.
  layout: '3d',
  // Camera focus point (x, y, z) in the layer's coordinate space.
  position: [0, 0, 0],
  // Quaternion (x, y, z, w); [0, 0, 0, 1] is the identity (no rotation).
  projectionOrientation: [0, 0, 0, 1],
  // Default zoom for the 3D projection view.
  projectionScale: 1024,
  // Default zoom for the 2D cross-section views.
  crossSectionScale: 1,
};
12
+
13
/**
 * Prefix a URL with the `precomputed://` scheme that Neuroglancer expects
 * for precomputed data sources.
 * @param {string|undefined} url - Base URL of the data source.
 * @returns {string|undefined} The prefixed source string, or undefined when
 * no URL was given.
 */
function toPrecomputedSource(url) {
  return url ? `precomputed://${url}` : undefined;
}
19
+
20
// Conversion factors from supported length units to nanometers.
const UNIT_TO_NM = {
  nm: 1,
  um: 1e3,
  µm: 1e3,
  mm: 1e6,
  cm: 1e7,
  m: 1e9,
};


/**
 * Check whether a dimension value, interpreted in the given unit, falls
 * inside a nanometer window (defaults to [1 nm, 100 nm]).
 * @param {number|string} value - Dimension magnitude; strings are coerced.
 * @param {string} unit - A key of UNIT_TO_NM; unknown units yield false.
 * @param {number} [minNm=1] - Lower bound of the window, in nm.
 * @param {number} [maxNm=100] - Upper bound of the window, in nm.
 * @returns {boolean} True when the converted value lies within the window.
 */
function isInNanometerRange(value, unit, minNm = 1, maxNm = 100) {
  const magnitude = typeof value === 'number' ? value : Number(value);
  if (!Number.isFinite(magnitude)) {
    return false;
  }

  const nmPerUnit = unit ? UNIT_TO_NM[unit] : undefined;
  if (!nmPerUnit) {
    return false;
  }

  const magnitudeNm = magnitude * nmPerUnit;
  return magnitudeNm >= minNm && magnitudeNm <= maxNm;
}
40
+
41
/**
 * Normalize dimensionX/Y/Z to nanometers, the unit used by the
 * precomputed format.
 *
 * Axes whose value converts into the accepted nanometer window are
 * returned converted to nm; missing, non-numeric, unknown-unit, or
 * out-of-range axes fall back to [1, 'nm'].
 * @param {object} [opts] - Loader options carrying dimensionUnit and
 * dimensionX/dimensionY/dimensionZ; may be undefined.
 * @returns {{ x:[number,'nm'], y:[number,'nm'], z:[number,'nm'] }}
 */
function normalizeDimensionsToNanometers(opts) {
  // Tolerate a missing options object (callers pass raw `loader?.options`).
  const { dimensionUnit, dimensionX, dimensionY, dimensionZ } = opts ?? {};

  if (!dimensionUnit || !dimensionX || !dimensionY || !dimensionZ) {
    console.warn('Missing dimension info');
  }

  // nm-per-unit factor; undefined for unknown units, in which case every
  // axis falls through to the [1, 'nm'] default below.
  const factor = dimensionUnit ? UNIT_TO_NM[dimensionUnit] : undefined;

  const xNm = isInNanometerRange(dimensionX, dimensionUnit);
  const yNm = isInNanometerRange(dimensionY, dimensionUnit);
  const zNm = isInNanometerRange(dimensionZ, dimensionUnit);
  if (!xNm || !yNm || !zNm) {
    console.warn('Dimension was converted to nm units');
  }

  // Express accepted axes in nm so the returned tuples honor the documented
  // [number, 'nm'] contract regardless of the input unit.
  return {
    x: xNm ? [Number(dimensionX) * factor, 'nm'] : [1, 'nm'],
    y: yNm ? [Number(dimensionY) * factor, 'nm'] : [1, 'nm'],
    z: zNm ? [Number(dimensionZ) * factor, 'nm'] : [1, 'nm'],
  };
}
64
+
65
/**
 * Collect Neuroglancer-relevant metadata for every loader of the given
 * data type within a dataset.
 * @param {object} loaders - The object mapping dataset uids to their
 * `{ loaders }` entries.
 * @param {string} dataset - The dataset uid to look up.
 * @param {string} dataType - The data type key (e.g. 'obsSegmentations').
 * @returns {object[]} One entry per loader, carrying source URL and camera
 * parameters with defaults applied; empty array when nothing is loaded.
 */
export function extractDataTypeEntities(loaders, dataset, dataType) {
  const datasetEntry = loaders?.[dataset];
  const internMap = datasetEntry?.loaders?.[dataType];
  // The per-type container is map-like (InternMap); bail out when it is
  // absent or lacks .entries().
  if (!internMap || typeof internMap.entries !== 'function') return [];

  return Array.from(internMap.entries()).map(([key, loader]) => {
    const url = loader?.url ?? loader?.dataSource?.url ?? undefined;
    const fileUid = key?.fileUid
      ?? loader?.coordinationValues?.fileUid
      ?? undefined;

    const { position, projectionOrientation,
      projectionScale, crossSectionScale } = loader?.options ?? {};
    // Optional-chain fileType too: a loader without a fileType should hit
    // the warning below, not throw a TypeError.
    const isPrecomputed = loader?.fileType?.includes('precomputed');
    if (!isPrecomputed) {
      console.warn('Filetype needs to be precomputed');
    }
    return {
      key,
      type: 'segmentation',
      fileUid,
      layout: DEFAULT_NG_PROPS.layout,
      url,
      source: toPrecomputedSource(url),
      name: fileUid ?? key?.name ?? 'segmentation',
      // For precomputed: nm is the unit used
      dimensions: normalizeDimensionsToNanometers(loader?.options),
      // If not provided, no error, but difficult to see the data
      position: Array.isArray(position) && position.length === 3
        ? position : DEFAULT_NG_PROPS.position,
      // If not provided, will have a default orientation
      projectionOrientation: Array.isArray(projectionOrientation)
        && projectionOrientation.length === 4
        ? projectionOrientation : DEFAULT_NG_PROPS.projectionOrientation,
      projectionScale: Number.isFinite(projectionScale)
        ? projectionScale : DEFAULT_NG_PROPS.projectionScale,
      crossSectionScale: Number.isFinite(crossSectionScale)
        ? crossSectionScale : DEFAULT_NG_PROPS.crossSectionScale,
    };
  });
}
106
+
107
/**
 * React hook that derives a Neuroglancer viewer state from the loaders of
 * a dataset/data type.
 * @param {object} loaders - The object mapping datasets and data types to
 * loader instances.
 * @param {string} dataset - The dataset uid to extract from.
 * @param {string} dataType - The data type key to extract.
 * @returns {[object]} Single-element array containing the viewer state.
 */
export function useExtractOptionsForNg(loaders, dataset, dataType) {
  const extractedEntities = useMemo(
    () => extractDataTypeEntities(loaders, dataset, dataType),
    [loaders, dataset, dataType],
  );

  // One Neuroglancer segmentation layer per entity with a resolvable source.
  const layers = useMemo(() => extractedEntities
    .filter(t => t.source)
    .map(t => ({
      type: t.type,
      source: t.source,
      segments: [],
      name: t.name || 'segmentation',
    })), [extractedEntities]);

  // Camera/dimension values come from the first entity. The dependency
  // array keeps the object identity stable across renders, and the
  // optional chain on `layout` avoids a crash when no entity exists.
  const viewerState = useMemo(() => ({
    dimensions: extractedEntities[0]?.dimensions,
    position: extractedEntities[0]?.position,
    crossSectionScale: extractedEntities[0]?.crossSectionScale,
    projectionOrientation: extractedEntities[0]?.projectionOrientation,
    projectionScale: extractedEntities[0]?.projectionScale,
    layers,
    layout: extractedEntities[0]?.layout ?? DEFAULT_NG_PROPS.layout,
  }), [extractedEntities, layers]);

  return [viewerState];
}
133
+
134
+
135
/**
 * Get the parameters for Neuroglancer's viewerState.
 * See https://neuroglancer-docs.web.app/json/api/index.html#json-Layer.name
 * @param {object} loaders The object mapping
 * datasets and data types to loader instances.
 * @param {string} dataset The key for a dataset,
 * used to identify which loader to use.
 * @param {boolean} isRequired Accepted for API symmetry; not read here.
 * @param {object} coordinationSetters Accepted for API symmetry; not read here.
 * @param {object} initialCoordinationValues Accepted for API symmetry; not read here.
 * @param {object} matchOn Forwarded to useExtractOptionsForNg.
 * @returns {array} [viewerState] where viewerState is an object.
 */
export function useNeuroglancerViewerState(
  loaders, dataset, isRequired,
  coordinationSetters, initialCoordinationValues, matchOn,
) {
  // Viewer state is always derived from the segmentation loaders.
  const segmentationType = DataType.OBS_SEGMENTATIONS;
  return useExtractOptionsForNg(loaders, dataset, segmentationType, matchOn);
}
@@ -0,0 +1,52 @@
1
+ import { describe, it, expect } from 'vitest';
2
+
3
+ import {
4
+ extractDataTypeEntities,
5
+ DEFAULT_NG_PROPS,
6
+ } from './data-hook-ng-utils.js';
7
+
8
// Unit tests for extractDataTypeEntities: guard behavior on missing/invalid
// input, and entity construction with defaults for a precomputed loader.
describe('extractDataTypeEntities (minimal tests)', () => {
  it('returns empty array when internMap is missing or invalid', () => {
    // No such dataset key at all.
    expect(extractDataTypeEntities({}, 'A', 'obsSegmentations')).toEqual([]);

    // Dataset exists but has no loader registered for the data type.
    expect(
      extractDataTypeEntities({ A: { loaders: {} } }, 'A', 'obsSegmentations'),
    ).toEqual([]);

    // Loader entry exists but is not map-like (no .entries()).
    expect(
      extractDataTypeEntities({ A: { loaders: { obsSegmentations: {} } } }, 'A', 'obsSegmentations'),
    ).toEqual([]);
  });

  it('builds an entity for a precomputed loader and applies sane defaults', () => {
    // Keys in the loader InternMap are objects carrying the fileUid.
    const key = { fileUid: 'melanoma-meshes' };
    const loader = {
      fileType: 'obsSegmentations.ng-precomputed',
      url: 'https://www.example.com/example/example_meshes',
      options: { projectionScale: 2048 },
    };
    const internMap = new Map([[key, loader]]);
    const loaders = { A: { loaders: { obsSegmentations: internMap } } };

    const out = extractDataTypeEntities(loaders, 'A', 'obsSegmentations');
    expect(out).toHaveLength(1);

    const e = out[0];
    expect(e.key).toBe(key);
    expect(e.type).toBe('segmentation');
    expect(e.fileUid).toBe('melanoma-meshes');
    expect(e.layout).toBe(DEFAULT_NG_PROPS.layout);

    // URL + source prefixing
    expect(e.url).toBe(loader.url);
    expect(e.source).toBe('precomputed://https://www.example.com/example/example_meshes');

    // No dimension options given, so every axis falls back to [1, 'nm'].
    expect(e.dimensions).toEqual({ x: [1, 'nm'], y: [1, 'nm'], z: [1, 'nm'] });

    // camera defaults + single override
    expect(e.position).toEqual(DEFAULT_NG_PROPS.position);
    expect(e.projectionOrientation).toEqual(DEFAULT_NG_PROPS.projectionOrientation);
    expect(e.projectionScale).toBe(2048);
    expect(e.crossSectionScale).toBe(DEFAULT_NG_PROPS.crossSectionScale);
  });
});
package/src/utils.js CHANGED
@@ -48,17 +48,18 @@ export const deg2rad = d => d * Math.PI / 180;
48
48
  * @param {object} viewerState
49
49
  * @returns {boolean}
50
50
  */
51
- function isValidState(viewerState) {
52
- const { projectionScale, projectionOrientation, position, dimensions } = viewerState || {};
53
- return (
54
- dimensions !== undefined
55
- && typeof projectionScale === 'number'
56
- && Array.isArray(projectionOrientation)
57
- && projectionOrientation.length === 4
58
- && Array.isArray(position)
59
- && position.length === 3
60
- );
61
- }
51
+ // function isValidState(viewerState) {
52
+ // const { projectionScale, projectionOrientation, position, dimensions } = viewerState || {};
53
+ // console.log ("valid", projectionScale, projectionOrientation, position, dimensions)
54
+ // return (
55
+ // dimensions !== undefined
56
+ // && typeof projectionScale === 'number'
57
+ // && Array.isArray(projectionOrientation)
58
+ // && projectionOrientation.length === 4
59
+ // && Array.isArray(position)
60
+ // && position.length === 3
61
+ // );
62
+ // }
62
63
 
63
64
  /**
64
65
  * Returns true if the difference is greater than the epsilon for that key.
@@ -89,7 +90,7 @@ export const nearEq = (a, b, epsilon) => (
89
90
  */
90
91
 
91
92
  export function didCameraStateChange(prevState, nextState) {
92
- if (!isValidState(nextState)) return false;
93
+ // if (!isValidState(nextState)) return false;
93
94
  return Object.entries(EPSILON_KEYS_MAPPING_NG)
94
95
  .some(([key, eps]) => valueGreaterThanEpsilon(
95
96
  prevState?.[key],
@@ -101,7 +102,7 @@ export function didCameraStateChange(prevState, nextState) {
101
102
  // To see if any and which cameraState has changed
102
103
  // adjust for coupled zoom+position changes
103
104
  export function diffCameraState(prev, next) {
104
- if (!isValidState(next)) return { changed: false, scale: false, pos: false, rot: false };
105
+ // if (!isValidState(next)) return { changed: false, scale: false, pos: false, rot: false };
105
106
 
106
107
  const eps = EPSILON_KEYS_MAPPING_NG;
107
108
  const scale = valueGreaterThanEpsilon(prev?.projectionScale,