siibra 0.5a2__py3-none-any.whl → 1.0.0a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (83)
  1. siibra/VERSION +1 -1
  2. siibra/__init__.py +20 -12
  3. siibra/commons.py +145 -90
  4. siibra/configuration/__init__.py +1 -1
  5. siibra/configuration/configuration.py +22 -17
  6. siibra/configuration/factory.py +177 -128
  7. siibra/core/__init__.py +1 -8
  8. siibra/core/{relation_qualification.py → assignment.py} +17 -14
  9. siibra/core/atlas.py +66 -35
  10. siibra/core/concept.py +81 -39
  11. siibra/core/parcellation.py +83 -67
  12. siibra/core/region.py +569 -263
  13. siibra/core/space.py +7 -39
  14. siibra/core/structure.py +111 -0
  15. siibra/exceptions.py +63 -0
  16. siibra/experimental/__init__.py +19 -0
  17. siibra/experimental/contour.py +61 -0
  18. siibra/experimental/cortical_profile_sampler.py +57 -0
  19. siibra/experimental/patch.py +98 -0
  20. siibra/experimental/plane3d.py +256 -0
  21. siibra/explorer/__init__.py +16 -0
  22. siibra/explorer/url.py +112 -52
  23. siibra/explorer/util.py +31 -9
  24. siibra/features/__init__.py +73 -8
  25. siibra/features/anchor.py +75 -196
  26. siibra/features/connectivity/__init__.py +1 -1
  27. siibra/features/connectivity/functional_connectivity.py +2 -2
  28. siibra/features/connectivity/regional_connectivity.py +99 -10
  29. siibra/features/connectivity/streamline_counts.py +1 -1
  30. siibra/features/connectivity/streamline_lengths.py +1 -1
  31. siibra/features/connectivity/tracing_connectivity.py +1 -1
  32. siibra/features/dataset/__init__.py +1 -1
  33. siibra/features/dataset/ebrains.py +3 -3
  34. siibra/features/feature.py +219 -110
  35. siibra/features/image/__init__.py +1 -1
  36. siibra/features/image/image.py +21 -13
  37. siibra/features/image/sections.py +1 -1
  38. siibra/features/image/volume_of_interest.py +1 -1
  39. siibra/features/tabular/__init__.py +1 -1
  40. siibra/features/tabular/bigbrain_intensity_profile.py +24 -13
  41. siibra/features/tabular/cell_density_profile.py +111 -69
  42. siibra/features/tabular/cortical_profile.py +82 -16
  43. siibra/features/tabular/gene_expression.py +117 -6
  44. siibra/features/tabular/layerwise_bigbrain_intensities.py +7 -9
  45. siibra/features/tabular/layerwise_cell_density.py +9 -24
  46. siibra/features/tabular/receptor_density_fingerprint.py +11 -6
  47. siibra/features/tabular/receptor_density_profile.py +12 -15
  48. siibra/features/tabular/regional_timeseries_activity.py +74 -18
  49. siibra/features/tabular/tabular.py +17 -8
  50. siibra/livequeries/__init__.py +1 -7
  51. siibra/livequeries/allen.py +139 -77
  52. siibra/livequeries/bigbrain.py +104 -128
  53. siibra/livequeries/ebrains.py +7 -4
  54. siibra/livequeries/query.py +1 -2
  55. siibra/locations/__init__.py +32 -25
  56. siibra/locations/boundingbox.py +153 -127
  57. siibra/locations/location.py +45 -80
  58. siibra/locations/point.py +97 -83
  59. siibra/locations/pointcloud.py +349 -0
  60. siibra/retrieval/__init__.py +1 -1
  61. siibra/retrieval/cache.py +107 -13
  62. siibra/retrieval/datasets.py +9 -14
  63. siibra/retrieval/exceptions/__init__.py +2 -1
  64. siibra/retrieval/repositories.py +147 -53
  65. siibra/retrieval/requests.py +64 -29
  66. siibra/vocabularies/__init__.py +2 -2
  67. siibra/volumes/__init__.py +7 -9
  68. siibra/volumes/parcellationmap.py +396 -253
  69. siibra/volumes/providers/__init__.py +20 -0
  70. siibra/volumes/providers/freesurfer.py +113 -0
  71. siibra/volumes/{gifti.py → providers/gifti.py} +29 -18
  72. siibra/volumes/{neuroglancer.py → providers/neuroglancer.py} +204 -92
  73. siibra/volumes/{nifti.py → providers/nifti.py} +64 -44
  74. siibra/volumes/providers/provider.py +107 -0
  75. siibra/volumes/sparsemap.py +159 -260
  76. siibra/volumes/volume.py +720 -152
  77. {siibra-0.5a2.dist-info → siibra-1.0.0a1.dist-info}/METADATA +25 -28
  78. siibra-1.0.0a1.dist-info/RECORD +84 -0
  79. {siibra-0.5a2.dist-info → siibra-1.0.0a1.dist-info}/WHEEL +1 -1
  80. siibra/locations/pointset.py +0 -198
  81. siibra-0.5a2.dist-info/RECORD +0 -74
  82. {siibra-0.5a2.dist-info → siibra-1.0.0a1.dist-info}/LICENSE +0 -0
  83. {siibra-0.5a2.dist-info → siibra-1.0.0a1.dist-info}/top_level.txt +0 -0
siibra/experimental/plane3d.py ADDED
@@ -0,0 +1,256 @@
+ # Copyright 2018-2024
+ # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
+
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+
+ # http://www.apache.org/licenses/LICENSE-2.0
+
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ from . import contour
+ from . import patch
+ from ..locations import point, pointcloud
+ from ..volumes import volume
+
+ import numpy as np
+
+
+ class Plane3D:
+     """
+     A 3D plane in reference space.
+     This shall eventually be derived from siibra.Location
+     """
+
+     def __init__(self, point1: point.Point, point2: point.Point, point3: point.Point):
+         """
+         Create a 3D plane from 3 points.
+         The plane's reference space is defined by the first point.
+         """
+         self.space = point1.space
+         # normal is the cross product of two arbitrary in-plane vectors
+         n = np.cross(
+             (point2.warp(self.space) - point1).coordinate,
+             (point3.warp(self.space) - point1).coordinate,
+         )
+         self._n = n / np.linalg.norm(n)
+         # distance from origin is the projection of any in-plane point onto the normal
+         self._d = np.dot(point1.coordinate, self._n)
+
+     @property
+     def normal(self):
+         return self._n
+
+     @property
+     def distance_from_origin(self):
+         return self._d
+
+     def sidedness(self, points: np.ndarray):
+         """
+         Computes labels for a set of 3D coordinates classifying them
+         by the halfspaces spanned by this plane.
+         """
+         assert points.shape[1] == 3
+         return (np.dot(points, self.normal) >= self.distance_from_origin).astype("int")
+
+     def intersect_line_segments(self, startpoints: np.ndarray, endpoints: np.ndarray):
+         """
+         Intersects a set of straight line segments with the plane.
+         Returns the set of intersection points.
+         The line segments are given by two Nx3 arrays of their start- and endpoints.
+         The result is an Nx3 list of intersection coordinates.
+         TODO This returns an intersection even if the line segment intersects the plane,
+
+         """
+         directions = endpoints - startpoints
+         lengths = np.linalg.norm(directions, axis=1)
+         directions = directions / lengths[:, None]
+         lambdas = (self._d - np.dot(startpoints, self._n)) / np.dot(directions, self._n)
+         assert all(lambdas >= 0)
+         result = startpoints + lambdas[:, None] * directions
+         non_intersecting = lambdas > lengths
+         num_failed = np.count_nonzero(non_intersecting)
+         result[non_intersecting, :] = np.nan
+         if num_failed > 0:
+             print(
+                 "WARNING: line segment intersection includes NaN rows "
+                 f"for {num_failed} non-intersecting segments."
+             )
+         return result
+
+     def intersect_mesh(self, mesh: dict):
+         """
+         Intersects a 3D surface mesh with the plane.
+         Returns a set of split 2D contours, represented by ordered coordinate lists.
+         The output contour coordinates are intersection points of mesh edges going through the plane,
+         and only combined into a contour if arising from neighboring edges in the mesh.
+         The mesh is passed as a dictionary with an Nx3 array "verts" of vertex coordinates,
+         and an Mx3 array "faces" of face definitions.
+         Each row in the face array corresponds to the three indices of vertices making up the
+         triangle.
+         The result is a list of contour segments, each represented as a PointCloud
+         holding the ordered list of contour points.
+         The point labels in each "contour" PointCloud hold the index of the face in the
+         mesh which made up each contour point.
+         """
+
+         # select faces whose vertices are in different halfspaces relative to the y plane
+         vertex_in_halfspace = self.sidedness(mesh["verts"])
+         face_vertex_in_halfspace = vertex_in_halfspace[mesh["faces"]]
+         face_indices = np.where(
+             face_vertex_in_halfspace.min(1) != face_vertex_in_halfspace.max(1)
+         )[0]
+         faces = mesh["faces"][face_indices]
+
+         # for each of N selected faces, indicate whether we cross the plane
+         # as we go from vertex 2->0, 0->1, 1->2, respectively.
+         # This gives us an Nx3 array, where forward crossings are identified by 1,
+         # and backward crossings by -1.
+         # Each column of the crossings is linked to two columns of the faces array.
+         crossings = np.diff(
+             face_vertex_in_halfspace[face_indices][:, [2, 0, 1, 2]], axis=1
+         )
+         face_columns = np.array([[2, 0], [0, 1], [1, 2]])
+
+         # We assume that there is exactly one forward and one inverse crossing
+         # per selected face. Test this assumption.
+         # NOTE This will fail if an edge is exactly in-plane
+         assert all(all((crossings == v).sum(1) == 1) for v in [-1, 0, 1])
+
+         # Compute the actual intersection points for forward and backward crossing edges.
+         fwd_columns = np.where(crossings == 1)[1]
+         bwd_columns = np.where(crossings == -1)[1]
+         fwd_vertices = np.array(
+             [
+                 faces[np.arange(len(faces)), face_columns[fwd_columns][:, 0]],
+                 faces[np.arange(len(faces)), face_columns[fwd_columns][:, 1]],
+             ]
+         ).T
+         bwd_vertices = np.array(
+             [
+                 faces[np.arange(len(faces)), face_columns[bwd_columns][:, 1]],
+                 faces[np.arange(len(faces)), face_columns[bwd_columns][:, 0]],
+             ]
+         ).T
+         fwd_intersections = self.intersect_line_segments(
+             mesh["verts"][fwd_vertices[:, 0]], mesh["verts"][fwd_vertices[:, 1]]
+         )
+         bwd_intersections = self.intersect_line_segments(
+             mesh["verts"][bwd_vertices[:, 0]], mesh["verts"][bwd_vertices[:, 1]]
+         )
+
+         # By construction, the fwd and backward intersections
+         # should include the exact same set of points. Verify this now.
+         sortrows = lambda A: A[np.lexsort(A.T[::-1]), :]
+         err = (sortrows(fwd_intersections) - sortrows(bwd_intersections)).sum()
+         assert err == 0
+
+         # Due to the above property, we can construct closed contours in the
+         # intersection plane by following the interleaved fwd/bwd roles of intersection
+         # points.
+         face_indices = list(range(fwd_intersections.shape[0]))
+         result = []
+         points = []
+         labels = []
+         face_id = 0 # index of the mesh face to consider
+         while len(face_indices) > 0:
+
+             # continue the contour with the next forward edge intersection
+             p = fwd_intersections[face_id]
+             points.append(p)
+             # Remember the ids of the face and start-/end vertices for the point
+             labels.append((face_id, fwd_vertices[face_id, 0], fwd_vertices[face_id, 1]))
+             face_indices.remove(face_id)
+             neighbours = np.where(np.all(np.isin(bwd_intersections, p), axis=1))[0]
+             assert len(neighbours) > 0
+             face_id = neighbours[0]
+             if face_id in face_indices:
+                 # more points available in the contour
+                 continue
+
+             # finish the current contour.
+             result.append(
+                 contour.Contour(np.array(points), labels=labels, space=self.space)
+             )
+             if len(face_indices) > 0:
+                 # prepare to process another contour segment
+                 face_id = face_indices[0]
+                 points = []
+                 labels = []
+
+         return result
+
+     def project_points(self, points: pointcloud.PointCloud):
+         """projects the given points onto the plane."""
+         assert self.space == points.space
+         XYZ = points.coordinates
+         N = XYZ.shape[0]
+         dists = np.dot(self._n, XYZ.T) - self._d
+         return pointcloud.PointCloud(
+             XYZ - np.tile(self._n, (N, 1)) * dists[:, np.newaxis], space=self.space
+         )
+
+     def get_enclosing_patch(self, points: pointcloud.PointCloud, margin=[0.5, 0.5]):
+         """
+         Computes the enclosing patch in the given plane
+         which contains the projections of the given points.
+         The orientation of the patch follows the principal axis.
+         """
+         projections = self.project_points(points)
+
+         # compute PCA of point projections to obtain two orthogonal
+         # in-plane spanning vectors
+         XYZ = np.copy(projections.coordinates)
+         mu = XYZ.mean(0)
+         XYZ -= mu
+         cov = np.dot(XYZ.T, XYZ)
+         eigvals_, eigvecs_ = np.linalg.eigh(cov)
+         eigvecs = eigvecs_[:, ::-1].T
+         v1, v2 = [-eigvecs[_] for _ in np.argsort(eigvals_)[:2]]
+
+         # get extremal points along first spanning vector
+         order = np.argsort(np.dot(projections.coordinates, v1))
+         p0 = projections[order[0]].homogeneous[0, :3]
+         p1 = projections[order[-1]].homogeneous[0, :3]
+
+         # get extremal points along second spanning vector
+         order = np.argsort(np.dot(projections.coordinates, v2))
+         p2 = projections[order[0]].homogeneous[0, :3]
+         p3 = projections[order[-1]].homogeneous[0, :3]
+
+         m0, m1 = margin
+         w = np.linalg.norm(p3 - p2)
+         corners = pointcloud.PointCloud(
+             [
+                 p1 + (w / 2 + m1) * v2 + m0 * v1,
+                 p0 + (w / 2 + m1) * v2 - m0 * v1,
+                 p0 - (w / 2 + m1) * v2 - m0 * v1,
+                 p1 - (w / 2 + m1) * v2 + m0 * v1,
+             ],
+             space=self.space,
+         )
+         err = (self.project_points(corners).coordinates - corners.coordinates).sum()
+         if err > 1e-5:
+             print(f"WARNING: patch coordinates were not exactly in-plane (error={err}).")
+         return patch.Patch(self.project_points(corners))
+
+     @classmethod
+     def from_image(cls, image: volume.Volume):
+         """
+         Derive an image plane by assuming the volume to be a 2D image.
+         The smallest dimension in voxel space is considered flat.
+         The plane is defined in the physical space of the volume.
+         """
+         assert isinstance(image, volume.Volume)
+         im_lowres = image.fetch(resolution_mm=1)
+         plane_dims = np.where(np.argsort(im_lowres.shape) < 2)[0]
+         voxels = pointcloud.PointCloud(
+             np.vstack(([0, 0, 0], np.identity(3)[plane_dims])), space=None
+         )
+         points = voxels.transform(im_lowres.affine, space=image.space)
+         return cls(points[0], points[1], points[2])
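
To make the geometry in the new `Plane3D` class easier to follow, here is a minimal numpy-only sketch of the same math (normal from a cross product, signed-distance sidedness test, line-segment intersection). It uses plain coordinate arrays instead of siibra's `Point`/`PointCloud` objects and is an illustration, not code from the package:

```python
import numpy as np

# three non-collinear points spanning the z=0 plane (plain mm coordinates)
p1, p2, p3 = np.array([0.0, 0.0, 0.0]), np.array([1.0, 0.0, 0.0]), np.array([0.0, 1.0, 0.0])

# plane normal and distance from origin, as in Plane3D.__init__
n = np.cross(p2 - p1, p3 - p1)
n = n / np.linalg.norm(n)
d = np.dot(p1, n)

# halfspace labels for query coordinates, as in Plane3D.sidedness
coords = np.array([[0.0, 0.0, 1.0], [0.0, 0.0, -1.0], [2.0, 3.0, 0.0]])
print((np.dot(coords, n) >= d).astype("int"))  # -> [1 0 1]

# intersection of one segment with the plane, as in Plane3D.intersect_line_segments
start, end = np.array([0.5, 0.5, -1.0]), np.array([0.5, 0.5, 2.0])
direction = (end - start) / np.linalg.norm(end - start)
lam = (d - np.dot(start, n)) / np.dot(direction, n)
print(start + lam * direction)  # -> [0.5 0.5 0.], the crossing point in the z=0 plane
```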
siibra/explorer/__init__.py CHANGED
@@ -1 +1,17 @@
+ # Copyright 2018-2024
+ # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
+
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+
+ # http://www.apache.org/licenses/LICENSE-2.0
+
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ """Interface to create links to :ref:`siibra-explorer<https://atlases.ebrains.eu/viewer/>`"""
+
  from .url import encode_url, decode_url
siibra/explorer/url.py CHANGED
@@ -1,3 +1,18 @@
+ # Copyright 2018-2024
+ # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
+
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+
+ # http://www.apache.org/licenses/LICENSE-2.0
+
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
  from typing import Optional, TYPE_CHECKING
  from urllib.parse import quote_plus
  from numpy import int32
@@ -16,117 +31,161 @@ if TYPE_CHECKING:
      from siibra.core.region import Region
      from siibra.features.feature import Feature

- class DecodeNavigationException(Exception): pass

- min_int32=-2_147_483_648
- max_int32=2_147_483_647
+ class DecodeNavigationException(Exception):
+     pass


- default_root_url='https://atlases.ebrains.eu/viewer/'
+ min_int32 = -2_147_483_648
+ max_int32 = 2_147_483_647
+
+
+ default_root_url = "https://atlases.ebrains.eu/viewer/"
+

  def sanitize_id(id: str):
-     return id.replace('/', ':')
+     return id.replace("/", ":")
+

- def get_perspective_zoom(atlas: "Atlas", space: "Space", parc: "Parcellation", region: Optional["Region"]):
+ def get_perspective_zoom(
+     atlas: "Atlas", space: "Space", parc: "Parcellation", region: Optional["Region"]
+ ):
      import siibra
-     if atlas is siibra.atlases['rat'] or atlas is siibra.atlases['mouse']:
+
+     if atlas is siibra.atlases["rat"] or atlas is siibra.atlases["mouse"]:
          return 200000
      return 2000000

- def get_zoom(atlas: "Atlas", space: "Space", parc: "Parcellation", region: Optional["Region"]):
+
+ def get_zoom(
+     atlas: "Atlas", space: "Space", parc: "Parcellation", region: Optional["Region"]
+ ):
      import siibra
-     if atlas is siibra.atlases['rat'] or atlas is siibra.atlases['mouse']:
+
+     if atlas is siibra.atlases["rat"] or atlas is siibra.atlases["mouse"]:
          return 35000
      return 350000

- supported_prefix = (
-     "nifti://",
-     "swc://",
-     "precomputed://",
-     "deepzoom://"
- )
+
+ supported_prefix = ("nifti://", "swc://", "precomputed://", "deepzoom://")
+

  def append_query_params(url: str, *args, query_params={}, **kwargs):
-     query_str = "&".join([f"{key}={quote_plus(value)}" for key, value in query_params.items()])
+     query_str = "&".join(
+         [f"{key}={quote_plus(value)}" for key, value in query_params.items()]
+     )
      if len(query_str) > 0:
          query_str = "?" + query_str
      return url + query_str

+
  @post_process(append_query_params)
- def encode_url(atlas: "Atlas", space: "Space", parc: "Parcellation", region: Optional["Region"]=None, *, root_url=default_root_url, external_url:str=None, feature: "Feature"=None, ignore_warning=False, query_params={}):
-
+ def encode_url(
+     atlas: "Atlas",
+     space: "Space",
+     parc: "Parcellation",
+     region: Optional["Region"] = None,
+     *,
+     root_url=default_root_url,
+     external_url: str = None,
+     location: "Point" = None,
+     feature: "Feature" = None,
+     ignore_warning=False,
+     query_params={},
+ ):
+     from siibra.locations import Point
      overlay_url = None
+     encoded_position = None
+     if location:
+         assert isinstance(location, Point), "currently, location only supports Point"
+         encoded_position = ".".join([encode_number(int(p * 1e6)) for p in location])
      if external_url:
-         assert any([external_url.startswith(prefix) for prefix in supported_prefix]), f"url needs to start with {(' , '.join(supported_prefix))}"
-         overlay_url = '/x-overlay-layer:{url}'.format(
+         assert any(
+             [external_url.startswith(prefix) for prefix in supported_prefix]
+         ), f"url needs to start with {(' , '.join(supported_prefix))}"
+         overlay_url = "/x-overlay-layer:{url}".format(
              url=external_url.replace("/", "%2F")
          )

      zoom = get_zoom(atlas, space, parc, region)
      pzoom = get_perspective_zoom(atlas, space, parc, region)
-
+
      zoom_kwargs = {
          "encoded_pzoom": encode_number(pzoom, False),
-         "encoded_zoom": encode_number(zoom, False)
+         "encoded_zoom": encode_number(zoom, False),
      }
-     nav_string='/@:0.0.0.-W000.._eCwg.2-FUe3._-s_W.2_evlu..{encoded_pzoom}..{encoded_nav}..{encoded_zoom}'
-
-     return_url='{root_url}#/a:{atlas_id}/t:{template_id}/p:{parc_id}{overlay_url}'.format(
-         root_url = root_url,
-         atlas_id = sanitize_id(atlas.id),
-         template_id = sanitize_id(space.id),
-         parc_id = sanitize_id(parc.id),
-         overlay_url = overlay_url if overlay_url else "",
+     nav_string = "/@:0.0.0.-W000.._eCwg.2-FUe3._-s_W.2_evlu..{encoded_pzoom}..{encoded_nav}..{encoded_zoom}"
+
+     return_url = (
+         "{root_url}#/a:{atlas_id}/t:{template_id}/p:{parc_id}{overlay_url}".format(
+             root_url=root_url,
+             atlas_id=sanitize_id(atlas.id),
+             template_id=sanitize_id(space.id),
+             parc_id=sanitize_id(parc.id),
+             overlay_url=overlay_url if overlay_url else "",
+         )
      )

      if feature is not None:
          return_url = return_url + f"/f:{sanitize_id(feature.id)}"

      if region is None:
-         return return_url + nav_string.format(encoded_nav='0.0.0', **zoom_kwargs)
-
-     return_url=f'{return_url}/rn:{get_hash(region.name)}'
+         return return_url + nav_string.format(encoded_nav=encoded_position or "0.0.0", **zoom_kwargs)
+
+     return_url = f"{return_url}/rn:{get_hash(region.name)}"

      try:
-         result_props=region.spatial_props(space, maptype='labelled')
+         result_props = region.spatial_props(space, maptype="labelled")
          if len(result_props.components) == 0:
-             return return_url + nav_string.format(encoded_nav='0.0.0', **zoom_kwargs)
+             return return_url + nav_string.format(encoded_nav=encoded_position or "0.0.0", **zoom_kwargs)
      except Exception as e:
-         print(f'Cannot get_spatial_props {str(e)}')
+         print(f"Cannot get_spatial_props {str(e)}")
          if not ignore_warning:
              raise e
-         return return_url + nav_string.format(encoded_nav='0.0.0', **zoom_kwargs)
+         return return_url + nav_string.format(encoded_nav=encoded_position or "0.0.0", **zoom_kwargs)

-     centroid=result_props.components[0].centroid
+     centroid = result_props.components[0].centroid

-     encoded_centroid=separator.join([ encode_number(math.floor(val * 1e6)) for val in centroid ])
-     return_url=return_url + nav_string.format(encoded_nav=encoded_centroid, **zoom_kwargs)
+     encoded_centroid = separator.join(
+         [encode_number(math.floor(val * 1e6)) for val in centroid]
+     )
+     return_url = return_url + nav_string.format(
+         encoded_nav=encoded_position or encoded_centroid, **zoom_kwargs
+     )
      return return_url

+
  @dataclass
  class DecodedUrl:
      bounding_box: "BoundingBox"

+
  def decode_url(url: str, vp_length=1000):
      import siibra
+
      try:
-         space_match = re.search(r'/t:(?P<space_id>[^/]+)', url)
+         space_match = re.search(r"/t:(?P<space_id>[^/]+)", url)
          space_id = space_match.group("space_id")
          space_id = space_id.replace(":", "/")
          space = siibra.spaces[space_id]
      except Exception as e:
          raise DecodeNavigationException from e

-     nav_match = re.search(r'/@:(?P<navigation_str>.+)/?', url)
+     nav_match = re.search(r"/@:(?P<navigation_str>.+)/?", url)
      navigation_str = nav_match.group("navigation_str")
      for char in navigation_str:
-         assert char in cipher or char in [neg, separator], f"char {char} not in cipher, nor separator/neg"
-
+         assert char in cipher or char in [
+             neg,
+             separator,
+         ], f"char {char} not in cipher, nor separator/neg"
+
      try:
-         ori_enc, pers_ori_enc, pers_zoom_enc, pos_enc, zoomm_enc = navigation_str.split(f"{separator}{separator}")
+         ori_enc, pers_ori_enc, pers_zoom_enc, pos_enc, zoomm_enc = navigation_str.split(
+             f"{separator}{separator}"
+         )
      except Exception as e:
          raise DecodeNavigationException from e
-
+
      try:
          x_enc, y_enc, z_enc = pos_enc.split(separator)
          pos = [decode_number(val) for val in [x_enc, y_enc, z_enc]]
@@ -135,7 +194,7 @@ def decode_url(url: str, vp_length=1000):
      # zoom = nm/pixel
      pt1 = [(coord - (zoom * vp_length / 2)) / 1e6 for coord in pos]
      pt1 = Point(pt1, space)
-
+
      pt2 = [(coord + (zoom * vp_length / 2)) / 1e6 for coord in pos]
      pt2 = Point(pt2, space)

@@ -144,17 +203,18 @@
      bbx = BoundingBox(pt1, pt2, space)
      return DecodedUrl(bounding_box=bbx)

-
+
+
  def get_hash(full_string: str):
-     return_val=0
+     return_val = 0
      with np.errstate(over="ignore"):
          for char in full_string:
              # overflowing is expected and in fact the whole reason why convert number to int32
-
+
              # in windows, int32((0 - min_int32) << 5), rather than overflow to wrap around, raises OverflowError
              shifted_5 = int32(
-                 (return_val - min_int32) if return_val > max_int32 else return_val
-                 << 5)
+                 (return_val - min_int32) if return_val > max_int32 else return_val << 5
+             )

              return_val = int32(shifted_5 - return_val + ord(char))
              return_val = return_val & return_val
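
For context, a hypothetical usage sketch of the reworked `encode_url` (including the new `location` keyword introduced in this release). The registry keys and region name below are placeholders rather than values taken from this diff, and resolving them requires a configured siibra installation:

```python
import siibra
from siibra.explorer.url import encode_url
from siibra.locations import Point

# placeholder registry keys -- any valid atlas/space/parcellation combination works
atlas = siibra.atlases["human"]
space = siibra.spaces["mni152"]
parc = siibra.parcellations["julich"]
region = parc.get_region("v1")

# region-anchored link: navigation is centred on the region centroid
url = encode_url(atlas, space, parc, region=region)

# location-anchored link using the new `location` keyword (a Point in the given space)
pt = Point((27.75, -32.0, 63.725), space)
url_at_point = encode_url(atlas, space, parc, location=pt)
```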
siibra/explorer/util.py CHANGED
@@ -1,31 +1,49 @@
+ # Copyright 2018-2024
+ # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
+
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+
+ # http://www.apache.org/licenses/LICENSE-2.0
+
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
  import math
  import struct
  from functools import wraps
  from typing import Callable

- cipher = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz_-'
- separator = '.'
- neg = '~'
+ cipher = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz_-"
+ separator = "."
+ neg = "~"
+
+
  def encode_number(n, float_flag=False):
      if float_flag:
-         b=struct.pack('f', n)
-         new_n=struct.unpack('i',b)
+         b = struct.pack("f", n)
+         new_n = struct.unpack("i", b)
          return encode_int(new_n[0])
      else:
          return encode_int(n)

+
  def encode_int(n):
      if not isinstance(n, int):
-         raise ValueError('Cannot encode int')
+         raise ValueError("Cannot encode int")

-     residual=None
-     result=''
+     residual = None
+     result = ""
      if n < 0:
          result += neg
          residual = n * -1
      else:
          residual = n
-
+
      while True:
          result = cipher[residual % 64] + result
          residual = math.floor(residual / 64)
@@ -34,6 +52,7 @@ def encode_int(n):
              break
      return result

+
  def decode_int(n):
      neg_flag = False
      if n[-1] == neg:
@@ -49,6 +68,7 @@ def decode_int(n):
          result = result * -1
      return result

+
  def decode_number(n, float_flag=False):
      if float_flag:
          raise NotImplementedError
@@ -61,5 +81,7 @@ def post_process(post_process: Callable):
          def inner(*args, **kwargs):
              val = fn(*args, **kwargs)
              return post_process(val, *args, **kwargs)
+
          return inner
+
      return outer
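
To illustrate the token format produced by `encode_number`/`encode_int` above, here is a self-contained sketch of the base-64 scheme: digits are drawn from `cipher`, most significant first, and `neg` is appended for negative values. The `decode_int` below is written as the inverse for demonstration and is not copied from this diff:

```python
cipher = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz_-"
neg = "~"


def encode_int(n: int) -> str:
    # same digit scheme as siibra.explorer.util.encode_int
    result, residual = "", abs(n)
    while True:
        result = cipher[residual % 64] + result
        residual //= 64
        if residual == 0:
            break
    return result + neg if n < 0 else result


def decode_int(token: str) -> int:
    # illustrative inverse of encode_int
    negative = token.endswith(neg)
    digits = token[:-1] if negative else token
    value = 0
    for char in digits:
        value = value * 64 + cipher.index(char)
    return -value if negative else value


for n in (0, 63, 64, 2_000_000, -350_000):
    assert decode_int(encode_int(n)) == n
print(encode_int(2_000_000))  # '7eI0' -- the kind of token that appears in explorer URLs
```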