siibra 1.0a1__1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of siibra might be problematic. Click here for more details.

Files changed (84) hide show
  1. siibra/VERSION +1 -0
  2. siibra/__init__.py +164 -0
  3. siibra/commons.py +823 -0
  4. siibra/configuration/__init__.py +17 -0
  5. siibra/configuration/configuration.py +189 -0
  6. siibra/configuration/factory.py +589 -0
  7. siibra/core/__init__.py +16 -0
  8. siibra/core/assignment.py +110 -0
  9. siibra/core/atlas.py +239 -0
  10. siibra/core/concept.py +308 -0
  11. siibra/core/parcellation.py +387 -0
  12. siibra/core/region.py +1223 -0
  13. siibra/core/space.py +131 -0
  14. siibra/core/structure.py +111 -0
  15. siibra/exceptions.py +63 -0
  16. siibra/experimental/__init__.py +19 -0
  17. siibra/experimental/contour.py +61 -0
  18. siibra/experimental/cortical_profile_sampler.py +57 -0
  19. siibra/experimental/patch.py +98 -0
  20. siibra/experimental/plane3d.py +256 -0
  21. siibra/explorer/__init__.py +17 -0
  22. siibra/explorer/url.py +222 -0
  23. siibra/explorer/util.py +87 -0
  24. siibra/features/__init__.py +117 -0
  25. siibra/features/anchor.py +224 -0
  26. siibra/features/connectivity/__init__.py +33 -0
  27. siibra/features/connectivity/functional_connectivity.py +57 -0
  28. siibra/features/connectivity/regional_connectivity.py +494 -0
  29. siibra/features/connectivity/streamline_counts.py +27 -0
  30. siibra/features/connectivity/streamline_lengths.py +27 -0
  31. siibra/features/connectivity/tracing_connectivity.py +30 -0
  32. siibra/features/dataset/__init__.py +17 -0
  33. siibra/features/dataset/ebrains.py +90 -0
  34. siibra/features/feature.py +970 -0
  35. siibra/features/image/__init__.py +27 -0
  36. siibra/features/image/image.py +115 -0
  37. siibra/features/image/sections.py +26 -0
  38. siibra/features/image/volume_of_interest.py +88 -0
  39. siibra/features/tabular/__init__.py +24 -0
  40. siibra/features/tabular/bigbrain_intensity_profile.py +77 -0
  41. siibra/features/tabular/cell_density_profile.py +298 -0
  42. siibra/features/tabular/cortical_profile.py +322 -0
  43. siibra/features/tabular/gene_expression.py +257 -0
  44. siibra/features/tabular/layerwise_bigbrain_intensities.py +62 -0
  45. siibra/features/tabular/layerwise_cell_density.py +95 -0
  46. siibra/features/tabular/receptor_density_fingerprint.py +192 -0
  47. siibra/features/tabular/receptor_density_profile.py +110 -0
  48. siibra/features/tabular/regional_timeseries_activity.py +294 -0
  49. siibra/features/tabular/tabular.py +139 -0
  50. siibra/livequeries/__init__.py +19 -0
  51. siibra/livequeries/allen.py +352 -0
  52. siibra/livequeries/bigbrain.py +197 -0
  53. siibra/livequeries/ebrains.py +145 -0
  54. siibra/livequeries/query.py +49 -0
  55. siibra/locations/__init__.py +91 -0
  56. siibra/locations/boundingbox.py +454 -0
  57. siibra/locations/location.py +115 -0
  58. siibra/locations/point.py +344 -0
  59. siibra/locations/pointcloud.py +349 -0
  60. siibra/retrieval/__init__.py +27 -0
  61. siibra/retrieval/cache.py +233 -0
  62. siibra/retrieval/datasets.py +389 -0
  63. siibra/retrieval/exceptions/__init__.py +27 -0
  64. siibra/retrieval/repositories.py +769 -0
  65. siibra/retrieval/requests.py +659 -0
  66. siibra/vocabularies/__init__.py +45 -0
  67. siibra/vocabularies/gene_names.json +29176 -0
  68. siibra/vocabularies/receptor_symbols.json +210 -0
  69. siibra/vocabularies/region_aliases.json +460 -0
  70. siibra/volumes/__init__.py +23 -0
  71. siibra/volumes/parcellationmap.py +1279 -0
  72. siibra/volumes/providers/__init__.py +20 -0
  73. siibra/volumes/providers/freesurfer.py +113 -0
  74. siibra/volumes/providers/gifti.py +165 -0
  75. siibra/volumes/providers/neuroglancer.py +736 -0
  76. siibra/volumes/providers/nifti.py +266 -0
  77. siibra/volumes/providers/provider.py +107 -0
  78. siibra/volumes/sparsemap.py +468 -0
  79. siibra/volumes/volume.py +892 -0
  80. siibra-1.0.0a1.dist-info/LICENSE +201 -0
  81. siibra-1.0.0a1.dist-info/METADATA +160 -0
  82. siibra-1.0.0a1.dist-info/RECORD +84 -0
  83. siibra-1.0.0a1.dist-info/WHEEL +5 -0
  84. siibra-1.0.0a1.dist-info/top_level.txt +1 -0
@@ -0,0 +1,256 @@
1
+ # Copyright 2018-2024
2
+ # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
+
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+
16
+ from . import contour
17
+ from . import patch
18
+ from ..locations import point, pointcloud
19
+ from ..volumes import volume
20
+
21
+ import numpy as np
22
+
23
+
24
class Plane3D:
    """
    A 3D plane in reference space.
    This shall eventually be derived from siibra.Location
    """

    def __init__(self, point1: point.Point, point2: point.Point, point3: point.Point):
        """
        Create a 3D plane from 3 points.
        The plane's reference space is defined by the first point.
        """
        self.space = point1.space
        # normal is the cross product of two arbitrary in-plane vectors;
        # point2 and point3 are warped into point1's space first.
        n = np.cross(
            (point2.warp(self.space) - point1).coordinate,
            (point3.warp(self.space) - point1).coordinate,
        )
        self._n = n / np.linalg.norm(n)
        # distance from origin is the projection of any in-plane point onto the normal
        self._d = np.dot(point1.coordinate, self._n)

    @property
    def normal(self):
        """Unit normal vector of the plane."""
        return self._n

    @property
    def distance_from_origin(self):
        """Signed distance of the plane from the coordinate origin along the normal."""
        return self._d

    def sidedness(self, points: np.ndarray):
        """
        Computes labels for a set of 3D coordinates classifying them
        by the halfspaces spanned by this plane.
        Returns 1 for points on the normal's side of the plane (or in-plane), else 0.
        """
        assert points.shape[1] == 3
        return (np.dot(points, self.normal) >= self.distance_from_origin).astype("int")

    def intersect_line_segments(self, startpoints: np.ndarray, endpoints: np.ndarray):
        """
        Intersects a set of straight line segments with the plane.
        Returns the set of intersection points.
        The line segments are given by two Nx3 arrays of their start- and endpoints.
        The result is an Nx3 list of intersection coordinates; rows whose segment
        ends before reaching the plane are set to NaN.
        NOTE(review): segments pointing away from the plane yield negative lambdas
        and trip the assert below, and segments parallel to the plane divide by
        zero — confirm callers only pass plane-crossing segments.
        """
        directions = endpoints - startpoints
        lengths = np.linalg.norm(directions, axis=1)
        directions = directions / lengths[:, None]
        # lambda = distance from each startpoint to the plane along its unit direction
        lambdas = (self._d - np.dot(startpoints, self._n)) / np.dot(directions, self._n)
        assert all(lambdas >= 0)
        result = startpoints + lambdas[:, None] * directions
        # plane hit beyond the segment's endpoint -> no intersection within the segment
        non_intersecting = lambdas > lengths
        num_failed = np.count_nonzero(non_intersecting)
        result[non_intersecting, :] = np.nan
        if num_failed > 0:
            print(
                "WARNING: line segment intersection includes NaN rows "
                f"for {num_failed} non-intersecting segments."
            )
        return result

    def intersect_mesh(self, mesh: dict):
        """
        Intersects a 3D surface mesh with the plane.
        Returns a set of split 2D contours, represented by ordered coordinate lists.
        The output contour coordinates are intersection points of mesh edges going through the plane,
        and only combined into a contour if arising from neighboring edges in the mesh.
        The mesh is passed as a dictionary with an Nx3 array "verts" of vertex coordinates,
        and an Mx3 array "faces" of face definitions.
        Each row in the face array corresponds to the three indices of vertices making up the
        triangle.
        The result is a list of contour segments, each represented as a PointCloud
        holding the ordered list of contour points.
        The point labels in each "contour" PointCloud hold the index of the face in the
        mesh which made up each contour point.
        """

        # select faces whose vertices are in different halfspaces relative to the y plane
        vertex_in_halfspace = self.sidedness(mesh["verts"])
        face_vertex_in_halfspace = vertex_in_halfspace[mesh["faces"]]
        face_indices = np.where(
            face_vertex_in_halfspace.min(1) != face_vertex_in_halfspace.max(1)
        )[0]
        faces = mesh["faces"][face_indices]

        # for each of N selected faces, indicate whether we cross the plane
        # as we go from vertex 2->0, 0->1, 1->2, respectively.
        # This gives us an Nx3 array, where forward crossings are identified by 1,
        # and backward crossings by -1.
        # Each column of the crossings is linked to two columns of the faces array.
        crossings = np.diff(
            face_vertex_in_halfspace[face_indices][:, [2, 0, 1, 2]], axis=1
        )
        face_columns = np.array([[2, 0], [0, 1], [1, 2]])

        # We assume that there is exactly one forward and one inverse crossing
        # per selected face. Test this assumption.
        # NOTE This will fail if an edge is exactly in-plane
        assert all(all((crossings == v).sum(1) == 1) for v in [-1, 0, 1])

        # Compute the actual intersection points for forward and backward crossing edges.
        fwd_columns = np.where(crossings == 1)[1]
        bwd_columns = np.where(crossings == -1)[1]
        fwd_vertices = np.array(
            [
                faces[np.arange(len(faces)), face_columns[fwd_columns][:, 0]],
                faces[np.arange(len(faces)), face_columns[fwd_columns][:, 1]],
            ]
        ).T
        bwd_vertices = np.array(
            [
                faces[np.arange(len(faces)), face_columns[bwd_columns][:, 1]],
                faces[np.arange(len(faces)), face_columns[bwd_columns][:, 0]],
            ]
        ).T
        fwd_intersections = self.intersect_line_segments(
            mesh["verts"][fwd_vertices[:, 0]], mesh["verts"][fwd_vertices[:, 1]]
        )
        bwd_intersections = self.intersect_line_segments(
            mesh["verts"][bwd_vertices[:, 0]], mesh["verts"][bwd_vertices[:, 1]]
        )

        # By construction, the fwd and backward intersections
        # should include the exact same set of points. Verify this now.
        sortrows = lambda A: A[np.lexsort(A.T[::-1]), :]
        err = (sortrows(fwd_intersections) - sortrows(bwd_intersections)).sum()
        assert err == 0

        # Due to the above property, we can construct closed contours in the
        # intersection plane by following the interleaved fwd/bwd roles of intersection
        # points.
        # NOTE(review): the name face_indices is reused here with a different
        # meaning (indices into the intersection arrays, not into mesh["faces"]).
        face_indices = list(range(fwd_intersections.shape[0]))
        result = []
        points = []
        labels = []
        face_id = 0  # index of the mesh face to consider
        while len(face_indices) > 0:

            # continue the contour with the next forward edge intersection
            p = fwd_intersections[face_id]
            points.append(p)
            # Remember the ids of the face and start-/end vertices for the point
            labels.append((face_id, fwd_vertices[face_id, 0], fwd_vertices[face_id, 1]))
            face_indices.remove(face_id)
            # the next contour point is the face whose backward intersection equals p
            neighbours = np.where(np.all(np.isin(bwd_intersections, p), axis=1))[0]
            assert len(neighbours) > 0
            face_id = neighbours[0]
            if face_id in face_indices:
                # more points available in the contour
                continue

            # finish the current contour.
            result.append(
                contour.Contour(np.array(points), labels=labels, space=self.space)
            )
            if len(face_indices) > 0:
                # prepare to process another contour segment
                face_id = face_indices[0]
                points = []
                labels = []

        return result

    def project_points(self, points: pointcloud.PointCloud):
        """projects the given points onto the plane."""
        assert self.space == points.space
        XYZ = points.coordinates
        N = XYZ.shape[0]
        # signed distance of each point from the plane, along the normal
        dists = np.dot(self._n, XYZ.T) - self._d
        return pointcloud.PointCloud(
            XYZ - np.tile(self._n, (N, 1)) * dists[:, np.newaxis], space=self.space
        )

    def get_enclosing_patch(self, points: pointcloud.PointCloud, margin=[0.5, 0.5]):
        """
        Computes the enclosing patch in the given plane
        which contains the projections of the given points.
        The orientation of the patch follows the principal axis.
        """
        projections = self.project_points(points)

        # compute PCA of point projections to obtain two orthogonal
        # in-plane spanning vectors
        XYZ = np.copy(projections.coordinates)
        mu = XYZ.mean(0)
        XYZ -= mu
        cov = np.dot(XYZ.T, XYZ)
        eigvals_, eigvecs_ = np.linalg.eigh(cov)
        # eigh returns ascending eigenvalues; reversing the columns puts the
        # principal (largest-variance) directions first.
        eigvecs = eigvecs_[:, ::-1].T
        v1, v2 = [-eigvecs[_] for _ in np.argsort(eigvals_)[:2]]

        # get extremal points along first spanning vector
        order = np.argsort(np.dot(projections.coordinates, v1))
        p0 = projections[order[0]].homogeneous[0, :3]
        p1 = projections[order[-1]].homogeneous[0, :3]

        # get extremal points along second spanning vector
        order = np.argsort(np.dot(projections.coordinates, v2))
        p2 = projections[order[0]].homogeneous[0, :3]
        p3 = projections[order[-1]].homogeneous[0, :3]

        # build the four patch corners from the extremal points, expanded
        # by the given margins along the two spanning directions
        m0, m1 = margin
        w = np.linalg.norm(p3 - p2)
        corners = pointcloud.PointCloud(
            [
                p1 + (w / 2 + m1) * v2 + m0 * v1,
                p0 + (w / 2 + m1) * v2 - m0 * v1,
                p0 - (w / 2 + m1) * v2 - m0 * v1,
                p1 - (w / 2 + m1) * v2 + m0 * v1,
            ],
            space=self.space,
        )
        err = (self.project_points(corners).coordinates - corners.coordinates).sum()
        if err > 1e-5:
            print(f"WARNING: patch coordinates were not exactly in-plane (error={err}).")
        return patch.Patch(self.project_points(corners))

    @classmethod
    def from_image(cls, image: volume.Volume):
        """
        Derive an image plane by assuming the volume to be a 2D image.
        The smallest dimension in voxel space is considered flat.
        The plane is defined in the physical space of the volume.
        """
        assert isinstance(image, volume.Volume)
        im_lowres = image.fetch(resolution_mm=1)
        # NOTE(review): np.argsort(shape) yields dimension *indices* sorted by
        # size, not ranks; comparing those values to 2 does not generally select
        # the two largest dimensions (e.g. shape (1, H, W) selects dims 0 and 1).
        # Verify this against the intended in-plane axis selection.
        plane_dims = np.where(np.argsort(im_lowres.shape) < 2)[0]
        voxels = pointcloud.PointCloud(
            np.vstack(([0, 0, 0], np.identity(3)[plane_dims])), space=None
        )
        # map the voxel-space unit vectors into physical space via the affine
        points = voxels.transform(im_lowres.affine, space=image.space)
        return cls(points[0], points[1], points[2])
@@ -0,0 +1,17 @@
1
+ # Copyright 2018-2024
2
+ # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
+
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ """Interface to create links to `siibra-explorer <https://atlases.ebrains.eu/viewer/>`_."""
16
+
17
+ from .url import encode_url, decode_url
siibra/explorer/url.py ADDED
@@ -0,0 +1,222 @@
1
+ # Copyright 2018-2024
2
+ # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
+
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+
16
+ from typing import Optional, TYPE_CHECKING
17
+ from urllib.parse import quote_plus
18
+ from numpy import int32
19
+ import numpy as np
20
+ import re
21
+ from dataclasses import dataclass
22
+ import math
23
+
24
+ from .util import encode_number, separator, cipher, neg, decode_number, post_process
25
+
26
+ if TYPE_CHECKING:
27
+ from siibra.core.atlas import Atlas
28
+ from siibra.core.space import Space
29
+ from siibra.locations import BoundingBox, Point
30
+ from siibra.core.parcellation import Parcellation
31
+ from siibra.core.region import Region
32
+ from siibra.features.feature import Feature
33
+
34
+
35
class DecodeNavigationException(Exception):
    """Raised when the navigation state of a siibra-explorer URL cannot be parsed."""
    pass


# int32 value range; used by get_hash to emulate 32-bit signed overflow behaviour
min_int32 = -2_147_483_648
max_int32 = 2_147_483_647


# base URL of the production siibra-explorer instance
default_root_url = "https://atlases.ebrains.eu/viewer/"
44
+
45
+
46
def sanitize_id(id: str):
    """Replace slashes with colons so the id can be embedded in a viewer URL path."""
    return ":".join(id.split("/"))
48
+
49
+
50
def get_perspective_zoom(
    atlas: "Atlas", space: "Space", parc: "Parcellation", region: Optional["Region"]
):
    """Return the perspective-view zoom for the given atlas (rodent atlases need a much closer view)."""
    import siibra

    is_rodent = atlas is siibra.atlases["rat"] or atlas is siibra.atlases["mouse"]
    return 200000 if is_rodent else 2000000
58
+
59
+
60
def get_zoom(
    atlas: "Atlas", space: "Space", parc: "Parcellation", region: Optional["Region"]
):
    """Return the slice-view zoom for the given atlas (rodent atlases need a much closer view)."""
    import siibra

    is_rodent = atlas is siibra.atlases["rat"] or atlas is siibra.atlases["mouse"]
    return 35000 if is_rodent else 350000


# URL schemes accepted for external overlay volumes
supported_prefix = ("nifti://", "swc://", "precomputed://", "deepzoom://")
71
+
72
+
73
def append_query_params(url: str, *args, query_params=None, **kwargs):
    """
    Append ``query_params`` to ``url`` as a URL-encoded query string.

    Used as the post-processing hook of ``encode_url`` (via ``@post_process``),
    which forwards the wrapped call's full argument list — hence the otherwise
    unused ``*args``/``**kwargs``.

    Parameters
    ----------
    url: str
        The URL to extend.
    query_params: dict, optional
        Mapping of query keys to values; values are escaped with ``quote_plus``.
        Fix: the original used a mutable default argument (``query_params={}``),
        which is shared across calls; ``None`` with a fallback is safe.

    Returns
    -------
    str
        ``url`` unchanged when there are no parameters, else ``url + "?k=v&..."``.
    """
    if query_params is None:
        query_params = {}
    query_str = "&".join(
        f"{key}={quote_plus(value)}" for key, value in query_params.items()
    )
    if len(query_str) > 0:
        query_str = "?" + query_str
    return url + query_str
80
+
81
+
82
@post_process(append_query_params)
def encode_url(
    atlas: "Atlas",
    space: "Space",
    parc: "Parcellation",
    region: Optional["Region"] = None,
    *,
    root_url=default_root_url,
    external_url: str = None,
    location: "Point" = None,
    feature: "Feature" = None,
    ignore_warning=False,
    query_params={},
):
    """
    Build a siibra-explorer URL for the given atlas/space/parcellation selection.

    Optionally navigates to a region's centroid or an explicit location, adds an
    external overlay volume, selects a feature, and appends extra query params
    (handled by the ``append_query_params`` post-processing hook).

    Parameters
    ----------
    atlas, space, parc:
        The atlas, reference space and parcellation to select in the viewer.
    region: Region, optional
        If given, the URL highlights the region and navigates to its centroid.
    root_url: str
        Base viewer URL; defaults to the production siibra-explorer.
    external_url: str, optional
        Overlay volume URL; must start with one of ``supported_prefix``.
    location: Point, optional
        Explicit navigation target; takes precedence over the region centroid.
    feature: Feature, optional
        If given, the feature is selected in the viewer.
    ignore_warning: bool
        If True, failures to compute the region's spatial props are tolerated
        and the URL is returned without centroid navigation.

    Raises
    ------
    Exception
        Re-raises errors from ``region.spatial_props`` unless ``ignore_warning``.
    """
    from siibra.locations import Point
    overlay_url = None
    encoded_position = None
    if location:
        assert isinstance(location, Point), "currently, location only supports Point"
        # viewer navigation coordinates are in nanometers, hence the 1e6 factor
        encoded_position = ".".join([encode_number(int(p * 1e6)) for p in location])
    if external_url:
        assert any(
            [external_url.startswith(prefix) for prefix in supported_prefix]
        ), f"url needs to start with {(' , '.join(supported_prefix))}"
        overlay_url = "/x-overlay-layer:{url}".format(
            url=external_url.replace("/", "%2F")
        )

    zoom = get_zoom(atlas, space, parc, region)
    pzoom = get_perspective_zoom(atlas, space, parc, region)

    zoom_kwargs = {
        "encoded_pzoom": encode_number(pzoom, False),
        "encoded_zoom": encode_number(zoom, False),
    }
    # fixed orientation/perspective prefix; only position and zooms vary
    nav_string = "/@:0.0.0.-W000.._eCwg.2-FUe3._-s_W.2_evlu..{encoded_pzoom}..{encoded_nav}..{encoded_zoom}"

    return_url = (
        "{root_url}#/a:{atlas_id}/t:{template_id}/p:{parc_id}{overlay_url}".format(
            root_url=root_url,
            atlas_id=sanitize_id(atlas.id),
            template_id=sanitize_id(space.id),
            parc_id=sanitize_id(parc.id),
            overlay_url=overlay_url if overlay_url else "",
        )
    )

    if feature is not None:
        return_url = return_url + f"/f:{sanitize_id(feature.id)}"

    if region is None:
        return return_url + nav_string.format(encoded_nav=encoded_position or "0.0.0", **zoom_kwargs)

    # region selection is encoded as a hash of the region name
    return_url = f"{return_url}/rn:{get_hash(region.name)}"

    try:
        result_props = region.spatial_props(space, maptype="labelled")
        if len(result_props.components) == 0:
            return return_url + nav_string.format(encoded_nav=encoded_position or "0.0.0", **zoom_kwargs)
    except Exception as e:
        print(f"Cannot get_spatial_props {str(e)}")
        if not ignore_warning:
            raise e
        return return_url + nav_string.format(encoded_nav=encoded_position or "0.0.0", **zoom_kwargs)

    # navigate to the centroid of the region's largest component (index 0)
    centroid = result_props.components[0].centroid

    encoded_centroid = separator.join(
        [encode_number(math.floor(val * 1e6)) for val in centroid]
    )
    return_url = return_url + nav_string.format(
        encoded_nav=encoded_position or encoded_centroid, **zoom_kwargs
    )
    return return_url
156
+
157
+
158
@dataclass
class DecodedUrl:
    """Parsed navigation state of a siibra-explorer URL (see ``decode_url``)."""
    bounding_box: "BoundingBox"  # viewport approximated as a bounding box
161
+
162
+
163
def decode_url(url: str, vp_length=1000):
    """
    Decode the navigation state of a siibra-explorer URL.

    Approximates the viewport as a bounding box centered on the encoded
    position, assuming a viewport of ``vp_length`` pixels.

    Parameters
    ----------
    url: str
        A siibra-explorer URL containing ``/t:<space>`` and ``/@:<navigation>``.
    vp_length: int
        Assumed viewport edge length in pixels.

    Returns
    -------
    DecodedUrl

    Raises
    ------
    DecodeNavigationException
        If the space or navigation state cannot be parsed.
    """
    import siibra
    # Fix: Point and BoundingBox were only imported under TYPE_CHECKING at
    # module level, so using them here raised NameError at runtime.
    from siibra.locations import BoundingBox, Point

    try:
        space_match = re.search(r"/t:(?P<space_id>[^/]+)", url)
        space_id = space_match.group("space_id")
        space_id = space_id.replace(":", "/")
        space = siibra.spaces[space_id]
    except Exception as e:
        raise DecodeNavigationException from e

    nav_match = re.search(r"/@:(?P<navigation_str>.+)/?", url)
    if nav_match is None:
        # Fix: previously an unguarded .group() raised AttributeError here
        raise DecodeNavigationException("URL contains no navigation string (/@:...)")
    navigation_str = nav_match.group("navigation_str")
    for char in navigation_str:
        assert char in cipher or char in [
            neg,
            separator,
        ], f"char {char} not in cipher, nor separator/neg"

    try:
        # navigation layout: orientation .. perspective orientation ..
        # perspective zoom .. position .. zoom, separated by double separators
        ori_enc, pers_ori_enc, pers_zoom_enc, pos_enc, zoomm_enc = navigation_str.split(
            f"{separator}{separator}"
        )
    except Exception as e:
        raise DecodeNavigationException from e

    try:
        x_enc, y_enc, z_enc = pos_enc.split(separator)
        pos = [decode_number(val) for val in [x_enc, y_enc, z_enc]]
        zoom = decode_number(zoomm_enc)

        # zoom = nm/pixel; positions are in nm, Points in mm (hence / 1e6)
        pt1 = [(coord - (zoom * vp_length / 2)) / 1e6 for coord in pos]
        pt1 = Point(pt1, space)

        pt2 = [(coord + (zoom * vp_length / 2)) / 1e6 for coord in pos]
        pt2 = Point(pt2, space)

    except Exception as e:
        raise DecodeNavigationException from e

    bbx = BoundingBox(pt1, pt2, space)
    return DecodedUrl(bounding_box=bbx)
206
+
207
+
208
def get_hash(full_string: str):
    """
    Compute a short hex hash of a string.
    Presumably mirrors the JavaScript string hash used by siibra-explorer for
    region names (``hash = (hash << 5) - hash + char`` with int32 overflow) —
    verify against siibra-explorer before changing.
    """
    return_val = 0
    with np.errstate(over="ignore"):
        for char in full_string:
            # overflowing is expected and in fact the whole reason why convert number to int32

            # in windows, int32((0 - min_int32) << 5), rather than overflow to wrap around, raises OverflowError
            shifted_5 = int32(
                (return_val - min_int32) if return_val > max_int32 else return_val << 5
            )

            return_val = int32(shifted_5 - return_val + ord(char))
    # no-op kept from the JS idiom ``hash & hash`` (forces int32 in JS)
    return_val = return_val & return_val
    hex_val = hex(return_val)
    # NOTE(review): [3:] strips "-0x" for negative values; for non-negative
    # values it also drops the first hex digit — confirm this matches the
    # explorer's expected hash format.
    return hex_val[3:]
@@ -0,0 +1,87 @@
1
+ # Copyright 2018-2024
2
+ # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
+
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+
16
+ import math
17
+ import struct
18
+ from functools import wraps
19
+ from typing import Callable
20
+
21
+ cipher = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz_-"
22
+ separator = "."
23
+ neg = "~"
24
+
25
+
26
+ def encode_number(n, float_flag=False):
27
+ if float_flag:
28
+ b = struct.pack("f", n)
29
+ new_n = struct.unpack("i", b)
30
+ return encode_int(new_n[0])
31
+ else:
32
+ return encode_int(n)
33
+
34
+
35
+ def encode_int(n):
36
+ if not isinstance(n, int):
37
+ raise ValueError("Cannot encode int")
38
+
39
+ residual = None
40
+ result = ""
41
+ if n < 0:
42
+ result += neg
43
+ residual = n * -1
44
+ else:
45
+ residual = n
46
+
47
+ while True:
48
+ result = cipher[residual % 64] + result
49
+ residual = math.floor(residual / 64)
50
+
51
+ if residual == 0:
52
+ break
53
+ return result
54
+
55
+
56
def decode_int(n):
    """Inverse of ``encode_int``: parse base-64 digits, honouring the trailing ``neg`` marker."""
    is_negative = n[-1] == neg
    digits = n[:-1] if is_negative else n

    value = 0
    for ch in digits:
        value = value * 64 + cipher.index(ch)

    return -value if is_negative else value


def decode_number(n, float_flag=False):
    """Inverse of ``encode_number``; decoding of float-encoded values is not implemented."""
    if float_flag:
        raise NotImplementedError
    return decode_int(n)
76
+
77
+
78
def post_process(post_process: Callable):
    """
    Decorator factory: run ``post_process(result, *args, **kwargs)`` on the
    wrapped function's return value, forwarding the original call arguments.
    """
    def outer(fn):
        @wraps(fn)
        def inner(*args, **kwargs):
            raw_result = fn(*args, **kwargs)
            return post_process(raw_result, *args, **kwargs)
        return inner
    return outer
@@ -0,0 +1,117 @@
1
+ # Copyright 2018-2024
2
+ # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
+
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ """Multimodal data features types and query mechanisms."""
16
+
17
+ from typing import Union
18
+ from functools import partial
19
+
20
+ from . import (
21
+ connectivity,
22
+ tabular,
23
+ image,
24
+ dataset,
25
+ )
26
+
27
+ from ..commons import logger
28
+
29
+ from .feature import Feature
30
+ from ..retrieval import cache
31
+ from ..commons import siibra_tqdm
32
+
33
# Main query entry point: match features of a given type against an
# anatomical concept (region, parcellation, space, or location).
get = Feature._match

TYPES = Feature._get_subclasses()  # Feature types that can be used to query for features
36
+
37
+
38
def __dir__():
    """List the module's public names: feature categories plus static helpers."""
    category_names = list(Feature._CATEGORIZED.keys())
    return category_names + ["get", "TYPES", "render_ascii_tree"]
40
+
41
+
42
def __getattr__(attr: str):
    """
    Module-level attribute hook exposing feature categories
    (e.g. ``siibra.features.molecular``) as attributes of this module.

    Raises
    ------
    AttributeError
        If ``attr`` is not a known category; includes a did-you-mean hint.
    """
    if attr in Feature._CATEGORIZED:
        return Feature._CATEGORIZED[attr]
    # attr is always a str here (module __getattr__ contract), so the
    # original `isinstance(attr, str)` guard was redundant and was removed.
    import difflib
    hint = ""
    closest = difflib.get_close_matches(attr, list(__dir__()), n=3)
    if len(closest) > 0:
        hint = f"Did you mean {' or '.join(closest)}?"
    raise AttributeError(f"No such attribute: {__name__}.{attr} " + hint)
53
+
54
+
55
@cache.Warmup.register_warmup_fn()
def _warm_feature_cache_instances():
    """Preload preconfigured multimodal data features."""
    # instantiating every feature type populates the configuration-backed cache
    for ftype in TYPES.values():
        _ = ftype._get_instances()
60
+
61
+
62
@cache.Warmup.register_warmup_fn(cache.WarmupLevel.DATA, is_factory=True)
def _warm_feature_cache_data():
    """
    Factory producing one callable per preconfigured feature; each callable
    fetches the feature's data (caching it locally) and advances a progress bar.
    """
    return_callables = []
    for ftype in TYPES.values():
        instances = ftype._get_instances()

        # the instances *must* be cleared, or it will impede the garbage collection, and results in memleak
        ftype._clean_instances()
        tally = siibra_tqdm(desc=f"Warming data {ftype.__name__}", total=len(instances))
        for f in instances:
            def get_data(arg):
                # pop rather than read: emptying the dict releases the last
                # reference to the feature so it can be garbage collected
                tally = arg.pop("tally")
                feature = arg.pop("feature")
                # TODO
                # the try catch is as a result of https://github.com/FZJ-INM1-BDA/siibra-python/issues/509
                # sometimes f.data can fail
                try:
                    _ = feature.data
                except Exception as e:
                    # Fix: logger.warn is a deprecated alias of logger.warning
                    logger.warning(f"Feature {feature.name} warmup failed: {str(e)}")
                finally:
                    tally.update(1)
            # append dictionary, so that popping the dictionary will mark the feature to be garbage collected
            return_callables.append(partial(get_data, {"feature": f, "tally": tally}))
    return return_callables
87
+
88
+
89
def render_ascii_tree(class_or_classname: Union[type, str]):
    """
    Print the ascii hierarchy representation of a feature type.

    Parameters
    ----------
    class_or_classname: type, str
        Any Feature class or string of the feature type name
    """
    from anytree.importer import DictImporter
    from anytree import RenderTree
    Cls = TYPES[class_or_classname] if isinstance(class_or_classname, str) else class_or_classname
    assert issubclass(Cls, Feature)

    def create_treenode(feature_type):
        # recursively build the nested dict format expected by anytree's DictImporter
        return {
            'name': feature_type.__name__,
            'children': [
                create_treenode(c)
                for c in feature_type.__subclasses__()
            ]
        }
    D = create_treenode(Cls)
    importer = DictImporter()
    tree = importer.import_(D)
    print("\n".join(
        "%s%s" % (pre, node.name)
        for pre, _, node in RenderTree(tree)
    ))