siibra 1.0a19__py3-none-any.whl → 1.0.1a0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of siibra might be problematic.

Files changed (38)
  1. siibra/VERSION +1 -1
  2. siibra/__init__.py +3 -3
  3. siibra/commons.py +0 -46
  4. siibra/configuration/factory.py +10 -20
  5. siibra/core/atlas.py +20 -14
  6. siibra/core/parcellation.py +67 -52
  7. siibra/core/region.py +133 -123
  8. siibra/exceptions.py +8 -0
  9. siibra/experimental/contour.py +6 -6
  10. siibra/experimental/patch.py +2 -2
  11. siibra/experimental/plane3d.py +8 -8
  12. siibra/features/anchor.py +12 -13
  13. siibra/features/connectivity/regional_connectivity.py +2 -2
  14. siibra/features/feature.py +14 -16
  15. siibra/features/tabular/bigbrain_intensity_profile.py +1 -1
  16. siibra/features/tabular/cell_density_profile.py +97 -63
  17. siibra/features/tabular/layerwise_cell_density.py +3 -22
  18. siibra/features/tabular/regional_timeseries_activity.py +2 -2
  19. siibra/livequeries/allen.py +39 -16
  20. siibra/livequeries/bigbrain.py +8 -8
  21. siibra/livequeries/query.py +0 -1
  22. siibra/locations/__init__.py +9 -9
  23. siibra/locations/boundingbox.py +29 -24
  24. siibra/locations/point.py +4 -4
  25. siibra/locations/{pointset.py → pointcloud.py} +30 -22
  26. siibra/retrieval/repositories.py +9 -26
  27. siibra/retrieval/requests.py +19 -2
  28. siibra/volumes/__init__.py +1 -1
  29. siibra/volumes/parcellationmap.py +88 -81
  30. siibra/volumes/providers/neuroglancer.py +62 -36
  31. siibra/volumes/providers/nifti.py +11 -25
  32. siibra/volumes/sparsemap.py +124 -245
  33. siibra/volumes/volume.py +141 -52
  34. {siibra-1.0a19.dist-info → siibra-1.0.1a0.dist-info}/METADATA +16 -3
  35. {siibra-1.0a19.dist-info → siibra-1.0.1a0.dist-info}/RECORD +38 -38
  36. {siibra-1.0a19.dist-info → siibra-1.0.1a0.dist-info}/WHEEL +1 -1
  37. {siibra-1.0a19.dist-info → siibra-1.0.1a0.dist-info}/LICENSE +0 -0
  38. {siibra-1.0a19.dist-info → siibra-1.0.1a0.dist-info}/top_level.txt +0 -0
siibra/features/connectivity/regional_connectivity.py

@@ -21,7 +21,7 @@ from .. import anchor as _anchor
 
 from ...commons import logger, QUIET, siibra_tqdm
 from ...core import region as _region
-from ...locations import pointset
+from ...locations import pointcloud
 from ...retrieval.repositories import RepositoryConnector
 from ...retrieval.requests import HttpRequest
 
@@ -440,7 +440,7 @@ class RegionalConnectivity(Feature, Compoundable):
             found = [r for r in region if r.name in all_centroids]
             assert len(found) > 0
             result.append(
-                tuple(pointset.PointSet(
+                tuple(pointcloud.PointCloud(
                     [all_centroids[r.name] for r in found], space=space
                 ).centroid)
             )
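The pointset → pointcloud rename above recurs throughout this release (see file 25, locations/{pointset.py → pointcloud.py}). A minimal migration sketch for downstream code, assuming only the module and class names change while the constructor keeps its signature:

    # 1.0a19:
    #   from siibra.locations import pointset
    #   ps = pointset.PointSet([[0, 0, 0], [10, 10, 10]], space="mni152")
    # 1.0.1a0:
    from siibra.locations import pointcloud
    pc = pointcloud.PointCloud([[0, 0, 0], [10, 10, 10]], space="mni152")
    print(tuple(pc.centroid))  # same centroid computation as used in the hunk above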
siibra/features/feature.py

@@ -251,13 +251,16 @@ class Feature:
         """ Removes all instantiated object instances"""
         cls._preconfigured_instances = None
 
-    def matches(self, concept: structure.BrainStructure, restrict_space: bool = False) -> bool:
+    def matches(
+        self,
+        concept: Union[structure.BrainStructure, space.Space],
+    ) -> bool:
         """
         Match the features anatomical anchor agains the given query concept.
         Record the most recently matched concept for inspection by the caller.
         """
         # TODO: storing the last matched concept. It is not ideal, might cause problems in multithreading
-        if self.anchor and self.anchor.matches(concept, restrict_space):
+        if self.anchor and self.anchor.matches(concept):
             self.anchor._last_matched_concept = concept
             return True
         self.anchor._last_matched_concept = None
@@ -483,10 +486,10 @@ class Feature:
                     f"objects linked to {str(concept)}{argstr}"
                 )
                 q = QueryType(**kwargs)
-                try:
+                if isinstance(concept, space.Space):
+                    features = q.query(concept.get_template())
+                else:
                     features = q.query(concept)
-                except StopIteration:
-                    continue
                 live_instances.extend(
                     Feature._wrap_livequery_feature(f, Feature._serialize_query_context(f, concept))
                     for f in features
@@ -497,9 +500,8 @@ class Feature:
     @classmethod
     def _match(
         cls,
-        concept: structure.BrainStructure,
+        concept: Union[structure.BrainStructure, space.Space],
         feature_type: Union[str, Type['Feature'], list],
-        restrict_space: bool = False,
         **kwargs
     ) -> List['Feature']:
         """
@@ -518,10 +520,6 @@ class Feature:
             An anatomical concept, typically a brain region or parcellation.
         feature_type: subclass of Feature, str
             specififies the type of features ("modality")
-        restrict_space: bool: default: False
-            If true, will skip features anchored at spatial locations of
-            different spaces than the concept. Requires concept to be a
-            Location.
         """
         if isinstance(feature_type, list):
             # a list of feature types is given, collect match results on those
@@ -531,7 +529,7 @@ class Feature:
             )
             return list(dict.fromkeys(
                 sum((
-                    cls._match(concept, t, restrict_space, **kwargs) for t in feature_type
+                    cls._match(concept, t, **kwargs) for t in feature_type
                 ), [])
             ))
 
@@ -548,15 +546,15 @@ class Feature:
                 f"'{feature_type}' decoded as feature type/s: "
                 f"{[c.__name__ for c in ftype_candidates]}."
             )
-            return cls._match(concept, ftype_candidates, restrict_space, **kwargs)
+            return cls._match(concept, ftype_candidates, **kwargs)
 
         assert issubclass(feature_type, Feature)
 
         # At this stage, no recursion is needed.
         # We expect a specific supported feature type is to be matched now.
-        if not isinstance(concept, structure.BrainStructure):
+        if not isinstance(concept, (structure.BrainStructure, space.Space)):
             raise ValueError(
-                f"{concept.__class__.__name__} cannot be used for feature queries as it is not a BrainStructure type."
+                f"{concept.__class__.__name__} cannot be used for feature queries as it is not a `BrainStructure` or a `Space` type."
             )
 
         # Collect any preconfigured instances of the requested feature type
@@ -574,7 +572,7 @@ class Feature:
                 total=len(instances),
                 disable=(not instances)
             )
-            if f.matches(concept, restrict_space)
+            if f.matches(concept)
         ]
 
         # Then run any registered live queries for the requested feature type
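Net effect of the feature.py hunks: the restrict_space flag is removed, a Space becomes a first-class query concept that is resolved to its template before live queries run, and live queries now signal "no result" by returning an empty list instead of raising StopIteration. A hedged sketch of the new call pattern (siibra.features.get and the spaces registry are assumed from siibra's public API; they are not shown in this diff):

    import siibra

    icbm = siibra.spaces["mni152"]  # assumed registry accessor
    # 1.0a19 rejected a Space here unless restrict_space tricks were used;
    # 1.0.1a0 accepts the Space itself and matches via icbm.get_template().
    features = siibra.features.get(icbm, "image")  # modality string assumed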
siibra/features/tabular/bigbrain_intensity_profile.py

@@ -65,7 +65,7 @@ class BigBrainIntensityProfile(
 
     @classmethod
     def _merge_anchors(cls, anchors: List['AnatomicalAnchor']):
-        from ...locations.pointset import from_points
+        from ...locations.pointcloud import from_points
         from ...features.anchor import AnatomicalAnchor
 
         location = from_points([anchor.location for anchor in anchors])
siibra/features/tabular/cell_density_profile.py

@@ -16,15 +16,77 @@
 from . import cortical_profile
 
 from .. import anchor as _anchor
-from ...commons import PolyLine, logger, create_key
+from ...commons import logger
 from ...retrieval import requests
 
 from skimage.draw import polygon
 from skimage.transform import resize
-from io import BytesIO
 import numpy as np
 import pandas as pd
 
+from io import BytesIO
+from typing import Union, Tuple, Iterable
+
+
+def cell_reader(bytes_buffer: bytes):
+    return pd.read_csv(BytesIO(bytes_buffer[2:]), delimiter=" ", header=0).astype(
+        {"layer": int, "label": int}
+    )
+
+
+def layer_reader(bytes_buffer: bytes):
+    return pd.read_csv(BytesIO(bytes_buffer[2:]), delimiter=" ", header=0, index_col=0)
+
+
+def poly_srt(poly):
+    return poly[poly[:, 0].argsort(), :]
+
+
+def poly_rev(poly):
+    return poly[poly[:, 0].argsort()[::-1], :]
+
+
+class PolyLine:
+    """Simple polyline representation which allows equidistant sampling."""
+
+    def __init__(self, pts):
+        self.pts = pts
+        self.lengths = [
+            np.sqrt(np.sum((pts[i, :] - pts[i - 1, :]) ** 2))
+            for i in range(1, pts.shape[0])
+        ]
+
+    def length(self):
+        return sum(self.lengths)
+
+    def sample(self, d: Union[Iterable[float], np.ndarray, float]):
+        # if d is interable, we assume a list of sample positions
+        try:
+            iter(d)
+        except TypeError:
+            positions = [d]
+        else:
+            positions = d
+
+        samples = []
+        for s_ in positions:
+            s = min(max(s_, 0), 1)
+            target_distance = s * self.length()
+            current_distance = 0
+            for i, length in enumerate(self.lengths):
+                current_distance += length
+                if current_distance >= target_distance:
+                    p1 = self.pts[i, :]
+                    p2 = self.pts[i + 1, :]
+                    r = (target_distance - current_distance + length) / length
+                    samples.append(p1 + (p2 - p1) * r)
+                    break
+
+        if len(samples) == 1:
+            return samples[0]
+        else:
+            return np.array(samples)
+
 
 class CellDensityProfile(
     cortical_profile.CorticalProfile,
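PolyLine moves out of siibra.commons and becomes private to this module. Its sample() maps normalized arc-length positions in [0, 1] to interpolated points on the polyline, clamping out-of-range positions; for example, on two unit segments forming a right angle:

    import numpy as np

    pl = PolyLine(np.array([[0., 0.], [1., 0.], [1., 1.]]))
    pl.length()              # 2.0
    pl.sample(0.5)           # array([1., 0.]), the corner point
    pl.sample([0.25, 0.75])  # array([[0.5, 0. ], [1. , 0.5]])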
@@ -45,24 +107,6 @@ class CellDensityProfile(
 
     _filter_attrs = cortical_profile.CorticalProfile._filter_attrs + ["location"]
 
-    @classmethod
-    def CELL_READER(cls, b):
-        return pd.read_csv(BytesIO(b[2:]), delimiter=" ", header=0).astype(
-            {"layer": int, "label": int}
-        )
-
-    @classmethod
-    def LAYER_READER(cls, b):
-        return pd.read_csv(BytesIO(b[2:]), delimiter=" ", header=0, index_col=0)
-
-    @staticmethod
-    def poly_srt(poly):
-        return poly[poly[:, 0].argsort(), :]
-
-    @staticmethod
-    def poly_rev(poly):
-        return poly[poly[:, 0].argsort()[::-1], :]
-
     def __init__(
         self,
         section: int,
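The removed CELL_READER/LAYER_READER classmethods are superseded by the module-level cell_reader and layer_reader defined in the previous hunk, which LayerwiseCellDensity now imports as well (file 17). Judging from the [2:] slice, the segment files open with a two-byte prefix (presumably "# ") before a space-separated header row; a hypothetical payload illustrating that contract (the exact column set beyond layer, label, x, y is an assumption):

    raw = b"# x y area layer label\n10.5 20.2 12.0 1 1\n11.0 21.0 9.0 2 5\n"
    cells = cell_reader(raw)   # parses from byte 2 onward
    cells.dtypes["layer"]      # int64, enforced by the astype() call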
@@ -89,9 +133,9 @@ class CellDensityProfile(
         )
         self._step = 0.01
         self._url = url
-        self._cell_loader = requests.HttpRequest(url, self.CELL_READER)
+        self._cell_loader = requests.HttpRequest(url, cell_reader)
         self._layer_loader = requests.HttpRequest(
-            url.replace("segments", "layerinfo"), self.LAYER_READER
+            url.replace("segments", "layerinfo"), layer_reader
         )
         self._density_image = None
         self._layer_mask = None
@@ -105,47 +149,49 @@ class CellDensityProfile(
 
     @property
     def shape(self):
-        return tuple(self.cells[["y", "x"]].max().astype("int") + 1)
+        """(y,x)"""
+        return tuple(np.ceil(self.cells[["y", "x"]].max()).astype("int"))
 
-    def boundary_annotation(self, boundary):
+    def boundary_annotation(self, boundary: Tuple[int, int]) -> np.ndarray:
         """Returns the annotation of a specific layer boundary."""
-        y1, x1 = self.shape
+        shape_y, shape_x = self.shape
 
         # start of image patch
         if boundary == (-1, 0):
-            return np.array([[0, 0], [x1, 0]])
+            return np.array([[0, 0], [shape_x, 0]])
 
         # end of image patch
         if boundary == (7, 8):
-            return np.array([[0, y1], [x1, y1]])
+            return np.array([[0, shape_y], [shape_x, shape_y]])
 
         # retrieve polygon
         basename = "{}_{}.json".format(
             *(self.LAYERS[layer] for layer in boundary)
         ).replace("0_I", "0")
-        url = self._url.replace("segments.txt", basename)
-        poly = self.poly_srt(np.array(requests.HttpRequest(url).get()["segments"]))
+        poly_url = self._url.replace("segments.txt", basename)
+        poly = poly_srt(np.array(requests.HttpRequest(poly_url).get()["segments"]))
 
-        # ensure full width
+        # ensure full width and trim to the image shape
         poly[0, 0] = 0
-        poly[-1, 0] = x1
+        poly[poly[:, 0] > shape_x, 0] = shape_x
+        poly[poly[:, 1] > shape_y, 1] = shape_y
 
         return poly
 
-    def layer_annotation(self, layer):
+    def layer_annotation(self, layer: int) -> np.ndarray:
         return np.vstack(
             (
                 self.boundary_annotation((layer - 1, layer)),
-                self.poly_rev(self.boundary_annotation((layer, layer + 1))),
+                poly_rev(self.boundary_annotation((layer, layer + 1))),
                 self.boundary_annotation((layer - 1, layer))[0, :],
             )
         )
 
     @property
-    def layer_mask(self):
+    def layer_mask(self) -> np.ndarray:
         """Generates a layer mask from boundary annotations."""
         if self._layer_mask is None:
-            self._layer_mask = np.zeros(np.array(self.shape).astype("int") + 1)
+            self._layer_mask = np.zeros(np.array(self.shape, dtype=int) + 1, dtype="int")
             for layer in range(1, 8):
                 pl = self.layer_annotation(layer)
                 X, Y = polygon(pl[:, 0], pl[:, 1])
@@ -153,20 +199,20 @@ class CellDensityProfile(
         return self._layer_mask
 
     @property
-    def depth_image(self):
+    def depth_image(self) -> np.ndarray:
         """Cortical depth image from layer boundary polygons by equidistant sampling."""
 
         if self._depth_image is None:
-
+            logger.info("Calculating cell densities from cell and layer data...")
             # compute equidistant cortical depth image from inner and outer contour
             scale = 0.1
-            D = np.zeros((np.array(self.density_image.shape) * scale).astype("int") + 1)
+            depth_arr = np.zeros(np.ceil(np.array(self.shape) * scale).astype("int") + 1)
 
             # determine sufficient stepwidth for profile sampling
             # to match downscaled image resolution
-            vstep, hstep = 1.0 / np.array(D.shape) / 2.0
+            vstep, hstep = 1.0 / np.array(depth_arr.shape) / 2.0
             vsteps = np.arange(0, 1 + vstep, vstep)
-            hsteps = np.arange(0, 1 + vstep, hstep)
+            hsteps = np.arange(0, 1 + hstep, hstep)
 
             # build straight profiles between outer and inner cortical boundary
             s0 = PolyLine(self.boundary_annotation((0, 1)) * scale).sample(hsteps)
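The hsteps change is a real bug fix, not a rename: the upper bound of the horizontal sampling grid previously used vstep, so for non-square patches the grid could overshoot 1.0, and PolyLine.sample clamps out-of-range positions, yielding a duplicated endpoint sample. A quick numeric check with an illustrative 20x40 downscaled shape:

    import numpy as np

    vstep, hstep = 1.0 / np.array((20, 40)) / 2.0    # 0.025, 0.0125
    len(np.arange(0, 1 + vstep, hstep))  # 82 samples, last value 1.0125 > 1
    len(np.arange(0, 1 + hstep, hstep))  # 81 samples, last value exactly 1.0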
@@ -175,16 +221,16 @@ class CellDensityProfile(
 
             # write sample depths to their location in the depth image
             for prof in profiles:
-                XY = prof.sample(vsteps).astype("int")
-                D[XY[:, 1], XY[:, 0]] = vsteps
+                prof_samples_as_index = prof.sample(vsteps).astype("int")
+                depth_arr[prof_samples_as_index[:, 1], prof_samples_as_index[:, 0]] = vsteps
 
             # fix wm region, account for rounding error
             XY = self.layer_annotation(7) * scale
-            D[polygon(XY[:, 1] - 1, XY[:, 0])] = 1
-            D[-1, :] = 1
+            depth_arr[polygon(XY[:, 1] - 1, XY[:, 0])] = 1
+            depth_arr[-1, :] = 1
 
             # rescale depth image to original patch size
-            self._depth_image = resize(D, self.density_image.shape)
+            self._depth_image = resize(depth_arr, self.density_image.shape)
 
         return self._depth_image
 
@@ -200,7 +246,7 @@ class CellDensityProfile(
         return self._boundary_positions
 
     @property
-    def density_image(self):
+    def density_image(self) -> np.ndarray:
         if self._density_image is None:
             logger.debug("Computing density image for", self._url)
             # we integrate cell counts into 2D bins
@@ -209,9 +255,7 @@ class CellDensityProfile(
             counts, xedges, yedges = np.histogram2d(
                 self.cells.y,
                 self.cells.x,
-                bins=(np.array(self.layer_mask.shape) / pixel_size_micron + 0.5).astype(
-                    "int"
-                ),
+                bins=np.round(np.array(self.shape) / pixel_size_micron).astype("int"),
             )
 
             # rescale the counts from count / pixel_size**2 to count / 0.1mm^3,
@@ -229,11 +273,11 @@ class CellDensityProfile(
         return self._density_image
 
     @property
-    def cells(self):
+    def cells(self) -> pd.DataFrame:
         return self._cell_loader.get()
 
     @property
-    def layers(self):
+    def layers(self) -> pd.DataFrame:
         return self._layer_loader.get()
 
     @property
@@ -242,6 +286,7 @@ class CellDensityProfile(
 
     @property
     def _values(self):
+        # TODO: release a dataset update instead of on the fly computation
         densities = []
         delta = self._step / 2.0
         for d in self._depths:
@@ -249,16 +294,5 @@ class CellDensityProfile(
             if np.sum(mask) > 0:
                 densities.append(self.density_image[mask].mean())
             else:
-                densities.append(np.NaN)
+                densities.append(np.nan)
         return np.asanyarray(densities)
-
-    @property
-    def key(self):
-        assert len(self.species) == 1
-        return create_key("{}_{}_{}_{}_{}".format(
-            self.id,
-            self.species[0]['name'],
-            self.regionspec,
-            self.section,
-            self.patch
-        ))
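The np.NaN to np.nan change future-proofs the code: NumPy 2.0 removed the np.NaN alias, so only the lowercase spelling works across versions:

    import numpy as np

    densities = [np.nan]        # works on NumPy 1.x and 2.x
    # densities = [np.NaN]      # AttributeError under NumPy >= 2.0
    np.asanyarray(densities)    # array([nan])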
siibra/features/tabular/layerwise_cell_density.py

@@ -16,13 +16,13 @@
 from . import cortical_profile
 from .. import anchor as _anchor
 from . import tabular
+from ..tabular.cell_density_profile import cell_reader, layer_reader
 
 from ... import commons
 from ...retrieval import requests
 
 import pandas as pd
 import numpy as np
-from io import BytesIO
 
 
 class LayerwiseCellDensity(
@@ -40,16 +40,6 @@ class LayerwiseCellDensity(
         "The cortical depth is estimated from the measured layer thicknesses."
     )
 
-    @classmethod
-    def CELL_READER(cls, b):
-        return pd.read_csv(BytesIO(b[2:]), delimiter=" ", header=0).astype(
-            {"layer": int, "label": int}
-        )
-
-    @classmethod
-    def LAYER_READER(cls, b):
-        return pd.read_csv(BytesIO(b[2:]), delimiter=" ", header=0, index_col=0)
-
     def __init__(
         self,
         segmentfiles: list,
@@ -77,8 +67,8 @@ class LayerwiseCellDensity(
         density_dict = {}
         for i, (cellfile, layerfile) in enumerate(self._filepairs):
             try:
-                cells = requests.HttpRequest(cellfile, func=self.CELL_READER).data
-                layers = requests.HttpRequest(layerfile, func=self.LAYER_READER).data
+                cells = requests.HttpRequest(cellfile, func=cell_reader).data
+                layers = requests.HttpRequest(layerfile, func=layer_reader).data
             except requests.SiibraHttpRequestError as e:
                 print(str(e))
                 commons.logger.error(f"Skipping to bootstrap a {self.__class__.__name__} feature, cannot access file resource.")
@@ -103,12 +93,3 @@ class LayerwiseCellDensity(
         )
         self._data_cached.index.name = 'layer'
         return self._data_cached
-
-    @property
-    def key(self):
-        assert len(self.species) == 1
-        return commons.create_key("{}_{}_{}".format(
-            self.dataset_id,
-            self.species[0]['name'],
-            self.regionspec
-        ))
siibra/features/tabular/regional_timeseries_activity.py

@@ -19,7 +19,7 @@ from ..feature import Compoundable
 from ...core import region as _region
 from .. import anchor as _anchor
 from ...commons import QUIET, siibra_tqdm
-from ...locations import pointset
+from ...locations import pointcloud
 from ...retrieval.repositories import RepositoryConnector
 from ...retrieval.requests import HttpRequest
 
@@ -203,7 +203,7 @@ class RegionalTimeseriesActivity(tabular.Tabular, Compoundable):
             found = [r for r in region if r.name in all_centroids]
             assert len(found) > 0
             result.append(
-                tuple(pointset.PointSet(
+                tuple(pointcloud.PointCloud(
                     [all_centroids[r.name] for r in found], space=space
                 ).centroid)
             )
siibra/livequeries/allen.py

@@ -20,11 +20,11 @@ from ..core import space as _space, structure
 from ..features import anchor as _anchor
 from ..features.tabular.gene_expression import GeneExpressions
 from ..commons import logger, Species
-from ..locations import point, pointset
+from ..locations import point, pointcloud
 from ..retrieval import HttpRequest
 from ..vocabularies import GENE_NAMES
 
-from typing import Iterable
+from typing import List
 from xml.etree import ElementTree
 import numpy as np
 import json
@@ -32,6 +32,24 @@ import json
 
 BASE_URL = "http://api.brain-map.org/api/v2/data"
 
+LOCATION_PRECISION_MM = 2.  # the assumed spatial precision of the probe locations in MNI space
+
+
+def is_allen_api_microarray_service_available():
+    import requests
+
+    # see https://community.brain-map.org/t/human-brain-atlas-api/2876
+    microarray_test_url = "http://api.brain-map.org/api/v2/data/query.json?criteria=service::human_microarray_expression[probes$eq1023146,1023147][donors$eq15496][structures$eq9148]"
+    try:
+        response = requests.get(microarray_test_url).json()
+    except requests.RequestException:
+        return False
+    return response["success"]
+
+
+class InvalidAllenAPIResponseException(Exception):
+    pass
+
 
 class AllenBrainAtlasQuery(LiveQuery, args=['gene'], FeatureType=GeneExpressions):
     """
@@ -117,7 +135,14 @@ class AllenBrainAtlasQuery(LiveQuery, args=['gene'], FeatureType=GeneExpressions
 
         self.genes = parse_gene(gene)
 
-    def query(self, concept: structure.BrainStructure) -> Iterable[GeneExpressions]:
+    def query(self, concept: structure.BrainStructure) -> List[GeneExpressions]:
+        if not is_allen_api_microarray_service_available():
+            raise InvalidAllenAPIResponseException(
+                'The service "web API of the Allen Brain Atlas for the human microarray expression" '
+                'is not available at the moment, therefore siibra is not able to fetch '
+                'gene expression features. This is a known issue which we are investigating: '
+                'https://github.com/FZJ-INM1-BDA/siibra-python/issues/636.'
+            )
 
         mnispace = _space.Space.registry().get('mni152')
 
@@ -125,23 +150,21 @@ class AllenBrainAtlasQuery(LiveQuery, args=['gene'], FeatureType=GeneExpressions
         # Record matched instances and their locations.
         measurements = []
         coordinates = []
-        points_inside = dict()
         for measurement in self:
-            pt = point.Point(measurement['mni_xyz'], space=mnispace)
-            if pt not in points_inside:  # cache redundant intersection tests
-                points_inside[pt] = pt in concept
-            if points_inside[pt]:
+            pt = point.Point(measurement['mni_xyz'], space=mnispace, sigma_mm=LOCATION_PRECISION_MM)
+            if pt in concept:
                 measurements.append(measurement)
                 coordinates.append(pt)
 
-        if len(points_inside) == 0:
-            raise StopIteration
+        if len(coordinates) == 0:
+            logger.info(f"No probes found that lie within {concept}")
+            return []
 
         # Build the anatomical anchor and assignment to the query concept.
         # It will be attached to the returned feature, with the set of matched
         # MNI coordinates as anchor's location.
         anchor = _anchor.AnatomicalAnchor(
-            location=pointset.from_points(coordinates),
+            location=pointcloud.from_points(coordinates),
             species=self.species
         )
         explanation = f"MNI coordinates of tissue samples were filtered using {concept}"
@@ -153,7 +176,7 @@ class AllenBrainAtlasQuery(LiveQuery, args=['gene'], FeatureType=GeneExpressions
         )]
         anchor._last_matched_concept = concept
 
-        yield GeneExpressions(
+        return [GeneExpressions(
             anchor=anchor,
             genes=[m['gene'] for m in measurements],
             levels=[m['expression_level'] for m in measurements],
@@ -167,7 +190,7 @@ class AllenBrainAtlasQuery(LiveQuery, args=['gene'], FeatureType=GeneExpressions
             "probe_id": [m['probe_id'] for m in measurements],
             "donor_name": [m['donor_name'] for m in measurements],
             }
-        )
+        )]
 
     def __iter__(self):
 
@@ -261,7 +284,7 @@ class AllenBrainAtlasQuery(LiveQuery, args=['gene'], FeatureType=GeneExpressions
         url = AllenBrainAtlasQuery._QUERY["specimen"].format(specimen_id=specimen_id)
         response = HttpRequest(url).get()
         if not response["success"]:
-            raise Exception(
+            raise InvalidAllenAPIResponseException(
                 "Invalid response when retrieving specimen information: {}".format(url)
             )
         # we ask for 1 specimen, so list should have length 1
@@ -278,7 +301,7 @@ class AllenBrainAtlasQuery(LiveQuery, args=['gene'], FeatureType=GeneExpressions
         return specimen
 
     @classmethod
-    def _retrieve_microarray(cls, donor_id: str, probe_ids: str) -> Iterable[GeneExpressions]:
+    def _retrieve_microarray(cls, donor_id: str, probe_ids: str):
         """
         Retrieve microarray data for several probes of a given donor, and
         compute the MRI position of the corresponding tissue block in the ICBM
@@ -297,7 +320,7 @@ class AllenBrainAtlasQuery(LiveQuery, args=['gene'], FeatureType=GeneExpressions
         except json.JSONDecodeError as e:
             raise RuntimeError(f"Allen institute site produced an empty response - please try again later.\n{e}")
         if not response["success"]:
-            raise Exception(
+            raise InvalidAllenAPIResponseException(
                 "Invalid response when retrieving microarray data: {}".format(url)
             )
 
siibra/livequeries/bigbrain.py

@@ -19,7 +19,7 @@ from . import query
 from ..features.tabular import bigbrain_intensity_profile, layerwise_bigbrain_intensities
 from ..features import anchor as _anchor
 from ..commons import logger
-from ..locations import point, pointset
+from ..locations import point, pointcloud
 from ..core import structure
 from ..retrieval import requests, cache
 from ..retrieval.datasets import GenericDataset
@@ -119,11 +119,11 @@ class BigBrainProfileQuery(query.LiveQuery, args=[], FeatureType=bigbrain_intens
 
     def query(self, concept: structure.BrainStructure, **kwargs) -> List[bigbrain_intensity_profile.BigBrainIntensityProfile]:
         loader = WagstylProfileLoader()
-        mesh_vertices = pointset.PointSet(loader._vertices, space='bigbrain')
-        matched = concept.intersection(mesh_vertices)  # returns a reduced PointSet with og indices as labels
+        mesh_vertices = pointcloud.PointCloud(loader._vertices, space='bigbrain')
+        matched = concept.intersection(mesh_vertices)  # returns a reduced PointCloud with og indices as labels
         if matched is None:
             return []
-        assert isinstance(matched, pointset.PointSet)
+        assert isinstance(matched, pointcloud.PointCloud)
         indices = matched.labels
         assert indices is not None
         features = []
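Both BigBrain queries rely on the same intersection contract: concept.intersection(pointcloud) returns None or a reduced PointCloud whose labels preserve the original vertex indices, which are then used to slice the per-vertex profile arrays. Sketched with the names from the hunk above (loader._vertices and loader._profiles are private attributes of WagstylProfileLoader):

    matched = concept.intersection(mesh_vertices)  # PointCloud or None
    if matched is not None:
        indices = matched.labels                   # original vertex indices
        profiles = loader._profiles[indices, :]    # per-vertex profiles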
@@ -159,11 +159,11 @@ class LayerwiseBigBrainIntensityQuery(query.LiveQuery, args=[], FeatureType=laye
     def query(self, concept: structure.BrainStructure, **kwargs) -> List[layerwise_bigbrain_intensities.LayerwiseBigBrainIntensities]:
 
         loader = WagstylProfileLoader()
-        mesh_vertices = pointset.PointSet(loader._vertices, space='bigbrain')
-        matched = concept.intersection(mesh_vertices)  # returns a reduced PointSet with og indices as labels
+        mesh_vertices = pointcloud.PointCloud(loader._vertices, space='bigbrain')
+        matched = concept.intersection(mesh_vertices)  # returns a reduced PointCloud with og indices as labels
         if matched is None:
             return []
-        assert isinstance(matched, pointset.PointSet)
+        assert isinstance(matched, pointcloud.PointCloud)
         indices = matched.labels
         assert indices is not None
         matched_profiles = loader._profiles[indices, :]
@@ -177,7 +177,7 @@ class LayerwiseBigBrainIntensityQuery(query.LiveQuery, args=[], FeatureType=laye
         ]).reshape((-1, 200))
 
         anchor = _anchor.AnatomicalAnchor(
-            location=pointset.PointSet(loader._vertices[indices, :], space='bigbrain'),
+            location=pointcloud.PointCloud(loader._vertices[indices, :], space='bigbrain'),
             region=str(concept),
             species='Homo sapiens'
         )
siibra/livequeries/query.py

@@ -47,4 +47,3 @@ class LiveQuery(ABC):
     @abstractmethod
     def query(self, concept: AtlasConcept, **kwargs) -> List[Feature]:
         raise NotImplementedError(f"Dervied class {self.__class__} needs to implement query()")
-        pass