siibra-1.0.0a1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of siibra might be problematic.

Files changed (84)
  1. siibra/VERSION +1 -0
  2. siibra/__init__.py +164 -0
  3. siibra/commons.py +823 -0
  4. siibra/configuration/__init__.py +17 -0
  5. siibra/configuration/configuration.py +189 -0
  6. siibra/configuration/factory.py +589 -0
  7. siibra/core/__init__.py +16 -0
  8. siibra/core/assignment.py +110 -0
  9. siibra/core/atlas.py +239 -0
  10. siibra/core/concept.py +308 -0
  11. siibra/core/parcellation.py +387 -0
  12. siibra/core/region.py +1223 -0
  13. siibra/core/space.py +131 -0
  14. siibra/core/structure.py +111 -0
  15. siibra/exceptions.py +63 -0
  16. siibra/experimental/__init__.py +19 -0
  17. siibra/experimental/contour.py +61 -0
  18. siibra/experimental/cortical_profile_sampler.py +57 -0
  19. siibra/experimental/patch.py +98 -0
  20. siibra/experimental/plane3d.py +256 -0
  21. siibra/explorer/__init__.py +17 -0
  22. siibra/explorer/url.py +222 -0
  23. siibra/explorer/util.py +87 -0
  24. siibra/features/__init__.py +117 -0
  25. siibra/features/anchor.py +224 -0
  26. siibra/features/connectivity/__init__.py +33 -0
  27. siibra/features/connectivity/functional_connectivity.py +57 -0
  28. siibra/features/connectivity/regional_connectivity.py +494 -0
  29. siibra/features/connectivity/streamline_counts.py +27 -0
  30. siibra/features/connectivity/streamline_lengths.py +27 -0
  31. siibra/features/connectivity/tracing_connectivity.py +30 -0
  32. siibra/features/dataset/__init__.py +17 -0
  33. siibra/features/dataset/ebrains.py +90 -0
  34. siibra/features/feature.py +970 -0
  35. siibra/features/image/__init__.py +27 -0
  36. siibra/features/image/image.py +115 -0
  37. siibra/features/image/sections.py +26 -0
  38. siibra/features/image/volume_of_interest.py +88 -0
  39. siibra/features/tabular/__init__.py +24 -0
  40. siibra/features/tabular/bigbrain_intensity_profile.py +77 -0
  41. siibra/features/tabular/cell_density_profile.py +298 -0
  42. siibra/features/tabular/cortical_profile.py +322 -0
  43. siibra/features/tabular/gene_expression.py +257 -0
  44. siibra/features/tabular/layerwise_bigbrain_intensities.py +62 -0
  45. siibra/features/tabular/layerwise_cell_density.py +95 -0
  46. siibra/features/tabular/receptor_density_fingerprint.py +192 -0
  47. siibra/features/tabular/receptor_density_profile.py +110 -0
  48. siibra/features/tabular/regional_timeseries_activity.py +294 -0
  49. siibra/features/tabular/tabular.py +139 -0
  50. siibra/livequeries/__init__.py +19 -0
  51. siibra/livequeries/allen.py +352 -0
  52. siibra/livequeries/bigbrain.py +197 -0
  53. siibra/livequeries/ebrains.py +145 -0
  54. siibra/livequeries/query.py +49 -0
  55. siibra/locations/__init__.py +91 -0
  56. siibra/locations/boundingbox.py +454 -0
  57. siibra/locations/location.py +115 -0
  58. siibra/locations/point.py +344 -0
  59. siibra/locations/pointcloud.py +349 -0
  60. siibra/retrieval/__init__.py +27 -0
  61. siibra/retrieval/cache.py +233 -0
  62. siibra/retrieval/datasets.py +389 -0
  63. siibra/retrieval/exceptions/__init__.py +27 -0
  64. siibra/retrieval/repositories.py +769 -0
  65. siibra/retrieval/requests.py +659 -0
  66. siibra/vocabularies/__init__.py +45 -0
  67. siibra/vocabularies/gene_names.json +29176 -0
  68. siibra/vocabularies/receptor_symbols.json +210 -0
  69. siibra/vocabularies/region_aliases.json +460 -0
  70. siibra/volumes/__init__.py +23 -0
  71. siibra/volumes/parcellationmap.py +1279 -0
  72. siibra/volumes/providers/__init__.py +20 -0
  73. siibra/volumes/providers/freesurfer.py +113 -0
  74. siibra/volumes/providers/gifti.py +165 -0
  75. siibra/volumes/providers/neuroglancer.py +736 -0
  76. siibra/volumes/providers/nifti.py +266 -0
  77. siibra/volumes/providers/provider.py +107 -0
  78. siibra/volumes/sparsemap.py +468 -0
  79. siibra/volumes/volume.py +892 -0
  80. siibra-1.0.0a1.dist-info/LICENSE +201 -0
  81. siibra-1.0.0a1.dist-info/METADATA +160 -0
  82. siibra-1.0.0a1.dist-info/RECORD +84 -0
  83. siibra-1.0.0a1.dist-info/WHEEL +5 -0
  84. siibra-1.0.0a1.dist-info/top_level.txt +1 -0
siibra/core/region.py ADDED
@@ -0,0 +1,1223 @@
1
+ # Copyright 2018-2024
2
+ # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
+
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ """Representation of a brain region."""
16
+
17
+ from . import concept, structure, space as _space, parcellation as _parcellation
18
+ from .assignment import Qualification, AnatomicalAssignment
19
+
20
+ from ..retrieval.cache import cache_user_fn
21
+ from ..locations import location, pointcloud, boundingbox as _boundingbox
22
+ from ..volumes import parcellationmap, volume
23
+ from ..commons import (
24
+ logger,
25
+ MapType,
26
+ create_key,
27
+ clear_name,
28
+ InstanceTable,
29
+ )
30
+ from ..exceptions import NoMapAvailableError, SpaceWarpingFailedError
31
+
32
+ import re
33
+ import anytree
34
+ from typing import List, Union, Iterable, Dict, Callable, Tuple
35
+ from difflib import SequenceMatcher
36
+ from ebrains_drive import BucketApiClient
37
+ import json
38
+ from functools import wraps, reduce
39
+ from concurrent.futures import ThreadPoolExecutor
40
+ from functools import lru_cache
41
+
42
+
43
+ REGEX_TYPE = type(re.compile("test"))
44
+
45
+ THRESHOLD_STATISTICAL_MAPS = None
46
+
47
+
48
+ class Region(anytree.NodeMixin, concept.AtlasConcept, structure.BrainStructure):
49
+ """
50
+ Representation of a brain region with a name and further optional attributes
51
+ """
52
+
53
+ _regex_re = re.compile(r'^\/(?P<expression>.+)\/(?P<flags>[a-zA-Z]*)$')
54
+ _accepted_flags = "aiLmsux"
55
+
56
+ _GETMAP_CACHE = {}
57
+ _GETMAP_CACHE_MAX_ENTRIES = 1
58
+
59
+ def __init__(
60
+ self,
61
+ name: str,
62
+ children: List["Region"] = [],
63
+ parent: "Region" = None,
64
+ shortname: str = "",
65
+ description: str = "",
66
+ modality: str = "",
67
+ publications: list = [],
68
+ datasets: list = [],
69
+ rgb: str = None,
70
+ spec=None,
71
+ prerelease: bool = False,
72
+ ):
73
+ """
74
+ Constructs a new Region object.
75
+
76
+ Parameters
77
+ ----------
78
+ name : str
79
+ Human-readable name of the region
80
+ children: list[Region]
81
+ parent: Region
82
+ shortname: str
83
+ Shortform of human-readable name (optional)
84
+ description: str
85
+ Textual description of the region
86
+ modality: str or None
87
+ Specification of the modality used for specifying this region
88
+ publications: list
89
+ List of associated publications, each a dictionary with "doi"
90
+ and/or "citation" fields
91
+ datasets: list
92
+ datasets associated with this region
93
+ rgb: str, default: None
94
+ Hexcode of preferred color of this region (e.g. "#9FE770")
95
+ spec: dict, default: None
96
+ The preconfigured specification.
97
+ """
98
+ anytree.NodeMixin.__init__(self)
99
+ concept.AtlasConcept.__init__(
100
+ self,
101
+ identifier=None, # lazy property implementation below
102
+ name=clear_name(name),
103
+ species=None, # lazy property implementation below
104
+ shortname=shortname,
105
+ description=description,
106
+ modality=modality,
107
+ publications=publications,
108
+ datasets=datasets,
109
+ spec=spec,
110
+ prerelease=prerelease,
111
+ )
112
+
113
+ # anytree node will take care to use this appropriately
114
+ self.parent = parent
115
+ self.children = children
116
+ # convert hex to int tuple if rgb is given
117
+ self.rgb = (
118
+ None if rgb is None
119
+ else tuple(int(rgb[p:p + 2], 16) for p in [1, 3, 5])
120
+ )
121
+ self._supported_spaces = None # computed on 1st call of self.supported_spaces
122
+ self._str_aliases = None
123
+ self.find = lru_cache(maxsize=3)(self.find)
124
+
125
+ def get_related_regions(self) -> Iterable["RegionRelationAssessments"]:
126
+ """
127
+ Get assessments of the relations of this region to other regions defined on EBRAINS.
128
+
129
+ Yields
130
+ ------
131
+ RegionRelationAssessments
132
+
133
+ Example
134
+ -------
135
+ >>> region = siibra.get_region("monkey", "PG")
136
+ >>> for assessment in region.get_related_regions():
137
+ >>> print(assessment)
138
+ 'PG' is homologous to 'Area PGa (IPL)'
139
+ 'PG' is homologous to 'Area PGa (IPL) left'
140
+ 'PG' is homologous to 'Area PGa (IPL) right'
141
+ 'PG' is homologous to 'Area PGa (IPL)'
142
+ 'PG' is homologous to 'Area PGa (IPL) left'
143
+ 'PG' is homologous to 'Area PGa (IPL) right'
144
+ 'PG' is homologous to 'Area PGa (IPL)'
145
+ 'PG' is homologous to 'Area PGa (IPL) right'
146
+ 'PG' is homologous to 'Area PGa (IPL) left'
147
+ """
148
+ yield from RegionRelationAssessments.parse_from_region(self)
149
+
150
+ @property
151
+ def id(self):
152
+ if self.parent is None:
153
+ return create_key(self.name)
154
+ else:
155
+ return f"{self.parent.root.id}_{create_key(self.name)}"
156
+
157
+ @property
158
+ def parcellation(self):
159
+ if isinstance(self.root, _parcellation.Parcellation):
160
+ return self.root
161
+ else:
162
+ return None
163
+
164
+ @property
165
+ def species(self):
166
+ # lazy request of the root parcellation's species
167
+ if self._species_cached is None:
168
+ self._species_cached = self.parcellation.species
169
+ return self._species_cached
170
+
171
+ @staticmethod
172
+ def copy(other: 'Region'):
173
+ """
174
+ copy constructor must detach the parent to avoid problems with
175
+ the Anytree implementation.
176
+ """
177
+ # create an isolated object, detached from the other's tree
178
+ region = Region(
179
+ name=other.name,
180
+ children=[Region.copy(c) for c in other.children],
181
+ parent=None,
182
+ shortname=other.shortname,
183
+ description=other.description,
184
+ modality=other.modality,
185
+ publications=other.publications,
186
+ datasets=other.datasets,
187
+ rgb=other.rgb)
188
+
189
+ for c in region.children:
190
+ c.parent = region
191
+ return region
192
+
193
+ @property
194
+ def names(self):
195
+ return {r.name for r in self}
196
+
197
+ def __eq__(self, other):
198
+ """
199
+ Compare this region with other objects. If other is a string,
200
+ compare to key, name or id.
201
+ """
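+ # Illustrative comparisons (the string literal below is a placeholder):
+ #   region == region.name    -> True (matches name, key, id or a configured EBRAINS id)
+ #   region == "other region" -> False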
202
+ if isinstance(other, Region):
203
+ return self.id == other.id
204
+ if isinstance(other, str):
205
+ if not self._str_aliases:
206
+ self._str_aliases = {
207
+ self.name,
208
+ self.key,
209
+ self.id,
210
+ }
211
+ if self._spec:
212
+ ebrain_ids = [
213
+ value
214
+ for value in self._spec.get("ebrains", {}).values()
215
+ if isinstance(value, str)
216
+ ]
217
+ ebrain_nested_ids = [
218
+ _id
219
+ for value in self._spec.get("ebrains", {}).values() if isinstance(value, list)
220
+ for _id in value
221
+ ]
222
+ assert all(isinstance(_id, str) for _id in ebrain_nested_ids)
223
+ all_ebrain_ids = [
224
+ *ebrain_ids,
225
+ *ebrain_nested_ids
226
+ ]
227
+
228
+ self._str_aliases.update(all_ebrain_ids)
229
+
230
+ return other in self._str_aliases
231
+ return False
232
+
233
+ def __hash__(self):
234
+ return hash(self.id)
235
+
236
+ def has_parent(self, parent):
237
+ return parent in [a for a in self.ancestors]
238
+
239
+ def includes(self, region):
240
+ """
241
+ Determine whether this region-tree includes the given region.
242
+
243
+ Parameters
244
+ ----------
245
+ region: Region
246
+
247
+ Returns
248
+ -------
249
+ bool
250
+ True if the region is in the region-tree.
251
+ """
252
+ return region == self or region in self.descendants
253
+
254
+ def find(
255
+ self,
256
+ regionspec,
257
+ filter_children=False,
258
+ find_topmost=True,
259
+ ) -> List['Region']:
260
+ """
261
+ Find regions that match the given region specification in the subtree
262
+ headed by this region.
263
+
264
+ Parameters
265
+ ----------
266
+ regionspec: str, regex, Region
267
+ - a string with a possibly inexact name (matched both against the name and the identifier key)
268
+ - a string in '/pattern/flags' format to use regex search (acceptable flags: aiLmsux, see at https://docs.python.org/3/library/re.html#flags)
269
+ - a regex applied to region names
270
+ - a Region object
271
+ filter_children : bool, default: False
272
+ If True, children of matched parents will not be returned
273
+ find_topmost : bool, default: True
274
+ If True (requires `filter_children=True`), will return parent
275
+ structures if all children are matched, even though the parent
276
+ itself might not match the specification.
277
+
278
+ Returns
279
+ -------
280
+ list[Region]
281
+ list of regions matching to the regionspec
282
+
283
+ Tip
284
+ ---
285
+ See example 01-003, find regions.
286
+ """
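+ # Illustrative regionspec forms (the region names below are placeholders):
+ #   root.find("4p")                  # inexact name match
+ #   root.find("/^Area 4/i")          # '/pattern/flags' string, case-insensitive regex
+ #   root.find(re.compile("hOc1"))    # precompiled regex applied to names and keys
+ #   root.find(other_region_object)   # match by Region instance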
287
+ if isinstance(regionspec, str):
288
+ # convert the specified string into a regex for matching
289
+ regex_match = self._regex_re.match(regionspec)
290
+ if regex_match:
291
+ flags = regex_match.group('flags')
292
+ expression = regex_match.group('expression')
293
+
294
+ for flag in flags or []: # catch if flags is nullish
295
+ if flag not in self._accepted_flags:
296
+ raise Exception(f"Only these flags are accepted: {self._accepted_flags}. '{flag}' is not among them.")
297
+ search_regex = (f"(?{flags})" if flags else "") + expression
298
+ regionspec = re.compile(search_regex)
299
+
300
+ candidates = list(
301
+ anytree.search.findall(self, lambda node: node.matches(regionspec))
302
+ )
303
+
304
+ if len(candidates) > 1 and filter_children:
305
+ filtered = []
306
+ for region in candidates:
307
+ children_included = [c for c in region.children if c in candidates]
308
+ if len(children_included) > 0:
309
+ filtered.append(region)
310
+ else:
311
+ if region.parent not in candidates:
312
+ filtered.append(region)
313
+
314
+ # find any non-matched regions of which all children are matched
315
+ if find_topmost:
316
+ complete_parents = list(
317
+ {
318
+ r.parent
319
+ for r in filtered
320
+ if (r.parent is not None)
321
+ and all((c in filtered) for c in r.parent.children)
322
+ }
323
+ )
324
+
325
+ if len(complete_parents) == 0:
326
+ candidates = filtered
327
+ else:
328
+ # filter child regions again
329
+ filtered += complete_parents
330
+ candidates = [
331
+ r for r in filtered
332
+ if (r.parent not in filtered) or r == regionspec
333
+ ]
334
+ else:
335
+ candidates = filtered
336
+
337
+ # ensure the result is a list
338
+ if candidates is None:
339
+ candidates = []
340
+ elif isinstance(candidates, Region):
341
+ candidates = [candidates]
342
+ else:
343
+ candidates = list(candidates)
344
+
345
+ found_regions = sorted(set(candidates), key=lambda r: r.depth)
346
+
347
+ # reverse=True because a higher SequenceMatcher().ratio() means a better match
348
+ return (
349
+ sorted(
350
+ found_regions,
351
+ reverse=True,
352
+ key=lambda region: SequenceMatcher(None, str(region), regionspec).ratio(),
353
+ )
354
+ if isinstance(regionspec, str) else found_regions
355
+ )
356
+
357
+ def matches(self, regionspec):
358
+ """
359
+ Checks whether this region matches the given region specification.
360
+
361
+ Parameters
362
+ ----------
363
+ regionspec: str, regex, Region
364
+ - a string with a possibly inexact name, which is matched both against the name and the identifier key,
365
+ - a regex applied to region names,
366
+ - a region object
367
+
368
+ Returns
369
+ -------
370
+ bool
371
+ If the regionspec matches to the Region.
372
+ """
373
+ if regionspec not in self._CACHED_MATCHES:
374
+ def splitstr(s):
375
+ return [w for w in re.split(r"[^a-zA-Z0-9.\-]", s) if len(w) > 0]
376
+
377
+ if regionspec is None:
378
+ self._CACHED_MATCHES[regionspec] = False
379
+
380
+ elif isinstance(regionspec, Region):
381
+ self._CACHED_MATCHES[regionspec] = self == regionspec
382
+
383
+ elif isinstance(regionspec, str):
384
+ # string is given, perform lazy string matching
385
+ q = regionspec.lower().strip()
386
+ if q == self.key.lower().strip():
387
+ self._CACHED_MATCHES[regionspec] = True
388
+ elif q == self.id.lower().strip():
389
+ self._CACHED_MATCHES[regionspec] = True
390
+ elif q == self.name.lower().strip():
391
+ self._CACHED_MATCHES[regionspec] = True
392
+ else:
393
+ # match if all words of the query are also included in the region name
394
+ W = splitstr(clear_name(self.name.lower()))
395
+ Q = splitstr(clear_name(regionspec))
396
+ self._CACHED_MATCHES[regionspec] = all([any(
397
+ q.lower() == w or 'v' + q.lower() == w
398
+ for w in W
399
+ ) for q in Q])
400
+
401
+ # TODO since dropping 3.6 support, maybe reimplement as re.Pattern ?
402
+ elif isinstance(regionspec, REGEX_TYPE):
403
+ # match regular expression
404
+ self._CACHED_MATCHES[regionspec] = any(regionspec.search(s) is not None for s in [self.name, self.key])
405
+
406
+ elif isinstance(regionspec, (list, tuple)):
407
+ self._CACHED_MATCHES[regionspec] = any(self.matches(_) for _ in regionspec)
408
+
409
+ else:
410
+ raise TypeError(
411
+ f"Cannot interpret region specification of type '{type(regionspec)}'"
412
+ )
413
+
414
+ return self._CACHED_MATCHES[regionspec]
415
+
416
+ def get_regional_mask(
417
+ self,
418
+ space: Union[str, _space.Space],
419
+ maptype: MapType = MapType.LABELLED,
420
+ threshold: float = 0.0,
421
+ ) -> volume.FilteredVolume:
422
+ """
423
+ Get a binary mask of this region in the given space,
424
+ using the specified MapTypes.
425
+
426
+ Parameters
427
+ ----------
428
+ space: Space or str
429
+ The requested reference space
430
+ maptype: MapType, default: SIIBRA_DEFAULT_MAPTYPE
431
+ The type of map to be used ('labelled' or 'statistical')
432
+ threshold: float, default: 0.0
433
+ When fetching a statistical map, use this threshold to convert
434
+ it to a binary mask.
435
+
436
+ Returns
437
+ -------
438
+ Volume (use fetch() to get a NiftiImage)
439
+ """
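+ # Usage sketch (the parcellation, region and space specifications below are
+ # illustrative placeholders, assuming a configured siibra installation):
+ #   region = siibra.get_region("julich brain", "hoc1 left")
+ #   mask = region.get_regional_mask("mni152", maptype="statistical", threshold=0.2)
+ #   img = mask.fetch()   # binary NIfTI image thresholded at 0.2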
440
+ if isinstance(maptype, str):
441
+ maptype = MapType[maptype.upper()]
442
+
443
+ threshold_info = "" if maptype == MapType.LABELLED else f"(threshold: {threshold}) "
444
+ name = f"Mask {threshold_info}of '{self.name} ({self.parcellation})' in "
445
+ try:
446
+ regional_map = self.get_regional_map(space=space, maptype=maptype)
447
+ if maptype == MapType.LABELLED:
448
+ assert threshold == 0.0, f"threshold can only be set for {MapType.STATISTICAL} maps."
449
+ result = regional_map
450
+ result._boundingbox = None
451
+ if maptype == MapType.STATISTICAL:
452
+ result = volume.FilteredVolume(
453
+ parent_volume=regional_map,
454
+ threshold=threshold
455
+ )
456
+ if threshold == 0.0:
457
+ result._boundingbox = regional_map._boundingbox
458
+ name += f"'{result.space}'"
459
+ except NoMapAvailableError:
460
+ # This region is not mapped directly in any map in the registry.
461
+ # Try building a map from the child regions
462
+ if (len(self.children) > 0) and all(c.mapped_in_space(space) for c in self.children):
463
+ logger.info(f"{self.name} is not mapped in {space}. Merging the masks of its {len(self.children)} child regions.")
464
+ child_volumes = [
465
+ child.get_regional_mask(space=space, maptype=maptype, threshold=threshold)
466
+ for child in self.children
467
+ ]
468
+ result = volume.FilteredVolume(
469
+ volume.merge(child_volumes),
470
+ label=1
471
+ )
472
+ name += f"'{result.space}' (built by merging the masks {threshold_info}of its descendants)"
473
+ result._name = name
474
+ return result
475
+
476
+ def get_regional_map(
477
+ self,
478
+ space: Union[str, _space.Space],
479
+ maptype: MapType = MapType.LABELLED,
480
+ ) -> Union[volume.FilteredVolume, volume.Volume, volume.Subvolume]:
481
+ """
482
+ Get a volume representing this region in the given space and MapType.
483
+
484
+ Note
485
+ ----
486
+ If a region is not mapped directly in any of the `Map`s in the registry,
487
+ this method raises an exception; `get_regional_mask()` will instead attempt
488
+ to build a mask by merging the masks of the child regions.
489
+
490
+ Parameters
491
+ ----------
492
+ space: Space or str
493
+ The requested reference space
494
+ maptype: MapType, default: SIIBRA_DEFAULT_MAPTYPE
495
+ The type of map to be used ('labelled' or 'statistical')
496
+
497
+ Returns
498
+ -------
499
+ Volume (use fetch() to get a NiftiImage)
500
+ """
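+ # Illustrative call (space spec is a placeholder); use fetch() on the result:
+ #   pmap = region.get_regional_map("mni152", maptype="statistical")
+ #   nii = pmap.fetch()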
501
+ if isinstance(maptype, str):
502
+ maptype = MapType[maptype.upper()]
503
+
504
+ # prepare space instance
505
+ if isinstance(space, str):
506
+ space = _space.Space.get_instance(space)
507
+
508
+ # see if we find a map supporting the requested region
509
+ for m in parcellationmap.Map.registry():
510
+ if (
511
+ m.space.matches(space)
512
+ and m.parcellation == self.parcellation
513
+ and m.provides_image
514
+ and m.maptype == maptype
515
+ and self.name in m.regions
516
+ ):
517
+ return m.get_volume(region=self)
518
+ else:
519
+ raise NoMapAvailableError(
520
+ f"{self.name} is not mapped in {space} as a {str(maptype)} map."
521
+ " Please try getting the children or getting the mask."
522
+ )
523
+
524
+ def mapped_in_space(self, space, recurse: bool = True) -> bool:
525
+ """
526
+ Verifies whether this region is defined by an explicit map in the given space.
527
+
528
+ Parameters
529
+ ----------
530
+ space: Space or str
531
+ reference space
532
+ recurse: bool, default: True
533
+ If True, check if all child regions are mapped instead
534
+ Returns
535
+ -------
536
+ bool
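+ # Illustrative check (space spec is a placeholder): True if the region is mapped
+ # directly, or, with recurse=True, if all of its children are mapped.
+ #   region.mapped_in_space("mni152")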
537
+ """
538
+ from ..volumes.parcellationmap import Map
539
+ for m in Map.registry():
540
+ # Use the `and` operator for efficiency (short-circuiting logic)
541
+ # Put the most inexpensive logic first
542
+ if (
543
+ self.name in m.regions
544
+ and m.space.matches(space)
545
+ and m.parcellation.matches(self.parcellation)
546
+ ):
547
+ return True
548
+ if recurse and not self.is_leaf:
549
+ # check if all children are mapped instead
550
+ return all(c.mapped_in_space(space, recurse=True) for c in self.children)
551
+ return False
552
+
553
+ @property
554
+ def supported_spaces(self) -> List[_space.Space]:
555
+ """
556
+ The set of spaces for which a mask could be extracted.
557
+ Overrides the corresponding property of AtlasConcept.
558
+ """
559
+ if self._supported_spaces is None:
560
+ self._supported_spaces = sorted(
561
+ {s for s in _space.Space.registry() if self.mapped_in_space(s)}
562
+ )
563
+ return self._supported_spaces
564
+
565
+ def supports_space(self, space: _space.Space):
566
+ """
567
+ Return True if this region supports the given space, else False.
568
+ """
569
+ return any(s.matches(space) for s in self.supported_spaces)
570
+
571
+ @property
572
+ def spaces(self):
573
+ return InstanceTable(
574
+ matchfunc=_space.Space.matches,
575
+ elements={s.key: s for s in self.supported_spaces},
576
+ )
577
+
578
+ def __contains__(self, other: Union[location.Location, 'Region']) -> bool:
579
+ if isinstance(other, Region):
580
+ return len(self.find(other)) > 0
581
+ else:
582
+ try:
583
+ regionmap = self.get_regional_mask(space=other.space)
584
+ return regionmap.__contains__(other)
585
+ except NoMapAvailableError:
586
+ return False
587
+
588
+ def assign(self, other: structure.BrainStructure) -> AnatomicalAssignment:
589
+ """
590
+ Compute assignment of a location to this region.
591
+
592
+ Two cases:
593
+ 1) other is location -> get region map, call regionmap.assign(other)
594
+ 2) other is region -> just do a semantic check for the regions
595
+
596
+ Parameters
597
+ ----------
598
+ other : Location or Region
599
+
600
+ Returns
601
+ -------
602
+ AnatomicalAssignment or None
603
+ None if there is no Qualification found.
604
+ """
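+ # Sketch of the two cases (the objects below are placeholders):
+ #   region.assign(point_or_volume)  # location: delegates to the regional mask's assign()
+ #   region.assign(other_region)     # region: semantic check (EXACT, CONTAINS or CONTAINED)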
605
+ if (self, other) in self._ASSIGNMENT_CACHE:
606
+ return self._ASSIGNMENT_CACHE[self, other]
607
+ if (other, self) in self._ASSIGNMENT_CACHE:
608
+ if self._ASSIGNMENT_CACHE[other, self] is None:
609
+ return None
610
+ return self._ASSIGNMENT_CACHE[other, self].invert()
611
+
612
+ if isinstance(other, (location.Location, volume.Volume)):
613
+ if self.mapped_in_space(other.space):
614
+ regionmap = self.get_regional_mask(other.space)
615
+ self._ASSIGNMENT_CACHE[self, other] = regionmap.assign(other)
616
+ return self._ASSIGNMENT_CACHE[self, other]
617
+
618
+ if isinstance(other, _boundingbox.BoundingBox): # volume.intersection(bbox) gets boundingbox anyway
619
+ try:
620
+ regionbbox_otherspace = self.get_boundingbox(other.space, restrict_space=False)
621
+ if regionbbox_otherspace is not None:
622
+ self._ASSIGNMENT_CACHE[self, other] = regionbbox_otherspace.assign(other)
623
+ return self._ASSIGNMENT_CACHE[self, other]
624
+ except Exception as e:
625
+ logger.debug(e)
626
+
627
+ assignment_result = None
628
+ for targetspace in self.supported_spaces:
629
+ try:
630
+ other_warped = other.warp(targetspace)
631
+ regionmap = self.get_regional_mask(targetspace)
632
+ assignment_result = regionmap.assign(other_warped)
633
+ except SpaceWarpingFailedError:
634
+ try:
635
+ regionbbox_targetspace = self.get_boundingbox(
636
+ targetspace, restrict_space=True
637
+ )
638
+ if regionbbox_targetspace is None:
639
+ continue
640
+ regionbbox_warped = regionbbox_targetspace.warp(other.space)
641
+ except SpaceWarpingFailedError:
642
+ continue
643
+ assignment_result = regionbbox_warped.assign(other)
644
+ except Exception as e:
645
+ logger.debug(e)
646
+ continue
647
+ break
648
+ self._ASSIGNMENT_CACHE[self, other] = assignment_result
649
+ else: # other is a Region
650
+ assert isinstance(other, Region)
651
+ if self == other:
652
+ qualification = Qualification.EXACT
653
+ elif self.__contains__(other):
654
+ qualification = Qualification.CONTAINS
655
+ elif other.__contains__(self):
656
+ qualification = Qualification.CONTAINED
657
+ else:
658
+ qualification = None
659
+ if qualification is None:
660
+ self._ASSIGNMENT_CACHE[self, other] = None
661
+ else:
662
+ self._ASSIGNMENT_CACHE[self, other] = AnatomicalAssignment(self, other, qualification)
663
+ return self._ASSIGNMENT_CACHE[self, other]
664
+
665
+ def tree2str(self):
666
+ """Render region-tree as a string"""
667
+ return "\n".join(
668
+ "%s%s" % (pre, node.name)
669
+ for pre, _, node
670
+ in anytree.RenderTree(self, style=anytree.render.ContRoundStyle)
671
+ )
672
+
673
+ def render_tree(self):
674
+ """Prints the tree representation of the region"""
675
+ print(self.tree2str())
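+ # Illustrative use (region spec is a placeholder):
+ #   parcellation.get_region("frontal lobe").render_tree()
+ # prints the subtree with one indented line per descendant region.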
676
+
677
+ def get_boundingbox(
678
+ self,
679
+ space: _space.Space,
680
+ maptype: MapType = MapType.LABELLED,
681
+ threshold_statistical: float = 0.0,
682
+ restrict_space: bool = True,
683
+ **fetch_kwargs
684
+ ) -> Union[_boundingbox.BoundingBox, None]:
685
+ """
686
+ Compute the bounding box of this region in the given space.
687
+
688
+ Parameters
689
+ ----------
690
+ space: Space or str
691
+ Requested reference space
692
+ maptype: MapType, default: MapType.LABELLED
693
+ Type of map to build ('labelled' will result in a binary mask,
694
+ 'statistical' attempts to build a statistical mask, possibly by
695
+ elementwise maximum of statistical maps of children)
696
+ threshold_statistical: float, default: 0.0
697
+ When masking a statistical map, use this threshold to convert
698
+ it to a binary mask before finding its bounding box.
699
+ restrict_space: bool, default: True
700
+ If True, it will not try to fetch maps from other spaces and warp
701
+ its boundingbox to requested space.
702
+
703
+ Returns
704
+ -------
705
+ BoundingBox
706
+ """
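+ # Illustrative call (space spec is a placeholder). Returns None when no bounding
+ # box can be computed in, or warped into, the requested space:
+ #   bbox = region.get_boundingbox("mni152", restrict_space=False)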
707
+ spaceobj = _space.Space.get_instance(space)
708
+ try:
709
+ mask = self.get_regional_mask(
710
+ spaceobj, maptype=maptype, threshold=threshold_statistical
711
+ )
712
+ return mask.get_boundingbox(
713
+ clip=True,
714
+ background=0.0,
715
+ **fetch_kwargs
716
+ )
717
+ except (RuntimeError, ValueError):
718
+ if restrict_space:
719
+ return None
720
+ for other_space in self.parcellation.spaces - spaceobj:
721
+ try:
722
+ mask = self.get_regional_mask(
723
+ other_space,
724
+ maptype=maptype,
725
+ threshold=threshold_statistical,
726
+ )
727
+ bbox = mask.get_boundingbox(clip=True, background=0.0, **fetch_kwargs)
728
+ if bbox is not None:
729
+ try:
730
+ bbox_warped = bbox.warp(spaceobj)
731
+ except SpaceWarpingFailedError:
732
+ continue
733
+ logger.debug(
734
+ f"No bounding box for {self.name} defined in {spaceobj.name}, "
735
+ f"warped the bounding box from {other_space.name} instead."
736
+ )
737
+ return bbox_warped
738
+ except RuntimeError:
739
+ continue
740
+ logger.error(f"Could not compute bounding box for {self.name}.")
741
+ return None
742
+
743
+ def compute_centroids(
744
+ self,
745
+ space: _space.Space,
746
+ maptype: MapType = MapType.LABELLED,
747
+ threshold_statistical: float = 0.0,
748
+ split_components: bool = True,
749
+ **fetch_kwargs,
750
+ ) -> pointcloud.PointCloud:
751
+ """
752
+ Compute the centroids of the region in the given space.
753
+
754
+ Parameters
755
+ ----------
756
+ space: Space
757
+ reference space in which the computation will be performed
758
+ maptype: MapType, default: MapType.LABELLED
759
+ Type of map to build ('labelled' will result in a binary mask,
760
+ 'statistical' attempts to build a statistical mask, possibly by
761
+ elementwise maximum of statistical maps of children)
762
+ threshold_statistical: float, default: 0.0
763
+ When masking a statistical map, use this threshold to convert
764
+ it to a binary mask before finding its centroids.
765
+
766
+ Returns
767
+ -------
768
+ PointCloud
769
+ Found centroids (as Point objects) in a PointCloud
770
+
771
+ Note
772
+ ----
773
+ A region can generally have multiple centroids if it has multiple
774
+ connected components in the map.
775
+ """
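+ # Illustrative call (space spec is a placeholder); yields one centroid per
+ # connected component when split_components=True:
+ #   centroids = region.compute_centroids("mni152", split_components=True)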
776
+ props = self.spatial_props(
777
+ space=space,
778
+ maptype=maptype,
779
+ threshold_statistical=threshold_statistical,
780
+ split_components=split_components,
781
+ **fetch_kwargs,
782
+ )
783
+ return pointcloud.PointCloud(
784
+ [c.centroid for c in props],
785
+ space=space
786
+ )
787
+
788
+ def spatial_props(
789
+ self,
790
+ space: _space.Space,
791
+ maptype: MapType = MapType.LABELLED,
792
+ threshold_statistical: float = 0.0,
793
+ split_components: bool = True,
794
+ **fetch_kwargs,
795
+ ):
796
+ """
797
+ Compute spatial properties for connected components of this region in the given space.
798
+
799
+ Parameters
800
+ ----------
801
+ space: Space
802
+ reference space in which the computation will be performed
803
+ maptype: MapType, default: MapType.LABELLED
804
+ Type of map to build ('labelled' will result in a binary mask,
805
+ 'statistical' attempts to build a statistical mask, possibly by
806
+ elementwise maximum of statistical maps of children)
807
+ threshold_statistical: float, default: 0.0
808
+ if not None, masks will be preferably constructed by thresholding
809
+ statistical maps with the given value.
810
+
811
+ Returns
812
+ -------
813
+ List
814
+ List of region's component spatial properties
815
+ """
816
+ if not isinstance(space, _space.Space):
817
+ space = _space.Space.get_instance(space)
818
+
819
+ # build binary mask of the image
820
+ try:
821
+ region_vol = self.get_regional_mask(
822
+ space, maptype=maptype, threshold=threshold_statistical
823
+ )
824
+ except NoMapAvailableError:
825
+ raise ValueError(
826
+ f"Spatial properties of {self.name} cannot be computed in {space.name}. "
827
+ "This region is only mapped in these spaces: "
828
+ f"{', '.join(s.name for s in self.supported_spaces)}"
829
+ )
830
+
831
+ return region_vol.compute_spatial_props(
832
+ split_components=split_components, **fetch_kwargs
833
+ )
834
+
835
+ def __iter__(self):
836
+ """
837
+ Returns an iterator that goes through all regions in this subtree
838
+ (including this parent region)
839
+ """
840
+ return anytree.PreOrderIter(self)
841
+
842
+ def intersection(self, other: "location.Location") -> "location.Location":
843
+ """Use this region for filtering a location object."""
844
+
845
+ if self.supports_space(other.space):
846
+ try:
847
+ volume = self.get_regional_mask(other.space)
848
+ if volume is not None:
849
+ return volume.intersection(other)
850
+ except NotImplementedError:
851
+ intersections = [child.intersection(other) for child in self.children]
852
+ return reduce(lambda a, b: a.union(b), intersections)
853
+
854
+ for space in self.supported_spaces:
855
+ if space.provides_image:
856
+ try:
857
+ volume = self.get_regional_mask(space)
858
+ if volume is not None:
859
+ intersection = volume.intersection(other)
860
+ logger.info(f"Warped {other} to {space} to find the intersection.")
861
+ return intersection
862
+ except SpaceWarpingFailedError:
863
+ continue
864
+
865
+ return None
866
+
867
+
868
+ @cache_user_fn
869
+ def _get_related_regions_str(pe_id: str) -> Tuple[Tuple[str, str, str, str], ...]:
870
+ logger.info("LONG CALC... %s", pe_id)
871
+ return_val = []
872
+ region_relation_assessments = RegionRelationAssessments.translate_pes(pe_id, pe_id)
873
+ for asgmt in region_relation_assessments:
874
+ assert isinstance(asgmt, RegionRelationAssessments), f"Expecting type to be of RegionRelationAssessments, but is {type(asgmt)}"
875
+ assert isinstance(asgmt.assigned_structure, Region), f"Expecting assigned structure to be of type Region, but is {type(asgmt.assigned_structure)}"
876
+ return_val.append((
877
+ asgmt.assigned_structure.parcellation.id,
878
+ asgmt.assigned_structure.name,
879
+ asgmt.qualification.name,
880
+ asgmt.explanation
881
+ ))
882
+ return tuple(return_val)
883
+
884
+
885
+ def get_peid_from_region(region: Region) -> str:
886
+ """
887
+ Given a region, obtain the Parcellation Entity ID.
888
+
889
+ Parameters
890
+ ----------
891
+ region : Region
892
+
893
+ Returns
894
+ -------
895
+ str
896
+ """
897
+ if region._spec:
898
+ region_peid = region._spec.get("ebrains", {}).get("openminds/ParcellationEntity")
899
+ if region_peid:
900
+ return region_peid
901
+ # In some cases (e.g. Julich Brain, PE is defined on the parent leaf nodes)
902
+ if region.parent and region.parent._spec:
903
+ parent_peid = region.parent._spec.get("ebrains", {}).get("openminds/ParcellationEntity")
904
+ if parent_peid:
905
+ return parent_peid
906
+ return None
907
+
908
+
909
+ def get_related_regions(region: Region) -> Iterable["RegionRelationAssessments"]:
910
+ """
911
+ Get assessments of the relations of a region to other regions defined on EBRAINS.
912
+
913
+ Parameters
914
+ ----------
915
+ region: Region
916
+
917
+ Yields
918
+ ------
919
+ RegionRelationAssessments
920
+
921
+ Example
922
+ -------
923
+ >>> region = siibra.get_region("monkey", "PG")
924
+ >>> for assessment in siibra.core.region.get_related_regions(region):
925
+ >>> print(assessment)
926
+ 'PG' is homologous to 'Area PGa (IPL)'
927
+ 'PG' is homologous to 'Area PGa (IPL) left'
928
+ 'PG' is homologous to 'Area PGa (IPL) right'
929
+ 'PG' is homologous to 'Area PGa (IPL)'
930
+ 'PG' is homologous to 'Area PGa (IPL) left'
931
+ 'PG' is homologous to 'Area PGa (IPL) right'
932
+ 'PG' is homologous to 'Area PGa (IPL)'
933
+ 'PG' is homologous to 'Area PGa (IPL) right'
934
+ 'PG' is homologous to 'Area PGa (IPL) left'
935
+ """
936
+ logger.info("get related region called")
937
+ pe_id = get_peid_from_region(region)
938
+ if not pe_id:
939
+ return []
940
+
941
+ for parc_id, region_name, qual, explanation in _get_related_regions_str(pe_id):
942
+ parc = _parcellation.Parcellation.get_instance(parc_id)
943
+ found_region = parc.get_region(region_name)
944
+ yield RegionRelationAssessments(region, found_region, qual, explanation)
945
+
946
+
947
+ _get_reg_relation_asmgt_types: Dict[str, Callable] = {}
948
+
949
+
950
+ def _register_region_reference_type(ebrain_type: str):
951
+ def outer(fn: Callable):
952
+ _get_reg_relation_asmgt_types[ebrain_type] = fn
953
+
954
+ @wraps(fn)
955
+ def inner(*args, **kwargs):
956
+ return fn(*args, **kwargs)
957
+ return inner
958
+ return outer
959
+
960
+
961
+ class RegionRelationAssessments(AnatomicalAssignment[Region]):
962
+ """
963
+ A collection of methods for finding related regions and quantifying
964
+ their relationships.
965
+ """
966
+
967
+ anony_client = BucketApiClient()
968
+
969
+ @staticmethod
970
+ def get_uuid(long_id: Union[str, Dict]) -> str:
971
+ """
972
+ Returns the uuid portion of a fully formed openminds id. If a dict is
973
+ given, its 'id' property is read first and the uuid portion extracted from it.
974
+
975
+ Parameters
976
+ ----------
977
+ long_id: str, dict[str, str]
978
+
979
+ Returns
980
+ -------
981
+ str
982
+
983
+ Raises
984
+ ------
985
+ AssertionError
986
+ RuntimeError
987
+ """
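+ # Illustrative inputs (the ids below are made up); the trailing uuid is returned:
+ #   RegionRelationAssessments.get_uuid("https://openminds.ebrains.eu/instances/x/00aa11bb-2c3d-4e5f-8899-aabbccddeeff")
+ #   RegionRelationAssessments.get_uuid({"id": "00aa11bb-2c3d-4e5f-8899-aabbccddeeff"})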
988
+ if isinstance(long_id, str):
989
+ pass
990
+ elif isinstance(long_id, dict):
991
+ long_id = long_id.get("id")
992
+ assert isinstance(long_id, str)
993
+ else:
994
+ raise RuntimeError("uuid arg must be str or object")
995
+ uuid_search = re.search(r"(?P<uuid>[a-f0-9-]+)$", long_id)
996
+ assert uuid_search, "uuid not found"
997
+ return uuid_search.group("uuid")
998
+
999
+ @staticmethod
1000
+ def parse_id_arg(_id: Union[str, List[str]]) -> List[str]:
1001
+ """
1002
+ Normalizes the ebrains id property. The ebrains id field can be either
1003
+ a str or list[str]. This method normalizes it to always be list[str].
1004
+
1005
+ Parameters
1006
+ ----------
1007
+ _id: str or list[str]
1008
+
1009
+ Returns
1010
+ -------
1011
+ list[str]
1012
+
1013
+ Raises
1014
+ ------
1015
+ RuntimeError
1016
+ """
1017
+ if isinstance(_id, list):
1018
+ assert all(isinstance(_i, str) for _i in _id), "all instances of pev should be str"
1019
+ elif isinstance(_id, str):
1020
+ _id = [_id]
1021
+ else:
1022
+ raise RuntimeError("parse_pev error: arg must be either list of str or str")
1023
+ return _id
1024
+
1025
+ @classmethod
1026
+ def get_object(cls, obj: str):
1027
+ """
1028
+ Given an object path, loads its content and parses it as JSON.
1029
+ Relative to the bucket 'reference-atlas-data'.
1030
+
1031
+ Parameters
1032
+ ----------
1033
+ obj: str
1034
+
1035
+ Returns
1036
+ -------
1037
+ dict
1038
+ """
1039
+ bucket = cls.anony_client.buckets.get_bucket("reference-atlas-data")
1040
+ return json.loads(bucket.get_file(obj).get_content())
1041
+
1042
+ @classmethod
1043
+ def get_snapshot_factory(cls, type_str: str):
1044
+ """
1045
+ Factory method for given type.
1046
+
1047
+ Parameters
1048
+ ----------
1049
+ type_str: str
1050
+
1051
+ Returns
1052
+ -------
1053
+ Callable[[str|list[str]], dict]
1054
+ """
1055
+ def get_objects(_id: Union[str, List[str]]):
1056
+ _id = cls.parse_id_arg(_id)
1057
+ with ThreadPoolExecutor() as ex:
1058
+ return list(
1059
+ ex.map(
1060
+ cls.get_object,
1061
+ [f"ebrainsquery/v3/{type_str}/{_}.json" for _ in _id]
1062
+ ))
1063
+ return get_objects
1064
+
1065
+ @classmethod
1066
+ def parse_relationship_assessment(cls, src: "Region", assessment):
1067
+ """
1068
+ Given a region, and the fetched assessment json, yield
1069
+ RegionRelationAssessments objects.
1070
+
1071
+ Parameters
1072
+ ----------
1073
+ src: Region
1074
+ assessment: dict
1075
+
1076
+ Returns
1077
+ -------
1078
+ Iterable[RegionRelationAssessments]
1079
+ """
1080
+ all_regions = [
1081
+ region
1082
+ for p in _parcellation.Parcellation.registry()
1083
+ for region in p
1084
+ ]
1085
+
1086
+ overlap = assessment.get("qualitativeOverlap")
1087
+ targets = assessment.get("relationAssessment") or assessment.get("inRelationTo")
1088
+ assert len(overlap) == 1, f"expected exactly one qualitativeOverlap, got {len(overlap)!r}"
1089
+ overlap, = overlap
1090
+ for target in targets:
1091
+ target_id = cls.get_uuid(target)
1092
+
1093
+ found_targets = [
1094
+ region
1095
+ for region in all_regions
1096
+ if region == target_id
1097
+ ]
1098
+
1099
+ for found_target in found_targets:
1100
+ yield cls(
1101
+ query_structure=src,
1102
+ assigned_structure=found_target,
1103
+ qualification=Qualification.parse_relation_assessment(overlap)
1104
+ )
1105
+
1106
+ if "https://openminds.ebrains.eu/sands/ParcellationEntity" in target.get("type"):
1107
+ pev_uuids = [
1108
+ cls.get_uuid(has_version)
1109
+ for pe in cls.get_snapshot_factory("ParcellationEntity")(target_id)
1110
+ for has_version in pe.get("hasVersion")
1111
+ ]
1112
+ for reg in all_regions:
1113
+ if reg in pev_uuids:
1114
+ yield cls(
1115
+ query_structure=src,
1116
+ assigned_structure=reg,
1117
+ qualification=Qualification.parse_relation_assessment(overlap)
1118
+ )
1119
+
1120
+ @classmethod
1121
+ @_register_region_reference_type("openminds/CustomAnatomicalEntity")
1122
+ def translate_cae(cls, src: "Region", _id: Union[str, List[str]]):
1123
+ """Register how CustomAnatomicalEntity should be parsed
1124
+
1125
+ Parameters
1126
+ ----------
1127
+ src: Region
1128
+ _id: str|list[str]
1129
+
1130
+ Returns
1131
+ -------
1132
+ Iterable[RegionRelationAssessments]
1133
+ """
1134
+ caes = cls.get_snapshot_factory("CustomAnatomicalEntity")(_id)
1135
+ for cae in caes:
1136
+ for ass in cae.get("relationAssessment", []):
1137
+ yield from cls.parse_relationship_assessment(src, ass)
1138
+
1139
+ @classmethod
1140
+ @_register_region_reference_type("openminds/ParcellationEntity")
1141
+ def translate_pes(cls, src: "Region", _id: Union[str, List[str]]):
1142
+ """
1143
+ Register how ParcellationEntity should be parsed
1144
+
1145
+ Parameters
1146
+ ----------
1147
+ src: Region
1148
+ _id: str|list[str]
1149
+
1150
+ Returns
1151
+ -------
1152
+ Iterable[RegionRelationAssessments]
1153
+ """
1154
+ pes = cls.get_snapshot_factory("ParcellationEntity")(_id)
1155
+
1156
+ all_regions = [
1157
+ region
1158
+ for p in _parcellation.Parcellation.registry()
1159
+ for region in p
1160
+ ]
1161
+
1162
+ for pe in pes:
1163
+ for region in all_regions:
1164
+ if region is src:
1165
+ continue
1166
+ region_peid = get_peid_from_region(region)
1167
+ if region_peid and (region_peid in pe.get("id")):
1168
+ yield cls(
1169
+ query_structure=src,
1170
+ assigned_structure=region,
1171
+ qualification=Qualification.OTHER_VERSION
1172
+ )
1173
+
1174
+ # homologous
1175
+ relations = pe.get("inRelationTo", [])
1176
+ for relation in relations:
1177
+ yield from cls.parse_relationship_assessment(src, relation)
1178
+
1179
+ @classmethod
1180
+ @_register_region_reference_type("openminds/ParcellationEntityVersion")
1181
+ def translate_pevs(cls, src: "Region", _id: Union[str, List[str]]):
1182
+ """
1183
+ Register how ParcellationEntityVersion should be parsed
1184
+
1185
+ Parameters
1186
+ ----------
1187
+ src: Region
1188
+ _id: str|list[str]
1189
+
1190
+ Returns
1191
+ -------
1192
+ Iterable[RegionRelationAssessments]
1193
+ """
1194
+ pe_uuids = [
1195
+ uuid for uuid in
1196
+ {
1197
+ cls.get_uuid(pe)
1198
+ for pev in cls.get_snapshot_factory("ParcellationEntityVersion")(_id)
1199
+ for pe in pev.get("isVersionOf")
1200
+ }
1201
+ ]
1202
+ yield from cls.translate_pes(src, pe_uuids)
1203
+
1204
+ @classmethod
1205
+ def parse_from_region(cls, region: "Region") -> Iterable["RegionRelationAssessments"]:
1206
+ """
1207
+ Main entry on how related regions should be retrieved. Given a region,
1208
+ retrieves all RegionRelationAssessments
1209
+
1210
+ Parameters
1211
+ ----------
1212
+ region: Region
1213
+
1214
+ Returns
1215
+ -------
1216
+ Iterable[RegionRelationAssessments]
1217
+ """
1218
+ if not region._spec:
1219
+ return None
1220
+ for ebrain_type, ebrain_ref in region._spec.get("ebrains", {}).items():
1221
+ if ebrain_type in _get_reg_relation_asmgt_types:
1222
+ fn = _get_reg_relation_asmgt_types[ebrain_type]
1223
+ yield from fn(cls, region, ebrain_ref)