siibra 0.5a2__py3-none-any.whl → 1.0.0a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of siibra might be problematic. Click here for more details.

Files changed (83)
  1. siibra/VERSION +1 -1
  2. siibra/__init__.py +20 -12
  3. siibra/commons.py +145 -90
  4. siibra/configuration/__init__.py +1 -1
  5. siibra/configuration/configuration.py +22 -17
  6. siibra/configuration/factory.py +177 -128
  7. siibra/core/__init__.py +1 -8
  8. siibra/core/{relation_qualification.py → assignment.py} +17 -14
  9. siibra/core/atlas.py +66 -35
  10. siibra/core/concept.py +81 -39
  11. siibra/core/parcellation.py +83 -67
  12. siibra/core/region.py +569 -263
  13. siibra/core/space.py +7 -39
  14. siibra/core/structure.py +111 -0
  15. siibra/exceptions.py +63 -0
  16. siibra/experimental/__init__.py +19 -0
  17. siibra/experimental/contour.py +61 -0
  18. siibra/experimental/cortical_profile_sampler.py +57 -0
  19. siibra/experimental/patch.py +98 -0
  20. siibra/experimental/plane3d.py +256 -0
  21. siibra/explorer/__init__.py +16 -0
  22. siibra/explorer/url.py +112 -52
  23. siibra/explorer/util.py +31 -9
  24. siibra/features/__init__.py +73 -8
  25. siibra/features/anchor.py +75 -196
  26. siibra/features/connectivity/__init__.py +1 -1
  27. siibra/features/connectivity/functional_connectivity.py +2 -2
  28. siibra/features/connectivity/regional_connectivity.py +99 -10
  29. siibra/features/connectivity/streamline_counts.py +1 -1
  30. siibra/features/connectivity/streamline_lengths.py +1 -1
  31. siibra/features/connectivity/tracing_connectivity.py +1 -1
  32. siibra/features/dataset/__init__.py +1 -1
  33. siibra/features/dataset/ebrains.py +3 -3
  34. siibra/features/feature.py +219 -110
  35. siibra/features/image/__init__.py +1 -1
  36. siibra/features/image/image.py +21 -13
  37. siibra/features/image/sections.py +1 -1
  38. siibra/features/image/volume_of_interest.py +1 -1
  39. siibra/features/tabular/__init__.py +1 -1
  40. siibra/features/tabular/bigbrain_intensity_profile.py +24 -13
  41. siibra/features/tabular/cell_density_profile.py +111 -69
  42. siibra/features/tabular/cortical_profile.py +82 -16
  43. siibra/features/tabular/gene_expression.py +117 -6
  44. siibra/features/tabular/layerwise_bigbrain_intensities.py +7 -9
  45. siibra/features/tabular/layerwise_cell_density.py +9 -24
  46. siibra/features/tabular/receptor_density_fingerprint.py +11 -6
  47. siibra/features/tabular/receptor_density_profile.py +12 -15
  48. siibra/features/tabular/regional_timeseries_activity.py +74 -18
  49. siibra/features/tabular/tabular.py +17 -8
  50. siibra/livequeries/__init__.py +1 -7
  51. siibra/livequeries/allen.py +139 -77
  52. siibra/livequeries/bigbrain.py +104 -128
  53. siibra/livequeries/ebrains.py +7 -4
  54. siibra/livequeries/query.py +1 -2
  55. siibra/locations/__init__.py +32 -25
  56. siibra/locations/boundingbox.py +153 -127
  57. siibra/locations/location.py +45 -80
  58. siibra/locations/point.py +97 -83
  59. siibra/locations/pointcloud.py +349 -0
  60. siibra/retrieval/__init__.py +1 -1
  61. siibra/retrieval/cache.py +107 -13
  62. siibra/retrieval/datasets.py +9 -14
  63. siibra/retrieval/exceptions/__init__.py +2 -1
  64. siibra/retrieval/repositories.py +147 -53
  65. siibra/retrieval/requests.py +64 -29
  66. siibra/vocabularies/__init__.py +2 -2
  67. siibra/volumes/__init__.py +7 -9
  68. siibra/volumes/parcellationmap.py +396 -253
  69. siibra/volumes/providers/__init__.py +20 -0
  70. siibra/volumes/providers/freesurfer.py +113 -0
  71. siibra/volumes/{gifti.py → providers/gifti.py} +29 -18
  72. siibra/volumes/{neuroglancer.py → providers/neuroglancer.py} +204 -92
  73. siibra/volumes/{nifti.py → providers/nifti.py} +64 -44
  74. siibra/volumes/providers/provider.py +107 -0
  75. siibra/volumes/sparsemap.py +159 -260
  76. siibra/volumes/volume.py +720 -152
  77. {siibra-0.5a2.dist-info → siibra-1.0.0a1.dist-info}/METADATA +25 -28
  78. siibra-1.0.0a1.dist-info/RECORD +84 -0
  79. {siibra-0.5a2.dist-info → siibra-1.0.0a1.dist-info}/WHEEL +1 -1
  80. siibra/locations/pointset.py +0 -198
  81. siibra-0.5a2.dist-info/RECORD +0 -74
  82. {siibra-0.5a2.dist-info → siibra-1.0.0a1.dist-info}/LICENSE +0 -0
  83. {siibra-0.5a2.dist-info → siibra-1.0.0a1.dist-info}/top_level.txt +0 -0
@@ -1,4 +1,4 @@
1
- # Copyright 2018-2021
1
+ # Copyright 2018-2024
2
2
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
3
 
4
4
  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -17,14 +17,16 @@
17
17
  from . import anchor as _anchor
18
18
 
19
19
  from ..commons import logger, InstanceTable, siibra_tqdm, __version__
20
- from ..core import concept
21
- from ..core import space, region, parcellation
20
+ from ..core import concept, space, region, parcellation, structure
21
+ from ..volumes import volume
22
22
 
23
23
  from typing import Union, TYPE_CHECKING, List, Dict, Type, Tuple, BinaryIO, Any, Iterator
24
24
  from hashlib import md5
25
25
  from collections import defaultdict
26
26
  from zipfile import ZipFile
27
- from abc import ABC
27
+ from abc import ABC, abstractmethod
28
+ from re import sub
29
+ from textwrap import wrap
28
30
 
29
31
  if TYPE_CHECKING:
30
32
  from ..retrieval.datasets import EbrainsDataset
@@ -86,9 +88,8 @@ class Feature:
86
88
  Base class for anatomically anchored data features.
87
89
  """
88
90
 
89
- SUBCLASSES: Dict[Type['Feature'], List[Type['Feature']]] = defaultdict(list)
90
-
91
- CATEGORIZED: Dict[str, Type['InstanceTable']] = defaultdict(InstanceTable)
91
+ _SUBCLASSES: Dict[Type['Feature'], List[Type['Feature']]] = defaultdict(list)
92
+ _CATEGORIZED: Dict[str, Type['InstanceTable']] = defaultdict(InstanceTable)
92
93
 
93
94
  category: str = None
94
95
 
@@ -97,7 +98,9 @@ class Feature:
97
98
  modality: str,
98
99
  description: str,
99
100
  anchor: _anchor.AnatomicalAnchor,
100
- datasets: List['TypeDataset'] = []
101
+ datasets: List['TypeDataset'] = [],
102
+ id: str = None,
103
+ prerelease: bool = False,
101
104
  ):
102
105
  """
103
106
  Parameters
@@ -114,6 +117,8 @@ class Feature:
114
117
  self._description = description
115
118
  self._anchor_cached = anchor
116
119
  self.datasets = datasets
120
+ self._id = id
121
+ self._prerelease = prerelease
117
122
 
118
123
  @property
119
124
  def modality(self):
@@ -141,19 +146,19 @@ class Feature:
141
146
  # some base classes may not be sub class of feature, ignore these
142
147
  if not issubclass(BaseCls, Feature):
143
148
  continue
144
- cls.SUBCLASSES[BaseCls].append(cls)
149
+ cls._SUBCLASSES[BaseCls].append(cls)
145
150
 
146
151
  cls._live_queries = []
147
152
  cls._preconfigured_instances = None
148
153
  cls._configuration_folder = configuration_folder
149
154
  cls.category = category
150
155
  if category is not None:
151
- cls.CATEGORIZED[category].add(cls.__name__, cls)
156
+ cls._CATEGORIZED[category].add(cls.__name__, cls)
152
157
  return super().__init_subclass__(**kwargs)
153
158
 
154
159
  @classmethod
155
160
  def _get_subclasses(cls):
156
- return {Cls.__name__: Cls for Cls in cls.SUBCLASSES}
161
+ return {Cls.__name__: Cls for Cls in cls._SUBCLASSES}
157
162
 
158
163
  @property
159
164
  def description(self):
@@ -167,15 +172,29 @@ class Feature:
167
172
 
168
173
  @property
169
174
  def LICENSE(self) -> str:
170
- return '\n'.join([ds.LICENSE for ds in self.datasets])
175
+ licenses = []
176
+ for ds in self.datasets:
177
+ if ds.LICENSE is None or ds.LICENSE == "No license information is found.":
178
+ continue
179
+ if isinstance(ds.LICENSE, str):
180
+ licenses.append(ds.LICENSE)
181
+ if isinstance(ds.LICENSE, list):
182
+ licenses.extend(ds.LICENSE)
183
+ if len(licenses) == 0:
184
+ logger.warning("No license information is found.")
185
+ return ""
186
+ if len(licenses) > 1:
187
+ logger.info("Found multiple licenses corresponding to datasets.")
188
+ return '\n'.join(licenses)
171
189
 
172
190
  @property
173
- def doi_or_url(self) -> str:
174
- return '\n'.join([
191
+ def urls(self) -> List[str]:
192
+ """The list of URLs (including DOIs) associated with this feature."""
193
+ return [
175
194
  url.get("url")
176
195
  for ds in self.datasets
177
196
  for url in ds.urls
178
- ])
197
+ ]
179
198
 
180
199
  @property
181
200
  def authors(self):
@@ -188,10 +207,12 @@ class Feature:
188
207
  @property
189
208
  def name(self):
190
209
  """Returns a short human-readable name of this feature."""
191
- return f"{self.__class__.__name__} ({self.modality}) anchored at {self.anchor}"
210
+ readable_class_name = sub("([a-z])([A-Z])", r"\g<1> \g<2>", self.__class__.__name__)
211
+ name_ = sub("([b,B]ig [b,B]rain)", "BigBrain", readable_class_name)
212
+ return name_ if not self._prerelease else f"[PRERELEASE] {name_}"
192
213
 
193
214
  @classmethod
194
- def get_instances(cls, **kwargs) -> List['Feature']:
215
+ def _get_instances(cls, **kwargs) -> List['Feature']:
195
216
  """
196
217
  Retrieve objects of a particular feature subclass.
197
218
  Objects can be preconfigured in the configuration,
@@ -209,7 +230,7 @@ class Feature:
209
230
 
210
231
  from ..configuration.configuration import Configuration
211
232
  conf = Configuration()
212
- Configuration.register_cleanup(cls.clean_instances)
233
+ Configuration.register_cleanup(cls._clean_instances)
213
234
  assert cls._configuration_folder in conf.folders
214
235
  cls._preconfigured_instances = [
215
236
  o for o in conf.build_objects(cls._configuration_folder)
@@ -226,11 +247,19 @@ class Feature:
226
247
  raise NotImplementedError("Generic feature class does not have a standardized plot.")
227
248
 
228
249
  @classmethod
229
- def clean_instances(cls):
250
+ def _clean_instances(cls):
230
251
  """ Removes all instantiated object instances"""
231
252
  cls._preconfigured_instances = None
232
253
 
233
- def matches(self, concept: concept.AtlasConcept) -> bool:
254
+ def matches(
255
+ self,
256
+ concept: Union[structure.BrainStructure, space.Space],
257
+ ) -> bool:
258
+ """
259
+ Match the features anatomical anchor agains the given query concept.
260
+ Record the most recently matched concept for inspection by the caller.
261
+ """
262
+ # TODO: storing the last matched concept. It is not ideal, might cause problems in multithreading
234
263
  if self.anchor and self.anchor.matches(concept):
235
264
  self.anchor._last_matched_concept = concept
236
265
  return True
@@ -239,26 +268,36 @@ class Feature:
239
268
 
240
269
  @property
241
270
  def last_match_result(self):
242
- return None if self.anchor is None \
243
- else self.anchor.last_match_result
271
+ "The result of the last anchor comparison to a BrainStructure."
272
+ return None if self.anchor is None else self.anchor.last_match_result
244
273
 
245
274
  @property
246
275
  def last_match_description(self):
247
- return "" if self.anchor is None \
248
- else self.anchor.last_match_description
276
+ "The description of the last anchor comparison to a BrainStructure."
277
+ return "" if self.anchor is None else self.anchor.last_match_description
249
278
 
250
279
  @property
251
280
  def id(self):
281
+ if self._id:
282
+ return self._id
283
+
284
+ if self._prerelease:
285
+ name_ = self.name.replace("[PRERELEASE] ", "")
286
+ else:
287
+ name_ = self.name
288
+
252
289
  prefix = ''
253
290
  for ds in self.datasets:
254
291
  if hasattr(ds, "id"):
255
292
  prefix = ds.id + '--'
256
293
  break
257
- return prefix + md5(self.name.encode("utf-8")).hexdigest()
294
+ return prefix + md5(
295
+ f"{name_} - {self.anchor}".encode("utf-8")
296
+ ).hexdigest()
258
297
 
259
- def _export(self, fh: ZipFile):
298
+ def _to_zip(self, fh: ZipFile):
260
299
  """
261
- Internal implementation. Subclasses can override but call super()._export(fh).
300
+ Internal implementation. Subclasses can override but call super()._to_zip(fh).
262
301
  This allows all classes in the __mro__ to have the opportunity to append files
263
302
  of interest.
264
303
  """
@@ -298,7 +337,7 @@ class Feature:
298
337
  )
299
338
  )
300
339
 
301
- def export(self, filelike: Union[str, BinaryIO]):
340
+ def to_zip(self, filelike: Union[str, BinaryIO]):
302
341
  """
303
342
  Export as a zip archive.
304
343
 
@@ -309,11 +348,11 @@ class Feature:
309
348
  correct extension (.zip) is set.
310
349
  """
311
350
  fh = ZipFile(filelike, "w")
312
- self._export(fh)
351
+ self._to_zip(fh)
313
352
  fh.close()
314
353
 
315
354
  @staticmethod
316
- def serialize_query_context(feat: 'Feature', concept: concept.AtlasConcept) -> str:
355
+ def _serialize_query_context(feat: 'Feature', concept: concept.AtlasConcept) -> str:
317
356
  """
318
357
  Serialize feature from livequery and query context.
319
358
 
@@ -348,7 +387,7 @@ class Feature:
348
387
  return f"lq0::{feat.__class__.__name__}::{encoded_c}::{feat.id}"
349
388
 
350
389
  @classmethod
351
- def deserialize_query_context(cls, feature_id: str) -> Tuple[Type['Feature'], concept.AtlasConcept, str]:
390
+ def _deserialize_query_context(cls, feature_id: str) -> Tuple[Type['Feature'], concept.AtlasConcept, str]:
352
391
  """
353
392
  Deserialize id into query context.
354
393
 
@@ -372,6 +411,7 @@ class Feature:
372
411
 
373
412
  @staticmethod
374
413
  def _encode_concept(concept: concept.AtlasConcept):
414
+ from ..locations import Location
375
415
  encoded_c = []
376
416
  if isinstance(concept, space.Space):
377
417
  encoded_c.append(f"s:{concept.id}")
@@ -380,6 +420,10 @@ class Feature:
380
420
  elif isinstance(concept, region.Region):
381
421
  encoded_c.append(f"p:{concept.parcellation.id}")
382
422
  encoded_c.append(f"r:{concept.name}")
423
+ elif isinstance(concept, volume.Volume):
424
+ encoded_c.append(f"v:{concept.name}")
425
+ elif isinstance(concept, Location):
426
+ encoded_c.append(f"loc:{Location}")
383
427
 
384
428
  if len(encoded_c) == 0:
385
429
  raise EncodeLiveQueryIdException("no concept is encoded")
@@ -419,7 +463,7 @@ class Feature:
419
463
  def _parse_featuretype(cls, feature_type: str) -> List[Type['Feature']]:
420
464
  ftypes = sorted({
421
465
  feattype
422
- for FeatCls, feattypes in cls.SUBCLASSES.items()
466
+ for FeatCls, feattypes in cls._SUBCLASSES.items()
423
467
  if all(w.lower() in FeatCls.__name__.lower() for w in feature_type.split())
424
468
  for feattype in feattypes
425
469
  }, key=lambda t: t.__name__)
@@ -429,41 +473,52 @@ class Feature:
429
473
  return list(ftypes)
430
474
 
431
475
  @classmethod
432
- def livequery(cls, concept: Union[region.Region, parcellation.Parcellation, space.Space], **kwargs) -> List['Feature']:
476
+ def _livequery(cls, concept: Union[region.Region, parcellation.Parcellation, space.Space], **kwargs) -> List['Feature']:
433
477
  if not hasattr(cls, "_live_queries"):
434
478
  return []
435
479
 
436
480
  live_instances = []
437
481
  for QueryType in cls._live_queries:
438
- argstr = f" ({', '.join('='.join(map(str,_)) for _ in kwargs.items())})" \
482
+ argstr = f" ({', '.join('='.join(map(str, _)) for _ in kwargs.items())})" \
439
483
  if len(kwargs) > 0 else ""
440
- logger.info(
484
+ logger.debug(
441
485
  f"Running live query for {QueryType.feature_type.__name__} "
442
486
  f"objects linked to {str(concept)}{argstr}"
443
487
  )
444
488
  q = QueryType(**kwargs)
445
- features = [
446
- Feature.wrap_livequery_feature(feat, Feature.serialize_query_context(feat, concept))
447
- for feat in q.query(concept)
448
- ]
449
- live_instances.extend(features)
489
+ if isinstance(concept, space.Space):
490
+ features = q.query(concept.get_template())
491
+ else:
492
+ features = q.query(concept)
493
+ live_instances.extend(
494
+ Feature._wrap_livequery_feature(f, Feature._serialize_query_context(f, concept))
495
+ for f in features
496
+ )
497
+
450
498
  return live_instances
451
499
 
452
500
  @classmethod
453
- def match(
501
+ def _match(
454
502
  cls,
455
- concept: Union[region.Region, parcellation.Parcellation, space.Space],
503
+ concept: Union[structure.BrainStructure, space.Space],
456
504
  feature_type: Union[str, Type['Feature'], list],
457
505
  **kwargs
458
506
  ) -> List['Feature']:
459
507
  """
460
- Retrieve data features of the desired modality.
508
+ Retrieve data features of the requested feature type (i.e. modality).
509
+ This will
510
+ - call Feature.match(concept) for any registered preconfigured features
511
+ - run any registered live queries
512
+ The preconfigured and live query instances are merged and returend as a list.
513
+
514
+ If multiple feature types are given, recurse for each of them.
515
+
461
516
 
462
517
  Parameters
463
518
  ----------
464
519
  concept: AtlasConcept
465
520
  An anatomical concept, typically a brain region or parcellation.
466
- modality: subclass of Feature
521
+ feature_type: subclass of Feature, str
467
522
  specififies the type of features ("modality")
468
523
  """
469
524
  if isinstance(feature_type, list):
@@ -474,7 +529,7 @@ class Feature:
474
529
  )
475
530
  return list(dict.fromkeys(
476
531
  sum((
477
- cls.match(concept, t, **kwargs) for t in feature_type
532
+ cls._match(concept, t, **kwargs) for t in feature_type
478
533
  ), [])
479
534
  ))
480
535
 
@@ -485,39 +540,47 @@ class Feature:
485
540
  if len(ftype_candidates) == 0:
486
541
  raise ValueError(
487
542
  f"feature_type {str(feature_type)} did not match with any "
488
- f"features. Available features are: {', '.join(cls.SUBCLASSES.keys())}"
543
+ f"features. Available features are: {', '.join(cls._SUBCLASSES.keys())}"
489
544
  )
490
545
  logger.info(
491
546
  f"'{feature_type}' decoded as feature type/s: "
492
547
  f"{[c.__name__ for c in ftype_candidates]}."
493
548
  )
494
- return cls.match(concept, ftype_candidates, **kwargs)
549
+ return cls._match(concept, ftype_candidates, **kwargs)
495
550
 
496
551
  assert issubclass(feature_type, Feature)
497
552
 
498
- if not isinstance(concept, (region.Region, parcellation.Parcellation, space.Space)):
553
+ # At this stage, no recursion is needed.
554
+ # We expect a specific supported feature type is to be matched now.
555
+ if not isinstance(concept, (structure.BrainStructure, space.Space)):
499
556
  raise ValueError(
500
- "Feature.match / siibra.features.get only accepts Region, "
501
- "Space and Parcellation objects as concept."
557
+ f"{concept.__class__.__name__} cannot be used for feature queries as it is not a `BrainStructure` or a `Space` type."
502
558
  )
503
559
 
504
- msg = f"Matching {feature_type.__name__} to {concept}"
560
+ # Collect any preconfigured instances of the requested feature type
561
+ # which match the query concept
505
562
  instances = [
506
563
  instance
507
- for f_type in cls.SUBCLASSES[feature_type]
508
- for instance in f_type.get_instances()
564
+ for f_type in cls._SUBCLASSES[feature_type]
565
+ for instance in f_type._get_instances()
509
566
  ]
510
567
 
511
568
  preconfigured_instances = [
512
569
  f for f in siibra_tqdm(
513
- instances, desc=msg, total=len(instances), disable=(not instances)
514
- ) if f.matches(concept)
570
+ instances,
571
+ desc=f"Matching {feature_type.__name__} to {concept}",
572
+ total=len(instances),
573
+ disable=(not instances)
574
+ )
575
+ if f.matches(concept)
515
576
  ]
516
577
 
517
- live_instances = feature_type.livequery(concept, **kwargs)
578
+ # Then run any registered live queries for the requested feature type
579
+ # with the query concept.
580
+ live_instances = feature_type._livequery(concept, **kwargs)
518
581
 
519
582
  results = list(dict.fromkeys(preconfigured_instances + live_instances))
520
- return CompoundFeature.compound(results, concept)
583
+ return CompoundFeature._compound(results, concept)
521
584
 
522
585
  @classmethod
523
586
  def _get_instance_by_id(cls, feature_id: str, **kwargs):
@@ -527,17 +590,17 @@ class Feature:
527
590
  pass
528
591
 
529
592
  try:
530
- F, concept, fid = cls.deserialize_query_context(feature_id)
593
+ F, concept, fid = cls._deserialize_query_context(feature_id)
531
594
  return [
532
595
  f
533
- for f in F.livequery(concept, **kwargs)
596
+ for f in F._livequery(concept, **kwargs)
534
597
  if f.id == fid or f.id == feature_id
535
598
  ][0]
536
599
  except ParseLiveQueryIdException:
537
600
  candidates = [
538
601
  inst
539
- for Cls in Feature.SUBCLASSES[Feature]
540
- for inst in Cls.get_instances()
602
+ for Cls in Feature._SUBCLASSES[Feature]
603
+ for inst in Cls._get_instances()
541
604
  if inst.id == feature_id
542
605
  ]
543
606
  if len(candidates) == 0:
@@ -552,31 +615,8 @@ class Feature:
552
615
  except IndexError:
553
616
  raise NotFoundException
554
617
 
555
- @classmethod
556
- def get_ascii_tree(cls):
557
- # build an Ascii representation of class hierarchy
558
- # under this feature class
559
- from anytree.importer import DictImporter
560
- from anytree import RenderTree
561
-
562
- def create_treenode(feature_type):
563
- return {
564
- 'name': feature_type.__name__,
565
- 'children': [
566
- create_treenode(c)
567
- for c in feature_type.__subclasses__()
568
- ]
569
- }
570
- D = create_treenode(cls)
571
- importer = DictImporter()
572
- tree = importer.import_(D)
573
- return "\n".join(
574
- "%s%s" % (pre, node.name)
575
- for pre, _, node in RenderTree(tree)
576
- )
577
-
578
618
  @staticmethod
579
- def wrap_livequery_feature(feature: 'Feature', fid: str):
619
+ def _wrap_livequery_feature(feature: 'Feature', fid: str):
580
620
  """
581
621
  Wrap live query features, override only the id attribute.
582
622
 
@@ -596,6 +636,7 @@ class Feature:
596
636
  def __init__(self, inst: Feature, fid: str):
597
637
  self.inst = inst
598
638
  self.fid = fid
639
+ self.category = inst.category
599
640
 
600
641
  def __str__(self) -> str:
601
642
  return self.inst.__str__()
@@ -625,6 +666,11 @@ class Compoundable(ABC):
625
666
  assert len(cls._filter_attrs) > 0, "All compoundable classes have to have `_filter_attrs` defined."
626
667
  assert len(cls._compound_attrs) > 0, "All compoundable classes have to have `_compound_attrs` defined."
627
668
  assert all(attr in cls._filter_attrs for attr in cls._compound_attrs), "`_compound_attrs` must be a subset of `_filter_attrs`."
669
+ cls._indexing_attrs = [
670
+ attr
671
+ for attr in cls._filter_attrs
672
+ if attr not in cls._compound_attrs
673
+ ]
628
674
  return super().__init_subclass__(**kwargs)
629
675
 
630
676
  def __init__(self):
@@ -649,15 +695,35 @@ class Compoundable(ABC):
649
695
  @property
650
696
  def _element_index(self) -> Any:
651
697
  """
652
- Unique index of this compoundable feature as a subfeature of the
653
- Compound. Should be hashable.
698
+ Unique index of this compoundable feature as an element of the Compound.
699
+ Must be hashable.
654
700
  """
655
- index = [
656
- self.filter_attributes[attr]
657
- for attr in self._filter_attrs
658
- if attr not in self._compound_attrs
659
- ]
660
- return index[0] if len(index) == 1 else tuple(index)
701
+ index_ = [self.filter_attributes[attr] for attr in self._indexing_attrs]
702
+ index = index_[0] if len(index_) == 1 else tuple(index_)
703
+ assert hash(index), "`_element_index` of a compoundable must be hashable."
704
+ return index
705
+
706
+ @classmethod
707
+ def _merge_anchors(cls, anchors: List[_anchor.AnatomicalAnchor]):
708
+ return sum(anchors)
709
+
710
+ @classmethod
711
+ @abstractmethod
712
+ def _merge_elements(
713
+ cls,
714
+ elements,
715
+ description: str,
716
+ modality: str,
717
+ anchor: _anchor.AnatomicalAnchor
718
+ ) -> Feature:
719
+ """
720
+ Compute the merge data and create a merged instance from a set of
721
+ elements of this class. This will be used by CompoundFeature to
722
+ create the aggegated data and plot it. For example, to compute an
723
+ average connectivity matrix from a set of subfeatures, we create a
724
+ RegionalConnectivty feature.
725
+ """
726
+ raise NotImplementedError
661
727
 
662
728
 
663
729
  class CompoundFeature(Feature):
@@ -671,7 +737,7 @@ class CompoundFeature(Feature):
671
737
  def __init__(
672
738
  self,
673
739
  elements: List['Feature'],
674
- queryconcept: Union[region.Region, parcellation.Parcellation, space.Space]
740
+ queryconcept: Union[region.Region, parcellation.Parcellation, space.Space],
675
741
  ):
676
742
  """
677
743
  A compound of several features of the same type with an anchor created
@@ -703,21 +769,57 @@ class CompoundFeature(Feature):
703
769
  self,
704
770
  modality=modality,
705
771
  description="\n".join({f.description for f in elements}),
706
- anchor=sum([f.anchor for f in elements]),
707
- datasets=list(dict.fromkeys([ds for f in elements for ds in f.datasets]))
772
+ anchor=self._feature_type._merge_anchors([f.anchor for f in elements]),
773
+ datasets=list(dict.fromkeys([ds for f in elements for ds in f.datasets])),
774
+ prerelease=all(f._prerelease for f in elements),
708
775
  )
709
776
  self._queryconcept = queryconcept
777
+ self._merged_feature_cached = None
710
778
 
711
779
  def __getattr__(self, attr: str) -> Any:
712
780
  """Expose compounding attributes explicitly."""
713
781
  if attr in self._compounding_attributes:
714
782
  return self._compounding_attributes[attr]
715
- else:
716
- raise AttributeError(f"{self._feature_type.__name__} has no attribute {attr}.")
783
+ if hasattr(self._feature_type, attr):
784
+ raise AttributeError(
785
+ f"{self.__class__.__name__} does not have access to '{attr}' "
786
+ "since it does not have the same value for all its elements."
787
+ )
788
+ raise AttributeError(
789
+ f"{self.__class__.__name__} or {self._feature_type.__name__} have no attribute {attr}."
790
+ )
717
791
 
718
792
  def __dir__(self):
719
793
  return super().__dir__() + list(self._compounding_attributes.keys())
720
794
 
795
+ def plot(self, *args, **kwargs):
796
+ kwargs["title"] = "(Derived data: averaged)\n" + kwargs.get(
797
+ "title",
798
+ "\n".join(wrap(self.name, kwargs.pop("textwrap", 40)))
799
+ )
800
+ return self._get_merged_feature().plot(*args, **kwargs)
801
+
802
+ def _get_merged_feature(self) -> Feature:
803
+ if self._merged_feature_cached is None:
804
+ logger.info(f"{self.__class__.__name__}.data averages the data of each element.")
805
+ assert issubclass(self.feature_type, Compoundable)
806
+ self._merged_feature_cached = self.feature_type._merge_elements(
807
+ elements=self.elements,
808
+ modality=self.modality,
809
+ description=self.description,
810
+ anchor=self.anchor
811
+ )
812
+ return self._merged_feature_cached
813
+
814
+ @property
815
+ def data(self):
816
+ return self._get_merged_feature().data
817
+
818
+ @property
819
+ def indexing_attributes(self) -> Tuple[str]:
820
+ "The attributes determining the index of this CompoundFeature's elements."
821
+ return tuple(self.elements[0]._indexing_attrs)
822
+
721
823
  @property
722
824
  def elements(self):
723
825
  """Features that make up the compound feature."""
@@ -725,7 +827,7 @@ class CompoundFeature(Feature):
725
827
 
726
828
  @property
727
829
  def indices(self):
728
- """Unique indices to features making up the compound feature."""
830
+ """Unique indices to features making up the CompoundFeature."""
729
831
  return list(self._elements.keys())
730
832
 
731
833
  @property
@@ -736,23 +838,30 @@ class CompoundFeature(Feature):
736
838
  @property
737
839
  def name(self) -> str:
738
840
  """Returns a short human-readable name of this feature."""
841
+ readable_feature_type = sub(
842
+ "([b,B]ig [b,B]rain)", "BigBrain",
843
+ sub("([a-z])([A-Z])", r"\g<1> \g<2>", self.feature_type.__name__)
844
+ )
739
845
  groupby = ', '.join([
740
- f"{v} {k}" for k, v in self._compounding_attributes.items()
846
+ f"{k}: {v}"
847
+ for k, v in self._compounding_attributes.items()
848
+ if k != 'modality'
741
849
  ])
742
- return (
743
- f"{self.__class__.__name__} of {len(self)} "
744
- f"{self.feature_type.__name__} features grouped by ({groupby})"
745
- f" anchored at {self.anchor}"
746
- )
850
+ cf_name = f"{len(self)} {readable_feature_type} features{f' {groupby}' if groupby else ''}"
851
+ return cf_name if not self._prerelease else f"[PRERELEASE] {cf_name}"
747
852
 
748
853
  @property
749
854
  def id(self) -> str:
855
+ if self._prerelease:
856
+ name_ = self.name.replace("[PRERELEASE] ", "")
857
+ else:
858
+ name_ = self.name
750
859
  return "::".join((
751
860
  "cf0",
752
861
  f"{self._feature_type.__name__}",
753
862
  self._encode_concept(self._queryconcept),
754
863
  self.datasets[0].id if self.datasets else "nodsid",
755
- md5(self.name.encode("utf-8")).hexdigest()
864
+ md5(name_.encode("utf-8")).hexdigest()
756
865
  ))
757
866
 
758
867
  def __iter__(self) -> Iterator['Feature']:
@@ -775,7 +884,7 @@ class CompoundFeature(Feature):
775
884
  raise IndexError(f"No feature with index '{index}' in this compound.")
776
885
 
777
886
  @classmethod
778
- def compound(
887
+ def _compound(
779
888
  cls,
780
889
  features: List['Feature'],
781
890
  queryconcept: Union[region.Region, parcellation.Parcellation, space.Space]
@@ -834,7 +943,7 @@ class CompoundFeature(Feature):
834
943
  assert cf_version == "cf0"
835
944
  candidates = [
836
945
  f
837
- for f in Feature.match(
946
+ for f in Feature._match(
838
947
  concept=cls._decode_concept(queryconcept),
839
948
  feature_type=clsname
840
949
  )
@@ -849,8 +958,8 @@ class CompoundFeature(Feature):
849
958
  else:
850
959
  raise ParseCompoundFeatureIdException
851
960
 
852
- def _export(self, fh: ZipFile):
853
- super()._export(fh)
961
+ def _to_zip(self, fh: ZipFile):
962
+ super()._to_zip(fh)
854
963
  for idx, element in siibra_tqdm(self._elements.items(), desc="Exporting elements", unit="element"):
855
964
  if '/' in str(idx):
856
965
  logger.warning(f"'/' will be replaced with ' ' of the file for element with index {idx}")
@@ -1,4 +1,4 @@
1
- # Copyright 2018-2021
1
+ # Copyright 2018-2024
2
2
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
3
 
4
4
  # Licensed under the Apache License, Version 2.0 (the "License");