siibra 1.0a1__1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of siibra might be problematic. Click here for more details.
- siibra/VERSION +1 -0
- siibra/__init__.py +164 -0
- siibra/commons.py +823 -0
- siibra/configuration/__init__.py +17 -0
- siibra/configuration/configuration.py +189 -0
- siibra/configuration/factory.py +589 -0
- siibra/core/__init__.py +16 -0
- siibra/core/assignment.py +110 -0
- siibra/core/atlas.py +239 -0
- siibra/core/concept.py +308 -0
- siibra/core/parcellation.py +387 -0
- siibra/core/region.py +1223 -0
- siibra/core/space.py +131 -0
- siibra/core/structure.py +111 -0
- siibra/exceptions.py +63 -0
- siibra/experimental/__init__.py +19 -0
- siibra/experimental/contour.py +61 -0
- siibra/experimental/cortical_profile_sampler.py +57 -0
- siibra/experimental/patch.py +98 -0
- siibra/experimental/plane3d.py +256 -0
- siibra/explorer/__init__.py +17 -0
- siibra/explorer/url.py +222 -0
- siibra/explorer/util.py +87 -0
- siibra/features/__init__.py +117 -0
- siibra/features/anchor.py +224 -0
- siibra/features/connectivity/__init__.py +33 -0
- siibra/features/connectivity/functional_connectivity.py +57 -0
- siibra/features/connectivity/regional_connectivity.py +494 -0
- siibra/features/connectivity/streamline_counts.py +27 -0
- siibra/features/connectivity/streamline_lengths.py +27 -0
- siibra/features/connectivity/tracing_connectivity.py +30 -0
- siibra/features/dataset/__init__.py +17 -0
- siibra/features/dataset/ebrains.py +90 -0
- siibra/features/feature.py +970 -0
- siibra/features/image/__init__.py +27 -0
- siibra/features/image/image.py +115 -0
- siibra/features/image/sections.py +26 -0
- siibra/features/image/volume_of_interest.py +88 -0
- siibra/features/tabular/__init__.py +24 -0
- siibra/features/tabular/bigbrain_intensity_profile.py +77 -0
- siibra/features/tabular/cell_density_profile.py +298 -0
- siibra/features/tabular/cortical_profile.py +322 -0
- siibra/features/tabular/gene_expression.py +257 -0
- siibra/features/tabular/layerwise_bigbrain_intensities.py +62 -0
- siibra/features/tabular/layerwise_cell_density.py +95 -0
- siibra/features/tabular/receptor_density_fingerprint.py +192 -0
- siibra/features/tabular/receptor_density_profile.py +110 -0
- siibra/features/tabular/regional_timeseries_activity.py +294 -0
- siibra/features/tabular/tabular.py +139 -0
- siibra/livequeries/__init__.py +19 -0
- siibra/livequeries/allen.py +352 -0
- siibra/livequeries/bigbrain.py +197 -0
- siibra/livequeries/ebrains.py +145 -0
- siibra/livequeries/query.py +49 -0
- siibra/locations/__init__.py +91 -0
- siibra/locations/boundingbox.py +454 -0
- siibra/locations/location.py +115 -0
- siibra/locations/point.py +344 -0
- siibra/locations/pointcloud.py +349 -0
- siibra/retrieval/__init__.py +27 -0
- siibra/retrieval/cache.py +233 -0
- siibra/retrieval/datasets.py +389 -0
- siibra/retrieval/exceptions/__init__.py +27 -0
- siibra/retrieval/repositories.py +769 -0
- siibra/retrieval/requests.py +659 -0
- siibra/vocabularies/__init__.py +45 -0
- siibra/vocabularies/gene_names.json +29176 -0
- siibra/vocabularies/receptor_symbols.json +210 -0
- siibra/vocabularies/region_aliases.json +460 -0
- siibra/volumes/__init__.py +23 -0
- siibra/volumes/parcellationmap.py +1279 -0
- siibra/volumes/providers/__init__.py +20 -0
- siibra/volumes/providers/freesurfer.py +113 -0
- siibra/volumes/providers/gifti.py +165 -0
- siibra/volumes/providers/neuroglancer.py +736 -0
- siibra/volumes/providers/nifti.py +266 -0
- siibra/volumes/providers/provider.py +107 -0
- siibra/volumes/sparsemap.py +468 -0
- siibra/volumes/volume.py +892 -0
- siibra-1.0.0a1.dist-info/LICENSE +201 -0
- siibra-1.0.0a1.dist-info/METADATA +160 -0
- siibra-1.0.0a1.dist-info/RECORD +84 -0
- siibra-1.0.0a1.dist-info/WHEEL +5 -0
- siibra-1.0.0a1.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,970 @@
|
|
|
1
|
+
# Copyright 2018-2024
|
|
2
|
+
# Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
|
|
3
|
+
|
|
4
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
5
|
+
# you may not use this file except in compliance with the License.
|
|
6
|
+
# You may obtain a copy of the License at
|
|
7
|
+
|
|
8
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
9
|
+
|
|
10
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
11
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
12
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
13
|
+
# See the License for the specific language governing permissions and
|
|
14
|
+
# limitations under the License.
|
|
15
|
+
"""Handles multimodal data features and related queries."""
|
|
16
|
+
|
|
17
|
+
from . import anchor as _anchor
|
|
18
|
+
|
|
19
|
+
from ..commons import logger, InstanceTable, siibra_tqdm, __version__
|
|
20
|
+
from ..core import concept, space, region, parcellation, structure
|
|
21
|
+
from ..volumes import volume
|
|
22
|
+
|
|
23
|
+
from typing import Union, TYPE_CHECKING, List, Dict, Type, Tuple, BinaryIO, Any, Iterator
|
|
24
|
+
from hashlib import md5
|
|
25
|
+
from collections import defaultdict
|
|
26
|
+
from zipfile import ZipFile
|
|
27
|
+
from abc import ABC, abstractmethod
|
|
28
|
+
from re import sub
|
|
29
|
+
from textwrap import wrap
|
|
30
|
+
|
|
31
|
+
if TYPE_CHECKING:
|
|
32
|
+
from ..retrieval.datasets import EbrainsDataset
|
|
33
|
+
TypeDataset = EbrainsDataset
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
class ParseLiveQueryIdException(Exception):
    """Raised when a serialized live-query feature id cannot be parsed."""
    pass


class EncodeLiveQueryIdException(Exception):
    """Raised when a query context cannot be serialized into a live-query id."""
    pass


class NotFoundException(Exception):
    """Raised when no feature matching a requested id can be found."""
    pass


class ParseCompoundFeatureIdException(Exception):
    """Raised when a compound feature id cannot be parsed."""
    pass
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
_README_TMPL = """
|
|
53
|
+
Downloaded from siibra toolsuite.
|
|
54
|
+
siibra-python version: {version}
|
|
55
|
+
|
|
56
|
+
All releated resources (e.g. doi, web resources) are categorized under publications.
|
|
57
|
+
|
|
58
|
+
Name
|
|
59
|
+
----
|
|
60
|
+
{name}
|
|
61
|
+
|
|
62
|
+
Description
|
|
63
|
+
-----------
|
|
64
|
+
{description}
|
|
65
|
+
|
|
66
|
+
Modality
|
|
67
|
+
--------
|
|
68
|
+
{modality}
|
|
69
|
+
|
|
70
|
+
{publications}
|
|
71
|
+
"""
|
|
72
|
+
_README_PUBLICATIONS = """
|
|
73
|
+
Publications
|
|
74
|
+
------------
|
|
75
|
+
{doi}
|
|
76
|
+
|
|
77
|
+
{ebrains_page}
|
|
78
|
+
|
|
79
|
+
{authors}
|
|
80
|
+
|
|
81
|
+
{publication_desc}
|
|
82
|
+
|
|
83
|
+
"""
|
|
84
|
+
|
|
85
|
+
|
|
86
|
+
class Feature:
    """
    Base class for anatomically anchored data features.
    """

    # Index of feature class inheritance: maps each class to all of its
    # (transitive) subclasses; populated by __init_subclass__.
    _SUBCLASSES: Dict[Type['Feature'], List[Type['Feature']]] = defaultdict(list)
    # Registry of feature classes grouped by their category string.
    _CATEGORIZED: Dict[str, Type['InstanceTable']] = defaultdict(InstanceTable)

    category: str = None  # set per subclass via __init_subclass__

    def __init__(
        self,
        modality: str,
        description: str,
        anchor: _anchor.AnatomicalAnchor,
        datasets: List['TypeDataset'] = None,
        id: str = None,
        prerelease: bool = False,
    ):
        """
        Parameters
        ----------
        modality: str
            A textual description of the type of measured information
        description: str
            A textual description of the feature.
        anchor: AnatomicalAnchor
            The anatomical structure this feature is anchored to.
        datasets : list
            list of datasets corresponding to this feature
        id: str, optional
            Predefined identifier; when omitted, `id` is derived lazily
            from the dataset id, name and anchor.
        prerelease: bool, default: False
            Marks the feature as prerelease data (reflected in its name).
        """
        self._modality_cached = modality
        self._description = description
        self._anchor_cached = anchor
        # BUGFIX: the default was previously a shared mutable list
        # (`datasets=[]`), which all instances created without an explicit
        # argument would alias. Normalize None to a fresh list instead.
        self.datasets = datasets if datasets is not None else []
        self._id = id
        self._prerelease = prerelease
|
|
122
|
+
|
|
123
|
+
@property
def modality(self):
    """The type of measured information described by this feature."""
    # allows subclasses to implement lazy loading of the modality
    return self._modality_cached

@property
def anchor(self):
    """The anatomical anchor tying this feature to brain structures."""
    # allows subclasses to implement lazy loading of an anchor
    return self._anchor_cached
|
|
132
|
+
|
|
133
|
+
def __init_subclass__(cls, configuration_folder=None, category=None, do_not_index=False, **kwargs):
    """
    Register a new Feature subclass in the class hierarchy index.

    Parameters
    ----------
    configuration_folder: str, optional
        Folder in the siibra configuration from which preconfigured
        instances of this class are built.
    category: str, optional
        Category name under which the class is registered in _CATEGORIZED.
    do_not_index: bool, default: False
        Skip registration in the subclass index (see note below).
    """
    # Feature._SUBCLASSES serves as an index where feature class inheritance is cached.
    # When a user queries a branch of the hierarchy, all children will also be
    # queried. There are use cases where such behavior is not desired (e.g.
    # ProxyFeature, which wraps a livequery feature's id to capture the query
    # context). The do_not_index flag allows this default indexing to be
    # toggled off.

    if do_not_index is False:

        # extend the subclass lists;
        # iterate over the full mro, not just immediate base classes
        for BaseCls in cls.__mro__:
            # some base classes may not be subclasses of Feature; ignore these
            if not issubclass(BaseCls, Feature):
                continue
            cls._SUBCLASSES[BaseCls].append(cls)

    # per-class state: live query registry, instance cache, config source
    cls._live_queries = []
    cls._preconfigured_instances = None
    cls._configuration_folder = configuration_folder
    cls.category = category
    if category is not None:
        cls._CATEGORIZED[category].add(cls.__name__, cls)
    return super().__init_subclass__(**kwargs)
|
|
158
|
+
|
|
159
|
+
@classmethod
def _get_subclasses(cls):
    """Map each registered feature class name to its class object."""
    return dict((subclass.__name__, subclass) for subclass in cls._SUBCLASSES)
|
|
162
|
+
|
|
163
|
+
@property
def description(self):
    """Allows subclasses to overwrite the description with a function call."""
    # an explicitly set description wins; otherwise fall back to the first
    # dataset that carries a non-empty description
    if self._description:
        return self._description
    return next(
        (ds.description for ds in self.datasets if ds.description),
        ''
    )
|
|
172
|
+
|
|
173
|
+
@property
def LICENSE(self) -> str:
    """Aggregate license strings from all attached datasets, one per line."""
    collected = []
    for ds in self.datasets:
        # skip missing or placeholder license information
        if ds.LICENSE is None or ds.LICENSE == "No license information is found.":
            continue
        if isinstance(ds.LICENSE, str):
            collected.append(ds.LICENSE)
        if isinstance(ds.LICENSE, list):
            collected.extend(ds.LICENSE)
    if not collected:
        logger.warning("No license information is found.")
        return ""
    if len(collected) > 1:
        logger.info("Found multiple licenses corresponding to datasets.")
    return '\n'.join(collected)
|
|
189
|
+
|
|
190
|
+
@property
def urls(self) -> List[str]:
    """The list of URLs (including DOIs) associated with this feature."""
    collected = []
    for ds in self.datasets:
        for url_spec in ds.urls:
            collected.append(url_spec.get("url"))
    return collected
|
|
198
|
+
|
|
199
|
+
@property
def authors(self):
    """Names of all contributors across the attached datasets."""
    names = []
    for ds in self.datasets:
        names.extend(person['name'] for person in ds.contributors)
    return names
|
|
206
|
+
|
|
207
|
+
@property
def name(self):
    """Returns a short human-readable name of this feature."""
    # split the CamelCase class name into words,
    # e.g. "CellDensityProfile" -> "Cell Density Profile"
    readable_class_name = sub("([a-z])([A-Z])", r"\g<1> \g<2>", self.__class__.__name__)
    # re-join "Big Brain" into the canonical spelling "BigBrain".
    # BUGFIX: the original pattern used "[b,B]" which also matched a literal
    # comma; "[bB]" expresses the intended alternation.
    name_ = sub("([bB]ig [bB]rain)", "BigBrain", readable_class_name)
    return name_ if not self._prerelease else f"[PRERELEASE] {name_}"
|
|
213
|
+
|
|
214
|
+
@classmethod
def _get_instances(cls, **kwargs) -> List['Feature']:
    """
    Retrieve objects of a particular feature subclass.
    Objects can be preconfigured in the configuration,
    or delivered by Live queries.
    """
    # classes registered with do_not_index (e.g. ProxyFeature) lack the
    # attribute entirely and provide no preconfigured instances
    if not hasattr(cls, "_preconfigured_instances"):
        return []

    # return the cached result of a previous build
    if cls._preconfigured_instances is not None:
        return cls._preconfigured_instances

    # no configuration folder declared -> nothing to build; cache empty list
    if cls._configuration_folder is None:
        cls._preconfigured_instances = []
        return cls._preconfigured_instances

    # deferred import to avoid a circular dependency at module load time
    from ..configuration.configuration import Configuration
    conf = Configuration()
    # ensure the cache is dropped when the configuration changes
    Configuration.register_cleanup(cls._clean_instances)
    assert cls._configuration_folder in conf.folders
    cls._preconfigured_instances = [
        o for o in conf.build_objects(cls._configuration_folder)
        if isinstance(o, cls)
    ]
    logger.debug(
        f"Built {len(cls._preconfigured_instances)} preconfigured {cls.__name__} "
        f"objects from {cls._configuration_folder}."
    )
    return cls._preconfigured_instances
|
|
244
|
+
|
|
245
|
+
def plot(self, *args, **kwargs):
    """Feature subclasses override this with their customized plot methods."""
    # base class deliberately has no plot; subclasses define their own
    raise NotImplementedError("Generic feature class does not have a standardized plot.")
|
|
248
|
+
|
|
249
|
+
@classmethod
def _clean_instances(cls):
    """ Removes all instantiated object instances"""
    # resetting to None forces _get_instances() to rebuild from configuration
    cls._preconfigured_instances = None
|
|
253
|
+
|
|
254
|
+
def matches(
    self,
    concept: Union[structure.BrainStructure, space.Space],
) -> bool:
    """
    Match the feature's anatomical anchor against the given query concept.
    Record the most recently matched concept for inspection by the caller.

    Parameters
    ----------
    concept: BrainStructure or Space
        The query concept to compare the anchor with.

    Returns
    -------
    bool
        True if the anchor matches the concept.
    """
    # TODO: storing the last matched concept is not ideal; it may cause
    # problems in multithreaded use.
    # BUGFIX: the original fell through to
    # `self.anchor._last_matched_concept = None` even when `self.anchor`
    # was None, raising AttributeError. Guard against a missing anchor.
    if self.anchor is None:
        return False
    if self.anchor.matches(concept):
        self.anchor._last_matched_concept = concept
        return True
    self.anchor._last_matched_concept = None
    return False
|
|
268
|
+
|
|
269
|
+
@property
def last_match_result(self):
    "The result of the last anchor comparison to a BrainStructure."
    # None when the feature has no anchor at all
    return None if self.anchor is None else self.anchor.last_match_result

@property
def last_match_description(self):
    "The description of the last anchor comparison to a BrainStructure."
    # empty string when the feature has no anchor at all
    return "" if self.anchor is None else self.anchor.last_match_description
|
|
278
|
+
|
|
279
|
+
@property
def id(self):
    """Stable identifier: preset value, or dataset id plus an md5 digest of name and anchor."""
    if self._id:
        return self._id

    # strip the prerelease marker so the digest is stable across release states
    name_ = self.name
    if self._prerelease:
        name_ = name_.replace("[PRERELEASE] ", "")

    # prefix with the id of the first dataset that carries one
    prefix = ''
    for ds in self.datasets:
        if hasattr(ds, "id"):
            prefix = ds.id + '--'
            break
    digest = md5(f"{name_} - {self.anchor}".encode("utf-8")).hexdigest()
    return prefix + digest
|
|
297
|
+
|
|
298
|
+
def _to_zip(self, fh: ZipFile):
    """
    Internal implementation. Subclasses can override but call super()._to_zip(fh).
    This allows all classes in the __mro__ to have the opportunity to append files
    of interest.

    Parameters
    ----------
    fh: ZipFile
        Open archive to which the README is appended.
    """
    # elements of a compound share one README; write it only once per archive
    if isinstance(self, Compoundable) and "README.md" in fh.namelist():
        return
    # BUGFIX: the deduplicated entries were joined directly from sets, whose
    # iteration order varies between runs (hash randomization); sort them so
    # the README content is deterministic.
    ebrains_page = "\n".join(sorted(
        {ds.ebrains_page for ds in self.datasets if getattr(ds, "ebrains_page", None)}
    ))
    doi = "\n".join(sorted({
        u.get("url")
        for ds in self.datasets if ds.urls
        for u in ds.urls
    }))
    authors = ", ".join(sorted({
        cont.get('name')
        for ds in self.datasets if ds.contributors
        for cont in ds.contributors
    }))
    publication_desc = "\n".join(sorted({ds.description for ds in self.datasets}))
    if (ebrains_page or doi) and authors:
        publications = _README_PUBLICATIONS.format(
            ebrains_page="EBRAINS page\n" + ebrains_page if ebrains_page else "",
            doi="DOI\n" + doi if doi else "",
            authors="Authors\n" + authors if authors else "",
            publication_desc="Publication description\n" + publication_desc if publication_desc else ""
        )
    else:
        publications = "Note: could not obtain any publication information. The data may not have been published yet."
    fh.writestr(
        "README.md",
        _README_TMPL.format(
            version=__version__,
            name=self.name,
            description=self.description,
            modality=self.modality,
            publications=publications
        )
    )
|
|
339
|
+
|
|
340
|
+
def to_zip(self, filelike: Union[str, BinaryIO]):
    """
    Export as a zip archive.

    Parameters
    ----------
    filelike: str or path
        Filelike to write the zip file. User is responsible to ensure the
        correct extension (.zip) is set.
    """
    # BUGFIX: use a context manager so the archive is closed (and the
    # underlying file handle released) even when _to_zip raises.
    with ZipFile(filelike, "w") as fh:
        self._to_zip(fh)
|
|
353
|
+
|
|
354
|
+
@staticmethod
def _serialize_query_context(feat: 'Feature', concept: concept.AtlasConcept) -> str:
    """
    Serialize feature from livequery and query context.

    It is currently impossible to retrieve a livequery with a generic UUID.
    As such, the query context (e.g. region, space or parcellation) needs to
    be encoded in the id.

    Whilst it is possible to (de)serialize *any* queries, the method is set up to only serialize
    livequery features.

    The serialized livequery id follows the following pattern:

    <livequeryid_version>::<feature_cls_name>::<query_context>::<unserialized_id>

    Where:

    - livequeryid_version: version of the serialization. (e.g. lq0)
    - feature_cls_name: class name to query. (e.g. BigBrainIntensityProfile)
    - query_context: string to retrieve atlas concept in the query context. Can be one of the following:
        - s:<space_id>
        - p:<parcellation_id>
        - p:<parcellation_id>::r:<region_id>
    - unserialized_id: id prior to serialization

    Raises
    ------
    EncodeLiveQueryIdException
        If the feature class is not a live-query feature.

    See test/features/test_feature.py for tests and usages.
    """
    # only classes registered in the subclass index carry _live_queries
    if not hasattr(feat.__class__, '_live_queries'):
        raise EncodeLiveQueryIdException(f"generate_livequery_featureid can only be used on live queries, but {feat.__class__.__name__} is not.")

    encoded_c = Feature._encode_concept(concept)

    return f"lq0::{feat.__class__.__name__}::{encoded_c}::{feat.id}"
|
|
388
|
+
|
|
389
|
+
@classmethod
def _deserialize_query_context(cls, feature_id: str) -> Tuple[Type['Feature'], concept.AtlasConcept, str]:
    """
    Deserialize id into query context.

    See docstring of serialize_query_context for context.

    Raises
    ------
    ParseLiveQueryIdException
        If the id does not follow the lq0 scheme, or the embedded class
        name cannot be resolved to a feature type.
    """
    lq_version, *rest = feature_id.split("::")
    if lq_version != "lq0":
        raise ParseLiveQueryIdException("livequery id must start with lq0::")

    # middle segments encode the query concept; the final one is the
    # original (unserialized) feature id
    clsname, *concepts, fid = rest

    Features = cls._parse_featuretype(clsname)

    if len(Features) == 0:
        raise ParseLiveQueryIdException(f"classname {clsname!r} could not be parsed correctly. {feature_id!r}")
    # take the first (alphabetically sorted) match
    F = Features[0]

    concept = cls._decode_concept(concepts)

    return (F, concept, fid)
|
|
411
|
+
|
|
412
|
+
@staticmethod
def _encode_concept(concept: concept.AtlasConcept):
    """
    Encode a query concept into the string form used inside serialized
    live-query feature ids (see _serialize_query_context).

    Raises
    ------
    EncodeLiveQueryIdException
        If the concept is of a type that cannot be encoded.
    """
    from ..locations import Location
    encoded_c = []
    if isinstance(concept, space.Space):
        encoded_c.append(f"s:{concept.id}")
    elif isinstance(concept, parcellation.Parcellation):
        encoded_c.append(f"p:{concept.id}")
    elif isinstance(concept, region.Region):
        # regions are encoded together with their parcellation so the
        # region can be looked up again during decoding
        encoded_c.append(f"p:{concept.parcellation.id}")
        encoded_c.append(f"r:{concept.name}")
    elif isinstance(concept, volume.Volume):
        encoded_c.append(f"v:{concept.name}")
    elif isinstance(concept, Location):
        # BUGFIX: the original formatted the Location *class* object
        # (f"loc:{Location}") instead of the location instance passed in.
        encoded_c.append(f"loc:{concept}")

    if len(encoded_c) == 0:
        raise EncodeLiveQueryIdException("no concept is encoded")

    return '::'.join(encoded_c)
|
|
432
|
+
|
|
433
|
+
@classmethod
def _decode_concept(cls, concepts: List[str]) -> concept.AtlasConcept:
    """
    Decode the concept segments of a serialized feature id back into an
    atlas concept (space, parcellation, or region).

    Raises
    ------
    ParseLiveQueryIdException or ParseCompoundFeatureIdException
        On conflicting or incomplete concept specifications.
    """
    # choose exception to divert try-except correctly
    if issubclass(cls, CompoundFeature):
        exception = ParseCompoundFeatureIdException
    else:
        exception = ParseLiveQueryIdException

    concept = None
    for c in concepts:
        if c.startswith("s:"):
            if concept is not None:
                raise exception("Conflicting spec.")
            concept = space.Space.registry()[c.replace("s:", "")]
        if c.startswith("p:"):
            if concept is not None:
                raise exception("Conflicting spec.")
            concept = parcellation.Parcellation.registry()[c.replace("p:", "")]
        if c.startswith("r:"):
            if concept is None:
                # BUGFIX: the original message contained a literal
                # "{feature_id!r}" placeholder in a non-f-string (and
                # feature_id is not in scope here); report the concept
                # segments instead.
                raise exception(
                    f"region has been encoded, but parcellation has not been populated in the encoding, {concepts!r}"
                )
            if not isinstance(concept, parcellation.Parcellation):
                raise exception("region has been encoded, but previous encoded concept is not parcellation")
            concept = concept.get_region(c.replace("r:", ""))

    if concept is None:
        raise ParseLiveQueryIdException("concept was not populated in feature id")
    return concept
|
|
461
|
+
|
|
462
|
+
@classmethod
def _parse_featuretype(cls, feature_type: str) -> List[Type['Feature']]:
    """Resolve a textual feature-type spec into matching Feature subclasses,
    sorted by class name."""
    words = feature_type.split()
    matched = set()
    for FeatCls, feattypes in cls._SUBCLASSES.items():
        # every word of the spec must occur in the class name (case-insensitive)
        if all(w.lower() in FeatCls.__name__.lower() for w in words):
            matched.update(feattypes)
    ftypes = sorted(matched, key=lambda t: t.__name__)
    if len(ftypes) > 1:
        # with multiple hits, restrict to categorized feature types
        return [ft for ft in ftypes if getattr(ft, 'category')]
    else:
        return list(ftypes)
|
|
474
|
+
|
|
475
|
+
@classmethod
def _livequery(cls, concept: Union[region.Region, parcellation.Parcellation, space.Space], **kwargs) -> List['Feature']:
    """
    Run all live queries registered for this feature class against the
    given concept, wrapping each result so its id captures the query
    context (see _serialize_query_context).
    """
    # classes created with do_not_index=True carry no live-query registry
    if not hasattr(cls, "_live_queries"):
        return []

    live_instances = []
    for QueryType in cls._live_queries:
        # human-readable rendering of the query kwargs for the debug log
        argstr = f" ({', '.join('='.join(map(str, _)) for _ in kwargs.items())})" \
            if len(kwargs) > 0 else ""
        logger.debug(
            f"Running live query for {QueryType.feature_type.__name__} "
            f"objects linked to {str(concept)}{argstr}"
        )
        q = QueryType(**kwargs)
        # spaces are queried via their template volume
        if isinstance(concept, space.Space):
            features = q.query(concept.get_template())
        else:
            features = q.query(concept)
        live_instances.extend(
            Feature._wrap_livequery_feature(f, Feature._serialize_query_context(f, concept))
            for f in features
        )

    return live_instances
|
|
499
|
+
|
|
500
|
+
@classmethod
def _match(
    cls,
    concept: Union[structure.BrainStructure, space.Space],
    feature_type: Union[str, Type['Feature'], list],
    **kwargs
) -> List['Feature']:
    """
    Retrieve data features of the requested feature type (i.e. modality).
    This will
    - call Feature.match(concept) for any registered preconfigured features
    - run any registered live queries
    The preconfigured and live query instances are merged and returned as a list.

    If multiple feature types are given, recurse for each of them.


    Parameters
    ----------
    concept: AtlasConcept
        An anatomical concept, typically a brain region or parcellation.
    feature_type: subclass of Feature, str
        specifies the type of features ("modality")
    """
    if isinstance(feature_type, list):
        # a list of feature types is given, collect match results on those
        assert all(
            (isinstance(t, str) or issubclass(t, cls))
            for t in feature_type
        )
        # dict.fromkeys deduplicates while preserving order
        return list(dict.fromkeys(
            sum((
                cls._match(concept, t, **kwargs) for t in feature_type
            ), [])
        ))

    if isinstance(feature_type, str):
        # feature type given as a string. Decode the corresponding class.
        # Some string inputs, such as connectivity, may hit multiple matches.
        ftype_candidates = cls._parse_featuretype(feature_type)
        if len(ftype_candidates) == 0:
            raise ValueError(
                f"feature_type {str(feature_type)} did not match with any "
                f"features. Available features are: {', '.join(cls._SUBCLASSES.keys())}"
            )
        logger.info(
            f"'{feature_type}' decoded as feature type/s: "
            f"{[c.__name__ for c in ftype_candidates]}."
        )
        return cls._match(concept, ftype_candidates, **kwargs)

    assert issubclass(feature_type, Feature)

    # At this stage, no recursion is needed.
    # We expect a specific supported feature type is to be matched now.
    if not isinstance(concept, (structure.BrainStructure, space.Space)):
        raise ValueError(
            f"{concept.__class__.__name__} cannot be used for feature queries as it is not a `BrainStructure` or a `Space` type."
        )

    # Collect any preconfigured instances of the requested feature type
    # which match the query concept
    instances = [
        instance
        for f_type in cls._SUBCLASSES[feature_type]
        for instance in f_type._get_instances()
    ]

    preconfigured_instances = [
        f for f in siibra_tqdm(
            instances,
            desc=f"Matching {feature_type.__name__} to {concept}",
            total=len(instances),
            disable=(not instances)
        )
        if f.matches(concept)
    ]

    # Then run any registered live queries for the requested feature type
    # with the query concept.
    live_instances = feature_type._livequery(concept, **kwargs)

    # deduplicate (order-preserving) and group compoundable features
    results = list(dict.fromkeys(preconfigured_instances + live_instances))
    return CompoundFeature._compound(results, concept)
|
|
584
|
+
|
|
585
|
+
@classmethod
def _get_instance_by_id(cls, feature_id: str, **kwargs):
    """
    Retrieve a single feature instance by its id.

    Tries, in order: compound feature ids, serialized live-query ids,
    and finally the ids of preconfigured instances.

    Raises
    ------
    NotFoundException
        If no feature with the given id exists.
    RuntimeError
        If the id matches more than one instance.
    """
    try:
        return CompoundFeature._get_instance_by_id(feature_id, **kwargs)
    except ParseCompoundFeatureIdException:
        # not a compound id; fall through to the live-query scheme
        pass

    try:
        F, concept, fid = cls._deserialize_query_context(feature_id)
        return [
            f
            for f in F._livequery(concept, **kwargs)
            if f.id == fid or f.id == feature_id
        ][0]
    except ParseLiveQueryIdException:
        # not a live-query id; search the preconfigured instances
        candidates = [
            inst
            for Cls in Feature._SUBCLASSES[Feature]
            for inst in Cls._get_instances()
            if inst.id == feature_id
        ]
        if len(candidates) == 0:
            # BUGFIX: message previously read "wth" instead of "with"
            raise NotFoundException(f"No feature instance with {feature_id} found.")
        if len(candidates) == 1:
            return candidates[0]
        else:
            raise RuntimeError(
                f"Multiple feature instance match {feature_id}",
                [c.name for c in candidates]
            )
    except IndexError:
        # live query ran but returned no feature with the requested id;
        # raise with a message instead of the previous bare exception
        raise NotFoundException(f"No feature instance with {feature_id} found.")
|
|
617
|
+
|
|
618
|
+
@staticmethod
def _wrap_livequery_feature(feature: 'Feature', fid: str):
    """
    Wrap live query features, override only the id attribute.

    Some features do not have setters for the id property. The ProxyFeature class
    allows the id property to be overridden without touching the underlying class.

    See docstring of serialize_query_context for further context.

    Parameters
    ----------
    feature: Feature
        The live-query feature instance to wrap.
    fid: str
        The serialized id (with query context) the proxy should report.
    """
    # do_not_index keeps this throwaway subclass out of the _SUBCLASSES index
    class ProxyFeature(feature.__class__, do_not_index=True):

        # override the __class__ property, since some feature code paths
        # access inst.__class__ and expect the wrapped class
        @property
        def __class__(self):
            return self.inst.__class__

        def __init__(self, inst: Feature, fid: str):
            # NOTE: deliberately does not call super().__init__;
            # all other attribute access is delegated via __getattr__
            self.inst = inst
            self.fid = fid
            self.category = inst.category

        def __str__(self) -> str:
            return self.inst.__str__()

        def __repr__(self) -> str:
            return self.inst.__repr__()

        @property
        def id(self):
            # report the serialized (context-carrying) id
            return self.fid

        def __getattr__(self, __name: str):
            # delegate everything else to the wrapped instance
            return getattr(self.inst, __name)

    return ProxyFeature(feature, fid)
|
|
655
|
+
|
|
656
|
+
|
|
657
|
+
class Compoundable(ABC):
    """
    Base class for structures which allow compounding.
    Determines the necessary grouping and compounding attributes.
    """
    _filter_attrs = []  # the attributes to filter this instance of feature
    _compound_attrs = []  # `compound_key` has to be created from `filter_attributes`

    def __init_subclass__(cls, **kwargs):
        assert len(cls._filter_attrs) > 0, "All compoundable classes have to have `_filter_attrs` defined."
        assert len(cls._compound_attrs) > 0, "All compoundable classes have to have `_compound_attrs` defined."
        assert all(attr in cls._filter_attrs for attr in cls._compound_attrs), "`_compound_attrs` must be a subset of `_filter_attrs`."
        # attributes that index elements within a compound are exactly the
        # filter attributes not used for the compound key
        cls._indexing_attrs = [
            attr
            for attr in cls._filter_attrs
            if attr not in cls._compound_attrs
        ]
        return super().__init_subclass__(**kwargs)

    def __init__(self):
        assert all(hasattr(self, attr) for attr in self._filter_attrs), "`_filter_attrs` can only consist of the attributes of the class."

    @property
    def filter_attributes(self) -> Dict[str, Any]:
        """
        Attributes that help distinguish or combine features of the same type
        among others.
        """
        return {attr: getattr(self, attr) for attr in self._filter_attrs}

    @property
    def _compound_key(self) -> Tuple[Any]:
        """
        A tuple of values that define the basis for compounding elements of
        the same type.
        """
        return tuple([self.filter_attributes[attr] for attr in self._compound_attrs])

    @property
    def _element_index(self) -> Any:
        """
        Unique index of this compoundable feature as an element of the Compound.
        Must be hashable.
        """
        index_ = [self.filter_attributes[attr] for attr in self._indexing_attrs]
        index = index_[0] if len(index_) == 1 else tuple(index_)
        # BUGFIX: the original `assert hash(index)` wrongly rejected hashable
        # values whose hash happens to be falsy (e.g. 0 or ""). Calling
        # hash() is the correct hashability check; translate the TypeError
        # for unhashable values into the documented assertion failure.
        try:
            hash(index)
        except TypeError:
            raise AssertionError("`_element_index` of a compoundable must be hashable.")
        return index

    @classmethod
    def _merge_anchors(cls, anchors: List[_anchor.AnatomicalAnchor]):
        # NOTE(review): relies on AnatomicalAnchor supporting addition with
        # the implicit integer start value of sum() — confirm __radd__ exists
        return sum(anchors)

    @classmethod
    @abstractmethod
    def _merge_elements(
        cls,
        elements,
        description: str,
        modality: str,
        anchor: _anchor.AnatomicalAnchor
    ) -> Feature:
        """
        Compute the merge data and create a merged instance from a set of
        elements of this class. This will be used by CompoundFeature to
        create the aggregated data and plot it. For example, to compute an
        average connectivity matrix from a set of subfeatures, we create a
        RegionalConnectivity feature.
        """
        raise NotImplementedError
|
|
727
|
+
|
|
728
|
+
|
|
729
|
+
class CompoundFeature(Feature):
    """
    A compound aggregating multiple features of the same type, forming its
    elements. The anatomical anchors and data of the features is merged.
    Features need to subclass "Compoundable" to allow aggregation
    into a compound feature.
    """

    def __init__(
        self,
        elements: List['Feature'],
        queryconcept: Union[region.Region, parcellation.Parcellation, space.Space],
    ):
        """
        A compound of several features of the same type with an anchor created
        as a sum of adjoinable anchors.

        Parameters
        ----------
        elements : List[Feature]
            Features of one Compoundable type sharing the same compound key.
        queryconcept : Region | Parcellation | Space
            The concept used for the query that produced the elements.
        """
        self._feature_type = elements[0].__class__
        assert all(isinstance(f, self._feature_type) for f in elements), NotImplementedError("Cannot compound features of different types.")
        self.category = elements[0].category  # same feature types have the same category
        assert issubclass(self._feature_type, Compoundable), NotImplementedError(f"Cannot compound {self._feature_type}.")

        modality = elements[0].modality
        assert all(f.modality == modality for f in elements), NotImplementedError("Cannot compound features of different modalities.")

        # all elements must agree on the compounding attributes
        compound_keys = {element._compound_key for element in elements}
        assert len(compound_keys) == 1, ValueError(
            "Only features with identical compound_key can be aggregated."
        )
        self._compounding_attributes = {
            attr: elements[0].filter_attributes[attr]
            for attr in elements[0]._compound_attrs
        }

        # elements are keyed by their unique element index
        self._elements = {f._element_index: f for f in elements}
        assert len(self._elements) == len(elements), RuntimeError(
            "Element indices should be unique to each element within a CompoundFeature."
        )

        Feature.__init__(
            self,
            modality=modality,
            description="\n".join({f.description for f in elements}),
            anchor=self._feature_type._merge_anchors([f.anchor for f in elements]),
            datasets=list(dict.fromkeys([ds for f in elements for ds in f.datasets])),
            prerelease=all(f._prerelease for f in elements),
        )
        self._queryconcept = queryconcept
        self._merged_feature_cached = None

    def __getattr__(self, attr: str) -> Any:
        """Expose compounding attributes explicitly."""
        # Guard against infinite recursion when `_compounding_attributes` is
        # itself not yet set (e.g. during unpickling/copying, before __init__
        # has run): looking it up below would re-enter __getattr__.
        if attr == "_compounding_attributes":
            raise AttributeError(attr)
        if attr in self._compounding_attributes:
            return self._compounding_attributes[attr]
        if hasattr(self._feature_type, attr):
            raise AttributeError(
                f"{self.__class__.__name__} does not have access to '{attr}' "
                "since it does not have the same value for all its elements."
            )
        raise AttributeError(
            f"{self.__class__.__name__} or {self._feature_type.__name__} have no attribute {attr}."
        )

    def __dir__(self):
        # advertise the dynamically exposed compounding attributes
        return super().__dir__() + list(self._compounding_attributes.keys())

    def plot(self, *args, **kwargs):
        """Plot the merged feature, marking the data as derived/averaged."""
        kwargs["title"] = "(Derived data: averaged)\n" + kwargs.get(
            "title",
            "\n".join(wrap(self.name, kwargs.pop("textwrap", 40)))
        )
        return self._get_merged_feature().plot(*args, **kwargs)

    def _get_merged_feature(self) -> Feature:
        """Lazily compute and cache the merged feature from all elements."""
        if self._merged_feature_cached is None:
            logger.info(f"{self.__class__.__name__}.data averages the data of each element.")
            assert issubclass(self.feature_type, Compoundable)
            self._merged_feature_cached = self.feature_type._merge_elements(
                elements=self.elements,
                modality=self.modality,
                description=self.description,
                anchor=self.anchor
            )
        return self._merged_feature_cached

    @property
    def data(self):
        # the compound's data is the merged (e.g. averaged) element data
        return self._get_merged_feature().data

    @property
    def indexing_attributes(self) -> Tuple[str]:
        "The attributes determining the index of this CompoundFeature's elements."
        return tuple(self.elements[0]._indexing_attrs)

    @property
    def elements(self):
        """Features that make up the compound feature."""
        return list(self._elements.values())

    @property
    def indices(self):
        """Unique indices to features making up the CompoundFeature."""
        return list(self._elements.keys())

    @property
    def feature_type(self) -> Type:
        """Feature type of the elements forming the CompoundFeature."""
        return self._feature_type

    @property
    def name(self) -> str:
        """Returns a short human-readable name of this feature."""
        # split CamelCase class name into words; normalize "Big Brain"
        readable_feature_type = sub(
            "([b,B]ig [b,B]rain)", "BigBrain",
            sub("([a-z])([A-Z])", r"\g<1> \g<2>", self.feature_type.__name__)
        )
        groupby = ', '.join([
            f"{k}: {v}"
            for k, v in self._compounding_attributes.items()
            if k != 'modality'
        ])
        cf_name = f"{len(self)} {readable_feature_type} features{f' {groupby}' if groupby else ''}"
        return cf_name if not self._prerelease else f"[PRERELEASE] {cf_name}"

    @property
    def id(self) -> str:
        # hash the stable (non-prerelease) name so the id survives release
        if self._prerelease:
            name_ = self.name.replace("[PRERELEASE] ", "")
        else:
            name_ = self.name
        return "::".join((
            "cf0",
            f"{self._feature_type.__name__}",
            self._encode_concept(self._queryconcept),
            self.datasets[0].id if self.datasets else "nodsid",
            md5(name_.encode("utf-8")).hexdigest()
        ))

    def __iter__(self) -> Iterator['Feature']:
        """Iterate over subfeatures"""
        return self.elements.__iter__()

    def __len__(self):
        """Number of subfeatures making the CompoundFeature"""
        return len(self._elements)

    def __getitem__(self, index: Any):
        """Get the nth element in the compound."""
        return self.elements[index]

    def get_element(self, index: Any):
        """Get the element with its unique index in the compound."""
        try:
            return self._elements[index]
        except Exception:
            raise IndexError(f"No feature with index '{index}' in this compound.")

    @classmethod
    def _compound(
        cls,
        features: List['Feature'],
        queryconcept: Union[region.Region, parcellation.Parcellation, space.Space]
    ) -> List['CompoundFeature']:
        """
        Compound features of the same type based on their `_compound_key`.

        If there are features that are not of type `Compoundable`, they are
        returned as is.

        Parameters
        ----------
        features: List[Feature]
            Feature instances to be compounded.
        queryconcept:
            AtlasConcept used for the query.

        Returns
        -------
        List[CompoundFeature | Feature]
        """
        non_compound_features = []
        grouped_features = defaultdict(list)
        for f in features:
            if isinstance(f, Compoundable):
                grouped_features[f._compound_key].append(f)
                continue
            non_compound_features.append(f)
        return non_compound_features + [
            cls(fts, queryconcept)
            for fts in grouped_features.values() if fts
        ]

    @classmethod
    def _get_instance_by_id(cls, feature_id: str, **kwargs):
        """
        Use the feature id to obtain the same feature instance.

        Parameters
        ----------
        feature_id : str

        Returns
        -------
        CompoundFeature

        Raises
        ------
        ParseCompoundFeatureIdException
            If no or multiple matches are found. Or id is not fitting to
            compound features.
        """
        if not feature_id.startswith("cf0::"):
            raise ParseCompoundFeatureIdException("CompoundFeature id must start with cf0::")
        # id layout: cf0::<clsname>::<encoded concept...>::<dataset id>::<hash>
        cf_version, clsname, *queryconcept, dsid, fid = feature_id.split("::")
        assert cf_version == "cf0"
        candidates = [
            f
            for f in Feature._match(
                concept=cls._decode_concept(queryconcept),
                feature_type=clsname
            )
            if f.id == fid or f.id == feature_id
        ]
        if candidates:
            if len(candidates) == 1:
                return candidates[0]
            else:
                raise ParseCompoundFeatureIdException(
                    f"The query with id '{feature_id}' have resulted multiple instances.")
        else:
            raise ParseCompoundFeatureIdException

    def _to_zip(self, fh: ZipFile):
        """Export the compound: base export plus one CSV per element."""
        super()._to_zip(fh)
        for idx, element in siibra_tqdm(self._elements.items(), desc="Exporting elements", unit="element"):
            if '/' in str(idx):
                logger.warning(f"'/' will be replaced with ' ' of the file for element with index {idx}")
            filename = '/'.join([
                str(i).replace('/', ' ')
                for i in (idx if isinstance(idx, tuple) else [idx])
            ])
            # BUGFIX: `filename` was computed but never used, so every element
            # was written to the same archive entry, overwriting all but the
            # last one. Use the per-element filename instead.
            fh.writestr(f"{self.feature_type.__name__}/{filename}.csv", element.data.to_csv())