siibra 0.4a35__py3-none-any.whl → 0.4a46__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of siibra might be problematic.

Files changed (35)
  1. siibra/VERSION +1 -1
  2. siibra/__init__.py +1 -0
  3. siibra/commons.py +38 -17
  4. siibra/configuration/configuration.py +21 -17
  5. siibra/configuration/factory.py +38 -12
  6. siibra/core/atlas.py +11 -8
  7. siibra/core/concept.py +22 -2
  8. siibra/core/parcellation.py +42 -22
  9. siibra/core/region.py +56 -95
  10. siibra/features/anchor.py +7 -4
  11. siibra/features/connectivity/functional_connectivity.py +8 -1
  12. siibra/features/connectivity/regional_connectivity.py +14 -19
  13. siibra/features/dataset/ebrains.py +1 -1
  14. siibra/features/feature.py +193 -29
  15. siibra/features/image/__init__.py +1 -1
  16. siibra/features/image/image.py +1 -0
  17. siibra/features/image/volume_of_interest.py +14 -5
  18. siibra/features/tabular/__init__.py +2 -0
  19. siibra/features/tabular/regional_timeseries_activity.py +213 -0
  20. siibra/livequeries/ebrains.py +2 -3
  21. siibra/locations/location.py +4 -3
  22. siibra/locations/pointset.py +2 -2
  23. siibra/retrieval/datasets.py +73 -3
  24. siibra/retrieval/repositories.py +17 -6
  25. siibra/retrieval/requests.py +68 -61
  26. siibra/volumes/neuroglancer.py +9 -9
  27. siibra/volumes/nifti.py +4 -5
  28. siibra/volumes/parcellationmap.py +157 -97
  29. siibra/volumes/sparsemap.py +27 -31
  30. siibra/volumes/volume.py +1 -1
  31. {siibra-0.4a35.dist-info → siibra-0.4a46.dist-info}/METADATA +2 -1
  32. {siibra-0.4a35.dist-info → siibra-0.4a46.dist-info}/RECORD +35 -34
  33. {siibra-0.4a35.dist-info → siibra-0.4a46.dist-info}/WHEEL +1 -1
  34. {siibra-0.4a35.dist-info → siibra-0.4a46.dist-info}/LICENSE +0 -0
  35. {siibra-0.4a35.dist-info → siibra-0.4a46.dist-info}/top_level.txt +0 -0
siibra/features/feature.py
@@ -15,12 +15,11 @@
 
 from . import anchor as _anchor
 
-from ..commons import logger, InstanceTable
+from ..commons import logger, InstanceTable, siibra_tqdm
 from ..core import concept
 from ..core import space, region, parcellation
 
-from typing import Union, TYPE_CHECKING, List, Dict, Type
-from tqdm import tqdm
+from typing import Union, TYPE_CHECKING, List, Dict, Type, Tuple
 from hashlib import md5
 from collections import defaultdict
 
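The tqdm import is dropped here, and in several hunks below, in favour of a siibra_tqdm helper from siibra/commons.py (whose +38 -17 change is not shown in this excerpt). Presumably it wraps tqdm so the progress bar is silenced unless logging is at INFO or below, folding the old logger.getEffectiveLevel() branch (removed in the match() hunk further down) into a single call; a minimal sketch under that assumption:

    # hypothetical reconstruction of siibra_tqdm, not the released code
    from tqdm import tqdm

    def siibra_tqdm(iterable=None, *args, **kwargs):
        # show the bar only when the logger level is INFO (20) or lower
        kwargs.setdefault("disable", logger.getEffectiveLevel() > 20)
        return tqdm(iterable, *args, **kwargs)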
@@ -29,6 +28,18 @@ if TYPE_CHECKING:
     TypeDataset = EbrainsDataset
 
 
+class ParseLiveQueryIdException(Exception):
+    pass
+
+
+class EncodeLiveQueryIdException(Exception):
+    pass
+
+
+class NotFoundException(Exception):
+    pass
+
+
 class Feature:
     """
     Base class for anatomically anchored data features.
@@ -37,7 +48,7 @@ class Feature:
     SUBCLASSES: Dict[Type['Feature'], List[Type['Feature']]] = defaultdict(list)
 
     CATEGORIZED: Dict[str, Type['InstanceTable']] = defaultdict(InstanceTable)
-
+
     category: str = None
 
     def __init__(
@@ -166,6 +177,121 @@ class Feature:
         prefix = list(id_set)[0] + '--'
         return prefix + md5(self.name.encode("utf-8")).hexdigest()
 
+    @staticmethod
+    def serialize_query_context(feat: 'Feature', concept: concept.AtlasConcept) -> str:
+        """
+        Serialize feature from livequery and query context.
+
+        It is currently impossible to retrieve a livequery with a generic UUID.
+        As such, the query context (e.g. region, space or parcellation) needs to
+        be encoded in the id.
+
+        Whilst it is possible to (de)serialize *any* queries, the method is set up to only serialize
+        livequery features.
+
+        The serialized livequery id follows the pattern:
+
+        <livequeryid_version>::<feature_cls_name>::<query_context>::<unserialized_id>
+
+        Where:
+
+        - livequeryid_version: version of the serialization (e.g. lq0)
+        - feature_cls_name: class name to query (e.g. BigBrainIntensityProfile)
+        - query_context: string to retrieve the atlas concept of the query context. Can be one of the following:
+            - s:<space_id>
+            - p:<parcellation_id>
+            - p:<parcellation_id>::r:<region_id>
+        - unserialized_id: id prior to serialization
+
+        See test/features/test_feature.py for tests and usages.
+        """
+        if not hasattr(feat.__class__, '_live_queries'):
+            raise EncodeLiveQueryIdException(f"serialize_query_context can only be used on live queries, but {feat.__class__.__name__} is not.")
+
+        encoded_c = []
+        if isinstance(concept, space.Space):
+            encoded_c.append(f"s:{concept.id}")
+        elif isinstance(concept, parcellation.Parcellation):
+            encoded_c.append(f"p:{concept.id}")
+        elif isinstance(concept, region.Region):
+            encoded_c.append(f"p:{concept.parcellation.id}")
+            encoded_c.append(f"r:{concept.name}")
+
+        if len(encoded_c) == 0:
+            raise EncodeLiveQueryIdException("no concept is encoded")
+
+        return f"lq0::{feat.__class__.__name__}::{'::'.join(encoded_c)}::{feat.id}"
+
+    @classmethod
+    def deserialize_query_context(Cls, feature_id: str) -> Tuple[Type['Feature'], concept.AtlasConcept, str]:
+        """
+        Deserialize id into query context.
+
+        See docstring of serialize_query_context for context.
+        """
+        lq_version, *rest = feature_id.split("::")
+        if lq_version != "lq0":
+            raise ParseLiveQueryIdException("livequery id must start with lq0::")
+
+        clsname, *concepts, fid = rest
+
+        Features = Cls.parse_featuretype(clsname)
+
+        if len(Features) == 0:
+            raise ParseLiveQueryIdException(f"classname {clsname!r} could not be parsed correctly. {feature_id!r}")
+        F = Features[0]
+
+        concept = None
+        for c in concepts:
+            if c.startswith("s:"):
+                if concept is not None:
+                    raise ParseLiveQueryIdException("Conflicting spec.")
+                concept = space.Space.registry()[c.replace("s:", "")]
+            if c.startswith("p:"):
+                if concept is not None:
+                    raise ParseLiveQueryIdException("Conflicting spec.")
+                concept = parcellation.Parcellation.registry()[c.replace("p:", "")]
+            if c.startswith("r:"):
+                if concept is None:
+                    raise ParseLiveQueryIdException(f"region has been encoded, but parcellation has not been populated in the encoding, {feature_id!r}")
+                if not isinstance(concept, parcellation.Parcellation):
+                    raise ParseLiveQueryIdException("region has been encoded, but previous encoded concept is not parcellation")
+                concept = concept.get_region(c.replace("r:", ""))
+        if concept is None:
+            raise ParseLiveQueryIdException(f"concept was not populated: {feature_id!r}")
+
+        return (F, concept, fid)
+
+    @classmethod
+    def parse_featuretype(cls, feature_type: str) -> List[Type['Feature']]:
+        return [
+            feattype
+            for FeatCls, feattypes in cls.SUBCLASSES.items()
+            if all(w.lower() in FeatCls.__name__.lower() for w in feature_type.split())
+            for feattype in feattypes
+        ]
+
+    @classmethod
+    def livequery(cls, concept: Union[region.Region, parcellation.Parcellation, space.Space], **kwargs) -> List['Feature']:
+        if not hasattr(cls, "_live_queries"):
+            return []
+
+        live_instances = []
+        for QueryType in cls._live_queries:
+            argstr = f" ({', '.join('='.join(map(str,_)) for _ in kwargs.items())})" \
+                if len(kwargs) > 0 else ""
+            logger.info(
+                f"Running live query for {QueryType.feature_type.__name__} "
+                f"objects linked to {str(concept)}{argstr}"
+            )
+            q = QueryType(**kwargs)
+            features = [
+                Feature.wrap_livequery_feature(feat, Feature.serialize_query_context(feat, concept))
+                for feat in q.query(concept)
+            ]
+            live_instances.extend(features)
+        return live_instances
+
     @classmethod
     def match(cls, concept: Union[region.Region, parcellation.Parcellation, space.Space], feature_type: Union[str, Type['Feature'], list], **kwargs) -> List['Feature']:
         """
@@ -181,22 +307,17 @@ class Feature:
         if isinstance(feature_type, list):
             # a list of feature types is given, collect match results on those
             assert all((isinstance(t, str) or issubclass(t, cls)) for t in feature_type)
-            return sum((cls.match(concept, t, **kwargs) for t in feature_type), [])
+            return list(set(sum((cls.match(concept, t, **kwargs) for t in feature_type), [])))
 
         if isinstance(feature_type, str):
             # feature type given as a string. Decode the corresponding class.
             # Some string inputs, such as connectivity, may hit multiple matches
             # In this case
-            candidates = [
-                feattype
-                for FeatCls, feattypes in cls.SUBCLASSES.items()
-                if all(w.lower() in FeatCls.__name__.lower() for w in feature_type.split())
-                for feattype in feattypes
-            ]
+            candidates = cls.parse_featuretype(feature_type)
             if len(candidates) == 0:
                 raise ValueError(f"feature_type {str(feature_type)} did not match with any features. Available features are: {', '.join(cls.SUBCLASSES.keys())}")
 
-            return [feat for c in candidates for feat in cls.match(concept, c, **kwargs)]
+            return list({feat for c in candidates for feat in cls.match(concept, c, **kwargs)})
 
         assert issubclass(feature_type, Feature)
 
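Both return statements now funnel through a set because a string such as "connectivity" can expand to several subclasses whose results overlap; note that this also makes the result order non-deterministic. A hedged usage sketch (siibra.features.get is the public wrapper around Feature.match; the registry key is an assumption):

    import siibra

    julich = siibra.parcellations["julich 2.9"]          # assumed registry key
    feats = siibra.features.get(julich, "connectivity")  # expands via parse_featuretype
    assert len(feats) == len(set(feats))                 # duplicates are removed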
@@ -213,24 +334,30 @@
             for instance in f_type.get_instances()
         ]
 
-        if logger.getEffectiveLevel() > 20:
-            preconfigured_instances = [f for f in instances if f.matches(concept)]
-        else:
-            preconfigured_instances = [f for f in tqdm(instances, desc=msg, total=len(instances)) if f.matches(concept)]
+        preconfigured_instances = [f for f in siibra_tqdm(instances, desc=msg, total=len(instances)) if f.matches(concept)]
 
-        live_instances = []
-        if hasattr(feature_type, "_live_queries"):
-            for QueryType in feature_type._live_queries:
-                argstr = f" ({', '.join('='.join(map(str,_)) for _ in kwargs.items())})" \
-                    if len(kwargs) > 0 else ""
-                logger.info(
-                    f"Running live query for {QueryType.feature_type.__name__} "
-                    f"objects linked to {str(concept)}{argstr}"
-                )
-                q = QueryType(**kwargs)
-                live_instances.extend(q.query(concept))
-
-        return preconfigured_instances + live_instances
+        live_instances = feature_type.livequery(concept, **kwargs)
+
+        return list(set((preconfigured_instances + live_instances)))
+
+    @classmethod
+    def get_instance_by_id(cls, feature_id: str, **kwargs):
+        try:
+            F, concept, fid = cls.deserialize_query_context(feature_id)
+            return [
+                f
+                for f in F.livequery(concept, **kwargs)
+                if f.id == fid or f.id == feature_id
+            ][0]
+        except ParseLiveQueryIdException:
+            return [
+                inst
+                for Cls in Feature.SUBCLASSES[Feature]
+                for inst in Cls.get_instances()
+                if inst.id == feature_id
+            ][0]
+        except IndexError:
+            raise NotFoundException
 
     @classmethod
     def get_ascii_tree(cls):
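A hedged sketch of the new id-based lookup: a lq0-prefixed id re-runs the live query encoded in it, any other id falls back to scanning preconfigured instances, and NotFoundException signals that neither path produced a hit (the id below is a placeholder):

    from siibra.features.feature import Feature, NotFoundException

    try:
        feat = Feature.get_instance_by_id(
            "lq0::BigBrainIntensityProfile::p:<parcellation_id>::r:<region_name>::<original_id>"
        )
    except NotFoundException:
        feat = None  # no preconfigured or live feature carries this id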
@@ -254,3 +381,40 @@
             "%s%s" % (pre, node.name)
             for pre, _, node in RenderTree(tree)
         )
+
+    @staticmethod
+    def wrap_livequery_feature(feature: 'Feature', fid: str):
+        """
+        Wrap live query features, overriding only the id attribute.
+
+        Some features do not have setters for the id property. The ProxyFeature class
+        allows the id property to be overridden without touching the underlying class.
+
+        See docstring of serialize_query_context for further context.
+        """
+        class ProxyFeature(feature.__class__):
+
+            # override the __class__ property, since some feature
+            # instances access inst.__class__
+            @property
+            def __class__(self):
+                return self.inst.__class__
+
+            def __init__(self, inst: Feature, fid: str):
+                self.inst = inst
+                self.fid = fid
+
+            def __str__(self) -> str:
+                return self.inst.__str__()
+
+            def __repr__(self) -> str:
+                return self.inst.__repr__()
+
+            @property
+            def id(self):
+                return self.fid
+
+            def __getattr__(self, __name: str):
+                return getattr(self.inst, __name)
+
+        return ProxyFeature(feature, fid)
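A note on the proxy design: ProxyFeature subclasses the wrapped feature's own class and additionally forwards __class__, so the wrapper is invisible to type checks and only id reports the serialized value. Illustrated with the names from the hunk above:

    wrapped = Feature.wrap_livequery_feature(feat, fid)  # feat, fid as in livequery()
    assert isinstance(wrapped, feat.__class__)           # proxy subclasses the wrapped type
    assert wrapped.__class__ is feat.__class__           # __class__ property hides the proxy
    assert wrapped.id == fid                             # only id is overridden
    assert wrapped.name == feat.name                     # other attributes forwarded via __getattr__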
siibra/features/image/__init__.py
@@ -18,5 +18,5 @@ from .volume_of_interest import (
     BlockfaceVolumeOfInterest,
     PLIVolumeOfInterest,
     MRIVolumeOfInterest,
-    SegmentedVolumeOfInterest
+    # SegmentedVolumeOfInterest
 )
siibra/features/image/image.py
@@ -74,6 +74,7 @@ class Image(feature.Feature, _volume.Volume):
             space_spec=space_spec,
             providers=providers,
             name=name,
+            datasets=datasets,
         )
 
         self._anchor_cached = ImageAnchor(self, region=region)
siibra/features/image/volume_of_interest.py
@@ -52,10 +52,19 @@ class MRIVolumeOfInterest(
         image.Image.__init__(self, **kwargs, modality=modality)
 
 
-class SegmentedVolumeOfInterest(
+class XPCTVolumeOfInterest(
     image.Image,
-    configuration_folder="features/images/vois/segmentation",
-    category="segmentation"
+    configuration_folder="features/images/vois/xpct",
+    category="cellular"
 ):
-    def __init__(self, **kwargs):
-        image.Image.__init__(self, **kwargs, modality="segmentation")
+    def __init__(self, modality, **kwargs):
+        image.Image.__init__(self, **kwargs, modality=modality)
+
+
+# class SegmentedVolumeOfInterest(
+#     image.Image,
+#     configuration_folder="features/images/vois/segmentation",
+#     category="segmentation"
+# ):
+#     def __init__(self, **kwargs):
+#         image.Image.__init__(self, **kwargs, modality="segmentation")
siibra/features/tabular/__init__.py
@@ -20,3 +20,5 @@ from .layerwise_bigbrain_intensities import LayerwiseBigBrainIntensities
 from .layerwise_cell_density import LayerwiseCellDensity
 from .receptor_density_fingerprint import ReceptorDensityFingerprint
 from .receptor_density_profile import ReceptorDensityProfile
+from .regional_timeseries_activity import RegionalTimeseriesActivity
+from .regional_timeseries_activity import RegionalBOLD
siibra/features/tabular/regional_timeseries_activity.py (new file)
@@ -0,0 +1,213 @@
+# Copyright 2018-2021
+# Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from . import tabular
+
+from .. import anchor as _anchor
+
+from ...commons import logger, QUIET, siibra_tqdm
+from ...core import region as _region
+from ...locations import pointset
+from ...retrieval.repositories import RepositoryConnector
+
+from typing import Callable, Dict, Union
+import pandas as pd
+import numpy as np
+
+
+class RegionalTimeseriesActivity(tabular.Tabular):
+    """
+    Datasets that provide regional activity over time.
+    """
+
+    DESCRIPTION = (
+        ""
+    )
+
+    def __init__(
+        self,
+        cohort: str,
+        modality: str,
+        regions: list,
+        connector: RepositoryConnector,
+        decode_func: Callable,
+        files: Dict[str, str],
+        anchor: _anchor.AnatomicalAnchor,
+        timestep: str,
+        description: str = "",
+        datasets: list = [],
+        paradigm: str = ""
+    ):
+        """
+        """
+        tabular.Tabular.__init__(
+            self,
+            modality=modality,
+            description=description,
+            anchor=anchor,
+            datasets=datasets,
+            data=None  # lazy loading below
+        )
+        self.cohort = cohort.upper()
+        self._connector = connector
+        self._files = files
+        self._decode_func = decode_func
+        self.regions = regions
+        self._tables = {}
+        self.paradigm = paradigm
+        self.timestep = timestep
+
+    @property
+    def subjects(self):
+        """
+        Returns the subject identifiers for which signal tables are available.
+        """
+        return list(self._files.keys())
+
+    @property
+    def name(self):
+        supername = super().name
+        return f"{supername} with paradigm {self.paradigm}"
+
+    def get_table(self, subject: str = None):
+        """
+        Returns a pandas dataframe where the column headers are regions and the
+        indices indicate discrete timesteps.
+
+        Parameters
+        ----------
+        subject: str, default: None
+            Name of the subject (see RegionalTimeseriesActivity.subjects for available names).
+            If None, the mean is taken in case of multiple available data tables.
+        Returns
+        -------
+        pd.DataFrame
+            A table with region names as columns and timesteps as indices.
+        """
+        assert len(self) > 0
+        if (subject is None) and (len(self) > 1):
+            # multiple signal tables available, but no subject given - return mean table
+            logger.info(
+                f"No subject name supplied, returning mean signal table across {len(self)} subjects. "
+                "You might alternatively specify an individual subject."
+            )
+            if "mean" not in self._tables:
+                all_arrays = [
+                    self._connector.get(fname, decode_func=self._decode_func)
+                    for fname in siibra_tqdm(
+                        self._files.values(),
+                        total=len(self),
+                        desc=f"Averaging {len(self)} signal tables"
+                    )
+                ]
+                self._tables['mean'] = self._array_to_dataframe(np.stack(all_arrays).mean(0))
+            return self._tables['mean'].copy()
+        if subject is None:
+            subject = next(iter(self._files.keys()))
+        if subject not in self._files:
+            raise ValueError(f"Subject name '{subject}' not known, use one of: {', '.join(self._files)}")
+        if subject not in self._tables:
+            self._tables[subject] = self._load_table(subject)
+        return self._tables[subject].copy()
+
+    def _load_table(self, subject: str):
+        """
+        Extract the timeseries table.
+        """
+        assert subject in self.subjects
+        array = self._connector.get(self._files[subject], decode_func=self._decode_func)
+        return self._array_to_dataframe(array)
+
+    def __len__(self):
+        return len(self._files)
+
+    def __str__(self):
+        return "{} with paradigm {} for {} from {} cohort ({} signal tables)".format(
+            self.modality, self.paradigm,
+            "_".join(p.name for p in self.anchor.parcellations),
+            self.cohort,
+            len(self._files),
+        )
+
+    def compute_centroids(self, space):
+        """
+        Computes the list of centroid coordinates corresponding to
+        dataframe columns, in the given reference space.
+
+        Parameters
+        ----------
+        space: Space, str
+
+        Returns
+        -------
+        list[tuple(float, float, float)]
+        """
+        result = []
+        parcellations = self.anchor.represented_parcellations()
+        assert len(parcellations) == 1
+        parcmap = next(iter(parcellations)).get_map(space)
+        all_centroids = parcmap.compute_centroids()
+        for regionname in self.regions:
+            region = parcmap.parcellation.get_region(regionname, allow_tuple=True)
+            if isinstance(region, tuple):  # deal with sets of matched regions
+                found = [c for r in region for c in r if c.name in all_centroids]
+            else:
+                found = [r for r in region if r.name in all_centroids]
+            assert len(found) > 0
+            result.append(
+                tuple(pointset.PointSet(
+                    [all_centroids[r.name] for r in found], space=space
+                ).centroid)
+            )
+        return result
+
+    def _array_to_dataframe(self, array: np.ndarray) -> pd.DataFrame:
+        """
+        Convert a numpy array with the regional activity data to
+        a DataFrame with regions as column headers and timesteps as indices.
+        """
+        df = pd.DataFrame(array)
+        parcellations = self.anchor.represented_parcellations()
+        assert len(parcellations) == 1
+        parc = next(iter(parcellations))
+        with QUIET:
+            indexmap = {
+                i: parc.get_region(regionname, allow_tuple=True)
+                for i, regionname in enumerate(self.regions)
+            }
+        ncols = array.shape[1]
+        if len(indexmap) == ncols:
+            remapper = {
+                label - min(indexmap.keys()): region
+                for label, region in indexmap.items()
+            }
+            df = df.rename(columns=remapper)
+        return df
+
+    def plot(self, subject: str = None, **kwargs):
+        table = self.get_table(subject)
+        return table.mean().plot(kind="bar", **kwargs)
+
+
+class RegionalBOLD(
+    RegionalTimeseriesActivity,
+    configuration_folder="features/tabular/activity_timeseries/bold",
+    category="activity_timeseries"
+):
+    """
+    Blood-oxygen-level-dependent (BOLD) signals per region.
+    """
+
+    pass
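A hedged usage sketch for the new feature type (the parcellation key, and whether BOLD tables exist for it, are assumptions; siibra.features.get dispatches to Feature.match as above):

    import siibra

    julich = siibra.parcellations["julich 2.9"]          # assumed registry key
    feats = siibra.features.get(julich, "RegionalBOLD")  # string resolved via parse_featuretype
    if feats:
        f = feats[0]
        print(f.cohort, f.paradigm, f.subjects[:3])
        mean_table = f.get_table()                # mean across all subjects
        one_subject = f.get_table(f.subjects[0])  # a single subject's signal table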
siibra/livequeries/ebrains.py
@@ -16,7 +16,7 @@
 from ..features.dataset import ebrains as _ebrains
 from . import query
 
-from ..commons import logger
+from ..commons import logger, siibra_tqdm
 from ..features import anchor as _anchor
 from ..retrieval import requests, datasets
 from ..core import parcellation, region
@@ -24,7 +24,6 @@ from ..core import parcellation, region
 from collections import defaultdict
 import re
 from distutils.version import LooseVersion
-from tqdm import tqdm
 from tempfile import NamedTemporaryFile
 
 
@@ -70,7 +69,7 @@ class EbrainsFeatureQuery(query.LiveQuery, args=[], FeatureType=_ebrains.Ebrains
         invalid_species_datasets = {}
         results = self.loader.data.get("results", [])
 
-        for r in tqdm(results, total=len(results)):
+        for r in siibra_tqdm(results, total=len(results)):
 
             regionname = r.get("name", None)
             alias = r.get("alias", None)
siibra/locations/location.py
@@ -17,6 +17,7 @@
 import numpy as np
 from abc import ABC, abstractmethod
 from nibabel import Nifti1Image
+from typing import Union
 
 
 class Location(ABC):
@@ -48,14 +49,14 @@ class Location(ABC):
         pass
 
     @abstractmethod
-    def intersects(self, mask: Nifti1Image):
+    def intersects(self, other: Union[Nifti1Image, 'Location']) -> bool:
         """
         Verifies whether this 3D location intersects the given mask.
 
         NOTE: The affine matrix of the image must be set to warp voxel
         coordinates into the reference space of this Bounding Box.
         """
-        pass
+        raise NotImplementedError
 
     @abstractmethod
     def warp(self, space):
@@ -114,7 +115,7 @@ class WholeBrain(Location):
     def __init__(self, space=None):
         Location.__init__(self, space)
 
-    def intersects(self, mask: Nifti1Image):
+    def intersects(self, *_args, **_kwargs):
         """Always true for whole brain features"""
         return True
 
siibra/locations/pointset.py
@@ -74,8 +74,8 @@ class PointSet(location.Location):
             sigma_mm=[p.sigma for p in inside],
         )
 
-    def intersects(self, mask: Nifti1Image):
-        return len(self.intersection(mask)) > 0
+    def intersects(self, other: Union[location.Location, Nifti1Image]):
+        return len(self.intersection(other)) > 0
 
     @property
     def sigma(self):
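Together with the Location changes above, intersects now accepts either another Location or a Nifti1Image mask, with PointSet delegating to its intersection method in both cases. A hedged sketch of the widened contract (the PointSet constructor arguments are assumptions, and mask_img/other_location are placeholders):

    from siibra.locations.pointset import PointSet

    ps = PointSet([(0, 0, 0), (10, 10, 10)], space="mni152")  # assumed constructor
    ps.intersects(mask_img)        # Nifti1Image mask, as before
    ps.intersects(other_location)  # any other Location now also works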