siibra 0.4a33__py3-none-any.whl → 0.4a46__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of siibra might be problematic. Click here for more details.

Files changed (64) hide show
  1. siibra/VERSION +1 -1
  2. siibra/__init__.py +2 -0
  3. siibra/commons.py +53 -8
  4. siibra/configuration/configuration.py +21 -17
  5. siibra/configuration/factory.py +95 -19
  6. siibra/core/atlas.py +11 -8
  7. siibra/core/concept.py +41 -8
  8. siibra/core/parcellation.py +94 -43
  9. siibra/core/region.py +160 -187
  10. siibra/core/space.py +44 -39
  11. siibra/features/__init__.py +19 -19
  12. siibra/features/anchor.py +9 -6
  13. siibra/features/connectivity/__init__.py +0 -8
  14. siibra/features/connectivity/functional_connectivity.py +11 -3
  15. siibra/features/{basetypes → connectivity}/regional_connectivity.py +46 -33
  16. siibra/features/connectivity/streamline_counts.py +3 -2
  17. siibra/features/connectivity/streamline_lengths.py +3 -2
  18. siibra/features/{basetypes → dataset}/__init__.py +2 -0
  19. siibra/features/{external → dataset}/ebrains.py +3 -3
  20. siibra/features/feature.py +420 -0
  21. siibra/{samplers → features/image}/__init__.py +7 -1
  22. siibra/features/{basetypes/volume_of_interest.py → image/image.py} +12 -7
  23. siibra/features/{external/__init__.py → image/sections.py} +8 -5
  24. siibra/features/image/volume_of_interest.py +70 -0
  25. siibra/features/{cellular → tabular}/__init__.py +7 -11
  26. siibra/features/{cellular → tabular}/bigbrain_intensity_profile.py +5 -2
  27. siibra/features/{cellular → tabular}/cell_density_profile.py +6 -2
  28. siibra/features/{basetypes → tabular}/cortical_profile.py +48 -41
  29. siibra/features/{molecular → tabular}/gene_expression.py +5 -2
  30. siibra/features/{cellular → tabular}/layerwise_bigbrain_intensities.py +6 -2
  31. siibra/features/{cellular → tabular}/layerwise_cell_density.py +9 -3
  32. siibra/features/{molecular → tabular}/receptor_density_fingerprint.py +3 -2
  33. siibra/features/{molecular → tabular}/receptor_density_profile.py +6 -2
  34. siibra/features/tabular/regional_timeseries_activity.py +213 -0
  35. siibra/features/{basetypes → tabular}/tabular.py +14 -9
  36. siibra/livequeries/allen.py +1 -1
  37. siibra/livequeries/bigbrain.py +2 -3
  38. siibra/livequeries/ebrains.py +3 -9
  39. siibra/livequeries/query.py +1 -1
  40. siibra/locations/location.py +4 -3
  41. siibra/locations/point.py +21 -17
  42. siibra/locations/pointset.py +2 -2
  43. siibra/retrieval/__init__.py +1 -1
  44. siibra/retrieval/cache.py +8 -2
  45. siibra/retrieval/datasets.py +149 -29
  46. siibra/retrieval/repositories.py +19 -8
  47. siibra/retrieval/requests.py +98 -116
  48. siibra/volumes/gifti.py +26 -11
  49. siibra/volumes/neuroglancer.py +35 -19
  50. siibra/volumes/nifti.py +8 -9
  51. siibra/volumes/parcellationmap.py +341 -184
  52. siibra/volumes/sparsemap.py +67 -53
  53. siibra/volumes/volume.py +25 -13
  54. {siibra-0.4a33.dist-info → siibra-0.4a46.dist-info}/METADATA +4 -3
  55. siibra-0.4a46.dist-info/RECORD +69 -0
  56. {siibra-0.4a33.dist-info → siibra-0.4a46.dist-info}/WHEEL +1 -1
  57. siibra/features/basetypes/feature.py +0 -248
  58. siibra/features/fibres/__init__.py +0 -14
  59. siibra/features/functional/__init__.py +0 -14
  60. siibra/features/molecular/__init__.py +0 -26
  61. siibra/samplers/bigbrain.py +0 -181
  62. siibra-0.4a33.dist-info/RECORD +0 -71
  63. {siibra-0.4a33.dist-info → siibra-0.4a46.dist-info}/LICENSE +0 -0
  64. {siibra-0.4a33.dist-info → siibra-0.4a46.dist-info}/top_level.txt +0 -0
@@ -13,7 +13,7 @@
13
13
  # See the License for the specific language governing permissions and
14
14
  # limitations under the License.
15
15
 
16
- from . import feature
16
+ from . import tabular
17
17
 
18
18
  from .. import anchor as _anchor
19
19
 
@@ -23,7 +23,7 @@ from textwrap import wrap
23
23
  import numpy as np
24
24
 
25
25
 
26
- class CorticalProfile(feature.Feature):
26
+ class CorticalProfile(tabular.Tabular):
27
27
  """
28
28
  Represents a 1-dimensional profile of measurements along cortical depth,
29
29
  measured at relative depths between 0 representing the pial surface,
@@ -56,38 +56,28 @@ class CorticalProfile(feature.Feature):
56
56
  ):
57
57
  """Initialize profile.
58
58
 
59
- Args:
60
- description (str):
61
- Human-readable of the modality of the measurements.
62
- modality (str):
63
- Short textual description of the modaility of measurements
64
- anchor: AnatomicalAnchor
65
- depths (list, optional):
66
- List of cortical depthh positions corresponding to each
67
- measurement, all in the range [0..1].
68
- Defaults to None.
69
- values (list, optional):
70
- List of the actual measurements at each depth position.
71
- Length must correspond to 'depths'.
72
- Defaults to None.
73
- unit (str, optional):
74
- Textual identifier for the unit of measurements.
75
- Defaults to None.
76
- boundary_positions (dict, optional):
77
- Dictionary of depths at which layer boundaries were identified.
78
- Keys are tuples of layer numbers, e.g. (1,2), values are cortical
79
- depth positions in the range [0..1].
80
- Defaults to None.
81
- datasets : list
82
- list of datasets corresponding to this feature
59
+ Parameters
60
+ ----------
61
+ description: str
62
+ Human-readable description of the modality of the measurements.
63
+ modality: str
64
+ Short textual description of the modality of measurements.
65
+ anchor: AnatomicalAnchor
66
+ depths: list, default: None
67
+ List of cortical depth positions corresponding to each
68
+ measurement, all in the range [0..1]
69
+ values: list, default: None
70
+ List of the actual measurements at each depth position.
71
+ Length must correspond to 'depths'.
72
+ unit: str, default: None
73
+ Textual identifier for the unit of measurements.
74
+ boundary_positions: dict, default: None
75
+ Dictionary of depths at which layer boundaries were identified.
76
+ Keys are tuples of layer numbers, e.g. (1,2), and values are
77
+ cortical depth positions in the range [0..1].
78
+ datasets : list[Dataset]
79
+ list of datasets corresponding to this feature
83
80
  """
84
- feature.Feature.__init__(
85
- self,
86
- modality=modality,
87
- description=description,
88
- anchor=anchor,
89
- datasets=datasets
90
- )
91
81
 
92
82
  # cached properties will be revealed as property functions,
93
83
  # so derived classes may choose to override for lazy loading.
@@ -96,6 +86,15 @@ class CorticalProfile(feature.Feature):
96
86
  self._values_cached = values
97
87
  self._boundary_positions = boundary_positions
98
88
 
89
+ tabular.Tabular.__init__(
90
+ self,
91
+ modality=modality,
92
+ description=description,
93
+ anchor=anchor,
94
+ data=None, # lazy loader below
95
+ datasets=datasets
96
+ )
97
+
99
98
  def _check_sanity(self):
100
99
  # check plausibility of the profile
101
100
  assert isinstance(self._depths, (list, np.ndarray))
@@ -117,7 +116,7 @@ class CorticalProfile(feature.Feature):
117
116
  return self._unit
118
117
 
119
118
  @property
120
- def boundary_positions(self):
119
+ def boundary_positions(self) -> Dict[Tuple[int, int], float]:
121
120
  if self._boundary_positions is None:
122
121
  return {}
123
122
  else:
@@ -156,12 +155,14 @@ class CorticalProfile(feature.Feature):
156
155
  def data(self):
157
156
  """Return a pandas Series representing the profile."""
158
157
  self._check_sanity()
159
- return pd.Series(
160
- self._values, index=self._depths, name=f"{self.modality} ({self.unit})"
158
+ return pd.DataFrame(
159
+ self._values, index=self._depths, columns=[f"{self.modality} ({self.unit})"]
161
160
  )
162
161
 
163
162
  def plot(self, **kwargs):
164
- """Plot the profile.
163
+ """
164
+ Plot the profile.
165
+
165
166
  Keyword arguments are passed on to the plot command.
166
167
  'layercolor' can be used to specify a color for cortical layer shading.
167
168
  """
@@ -194,8 +195,11 @@ class CorticalProfile(feature.Feature):
194
195
 
195
196
  @property
196
197
  def _depths(self):
197
- """Returns a list of the relative cortical depths of the measured values in the range [0..1].
198
- To be implemented in derived class."""
198
+ """
199
+ Returns a list of the relative cortical depths of the measured values in the range [0..1].
200
+
201
+ To be implemented in derived class.
202
+ """
199
203
  if self._depths_cached is None:
200
204
  raise NotImplementedError(
201
205
  f"'_depths' not available for {self.__class__.__name__}."
@@ -204,8 +208,11 @@ class CorticalProfile(feature.Feature):
204
208
 
205
209
  @property
206
210
  def _values(self):
207
- """Returns a list of the measured values per depth.
208
- To be implemented in derived class."""
211
+ """
212
+ Returns a list of the measured values per depth.
213
+
214
+ To be implemented in derived class.
215
+ """
209
216
  if self._values_cached is None:
210
217
  raise NotImplementedError(
211
218
  f"'_values' not available for {self.__class__.__name__}."
@@ -14,7 +14,7 @@
14
14
  # limitations under the License.
15
15
 
16
16
  from .. import anchor as _anchor
17
- from ..basetypes import tabular
17
+ from . import tabular
18
18
 
19
19
  from ... import commons
20
20
 
@@ -27,7 +27,10 @@ except ImportError:
27
27
  from typing_extensions import TypedDict
28
28
 
29
29
 
30
- class GeneExpressions(tabular.Tabular):
30
+ class GeneExpressions(
31
+ tabular.Tabular,
32
+ category='molecular'
33
+ ):
31
34
  """
32
35
  A set of gene expressions for different candidate genes
33
36
  measured inside a brain structure.
@@ -13,13 +13,17 @@
13
13
  # See the License for the specific language governing permissions and
14
14
  # limitations under the License.
15
15
 
16
- from ..basetypes import cortical_profile, tabular
16
+ from . import cortical_profile
17
+ from . import tabular
17
18
 
18
19
  import pandas as pd
19
20
  import numpy as np
20
21
 
21
22
 
22
- class LayerwiseBigBrainIntensities(tabular.Tabular):
23
+ class LayerwiseBigBrainIntensities(
24
+ tabular.Tabular,
25
+ category='cellular'
26
+ ):
23
27
 
24
28
  DESCRIPTION = (
25
29
  "Layerwise averages and standard deviations of of BigBrain staining intensities "
@@ -13,8 +13,9 @@
13
13
  # See the License for the specific language governing permissions and
14
14
  # limitations under the License.
15
15
 
16
+ from . import cortical_profile
16
17
  from .. import anchor as _anchor
17
- from ..basetypes import cortical_profile, tabular
18
+ from . import tabular
18
19
 
19
20
  from ... import commons
20
21
  from ...retrieval import requests
@@ -24,7 +25,11 @@ import numpy as np
24
25
  from io import BytesIO
25
26
 
26
27
 
27
- class LayerwiseCellDensity(tabular.Tabular, configuration_folder="features/fingerprints/celldensity"):
28
+ class LayerwiseCellDensity(
29
+ tabular.Tabular,
30
+ configuration_folder="features/tabular/layerstatistics/celldensity",
31
+ category='cellular'
32
+ ):
28
33
 
29
34
  DESCRIPTION = (
30
35
  "Layerwise estimated densities of detected cell bodies (in detected cells per 0.1 cube millimeter) "
@@ -76,7 +81,8 @@ class LayerwiseCellDensity(tabular.Tabular, configuration_folder="features/finge
76
81
  continue
77
82
  counts = cells.layer.value_counts()
78
83
  areas = layers["Area(micron**2)"]
79
- density_dict[i] = counts[areas.index] / areas * 100 ** 2 * 5
84
+ indices = np.intersect1d(areas.index, counts.index)
85
+ density_dict[i] = counts[indices] / areas * 100 ** 2 * 5
80
86
  return pd.DataFrame(density_dict)
81
87
 
82
88
  @property
@@ -14,7 +14,7 @@
14
14
  # limitations under the License.
15
15
 
16
16
  from .. import anchor as _anchor
17
- from ..basetypes import tabular
17
+ from . import tabular
18
18
 
19
19
  from ... import commons, vocabularies
20
20
  from ...retrieval import requests
@@ -27,7 +27,8 @@ from typing import List
27
27
 
28
28
  class ReceptorDensityFingerprint(
29
29
  tabular.Tabular,
30
- configuration_folder="features/fingerprints/receptor"
30
+ configuration_folder="features/tabular/fingerprints/receptor",
31
+ category='molecular'
31
32
  ):
32
33
 
33
34
  DESCRIPTION = (
@@ -14,14 +14,18 @@
14
14
  # limitations under the License.
15
15
 
16
16
  from .. import anchor as _anchor
17
- from ..basetypes import cortical_profile
17
+ from . import cortical_profile
18
18
 
19
19
  from ... import vocabularies
20
20
  from ...commons import create_key
21
21
  from ...retrieval import requests
22
22
 
23
23
 
24
- class ReceptorDensityProfile(cortical_profile.CorticalProfile, configuration_folder="features/profiles/receptor"):
24
+ class ReceptorDensityProfile(
25
+ cortical_profile.CorticalProfile,
26
+ configuration_folder="features/tabular/corticalprofiles/receptor",
27
+ category='molecular'
28
+ ):
25
29
 
26
30
  DESCRIPTION = (
27
31
  "Cortical profile of densities (in fmol/mg protein) of receptors for classical neurotransmitters "
@@ -0,0 +1,213 @@
1
+ # Copyright 2018-2021
2
+ # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
+
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+
16
+ from . import tabular
17
+
18
+ from .. import anchor as _anchor
19
+
20
+ from ...commons import logger, QUIET, siibra_tqdm
21
+ from ...core import region as _region
22
+ from ...locations import pointset
23
+ from ...retrieval.repositories import RepositoryConnector
24
+
25
+ from typing import Callable, Dict, Union
26
+ import pandas as pd
27
+ import numpy as np
28
+
29
+
30
+ class RegionalTimeseriesActivity(tabular.Tabular):
31
+ """
32
+ Datasets that provide regional activity over time.
33
+ """
34
+
35
+ DESCRIPTION = (
36
+ ""
37
+ )
38
+
39
+ def __init__(
40
+ self,
41
+ cohort: str,
42
+ modality: str,
43
+ regions: list,
44
+ connector: RepositoryConnector,
45
+ decode_func: Callable,
46
+ files: Dict[str, str],
47
+ anchor: _anchor.AnatomicalAnchor,
48
+ timestep: str,
49
+ description: str = "",
50
+ datasets: list = [],
51
+ paradigm: str = ""
52
+ ):
53
+ """
54
+ """
55
+ tabular.Tabular.__init__(
56
+ self,
57
+ modality=modality,
58
+ description=description,
59
+ anchor=anchor,
60
+ datasets=datasets,
61
+ data=None # lazy loading below
62
+ )
63
+ self.cohort = cohort.upper()
64
+ self._connector = connector
65
+ self._files = files
66
+ self._decode_func = decode_func
67
+ self.regions = regions
68
+ self._tables = {}
69
+ self.paradigm = paradigm
70
+ self.timestep = timestep
71
+
72
+ @property
73
+ def subjects(self):
74
+ """
75
+ Returns the subject identifiers for which signal tables are available.
76
+ """
77
+ return list(self._files.keys())
78
+
79
+ @property
80
+ def name(self):
81
+ supername = super().name
82
+ return f"{supername} with paradigm {self.paradigm}"
83
+
84
+ def get_table(self, subject: str = None):
85
+ """
86
+ Returns a pandas dataframe where the column headers are regions and the
87
+ indices indicate discrete timesteps.
88
+
89
+ Parameters
90
+ ----------
91
+ subject: str, default: None
92
+ Name of the subject (see RegionalTimeseriesActivity.subjects for available names).
93
+ If None, the mean is taken in case of multiple available data tables.
94
+ Returns
95
+ -------
96
+ pd.DataFrame
97
+ A table with region names as the column and timesteps as indices.
98
+ """
99
+ assert len(self) > 0
100
+ if (subject is None) and (len(self) > 1):
101
+ # multiple signal tables available, but no subject given - return mean table
102
+ logger.info(
103
+ f"No subject name supplied, returning mean signal table across {len(self)} subjects. "
104
+ "You might alternatively specify an individual subject."
105
+ )
106
+ if "mean" not in self._tables:
107
+ all_arrays = [
108
+ self._connector.get(fname, decode_func=self._decode_func)
109
+ for fname in siibra_tqdm(
110
+ self._files.values(),
111
+ total=len(self),
112
+ desc=f"Averaging {len(self)} signal tables"
113
+ )
114
+ ]
115
+ self._tables['mean'] = self._array_to_dataframe(np.stack(all_arrays).mean(0))
116
+ return self._tables['mean'].copy()
117
+ if subject is None:
118
+ subject = next(iter(self._files.keys()))
119
+ if subject not in self._files:
120
+ raise ValueError(f"Subject name '{subject}' not known, use one of: {', '.join(self._files)}")
121
+ if subject not in self._tables:
122
+ self._tables[subject] = self._load_table(subject)
123
+ return self._tables[subject].copy()
124
+
125
+ def _load_table(self, subject: str):
126
+ """
127
+ Extract the timeseries table.
128
+ """
129
+ assert subject in self.subjects
130
+ array = self._connector.get(self._files[subject], decode_func=self._decode_func)
131
+ return self._array_to_dataframe(array)
132
+
133
+ def __len__(self):
134
+ return len(self._files)
135
+
136
+ def __str__(self):
137
+ return "{} with paradigm {} for {} from {} cohort ({} signal tables)".format(
138
+ self.modality, self.paradigm,
139
+ "_".join(p.name for p in self.anchor.parcellations),
140
+ self.cohort,
141
+ len(self._files),
142
+ )
143
+
144
+ def compute_centroids(self, space):
145
+ """
146
+ Computes the list of centroid coordinates corresponding to
147
+ dataframe columns, in the given reference space.
148
+
149
+ Parameters
150
+ ----------
151
+ space: Space, str
152
+
153
+ Returns
154
+ -------
155
+ list[tuple(float, float, float)]
156
+ """
157
+ result = []
158
+ parcellations = self.anchor.represented_parcellations()
159
+ assert len(parcellations) == 1
160
+ parcmap = next(iter(parcellations)).get_map(space)
161
+ all_centroids = parcmap.compute_centroids()
162
+ for regionname in self.regions:
163
+ region = parcmap.parcellation.get_region(regionname, allow_tuple=True)
164
+ if isinstance(region, tuple): # deal with sets of matched regions
165
+ found = [c for r in region for c in r if c.name in all_centroids]
166
+ else:
167
+ found = [r for r in region if r.name in all_centroids]
168
+ assert len(found) > 0
169
+ result.append(
170
+ tuple(pointset.PointSet(
171
+ [all_centroids[r.name] for r in found], space=space
172
+ ).centroid)
173
+ )
174
+ return result
175
+
176
+ def _array_to_dataframe(self, array: np.ndarray) -> pd.DataFrame:
177
+ """
178
+ Convert a numpy array with the regional activity data to
179
+ a DataFrame with regions as column headers and timesteps as indices.
180
+ """
181
+ df = pd.DataFrame(array)
182
+ parcellations = self.anchor.represented_parcellations()
183
+ assert len(parcellations) == 1
184
+ parc = next(iter(parcellations))
185
+ with QUIET:
186
+ indexmap = {
187
+ i: parc.get_region(regionname, allow_tuple=True)
188
+ for i, regionname in enumerate(self.regions)
189
+ }
190
+ ncols = array.shape[1]
191
+ if len(indexmap) == ncols:
192
+ remapper = {
193
+ label - min(indexmap.keys()): region
194
+ for label, region in indexmap.items()
195
+ }
196
+ df = df.rename(columns=remapper)
197
+ return df
198
+
199
+ def plot(self, subject: str = None, **kwargs):
200
+ table = self.get_table(subject)
201
+ return table.mean().plot(kind="bar", **kwargs)
202
+
203
+
204
+ class RegionalBOLD(
205
+ RegionalTimeseriesActivity,
206
+ configuration_folder="features/tabular/activity_timeseries/bold",
207
+ category="activity_timeseries"
208
+ ):
209
+ """
210
+ Blood-oxygen-level-dependent (BOLD) signals per region.
211
+ """
212
+
213
+ pass
@@ -13,7 +13,7 @@
13
13
  # See the License for the specific language governing permissions and
14
14
  # limitations under the License.
15
15
 
16
- from . import feature
16
+ from .. import feature
17
17
 
18
18
  from .. import anchor as _anchor
19
19
 
@@ -27,14 +27,13 @@ class Tabular(feature.Feature):
27
27
  """
28
28
  Represents a table of different measures anchored to a brain location.
29
29
 
30
- Columns represent different types of values, while rows represent
31
- different samples. The number of columns might thus be intrepreted
32
- as the feature dimension.
30
+ Columns represent different types of values, while rows represent different
31
+ samples. The number of columns might thus be interpreted as the feature
32
+ dimension.
33
33
 
34
- As an example, receptor fingerprints use rows
35
- to represent different neurotransmitter receptors, and separate
36
- columns for the mean and standard deviations measure across multiple
37
- tissue samples.
34
+ As an example, receptor fingerprints use rows to represent different
35
+ neurotransmitter receptors, and separate columns for the mean and standard
36
+ deviations measure across multiple tissue samples.
38
37
  """
39
38
 
40
39
  def __init__(
@@ -59,7 +58,13 @@ class Tabular(feature.Feature):
59
58
  return self._data_cached
60
59
 
61
60
  def plot(self, **kwargs):
62
- """ Create a bar plot of a columns of the data."""
61
+ """
62
+ Create a bar plot of the columns of the data.
63
+ Parameters
64
+ ----------
65
+ **kwargs
66
+ takes Matplotlib.pyplot keyword arguments
67
+ """
63
68
 
64
69
  try:
65
70
  import matplotlib.pyplot as plt
@@ -17,7 +17,7 @@ from .query import LiveQuery
17
17
 
18
18
  from ..core import space as _space
19
19
  from ..features import anchor as _anchor
20
- from ..features.molecular.gene_expression import GeneExpressions
20
+ from ..features.tabular.gene_expression import GeneExpressions
21
21
  from ..commons import logger, Species, MapType
22
22
  from ..locations import Point, PointSet
23
23
  from ..core.region import Region
@@ -15,8 +15,7 @@
15
15
 
16
16
  from . import query
17
17
 
18
- from ..features.cellular import bigbrain_intensity_profile, layerwise_bigbrain_intensities
19
-
18
+ from ..features.tabular import bigbrain_intensity_profile, layerwise_bigbrain_intensities
20
19
  from ..commons import logger
21
20
  from ..locations import point, pointset
22
21
  from ..core import space, region
@@ -158,7 +157,7 @@ class LayerwiseBigBrainIntensityQuery(query.LiveQuery, args=[], FeatureType=laye
158
157
  region_labels = 7 - np.array([
159
158
  [np.array([[(prange < T) * 1] for i, T in enumerate((b * N).astype('int'))]).squeeze().sum(0)]
160
159
  for b in boundary_depths
161
- ]).squeeze()
160
+ ]).reshape((-1, 200))
162
161
 
163
162
  fp = layerwise_bigbrain_intensities.LayerwiseBigBrainIntensities(
164
163
  regionname=subregion.name,
@@ -13,10 +13,10 @@
13
13
  # See the License for the specific language governing permissions and
14
14
  # limitations under the License.
15
15
 
16
- from ..features.external import ebrains as _ebrains
16
+ from ..features.dataset import ebrains as _ebrains
17
17
  from . import query
18
18
 
19
- from ..commons import logger
19
+ from ..commons import logger, siibra_tqdm
20
20
  from ..features import anchor as _anchor
21
21
  from ..retrieval import requests, datasets
22
22
  from ..core import parcellation, region
@@ -24,7 +24,6 @@ from ..core import parcellation, region
24
24
  from collections import defaultdict
25
25
  import re
26
26
  from distutils.version import LooseVersion
27
- from tqdm import tqdm
28
27
  from tempfile import NamedTemporaryFile
29
28
 
30
29
 
@@ -48,11 +47,6 @@ class EbrainsFeatureQuery(query.LiveQuery, args=[], FeatureType=_ebrains.Ebrains
48
47
  requests.GitlabProxy(
49
48
  flavour=requests.GitlabProxyEnum.PARCELLATIONREGION_V1,
50
49
  ),
51
- requests.EbrainsKgQuery(
52
- query_id="siibra-kg-feature-summary-0_0_4",
53
- schema="parcellationregion",
54
- params={"vocab": "https://schema.hbp.eu/myQuery/"},
55
- )
56
50
  ]
57
51
  )
58
52
 
@@ -75,7 +69,7 @@ class EbrainsFeatureQuery(query.LiveQuery, args=[], FeatureType=_ebrains.Ebrains
75
69
  invalid_species_datasets = {}
76
70
  results = self.loader.data.get("results", [])
77
71
 
78
- for r in tqdm(results, total=len(results)):
72
+ for r in siibra_tqdm(results, total=len(results)):
79
73
 
80
74
  regionname = r.get("name", None)
81
75
  alias = r.get("alias", None)
@@ -14,7 +14,7 @@
14
14
  # limitations under the License.
15
15
 
16
16
  from ..commons import logger
17
- from ..features.basetypes.feature import Feature
17
+ from ..features.feature import Feature
18
18
  from ..core.concept import AtlasConcept
19
19
 
20
20
  from abc import ABC, abstractmethod
@@ -17,6 +17,7 @@
17
17
  import numpy as np
18
18
  from abc import ABC, abstractmethod
19
19
  from nibabel import Nifti1Image
20
+ from typing import Union
20
21
 
21
22
 
22
23
  class Location(ABC):
@@ -48,14 +49,14 @@ class Location(ABC):
48
49
  pass
49
50
 
50
51
  @abstractmethod
51
- def intersects(self, mask: Nifti1Image):
52
+ def intersects(self, other: Union[Nifti1Image, 'Location']) -> bool:
52
53
  """
53
54
  Verifies whether this 3D location intersects the given mask.
54
55
 
55
56
  NOTE: The affine matrix of the image must be set to warp voxels
56
57
  coordinates into the reference space of this Bounding Box.
57
58
  """
58
- pass
59
+ raise NotImplementedError
59
60
 
60
61
  @abstractmethod
61
62
  def warp(self, space):
@@ -114,7 +115,7 @@ class WholeBrain(Location):
114
115
  def __init__(self, space=None):
115
116
  Location.__init__(self, space)
116
117
 
117
- def intersects(self, mask: Nifti1Image):
118
+ def intersects(self, *_args, **_kwargs):
118
119
  """Always true for whole brain features"""
119
120
  return True
120
121