siibra 0.4a33__py3-none-any.whl → 0.4a46__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Note: this version of siibra has been flagged as a potentially problematic release.

Files changed (64)
  1. siibra/VERSION +1 -1
  2. siibra/__init__.py +2 -0
  3. siibra/commons.py +53 -8
  4. siibra/configuration/configuration.py +21 -17
  5. siibra/configuration/factory.py +95 -19
  6. siibra/core/atlas.py +11 -8
  7. siibra/core/concept.py +41 -8
  8. siibra/core/parcellation.py +94 -43
  9. siibra/core/region.py +160 -187
  10. siibra/core/space.py +44 -39
  11. siibra/features/__init__.py +19 -19
  12. siibra/features/anchor.py +9 -6
  13. siibra/features/connectivity/__init__.py +0 -8
  14. siibra/features/connectivity/functional_connectivity.py +11 -3
  15. siibra/features/{basetypes → connectivity}/regional_connectivity.py +46 -33
  16. siibra/features/connectivity/streamline_counts.py +3 -2
  17. siibra/features/connectivity/streamline_lengths.py +3 -2
  18. siibra/features/{basetypes → dataset}/__init__.py +2 -0
  19. siibra/features/{external → dataset}/ebrains.py +3 -3
  20. siibra/features/feature.py +420 -0
  21. siibra/{samplers → features/image}/__init__.py +7 -1
  22. siibra/features/{basetypes/volume_of_interest.py → image/image.py} +12 -7
  23. siibra/features/{external/__init__.py → image/sections.py} +8 -5
  24. siibra/features/image/volume_of_interest.py +70 -0
  25. siibra/features/{cellular → tabular}/__init__.py +7 -11
  26. siibra/features/{cellular → tabular}/bigbrain_intensity_profile.py +5 -2
  27. siibra/features/{cellular → tabular}/cell_density_profile.py +6 -2
  28. siibra/features/{basetypes → tabular}/cortical_profile.py +48 -41
  29. siibra/features/{molecular → tabular}/gene_expression.py +5 -2
  30. siibra/features/{cellular → tabular}/layerwise_bigbrain_intensities.py +6 -2
  31. siibra/features/{cellular → tabular}/layerwise_cell_density.py +9 -3
  32. siibra/features/{molecular → tabular}/receptor_density_fingerprint.py +3 -2
  33. siibra/features/{molecular → tabular}/receptor_density_profile.py +6 -2
  34. siibra/features/tabular/regional_timeseries_activity.py +213 -0
  35. siibra/features/{basetypes → tabular}/tabular.py +14 -9
  36. siibra/livequeries/allen.py +1 -1
  37. siibra/livequeries/bigbrain.py +2 -3
  38. siibra/livequeries/ebrains.py +3 -9
  39. siibra/livequeries/query.py +1 -1
  40. siibra/locations/location.py +4 -3
  41. siibra/locations/point.py +21 -17
  42. siibra/locations/pointset.py +2 -2
  43. siibra/retrieval/__init__.py +1 -1
  44. siibra/retrieval/cache.py +8 -2
  45. siibra/retrieval/datasets.py +149 -29
  46. siibra/retrieval/repositories.py +19 -8
  47. siibra/retrieval/requests.py +98 -116
  48. siibra/volumes/gifti.py +26 -11
  49. siibra/volumes/neuroglancer.py +35 -19
  50. siibra/volumes/nifti.py +8 -9
  51. siibra/volumes/parcellationmap.py +341 -184
  52. siibra/volumes/sparsemap.py +67 -53
  53. siibra/volumes/volume.py +25 -13
  54. {siibra-0.4a33.dist-info → siibra-0.4a46.dist-info}/METADATA +4 -3
  55. siibra-0.4a46.dist-info/RECORD +69 -0
  56. {siibra-0.4a33.dist-info → siibra-0.4a46.dist-info}/WHEEL +1 -1
  57. siibra/features/basetypes/feature.py +0 -248
  58. siibra/features/fibres/__init__.py +0 -14
  59. siibra/features/functional/__init__.py +0 -14
  60. siibra/features/molecular/__init__.py +0 -26
  61. siibra/samplers/bigbrain.py +0 -181
  62. siibra-0.4a33.dist-info/RECORD +0 -71
  63. {siibra-0.4a33.dist-info → siibra-0.4a46.dist-info}/LICENSE +0 -0
  64. {siibra-0.4a33.dist-info → siibra-0.4a46.dist-info}/top_level.txt +0 -0
siibra/locations/point.py CHANGED
@@ -25,26 +25,25 @@ import numpy as np
 import json
 import numbers
 import hashlib
-from typing import Union
+from typing import Union, Tuple


 class Point(location.Location):
     """A single 3D point in reference space."""

     @staticmethod
-    def parse(spec, unit="mm"):
+    def parse(spec, unit="mm") -> Tuple[float, float, float]:
         """Converts a 3D coordinate specification into a 3D tuple of floats.

         Parameters
         ----------
-        spec : Any of str, tuple(float,float,float)
+        spec: Any of str, tuple(float,float,float)
             For string specifications, comma separation with decimal points are expected.
-        unit : str
+        unit: str
             specification of the unit (only 'mm' supported so far)
-
         Returns
         -------
-        tuple(float,float,float)
+        tuple(float, float, float)
         """
         if unit != "mm":
             raise NotImplementedError(
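The new return annotation matches what the docstring already promised. A minimal usage sketch based on that docstring (the exact handling of spec types beyond comma-separated strings and tuples is an assumption, and the import assumes the module loads cleanly in isolation):

```python
from siibra.locations.point import Point

# String spec: comma-separated decimals, interpreted in mm
assert Point.parse("1.5, -2.25, 3.0") == (1.5, -2.25, 3.0)

# Tuple specs are converted to a 3-tuple of floats
assert Point.parse((1, 2, 3)) == (1.0, 2.0, 3.0)

# Only 'mm' is supported so far; other units raise NotImplementedError
try:
    Point.parse("1,2,3", unit="voxel")
except NotImplementedError:
    pass
```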
@@ -77,13 +76,16 @@ class Point(location.Location):

         Parameters
         ----------
-        coordinate : 3-tuple of int/float, or string specification
+        coordinatespec: 3-tuple of int/float, or string specification
             Coordinate in mm of the given space
-        space : reference space specification (id, object, or name)
-            The reference space
-        sigma_mm : float
-            Optional location uncertainy of the point
-            (will be interpreted as the isotropic standard deviation of location)
+        space: Space or str
+            The reference space (id, object, or name)
+        sigma_mm : float, optional
+            Location uncertainty of the point
+
+        Note
+        ----
+        Interpreted as the isotropic standard deviation of location.
         """
         location.Location.__init__(self, space)
         self.coordinate = Point.parse(coordinatespec)
@@ -247,12 +249,14 @@ class Point(location.Location):

         Parameters
         ----------
-        affine : numpy 4x4 ndarray
+        affine: numpy 4x4 ndarray
             affine matrix
-        space : reference space, (str, Space, or None))
-            Target reference space which is reached after
-            applying the transform. Note that the consistency
-            of this cannot be checked and is up to the user.
+        space: str, Space, or None
+            Target reference space which is reached after applying the transform
+
+        Note
+        ----
+        The consistency of this cannot be checked and is up to the user.
         """
         from ..core.space import Space
         spaceobj = Space.get_instance(space)
siibra/locations/pointset.py CHANGED
@@ -74,8 +74,8 @@ class PointSet(location.Location):
             sigma_mm=[p.sigma for p in inside],
         )

-    def intersects(self, mask: Nifti1Image):
-        return len(self.intersection(mask)) > 0
+    def intersects(self, other: Union[location.Location, Nifti1Image]):
+        return len(self.intersection(other)) > 0

     @property
     def sigma(self):
siibra/retrieval/__init__.py CHANGED
@@ -21,6 +21,6 @@ from .repositories import (
     LocalFileRepository,
     ZipfileConnector
 )
-from .requests import HttpRequest, ZipfileRequest, EbrainsRequest, EbrainsKgQuery, SiibraHttpRequestError
+from .requests import HttpRequest, ZipfileRequest, EbrainsRequest, SiibraHttpRequestError
 from .cache import CACHE
 from .exceptions import NoSiibraConfigMirrorsAvailableException, TagNotFoundException
siibra/retrieval/cache.py CHANGED
@@ -93,8 +93,14 @@ class Cache:
         if index > 0:
             logger.debug(f"Removing the {index+1} oldest files to keep cache size below {targetsize:.2f} GiB.")
             for fn, st in sfiles[:index + 1]:
-                size_gib -= st.st_size / 1024**3
-                os.remove(fn)
+                if os.path.isdir(fn):
+                    import shutil
+                    size = sum(os.path.getsize(f) for f in os.listdir(fn) if os.path.isfile(f))
+                    shutil.rmtree(fn)
+                else:
+                    size = st.st_size
+                    os.remove(fn)
+                size_gib -= size / 1024**3

     @property
     def size(self):
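One caveat in the new directory branch: `os.listdir(fn)` yields bare entry names, so `os.path.isfile(f)` is evaluated relative to the current working directory and the computed `size` will typically be 0; nested subdirectories are never counted either. A recursive helper along these lines (an illustrative sketch, not code from the package) would account for the freed space correctly:

```python
import os

def directory_size(path: str) -> int:
    """Recursively sum the sizes of all regular files below `path`, in bytes."""
    total = 0
    for root, _dirs, files in os.walk(path):
        for name in files:
            fpath = os.path.join(root, name)
            if not os.path.islink(fpath):  # skip symlinks to avoid double counting
                total += os.path.getsize(fpath)
    return total
```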
siibra/retrieval/datasets.py CHANGED
@@ -14,10 +14,11 @@
 # limitations under the License.


-from .requests import EbrainsKgQuery, MultiSourcedRequest, GitlabProxy, GitlabProxyEnum
+from .requests import MultiSourcedRequest, GitlabProxy, GitlabProxyEnum

 import re
 from typing import Union, List
+from abc import ABC, abstractproperty

 try:
     from typing import TypedDict
@@ -44,15 +45,69 @@ EbrainsDatasetEmbargoStatus = TypedDict('EbrainsDatasetEmbargoStatus', {
     'identifier': List[str]
 })

+class EbrainsBaseDataset(ABC):
+
+    @abstractproperty
+    def id(self) -> str:
+        raise NotImplementedError
+
+    @abstractproperty
+    def name(self) -> str:
+        raise NotImplementedError
+
+    @abstractproperty
+    def urls(self) -> List[EbrainsDatasetUrl]:
+        raise NotImplementedError
+
+    @abstractproperty
+    def description(self) -> str:
+        raise NotImplementedError
+
+    @abstractproperty
+    def contributors(self) -> List[EbrainsDatasetPerson]:
+        raise NotImplementedError
+
+    @abstractproperty
+    def ebrains_page(self) -> str:
+        raise NotImplementedError
+
+    @abstractproperty
+    def custodians(self) -> List[EbrainsDatasetPerson]:
+        raise NotImplementedError
+
+    def __hash__(self):
+        return hash(self.id)
+
+    def __eq__(self, o: object) -> bool:
+        return hasattr(o, "id") and self.id == o.id
+
+    def match(self, spec: Union[str, 'EbrainsBaseDataset']) -> bool:
+        """
+        Checks if the given specification describes this dataset.
+
+        Parameters
+        ----------
+        spec (str, EbrainsBaseDataset)
+            specification to be matched.
+        Returns
+        -------
+        bool
+        """
+        if spec is self:
+            return True
+        if isinstance(spec, str):
+            return self.id == spec
+        raise RuntimeError(f"Cannot match {spec.__class__}, must be either str or EbrainsBaseDataset")

-class EbrainsDataset:
+class EbrainsDataset(EbrainsBaseDataset):

     def __init__(self, id, name=None, embargo_status: List[EbrainsDatasetEmbargoStatus] = None, *, cached_data=None):
+        super().__init__()

         self._id = id
+        self._name = name
         self._cached_data = cached_data
         self.embargo_status = embargo_status
-        self._name_cached = name

         if id is None:
             raise TypeError("Dataset id is required")
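Worth noting: `abc.abstractproperty`, which the new base class relies on, has been deprecated since Python 3.3. The equivalent modern spelling stacks `@property` on `@abstractmethod`; shown here for a single property as an illustration (hypothetical class name, not the code shipped in this release):

```python
from abc import ABC, abstractmethod

class Dataset(ABC):  # hypothetical name, for illustration only
    @property
    @abstractmethod
    def id(self) -> str:
        """Unique identifier of the dataset."""
        raise NotImplementedError
```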
@@ -78,20 +133,15 @@ class EbrainsDataset:
                     GitlabProxyEnum.DATASET_V1,
                     instance_id=instance_id,
                 ),
-                EbrainsKgQuery(
-                    query_id="interactiveViewerKgQuery-v1_0",
-                    instance_id=instance_id,
-                    params={"vocab": "https://schema.hbp.eu/myQuery/"},
-                )
             ]
         ).data
         return self._cached_data

     @property
     def name(self) -> str:
-        if self._name_cached is None:
-            self._name_cached = self.detail.get("name")
-        return self._name_cached
+        if self._name is None:
+            self._name = self.detail.get("name")
+        return self._name

     @property
     def urls(self) -> List[EbrainsDatasetUrl]:
@@ -118,26 +168,96 @@
     def custodians(self) -> EbrainsDatasetPerson:
         return self.detail.get("custodians")

+class EbrainsV3DatasetVersion(EbrainsBaseDataset):
+
+    @staticmethod
+    def parse_person(d: dict) -> EbrainsDatasetPerson:
+        assert "https://openminds.ebrains.eu/core/Person" in d.get("type"), f"Cannot convert a non person to a person dict!"
+        _id = d.get('id')
+        name = f"{d.get('givenName')} {d.get('familyName')}"
+        return {
+            '@id': _id,
+            'schema.org/shortName': name,
+            'identifier': _id,
+            "shortName": name,
+            "name": name
+        }
+
+    def __init__(self, id, *, cached_data=None) -> None:
+        super().__init__()
+
+        self._id = id
+        self._cached_data = cached_data
+
+    @property
+    def detail(self):
+        if not self._cached_data:
+            match = re.search(r"([a-f0-9-]+)$", self._id)
+            instance_id = match.group(1)
+            self._cached_data = MultiSourcedRequest(
+                requests=[
+                    GitlabProxy(
+                        GitlabProxyEnum.DATASETVERSION_V3,
+                        instance_id=instance_id,
+                    ),
+                ]
+            ).data
+        return self._cached_data
+
+    @property
+    def id(self) -> str:
+        return self._id
+
+    @property
+    def name(self) -> str:
+        fullname = self.detail.get("fullName")
+        version_id = self.detail.get("versionIdentifier")
+        return f"{fullname} ({version_id})"
+
     @property
-    def key(self):
-        return self.id
+    def urls(self) -> List[EbrainsDatasetUrl]:
+        return [{
+            "url": doi.get("identifier", None)
+        } for doi in self.detail.get("doi", [])]
+
+    @property
+    def description(self) -> str:
+        return self.detail.get("description", "")

-    def __hash__(self):
-        return hash(self.id)
+    @property
+    def contributors(self) -> List[EbrainsDatasetPerson]:
+        return [EbrainsV3DatasetVersion.parse_person(d) for d in self.detail.get("author", [])]

-    def __eq__(self, o: object) -> bool:
-        if type(o) is not EbrainsDataset and not issubclass(type(o), EbrainsDataset):
-            return False
-        return self.id == o.id
+    @property
+    def ebrains_page(self) -> str:
+        if len(self.urls) > 0:
+            return self.urls[0].get("url")
+        return None
+
+    @property
+    def custodians(self) -> EbrainsDatasetPerson:
+        return [EbrainsV3DatasetVersion.parse_person(d) for d in self.detail.get("custodian", [])]

-    def match(self, spec: Union[str, 'EbrainsDataset']) -> bool:
-        """Checks of a given spec (of type str or EbrainsDataset) describes this dataset.
+class EbrainsV3Dataset(EbrainsBaseDataset):
+    # TODO finish implementing me
+    # some fields are currently missing, e.g. desc, contributors etc.
+    def __init__(self, id, *, cached_data=None) -> None:
+        super().__init__()

-        Args:
-            spec (str, EbrainsDataset): spec to be checked
-        """
-        if spec is self:
-            return True
-        if isinstance(spec, str):
-            return self.id == spec
-        raise RuntimeError(f"Cannot match {spec.__class__}, must be either str or EbrainsDataset")
+        self._id = id
+        self._cached_data = cached_data
+
+    @property
+    def detail(self):
+        if not self._cached_data:
+            match = re.search(r"([a-f0-9-]+)$", self._id)
+            instance_id = match.group(1)
+            self._cached_data = MultiSourcedRequest(
+                requests=[
+                    GitlabProxy(
+                        GitlabProxyEnum.DATASET_V3,
+                        instance_id=instance_id,
+                    ),
+                ]
+            ).data
+        return self._cached_data
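Both `detail` properties extract the KG instance id as the trailing run of hex digits and dashes at the end of the dataset id. For example (the fully qualified id below is a made-up UUID, for illustration only):

```python
import re

dataset_id = "https://kg.ebrains.eu/api/instances/0f5f9b76-0b8a-4e2b-9a8d-2e3c4d5e6f70"
match = re.search(r"([a-f0-9-]+)$", dataset_id)
assert match is not None
assert match.group(1) == "0f5f9b76-0b8a-4e2b-9a8d-2e3c4d5e6f70"
```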
siibra/retrieval/repositories.py CHANGED
@@ -16,11 +16,10 @@
 from .requests import DECODERS, HttpRequest, EbrainsRequest, SiibraHttpRequestError
 from .cache import CACHE

-from .. import logger
+from ..commons import logger, siibra_tqdm

 from abc import ABC, abstractmethod
 from urllib.parse import quote
-from tqdm import tqdm
 import os
 from zipfile import ZipFile
 from typing import List
@@ -84,9 +83,7 @@ class RepositoryConnector(ABC):
         if progress is None or all_cached:
             return result
         else:
-            return list(tqdm(
-                result, total=len(fnames), desc=progress, disable=logger.level > 20
-            ))
+            return list(siibra_tqdm(result, total=len(fnames), desc=progress))

     @classmethod
     def _from_url(cls, url: str):
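`siibra_tqdm` replaces the `disable=logger.level > 20` argument that each call site previously passed to `tqdm` directly. Its definition lives in `siibra/commons.py` (changed in this release but not shown in this diff); a minimal sketch consistent with this call site, offered purely as an assumption about its shape:

```python
from tqdm import tqdm
import logging

logger = logging.getLogger("siibra")

def siibra_tqdm(iterable=None, *args, **kwargs):
    # Silence progress bars unless the siibra logger is at INFO (20) or more
    # verbose, mirroring the old per-call-site `disable=logger.level > 20`.
    kwargs.setdefault("disable", logger.level > 20)
    return tqdm(iterable, *args, **kwargs)
```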
@@ -101,6 +98,9 @@ class RepositoryConnector(ABC):
                 f"connector from url '{url}'."
             )

+    def __eq__(self, other):
+        return self.base_url == other.base_url
+

 class LocalFileRepository(RepositoryConnector):
@@ -159,6 +159,9 @@ class LocalFileRepository(RepositoryConnector):
     def __str__(self):
         return f"{self.__class__.__name__} at {self._folder}"

+    def __eq__(self, other):
+        return self._folder == other._folder
+

 class GitlabConnector(RepositoryConnector):
@@ -270,6 +273,12 @@ class GitlabConnector(RepositoryConnector):
             with open(f"{archive_directory}/{folder}/{filename}", "rb") as fp:
                 return decoder(fp.read())

+    def __eq__(self, other):
+        return all([
+            self.base_url == other.base_url,
+            self.reftag == other.reftag
+        ])
+

 class ZipfileConnector(RepositoryConnector):
@@ -307,6 +316,9 @@ class ZipfileConnector(RepositoryConnector):
                 result.append(fname)
         return result

+    def __eq__(self, other):
+        return self.url == other.url
+
     class FileLoader:
         """
         Loads a file from the zip archive, but mimics the behaviour
@@ -610,9 +622,9 @@ class EbrainsPublicDatasetConnectorMinds(RepositoryConnector):
             EBRAINS dataset id of a public dataset in KG v3.
         title: str
             Part of dataset title as an alternative dataset specification (will ignore dataset_id then)
-        in_progress: bool (default:False)
+        in_progress: bool, default: False
             If true, will request datasets that are still under curation.
-            Will only work when autenticated with an appropriately privileged
+            Will only work when authenticated with an appropriately privileged
             user account.
         """
         stage = "IN_PROGRESS" if in_progress else "RELEASED"
@@ -625,7 +637,6 @@ class EbrainsPublicDatasetConnectorMinds(RepositoryConnector):
             logger.info(f"Using title '{title}' for EBRAINS dataset search, ignoring id '{dataset_id}'")
         url = f"{self.base_url}/{self.QUERY_ID}/instances?databaseScope={stage}&title={title}"
         req = EbrainsRequest(url, DECODERS[".json"])
-        print(req.cachefile)
         response = req.get()
         self._files = {}
         results = response.get('results', [])