siibra 1.0a1__1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of siibra might be problematic. Click here for more details.

Files changed (84) hide show
  1. siibra/VERSION +1 -0
  2. siibra/__init__.py +164 -0
  3. siibra/commons.py +823 -0
  4. siibra/configuration/__init__.py +17 -0
  5. siibra/configuration/configuration.py +189 -0
  6. siibra/configuration/factory.py +589 -0
  7. siibra/core/__init__.py +16 -0
  8. siibra/core/assignment.py +110 -0
  9. siibra/core/atlas.py +239 -0
  10. siibra/core/concept.py +308 -0
  11. siibra/core/parcellation.py +387 -0
  12. siibra/core/region.py +1223 -0
  13. siibra/core/space.py +131 -0
  14. siibra/core/structure.py +111 -0
  15. siibra/exceptions.py +63 -0
  16. siibra/experimental/__init__.py +19 -0
  17. siibra/experimental/contour.py +61 -0
  18. siibra/experimental/cortical_profile_sampler.py +57 -0
  19. siibra/experimental/patch.py +98 -0
  20. siibra/experimental/plane3d.py +256 -0
  21. siibra/explorer/__init__.py +17 -0
  22. siibra/explorer/url.py +222 -0
  23. siibra/explorer/util.py +87 -0
  24. siibra/features/__init__.py +117 -0
  25. siibra/features/anchor.py +224 -0
  26. siibra/features/connectivity/__init__.py +33 -0
  27. siibra/features/connectivity/functional_connectivity.py +57 -0
  28. siibra/features/connectivity/regional_connectivity.py +494 -0
  29. siibra/features/connectivity/streamline_counts.py +27 -0
  30. siibra/features/connectivity/streamline_lengths.py +27 -0
  31. siibra/features/connectivity/tracing_connectivity.py +30 -0
  32. siibra/features/dataset/__init__.py +17 -0
  33. siibra/features/dataset/ebrains.py +90 -0
  34. siibra/features/feature.py +970 -0
  35. siibra/features/image/__init__.py +27 -0
  36. siibra/features/image/image.py +115 -0
  37. siibra/features/image/sections.py +26 -0
  38. siibra/features/image/volume_of_interest.py +88 -0
  39. siibra/features/tabular/__init__.py +24 -0
  40. siibra/features/tabular/bigbrain_intensity_profile.py +77 -0
  41. siibra/features/tabular/cell_density_profile.py +298 -0
  42. siibra/features/tabular/cortical_profile.py +322 -0
  43. siibra/features/tabular/gene_expression.py +257 -0
  44. siibra/features/tabular/layerwise_bigbrain_intensities.py +62 -0
  45. siibra/features/tabular/layerwise_cell_density.py +95 -0
  46. siibra/features/tabular/receptor_density_fingerprint.py +192 -0
  47. siibra/features/tabular/receptor_density_profile.py +110 -0
  48. siibra/features/tabular/regional_timeseries_activity.py +294 -0
  49. siibra/features/tabular/tabular.py +139 -0
  50. siibra/livequeries/__init__.py +19 -0
  51. siibra/livequeries/allen.py +352 -0
  52. siibra/livequeries/bigbrain.py +197 -0
  53. siibra/livequeries/ebrains.py +145 -0
  54. siibra/livequeries/query.py +49 -0
  55. siibra/locations/__init__.py +91 -0
  56. siibra/locations/boundingbox.py +454 -0
  57. siibra/locations/location.py +115 -0
  58. siibra/locations/point.py +344 -0
  59. siibra/locations/pointcloud.py +349 -0
  60. siibra/retrieval/__init__.py +27 -0
  61. siibra/retrieval/cache.py +233 -0
  62. siibra/retrieval/datasets.py +389 -0
  63. siibra/retrieval/exceptions/__init__.py +27 -0
  64. siibra/retrieval/repositories.py +769 -0
  65. siibra/retrieval/requests.py +659 -0
  66. siibra/vocabularies/__init__.py +45 -0
  67. siibra/vocabularies/gene_names.json +29176 -0
  68. siibra/vocabularies/receptor_symbols.json +210 -0
  69. siibra/vocabularies/region_aliases.json +460 -0
  70. siibra/volumes/__init__.py +23 -0
  71. siibra/volumes/parcellationmap.py +1279 -0
  72. siibra/volumes/providers/__init__.py +20 -0
  73. siibra/volumes/providers/freesurfer.py +113 -0
  74. siibra/volumes/providers/gifti.py +165 -0
  75. siibra/volumes/providers/neuroglancer.py +736 -0
  76. siibra/volumes/providers/nifti.py +266 -0
  77. siibra/volumes/providers/provider.py +107 -0
  78. siibra/volumes/sparsemap.py +468 -0
  79. siibra/volumes/volume.py +892 -0
  80. siibra-1.0.0a1.dist-info/LICENSE +201 -0
  81. siibra-1.0.0a1.dist-info/METADATA +160 -0
  82. siibra-1.0.0a1.dist-info/RECORD +84 -0
  83. siibra-1.0.0a1.dist-info/WHEEL +5 -0
  84. siibra-1.0.0a1.dist-info/top_level.txt +1 -0
@@ -0,0 +1,27 @@
1
+ # Copyright 2018-2024
2
+ # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
+
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ """File digestion and caching."""
16
+
17
+ from .repositories import (
18
+ GitlabConnector,
19
+ OwncloudConnector,
20
+ EbrainsHdgConnector,
21
+ EbrainsPublicDatasetConnector,
22
+ LocalFileRepository,
23
+ ZipfileConnector
24
+ )
25
+ from .requests import HttpRequest, ZipfileRequest, EbrainsRequest, SiibraHttpRequestError
26
+ from .cache import CACHE
27
+ from .exceptions import NoSiibraConfigMirrorsAvailableException, TagNotFoundException
@@ -0,0 +1,233 @@
1
+ # Copyright 2018-2024
2
+ # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
+
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ """Maintaining and handling caching files on disk."""
16
+
17
+ import hashlib
18
+ import os
19
+ from appdirs import user_cache_dir
20
+ import tempfile
21
+ from functools import wraps
22
+ from enum import Enum
23
+ from typing import Callable, List, NamedTuple, Union
24
+ from concurrent.futures import ThreadPoolExecutor
25
+ from pathlib import Path
26
+ from filelock import FileLock as Lock
27
+
28
+ from ..commons import logger, SIIBRA_CACHEDIR, SKIP_CACHEINIT_MAINTENANCE, siibra_tqdm
29
+ from ..exceptions import WarmupRegException
30
+
31
+
32
def assert_folder(folder):
    """Guarantee a usable cache directory.

    Returns ``folder`` when it exists (or can be created) and is writable;
    otherwise falls back to a fresh temporary directory and warns the user.
    """
    try:
        os.makedirs(folder, exist_ok=True)
        if not os.access(folder, os.W_OK):
            raise OSError
        return folder
    except OSError:
        # cannot write to requested directory, create a temporary one.
        fallback = tempfile.mkdtemp(prefix="siibra-cache-")
        logger.warning(
            f"Siibra created a temporary cache directory at {fallback}, as "
            f"the requested folder ({folder}) was not usable. "
            "Please consider to set the SIIBRA_CACHEDIR environment variable "
            "to a suitable directory.")
        return fallback
51
+
52
+
53
class Cache:
    """Singleton managing siibra's on-disk file cache.

    Do not construct directly; use ``Cache.instance()``.
    """

    _instance = None
    # default location; may be overridden via SIIBRA_CACHEDIR in instance()
    folder = user_cache_dir(".".join(__name__.split(".")[:-1]), "")
    SIZE_GIB = 2  # maintenance will delete old files to stay below this limit

    def __init__(self):
        raise RuntimeError(
            "Call instance() to access "
            f"{self.__class__.__name__}")

    @classmethod
    def instance(cls):
        """
        Return an instance of the siibra cache. Create folder if needed.
        """
        if cls._instance is None:
            if SIIBRA_CACHEDIR:
                cls.folder = SIIBRA_CACHEDIR
            cls.folder = assert_folder(cls.folder)
            cls._instance = cls.__new__(cls)
            if SKIP_CACHEINIT_MAINTENANCE:
                logger.debug("Will not run maintenance on cache as SKIP_CACHE_MAINTENANCE is set to True.")
            else:
                cls._instance.run_maintenance()
        return cls._instance

    def clear(self):
        """Remove all cached files and recreate the cache folder."""
        import shutil

        logger.info(f"Clearing siibra cache at {self.folder}")
        shutil.rmtree(self.folder)
        self.folder = assert_folder(self.folder)

    def run_maintenance(self):
        """Shrink the cache by deleting the oldest entries first until the
        total size is below the accepted cache size (``Cache.SIZE_GIB``)."""
        # build a list of cache entries sorted by last access time (oldest first)
        files = [os.path.join(self.folder, fname) for fname in os.listdir(self.folder)]
        sfiles = sorted([(fn, os.stat(fn)) for fn in files], key=lambda t: t[1].st_atime)

        # Count how many of the oldest entries must be removed so that the
        # remaining ones fit into the accepted cache size.
        # Fixes two defects of the previous implementation: it deleted one
        # entry more than needed after the break, and it measured directory
        # contents with bare file names resolved against the current working
        # directory instead of the cache entry itself.
        # NOTE(review): for directory entries, st_size is the directory
        # inode size, not the recursive content size — same approximation
        # as before for the accounting step.
        remaining_gib = sum(t[1].st_size for t in sfiles) / 1024**3
        num_delete = 0
        for _, st in sfiles:
            if remaining_gib <= self.SIZE_GIB:
                break
            remaining_gib -= st.st_size / 1024**3
            num_delete += 1

        if num_delete > 0:
            logger.debug(f"Removing the {num_delete} oldest files to keep cache size below {self.SIZE_GIB:.2f} GiB.")
            for fn, _ in sfiles[:num_delete]:
                if os.path.isdir(fn):
                    import shutil
                    shutil.rmtree(fn)
                else:
                    os.remove(fn)

    @property
    def size(self):
        """Return the total size of the cache in GiB."""
        return sum(os.path.getsize(fn) for fn in self) / 1024**3

    def __iter__(self):
        """Iterate the full paths of all entries in the cache directory."""
        return (os.path.join(self.folder, f) for f in os.listdir(self.folder))

    def build_filename(self, str_rep: str, suffix=None) -> str:
        """
        Generate a filename in the cache.

        Parameters
        ----------
        str_rep: str
            Unique string representation of the item. Will be used to compute a hash.
        suffix: str. Default: None
            Optional file suffix, in order to allow filetype recognition by the name.

        Returns
        -------
        str
        """
        hashfile = os.path.join(
            self.folder, str(hashlib.sha256(str_rep.encode("ascii")).hexdigest())
        )
        if suffix is None:
            return hashfile
        if suffix.startswith("."):
            return hashfile + suffix
        return hashfile + "." + suffix
149
+
150
+
151
# Module-level singleton: the cache folder is resolved (and, unless disabled
# via SKIP_CACHEINIT_MAINTENANCE, maintenance is run) once at import time.
CACHE = Cache.instance()
152
+
153
+
154
class WarmupLevel(int, Enum):
    """Ordered warmup levels; ``Warmup.warmup`` runs every registered
    function whose level is at or below the requested one."""
    TEST = -1000
    INSTANCE = 1
    DATA = 5
158
+
159
+
160
class WarmupParam(NamedTuple):
    """Registration record for a cache warmup function."""
    level: Union[int, WarmupLevel]  # threshold at which the function runs
    fn: Callable  # the @wraps-wrapped callable stored by register_warmup_fn
    is_factory: bool = False  # if True, fn returns an iterable of callables to invoke
164
+
165
+
166
class Warmup:
    """Registry of functions used to pre-populate (warm up) the cache."""

    _warmup_fns: List[WarmupParam] = []

    @staticmethod
    def fn_eql(wrapped_fn, original_fn):
        # Equal when identical, or when wrapped_fn is a @wraps wrapper
        # around original_fn (exposed via __wrapped__).
        if wrapped_fn is original_fn:
            return True
        return wrapped_fn.__wrapped__ is original_fn

    @classmethod
    def is_registered(cls, fn):
        """Whether *fn* (or a wrapper of it) is already registered."""
        return any(cls.fn_eql(param.fn, fn) for param in cls._warmup_fns)

    @classmethod
    def register_warmup_fn(cls, warmup_level: WarmupLevel = WarmupLevel.INSTANCE, *, is_factory=False):
        """Decorator factory that registers the decorated function for warmup.

        Raises WarmupRegException when the function is already registered.
        """
        def outer(fn):
            if cls.is_registered(fn):
                raise WarmupRegException

            @wraps(fn)
            def inner(*args, **kwargs):
                return fn(*args, **kwargs)

            cls._warmup_fns.append(WarmupParam(warmup_level, inner, is_factory))
            return inner
        return outer

    @classmethod
    def deregister_warmup_fn(cls, original_fn):
        """Drop every registration matching *original_fn*."""
        retained = [
            param for param in cls._warmup_fns
            if not cls.fn_eql(param.fn, original_fn)
        ]
        cls._warmup_fns = retained

    @classmethod
    def warmup(cls, warmup_level: WarmupLevel = WarmupLevel.INSTANCE, *, max_workers=4):
        """Run all registered warmup functions at or below *warmup_level*."""
        selected = [param for param in cls._warmup_fns if param.level <= warmup_level]

        def run_one(param: WarmupParam):
            produced = param.fn()
            if not param.is_factory:
                return
            # factories return an iterable of callables to invoke in turn
            for callback in produced:
                callback()

        # serialize warmup across processes via a lock file in the cache
        with Lock(CACHE.build_filename("lockfile", ".warmup")):
            with ThreadPoolExecutor(max_workers=max_workers) as ex:
                for _ in siibra_tqdm(
                    ex.map(run_one, selected),
                    desc="Warming cache",
                    total=len(selected),
                ):
                    ...
223
+
224
+
225
# Prefer joblib's disk-backed memoization (stored under the siibra cache
# folder) for user-facing function caching; fall back to the in-memory
# functools.lru_cache when joblib is not installed.
try:
    from joblib import Memory
    jobmemory_path = Path(CACHE.folder) / "joblib"
    jobmemory_path.mkdir(parents=True, exist_ok=True)
    jobmemory = Memory(jobmemory_path, verbose=0)
    cache_user_fn = jobmemory.cache
except ImportError:
    from functools import lru_cache
    cache_user_fn = lru_cache
@@ -0,0 +1,389 @@
1
+ # Copyright 2018-2024
2
+ # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
+
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ """Metadata connection to EBRAINS datasets."""
16
+
17
+ from .requests import MultiSourcedRequest, GitlabProxy, GitlabProxyEnum
18
+
19
+ import re
20
+ from typing import Union, List
21
+ from abc import ABC, abstractproperty
22
+ from hashlib import md5
23
+
24
+ try:
25
+ from typing import TypedDict
26
+ except ImportError:
27
+ # support python 3.7
28
+ from typing_extensions import TypedDict
29
+
30
+
31
class EbrainsDatasetUrl(TypedDict):
    """A single URL entry pointing to an EBRAINS dataset."""
    url: str


# Person record shape used throughout this module; functional TypedDict
# syntax is required because some keys ("@id", "schema.org/shortName")
# are not valid Python identifiers.
EbrainsDatasetPerson = TypedDict("EbrainsDatasetPerson", {
    "@id": str,
    "schema.org/shortName": str,
    "identifier": str,
    "shortName": str,
    "name": str,
})

# Embargo status record of an EBRAINS dataset.
EbrainsDatasetEmbargoStatus = TypedDict("EbrainsDatasetEmbargoStatus", {
    "@id": str,
    "name": str,
    "identifier": List[str]
})
48
+
49
+
50
class EbrainsBaseDataset(ABC):
    """Common interface of EBRAINS dataset metadata objects (v1 and v3).

    Subclasses provide a ``_detail`` mapping with the raw metadata record.
    """

    @abstractproperty
    def id(self) -> str:
        raise NotImplementedError

    @abstractproperty
    def name(self) -> str:
        raise NotImplementedError

    @abstractproperty
    def urls(self) -> List[EbrainsDatasetUrl]:
        raise NotImplementedError

    @abstractproperty
    def description(self) -> str:
        raise NotImplementedError

    @abstractproperty
    def contributors(self) -> List[EbrainsDatasetPerson]:
        raise NotImplementedError

    @abstractproperty
    def ebrains_page(self) -> str:
        raise NotImplementedError

    @abstractproperty
    def custodians(self) -> List[EbrainsDatasetPerson]:
        raise NotImplementedError

    def __hash__(self):
        return hash(self.id)

    def __eq__(self, o: object) -> bool:
        return hasattr(o, "id") and self.id == o.id

    def match(self, spec: Union[str, 'EbrainsBaseDataset']) -> bool:
        """
        Checks if the given specification describes this dataset.

        Parameters
        ----------
        spec (str, EbrainsBaseDataset)
            specification to be matched.
        Returns
        -------
        bool

        Raises
        ------
        RuntimeError
            If spec is neither a str nor an EbrainsBaseDataset.
        """
        if spec is self:
            return True
        if isinstance(spec, str):
            return self.id == spec
        # Fix: the docstring advertises EbrainsBaseDataset specs, but the
        # previous implementation accepted only the identical object and
        # raised for any other dataset instance. Compare by id instead.
        if isinstance(spec, EbrainsBaseDataset):
            return self == spec
        raise RuntimeError(
            f"Cannot match {spec.__class__}, must be either str or EbrainsBaseDataset"
        )

    @property
    def LICENSE(self) -> str:
        # The license field may be a single string or a list of strings;
        # lists are joined with newlines. Returns None when absent/empty.
        license_ = self._detail.get("license", [])
        if len(license_) > 0:
            return license_ if isinstance(license_, str) else '\n'.join(license_)
        return None
111
+
112
+
113
class EbrainsDataset(EbrainsBaseDataset):
    """Ebrains dataset v1 connection"""

    def __init__(self, id, name=None, embargo_status: List[EbrainsDatasetEmbargoStatus] = None, *, cached_data=None):
        """
        Parameters
        ----------
        id : str
            Knowledge-graph identifier; must end in a hex/dash token from
            which the instance id can be extracted.
        name : str, optional
            Dataset name; fetched lazily from the metadata record if omitted.
        embargo_status : List[EbrainsDatasetEmbargoStatus], optional
        cached_data : dict, optional
            Pre-fetched detail record; skips the remote request when given.
        """
        super().__init__()

        self._id = id
        self._name = name
        self._cached_data = cached_data
        self.embargo_status = embargo_status

        if id is None:
            raise TypeError("Dataset id is required")

        # validate that an instance id can be extracted from the id
        match = re.search(r"([a-f0-9-]+)$", id)
        if not match:
            raise ValueError(
                f"{self.__class__.__name__} initialized with invalid id: {self.id}"
            )

    @property
    def id(self) -> str:
        return self._id

    @property
    def _detail(self):
        # Lazily fetch and memoize the raw metadata record via the gitlab
        # proxy (dataset v1 endpoint).
        if not self._cached_data:
            match = re.search(r"([a-f0-9-]+)$", self.id)
            instance_id = match.group(1)
            self._cached_data = MultiSourcedRequest(
                requests=[
                    GitlabProxy(
                        GitlabProxyEnum.DATASET_V1,
                        instance_id=instance_id,
                    ),
                ]
            ).data
        return self._cached_data

    @property
    def name(self) -> str:
        # fetched from the detail record on first access, then memoized
        if self._name is None:
            self._name = self._detail.get("name")
        return self._name

    @property
    def urls(self) -> List[EbrainsDatasetUrl]:
        # kgReference entries that are not full URLs are treated as DOIs
        return [
            {
                "url": f if f.startswith("http") else f"https://doi.org/{f}",
            }
            for f in self._detail.get("kgReference", [])
        ]

    @property
    def description(self) -> str:
        return self._detail.get("description")

    @property
    def contributors(self) -> List[EbrainsDatasetPerson]:
        return self._detail.get("contributors")

    @property
    def ebrains_page(self) -> str:
        """URL of this dataset in the EBRAINS knowledge graph search."""
        return f"https://search.kg.ebrains.eu/instances/{self.id}"

    @property
    def custodians(self) -> List[EbrainsDatasetPerson]:
        return self._detail.get("custodians")
182
+
183
+
184
class EbrainsV3DatasetVersion(EbrainsBaseDataset):
    """A single version of an EBRAINS (openMINDS v3) dataset."""

    @staticmethod
    def _parse_person(d: dict) -> EbrainsDatasetPerson:
        """Convert an openMINDS person record into the legacy person dict."""
        # Explicit check instead of `assert`: asserts are stripped under -O,
        # and `d.get("type")` may be None, which would raise TypeError on `in`.
        if "https://openminds.ebrains.eu/core/Person" not in (d.get("type") or ()):
            raise ValueError("Cannot convert a non person to a person dict!")
        _id = d.get("id")
        name = f"{d.get('givenName')} {d.get('familyName')}"
        return {
            '@id': _id,
            'schema.org/shortName': name,
            'identifier': _id,
            'shortName': name,
            'name': name
        }

    def __init__(self, id, *, cached_data=None) -> None:
        """
        Parameters
        ----------
        id : str
            Knowledge-graph identifier of the dataset version.
        cached_data : dict, optional
            Pre-fetched detail record; skips the remote request when given.
        """
        super().__init__()

        self._id = id
        self._cached_data = cached_data

    @property
    def _detail(self):
        # Lazily fetch and memoize the raw metadata record via the gitlab
        # proxy (dataset version v3 endpoint).
        if not self._cached_data:
            match = re.search(r"([a-f0-9-]+)$", self._id)
            instance_id = match.group(1)
            self._cached_data = MultiSourcedRequest(
                requests=[
                    GitlabProxy(
                        GitlabProxyEnum.DATASETVERSION_V3,
                        instance_id=instance_id,
                    ),
                ]
            ).data
        return self._cached_data

    @property
    def id(self) -> str:
        return self._id

    @property
    def name(self) -> str:
        """Full dataset name with the version identifier appended."""
        fullname = self._detail.get("fullName")
        if not fullname:
            # fall back to the first parent dataset that provides a name
            for dataset in self.is_version_of:
                fullname = dataset.name
                if fullname:
                    break
        version_id = self._detail.get("versionIdentifier")
        return f"{fullname} ({version_id})"

    @property
    def is_version_of(self):
        """Parent datasets this object is a version of (built lazily)."""
        if not hasattr(self, "_is_version_of"):
            self._is_version_of = [EbrainsV3Dataset(id=id.get("id")) for id in self._detail.get("isVersionOf", [])]
        return self._is_version_of

    @property
    def urls(self) -> List[EbrainsDatasetUrl]:
        # one entry per DOI record of this version
        return [{
            "url": doi.get("identifier", None)
        } for doi in self._detail.get("doi", [])]

    @property
    def description(self) -> str:
        description = self._detail.get("description")
        # fall back to the first parent dataset that provides a description
        for ds in self.is_version_of:
            if description:
                break
            description = ds.description
        return description or ""

    @property
    def contributors(self) -> List[EbrainsDatasetPerson]:
        return [EbrainsV3DatasetVersion._parse_person(d) for d in self._detail.get("author", [])]

    @property
    def ebrains_page(self) -> str:
        """URL of the first DOI entry, or None when no DOI is listed."""
        if len(self.urls) > 0:
            return self.urls[0].get("url")
        return None

    @property
    def custodians(self) -> List[EbrainsDatasetPerson]:
        return [EbrainsV3DatasetVersion._parse_person(d) for d in self._detail.get("custodian", [])]

    @property
    def version_changelog(self):
        return self._detail.get("versionInnovation", "")

    @property
    def version_identifier(self):
        return self._detail.get("versionIdentifier", "")
276
+
277
+
278
class EbrainsV3Dataset(EbrainsBaseDataset):
    """An EBRAINS (openMINDS v3) dataset, aggregating over its versions."""

    def __init__(self, id, *, cached_data=None) -> None:
        # id: knowledge-graph identifier; cached_data: optional pre-fetched
        # detail record, bypassing the remote request.
        super().__init__()

        self._id = id
        self._cached_data = cached_data
        # memoized contributor list; NOTE(review): the attribute keeps the
        # historical misspelling "_contributers"
        self._contributers = None

    @property
    def id(self) -> str:
        return self._id

    @property
    def name(self) -> str:
        return self._detail.get("fullName")

    @property
    def urls(self) -> List[EbrainsDatasetUrl]:
        # one entry per DOI record of this dataset
        return [{
            "url": doi.get("identifier", None)
        } for doi in self._detail.get("doi", [])]

    @property
    def _detail(self):
        # Lazily fetch and memoize the raw metadata record via the gitlab
        # proxy (dataset v3 endpoint).
        if not self._cached_data:
            match = re.search(r"([a-f0-9-]+)$", self._id)
            instance_id = match.group(1)
            self._cached_data = MultiSourcedRequest(
                requests=[
                    GitlabProxy(
                        GitlabProxyEnum.DATASET_V3,
                        instance_id=instance_id,
                    ),
                ]
            ).data
        return self._cached_data

    @property
    def description(self) -> str:
        return self._detail.get("description", "")

    @property
    def contributors(self) -> List[EbrainsDatasetPerson]:
        # Union of the contributors of all versions, de-duplicated by their
        # "@id" key; computed once and memoized.
        if self._contributers is None:
            contributers = {}
            for version_id in self.version_ids:
                contributers.update(
                    {c['@id']: c for c in EbrainsV3DatasetVersion(version_id).contributors}
                )
            self._contributers = list(contributers.values())
        return self._contributers

    @property
    def ebrains_page(self) -> str:
        # URL of the first DOI entry, or None when no DOI is listed
        if len(self.urls) > 0:
            return self.urls[0].get("url")
        return None

    @property
    def custodians(self) -> List[EbrainsDatasetPerson]:
        return [EbrainsV3DatasetVersion._parse_person(d) for d in self._detail.get("custodian", [])]

    @property
    def version_ids(self) -> List['str']:
        """Ids of all versions belonging to this dataset."""
        return [version.get("id") for version in self._detail.get("versions", [])]
343
+
344
+
345
class GenericDataset():
    """Plain in-memory dataset metadata, mirroring the EBRAINS dataset API."""

    def __init__(
        self,
        name: str = None,
        contributors: List[str] = None,
        url: str = None,
        description: str = None,
        license: str = None
    ):
        self._name = name
        self._contributors = contributors
        self._url = url
        self._description = description
        self._license = license

    @property
    def id(self) -> str:
        """Stable identifier derived from the dataset name."""
        return md5(self.name.encode('utf-8')).hexdigest()

    @property
    def name(self) -> str:
        return self._name

    @property
    def description(self) -> str:
        return self._description

    @property
    def contributors(self):
        """Contributor names wrapped as dicts, matching the EBRAINS shape."""
        return [{"name": person} for person in self._contributors]

    @property
    def urls(self) -> List[EbrainsDatasetUrl]:
        """The single configured URL, wrapped as a one-element list."""
        return [{"url": self._url}]

    @property
    def LICENSE(self) -> str:
        return self._license

    def __hash__(self):
        return hash(self.id)

    def __eq__(self, o: object) -> bool:
        return hasattr(o, "id") and self.id == o.id
@@ -0,0 +1,27 @@
1
+ # Copyright 2018-2024
2
+ # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
+
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ """Exceptions concerning file retrieval processes."""
16
+
17
+
18
class NoSiibraConfigMirrorsAvailableException(Exception):
    """Raised when no siibra configuration mirror could be used."""
    pass
20
+
21
+
22
class TagNotFoundException(Exception):
    """Raised when a requested repository tag cannot be found."""
    pass
24
+
25
+
26
class EbrainsAuthenticationError(Exception):
    """Raised when authentication against EBRAINS fails."""
    pass