siibra 1.0a1__1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of siibra might be problematic. Click here for more details.
- siibra/VERSION +1 -0
- siibra/__init__.py +164 -0
- siibra/commons.py +823 -0
- siibra/configuration/__init__.py +17 -0
- siibra/configuration/configuration.py +189 -0
- siibra/configuration/factory.py +589 -0
- siibra/core/__init__.py +16 -0
- siibra/core/assignment.py +110 -0
- siibra/core/atlas.py +239 -0
- siibra/core/concept.py +308 -0
- siibra/core/parcellation.py +387 -0
- siibra/core/region.py +1223 -0
- siibra/core/space.py +131 -0
- siibra/core/structure.py +111 -0
- siibra/exceptions.py +63 -0
- siibra/experimental/__init__.py +19 -0
- siibra/experimental/contour.py +61 -0
- siibra/experimental/cortical_profile_sampler.py +57 -0
- siibra/experimental/patch.py +98 -0
- siibra/experimental/plane3d.py +256 -0
- siibra/explorer/__init__.py +17 -0
- siibra/explorer/url.py +222 -0
- siibra/explorer/util.py +87 -0
- siibra/features/__init__.py +117 -0
- siibra/features/anchor.py +224 -0
- siibra/features/connectivity/__init__.py +33 -0
- siibra/features/connectivity/functional_connectivity.py +57 -0
- siibra/features/connectivity/regional_connectivity.py +494 -0
- siibra/features/connectivity/streamline_counts.py +27 -0
- siibra/features/connectivity/streamline_lengths.py +27 -0
- siibra/features/connectivity/tracing_connectivity.py +30 -0
- siibra/features/dataset/__init__.py +17 -0
- siibra/features/dataset/ebrains.py +90 -0
- siibra/features/feature.py +970 -0
- siibra/features/image/__init__.py +27 -0
- siibra/features/image/image.py +115 -0
- siibra/features/image/sections.py +26 -0
- siibra/features/image/volume_of_interest.py +88 -0
- siibra/features/tabular/__init__.py +24 -0
- siibra/features/tabular/bigbrain_intensity_profile.py +77 -0
- siibra/features/tabular/cell_density_profile.py +298 -0
- siibra/features/tabular/cortical_profile.py +322 -0
- siibra/features/tabular/gene_expression.py +257 -0
- siibra/features/tabular/layerwise_bigbrain_intensities.py +62 -0
- siibra/features/tabular/layerwise_cell_density.py +95 -0
- siibra/features/tabular/receptor_density_fingerprint.py +192 -0
- siibra/features/tabular/receptor_density_profile.py +110 -0
- siibra/features/tabular/regional_timeseries_activity.py +294 -0
- siibra/features/tabular/tabular.py +139 -0
- siibra/livequeries/__init__.py +19 -0
- siibra/livequeries/allen.py +352 -0
- siibra/livequeries/bigbrain.py +197 -0
- siibra/livequeries/ebrains.py +145 -0
- siibra/livequeries/query.py +49 -0
- siibra/locations/__init__.py +91 -0
- siibra/locations/boundingbox.py +454 -0
- siibra/locations/location.py +115 -0
- siibra/locations/point.py +344 -0
- siibra/locations/pointcloud.py +349 -0
- siibra/retrieval/__init__.py +27 -0
- siibra/retrieval/cache.py +233 -0
- siibra/retrieval/datasets.py +389 -0
- siibra/retrieval/exceptions/__init__.py +27 -0
- siibra/retrieval/repositories.py +769 -0
- siibra/retrieval/requests.py +659 -0
- siibra/vocabularies/__init__.py +45 -0
- siibra/vocabularies/gene_names.json +29176 -0
- siibra/vocabularies/receptor_symbols.json +210 -0
- siibra/vocabularies/region_aliases.json +460 -0
- siibra/volumes/__init__.py +23 -0
- siibra/volumes/parcellationmap.py +1279 -0
- siibra/volumes/providers/__init__.py +20 -0
- siibra/volumes/providers/freesurfer.py +113 -0
- siibra/volumes/providers/gifti.py +165 -0
- siibra/volumes/providers/neuroglancer.py +736 -0
- siibra/volumes/providers/nifti.py +266 -0
- siibra/volumes/providers/provider.py +107 -0
- siibra/volumes/sparsemap.py +468 -0
- siibra/volumes/volume.py +892 -0
- siibra-1.0.0a1.dist-info/LICENSE +201 -0
- siibra-1.0.0a1.dist-info/METADATA +160 -0
- siibra-1.0.0a1.dist-info/RECORD +84 -0
- siibra-1.0.0a1.dist-info/WHEEL +5 -0
- siibra-1.0.0a1.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,197 @@
|
|
|
1
|
+
# Copyright 2018-2024
|
|
2
|
+
# Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
|
|
3
|
+
|
|
4
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
5
|
+
# you may not use this file except in compliance with the License.
|
|
6
|
+
# You may obtain a copy of the License at
|
|
7
|
+
|
|
8
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
9
|
+
|
|
10
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
11
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
12
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
13
|
+
# See the License for the specific language governing permissions and
|
|
14
|
+
# limitations under the License.
|
|
15
|
+
"""Matches BigBrain intensity profiles extracted by Wagstyl et al. to volumes."""
|
|
16
|
+
|
|
17
|
+
from . import query
|
|
18
|
+
|
|
19
|
+
from ..features.tabular import bigbrain_intensity_profile, layerwise_bigbrain_intensities
|
|
20
|
+
from ..features import anchor as _anchor
|
|
21
|
+
from ..commons import logger
|
|
22
|
+
from ..locations import point, pointcloud
|
|
23
|
+
from ..core import structure
|
|
24
|
+
from ..retrieval import requests, cache
|
|
25
|
+
from ..retrieval.datasets import GenericDataset
|
|
26
|
+
|
|
27
|
+
import numpy as np
|
|
28
|
+
from typing import List
|
|
29
|
+
from os import path
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
class WagstylProfileLoader:
    """
    Downloads and caches the BigBrain cortical intensity profiles, layer
    thicknesses and left-hemisphere surface mesh from the Wagstyl et al.
    cortical layers tutorial repository.

    The arrays are stored on class attributes (``_profiles``, ``_vertices``,
    ``_boundary_depths``) so the download and preprocessing happen only once
    per process; all instances share the cached data.
    """

    # Base URL for raw data files in the tutorial repository.
    REPO = "https://github.com/kwagstyl/cortical_layers_tutorial/raw/main"
    # Staining intensity profiles, one row per left-hemisphere vertex.
    PROFILES_FILE_LEFT = "data/profiles_left.npy"
    # Per-vertex cortical layer thicknesses (in mm).
    THICKNESSES_FILE_LEFT = "data/thicknesses_left.npy"
    # Left-hemisphere gray matter surface mesh (GIFTI).
    MESH_FILE_LEFT = "data/gray_left_327680.surf.gii"
    # Class-level caches, populated once by _load().
    _profiles = None
    _vertices = None
    _boundary_depths = None
    # Dataset metadata attached to every feature built from this data.
    DATASET = GenericDataset(
        name="HIBALL workshop on cortical layers",
        contributors=[
            'Konrad Wagstyl',
            'Stéphanie Larocque',
            'Guillem Cucurull',
            'Claude Lepage',
            'Joseph Paul Cohen',
            'Sebastian Bludau',
            'Nicola Palomero-Gallagher',
            'Lindsay B. Lewis',
            'Thomas Funck',
            'Hannah Spitzer',
            'Timo Dickscheid',
            'Paul C. Fletcher',
            'Adriana Romero',
            'Karl Zilles',
            'Katrin Amunts',
            'Yoshua Bengio',
            'Alan C. Evans'
        ],
        url="https://github.com/kwagstyl/cortical_layers_tutorial/",
        description="Cortical profiles of BigBrain staining intensities computed by Konrad Wagstyl, "
        "as described in the publication 'Wagstyl, K., et al (2020). BigBrain 3D atlas of "
        "cortical layers: Cortical and laminar thickness gradients diverge in sensory and "
        "motor cortices. PLoS Biology, 18(4), e3000678. "
        "http://dx.doi.org/10.1371/journal.pbio.3000678."
        "The data is taken from the tutorial at "
        "https://github.com/kwagstyl/cortical_layers_tutorial. Each vertex is "
        "assigned to the regional map when queried."
    )

    def __init__(self):
        """Trigger the one-time class-level data load on first use."""
        if self._profiles is None:
            self.__class__._load()

    @property
    def profile_labels(self):
        """Evenly spaced relative depth values in [0, 1), one per profile sample."""
        return np.arange(0., 1., 1. / self._profiles.shape[1])

    @classmethod
    def _load(cls):
        """
        Download thicknesses, profiles and mesh vertices; keep only vertices
        with positive total thickness, and cache the results on the class.
        """
        # read thicknesses, in mm, and normalize by their last column which is the total thickness
        # NOTE(review): total thickness is actually recomputed by summing the
        # layer columns (thickness[:, :-1]), not read from the last column as
        # the comment above suggests — confirm which is intended.
        thickness = requests.HttpRequest(f"{cls.REPO}/{cls.THICKNESSES_FILE_LEFT}").data.T
        total_thickness = thickness[:, :-1].sum(1)  # last column is the computed total thickness
        valid = np.where(total_thickness > 0)[0]
        # Cumulative normalized thicknesses become the layer boundary depths,
        # prefixed with a zero column for the cortical surface.
        cls._boundary_depths = np.c_[np.zeros_like(valid), (thickness[valid, :] / total_thickness[valid, None]).cumsum(1)]
        cls._boundary_depths[:, -1] = 1  # account for float calculation errors

        # find profiles with valid thickness
        profile_l_url = f"{cls.REPO}/{cls.PROFILES_FILE_LEFT}"
        if not path.exists(cache.CACHE.build_filename(profile_l_url)):
            logger.info(
                "First request to BigBrain profiles. Preprocessing the data "
                "now. This may take a little."
            )
        profiles_l_all = requests.HttpRequest(profile_l_url).data
        cls._profiles = profiles_l_all[valid, :]

        # read mesh vertices
        mesh_left = requests.HttpRequest(f"{cls.REPO}/{cls.MESH_FILE_LEFT}").data
        mesh_vertices = mesh_left.darrays[0].data
        cls._vertices = mesh_vertices[valid, :]

        logger.debug(f"{cls._profiles.shape[0]} BigBrain intensity profiles.")
        assert cls._vertices.shape[0] == cls._profiles.shape[0]

    def __len__(self):
        """Number of vertices (== number of profiles) with valid thickness."""
        return self._vertices.shape[0]
|
|
110
|
+
|
|
111
|
+
|
|
112
|
+
# Pre-load the profile data during cache warmup. The classmethod itself is a
# no-argument callable, so it can be registered directly without a lambda.
cache.Warmup.register_warmup_fn()(WagstylProfileLoader._load)
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
class BigBrainProfileQuery(query.LiveQuery, args=[], FeatureType=bigbrain_intensity_profile.BigBrainIntensityProfile):
    """
    Live query that builds one BigBrainIntensityProfile feature for every
    profile vertex intersecting the queried brain structure.
    """

    def __init__(self):
        query.LiveQuery.__init__(self)

    def query(self, concept: structure.BrainStructure, **kwargs) -> List[bigbrain_intensity_profile.BigBrainIntensityProfile]:
        """
        Intersect the profile mesh vertices with the given structure and
        return one intensity profile feature per matched vertex.

        Parameters
        ----------
        concept : structure.BrainStructure
            Structure (e.g. region or location) used to filter vertices.

        Returns
        -------
        List[bigbrain_intensity_profile.BigBrainIntensityProfile]
            Empty list if no vertex intersects the structure.
        """
        loader = WagstylProfileLoader()
        mesh_vertices = pointcloud.PointCloud(loader._vertices, space='bigbrain')
        matched = concept.intersection(mesh_vertices)  # returns a reduced PointCloud with og indices as labels
        if matched is None:
            return []
        assert isinstance(matched, pointcloud.PointCloud)
        indices = matched.labels
        assert indices is not None
        features = []
        # Iterate over the validated labels. (Fix: the original re-read
        # matched.labels here, leaving the asserted `indices` unused.)
        for i in indices:
            anchor = _anchor.AnatomicalAnchor(
                location=point.Point(loader._vertices[i], space='bigbrain'),
                region=str(concept),
                species='Homo sapiens'
            )
            prof = bigbrain_intensity_profile.BigBrainIntensityProfile(
                anchor=anchor,
                depths=loader.profile_labels,
                values=loader._profiles[i],
                boundaries=loader._boundary_depths[i]
            )
            # Record why this feature matches the query structure.
            prof.anchor._assignments[concept] = _anchor.AnatomicalAssignment(
                query_structure=concept,
                assigned_structure=concept,
                qualification=_anchor.Qualification.CONTAINED,
                explanation=f"Surface vertex of BigBrain cortical profile was filtered using {concept}"
            )
            prof.datasets = [WagstylProfileLoader.DATASET]
            features.append(prof)

        return features
|
|
152
|
+
|
|
153
|
+
|
|
154
|
+
class LayerwiseBigBrainIntensityQuery(query.LiveQuery, args=[], FeatureType=layerwise_bigbrain_intensities.LayerwiseBigBrainIntensities):
    """
    Live query aggregating BigBrain intensity profiles of all vertices
    matched by a brain structure into layer-wise mean/std statistics.
    """

    def __init__(self):
        query.LiveQuery.__init__(self)

    def query(self, concept: structure.BrainStructure, **kwargs) -> List[layerwise_bigbrain_intensities.LayerwiseBigBrainIntensities]:
        """
        Intersect the profile mesh vertices with the given structure and
        return a single feature with mean/std intensity per cortical layer.

        Parameters
        ----------
        concept : structure.BrainStructure
            Structure (e.g. region or location) used to filter vertices.

        Returns
        -------
        List[layerwise_bigbrain_intensities.LayerwiseBigBrainIntensities]
            A one-element list, or an empty list if nothing matches.
        """
        loader = WagstylProfileLoader()
        mesh_vertices = pointcloud.PointCloud(loader._vertices, space='bigbrain')
        matched = concept.intersection(mesh_vertices)  # returns a reduced PointCloud with og indices as labels
        if matched is None:
            return []
        assert isinstance(matched, pointcloud.PointCloud)
        indices = matched.labels
        assert indices is not None
        matched_profiles = loader._profiles[indices, :]
        boundary_depths = loader._boundary_depths[indices, :]
        # compute array of layer labels for all coefficients in profiles_left
        N = matched_profiles.shape[1]
        prange = np.arange(N)
        # For each profile, count how many layer boundaries lie at or below
        # each sample position; subtracting from 7 yields a label in 1..6.
        # (Fix: reshape previously hard-coded 200 samples; use the actual
        # profile length N. Also dropped an unused enumerate index.)
        layer_labels = 7 - np.array([
            [np.array([[(prange < T) * 1] for T in (b * N).astype('int')]).squeeze().sum(0)]
            for b in boundary_depths
        ]).reshape((-1, N))

        anchor = _anchor.AnatomicalAnchor(
            location=pointcloud.PointCloud(loader._vertices[indices, :], space='bigbrain'),
            region=str(concept),
            species='Homo sapiens'
        )
        result = layerwise_bigbrain_intensities.LayerwiseBigBrainIntensities(
            anchor=anchor,
            means=[matched_profiles[layer_labels == layer].mean() for layer in range(1, 7)],
            stds=[matched_profiles[layer_labels == layer].std() for layer in range(1, 7)],
        )
        # Record why this feature matches the query structure.
        result.anchor._assignments[concept] = _anchor.AnatomicalAssignment(
            query_structure=concept,
            assigned_structure=concept,
            qualification=_anchor.Qualification.CONTAINED,
            explanation=f"Surface vertices of BigBrain cortical profiles were filtered using {concept}"
        )
        result.datasets = [WagstylProfileLoader.DATASET]

        return [result]
|
|
@@ -0,0 +1,145 @@
|
|
|
1
|
+
# Copyright 2018-2024
|
|
2
|
+
# Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
|
|
3
|
+
|
|
4
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
5
|
+
# you may not use this file except in compliance with the License.
|
|
6
|
+
# You may obtain a copy of the License at
|
|
7
|
+
|
|
8
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
9
|
+
|
|
10
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
11
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
12
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
13
|
+
# See the License for the specific language governing permissions and
|
|
14
|
+
# limitations under the License.
|
|
15
|
+
"""Query data features published as Ebrains datasets with AtlasConcepts"""
|
|
16
|
+
|
|
17
|
+
from ..features.dataset import ebrains as _ebrains
|
|
18
|
+
from . import query
|
|
19
|
+
|
|
20
|
+
from ..commons import logger, siibra_tqdm
|
|
21
|
+
from ..features import anchor as _anchor
|
|
22
|
+
from ..retrieval import requests, datasets, cache
|
|
23
|
+
from ..core import parcellation, region
|
|
24
|
+
|
|
25
|
+
from collections import defaultdict
|
|
26
|
+
import re
|
|
27
|
+
from packaging.version import Version
|
|
28
|
+
from tempfile import NamedTemporaryFile
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
class EbrainsFeatureQuery(query.LiveQuery, args=[], FeatureType=_ebrains.EbrainsDataFeature):
    """
    Live query yielding EBRAINS dataset features anchored to brain regions.

    Datasets are fetched from a Gitlab-proxied index of region/dataset
    mappings. When ``COMPACT_FEATURE_LIST`` is enabled, datasets whose names
    encode a version are collapsed so that only the newest version is
    yielded, with older versions linked as a history chain.
    """

    # in EBRAINS knowledge graph prior to v3, versions were modelled
    # in dataset names. Typically found formats are (v1.0) and [rat, v2.1]
    VERSION_PATTERN = re.compile(r"^(.*?) *[\[\(][^v]*?(v[0-9].*?)[\]\)]")
    COMPACT_FEATURE_LIST = True

    # datasets whose name contains any of these strings will be ignored
    _BLACKLIST = {
        "Whole-brain parcellation of the Julich-Brain Cytoarchitectonic Atlas",
        "whole-brain collections of cytoarchitectonic probabilistic maps",
        "DiFuMo atlas",
        "Automated Anatomical Labeling (AAL1) atlas",
    }

    # Shared request serving the region→dataset index used by query().
    loader = requests.MultiSourcedRequest(
        requests=[
            requests.GitlabProxy(
                flavour=requests.GitlabProxyEnum.PARCELLATIONREGION_V1,
            ),
        ]
    )

    # Class-level cache of EBRAINS dataset-version ids attached to
    # parcellations; filled lazily on first instantiation.
    parcellation_ids = None

    def __init__(self, **kwargs):
        """Initialize the query and populate the parcellation id cache once."""
        query.LiveQuery.__init__(self, **kwargs)

        if self.__class__.parcellation_ids is None:
            self.__class__.parcellation_ids = [
                dset.id
                for parc in parcellation.Parcellation.registry()
                for dset in parc.datasets
                if isinstance(dset, datasets.EbrainsV3DatasetVersion)
            ]

    def query(self, region: region.Region):
        """
        Yield EbrainsDataFeature objects matching the given region.

        Datasets with undecodable species are skipped and reported once via
        a warning that points to a temp file listing them. When compacting,
        versioned datasets are chained and only the newest is yielded.
        """
        versioned_datasets = defaultdict(dict)
        invalid_species_datasets = {}
        results = self.loader.data.get("results", [])

        for r in siibra_tqdm(results, total=len(results)):
            regionname = r.get("name", None)
            alias = r.get("alias", None)
            for ds_spec in r.get("datasets", []):
                ds_name = ds_spec.get("name")
                ds_id = ds_spec.get("@id")
                # Only entries whose id refers to a dataset are of interest.
                if "dataset" not in ds_id:
                    continue

                try:
                    ds_species = _anchor.Species.decode(ds_spec)
                except ValueError:
                    logger.debug(f"Cannot decode {ds_spec}")
                    invalid_species_datasets[ds_id] = ds_name
                    continue

                if self.COMPACT_FEATURE_LIST:
                    # Skip datasets that are themselves parcellation releases
                    # or explicitly blacklisted atlas collections.
                    if any(ds_id.endswith(i) for i in self.parcellation_ids):
                        continue
                    if any(e.lower() in ds_name.lower() for e in self._BLACKLIST):
                        continue

                dset = _ebrains.EbrainsDataFeature(
                    dataset_version_id=ds_id,
                    anchor=_anchor.AnatomicalAnchor(
                        region=alias or regionname,
                        species=ds_species,
                    ),
                )
                if not dset.matches(region):
                    continue

                version_match = self.VERSION_PATTERN.search(ds_name)
                if version_match is None or not self.COMPACT_FEATURE_LIST:
                    yield dset
                else:  # store version, add only the latest version after the loop
                    name, version = version_match.groups()
                    versioned_datasets[name][version] = dset

        if len(invalid_species_datasets) > 0:
            # delete=False keeps the file around so the user can inspect it
            # after the warning is printed.
            with NamedTemporaryFile(mode="w", suffix=".txt", delete=False, encoding="utf-8") as f:
                for dsid, dsname in invalid_species_datasets.items():
                    f.write(f"{dsid} {dsname}\n")
            logger.warning(
                f"{len(invalid_species_datasets)} datasets have been ignored, "
                "because siibra could not decode their species. "
                f"See {f.name}"
            )

        # if versioned datasets have been recorded, register only
        # the newest one with older ones linked as a version history.
        for name, dsets in versioned_datasets.items():
            try:  # if possible, sort by version tag
                sorted_versions = sorted(dsets.keys(), key=Version)
            except TypeError:  # else sort lexicographically
                sorted_versions = sorted(dsets.keys())

            # chain the dataset versions
            prev = None
            for version in sorted_versions:
                curr = dsets[version]
                curr.version = version
                if prev is not None:
                    curr._prev = prev
                    prev._next = curr
                prev = curr

            logger.debug(
                f"Registered only version {version} of {', '.join(sorted_versions)} for {name}. "
                f"Its version history is: {curr.version_history}"
            )
            yield curr
|
|
143
|
+
|
|
144
|
+
|
|
145
|
+
def _prefetch_ebrains_index():
    # Accessing .data forces the MultiSourcedRequest to fetch and cache
    # the region/dataset index during cache warmup.
    return EbrainsFeatureQuery.loader.data


cache.Warmup.register_warmup_fn(cache.WarmupLevel.DATA)(_prefetch_ebrains_index)
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
# Copyright 2018-2024
|
|
2
|
+
# Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
|
|
3
|
+
|
|
4
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
5
|
+
# you may not use this file except in compliance with the License.
|
|
6
|
+
# You may obtain a copy of the License at
|
|
7
|
+
|
|
8
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
9
|
+
|
|
10
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
11
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
12
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
13
|
+
# See the License for the specific language governing permissions and
|
|
14
|
+
# limitations under the License.
|
|
15
|
+
"""Handles feature queries that rely on live or on-the-fly calculations."""
|
|
16
|
+
|
|
17
|
+
from ..commons import logger
|
|
18
|
+
from ..features.feature import Feature
|
|
19
|
+
from ..core.concept import AtlasConcept
|
|
20
|
+
|
|
21
|
+
from abc import ABC, abstractmethod
|
|
22
|
+
from typing import List
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
class LiveQuery(ABC):
    """
    Abstract base for feature queries that compute their results on the fly
    instead of loading preconfigured features.

    Subclasses declare their mandatory keyword arguments and the feature
    type they produce via class keyword arguments, e.g.::

        class MyQuery(LiveQuery, args=["gene"], FeatureType=MyFeature): ...

    which registers the subclass with ``FeatureType._live_queries``.
    """

    # list of mandatory query argument names, set per subclass
    _query_args: List[str] = []

    def __init__(self, **kwargs):
        """
        Store query parameters. Logs an error (without raising) if any
        mandatory argument declared in ``_query_args`` is missing.
        """
        # Fix: the original also built a human-readable parameter string
        # ("with parameters ...") that was never used anywhere; removed.
        if not all(p in kwargs for p in self._query_args):
            logger.error(
                f"Incomplete specification for {self.__class__.__name__} query "
                f"(Mandatory arguments: {', '.join(self._query_args)})"
            )
        self._kwargs = kwargs

    def __init_subclass__(cls, args: List[str], FeatureType: type):
        """Record required args and register the query with its feature type."""
        cls._query_args = args
        cls.feature_type = FeatureType
        FeatureType._live_queries.append(cls)
        return super().__init_subclass__()

    @abstractmethod
    def query(self, concept: AtlasConcept, **kwargs) -> List[Feature]:
        """Produce the list of features matching the given concept."""
        # Fix: corrected "Dervied" typo in the error message.
        raise NotImplementedError(f"Derived class {self.__class__} needs to implement query()")
|
|
@@ -0,0 +1,91 @@
|
|
|
1
|
+
# Copyright 2018-2024
|
|
2
|
+
# Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
|
|
3
|
+
|
|
4
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
5
|
+
# you may not use this file except in compliance with the License.
|
|
6
|
+
# You may obtain a copy of the License at
|
|
7
|
+
|
|
8
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
9
|
+
|
|
10
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
11
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
12
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
13
|
+
# See the License for the specific language governing permissions and
|
|
14
|
+
# limitations under the License.
|
|
15
|
+
"""Handles spatial concepts and spatial operation like warping between spaces."""
|
|
16
|
+
|
|
17
|
+
from .location import Location
|
|
18
|
+
from .point import Point
|
|
19
|
+
from .pointcloud import PointCloud, from_points
|
|
20
|
+
from .boundingbox import BoundingBox
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def reassign_union(loc0: 'Location', loc1: 'Location') -> 'Location':
    """
    Combine two locations of the same or different types into their union,
    returned as a Location object.

    Note
    ----
    `loc1` will be warped to the space of `loc0` if they are not in the same
    space, so the result is always in `loc0`'s space.

    Parameters
    ----------
    loc0 : Location
        First operand; determines the space of the result.
    loc1 : Location
        Second operand.

    Returns
    -------
    Location
        - Point U Point = PointCloud
        - Point U PointCloud = PointCloud
        - PointCloud U PointCloud = PointCloud
        - BoundingBox U BoundingBox = BoundingBox
        - BoundingBox U PointCloud = BoundingBox
        - BoundingBox U Point = BoundingBox
        - WholeBrain U Location = NotImplementedError
        (all operations are commutative)

    Raises
    ------
    NotImplementedError
        If neither operand type provides a union implementation.
    """
    # Union with None is the identity.
    if loc0 is None or loc1 is None:
        return loc0 or loc1

    # All location types should be unionable among each other and this should
    # be implemented here to avoid code repetition. Volumes are the only type of
    # location that has its own union method since it is not a part of locations
    # module and to avoid importing Volume here.
    if not all(
        isinstance(loc, (Point, PointCloud, BoundingBox)) for loc in [loc0, loc1]
    ):
        try:
            return loc1.union(loc0)
        except Exception as e:
            # Chain the original failure so the cause is not lost.
            raise NotImplementedError(
                f"There is no union method for {(loc0.__class__.__name__, loc1.__class__.__name__)}"
            ) from e

    # convert Points to PointClouds so only two operand kinds remain below
    loc0, loc1 = [
        from_points([loc]) if isinstance(loc, Point) else loc
        for loc in [loc0, loc1]
    ]

    # adopt the space of the first location
    loc1_w = loc1.warp(loc0.space)

    if isinstance(loc0, PointCloud):
        if isinstance(loc1_w, PointCloud):
            # deduplicate while preserving order
            points = list(dict.fromkeys([*loc0, *loc1_w]))
            return from_points(points)
        if isinstance(loc1_w, BoundingBox):
            return reassign_union(loc0.boundingbox, loc1_w)

    if isinstance(loc0, BoundingBox) and isinstance(loc1_w, BoundingBox):
        # Merge by taking the componentwise min/max over all four corners.
        coordinates = [loc0.minpoint, loc0.maxpoint, loc1_w.minpoint, loc1_w.maxpoint]
        return BoundingBox(
            point1=[min(p[i] for p in coordinates) for i in range(3)],
            point2=[max(p[i] for p in coordinates) for i in range(3)],
            space=loc0.space,
            sigma_mm=[loc0.minpoint.sigma, loc0.maxpoint.sigma]
        )

    # Remaining case (BoundingBox U PointCloud): handled commutatively by
    # swapping the operands; both are already in the same space.
    return reassign_union(loc1_w, loc0)
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
# Attach the module-level union implementation to the Location base class so
# any two Location subclasses can be combined via `loc0.union(loc1)`.
Location.union = reassign_union
|