siibra 1.0a9__py3-none-any.whl → 1.0a11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- siibra/VERSION +1 -1
- siibra/commons.py +43 -26
- siibra/configuration/factory.py +15 -16
- siibra/core/atlas.py +40 -16
- siibra/core/region.py +241 -38
- siibra/features/__init__.py +19 -8
- siibra/features/connectivity/functional_connectivity.py +1 -1
- siibra/features/connectivity/regional_connectivity.py +45 -3
- siibra/features/feature.py +62 -12
- siibra/features/image/image.py +3 -1
- siibra/features/tabular/bigbrain_intensity_profile.py +1 -1
- siibra/features/tabular/cell_density_profile.py +5 -3
- siibra/features/tabular/cortical_profile.py +79 -15
- siibra/features/tabular/gene_expression.py +110 -1
- siibra/features/tabular/layerwise_bigbrain_intensities.py +1 -1
- siibra/features/tabular/layerwise_cell_density.py +3 -1
- siibra/features/tabular/receptor_density_fingerprint.py +3 -1
- siibra/features/tabular/receptor_density_profile.py +3 -5
- siibra/features/tabular/regional_timeseries_activity.py +59 -10
- siibra/features/tabular/tabular.py +4 -2
- siibra/livequeries/bigbrain.py +34 -0
- siibra/retrieval/cache.py +14 -9
- siibra/retrieval/requests.py +30 -1
- siibra/volumes/parcellationmap.py +17 -21
- siibra/volumes/providers/__init__.py +1 -0
- siibra/volumes/providers/freesurfer.py +113 -0
- siibra/volumes/providers/neuroglancer.py +55 -25
- siibra/volumes/providers/nifti.py +14 -16
- siibra/volumes/sparsemap.py +1 -1
- siibra/volumes/volume.py +13 -15
- {siibra-1.0a9.dist-info → siibra-1.0a11.dist-info}/METADATA +1 -1
- {siibra-1.0a9.dist-info → siibra-1.0a11.dist-info}/RECORD +35 -34
- {siibra-1.0a9.dist-info → siibra-1.0a11.dist-info}/LICENSE +0 -0
- {siibra-1.0a9.dist-info → siibra-1.0a11.dist-info}/WHEEL +0 -0
- {siibra-1.0a9.dist-info → siibra-1.0a11.dist-info}/top_level.txt +0 -0
siibra/features/tabular/regional_timeseries_activity.py
CHANGED

@@ -18,7 +18,7 @@ from ..feature import Compoundable
 
 from ...core import region as _region
 from .. import anchor as _anchor
-from ...commons import QUIET
+from ...commons import QUIET, siibra_tqdm
 from ...locations import pointset
 from ...retrieval.repositories import RepositoryConnector
 from ...retrieval.requests import HttpRequest
@@ -48,7 +48,8 @@ class RegionalTimeseriesActivity(tabular.Tabular, Compoundable):
         timestep: str,
         description: str = "",
         datasets: list = [],
-        subject: str = "average"
+        subject: str = "average",
+        id: str = None
     ):
         """
         """
@@ -58,7 +59,8 @@ class RegionalTimeseriesActivity(tabular.Tabular, Compoundable):
             description=description,
             anchor=anchor,
             datasets=datasets,
-            data=None  # lazy loading below
+            data=None,  # lazy loading below
+            id=id
         )
         self.cohort = cohort.upper()
         if isinstance(connector, str) and connector:
@@ -75,12 +77,12 @@ class RegionalTimeseriesActivity(tabular.Tabular, Compoundable):
 
     @property
     def subject(self):
-        """Returns the subject identifiers for which the
+        """Returns the subject identifiers for which the table represents."""
         return self._subject
 
     @property
     def name(self):
-        return f"{super().name
+        return f"{self.subject} - " + super().name + f" cohort: {self.cohort}"
 
     @property
     def data(self) -> pd.DataFrame:
@@ -91,16 +93,61 @@ class RegionalTimeseriesActivity(tabular.Tabular, Compoundable):
             self._load_table()
         return self._table.copy()
 
+    @classmethod
+    def _merge_elements(
+        cls,
+        elements: List["RegionalTimeseriesActivity"],
+        description: str,
+        modality: str,
+        anchor: _anchor.AnatomicalAnchor,
+    ):
+        assert len({f.cohort for f in elements}) == 1
+        assert len({f.timestep for f in elements}) == 1
+        merged = cls(
+            cohort=elements[0].cohort,
+            regions=elements[0].regions,
+            connector=elements[0]._connector,
+            decode_func=elements[0]._decode_func,
+            filename="",
+            timestep=" ".join(str(val) for val in elements[0].timestep),
+            subject="average",
+            description=description,
+            modality=modality,
+            anchor=anchor,
+            **{"paradigm": "average"} if getattr(elements[0], "paradigm") else {}
+        )
+        if isinstance(elements[0]._connector, HttpRequest):
+            getter = lambda elm: elm._connector.get()
+        else:
+            getter = lambda elm: elm._connector.get(elm._filename, decode_func=elm._decode_func)
+        all_arrays = [
+            getter(elm)
+            for elm in siibra_tqdm(
+                elements,
+                total=len(elements),
+                desc=f"Averaging {len(elements)} activity tables"
+            )
+        ]
+        merged._table = elements[0]._arraylike_to_dataframe(
+            np.stack(all_arrays).mean(0)
+        )
+        return merged
+
     def _load_table(self):
         """
         Extract the timeseries table.
         """
-
+        if isinstance(self._connector, HttpRequest):
+            array = self._connector.data
+        else:
+            array = self._connector.get(self._filename, decode_func=self._decode_func)
+        self._table = self._arraylike_to_dataframe(array)
+
+    def _arraylike_to_dataframe(self, array: Union[np.ndarray, pd.DataFrame]) -> pd.DataFrame:
         if not isinstance(array, np.ndarray):
-            assert isinstance(array, pd.DataFrame)
             array = array.to_numpy()
         ncols = array.shape[1]
-
+        table = pd.DataFrame(
             array,
             index=pd.TimedeltaIndex(
                 np.arange(0, array.shape[0]) * self.timestep[0],
@@ -121,7 +168,9 @@ class RegionalTimeseriesActivity(tabular.Tabular, Compoundable):
             label - min(columnmap.keys()): region
             for label, region in columnmap.items()
         }
-
+        table = table.rename(columns=remapper)
+
+        return table
 
     def __str__(self):
         return self.name
@@ -240,4 +289,4 @@ class RegionalBOLD(
 
     @property
     def name(self):
-        return
+        return super().name + f", paradigm: {self.paradigm}"
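The new `_merge_elements` classmethod is the hook that lets compound features average per-subject timeseries tables into a single element: it stacks the per-subject arrays, takes the element-wise mean, and wraps the result into a time-indexed DataFrame. A minimal, self-contained sketch of that averaging step, using synthetic data and a hypothetical 2-second timestep (siibra's connector machinery is omitted):

```python
import numpy as np
import pandas as pd

# Hypothetical stand-in for per-subject signal tables: three subjects,
# each with 100 timepoints x 4 regions.
rng = np.random.default_rng(0)
subject_arrays = [rng.random((100, 4)) for _ in range(3)]

# The merge step in _merge_elements: stack along a new subject axis
# and take the cell-wise mean across subjects.
mean_array = np.stack(subject_arrays).mean(0)  # shape (100, 4)

# _arraylike_to_dataframe then indexes rows by elapsed time derived
# from the timestep, as in the hunk above (2 seconds assumed here).
table = pd.DataFrame(
    mean_array,
    index=pd.to_timedelta(np.arange(0, mean_array.shape[0]) * 2.0, unit="s"),
)
print(table.head())
```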
siibra/features/tabular/tabular.py
CHANGED

@@ -44,14 +44,16 @@ class Tabular(feature.Feature):
         modality: str,
         anchor: _anchor.AnatomicalAnchor,
         data: pd.DataFrame,  # sample x feature dimension
-        datasets: list = []
+        datasets: list = [],
+        id: str = None
     ):
         feature.Feature.__init__(
             self,
             modality=modality,
             description=description,
             anchor=anchor,
-            datasets=datasets
+            datasets=datasets,
+            id=id
         )
         self._data_cached = data
 
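The `id` keyword added here and in `RegionalTimeseriesActivity.__init__` above is simply forwarded to `feature.Feature.__init__`, so preconfigured features can carry a stable identifier instead of a derived one. A hedged sketch of this pass-through pattern with simplified stand-in classes (not the actual siibra definitions):

```python
class Feature:
    def __init__(self, modality: str, id: str = None):
        self.modality = modality
        # Fall back to a derived identifier only when none is configured.
        self._id = id or f"feature/{modality.lower().replace(' ', '-')}"


class Tabular(Feature):
    def __init__(self, modality: str, data=None, id: str = None):
        # New in this release: forward the optional id to the base class.
        super().__init__(modality=modality, id=id)
        self._data_cached = data


f = Tabular(modality="BOLD signals", id="f/tab/abc123")
print(f._id)  # 'f/tab/abc123'
```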
siibra/livequeries/bigbrain.py
CHANGED

@@ -22,6 +22,7 @@ from ..commons import logger
 from ..locations import point, pointset
 from ..core import structure
 from ..retrieval import requests, cache
+from ..retrieval.datasets import GenericDataset
 
 import numpy as np
 from typing import List
@@ -37,6 +38,37 @@ class WagstylProfileLoader:
     _profiles = None
     _vertices = None
     _boundary_depths = None
+    DATASET = GenericDataset(
+        name="HIBALL workshop on cortical layers",
+        contributors=[
+            'Konrad Wagstyl',
+            'Stéphanie Larocque',
+            'Guillem Cucurull',
+            'Claude Lepage',
+            'Joseph Paul Cohen',
+            'Sebastian Bludau',
+            'Nicola Palomero-Gallagher',
+            'Lindsay B. Lewis',
+            'Thomas Funck',
+            'Hannah Spitzer',
+            'Timo Dickscheid',
+            'Paul C. Fletcher',
+            'Adriana Romero',
+            'Karl Zilles',
+            'Katrin Amunts',
+            'Yoshua Bengio',
+            'Alan C. Evans'
+        ],
+        url="https://github.com/kwagstyl/cortical_layers_tutorial/",
+        description="Cortical profiles of BigBrain staining intensities computed by Konrad Wagstyl, "
+        "as described in the publication 'Wagstyl, K., et al (2020). BigBrain 3D atlas of "
+        "cortical layers: Cortical and laminar thickness gradients diverge in sensory and "
+        "motor cortices. PLoS Biology, 18(4), e3000678. "
+        "http://dx.doi.org/10.1371/journal.pbio.3000678."
+        "The data is taken from the tutorial at "
+        "https://github.com/kwagstyl/cortical_layers_tutorial. Each vertex is "
+        "assigned to the regional map when queried."
+    )
 
     def __init__(self):
         if self._profiles is None:
@@ -113,6 +145,7 @@ class BigBrainProfileQuery(query.LiveQuery, args=[], FeatureType=bigbrain_intens
                 qualification=_anchor.Qualification.CONTAINED,
                 explanation=f"Surface vertex of BigBrain cortical profile was filtered using {concept}"
             )
+            prof.datasets = [WagstylProfileLoader.DATASET]
             features.append(prof)
 
         return features
@@ -159,5 +192,6 @@ class LayerwiseBigBrainIntensityQuery(query.LiveQuery, args=[], FeatureType=laye
             qualification=_anchor.Qualification.CONTAINED,
             explanation=f"Surface vertices of BigBrain cortical profiles were filtered using {concept}"
         )
+        result.datasets = [WagstylProfileLoader.DATASET]
 
         return [result]
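With `WagstylProfileLoader.DATASET` attached, features returned by these live queries now carry citation metadata. A hedged usage sketch: the high-level query calls and the attribute access on `GenericDataset` are assumptions based on siibra's public API (only `name`, `contributors`, `url`, and `description` are confirmed by the constructor above), and the returned features may be wrapped in compound features depending on version:

```python
import siibra

# Query BigBrain intensity profiles for a region of interest.
v1 = siibra.get_region("julich 2.9", "v1")
profiles = siibra.features.get(v1, "BigBrainIntensityProfile")

# Each profile now references the Wagstyl et al. dataset description.
ds = profiles[0].datasets[0]
print(ds.name)         # "HIBALL workshop on cortical layers"
print(ds.url)          # link to the cortical_layers_tutorial repository
print(ds.description)  # cites Wagstyl et al. (2020), PLoS Biology
```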
siibra/retrieval/cache.py
CHANGED

@@ -121,15 +121,20 @@ class Cache:
         """ Iterate all element names in the cache directory. """
         return (os.path.join(self.folder, f) for f in os.listdir(self.folder))
 
-    def build_filename(self, str_rep: str, suffix=None):
-        """
-
-
-
-
-
-
-
+    def build_filename(self, str_rep: str, suffix=None) -> str:
+        """
+        Generate a filename in the cache.
+
+        Parameters
+        ----------
+        str_rep: str
+            Unique string representation of the item. Will be used to compute a hash.
+        suffix: str. Default: None
+            Optional file suffix, in order to allow filetype recognition by the name.
+
+        Returns
+        -------
+        str
         """
         hashfile = os.path.join(
             self.folder, str(hashlib.sha256(str_rep.encode("ascii")).hexdigest())
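The documented contract of `build_filename` is a SHA-256 hash of the unique string representation, joined to the cache folder, with an optional suffix for filetype recognition. A self-contained sketch of that scheme; the folder path and the exact suffix concatenation are assumptions for illustration:

```python
import hashlib
import os


def build_filename(folder: str, str_rep: str, suffix: str = None) -> str:
    # Hash the unique string representation to get a stable cache name.
    hashed = hashlib.sha256(str_rep.encode("ascii")).hexdigest()
    filename = os.path.join(folder, hashed)
    # Append a suffix so the filetype stays recognizable from the name.
    if suffix is not None:
        return filename + (suffix if suffix.startswith(".") else "." + suffix)
    return filename


print(build_filename("/tmp/siibra-cache", "my-unique-key", suffix=".nii"))
```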
siibra/retrieval/requests.py
CHANGED

@@ -30,7 +30,7 @@ import json
 from zipfile import ZipFile
 import requests
 import os
-from nibabel import Nifti1Image, GiftiImage, streamlines
+from nibabel import Nifti1Image, GiftiImage, streamlines, freesurfer
 from skimage import io as skimage_io
 import gzip
 from io import BytesIO
@@ -48,6 +48,34 @@ if TYPE_CHECKING:
 
 USER_AGENT_HEADER = {"User-Agent": f"siibra-python/{__version__}"}
 
+
+def read_as_bytesio(function: Callable, suffix: str, bytesio: BytesIO):
+    """
+    Helper method to provide BytesIO to methods that only takes file path and
+    cannot handle BytesIO normally (e.g., `nibabel.freesurfer.read_annot()`).
+
+    Writes the bytes to a temporary file on cache and reads with the
+    original function.
+
+    Parameters
+    ----------
+    function : Callable
+    suffix : str
+        Must match the suffix expected by the function provided.
+    bytesio : BytesIO
+
+    Returns
+    -------
+    Return type of the provided function.
+    """
+    tempfile = CACHE.build_filename(f"temp_{suffix}") + suffix
+    with open(tempfile, "wb") as bf:
+        bf.write(bytesio.getbuffer())
+    result = function(tempfile)
+    os.remove(tempfile)
+    return result
+
+
 DECODERS = {
     ".nii": lambda b: Nifti1Image.from_bytes(b),
     ".gii": lambda b: GiftiImage.from_bytes(b),
@@ -59,6 +87,7 @@ DECODERS = {
     ".zip": lambda b: ZipFile(BytesIO(b)),
     ".png": lambda b: skimage_io.imread(BytesIO(b)),
     ".npy": lambda b: np.load(BytesIO(b)),
+    ".annot": lambda b: read_as_bytesio(freesurfer.read_annot, '.annot', BytesIO(b)),
 }
 
 
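`read_as_bytesio` exists because `nibabel.freesurfer.read_annot` only accepts a filesystem path: the helper spools in-memory bytes to a temporary file, calls the reader, and cleans up. The same pattern can be written with the standard library alone; a sketch, where the sample `.annot` path is hypothetical:

```python
import os
import tempfile
from io import BytesIO
from typing import Callable

from nibabel import freesurfer


def read_as_bytesio(function: Callable, suffix: str, bytesio: BytesIO):
    # Spool the buffer to a named temporary file the reader can open by path.
    with tempfile.NamedTemporaryFile(suffix=suffix, delete=False) as tf:
        tf.write(bytesio.getbuffer())
        temppath = tf.name
    try:
        return function(temppath)
    finally:
        os.remove(temppath)


# Usage: decode raw annotation bytes the way the new ".annot" decoder does.
with open("lh.aparc.annot", "rb") as f:  # hypothetical local file
    labels, ctab, names = read_as_bytesio(freesurfer.read_annot, ".annot", BytesIO(f.read()))
print(labels.shape, len(names))
```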
siibra/volumes/parcellationmap.py
CHANGED

@@ -21,7 +21,7 @@ from ..commons import (
     MapIndex,
     MapType,
     compare_arrays,
-
+    resample_img_to_img,
     connected_components,
     clear_name,
     create_key,
@@ -512,10 +512,10 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
             raise RuntimeError("The map cannot be merged since there are no multiple volumes or fragments.")
 
         # initialize empty volume according to the template
-
-
-        result_affine =
-        voxelwise_max = np.zeros_like(
+        template_img = self.space.get_template().fetch(**kwargs)
+        result_arr = np.zeros_like(np.asanyarray(template_img.dataobj))
+        result_affine = template_img.affine
+        voxelwise_max = np.zeros_like(result_arr)
         interpolation = 'nearest' if self.is_labelled else 'linear'
         next_labelindex = 1
         region_indices = defaultdict(list)
@@ -532,13 +532,13 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
         ):
             mapindex = MapIndex(volume=volidx, fragment=frag)
             img = self.fetch(mapindex)
-            if np.
+            if np.allclose(img.affine, result_affine):
+                img_data = np.asanyarray(img.dataobj)
+            else:
                 logger.debug(f"Compression requires to resample volume {volidx} ({interpolation})")
-                img_data =
-                img
+                img_data = np.asanyarray(
+                    resample_img_to_img(img, template_img).dataobj
                 )
-            else:
-                img_data = img.get_fdata()
 
             if self.is_labelled:
                 labels = set(np.unique(img_data)) - {0}
@@ -557,7 +557,7 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
                     update_voxels = (img_data > voxelwise_max)
                 else:
                     update_voxels = (img_data == label)
-
+                result_arr[update_voxels] = next_labelindex
                 voxelwise_max[update_voxels] = img_data[update_voxels]
                 next_labelindex += 1
 
@@ -568,7 +568,7 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
             parcellation_spec=self._parcellation_spec,
             indices=region_indices,
             volumes=[_volume.from_array(
-
+                result_arr, result_affine, self._space_spec, name=self.name + " compressed"
             )]
         )
 
@@ -973,7 +973,7 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
         """
         assign a PointSet to this parcellation map.
 
-        Parameters
+        Parameters
         -----------
         lower_threshold: float, default: 0
             Lower threshold on values in the statistical map. Values smaller than
@@ -1076,7 +1076,7 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
         """
         Assign an image volume to this parcellation map.
 
-        Parameters
+        Parameters
         -----------
         queryvolume: Volume
             the volume to be compared with maps
@@ -1101,7 +1101,6 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
             iter_components = lambda arr: [(0, arr)]
 
         queryimg = queryvolume.fetch()
-        queryimgarr = np.asanyarray(queryimg.dataobj)
         assignments = []
         all_indices = [
             index
@@ -1120,11 +1119,8 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
             region_map_arr = np.asanyarray(region_map.dataobj)
             # the shape and affine are checked by `nilearn.image.resample_to_img()`
             # and returns the original data if resampling is not necessary.
-            queryimgarr_res =
-
-                queryimg.affine,
-                region_map_arr,
-                region_map.affine
+            queryimgarr_res = np.asanyarray(
+                resample_img_to_img(queryimg, region_map).dataobj
             )
             for compmode, voxelmask in iter_components(queryimgarr_res):
                 scores = compare_arrays(
@@ -1159,7 +1155,7 @@ def from_volume(
     """
     Add a custom labelled parcellation map to siibra from a labelled NIfTI file.
 
-    Parameters
+    Parameters
     ------------
     name: str
         Human-readable name of the parcellation.
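Several hunks in this file swap hand-rolled array resampling for the new `resample_img_to_img` helper; per the inline comment it mirrors `nilearn.image.resample_to_img`, which returns the original data when shape and affine already match. A sketch of the compress-style merge pattern using nilearn directly on synthetic images (shapes, affines, and label values are made up for illustration):

```python
import numpy as np
import nibabel as nib
from nilearn.image import resample_to_img

# A synthetic template and one fragment whose affine is offset in x.
template = nib.Nifti1Image(np.zeros((16, 16, 16), dtype="float32"), np.eye(4))
affine = np.eye(4)
affine[:3, -1] = (2, 0, 0)
frag_data = np.zeros((16, 16, 16), dtype="float32")
frag_data[4:8, 4:8, 4:8] = 1.0
fragment = nib.Nifti1Image(frag_data, affine)

result_arr = np.asanyarray(template.dataobj).copy()
if np.allclose(fragment.affine, template.affine):
    img_data = np.asanyarray(fragment.dataobj)  # grids agree: no resampling
else:
    # Resample into the template's voxel grid, as compress() now does.
    img_data = np.asanyarray(
        resample_to_img(fragment, template, interpolation="nearest").dataobj
    )
result_arr[img_data == 1.0] = 1  # assign the next label index
```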
siibra/volumes/providers/freesurfer.py
ADDED

@@ -0,0 +1,113 @@
+# Copyright 2018-2021
+# Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Handles reading and preparing gii files."""
+
+from . import provider as _provider
+
+from ...retrieval.requests import HttpRequest, ZipfileRequest
+
+import numpy as np
+from typing import Union, Dict, TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from ...locations import boundingbox as _boundingbox
+
+
+class FreesurferAnnot(_provider.VolumeProvider, srctype="freesurfer-annot"):
+    def __init__(self, url: Union[str, dict]):
+        self._init_url = url
+        if isinstance(url, str):  # single mesh labelling
+            self._loaders = {None: HttpRequest(url)}
+        elif isinstance(url, dict):  # named label fragments
+            self._loaders = {lbl: HttpRequest(u) for lbl, u in url.items()}
+        else:
+            raise NotImplementedError(f"Urls for {self.__class__.__name__} are expected to be of type str.")
+
+    def fetch(self, fragment: str = None, label: int = None, **kwargs):
+        """Returns a 1D numpy array of label indices."""
+        vertex_labels = []
+        if fragment is None:
+            matched_frags = list(self._loaders.keys())
+        else:
+            matched_frags = [frg for frg in self._loaders.keys() if fragment.lower() in frg.lower()]
+            if len(matched_frags) != 1:
+                raise ValueError(
+                    f"Requested fragment '{fragment}' could not be matched uniquely "
+                    f"to [{', '.join(self._loaders)}]"
+                )
+        for frag in matched_frags:
+            frag_labels, *_ = self._loaders[frag].data
+            if label is not None:  # create the mask
+                selected_label = frag_labels == label
+                frag_labels[selected_label] = 1
+                frag_labels[~selected_label] = 0
+            else:
+                frag_labels[frag_labels == -1] = 0  # annot files store backgorund as -1 while siibra uses 0
+            vertex_labels.append(frag_labels)
+
+        return {"labels": np.hstack(vertex_labels)}
+
+    def get_boundingbox(self, clip=False, background=0.0) -> '_boundingbox.BoundingBox':
+        raise NotImplementedError(
+            f"Bounding box access to {self.__class__.__name__} objects not yet implemented."
+        )
+
+    @property
+    def _url(self) -> Union[str, Dict[str, str]]:
+        return self._init_url
+
+
+class ZippedFreesurferAnnot(_provider.VolumeProvider, srctype="zip/freesurfer-annot"):
+    def __init__(self, url: Union[str, dict]):
+        self._init_url = url
+        if isinstance(url, str):  # single mesh labelling
+            self._loaders = {None: ZipfileRequest(*url.split(" "))}
+        elif isinstance(url, dict):  # named label fragments
+            self._loaders = {lbl: ZipfileRequest(*u.split(" ")) for lbl, u in url.items()}
+        else:
+            raise NotImplementedError(f"Urls for {self.__class__.__name__} are expected to be of type str.")
+
+    def fetch(self, fragment: str = None, label: int = None, **kwargs):
+        """Returns a 1D numpy array of label indices."""
+        vertex_labels = []
+        if fragment is None:
+            matched_frags = list(self._loaders.keys())
+        else:
+            matched_frags = [frg for frg in self._loaders.keys() if fragment.lower() in frg.lower()]
+            if len(matched_frags) != 1:
+                raise ValueError(
+                    f"Requested fragment '{fragment}' could not be matched uniquely "
+                    f"to [{', '.join(self._loaders)}]"
+                )
+        for frag in matched_frags:
+            frag_labels, *_ = self._loaders[frag].data
+            if label is not None:  # create the mask
+                selected_label = frag_labels == label
+                frag_labels[selected_label] = 1
+                frag_labels[~selected_label] = 0
+            else:
+                frag_labels[frag_labels == -1] = 0  # annot files store backgorund as -1 while siibra uses 0
+            vertex_labels.append(frag_labels)
+
+        return {"labels": np.hstack(vertex_labels)}
+
+    def get_boundingbox(self, clip=False, background=0.0) -> '_boundingbox.BoundingBox':
+        raise NotImplementedError(
+            f"Bounding box access to {self.__class__.__name__} objects not yet implemented."
+        )
+
+    @property
+    def _url(self) -> Union[str, Dict[str, str]]:
+        return self._init_url
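`FreesurferAnnot` and `ZippedFreesurferAnnot` differ only in how bytes are obtained (plain HTTP vs. a zip member); the per-fragment label handling is identical. A standalone sketch of that handling on a synthetic label array, shaped like what `nibabel.freesurfer.read_annot` returns:

```python
import numpy as np

# Synthetic per-vertex labels: .annot files store background as -1,
# while siibra uses 0; positive integers are region labels.
frag_labels = np.array([-1, 3, 3, -1, 7, 3])

# fetch(label=3): binarize into a mask for the requested label.
mask = np.where(frag_labels == 3, 1, 0)  # -> [0 1 1 0 0 1]

# fetch() without a label: only remap the background convention.
remapped = frag_labels.copy()
remapped[remapped == -1] = 0             # -> [0 3 3 0 7 3]

# Fragments are finally concatenated vertex-wise, as in fetch().
print({"labels": np.hstack([mask])})
```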
siibra/volumes/providers/neuroglancer.py
CHANGED

@@ -15,7 +15,14 @@
 
 from . import provider as _provider
 
-from ...commons import
+from ...commons import (
+    logger,
+    MapType,
+    merge_meshes,
+    SIIBRA_MAX_FETCH_SIZE_GIB,
+    QUIET,
+    resample_img_to_img
+)
 from ...retrieval import requests, cache
 from ...locations import boundingbox as _boundingbox
 
@@ -86,10 +93,11 @@ class NeuroglancerProvider(_provider.VolumeProvider, srctype="neuroglancer/preco
 
         if len(self._fragments) > 1:
             if fragment is None:
-
-                f"Merging
-                f"
+                logger.info(
+                    f"Merging fragments [{', '.join(self._fragments.keys())}]. "
+                    f"You can select one using `fragment` kwarg."
                 )
+                result = self._merge_fragments(resolution_mm=resolution_mm, voi=voi, **kwargs)
         else:
             matched_names = [n for n in self._fragments if fragment.lower() in n.lower()]
             if len(matched_names) != 1:
@@ -157,45 +165,67 @@ class NeuroglancerProvider(_provider.VolumeProvider, srctype="neuroglancer/preco
                 np.asanyarray(img.dataobj), threshold=background, space=None
             ).transform(img.affine)  # use the affine of the image matching fetch_kwargs
         else:
-
+            resolution_mm = fetch_kwargs.get("resolution_mm")
+            if resolution_mm is None:
+                affine = frag.affine
+                shape = frag.shape[:3]
+            else:
+                scale = frag._select_scale(resolution_mm=resolution_mm)
+                affine = scale.affine
+                shape = scale.size[:3]
             next_bbox = _boundingbox.BoundingBox(
                 (0, 0, 0), shape, space=None
-            ).transform(
+            ).transform(affine)
             bbox = next_bbox if bbox is None else bbox.union(next_bbox)
         return bbox
 
-    def _merge_fragments(
-
-
+    def _merge_fragments(
+        self,
+        resolution_mm: float = None,
+        voi: _boundingbox.BoundingBox = None,
+        **kwargs
+    ) -> nib.Nifti1Image:
+        with QUIET:
+            bbox = self.get_boundingbox(
+                clip=False,
+                background=0,
+                resolution_mm=resolution_mm,
+                voi=voi
+            )
+
         num_conflicts = 0
         result = None
-
-
-
+        for frag_vol in self._fragments.values():
+            frag_scale = frag_vol._select_scale(
+                resolution_mm=resolution_mm,
+                bbox=voi,
+                max_bytes=kwargs.pop("maxbytes", NeuroglancerVolume.MAX_BYTES)
+            )
+            img = frag_scale.fetch(voi=voi)
             if result is None:
                 # build the empty result image with its own affine and voxel space
                 s0 = np.identity(4)
                 s0[:3, -1] = list(bbox.minpoint.transform(np.linalg.inv(img.affine)))
                 result_affine = np.dot(img.affine, s0)  # adjust global bounding box offset to get global affine
-                voxdims = np.asanyarray(
+                voxdims = np.asanyarray(np.ceil(
+                    bbox.transform(np.linalg.inv(result_affine)).shape  # transform to the voxel space
+                ), dtype="int")
                 result_arr = np.zeros(voxdims, dtype=img.dataobj.dtype)
                 result = nib.Nifti1Image(dataobj=result_arr, affine=result_affine)
 
-
-
-
-
-
-
-            ) + .5).astype('int'),
-                4, axis=0
-            )
-            num_conflicts += np.count_nonzero(result_arr[Xt, Yt, Zt])
-            result_arr[Xt, Yt, Zt] = arr[Xs, Ys, Zs]
+            # resample to merge template and update it
+            resampled_img = resample_img_to_img(source_img=img, target_img=result)
+            arr = np.asanyarray(resampled_img.dataobj)
+            nonzero_voxels = arr != 0
+            num_conflicts += np.count_nonzero(result_arr[nonzero_voxels])
+            result_arr[nonzero_voxels] = arr[nonzero_voxels]
 
         if num_conflicts > 0:
             num_voxels = np.count_nonzero(result_arr)
-            logger.warning(
+            logger.warning(
+                f"Merging fragments required to overwrite {num_conflicts} "
+                f"conflicting voxels ({num_conflicts / num_voxels * 100.:2.3f}%)."
+            )
 
         return result
 
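The rewritten `_merge_fragments` resamples each fragment into a shared result grid instead of computing voxel indices by hand, and it counts how many already-written voxels each fragment overwrites. A self-contained sketch of the merge-and-count step on plain arrays (synthetic fragments; the resampling itself is omitted):

```python
import numpy as np

result_arr = np.zeros((8, 8, 8), dtype="uint8")
num_conflicts = 0

# Two synthetic fragments that overlap in a 2-voxel-wide slab along x.
frag_a = np.zeros_like(result_arr)
frag_a[0:4, :, :] = 1
frag_b = np.zeros_like(result_arr)
frag_b[2:6, :, :] = 2

for arr in (frag_a, frag_b):
    nonzero_voxels = arr != 0
    # Voxels already written by an earlier fragment count as conflicts.
    num_conflicts += np.count_nonzero(result_arr[nonzero_voxels])
    result_arr[nonzero_voxels] = arr[nonzero_voxels]

num_voxels = np.count_nonzero(result_arr)
print(f"overwrote {num_conflicts} voxels ({num_conflicts / num_voxels * 100.:2.3f}%)")
```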