junifer 0.0.4.dev108__py3-none-any.whl → 0.0.4.dev133__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- junifer/_version.py +2 -2
- junifer/data/__init__.py +2 -0
- junifer/data/coordinates.py +72 -7
- junifer/data/parcellations.py +84 -12
- junifer/data/tests/test_coordinates.py +22 -1
- junifer/data/tests/test_parcellations.py +90 -1
- junifer/markers/parcel_aggregation.py +39 -34
- junifer/markers/sphere_aggregation.py +33 -5
- {junifer-0.0.4.dev108.dist-info → junifer-0.0.4.dev133.dist-info}/METADATA +1 -1
- {junifer-0.0.4.dev108.dist-info → junifer-0.0.4.dev133.dist-info}/RECORD +15 -15
- {junifer-0.0.4.dev108.dist-info → junifer-0.0.4.dev133.dist-info}/AUTHORS.rst +0 -0
- {junifer-0.0.4.dev108.dist-info → junifer-0.0.4.dev133.dist-info}/LICENSE.md +0 -0
- {junifer-0.0.4.dev108.dist-info → junifer-0.0.4.dev133.dist-info}/WHEEL +0 -0
- {junifer-0.0.4.dev108.dist-info → junifer-0.0.4.dev133.dist-info}/entry_points.txt +0 -0
- {junifer-0.0.4.dev108.dist-info → junifer-0.0.4.dev133.dist-info}/top_level.txt +0 -0
junifer/_version.py
CHANGED
@@ -12,5 +12,5 @@ __version__: str
 __version_tuple__: VERSION_TUPLE
 version_tuple: VERSION_TUPLE
 
-__version__ = version = '0.0.4.dev108'
-__version_tuple__ = version_tuple = (0, 0, 4, 'dev108')
+__version__ = version = '0.0.4.dev133'
+__version_tuple__ = version_tuple = (0, 0, 4, 'dev133')
junifer/data/__init__.py
CHANGED
@@ -8,12 +8,14 @@ from .coordinates import (
     list_coordinates,
     load_coordinates,
     register_coordinates,
+    get_coordinates,
 )
 from .parcellations import (
     list_parcellations,
     load_parcellation,
     register_parcellation,
     merge_parcellations,
+    get_parcellation,
 )
 
 from .masks import (
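With this change, both helpers are re-exported at the package level, so downstream code (including the markers further down in this diff) can import them directly; a minimal sketch of the resulting import path:

# Names re-exported by junifer/data/__init__.py as of 0.0.4.dev133.
from junifer.data import get_coordinates, get_parcellation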
junifer/data/coordinates.py
CHANGED
@@ -1,11 +1,12 @@
 """Provide functions for list of coordinates."""
 
 # Authors: Federico Raimondo <f.raimondo@fz-juelich.de>
+#          Synchon Mandal <s.mandal@fz-juelich.de>
 # License: AGPL
 
 import typing
 from pathlib import Path
-from typing import Dict, List, Optional, Tuple, Union
+from typing import Any, Dict, List, Optional, Tuple, Union
 
 import numpy as np
 import pandas as pd
@@ -57,7 +58,7 @@ def register_coordinates(
     voi_names: List[str],
     overwrite: Optional[bool] = False,
 ) -> None:
-    """Register coordinates.
+    """Register a custom user coordinates.
 
     Parameters
     ----------
@@ -73,6 +74,18 @@ def register_coordinates(
     overwrite : bool, optional
         If True, overwrite an existing list of coordinates with the same name.
         Does not apply to built-in coordinates (default False).
+
+    Raises
+    ------
+    ValueError
+        If the coordinates name is already registered and overwrite is set to
+        False or if the coordinates name is a built-in coordinates or if the
+        ``coordinates`` is not a 2D array or if coordinate value does not have
+        3 components or if the ``voi_names`` shape does not match the
+        ``coordinates`` shape.
+    TypeError
+        If ``coordinates`` is not a ``numpy.ndarray``.
+
     """
     if name in _available_coordinates:
         if isinstance(_available_coordinates[name], Path):
@@ -90,7 +103,8 @@ def register_coordinates(
 
     if not isinstance(coordinates, np.ndarray):
         raise_error(
-            f"Coordinates must be a numpy.ndarray
+            f"Coordinates must be a `numpy.ndarray`, not {type(coordinates)}.",
+            klass=TypeError,
         )
     if coordinates.ndim != 2:
         raise_error(
@@ -102,8 +116,8 @@ def register_coordinates(
         )
     if len(voi_names) != coordinates.shape[0]:
         raise_error(
-            f"Length of voi_names ({len(voi_names)}) does not match the "
-            f"number of coordinates ({coordinates.shape[0]})."
+            f"Length of `voi_names` ({len(voi_names)}) does not match the "
+            f"number of `coordinates` ({coordinates.shape[0]})."
         )
     _available_coordinates[name] = {
         "coords": coordinates,
@@ -112,16 +126,55 @@ def register_coordinates(
 
 
 def list_coordinates() -> List[str]:
-    """List all the available coordinates
+    """List all the available coordinates (VOIs).
 
     Returns
     -------
     list of str
         A list with all available coordinates names.
+
     """
     return sorted(_available_coordinates.keys())
 
 
+def get_coordinates(
+    coords: str,
+    target_data: Dict[str, Any],
+    extra_input: Optional[Dict[str, Any]] = None,
+) -> Tuple[ArrayLike, List[str]]:
+    """Get coordinates, tailored for the target image.
+
+    Parameters
+    ----------
+    coords : str
+        The name of the coordinates.
+    target_data : dict
+        The corresponding item of the data object to which the coordinates
+        will be applied.
+    extra_input : dict, optional
+        The other fields in the data object. Useful for accessing other data
+        kinds that needs to be used in the computation of coordinates
+        (default None).
+
+    Returns
+    -------
+    numpy.ndarray
+        The coordinates.
+    list of str
+        The names of the VOIs.
+
+    Raises
+    ------
+    ValueError
+        If ``extra_input`` is None when ``target_data``'s space is not MNI.
+
+    """
+    # Load the coordinates
+    seeds, labels = load_coordinates(name=coords)
+
+    return seeds, labels
+
+
 def load_coordinates(name: str) -> Tuple[ArrayLike, List[str]]:
     """Load coordinates.
 
@@ -137,14 +190,23 @@ def load_coordinates(name: str) -> Tuple[ArrayLike, List[str]]:
     list of str
         The names of the VOIs.
 
+    Raises
+    ------
+    ValueError
+        If ``name`` is invalid.
+
     Warns
     -----
     DeprecationWarning
         If ``Power`` is provided as the ``name``.
 
     """
+    # Check for valid coordinates name
     if name not in _available_coordinates:
-        raise_error(
+        raise_error(
+            f"Coordinates {name} not found. "
+            f"Valid options are: {list_coordinates()}"
+        )
 
     # Put up deprecation notice
     if name == "Power":
@@ -157,8 +219,10 @@ def load_coordinates(name: str) -> Tuple[ArrayLike, List[str]]:
             category=DeprecationWarning,
         )
 
+    # Load coordinates
     t_coord = _available_coordinates[name]
     if isinstance(t_coord, Path):
+        # Load via pandas
         df_coords = pd.read_csv(t_coord, sep="\t", header=None)
         coords = df_coords.iloc[:, [0, 1, 2]].to_numpy()
         names = list(df_coords.iloc[:, [3]].values[:, 0])
@@ -167,4 +231,5 @@ def load_coordinates(name: str) -> Tuple[ArrayLike, List[str]]:
         coords = typing.cast(ArrayLike, coords)
         names = t_coord["voi_names"]
         names = typing.cast(List[str], names)
+
     return coords, names
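As a rough usage sketch of the new `get_coordinates` helper, mirroring the pattern of the updated test further below (the testing data grabber, reader, element key and coordinates name are taken from that test, not from separate documentation):

from junifer.data import get_coordinates
from junifer.datareader import DefaultDataReader
from junifer.testing.datagrabbers import OasisVBMTestingDataGrabber

# Read one element of the testing dataset.
with OasisVBMTestingDataGrabber() as dg:
    element_data = DefaultDataReader().fit_transform(dg["sub-01"])

# Tailor the "DMNBuckner" coordinates to the VBM_GM item; at this point the
# function simply delegates to load_coordinates(), so seeds and labels match
# the raw definition.
seeds, labels = get_coordinates(
    coords="DMNBuckner", target_data=element_data["VBM_GM"]
)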
junifer/data/parcellations.py
CHANGED
@@ -9,6 +9,7 @@ import io
 import shutil
 import tarfile
 import tempfile
+import typing
 import zipfile
 from pathlib import Path
 from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
@@ -161,8 +162,8 @@ def register_parcellation(
         )
     else:
         raise_error(
-            f"Parcellation {name} already registered. Set
-            "to update its value."
+            f"Parcellation {name} already registered. Set "
+            "`overwrite=True` to update its value."
         )
     # Convert str to Path
     if not isinstance(parcellation_path, Path):
@@ -187,12 +188,68 @@ def list_parcellations() -> List[str]:
     return sorted(_available_parcellations.keys())
 
 
-
-
-
-
-
-
+def get_parcellation(
+    parcellation: List[str],
+    target_data: Dict[str, Any],
+    extra_input: Optional[Dict[str, Any]] = None,
+) -> Tuple["Nifti1Image", List[str]]:
+    """Get parcellation, tailored for the target image.
+
+    Parameters
+    ----------
+    parcellation : list of str
+        The name(s) of the parcellation(s).
+    target_data : dict
+        The corresponding item of the data object to which the parcellation
+        will be applied.
+    extra_input : dict, optional
+        The other fields in the data object. Useful for accessing other data
+        kinds that needs to be used in the computation of parcellations
+        (default None).
+
+    Returns
+    -------
+    Nifti1Image
+        The parcellation image.
+    list of str
+        Parcellation labels.
+
+    """
+    # Get the min of the voxels sizes and use it as the resolution
+    target_img = target_data["data"]
+    resolution = np.min(target_img.header.get_zooms()[:3])
+
+    # Load the parcellations
+    all_parcellations = []
+    all_labels = []
+    for name in parcellation:
+        img, labels, _ = load_parcellation(
+            name=name,
+            resolution=resolution,
+        )
+        # Resample all of them to the image
+        resampled_img = image.resample_to_img(
+            source_img=img,
+            target_img=target_img,
+            interpolation="nearest",
+            copy=True,
+        )
+        all_parcellations.append(resampled_img)
+        all_labels.append(labels)
+
+    # Avoid merging if there is only one parcellation
+    if len(all_parcellations) == 1:
+        resampled_parcellation_img = all_parcellations[0]
+        labels = all_labels[0]
+    else:
+        # Merge the parcellations
+        resampled_parcellation_img, labels = merge_parcellations(
+            parcellations_list=all_parcellations,
+            parcellations_names=parcellation,
+            labels_lists=all_labels,
+        )
+
+    return resampled_parcellation_img, labels
 
 
 def load_parcellation(
@@ -203,8 +260,8 @@ def load_parcellation(
 ) -> Tuple[Optional["Nifti1Image"], List[str], Path]:
     """Load a brain parcellation (including a label file).
 
-    If it is a built-in
-
+    If it is a built-in parcellation and the file is not present in the
+    ``parcellations_dir`` directory, it will be downloaded.
 
     Parameters
     ----------
@@ -231,17 +288,25 @@ def load_parcellation(
     pathlib.Path
         File path to the parcellation image.
 
+    Raises
+    ------
+    ValueError
+        If ``name`` is invalid or if the parcellation values and labels
+        don't have equal dimension or if the value range is invalid.
+
     """
-    #
+    # Check for valid parcellation name
     if name not in _available_parcellations:
         raise_error(
             f"Parcellation {name} not found. "
             f"Valid options are: {list_parcellations()}"
         )
 
+    # Copy parcellation definition to avoid edits in original object
     parcellation_definition = _available_parcellations[name].copy()
     t_family = parcellation_definition.pop("family")
 
+    # Check if the parcellation family is custom or built-in
     if t_family == "CustomUserParcellation":
         parcellation_fname = Path(parcellation_definition["path"])
         parcellation_labels = parcellation_definition["labels"]
@@ -253,24 +318,31 @@ def load_parcellation(
         **parcellation_definition,
     )
 
+    # Load parcellation image and values
     logger.info(f"Loading parcellation {parcellation_fname.absolute()!s}")
-
     parcellation_img = None
     if path_only is False:
+        # Load image via nibabel
         parcellation_img = nib.load(parcellation_fname)
+        # Get unique values
        parcel_values = np.unique(parcellation_img.get_fdata())
+        # Check for dimension
        if len(parcel_values) - 1 != len(parcellation_labels):
            raise_error(
                f"Parcellation {name} has {len(parcel_values) - 1} parcels "
                f"but {len(parcellation_labels)} labels."
            )
+        # Sort values
        parcel_values.sort()
+        # Check if value range is invalid
        if np.any(np.diff(parcel_values) != 1):
            raise_error(
                f"Parcellation {name} must have all the values in the range "
                f"[0, {len(parcel_values)}]."
            )
 
+    # Type-cast to remove errors
+    parcellation_img = typing.cast("Nifti1Image", parcellation_img)
     return parcellation_img, parcellation_labels, parcellation_fname
 
 
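In short, `get_parcellation` loads each requested parcellation at the target image's minimal voxel size, resamples it onto the target grid with nearest-neighbour interpolation, and merges only when more than one name is given. A hedged usage sketch, following the new tests further below:

from junifer.data import get_parcellation
from junifer.datareader import DefaultDataReader
from junifer.testing.datagrabbers import OasisVBMTestingDataGrabber

with OasisVBMTestingDataGrabber() as dg:
    element_data = DefaultDataReader().fit_transform(dg["sub-01"])
vbm_gm = element_data["VBM_GM"]

# Single parcellation: resampled to the VBM_GM image, no merge step.
parc_img, labels = get_parcellation(
    parcellation=["Schaefer100x7"], target_data=vbm_gm
)

# Multiple parcellations: each one is resampled, then merge_parcellations()
# combines them into one image and one label list.
merged_img, merged_labels = get_parcellation(
    parcellation=["Schaefer100x7", "TianxS2x3TxMNInonlinear2009cAsym"],
    target_data=vbm_gm,
)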
junifer/data/tests/test_coordinates.py
CHANGED
@@ -8,10 +8,13 @@ import pytest
 from numpy.testing import assert_array_equal
 
 from junifer.data.coordinates import (
+    get_coordinates,
     list_coordinates,
     load_coordinates,
     register_coordinates,
 )
+from junifer.datareader import DefaultDataReader
+from junifer.testing.datagrabbers import OasisVBMTestingDataGrabber
 
 
 def test_register_coordinates_built_in_check() -> None:
@@ -54,7 +57,7 @@ def test_register_coordinates_overwrite() -> None:
 
 def test_register_coordinates_valid_input() -> None:
     """Test coordinates registration check for valid input."""
-    with pytest.raises(
+    with pytest.raises(TypeError, match=r"numpy.ndarray"):
         register_coordinates(
             name="MyList",
             coordinates=[1, 2],
@@ -105,3 +108,21 @@ def test_load_coordinates_nonexisting() -> None:
     """Test loading coordinates that not exist."""
     with pytest.raises(ValueError, match=r"not found"):
         load_coordinates("NonExisting")
+
+
+def test_get_coordinates() -> None:
+    """Test tailored coordinates fetch."""
+    reader = DefaultDataReader()
+    with OasisVBMTestingDataGrabber() as dg:
+        element = dg["sub-01"]
+        element_data = reader.fit_transform(element)
+        vbm_gm = element_data["VBM_GM"]
+        # Get tailored coordinates
+        tailored_coords, tailored_labels = get_coordinates(
+            coords="DMNBuckner", target_data=vbm_gm
+        )
+        # Get raw coordinates
+        raw_coords, raw_labels = load_coordinates("DMNBuckner")
+        # Both tailored and raw should be same for now
+        assert_array_equal(tailored_coords, raw_coords)
+        assert tailored_labels == raw_labels
junifer/data/tests/test_parcellations.py
CHANGED
@@ -11,7 +11,7 @@ from typing import List
 import nibabel as nib
 import numpy as np
 import pytest
-from nilearn.image import new_img_like
+from nilearn.image import new_img_like, resample_to_img
 from numpy.testing import assert_array_almost_equal, assert_array_equal
 
 from junifer.data.parcellations import (
@@ -22,11 +22,14 @@ from junifer.data.parcellations import (
     _retrieve_suit,
     _retrieve_tian,
     _retrieve_yan,
+    get_parcellation,
     list_parcellations,
     load_parcellation,
     merge_parcellations,
     register_parcellation,
 )
+from junifer.datareader import DefaultDataReader
+from junifer.testing.datagrabbers import OasisVBMTestingDataGrabber
 
 
 def test_register_parcellation_built_in_check() -> None:
@@ -1078,3 +1081,89 @@ def test_merge_parcellations_3D_multiple_duplicated_labels(
     assert_array_equal(parc_data, merged_parc.get_fdata())
     assert len(labels) == 100
     assert len(np.unique(parc_data)) == 101  # 100 + 1 because background 0
+
+
+def test_get_parcellation_single() -> None:
+    """Test tailored single parcellation fetch."""
+    reader = DefaultDataReader()
+    with OasisVBMTestingDataGrabber() as dg:
+        element = dg["sub-01"]
+        element_data = reader.fit_transform(element)
+        vbm_gm = element_data["VBM_GM"]
+        vbm_gm_img = vbm_gm["data"]
+        # Get tailored parcellation
+        tailored_parcellation, tailored_labels = get_parcellation(
+            parcellation=["Schaefer100x7"],
+            target_data=vbm_gm,
+        )
+        # Check shape and affine with original element data
+        assert tailored_parcellation.shape == vbm_gm_img.shape
+        assert_array_equal(tailored_parcellation.affine, vbm_gm_img.affine)
+        # Get raw parcellation
+        raw_parcellation, raw_labels, _ = load_parcellation(
+            "Schaefer100x7",
+            resolution=1.5,
+        )
+        resampled_raw_parcellation = resample_to_img(
+            source_img=raw_parcellation,
+            target_img=vbm_gm_img,
+            interpolation="nearest",
+            copy=True,
+        )
+        # Check resampled data with tailored data
+        assert_array_equal(
+            tailored_parcellation.get_fdata(),
+            resampled_raw_parcellation.get_fdata(),
+        )
+        assert tailored_labels == raw_labels
+
+
+def test_get_parcellation_multi() -> None:
+    """Test tailored multi parcellation fetch."""
+    reader = DefaultDataReader()
+    with OasisVBMTestingDataGrabber() as dg:
+        element = dg["sub-01"]
+        element_data = reader.fit_transform(element)
+        vbm_gm = element_data["VBM_GM"]
+        vbm_gm_img = vbm_gm["data"]
+        # Get tailored parcellation
+        tailored_parcellation, tailored_labels = get_parcellation(
+            parcellation=[
+                "Schaefer100x7",
+                "TianxS2x3TxMNInonlinear2009cAsym",
+            ],
+            target_data=vbm_gm,
+        )
+        # Check shape and affine with original element data
+        assert tailored_parcellation.shape == vbm_gm_img.shape
+        assert_array_equal(tailored_parcellation.affine, vbm_gm_img.affine)
+        # Get raw parcellations
+        raw_parcellations = []
+        raw_labels = []
+        parcellations_names = [
+            "Schaefer100x7",
+            "TianxS2x3TxMNInonlinear2009cAsym",
+        ]
+        for name in parcellations_names:
+            img, labels, _ = load_parcellation(name=name, resolution=1.5)
+            # Resample raw parcellations
+            resampled_img = resample_to_img(
+                source_img=img,
+                target_img=vbm_gm_img,
+                interpolation="nearest",
+                copy=True,
+            )
+            raw_parcellations.append(resampled_img)
+            raw_labels.append(labels)
+        # Merge resampled parcellations
+        merged_resampled_parcellations, merged_labels = merge_parcellations(
+            parcellations_list=raw_parcellations,
+            parcellations_names=parcellations_names,
+            labels_lists=raw_labels,
+        )
+        # Check resampled data with tailored data
+        assert_array_equal(
+            tailored_parcellation.get_fdata(),
+            merged_resampled_parcellations.get_fdata(),
+        )
+        assert tailored_labels == merged_labels
junifer/markers/parcel_aggregation.py
CHANGED
@@ -7,11 +7,11 @@
 from typing import Any, ClassVar, Dict, List, Optional, Set, Union
 
 import numpy as np
-from nilearn.image import math_img, resample_to_img
+from nilearn.image import math_img
 from nilearn.maskers import NiftiMasker
 
 from ..api.decorators import register_marker
-from ..data import get_mask, load_parcellation, merge_parcellations
+from ..data import get_mask, get_parcellation
 from ..stats import get_aggfunc_by_name
 from ..utils import logger, raise_error, warn_with_log
 from .base import BaseMarker
@@ -49,6 +49,13 @@ class ParcelAggregation(BaseMarker):
     name : str, optional
         The name of the marker. If None, will use the class name (default
         None).
+
+    Raises
+    ------
+    ValueError
+        If ``time_method`` is specified for non-BOLD data or if
+        ``time_method_params`` is not None when ``time_method`` is None.
+
     """
 
     _DEPENDENCIES: ClassVar[Set[str]] = {"nilearn", "numpy"}
@@ -110,6 +117,11 @@ class ParcelAggregation(BaseMarker):
         str
             The storage type output by the marker.
 
+        Raises
+        ------
+        ValueError
+            If the ``input_type`` is invalid.
+
         """
 
         if input_type in ["VBM_GM", "VBM_WM", "fALFF", "GCOR", "LCOR"]:
@@ -117,7 +129,7 @@ class ParcelAggregation(BaseMarker):
         elif input_type == "BOLD":
             return "timeseries"
         else:
-
+            raise_error(f"Unknown input kind for {input_type}")
 
     def compute(
         self, input: Dict[str, Any], extra_input: Optional[Dict] = None
@@ -145,62 +157,53 @@ class ParcelAggregation(BaseMarker):
            * ``data`` : the actual computed values as a numpy.ndarray
            * ``col_names`` : the column labels for the computed values as list
 
+        Warns
+        -----
+        RuntimeWarning
+            If time aggregation is required but only time point is available.
+
         """
         t_input_img = input["data"]
         logger.debug(f"Parcel aggregation using {self.method}")
+        # Get aggregation function
         agg_func = get_aggfunc_by_name(
             name=self.method, func_params=self.method_params
         )
-        # Get the min of the voxels sizes and use it as the resolution
-        resolution = np.min(t_input_img.header.get_zooms()[:3])
-
-        # Load the parcellations
-        all_parcelations = []
-        all_labels = []
-        for t_parc_name in self.parcellation:
-            t_parcellation, t_labels, _ = load_parcellation(
-                name=t_parc_name, resolution=resolution
-            )
-            # Resample all of them to the image
-            t_parcellation_img_res = resample_to_img(
-                t_parcellation, t_input_img, interpolation="nearest", copy=True
-            )
-            all_parcelations.append(t_parcellation_img_res)
-            all_labels.append(t_labels)
 
-        #
-
-
-
-
-
-        parcellation_img_res, labels = merge_parcellations(
-            all_parcelations, self.parcellation, all_labels
-        )
+        # Get parcellation tailored to target image
+        parcellation_img, labels = get_parcellation(
+            parcellation=self.parcellation,
+            target_data=input,
+            extra_input=extra_input,
+        )
 
-
+        # Get binarized parcellation image for masking
+        parcellation_bin = math_img("img != 0", img=parcellation_img)
 
+        # Load mask
         if self.masks is not None:
             logger.debug(f"Masking with {self.masks}")
+            # Get tailored mask
             mask_img = get_mask(
                 masks=self.masks, target_data=input, extra_input=extra_input
             )
-
+            # Get "logical and" version of parcellation and mask
             parcellation_bin = math_img(
                 "np.logical_and(img, mask)",
                 img=parcellation_bin,
                 mask=mask_img,
            )
 
+        # Initialize masker
         logger.debug("Masking")
         masker = NiftiMasker(
             parcellation_bin, target_affine=t_input_img.affine
-        )
-
+        )
         # Mask the input data and the parcellation
         data = masker.fit_transform(t_input_img)
-        parcellation_values =
-
+        parcellation_values = np.squeeze(
+            masker.transform(parcellation_img)
+        ).astype(int)
 
         # Get the values for each parcel and apply agg function
         logger.debug("Computing ROI means")
@@ -214,6 +217,7 @@ class ParcelAggregation(BaseMarker):
 
         out_values = np.array(out_values).T
 
+        # Apply time dimension aggregation if required
         if self.time_method is not None:
             if out_values.shape[0] > 1:
                 logger.debug("Aggregating time dimension")
@@ -226,5 +230,6 @@ class ParcelAggregation(BaseMarker):
                     "No time dimension to aggregate as only one time point is "
                     "available."
                 )
+        # Format the output
         out = {"data": out_values, "col_names": labels}
         return out
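The practical effect is that `ParcelAggregation.compute` no longer resamples and merges parcellations inline but delegates to `get_parcellation` (and to `get_mask` when masking). A sketch of running the marker end-to-end; the constructor keyword names are an assumption inferred from the attributes used in the diff (`self.parcellation`, `self.method`):

from junifer.datareader import DefaultDataReader
from junifer.markers import ParcelAggregation
from junifer.testing.datagrabbers import OasisVBMTestingDataGrabber

with OasisVBMTestingDataGrabber() as dg:
    element_data = DefaultDataReader().fit_transform(dg["sub-01"])

# compute() obtains the tailored parcellation via get_parcellation() and then
# aggregates voxel values per parcel with the requested method.
marker = ParcelAggregation(parcellation="Schaefer100x7", method="mean")
out = marker.compute(input=element_data["VBM_GM"])
print(out["data"].shape, len(out["col_names"]))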
junifer/markers/sphere_aggregation.py
CHANGED
@@ -7,7 +7,7 @@
 from typing import Any, ClassVar, Dict, List, Optional, Set, Union
 
 from ..api.decorators import register_marker
-from ..data import
+from ..data import get_coordinates, get_mask
 from ..external.nilearn import JuniferNiftiSpheresMasker
 from ..stats import get_aggfunc_by_name
 from ..utils import logger, raise_error, warn_with_log
@@ -55,6 +55,12 @@ class SphereAggregation(BaseMarker):
         The name of the marker. By default, it will use KIND_SphereAggregation
         where KIND is the kind of data it was applied to (default None).
 
+    Raises
+    ------
+    ValueError
+        If ``time_method`` is specified for non-BOLD data or if
+        ``time_method_params`` is not None when ``time_method`` is None.
+
     """
 
     _DEPENDENCIES: ClassVar[Set[str]] = {"nilearn", "numpy"}
@@ -118,6 +124,11 @@ class SphereAggregation(BaseMarker):
         str
             The storage type output by the marker.
 
+        Raises
+        ------
+        ValueError
+            If the ``input_type`` is invalid.
+
         """
 
         if input_type in ["VBM_GM", "VBM_WM", "fALFF", "GCOR", "LCOR"]:
@@ -125,7 +136,7 @@ class SphereAggregation(BaseMarker):
         elif input_type == "BOLD":
             return "timeseries"
         else:
-
+            raise_error(f"Unknown input kind for {input_type}")
 
     def compute(
         self,
@@ -155,6 +166,11 @@ class SphereAggregation(BaseMarker):
            * ``data`` : the actual computed values as a numpy.ndarray
            * ``col_names`` : the column labels for the computed values as list
 
+        Warns
+        -----
+        RuntimeWarning
+            If time aggregation is required but only time point is available.
+
         """
         t_input_img = input["data"]
         logger.debug(f"Sphere aggregation using {self.method}")
@@ -162,15 +178,25 @@ class SphereAggregation(BaseMarker):
         agg_func = get_aggfunc_by_name(
             self.method, func_params=self.method_params
         )
+
+        # Get seeds and labels tailored to target image
+        coords, labels = get_coordinates(
+            coords=self.coords,
+            target_data=input,
+            extra_input=extra_input,
+        )
+
         # Load mask
         mask_img = None
         if self.masks is not None:
             logger.debug(f"Masking with {self.masks}")
+            # Get tailored mask
             mask_img = get_mask(
                 masks=self.masks, target_data=input, extra_input=extra_input
             )
-
-
+
+        # Initialize masker
+        logger.debug("Masking")
         masker = JuniferNiftiSpheresMasker(
             seeds=coords,
             radius=self.radius,
@@ -180,6 +206,8 @@ class SphereAggregation(BaseMarker):
         )
         # Fit and transform the marker on the data
         out_values = masker.fit_transform(t_input_img)
+
+        # Apply time dimension aggregation if required
         if self.time_method is not None:
             if out_values.shape[0] > 1:
                 logger.debug("Aggregating time dimension")
@@ -193,5 +221,5 @@ class SphereAggregation(BaseMarker):
                     "available."
                 )
         # Format the output
-        out = {"data": out_values, "col_names":
+        out = {"data": out_values, "col_names": labels}
         return out
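Analogously, `SphereAggregation.compute` now fetches its seeds and labels through `get_coordinates` before building the `JuniferNiftiSpheresMasker`. A sketch under the same assumptions (keyword names inferred from `self.coords`, `self.radius` and `self.method` in the diff):

from junifer.datareader import DefaultDataReader
from junifer.markers import SphereAggregation
from junifer.testing.datagrabbers import OasisVBMTestingDataGrabber

with OasisVBMTestingDataGrabber() as dg:
    element_data = DefaultDataReader().fit_transform(dg["sub-01"])

# Seeds and labels come from get_coordinates(); aggregation happens per sphere.
marker = SphereAggregation(coords="DMNBuckner", radius=8.0, method="mean")
out = marker.compute(input=element_data["VBM_GM"])
print(out["data"].shape, out["col_names"])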
{junifer-0.0.4.dev108.dist-info → junifer-0.0.4.dev133.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: junifer
-Version: 0.0.4.dev108
+Version: 0.0.4.dev133
 Summary: JUelich NeuroImaging FEature extractoR
 Author-email: Fede Raimondo <f.raimondo@fz-juelich.de>, Synchon Mandal <s.mandal@fz-juelich.de>
 Maintainer-email: Fede Raimondo <f.raimondo@fz-juelich.de>, Synchon Mandal <s.mandal@fz-juelich.de>
{junifer-0.0.4.dev108.dist-info → junifer-0.0.4.dev133.dist-info}/RECORD
CHANGED
@@ -1,5 +1,5 @@
 junifer/__init__.py,sha256=x1UR2jUcrUdm2HNl-3Qvyi4UUrU6ms5qm2qcmNY7zZk,391
-junifer/_version.py,sha256=
+junifer/_version.py,sha256=H4AlzRmDL8gZspsFPWSrhtdTYzXK9ftnsDE7ckYan-c,428
 junifer/stats.py,sha256=KUX4jJcLWnlE34coet8EkdFypFd-td4Vtpx5LvlomVs,5879
 junifer/api/__init__.py,sha256=YILu9M7SC0Ri4CVd90fELH2OnK_gvCYAXCoqBNCFE8E,257
 junifer/api/cli.py,sha256=mgqSeZbW9DvdztGL3DRXM4SQS7saJ-v18K6FL8LaDVs,11915
@@ -36,10 +36,10 @@ junifer/configs/juseless/datagrabbers/tests/test_camcan_vbm.py,sha256=o0dzptS97p
 junifer/configs/juseless/datagrabbers/tests/test_ixi_vbm.py,sha256=8jxpNZelXwpJGvA5LOfpso2X8yt1chvERAYmv76hS_g,1252
 junifer/configs/juseless/datagrabbers/tests/test_ucla.py,sha256=e-jdvcZ9B0mka6_573JJU_cGwSaUV54U8X_n0UadtJY,3351
 junifer/configs/juseless/datagrabbers/tests/test_ukb_vbm.py,sha256=b9hjc1mgO--PSRC3id2EzzfE2yWNsuZ2UI47a6sfGZU,1025
-junifer/data/__init__.py,sha256=
-junifer/data/coordinates.py,sha256=
+junifer/data/__init__.py,sha256=oUjOs8_M6fitNb44izxpXf3su1e4pG_vCdjwVYkjZjQ,550
+junifer/data/coordinates.py,sha256=8jwx3uN4rmmwTWfbwjtsXhTlWIBqXlGizKjwdPklNSk,7231
 junifer/data/masks.py,sha256=SYt2EJCUCw7KAl8nBK5fRFRonrfIVQn-DH6aQsnr7vE,11280
-junifer/data/parcellations.py,sha256=
+junifer/data/parcellations.py,sha256=9W3cZBS1SS3Rc48-diMYMxuzO3oxQfrsWvnPnONGjDc,51172
 junifer/data/utils.py,sha256=K9quLIoWRmm2QFM8Rdy_5bYsWb_XhL0l5Uq_1Sie0kA,1274
 junifer/data/VOIs/meta/CogAC_VOIs.txt,sha256=Sr5_E712OLdeQRyUcDNM0wLBvZIyO6gc9Q7KkyJHX1A,398
 junifer/data/VOIs/meta/CogAR_VOIs.txt,sha256=t3NLwEVUZTPP34p15SaB3UInLrQyK-7Qc4iLBuQlZu8,189
@@ -62,10 +62,10 @@ junifer/data/VOIs/meta/extDMN_VOIs.txt,sha256=Ogx1QvqZcnXDM3ncF2ha78br8xwQ5wklSj
 junifer/data/masks/vickery-patil/CAT12_IXI555_MNI152_TMP_GS_GMprob0.2_clean.nii.gz,sha256=j6EY8EtRnUuRxeKgD65Q6B0GPEPIALKDJEIje1TfnAU,88270
 junifer/data/masks/vickery-patil/CAT12_IXI555_MNI152_TMP_GS_GMprob0.2_clean_3mm.nii.gz,sha256=crb_y7YO1vjjf2PwbRJUm8KamPK6fx1y0B_l-E3g8FY,12862
 junifer/data/masks/vickery-patil/GMprob0.2_cortex_3mm_NA_rm.nii.gz,sha256=jfMe_4H9XEnArYms5bSQbqS2V1_HbLHTfI5amQa_Pes,8700
-junifer/data/tests/test_coordinates.py,sha256=
+junifer/data/tests/test_coordinates.py,sha256=UvB1BexiQUs9e_Lj8VrKGK3XiyukXhA2Tn_kW0wmrUA,4067
 junifer/data/tests/test_data_utils.py,sha256=Vy7x8zaHws5hmn92PKSv3H38hU2kamOpyaH6nG_NNpw,1086
 junifer/data/tests/test_masks.py,sha256=uhY3dm0CGbKTMs8hAMJxQx0KMcl9qmfaee3guBrdYas,13269
-junifer/data/tests/test_parcellations.py,sha256=
+junifer/data/tests/test_parcellations.py,sha256=XHNW_WrYuIfcbSZ04TexY2F2XwY9PqdKztC8HCE-nFU,34791
 junifer/datagrabber/__init__.py,sha256=xKMQMjqoWul13YluGTLLMBgKahUg5jJKi4phPih3XJU,634
 junifer/datagrabber/base.py,sha256=r4SQrse9XWmMqYaCl861bhzEs1uQGBTUeOgH9d39mEE,6016
 junifer/datagrabber/datalad_base.py,sha256=dDaBiIePPP6-G4ycgBMxTcXxs4vkg-yDS3OBURK4VGs,10731
@@ -106,8 +106,8 @@ junifer/markers/__init__.py,sha256=exUUmpDsPkoNa9FK6Y7pDusOYv56_zoci8hiOaxyswE,7
 junifer/markers/base.py,sha256=Uuaz8MvdHOfAnRGpUf6ISWgbNWzkmHIMo6f30jlW7zY,6109
 junifer/markers/collection.py,sha256=u5DAWdEPsYK1Jmqob1FZgzOK_cm_8JXJ2CpSVxgamBY,4592
 junifer/markers/ets_rss.py,sha256=7fr6mmbMM5NKIDV6bUyyu-pwHJH56GwYv6oozK4EX6k,4557
-junifer/markers/parcel_aggregation.py,sha256=
-junifer/markers/sphere_aggregation.py,sha256=
+junifer/markers/parcel_aggregation.py,sha256=F059dbrow2v8MREm2l29mbPg2_PcSwfNalbUJOwaFsE,8420
+junifer/markers/sphere_aggregation.py,sha256=bnX9aiHf8pon47QVA0BiFOZV1Bgpyy4doPh2K0Nl9OA,8027
 junifer/markers/utils.py,sha256=kqgnzFqu2BkFW-UcJA2TrtgCctUncIbersWFapxcXBg,3777
 junifer/markers/falff/__init__.py,sha256=QAUIVtaOFWDL7B8xFrDgsmRum7Q0DCENyDYD1cPMJmM,197
 junifer/markers/falff/falff_base.py,sha256=W6bwkEOtiBTNMow2OQlPWCtP5GLy75TlEWK-48_xGl8,6001
@@ -198,10 +198,10 @@ junifer/utils/fs.py,sha256=Jd9AoV2fIF7pT7KhXsn8T1O1fJ1_SFZgaFuOBAM7DG8,460
 junifer/utils/logging.py,sha256=phBwOFaK6ejqbSjkCSAkZhhdo4sr01GdVZmJIL8t-Lw,8994
 junifer/utils/tests/test_fs.py,sha256=WQS7cKlKEZ742CIuiOYYpueeAhY9PqlastfDVpVVtvE,923
 junifer/utils/tests/test_logging.py,sha256=l8oo-AiBV7H6_IzlsNcj__cLeZBUvgIGoaMszD9VaJg,7754
-junifer-0.0.4.
-junifer-0.0.4.
-junifer-0.0.4.
-junifer-0.0.4.
-junifer-0.0.4.
-junifer-0.0.4.
-junifer-0.0.4.
+junifer-0.0.4.dev133.dist-info/AUTHORS.rst,sha256=rmULKpchpSol4ExWFdm-qu4fkpSZPYqIESVJBZtGb6E,163
+junifer-0.0.4.dev133.dist-info/LICENSE.md,sha256=MqCnOBu8uXsEOzRZWh9EBVfVz-kE9NkXcLCrtGXo2yU,34354
+junifer-0.0.4.dev133.dist-info/METADATA,sha256=80MHorCb5IZa8m9SBXNaVmzFs5fMG5pPAWYwc_EJ1oI,6825
+junifer-0.0.4.dev133.dist-info/WHEEL,sha256=yQN5g4mg4AybRjkgi-9yy4iQEFibGQmlz78Pik5Or-A,92
+junifer-0.0.4.dev133.dist-info/entry_points.txt,sha256=DxFvKq0pOqRunAK0FxwJcoDfV1-dZvsFDpD5HRqSDhw,48
+junifer-0.0.4.dev133.dist-info/top_level.txt,sha256=4bAq1R2QFQ4b3hohjys2JBvxrl0GKk5LNFzYvz9VGcA,8
+junifer-0.0.4.dev133.dist-info/RECORD,,
{junifer-0.0.4.dev108.dist-info → junifer-0.0.4.dev133.dist-info}/AUTHORS.rst
File without changes
{junifer-0.0.4.dev108.dist-info → junifer-0.0.4.dev133.dist-info}/LICENSE.md
File without changes
{junifer-0.0.4.dev108.dist-info → junifer-0.0.4.dev133.dist-info}/WHEEL
File without changes
{junifer-0.0.4.dev108.dist-info → junifer-0.0.4.dev133.dist-info}/entry_points.txt
File without changes
{junifer-0.0.4.dev108.dist-info → junifer-0.0.4.dev133.dist-info}/top_level.txt
File without changes