junifer 0.0.6.dev154__py3-none-any.whl → 0.0.6.dev194__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- junifer/__init__.pyi +2 -0
- junifer/_version.py +2 -2
- junifer/api/decorators.py +6 -11
- junifer/api/functions.py +74 -62
- junifer/api/tests/test_functions.py +2 -2
- junifer/data/__init__.pyi +17 -31
- junifer/data/_dispatch.py +251 -0
- junifer/data/coordinates/__init__.py +9 -0
- junifer/data/coordinates/__init__.pyi +5 -0
- junifer/data/coordinates/_ants_coordinates_warper.py +96 -0
- junifer/data/coordinates/_coordinates.py +356 -0
- junifer/data/coordinates/_fsl_coordinates_warper.py +83 -0
- junifer/data/{tests → coordinates/tests}/test_coordinates.py +25 -31
- junifer/data/masks/__init__.py +9 -0
- junifer/data/masks/__init__.pyi +6 -0
- junifer/data/masks/_ants_mask_warper.py +144 -0
- junifer/data/masks/_fsl_mask_warper.py +87 -0
- junifer/data/masks/_masks.py +624 -0
- junifer/data/{tests → masks/tests}/test_masks.py +63 -58
- junifer/data/parcellations/__init__.py +9 -0
- junifer/data/parcellations/__init__.pyi +6 -0
- junifer/data/parcellations/_ants_parcellation_warper.py +154 -0
- junifer/data/parcellations/_fsl_parcellation_warper.py +91 -0
- junifer/data/{parcellations.py → parcellations/_parcellations.py} +450 -473
- junifer/data/{tests → parcellations/tests}/test_parcellations.py +73 -81
- junifer/data/pipeline_data_registry_base.py +74 -0
- junifer/data/utils.py +4 -0
- junifer/datagrabber/aomic/piop2.py +1 -1
- junifer/markers/complexity/hurst_exponent.py +2 -2
- junifer/markers/complexity/multiscale_entropy_auc.py +2 -2
- junifer/markers/complexity/perm_entropy.py +2 -2
- junifer/markers/complexity/range_entropy.py +2 -2
- junifer/markers/complexity/range_entropy_auc.py +2 -2
- junifer/markers/complexity/sample_entropy.py +2 -2
- junifer/markers/complexity/weighted_perm_entropy.py +2 -2
- junifer/markers/ets_rss.py +2 -2
- junifer/markers/falff/falff_parcels.py +2 -2
- junifer/markers/falff/falff_spheres.py +2 -2
- junifer/markers/functional_connectivity/edge_functional_connectivity_parcels.py +1 -1
- junifer/markers/functional_connectivity/edge_functional_connectivity_spheres.py +1 -1
- junifer/markers/functional_connectivity/functional_connectivity_parcels.py +1 -1
- junifer/markers/functional_connectivity/functional_connectivity_spheres.py +1 -1
- junifer/markers/functional_connectivity/tests/test_functional_connectivity_parcels.py +3 -3
- junifer/markers/functional_connectivity/tests/test_functional_connectivity_spheres.py +2 -2
- junifer/markers/parcel_aggregation.py +11 -7
- junifer/markers/reho/reho_parcels.py +2 -2
- junifer/markers/reho/reho_spheres.py +2 -2
- junifer/markers/sphere_aggregation.py +11 -7
- junifer/markers/temporal_snr/temporal_snr_parcels.py +2 -2
- junifer/markers/temporal_snr/temporal_snr_spheres.py +2 -2
- junifer/markers/tests/test_ets_rss.py +3 -3
- junifer/markers/tests/test_parcel_aggregation.py +24 -24
- junifer/markers/tests/test_sphere_aggregation.py +6 -6
- junifer/pipeline/__init__.pyi +2 -2
- junifer/pipeline/pipeline_component_registry.py +299 -0
- junifer/pipeline/tests/test_pipeline_component_registry.py +201 -0
- junifer/preprocess/confounds/fmriprep_confound_remover.py +6 -3
- junifer/testing/__init__.pyi +2 -2
- junifer/testing/registry.py +4 -7
- junifer/testing/tests/test_testing_registry.py +9 -17
- {junifer-0.0.6.dev154.dist-info → junifer-0.0.6.dev194.dist-info}/METADATA +1 -1
- {junifer-0.0.6.dev154.dist-info → junifer-0.0.6.dev194.dist-info}/RECORD +86 -72
- {junifer-0.0.6.dev154.dist-info → junifer-0.0.6.dev194.dist-info}/WHEEL +1 -1
- junifer/data/coordinates.py +0 -408
- junifer/data/masks.py +0 -670
- junifer/pipeline/registry.py +0 -245
- junifer/pipeline/tests/test_registry.py +0 -150
- /junifer/data/{VOIs → coordinates/VOIs}/meta/AutobiographicalMemory_VOIs.txt +0 -0
- /junifer/data/{VOIs → coordinates/VOIs}/meta/CogAC_VOIs.txt +0 -0
- /junifer/data/{VOIs → coordinates/VOIs}/meta/CogAR_VOIs.txt +0 -0
- /junifer/data/{VOIs → coordinates/VOIs}/meta/DMNBuckner_VOIs.txt +0 -0
- /junifer/data/{VOIs → coordinates/VOIs}/meta/Dosenbach2010_MNI_VOIs.txt +0 -0
- /junifer/data/{VOIs → coordinates/VOIs}/meta/Empathy_VOIs.txt +0 -0
- /junifer/data/{VOIs → coordinates/VOIs}/meta/Motor_VOIs.txt +0 -0
- /junifer/data/{VOIs → coordinates/VOIs}/meta/MultiTask_VOIs.txt +0 -0
- /junifer/data/{VOIs → coordinates/VOIs}/meta/PhysioStress_VOIs.txt +0 -0
- /junifer/data/{VOIs → coordinates/VOIs}/meta/Power2011_MNI_VOIs.txt +0 -0
- /junifer/data/{VOIs → coordinates/VOIs}/meta/Power2013_MNI_VOIs.tsv +0 -0
- /junifer/data/{VOIs → coordinates/VOIs}/meta/Rew_VOIs.txt +0 -0
- /junifer/data/{VOIs → coordinates/VOIs}/meta/Somatosensory_VOIs.txt +0 -0
- /junifer/data/{VOIs → coordinates/VOIs}/meta/ToM_VOIs.txt +0 -0
- /junifer/data/{VOIs → coordinates/VOIs}/meta/VigAtt_VOIs.txt +0 -0
- /junifer/data/{VOIs → coordinates/VOIs}/meta/WM_VOIs.txt +0 -0
- /junifer/data/{VOIs → coordinates/VOIs}/meta/eMDN_VOIs.txt +0 -0
- /junifer/data/{VOIs → coordinates/VOIs}/meta/eSAD_VOIs.txt +0 -0
- /junifer/data/{VOIs → coordinates/VOIs}/meta/extDMN_VOIs.txt +0 -0
- {junifer-0.0.6.dev154.dist-info → junifer-0.0.6.dev194.dist-info}/AUTHORS.rst +0 -0
- {junifer-0.0.6.dev154.dist-info → junifer-0.0.6.dev194.dist-info}/LICENSE.md +0 -0
- {junifer-0.0.6.dev154.dist-info → junifer-0.0.6.dev194.dist-info}/entry_points.txt +0 -0
- {junifer-0.0.6.dev154.dist-info → junifer-0.0.6.dev194.dist-info}/top_level.txt +0 -0
junifer/data/coordinates.py
DELETED
@@ -1,408 +0,0 @@
-"""Functions for coordinates manipulation."""
-
-# Authors: Federico Raimondo <f.raimondo@fz-juelich.de>
-#          Synchon Mandal <s.mandal@fz-juelich.de>
-# License: AGPL
-
-import typing
-from pathlib import Path
-from typing import Any, Dict, List, Optional, Tuple, Union
-
-import numpy as np
-import pandas as pd
-from numpy.typing import ArrayLike
-
-from ..pipeline import WorkDirManager
-from ..utils import logger, raise_error, run_ext_cmd
-
-
-__all__ = [
-    "register_coordinates",
-    "list_coordinates",
-    "get_coordinates",
-    "load_coordinates",
-]
-
-
-# Path to the VOIs
-_vois_path = Path(__file__).parent / "VOIs"
-
-# Path to the metadata of the VOIs
-_vois_meta_path = _vois_path / "meta"
-
-# A dictionary containing all supported coordinates and their respective file
-# or data.
-
-# Each entry is a dictionary that must contain at least the following keys:
-# * 'space': the coordinates' space (e.g., 'MNI')
-
-# The built-in coordinates are files that are shipped with the package in the
-# data/VOIs directory. The user can also register their own coordinates, which
-# will be stored as numpy arrays in the dictionary.
-_available_coordinates: Dict[
-    str, Dict[str, Union[Path, ArrayLike, List[str]]]
-] = {
-    "CogAC": {
-        "path": _vois_meta_path / "CogAC_VOIs.txt",
-        "space": "MNI",
-    },
-    "CogAR": {
-        "path": _vois_meta_path / "CogAR_VOIs.txt",
-        "space": "MNI",
-    },
-    "DMNBuckner": {
-        "path": _vois_meta_path / "DMNBuckner_VOIs.txt",
-        "space": "MNI",
-    },
-    "eMDN": {
-        "path": _vois_meta_path / "eMDN_VOIs.txt",
-        "space": "MNI",
-    },
-    "Empathy": {
-        "path": _vois_meta_path / "Empathy_VOIs.txt",
-        "space": "MNI",
-    },
-    "eSAD": {
-        "path": _vois_meta_path / "eSAD_VOIs.txt",
-        "space": "MNI",
-    },
-    "extDMN": {
-        "path": _vois_meta_path / "extDMN_VOIs.txt",
-        "space": "MNI",
-    },
-    "Motor": {
-        "path": _vois_meta_path / "Motor_VOIs.txt",
-        "space": "MNI",
-    },
-    "MultiTask": {
-        "path": _vois_meta_path / "MultiTask_VOIs.txt",
-        "space": "MNI",
-    },
-    "PhysioStress": {
-        "path": _vois_meta_path / "PhysioStress_VOIs.txt",
-        "space": "MNI",
-    },
-    "Rew": {
-        "path": _vois_meta_path / "Rew_VOIs.txt",
-        "space": "MNI",
-    },
-    "Somatosensory": {
-        "path": _vois_meta_path / "Somatosensory_VOIs.txt",
-        "space": "MNI",
-    },
-    "ToM": {
-        "path": _vois_meta_path / "ToM_VOIs.txt",
-        "space": "MNI",
-    },
-    "VigAtt": {
-        "path": _vois_meta_path / "VigAtt_VOIs.txt",
-        "space": "MNI",
-    },
-    "WM": {
-        "path": _vois_meta_path / "WM_VOIs.txt",
-        "space": "MNI",
-    },
-    "Power": {
-        "path": _vois_meta_path / "Power2011_MNI_VOIs.txt",
-        "space": "MNI",
-    },
-    "Power2011": {
-        "path": _vois_meta_path / "Power2011_MNI_VOIs.txt",
-        "space": "MNI",
-    },
-    "Dosenbach": {
-        "path": _vois_meta_path / "Dosenbach2010_MNI_VOIs.txt",
-        "space": "MNI",
-    },
-    "Power2013": {
-        "path": _vois_meta_path / "Power2013_MNI_VOIs.tsv",
-        "space": "MNI",
-    },
-    "AutobiographicalMemory": {
-        "path": _vois_meta_path / "AutobiographicalMemory_VOIs.txt",
-        "space": "MNI",
-    },
-}
-
-
-def register_coordinates(
-    name: str,
-    coordinates: ArrayLike,
-    voi_names: List[str],
-    space: str,
-    overwrite: Optional[bool] = False,
-) -> None:
-    """Register a custom user coordinates.
-
-    Parameters
-    ----------
-    name : str
-        The name of the coordinates.
-    coordinates : numpy.ndarray
-        The coordinates. This should be a 2-dimensional array with three
-        columns. Each row corresponds to a volume-of-interest (VOI) and each
-        column corresponds to a spatial dimension (i.e. x, y, and
-        z-coordinates).
-    voi_names : list of str
-        The names of the VOIs.
-    space : str
-        The space of the coordinates, for e.g., "MNI".
-    overwrite : bool, optional
-        If True, overwrite an existing list of coordinates with the same name.
-        Does not apply to built-in coordinates (default False).
-
-    Raises
-    ------
-    ValueError
-        If the coordinates name is already registered and overwrite is set to
-        False or if the coordinates name is a built-in coordinates or if the
-        ``coordinates`` is not a 2D array or if coordinate value does not have
-        3 components or if the ``voi_names`` shape does not match the
-        ``coordinates`` shape.
-    TypeError
-        If ``coordinates`` is not a ``numpy.ndarray``.
-
-    """
-    if name in _available_coordinates:
-        if isinstance(_available_coordinates[name].get("path"), Path):
-            raise_error(
-                f"Coordinates {name} already registered as built-in "
-                "coordinates."
-            )
-        if overwrite is True:
-            logger.info(f"Overwriting coordinates {name}")
-        else:
-            raise_error(
-                f"Coordinates {name} already registered. "
-                "Set `overwrite=True` to update its value."
-            )
-
-    if not isinstance(coordinates, np.ndarray):
-        raise_error(
-            f"Coordinates must be a `numpy.ndarray`, not {type(coordinates)}.",
-            klass=TypeError,
-        )
-    if coordinates.ndim != 2:
-        raise_error(
-            f"Coordinates must be a 2D array, not {coordinates.ndim}D."
-        )
-    if coordinates.shape[1] != 3:
-        raise_error(
-            f"Each coordinate must have 3 values, not {coordinates.shape[1]} "
-        )
-    if len(voi_names) != coordinates.shape[0]:
-        raise_error(
-            f"Length of `voi_names` ({len(voi_names)}) does not match the "
-            f"number of `coordinates` ({coordinates.shape[0]})."
-        )
-    _available_coordinates[name] = {
-        "coords": coordinates,
-        "voi_names": voi_names,
-        "space": space,
-    }
-
-
-def list_coordinates() -> List[str]:
-    """List all the available coordinates (VOIs).
-
-    Returns
-    -------
-    list of str
-        A list with all available coordinates names.
-
-    """
-    return sorted(_available_coordinates.keys())
-
-
-def get_coordinates(
-    coords: str,
-    target_data: Dict[str, Any],
-    extra_input: Optional[Dict[str, Any]] = None,
-) -> Tuple[ArrayLike, List[str]]:
-    """Get coordinates, tailored for the target image.
-
-    Parameters
-    ----------
-    coords : str
-        The name of the coordinates.
-    target_data : dict
-        The corresponding item of the data object to which the coordinates
-        will be applied.
-    extra_input : dict, optional
-        The other fields in the data object. Useful for accessing other data
-        kinds that needs to be used in the computation of coordinates
-        (default None).
-
-    Returns
-    -------
-    numpy.ndarray
-        The coordinates.
-    list of str
-        The names of the VOIs.
-
-    Raises
-    ------
-    RuntimeError
-        If warp / transformation file extension is not ".mat" or ".h5".
-    ValueError
-        If ``extra_input`` is None when ``target_data``'s space is native.
-
-    """
-    # Load the coordinates
-    seeds, labels, _ = load_coordinates(name=coords)
-
-    # Transform coordinate if target data is native
-    if target_data["space"] == "native":
-        # Check for extra inputs
-        if extra_input is None:
-            raise_error(
-                "No extra input provided, requires `Warp` and `T1w` "
-                "data types in particular for transformation to "
-                f"{target_data['space']} space for further computation."
-            )
-
-        # Create component-scoped tempdir
-        tempdir = WorkDirManager().get_tempdir(prefix="coordinates")
-
-        # Create element-scoped tempdir so that transformed coordinates is
-        # available later as numpy stores file path reference for
-        # loading on computation
-        element_tempdir = WorkDirManager().get_element_tempdir(
-            prefix="coordinates"
-        )
-
-        # Check for warp file type to use correct tool
-        warp_file_ext = extra_input["Warp"]["path"].suffix
-        if warp_file_ext == ".mat":
-            # Save existing coordinates to a component-scoped tempfile
-            pretransform_coordinates_path = (
-                tempdir / "pretransform_coordinates.txt"
-            )
-            np.savetxt(pretransform_coordinates_path, seeds)
-
-            # Create an element-scoped tempfile for transformed coordinates
-            # output
-            transformed_coords_path = (
-                element_tempdir / "coordinates_transformed.txt"
-            )
-
-            logger.debug("Using FSL for coordinates transformation")
-            # Set img2imgcoord command
-            img2imgcoord_cmd = [
-                "cat",
-                f"{pretransform_coordinates_path.resolve()}",
-                "| img2imgcoord -mm",
-                f"-src {target_data['path'].resolve()}",
-                f"-dest {target_data['reference_path'].resolve()}",
-                f"-warp {extra_input['Warp']['path'].resolve()}",
-                f"> {transformed_coords_path.resolve()};",
-                f"sed -i 1d {transformed_coords_path.resolve()}",
-            ]
-            # Call img2imgcoord
-            run_ext_cmd(name="img2imgcoord", cmd=img2imgcoord_cmd)
-
-            # Load coordinates
-            seeds = np.loadtxt(transformed_coords_path)
-
-        elif warp_file_ext == ".h5":
-            # Save existing coordinates to a component-scoped tempfile
-            pretransform_coordinates_path = (
-                tempdir / "pretransform_coordinates.csv"
-            )
-            np.savetxt(
-                pretransform_coordinates_path,
-                seeds,
-                delimiter=",",
-                # Add header while saving to make ANTs work
-                header="x,y,z",
-            )
-
-            # Create an element-scoped tempfile for transformed coordinates
-            # output
-            transformed_coords_path = (
-                element_tempdir / "coordinates_transformed.csv"
-            )
-
-            logger.debug("Using ANTs for coordinates transformation")
-            # Set antsApplyTransformsToPoints command
-            apply_transforms_to_points_cmd = [
-                "antsApplyTransformsToPoints",
-                "-d 3",
-                "-p 1",
-                "-f 0",
-                f"-i {pretransform_coordinates_path.resolve()}",
-                f"-o {transformed_coords_path.resolve()}",
-                f"-t {extra_input['Warp']['path'].resolve()};",
-            ]
-            # Call antsApplyTransformsToPoints
-            run_ext_cmd(
-                name="antsApplyTransformsToPoints",
-                cmd=apply_transforms_to_points_cmd,
-            )
-
-            # Load coordinates
-            seeds = np.loadtxt(
-                # Skip header when reading
-                transformed_coords_path,
-                delimiter=",",
-                skiprows=1,
-            )
-
-        else:
-            raise_error(
-                msg=(
-                    "Unknown warp / transformation file extension: "
-                    f"{warp_file_ext}"
-                ),
-                klass=RuntimeError,
-            )
-
-        # Delete tempdir
-        WorkDirManager().delete_tempdir(tempdir)
-
-    return seeds, labels
-
-
-def load_coordinates(name: str) -> Tuple[ArrayLike, List[str], str]:
-    """Load coordinates.
-
-    Parameters
-    ----------
-    name : str
-        The name of the coordinates.
-
-    Returns
-    -------
-    numpy.ndarray
-        The coordinates.
-    list of str
-        The names of the VOIs.
-    str
-        The space of the coordinates.
-
-    Raises
-    ------
-    ValueError
-        If ``name`` is invalid.
-
-    """
-    # Check for valid coordinates name
-    if name not in _available_coordinates:
-        raise_error(
-            f"Coordinates {name} not found. "
-            f"Valid options are: {list_coordinates()}"
-        )
-    # Load coordinates
-    t_coord = _available_coordinates[name]
-    if isinstance(t_coord.get("path"), Path):
-        # Load via pandas
-        df_coords = pd.read_csv(t_coord["path"], sep="\t", header=None)
-        coords = df_coords.iloc[:, [0, 1, 2]].to_numpy()
-        names = list(df_coords.iloc[:, [3]].values[:, 0])
-    else:
-        coords = t_coord["coords"]
-        coords = typing.cast(ArrayLike, coords)
-        names = t_coord["voi_names"]
-        names = typing.cast(List[str], names)
-
-    return coords, names, t_coord["space"]
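
Note: the module deleted above provided the module-level coordinates API in 0.0.6.dev154; per the file listing, equivalent functionality now lives under the new junifer/data/coordinates/ subpackage. Below is a minimal sketch of how the removed API was typically called, assuming the old (dev154) import path; the "MyVOIs" name, VOI labels, and coordinate values are made up for illustration only.

import numpy as np

from junifer.data.coordinates import (
    list_coordinates,
    load_coordinates,
    register_coordinates,
)

# Register a custom set of VOIs: a 2D array with one row per VOI and
# x, y, z columns (hypothetical name and values).
register_coordinates(
    name="MyVOIs",
    coordinates=np.array([[0.0, 0.0, 0.0], [10.0, -20.0, 30.0]]),
    voi_names=["voi_a", "voi_b"],
    space="MNI",
)

# Lists built-in coordinates (CogAC, Power2011, ...) plus the new entry.
print(list_coordinates())

# Returns the coordinates array, the VOI names, and the space.
coords, voi_names, space = load_coordinates("MyVOIs")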