junifer 0.0.7.dev18__py3-none-any.whl → 0.0.7.dev43__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- junifer/_version.py +2 -2
- junifer/api/decorators.py +0 -1
- junifer/api/functions.py +1 -2
- junifer/api/queue_context/gnu_parallel_local_adapter.py +4 -7
- junifer/api/queue_context/htcondor_adapter.py +6 -10
- junifer/cli/tests/test_parser.py +66 -0
- junifer/data/_dispatch.py +5 -5
- junifer/data/coordinates/_ants_coordinates_warper.py +1 -1
- junifer/data/coordinates/_coordinates.py +1 -1
- junifer/data/coordinates/_fsl_coordinates_warper.py +1 -1
- junifer/data/coordinates/tests/test_coordinates.py +38 -16
- junifer/data/masks/_ants_mask_warper.py +1 -1
- junifer/data/masks/_fsl_mask_warper.py +1 -1
- junifer/data/masks/tests/test_masks.py +66 -33
- junifer/data/parcellations/_ants_parcellation_warper.py +1 -1
- junifer/data/parcellations/_fsl_parcellation_warper.py +1 -1
- junifer/data/parcellations/_parcellations.py +7 -7
- junifer/data/parcellations/tests/test_parcellations.py +162 -76
- junifer/data/tests/test_data_utils.py +0 -1
- junifer/data/utils.py +1 -1
- junifer/datagrabber/aomic/id1000.py +6 -0
- junifer/datagrabber/aomic/piop1.py +4 -3
- junifer/datagrabber/aomic/piop2.py +4 -3
- junifer/datagrabber/pattern_datalad.py +0 -1
- junifer/datagrabber/pattern_validation_mixin.py +0 -1
- junifer/datagrabber/tests/test_dmcc13_benchmark.py +4 -8
- junifer/markers/base.py +3 -3
- junifer/markers/brainprint.py +5 -5
- junifer/markers/complexity/multiscale_entropy_auc.py +3 -3
- junifer/markers/complexity/range_entropy.py +3 -3
- junifer/markers/complexity/sample_entropy.py +3 -3
- junifer/markers/falff/falff_parcels.py +2 -6
- junifer/markers/falff/falff_spheres.py +2 -6
- junifer/markers/functional_connectivity/functional_connectivity_base.py +0 -1
- junifer/markers/functional_connectivity/tests/test_functional_connectivity_parcels.py +2 -1
- junifer/markers/functional_connectivity/tests/test_functional_connectivity_spheres.py +2 -1
- junifer/markers/reho/_afni_reho.py +1 -1
- junifer/markers/reho/reho_base.py +0 -1
- junifer/markers/reho/reho_parcels.py +0 -1
- junifer/markers/reho/reho_spheres.py +0 -1
- junifer/markers/temporal_snr/temporal_snr_base.py +0 -1
- junifer/markers/tests/test_markers_base.py +0 -1
- junifer/onthefly/_brainprint.py +3 -3
- junifer/onthefly/read_transform.py +1 -2
- junifer/onthefly/tests/test_read_transform.py +0 -1
- junifer/pipeline/tests/test_marker_collection.py +2 -1
- junifer/pipeline/workdir_manager.py +1 -2
- junifer/preprocess/confounds/fmriprep_confound_remover.py +1 -1
- junifer/preprocess/confounds/tests/test_fmriprep_confound_remover.py +0 -1
- junifer/preprocess/smoothing/tests/test_smoothing.py +0 -1
- junifer/preprocess/warping/_ants_warper.py +2 -3
- junifer/preprocess/warping/_fsl_warper.py +1 -1
- junifer/preprocess/warping/space_warper.py +4 -2
- junifer/storage/pandas_base.py +3 -1
- junifer/storage/sqlite.py +3 -8
- junifer/storage/tests/test_pandas_base.py +6 -3
- junifer/storage/tests/test_storage_base.py +2 -1
- junifer/utils/logging.py +38 -128
- junifer/utils/tests/test_logging.py +12 -4
- {junifer-0.0.7.dev18.dist-info → junifer-0.0.7.dev43.dist-info}/METADATA +3 -2
- {junifer-0.0.7.dev18.dist-info → junifer-0.0.7.dev43.dist-info}/RECORD +66 -89
- junifer/data/coordinates/VOIs/meta/AutobiographicalMemory_VOIs.txt +0 -23
- junifer/data/coordinates/VOIs/meta/CogAC_VOIs.txt +0 -19
- junifer/data/coordinates/VOIs/meta/CogAR_VOIs.txt +0 -8
- junifer/data/coordinates/VOIs/meta/DMNBuckner_VOIs.txt +0 -6
- junifer/data/coordinates/VOIs/meta/Dosenbach2010_MNI_VOIs.txt +0 -160
- junifer/data/coordinates/VOIs/meta/Empathy_VOIs.txt +0 -22
- junifer/data/coordinates/VOIs/meta/Motor_VOIs.txt +0 -10
- junifer/data/coordinates/VOIs/meta/MultiTask_VOIs.txt +0 -9
- junifer/data/coordinates/VOIs/meta/PhysioStress_VOIs.txt +0 -18
- junifer/data/coordinates/VOIs/meta/Power2011_MNI_VOIs.txt +0 -264
- junifer/data/coordinates/VOIs/meta/Power2013_MNI_VOIs.tsv +0 -264
- junifer/data/coordinates/VOIs/meta/Rew_VOIs.txt +0 -25
- junifer/data/coordinates/VOIs/meta/Somatosensory_VOIs.txt +0 -10
- junifer/data/coordinates/VOIs/meta/ToM_VOIs.txt +0 -15
- junifer/data/coordinates/VOIs/meta/VigAtt_VOIs.txt +0 -16
- junifer/data/coordinates/VOIs/meta/WM_VOIs.txt +0 -23
- junifer/data/coordinates/VOIs/meta/eMDN_VOIs.txt +0 -17
- junifer/data/coordinates/VOIs/meta/eSAD_VOIs.txt +0 -12
- junifer/data/coordinates/VOIs/meta/extDMN_VOIs.txt +0 -16
- junifer/data/masks/ukb/UKB_15K_GM_template.nii.gz +0 -0
- junifer/data/masks/vickery-patil/CAT12_IXI555_MNI152_TMP_GS_GMprob0.2_clean.nii.gz +0 -0
- junifer/data/masks/vickery-patil/CAT12_IXI555_MNI152_TMP_GS_GMprob0.2_clean_3mm.nii.gz +0 -0
- junifer/data/masks/vickery-patil/GMprob0.2_cortex_3mm_NA_rm.nii.gz +0 -0
- {junifer-0.0.7.dev18.dist-info → junifer-0.0.7.dev43.dist-info}/WHEEL +0 -0
- {junifer-0.0.7.dev18.dist-info → junifer-0.0.7.dev43.dist-info}/entry_points.txt +0 -0
- {junifer-0.0.7.dev18.dist-info → junifer-0.0.7.dev43.dist-info}/licenses/AUTHORS.rst +0 -0
- {junifer-0.0.7.dev18.dist-info → junifer-0.0.7.dev43.dist-info}/licenses/LICENSE.md +0 -0
- {junifer-0.0.7.dev18.dist-info → junifer-0.0.7.dev43.dist-info}/top_level.txt +0 -0
junifer/_version.py
CHANGED
@@ -17,5 +17,5 @@ __version__: str
 __version_tuple__: VERSION_TUPLE
 version_tuple: VERSION_TUPLE

-__version__ = version = '0.0.7.dev18'
-__version_tuple__ = version_tuple = (0, 0, 7, 'dev18')
+__version__ = version = '0.0.7.dev43'
+__version_tuple__ = version_tuple = (0, 0, 7, 'dev43')

junifer/api/decorators.py
CHANGED
junifer/api/functions.py
CHANGED
@@ -294,8 +294,7 @@ def queue(
     valid_kind = ["HTCondor", "GNUParallelLocal"]
     if kind not in valid_kind:
         raise_error(
-            f"Invalid value for `kind`: {kind}, "
-            f"must be one of {valid_kind}"
+            f"Invalid value for `kind`: {kind}, must be one of {valid_kind}"
         )

     # Create a folder within the CWD to store the job files / config

junifer/api/queue_context/gnu_parallel_local_adapter.py
CHANGED
@@ -218,8 +218,7 @@ class GnuParallelLocalAdapter(QueueContextAdapter):
         # Copy executable if not local
         if hasattr(self, "_exec_path"):
             logger.info(
-                f"Copying {self._executable} to "
-                f"{self._exec_path.resolve()!s}"
+                f"Copying {self._executable} to {self._exec_path.resolve()!s}"
             )
             shutil.copy(
                 src=Path(__file__).parent.parent / "res" / self._executable,
@@ -235,15 +234,14 @@ class GnuParallelLocalAdapter(QueueContextAdapter):
         self._elements_file_path.write_text(textwrap.dedent(self.elements()))
         # Create pre run
         logger.info(
-            f"Writing {self._pre_run_path.name} to "
-            f"{self._job_dir.resolve()!s}"
+            f"Writing {self._pre_run_path.name} to {self._job_dir.resolve()!s}"
         )
         self._pre_run_path.touch()
         self._pre_run_path.write_text(textwrap.dedent(self.pre_run()))
         make_executable(self._pre_run_path)
         # Create run
         logger.info(
-            f"Writing {self._run_path.name} to " f"{self._job_dir.resolve()!s}"
+            f"Writing {self._run_path.name} to {self._job_dir.resolve()!s}"
         )
         self._run_path.touch()
         self._run_path.write_text(textwrap.dedent(self.run()))
@@ -258,8 +256,7 @@ class GnuParallelLocalAdapter(QueueContextAdapter):
         make_executable(self._pre_collect_path)
         # Create collect
         logger.info(
-            f"Writing {self._collect_path.name} to "
-            f"{self._job_dir.resolve()!s}"
+            f"Writing {self._collect_path.name} to {self._job_dir.resolve()!s}"
         )
         self._collect_path.touch()
         self._collect_path.write_text(textwrap.dedent(self.collect()))

junifer/api/queue_context/htcondor_adapter.py
CHANGED
@@ -264,9 +264,7 @@ class HTCondorAdapter(QueueContextAdapter):
         )

         junifer_collect_args = (
-            "collect "
-            f"{self._yaml_config_path.resolve()!s} "
-            f"{verbose_args}"
+            f"collect {self._yaml_config_path.resolve()!s} {verbose_args}"
         )
         log_dir_prefix = f"{self._log_dir.resolve()!s}/junifer_collect"
         fixed = (
@@ -316,7 +314,7 @@ class HTCondorAdapter(QueueContextAdapter):
                 "$DAG_STATUS\n"
             )
         elif self._collect == "on_success_only":
-            var += f"JOB collect {self._submit_collect_path}\n" "PARENT "
+            var += f"JOB collect {self._submit_collect_path}\nPARENT "
             for idx, _ in enumerate(self._elements):
                 var += f"run{idx} "
             var += "CHILD collect\n"
@@ -328,14 +326,13 @@ class HTCondorAdapter(QueueContextAdapter):
         logger.info("Creating HTCondor job")
         # Create logs
         logger.info(
-            f"Creating logs directory under " f"{self._job_dir.resolve()!s}"
+            f"Creating logs directory under {self._job_dir.resolve()!s}"
         )
         self._log_dir.mkdir(exist_ok=True, parents=True)
         # Copy executable if not local
         if hasattr(self, "_exec_path"):
             logger.info(
-                f"Copying {self._executable} to "
-                f"{self._exec_path.resolve()!s}"
+                f"Copying {self._executable} to {self._exec_path.resolve()!s}"
             )
             shutil.copy(
                 src=Path(__file__).parent.parent / "res" / self._executable,
@@ -344,8 +341,7 @@ class HTCondorAdapter(QueueContextAdapter):
             make_executable(self._exec_path)
         # Create pre run
         logger.info(
-            f"Writing {self._pre_run_path.name} to "
-            f"{self._job_dir.resolve()!s}"
+            f"Writing {self._pre_run_path.name} to {self._job_dir.resolve()!s}"
         )
         self._pre_run_path.touch()
         self._pre_run_path.write_text(textwrap.dedent(self.pre_run()))
@@ -374,7 +370,7 @@ class HTCondorAdapter(QueueContextAdapter):
         self._submit_collect_path.write_text(textwrap.dedent(self.collect()))
         # Create DAG
         logger.debug(
-            f"Writing {self._dag_path.name} to " f"{self._job_dir.resolve()!s}"
+            f"Writing {self._dag_path.name} to {self._job_dir.resolve()!s}"
         )
         self._dag_path.touch()
         self._dag_path.write_text(textwrap.dedent(self.dag()))

junifer/cli/tests/test_parser.py
CHANGED
@@ -18,6 +18,23 @@ def test_parse_yaml_failure() -> None:
         parse_yaml("foo.yaml")


+def test_parse_yaml_empty_elements_failure(tmp_path: Path) -> None:
+    """Test YAML parsing with empty elements failure.
+
+    Parameters
+    ----------
+    tmp_path : pathlib.Path
+        The path to the test directory.
+
+    """
+    # Write test file
+    fname = tmp_path / "test_parse_yaml_empty_elements_failure.yaml"
+    fname.write_text("elements:")
+    # Check test file
+    with pytest.raises(ValueError, match="elements key was defined"):
+        parse_yaml(fname)
+
+
 def test_parse_yaml_success(tmp_path: Path) -> None:
     """Test YAML parsing success.

@@ -159,6 +176,41 @@ def test_parse_yaml_absolute_path(tmp_path: Path) -> None:
     parse_yaml(yaml_fname)


+def test_parse_yaml_multi_module_deps(tmp_path: Path) -> None:
+    """Test YAML parsing with multi-module import with deps.
+
+    Parameters
+    ----------
+    tmp_path : pathlib.Path
+        The path to the test directory.
+
+    """
+    t_tmp_path = tmp_path / "test_with_multi_module"
+
+    # Write .py to include
+    py_path = t_tmp_path / "external"
+    py_path.mkdir(exist_ok=True, parents=True)
+    py_fname_1 = py_path / "first.py"
+    py_fname_1.write_text(
+        "import numpy as np\nfrom second import hej\n"
+        "def junifer_module_deps(): return ['second.py']\n"
+    )
+    py_fname_2 = py_path / "second.py"
+    py_fname_2.write_text("def hej(): print('hej')\n")
+
+    # Write yaml
+    yaml_path = t_tmp_path / "yamls"
+    yaml_path.mkdir(exist_ok=True, parents=True)
+    yaml_fname = yaml_path / "test_parse_yaml_multi_module.yaml"
+
+    yaml_fname.write_text(
+        "foo: bar\nwith:\n - ../external/first.py\n - scipy\n"
+    )
+
+    # Check test file
+    parse_yaml(yaml_fname)
+
+
 def test_parse_storage_uri_relative(tmp_path: Path) -> None:
     """Test YAML parsing with storage and relative URI.

@@ -212,3 +264,17 @@ def test_parse_storage_uri_relative(tmp_path: Path) -> None:
     assert "foo" in contents
     assert contents["foo"] == "bar"
     assert "storage" in contents
+
+
+def test_parse_yaml_queue_venv_relative(tmp_path: Path) -> None:
+    """Test YAML parsing with relative venv queue.
+
+    Parameters
+    ----------
+    tmp_path : pathlib.Path
+        The path to the test directory.
+
+    """
+    fname = tmp_path / "test_parse_yaml_queue_venv_relative.yaml"
+    fname.write_text("queue:\n  env:\n    kind: venv\n    name: .venv\n")
+    _ = parse_yaml(fname)

junifer/data/_dispatch.py
CHANGED
@@ -95,7 +95,7 @@ def get_data(
             target_data=target_data,
             extra_input=extra_input,
         )
-    else:
+    else:  # pragma: no cover
         raise_error(f"Unknown data kind: {kind}")


@@ -125,7 +125,7 @@ def list_data(kind: str) -> list[str]:
         return ParcellationRegistry().list
     elif kind == "mask":
         return MaskRegistry().list
-    else:
+    else:  # pragma: no cover
         raise_error(f"Unknown data kind: {kind}")


@@ -172,7 +172,7 @@ def load_data(
         return ParcellationRegistry().load(name=name, **kwargs)
     elif kind == "mask":
         return MaskRegistry().load(name=name, **kwargs)
-    else:
+    else:  # pragma: no cover
         raise_error(f"Unknown data kind: {kind}")


@@ -217,7 +217,7 @@ def register_data(
         return MaskRegistry().register(
             name=name, space=space, overwrite=overwrite, **kwargs
         )
-    else:
+    else:  # pragma: no cover
         raise_error(f"Unknown data kind: {kind}")


@@ -244,5 +244,5 @@ def deregister_data(kind: str, name: str) -> None:
         return ParcellationRegistry().deregister(name=name)
     elif kind == "mask":
         return MaskRegistry().deregister(name=name)
-    else:
+    else:  # pragma: no cover
         raise_error(f"Unknown data kind: {kind}")

junifer/data/coordinates/_coordinates.py
CHANGED
@@ -339,7 +339,7 @@ class CoordinatesRegistry(BasePipelineDataRegistry, metaclass=Singleton):
         seeds, labels, _ = self.load(name=coords)

         # Transform coordinate if target data is native
-        if target_data["space"] == "native":
+        if target_data["space"] == "native":  # pragma: no cover
             # Check for extra inputs
             if extra_input is None:
                 raise_error(

junifer/data/coordinates/tests/test_coordinates.py
CHANGED
@@ -8,7 +8,13 @@ import numpy as np
 import pytest
 from numpy.testing import assert_array_equal

-from junifer.data import
+from junifer.data import (
+    deregister_data,
+    get_data,
+    list_data,
+    load_data,
+    register_data,
+)
 from junifer.datareader import DefaultDataReader
 from junifer.testing.datagrabbers import OasisVBMTestingDataGrabber

@@ -16,7 +22,8 @@ from junifer.testing.datagrabbers import OasisVBMTestingDataGrabber
 def test_register_built_in_check() -> None:
     """Test coordinates registration check for built-in coordinates."""
     with pytest.raises(ValueError, match=r"built-in"):
-
+        register_data(
+            kind="coordinates",
             name="DMNBuckner",
             coordinates=np.zeros(2),
             voi_names=["1", "2"],
@@ -26,14 +33,16 @@ def test_register_built_in_check() -> None:

 def test_register_overwrite() -> None:
     """Test coordinates registration check for overwriting."""
-
+    register_data(
+        kind="coordinates",
         name="MyList",
         coordinates=np.zeros((2, 3)),
         voi_names=["roi1", "roi2"],
         space="MNI",
     )
     with pytest.raises(ValueError, match=r"already registered"):
-
+        register_data(
+            kind="coordinates",
             name="MyList",
             coordinates=np.ones((2, 3)),
             voi_names=["roi2", "roi3"],
@@ -41,7 +50,8 @@ def test_register_overwrite() -> None:
             overwrite=False,
         )

-
+    register_data(
+        kind="coordinates",
         name="MyList",
         coordinates=np.ones((2, 3)),
         voi_names=["roi2", "roi3"],
@@ -49,7 +59,7 @@ def test_register_overwrite() -> None:
         overwrite=True,
     )

-    coord, names, space =
+    coord, names, space = load_data(kind="coordinates", name="MyList")
     assert_array_equal(coord, np.ones((2, 3)))
     assert names == ["roi2", "roi3"]
     assert space == "MNI"
@@ -58,7 +68,8 @@ def test_register_overwrite() -> None:
 def test_register_valid_input() -> None:
     """Test coordinates registration check for valid input."""
     with pytest.raises(TypeError, match=r"numpy.ndarray"):
-
+        register_data(
+            kind="coordinates",
             name="MyList",
             coordinates=[1, 2],
             voi_names=["roi1", "roi2"],
@@ -66,7 +77,8 @@ def test_register_valid_input() -> None:
             overwrite=True,
         )
     with pytest.raises(ValueError, match=r"2D array"):
-
+        register_data(
+            kind="coordinates",
             name="MyList",
             coordinates=np.zeros((2, 3, 4)),
             voi_names=["roi1", "roi2"],
@@ -75,7 +87,8 @@ def test_register_valid_input() -> None:
         )

     with pytest.raises(ValueError, match=r"3 values"):
-
+        register_data(
+            kind="coordinates",
             name="MyList",
             coordinates=np.zeros((2, 4)),
             voi_names=["roi1", "roi2"],
@@ -83,7 +96,8 @@ def test_register_valid_input() -> None:
             overwrite=True,
         )
     with pytest.raises(ValueError, match=r"voi_names"):
-
+        register_data(
+            kind="coordinates",
             name="MyList",
             coordinates=np.zeros((2, 3)),
             voi_names=["roi1", "roi2", "roi3"],
@@ -95,13 +109,13 @@ def test_register_valid_input() -> None:
 def test_list() -> None:
     """Test listing of available coordinates."""
     assert {"DMNBuckner", "MultiTask", "VigAtt", "WM"}.issubset(
-        set(
+        set(list_data(kind="coordinates"))
     )


 def test_load() -> None:
     """Test loading coordinates from file."""
-    coord, names, space =
+    coord, names, space = load_data(kind="coordinates", name="DMNBuckner")
     assert coord.shape == (6, 3)  # type: ignore
     assert names == ["PCC", "MPFC", "lAG", "rAG", "lHF", "rHF"]
     assert space == "MNI"
@@ -110,7 +124,7 @@ def test_load() -> None:
 def test_load_nonexisting() -> None:
     """Test loading coordinates that not exist."""
     with pytest.raises(ValueError, match=r"not found"):
-
+        load_data(kind="coordinates", name="NonExisting")


 def test_get() -> None:
@@ -121,11 +135,19 @@ def test_get() -> None:
     element_data = reader.fit_transform(element)
     vbm_gm = element_data["VBM_GM"]
     # Get tailored coordinates
-    tailored_coords, tailored_labels =
-
+    tailored_coords, tailored_labels = get_data(
+        kind="coordinates", names="DMNBuckner", target_data=vbm_gm
+    )
     # Get raw coordinates
-    raw_coords, raw_labels, _ =
+    raw_coords, raw_labels, _ = load_data(
+        kind="coordinates", name="DMNBuckner"
+    )
     # Both tailored and raw should be same for now
     assert_array_equal(tailored_coords, raw_coords)
     assert tailored_labels == raw_labels
+
+
+def test_deregister() -> None:
+    """Test coordinates deregistration."""
+    deregister_data(kind="coordinates", name="MyList")
+    assert "MyList" not in list_data(kind="coordinates")