junifer 0.0.4.dev733__py3-none-any.whl → 0.0.4.dev782__py3-none-any.whl

This diff shows the content changes between two package versions that were publicly released to one of the supported registries. It is provided for informational purposes only and reflects the versions as they appear in their respective public registries.
Files changed (37)
  1. junifer/_version.py +2 -2
  2. junifer/api/tests/data/partly_cloudy_agg_mean_tian.yml +16 -0
  3. junifer/api/tests/test_cli.py +7 -13
  4. junifer/api/tests/test_functions.py +156 -102
  5. junifer/data/coordinates.py +1 -1
  6. junifer/data/masks.py +213 -54
  7. junifer/data/parcellations.py +91 -42
  8. junifer/data/template_spaces.py +33 -6
  9. junifer/data/tests/test_masks.py +127 -62
  10. junifer/data/tests/test_parcellations.py +66 -49
  11. junifer/data/tests/test_template_spaces.py +42 -7
  12. junifer/datagrabber/aomic/id1000.py +3 -0
  13. junifer/datagrabber/aomic/piop1.py +3 -0
  14. junifer/datagrabber/aomic/piop2.py +3 -0
  15. junifer/datagrabber/dmcc13_benchmark.py +3 -0
  16. junifer/datagrabber/hcp1200/hcp1200.py +3 -0
  17. junifer/markers/falff/tests/test_falff_parcels.py +3 -3
  18. junifer/markers/falff/tests/test_falff_spheres.py +3 -3
  19. junifer/markers/functional_connectivity/tests/test_crossparcellation_functional_connectivity.py +46 -45
  20. junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_parcels.py +34 -41
  21. junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_spheres.py +40 -56
  22. junifer/markers/functional_connectivity/tests/test_functional_connectivity_parcels.py +62 -74
  23. junifer/markers/functional_connectivity/tests/test_functional_connectivity_spheres.py +99 -89
  24. junifer/markers/reho/tests/test_reho_parcels.py +17 -11
  25. junifer/markers/temporal_snr/tests/test_temporal_snr_parcels.py +38 -37
  26. junifer/markers/temporal_snr/tests/test_temporal_snr_spheres.py +34 -38
  27. junifer/markers/tests/test_collection.py +38 -37
  28. junifer/markers/tests/test_ets_rss.py +29 -41
  29. junifer/markers/tests/test_parcel_aggregation.py +600 -511
  30. junifer/markers/tests/test_sphere_aggregation.py +209 -163
  31. {junifer-0.0.4.dev733.dist-info → junifer-0.0.4.dev782.dist-info}/METADATA +1 -1
  32. {junifer-0.0.4.dev733.dist-info → junifer-0.0.4.dev782.dist-info}/RECORD +37 -36
  33. {junifer-0.0.4.dev733.dist-info → junifer-0.0.4.dev782.dist-info}/AUTHORS.rst +0 -0
  34. {junifer-0.0.4.dev733.dist-info → junifer-0.0.4.dev782.dist-info}/LICENSE.md +0 -0
  35. {junifer-0.0.4.dev733.dist-info → junifer-0.0.4.dev782.dist-info}/WHEEL +0 -0
  36. {junifer-0.0.4.dev733.dist-info → junifer-0.0.4.dev782.dist-info}/entry_points.txt +0 -0
  37. {junifer-0.0.4.dev733.dist-info → junifer-0.0.4.dev782.dist-info}/top_level.txt +0 -0
junifer/data/tests/test_masks.py

@@ -5,16 +5,16 @@
  # Synchon Mandal <s.mandal@fz-juelich.de>
  # License: AGPL

+ import socket
  from pathlib import Path
  from typing import Callable, Dict, List, Optional, Union

+ import nibabel as nib
  import numpy as np
  import pytest
- from nilearn.datasets import fetch_icbm152_brain_gm_mask
  from nilearn.image import resample_to_img
  from nilearn.masking import (
  compute_background_mask,
- compute_brain_mask,
  compute_epi_mask,
  intersect_masks,
  )
@@ -23,18 +23,96 @@ from numpy.testing import assert_array_almost_equal, assert_array_equal
  from junifer.data.masks import (
  _available_masks,
  _load_vickery_patil_mask,
+ compute_brain_mask,
  get_mask,
  list_masks,
  load_mask,
  register_mask,
  )
+ from junifer.datagrabber import DMCC13Benchmark
  from junifer.datareader import DefaultDataReader
  from junifer.testing.datagrabbers import (
  OasisVBMTestingDataGrabber,
+ PartlyCloudyTestingDataGrabber,
  SPMAuditoryTestingDataGrabber,
  )


+ @pytest.mark.parametrize(
+ "mask_type, threshold",
+ [
+ ("brain", 0.2),
+ ("brain", 0.5),
+ ("brain", 0.8),
+ ("gm", 0.2),
+ ("gm", 0.5),
+ ("gm", 0.8),
+ ("wm", 0.2),
+ ("wm", 0.5),
+ ("wm", 0.8),
+ ],
+ )
+ def test_compute_brain_mask(mask_type: str, threshold: float) -> None:
+ """Test compute_brain_mask().
+
+ Parameters
+ ----------
+ mask_type : str
+ The parametrized mask type.
+ threshold : float
+ The parametrized threshold.
+
+ """
+ with PartlyCloudyTestingDataGrabber() as dg:
+ element_data = DefaultDataReader().fit_transform(dg["sub-01"])
+ mask = compute_brain_mask(
+ target_data=element_data["BOLD"],
+ extra_input=None,
+ mask_type=mask_type,
+ )
+ assert isinstance(mask, nib.Nifti1Image)
+
+
+ @pytest.mark.skipif(
+ socket.gethostname() != "juseless",
+ reason="only for juseless",
+ )
+ @pytest.mark.parametrize(
+ "mask_type",
+ [
+ "brain",
+ "gm",
+ "wm",
+ ],
+ )
+ def test_compute_brain_mask_for_native(mask_type: str) -> None:
+ """Test compute_brain_mask().
+
+ Parameters
+ ----------
+ mask_type : str
+ The parametrized mask type.
+
+ """
+ with DMCC13Benchmark(
+ types=["BOLD"],
+ sessions=["wave1bas"],
+ tasks=["Rest"],
+ phase_encodings=["AP"],
+ runs=["1"],
+ native_t1w=True,
+ ) as dg:
+ element_data = DefaultDataReader().fit_transform(
+ dg[("f1031ax", "wave1bas", "Rest", "AP", "1")]
+ )
+ mask = compute_brain_mask(
+ target_data=element_data["BOLD"],
+ extra_input=None,
+ mask_type=mask_type,
+ )
+ assert isinstance(mask, nib.Nifti1Image)
+
+
  def test_register_mask_built_in_check() -> None:
  """Test mask registration check for built-in masks."""
  with pytest.raises(ValueError, match=r"built-in mask"):
@@ -215,18 +293,19 @@ def test_vickery_patil_error() -> None:

  def test_get_mask() -> None:
  """Test the get_mask function."""
- reader = DefaultDataReader()
  with OasisVBMTestingDataGrabber() as dg:
- input = dg["sub-01"]
- input = reader.fit_transform(input)
- vbm_gm = input["VBM_GM"]
+ element_data = DefaultDataReader().fit_transform(dg["sub-01"])
+ vbm_gm = element_data["VBM_GM"]
  vbm_gm_img = vbm_gm["data"]
- mask = get_mask(masks="GM_prob0.2", target_data=vbm_gm)
+ mask = get_mask(masks="compute_brain_mask", target_data=vbm_gm)

  assert mask.shape == vbm_gm_img.shape
  assert_array_equal(mask.affine, vbm_gm_img.affine)

- raw_mask_img, _, _ = load_mask("GM_prob0.2", resolution=1.5)
+ raw_mask_callable, _, _ = load_mask(
+ "compute_brain_mask", resolution=1.5
+ )
+ raw_mask_img = raw_mask_callable(vbm_gm) # type: ignore
  res_mask_img = resample_to_img(
  raw_mask_img,
  vbm_gm_img,
@@ -245,13 +324,11 @@ def test_mask_callable() -> None:
  _available_masks["identity"] = {
  "family": "Callable",
  "func": ident,
- "space": "MNI",
+ "space": "MNI152Lin",
  }
- reader = DefaultDataReader()
  with OasisVBMTestingDataGrabber() as dg:
- input = dg["sub-01"]
- input = reader.fit_transform(input)
- vbm_gm = input["VBM_GM"]
+ element_data = DefaultDataReader().fit_transform(dg["sub-01"])
+ vbm_gm = element_data["VBM_GM"]
  vbm_gm_img = vbm_gm["data"]
  mask = get_mask(masks="identity", target_data=vbm_gm)

@@ -262,11 +339,9 @@

  def test_get_mask_errors() -> None:
  """Test passing wrong parameters to get_mask."""
- reader = DefaultDataReader()
  with OasisVBMTestingDataGrabber() as dg:
- input = dg["sub-01"]
- input = reader.fit_transform(input)
- vbm_gm = input["VBM_GM"]
+ element_data = DefaultDataReader().fit_transform(dg["sub-01"])
+ vbm_gm = element_data["VBM_GM"]
  # Test wrong masks definitions (more than one key per dict)
  with pytest.raises(ValueError, match=r"only one key"):
  get_mask(masks={"GM_prob0.2": {}, "Other": {}}, target_data=vbm_gm)
@@ -286,7 +361,8 @@ def test_get_mask_errors() -> None:
  ValueError, match=r"parameters to the intersection"
  ):
  get_mask(
- masks=["GM_prob0.2", {"threshold": 1}], target_data=vbm_gm
+ masks=["compute_brain_mask", {"threshold": 1}],
+ target_data=vbm_gm,
  )

  # Test "inherited" masks errors
@@ -310,19 +386,20 @@ def test_get_mask_errors() -> None:
  masks="inherit", target_data=vbm_gm, extra_input=extra_input
  )

+ # Block fetch_icbm152_brain_gm_mask space transformation
+ with pytest.raises(RuntimeError, match="prohibited"):
+ get_mask(
+ masks="fetch_icbm152_brain_gm_mask",
+ target_data=vbm_gm,
+ extra_input=extra_input,
+ )
+

  @pytest.mark.parametrize(
  "mask_name,function,params,resample",
  [
- ("compute_brain_mask", compute_brain_mask, {"threshold": 0.2}, False),
  ("compute_background_mask", compute_background_mask, None, False),
  ("compute_epi_mask", compute_epi_mask, None, False),
- (
- "fetch_icbm152_brain_gm_mask",
- fetch_icbm152_brain_gm_mask,
- None,
- True,
- ),
  ],
  )
  def test_nilearn_compute_masks(
@@ -345,11 +422,9 @@
  Whether to resample the mask to the target data.

  """
- reader = DefaultDataReader()
  with SPMAuditoryTestingDataGrabber() as dg:
- input = dg["sub001"]
- input = reader.fit_transform(input)
- bold = input["BOLD"]
+ element_data = DefaultDataReader().fit_transform(dg["sub001"])
+ bold = element_data["BOLD"]
  bold_img = bold["data"]

  if params is None:
@@ -378,27 +453,30 @@

  def test_get_mask_inherit() -> None:
  """Test using the inherit mask functionality."""
- reader = DefaultDataReader()
  with SPMAuditoryTestingDataGrabber() as dg:
- input = dg["sub001"]
- input = reader.fit_transform(input)
+ element_data = DefaultDataReader().fit_transform(dg["sub001"])
  # Compute brain mask using nilearn
- gm_mask = compute_brain_mask(input["BOLD"]["data"], threshold=0.2)
+ gm_mask = compute_brain_mask(element_data["BOLD"], threshold=0.2)

  # Get mask using the compute_brain_mask function
  mask1 = get_mask(
  masks={"compute_brain_mask": {"threshold": 0.2}},
- target_data=input["BOLD"],
+ target_data=element_data["BOLD"],
  )

  # Now get the mask using the inherit functionality, passing the
  # computed mask as extra data
  extra_input = {
- "BOLD_MASK": {"data": gm_mask, "space": input["BOLD"]["space"]}
+ "BOLD_MASK": {
+ "data": gm_mask,
+ "space": element_data["BOLD"]["space"],
+ }
  }
- input["BOLD"]["mask_item"] = "BOLD_MASK"
+ element_data["BOLD"]["mask_item"] = "BOLD_MASK"
  mask2 = get_mask(
- masks="inherit", target_data=input["BOLD"], extra_input=extra_input
+ masks="inherit",
+ target_data=element_data["BOLD"],
+ extra_input=extra_input,
  )

  # Both masks should be equal
@@ -408,7 +486,6 @@ def test_get_mask_inherit() -> None:
  @pytest.mark.parametrize(
  "masks,params",
  [
- (["GM_prob0.2", "GM_prob0.2_cortex"], {}),
  (["compute_brain_mask", "compute_background_mask"], {}),
  (["compute_brain_mask", "compute_epi_mask"], {}),
  ],
@@ -426,10 +503,8 @@ def test_get_mask_multiple(
  Parameters to pass to the intersect_masks function.

  """
- reader = DefaultDataReader()
  with SPMAuditoryTestingDataGrabber() as dg:
- input = dg["sub001"]
- input = reader.fit_transform(input)
+ element_data = DefaultDataReader().fit_transform(dg["sub001"])
  if not isinstance(masks, list):
  junifer_masks = [masks]
  else:
@@ -438,10 +513,12 @@
  # Convert params to junifer style (one dict per param)
  junifer_params = [{k: params[k]} for k in params.keys()]
  junifer_masks.extend(junifer_params)
- target_img = input["BOLD"]["data"]
+ target_img = element_data["BOLD"]["data"]
  resolution = np.min(target_img.header.get_zooms()[:3])

- computed = get_mask(masks=junifer_masks, target_data=input["BOLD"])
+ computed = get_mask(
+ masks=junifer_masks, target_data=element_data["BOLD"]
+ )

  masks_names = [
  next(iter(x.keys())) if isinstance(x, dict) else x for x in masks
@@ -464,7 +541,13 @@ def test_get_mask_multiple(
  ]

  for t_func in mask_funcs:
- mask_imgs.append(_available_masks[t_func]["func"](target_img))
+ # Bypass for custom mask
+ if t_func == "compute_brain_mask":
+ mask_imgs.append(
+ _available_masks[t_func]["func"](element_data["BOLD"])
+ )
+ else:
+ mask_imgs.append(_available_masks[t_func]["func"](target_img))

  mask_imgs = [
  resample_to_img(
@@ -478,21 +561,3 @@ def test_get_mask_multiple(

  expected = intersect_masks(mask_imgs, **params)
  assert_array_equal(computed.get_fdata(), expected.get_fdata())
-
-
- def test_get_mask_multiple_incorrect_space() -> None:
- """Test incorrect space error for getting multiple masks."""
- reader = DefaultDataReader()
- with SPMAuditoryTestingDataGrabber() as dg:
- input = dg["sub001"]
- input = reader.fit_transform(input)
-
- with pytest.raises(RuntimeError, match="unable to merge."):
- get_mask(
- masks=[
- "GM_prob0.2",
- "compute_brain_mask",
- "fetch_icbm152_brain_gm_mask",
- ],
- target_data=input["BOLD"],
- )
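
Note on the mask changes above: compute_brain_mask is now junifer's own implementation (imported from junifer.data.masks rather than nilearn) and takes the target data dictionary instead of a raw image. A minimal sketch of the usage exercised by the new tests, assuming junifer and its testing datagrabbers are installed:

    # Sketch based on the test diff above; mask_type follows the
    # parametrized values ("brain", "gm", "wm").
    from junifer.data.masks import compute_brain_mask
    from junifer.datareader import DefaultDataReader
    from junifer.testing.datagrabbers import PartlyCloudyTestingDataGrabber

    with PartlyCloudyTestingDataGrabber() as dg:
        element_data = DefaultDataReader().fit_transform(dg["sub-01"])
        mask = compute_brain_mask(
            target_data=element_data["BOLD"],
            extra_input=None,
            mask_type="gm",
        )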
junifer/data/tests/test_parcellations.py

@@ -30,7 +30,11 @@ from junifer.data.parcellations import (
  register_parcellation,
  )
  from junifer.datareader import DefaultDataReader
- from junifer.testing.datagrabbers import OasisVBMTestingDataGrabber
+ from junifer.pipeline.utils import _check_ants
+ from junifer.testing.datagrabbers import (
+ OasisVBMTestingDataGrabber,
+ PartlyCloudyTestingDataGrabber,
+ )


  def test_register_parcellation_built_in_check() -> None:
@@ -58,7 +62,7 @@ def test_register_parcellation_already_registered() -> None:
  name="testparc",
  parcellation_path="testparc.nii.gz",
  parcels_labels=["1", "2", "3"],
- space="MNI",
+ space="MNI152Lin",
  )
  assert (
  load_parcellation("testparc", path_only=True)[2].name
@@ -71,13 +75,13 @@ def test_register_parcellation_already_registered() -> None:
  name="testparc",
  parcellation_path="testparc.nii.gz",
  parcels_labels=["1", "2", "3"],
- space="MNI",
+ space="MNI152Lin",
  )
  register_parcellation(
  name="testparc",
  parcellation_path="testparc2.nii.gz",
  parcels_labels=["1", "2", "3"],
- space="MNI",
+ space="MNI152Lin",
  overwrite=True,
  )

@@ -100,14 +104,16 @@ def test_parcellation_wrong_labels_values(tmp_path: Path) -> None:
  assert schaefer is not None

  # Test wrong number of labels
- register_parcellation("WrongLabels", schaefer_path, labels[:10], "MNI")
+ register_parcellation(
+ "WrongLabels", schaefer_path, labels[:10], "MNI152Lin"
+ )

  with pytest.raises(ValueError, match=r"has 100 parcels but 10"):
  load_parcellation("WrongLabels")

  # Test wrong number of labels
  register_parcellation(
- "WrongLabels2", schaefer_path, [*labels, "wrong"], "MNI"
+ "WrongLabels2", schaefer_path, [*labels, "wrong"], "MNI152Lin"
  )

  with pytest.raises(ValueError, match=r"has 100 parcels but 101"):
@@ -119,7 +125,9 @@ def test_parcellation_wrong_labels_values(tmp_path: Path) -> None:
  new_schaefer_img = new_img_like(schaefer, schaefer_data)
  nib.save(new_schaefer_img, new_schaefer_path)

- register_parcellation("WrongValues", new_schaefer_path, labels[:-1], "MNI")
+ register_parcellation(
+ "WrongValues", new_schaefer_path, labels[:-1], "MNI152Lin"
+ )
  with pytest.raises(ValueError, match=r"the range [0, 99]"):
  load_parcellation("WrongValues")

@@ -129,7 +137,9 @@ def test_parcellation_wrong_labels_values(tmp_path: Path) -> None:
  new_schaefer_img = new_img_like(schaefer, schaefer_data)
  nib.save(new_schaefer_img, new_schaefer_path)

- register_parcellation("WrongValues2", new_schaefer_path, labels, "MNI")
+ register_parcellation(
+ "WrongValues2", new_schaefer_path, labels, "MNI152Lin"
+ )
  with pytest.raises(ValueError, match=r"the range [0, 100]"):
  load_parcellation("WrongValues2")

@@ -137,13 +147,25 @@ def test_parcellation_wrong_labels_values(tmp_path: Path) -> None:
  @pytest.mark.parametrize(
  "name, parcellation_path, parcels_labels, space, overwrite",
  [
- ("testparc_1", "testparc_1.nii.gz", ["1", "2", "3"], "MNI", True),
- ("testparc_2", "testparc_2.nii.gz", ["1", "2", "6"], "MNI", True),
+ (
+ "testparc_1",
+ "testparc_1.nii.gz",
+ ["1", "2", "3"],
+ "MNI152Lin",
+ True,
+ ),
+ (
+ "testparc_2",
+ "testparc_2.nii.gz",
+ ["1", "2", "6"],
+ "MNI152Lin",
+ True,
+ ),
  (
  "testparc_3",
  Path("testparc_3.nii.gz"),
  ["1", "2", "6"],
- "MNI",
+ "MNI152Lin",
  True,
  ),
  ],
@@ -1172,28 +1194,26 @@ def test_merge_parcellations_3D_multiple_duplicated_labels() -> None:

  def test_get_parcellation_single() -> None:
  """Test tailored single parcellation fetch."""
- reader = DefaultDataReader()
- with OasisVBMTestingDataGrabber() as dg:
- element = dg["sub-01"]
- element_data = reader.fit_transform(element)
- vbm_gm = element_data["VBM_GM"]
- vbm_gm_img = vbm_gm["data"]
+ with PartlyCloudyTestingDataGrabber() as dg:
+ element_data = DefaultDataReader().fit_transform(dg["sub-01"])
+ bold = element_data["BOLD"]
+ bold_img = bold["data"]
  # Get tailored parcellation
  tailored_parcellation, tailored_labels = get_parcellation(
- parcellation=["Schaefer100x7"],
- target_data=vbm_gm,
+ parcellation=["TianxS1x3TxMNInonlinear2009cAsym"],
+ target_data=bold,
  )
  # Check shape and affine with original element data
- assert tailored_parcellation.shape == vbm_gm_img.shape
- assert_array_equal(tailored_parcellation.affine, vbm_gm_img.affine)
+ assert tailored_parcellation.shape == bold_img.shape[:3]
+ assert_array_equal(tailored_parcellation.affine, bold_img.affine)
  # Get raw parcellation
  raw_parcellation, raw_labels, _, _ = load_parcellation(
- "Schaefer100x7",
+ "TianxS1x3TxMNInonlinear2009cAsym",
  resolution=1.5,
  )
  resampled_raw_parcellation = resample_to_img(
  source_img=raw_parcellation,
- target_img=vbm_gm_img,
+ target_img=bold_img,
  interpolation="nearest",
  copy=True,
  )
@@ -1207,36 +1227,34 @@

  def test_get_parcellation_multi_same_space() -> None:
  """Test tailored multi parcellation fetch in same space."""
- reader = DefaultDataReader()
- with OasisVBMTestingDataGrabber() as dg:
- element = dg["sub-01"]
- element_data = reader.fit_transform(element)
- vbm_gm = element_data["VBM_GM"]
- vbm_gm_img = vbm_gm["data"]
+ with PartlyCloudyTestingDataGrabber() as dg:
+ element_data = DefaultDataReader().fit_transform(dg["sub-01"])
+ bold = element_data["BOLD"]
+ bold_img = bold["data"]
  # Get tailored parcellation
  tailored_parcellation, tailored_labels = get_parcellation(
  parcellation=[
- "Schaefer100x7",
- "TianxS2x3TxMNI6thgeneration",
+ "Shen_2015_268",
+ "TianxS1x3TxMNInonlinear2009cAsym",
  ],
- target_data=vbm_gm,
+ target_data=bold,
  )
  # Check shape and affine with original element data
- assert tailored_parcellation.shape == vbm_gm_img.shape
- assert_array_equal(tailored_parcellation.affine, vbm_gm_img.affine)
+ assert tailored_parcellation.shape == bold_img.shape[:3]
+ assert_array_equal(tailored_parcellation.affine, bold_img.affine)
  # Get raw parcellations
  raw_parcellations = []
  raw_labels = []
  parcellations_names = [
- "Schaefer100x7",
- "TianxS2x3TxMNI6thgeneration",
+ "Shen_2015_268",
+ "TianxS1x3TxMNInonlinear2009cAsym",
  ]
  for name in parcellations_names:
  img, labels, _, _ = load_parcellation(name=name, resolution=1.5)
  # Resample raw parcellations
  resampled_img = resample_to_img(
  source_img=img,
- target_img=vbm_gm_img,
+ target_img=bold_img,
  interpolation="nearest",
  copy=True,
  )
@@ -1256,19 +1274,18 @@ def test_get_parcellation_multi_same_space() -> None:
  assert tailored_labels == merged_labels


+ @pytest.mark.skipif(
+ _check_ants() is False, reason="requires ANTs to be in PATH"
+ )
  def test_get_parcellation_multi_different_space() -> None:
  """Test tailored multi parcellation fetch in different space."""
- reader = DefaultDataReader()
  with OasisVBMTestingDataGrabber() as dg:
- element = dg["sub-01"]
- element_data = reader.fit_transform(element)
- vbm_gm = element_data["VBM_GM"]
+ element_data = DefaultDataReader().fit_transform(dg["sub-01"])
  # Get tailored parcellation
- with pytest.raises(RuntimeError, match="unable to merge."):
- get_parcellation(
- parcellation=[
- "Schaefer100x7",
- "SUITxSUIT",
- ],
- target_data=vbm_gm,
- )
+ get_parcellation(
+ parcellation=[
+ "Schaefer100x7",
+ "TianxS1x3TxMNInonlinear2009cAsym",
+ ],
+ target_data=element_data["VBM_GM"],
+ )
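
Note on the parcellation changes above: the tailored-fetch tests now run against 4D BOLD data from the PartlyCloudy testing datagrabber, so the parcellation is matched to the 3D grid of the BOLD image. A condensed sketch of that flow, following the test code:

    # Sketch based on test_get_parcellation_single in the diff above.
    from junifer.data.parcellations import get_parcellation
    from junifer.datareader import DefaultDataReader
    from junifer.testing.datagrabbers import PartlyCloudyTestingDataGrabber

    with PartlyCloudyTestingDataGrabber() as dg:
        element_data = DefaultDataReader().fit_transform(dg["sub-01"])
        bold = element_data["BOLD"]
        parcellation, labels = get_parcellation(
            parcellation=["TianxS1x3TxMNInonlinear2009cAsym"],
            target_data=bold,
        )
        # The tailored parcellation matches the spatial grid of the BOLD run
        assert parcellation.shape == bold["data"].shape[:3]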
junifer/data/tests/test_template_spaces.py

@@ -11,7 +11,10 @@ import pytest

  from junifer.data import get_template, get_xfm
  from junifer.datareader import DefaultDataReader
- from junifer.testing.datagrabbers import OasisVBMTestingDataGrabber
+ from junifer.testing.datagrabbers import (
+ OasisVBMTestingDataGrabber,
+ PartlyCloudyTestingDataGrabber,
+ )


  @pytest.mark.skipif(
@@ -33,15 +36,32 @@ def test_get_xfm(tmp_path: Path) -> None:
  assert isinstance(xfm_path, Path)


- def test_get_template() -> None:
- """Test tailored template image fetch."""
- with OasisVBMTestingDataGrabber() as dg:
+ @pytest.mark.parametrize(
+ "template_type",
+ [
+ "T1w",
+ "brain",
+ "gm",
+ "wm",
+ "csf",
+ ],
+ )
+ def test_get_template(template_type: str) -> None:
+ """Test tailored template image fetch.
+
+ Parameters
+ ----------
+ template_type : str
+ The parametrized template type.
+
+ """
+ with PartlyCloudyTestingDataGrabber() as dg:
  element = dg["sub-01"]
  element_data = DefaultDataReader().fit_transform(element)
- vbm_gm = element_data["VBM_GM"]
+ bold = element_data["BOLD"]
  # Get tailored parcellation
  tailored_template = get_template(
- space=vbm_gm["space"], target_data=vbm_gm
+ space=bold["space"], target_data=bold, template_type=template_type
  )
  assert isinstance(tailored_template, nib.Nifti1Image)

@@ -54,7 +74,22 @@ def test_get_template_invalid_space() -> None:
  vbm_gm = element_data["VBM_GM"]
  # Get tailored parcellation
  with pytest.raises(ValueError, match="Unknown template space:"):
- _ = get_template(space="andromeda", target_data=vbm_gm)
+ get_template(space="andromeda", target_data=vbm_gm)
+
+
+ def test_get_template_invalid_template_type() -> None:
+ """Test invalid template type check for template fetch."""
+ with OasisVBMTestingDataGrabber() as dg:
+ element = dg["sub-01"]
+ element_data = DefaultDataReader().fit_transform(element)
+ vbm_gm = element_data["VBM_GM"]
+ # Get tailored parcellation
+ with pytest.raises(ValueError, match="Unknown template type:"):
+ get_template(
+ space=vbm_gm["space"],
+ target_data=vbm_gm,
+ template_type="xenon",
+ )


  def test_get_template_closest_resolution() -> None:
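
Note on the template changes above: get_template now takes a template_type argument ("T1w", "brain", "gm", "wm" or "csf" per the parametrization), and unknown values raise a ValueError. A minimal sketch following the updated test:

    # Sketch based on test_get_template in the diff above.
    from junifer.data import get_template
    from junifer.datareader import DefaultDataReader
    from junifer.testing.datagrabbers import PartlyCloudyTestingDataGrabber

    with PartlyCloudyTestingDataGrabber() as dg:
        element_data = DefaultDataReader().fit_transform(dg["sub-01"])
        bold = element_data["BOLD"]
        template = get_template(
            space=bold["space"],
            target_data=bold,
            template_type="gm",  # e.g. "xenon" raises "Unknown template type:"
        )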
junifer/datagrabber/aomic/id1000.py

@@ -154,4 +154,7 @@ class DataladAOMICID1000(PatternDataladDataGrabber):
  out["T1w"].update({"space": "native"})
  else:
  out["T1w"].update({"space": "MNI152NLin2009cAsym"})
+ if out.get("Warp"):
+ # Add source space information
+ out["Warp"].update({"src": "MNI152NLin2009cAsym"})
  return out
junifer/datagrabber/aomic/piop1.py

@@ -204,6 +204,9 @@ class DataladAOMICPIOP1(PatternDataladDataGrabber):
  out["T1w"].update({"space": "native"})
  else:
  out["T1w"].update({"space": "MNI152NLin2009cAsym"})
+ if out.get("Warp"):
+ # Add source space information
+ out["Warp"].update({"src": "MNI152NLin2009cAsym"})
  return out

  def get_elements(self) -> List:
junifer/datagrabber/aomic/piop2.py

@@ -204,4 +204,7 @@ class DataladAOMICPIOP2(PatternDataladDataGrabber):
  out["T1w"].update({"space": "native"})
  else:
  out["T1w"].update({"space": "MNI152NLin2009cAsym"})
+ if out.get("Warp"):
+ # Add source space information
+ out["Warp"].update({"src": "MNI152NLin2009cAsym"})
  return out
junifer/datagrabber/dmcc13_benchmark.py

@@ -271,6 +271,9 @@ class DMCC13Benchmark(PatternDataladDataGrabber):
  out["T1w"].update({"space": "native"})
  else:
  out["T1w"].update({"space": "MNI152NLin2009cAsym"})
+ if out.get("Warp"):
+ # Add source space information
+ out["Warp"].update({"src": "MNI152NLin2009cAsym"})
  return out

  def get_elements(self) -> List:
junifer/datagrabber/hcp1200/hcp1200.py

@@ -159,6 +159,9 @@ class HCP1200(PatternDataGrabber):
  # Add space for T1w data type
  if "T1w" in out:
  out["T1w"].update({"space": "native"})
+ # Add source space for Warp data type
+ if "Warp" in out:
+ out["Warp"].update({"src": "MNI152NLin6Asym"})
  return out

  def get_elements(self) -> List:
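
Note on the datagrabber changes above: each grabber now annotates the Warp data type with its source space ("MNI152NLin2009cAsym" for the AOMIC and DMCC grabbers, "MNI152NLin6Asym" for HCP1200). A hedged sketch of the shared pattern, using a hypothetical helper name rather than the inlined per-grabber code:

    # Hypothetical helper illustrating the pattern added where each grabber
    # builds its output dictionary; the real code inlines this per grabber.
    from typing import Dict

    def annotate_warp_source(out: Dict, src_space: str) -> Dict:
        if out.get("Warp"):
            # Add source space information, e.g. "MNI152NLin2009cAsym"
            out["Warp"].update({"src": src_space})
        return out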