junifer 0.0.5__py3-none-any.whl → 0.0.5.dev24__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (194)
  1. junifer/__init__.py +0 -17
  2. junifer/_version.py +2 -2
  3. junifer/api/__init__.py +1 -4
  4. junifer/api/cli.py +1 -91
  5. junifer/api/decorators.py +0 -9
  6. junifer/api/functions.py +10 -56
  7. junifer/api/parser.py +0 -3
  8. junifer/api/queue_context/__init__.py +1 -4
  9. junifer/api/res/afni/run_afni_docker.sh +1 -1
  10. junifer/api/res/ants/run_ants_docker.sh +1 -1
  11. junifer/api/res/fsl/run_fsl_docker.sh +1 -1
  12. junifer/api/tests/test_api_utils.py +2 -4
  13. junifer/api/tests/test_cli.py +0 -83
  14. junifer/api/tests/test_functions.py +2 -27
  15. junifer/configs/__init__.py +1 -1
  16. junifer/configs/juseless/__init__.py +1 -4
  17. junifer/configs/juseless/datagrabbers/__init__.py +1 -10
  18. junifer/configs/juseless/datagrabbers/aomic_id1000_vbm.py +0 -3
  19. junifer/configs/juseless/datagrabbers/camcan_vbm.py +0 -3
  20. junifer/configs/juseless/datagrabbers/ixi_vbm.py +0 -3
  21. junifer/configs/juseless/datagrabbers/tests/test_ucla.py +3 -1
  22. junifer/configs/juseless/datagrabbers/ucla.py +9 -12
  23. junifer/configs/juseless/datagrabbers/ukb_vbm.py +0 -3
  24. junifer/data/__init__.py +1 -21
  25. junifer/data/coordinates.py +19 -10
  26. junifer/data/masks.py +87 -58
  27. junifer/data/parcellations.py +3 -14
  28. junifer/data/template_spaces.py +1 -4
  29. junifer/data/tests/test_masks.py +37 -26
  30. junifer/data/utils.py +0 -3
  31. junifer/datagrabber/__init__.py +1 -18
  32. junifer/datagrabber/aomic/__init__.py +0 -3
  33. junifer/datagrabber/aomic/id1000.py +37 -70
  34. junifer/datagrabber/aomic/piop1.py +36 -69
  35. junifer/datagrabber/aomic/piop2.py +38 -71
  36. junifer/datagrabber/aomic/tests/test_id1000.py +99 -44
  37. junifer/datagrabber/aomic/tests/test_piop1.py +108 -65
  38. junifer/datagrabber/aomic/tests/test_piop2.py +102 -45
  39. junifer/datagrabber/base.py +6 -13
  40. junifer/datagrabber/datalad_base.py +1 -13
  41. junifer/datagrabber/dmcc13_benchmark.py +53 -36
  42. junifer/datagrabber/hcp1200/__init__.py +0 -3
  43. junifer/datagrabber/hcp1200/datalad_hcp1200.py +0 -3
  44. junifer/datagrabber/hcp1200/hcp1200.py +1 -4
  45. junifer/datagrabber/multiple.py +6 -45
  46. junifer/datagrabber/pattern.py +62 -170
  47. junifer/datagrabber/pattern_datalad.py +12 -25
  48. junifer/datagrabber/tests/test_datagrabber_utils.py +218 -0
  49. junifer/datagrabber/tests/test_datalad_base.py +4 -4
  50. junifer/datagrabber/tests/test_dmcc13_benchmark.py +19 -46
  51. junifer/datagrabber/tests/test_multiple.py +84 -161
  52. junifer/datagrabber/tests/test_pattern.py +0 -45
  53. junifer/datagrabber/tests/test_pattern_datalad.py +4 -4
  54. junifer/datagrabber/utils.py +230 -0
  55. junifer/datareader/__init__.py +1 -4
  56. junifer/datareader/default.py +43 -95
  57. junifer/external/__init__.py +1 -1
  58. junifer/external/nilearn/__init__.py +1 -5
  59. junifer/external/nilearn/junifer_nifti_spheres_masker.py +9 -23
  60. junifer/external/nilearn/tests/test_junifer_nifti_spheres_masker.py +1 -76
  61. junifer/markers/__init__.py +1 -23
  62. junifer/markers/base.py +28 -68
  63. junifer/markers/collection.py +2 -10
  64. junifer/markers/complexity/__init__.py +0 -10
  65. junifer/markers/complexity/complexity_base.py +43 -26
  66. junifer/markers/complexity/hurst_exponent.py +0 -3
  67. junifer/markers/complexity/multiscale_entropy_auc.py +0 -3
  68. junifer/markers/complexity/perm_entropy.py +0 -3
  69. junifer/markers/complexity/range_entropy.py +0 -3
  70. junifer/markers/complexity/range_entropy_auc.py +0 -3
  71. junifer/markers/complexity/sample_entropy.py +0 -3
  72. junifer/markers/complexity/tests/test_hurst_exponent.py +3 -11
  73. junifer/markers/complexity/tests/test_multiscale_entropy_auc.py +3 -11
  74. junifer/markers/complexity/tests/test_perm_entropy.py +3 -11
  75. junifer/markers/complexity/tests/test_range_entropy.py +3 -11
  76. junifer/markers/complexity/tests/test_range_entropy_auc.py +3 -11
  77. junifer/markers/complexity/tests/test_sample_entropy.py +3 -11
  78. junifer/markers/complexity/tests/test_weighted_perm_entropy.py +3 -11
  79. junifer/markers/complexity/weighted_perm_entropy.py +0 -3
  80. junifer/markers/ets_rss.py +42 -27
  81. junifer/markers/falff/__init__.py +0 -3
  82. junifer/markers/falff/_afni_falff.py +2 -5
  83. junifer/markers/falff/_junifer_falff.py +0 -3
  84. junifer/markers/falff/falff_base.py +46 -20
  85. junifer/markers/falff/falff_parcels.py +27 -56
  86. junifer/markers/falff/falff_spheres.py +29 -60
  87. junifer/markers/falff/tests/test_falff_parcels.py +23 -39
  88. junifer/markers/falff/tests/test_falff_spheres.py +23 -39
  89. junifer/markers/functional_connectivity/__init__.py +0 -9
  90. junifer/markers/functional_connectivity/crossparcellation_functional_connectivity.py +60 -63
  91. junifer/markers/functional_connectivity/edge_functional_connectivity_parcels.py +32 -45
  92. junifer/markers/functional_connectivity/edge_functional_connectivity_spheres.py +36 -49
  93. junifer/markers/functional_connectivity/functional_connectivity_base.py +70 -71
  94. junifer/markers/functional_connectivity/functional_connectivity_parcels.py +25 -34
  95. junifer/markers/functional_connectivity/functional_connectivity_spheres.py +30 -40
  96. junifer/markers/functional_connectivity/tests/test_crossparcellation_functional_connectivity.py +7 -11
  97. junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_parcels.py +7 -27
  98. junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_spheres.py +12 -28
  99. junifer/markers/functional_connectivity/tests/test_functional_connectivity_parcels.py +11 -35
  100. junifer/markers/functional_connectivity/tests/test_functional_connectivity_spheres.py +62 -36
  101. junifer/markers/parcel_aggregation.py +61 -47
  102. junifer/markers/reho/__init__.py +0 -3
  103. junifer/markers/reho/_afni_reho.py +2 -5
  104. junifer/markers/reho/_junifer_reho.py +1 -4
  105. junifer/markers/reho/reho_base.py +27 -8
  106. junifer/markers/reho/reho_parcels.py +17 -28
  107. junifer/markers/reho/reho_spheres.py +18 -27
  108. junifer/markers/reho/tests/test_reho_parcels.py +3 -8
  109. junifer/markers/reho/tests/test_reho_spheres.py +3 -8
  110. junifer/markers/sphere_aggregation.py +59 -43
  111. junifer/markers/temporal_snr/__init__.py +0 -3
  112. junifer/markers/temporal_snr/temporal_snr_base.py +32 -23
  113. junifer/markers/temporal_snr/temporal_snr_parcels.py +6 -9
  114. junifer/markers/temporal_snr/temporal_snr_spheres.py +6 -9
  115. junifer/markers/temporal_snr/tests/test_temporal_snr_parcels.py +3 -6
  116. junifer/markers/temporal_snr/tests/test_temporal_snr_spheres.py +3 -6
  117. junifer/markers/tests/test_collection.py +8 -9
  118. junifer/markers/tests/test_ets_rss.py +9 -15
  119. junifer/markers/tests/test_markers_base.py +18 -17
  120. junifer/markers/tests/test_parcel_aggregation.py +32 -93
  121. junifer/markers/tests/test_sphere_aggregation.py +19 -72
  122. junifer/onthefly/__init__.py +1 -4
  123. junifer/onthefly/read_transform.py +0 -3
  124. junifer/pipeline/__init__.py +1 -9
  125. junifer/pipeline/pipeline_step_mixin.py +4 -21
  126. junifer/pipeline/registry.py +0 -3
  127. junifer/pipeline/singleton.py +0 -3
  128. junifer/pipeline/tests/test_registry.py +1 -1
  129. junifer/pipeline/update_meta_mixin.py +0 -3
  130. junifer/pipeline/utils.py +1 -67
  131. junifer/pipeline/workdir_manager.py +0 -3
  132. junifer/preprocess/__init__.py +2 -9
  133. junifer/preprocess/ants/__init__.py +4 -0
  134. junifer/preprocess/ants/ants_apply_transforms_warper.py +185 -0
  135. junifer/preprocess/ants/tests/test_ants_apply_transforms_warper.py +56 -0
  136. junifer/preprocess/base.py +3 -6
  137. junifer/preprocess/bold_warper.py +265 -0
  138. junifer/preprocess/confounds/__init__.py +0 -3
  139. junifer/preprocess/confounds/fmriprep_confound_remover.py +60 -47
  140. junifer/preprocess/confounds/tests/test_fmriprep_confound_remover.py +113 -72
  141. junifer/preprocess/fsl/__init__.py +4 -0
  142. junifer/preprocess/fsl/apply_warper.py +179 -0
  143. junifer/preprocess/fsl/tests/test_apply_warper.py +45 -0
  144. junifer/preprocess/smoothing/__init__.py +0 -3
  145. junifer/preprocess/smoothing/_afni_smoothing.py +1 -1
  146. junifer/preprocess/tests/test_bold_warper.py +159 -0
  147. junifer/preprocess/warping/__init__.py +0 -3
  148. junifer/preprocess/warping/_ants_warper.py +0 -3
  149. junifer/preprocess/warping/_fsl_warper.py +0 -3
  150. junifer/stats.py +1 -4
  151. junifer/storage/__init__.py +1 -9
  152. junifer/storage/base.py +1 -40
  153. junifer/storage/hdf5.py +9 -71
  154. junifer/storage/pandas_base.py +0 -3
  155. junifer/storage/sqlite.py +0 -3
  156. junifer/storage/tests/test_hdf5.py +10 -82
  157. junifer/storage/utils.py +0 -9
  158. junifer/testing/__init__.py +1 -4
  159. junifer/testing/datagrabbers.py +6 -13
  160. junifer/testing/tests/test_partlycloudytesting_datagrabber.py +7 -7
  161. junifer/testing/utils.py +0 -3
  162. junifer/utils/__init__.py +2 -13
  163. junifer/utils/fs.py +0 -3
  164. junifer/utils/helpers.py +1 -32
  165. junifer/utils/logging.py +4 -33
  166. junifer/utils/tests/test_logging.py +0 -8
  167. {junifer-0.0.5.dist-info → junifer-0.0.5.dev24.dist-info}/METADATA +16 -17
  168. junifer-0.0.5.dev24.dist-info/RECORD +265 -0
  169. {junifer-0.0.5.dist-info → junifer-0.0.5.dev24.dist-info}/WHEEL +1 -1
  170. junifer/api/res/freesurfer/mri_binarize +0 -3
  171. junifer/api/res/freesurfer/mri_mc +0 -3
  172. junifer/api/res/freesurfer/mri_pretess +0 -3
  173. junifer/api/res/freesurfer/mris_convert +0 -3
  174. junifer/api/res/freesurfer/run_freesurfer_docker.sh +0 -61
  175. junifer/data/masks/ukb/UKB_15K_GM_template.nii.gz +0 -0
  176. junifer/datagrabber/pattern_validation_mixin.py +0 -388
  177. junifer/datagrabber/tests/test_pattern_validation_mixin.py +0 -249
  178. junifer/external/BrainPrint/brainprint/__init__.py +0 -4
  179. junifer/external/BrainPrint/brainprint/_version.py +0 -3
  180. junifer/external/BrainPrint/brainprint/asymmetry.py +0 -91
  181. junifer/external/BrainPrint/brainprint/brainprint.py +0 -441
  182. junifer/external/BrainPrint/brainprint/surfaces.py +0 -258
  183. junifer/external/BrainPrint/brainprint/utils/__init__.py +0 -1
  184. junifer/external/BrainPrint/brainprint/utils/_config.py +0 -112
  185. junifer/external/BrainPrint/brainprint/utils/utils.py +0 -188
  186. junifer/external/nilearn/junifer_connectivity_measure.py +0 -483
  187. junifer/external/nilearn/tests/test_junifer_connectivity_measure.py +0 -1089
  188. junifer/markers/brainprint.py +0 -459
  189. junifer/markers/tests/test_brainprint.py +0 -58
  190. junifer-0.0.5.dist-info/RECORD +0 -275
  191. {junifer-0.0.5.dist-info → junifer-0.0.5.dev24.dist-info}/AUTHORS.rst +0 -0
  192. {junifer-0.0.5.dist-info → junifer-0.0.5.dev24.dist-info}/LICENSE.md +0 -0
  193. {junifer-0.0.5.dist-info → junifer-0.0.5.dev24.dist-info}/entry_points.txt +0 -0
  194. {junifer-0.0.5.dist-info → junifer-0.0.5.dev24.dist-info}/top_level.txt +0 -0
junifer/markers/tests/test_parcel_aggregation.py CHANGED
@@ -23,63 +23,16 @@ from junifer.storage import SQLiteFeatureStorage
 from junifer.testing.datagrabbers import PartlyCloudyTestingDataGrabber
 
 
-@pytest.mark.parametrize(
-    "input_type, storage_type",
-    [
-        (
-            "T1w",
-            "vector",
-        ),
-        (
-            "T2w",
-            "vector",
-        ),
-        (
-            "BOLD",
-            "timeseries",
-        ),
-        (
-            "VBM_GM",
-            "vector",
-        ),
-        (
-            "VBM_WM",
-            "vector",
-        ),
-        (
-            "VBM_CSF",
-            "vector",
-        ),
-        (
-            "fALFF",
-            "vector",
-        ),
-        (
-            "GCOR",
-            "vector",
-        ),
-        (
-            "LCOR",
-            "vector",
-        ),
-    ],
-)
-def test_ParcelAggregation_input_output(
-    input_type: str, storage_type: str
-) -> None:
-    """Test ParcelAggregation input and output types.
+def test_ParcelAggregation_input_output() -> None:
+    """Test ParcelAggregation input and output types."""
+    marker = ParcelAggregation(
+        parcellation="Schaefer100x7", method="mean", on="VBM_GM"
+    )
+    for in_, out_ in [("VBM_GM", "vector"), ("BOLD", "timeseries")]:
+        assert marker.get_output_type(in_) == out_
 
-    Parameters
-    ----------
-    input_type : str
-        The parametrized input type.
-    storage_type : str
-        The parametrized storage type.
-
-    """
-    assert storage_type == ParcelAggregation(
-        parcellation="Schaefer100x7", method="mean", on=input_type
-    ).get_output_type(input_type=input_type, output_feature="aggregation")
+    with pytest.raises(ValueError, match="Unknown input"):
+        marker.get_output_type("unknown")
 
 
 def test_ParcelAggregation_3D() -> None:
@@ -132,8 +85,8 @@ def test_ParcelAggregation_3D() -> None:
     )
 
     parcel_agg_mean_bold_data = marker.fit_transform(element_data)["BOLD"][
-        "aggregation"
-    ]["data"]
+        "data"
+    ]
     # Check that arrays are almost equal
     assert_array_equal(parcel_agg_mean_bold_data, manual)
     assert_array_almost_equal(nifti_labels_masked_bold, manual)
@@ -160,8 +113,8 @@ def test_ParcelAggregation_3D() -> None:
         on="BOLD",
     )
     parcel_agg_std_bold_data = marker.fit_transform(element_data)["BOLD"][
-        "aggregation"
-    ]["data"]
+        "data"
+    ]
     assert parcel_agg_std_bold_data.ndim == 2
     assert parcel_agg_std_bold_data.shape[0] == 1
     assert_array_equal(parcel_agg_std_bold_data, manual)
@@ -186,7 +139,7 @@ def test_ParcelAggregation_3D() -> None:
     )
     parcel_agg_trim_mean_bold_data = marker.fit_transform(element_data)[
         "BOLD"
-    ]["aggregation"]["data"]
+    ]["data"]
     assert parcel_agg_trim_mean_bold_data.ndim == 2
     assert parcel_agg_trim_mean_bold_data.shape[0] == 1
     assert_array_equal(parcel_agg_trim_mean_bold_data, manual)
@@ -201,8 +154,8 @@ def test_ParcelAggregation_4D():
         parcellation="TianxS1x3TxMNInonlinear2009cAsym", method="mean"
     )
     parcel_agg_bold_data = marker.fit_transform(element_data)["BOLD"][
-        "aggregation"
-    ]["data"]
+        "data"
+    ]
 
     # Compare with nilearn
     # Load testing parcellation
@@ -251,8 +204,7 @@ def test_ParcelAggregation_storage(tmp_path: Path) -> None:
     marker.fit_transform(input=element_data, storage=storage)
     features = storage.list_features()
     assert any(
-        x["name"] == "BOLD_ParcelAggregation_aggregation"
-        for x in features.values()
+        x["name"] == "BOLD_ParcelAggregation" for x in features.values()
     )
 
     # Store 4D
@@ -269,8 +221,7 @@ def test_ParcelAggregation_storage(tmp_path: Path) -> None:
     marker.fit_transform(input=element_data, storage=storage)
     features = storage.list_features()
     assert any(
-        x["name"] == "BOLD_ParcelAggregation_aggregation"
-        for x in features.values()
+        x["name"] == "BOLD_ParcelAggregation" for x in features.values()
    )
 
 
@@ -290,8 +241,8 @@ def test_ParcelAggregation_3D_mask() -> None:
         ..., 0:1
     ]
     parcel_agg_bold_data = marker.fit_transform(element_data)["BOLD"][
-        "aggregation"
-    ]["data"]
+        "data"
+    ]
 
     # Compare with nilearn
     # Load testing parcellation
@@ -365,8 +316,8 @@ def test_ParcelAggregation_3D_mask_computed() -> None:
        on="BOLD",
    )
    parcel_agg_mean_bold_data = marker.fit_transform(element_data)["BOLD"][
-        "aggregation"
-    ]["data"]
+        "data"
+    ]
 
     assert parcel_agg_mean_bold_data.ndim == 2
     assert parcel_agg_mean_bold_data.shape[0] == 1
@@ -446,9 +397,7 @@ def test_ParcelAggregation_3D_multiple_non_overlapping(tmp_path: Path) -> None:
         name="tian_mean",
         on="BOLD",
     )
-    orig_mean = marker_original.fit_transform(element_data)["BOLD"][
-        "aggregation"
-    ]
+    orig_mean = marker_original.fit_transform(element_data)["BOLD"]
 
     orig_mean_data = orig_mean["data"]
     assert orig_mean_data.ndim == 2
@@ -468,9 +417,7 @@ def test_ParcelAggregation_3D_multiple_non_overlapping(tmp_path: Path) -> None:
     # No warnings should be raised
     with warnings.catch_warnings():
         warnings.simplefilter("error", category=UserWarning)
-        split_mean = marker_split.fit_transform(element_data)["BOLD"][
-            "aggregation"
-        ]
+        split_mean = marker_split.fit_transform(element_data)["BOLD"]
 
     split_mean_data = split_mean["data"]
 
@@ -550,9 +497,7 @@ def test_ParcelAggregation_3D_multiple_overlapping(tmp_path: Path) -> None:
         name="tian_mean",
         on="BOLD",
     )
-    orig_mean = marker_original.fit_transform(element_data)["BOLD"][
-        "aggregation"
-    ]
+    orig_mean = marker_original.fit_transform(element_data)["BOLD"]
 
     orig_mean_data = orig_mean["data"]
     assert orig_mean_data.ndim == 2
@@ -570,9 +515,7 @@ def test_ParcelAggregation_3D_multiple_overlapping(tmp_path: Path) -> None:
     )
     # Warning should be raised
     with pytest.warns(RuntimeWarning, match="overlapping voxels"):
-        split_mean = marker_split.fit_transform(element_data)["BOLD"][
-            "aggregation"
-        ]
+        split_mean = marker_split.fit_transform(element_data)["BOLD"]
 
     split_mean_data = split_mean["data"]
 
@@ -659,9 +602,7 @@ def test_ParcelAggregation_3D_multiple_duplicated_labels(
         name="tian_mean",
         on="BOLD",
     )
-    orig_mean = marker_original.fit_transform(element_data)["BOLD"][
-        "aggregation"
-    ]
+    orig_mean = marker_original.fit_transform(element_data)["BOLD"]
 
     orig_mean_data = orig_mean["data"]
     assert orig_mean_data.ndim == 2
@@ -680,9 +621,7 @@ def test_ParcelAggregation_3D_multiple_duplicated_labels(
 
     # Warning should be raised
     with pytest.warns(RuntimeWarning, match="duplicated labels."):
-        split_mean = marker_split.fit_transform(element_data)["BOLD"][
-            "aggregation"
-        ]
+        split_mean = marker_split.fit_transform(element_data)["BOLD"]
 
     split_mean_data = split_mean["data"]
 
@@ -714,8 +653,8 @@ def test_ParcelAggregation_4D_agg_time():
         on="BOLD",
     )
     parcel_agg_bold_data = marker.fit_transform(element_data)["BOLD"][
-        "aggregation"
-    ]["data"]
+        "data"
+    ]
 
     # Compare with nilearn
     # Loading testing parcellation
@@ -750,8 +689,8 @@ def test_ParcelAggregation_4D_agg_time():
        on="BOLD",
    )
    parcel_agg_bold_data = marker.fit_transform(element_data)["BOLD"][
-        "aggregation"
-    ]["data"]
+        "data"
+    ]
 
     assert parcel_agg_bold_data.ndim == 2
     assert_array_equal(
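Taken together, these test updates show two marker API differences between the builds: get_output_type takes only the input data type (no output_feature argument), and the marker output for a data type is no longer nested under an "aggregation" key. Below is a minimal sketch of the difference, assuming the testing DataGrabber and DefaultDataReader behave as in the package's test suite and that "sub-01" is a valid element key; neither detail is shown in this section.

from junifer.datareader import DefaultDataReader
from junifer.markers import ParcelAggregation
from junifer.testing.datagrabbers import PartlyCloudyTestingDataGrabber

marker = ParcelAggregation(
    parcellation="TianxS1x3TxMNInonlinear2009cAsym", method="mean", on="BOLD"
)
# 0.0.5.dev24: a single positional input type, no output_feature argument
assert marker.get_output_type("BOLD") == "timeseries"

with PartlyCloudyTestingDataGrabber() as dg:
    # "sub-01" is an assumed element key, following the package's other tests
    element_data = DefaultDataReader().fit_transform(dg["sub-01"])

# 0.0.5:       marker.fit_transform(element_data)["BOLD"]["aggregation"]["data"]
# 0.0.5.dev24: marker.fit_transform(element_data)["BOLD"]["data"]
bold_parcel_means = marker.fit_transform(element_data)["BOLD"]["data"]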
junifer/markers/tests/test_sphere_aggregation.py CHANGED
@@ -25,65 +25,14 @@ COORDS = "DMNBuckner"
 RADIUS = 8
 
 
-@pytest.mark.parametrize(
-    "input_type, storage_type",
-    [
-        (
-            "T1w",
-            "vector",
-        ),
-        (
-            "T2w",
-            "vector",
-        ),
-        (
-            "BOLD",
-            "timeseries",
-        ),
-        (
-            "VBM_GM",
-            "vector",
-        ),
-        (
-            "VBM_WM",
-            "vector",
-        ),
-        (
-            "VBM_CSF",
-            "vector",
-        ),
-        (
-            "fALFF",
-            "vector",
-        ),
-        (
-            "GCOR",
-            "vector",
-        ),
-        (
-            "LCOR",
-            "vector",
-        ),
-    ],
-)
-def test_SphereAggregation_input_output(
-    input_type: str, storage_type: str
-) -> None:
-    """Test SphereAggregation input and output types.
+def test_SphereAggregation_input_output() -> None:
+    """Test SphereAggregation input and output types."""
+    marker = SphereAggregation(coords="DMNBuckner", method="mean", on="VBM_GM")
+    for in_, out_ in [("VBM_GM", "vector"), ("BOLD", "timeseries")]:
+        assert marker.get_output_type(in_) == out_
 
-    Parameters
-    ----------
-    input_type : str
-        The parametrized input type.
-    storage_type : str
-        The parametrized storage type.
-
-    """
-    assert storage_type == SphereAggregation(
-        coords="DMNBuckner",
-        method="mean",
-        on=input_type,
-    ).get_output_type(input_type=input_type, output_feature="aggregation")
+    with pytest.raises(ValueError, match="Unknown input"):
+        marker.get_output_type("unknown")
 
 
 def test_SphereAggregation_3D() -> None:
@@ -95,8 +44,8 @@ def test_SphereAggregation_3D() -> None:
         coords=COORDS, method="mean", radius=RADIUS, on="VBM_GM"
     )
     sphere_agg_vbm_gm_data = marker.fit_transform(element_data)["VBM_GM"][
-        "aggregation"
-    ]["data"]
+        "data"
+    ]
 
     # Compare with nilearn
     # Load testing coordinates
@@ -127,8 +76,8 @@ def test_SphereAggregation_4D() -> None:
         coords=COORDS, method="mean", radius=RADIUS, on="BOLD"
     )
     sphere_agg_bold_data = marker.fit_transform(element_data)["BOLD"][
-        "aggregation"
-    ]["data"]
+        "data"
+    ]
 
     # Compare with nilearn
     # Load testing coordinates
@@ -171,8 +120,7 @@ def test_SphereAggregation_storage(tmp_path: Path) -> None:
     marker.fit_transform(input=element_data, storage=storage)
     features = storage.list_features()
     assert any(
-        x["name"] == "VBM_GM_SphereAggregation_aggregation"
-        for x in features.values()
+        x["name"] == "VBM_GM_SphereAggregation" for x in features.values()
     )
 
     # Store 4D
@@ -187,8 +135,7 @@ def test_SphereAggregation_storage(tmp_path: Path) -> None:
     marker.fit_transform(input=element_data, storage=storage)
     features = storage.list_features()
     assert any(
-        x["name"] == "BOLD_SphereAggregation_aggregation"
-        for x in features.values()
+        x["name"] == "BOLD_SphereAggregation" for x in features.values()
    )
 
 
@@ -205,8 +152,8 @@ def test_SphereAggregation_3D_mask() -> None:
         masks="compute_brain_mask",
     )
     sphere_agg_vbm_gm_data = marker.fit_transform(element_data)["VBM_GM"][
-        "aggregation"
-    ]["data"]
+        "data"
+    ]
 
     # Compare with nilearn
     # Load testing coordinates
@@ -248,8 +195,8 @@ def test_SphereAggregation_4D_agg_time() -> None:
        on="BOLD",
    )
    sphere_agg_bold_data = marker.fit_transform(element_data)["BOLD"][
-        "aggregation"
-    ]["data"]
+        "data"
+    ]
 
     # Compare with nilearn
     # Load testing coordinates
@@ -284,8 +231,8 @@ def test_SphereAggregation_4D_agg_time() -> None:
        on="BOLD",
    )
    sphere_agg_bold_data = marker.fit_transform(element_data)["BOLD"][
-        "aggregation"
-    ]["data"]
+        "data"
+    ]
 
     assert sphere_agg_bold_data.ndim == 2
     assert_array_equal(
junifer/onthefly/__init__.py CHANGED
@@ -1,9 +1,6 @@
-"""Utilities for on-the-fly analyses."""
+"""Provide imports for onthefly sub-package."""
 
 # Authors: Synchon Mandal <s.mandal@fz-juelich.de>
 # License: AGPL
 
 from .read_transform import read_transform
-
-
-__all__ = ["read_transform"]
junifer/onthefly/read_transform.py CHANGED
@@ -15,9 +15,6 @@ if TYPE_CHECKING:
     from junifer.storage import BaseFeatureStorage
 
 
-__all__ = ["read_transform"]
-
-
 def read_transform(
     storage: Type["BaseFeatureStorage"],
     transform: str,
junifer/pipeline/__init__.py CHANGED
@@ -1,4 +1,4 @@
-"""Pipeline components."""
+"""Provide imports for pipeline sub-package."""
 
 # Authors: Synchon Mandal <s.mandal@fz-juelich.de>
 # License: AGPL
@@ -7,11 +7,3 @@ from . import registry
 from .pipeline_step_mixin import PipelineStepMixin
 from .update_meta_mixin import UpdateMetaMixin
 from .workdir_manager import WorkDirManager
-
-
-__all__ = [
-    "registry",
-    "PipelineStepMixin",
-    "UpdateMetaMixin",
-    "WorkDirManager",
-]
junifer/pipeline/pipeline_step_mixin.py CHANGED
@@ -4,14 +4,10 @@
 # Synchon Mandal <s.mandal@fz-juelich.de>
 # License: AGPL
 
-import sys
-
-
-if sys.version_info < (3, 11):  # pragma: no cover
-    from importlib_metadata import packages_distributions
-else:
+try:
     from importlib.metadata import packages_distributions
-
+except ImportError:  # pragma: no cover
+    from importlib_metadata import packages_distributions
 
 from importlib.util import find_spec
 from itertools import chain
@@ -21,9 +17,6 @@ from ..utils import raise_error
 from .utils import check_ext_dependencies
 
 
-__all__ = ["PipelineStepMixin"]
-
-
 class PipelineStepMixin:
     """Mixin class for a pipeline step."""
 
@@ -210,17 +203,7 @@
         # Validate input
         fit_input = self.validate_input(input=input)
         # Validate output type
-        # Nested output type for marker
-        if hasattr(self, "_MARKER_INOUT_MAPPINGS"):
-            outputs = list(
-                {
-                    val
-                    for t_input in fit_input
-                    for val in self._MARKER_INOUT_MAPPINGS[t_input].values()
-                }
-            )
-        else:
-            outputs = [self.get_output_type(t_input) for t_input in fit_input]
+        outputs = [self.get_output_type(t_input) for t_input in fit_input]
         return outputs
 
     def fit_transform(
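The first hunk above swaps an explicit interpreter-version check for the usual try-the-standard-library-then-fall-back-to-the-backport idiom. A standalone sketch of that idiom; packages_distributions only ships with importlib.metadata on newer Python versions, which is why the importlib_metadata backport remains the fallback:

try:
    # Standard library location on newer Python versions
    from importlib.metadata import packages_distributions
except ImportError:
    # Backport package for older interpreters
    from importlib_metadata import packages_distributions

# Maps top-level module names to the distributions that provide them
print(packages_distributions().get("yaml", []))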
junifer/pipeline/registry.py CHANGED
@@ -16,9 +16,6 @@ if TYPE_CHECKING:
     from .pipeline_step_mixin import PipelineStepMixin
 
 
-__all__ = ["register", "get_step_names", "get_class", "build"]
-
-
 # Define valid steps for operation
 _VALID_STEPS: List[str] = [
     "datagrabber",
junifer/pipeline/singleton.py CHANGED
@@ -6,9 +6,6 @@
 from typing import Any, Dict, Type
 
 
-__all__ = ["singleton"]
-
-
 def singleton(cls: Type) -> Type:
     """Make a class singleton.
 
junifer/pipeline/tests/test_registry.py CHANGED
@@ -101,7 +101,7 @@
     register(step="datagrabber", name="bar", klass=str)
     # Get class
     obj = get_class(step="datagrabber", name="bar")
-    assert isinstance(obj, type(str))
+    assert obj == str
 
 
 # TODO: possible parametrization?
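The one-line assertion change above is a real fix, not a style tweak: type(str) evaluates to type, so isinstance(obj, type(str)) is true for any class the registry might return, while the new comparison only passes for str itself. A standalone illustration (not part of the package):

class NotStr:
    pass

# Old check: passes for any class, because type(str) is simply `type`
assert isinstance(NotStr, type(str))
assert isinstance(str, type(str))

# New check: only passes for the class that was actually registered
assert str == str
assert not (NotStr == str)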
junifer/pipeline/update_meta_mixin.py CHANGED
@@ -7,9 +7,6 @@
 from typing import Dict
 
 
-__all__ = ["UpdateMetaMixin"]
-
-
 class UpdateMetaMixin:
     """Mixin class for updating meta."""
 
junifer/pipeline/utils.py CHANGED
@@ -10,9 +10,6 @@ from typing import Any, List, Optional
 from junifer.utils.logging import raise_error, warn_with_log
 
 
-__all__ = ["check_ext_dependencies"]
-
-
 def check_ext_dependencies(
     name: str, optional: bool = False, **kwargs: Any
 ) -> bool:
@@ -40,7 +37,7 @@
         If ``name`` is mandatory and is not found.
 
     """
-    valid_ext_dependencies = ("afni", "fsl", "ants", "freesurfer")
+    valid_ext_dependencies = ("afni", "fsl", "ants")
     if name not in valid_ext_dependencies:
         raise_error(
             "Invalid value for `name`, should be one of: "
@@ -55,9 +52,6 @@
     # Check for ants
     elif name == "ants":
         found = _check_ants(**kwargs)
-    # Check for freesurfer
-    elif name == "freesurfer":
-        found = _check_freesurfer(**kwargs)
 
     # Check if the dependency is mandatory in case it's not found
     if not found and not optional:
@@ -251,63 +245,3 @@
                 f"{commands_found_results}"
             )
     return ants_found
-
-
-def _check_freesurfer(commands: Optional[List[str]] = None) -> bool:
-    """Check if FreeSurfer is present in the system.
-
-    Parameters
-    ----------
-    commands : list of str, optional
-        The commands to specifically check for from FreeSurfer. If None, only
-        the basic FreeSurfer help would be looked up, else, would also
-        check for specific commands (default None).
-
-    Returns
-    -------
-    bool
-        Whether FreeSurfer is found or not.
-
-    """
-    completed_process = subprocess.run(
-        "recon-all -help",
-        stdin=subprocess.DEVNULL,
-        stdout=subprocess.DEVNULL,
-        stderr=subprocess.STDOUT,
-        shell=True,  # is unsafe but kept for resolution via PATH
-        check=False,
-    )
-    fs_found = completed_process.returncode == 0
-
-    # Check for specific commands
-    if fs_found and commands is not None:
-        if not isinstance(commands, list):
-            commands = [commands]
-        # Store command found results
-        commands_found_results = {}
-        # Set all commands found flag to True
-        all_commands_found = True
-        # Check commands' existence
-        for command in commands:
-            command_process = subprocess.run(
-                [command],
-                stdin=subprocess.DEVNULL,
-                stdout=subprocess.DEVNULL,
-                stderr=subprocess.STDOUT,
-                shell=True,  # is unsafe but kept for resolution via PATH
-                check=False,
-            )
-            command_found = command_process.returncode == 0
-            commands_found_results[command] = (
-                "found" if command_found else "not found"
-            )
-            # Set flag to trigger warning
-            all_commands_found = all_commands_found and command_found
-        # One or more commands were missing
-        if not all_commands_found:
-            warn_with_log(
-                "FreeSurfer is installed but some of the required commands "
-                "were not found. These are the results: "
-                f"{commands_found_results}"
-            )
-    return fs_found
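With _check_freesurfer gone, check_ext_dependencies in this build only recognises "afni", "fsl" and "ants". A hedged usage sketch based on the signature shown above; junifer's raise_error raises ValueError by default, but the example catches broadly in case that differs:

from junifer.pipeline.utils import check_ext_dependencies

# Valid names in 0.0.5.dev24: "afni", "fsl", "ants"
have_afni = check_ext_dependencies("afni", optional=True)
print("AFNI available:", have_afni)

# "freesurfer" was accepted by 0.0.5 but is rejected by this build
try:
    check_ext_dependencies("freesurfer", optional=True)
except Exception as err:  # raise_error uses ValueError by default
    print("rejected:", err)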
junifer/pipeline/workdir_manager.py CHANGED
@@ -13,9 +13,6 @@ from ..utils import logger
 from .singleton import singleton
 
 
-__all__ = ["WorkDirManager"]
-
-
 @singleton
 class WorkDirManager:
     """Class for working directory manager.
junifer/preprocess/__init__.py CHANGED
@@ -1,4 +1,4 @@
-"""Preprocessors for preprocessing data before feature extraction."""
+"""Provide imports for preprocess sub-package."""
 
 # Authors: Federico Raimondo <f.raimondo@fz-juelich.de>
 # Leonard Sasse <l.sasse@fz-juelich.de>
@@ -7,13 +7,6 @@
 
 from .base import BasePreprocessor
 from .confounds import fMRIPrepConfoundRemover
+from .bold_warper import BOLDWarper
 from .warping import SpaceWarper
 from .smoothing import Smoothing
-
-
-__all__ = [
-    "BasePreprocessor",
-    "fMRIPrepConfoundRemover",
-    "SpaceWarper",
-    "Smoothing",
-]
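The added import line means the new BOLDWarper preprocessor (introduced in junifer/preprocess/bold_warper.py, whose constructor arguments are not shown in this section) becomes importable from the sub-package alongside the existing preprocessors. A short sketch of what the dev build exposes:

from junifer.preprocess import (
    BasePreprocessor,
    BOLDWarper,  # newly exposed in 0.0.5.dev24
    fMRIPrepConfoundRemover,
    Smoothing,
    SpaceWarper,
)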
junifer/preprocess/ants/__init__.py ADDED
@@ -0,0 +1,4 @@
+"""Provide imports for ants sub-package."""
+
+# Authors: Synchon Mandal <s.mandal@fz-juelich.de>
+# License: AGPL