rapidtide 3.0.10__py3-none-any.whl → 3.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (141)
  1. rapidtide/Colortables.py +492 -27
  2. rapidtide/OrthoImageItem.py +1053 -47
  3. rapidtide/RapidtideDataset.py +1533 -86
  4. rapidtide/_version.py +3 -3
  5. rapidtide/calccoherence.py +196 -29
  6. rapidtide/calcnullsimfunc.py +191 -40
  7. rapidtide/calcsimfunc.py +245 -42
  8. rapidtide/correlate.py +1210 -393
  9. rapidtide/data/examples/src/testLD +56 -0
  10. rapidtide/data/examples/src/testalign +1 -1
  11. rapidtide/data/examples/src/testdelayvar +0 -1
  12. rapidtide/data/examples/src/testfmri +19 -1
  13. rapidtide/data/examples/src/testglmfilt +5 -5
  14. rapidtide/data/examples/src/testhappy +30 -1
  15. rapidtide/data/examples/src/testppgproc +17 -0
  16. rapidtide/data/examples/src/testrolloff +11 -0
  17. rapidtide/data/models/model_cnn_pytorch/best_model.pth +0 -0
  18. rapidtide/data/models/model_cnn_pytorch/loss.png +0 -0
  19. rapidtide/data/models/model_cnn_pytorch/loss.txt +1 -0
  20. rapidtide/data/models/model_cnn_pytorch/model.pth +0 -0
  21. rapidtide/data/models/model_cnn_pytorch/model_meta.json +68 -0
  22. rapidtide/data/reference/JHU-ArterialTerritoriesNoVent-LVL1_space-MNI152NLin2009cAsym_2mm.nii.gz +0 -0
  23. rapidtide/data/reference/JHU-ArterialTerritoriesNoVent-LVL1_space-MNI152NLin2009cAsym_2mm_mask.nii.gz +0 -0
  24. rapidtide/decorators.py +91 -0
  25. rapidtide/dlfilter.py +2225 -108
  26. rapidtide/dlfiltertorch.py +4843 -0
  27. rapidtide/externaltools.py +327 -12
  28. rapidtide/fMRIData_class.py +79 -40
  29. rapidtide/filter.py +1899 -810
  30. rapidtide/fit.py +2004 -574
  31. rapidtide/genericmultiproc.py +93 -18
  32. rapidtide/happy_supportfuncs.py +2044 -171
  33. rapidtide/helper_classes.py +584 -43
  34. rapidtide/io.py +2363 -370
  35. rapidtide/linfitfiltpass.py +341 -75
  36. rapidtide/makelaggedtcs.py +211 -20
  37. rapidtide/maskutil.py +423 -53
  38. rapidtide/miscmath.py +827 -121
  39. rapidtide/multiproc.py +210 -22
  40. rapidtide/patchmatch.py +234 -33
  41. rapidtide/peakeval.py +32 -30
  42. rapidtide/ppgproc.py +2203 -0
  43. rapidtide/qualitycheck.py +352 -39
  44. rapidtide/refinedelay.py +422 -57
  45. rapidtide/refineregressor.py +498 -184
  46. rapidtide/resample.py +671 -185
  47. rapidtide/scripts/applyppgproc.py +28 -0
  48. rapidtide/simFuncClasses.py +1052 -77
  49. rapidtide/simfuncfit.py +260 -46
  50. rapidtide/stats.py +540 -238
  51. rapidtide/tests/happycomp +9 -0
  52. rapidtide/tests/test_dlfiltertorch.py +627 -0
  53. rapidtide/tests/test_findmaxlag.py +24 -8
  54. rapidtide/tests/test_fullrunhappy_v1.py +0 -2
  55. rapidtide/tests/test_fullrunhappy_v2.py +0 -2
  56. rapidtide/tests/test_fullrunhappy_v3.py +1 -0
  57. rapidtide/tests/test_fullrunhappy_v4.py +2 -2
  58. rapidtide/tests/test_fullrunrapidtide_v7.py +1 -1
  59. rapidtide/tests/test_simroundtrip.py +8 -8
  60. rapidtide/tests/utils.py +9 -8
  61. rapidtide/tidepoolTemplate.py +142 -38
  62. rapidtide/tidepoolTemplate_alt.py +165 -44
  63. rapidtide/tidepoolTemplate_big.py +189 -52
  64. rapidtide/util.py +1217 -118
  65. rapidtide/voxelData.py +684 -37
  66. rapidtide/wiener.py +19 -12
  67. rapidtide/wiener2.py +113 -7
  68. rapidtide/wiener_doc.py +255 -0
  69. rapidtide/workflows/adjustoffset.py +105 -3
  70. rapidtide/workflows/aligntcs.py +85 -2
  71. rapidtide/workflows/applydlfilter.py +87 -10
  72. rapidtide/workflows/applyppgproc.py +522 -0
  73. rapidtide/workflows/atlasaverage.py +210 -47
  74. rapidtide/workflows/atlastool.py +100 -3
  75. rapidtide/workflows/calcSimFuncMap.py +294 -64
  76. rapidtide/workflows/calctexticc.py +201 -9
  77. rapidtide/workflows/ccorrica.py +97 -4
  78. rapidtide/workflows/cleanregressor.py +168 -29
  79. rapidtide/workflows/delayvar.py +163 -10
  80. rapidtide/workflows/diffrois.py +81 -3
  81. rapidtide/workflows/endtidalproc.py +144 -4
  82. rapidtide/workflows/fdica.py +195 -15
  83. rapidtide/workflows/filtnifti.py +70 -3
  84. rapidtide/workflows/filttc.py +74 -3
  85. rapidtide/workflows/fitSimFuncMap.py +206 -48
  86. rapidtide/workflows/fixtr.py +73 -3
  87. rapidtide/workflows/gmscalc.py +113 -3
  88. rapidtide/workflows/happy.py +813 -201
  89. rapidtide/workflows/happy2std.py +144 -12
  90. rapidtide/workflows/happy_parser.py +149 -8
  91. rapidtide/workflows/histnifti.py +118 -2
  92. rapidtide/workflows/histtc.py +84 -3
  93. rapidtide/workflows/linfitfilt.py +117 -4
  94. rapidtide/workflows/localflow.py +328 -28
  95. rapidtide/workflows/mergequality.py +79 -3
  96. rapidtide/workflows/niftidecomp.py +322 -18
  97. rapidtide/workflows/niftistats.py +174 -4
  98. rapidtide/workflows/pairproc.py +88 -2
  99. rapidtide/workflows/pairwisemergenifti.py +85 -2
  100. rapidtide/workflows/parser_funcs.py +1421 -40
  101. rapidtide/workflows/physiofreq.py +137 -11
  102. rapidtide/workflows/pixelcomp.py +208 -5
  103. rapidtide/workflows/plethquality.py +103 -21
  104. rapidtide/workflows/polyfitim.py +151 -11
  105. rapidtide/workflows/proj2flow.py +75 -2
  106. rapidtide/workflows/rankimage.py +111 -4
  107. rapidtide/workflows/rapidtide.py +272 -15
  108. rapidtide/workflows/rapidtide2std.py +98 -2
  109. rapidtide/workflows/rapidtide_parser.py +109 -9
  110. rapidtide/workflows/refineDelayMap.py +143 -33
  111. rapidtide/workflows/refineRegressor.py +682 -93
  112. rapidtide/workflows/regressfrommaps.py +152 -31
  113. rapidtide/workflows/resamplenifti.py +85 -3
  114. rapidtide/workflows/resampletc.py +91 -3
  115. rapidtide/workflows/retrolagtcs.py +98 -6
  116. rapidtide/workflows/retroregress.py +165 -9
  117. rapidtide/workflows/roisummarize.py +173 -5
  118. rapidtide/workflows/runqualitycheck.py +71 -3
  119. rapidtide/workflows/showarbcorr.py +147 -4
  120. rapidtide/workflows/showhist.py +86 -2
  121. rapidtide/workflows/showstxcorr.py +160 -3
  122. rapidtide/workflows/showtc.py +159 -3
  123. rapidtide/workflows/showxcorrx.py +184 -4
  124. rapidtide/workflows/showxy.py +185 -15
  125. rapidtide/workflows/simdata.py +262 -36
  126. rapidtide/workflows/spatialfit.py +77 -2
  127. rapidtide/workflows/spatialmi.py +251 -27
  128. rapidtide/workflows/spectrogram.py +305 -32
  129. rapidtide/workflows/synthASL.py +154 -3
  130. rapidtide/workflows/tcfrom2col.py +76 -2
  131. rapidtide/workflows/tcfrom3col.py +74 -2
  132. rapidtide/workflows/tidepool.py +2972 -133
  133. rapidtide/workflows/utils.py +19 -14
  134. rapidtide/workflows/utils_doc.py +293 -0
  135. rapidtide/workflows/variabilityizer.py +116 -3
  136. {rapidtide-3.0.10.dist-info → rapidtide-3.1.dist-info}/METADATA +10 -9
  137. {rapidtide-3.0.10.dist-info → rapidtide-3.1.dist-info}/RECORD +141 -122
  138. {rapidtide-3.0.10.dist-info → rapidtide-3.1.dist-info}/entry_points.txt +1 -0
  139. {rapidtide-3.0.10.dist-info → rapidtide-3.1.dist-info}/WHEEL +0 -0
  140. {rapidtide-3.0.10.dist-info → rapidtide-3.1.dist-info}/licenses/LICENSE +0 -0
  141. {rapidtide-3.0.10.dist-info → rapidtide-3.1.dist-info}/top_level.txt +0 -0
rapidtide/workflows/retroregress.py
@@ -23,9 +23,12 @@ import os
  import platform
  import sys
  import time
+ from argparse import Namespace
  from pathlib import Path
+ from typing import Any, Callable, Dict, List, Optional, Tuple, Union

  import numpy as np
+ from numpy.typing import NDArray

  import rapidtide.filter as tide_filt
  import rapidtide.io as tide_io
@@ -63,9 +66,30 @@ DEFAULT_REFINEDELAYNUMPOINTS = 501
  DEFAULT_DELAYOFFSETSPATIALFILT = -1


- def _get_parser():
+ def _get_parser() -> Any:
  """
- Argument parser for glmfilt
+ Argument parser for glmfilt.
+
+ This function constructs and returns an `argparse.ArgumentParser` object configured
+ for the `glmfilt` command-line tool, which performs sLFO (spatially localized
+ filter) filtering using maps generated from a previous rapidtide analysis.
+
+ Returns
+ -------
+ argparse.ArgumentParser
+ Configured argument parser for the glmfilt command-line interface.
+
+ Notes
+ -----
+ The parser includes both standard and experimental options. Experimental options
+ are not fully tested and may not work as expected.
+
+ Examples
+ --------
+ >>> parser = _get_parser()
+ >>> args = parser.parse_args(['fmri.nii', 'dataset_root'])
+ >>> print(args.fmrifile)
+ 'fmri.nii'
  """
  parser = argparse.ArgumentParser(
  prog="retroregress",
@@ -232,7 +256,90 @@ def _get_parser():
  return parser


- def retroregress(args):
+ def retroregress(args: Any) -> None:
+ """
+ Perform retrospective regression analysis on fMRI data to filter out slow
+ physiological noise (sLFO).
+
+ This function applies a retrospective regression approach to remove slow
+ physiological noise from fMRI data. It uses a delayed sLFO regressor to
+ model and remove the noise, optionally refining the delay using temporal
+ derivatives of the regressor.
+
+ Parameters
+ ----------
+ args : argparse.Namespace
+ Command-line arguments parsed by argparse. Expected attributes include:
+ - data_root : str
+ Root path for input data files
+ - output_prefix : str
+ Prefix for output files
+ - filter_type : str
+ Type of filtering to apply (e.g., 'bandpass')
+ - filter_low : float
+ Low cutoff frequency for filtering
+ - filter_high : float
+ High cutoff frequency for filtering
+ - regress_derivs : int
+ Number of temporal derivatives to include in the regressor
+ - refinedelay : bool
+ Whether to refine the delay using temporal derivatives
+ - refinecorr : bool
+ Whether to compute correlation refinement
+ - savemovingsignal : bool
+ Whether to save the filtered signal
+ - savenormalsLFOfiltfiles : bool
+ Whether to save standard output files
+ - saveminimumsLFOfiltfiles : bool
+ Whether to save minimum output files
+ - saveallsLFOfiltfiles : bool
+ Whether to save all output files
+ - makepseudofile : bool
+ Whether to create a pseudo-file
+ - nprocs : int
+ Number of processes to use
+ - debug : bool
+ Enable debug mode
+ - focaldebug : bool
+ Enable focal debug mode
+
+ Returns
+ -------
+ None
+ This function does not return a value but writes output files to disk.
+
+ Notes
+ -----
+ The function performs the following steps:
+ 1. Reads input data files including mean image, correlation mask, and processed mask
+ 2. Applies temporal filtering to the input data
+ 3. Performs GLM regression using delayed sLFO regressors
+ 4. Refines delay if requested using temporal derivatives
+ 5. Saves output files including filtered data, regressors, and timing information
+
+ Examples
+ --------
+ >>> import argparse
+ >>> args = argparse.Namespace(
+ ... data_root='path/to/data',
+ ... output_prefix='output',
+ ... filter_type='bandpass',
+ ... filter_low=0.01,
+ ... filter_high=0.1,
+ ... regress_derivs=2,
+ ... refinedelay=True,
+ ... refinecorr=False,
+ ... savemovingsignal=True,
+ ... savenormalsLFOfiltfiles=True,
+ ... saveminimumsLFOfiltfiles=True,
+ ... saveallsLFOfiltfiles=False,
+ ... makepseudofile=False,
+ ... nprocs=4,
+ ... debug=False,
+ ... focaldebug=False
+ ... )
+ >>> retroregress(args)
+ """
  # get the pid of the parent process
  args.pid = os.getpid()

@@ -253,7 +360,7 @@ def retroregress(args):
  logger_filename=f"{outputname}_retrolog.txt",
  timing_filename=f"{outputname}_retroruntimings.tsv",
  error_filename=f"{outputname}_retroerrorlog.txt",
- verbose=False,
+ isverbose=False,
  debug=args.debug,
  )
  TimingLGR.info("Start")
@@ -519,16 +626,24 @@ def retroregress(args):
  sLFOfitmean, sLFOfitmean_shm = tide_util.allocarray(
  internalvalidspaceshape, rt_outfloattype, shared=usesharedmem
  )
- rvalue, rvalue_shm = tide_util.allocarray(internalvalidspaceshape, rt_outfloattype, shared=usesharedmem)
- r2value, r2value_shm = tide_util.allocarray(internalvalidspaceshape, rt_outfloattype, shared=usesharedmem)
- fitNorm, fitNorm_shm = tide_util.allocarray(internalvalidspaceshapederivs, rt_outfloattype, shared=usesharedmem)
+ rvalue, rvalue_shm = tide_util.allocarray(
+ internalvalidspaceshape, rt_outfloattype, shared=usesharedmem
+ )
+ r2value, r2value_shm = tide_util.allocarray(
+ internalvalidspaceshape, rt_outfloattype, shared=usesharedmem
+ )
+ fitNorm, fitNorm_shm = tide_util.allocarray(
+ internalvalidspaceshapederivs, rt_outfloattype, shared=usesharedmem
+ )
  fitcoeff, fitcoeff_shm = tide_util.allocarray(
  internalvalidspaceshapederivs, rt_outfloattype, shared=usesharedmem
  )
  movingsignal, movingsignal_shm = tide_util.allocarray(
  internalvalidfmrishape, rt_outfloattype, shared=usesharedmem
  )
- lagtc, lagtc_shm = tide_util.allocarray(internalvalidfmrishape, rt_floattype, shared=usesharedmem)
+ lagtc, lagtc_shm = tide_util.allocarray(
+ internalvalidfmrishape, rt_floattype, shared=usesharedmem
+ )
  filtereddata, filtereddata_shm = tide_util.allocarray(
  internalvalidfmrishape, rt_outfloattype, shared=usesharedmem
  )
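The allocation calls above all return a pair, the array plus a shared-memory handle, so the caller can release the segment later when shared=usesharedmem is in effect. A hypothetical sketch of that pattern using multiprocessing.shared_memory (my own guess at the idea behind tide_util.allocarray, not its actual implementation):

    import numpy as np
    from multiprocessing import shared_memory

    def allocarray(shape, dtype, shared=False):
        # hypothetical stand-in for tide_util.allocarray: return (array, shm handle or None)
        if not shared:
            return np.zeros(shape, dtype=dtype), None
        nbytes = int(np.prod(shape)) * np.dtype(dtype).itemsize
        shm = shared_memory.SharedMemory(create=True, size=nbytes)
        arr = np.ndarray(shape, dtype=dtype, buffer=shm.buf)
        arr.fill(0)
        return arr, shm

    rvalue, rvalue_shm = allocarray((1000,), "float64", shared=True)
    rvalue[0] = 1.0
    print(rvalue[0], rvalue_shm is not None)   # 1.0 True
    rvalue_shm.close()
    rvalue_shm.unlink()

Keeping the handle alongside the array makes later cleanup of the shared segments straightforward.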
@@ -731,6 +846,12 @@ def retroregress(args):
  varchange = initialvariance * 0.0
  varchange[divlocs] = 100.0 * (finalvariance[divlocs] / initialvariance[divlocs] - 1.0)

+ # calculate the voxelwise mean of the filtered data
+ lfofilteredmeanvalue = np.mean(
+ filtereddata,
+ axis=1,
+ )
+
  # save regional timecourses if masks are defined
  # read in the anatomic masks
  anatomiclist = [
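The variance-change line in the hunk above divides only at divlocs, and the new lfofilteredmeanvalue is just a per-voxel average over time. A minimal standalone numpy sketch of both steps (toy arrays; divlocs is assumed here to mark voxels with nonzero initial variance, which is how the guard is used):

    import numpy as np

    # toy per-voxel variance maps and filtered timecourses (4 voxels, 100 timepoints)
    initialvariance = np.array([0.0, 2.0, 4.0, 8.0])
    finalvariance = np.array([0.0, 1.0, 3.0, 2.0])
    filtereddata = np.random.default_rng(0).normal(size=(4, 100))

    # percent change in inband variance, computed only where division is safe
    divlocs = np.where(initialvariance > 0.0)
    varchange = initialvariance * 0.0
    varchange[divlocs] = 100.0 * (finalvariance[divlocs] / initialvariance[divlocs] - 1.0)
    print(varchange)                    # [  0. -50. -25. -75.]

    # voxelwise mean of the filtered data over time (axis=1), as in the new code
    lfofilteredmeanvalue = np.mean(filtereddata, axis=1)
    print(lfofilteredmeanvalue.shape)   # (4,)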
@@ -889,6 +1010,13 @@ def retroregress(args):
  "Change in inband variance after filtering, in percent",
  ),
  # (
+ # lfofilteredmeanvalue,
+ # "lfofilterMean",
+ # "map",
+ # None,
+ # "Voxelwise mean of the sLFO filtered data",
+ # )
+ # (
  # initialrawvariance,
  # "lfofilterTotalVarianceBefore",
  # "map",
@@ -1343,9 +1471,37 @@ def retroregress(args):
  Path(f"{outputname}_RETRODONE.txt").touch()


- def process_args(inputargs=None):
+ def process_args(inputargs: Optional[Any] = None) -> None:
  """
  Compile arguments for retroregress workflow.
+
+ This function processes input arguments for the retroregress workflow by parsing
+ command line arguments or provided input arguments using a predefined parser.
+
+ Parameters
+ ----------
+ inputargs : Any, optional
+ Input arguments to be processed. Can be None (default), a list of strings,
+ or other argument formats supported by the underlying parser. Default is None.
+
+ Returns
+ -------
+ argparse.Namespace
+ Parsed arguments namespace containing all processed arguments for the workflow.
+
+ Notes
+ -----
+ The function relies on `pf.setargs` and `_get_parser` which should be defined
+ in the module's scope. The returned arguments can be used directly in the
+ retroregress workflow pipeline.
+
+ Examples
+ --------
+ >>> # Using default arguments
+ >>> args = process_args()
+
+ >>> # Using custom input arguments
+ >>> args = process_args(['--input', 'data.csv', '--output', 'results.txt'])
  """
  args, argstowrite = pf.setargs(_get_parser, inputargs=inputargs)
  return args

rapidtide/workflows/roisummarize.py
@@ -18,17 +18,43 @@
  #
  import argparse
  import sys
+ from argparse import Namespace
+ from typing import Any, Callable, Dict, List, Optional, Tuple, Union

  import numpy as np
+ from numpy.typing import NDArray

  import rapidtide.io as tide_io
  import rapidtide.miscmath as tide_math
  import rapidtide.workflows.parser_funcs as pf


- def _get_parser():
+ def _get_parser() -> Any:
  """
- Argument parser for roisummarize
+ Argument parser for roisummarize.
+
+ This function constructs and returns an `argparse.ArgumentParser` object configured
+ for parsing command-line arguments used by the `roisummarize` tool. It defines
+ required inputs, optional arguments for sampling frequency, filtering, normalization,
+ and debugging options.
+
+ Returns
+ -------
+ argparse.ArgumentParser
+ Configured argument parser for the roisummarize tool.
+
+ Notes
+ -----
+ The parser supports two mutually exclusive ways to specify sampling frequency:
+ either via `--samplerate` or `--sampletstep`. These are equivalent and both
+ set the same internal `samplerate` parameter.
+
+ Examples
+ --------
+ >>> parser = _get_parser()
+ >>> args = parser.parse_args(['--inputfilename', 'input.txt',
+ ... '--templatefile', 'template.nii',
+ ... '--outputfile', 'output.txt'])
  """
  parser = argparse.ArgumentParser(
  prog="filttc",
@@ -101,7 +127,50 @@ def _get_parser():
  return parser


- def summarize4Dbylabel(inputvoxels, templatevoxels, normmethod="z", debug=False):
+ def summarize4Dbylabel(
+ inputvoxels: Any, templatevoxels: Any, normmethod: str = "z", debug: bool = False
+ ) -> None:
+ """
+ Summarize 4D voxel data by region labels from a template.
+
+ This function extracts time series data for each region defined in a template
+ and computes normalized mean time courses for each region across time points.
+
+ Parameters
+ ----------
+ inputvoxels : array-like
+ 4D array containing voxel data with shape (n_voxels, n_timepoints, n_other_dims)
+ templatevoxels : array-like
+ 3D array containing region labels with shape (n_voxels, 1, 1)
+ normmethod : str, optional
+ Normalization method to apply to time courses, default is "z"
+ Supported methods depend on tide_math.normalize function
+ debug : bool, optional
+ If True, print debugging information including voxel counts and shapes,
+ default is False
+
+ Returns
+ -------
+ timecourses : numpy.ndarray
+ 2D array of shape (n_regions, n_timepoints) containing normalized mean
+ time courses for each region
+
+ Notes
+ -----
+ - Regions are assumed to be labeled starting from 1
+ - Zero-valued voxels in template are ignored
+ - NaN values are converted to zeros before computing means
+ - The function uses tide_math.normalize for normalization
+
+ Examples
+ --------
+ >>> import numpy as np
+ >>> input_data = np.random.rand(100, 50, 1)
+ >>> template = np.random.randint(1, 4, (100, 1, 1))
+ >>> result = summarize4Dbylabel(input_data, template, normmethod="z")
+ >>> print(result.shape)
+ (3, 50)
+ """
  numregions = np.max(templatevoxels)
  numtimepoints = inputvoxels.shape[1]
  timecourses = np.zeros((numregions, numtimepoints), dtype="float")
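The new summarize4Dbylabel docstring boils down to: average the timecourses of all voxels that share a template label, then normalize each regional mean. A rough self-contained sketch of that idea (toy data, with a plain z-score standing in for tide_math.normalize; not the package implementation):

    import numpy as np

    def mean_timecourse_by_label(inputvoxels, templatevoxels):
        # inputvoxels: (n_voxels, n_timepoints); templatevoxels: (n_voxels,) integer labels, 0 = unlabeled
        numregions = int(np.max(templatevoxels))
        timecourses = np.zeros((numregions, inputvoxels.shape[1]), dtype=float)
        for region in range(1, numregions + 1):
            regiondata = np.nan_to_num(inputvoxels[templatevoxels == region, :])
            meantc = regiondata.mean(axis=0)
            timecourses[region - 1, :] = (meantc - meantc.mean()) / meantc.std()  # z-score stand-in
        return timecourses

    rng = np.random.default_rng(42)
    voxels = rng.normal(size=(100, 50))
    labels = rng.integers(0, 4, size=100)   # labels 0-3; label 0 is ignored
    labels[:3] = [1, 2, 3]                  # guarantee every region has at least one voxel
    print(mean_timecourse_by_label(voxels, labels).shape)   # (3, 50)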
@@ -118,7 +187,49 @@ def summarize4Dbylabel(inputvoxels, templatevoxels, normmethod="z", debug=False)
  return timecourses


- def summarize3Dbylabel(inputvoxels, templatevoxels, debug=False):
+ def summarize3Dbylabel(inputvoxels: Any, templatevoxels: Any, debug: bool = False) -> None:
+ """
+ Summarize 3D voxel data by label using mean, standard deviation, and median statistics.
+
+ This function processes 3D voxel data by grouping voxels according to labels in a template
+ and computes summary statistics for each labeled region. The input voxels are replaced
+ with the mean value of each region, and statistics are returned for further analysis.
+
+ Parameters
+ ----------
+ inputvoxels : array-like
+ 3D array containing the voxel values to be summarized
+ templatevoxels : array-like
+ 3D array containing integer labels defining regions of interest
+ debug : bool, optional
+ Flag to enable debug output (default is False)
+
+ Returns
+ -------
+ tuple
+ A tuple containing:
+ - outputvoxels : ndarray
+ 3D array with each labeled region replaced by its mean value
+ - regionstats : list
+ List of lists containing [mean, std, median] statistics for each region
+
+ Notes
+ -----
+ - Regions are labeled starting from 1 to max(templatevoxels)
+ - NaN values are converted to 0 during statistics calculation
+ - The function modifies the input arrays in-place during processing
+
+ Examples
+ --------
+ >>> import numpy as np
+ >>> input_data = np.random.rand(10, 10, 10)
+ >>> template = np.zeros((10, 10, 10), dtype=int)
+ >>> template[2:5, 2:5, 2:5] = 1
+ >>> template[6:8, 6:8, 6:8] = 2
+ >>> result, stats = summarize3Dbylabel(input_data, template)
+ >>> print(f"Region 1 mean: {stats[0][0]:.3f}")
+ >>> print(f"Region 2 mean: {stats[1][0]:.3f}")
+ """
  numregions = np.max(templatevoxels)
  outputvoxels = 0.0 * inputvoxels
  regionstats = []
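For the 3D case, the docstring describes painting each labeled region with its mean and collecting [mean, std, median] per region. A toy sketch of that bookkeeping (hypothetical helper, not the rapidtide code):

    import numpy as np

    def region_stats(inputvoxels, templatevoxels):
        # inputvoxels and templatevoxels share a shape; labels run 1..N, 0 is background
        outputvoxels = 0.0 * inputvoxels
        regionstats = []
        for region in range(1, int(np.max(templatevoxels)) + 1):
            vals = np.nan_to_num(inputvoxels[templatevoxels == region])
            outputvoxels[templatevoxels == region] = vals.mean()  # paint the region with its mean
            regionstats.append([vals.mean(), vals.std(), np.median(vals)])
        return outputvoxels, regionstats

    rng = np.random.default_rng(7)
    data = rng.normal(size=(10, 10, 10))
    labels = np.zeros((10, 10, 10), dtype=int)
    labels[2:5, 2:5, 2:5] = 1
    labels[6:8, 6:8, 6:8] = 2
    filled, stats = region_stats(data, labels)
    print([round(m, 3) for m, s, med in stats])   # one mean per region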
@@ -132,7 +243,64 @@ def summarize3Dbylabel(inputvoxels, templatevoxels, debug=False)
  return outputvoxels, regionstats


- def roisummarize(args):
+ def roisummarize(args: Any) -> None:
+ """
+ Summarize fMRI data by regions of interest (ROIs) using a template image.
+
+ This function reads input fMRI and template NIfTI files, checks spatial
+ compatibility, and computes either 3D or 4D summaries depending on the
+ number of timepoints in the input data. For 4D data, it applies a filter
+ and summarizes timecourses by ROI. For 3D data, it computes mean values and
+ region statistics.
+
+ Parameters
+ ----------
+ args : Any
+ Command-line arguments parsed by `_get_parser()`. Expected attributes include:
+ - `inputfilename` : str
+ Path to the input fMRI NIfTI file.
+ - `templatefile` : str
+ Path to the template NIfTI file defining ROIs.
+ - `samplerate` : str or float
+ Sampling rate for filtering. If "auto", defaults to 1.0.
+ - `numskip` : int
+ Number of initial timepoints to skip when summarizing 4D data.
+ - `normmethod` : str
+ Normalization method for 4D summarization.
+ - `debug` : bool
+ Enable debug mode for additional output.
+ - `outputfile` : str
+ Base name for output files.
+
+ Returns
+ -------
+ None
+ The function writes output files to disk:
+ - `<outputfile>_timecourses`: Timecourses for each ROI (4D case).
+ - `<outputfile>_meanvals`: Mean values per ROI (3D case).
+ - `<outputfile>_regionstats.txt`: Statistics for each ROI (3D case).
+
+ Notes
+ -----
+ - The function assumes that the template file defines ROIs with integer labels.
+ - For 4D data, the input is filtered using `pf.postprocessfilteropts`.
+ - If the spatial dimensions of the input and template files do not match,
+ the function exits with an error message.
+
+ Examples
+ --------
+ >>> import argparse
+ >>> args = argparse.Namespace(
+ ... inputfilename='fmri.nii',
+ ... templatefile='roi_template.nii',
+ ... samplerate='auto',
+ ... numskip=5,
+ ... normmethod='zscore',
+ ... debug=False,
+ ... outputfile='output'
+ ... )
+ >>> roisummarize(args)
+ """
  # grab the command line arguments then pass them off.
  try:
  args = _get_parser().parse_args()

rapidtide/workflows/runqualitycheck.py
@@ -17,14 +17,41 @@
  #
  #
  import argparse
+ from argparse import Namespace
+ from typing import Any, Callable, Dict, List, Optional, Tuple, Union

  import rapidtide.io as tide_io
  import rapidtide.qualitycheck as tide_quality


- def _get_parser():
+ def _get_parser() -> Any:
  """
- Argument parser for runqualitycheck
+ Argument parser for runqualitycheck.
+
+ This function creates and configures an argument parser for the runqualitycheck
+ command-line tool. The parser handles both required and optional arguments needed
+ to perform quality checks on rapidtide datasets.
+
+ Returns
+ -------
+ argparse.ArgumentParser
+ Configured argument parser object with all required and optional arguments
+ for the runqualitycheck tool.
+
+ Notes
+ -----
+ The argument parser is configured with:
+ - Required input file root name
+ - Optional gray matter mask specification
+ - Optional white matter mask specification
+ - Debug flag for additional output
+
+ Examples
+ --------
+ >>> parser = _get_parser()
+ >>> args = parser.parse_args(['dataset_root'])
+ >>> print(args.inputfileroot)
+ 'dataset_root'
  """
  parser = argparse.ArgumentParser(
  prog="runqualitycheck",
@@ -66,7 +93,48 @@ def _get_parser():
  return parser


- def runqualitycheck(args):
+ def runqualitycheck(args: Any) -> None:
+ """
+ Run quality check on input data and write results to JSON file.
+
+ This function performs a quality check on the input data using the tide_quality
+ module and writes the results to a JSON file with a standardized naming convention.
+
+ Parameters
+ ----------
+ args : Any
+ An object containing input arguments with the following attributes:
+ - inputfileroot : str
+ Root name of the input file(s)
+ - graymaskspec : str, optional
+ Specification for gray matter masking
+ - whitemaskspec : str, optional
+ Specification for white matter masking
+ - debug : bool, optional
+ Flag to enable debug mode
+
+ Returns
+ -------
+ None
+ This function does not return any value but writes results to a JSON file.
+
+ Notes
+ -----
+ The output JSON file will be named as '{inputfileroot}_desc-qualitymetrics_info.json'
+ where inputfileroot is the root name provided in the args object.
+
+ Examples
+ --------
+ >>> class Args:
+ ... def __init__(self):
+ ... self.inputfileroot = "sub-01_task-rest"
+ ... self.graymaskspec = "gray_mask.nii.gz"
+ ... self.whitemaskspec = "white_mask.nii.gz"
+ ... self.debug = False
+ ...
+ >>> args = Args()
+ >>> runqualitycheck(args)
+ """
  resultsdict = tide_quality.qualitycheck(
  args.inputfileroot,
  graymaskspec=args.graymaskspec,