rapidtide 3.0.11__py3-none-any.whl → 3.1.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (144)
  1. rapidtide/Colortables.py +492 -27
  2. rapidtide/OrthoImageItem.py +1049 -46
  3. rapidtide/RapidtideDataset.py +1533 -86
  4. rapidtide/_version.py +3 -3
  5. rapidtide/calccoherence.py +196 -29
  6. rapidtide/calcnullsimfunc.py +188 -40
  7. rapidtide/calcsimfunc.py +242 -42
  8. rapidtide/correlate.py +1203 -383
  9. rapidtide/data/examples/src/testLD +56 -0
  10. rapidtide/data/examples/src/testalign +1 -1
  11. rapidtide/data/examples/src/testdelayvar +0 -1
  12. rapidtide/data/examples/src/testfmri +53 -3
  13. rapidtide/data/examples/src/testglmfilt +5 -5
  14. rapidtide/data/examples/src/testhappy +29 -7
  15. rapidtide/data/examples/src/testppgproc +17 -0
  16. rapidtide/data/examples/src/testrolloff +11 -0
  17. rapidtide/data/models/model_cnn_pytorch/best_model.pth +0 -0
  18. rapidtide/data/models/model_cnn_pytorch/loss.png +0 -0
  19. rapidtide/data/models/model_cnn_pytorch/loss.txt +1 -0
  20. rapidtide/data/models/model_cnn_pytorch/model.pth +0 -0
  21. rapidtide/data/models/model_cnn_pytorch/model_meta.json +68 -0
  22. rapidtide/decorators.py +91 -0
  23. rapidtide/dlfilter.py +2226 -110
  24. rapidtide/dlfiltertorch.py +4842 -0
  25. rapidtide/externaltools.py +327 -12
  26. rapidtide/fMRIData_class.py +79 -40
  27. rapidtide/filter.py +1899 -810
  28. rapidtide/fit.py +2011 -581
  29. rapidtide/genericmultiproc.py +93 -18
  30. rapidtide/happy_supportfuncs.py +2047 -172
  31. rapidtide/helper_classes.py +584 -43
  32. rapidtide/io.py +2370 -372
  33. rapidtide/linfitfiltpass.py +346 -99
  34. rapidtide/makelaggedtcs.py +210 -24
  35. rapidtide/maskutil.py +448 -62
  36. rapidtide/miscmath.py +827 -121
  37. rapidtide/multiproc.py +210 -22
  38. rapidtide/patchmatch.py +242 -42
  39. rapidtide/peakeval.py +31 -31
  40. rapidtide/ppgproc.py +2203 -0
  41. rapidtide/qualitycheck.py +352 -39
  42. rapidtide/refinedelay.py +431 -57
  43. rapidtide/refineregressor.py +494 -189
  44. rapidtide/resample.py +671 -185
  45. rapidtide/scripts/applyppgproc.py +28 -0
  46. rapidtide/scripts/showxcorr_legacy.py +7 -7
  47. rapidtide/scripts/stupidramtricks.py +15 -17
  48. rapidtide/simFuncClasses.py +1052 -77
  49. rapidtide/simfuncfit.py +269 -69
  50. rapidtide/stats.py +540 -238
  51. rapidtide/tests/happycomp +9 -0
  52. rapidtide/tests/test_cleanregressor.py +1 -2
  53. rapidtide/tests/test_dlfiltertorch.py +627 -0
  54. rapidtide/tests/test_findmaxlag.py +24 -8
  55. rapidtide/tests/test_fullrunhappy_v1.py +0 -2
  56. rapidtide/tests/test_fullrunhappy_v2.py +0 -2
  57. rapidtide/tests/test_fullrunhappy_v3.py +11 -4
  58. rapidtide/tests/test_fullrunhappy_v4.py +10 -2
  59. rapidtide/tests/test_fullrunrapidtide_v7.py +1 -1
  60. rapidtide/tests/test_getparsers.py +11 -3
  61. rapidtide/tests/test_refinedelay.py +0 -1
  62. rapidtide/tests/test_simroundtrip.py +16 -8
  63. rapidtide/tests/test_stcorrelate.py +3 -1
  64. rapidtide/tests/utils.py +9 -8
  65. rapidtide/tidepoolTemplate.py +142 -38
  66. rapidtide/tidepoolTemplate_alt.py +165 -44
  67. rapidtide/tidepoolTemplate_big.py +189 -52
  68. rapidtide/util.py +1217 -118
  69. rapidtide/voxelData.py +684 -37
  70. rapidtide/wiener.py +136 -23
  71. rapidtide/wiener2.py +113 -7
  72. rapidtide/workflows/adjustoffset.py +105 -3
  73. rapidtide/workflows/aligntcs.py +85 -2
  74. rapidtide/workflows/applydlfilter.py +87 -10
  75. rapidtide/workflows/applyppgproc.py +540 -0
  76. rapidtide/workflows/atlasaverage.py +210 -47
  77. rapidtide/workflows/atlastool.py +100 -3
  78. rapidtide/workflows/calcSimFuncMap.py +288 -69
  79. rapidtide/workflows/calctexticc.py +201 -9
  80. rapidtide/workflows/ccorrica.py +101 -6
  81. rapidtide/workflows/cleanregressor.py +165 -31
  82. rapidtide/workflows/delayvar.py +171 -23
  83. rapidtide/workflows/diffrois.py +81 -3
  84. rapidtide/workflows/endtidalproc.py +144 -4
  85. rapidtide/workflows/fdica.py +195 -15
  86. rapidtide/workflows/filtnifti.py +70 -3
  87. rapidtide/workflows/filttc.py +74 -3
  88. rapidtide/workflows/fitSimFuncMap.py +202 -51
  89. rapidtide/workflows/fixtr.py +73 -3
  90. rapidtide/workflows/gmscalc.py +113 -3
  91. rapidtide/workflows/happy.py +801 -199
  92. rapidtide/workflows/happy2std.py +144 -12
  93. rapidtide/workflows/happy_parser.py +163 -23
  94. rapidtide/workflows/histnifti.py +118 -2
  95. rapidtide/workflows/histtc.py +84 -3
  96. rapidtide/workflows/linfitfilt.py +117 -4
  97. rapidtide/workflows/localflow.py +328 -28
  98. rapidtide/workflows/mergequality.py +79 -3
  99. rapidtide/workflows/niftidecomp.py +322 -18
  100. rapidtide/workflows/niftistats.py +174 -4
  101. rapidtide/workflows/pairproc.py +98 -4
  102. rapidtide/workflows/pairwisemergenifti.py +85 -2
  103. rapidtide/workflows/parser_funcs.py +1421 -40
  104. rapidtide/workflows/physiofreq.py +137 -11
  105. rapidtide/workflows/pixelcomp.py +207 -5
  106. rapidtide/workflows/plethquality.py +103 -21
  107. rapidtide/workflows/polyfitim.py +151 -11
  108. rapidtide/workflows/proj2flow.py +75 -2
  109. rapidtide/workflows/rankimage.py +111 -4
  110. rapidtide/workflows/rapidtide.py +368 -76
  111. rapidtide/workflows/rapidtide2std.py +98 -2
  112. rapidtide/workflows/rapidtide_parser.py +109 -9
  113. rapidtide/workflows/refineDelayMap.py +144 -33
  114. rapidtide/workflows/refineRegressor.py +675 -96
  115. rapidtide/workflows/regressfrommaps.py +161 -37
  116. rapidtide/workflows/resamplenifti.py +85 -3
  117. rapidtide/workflows/resampletc.py +91 -3
  118. rapidtide/workflows/retrolagtcs.py +99 -9
  119. rapidtide/workflows/retroregress.py +176 -26
  120. rapidtide/workflows/roisummarize.py +174 -5
  121. rapidtide/workflows/runqualitycheck.py +71 -3
  122. rapidtide/workflows/showarbcorr.py +149 -6
  123. rapidtide/workflows/showhist.py +86 -2
  124. rapidtide/workflows/showstxcorr.py +160 -3
  125. rapidtide/workflows/showtc.py +159 -3
  126. rapidtide/workflows/showxcorrx.py +190 -10
  127. rapidtide/workflows/showxy.py +185 -15
  128. rapidtide/workflows/simdata.py +264 -38
  129. rapidtide/workflows/spatialfit.py +77 -2
  130. rapidtide/workflows/spatialmi.py +250 -27
  131. rapidtide/workflows/spectrogram.py +305 -32
  132. rapidtide/workflows/synthASL.py +154 -3
  133. rapidtide/workflows/tcfrom2col.py +76 -2
  134. rapidtide/workflows/tcfrom3col.py +74 -2
  135. rapidtide/workflows/tidepool.py +2971 -130
  136. rapidtide/workflows/utils.py +19 -14
  137. rapidtide/workflows/utils_doc.py +293 -0
  138. rapidtide/workflows/variabilityizer.py +116 -3
  139. {rapidtide-3.0.11.dist-info → rapidtide-3.1.1.dist-info}/METADATA +10 -8
  140. {rapidtide-3.0.11.dist-info → rapidtide-3.1.1.dist-info}/RECORD +144 -128
  141. {rapidtide-3.0.11.dist-info → rapidtide-3.1.1.dist-info}/entry_points.txt +1 -0
  142. {rapidtide-3.0.11.dist-info → rapidtide-3.1.1.dist-info}/WHEEL +0 -0
  143. {rapidtide-3.0.11.dist-info → rapidtide-3.1.1.dist-info}/licenses/LICENSE +0 -0
  144. {rapidtide-3.0.11.dist-info → rapidtide-3.1.1.dist-info}/top_level.txt +0 -0
rapidtide/workflows/retroregress.py

@@ -23,9 +23,12 @@ import os
 import platform
 import sys
 import time
+from argparse import Namespace
 from pathlib import Path
+from typing import Any, Callable, Dict, List, Optional, Tuple, Union

 import numpy as np
+from numpy.typing import NDArray

 import rapidtide.filter as tide_filt
 import rapidtide.io as tide_io
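The typing and numpy.typing imports added here back the annotated signatures introduced throughout 3.1.1 (for example summarize4Dbylabel(inputvoxels: NDArray, ...) later in this diff). A minimal sketch of the convention; the helper below is hypothetical and not part of rapidtide:

from typing import Optional

import numpy as np
from numpy.typing import NDArray


def demean_timecourse(data: NDArray, scale: Optional[float] = None) -> NDArray:
    # hypothetical helper: remove the mean from a timecourse, optionally rescaling it
    out = np.asarray(data, dtype=np.float64)
    out = out - out.mean()
    if scale is not None:
        out = out * scale
    return out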
@@ -63,9 +66,30 @@ DEFAULT_REFINEDELAYNUMPOINTS = 501
 DEFAULT_DELAYOFFSETSPATIALFILT = -1


-def _get_parser():
+def _get_parser() -> Any:
     """
-    Argument parser for glmfilt
+    Argument parser for glmfilt.
+
+    This function constructs and returns an `argparse.ArgumentParser` object configured
+    for the `glmfilt` command-line tool, which performs sLFO (spatially localized
+    filter) filtering using maps generated from a previous rapidtide analysis.
+
+    Returns
+    -------
+    argparse.ArgumentParser
+        Configured argument parser for the glmfilt command-line interface.
+
+    Notes
+    -----
+    The parser includes both standard and experimental options. Experimental options
+    are not fully tested and may not work as expected.
+
+    Examples
+    --------
+    >>> parser = _get_parser()
+    >>> args = parser.parse_args(['fmri.nii', 'dataset_root'])
+    >>> print(args.fmrifile)
+    'fmri.nii'
     """
     parser = argparse.ArgumentParser(
         prog="retroregress",
@@ -232,7 +256,90 @@ def _get_parser():
     return parser


-def retroregress(args):
+def retroregress(args: Any) -> None:
+    """
+    Perform retrospective regression analysis on fMRI data to filter out slow
+    physiological noise (sLFO).
+
+    This function applies a retrospective regression approach to remove slow
+    physiological noise from fMRI data. It uses a delayed sLFO regressor to
+    model and remove the noise, optionally refining the delay using temporal
+    derivatives of the regressor.
+
+    Parameters
+    ----------
+    args : argparse.Namespace
+        Command-line arguments parsed by argparse. Expected attributes include:
+        - data_root : str
+            Root path for input data files
+        - output_prefix : str
+            Prefix for output files
+        - filter_type : str
+            Type of filtering to apply (e.g., 'bandpass')
+        - filter_low : float
+            Low cutoff frequency for filtering
+        - filter_high : float
+            High cutoff frequency for filtering
+        - regress_derivs : int
+            Number of temporal derivatives to include in the regressor
+        - refinedelay : bool
+            Whether to refine the delay using temporal derivatives
+        - refinecorr : bool
+            Whether to compute correlation refinement
+        - savemovingsignal : bool
+            Whether to save the filtered signal
+        - savenormalsLFOfiltfiles : bool
+            Whether to save standard output files
+        - saveminimumsLFOfiltfiles : bool
+            Whether to save minimum output files
+        - saveallsLFOfiltfiles : bool
+            Whether to save all output files
+        - makepseudofile : bool
+            Whether to create a pseudo-file
+        - nprocs : int
+            Number of processes to use
+        - debug : bool
+            Enable debug mode
+        - focaldebug : bool
+            Enable focal debug mode
+
+    Returns
+    -------
+    None
+        This function does not return a value but writes output files to disk.
+
+    Notes
+    -----
+    The function performs the following steps:
+    1. Reads input data files including mean image, correlation mask, and processed mask
+    2. Applies temporal filtering to the input data
+    3. Performs GLM regression using delayed sLFO regressors
+    4. Refines delay if requested using temporal derivatives
+    5. Saves output files including filtered data, regressors, and timing information
+
+    Examples
+    --------
+    >>> import argparse
+    >>> args = argparse.Namespace(
+    ...     data_root='path/to/data',
+    ...     output_prefix='output',
+    ...     filter_type='bandpass',
+    ...     filter_low=0.01,
+    ...     filter_high=0.1,
+    ...     regress_derivs=2,
+    ...     refinedelay=True,
+    ...     refinecorr=False,
+    ...     savemovingsignal=True,
+    ...     savenormalsLFOfiltfiles=True,
+    ...     saveminimumsLFOfiltfiles=True,
+    ...     saveallsLFOfiltfiles=False,
+    ...     makepseudofile=False,
+    ...     nprocs=4,
+    ...     debug=False,
+    ...     focaldebug=False
+    ... )
+    >>> retroregress(args)
+    """
     # get the pid of the parent process
     args.pid = os.getpid()

@@ -253,7 +360,7 @@ def retroregress(args):
         logger_filename=f"{outputname}_retrolog.txt",
         timing_filename=f"{outputname}_retroruntimings.tsv",
         error_filename=f"{outputname}_retroerrorlog.txt",
-        verbose=False,
+        isverbose=False,
         debug=args.debug,
     )
     TimingLGR.info("Start")
@@ -307,7 +414,7 @@ def retroregress(args):
     # save the raw and formatted command lines
     argstowrite = sys.argv
     thecommandline = " ".join(sys.argv[1:])
-    tide_io.writevec([thecommandline], f"{outputname}_retrocommandline.txt")
+    tide_io.writevec(np.asarray([thecommandline]), f"{outputname}_retrocommandline.txt")
     formattedcommandline = []
     for thetoken in argstowrite[0:3]:
         formattedcommandline.append(thetoken)
@@ -326,7 +433,9 @@ def retroregress(args):
         else:
            suffix = ""
         formattedcommandline[i] = prefix + formattedcommandline[i] + suffix
-    tide_io.writevec(formattedcommandline, f"{outputname}_retroformattedcommandline.txt")
+    tide_io.writevec(
+        np.asarray(formattedcommandline), f"{outputname}_retroformattedcommandline.txt"
+    )

     if args.nprocs < 1:
         args.nprocs = tide_multiproc.maxcpus()
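Both command-line records are now written by passing a NumPy array rather than a plain Python list to tide_io.writevec. A minimal sketch of the updated call pattern, assuming rapidtide 3.1.1 is installed (the file name and command-line contents are illustrative):

import numpy as np

import rapidtide.io as tide_io

# the writevec call sites in this file now wrap list inputs in np.asarray first
lines = ["retroregress fmri.nii dataset_root --nprocs 4"]  # illustrative command line
tide_io.writevec(np.asarray(lines), "example_retrocommandline.txt")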
@@ -362,19 +471,15 @@ def retroregress(args):
         sys.exit()

     if therunoptions["internalprecision"] == "double":
-        rt_floattype = "float64"
-        rt_floatset = np.float64
+        rt_floattype = np.float64
     else:
-        rt_floattype = "float32"
-        rt_floatset = np.float32
+        rt_floattype = np.float32

     # set the output precision
     if therunoptions["outputprecision"] == "double":
-        rt_outfloattype = "float64"
-        rt_outfloatset = np.float64
+        rt_outfloattype = np.float64
     else:
-        rt_outfloattype = "float32"
-        rt_outfloatset = np.float32
+        rt_outfloattype = np.float32
     therunoptions["saveminimumsLFOfiltfiles"] = args.saveminimumsLFOfiltfiles

     # read the fmri input files
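With rt_floatset gone, a single NumPy dtype now carries the precision choice. A sketch of the simplified pattern, assuming a run-options dictionary shaped like the one loaded above (the values are illustrative):

import numpy as np

therunoptions = {"internalprecision": "double", "outputprecision": "single"}  # illustrative values

# one dtype object replaces the old string-plus-constructor pair
rt_floattype = np.float64 if therunoptions["internalprecision"] == "double" else np.float32
rt_outfloattype = np.float64 if therunoptions["outputprecision"] == "double" else np.float32

# dtype objects drop straight into array constructors, so no separate rt_floatset is needed
workbuffer = np.zeros((10, 10), dtype=rt_floattype)
outbuffer = np.zeros((10, 10), dtype=rt_outfloattype)
print(workbuffer.dtype, outbuffer.dtype)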
@@ -519,16 +624,24 @@ def retroregress(args):
     sLFOfitmean, sLFOfitmean_shm = tide_util.allocarray(
         internalvalidspaceshape, rt_outfloattype, shared=usesharedmem
     )
-    rvalue, rvalue_shm = tide_util.allocarray(internalvalidspaceshape, rt_outfloattype, shared=usesharedmem)
-    r2value, r2value_shm = tide_util.allocarray(internalvalidspaceshape, rt_outfloattype, shared=usesharedmem)
-    fitNorm, fitNorm_shm = tide_util.allocarray(internalvalidspaceshapederivs, rt_outfloattype, shared=usesharedmem)
+    rvalue, rvalue_shm = tide_util.allocarray(
+        internalvalidspaceshape, rt_outfloattype, shared=usesharedmem
+    )
+    r2value, r2value_shm = tide_util.allocarray(
+        internalvalidspaceshape, rt_outfloattype, shared=usesharedmem
+    )
+    fitNorm, fitNorm_shm = tide_util.allocarray(
+        internalvalidspaceshapederivs, rt_outfloattype, shared=usesharedmem
+    )
     fitcoeff, fitcoeff_shm = tide_util.allocarray(
         internalvalidspaceshapederivs, rt_outfloattype, shared=usesharedmem
     )
     movingsignal, movingsignal_shm = tide_util.allocarray(
         internalvalidfmrishape, rt_outfloattype, shared=usesharedmem
     )
-    lagtc, lagtc_shm = tide_util.allocarray(internalvalidfmrishape, rt_floattype, shared=usesharedmem)
+    lagtc, lagtc_shm = tide_util.allocarray(
+        internalvalidfmrishape, rt_floattype, shared=usesharedmem
+    )
     filtereddata, filtereddata_shm = tide_util.allocarray(
         internalvalidfmrishape, rt_outfloattype, shared=usesharedmem
     )
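Every buffer in this block comes from tide_util.allocarray, which, as these call sites show, takes a shape and a dtype, optionally backs the array with shared memory, and returns the array together with its shared-memory handle. A hedged usage sketch with an illustrative shape (requires rapidtide to be installed):

import numpy as np

import rapidtide.util as tide_util

usesharedmem = False                 # illustrative; the workflow derives this from its memory settings
internalvalidspaceshape = (1000,)    # illustrative: one value per valid voxel

# allocate a per-voxel map; the second return value is the associated shared-memory handle
rvalue, rvalue_shm = tide_util.allocarray(
    internalvalidspaceshape, np.float32, shared=usesharedmem
)
rvalue[:] = 0.0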
@@ -658,7 +771,6 @@ def retroregress(args):
         maxdelay=args.maxdelay,
         numpoints=args.numpoints,
         histlen=therunoptions["histlen"],
-        rt_floatset=rt_floatset,
         rt_floattype=rt_floattype,
         debug=args.debug,
     )
@@ -731,6 +843,12 @@ def retroregress(args):
     varchange = initialvariance * 0.0
     varchange[divlocs] = 100.0 * (finalvariance[divlocs] / initialvariance[divlocs] - 1.0)

+    # calculate the voxelwise mean of the filtered data
+    lfofilteredmeanvalue = np.mean(
+        filtereddata,
+        axis=1,
+    )
+
     # save regional timecourses if masks are defined
     # read in the anatomic masks
     anatomiclist = [
@@ -889,6 +1007,13 @@ def retroregress(args):
             "Change in inband variance after filtering, in percent",
         ),
         # (
+        #     lfofilteredmeanvalue,
+        #     "lfofilterMean",
+        #     "map",
+        #     None,
+        #     "Voxelwise mean of the sLFO filtered data",
+        # )
+        # (
         #     initialrawvariance,
         #     "lfofilterTotalVarianceBefore",
         #     "map",
@@ -930,13 +1055,10 @@ def retroregress(args):
         (initialvariance, "lfofilterInbandVarianceBefore", "map", None),
         (finalvariance, "lfofilterInbandVarianceAfter", "map", None),
         (varchange, "CVRVariance", "map", None),
+        (rvalue, "CVRR", "map", None),
+        (r2value, "CVRR2", "map", None),
+        (fitcoeff[:, 0], "CVR", "map", "percent"),
     ]
-    if args.savenormalsLFOfiltfiles:
-        maplist += [
-            (rvalue, "CVRR", "map", None),
-            (r2value, "CVRR2", "map", None),
-            (fitcoeff, "CVR", "map", "percent"),
-        ]
     bidsdict = bidsbasedict.copy()

     if args.debug or args.focaldebug:
@@ -1343,9 +1465,37 @@ def retroregress(args):
     Path(f"{outputname}_RETRODONE.txt").touch()


-def process_args(inputargs=None):
+def process_args(inputargs: Optional[Any] = None) -> None:
     """
     Compile arguments for retroregress workflow.
+
+    This function processes input arguments for the retroregress workflow by parsing
+    command line arguments or provided input arguments using a predefined parser.
+
+    Parameters
+    ----------
+    inputargs : Any, optional
+        Input arguments to be processed. Can be None (default), a list of strings,
+        or other argument formats supported by the underlying parser. Default is None.
+
+    Returns
+    -------
+    argparse.Namespace
+        Parsed arguments namespace containing all processed arguments for the workflow.
+
+    Notes
+    -----
+    The function relies on `pf.setargs` and `_get_parser` which should be defined
+    in the module's scope. The returned arguments can be used directly in the
+    retroregress workflow pipeline.
+
+    Examples
+    --------
+    >>> # Using default arguments
+    >>> args = process_args()
+
+    >>> # Using custom input arguments
+    >>> args = process_args(['--input', 'data.csv', '--output', 'results.txt'])
     """
     args, argstowrite = pf.setargs(_get_parser, inputargs=inputargs)
     return args
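Since process_args just runs pf.setargs over _get_parser and returns the namespace, the workflow can be driven programmatically as well as from the shell. A hedged sketch; the positional arguments below are illustrative, see the parser definition for the actual options:

from rapidtide.workflows.retroregress import process_args, retroregress

# parse an explicit argument list instead of sys.argv, then run the workflow
args = process_args(["fmri.nii.gz", "dataset_outputroot"])  # illustrative positional arguments
retroregress(args)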
rapidtide/workflows/roisummarize.py

@@ -18,17 +18,42 @@
 #
 import argparse
 import sys
+from typing import Any, Callable, Dict, List, Optional, Tuple, Union

 import numpy as np
+from numpy.typing import NDArray

 import rapidtide.io as tide_io
 import rapidtide.miscmath as tide_math
 import rapidtide.workflows.parser_funcs as pf


-def _get_parser():
+def _get_parser() -> Any:
     """
-    Argument parser for roisummarize
+    Argument parser for roisummarize.
+
+    This function constructs and returns an `argparse.ArgumentParser` object configured
+    for parsing command-line arguments used by the `roisummarize` tool. It defines
+    required inputs, optional arguments for sampling frequency, filtering, normalization,
+    and debugging options.
+
+    Returns
+    -------
+    argparse.ArgumentParser
+        Configured argument parser for the roisummarize tool.
+
+    Notes
+    -----
+    The parser supports two mutually exclusive ways to specify sampling frequency:
+    either via `--samplerate` or `--sampletstep`. These are equivalent and both
+    set the same internal `samplerate` parameter.
+
+    Examples
+    --------
+    >>> parser = _get_parser()
+    >>> args = parser.parse_args(['--inputfilename', 'input.txt',
+    ...                           '--templatefile', 'template.nii',
+    ...                           '--outputfile', 'output.txt'])
     """
     parser = argparse.ArgumentParser(
         prog="filttc",
@@ -101,7 +126,50 @@ def _get_parser():
     return parser


-def summarize4Dbylabel(inputvoxels, templatevoxels, normmethod="z", debug=False):
+def summarize4Dbylabel(
+    inputvoxels: NDArray, templatevoxels: NDArray, normmethod: str = "z", debug: bool = False
+) -> NDArray:
+    """
+    Summarize 4D voxel data by region labels from a template.
+
+    This function extracts time series data for each region defined in a template
+    and computes normalized mean time courses for each region across time points.
+
+    Parameters
+    ----------
+    inputvoxels : NDArray
+        4D array containing voxel data with shape (n_voxels, n_timepoints, n_other_dims)
+    templatevoxels : NDArray
+        3D array containing region labels with shape (n_voxels, 1, 1)
+    normmethod : str, optional
+        Normalization method to apply to time courses, default is "z"
+        Supported methods depend on tide_math.normalize function
+    debug : bool, optional
+        If True, print debugging information including voxel counts and shapes,
+        default is False
+
+    Returns
+    -------
+    timecourses : NDArray
+        2D array of shape (n_regions, n_timepoints) containing normalized mean
+        time courses for each region
+
+    Notes
+    -----
+    - Regions are assumed to be labeled starting from 1
+    - Zero-valued voxels in template are ignored
+    - NaN values are converted to zeros before computing means
+    - The function uses tide_math.normalize for normalization
+
+    Examples
+    --------
+    >>> import numpy as np
+    >>> input_data = np.random.rand(100, 50, 1)
+    >>> template = np.random.randint(1, 4, (100, 1, 1))
+    >>> result = summarize4Dbylabel(input_data, template, normmethod="z")
+    >>> print(result.shape)
+    (3, 50)
+    """
     numregions = np.max(templatevoxels)
     numtimepoints = inputvoxels.shape[1]
     timecourses = np.zeros((numregions, numtimepoints), dtype="float")
@@ -118,7 +186,51 @@ def summarize4Dbylabel(inputvoxels, templatevoxels, normmethod="z", debug=False)
     return timecourses


-def summarize3Dbylabel(inputvoxels, templatevoxels, debug=False):
+def summarize3Dbylabel(
+    inputvoxels: NDArray, templatevoxels: NDArray, debug: bool = False
+) -> Tuple[NDArray, list]:
+    """
+    Summarize 3D voxel data by label using mean, standard deviation, and median statistics.
+
+    This function processes 3D voxel data by grouping voxels according to labels in a template
+    and computes summary statistics for each labeled region. The input voxels are replaced
+    with the mean value of each region, and statistics are returned for further analysis.
+
+    Parameters
+    ----------
+    inputvoxels : NDArray
+        3D array containing the voxel values to be summarized
+    templatevoxels : NDArray
+        3D array containing integer labels defining regions of interest
+    debug : bool, optional
+        Flag to enable debug output (default is False)
+
+    Returns
+    -------
+    tuple
+        A tuple containing:
+        - outputvoxels : NDArray
+            3D array with each labeled region replaced by its mean value
+        - regionstats : list
+            List of lists containing [mean, std, median] statistics for each region
+
+    Notes
+    -----
+    - Regions are labeled starting from 1 to max(templatevoxels)
+    - NaN values are converted to 0 during statistics calculation
+    - The function modifies the input arrays in-place during processing
+
+    Examples
+    --------
+    >>> import numpy as np
+    >>> input_data = np.random.rand(10, 10, 10)
+    >>> template = np.zeros((10, 10, 10), dtype=int)
+    >>> template[2:5, 2:5, 2:5] = 1
+    >>> template[6:8, 6:8, 6:8] = 2
+    >>> result, stats = summarize3Dbylabel(input_data, template)
+    >>> print(f"Region 1 mean: {stats[0][0]:.3f}")
+    >>> print(f"Region 2 mean: {stats[1][0]:.3f}")
+    """
     numregions = np.max(templatevoxels)
     outputvoxels = 0.0 * inputvoxels
     regionstats = []
@@ -132,7 +244,64 @@ def summarize3Dbylabel(inputvoxels, templatevoxels, debug=False):
     return outputvoxels, regionstats


-def roisummarize(args):
+def roisummarize(args: Any) -> None:
+    """
+    Summarize fMRI data by regions of interest (ROIs) using a template image.
+
+    This function reads input fMRI and template NIfTI files, checks spatial
+    compatibility, and computes either 3D or 4D summaries depending on the
+    number of timepoints in the input data. For 4D data, it applies a filter
+    and summarizes timecourses by ROI. For 3D data, it computes mean values and
+    region statistics.
+
+    Parameters
+    ----------
+    args : Any
+        Command-line arguments parsed by `_get_parser()`. Expected attributes include:
+        - `inputfilename` : str
+            Path to the input fMRI NIfTI file.
+        - `templatefile` : str
+            Path to the template NIfTI file defining ROIs.
+        - `samplerate` : str or float
+            Sampling rate for filtering. If "auto", defaults to 1.0.
+        - `numskip` : int
+            Number of initial timepoints to skip when summarizing 4D data.
+        - `normmethod` : str
+            Normalization method for 4D summarization.
+        - `debug` : bool
+            Enable debug mode for additional output.
+        - `outputfile` : str
+            Base name for output files.
+
+    Returns
+    -------
+    None
+        The function writes output files to disk:
+        - `<outputfile>_timecourses`: Timecourses for each ROI (4D case).
+        - `<outputfile>_meanvals`: Mean values per ROI (3D case).
+        - `<outputfile>_regionstats.txt`: Statistics for each ROI (3D case).
+
+    Notes
+    -----
+    - The function assumes that the template file defines ROIs with integer labels.
+    - For 4D data, the input is filtered using `pf.postprocessfilteropts`.
+    - If the spatial dimensions of the input and template files do not match,
+      the function exits with an error message.
+
+    Examples
+    --------
+    >>> import argparse
+    >>> args = argparse.Namespace(
+    ...     inputfilename='fmri.nii',
+    ...     templatefile='roi_template.nii',
+    ...     samplerate='auto',
+    ...     numskip=5,
+    ...     normmethod='zscore',
+    ...     debug=False,
+    ...     outputfile='output'
+    ... )
+    >>> roisummarize(args)
+    """
     # grab the command line arguments then pass them off.
     try:
         args = _get_parser().parse_args()
rapidtide/workflows/runqualitycheck.py

@@ -17,14 +17,41 @@
 #
 #
 import argparse
+from argparse import Namespace
+from typing import Any, Callable, Dict, List, Optional, Tuple, Union

 import rapidtide.io as tide_io
 import rapidtide.qualitycheck as tide_quality


-def _get_parser():
+def _get_parser() -> Any:
     """
-    Argument parser for runqualitycheck
+    Argument parser for runqualitycheck.
+
+    This function creates and configures an argument parser for the runqualitycheck
+    command-line tool. The parser handles both required and optional arguments needed
+    to perform quality checks on rapidtide datasets.
+
+    Returns
+    -------
+    argparse.ArgumentParser
+        Configured argument parser object with all required and optional arguments
+        for the runqualitycheck tool.
+
+    Notes
+    -----
+    The argument parser is configured with:
+    - Required input file root name
+    - Optional gray matter mask specification
+    - Optional white matter mask specification
+    - Debug flag for additional output
+
+    Examples
+    --------
+    >>> parser = _get_parser()
+    >>> args = parser.parse_args(['dataset_root'])
+    >>> print(args.inputfileroot)
+    'dataset_root'
     """
     parser = argparse.ArgumentParser(
         prog="runqualitycheck",
@@ -66,7 +93,48 @@ def _get_parser():
     return parser


-def runqualitycheck(args):
+def runqualitycheck(args: Any) -> None:
+    """
+    Run quality check on input data and write results to JSON file.
+
+    This function performs a quality check on the input data using the tide_quality
+    module and writes the results to a JSON file with a standardized naming convention.
+
+    Parameters
+    ----------
+    args : Any
+        An object containing input arguments with the following attributes:
+        - inputfileroot : str
+            Root name of the input file(s)
+        - graymaskspec : str, optional
+            Specification for gray matter masking
+        - whitemaskspec : str, optional
+            Specification for white matter masking
+        - debug : bool, optional
+            Flag to enable debug mode
+
+    Returns
+    -------
+    None
+        This function does not return any value but writes results to a JSON file.
+
+    Notes
+    -----
+    The output JSON file will be named as '{inputfileroot}_desc-qualitymetrics_info.json'
+    where inputfileroot is the root name provided in the args object.
+
+    Examples
+    --------
+    >>> class Args:
+    ...     def __init__(self):
+    ...         self.inputfileroot = "sub-01_task-rest"
+    ...         self.graymaskspec = "gray_mask.nii.gz"
+    ...         self.whitemaskspec = "white_mask.nii.gz"
+    ...         self.debug = False
+    ...
+    >>> args = Args()
+    >>> runqualitycheck(args)
+    """
     resultsdict = tide_quality.qualitycheck(
         args.inputfileroot,
         graymaskspec=args.graymaskspec,