rapidtide 3.0.5__py3-none-any.whl → 3.0.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. rapidtide/RapidtideDataset.py +17 -0
  2. rapidtide/_version.py +3 -3
  3. rapidtide/calccoherence.py +51 -73
  4. rapidtide/calcnullsimfunc.py +65 -111
  5. rapidtide/calcsimfunc.py +73 -91
  6. rapidtide/correlate.py +25 -6
  7. rapidtide/data/examples/src/testatlasaverage +22 -0
  8. rapidtide/data/examples/src/testfmri +16 -1
  9. rapidtide/data/examples/src/testhappy +57 -60
  10. rapidtide/data/examples/src/testsimdata +45 -28
  11. rapidtide/genericmultiproc.py +122 -0
  12. rapidtide/happy_supportfuncs.py +608 -107
  13. rapidtide/linfitfiltpass.py +8 -1
  14. rapidtide/makelaggedtcs.py +49 -78
  15. rapidtide/multiproc.py +5 -17
  16. rapidtide/refineregressor.py +59 -81
  17. rapidtide/resample.py +24 -14
  18. rapidtide/tests/.coveragerc +9 -0
  19. rapidtide/tests/test_congrid.py +68 -79
  20. rapidtide/tests/test_externaltools.py +69 -0
  21. rapidtide/tests/test_fastresampler.py +1 -0
  22. rapidtide/tests/test_fullrunrapidtide_v2.py +1 -0
  23. rapidtide/tests/test_nullcorr.py +2 -5
  24. rapidtide/tests/test_parserfuncs.py +46 -15
  25. rapidtide/tests/test_zRapidtideDataset.py +2 -2
  26. rapidtide/voxelData.py +17 -3
  27. rapidtide/workflows/ccorrica.py +1 -2
  28. rapidtide/workflows/cleanregressor.py +3 -2
  29. rapidtide/workflows/happy.py +62 -3
  30. rapidtide/workflows/happy_parser.py +36 -0
  31. rapidtide/workflows/rapidtide.py +18 -13
  32. rapidtide/workflows/rapidtide_parser.py +8 -1
  33. rapidtide/workflows/regressfrommaps.py +0 -2
  34. rapidtide/workflows/showarbcorr.py +19 -6
  35. rapidtide/workflows/showxcorrx.py +4 -8
  36. rapidtide/workflows/simdata.py +149 -65
  37. {rapidtide-3.0.5.dist-info → rapidtide-3.0.7.dist-info}/METADATA +1 -1
  38. {rapidtide-3.0.5.dist-info → rapidtide-3.0.7.dist-info}/RECORD +42 -43
  39. {rapidtide-3.0.5.dist-info → rapidtide-3.0.7.dist-info}/WHEEL +1 -1
  40. rapidtide/DerivativeDelay.py +0 -209
  41. rapidtide/calcandfitcorrpairs.py +0 -262
  42. rapidtide/transformerdlfilter.py +0 -126
  43. {rapidtide-3.0.5.dist-info → rapidtide-3.0.7.dist-info}/entry_points.txt +0 -0
  44. {rapidtide-3.0.5.dist-info → rapidtide-3.0.7.dist-info}/licenses/LICENSE +0 -0
  45. {rapidtide-3.0.5.dist-info → rapidtide-3.0.7.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,69 @@
1
+ #!/usr/bin/env python
2
+ # -*- coding: utf-8 -*-
3
+ #
4
+ # Copyright 2016-2025 Blaise Frederick
5
+ #
6
+ # Licensed under the Apache License, Version 2.0 (the "License");
7
+ # you may not use this file except in compliance with the License.
8
+ # You may obtain a copy of the License at
9
+ #
10
+ # http://www.apache.org/licenses/LICENSE-2.0
11
+ #
12
+ # Unless required by applicable law or agreed to in writing, software
13
+ # distributed under the License is distributed on an "AS IS" BASIS,
14
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15
+ # See the License for the specific language governing permissions and
16
+ # limitations under the License.
17
+ #
18
+ #
19
+ import os
20
+
21
+ import matplotlib as mpl
22
+
23
+ import rapidtide.externaltools as tide_exttools
24
+ from rapidtide.tests.utils import get_examples_path, get_test_temp_path, mse
25
+
26
+
27
+ def test_externaltools(debug=False, local=False):
28
+ # set input and output directories
29
+ if local:
30
+ exampleroot = "../data/examples/src"
31
+ testtemproot = "./tmp"
32
+ else:
33
+ exampleroot = get_examples_path()
34
+ testtemproot = get_test_temp_path()
35
+
36
+ thefsldir = tide_exttools.fslinfo()
37
+ if debug:
38
+ print(f"{thefsldir=}")
39
+
40
+ if not local:
41
+ os.environ["FSLDIR"] = "/plausible_FSLDIR"
42
+
43
+ thefsldir = tide_exttools.fslinfo()
44
+ if debug:
45
+ print(f"{thefsldir=}")
46
+
47
+ fslexists, c3dexists, antsexists = tide_exttools.whatexists()
48
+ if debug:
49
+ print(f"{fslexists=}, {c3dexists=}, {antsexists=}")
50
+
51
+ fslsubcmd, flirtcmd, applywarpcmd = tide_exttools.getfslcmds()
52
+ if debug:
53
+ print(f"{fslsubcmd=}, {flirtcmd=}, {applywarpcmd=}")
54
+
55
+ tide_exttools.runflirt(
56
+ "inputname", "targetname", "xform", "outputname", warpfile="thewarp", fake=True
57
+ )
58
+ tide_exttools.runflirt("inputname", "targetname", "xform", "outputname", fake=True)
59
+
60
+ tide_exttools.n4correct("inputname", "outputdir", fake=True)
61
+
62
+ tide_exttools.antsapply(
63
+ "inputname", "targetname", "outputroot", ["transform1", "transform2"], fake=True
64
+ )
65
+
66
+
67
+ if __name__ == "__main__":
68
+ mpl.use("TkAgg")
69
+ test_externaltools(debug=True, local=True)
@@ -45,6 +45,7 @@ def test_FastResampler(debug=False):
45
45
  if debug:
46
46
  print(f"{genlaggedtc.initstart=}, {genlaggedtc.initend=}, {genlaggedtc.initstep=}")
47
47
  print(f"{genlaggedtc.hiresstart=}, {genlaggedtc.hiresend=}, {genlaggedtc.hiresstep=}")
48
+ genlaggedtc.info()
48
49
 
49
50
  # save and reload with another name
50
51
  resamplername = os.path.join(get_test_temp_path(), "savedresampler")
@@ -68,6 +68,7 @@ def test_fullrunrapidtide_v2(debug=False, local=False, displayplots=False):
68
68
  "--outputlevel",
69
69
  "max",
70
70
  "--calccoherence",
71
+ "--cleanrefined",
71
72
  "--dispersioncalc",
72
73
  "--nprocs",
73
74
  "1",
@@ -128,14 +128,11 @@ def test_nullsimfunc(debug=False, displayplots=False):
128
128
  histograms = []
129
129
  for thenprocs in [1, -1]:
130
130
  for i in range(numpasses):
131
- corrlist = tide_nullsimfunc.getNullDistributionDatax(
132
- sourcedata,
131
+ corrlist = tide_nullsimfunc.getNullDistributionData(
133
132
  Fs,
134
133
  theCorrelator,
135
134
  thefitter,
136
- despeckle_thresh=5.0,
137
- fixdelay=False,
138
- initialdelayvalue=0.0,
135
+ None,
139
136
  numestreps=optiondict["numestreps"],
140
137
  nprocs=thenprocs,
141
138
  showprogressbar=optiondict["showprogressbar"],
@@ -16,11 +16,27 @@
16
16
  # limitations under the License.
17
17
  #
18
18
  #
19
- import os
20
19
  import argparse
20
+ import os
21
+
22
+ import rapidtide.io as tide_io
21
23
  import rapidtide.workflows.parser_funcs as pf
22
24
  from rapidtide.tests.utils import get_examples_path, get_test_temp_path
23
25
 
26
+
27
+ def proccolspec(thecolspec):
28
+ if thecolspec is not None:
29
+ # see if this is a numeric or text list
30
+ tokenlist = (thecolspec.split(",")[0]).split("-")
31
+ try:
32
+ firstelement = int(tokenlist[0])
33
+ return tide_io.colspectolist(thecolspec)
34
+ except ValueError:
35
+ return thecolspec.split(",")
36
+ else:
37
+ return [None]
38
+
39
+
24
40
  def _get_parser():
25
41
  """
26
42
  Argument parser for adjust offset
@@ -41,7 +57,7 @@ def _get_parser():
41
57
  return parser
42
58
 
43
59
 
44
- def test_parserfuncs(debug=False, local=False, displayplots=False):
60
+ def test_parserfuncs(debug=False, local=False):
45
61
  # set input and output directories
46
62
  if local:
47
63
  exampleroot = "../data/examples/src"
@@ -50,21 +66,36 @@ def test_parserfuncs(debug=False, local=False, displayplots=False):
50
66
  exampleroot = get_examples_path()
51
67
  testtemproot = get_test_temp_path()
52
68
 
53
-
54
69
  theparser = _get_parser()
55
70
 
56
- filename = os.path.join(exampleroot,"sub-RAPIDTIDETEST_desc-oversampledmovingregressor_timeseries.json")
57
- retval = pf.is_valid_file(theparser, filename)
58
- print(filename, retval)
59
-
60
- #filename = os.path.join(exampleroot,"sub-RAPIDTIDETEST_desc-oversampledmovingregressor_timeseriesxyz.json")
61
- #retval = pf.is_valid_file(theparser, filename)
62
- #print(filename, retval)
63
-
64
- filename = os.path.join(exampleroot,"sub-RAPIDTIDETEST_desc-oversampledmovingregressor_timeseries.json:acolname")
65
- retval = pf.is_valid_file(theparser, filename)
66
- print(filename, retval)
71
+ testvecs = [
72
+ ["sub-RAPIDTIDETEST_desc-oversampledmovingregressor_timeseries.json", [None]],
73
+ [
74
+ "sub-RAPIDTIDETEST_desc-oversampledmovingregressor_timeseries.json:acolname",
75
+ ["acolname"],
76
+ ],
77
+ [
78
+ "sub-RAPIDTIDETEST_desc-oversampledmovingregressor_timeseries.json:acolname,bcolname",
79
+ ["acolname", "bcolname"],
80
+ ],
81
+ [
82
+ "sub-RAPIDTIDETEST_desc-oversampledmovingregressor_timeseries.tsv.gz:1,2,5-10",
83
+ [1, 2, 5, 6, 7, 8, 9, 10],
84
+ ],
85
+ [
86
+ "sub-RAPIDTIDETEST_desc-oversampledmovingregressor_timeseries.json:3,2,7,5-10,6-11",
87
+ [2, 3, 5, 6, 7, 8, 9, 10, 11],
88
+ ],
89
+ ]
90
+ for infile, expectedcols in testvecs:
91
+ filename = os.path.join(exampleroot, infile)
92
+ retval = pf.is_valid_file(theparser, filename)
93
+ thename, thecolspec = tide_io.parsefilespec(retval)
94
+ collist = proccolspec(thecolspec)
95
+ if debug:
96
+ print(filename, retval, thename, thecolspec, collist)
97
+ assert collist == expectedcols
67
98
 
68
99
 
69
100
  if __name__ == "__main__":
70
- test_parserfuncs(debug=True, local=True, displayplots=True)
101
+ test_parserfuncs(debug=True, local=True)
@@ -26,7 +26,7 @@ from rapidtide.RapidtideDataset import RapidtideDataset
26
26
  from rapidtide.tests.utils import get_examples_path, get_test_temp_path
27
27
 
28
28
 
29
- def main(runninglocally=False):
29
+ def main(runninglocally=False, debug=False):
30
30
  # initialize default values
31
31
  if runninglocally:
32
32
  datafileroot = "../data/examples/dst/sub-RAPIDTIDETEST_"
@@ -74,4 +74,4 @@ def main(runninglocally=False):
74
74
 
75
75
 
76
76
  if __name__ == "__main__":
77
- main(runninglocally=True)
77
+ main(runninglocally=True, debug=True)
rapidtide/voxelData.py CHANGED
@@ -100,6 +100,7 @@ class VoxelData:
100
100
  ysize = None
101
101
  numslices = None
102
102
  timepoints = None
103
+ dimensions = None
103
104
  realtimepoints = None
104
105
  xdim = None
105
106
  ydim = None
@@ -160,6 +161,10 @@ class VoxelData:
160
161
  self.xsize, self.ysize, self.numslices, self.timepoints = tide_io.parseniftidims(
161
162
  self.thedims
162
163
  )
164
+ if self.timepoints == 1:
165
+ self.dimensions = 3
166
+ else:
167
+ self.dimensions = 4
163
168
  self.numslicelocs = int(self.xsize) * int(self.ysize)
164
169
  self.numspatiallocs = int(self.xsize) * int(self.ysize) * int(self.numslices)
165
170
  self.cifti_hdr = None
@@ -283,15 +288,24 @@ class VoxelData:
283
288
  if not self.resident:
284
289
  self.load()
285
290
  if self.filetype == "nifti":
286
- return self.nim_data[:, :, :, self.validstart : self.validend + 1]
291
+ if self.dimensions == 4 or self.filetype == "cifti" or self.filetype == "text":
292
+ return self.nim_data[:, :, :, self.validstart : self.validend + 1]
293
+ else:
294
+ return self.nim_data[:, :, :]
287
295
  else:
288
296
  return self.nim_data[:, self.validstart : self.validend + 1]
289
297
 
290
298
  def byvoxel(self):
291
- return self.byvoltrimmed().reshape(self.numspatiallocs, -1)
299
+ if self.dimensions == 4 or self.filetype == "cifti" or self.filetype == "text":
300
+ return self.byvoltrimmed().reshape(self.numspatiallocs, -1)
301
+ else:
302
+ return self.byvoltrimmed().reshape(self.numspatiallocs)
292
303
 
293
304
  def byslice(self):
294
- return self.byvoltrimmed().reshape(self.numslicelocs, self.numslices, -1)
305
+ if self.dimensions == 4 or self.filetype == "cifti" or self.filetype == "text":
306
+ return self.byvoltrimmed().reshape(self.numslicelocs, self.numslices, -1)
307
+ else:
308
+ return self.byvoltrimmed().reshape(self.numslicelocs, self.numslices)
295
309
 
296
310
  def validdata(self):
297
311
  if self.validvoxels is None:
@@ -156,8 +156,7 @@ def ccorrica(args):
156
156
  else:
157
157
  Fs = args.samplerate
158
158
  else:
159
- if args.samplerate != "auto":
160
- Fs = args.samplerate
159
+ Fs = samplerate
161
160
 
162
161
  sampletime = 1.0 / Fs
163
162
  thedims = tcdata.shape
@@ -103,8 +103,9 @@ def cleanregressor(
103
103
  theCorrelator.setlimits(acmininpts, acmaxinpts)
104
104
  # theCorrelator.setlimits(lagmininpts, lagmaxinpts)
105
105
  print("check_autocorrelation:", acmininpts, acmaxinpts, lagmininpts, lagmaxinpts)
106
- thexcorr, accheckcorrscale, dummy = theCorrelator.run(
107
- resampref_y[osvalidsimcalcstart : osvalidsimcalcend + 1]
106
+ thexcorr, accheckcorrscale, theglobalmax = theCorrelator.run(
107
+ resampref_y[osvalidsimcalcstart : osvalidsimcalcend + 1],
108
+ trim=False,
108
109
  )
109
110
  theFitter.setcorrtimeaxis(accheckcorrscale)
110
111
  (
@@ -24,7 +24,7 @@ import warnings
24
24
  from pathlib import Path
25
25
 
26
26
  import numpy as np
27
- from tqdm import tqdm
27
+ from scipy.stats import pearsonr
28
28
 
29
29
  import rapidtide.correlate as tide_corr
30
30
  import rapidtide.filter as tide_filt
@@ -314,7 +314,11 @@ def happy_main(argparsingfunc):
314
314
  validprojvoxels,
315
315
  time,
316
316
  timings,
317
+ LGR=None,
318
+ mpcode=args.mpdetrend,
319
+ nprocs=args.nprocs,
317
320
  showprogressbar=args.showprogressbar,
321
+ debug=args.debug,
318
322
  )
319
323
  normdata_byslice = normdata.reshape((xsize * ysize, numslices, timepoints))
320
324
 
@@ -1241,7 +1245,9 @@ def happy_main(argparsingfunc):
1241
1245
  demeandata_byslice = demeandata.reshape((xsize * ysize, numslices, timepoints))
1242
1246
  means_byslice = means.reshape((xsize * ysize, numslices))
1243
1247
 
1244
- timings.append(["Phase projection to image started" + passstring, time.time(), None, None])
1248
+ timings.append(
1249
+ ["Phase projection to image prep started" + passstring, time.time(), None, None]
1250
+ )
1245
1251
  print("Starting phase projection")
1246
1252
  proctrs = range(timepoints) # proctrs is the list of all fmri trs to be projected
1247
1253
  procpoints = range(
@@ -1255,6 +1261,14 @@ def happy_main(argparsingfunc):
1255
1261
  proctrs = np.where(censortrs < 1)[0]
1256
1262
  procpoints = np.where(censorpoints < 1)[0]
1257
1263
 
1264
+ # preload congrid
1265
+ if args.preloadcongrid:
1266
+ print("Preloading congrid values...")
1267
+ happy_support.preloadcongrid(
1268
+ outphases, args.congridbins, gridkernel=args.gridkernel, cyclic=True, debug=False
1269
+ )
1270
+ print("done")
1271
+
1258
1272
  # do phase averaging
1259
1273
  app_bypoint, weights_bypoint = happy_support.cardiaccycleaverage(
1260
1274
  instantaneous_cardiacphase,
@@ -1264,6 +1278,7 @@ def happy_main(argparsingfunc):
1264
1278
  args.congridbins,
1265
1279
  args.gridkernel,
1266
1280
  args.centric,
1281
+ cache=args.congridcache,
1267
1282
  cyclic=True,
1268
1283
  )
1269
1284
  if thispass == numpasses - 1:
@@ -1356,9 +1371,53 @@ def happy_main(argparsingfunc):
1356
1371
  debug=args.debug,
1357
1372
  )
1358
1373
 
1374
+ timings.append(
1375
+ ["Phase projection to image prep ended" + passstring, time.time(), None, None]
1376
+ )
1359
1377
  if not args.verbose:
1360
- print("Phase projecting...")
1378
+ print("Setting up for phase projection...")
1361
1379
 
1380
+ # make a vessel map using Wright's method
1381
+ if args.wrightiterations > 0:
1382
+ timings.append(
1383
+ [
1384
+ "Wright mask generation started" + passstring,
1385
+ time.time(),
1386
+ None,
1387
+ None,
1388
+ ]
1389
+ )
1390
+ wrightcorrs = happy_support.wrightmap(
1391
+ input_data,
1392
+ demeandata_byslice,
1393
+ rawapp_byslice,
1394
+ projmask_byslice,
1395
+ outphases,
1396
+ cardphasevals,
1397
+ proctrs,
1398
+ args.congridbins,
1399
+ args.gridkernel,
1400
+ args.destpoints,
1401
+ iterations=args.wrightiterations,
1402
+ nprocs=args.nprocs,
1403
+ verbose=False,
1404
+ debug=args.debug,
1405
+ )
1406
+
1407
+ theheader = input_data.copyheader(numtimepoints=1)
1408
+ wrightfilename = f"{outputroot}_desc-wrightcorrspass{thispass + 1}_map"
1409
+ tide_io.writedicttojson(bidsbasedict, wrightfilename + ".json")
1410
+ tide_io.savetonifti(wrightcorrs, theheader, wrightfilename)
1411
+ timings.append(
1412
+ [
1413
+ "Wright mask generation completed" + passstring,
1414
+ time.time(),
1415
+ None,
1416
+ None,
1417
+ ]
1418
+ )
1419
+
1420
+ timings.append(["Phase projection to image started" + passstring, time.time(), None, None])
1362
1421
  # make a lowpass filter for the projected data. Limit frequency to 3 cycles per 2pi (1/6th Fs)
1363
1422
  phaseFs = 1.0 / phasestep
1364
1423
  phaseFc = phaseFs / 6.0
@@ -593,6 +593,14 @@ def _get_parser():
593
593
  help="Will disable showing progress bars (helpful if stdout is going to a file). ",
594
594
  default=True,
595
595
  )
596
+ misc_opts.add_argument(
597
+ "--wrightiterations",
598
+ dest="wrightiterations",
599
+ action="store",
600
+ type=lambda x: pf.is_int(parser, x),
601
+ help="Number of iterations for calculating Wright map. Set to 0 to disable.",
602
+ default=0,
603
+ )
596
604
  pf.addtagopts(
597
605
  misc_opts,
598
606
  helptext="Additional key, value pairs to add to the info json file (useful for tracking analyses).",
@@ -692,6 +700,34 @@ def _get_parser():
692
700
  help="Decrease the number of intermediate output files. ",
693
701
  default=0,
694
702
  )
703
+ debug_opts.add_argument(
704
+ "--nompdetrend",
705
+ dest="mpdetrend",
706
+ action="store_false",
707
+ help="Disable multiproc detrending.",
708
+ default=True,
709
+ )
710
+ debug_opts.add_argument(
711
+ "--nompphaseproject",
712
+ dest="mpphaseproject",
713
+ action="store_false",
714
+ help="Disable multiproc phase projection.",
715
+ default=True,
716
+ )
717
+ debug_opts.add_argument(
718
+ "--noprefillcongrid",
719
+ dest="preloadcongrid",
720
+ action="store_false",
721
+ help="Don't prefill the congrid value cache.",
722
+ default=True,
723
+ )
724
+ debug_opts.add_argument(
725
+ "--nocongridcache",
726
+ dest="congridcache",
727
+ action="store_false",
728
+ help="Disable the congrid value cache completely.",
729
+ default=True,
730
+ )
695
731
 
696
732
  return parser
697
733
 
@@ -16,7 +16,6 @@
16
16
  # limitations under the License.
17
17
  #
18
18
  #
19
- import copy
20
19
  import gc
21
20
  import logging
22
21
  import os
@@ -28,8 +27,6 @@ from pathlib import Path
28
27
  import numpy as np
29
28
  from scipy import ndimage
30
29
  from scipy.stats import rankdata
31
- from sklearn.decomposition import PCA
32
- from tqdm import tqdm
33
30
 
34
31
  import rapidtide.calccoherence as tide_calccoherence
35
32
  import rapidtide.calcnullsimfunc as tide_nullsimfunc
@@ -55,7 +52,6 @@ import rapidtide.voxelData as tide_voxelData
55
52
  import rapidtide.wiener as tide_wiener
56
53
  import rapidtide.workflows.cleanregressor as tide_cleanregressor
57
54
  import rapidtide.workflows.regressfrommaps as tide_regressfrommaps
58
- from rapidtide.tests.utils import mse
59
55
 
60
56
  from .utils import setup_logger
61
57
 
@@ -395,6 +391,11 @@ def rapidtide_main(argparsingfunc):
395
391
  csfmask = anatomicmasks[3]
396
392
 
397
393
  # do spatial filtering if requested
394
+ if theinputdata.filetype == "nifti":
395
+ unfiltmeanvalue = np.mean(
396
+ theinputdata.byvoxel(),
397
+ axis=1,
398
+ )
398
399
  optiondict["gausssigma"] = theinputdata.smooth(
399
400
  optiondict["gausssigma"],
400
401
  brainmask=brainmask,
@@ -1731,18 +1732,17 @@ def rapidtide_main(argparsingfunc):
1731
1732
  else:
1732
1733
  theSimFunc = theCorrelator
1733
1734
  tide_util.disablemkl(optiondict["nprocs_getNullDist"], debug=threaddebug)
1734
- simdistdata = tide_nullsimfunc.getNullDistributionDatax(
1735
- cleaned_resampref_y,
1735
+ simdistdata = tide_nullsimfunc.getNullDistributionData(
1736
1736
  oversampfreq,
1737
1737
  theSimFunc,
1738
1738
  theFitter,
1739
+ LGR,
1739
1740
  numestreps=optiondict["numestreps"],
1740
1741
  nprocs=optiondict["nprocs_getNullDist"],
1741
1742
  alwaysmultiproc=optiondict["alwaysmultiproc"],
1742
1743
  showprogressbar=optiondict["showprogressbar"],
1743
1744
  chunksize=optiondict["mp_chunksize"],
1744
1745
  permutationmethod=optiondict["permutationmethod"],
1745
- fixdelay=optiondict["fixdelay"],
1746
1746
  rt_floatset=np.float64,
1747
1747
  rt_floattype="float64",
1748
1748
  )
@@ -2572,8 +2572,6 @@ def rapidtide_main(argparsingfunc):
2572
2572
  chunksize=optiondict["mp_chunksize"],
2573
2573
  nprocs=1,
2574
2574
  alwaysmultiproc=optiondict["alwaysmultiproc"],
2575
- rt_floatset=rt_floatset,
2576
- rt_floattype=rt_floattype,
2577
2575
  )
2578
2576
  tide_util.enablemkl(optiondict["mklthreads"], debug=threaddebug)
2579
2577
 
@@ -2658,6 +2656,9 @@ def rapidtide_main(argparsingfunc):
2658
2656
  optiondict["currentstage"] = "presLFOfit"
2659
2657
  tide_io.writedicttojson(optiondict, f"{outputname}_desc-runoptions_info.json")
2660
2658
  if optiondict["dolinfitfilt"] or optiondict["docvrmap"] or optiondict["refinedelay"]:
2659
+ sLFOfiltmask = fitmask + 0.0
2660
+ if optiondict["nosLFOfiltmask"]:
2661
+ sLFOfiltmask = sLFOfiltmask * 0.0 + 1.0
2661
2662
  if optiondict["dolinfitfilt"]:
2662
2663
  if optiondict["refinedelay"]:
2663
2664
  TimingLGR.info("Setting up for delay refinement and sLFO filtering")
@@ -2837,7 +2838,7 @@ def rapidtide_main(argparsingfunc):
2837
2838
  validvoxels,
2838
2839
  initial_fmri_x,
2839
2840
  lagtimes,
2840
- fitmask,
2841
+ sLFOfiltmask,
2841
2842
  genlagtc,
2842
2843
  mode,
2843
2844
  outputname,
@@ -2933,7 +2934,7 @@ def rapidtide_main(argparsingfunc):
2933
2934
  namesuffix = "_desc-delayoffset_hist"
2934
2935
  if optiondict["dolinfitfilt"]:
2935
2936
  tide_stats.makeandsavehistogram(
2936
- delayoffset[np.where(fitmask > 0)],
2937
+ delayoffset[np.where(sLFOfiltmask > 0)],
2937
2938
  optiondict["histlen"],
2938
2939
  1,
2939
2940
  outputname + namesuffix,
@@ -2971,7 +2972,7 @@ def rapidtide_main(argparsingfunc):
2971
2972
  validvoxels,
2972
2973
  initial_fmri_x,
2973
2974
  lagstouse,
2974
- fitmask,
2975
+ sLFOfiltmask,
2975
2976
  genlagtc,
2976
2977
  mode,
2977
2978
  outputname,
@@ -3477,7 +3478,7 @@ def rapidtide_main(argparsingfunc):
3477
3478
  validvoxels,
3478
3479
  initial_fmri_x,
3479
3480
  lagstouse,
3480
- fitmask,
3481
+ sLFOfiltmask,
3481
3482
  genlagtc,
3482
3483
  mode,
3483
3484
  outputname,
@@ -3545,7 +3546,10 @@ def rapidtide_main(argparsingfunc):
3545
3546
  tide_util.cleanup_shm(fitNorm_shm)
3546
3547
 
3547
3548
  # write the 3D maps that don't need to be remapped
3549
+ if theinputdata.filetype != "nifti":
3550
+ unfiltmeanvalue = meanvalue
3548
3551
  maplist = [
3552
+ (unfiltmeanvalue, "unfiltmean", "map", None, "Voxelwise mean of fmri data before smoothing"),
3549
3553
  (meanvalue, "mean", "map", None, "Voxelwise mean of fmri data"),
3550
3554
  (stddevvalue, "std", "map", None, "Voxelwise standard deviation of fmri data"),
3551
3555
  (covvalue, "CoV", "map", None, "Voxelwise coefficient of variation of fmri data"),
@@ -3570,6 +3574,7 @@ def rapidtide_main(argparsingfunc):
3570
3574
  cifti_hdr=theinputdata.cifti_hdr,
3571
3575
  )
3572
3576
  del meanvalue
3577
+ del unfiltmeanvalue
3573
3578
 
3574
3579
  if optiondict["numestreps"] > 0:
3575
3580
  masklist = []
@@ -1462,6 +1462,13 @@ def _get_parser():
1462
1462
  help=("Do regressor refinement on the final pass."),
1463
1463
  default=False,
1464
1464
  )
1465
+ experimental.add_argument(
1466
+ "--nosLFOfiltmask",
1467
+ dest="nosLFOfiltmask",
1468
+ action="store_true",
1469
+ help=("Don't spatially limit sLFO filter to fit voxels."),
1470
+ default=False,
1471
+ )
1465
1472
  experimental.add_argument(
1466
1473
  "--territorymap",
1467
1474
  dest="territorymap",
@@ -1810,7 +1817,7 @@ def process_args(inputargs=None):
1810
1817
  args["hardlimit"] = True
1811
1818
  # The fraction of the main peak over which points are included in the peak
1812
1819
  args["searchfrac"] = 0.5
1813
- args["mp_chunksize"] = 50000
1820
+ args["mp_chunksize"] = 500
1814
1821
  args["patchminsize"] = DEFAULT_PATCHMINSIZE
1815
1822
  args["patchfwhm"] = DEFAULT_PATCHFWHM
1816
1823
 
@@ -22,7 +22,6 @@ import numpy as np
22
22
  import rapidtide.io as tide_io
23
23
  import rapidtide.linfitfiltpass as tide_linfitfiltpass
24
24
  import rapidtide.makelaggedtcs as tide_makelagged
25
- import rapidtide.util as tide_util
26
25
 
27
26
 
28
27
  def regressfrommaps(
@@ -133,7 +132,6 @@ def regressfrommaps(
133
132
  if TimingLGR is not None:
134
133
  TimingLGR.info("Start filtering operation")
135
134
 
136
-
137
135
  voxelsprocessed_regressionfilt = tide_linfitfiltpass.linfitfiltpass(
138
136
  numvalidspatiallocs,
139
137
  fmri_data_valid,
@@ -297,6 +297,8 @@ def showarbcorr(args):
297
297
  sys.exit()
298
298
  if starttime1 == None:
299
299
  starttime1 = 0.0
300
+ endtime1 = starttime1 + len(inputdata1) / Fs1
301
+ print(f"inputdata1 goes from {starttime1} to {endtime1}")
300
302
 
301
303
  if args.samplerate2 is not None:
302
304
  Fs2 = args.samplerate2
@@ -305,9 +307,19 @@ def showarbcorr(args):
305
307
  sys.exit()
306
308
  if starttime2 == None:
307
309
  starttime2 = 0.0
310
+ endtime2 = starttime2 + len(inputdata2) / Fs2
311
+ print(f"inputdata2 goes from {starttime2} to {endtime2}")
308
312
 
309
- trimdata1 = inputdata1
310
- trimdata2 = inputdata2
313
+ matchedinput1, matchedinput2, commonFs = tide_corr.matchsamplerates(
314
+ inputdata1,
315
+ Fs1,
316
+ inputdata2,
317
+ Fs2,
318
+ method="univariate",
319
+ debug=args.debug,
320
+ )
321
+ trimdata1 = matchedinput1
322
+ trimdata2 = matchedinput2
311
323
 
312
324
  if args.trimdata:
313
325
  minlen = np.min([len(trimdata1), len(trimdata2)])
@@ -324,12 +336,12 @@ def showarbcorr(args):
324
336
  if args.verbose:
325
337
  print("filtering to ", theprefilter.gettype(), " band")
326
338
  filtereddata1 = tide_math.corrnormalize(
327
- theprefilter.apply(Fs1, trimdata1),
339
+ theprefilter.apply(commonFs, trimdata1),
328
340
  detrendorder=args.detrendorder,
329
341
  windowfunc=args.windowfunc,
330
342
  )
331
343
  filtereddata2 = tide_math.corrnormalize(
332
- theprefilter.apply(Fs2, trimdata2),
344
+ theprefilter.apply(commonFs, trimdata2),
333
345
  detrendorder=args.detrendorder,
334
346
  windowfunc=args.windowfunc,
335
347
  )
@@ -341,9 +353,9 @@ def showarbcorr(args):
341
353
  print(f"{Fs1=}, {Fs2=}, {starttime1=}, {starttime2=}, {args.windowfunc=}")
342
354
  xcorr_x, thexcorr, corrFs, zeroloc = tide_corr.arbcorr(
343
355
  filtereddata1,
344
- Fs1,
356
+ commonFs,
345
357
  filtereddata2,
346
- Fs2,
358
+ commonFs,
347
359
  start1=starttime1,
348
360
  start2=starttime2,
349
361
  windowfunc=args.windowfunc,
@@ -375,6 +387,7 @@ def showarbcorr(args):
375
387
  thexcorr_trim = thexcorr[lowerlim:upperlim]
376
388
  print("trimmed Correlator lengths (x, y):", len(xcorr_x_trim), len(thexcorr_trim))
377
389
 
390
+ print(f"{len(filtereddata1)=}, {len(filtereddata2)=}")
378
391
  thepxcorr = pearsonr(filtereddata1, filtereddata2)
379
392
 
380
393
  # initialize the correlation fitter