antspymm 1.4.0__py3-none-any.whl → 1.4.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- antspymm/__init__.py +1 -0
- antspymm/mm.py +168 -16
- {antspymm-1.4.0.dist-info → antspymm-1.4.2.dist-info}/METADATA +67 -7
- antspymm-1.4.2.dist-info/RECORD +7 -0
- {antspymm-1.4.0.dist-info → antspymm-1.4.2.dist-info}/WHEEL +1 -1
- antspymm-1.4.0.dist-info/RECORD +0 -7
- {antspymm-1.4.0.dist-info → antspymm-1.4.2.dist-info}/LICENSE +0 -0
- {antspymm-1.4.0.dist-info → antspymm-1.4.2.dist-info}/top_level.txt +0 -0
antspymm/__init__.py
CHANGED
antspymm/mm.py
CHANGED
@@ -2707,7 +2707,7 @@ def template_figure_with_overlay(scalar_label_df, prefix, outputfilename=None, t
     toviz = temp['overlay']
     return { "underlay": seggm, 'overlay': toviz, 'seg': tcrop }

-def get_data( name=None, force_download=False, version=
+def get_data( name=None, force_download=False, version=25, target_extension='.csv' ):
     """
     Get ANTsPyMM data filename

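For context, a minimal first-time setup sketch that exercises this function with the new defaults shown above. It assumes `get_data` is re-exported at the package level, as in the documented first-time setup; this is an illustration, not part of the diff.

```python
import antspymm

# One-time data fetch; with 1.4.2 the default resolves to version=25 of the
# packaged reference data.
antspymm.get_data(force_download=False)
```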
@@ -10219,7 +10219,7 @@ def remove_volumes_from_timeseries(time_series, volumes_to_remove):
     volumes_to_keep = [i for i in range(time_series.shape[3]) if i not in volumes_to_remove]

     # Select the volumes to keep
-    filtered_time_series = ants.from_numpy( time_series[..., volumes_to_keep] )
+    filtered_time_series = ants.from_numpy( time_series.numpy()[..., volumes_to_keep] )

     return ants.copy_image_info( time_series, filtered_time_series )

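For context on this change: `ants.from_numpy` expects a plain NumPy array, so the 4-D series is converted with `.numpy()` before volumes are selected along the time axis. A minimal synthetic sketch (array sizes and the dropped indices are arbitrary, not from the package):

```python
import ants
import numpy as np

# Synthetic 4-D "BOLD" series; shapes and dropped indices are arbitrary.
bold = ants.from_numpy(np.random.rand(8, 8, 8, 20).astype("float32"))
drop = {0, 1}
keep = [i for i in range(bold.shape[3]) if i not in drop]

# ants.from_numpy needs a plain NumPy array, hence the .numpy() call (the 1.4.2 fix).
subset = ants.from_numpy(bold.numpy()[..., keep])
subset = ants.copy_image_info(bold, subset)
print(subset.shape)  # (8, 8, 8, 18)
```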
@@ -10654,7 +10654,136 @@ def novelty_detection_quantile(df_train, df_test):
         myqs[mykey] = abs( temp - 0.5 ) / 0.5
     return myqs

-
+
+
+def shorten_pymm_names(x):
+    """
+    Shortens pmymm names by applying a series of regex substitutions.
+
+    Parameters:
+    x (str): The input string to be shortened
+
+    Returns:
+    str: The shortened string
+    """
+    xx = x.lower()
+    xx = re.sub("_", ".", xx) # Replace underscores with periods
+    xx = re.sub("\.\.", ".", xx, flags=re.I) # Replace double dots with single dot
+    # Apply the following regex substitutions in order
+    xx = re.sub("sagittal.stratum.include.inferior.longitidinal.fasciculus.and.inferior.fronto.occipital.fasciculus.","ilf.and.ifo", xx, flags=re.I)
+    xx = re.sub(r"sagittal.stratum.include.inferior.longitidinal.fasciculus.and.inferior.fronto.occipital.fasciculus.", "ilf.and.ifo", xx, flags=re.I)
+    xx = re.sub(r".cres.stria.terminalis.can.not.be.resolved.with.current.resolution.", "",
+        xx, flags=re.I)
+    xx = re.sub("_", ".", xx) # Replace underscores with periods
+    xx = re.sub(r"longitudinal.fasciculus", "l.fasc", xx, flags=re.I)
+    xx = re.sub(r"corona.radiata", "cor.rad", xx, flags=re.I)
+    xx = re.sub("central", "cent", xx, flags=re.I)
+    xx = re.sub(r"deep.cit168", "dp.", xx, flags=re.I)
+    xx = re.sub("cit168", "", xx, flags=re.I)
+    xx = re.sub(".include", "", xx, flags=re.I)
+    xx = re.sub("mtg.sn", "", xx, flags=re.I)
+    xx = re.sub("brainstem", ".bst", xx, flags=re.I)
+    xx = re.sub(r"rsfmri.", "rsf.", xx, flags=re.I)
+    xx = re.sub(r"dti.mean.fa.", "dti.fa.", xx, flags=re.I)
+    xx = re.sub("perf.cbf.mean.", "cbf.", xx, flags=re.I)
+    xx = re.sub(".jhu.icbm.labels.1mm", "", xx, flags=re.I)
+    xx = re.sub(".include.optic.radiation.", "", xx, flags=re.I)
+    xx = re.sub("\.\.", ".", xx, flags=re.I) # Replace double dots with single dot
+    xx = re.sub("\.\.", ".", xx, flags=re.I) # Replace double dots with single dot
+    xx = re.sub("cerebellar.peduncle", "cereb.ped", xx, flags=re.I)
+    xx = re.sub(r"anterior.limb.of.internal.capsule", "ant.int.cap", xx, flags=re.I)
+    xx = re.sub(r"posterior.limb.of.internal.capsule", "post.int.cap", xx, flags=re.I)
+    xx = re.sub("t1hier.", "t1.", xx, flags=re.I)
+    xx = re.sub("anterior", "ant", xx, flags=re.I)
+    xx = re.sub("posterior", "post", xx, flags=re.I)
+    xx = re.sub("inferior", "inf", xx, flags=re.I)
+    xx = re.sub("superior", "sup", xx, flags=re.I)
+    xx = re.sub(r"dktcortex", ".ctx", xx, flags=re.I)
+    xx = re.sub(".lravg", "", xx, flags=re.I)
+    xx = re.sub("dti.mean.fa", "dti.fa", xx, flags=re.I)
+    xx = re.sub(r"retrolenticular.part.of.internal", "rent.int.cap", xx, flags=re.I)
+    xx = re.sub(r"iculus.could.be.a.part.of.ant.internal.capsule", "", xx, flags=re.I) # Twice
+    xx = re.sub(".fronto.occipital.", ".frnt.occ.", xx, flags=re.I)
+    xx = re.sub(r".longitidinal.fasciculus.", ".long.fasc.", xx, flags=re.I) # Twice
+    xx = re.sub(".external.capsule", ".ext.cap", xx, flags=re.I)
+    xx = re.sub("of.internal.capsule", ".int.cap", xx, flags=re.I)
+    xx = re.sub("fornix.cres.stria.terminalis", "fornix.", xx, flags=re.I)
+    xx = re.sub("capsule", "", xx, flags=re.I)
+    xx = re.sub("and.inf.frnt.occ.fasciculus.", "", xx, flags=re.I)
+    xx = re.sub("crossing.tract.a.part.of.mcp.", "", xx, flags=re.I)
+    return xx[:40] # Truncate to first 40 characters
+
+
+def shorten_pymm_names2(x, verbose=False ):
+    """
+    Shortens pmymm names by applying a series of regex substitutions.
+
+    Parameters:
+    x (str): The input string to be shortened
+
+    verbose (bool): explain the patterns and replacements and their impact
+
+    Returns:
+    str: The shortened string
+    """
+    # Define substitution patterns as tuples
+    substitutions = [
+        ("_", "."),
+        ("\.\.", "."),
+        ("sagittal.stratum.include.inferior.longitidinal.fasciculus.and.inferior.fronto.occipital.fasciculus.","ilf.and.ifo"),
+        (r"sagittal.stratum.include.inferior.longitidinal.fasciculus.and.inferior.fronto.occipital.fasciculus.", "ilf.and.ifo"),
+        (r".cres.stria.terminalis.can.not.be.resolved.with.current.resolution.", ""),
+        ("_", "."),
+        (r"longitudinal.fasciculus", "l.fasc"),
+        (r"corona.radiata", "cor.rad"),
+        ("central", "cent"),
+        (r"deep.cit168", "dp."),
+        ("cit168", ""),
+        (".include", ""),
+        ("mtg.sn", ""),
+        ("brainstem", ".bst"),
+        (r"rsfmri.", "rsf."),
+        (r"dti.mean.fa.", "dti.fa."),
+        ("perf.cbf.mean.", "cbf."),
+        (".jhu.icbm.labels.1mm", ""),
+        (".include.optic.radiation.", ""),
+        ("\.\.", "."), # Replace double dots with single dot
+        ("\.\.", "."), # Replace double dots with single dot
+        ("cerebellar.peduncle", "cereb.ped"),
+        (r"anterior.limb.of.internal.capsule", "ant.int.cap"),
+        (r"posterior.limb.of.internal.capsule", "post.int.cap"),
+        ("t1hier.", "t1."),
+        ("anterior", "ant"),
+        ("posterior", "post"),
+        ("inferior", "inf"),
+        ("superior", "sup"),
+        (r"dktcortex", ".ctx"),
+        (".lravg", ""),
+        ("dti.mean.fa", "dti.fa"),
+        (r"retrolenticular.part.of.internal", "rent.int.cap"),
+        (r"iculus.could.be.a.part.of.ant.internal.capsule", ""), # Twice
+        (".fronto.occipital.", ".frnt.occ."),
+        (r".longitidinal.fasciculus.", ".long.fasc."), # Twice
+        (".external.capsule", ".ext.cap"),
+        ("of.internal.capsule", ".int.cap"),
+        ("fornix.cres.stria.terminalis", "fornix."),
+        ("capsule", ""),
+        ("and.inf.frnt.occ.fasciculus.", ""),
+        ("crossing.tract.a.part.of.mcp.", "")
+    ]
+
+    # Apply substitutions in order
+    for pattern, replacement in substitutions:
+        if verbose:
+            print("Pre " + x + " pattern "+pattern + " repl " + replacement )
+        x = re.sub(pattern, replacement, x.lower(), flags=re.IGNORECASE)
+        if verbose:
+            print("Post " + x)
+
+    return x[:40] # Truncate to first 40 characters
+
+
+def brainmap_figure(statistical_df, data_dictionary, output_prefix, brain_image, overlay_cmap='bwr', nslices=21, ncol=7, edge_image_dilation = 0, black_bg=True, axes = [0,1,2], fixed_overlay_range=None, crop=5, verbose=0 ):
     """
     Create figures based on statistical data and an underlying brain image.

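A minimal usage sketch of the new name-shortening helpers; the input string is hypothetical, and whether the helpers are also re-exported by the package `__init__` depends on the one-line `__init__.py` change, which is not shown above.

```python
import antspymm.mm as mm  # the helpers added in 1.4.2 live in antspymm/mm.py

# Hypothetical ANTsPyMM-style column name; both helpers lower-case, abbreviate,
# and truncate the result to 40 characters.
name = "T1Hier_thk_superior_frontal_dktcortex"
print(mm.shorten_pymm_names(name))
print(mm.shorten_pymm_names2(name, verbose=False))
```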
@@ -10666,7 +10795,7 @@ def brainmap_figure(statistical_df, data_dictionary_path, output_prefix, brain_i
     with respect to regions that are measured in antspymm. value will be
     the value to be displayed. if two examples of a given region exist in
     statistical_df, then the largest absolute value will be taken for display.
-    -
+    - data_dictionary (pandas dataframe): antspymm data dictionary.
     - output_prefix (str): Prefix for the output figure filenames.
     - brain_image (antsImage): the brain image on which results will overlay.
     - overlay_cmap (str): see matplotlib
@@ -10684,11 +10813,14 @@ def brainmap_figure(statistical_df, data_dictionary_path, output_prefix, brain_i
     """
     import re

+    def is_bst_region(filename):
+        return filename[-4:] == '.bst'
+
     # Read the statistical file
     zz = statistical_df

     # Read the data dictionary from a CSV file
-    mydict =
+    mydict = data_dictionary
     mydict = mydict[~mydict['Measurement'].str.contains("tractography-based connectivity", na=False)]
     mydict2=mydict.copy()
     mydict2['tidynames']=mydict2['tidynames'].str.replace(".left","")
@@ -10719,8 +10851,18 @@ def brainmap_figure(statistical_df, data_dictionary_path, output_prefix, brain_i
        if verbose > 0 :
            print(str(k) + " " + anattoshow[k] )
        mysub = zz[zz['anat'].str.contains(anattoshow[k])]
-       anatsear=
-       anatsear=re.sub(r'
+       anatsear=shorten_pymm_names( anattoshow[k] )
+       anatsear=re.sub(r'[()]', '.', anatsear )
+       anatsear=re.sub(r'\.\.', '.', anatsear )
+       anatsear=re.sub("dti.mean.md.snc","md.snc",anatsear)
+       anatsear=re.sub("dti.mean.fa.snc","fa.snc",anatsear)
+       anatsear=re.sub("dti.mean.md.snr","md.snr",anatsear)
+       anatsear=re.sub("dti.mean.fa.snr","fa.snr",anatsear)
+       anatsear=re.sub("dti.mean.md.","",anatsear)
+       anatsear=re.sub("dti.mean.fa.","",anatsear)
+       anatsear=re.sub("dti.md.","",anatsear)
+       anatsear=re.sub("dti.fa.","",anatsear)
+       anatsear=re.sub("dti.md","",anatsear)
        anatsear=re.sub("dti.fa","",anatsear)
        anatsear=re.sub("cbf.","",anatsear)
        anatsear=re.sub("rsfmri.fcnxpro122.","",anatsear)
@@ -10741,12 +10883,6 @@ def brainmap_figure(statistical_df, data_dictionary_path, output_prefix, brain_i
        anatsear=re.sub("t1.area.","",anatsear)
        anatsear=re.sub("asymdp.","",anatsear)
        anatsear=re.sub("asym.","",anatsear)
-       anatsear=re.sub("dti.md.","",anatsear)
-       anatsear=re.sub("dti.fa.","",anatsear)
-       anatsear=re.sub("dti.md","",anatsear)
-       anatsear=re.sub("dti.mean.md.","",anatsear)
-       anatsear=re.sub("dti.mean.fa.","",anatsear)
-       anatsear=re.sub("asym.","",anatsear)
        anatsear=re.sub("asym","",anatsear)
        anatsear=re.sub("lravg.","",anatsear)
        anatsear=re.sub("lravg","",anatsear)
@@ -10756,22 +10892,38 @@ def brainmap_figure(statistical_df, data_dictionary_path, output_prefix, brain_i
        anatsear=re.sub("superior","sup",anatsear)
        anatsear=re.sub("cerebellum","",anatsear)
        anatsear=re.sub("brainstem","",anatsear)
+       anatsear=re.sub("t.limb.int","t.int",anatsear)
+       anatsear=re.sub("paracentral","paracent",anatsear)
+       anatsear=re.sub("precentral","precent",anatsear)
+       anatsear=re.sub("postcentral","postcent",anatsear)
        anatsear=re.sub("sup.cerebellar.peduncle","sup.cereb.ped",anatsear)
        anatsear=re.sub("inferior.cerebellar.peduncle","inf.cereb.ped",anatsear)
        anatsear=re.sub(".crossing.tract.a.part.of.mcp.","",anatsear)
+       anatsear=re.sub(".crossing.tract.a.part.of.","",anatsear)
        anatsear=re.sub(".column.and.body.of.fornix.","",anatsear)
-       anatsear=re.sub("fronto.occipital.fasciculus.could.be.a.part.of.
+       anatsear=re.sub("fronto.occipital.fasciculus.could.be.a.part.of.ant.internal.capsule","frnt.occ",anatsear)
        anatsear=re.sub("inferior.fronto.occipital.fasciculus.could.be.a.part.of.anterior.internal.capsule","inf.frnt.occ",anatsear)
        anatsear=re.sub("fornix.cres.stria.terminalis.can.not.be.resolved.with.current.resolution","fornix.column.and.body.of.fornix",anatsear)
        anatsear=re.sub("external.capsule","ext.cap",anatsear)
        anatsear=re.sub(".jhu.icbm.labels.1mm","",anatsear)
+       anatsear=re.sub("dp.",".",anatsear)
+       anatsear=re.sub(".mtg.sn.snc.",".snc.",anatsear)
+       anatsear=re.sub(".mtg.sn.snr.",".snr.",anatsear)
        anatsear=re.sub("mtg.sn.snc.",".snc.",anatsear)
        anatsear=re.sub("mtg.sn.snr.",".snr.",anatsear)
+       anatsear=re.sub("mtg.sn.snc",".snc.",anatsear)
+       anatsear=re.sub("mtg.sn.snr",".snr.",anatsear)
        anatsear=re.sub("anterior.","ant.",anatsear)
+       anatsear=re.sub("rsf.","",anatsear)
+       anatsear=re.sub("fcnxpro122.","",anatsear)
+       anatsear=re.sub("fcnxpro129.","",anatsear)
+       anatsear=re.sub("fcnxpro134.","",anatsear)
        anatsear=re.sub("ant.corona.radiata","ant.cor.rad",anatsear)
        anatsear=re.sub("sup.corona.radiata","sup.cor.rad",anatsear)
        anatsear=re.sub("posterior.thalamic.radiation.include.optic.radiation","post.thalamic.radiation",anatsear)
        anatsear=re.sub("retrolenticular.part.of.internal.capsule","rent.int.cap",anatsear)
+       anatsear=re.sub("post.limb.of.internal.capsule","post.int.cap",anatsear)
+       anatsear=re.sub("ant.limb.of.internal.capsule","ant.int.cap",anatsear)
        anatsear=re.sub("sagittal.stratum.include.inferior.longitidinal.fasciculus.and.inferior.fronto.occipital.fasciculus","ilf.and.ifo",anatsear)
        atlassearch = mydict['tidynames'].str.contains(anatsear)
        if atlassearch.sum() == 0:
@@ -10796,7 +10948,7 @@ def brainmap_figure(statistical_df, data_dictionary_path, output_prefix, brain_i
            myext = 'JHU_wm'
        elif 'dktcortex' in anattoshow[k] or whichatlas == 'desikan-killiany-tourville' or 'dtkregions' in anattoshow[k] :
            myext = 'dkt_cortex'
-       elif ('cit168' in anattoshow[k] or whichatlas == 'CIT168') and not 'brainstem' in anattoshow[k]:
+       elif ('cit168' in anattoshow[k] or whichatlas == 'CIT168') and not 'brainstem' in anattoshow[k] and not is_bst_region(anatsear):
            myext = 'cit168lab'
        elif 'mtl' in anattoshow[k]:
            myext = 'mtl'
@@ -10805,7 +10957,7 @@ def brainmap_figure(statistical_df, data_dictionary_path, output_prefix, brain_i
            myext = 'cerebellum'
            oglabelname=re.sub('cerebellum', '',anatsear)
            # oglabelname=oglabelname[2:]
-       elif 'brainstem' in anattoshow[k]:
+       elif 'brainstem' in anattoshow[k] or is_bst_region(anatsear):
            myext = 'brainstem'
        elif any(item in anattoshow[k] for item in ['nbm', 'bf']):
            myext = 'bf'
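A minimal calling sketch for the updated `brainmap_figure` signature, where the data dictionary is now passed as a pandas DataFrame rather than a CSV path. The file names below are hypothetical; the statistical table needs the `anat` column described in the docstring, and the dictionary needs the `Measurement` and `tidynames` columns referenced in the function body.

```python
import ants
import pandas as pd
import antspymm.mm as mm

stats = pd.read_csv("regional_statistics.csv")            # hypothetical statistics table
data_dict = pd.read_csv("antspymm_data_dictionary.csv")   # hypothetical path to the ANTsPyMM dictionary
underlay = ants.image_read("template_t1_brain.nii.gz")    # hypothetical underlay image

mm.brainmap_figure(stats, data_dict, "figures/brainmap_", underlay, nslices=21, verbose=1)
```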
{antspymm-1.4.0.dist-info → antspymm-1.4.2.dist-info}/METADATA
CHANGED
@@ -1,18 +1,18 @@
 Metadata-Version: 2.1
 Name: antspymm
-Version: 1.4.0
+Version: 1.4.2
 Summary: multi-channel/time-series medical image processing with antspyx
 Author-email: "Avants, Gosselin, Tustison, Reardon" <stnava@gmail.com>
 License: Apache 2.0
 Requires-Python: >=3.8
 Description-Content-Type: text/markdown
 License-File: LICENSE
-Requires-Dist: h5py
-Requires-Dist: numpy
-Requires-Dist: pandas
-Requires-Dist: antspyx
-Requires-Dist: antspynet
-Requires-Dist: antspyt1w
+Requires-Dist: h5py>=2.10.0
+Requires-Dist: numpy>=1.19.4
+Requires-Dist: pandas>=1.0.1
+Requires-Dist: antspyx>=0.4.2
+Requires-Dist: antspynet>=0.2.8
+Requires-Dist: antspyt1w>=0.9.3
 Requires-Dist: pathlib
 Requires-Dist: dipy
 Requires-Dist: nibabel
@@ -112,6 +112,66 @@ achieved through four steps (recommended approach):

 4. run the main antspymm function

+# formal description
+
+<details>
+<summary>Overview</summary>
+
+The Advanced Normalization Tools Ecosystem (ANTsX) is a collection of interrelated, open-source software libraries for biological and medical image processing [1](https://pubmed.ncbi.nlm.nih.gov/33907199/) and analysis built on the NIH’s Insight Toolkit (ITK). ANTsX has demonstrated significant applicability for a variety of organ systems, species, and imaging modalities [2](https://pubmed.ncbi.nlm.nih.gov/25977810/) [3](https://pubmed.ncbi.nlm.nih.gov/38746199/) [4](https://pubmed.ncbi.nlm.nih.gov/35703369/). ANTsX-based processing of multi-modality studies utilizes the Python-based ANTsPyMM library. This includes structural (T1-w, DTI, FLAIR), functional (fMRI), and perfusion (ASL) modalities.
+
+</details>
+
+<details>
+<summary>T1-weighted MRI</summary>
+
+T1-weighted MRI processing has been previously described [1](https://pubmed.ncbi.nlm.nih.gov/33907199/) [2](https://pubmed.ncbi.nlm.nih.gov/38632390/). It is coordinated by ANTsPyT1w, a sub-component of ANTsPyMM, and includes tools for image registration, segmentation, and super-resolution as customized for the human brain. Processing components include preprocessing (denoising + bias correction), brain extraction, six-tissue parenchymal parcellation (CSF, gray matter, white matter, deep gray matter, brain stem, and cerebellum), and cortical parcellation for morphological quantitation. Derived measurements are tabulated by the neuroanatomical coordinates defined above and include cortical and subcortical measurements and morphological measurements of the hippocampus, basal forebrain and cerebellum.
+
+</details>
+
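As a minimal illustration of this step, the sketch below runs the ANTsPyT1w hierarchical pipeline on a T1-weighted image. The input path and output prefix are hypothetical, and the exact keyword arguments and outputs of `antspyt1w.hierarchical` may differ between versions; this is a sketch, not the canonical ANTsPyMM call sequence.

```python
import ants
import antspyt1w

# Hypothetical input image and output prefix; substitute your own data.
t1 = ants.image_read("sub-01_T1w.nii.gz")
hier = antspyt1w.hierarchical(t1, "/tmp/sub-01_t1_")  # brain extraction, tissue and cortical parcellation, etc.
```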
+<details>
+<summary>Diffusion</summary>
+
+Diffusion processing. ANTsPyMM couples the ANTsX toolkit with the Diffusion Imaging in Python (Dipy) library for the processing and analysis of diffusion MRI [1](https://pubmed.ncbi.nlm.nih.gov/24600385/). The former is used for motion correction and normalization to corresponding T1-w images. Output consists of QC images, the motion-corrected diffusion series, RGB images, FA, mean diffusivity and dense tractography as well as tractography-based connectivity matrices. A tabular summary of these metrics is written to csv.
+
+</details>
+
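The tensor-derived scalars mentioned above (FA, mean diffusivity) can be illustrated with a few lines of Dipy. This is only a generic sketch on synthetic data, not the ANTsPyMM implementation, which additionally handles motion correction and registration to T1.

```python
import numpy as np
from dipy.core.gradients import gradient_table
from dipy.reconst.dti import TensorModel

# Synthetic example: one b=0 volume plus six diffusion-weighted directions.
bvals = np.array([0, 1000, 1000, 1000, 1000, 1000, 1000], dtype=float)
bvecs = np.array([[0, 0, 0], [1, 0, 0], [0, 1, 0], [0, 0, 1],
                  [1, 1, 0], [0, 1, 1], [1, 0, 1]], dtype=float)
bvecs[1:] /= np.linalg.norm(bvecs[1:], axis=1, keepdims=True)
gtab = gradient_table(bvals, bvecs=bvecs)

dwi = np.random.rand(4, 4, 4, 7) * 100 + 1.0   # stand-in for a motion-corrected DWI series
fit = TensorModel(gtab).fit(dwi)
fa, md = fit.fa, fit.md                        # fractional anisotropy and mean diffusivity maps
print(fa.shape, md.shape)
```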
+<details>
+<summary>Resting-state fMRI</summary>
+
+The rsfMRI processing computes a robust summary of resting-state fMRI time series data and corrects for motion artifacts, extracts temporal statistics, and computes functional connectivity across the brain. The analysis pipeline incorporates advanced techniques for noise reduction, temporal derivative calculation, signal standardization, and the calculation of functional connectivity between brain regions using Yeo homotopic labels. This provides researchers with cleaned, standardized, and biologically meaningful data for further analysis of brain network activity.
+
+Several preprocessing steps are included in the ANTsPyMM processing of resting state fMRI (rsfMRI) data. First, motion correction is used to align the time series to a series-specific fMRI template. Distortion correction to t1 is also performed along with brain extraction. The **temporal derivative** is calculated to quantify changes over time in the signal, followed by the **temporal standard deviation (tSTD)**, which computes the variability across time for each voxel. This is done to identify regions of interest with significant signal variation. The **CompCor matrix** [1](https://pubmed.ncbi.nlm.nih.gov/17560126/) is computed using temporal standard deviation thresholds, which helps remove noise by identifying high-variance regions. Additionally, **motion correction** is a central part of the pipeline, where metrics like framewise displacement (FD) and motion-induced signal changes (DVARS) are calculated to detect and correct for artifacts caused by subject movement. **Bandpass filtering** is applied to focus the analysis on brain signals within specific frequency ranges, and **censoring** is used to exclude high-motion time points from the analysis, ensuring cleaner data. The preprocessing concludes with the calculation of **summary statistics**, including measures of signal-to-noise ratio (SNR) and motion effects, which are compiled into a dictionary for further use.
+
+The next step in the pipeline involves computing functional connectivity through **correlation matrices** based on the **Yeo homotopic labels**, which group brain regions into homotopic (mirrored) areas across hemispheres. Time series data are averaged within each Yeo-labeled region, and pairwise **correlation matrices** are computed to assess the functional connectivity both within and between brain hemispheres. This allows for an in-depth analysis of large-scale brain networks, including symmetrical interactions between homotopic regions and connectivity within networks such as the default mode network, visual network, and others.
+
+This provides a comprehensive framework for preprocessing resting-state fMRI data and analyzing functional connectivity across the brain. By combining robust noise reduction, motion correction, and correlation matrix computation, the pipeline ensures that the resulting data are suitable for high-quality research into brain network dynamics and resting-state connectivity. Through the use of Yeo homotopic labels and functional correlation matrices, the code offers a valuable tool for investigating symmetrical brain activity and network interactions in fMRI studies. By default, three different sets of processing parameters are employed. These were chosen based on an empirical evaluation of reproducibility in a traveling participant cohort.
+
+</details>
+
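A generic sketch of the final connectivity step described above: region-averaged time series are correlated pairwise. The region labels and signals here are synthetic stand-ins, not the Yeo homotopic labels or the ANTsPyMM implementation.

```python
import numpy as np

rng = np.random.default_rng(0)
n_timepoints, n_regions = 200, 8

# Stand-in for band-pass filtered, censored, region-averaged BOLD signals
# (rows = time points, columns = labeled regions).
regional_ts = rng.standard_normal((n_timepoints, n_regions))

# Pairwise functional connectivity: an n_regions x n_regions correlation matrix.
fc = np.corrcoef(regional_ts, rowvar=False)
print(fc.shape)   # (8, 8)
print(fc[0, 1])   # correlation between region 0 and region 1
```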
+<details>
+<summary>ASL</summary>
+
+ASL processing. Cerebral blood flow (CBF) is a critical parameter in understanding brain function and its alterations in various neurological conditions. Arterial spin labeling (ASL) MRI is a non-invasive technique that can measure CBF without the need for contrast agents. ANTsPyMM estimates CBF from ASL MRI data through a combination of image processing and mathematical modeling techniques [1](https://pubmed.ncbi.nlm.nih.gov/24715426/). First, motion artifacts and outliers are removed from the time series data. Second, the preprocessed data is registered to the reference T1-w space using a rigid transformation optimized through the ANTs registration tools [2](https://pubmed.ncbi.nlm.nih.gov/24817849/). Third, the six-tissue segmentation generated during the T1-w segmentation is used to partition the registered ASL data from which the CBF is estimated using a mathematical model that takes into account the ASL MRI signal, the labeling efficiency, and the longitudinal relaxation time of blood. Finally, the M0 image, which represents the equilibrium magnetization of brain tissue, is estimated using a separate mathematical model.
+
+</details>
+
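For orientation, the kind of model referred to above can be illustrated with the widely used consensus single-compartment (p)CASL equation (Alsop et al., 2015); this sketch is generic and not necessarily the exact model or parameter values ANTsPyMM applies.

```python
import numpy as np

def cbf_single_compartment(delta_m, m0, pld=1.8, tau=1.8,
                           t1_blood=1.65, alpha=0.85, lam=0.9):
    """Consensus single-compartment (p)CASL model (Alsop et al., 2015).
    delta_m: control-label difference signal; m0: equilibrium magnetization.
    Returns CBF in ml/100 g/min."""
    return (6000.0 * lam * delta_m * np.exp(pld / t1_blood)) / (
        2.0 * alpha * t1_blood * m0 * (1.0 - np.exp(-tau / t1_blood)))

# Toy numbers: a 1% control-label difference gives a plausible gray-matter CBF.
print(cbf_single_compartment(delta_m=0.01, m0=1.0))
```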
+<details>
+<summary>Magnetic resonance angiography</summary>
+
+MRA processing and analysis. A precursor for quantitative measures of potential vascular irregularities is the segmentation derived from MR angiography. To extract these image-based quantitative measures (described below), we developed and trained a deep learning network as part of the ANTsXNet functional library. Training data was adapted from a publicly available resource [1](https://data.kitware.com/#item/58a372e48d777f0721a64dc9) consisting of 42 subjects with vascular network segmentations and brain masks provided. A previously constructed high-resolution template was generated from the Human Connectome Project Young Adult cohort comprising T1-w, T2-w, and FA modalities using ANTs functionality [2](https://pubmed.ncbi.nlm.nih.gov/19818860/) [3](https://pubmed.ncbi.nlm.nih.gov/25433513/) and served as the prediction space for network training. Prior spatial information was included by warping all vascular segmentations to the template space, averaged, spatially smoothed, and renormalized to the intensity range [0,1]. Aggressive data augmentation was used to generate additional data in real time during training consisting of random spatial linear and deformable transformations, random Gaussian additive noise, random histogram-based intensity warping [4](https://pubmed.ncbi.nlm.nih.gov/34227163/), and simulated bias field based on the popular N4 algorithm [5](https://pubmed.ncbi.nlm.nih.gov/20378467/). The functionality is available as open-source in both the R-based ANTsRNet (``brainMraVesselSegmentation(...)``) and Python-based ANTsPyNet (``brain_mra_vessel_segmentation(...)``).
+
+</details>
+
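A minimal usage sketch of the ANTsPyNet entry point named above. The input path is hypothetical and the call is shown with a single positional image argument only; additional keyword arguments (caching, verbosity) are omitted because they may vary by version.

```python
import ants
import antspynet

mra = ants.image_read("sub-01_angio.nii.gz")                # hypothetical MRA input
vessel_prob = antspynet.brain_mra_vessel_segmentation(mra)  # vessel probability map
vessel_mask = ants.threshold_image(vessel_prob, 0.5, 1.0)   # binarize for downstream measures
```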
+<details>
+<summary>Periventricular spaces and white matter hyperintensities</summary>
+
+Traditional measures for indicating abnormal vascular morphology can then be calculated, such as tortuosity [1](https://pubmed.ncbi.nlm.nih.gov/12956271/). For example, tight vascular coils (i.e., high tortuosity) are often associated with the presence of malignant tumors. From the centerline of the vessel segmentation, various tortuosity measures have been proposed including 1) distance metric: the ratio of the length of the centerline to the distance between the two endpoints, 2) inflection count metric: the distance metric multiplied by the number of the inflection points (i.e., a point of minimum total curvature), 3) sum of angles metric: the integrated total curvature normalized by total path length.
+
+For quantitative assessment of enlarged perivascular spaces, we employ functionality made publicly available through the ANTsX toolkit. Specifically, previously published research [2](https://pubmed.ncbi.nlm.nih.gov/34262443/) has been ported to the ANTsXNet library and will be used for segmenting enlarged perivascular spaces. Using both T1 and FLAIR modalities, a deep learning U-net neural network was trained using 40 datasets in which all visible perivascular spaces were manually annotated by an expert. An ensemble of trained weights is used to produce the final probability image. Previous work in MRI super resolution [3](https://www.medrxiv.org/content/10.1101/2023.02.02.23285376v1) will be used to explore the possible output enhancement. PVS segmentation results will be tabulated per lobe using separate ANTsXNet functionality [4](https://pubmed.ncbi.nlm.nih.gov/38632390/).
+
+Similar functionality exists for segmentation of white matter hyperintensities and will be included in the MRI processing for this project. Several algorithms have been ported to ANTsXNet from previous research from multiple groups to complement existing capabilities [5](https://pubmed.ncbi.nlm.nih.gov/30125711/) [6](https://pubmed.ncbi.nlm.nih.gov/35088930/) [7](https://pubmed.ncbi.nlm.nih.gov/38050769/). Regional tabulation will also depend on the lobar segmentation within the white matter as with the PVS segmentation results.
+
+</details>
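A generic sketch of the distance-metric tortuosity described above (centerline arc length divided by the straight-line distance between endpoints); the centerline here is a synthetic polyline, not output from the vessel segmentation.

```python
import numpy as np

def distance_metric_tortuosity(centerline):
    """Arc length of an ordered (N, 3) centerline divided by its endpoint-to-endpoint distance."""
    centerline = np.asarray(centerline, dtype=float)
    arc_length = np.linalg.norm(np.diff(centerline, axis=0), axis=1).sum()
    chord = np.linalg.norm(centerline[-1] - centerline[0])
    return arc_length / chord

# Synthetic helical vessel centerline: higher tortuosity than a straight segment.
t = np.linspace(0, 4 * np.pi, 200)
helix = np.column_stack([np.cos(t), np.sin(t), 0.2 * t])
print(distance_metric_tortuosity(helix))   # >> 1 for a coiled path; exactly 1 for a straight line
```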

 # first time setup

antspymm-1.4.2.dist-info/RECORD
ADDED
@@ -0,0 +1,7 @@
+antspymm/__init__.py,sha256=qUzRd3GmYB8hSO7GNaBuP7Jlm0QNMttTaUfvIpeeAig,4497
+antspymm/mm.py,sha256=iZvIpNvltqqzmtEF1D9tDoMUIbJK-GpsOjzSaH3z1ns,503059
+antspymm-1.4.2.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+antspymm-1.4.2.dist-info/METADATA,sha256=6Z-7BaP2_duk8af-kmSXg99BZcCzbWm7yhsxvxi7JRk,25668
+antspymm-1.4.2.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
+antspymm-1.4.2.dist-info/top_level.txt,sha256=iyD1sRhCKzfwKRJLq5ZUeV9xsv1cGQl8Ejp6QwXM1Zg,9
+antspymm-1.4.2.dist-info/RECORD,,
antspymm-1.4.0.dist-info/RECORD
DELETED
@@ -1,7 +0,0 @@
-antspymm/__init__.py,sha256=1fHqufHndrkJwz473av8qOf5-1xm5r-aKHuMAETGIiE,4462
-antspymm/mm.py,sha256=mSizSFmJEisEuzVdBozOxKwg7GtxPPT299q6vOmOII0,495807
-antspymm-1.4.0.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
-antspymm-1.4.0.dist-info/METADATA,sha256=SXTcovYb7YSYn1K7xHKhwq9fE2QpJxGVfVv63VBFwk8,14866
-antspymm-1.4.0.dist-info/WHEEL,sha256=Z4pYXqR_rTB7OWNDYFOm1qRk0RX6GFP2o8LgvP453Hk,91
-antspymm-1.4.0.dist-info/top_level.txt,sha256=iyD1sRhCKzfwKRJLq5ZUeV9xsv1cGQl8Ejp6QwXM1Zg,9
-antspymm-1.4.0.dist-info/RECORD,,
{antspymm-1.4.0.dist-info → antspymm-1.4.2.dist-info}/LICENSE
File without changes

{antspymm-1.4.0.dist-info → antspymm-1.4.2.dist-info}/top_level.txt
File without changes