antspymm 1.4.5__tar.gz → 1.4.8__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. {antspymm-1.4.5/antspymm.egg-info → antspymm-1.4.8}/PKG-INFO +5 -2
  2. {antspymm-1.4.5 → antspymm-1.4.8}/README.md +2 -0
  3. {antspymm-1.4.5 → antspymm-1.4.8}/antspymm/mm.py +116 -8
  4. {antspymm-1.4.5 → antspymm-1.4.8/antspymm.egg-info}/PKG-INFO +5 -2
  5. {antspymm-1.4.5 → antspymm-1.4.8}/antspymm.egg-info/SOURCES.txt +4 -0
  6. antspymm-1.4.8/docs/describe_mm_data.R +144 -0
  7. antspymm-1.4.8/docs/template_overlays.py +17 -0
  8. {antspymm-1.4.5 → antspymm-1.4.8}/pyproject.toml +1 -1
  9. antspymm-1.4.8/tests/perfusion_run_nnl.py +46 -0
  10. {antspymm-1.4.5 → antspymm-1.4.8}/tests/test_loop.py +2 -3
  11. antspymm-1.4.8/tests/test_rsfmri_run_minimal_ptbp.py +53 -0
  12. {antspymm-1.4.5 → antspymm-1.4.8}/LICENSE +0 -0
  13. {antspymm-1.4.5 → antspymm-1.4.8}/MANIFEST.in +0 -0
  14. {antspymm-1.4.5 → antspymm-1.4.8}/antspymm/__init__.py +0 -0
  15. {antspymm-1.4.5 → antspymm-1.4.8}/antspymm.egg-info/dependency_links.txt +0 -0
  16. {antspymm-1.4.5 → antspymm-1.4.8}/antspymm.egg-info/requires.txt +0 -0
  17. {antspymm-1.4.5 → antspymm-1.4.8}/antspymm.egg-info/top_level.txt +0 -0
  18. {antspymm-1.4.5 → antspymm-1.4.8}/docs/adni_rsfmri_2_nrg_conversion.py +0 -0
  19. {antspymm-1.4.5 → antspymm-1.4.8}/docs/antspymm_annotated_output_tree.pages +0 -0
  20. {antspymm-1.4.5 → antspymm-1.4.8}/docs/antspymm_annotated_output_tree.txt +0 -0
  21. {antspymm-1.4.5 → antspymm-1.4.8}/docs/antspymm_data_dictionary.csv +0 -0
  22. {antspymm-1.4.5 → antspymm-1.4.8}/docs/bids_cohort_example.py +0 -0
  23. {antspymm-1.4.5 → antspymm-1.4.8}/docs/blind_qc.Rmd +0 -0
  24. {antspymm-1.4.5 → antspymm-1.4.8}/docs/blind_qc.html +0 -0
  25. {antspymm-1.4.5 → antspymm-1.4.8}/docs/convert_adni_dti_to_nrg.R +0 -0
  26. {antspymm-1.4.5 → antspymm-1.4.8}/docs/deepnbm.jpg +0 -0
  27. {antspymm-1.4.5 → antspymm-1.4.8}/docs/example_antspymm_output.csv +0 -0
  28. {antspymm-1.4.5 → antspymm-1.4.8}/docs/example_run_from_directory.py +0 -0
  29. {antspymm-1.4.5 → antspymm-1.4.8}/docs/make_dict_table.Rmd +0 -0
  30. {antspymm-1.4.5 → antspymm-1.4.8}/docs/make_dict_table.html +0 -0
  31. {antspymm-1.4.5 → antspymm-1.4.8}/docs/nrg_cohort_example.py +0 -0
  32. {antspymm-1.4.5 → antspymm-1.4.8}/docs/ptbp_nrg.py +0 -0
  33. {antspymm-1.4.5 → antspymm-1.4.8}/docs/roi_visualization.py +0 -0
  34. {antspymm-1.4.5 → antspymm-1.4.8}/docs/roi_visualization_ppmi.py +0 -0
  35. {antspymm-1.4.5 → antspymm-1.4.8}/docs/step1_blind_qc.py +0 -0
  36. {antspymm-1.4.5 → antspymm-1.4.8}/docs/step2_outlierness.py +0 -0
  37. {antspymm-1.4.5 → antspymm-1.4.8}/docs/step3_mm_nrg_csv.py +0 -0
  38. {antspymm-1.4.5 → antspymm-1.4.8}/docs/step4_aggregate.py +0 -0
  39. {antspymm-1.4.5 → antspymm-1.4.8}/docs/ukbb_to_nrg_processing.py +0 -0
  40. {antspymm-1.4.5 → antspymm-1.4.8}/docs/ukbb_to_nrg_processing2.py +0 -0
  41. {antspymm-1.4.5 → antspymm-1.4.8}/setup.cfg +0 -0
  42. {antspymm-1.4.5 → antspymm-1.4.8}/tests/bids_2_nrg.py +0 -0
  43. {antspymm-1.4.5 → antspymm-1.4.8}/tests/blind_qc.py +0 -0
  44. {antspymm-1.4.5 → antspymm-1.4.8}/tests/deformation_gradient_reo.py +0 -0
  45. {antspymm-1.4.5 → antspymm-1.4.8}/tests/dti_recon.py +0 -0
  46. {antspymm-1.4.5 → antspymm-1.4.8}/tests/dti_reg.py +0 -0
  47. {antspymm-1.4.5 → antspymm-1.4.8}/tests/dwi_rebasing.py +0 -0
  48. {antspymm-1.4.5 → antspymm-1.4.8}/tests/dwi_run.py +0 -0
  49. {antspymm-1.4.5 → antspymm-1.4.8}/tests/dwi_run_ptbp_scrub.py +0 -0
  50. {antspymm-1.4.5 → antspymm-1.4.8}/tests/flair_run.py +0 -0
  51. {antspymm-1.4.5 → antspymm-1.4.8}/tests/joint_dti_recon.py +0 -0
  52. {antspymm-1.4.5 → antspymm-1.4.8}/tests/mm.py +0 -0
  53. {antspymm-1.4.5 → antspymm-1.4.8}/tests/mm_csv.py +0 -0
  54. {antspymm-1.4.5 → antspymm-1.4.8}/tests/mm_nrg.py +0 -0
  55. {antspymm-1.4.5 → antspymm-1.4.8}/tests/nrg_validation.py +0 -0
  56. {antspymm-1.4.5 → antspymm-1.4.8}/tests/outlierness.py +0 -0
  57. {antspymm-1.4.5 → antspymm-1.4.8}/tests/parallel_study_aggregation_example.py +0 -0
  58. {antspymm-1.4.5 → antspymm-1.4.8}/tests/perfusion_ptbp.py +0 -0
  59. {antspymm-1.4.5 → antspymm-1.4.8}/tests/perfusion_run.py +0 -0
  60. {antspymm-1.4.5 → antspymm-1.4.8}/tests/rsfmri_run.py +0 -0
  61. {antspymm-1.4.5 → antspymm-1.4.8}/tests/rsfmri_run_minimal.py +0 -0
  62. {antspymm-1.4.5 → antspymm-1.4.8}/tests/test_reference_run.py +0 -0
  63. {antspymm-1.4.5 → antspymm-1.4.8}/tests/testsr.py +0 -0
  64. {antspymm-1.4.5 → antspymm-1.4.8}/tests/ukbb_rsfmri.py +0 -0
  65. {antspymm-1.4.5 → antspymm-1.4.8}/tests/visualize_tractogram.py +0 -0

{antspymm-1.4.5/antspymm.egg-info → antspymm-1.4.8}/PKG-INFO

@@ -1,6 +1,6 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.4
 Name: antspymm
-Version: 1.4.5
+Version: 1.4.8
 Summary: multi-channel/time-series medical image processing with antspyx
 Author-email: "Avants, Gosselin, Tustison, Reardon" <stnava@gmail.com>
 License: Apache 2.0
@@ -19,9 +19,12 @@ Requires-Dist: nibabel
 Requires-Dist: scipy
 Requires-Dist: siq
 Requires-Dist: scikit-learn
+Dynamic: license-file
 
 # ANTsPyMM
 
+[![DOI](https://zenodo.org/badge/423404500.svg)](https://doi.org/10.5281/zenodo.15131653)
+
 [![CircleCI](https://dl.circleci.com/status-badge/img/gh/ANTsX/ANTsPyMM/tree/main.svg?style=svg)](https://dl.circleci.com/status-badge/redirect/gh/ANTsX/ANTsPyMM/tree/main)
 
 ![mapping](https://i.imgur.com/qKqYjU9.jpeg)

{antspymm-1.4.5 → antspymm-1.4.8}/README.md

@@ -1,5 +1,7 @@
 # ANTsPyMM
 
+[![DOI](https://zenodo.org/badge/423404500.svg)](https://doi.org/10.5281/zenodo.15131653)
+
 [![CircleCI](https://dl.circleci.com/status-badge/img/gh/ANTsX/ANTsPyMM/tree/main.svg?style=svg)](https://dl.circleci.com/status-badge/redirect/gh/ANTsX/ANTsPyMM/tree/main)
 
 ![mapping](https://i.imgur.com/qKqYjU9.jpeg)

{antspymm-1.4.5 → antspymm-1.4.8}/antspymm/mm.py

@@ -3882,9 +3882,13 @@ def joint_dti_recon(
         print("recon after distortion correction", flush=True)
 
     if impute:
+        print("impute begin", flush=True)
         img_LRdwp=impute_dwi( img_LRdwp, verbose=True )
+        print("impute done", flush=True)
     elif censor:
+        print("censor begin", flush=True)
         img_LRdwp, reg_LR['bvals'], reg_LR['bvecs'] = censor_dwi( img_LRdwp, reg_LR['bvals'], reg_LR['bvecs'], verbose=True )
+        print("censor done", flush=True)
     if impute and img_RL is not None:
         img_RLdwp=impute_dwi( img_RLdwp, verbose=True )
     elif censor and img_RL is not None:
@@ -5355,6 +5359,7 @@ def PerAF( x, mask, globalmean=True ):
     return outimg
 
 
+
 def resting_state_fmri_networks( fmri, fmri_template, t1, t1segmentation,
   f=[0.03, 0.08],
   FD_threshold=5.0,
@@ -7281,8 +7286,8 @@ def mm(
             img_RL=dw_image[1],
             bval_RL=bvals[1],
             bvec_RL=bvecs[1],
-            motion_correct='SyN', # set to False if using input from qsiprep
-            denoise=True,
+            motion_correct=dti_motion_correct, # set to False if using input from qsiprep
+            denoise=dti_denoise,
             verbose = verbose)
         mydti = output_dict['DTI']
         # summarize dwi with T1 outputs
@@ -10601,9 +10606,9 @@ def censor_dwi( dwi, bval, bvec, threshold = 0.20, imputeb0=False, mask=None, ve
     list1 = segment_timeseries_by_meanvalue( dwi )['highermeans']
     if imputeb0:
         dwib = impute_timeseries( dwi, list1 ) # focus on the dwi - not the b0
-        looped, list2 = loop_timeseries_censoring( dwib, threshold, mask )
+        looped, list2 = loop_timeseries_censoring( dwib, threshold, mask, verbose=verbose)
     else:
-        looped, list2 = loop_timeseries_censoring( dwi, threshold, mask )
+        looped, list2 = loop_timeseries_censoring( dwi, threshold, mask, verbose=verbose )
     if verbose:
         print( list1 )
         print( list2 )
@@ -10626,18 +10631,21 @@ def flatten_time_series(time_series):
     n_volumes = time_series.shape[3]
     return time_series.reshape(-1, n_volumes).T
 
-def calculate_loop_scores(flattened_series, n_neighbors=20):
+def calculate_loop_scores_full(flattened_series, n_neighbors=20, verbose=True ):
     """
     Calculate Local Outlier Probabilities for each volume.
 
     :param flattened_series: A 2D numpy array from flatten_time_series.
     :param n_neighbors: Number of neighbors to use for calculating LOF scores.
+    :param verbose: boolean
     :return: An array of LoOP scores.
     """
    from PyNomaly import loop
    from sklearn.neighbors import NearestNeighbors
    from sklearn.preprocessing import StandardScaler
    # replace nans with zero
+   if verbose:
+       print("loop: nan_to_num")
    flattened_series=np.nan_to_num(flattened_series, nan=0)
    scaler = StandardScaler()
    scaler.fit(flattened_series)
@@ -10645,12 +10653,109 @@ def calculate_loop_scores(flattened_series, n_neighbors=20):
    data=np.nan_to_num(data, nan=0)
    if n_neighbors > int(flattened_series.shape[0]/2.0):
        n_neighbors = int(flattened_series.shape[0]/2.0)
+   if verbose:
+       print("loop: nearest neighbors init")
    neigh = NearestNeighbors(n_neighbors=n_neighbors, metric='minkowski')
+   if verbose:
+       print("loop: nearest neighbors fit")
    neigh.fit(data)
    d, idx = neigh.kneighbors(data, return_distance=True)
+   if verbose:
+       print("loop: probability")
    m = loop.LocalOutlierProbability(distance_matrix=d, neighbor_matrix=idx, n_neighbors=n_neighbors).fit()
    return m.local_outlier_probabilities[:]
 
+
+def calculate_loop_scores(flattened_series, n_neighbors=20,
+                          n_features_sample=10000, seed=42, verbose=True):
+    """
+    Approximate LoOP scores using a random subset of features to reduce memory usage.
+
+    Parameters:
+        flattened_series (np.ndarray): 2D array of shape (n_samples, n_features)
+        n_neighbors (int): Number of neighbors for LOF/LoOP computation
+        n_features_sample (int): Number of features to sample for approximation
+        seed (int): Random seed for reproducible feature sampling
+        verbose (bool): If True, print detailed progress and dimensions
+
+    Returns:
+        np.ndarray: 1D array of local outlier probabilities (length n_samples)
+    """
+    import numpy as np
+    from sklearn.preprocessing import StandardScaler
+    from sklearn.neighbors import NearestNeighbors
+    from PyNomaly import loop
+
+    # -------------------------------
+    # Step 1: Input stats and cleanup
+    # -------------------------------
+    X = np.nan_to_num(flattened_series, nan=0).astype(np.float32)
+    n_samples, n_features = X.shape
+
+    if verbose:
+        print("\n[LoOP Approximation - Verbose Mode]")
+        print(f"- Original input shape: {X.shape} (samples x features)")
+        print(f"- Requested sampled features: {n_features_sample}")
+
+    if n_features_sample > n_features:
+        n_features_sample = n_features
+        if verbose:
+            print(f"- Requested n_features_sample exceeds available features. Using all {n_features} features.")
+
+    # -------------------------------
+    # Step 2: Feature sampling
+    # -------------------------------
+    rng = np.random.default_rng(seed)
+    sampled_indices = rng.choice(n_features, n_features_sample, replace=False)
+    X_sampled = X[:, sampled_indices]
+
+    if verbose:
+        print(f"- Sampled feature shape: {X_sampled.shape} (samples x sampled_features)")
+        print(f"- Random seed for reproducibility: {seed}")
+
+    # -------------------------------
+    # Step 3: Standardization
+    # -------------------------------
+    scaler = StandardScaler(copy=False)
+    X_sampled = scaler.fit_transform(X_sampled)
+    X_sampled = np.nan_to_num(X_sampled, nan=0)
+
+    # -------------------------------
+    # Step 4: KNN setup for LoOP
+    # -------------------------------
+    if n_neighbors >= n_samples:
+        n_neighbors = max(1, n_samples // 2)
+        if verbose:
+            print(f"- Adjusted n_neighbors to {n_neighbors} (was too large for available samples).")
+
+    if verbose:
+        print(f"- Performing KNN using Minkowski distance (default p=2, Euclidean)")
+        print(f"- Each point will use its {n_neighbors} nearest neighbors for local density estimation")
+
+    neigh = NearestNeighbors(n_neighbors=n_neighbors)
+    neigh.fit(X_sampled)
+    dists, indices = neigh.kneighbors(X_sampled, return_distance=True)
+
+    # -------------------------------
+    # Step 5: LoOP probability calculation
+    # -------------------------------
+    if verbose:
+        print(f"- Distance matrix shape: {dists.shape} (samples x n_neighbors)")
+        print(f"- Neighbor index matrix shape: {indices.shape}")
+        print("- Estimating Local Outlier Probabilities (LoOP)...")
+
+    model = loop.LocalOutlierProbability(
+        distance_matrix=dists,
+        neighbor_matrix=indices,
+        n_neighbors=n_neighbors
+    ).fit()
+
+    if verbose:
+        print("- LoOP scoring complete.\n")
+
+    return model.local_outlier_probabilities[:]
+
+
 def score_fmri_censoring(cbfts, csf_seg, gm_seg, wm_seg ):
     """
     Process CBF time series to remove high-leverage points.
@@ -10718,7 +10823,7 @@ def score_fmri_censoring(cbfts, csf_seg, gm_seg, wm_seg ):
     cbfts_recon_ants = ants.copy_image_info(cbfts, cbfts_recon_ants)
     return cbfts_recon_ants, indx
 
-def loop_timeseries_censoring(x, threshold=0.5, mask=None, verbose=False):
+def loop_timeseries_censoring(x, threshold=0.5, mask=None, n_features_sample=10000, verbose=True):
     """
     Censor high leverage volumes from a time series using Local Outlier Probabilities (LoOP).
 
@@ -10726,6 +10831,7 @@ def loop_timeseries_censoring(x, threshold=0.5, mask=None, verbose=False):
     x (ANTsImage): A 4D time series image.
     threshold (float): Threshold for determining high leverage volumes based on LoOP scores.
     mask (antsImage): restricts to a ROI
+    n_features_sample (int): feature sample size default 5000
     verbose (bool)
 
     Returns:
@@ -10739,10 +10845,12 @@ def loop_timeseries_censoring(x, threshold=0.5, mask=None, verbose=False):
         flattened_series = flatten_time_series(x.numpy())
     else:
         flattened_series = ants.timeseries_to_matrix( x, mask )
-    loop_scores = calculate_loop_scores(flattened_series)
+    if verbose:
+        print("loop_timeseries_censoring: flattened")
+    loop_scores = calculate_loop_scores(flattened_series, n_features_sample=n_features_sample, verbose=verbose )
    high_leverage_volumes = np.where(loop_scores > threshold)[0]
    if verbose:
-       print("LOOP High Leverage Volumes:", high_leverage_volumes)
+       print("loop_timeseries_censoring: High Leverage Volumes:", high_leverage_volumes)
    new_asl = remove_volumes_from_timeseries(x, high_leverage_volumes)
    return new_asl, high_leverage_volumes
 

{antspymm-1.4.5 → antspymm-1.4.8/antspymm.egg-info}/PKG-INFO

@@ -1,6 +1,6 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.4
 Name: antspymm
-Version: 1.4.5
+Version: 1.4.8
 Summary: multi-channel/time-series medical image processing with antspyx
 Author-email: "Avants, Gosselin, Tustison, Reardon" <stnava@gmail.com>
 License: Apache 2.0
@@ -19,9 +19,12 @@ Requires-Dist: nibabel
 Requires-Dist: scipy
 Requires-Dist: siq
 Requires-Dist: scikit-learn
+Dynamic: license-file
 
 # ANTsPyMM
 
+[![DOI](https://zenodo.org/badge/423404500.svg)](https://doi.org/10.5281/zenodo.15131653)
+
 [![CircleCI](https://dl.circleci.com/status-badge/img/gh/ANTsX/ANTsPyMM/tree/main.svg?style=svg)](https://dl.circleci.com/status-badge/redirect/gh/ANTsX/ANTsPyMM/tree/main)
 
 ![mapping](https://i.imgur.com/qKqYjU9.jpeg)

{antspymm-1.4.5 → antspymm-1.4.8}/antspymm.egg-info/SOURCES.txt

@@ -18,6 +18,7 @@ docs/blind_qc.Rmd
 docs/blind_qc.html
 docs/convert_adni_dti_to_nrg.R
 docs/deepnbm.jpg
+docs/describe_mm_data.R
 docs/example_antspymm_output.csv
 docs/example_run_from_directory.py
 docs/make_dict_table.Rmd
@@ -30,6 +31,7 @@ docs/step1_blind_qc.py
 docs/step2_outlierness.py
 docs/step3_mm_nrg_csv.py
 docs/step4_aggregate.py
+docs/template_overlays.py
 docs/ukbb_to_nrg_processing.py
 docs/ukbb_to_nrg_processing2.py
 tests/bids_2_nrg.py
@@ -50,10 +52,12 @@ tests/outlierness.py
 tests/parallel_study_aggregation_example.py
 tests/perfusion_ptbp.py
 tests/perfusion_run.py
+tests/perfusion_run_nnl.py
 tests/rsfmri_run.py
 tests/rsfmri_run_minimal.py
 tests/test_loop.py
 tests/test_reference_run.py
+tests/test_rsfmri_run_minimal_ptbp.py
 tests/testsr.py
 tests/ukbb_rsfmri.py
 tests/visualize_tractogram.py

antspymm-1.4.8/docs/describe_mm_data.R

@@ -0,0 +1,144 @@
+# powers points 10.1016/j.conb.2012.12.009
+powers=read.csv("~/.antspymm/powers_mni_itk.csv")
+# 10.1016/j.neuroimage.2008.07.009
+# https://doi.org/10.1016/j.neuroimage.2007.07.053
+jhu=read.csv("~/.antspyt1w/FA_JHU_labels_edited.csv")
+# doi: 10.3389/fnins.2012.00171
+# http://dx.doi.org/10.1016/j.neuroimage.2006.01.021
+dktcsv=read.csv("~/.antspyt1w/dkt.csv")
+dktcsv=dktcsv[dktcsv$Label>0,]
+# hipp https://doi.org/10.1101/2023.01.17.23284693
+hipp=read.csv("~/.antspyt1w/mtl_description.csv")
+hipp$Anatomy=hipp$Description
+hipp$Anatomy=gsub("alEC"," antero-lateral entorhinal cortex",hipp$Anatomy)
+hipp$Anatomy=gsub("pMEC"," postero-medial entorhinal cortex",hipp$Anatomy)
+hipp$Anatomy=gsub("DG"," dentate gyrus",hipp$Anatomy)
+hipp$Anatomy=gsub("CA"," cornu ammonis", hipp$Anatomy)
+
+# https://doi.org/10.1101/211201
+cit=read.csv("~/.antspyt1w/CIT168_Reinf_Learn_v1_label_descriptions_pad.csv")
+cit$Anatomy=NA
+cit$Anatomy[ grep("STR_Ca", cit$Description )] = 'caudate'
+cit$Anatomy[ grep("STR_Pu", cit$Description )] = 'putamen'
+cit$Anatomy[ grep("STR_NAC", cit$Description )] = 'Nucleus Accumbens'
+cit$Anatomy[ grep("VTA", cit$Description )] = 'Ventral Tegmental Area'
+cit$Anatomy[ grep("PBP", cit$Description )] = 'Parabrachial Pigmented Nucleus'
+cit$Anatomy[ grep("SNc", cit$Description )] = 'Substantia Nigra pars compacta'
+cit$Anatomy[ grep("SNr", cit$Description )] = 'Substantia Nigra pars reticulated'
+cit$Anatomy[ grep("GPe", cit$Description )] = 'globus pallidus externa'
+cit$Anatomy[ grep("GPi", cit$Description )] = 'globus pallidus interna'
+cit$Anatomy[ grep("RN", cit$Description )] = 'red nucleus'
+cit$Anatomy[ grep("STH", cit$Description )] = 'Subthalamic Nucleus'
+cit$Anatomy[ grep("HTH", cit$Description )] = 'Hypothalamus'
+cit$Anatomy[ grep("HN", cit$Description )] = 'Habenular Nuclei'
+cit$Anatomy[ grep("EXA", cit$Description )] = 'extended amygdala'
+cit$Anatomy[ grep("BNST", cit$Description )] = 'bed nuclei of the stria terminali'
+cit$Anatomy[ grep("MN", cit$Description )] = 'mammillary nucleus'
+cit$Anatomy[ grep("SLEA", cit$Description )] = 'sublenticular extended amygdala'
+cit$Anatomy[ grep("VeP", cit$Description )] = 'ventral pallidum'
+
+interpretcnx<-function( x ) {
+  breaker=gsub("DTI_cnxcount","",x)
+  temp = unlist(strsplit(breaker,"_"))
+  ind=temp[1]
+  anat=paste( temp[-1],collapse='_')
+  return( paste( anat, "to", dktcsv[as.integer(ind),'Description'] ) )
+}
+interpretcnx2<-function( x ) {
+  breaker=gsub("DTI_cnxcount","",x)
+  temp = unlist(strsplit(breaker,"_"))
+  ind=temp[1]
+  anat=paste( temp[-1],collapse='_')
+  return( dktcsv[as.integer(ind),'Description'] )
+}
+# dd=read.csv("joined_mm_or2.csv")
+zz=data.frame( Label=colnames(dd))
+qcrows=min(grep("RandBasis",zz$Label)):grep("resnetGrade", zz$Label)
+zz$Modality='Other'
+zz[ grep("T1Hier", zz$Label), 'Modality']='T1 hierarchical processing'
+zz[ grep("T1w", zz$Label), 'Modality']='T1 DiReCT thickness processing'
+zz[ grep("DTI", zz$Label), 'Modality']='DTI'
+zz[ grep("NM2DMT", zz$Label), 'Modality']='Neuromelanin'
+zz[ grep("rsfMRI", zz$Label), 'Modality']='restingStatefMRI'
+zz[ grep("lair", zz$Label), 'Modality']='Flair'
+zz[ grep("left", zz$Label), 'side']='left'
+zz[ grep("right", zz$Label), 'side']='right'
+zz$Atlas='ANTs'
+zz[ grep("dkt", zz$Label), 'Atlas']='desikan-killiany-tourville'
+zz[ grep("cnxcou", zz$Label), 'Atlas']='desikan-killiany-tourville'
+zz[ grep("jhu", zz$Label), 'Atlas']='johns hopkins white matter'
+zz[ grep("cit", zz$Label), 'Atlas']='CIT168'
+zz[ grep("nbm", zz$Label), 'Atlas']='BF'
+zz[ grep("ch13", zz$Label), 'Atlas']='BF'
+zz[ grep("mtl", zz$Label), 'Atlas']='MTL'
+zz[ grep("rsfMRI", zz$Label),'Atlas']='power peterson fMRI meta-analyses'
+zz[qcrows,'Atlas']='quality control metrics'
+zz[qcrows,'Measurement']='QC'
+zz$Measurement[ grep("FD", zz$Label)]='motion statistic on framewise displacement'
+zz$Measurement[ grep("thk", zz$Label)]='geometry/thickness'
+zz$Measurement[ grep("area", zz$Label)]='geometry/area'
+zz$Measurement[ grep("vol", zz$Label)]='geometry/volume'
+zz$Measurement[ grep("mean_md", zz$Label)]='mean diffusion'
+zz$Measurement[ grep("mean_fa", zz$Label)]='fractional anisotropy'
+zz$Measurement[ grep("cnx", zz$Label)]='tractography-based connectivity'
+zz$Anatomy = zz$Label
+zz$Anatomy = gsub("_thk_","", zz$Anatomy)
+zz$Anatomy = gsub("_area_","", zz$Anatomy)
+zz$Anatomy = gsub("_volume_","", zz$Anatomy)
+zz$Anatomy = gsub("DTI_cnxcount","", zz$Anatomy)
+zz$Anatomy = gsub("DTI_mean_md","", zz$Anatomy)
+zz$Anatomy = gsub("DTI_mean_fa","", zz$Anatomy)
+zz$Anatomy = gsub("T1Hier_","", zz$Anatomy)
+zz$Anatomy = gsub("T1Hier","", zz$Anatomy)
+# fix dkt
+dktlabs=dktcsv$Description
+dktlabs=gsub("right ","",dktlabs)
+dktlabs=gsub("left ","",dktlabs)
+dktlabs2=gsub(" ","_",dktlabs)
+for ( k in 1:length(dktlabs) ) {
+  gg=grep( dktlabs[k], zz$Label)
+  zz[ gg, "Atlas"]="desikan-killiany-tourville"
+  zz[ gg, "Anatomy"]=dktlabs[k]
+  gg=grep( dktlabs2[k], zz$Label)
+  zz[ gg, "Atlas"]="desikan-killiany-tourville"
+  zz[ gg, "Anatomy"]=dktlabs[k]
+}
+
+# fix cit
+citlabs=tolower( cit$Description)
+for ( k in 1:length(citlabs) ) {
+  gg=grep( citlabs[k], zz$Label)
+  zz[ gg, "Atlas"]="CIT168"
+  zz[ gg, "Anatomy"]=cit$Anatomy[k]
+}
+zz$Anatomy = gsub("DTIfa","", zz$Anatomy)
+zz$Anatomy = gsub("DTImd","", zz$Anatomy)
+zz$Anatomy = gsub("dktregions","", zz$Anatomy)
+zz$Anatomy = gsub("dktcortex"," cortex only ", zz$Anatomy)
+zz$Anatomy = gsub("_right_","", zz$Anatomy)
+zz$Anatomy = gsub("_left_","", zz$Anatomy)
+zz$Anatomy = gsub("right","", zz$Anatomy)
+zz$Anatomy = gsub("left","", zz$Anatomy)
+zz$Anatomy = gsub("jhu_icbm_labels_1mm","", zz$Anatomy)
+zz[ grep("u_hier_id", zz$Label), -1 ]='unique id'
+cnxrows=grep("DTI_cnxcount",zz$Label)
+for ( k in cnxrows )
+  zz$Anatomy[k]=interpretcnx( zz[k,'Label'] )
+
+zz[ multigrep( c("rsfMRI","R"), zz$Label, intersect=TRUE), 'side' ]='right'
+zz[ multigrep( c("rsfMRI","L"), zz$Label, intersect=TRUE), 'side' ]='left'
+zz$Measurement[ multigrep( c("rsfMRI","_2_"), zz$Label, intersect=TRUE) ]='network correlation'
+zz$Measurement[ multigrep(c("rsfMRI","_alff"), zz$Label, intersect=TRUE) ]='amplitude of low frequency fluctuations ALFF'
+zz$Measurement[ multigrep( c("rsfMRI","_falff"), zz$Label, intersect=TRUE) ]='fractional amplitude of low frequency fluctuations fALFF'
+zz$Anatomy = gsub("rsfMRI_", "", zz$Anatomy )
+zz$Anatomy = gsub("falffPoint", "", zz$Anatomy )
+zz$Anatomy = gsub("alffPoint", "", zz$Anatomy )
+noncnx=1:1888
+for ( k in sample(noncnx, 3) ) print( zz[k,c("Label","Atlas","Anatomy")] )
+
+zz[ zz$Label == 'Flair', 'Measurement' ]='white matter hyper-intensity'
+zz[ zz$Label == 'T2Flair_flair_wmh_prior', 'Measurement' ]='prior-constrained white matter hyper-intensity'
+
+zz[ multigrep( c("NM2DMT", "q0pt"), zz$Label, intersect=TRUE), "Measurement" ]='neuromelanin intensity quantile'
+
+write.csv( zz, "~/code/ANTsPyMM/antspymm_data_dictionary.csv", row.names=FALSE)
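
The new R script above appears to be the generator of the antspymm_data_dictionary.csv that already ships under docs/ (see the file list). A small, hedged Python sketch of how that dictionary might be consulted when interpreting wide ANTsPyMM output tables; the Label/Atlas/Measurement/Anatomy columns come from the R code above, while the CSV path and the helper name are illustrative assumptions:

import pandas as pd

# data dictionary written by docs/describe_mm_data.R (path is an assumption)
dd = pd.read_csv('docs/antspymm_data_dictionary.csv')

def describe_column(label):
    """Return the Atlas/Measurement/Anatomy annotation for one output column name."""
    row = dd.loc[dd['Label'] == label]
    return None if row.empty else row[['Atlas', 'Measurement', 'Anatomy']]

# look up the annotation of the first column listed in the dictionary
print(describe_column(dd['Label'].iloc[0]))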

antspymm-1.4.8/docs/template_overlays.py

@@ -0,0 +1,17 @@
+
+######## template figures ########
+import ants
+import antspymm
+import pandas as pd
+# Example Usage
+scalar_label_df = pd.DataFrame({'label': range(33), 'scalar_value': range(33)})
+prefix = '../PPMI_template0_'
+print("begin")
+for t in ['mtl','bf','jhuwm','cerebellum','cit168']:
+    print( t )
+    proimgs=antspymm.template_figure_with_overlay(scalar_label_df, prefix, template=t, outputfilename='/tmp/py_viz_'+t+'.png' )
+t='ctx'
+scalar_label_df = pd.DataFrame({'label': range(1001,1200), 'scalar_value': range(1001,1200)})
+proimgs=antspymm.template_figure_with_overlay(scalar_label_df, prefix, template=t, mask_dilation=0, outputfilename='/tmp/py_viz_'+t+'.png')
+
+

{antspymm-1.4.5 → antspymm-1.4.8}/pyproject.toml

@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "antspymm"
-version = "1.4.5"
+version = "1.4.8"
 description = "multi-channel/time-series medical image processing with antspyx"
 readme = "README.md"
 requires-python = ">=3.8"

antspymm-1.4.8/tests/perfusion_run_nnl.py

@@ -0,0 +1,46 @@
+import sys, os
+import unittest
+
+os.environ["TF_NUM_INTEROP_THREADS"] = "8"
+os.environ["TF_NUM_INTRAOP_THREADS"] = "8"
+os.environ["ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS"] = "8"
+
+import tempfile
+import shutil
+import tensorflow as tf
+import antspymm
+import antspyt1w
+import antspynet
+import ants
+import numpy as np
+from scipy.stats import median_abs_deviation
+import math
+
+islocal = False
+t1fn = '/Users/stnava/code/NNL/data/sub-1001-0462_run-001_t1.nii.gz'
+ffn='/Users/stnava/code/NNL/data/sub-1001-0462_run-001_perf.nii.gz'
+fmri = ants.image_read( ffn )
+#dkt
+if not 'dkt' in globals():
+    t1head = ants.image_read( t1fn ).n3_bias_field_correction( 8 ).n3_bias_field_correction( 4 )
+    t1bxt = antspynet.brain_extraction( t1head, 't1' ).threshold_image( 0.3, 1.0 )
+    t1 = t1bxt * t1head
+    t1seg = antspynet.deep_atropos( t1head )
+    t1segmentation = t1seg['segmentation_image']
+    dkt = antspynet.desikan_killiany_tourville_labeling( t1head )
+#################
+
+#################
+type_of_transform='Rigid'
+tc='alternating'
+fmri_template, hlinds = antspymm.loop_timeseries_censoring( fmri, 0.1 )
+fmri_template = ants.get_average_of_timeseries( fmri_template )
+print("do perf")
+olthresh=0.2
+perf = antspymm.bold_perfusion( fmri, fmri_template, t1head, t1,
+    t1segmentation, dkt, nc=4, type_of_transform=type_of_transform,
+    spa=(0.,0.,0.,0.),
+    outlier_threshold=olthresh, add_FD_to_nuisance=False, verbose=True )
+ants.image_write( ants.iMath( perf['perfusion'], "Normalize" ), '/tmp/temp.nii.gz' )
+ants.image_write( perf['motion_corrected'], '/tmp/temp2.nii.gz' )
+ants.plot( ants.iMath( perf['perfusion'], "Normalize" ), axis=2, crop=True )

{antspymm-1.4.5 → antspymm-1.4.8}/tests/test_loop.py

@@ -15,6 +15,5 @@ import math
 testingClass = unittest.TestCase( )
 islocal = False
 idp = "LS2001_3T_rfMRI_REST1_LR_gdc"
-# fmri = ants.image_read( antspymm.get_data( idp, target_extension=".nii.gz") )
-# fmri = ants.resample_image( fmri, [20,20,20,20], use_voxels=True )
-# fmri_template, hlinds = antspymm.loop_timeseries_censoring( fmri, 0.1 )
+fmri = ants.image_read( antspymm.get_data( idp, target_extension=".nii.gz") )
+fmri_template, hlinds = antspymm.loop_timeseries_censoring( fmri, 0.1 )

antspymm-1.4.8/tests/test_rsfmri_run_minimal_ptbp.py

@@ -0,0 +1,53 @@
+import sys, os
+import unittest
+
+os.environ["TF_NUM_INTEROP_THREADS"] = "8"
+os.environ["TF_NUM_INTRAOP_THREADS"] = "8"
+os.environ["ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS"] = "8"
+
+import tempfile
+import shutil
+import tensorflow as tf
+import antspymm
+import antspyt1w
+import antspynet
+import ants
+import numpy as np
+import math
+import glob
+
+testingClass = unittest.TestCase( )
+id = 'PEDS131'; dt = '20130816' # high motion - bad volumes
+id = "PEDS074"; dt = "20110803" # high motion - bad volumes
+# id = 'PEDS144'; dt = '20131212' # not so bad
+# id = "PEDS107"; dt = "20130118" # challenging
+prefix = "/Users/stnava/data/PTBP/images/" + id + "/" + dt
+bold = glob.glob( prefix + '/BOLD/' + id + "_" + dt + "*bold*nii.gz" )
+t1w = glob.glob( prefix + '/Anatomy/' + id + "_" + dt + "*t1.nii.gz" )
+rsfn = bold[0]
+print( rsfn )
+t1fn = t1w[0]
+import pandas as pd
+print("do t1")
+if not "t1" in globals():
+    t1 = ants.image_read( t1fn ).n3_bias_field_correction( 8 ).n3_bias_field_correction( 4 )
+    t1bxt = antspynet.brain_extraction( t1, 't1' ).threshold_image( 0.3, 1.0 )
+    t1seg = antspynet.deep_atropos( t1 )
+    t1segmentation = t1seg['segmentation_image']
+
+if not 'rsfpro' in globals():
+    print("do rsf: default")
+    img1 = ants.image_read( rsfn )
+    fmri_template, hlinds = antspymm.loop_timeseries_censoring( img1, 0.1 )
+    fmri_template = antspymm.get_average_rsf( fmri_template )
+    # img1 = antspymm.remove_volumes_from_timeseries(img1, list(range(55,4000)))
+    rsfpro = antspymm.resting_state_fmri_networks(
+        img1,
+        fmri_template, t1 * t1bxt, t1segmentation,
+        verbose=True )
+    mm = { 'rsf': rsfpro }
+    antspymm.write_mm( '/tmp/RSF', mm )
+ants.plot( rsfpro['fmri_template'], rsfpro[ rsfpro['dfnname']], crop=True, axis=2 )
+ants.plot( rsfpro['fmri_template'], rsfpro['alff'], crop=True, axis=2 )
+ants.plot( rsfpro['fmri_template'], rsfpro['falff'], crop=True, axis=2 )
+ants.plot( rsfpro['fmri_template'], rsfpro['PerAF'], crop=True, axis=2 )