antspymm 1.4.5__py3-none-any.whl → 1.4.8__py3-none-any.whl
This diff shows the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their public registries.
- antspymm/mm.py +116 -8
- {antspymm-1.4.5.dist-info → antspymm-1.4.8.dist-info}/METADATA +5 -2
- antspymm-1.4.8.dist-info/RECORD +7 -0
- {antspymm-1.4.5.dist-info → antspymm-1.4.8.dist-info}/WHEEL +1 -1
- antspymm-1.4.5.dist-info/RECORD +0 -7
- {antspymm-1.4.5.dist-info → antspymm-1.4.8.dist-info/licenses}/LICENSE +0 -0
- {antspymm-1.4.5.dist-info → antspymm-1.4.8.dist-info}/top_level.txt +0 -0
antspymm/mm.py
CHANGED
@@ -3882,9 +3882,13 @@ def joint_dti_recon(
     print("recon after distortion correction", flush=True)
 
     if impute:
+        print("impute begin", flush=True)
         img_LRdwp=impute_dwi( img_LRdwp, verbose=True )
+        print("impute done", flush=True)
     elif censor:
+        print("censor begin", flush=True)
         img_LRdwp, reg_LR['bvals'], reg_LR['bvecs'] = censor_dwi( img_LRdwp, reg_LR['bvals'], reg_LR['bvecs'], verbose=True )
+        print("censor done", flush=True)
     if impute and img_RL is not None:
         img_RLdwp=impute_dwi( img_RLdwp, verbose=True )
     elif censor and img_RL is not None:
@@ -5355,6 +5359,7 @@ def PerAF( x, mask, globalmean=True ):
     return outimg
 
 
+
 def resting_state_fmri_networks( fmri, fmri_template, t1, t1segmentation,
   f=[0.03, 0.08],
   FD_threshold=5.0,
@@ -7281,8 +7286,8 @@ def mm(
         img_RL=dw_image[1],
         bval_RL=bvals[1],
         bvec_RL=bvecs[1],
-        motion_correct=
-        denoise=
+        motion_correct=dti_motion_correct, # set to False if using input from qsiprep
+        denoise=dti_denoise,
         verbose = verbose)
     mydti = output_dict['DTI']
     # summarize dwi with T1 outputs
@@ -10601,9 +10606,9 @@ def censor_dwi( dwi, bval, bvec, threshold = 0.20, imputeb0=False, mask=None, ve
     list1 = segment_timeseries_by_meanvalue( dwi )['highermeans']
     if imputeb0:
         dwib = impute_timeseries( dwi, list1 ) # focus on the dwi - not the b0
-        looped, list2 = loop_timeseries_censoring( dwib, threshold, mask )
+        looped, list2 = loop_timeseries_censoring( dwib, threshold, mask, verbose=verbose)
     else:
-        looped, list2 = loop_timeseries_censoring( dwi, threshold, mask )
+        looped, list2 = loop_timeseries_censoring( dwi, threshold, mask, verbose=verbose )
     if verbose:
         print( list1 )
         print( list2 )
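
For orientation, here is a minimal, hypothetical sketch of calling censor_dwi now that verbose is forwarded into loop_timeseries_censoring. It assumes the function is exported at the package level (it is defined in antspymm/mm.py) and uses synthetic placeholder data rather than real diffusion volumes; the (n_volumes, 3) b-vector layout is an assumption.

```python
import numpy as np
import ants
import antspymm

# Placeholder "DWI": 8x8x8 voxels, 12 volumes (not real diffusion data).
dwi = ants.from_numpy(np.random.rand(8, 8, 8, 12).astype("float32"))
bval = np.array([0.0] + [1000.0] * 11)   # one b0 followed by 11 diffusion volumes
bvec = np.random.rand(12, 3)             # assumed (n_volumes, 3) layout

# verbose now propagates into loop_timeseries_censoring, so the LoOP steps are printed.
dwi_c, bval_c, bvec_c = antspymm.censor_dwi(dwi, bval, bvec, threshold=0.20, verbose=True)
print(dwi_c.shape, len(bval_c))
```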
@@ -10626,18 +10631,21 @@ def flatten_time_series(time_series):
     n_volumes = time_series.shape[3]
     return time_series.reshape(-1, n_volumes).T
 
-def calculate_loop_scores(flattened_series, n_neighbors=20):
+def calculate_loop_scores_full(flattened_series, n_neighbors=20, verbose=True ):
     """
     Calculate Local Outlier Probabilities for each volume.
 
     :param flattened_series: A 2D numpy array from flatten_time_series.
     :param n_neighbors: Number of neighbors to use for calculating LOF scores.
+    :param verbose: boolean
     :return: An array of LoOP scores.
     """
     from PyNomaly import loop
     from sklearn.neighbors import NearestNeighbors
     from sklearn.preprocessing import StandardScaler
     # replace nans with zero
+    if verbose:
+        print("loop: nan_to_num")
     flattened_series=np.nan_to_num(flattened_series, nan=0)
     scaler = StandardScaler()
     scaler.fit(flattened_series)
@@ -10645,12 +10653,109 @@ def calculate_loop_scores(flattened_series, n_neighbors=20):
     data=np.nan_to_num(data, nan=0)
     if n_neighbors > int(flattened_series.shape[0]/2.0):
         n_neighbors = int(flattened_series.shape[0]/2.0)
+    if verbose:
+        print("loop: nearest neighbors init")
     neigh = NearestNeighbors(n_neighbors=n_neighbors, metric='minkowski')
+    if verbose:
+        print("loop: nearest neighbors fit")
     neigh.fit(data)
     d, idx = neigh.kneighbors(data, return_distance=True)
+    if verbose:
+        print("loop: probability")
     m = loop.LocalOutlierProbability(distance_matrix=d, neighbor_matrix=idx, n_neighbors=n_neighbors).fit()
     return m.local_outlier_probabilities[:]
 
+
+def calculate_loop_scores(flattened_series, n_neighbors=20,
+        n_features_sample=10000, seed=42, verbose=True):
+    """
+    Approximate LoOP scores using a random subset of features to reduce memory usage.
+
+    Parameters:
+    flattened_series (np.ndarray): 2D array of shape (n_samples, n_features)
+    n_neighbors (int): Number of neighbors for LOF/LoOP computation
+    n_features_sample (int): Number of features to sample for approximation
+    seed (int): Random seed for reproducible feature sampling
+    verbose (bool): If True, print detailed progress and dimensions
+
+    Returns:
+    np.ndarray: 1D array of local outlier probabilities (length n_samples)
+    """
+    import numpy as np
+    from sklearn.preprocessing import StandardScaler
+    from sklearn.neighbors import NearestNeighbors
+    from PyNomaly import loop
+
+    # -------------------------------
+    # Step 1: Input stats and cleanup
+    # -------------------------------
+    X = np.nan_to_num(flattened_series, nan=0).astype(np.float32)
+    n_samples, n_features = X.shape
+
+    if verbose:
+        print("\n[LoOP Approximation - Verbose Mode]")
+        print(f"- Original input shape: {X.shape} (samples x features)")
+        print(f"- Requested sampled features: {n_features_sample}")
+
+    if n_features_sample > n_features:
+        n_features_sample = n_features
+        if verbose:
+            print(f"- Requested n_features_sample exceeds available features. Using all {n_features} features.")
+
+    # -------------------------------
+    # Step 2: Feature sampling
+    # -------------------------------
+    rng = np.random.default_rng(seed)
+    sampled_indices = rng.choice(n_features, n_features_sample, replace=False)
+    X_sampled = X[:, sampled_indices]
+
+    if verbose:
+        print(f"- Sampled feature shape: {X_sampled.shape} (samples x sampled_features)")
+        print(f"- Random seed for reproducibility: {seed}")
+
+    # -------------------------------
+    # Step 3: Standardization
+    # -------------------------------
+    scaler = StandardScaler(copy=False)
+    X_sampled = scaler.fit_transform(X_sampled)
+    X_sampled = np.nan_to_num(X_sampled, nan=0)
+
+    # -------------------------------
+    # Step 4: KNN setup for LoOP
+    # -------------------------------
+    if n_neighbors >= n_samples:
+        n_neighbors = max(1, n_samples // 2)
+        if verbose:
+            print(f"- Adjusted n_neighbors to {n_neighbors} (was too large for available samples).")
+
+    if verbose:
+        print(f"- Performing KNN using Minkowski distance (default p=2, Euclidean)")
+        print(f"- Each point will use its {n_neighbors} nearest neighbors for local density estimation")
+
+    neigh = NearestNeighbors(n_neighbors=n_neighbors)
+    neigh.fit(X_sampled)
+    dists, indices = neigh.kneighbors(X_sampled, return_distance=True)
+
+    # -------------------------------
+    # Step 5: LoOP probability calculation
+    # -------------------------------
+    if verbose:
+        print(f"- Distance matrix shape: {dists.shape} (samples x n_neighbors)")
+        print(f"- Neighbor index matrix shape: {indices.shape}")
+        print("- Estimating Local Outlier Probabilities (LoOP)...")
+
+    model = loop.LocalOutlierProbability(
+        distance_matrix=dists,
+        neighbor_matrix=indices,
+        n_neighbors=n_neighbors
+    ).fit()
+
+    if verbose:
+        print("- LoOP scoring complete.\n")
+
+    return model.local_outlier_probabilities[:]
+
+
 def score_fmri_censoring(cbfts, csf_seg, gm_seg, wm_seg ):
     """
     Process CBF time series to remove high-leverage points.
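
The net effect of this hunk: the original exact computation is kept under the new name calculate_loop_scores_full, while calculate_loop_scores now approximates LoOP on a random subset of features (voxels) to reduce memory use. Below is a minimal sketch of the difference on synthetic data, assuming both helpers are exported at the package level and that PyNomaly and scikit-learn are installed; the array shapes are arbitrary.

```python
import numpy as np
import antspymm

# 40 "volumes" x 50,000 "voxels" of synthetic data, with one obvious outlier volume.
rng = np.random.default_rng(0)
X = rng.normal(size=(40, 50000)).astype(np.float32)
X[7] += 5.0  # shift volume 7 so it stands out

# Exact LoOP over every feature (the renamed original) ...
scores_full = antspymm.calculate_loop_scores_full(X, n_neighbors=10, verbose=False)
# ... versus the memory-friendlier approximation on 5,000 randomly sampled features.
scores_approx = antspymm.calculate_loop_scores(
    X, n_neighbors=10, n_features_sample=5000, seed=42, verbose=False)

print(np.argmax(scores_full), np.argmax(scores_approx))  # both should flag volume 7
```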
@@ -10718,7 +10823,7 @@ def score_fmri_censoring(cbfts, csf_seg, gm_seg, wm_seg ):
     cbfts_recon_ants = ants.copy_image_info(cbfts, cbfts_recon_ants)
     return cbfts_recon_ants, indx
 
-def loop_timeseries_censoring(x, threshold=0.5, mask=None, verbose=False):
+def loop_timeseries_censoring(x, threshold=0.5, mask=None, n_features_sample=10000, verbose=True):
     """
     Censor high leverage volumes from a time series using Local Outlier Probabilities (LoOP).
 
@@ -10726,6 +10831,7 @@ def loop_timeseries_censoring(x, threshold=0.5, mask=None, verbose=False):
     x (ANTsImage): A 4D time series image.
     threshold (float): Threshold for determining high leverage volumes based on LoOP scores.
     mask (antsImage): restricts to a ROI
+    n_features_sample (int): feature sample size default 5000
     verbose (bool)
 
     Returns:
@@ -10739,10 +10845,12 @@ def loop_timeseries_censoring(x, threshold=0.5, mask=None, verbose=False):
         flattened_series = flatten_time_series(x.numpy())
     else:
         flattened_series = ants.timeseries_to_matrix( x, mask )
-
+    if verbose:
+        print("loop_timeseries_censoring: flattened")
+    loop_scores = calculate_loop_scores(flattened_series, n_features_sample=n_features_sample, verbose=verbose )
     high_leverage_volumes = np.where(loop_scores > threshold)[0]
     if verbose:
-        print("
+        print("loop_timeseries_censoring: High Leverage Volumes:", high_leverage_volumes)
     new_asl = remove_volumes_from_timeseries(x, high_leverage_volumes)
     return new_asl, high_leverage_volumes
 
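
loop_timeseries_censoring gains an n_features_sample argument that is forwarded to calculate_loop_scores, and verbose now defaults to True. A small, hypothetical end-to-end sketch on a synthetic 4D image follows, assuming antspyx is installed and the function is exported at the package level.

```python
import numpy as np
import ants
import antspymm

# Synthetic 4D time series: 10x10x10 voxels, 30 time points (placeholder data).
arr = np.random.rand(10, 10, 10, 30).astype("float32")
arr[..., 12] += 3.0  # inject one high-leverage volume
ts = ants.from_numpy(arr)

# n_features_sample caps how many voxels feed the LoOP approximation;
# verbose=True prints the "loop_timeseries_censoring: ..." progress messages added above.
clean_ts, bad_vols = antspymm.loop_timeseries_censoring(
    ts, threshold=0.5, mask=None, n_features_sample=500, verbose=True)
print("censored volumes:", bad_vols, "remaining:", clean_ts.shape[3])
```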
{antspymm-1.4.5.dist-info → antspymm-1.4.8.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
-Metadata-Version: 2.
+Metadata-Version: 2.4
 Name: antspymm
-Version: 1.4.5
+Version: 1.4.8
 Summary: multi-channel/time-series medical image processing with antspyx
 Author-email: "Avants, Gosselin, Tustison, Reardon" <stnava@gmail.com>
 License: Apache 2.0
@@ -19,9 +19,12 @@ Requires-Dist: nibabel
 Requires-Dist: scipy
 Requires-Dist: siq
 Requires-Dist: scikit-learn
+Dynamic: license-file
 
 # ANTsPyMM
 
+[](https://doi.org/10.5281/zenodo.15131653)
+
 [](https://dl.circleci.com/status-badge/redirect/gh/ANTsX/ANTsPyMM/tree/main)
 
 
antspymm-1.4.8.dist-info/RECORD
ADDED
@@ -0,0 +1,7 @@
+antspymm/__init__.py,sha256=ZdNJyHwS6rzq59v0OK3tE3qSTD0za2iULzSLGkM_0uc,4527
+antspymm/mm.py,sha256=tJXaT-81XEjNjCOhmGKCjSRB7HEM2z_mlAWxKwJlc3M,517529
+antspymm-1.4.8.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+antspymm-1.4.8.dist-info/METADATA,sha256=ETYrQeMwhaF6LkNzhXZrN2htysVl3rvUmUX2yJymHsw,25781
+antspymm-1.4.8.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+antspymm-1.4.8.dist-info/top_level.txt,sha256=iyD1sRhCKzfwKRJLq5ZUeV9xsv1cGQl8Ejp6QwXM1Zg,9
+antspymm-1.4.8.dist-info/RECORD,,
antspymm-1.4.5.dist-info/RECORD
DELETED
@@ -1,7 +0,0 @@
-antspymm/__init__.py,sha256=ZdNJyHwS6rzq59v0OK3tE3qSTD0za2iULzSLGkM_0uc,4527
-antspymm/mm.py,sha256=CKNLB3F00E5BOvNHTLoHXGN7l3fXzr9Tt3Oc-BiSvfM,513311
-antspymm-1.4.5.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
-antspymm-1.4.5.dist-info/METADATA,sha256=68JOQDfbTRq3y5NrC3R_BsELpzGbupAYA55L72wektY,25668
-antspymm-1.4.5.dist-info/WHEEL,sha256=A3WOREP4zgxI0fKrHUG8DC8013e3dK3n7a6HDbcEIwE,91
-antspymm-1.4.5.dist-info/top_level.txt,sha256=iyD1sRhCKzfwKRJLq5ZUeV9xsv1cGQl8Ejp6QwXM1Zg,9
-antspymm-1.4.5.dist-info/RECORD,,
{antspymm-1.4.5.dist-info → antspymm-1.4.8.dist-info/licenses}/LICENSE
File without changes

{antspymm-1.4.5.dist-info → antspymm-1.4.8.dist-info}/top_level.txt
File without changes