antspymm 1.4.8__py3-none-any.whl → 1.5.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
antspymm/__init__.py CHANGED
@@ -5,6 +5,7 @@ except:
     pass
 
 from .mm import get_data
+from .mm import ants_to_nibabel_affine
 from .mm import get_dti
 from .mm import triangular_to_tensor
 from .mm import dti_numpy_to_image
antspymm/mm.py CHANGED
@@ -93,6 +93,7 @@ __all__ = ['version',
     'loop_timeseries_censoring',
     'clean_tmp_directory',
     'validate_nrg_file_format',
+    'ants_to_nibabel_affine',
     'dict_to_dataframe']
 
 from pathlib import Path
@@ -288,12 +289,44 @@ def get_antsimage_keys(dictionary):
     """
     return [key for key, value in dictionary.items() if isinstance(value, ants.core.ants_image.ANTsImage)]
 
-def to_nibabel(img: "ants.core.ants_image.ANTsImage"):
-    with tempfile.TemporaryDirectory() as tmp:
-        temp_file_name = os.path.join(tmp, str(uuid.uuid1()) + '.nii.gz')
-        ants.image_write(img, temp_file_name)
-        nibabel_image = nib.load(temp_file_name)
-    return(nibabel_image)
+def to_nibabel(img: "ants.core.ants_image.ANTsImage") -> nib.Nifti1Image:
+    """
+    Convert an ANTsPy image to a Nibabel Nifti1Image in memory, using the correct spatial affine.
+
+    Parameters:
+        img (ants.ANTsImage): An image from ANTsPy.
+
+    Returns:
+        nib.Nifti1Image: The corresponding Nibabel image with spatial orientation in RAS.
+    """
+    array_data = img.numpy()  # get voxel data as a NumPy array
+    affine = ants_to_nibabel_affine(img)
+    return nib.Nifti1Image(array_data, affine)
+
+def ants_to_nibabel_affine(ants_img):
+    """
+    Convert an ANTsPy image (in LPS space) to a Nibabel-compatible affine (in RAS space).
+    Handles 2D, 3D, and 4D input (only the spatial dimensions are encoded in the affine).
+
+    Returns:
+        4x4 np.ndarray affine matrix in RAS space.
+    """
+    spatial_dim = ants_img.dimension
+    spacing = np.array(ants_img.spacing)
+    origin = np.array(ants_img.origin)
+    direction = np.array(ants_img.direction).reshape((spatial_dim, spatial_dim))
+    # Compute the rotation-scale matrix
+    affine_linear = direction @ np.diag(spacing)
+    # Build the full 4x4 affine with an identity homogeneous bottom row
+    affine = np.eye(4)
+    affine[:spatial_dim, :spatial_dim] = affine_linear
+    affine[:spatial_dim, 3] = origin
+    affine[3, 3] = 1
+    # Convert LPS -> RAS by flipping x and y
+    lps_to_ras = np.diag([-1, -1, 1, 1])
+    affine = lps_to_ras @ affine
+    return affine
+
 
 def dict_to_dataframe(data_dict, convert_lists=True, convert_arrays=True, convert_images=True, verbose=False):
     """
@@ -2195,7 +2228,7 @@ def transform_and_reorient_dti( fixed, moving_dti, composite_transform, py_based
     if verbose:
         print("reorient tensors locally: compose and get reo image")
     locrot = ants.deformation_gradient( ants.image_read(composite_transform),
-        to_rotation = True, py_based=py_based )
+        to_rotation = True, py_based=py_based ).numpy()
     rebaser = np.dot( np.transpose( fixed.direction ), moving_dti.direction )
     if verbose:
         print("convert UT to full tensor")
@@ -3642,7 +3675,7 @@ def dipy_dti_recon(
         return tenfit, FA, MD1, RGB
 
     bvecs = repair_bvecs( bvecs )
-    gtab = gradient_table(bvals, bvecs, atol=2.0 )
+    gtab = gradient_table(bvals, bvecs=bvecs, atol=2.0 )
     if free_water:
         free_water=len( np.unique( bvals ) ) >= 3
     tenfit, FA, MD1, RGB = justthefit( gtab, fit_method, image, maskdil, free_water=free_water )
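Recent DIPY releases deprecate passing `bvecs` positionally to `gradient_table`, so these calls now name the argument explicitly; the `get_sphere(name='symmetric362')` change further down follows the same keyword-argument pattern. A sketch of the updated call, assuming a DIPY version with the keyword-style API and hypothetical gradient file names:

    from dipy.core.gradients import gradient_table
    from dipy.io.gradients import read_bvals_bvecs

    bvals, bvecs = read_bvals_bvecs('dwi.bval', 'dwi.bvec')  # hypothetical paths
    gtab = gradient_table(bvals, bvecs=bvecs, atol=2.0)      # bvecs passed by keyword
    print(gtab.b0s_mask.sum(), 'b=0 volumes')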
@@ -4204,16 +4237,15 @@ def dwi_deterministic_tracking(
     if verbose:
         print("begin tracking",flush=True)
 
-    dwi_img = to_nibabel(dwi)
-    affine = dwi_img.affine
+    affine = ants_to_nibabel_affine(dwi)
 
     if isinstance( bvals, str ) or isinstance( bvecs, str ):
         bvals, bvecs = read_bvals_bvecs(bvals, bvecs)
     bvecs = repair_bvecs( bvecs )
-    gtab = gradient_table(bvals, bvecs, atol=2.0 )
+    gtab = gradient_table(bvals, bvecs=bvecs, atol=2.0 )
     if mask is None:
         mask = ants.threshold_image( fa, fa_thresh, 2.0 ).iMath("GetLargestComponent")
-    dwi_data = dwi_img.get_fdata()
+    dwi_data = dwi.numpy()  # dwi_img.get_fdata()
     dwi_mask = mask.numpy() == 1
     dti_model = dti.TensorModel(gtab,fit_method=fit_method)
     if verbose:
@@ -4223,7 +4255,7 @@ def dwi_deterministic_tracking(
     from dipy.tracking.stopping_criterion import ThresholdStoppingCriterion
     stopping_criterion = ThresholdStoppingCriterion(fa.numpy(), fa_thresh)
     from dipy.data import get_sphere
-    sphere = get_sphere('symmetric362')
+    sphere = get_sphere(name='symmetric362')
     from dipy.direction import peaks_from_model
     if peak_indices is None:
         # problems with multi-threading ...
@@ -4282,7 +4314,7 @@ def dwi_deterministic_tracking(
     streamlines = Streamlines(streamlines_generator)
     from dipy.io.stateful_tractogram import Space, StatefulTractogram
     from dipy.io.streamline import save_tractogram
-    sft = StatefulTractogram(streamlines, dwi_img, Space.RASMM)
+    sft = None  # StatefulTractogram(streamlines, dwi_img, Space.RASMM)
     if verbose:
         print("streamlines done", flush=True)
     return {
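Because the nibabel image object (`dwi_img`) is no longer created, the function stops building a `StatefulTractogram`, and `write_mm` (see the guard added in the hunk at -7480 below) skips saving when the tractogram slot is None. If a caller still needs one, it can be rebuilt downstream; a sketch assuming `dwi` (the ANTs DWI image) and `streamlines` from the tracking result are in scope:

    import nibabel as nib
    import antspymm
    from dipy.io.stateful_tractogram import Space, StatefulTractogram
    from dipy.io.streamline import save_tractogram

    ref = nib.Nifti1Image(dwi.numpy(), antspymm.ants_to_nibabel_affine(dwi))  # reference with RAS affine
    sft = StatefulTractogram(streamlines, ref, Space.RASMM)
    save_tractogram(sft, 'tractogram.trk')  # hypothetical output name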
@@ -4384,15 +4416,14 @@ def dwi_closest_peak_tracking(
     if verbose:
         print("begin tracking",flush=True)
 
-    dwi_img = to_nibabel(dwi)
-    affine = dwi_img.affine
+    affine = ants_to_nibabel_affine(dwi)
     if isinstance( bvals, str ) or isinstance( bvecs, str ):
         bvals, bvecs = read_bvals_bvecs(bvals, bvecs)
     bvecs = repair_bvecs( bvecs )
-    gtab = gradient_table(bvals, bvecs, atol=2.0 )
+    gtab = gradient_table(bvals, bvecs=bvecs, atol=2.0 )
     if mask is None:
         mask = ants.threshold_image( fa, fa_thresh, 2.0 ).iMath("GetLargestComponent")
-    dwi_data = dwi_img.get_fdata()
+    dwi_data = dwi.numpy()
     dwi_mask = mask.numpy() == 1
 
 
@@ -4429,7 +4460,7 @@ def dwi_closest_peak_tracking(
     streamlines = Streamlines(streamlines_generator)
     from dipy.io.stateful_tractogram import Space, StatefulTractogram
     from dipy.io.streamline import save_tractogram
-    sft = StatefulTractogram(streamlines, dwi_img, Space.RASMM)
+    sft = None  # StatefulTractogram(streamlines, dwi_img, Space.RASMM)
     if verbose:
         print("streamlines done", flush=True)
     return {
@@ -4465,8 +4496,7 @@ def dwi_streamline_pairwise_connectivity( streamlines, label_image, labels_to_co
     from dipy.tracking.streamline import Streamlines
     keep_streamlines = Streamlines()
 
-
-    affine = to_nibabel(label_image).affine
+    affine = ants_to_nibabel_affine(label_image)  # to_nibabel(label_image).affine
 
     lin_T, offset = utils._mapping_to_voxel(affine)
     label_image_np = label_image.numpy()
@@ -4526,7 +4556,7 @@ def dwi_streamline_pairwise_connectivity_old(
     volUnit = np.prod( ants.get_spacing( label_image ) )
     labels = label_image.numpy()
 
-    affine = to_nibabel(label_image).affine
+    affine = ants_to_nibabel_affine(label_image)  # to_nibabel(label_image).affine
 
     import numpy as np
     from dipy.io.image import load_nifti_data, load_nifti, save_nifti
@@ -4618,7 +4648,7 @@ def dwi_streamline_connectivity(
     volUnit = np.prod( ants.get_spacing( label_image ) )
     labels = label_image.numpy()
 
-    affine = to_nibabel(label_image).affine
+    affine = ants_to_nibabel_affine(label_image)  # to_nibabel(label_image).affine
 
     import numpy as np
     from dipy.io.image import load_nifti_data, load_nifti, save_nifti
@@ -4708,7 +4738,7 @@ def dwi_streamline_connectivity_old(
     volUnit = np.prod( ants.get_spacing( label_image ) )
     labels = label_image.numpy()
 
-    affine = to_nibabel(label_image).affine
+    affine = ants_to_nibabel_affine(label_image)  # to_nibabel(label_image).affine
 
     if verbose:
         print("path length begin ... volUnit = " + str( volUnit ) )
@@ -7480,7 +7510,8 @@ def write_mm( output_prefix, mm, mm_norm=None, t1wide=None, separator='_', verbo
     if 'tractography' in mm:
         if mm['tractography'] is not None:
             ofn = output_prefix + separator + 'tractogram.trk'
-            save_tractogram( mm['tractography']['tractogram'], ofn )
+            if mm['tractography']['tractogram'] is not None:
+                save_tractogram( mm['tractography']['tractogram'], ofn )
     cnxderk = None
     if 'tractography_connectivity' in mm:
         if mm['tractography_connectivity'] is not None:
@@ -10666,94 +10697,104 @@ def calculate_loop_scores_full(flattened_series, n_neighbors=20, verbose=True ):
     return m.local_outlier_probabilities[:]
 
 
-def calculate_loop_scores(flattened_series, n_neighbors=20,
-                          n_features_sample=10000, seed=42, verbose=True):
+def calculate_loop_scores(
+    flattened_series,
+    n_neighbors=20,
+    n_features_sample=0.02,
+    n_feature_repeats=5,
+    seed=42,
+    use_approx_knn=True,
+    verbose=True,
+):
     """
-    Approximate LoOP scores using a random subset of features to reduce memory usage.
+    Memory-efficient and robust LoOP score estimation with optional approximate KNN
+    and averaging over multiple random feature subsets.
 
     Parameters:
-        flattened_series (np.ndarray): 2D array of shape (n_samples, n_features)
-        n_neighbors (int): Number of neighbors for LOF/LoOP computation
-        n_features_sample (int): Number of features to sample for approximation
-        seed (int): Random seed for reproducible feature sampling
-        verbose (bool): If True, print detailed progress and dimensions
+        flattened_series (np.ndarray): 2D array (n_samples x n_features)
+        n_neighbors (int): Number of neighbors for LoOP
+        n_features_sample (int or float): Number or fraction of features to sample
+        n_feature_repeats (int): How many independent feature subsets to sample and average over
+        seed (int): Random seed
+        use_approx_knn (bool): Whether to use fast approximate KNN (via pynndescent)
+        verbose (bool): Verbose output
 
     Returns:
-        np.ndarray: 1D array of local outlier probabilities (length n_samples)
+        np.ndarray: Averaged local outlier probabilities (length n_samples)
     """
     import numpy as np
     from sklearn.preprocessing import StandardScaler
-    from sklearn.neighbors import NearestNeighbors
     from PyNomaly import loop
 
-    # -------------------------------
-    # Step 1: Input stats and cleanup
-    # -------------------------------
+    # Optional approximate nearest neighbors
+    try:
+        from pynndescent import NNDescent
+        has_nn_descent = True
+    except ImportError:
+        has_nn_descent = False
+
+    rng = np.random.default_rng(seed)
     X = np.nan_to_num(flattened_series, nan=0).astype(np.float32)
     n_samples, n_features = X.shape
 
-    if verbose:
-        print("\n[LoOP Approximation - Verbose Mode]")
-        print(f"- Original input shape: {X.shape} (samples x features)")
-        print(f"- Requested sampled features: {n_features_sample}")
-
-    if n_features_sample > n_features:
-        n_features_sample = n_features
-        if verbose:
-            print(f"- Requested n_features_sample exceeds available features. Using all {n_features} features.")
+    # Handle feature sampling
+    if isinstance(n_features_sample, float):
+        if 0 < n_features_sample <= 1.0:
+            n_features_sample = max(1, int(n_features_sample * n_features))
+        else:
+            raise ValueError("If float, n_features_sample must be in (0, 1].")
 
-    # -------------------------------
-    # Step 2: Feature sampling
-    # -------------------------------
-    rng = np.random.default_rng(seed)
-    sampled_indices = rng.choice(n_features, n_features_sample, replace=False)
-    X_sampled = X[:, sampled_indices]
+    n_features_sample = min(n_features, n_features_sample)
 
-    if verbose:
-        print(f"- Sampled feature shape: {X_sampled.shape} (samples x sampled_features)")
-        print(f"- Random seed for reproducibility: {seed}")
-
-    # -------------------------------
-    # Step 3: Standardization
-    # -------------------------------
-    scaler = StandardScaler(copy=False)
-    X_sampled = scaler.fit_transform(X_sampled)
-    X_sampled = np.nan_to_num(X_sampled, nan=0)
-
-    # -------------------------------
-    # Step 4: KNN setup for LoOP
-    # -------------------------------
     if n_neighbors >= n_samples:
         n_neighbors = max(1, n_samples // 2)
-        if verbose:
-            print(f"- Adjusted n_neighbors to {n_neighbors} (was too large for available samples).")
 
     if verbose:
-        print(f"- Performing KNN using Minkowski distance (default p=2, Euclidean)")
-        print(f"- Each point will use its {n_neighbors} nearest neighbors for local density estimation")
+        print(f"[LoOP] Input shape: {X.shape}")
+        print(f"[LoOP] Sampling {n_features_sample} features per repeat, {n_feature_repeats} repeats")
+        print(f"[LoOP] Using {n_neighbors} neighbors")
 
-    neigh = NearestNeighbors(n_neighbors=n_neighbors)
-    neigh.fit(X_sampled)
-    dists, indices = neigh.kneighbors(X_sampled, return_distance=True)
+    loop_scores = []
 
-    # -------------------------------
-    # Step 5: LoOP probability calculation
-    # -------------------------------
-    if verbose:
-        print(f"- Distance matrix shape: {dists.shape} (samples x n_neighbors)")
-        print(f"- Neighbor index matrix shape: {indices.shape}")
-        print("- Estimating Local Outlier Probabilities (LoOP)...")
+    for rep in range(n_feature_repeats):
+        feature_idx = rng.choice(n_features, n_features_sample, replace=False)
+        X_sub = X[:, feature_idx]
 
-    model = loop.LocalOutlierProbability(
-        distance_matrix=dists,
-        neighbor_matrix=indices,
-        n_neighbors=n_neighbors
-    ).fit()
+        scaler = StandardScaler(copy=False)
+        X_sub = scaler.fit_transform(X_sub)
+        X_sub = np.nan_to_num(X_sub, nan=0)
+
+        # Approximate or exact KNN
+        if use_approx_knn and has_nn_descent and n_samples > 1000:
+            if verbose:
+                print(f"  [Rep {rep+1}] Using NNDescent (approximate KNN)")
+            ann = NNDescent(X_sub, n_neighbors=n_neighbors, random_state=seed + rep)
+            indices, dists = ann.neighbor_graph
+        else:
+            from sklearn.neighbors import NearestNeighbors
+            if verbose:
+                print(f"  [Rep {rep+1}] Using NearestNeighbors (exact KNN)")
+            nn = NearestNeighbors(n_neighbors=n_neighbors)
+            nn.fit(X_sub)
+            dists, indices = nn.kneighbors(X_sub)
+
+        # LoOP score for this repeat
+        model = loop.LocalOutlierProbability(
+            distance_matrix=dists,
+            neighbor_matrix=indices,
+            n_neighbors=n_neighbors
+        ).fit()
+        loop_scores.append(model.local_outlier_probabilities[:])
+
+    # Average over repeats
+    loop_scores = np.stack(loop_scores)
+    loop_scores_mean = loop_scores.mean(axis=0)
 
     if verbose:
-        print("- LoOP scoring complete.\n")
+        print(f"[LoOP] Averaged over {n_feature_repeats} feature subsets. Final shape: {loop_scores_mean.shape}")
+
+    return loop_scores_mean
 
-    return model.local_outlier_probabilities[:]
 
 
 def score_fmri_censoring(cbfts, csf_seg, gm_seg, wm_seg ):
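The rewrite replaces the single fixed-size feature subsample with several independent subsamples whose LoOP scores are averaged, and can use pynndescent for approximate KNN when there are many samples. A self-contained sketch on synthetic data, assuming PyNomaly is installed and importing the helper from `antspymm.mm` (its export status is not shown in this diff):

    import numpy as np
    from antspymm.mm import calculate_loop_scores

    rng = np.random.default_rng(0)
    mat = rng.normal(size=(100, 5000)).astype(np.float32)  # 100 volumes x 5000 voxels
    mat[7] += 10.0                                         # plant one outlier volume
    scores = calculate_loop_scores(mat, n_neighbors=20, n_features_sample=0.02,
                                   n_feature_repeats=3, verbose=False)
    print(scores.shape, scores.argmax())                   # expect volume 7 to score highest

With 100 samples the exact sklearn KNN branch is taken (the approximate branch requires more than 1000 samples), so pynndescent is not needed for this toy case.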
@@ -10823,7 +10864,7 @@ def score_fmri_censoring(cbfts, csf_seg, gm_seg, wm_seg ):
     cbfts_recon_ants = ants.copy_image_info(cbfts, cbfts_recon_ants)
     return cbfts_recon_ants, indx
 
-def loop_timeseries_censoring(x, threshold=0.5, mask=None, n_features_sample=10000, verbose=True):
+def loop_timeseries_censoring(x, threshold=0.5, mask=None, n_features_sample=0.02, seed=42, verbose=True):
     """
     Censor high leverage volumes from a time series using Local Outlier Probabilities (LoOP).
 
@@ -10831,7 +10872,8 @@ def loop_timeseries_censoring(x, threshold=0.5, mask=None, n_features_sample=100
     x (ANTsImage): A 4D time series image.
     threshold (float): Threshold for determining high leverage volumes based on LoOP scores.
     mask (antsImage): restricts to a ROI
-    n_features_sample (int): feature sample size default 5000
+    n_features_sample (int/float): feature sample size, default 0.02; a value below one is interpreted as a fraction of the total features, otherwise it sets the number of features to be used
+    seed (int): random seed
     verbose (bool)
 
     Returns:
@@ -10847,7 +10889,7 @@ def loop_timeseries_censoring(x, threshold=0.5, mask=None, n_features_sample=100
     flattened_series = ants.timeseries_to_matrix( x, mask )
     if verbose:
         print("loop_timeseries_censoring: flattened")
-    loop_scores = calculate_loop_scores(flattened_series, n_features_sample=n_features_sample, verbose=verbose )
+    loop_scores = calculate_loop_scores(flattened_series, n_features_sample=n_features_sample, seed=seed, verbose=verbose )
    high_leverage_volumes = np.where(loop_scores > threshold)[0]
     if verbose:
         print("loop_timeseries_censoring: High Leverage Volumes:", high_leverage_volumes)
antspymm-1.5.0.dist-info/METADATA CHANGED
@@ -1,10 +1,13 @@
 Metadata-Version: 2.4
 Name: antspymm
-Version: 1.4.8
+Version: 1.5.0
 Summary: multi-channel/time-series medical image processing with antspyx
 Author-email: "Avants, Gosselin, Tustison, Reardon" <stnava@gmail.com>
-License: Apache 2.0
-Requires-Python: >=3.8
+License: Apache-2.0
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Programming Language :: Python :: 3
+Classifier: Operating System :: OS Independent
+Requires-Python: >=3.9
 Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: h5py>=2.10.0
antspymm-1.5.0.dist-info/RECORD ADDED
@@ -0,0 +1,7 @@
+antspymm/__init__.py,sha256=3t4jqSbQVRfecm1ekc02ytWlZ3yAGcPyB_kzQAcg4Bc,4566
+antspymm/mm.py,sha256=UdmKcS4wYazBdC8DUchdnzXZzM4tetFx3RfDS9zH4ys,519104
+antspymm-1.5.0.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+antspymm-1.5.0.dist-info/METADATA,sha256=fXgNR7g8l0I1AFjAPIZwTRpN_RBrW5FiaBtdjg5z0Mc,25939
+antspymm-1.5.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+antspymm-1.5.0.dist-info/top_level.txt,sha256=iyD1sRhCKzfwKRJLq5ZUeV9xsv1cGQl8Ejp6QwXM1Zg,9
+antspymm-1.5.0.dist-info/RECORD,,
antspymm-1.4.8.dist-info/RECORD DELETED
@@ -1,7 +0,0 @@
-antspymm/__init__.py,sha256=ZdNJyHwS6rzq59v0OK3tE3qSTD0za2iULzSLGkM_0uc,4527
-antspymm/mm.py,sha256=tJXaT-81XEjNjCOhmGKCjSRB7HEM2z_mlAWxKwJlc3M,517529
-antspymm-1.4.8.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
-antspymm-1.4.8.dist-info/METADATA,sha256=ETYrQeMwhaF6LkNzhXZrN2htysVl3rvUmUX2yJymHsw,25781
-antspymm-1.4.8.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-antspymm-1.4.8.dist-info/top_level.txt,sha256=iyD1sRhCKzfwKRJLq5ZUeV9xsv1cGQl8Ejp6QwXM1Zg,9
-antspymm-1.4.8.dist-info/RECORD,,