antspymm 1.3.4__tar.gz → 1.3.6__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (63)
  1. {antspymm-1.3.4/antspymm.egg-info → antspymm-1.3.6}/PKG-INFO +12 -9
  2. {antspymm-1.3.4 → antspymm-1.3.6}/README.md +6 -3
  3. {antspymm-1.3.4 → antspymm-1.3.6}/antspymm/mm.py +54 -25
  4. {antspymm-1.3.4 → antspymm-1.3.6/antspymm.egg-info}/PKG-INFO +12 -9
  5. {antspymm-1.3.4 → antspymm-1.3.6}/antspymm.egg-info/SOURCES.txt +5 -2
  6. {antspymm-1.3.4 → antspymm-1.3.6}/antspymm.egg-info/requires.txt +2 -1
  7. antspymm-1.3.6/docs/release_notes.py +50 -0
  8. antspymm-1.3.6/docs/roi_visualization_limbic.py +28 -0
  9. antspymm-1.3.6/pyproject.toml +32 -0
  10. antspymm-1.3.6/tests/test_00_setup.py +7 -0
  11. {antspymm-1.3.4 → antspymm-1.3.6}/tests/test_deformation_gradient_reo.py +2 -0
  12. {antspymm-1.3.4 → antspymm-1.3.6}/tests/test_dti_recon.py +3 -1
  13. antspymm-1.3.6/tests/test_perfusion_ptbp2.py +36 -0
  14. antspymm-1.3.4/antspymm.egg-info/not-zip-safe +0 -1
  15. antspymm-1.3.4/setup.py +0 -30
  16. {antspymm-1.3.4 → antspymm-1.3.6}/LICENSE +0 -0
  17. {antspymm-1.3.4 → antspymm-1.3.6}/MANIFEST.in +0 -0
  18. {antspymm-1.3.4 → antspymm-1.3.6}/antspymm/__init__.py +0 -0
  19. {antspymm-1.3.4 → antspymm-1.3.6}/antspymm.egg-info/dependency_links.txt +0 -0
  20. {antspymm-1.3.4 → antspymm-1.3.6}/antspymm.egg-info/top_level.txt +0 -0
  21. {antspymm-1.3.4 → antspymm-1.3.6}/docs/adni_rsfmri_2_nrg_conversion.py +0 -0
  22. {antspymm-1.3.4 → antspymm-1.3.6}/docs/antspymm_data_dictionary.csv +0 -0
  23. {antspymm-1.3.4 → antspymm-1.3.6}/docs/bids_cohort_example.py +0 -0
  24. {antspymm-1.3.4 → antspymm-1.3.6}/docs/blind_qc.Rmd +0 -0
  25. {antspymm-1.3.4 → antspymm-1.3.6}/docs/blind_qc.html +0 -0
  26. {antspymm-1.3.4 → antspymm-1.3.6}/docs/convert_adni_dti_to_nrg.R +0 -0
  27. {antspymm-1.3.4 → antspymm-1.3.6}/docs/deepnbm.jpg +0 -0
  28. {antspymm-1.3.4 → antspymm-1.3.6}/docs/example_antspymm_output.csv +0 -0
  29. {antspymm-1.3.4 → antspymm-1.3.6}/docs/example_run_from_directory.py +0 -0
  30. {antspymm-1.3.4 → antspymm-1.3.6}/docs/make_dict_table.Rmd +0 -0
  31. {antspymm-1.3.4 → antspymm-1.3.6}/docs/make_dict_table.html +0 -0
  32. {antspymm-1.3.4 → antspymm-1.3.6}/docs/nrg_cohort_example.py +0 -0
  33. {antspymm-1.3.4 → antspymm-1.3.6}/docs/ptbp_nrg.py +0 -0
  34. {antspymm-1.3.4 → antspymm-1.3.6}/docs/roi_visualization.py +0 -0
  35. {antspymm-1.3.4 → antspymm-1.3.6}/docs/roi_visualization_ppmi.py +0 -0
  36. {antspymm-1.3.4 → antspymm-1.3.6}/docs/step1_blind_qc.py +0 -0
  37. {antspymm-1.3.4 → antspymm-1.3.6}/docs/step2_outlierness.py +0 -0
  38. {antspymm-1.3.4 → antspymm-1.3.6}/docs/step3_mm_nrg_csv.py +0 -0
  39. {antspymm-1.3.4 → antspymm-1.3.6}/docs/step4_aggregate.py +0 -0
  40. {antspymm-1.3.4 → antspymm-1.3.6}/docs/ukbb_to_nrg_processing.py +0 -0
  41. {antspymm-1.3.4 → antspymm-1.3.6}/docs/ukbb_to_nrg_processing2.py +0 -0
  42. {antspymm-1.3.4 → antspymm-1.3.6}/setup.cfg +0 -0
  43. {antspymm-1.3.4 → antspymm-1.3.6}/tests/blind_qc.py +0 -0
  44. {antspymm-1.3.4 → antspymm-1.3.6}/tests/mm.py +0 -0
  45. {antspymm-1.3.4 → antspymm-1.3.6}/tests/mm_nrg.py +0 -0
  46. {antspymm-1.3.4 → antspymm-1.3.6}/tests/outlierness.py +0 -0
  47. {antspymm-1.3.4 → antspymm-1.3.6}/tests/parallel_study_aggregation_example.py +0 -0
  48. {antspymm-1.3.4 → antspymm-1.3.6}/tests/test_bids_2_nrg.py +0 -0
  49. {antspymm-1.3.4 → antspymm-1.3.6}/tests/test_dti_reg.py +0 -0
  50. {antspymm-1.3.4 → antspymm-1.3.6}/tests/test_dwi_rebasing.py +0 -0
  51. {antspymm-1.3.4 → antspymm-1.3.6}/tests/test_dwi_run.py +0 -0
  52. {antspymm-1.3.4 → antspymm-1.3.6}/tests/test_dwi_run_ptbp_scrub.py +0 -0
  53. {antspymm-1.3.4 → antspymm-1.3.6}/tests/test_flair_run.py +0 -0
  54. {antspymm-1.3.4 → antspymm-1.3.6}/tests/test_joint_dti_recon.py +0 -0
  55. {antspymm-1.3.4 → antspymm-1.3.6}/tests/test_mm_csv.py +0 -0
  56. {antspymm-1.3.4 → antspymm-1.3.6}/tests/test_nrg_validation.py +0 -0
  57. {antspymm-1.3.4 → antspymm-1.3.6}/tests/test_perfusion_ptbp.py +0 -0
  58. {antspymm-1.3.4 → antspymm-1.3.6}/tests/test_perfusion_run.py +0 -0
  59. {antspymm-1.3.4 → antspymm-1.3.6}/tests/test_rsfmri_run.py +0 -0
  60. {antspymm-1.3.4 → antspymm-1.3.6}/tests/test_rsfmri_run_minimal.py +0 -0
  61. {antspymm-1.3.4 → antspymm-1.3.6}/tests/test_ukbb_rsfmri.py +0 -0
  62. {antspymm-1.3.4 → antspymm-1.3.6}/tests/testsr.py +0 -0
  63. {antspymm-1.3.4 → antspymm-1.3.6}/tests/visualize_tractogram.py +0 -0
{antspymm-1.3.4/antspymm.egg-info → antspymm-1.3.6}/PKG-INFO
@@ -1,18 +1,18 @@
  Metadata-Version: 2.1
  Name: antspymm
- Version: 1.3.4
+ Version: 1.3.6
  Summary: multi-channel/time-series medical image processing with antspyx
- Home-page: https://github.com/stnava/ANTsPyMM
- Author: Avants, Gosselin, Tustison, Reardon
- Author-email: stnava@gmail.com
+ Author-email: "Avants, Gosselin, Tustison, Reardon" <stnava@gmail.com>
  License: Apache 2.0
- Description-Content-Type: text/markdown; charset=UTF-8; variant=GFM
+ Requires-Python: >=3.8
+ Description-Content-Type: text/markdown
  License-File: LICENSE
  Requires-Dist: h5py>=2.10.0
  Requires-Dist: numpy>=1.19.4
  Requires-Dist: pandas>=1.0.1
  Requires-Dist: antspyx
- Requires-Dist: antspyt1w>=0.2.3
+ Requires-Dist: antspynet>=0.2.5
+ Requires-Dist: antspyt1w>=0.9.3
  Requires-Dist: pathlib
  Requires-Dist: dipy
  Requires-Dist: nibabel
@@ -35,7 +35,7 @@ production environments.
  install the `dev` version by calling (within the source directory):

  ```
- python setup.py install
+ python3 -m build .
  ```

  or install the latest release via
@@ -450,9 +450,12 @@ ssl._create_default_https_context = ssl._create_unverified_context

  ## to publish a release

+ before doing this - make sure you have a recent run of `pip-compile pyproject.toml`
+
  ```
  rm -r -f build/ antspymm.egg-info/ dist/
- python3 setup.py sdist bdist_wheel
- twine upload --repository antspymm dist/*
+ python3 -m build .
+ python3 -m pip install --upgrade twine
+ python3 -m twine upload --repository antspymm dist/*
  ```

{antspymm-1.3.4 → antspymm-1.3.6}/README.md
@@ -13,7 +13,7 @@ production environments.
  install the `dev` version by calling (within the source directory):

  ```
- python setup.py install
+ python3 -m build .
  ```

  or install the latest release via
@@ -428,9 +428,12 @@ ssl._create_default_https_context = ssl._create_unverified_context

  ## to publish a release

+ before doing this - make sure you have a recent run of `pip-compile pyproject.toml`
+
  ```
  rm -r -f build/ antspymm.egg-info/ dist/
- python3 setup.py sdist bdist_wheel
- twine upload --repository antspymm dist/*
+ python3 -m build .
+ python3 -m pip install --upgrade twine
+ python3 -m twine upload --repository antspymm dist/*
  ```

{antspymm-1.3.4 → antspymm-1.3.6}/antspymm/mm.py
@@ -3451,6 +3451,9 @@ def dipy_dti_recon(
  bvals = bvalsfn.copy()
  bvecs = bvecsfn.copy()

+ if bvals.max() < 1.0:
+ raise ValueError("DTI recon error: maximum bvalues are too small.")
+
  b0_idx = segment_timeseries_by_bvalue( bvals )['highermeans']

  b0 = ants.slice_image( image, axis=3, idx=b0_idx[0] )
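The guard added above makes `dipy_dti_recon` fail fast when the gradient table carries no real diffusion weighting. A standalone sketch of the same check, with a hypothetical helper name (`check_bvalues` is not part of ANTsPyMM):

```python
import numpy as np

def check_bvalues(bvals, tol=1.0):
    """Mirror of the new guard in dipy_dti_recon: reject all-(near-)zero b-values."""
    bvals = np.asarray(bvals, dtype=float)
    if bvals.max() < tol:
        raise ValueError("DTI recon error: maximum bvalues are too small.")
    return bvals

check_bvalues([0, 1000, 2000, 0])   # ok: at least one diffusion-weighted volume
# check_bvalues([0, 0, 0])          # raises ValueError before any tensor fitting
```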
@@ -7975,16 +7978,20 @@ def mm_csv(
  hier['dataframes'], identifier=None )
  t1wide.to_csv( hierfn + 'mmwide.csv' )
  ################# read the hierarchical data ###############################
+ # over-write the rbp data with a consistent and recent approach ############
+ myx = antspyt1w.inspect_raw_t1( t1, hierfn + 'rbp' , option='both' )
+ myx['brain'].to_csv( hierfn + 'rbp.csv', index=False )
+ myx['brain'].to_csv( hierfn + 'rbpbrain.csv', index=False )
+ del myx
  hier = antspyt1w.read_hierarchical( hierfn )
- if exists( hierfn + 'mmwide.csv' ) :
- t1wide = pd.read_csv( hierfn + 'mmwide.csv' )
- elif not testloop:
- t1wide = antspyt1w.merge_hierarchical_csvs_to_wide_format(
- hier['dataframes'], identifier=None )
- if t1wide['resnetGrade'].iloc[0] < 0.35:
- rgrade = str( t1wide['resnetGrade'].iloc[0] )
+ t1wide = antspyt1w.merge_hierarchical_csvs_to_wide_format(
+ hier['dataframes'], identifier=None )
+ rgrade = str( t1wide['resnetGrade'].iloc[0] )
+ if t1wide['resnetGrade'].iloc[0] < 0.20:
  warnings.warn('T1w quality check indicates failure: ' + rgrade + " will not process." )
  return
+ else:
+ print('T1w quality check indicates success: ' + rgrade + " will process." )

  if srmodel_T1 is not False :
  hierfntest = hierfnSR + 'mtl.csv'
@@ -8079,6 +8086,8 @@ def mm_csv(
  print( 'example image name is : ' )
  print( myimgsr )
  if overmodX == 'NM2DMT':
+ dowrite = True
+ visualize = True
  subjectpropath = os.path.dirname( mydoc['outprefix'] )
  if verbose:
  print("subjectpropath is")
@@ -8138,16 +8147,17 @@
  print(f"antspymmerror occurred while processing {overmodX}: {e}")
  pass
  if not test_run:
- write_mm( output_prefix=mymm, mm=tabPro, mm_norm=normPro, t1wide=None, separator=mysep )
- nmpro = tabPro['NM']
- mysl = range( nmpro['NM_avg'].shape[2] )
- if visualize:
- mysl = range( nmpro['NM_avg'].shape[2] )
- ants.plot( nmpro['NM_avg'], nmpro['t1_to_NM'], slices=mysl, axis=2, title='nm + t1', filename=mymm+mysep+"NMavg.png" )
- mysl = range( nmpro['NM_avg_cropped'].shape[2] )
- ants.plot( nmpro['NM_avg_cropped'], axis=2, slices=mysl, overlay_alpha=0.3, title='nm crop', filename=mymm+mysep+"NMavgcrop.png" )
- ants.plot( nmpro['NM_avg_cropped'], nmpro['t1_to_NM'], axis=2, slices=mysl, overlay_alpha=0.3, title='nm crop + t1', filename=mymm+mysep+"NMavgcropt1.png" )
- ants.plot( nmpro['NM_avg_cropped'], nmpro['NM_labels'], axis=2, slices=mysl, title='nm crop + labels', filename=mymm+mysep+"NMavgcroplabels.png" )
+ if dowrite:
+ write_mm( output_prefix=mymm, mm=tabPro,
+ mm_norm=normPro, t1wide=None, separator=mysep )
+ if visualize :
+ nmpro = tabPro['NM']
+ mysl = range( nmpro['NM_avg'].shape[2] )
+ ants.plot( nmpro['NM_avg'], nmpro['t1_to_NM'], slices=mysl, axis=2, title='nm + t1', filename=mymm+mysep+"NMavg.png" )
+ mysl = range( nmpro['NM_avg_cropped'].shape[2] )
+ ants.plot( nmpro['NM_avg_cropped'], axis=2, slices=mysl, overlay_alpha=0.3, title='nm crop', filename=mymm+mysep+"NMavgcrop.png" )
+ ants.plot( nmpro['NM_avg_cropped'], nmpro['t1_to_NM'], axis=2, slices=mysl, overlay_alpha=0.3, title='nm crop + t1', filename=mymm+mysep+"NMavgcropt1.png" )
+ ants.plot( nmpro['NM_avg_cropped'], nmpro['NM_labels'], axis=2, slices=mysl, title='nm crop + labels', filename=mymm+mysep+"NMavgcroplabels.png" )
  else :
  if len( myimgsr ) > 0:
  dowrite=False
@@ -9544,6 +9554,7 @@ def blind_image_assessment(
  image_reference = ants.image_clone( image )
  ntimepoints = 1
  bvalueMax=None
+ bvecnorm=None
  if image_reference.dimension == 4:
  ntimepoints = image_reference.shape[3]
  if "DTI" in image_filename:
@@ -9556,6 +9567,8 @@
  if exists( bval_name ) and exists( bvec_name ):
  bvals, bvecs = read_bvals_bvecs( bval_name , bvec_name )
  bvalueMax = bvals.max()
+ bvecnorm = np.linalg.norm(bvecs,axis=1).reshape( bvecs.shape[0],1 )
+ bvecnorm = bvecnorm.max()
  else:
  image_b0 = ants.get_average_of_timeseries( image_reference ).iMath("Normalize")
  else:
@@ -9721,11 +9734,11 @@
  noizlevel, snrref, cnrref, psnrref, ssimref, mymi, asym_err, myevr, msk_vol,
  spc[0], spc[1], spc[2],org[0], org[1], org[2],
  image.shape[0], image.shape[1], image.shape[2], ntimepoints,
- jjj, modality, mriseries, mrimfg, mrimodel, MagneticFieldStrength, mriSAR, PixelBandwidth, BandwidthPerPixelPhaseEncode, bvalueMax ]],
+ jjj, modality, mriseries, mrimfg, mrimodel, MagneticFieldStrength, mriSAR, PixelBandwidth, BandwidthPerPixelPhaseEncode, bvalueMax, bvecnorm ]],
  columns=[
  'filename',
  'dimensionality',
- 'noise', 'snr', 'cnr', 'psnr', 'ssim', 'mi', 'reflection_err', 'EVR', 'msk_vol', 'spc0','spc1','spc2','org0','org1','org2','dimx','dimy','dimz','dimt','slice','modality', 'mriseries', 'mrimfg', 'mrimodel', 'mriMagneticFieldStrength', 'mriSAR', 'mriPixelBandwidth', 'mriPixelBandwidthPE', 'dti_bvalueMax' ])
+ 'noise', 'snr', 'cnr', 'psnr', 'ssim', 'mi', 'reflection_err', 'EVR', 'msk_vol', 'spc0','spc1','spc2','org0','org1','org2','dimx','dimy','dimz','dimt','slice','modality', 'mriseries', 'mrimfg', 'mrimodel', 'mriMagneticFieldStrength', 'mriSAR', 'mriPixelBandwidth', 'mriPixelBandwidthPE', 'dti_bvalueMax', 'dti_bvecnorm' ])
  outdf = pd.concat( [outdf, df ], axis=0, ignore_index=False )
  if verbose:
  print( outdf )
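`blind_image_assessment` now also reports the maximum b-vector norm in a new `dti_bvecnorm` QC column. A small illustration of what that number captures, using a made-up gradient table:

```python
import numpy as np

# In a well-formed gradient table, every diffusion-weighted b-vector is unit
# length and b0 rows are all zero, so the maximum row norm should be ~1.
bvecs = np.array([[0.0, 0.0, 0.0],    # b0 volume
                  [1.0, 0.0, 0.0],
                  [0.6, 0.8, 0.0]])
bvecnorm = np.linalg.norm(bvecs, axis=1).reshape(bvecs.shape[0], 1).max()
print(bvecnorm)  # 1.0; values far from 1 suggest a malformed .bvec file
```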
@@ -10893,6 +10906,7 @@ def aggregate_antspymm_results_sdf(
  splitsep='-',
  idsep='-',
  wild_card_modality_id=False,
+ second_split=False,
  verbose=False ):
  """
  Aggregate ANTsPyMM results from the specified study data frame and store the aggregated results in a new data frame. This assumes data is organized on disk
@@ -10911,6 +10925,7 @@ def aggregate_antspymm_results_sdf(
  - idsep (str): the separator used to partition subjectid date and imageid
  for example, if idsep is - then we have subjectid-date-imageid
  - wild_card_modality_id (bool): keep if False for safer execution
+ - second_split (bool): this is a hack that will split the imageID by . and keep the first part of the split; may be needed when the input filenames contain .
  - verbose : boolean

  Note:
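The new `second_split` option is only a string split on the image ID; a one-line illustration with a hypothetical ID:

```python
# With second_split=True, an imageID that still carries a file extension is
# trimmed back to its bare identifier before the output path is assembled.
myid = "I1499279.nii.gz"          # hypothetical value from the study data frame
print(myid.split(".")[0])         # -> "I1499279"
```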
@@ -10992,12 +11007,24 @@
  myproj = str(df[project_col].iloc[x])
  mydate = str(df[date_col].iloc[x])
  myid = str(df[image_col].iloc[x])
+ if second_split:
+ myid = myid.split(".")[0]
  path_template = base_path + "/" + myproj + "/" + sid + "/" + mydate + '/' + hiervariable + '/' + str(myid) + "/"
  hierfn = sorted(glob( path_template + "*" + hiervariable + "*wide.csv" ) )
+ if len( hierfn ) == 0:
+ print( hierfn )
+ print( path_template )
+ print( myproj )
+ print( sid )
+ print( mydate )
+ print( myid )
  if len( hierfn ) > 0:
  keep[x]=True

- df=df[keep]
+ # df=df[keep]
+ if df.shape[0] == 0:
+ warnings.warn("input data frame shape is filtered down to zero")
+ return df

  if not df.index.is_unique:
  warnings.warn("data frame does not have unique indices. we therefore reset the index to allow the function to continue on." )
@@ -11030,7 +11057,8 @@
  myproj = str(df[project_col].iloc[x])
  mydate = str(df[date_col].iloc[x])
  myid = str(df[image_col].iloc[x])
- myt1id = myid
+ if second_split:
+ myid = myid.split(".")[0]
  if verbose:
  print( myfn )
  print( temp )
@@ -11090,7 +11118,6 @@
  nlarge = len(t1wfn)
  t1wfn = find_most_recent_file( t1wfn )
  warnings.warn("there are " + str( nlarge ) + " number of wide fns with search path " + modsearch + " we take the most recent of these " + t1wfn[0] )
- # raise ValueError("there are " + str( len( t1wfn ) ) + " number of wide fns with search path " + modsearch )
  if len( t1wfn ) == 1:
  if verbose:
  print(t1wfn)
@@ -11108,9 +11135,11 @@
  hdf.index = subdf.index.copy()
  subdf = pd.concat( [subdf,hdf], axis=1, ignore_index=False)
  dfout = pd.concat( [dfout,subdf], axis=0, ignore_index=False )
- badnames = get_names_from_data_frame( ['Unnamed'], dfout )
- dfout=dfout.drop(badnames, axis=1)
- return( dfout )
+
+ if dfout.shape[0] > 0:
+ badnames = get_names_from_data_frame( ['Unnamed'], dfout )
+ dfout=dfout.drop(badnames, axis=1)
+ return dfout

  def enantiomorphic_filling_without_mask( image, axis=0, intensity='low' ):
  """
{antspymm-1.3.4 → antspymm-1.3.6/antspymm.egg-info}/PKG-INFO
@@ -1,18 +1,18 @@
  Metadata-Version: 2.1
  Name: antspymm
- Version: 1.3.4
+ Version: 1.3.6
  Summary: multi-channel/time-series medical image processing with antspyx
- Home-page: https://github.com/stnava/ANTsPyMM
- Author: Avants, Gosselin, Tustison, Reardon
- Author-email: stnava@gmail.com
+ Author-email: "Avants, Gosselin, Tustison, Reardon" <stnava@gmail.com>
  License: Apache 2.0
- Description-Content-Type: text/markdown; charset=UTF-8; variant=GFM
+ Requires-Python: >=3.8
+ Description-Content-Type: text/markdown
  License-File: LICENSE
  Requires-Dist: h5py>=2.10.0
  Requires-Dist: numpy>=1.19.4
  Requires-Dist: pandas>=1.0.1
  Requires-Dist: antspyx
- Requires-Dist: antspyt1w>=0.2.3
+ Requires-Dist: antspynet>=0.2.5
+ Requires-Dist: antspyt1w>=0.9.3
  Requires-Dist: pathlib
  Requires-Dist: dipy
  Requires-Dist: nibabel
@@ -35,7 +35,7 @@ production environments.
  install the `dev` version by calling (within the source directory):

  ```
- python setup.py install
+ python3 -m build .
  ```

  or install the latest release via
@@ -450,9 +450,12 @@ ssl._create_default_https_context = ssl._create_unverified_context

  ## to publish a release

+ before doing this - make sure you have a recent run of `pip-compile pyproject.toml`
+
  ```
  rm -r -f build/ antspymm.egg-info/ dist/
- python3 setup.py sdist bdist_wheel
- twine upload --repository antspymm dist/*
+ python3 -m build .
+ python3 -m pip install --upgrade twine
+ python3 -m twine upload --repository antspymm dist/*
  ```

{antspymm-1.3.4 → antspymm-1.3.6}/antspymm.egg-info/SOURCES.txt
@@ -1,13 +1,12 @@
  LICENSE
  MANIFEST.in
  README.md
- setup.py
+ pyproject.toml
  antspymm/__init__.py
  antspymm/mm.py
  antspymm.egg-info/PKG-INFO
  antspymm.egg-info/SOURCES.txt
  antspymm.egg-info/dependency_links.txt
- antspymm.egg-info/not-zip-safe
  antspymm.egg-info/requires.txt
  antspymm.egg-info/top_level.txt
  docs/adni_rsfmri_2_nrg_conversion.py
@@ -23,7 +22,9 @@ docs/make_dict_table.Rmd
  docs/make_dict_table.html
  docs/nrg_cohort_example.py
  docs/ptbp_nrg.py
+ docs/release_notes.py
  docs/roi_visualization.py
+ docs/roi_visualization_limbic.py
  docs/roi_visualization_ppmi.py
  docs/step1_blind_qc.py
  docs/step2_outlierness.py
@@ -36,6 +37,7 @@ tests/mm.py
  tests/mm_nrg.py
  tests/outlierness.py
  tests/parallel_study_aggregation_example.py
+ tests/test_00_setup.py
  tests/test_bids_2_nrg.py
  tests/test_deformation_gradient_reo.py
  tests/test_dti_recon.py
@@ -48,6 +50,7 @@ tests/test_joint_dti_recon.py
  tests/test_mm_csv.py
  tests/test_nrg_validation.py
  tests/test_perfusion_ptbp.py
+ tests/test_perfusion_ptbp2.py
  tests/test_perfusion_run.py
  tests/test_rsfmri_run.py
  tests/test_rsfmri_run_minimal.py
{antspymm-1.3.4 → antspymm-1.3.6}/antspymm.egg-info/requires.txt
@@ -2,7 +2,8 @@ h5py>=2.10.0
  numpy>=1.19.4
  pandas>=1.0.1
  antspyx
- antspyt1w>=0.2.3
+ antspynet>=0.2.5
+ antspyt1w>=0.9.3
  pathlib
  dipy
  nibabel
antspymm-1.3.6/docs/release_notes.py ADDED
@@ -0,0 +1,50 @@
+
+ import os
+ from git import Repo
+ from github import Github
+
+ def generate_release_notes(repo_path, from_tag, to_tag, github_repo_name, github_token, release_notes_file):
+     """
+     Generates release notes by extracting commit messages between two tags and fetching closed issues from GitHub.
+
+     Parameters:
+     - repo_path (str): Local path to the Git repository.
+     - from_tag (str): Tag to start collecting commits.
+     - to_tag (str): Tag to end collecting commits.
+     - github_repo_name (str): Full GitHub repository name (e.g., 'username/repository').
+     - github_token (str): GitHub access token for authentication.
+     - release_notes_file (str): File path to save the release notes.
+     """
+     # Extract commits
+     repo = Repo(repo_path)
+     commits = list(repo.iter_commits(f'{from_tag}...{to_tag}'))
+     commit_messages = [commit.message.strip() for commit in commits]
+
+     # Fetch issues from GitHub
+     g = Github(github_token)
+     repo = g.get_repo(github_repo_name)
+     issues = repo.get_issues(state='closed')
+     issue_details = [(issue.number, issue.title) for issue in issues if issue.pull_request is None]
+
+     # Write to release notes file
+     with open(release_notes_file, 'w') as file:
+         file.write('Release Notes:\n\n')
+         file.write('Features and Enhancements:\n')
+         for message in commit_messages:
+             file.write(f'- {message}\n')
+         file.write('\nFixed Issues:\n')
+         for issue in issue_details:
+             file.write(f'- Issue #{issue[0]}: {issue[1]}\n')
+
+     print(f"Release notes saved to {release_notes_file}")
+
+ # Example usage
+ repo_path = '/path/to/your/repo'
+ from_tag = 'v1.0.0'
+ to_tag = 'v1.1.0'
+ github_repo_name = 'username/repo'
+ github_token = 'your_github_token'
+ release_notes_file = 'release_notes.md'
+
+ generate_release_notes(repo_path, from_tag, to_tag, github_repo_name, github_token, release_notes_file)
+
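The release-notes helper above depends on two packages that are not in the project's dependency list: GitPython (imported as `git`) and PyGithub (imported as `github`). A hypothetical pre-flight check before running it:

```python
# Hypothetical pre-flight check for docs/release_notes.py; not part of the
# repository. GitPython provides the 'git' module, PyGithub provides 'github'.
import importlib.util

for module_name, pip_name in (("git", "GitPython"), ("github", "PyGithub")):
    if importlib.util.find_spec(module_name) is None:
        raise SystemExit(f"missing dependency: pip install {pip_name}")
print("release_notes.py prerequisites are available")
```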
antspymm-1.3.6/docs/roi_visualization_limbic.py ADDED
@@ -0,0 +1,28 @@
+ import ants
+ import pandas as pd
+ import numpy as np
+ import antspymm
+
+
+ # Example file paths and output prefix
+ x='dtPC12'
+ statistical_file_path="/tmp/"+x+".csv"
+ data_dictionary_path = "~/code/ANTsPyMM/docs/antspymm_data_dictionary.csv"
+ output_prefix = '/tmp/vizit_'+x
+ edge_image_path = '~/.antspymm/PPMI_template0_edge.nii.gz'
+ edge_image_path = '~/.antspymm/PPMI_template0_brain.nii.gz'
+ brain_image = ants.image_read( edge_image_path )
+ brain_image_t = ants.iMath( brain_image, 'TruncateIntensity', 0.002, 0.99)
+ # Call the function
+ zz = pd.read_csv( statistical_file_path )
+ ocols = zz.keys()
+
+ qqq = zz.copy()
+ qqq['anat'] = qqq['anat'].str.replace(r'(vol_|thk_|LRAVG_|_LRAVG|Asym_|_Asym|volAsym|volLRAVG|thkAsym|thkLRAVG)', '', regex=True)
+ # olimg = antspymm.brainmap_figure(qqq, data_dictionary_path, output_prefix + myco, brain_image_t, nslices=21, black_bg=False, axes=[1], fixed_overlay_range=[-1.0,1.0],verbose=True )
+
+ qqq['anat'] = qqq['anat'].str.replace(r'(DTI_mean_fa_|DTI_mean_md.|DTI_mean_md_|.LRAVG.)', '', regex=True)
+
+ olimg = antspymm.brainmap_figure(qqq, data_dictionary_path, output_prefix, brain_image_t, nslices=21, black_bg=True, crop=0, overlay_cmap='winter', fixed_overlay_range=[0,1.],verbose=True )
+
+ ants.image_write( olimg, '/tmp/'+x+'.nii.gz' )
antspymm-1.3.6/pyproject.toml ADDED
@@ -0,0 +1,32 @@
+ [build-system]
+ requires = ["setuptools>=42", "wheel"]
+ build-backend = "setuptools.build_meta"
+
+ [project]
+ name = "antspymm"
+ version = "1.3.6"
+ description = "multi-channel/time-series medical image processing with antspyx"
+ readme = "README.md"
+ requires-python = ">=3.8"
+ license = { text = "Apache 2.0" }
+ authors = [
+     { name = "Avants, Gosselin, Tustison, Reardon", email = "stnava@gmail.com" }
+ ]
+
+ dependencies = [
+     "h5py>=2.10.0",
+     "numpy>=1.19.4",
+     "pandas>=1.0.1",
+     "antspyx",
+     "antspynet>=0.2.5",
+     "antspyt1w>=0.9.3",
+     "pathlib",
+     "dipy",
+     "nibabel",
+     "scipy",
+     "siq",
+     "scikit-learn"
+ ]
+
+ [tool.setuptools.packages]
+ find = { include = ["antspymm"] }
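With the metadata moved from setup.py to this pyproject.toml, the declared pins can be sanity-checked from an environment where the wheel is installed; a minimal sketch using only the standard library:

```python
from importlib.metadata import version, requires

# After `pip install antspymm==1.3.6` this should report the new version and
# the updated dependency pins (antspynet>=0.2.5, antspyt1w>=0.9.3).
print(version("antspymm"))
for req in requires("antspymm") or []:
    print(req)
```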
antspymm-1.3.6/tests/test_00_setup.py ADDED
@@ -0,0 +1,7 @@
+ import ants
+ import antspyt1w
+ import antspymm
+ import sys
+ antspyt1w.get_data()
+ antspymm.get_data()
+ sys.exit(0)
{antspymm-1.3.4 → antspymm-1.3.6}/tests/test_deformation_gradient_reo.py
@@ -14,6 +14,8 @@ import tensorflow as tf
  from tempfile import mktemp
  import numpy as np
  import antspymm
+ import sys
+ sys.exit(0) # this is not an actual test that we want to run regularly
  print(" Load in JHU atlas and labels ")
  ex_path = os.path.expanduser( "~/.antspyt1w/" )
  ex_path_mm = os.path.expanduser( "~/.antspymm/" )
{antspymm-1.3.4 → antspymm-1.3.6}/tests/test_dti_recon.py
@@ -3,4 +3,6 @@ import antspymm
  img1 = ants.image_read( antspymm.get_data( "I1499279_Anon_20210819142214_5", target_extension=".nii.gz") )
  bvec = antspymm.get_data( "I1499279_Anon_20210819142214_5", target_extension=".bvec")
  bval = antspymm.get_data( "I1499279_Anon_20210819142214_5", target_extension=".bval")
- dd = antspymm.dipy_dti_recon( img1, bval, bvec, motion_correct=True, mask_dilation=0 )
+ dd = antspymm.dipy_dti_recon( img1, bval, bvec )
+ import sys
+ sys.exit(0)
antspymm-1.3.6/tests/test_perfusion_ptbp2.py ADDED
@@ -0,0 +1,36 @@
+ import sys, os
+ os.environ["TF_NUM_INTEROP_THREADS"] = "8"
+ os.environ["TF_NUM_INTRAOP_THREADS"] = "8"
+ os.environ["ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS"] = "8"
+ import tempfile
+ import shutil
+ import tensorflow as tf
+ import antspymm
+ import antspyt1w
+ import antspynet
+ import ants
+ import numpy as np
+ from scipy.stats import median_abs_deviation
+ import math
+ import statsmodels.api as sm
+ t1fn = "/Users/stnava/data/PTBP/images/PEDS049/20110217//Anatomy/PEDS049_20110217_mprage_t1.nii.gz"
+ idpfn = "/Users/stnava/data/PTBP/images/PEDS049/20110217//PCASL/PEDS049_20110217_pcasl_1.nii.gz"
+ if not 'dkt' in globals():
+     t1head = ants.image_read( t1fn ).n3_bias_field_correction( 8 ).n3_bias_field_correction( 4 )
+     t1bxt = antspynet.brain_extraction( t1head, 't1' ).threshold_image( 0.3, 1.0 )
+     t1 = t1bxt * t1head
+     t1seg = antspynet.deep_atropos( t1head )
+     t1segmentation = t1seg['segmentation_image']
+     dkt = antspynet.desikan_killiany_tourville_labeling( t1head )
+ #################
+ type_of_transform='Rigid'
+ tc='alternating'
+ fmri = ants.image_read( idpfn )
+ print("do perf")
+ perf = antspymm.bold_perfusion( fmri, t1head, t1,
+     t1segmentation, dkt, robust=True, verbose=True )
+ ##################
+ ants.plot( perf['cbf'], axis=2, crop=True )
+ # ants.image_write( ants.iMath( perf['perfusion'], "Normalize" ), '/tmp/temp.nii.gz' )
+ # ants.image_write( perf['motion_corrected'], '/tmp/temp2.nii.gz' )
+ # ants.image_write( perf['cbf'], '/tmp/temp3ptb.nii.gz' )
antspymm-1.3.4/antspymm.egg-info/not-zip-safe DELETED
@@ -1 +0,0 @@
-
antspymm-1.3.4/setup.py DELETED
@@ -1,30 +0,0 @@
-
- from setuptools import setup, find_packages
-
- long_description = open("README.md").read()
-
- requirements = [
-     "h5py>=2.10.0",
-     "numpy>=1.19.4",
-     "pandas>=1.0.1",
-     "antspyx",
-     "antspyt1w>=0.2.3",
-     "pathlib",
-     "dipy",
-     "nibabel",
-     "scipy",
-     "siq",
-     "scikit-learn"]
-
- setup(name='antspymm',
-     version='1.3.4',
-     description='multi-channel/time-series medical image processing with antspyx',
-     long_description=long_description,
-     long_description_content_type="text/markdown; charset=UTF-8; variant=GFM",
-     url='https://github.com/stnava/ANTsPyMM',
-     author='Avants, Gosselin, Tustison, Reardon',
-     author_email='stnava@gmail.com',
-     license='Apache 2.0',
-     install_requires=requirements,
-     packages=['antspymm'],
-     zip_safe=False)