petpal 0.5.4__py3-none-any.whl → 0.5.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
petpal/cli/cli_preproc.py CHANGED
@@ -345,89 +345,84 @@ def main():
         preproc_parser.print_help()
         raise SystemExit('Exiting without command')
 
-    if len(args.motion_target)==1:
-        motion_target = args.motion_target[0]
+    if 'motion_target' in dir(args):
+        if len(args.motion_target)==1:
+            motion_target = args.motion_target[0]
+        else:
+            motion_target = args.motion_target
     else:
-        motion_target = args.motion_target
+        motion_target = None
 
     command = str(args.command).replace('-','_')
 
-    if command=='weighted_series_sum':
-        useful_functions.weighted_series_sum(input_image_4d_path=args.input_img,
-                                             out_image_path=args.out_img,
-                                             half_life=args.half_life,
-                                             start_time=args.start_time,
-                                             end_time=args.end_time,
-                                             verbose=True)
-
-    if command=='auto_crop':
-        image_operations_4d.SimpleAutoImageCropper(input_image_path=args.input_img,
-                                                   out_image_path=args.out_img,
-                                                   thresh_val=args.thresh_val,
-                                                   verbose=True)
-
-    if command=='motion_correction':
-        motion_corr.motion_corr(input_image_4d_path=args.input_img,
+    match command:
+        case 'weighted_series_sum':
+            useful_functions.weighted_series_sum(input_image_4d_path=args.input_img,
+                                                 out_image_path=args.out_img,
+                                                 half_life=args.half_life,
+                                                 start_time=args.start_time,
+                                                 end_time=args.end_time,
+                                                 verbose=True)
+        case 'auto_crop':
+            image_operations_4d.SimpleAutoImageCropper(input_image_path=args.input_img,
+                                                       out_image_path=args.out_img,
+                                                       thresh_val=args.thresh_val,
+                                                       verbose=True)
+        case 'motion_correction':
+            motion_corr.motion_corr(input_image_4d_path=args.input_img,
+                                    out_image_path=args.out_img,
+                                    motion_target_option=motion_target,
+                                    verbose=True,
+                                    type_of_transform=args.transform_type,
+                                    half_life=args.half_life)
+        case 'register_pet':
+            register.register_pet(input_reg_image_path=args.input_img,
                                   out_image_path=args.out_img,
+                                  reference_image_path=args.anatomical,
                                   motion_target_option=motion_target,
                                   verbose=True,
-                                  type_of_transform=args.transform_type,
                                   half_life=args.half_life)
-    if command=='register_pet':
-        register.register_pet(input_reg_image_path=args.input_img,
-                              out_image_path=args.out_img,
-                              reference_image_path=args.anatomical,
-                              motion_target_option=motion_target,
-                              verbose=True,
-                              half_life=args.half_life)
-
-    if command=='write_tacs_old':
-        regional_tac_extraction.write_tacs(input_image_path=args.input_img,
-                                           out_tac_dir=args.out_tac_dir,
-                                           segmentation_image_path=args.segmentation,
-                                           label_map_path=args.label_map_path,
-                                           verbose=True)
-
-    if command=='write_tacs':
-        tac_obj = regional_tac_extraction.WriteRegionalTacs(input_image_path=args.input_img,
-                                                            segmentation_path=args.segmentation,
-                                                            label_map=args.label_map)
-        tac_obj(out_tac_prefix=args.patid,
-                out_tac_dir=args.out_tac_dir,
-                one_tsv_per_region=not args.excel)
-
-    if command=='warp_pet_atlas':
-        register.warp_pet_atlas(input_image_path=args.input_img,
-                                anat_image_path=args.anatomical,
-                                atlas_image_path=args.reference_atlas,
-                                out_image_path=args.out_img,
-                                verbose=True)
-
-    if command=='gauss_blur':
-        image_operations_4d.gauss_blur(input_image_path=args.input_img,
-                                       blur_size_mm=args.blur_size_mm,
+        case 'write_tacs_old':
+            regional_tac_extraction.write_tacs(input_image_path=args.input_img,
+                                               out_tac_dir=args.out_tac_dir,
+                                               segmentation_image_path=args.segmentation,
+                                               label_map_path=args.label_map_path,
+                                               verbose=True)
+        case 'write_tacs':
+            tac_obj = regional_tac_extraction.WriteRegionalTacs(input_image_path=args.input_img,
+                                                                segmentation_path=args.segmentation,
+                                                                label_map=args.label_map)
+            tac_obj(out_tac_prefix=args.patid,
+                    out_tac_dir=args.out_tac_dir,
+                    one_tsv_per_region=not args.excel)
+        case 'warp_pet_atlas':
+            register.warp_pet_to_atlas(input_image_path=args.input_img,
+                                       anat_image_path=args.anatomical,
+                                       atlas_image_path=args.reference_atlas,
                                        out_image_path=args.out_img,
-                                       verbose=True,
-                                       use_fwhm=True)
-
-    if command=='suvr':
-        image_operations_4d.suvr(input_image_path=args.input_img,
-                                 out_image_path=args.out_img,
-                                 segmentation_image_path=args.segmentation,
-                                 ref_region=args.ref_region)
-
-    if command=='windowed_motion_corr':
-        motion_corr.windowed_motion_corr_to_target(input_image_path=args.input_img,
-                                                   out_image_path=args.out_img,
-                                                   motion_target_option=motion_target,
-                                                   w_size=args.window_size,
-                                                   type_of_transform=args.transform_type)
-
-    if command=='rescale_image':
-        input_img = ants.image_read(filename=args.input_img)
-        out_img = image_operations_4d.rescale_image(input_image=input_img,
-                                                    rescale_constant=args.scale_factor)
-        ants.image_write(image=out_img, filename=args.out_img)
+                                       verbose=True)
+        case 'gauss_blur':
+            image_operations_4d.gauss_blur(input_image_path=args.input_img,
+                                           blur_size_mm=args.blur_size_mm,
+                                           out_image_path=args.out_img,
+                                           verbose=True,
+                                           use_fwhm=True)
+        case 'suvr':
+            image_operations_4d.suvr(input_image_path=args.input_img,
+                                     out_image_path=args.out_img,
+                                     segmentation_image_path=args.segmentation,
+                                     ref_region=args.ref_region)
+        case 'windowed_motion_corr':
+            motion_corr.windowed_motion_corr_to_target(input_image_path=args.input_img,
+                                                       out_image_path=args.out_img,
+                                                       motion_target_option=motion_target,
+                                                       w_size=args.window_size,
+                                                       type_of_transform=args.transform_type)
+        case 'rescale_image':
+            input_img = ants.image_read(filename=args.input_img)
+            out_img = image_operations_4d.rescale_image(input_image=input_img,
+                                                        rescale_constant=args.scale_factor)
+            ants.image_write(image=out_img, filename=args.out_img)
 
 if __name__ == "__main__":
     main()
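For readers unfamiliar with the pattern, the refactor above replaces a chain of independent `if command == ...:` checks with Python 3.10+ structural pattern matching, and only reads `args.motion_target` when the parsed namespace actually defines it. Below is a minimal, self-contained sketch of the same dispatch idea; the subcommands, option names, and handler bodies are placeholders for illustration, not PETPAL's API.

```python
import argparse

def main():
    # Hypothetical two-subcommand CLI; only "gauss-blur" defines --motion-target.
    parser = argparse.ArgumentParser(prog="example-preproc")
    subparsers = parser.add_subparsers(dest="command")
    blur = subparsers.add_parser("gauss-blur")
    blur.add_argument("--blur-size-mm", type=float, required=True)
    blur.add_argument("--motion-target", nargs="+", default=None)
    crop = subparsers.add_parser("auto-crop")
    crop.add_argument("--thresh-val", type=float, default=0.01)
    args = parser.parse_args()

    # Not every subcommand defines the option, so guard before reading it
    # (the diff uses `'motion_target' in dir(args)`; getattr has the same effect here).
    motion_target = getattr(args, "motion_target", None)
    if motion_target is not None and len(motion_target) == 1:
        motion_target = motion_target[0]

    command = str(args.command).replace("-", "_")
    match command:  # structural pattern matching, Python >= 3.10
        case "gauss_blur":
            print(f"blur {args.blur_size_mm} mm, motion target: {motion_target}")
        case "auto_crop":
            print(f"crop with threshold {args.thresh_val}")
        case _:
            parser.print_help()
            raise SystemExit("Exiting without command")

if __name__ == "__main__":
    main()
```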
petpal/kinetic_modeling/graphical_analysis.py CHANGED
@@ -91,6 +91,50 @@ def fit_line_to_data_using_lls_with_rsquared(xdata: np.ndarray,
     return fit_ans[0][0], fit_ans[0][1], r_squared
 
 
+@numba.njit()
+def linear_least_squares_fit_with_stats(xdata: np.ndarray,
+                                        ydata: np.ndarray) -> tuple[float, float, float, float, float]:
+    """Fits a line to the data using least squares and explicitly computes:
+        - Fit R^2
+        - Standard error of the intercept
+        - Standard error of the slope
+
+    Args:
+        xdata (np.ndarray): X-coordinates of the data.
+        ydata (np.ndarray): Y-coordinates of the data.
+
+    Returns:
+        tuple: A tuple containing five float values: the intercept of the fitted line, the slope
+            of the fitted line, the r-squared value, the intercept standard error, and the slope
+            standard error.
+
+    See:
+        - https://mathworld.wolfram.com/LeastSquaresFitting.html
+    """
+    make_2d_matrix = _line_fitting_make_rhs_matrix_from_xdata
+    matrix = make_2d_matrix(xdata)
+    fit_ans = np.linalg.lstsq(matrix, ydata)
+
+    x_mean = np.mean(xdata)
+    y_mean = np.mean(ydata)
+
+    ss_res = fit_ans[1][0]
+    ss_tot = np.sum((y_mean - ydata) ** 2.)
+
+    n = len(xdata)
+
+    sum_square_xdiff = np.sum(xdata**2)-n*x_mean**2
+    sum_square_ydiff = np.sum(ydata**2)-n*y_mean**2
+    sum_square_xydiff = np.sum(xdata*ydata)-n*y_mean*x_mean
+
+    s = np.sqrt((sum_square_ydiff-sum_square_xydiff**2/sum_square_xdiff)/(n-2))
+
+    r_squared = 1.0 - ss_res / ss_tot
+
+    se_intercept = s*np.sqrt(1/n+np.mean(xdata)**2/sum_square_xdiff)
+    se_slope = s*sum_square_xdiff**(-0.5)
+    return fit_ans[0][0], fit_ans[0][1], r_squared, se_intercept, se_slope
+
 @numba.njit()
 def cumulative_trapezoidal_integral(xdata: np.ndarray,
                                     ydata: np.ndarray,
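The standard errors introduced above are the textbook simple-linear-regression quantities: with Sxx, Syy, Sxy the centered sums of squares, the residual standard error is s = sqrt((Syy - Sxy**2/Sxx)/(n-2)), se_slope = s/sqrt(Sxx), and se_intercept = s*sqrt(1/n + mean(x)**2/Sxx). As a rough cross-check (not PETPAL code; the sample data and the NumPy-only computation below are purely illustrative), the same five statistics can be reproduced directly:

```python
import numpy as np

# Illustrative data; in PETPAL these would be the transformed TAC coordinates.
xdata = np.array([0.0, 1.0, 2.0, 3.0, 4.0])
ydata = np.array([0.1, 1.9, 4.2, 5.8, 8.1])

n = len(xdata)
slope, intercept = np.polyfit(xdata, ydata, 1)

residuals = ydata - (intercept + slope * xdata)
ss_res = np.sum(residuals ** 2)
ss_tot = np.sum((ydata - ydata.mean()) ** 2)
r_squared = 1.0 - ss_res / ss_tot

sxx = np.sum((xdata - xdata.mean()) ** 2)
s = np.sqrt(ss_res / (n - 2))                 # residual standard error
se_slope = s / np.sqrt(sxx)
se_intercept = s * np.sqrt(1.0 / n + xdata.mean() ** 2 / sxx)

# Same ordering as the new function's documented return value:
# (intercept, slope, r_squared, se_intercept, se_slope)
print(intercept, slope, r_squared, se_intercept, se_slope)
```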
{petpal-0.5.4.dist-info → petpal-0.5.6.dist-info}/METADATA RENAMED
@@ -1,10 +1,9 @@
 Metadata-Version: 2.4
 Name: petpal
-Version: 0.5.4
+Version: 0.5.6
 Summary: PET-PAL (Positron Emission Tomography Processing and Analysis Library)
 Project-URL: Repository, https://github.com/PETPAL-WUSM/PETPAL.git
-Author: Bradley Judge
-Author-email: Furqan Dar <dar@wustl.edu>, Noah Goldman <noahg@wustl.edu>, Kenan Oestreich <kenan.oestreich@wustl.edu>
+Author-email: Noah Goldman <noahg@wustl.edu>, Bradley Judge <bjudge@wustl.edu>, Furqan Dar <dar@wustl.edu>, Kenan Oestreich <kenan.oestreich@wustl.edu>
 License-File: LICENSE
 Classifier: Development Status :: 2 - Pre-Alpha
 Classifier: Intended Audience :: Science/Research
@@ -45,7 +44,17 @@ Description-Content-Type: text/markdown
 
 ## Installation
 
-Currently, we only support building the package directly from source. Clone the repository using your preferred method. After navigating to the top-level directory (where `pyproject.toml` exists), we run the following command in the terminal:
+### Using Pip
+
+The simplest way to install PETPAL is using pip. First, ensure you are using Python version >=3.12. Then, run the following:
+
+```shell
+pip install petpal
+```
+
+### Build from source
+
+Clone the repository using your preferred method. After navigating to the top-level directory (where `pyproject.toml` exists), we run the following command in the terminal:
 
 ```shell
 pip install . # Installs the package
{petpal-0.5.4.dist-info → petpal-0.5.6.dist-info}/RECORD RENAMED
@@ -6,7 +6,7 @@ petpal/cli/cli_idif.py,sha256=6lh_kJHcGjlHDXZOvbiuHrNqpk5FovVV5_j7_dPHTHU,5145
 petpal/cli/cli_parametric_images.py,sha256=JBFb8QlxZoGOzqvCJPFuZ7czzGWntJP5ZcfeM5-QF4Y,7385
 petpal/cli/cli_pib_processing.py,sha256=6EjoY0wSJZ7PImgjV9PxlGPgcZkcZISvFh9ctsZW3Gw,6935
 petpal/cli/cli_plot_tacs.py,sha256=XycaYQQl9Jp5jqDp3QXOlVT2sXHYYpYSraEArxsfJec,6479
-petpal/cli/cli_preproc.py,sha256=H8yfAa_fGzhLcyAU8JSOEAxsIF_DN81Ek8iWGkGj63Q,20106
+petpal/cli/cli_preproc.py,sha256=_l4OoOnKUTgu0T5lT9ctZK1h5qj7yBZgtf0m0PK_HS0,20355
 petpal/cli/cli_pvc.py,sha256=DC0JZ6p1pkc5BDgQ006bi9y0Mz32ENrjUaOtSvFobP4,3967
 petpal/cli/cli_reference_tissue_models.py,sha256=18BlKN4rMehyFbdq_yr88oztqR99_gBtWKImhwf7CQY,13283
 petpal/cli/cli_stats.py,sha256=Mqbzc9yYo9SVAfxzaYbAbk7EheW472l1PY_ptlD1DOI,2050
@@ -19,7 +19,7 @@ petpal/input_function/idif_necktangle.py,sha256=o5kyAqyT4C6o7zELY4EjyHrkJyX1BWcx
 petpal/input_function/pca_guided_idif.py,sha256=MPB59K5Z5oyIunIWFqFQts61z647xawLNkv8wICrKYM,44821
 petpal/kinetic_modeling/__init__.py,sha256=tW4yRH3TwaXPwKPqdkrbQmSk9hjrF1yRkV_C59PPboQ,382
 petpal/kinetic_modeling/fit_tac_with_rtms.py,sha256=HpK7VWVCCNoSQABY9i28vYpZsMRmvgs4vdcM_ZbdaYE,20971
-petpal/kinetic_modeling/graphical_analysis.py,sha256=uq3cnBwPEcSccQb_vy6cSH1j3kI3GePYMvYehD5HZKs,49456
+petpal/kinetic_modeling/graphical_analysis.py,sha256=a7IOwYnG3Wao2XTjFgsPK563txm7s4lpMbaqUMQ5wUQ,51003
 petpal/kinetic_modeling/parametric_images.py,sha256=sXYracBFUtFyttO-6oiDAldnU8hPN6Y4vKOD1V-DnlE,47301
 petpal/kinetic_modeling/reference_tissue_models.py,sha256=FkLziIgtpA8tOL2gZJFg_nB8VPEBs40T7RsDAJ3nJ-A,39510
 petpal/kinetic_modeling/rtm_analysis.py,sha256=e3EuaHXml4PDALEczwyOPpnThINAGh41UKNlOQHAPqc,25945
@@ -63,8 +63,8 @@ petpal/visualizations/graphical_plots.py,sha256=ZCKUeLX2TAQscuHjA4bzlFm1bACHIyCw
 petpal/visualizations/image_visualization.py,sha256=Ob6TD4Q0pIrxi0m9SznK1TRWbX1Ea9Pt4wNMdRrTfTs,9124
 petpal/visualizations/qc_plots.py,sha256=iaCPe-LWWyM3OZzDPZodHZhP-z5fRdpUgaH7QS9VxPM,1243
 petpal/visualizations/tac_plots.py,sha256=zSGdptL-EnqhfDViAX8LFunln5a1b-NJ5ft7ZDcxQ38,15116
-petpal-0.5.4.dist-info/METADATA,sha256=hUe3oDjdNZ9hW9e8E_nnoo-9-2geYFMHjlWMAVcsLQ4,2478
-petpal-0.5.4.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
-petpal-0.5.4.dist-info/entry_points.txt,sha256=-AAg5GCaTRuwcLR7QWKPuKyBo8mTEMzspRmcBwaPdLo,692
-petpal-0.5.4.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
-petpal-0.5.4.dist-info/RECORD,,
+petpal-0.5.6.dist-info/METADATA,sha256=W0Fo_jDNeBMUBbXybn9HymGcieOEqygaZDqETObBvrw,2617
+petpal-0.5.6.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+petpal-0.5.6.dist-info/entry_points.txt,sha256=0SZmyXqBxKzQg2eerDA16n2BdUEXyixEm0_AUo2dFns,653
+petpal-0.5.6.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
+petpal-0.5.6.dist-info/RECORD,,
{petpal-0.5.4.dist-info → petpal-0.5.6.dist-info}/entry_points.txt RENAMED
@@ -1,5 +1,4 @@
 [console_scripts]
-petpal-bids = petpal.cli.cli_bids:main
 petpal-graph-analysis = petpal.cli.cli_graphical_analysis:main
 petpal-graph-plot = petpal.cli.cli_graphical_plots:main
 petpal-parametric-image = petpal.cli.cli_parametric_images:main
File without changes
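One behavioral note from the entry-points diff: the `petpal-bids` console script is no longer installed in 0.5.6, while the other `petpal-*` commands remain. If you want to confirm which scripts an installed environment actually exposes, a small introspection snippet like the one below works; it is purely illustrative and not part of PETPAL.

```python
from importlib.metadata import entry_points

# List the console scripts registered by installed packages and keep the petpal ones.
# Each line maps a command name to the "module:function" it invokes,
# e.g. petpal-graph-analysis -> petpal.cli.cli_graphical_analysis:main
for ep in entry_points(group="console_scripts"):
    if ep.name.startswith("petpal-"):
        print(f"{ep.name} -> {ep.value}")
```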