petpal 0.5.10__py3-none-any.whl → 0.6.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
petpal/cli/cli_preproc.py CHANGED
@@ -83,7 +83,12 @@ Examples:
     .. code-block:: bash
 
         petpal-preproc warp-pet-atlas -i /path/to/input_img.nii.gz -o petpal_reg-atlas.nii.gz --anatomical /path/to/anat.nii.gz --reference-atlas /path/to/atlas.nii.gz
-
+
+    * Crop segmentation to PET FOV:
+
+    .. code-block:: bash
+
+        petpal-preproc seg-crop -i /path/to/input_img.nii.gz -o petpal_cropped_seg.nii.gz --segmentation /path/to/segmentation.nii.gz
 
     See Also:
     * :mod:`~petpal.preproc.image_operations_4d` - module used for operations on 4D images.
@@ -99,7 +104,8 @@ from ..preproc import (image_operations_4d,
                        motion_corr,
                        register,
                        regional_tac_extraction,
-                       standard_uptake_value)
+                       standard_uptake_value,
+                       segmentation_tools)
 
 
 _PREPROC_EXAMPLES_ = r"""
@@ -126,6 +132,10 @@ Examples:
     petpal-preproc warp-pet-atlas -i /path/to/input_img.nii.gz -o petpal_reg-atlas.nii.gz --anatomical /path/to/anat.nii.gz --reference-atlas /path/to/atlas.nii.gz
   - SUV:
     petpal-preproc suv -i /path/to/input_img.nii.gz -o petpal_suv.nii.gz --weight 75 --dose 250 --start-time 1200 --end-time 3600
+  - Crop segmentation image to PET FOV:
+    petpal-preproc seg-crop -i /path/to/input_img.nii.gz -o petpal_cropped_seg.nii.gz --segmentation /path/to/segmentation.nii.gz
+  - Add eroded white matter region to segmentation image:
+    petpal-preproc eroded-wm -i /path/to/input_segmentation.nii.gz -o petpal_seg_with_eroded_wm.nii.gz
 """
 
 
@@ -357,6 +367,19 @@ def _generate_args() -> argparse.ArgumentParser:
                                 required=True,
                                 help='End time for SUV calculation in seconds from scan start',
                                 type=float)
+
+    parser_seg_crop = subparsers.add_parser('seg-crop',help='Crop segmentation image to PET FOV')
+    _add_common_args(parser_seg_crop)
+    parser_seg_crop.add_argument('-s',
+                                 '--segmentation',
+                                 required=True,
+                                 help='Path to segmentation image',
+                                 type=str)
+
+    parser_eroded_wm = subparsers.add_parser('eroded-wm',
+                                             help='Add eroded white matter region to segmentation image')
+    _add_common_args(parser_eroded_wm)
+
     return parser
 
 
@@ -454,6 +477,15 @@ def main():
                                                     end_time=args.end_time,
                                                     weight=args.weight,
                                                     dose=args.dose)
+        case 'seg_crop':
+            input_img = ants.image_read(filename=args.input_img)
+            seg_img = ants.image_read(filename=args.segmentation)
+            seg_cropped = segmentation_tools.seg_crop_to_pet_fov(pet_img=input_img,
+                                                                 segmentation_img=seg_img)
+            ants.image_write(seg_cropped,args.out_img)
+        case 'eroded_wm':
+            segmentation_tools.eroded_wm_segmentation(input_segmentation_path=args.input_img,
+                                                      out_segmentation_path=args.out_img)
 
 
 if __name__ == "__main__":
     main()
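
For orientation, the library-level equivalent of the two new subcommands, sketched only from the calls visible in the hunks above; the file names here are placeholders, not paths shipped with the package:

    import ants
    from petpal.preproc import segmentation_tools

    # seg-crop: crop a segmentation image to the PET field of view
    pet_img = ants.image_read(filename="input_img.nii.gz")
    seg_img = ants.image_read(filename="segmentation.nii.gz")
    seg_cropped = segmentation_tools.seg_crop_to_pet_fov(pet_img=pet_img,
                                                         segmentation_img=seg_img)
    ants.image_write(seg_cropped, "petpal_cropped_seg.nii.gz")

    # eroded-wm: add an eroded white matter region to a FreeSurfer segmentation
    segmentation_tools.eroded_wm_segmentation(
        input_segmentation_path="input_segmentation.nii.gz",
        out_segmentation_path="petpal_seg_with_eroded_wm.nii.gz")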
petpal/preproc/segmentation_tools.py CHANGED
@@ -84,7 +84,9 @@ def segmentations_merge(segmentation_primary: np.ndarray,
         regions added.
     """
     for region in regions:
-        region_mask = (segmentation_secondary > region - 0.1) & (segmentation_secondary < region + 0.1)
+        condition_above = segmentation_secondary > region - 0.1
+        condition_below = segmentation_secondary < region + 0.1
+        region_mask = condition_above & condition_below
         segmentation_primary[region_mask] = region
     return segmentation_primary
 
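
The refactor splits the one-line mask into two named conditions without changing behaviour. A self-contained NumPy sketch of that ±0.1 tolerance test, using made-up label arrays:

    import numpy as np

    # Stand-ins for segmentation_primary / segmentation_secondary label maps.
    segmentation_primary = np.array([0., 0., 10., 10.])
    segmentation_secondary = np.array([0., 41., 41., 2.])

    region = 41  # label to copy from the secondary segmentation
    condition_above = segmentation_secondary > region - 0.1
    condition_below = segmentation_secondary < region + 0.1
    region_mask = condition_above & condition_below  # voxels within +/-0.1 of the label
    segmentation_primary[region_mask] = region

    print(segmentation_primary)  # [ 0. 41. 41. 10.]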
@@ -229,8 +231,9 @@ def resample_segmentation(input_image_path: str,
     seg_image = nibabel.load(segmentation_image_path)
     pet_series = pet_image.get_fdata()
     image_first_frame = pet_series[:, :, :, 0]
+    to_vox_map_tuple = (image_first_frame.shape, pet_image.affine)
     seg_resampled = processing.resample_from_to(from_img=seg_image,
-                                                to_vox_map=(image_first_frame.shape, pet_image.affine),
+                                                to_vox_map=to_vox_map_tuple,
                                                 order=0)
     nibabel.save(seg_resampled, out_seg_path)
     if verbose:
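
The new to_vox_map_tuple is simply the target grid (first-frame shape plus the PET affine). A short sketch of the same nibabel call outside the function, with placeholder file names; order=0 keeps nearest-neighbour interpolation so label values are not blended:

    import nibabel
    from nibabel import processing

    pet_image = nibabel.load("pet_4d.nii.gz")        # placeholder 4D PET image
    seg_image = nibabel.load("aparc+aseg.nii.gz")    # placeholder label image

    to_vox_map_tuple = (pet_image.shape[:3], pet_image.affine)
    seg_resampled = processing.resample_from_to(from_img=seg_image,
                                                to_vox_map=to_vox_map_tuple,
                                                order=0)
    nibabel.save(seg_resampled, "seg_on_pet_grid.nii.gz")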
@@ -238,23 +241,30 @@
 
 
 def vat_wm_ref_region(input_segmentation_path: str,
-                      out_segmentation_path: str):
+                      out_segmentation_path: str | None) -> ants.ANTsImage:
     """
     Generates the cortical white matter reference region described in O'Donnell
-    JL et al. (2024) PET Quantification of [18F]VAT in Human Brain and Its
+    JL et al. (2024).
+
+    Reference: O'Donnell JL et al. (2024). PET Quantification of [18F]VAT in Human Brain and Its
     Test-Retest Reproducibility and Age Dependence. J Nucl Med. 2024 Jun
     3;65(6):956-961. doi: 10.2967/jnumed.123.266860. PMID: 38604762; PMCID:
-    PMC11149597. Requires FreeSurfer segmentation with original label mappings.
+    PMC11149597.
+
+    Requires FreeSurfer segmentation with original label mappings.
 
     Args:
         input_segmentation_path (str): Path to segmentation on which white
             matter reference region is computed.
         out_segmentation_path (str): Path to which white matter reference
            region mask image is saved.
+
+    Returns:
+        wm_erode (ants.ANTsImage): Eroded white matter reference region mask image.
     """
     wm_regions = [2,41,251,252,253,254,255,77,3000,3001,3002,3003,3004,3005,
                   3006,3007,3008,3009,3010,3011,3012,3013,3014,3015,3016,3017,
-                  3018,3019,3020,3021,3022,3023,3024,3025,3026,3027,3018,3029,
+                  3018,3019,3020,3021,3022,3023,3024,3025,3026,3027,3028,3029,
                   3030,3031,3032,3033,3034,3035,4000,4001,4002,4003,4004,4005,
                   4006,4007,4008,4009,4010,4011,4012,4013,4014,4015,4016,4017,
                   4018,4019,4020,4021,4022,4023,4024,4025,4026,4027,4028,4029,
@@ -277,7 +287,45 @@ def vat_wm_ref_region(input_segmentation_path: str,
     wm_csf_eroded = ants.threshold_image(image=wm_csf_blurred, low_thresh=0.95, binary=True)
     wm_erode = ants.mask_image(image=wm_merged, mask=wm_csf_eroded)
 
-    ants.image_write(image=wm_erode, filename=out_segmentation_path)
+    if out_segmentation_path is not None:
+        ants.image_write(image=wm_erode, filename=out_segmentation_path)
+
+    return wm_erode
+
+
+def eroded_wm_segmentation(input_segmentation_path: str,
+                           out_segmentation_path: str | None,
+                           eroded_wm_region_mapping: int = 1) -> ants.ANTsImage:
+    """
+    Generates eroded white matter region on a segmentation image and merges it into the image,
+    saving the result as a new segmentation image.
+
+    Requires FreeSurfer segmentation with original label mappings.
+
+    Args:
+        input_segmentation_path (str): Path to input freesurfer segmentation, such as aparc+aseg or
+            wmparc.
+        out_segmentation_path (str): Path to output segmentation image with replaced values in
+            eroded white matter region.
+        eroded_wm_region_mapping (int): Segmentation mapping for the eroded white matter region in
+            the output image. Default "1".
+
+    Returns:
+        seg_img (ants.ANTsImage): Input segmentation image with values in eroded white matter
+            replaced with `eroded_wm_region_mapping`.
+
+    See also:
+        :meth:`~petpal.preproc.segmentation_tools.vat_wm_ref_region` - function that generates the
+        eroded white matter region.
+    """
+    wm_erode = vat_wm_ref_region(input_segmentation_path=input_segmentation_path,
+                                 out_segmentation_path=None)
+    seg_img = ants.image_read(input_segmentation_path)
+    seg_img[wm_erode==1] = int(eroded_wm_region_mapping)
+    if out_segmentation_path is not None:
+        ants.image_write(image=seg_img, filename=out_segmentation_path)
+
+    return seg_img
 
 
 def vat_wm_region_merge(wmparc_segmentation_path: str,
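
A brief sketch of how the changed vat_wm_ref_region signature and the new eroded_wm_segmentation compose, based only on the code added above; paths are placeholders:

    from petpal.preproc import segmentation_tools

    # Passing out_segmentation_path=None now skips the write and returns the
    # eroded white matter mask in memory.
    wm_erode = segmentation_tools.vat_wm_ref_region(
        input_segmentation_path="aparc+aseg.nii.gz",
        out_segmentation_path=None)

    # eroded_wm_segmentation runs the same erosion and stamps the result into
    # the segmentation read from input_segmentation_path, under the chosen
    # label value (default 1).
    seg_with_wm = segmentation_tools.eroded_wm_segmentation(
        input_segmentation_path="aparc+aseg.nii.gz",
        out_segmentation_path="seg_with_eroded_wm.nii.gz",
        eroded_wm_region_mapping=2)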
@@ -500,7 +548,8 @@ def calc_vesselness_mask_from_quantiled_vesselness(input_image: ants.core.ANTsIm
                                                    morph_dil_radius: int = 0,
                                                    z_crop: int = 3) -> ants.core.ANTsImage:
     """
-    Generates a binary vesselness mask from a given vesselness image using quantile-based thresholding.
+    Generates a binary vesselness mask from a given vesselness image using quantile-based
+    thresholding.
 
     This function creates a binary mask by thresholding a vesselness image at a specified
     quantile of non-zero voxel values. Additionally, it allows for optional z-axis cropping
petpal-0.5.10.dist-info/METADATA → petpal-0.6.0.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: petpal
-Version: 0.5.10
+Version: 0.6.0
 Summary: PET-PAL (Positron Emission Tomography Processing and Analysis Library)
 Project-URL: Repository, https://github.com/PETPAL-WUSM/PETPAL.git
 Author-email: Noah Goldman <noahg@wustl.edu>, Bradley Judge <bjudge@wustl.edu>, Furqan Dar <dar@wustl.edu>, Kenan Oestreich <kenan.oestreich@wustl.edu>
petpal-0.5.10.dist-info/RECORD → petpal-0.6.0.dist-info/RECORD RENAMED
@@ -6,7 +6,7 @@ petpal/cli/cli_idif.py,sha256=6lh_kJHcGjlHDXZOvbiuHrNqpk5FovVV5_j7_dPHTHU,5145
 petpal/cli/cli_parametric_images.py,sha256=JBFb8QlxZoGOzqvCJPFuZ7czzGWntJP5ZcfeM5-QF4Y,7385
 petpal/cli/cli_pib_processing.py,sha256=ye_yw0ZQ4cSrMNemGR7cU9v6epD7Wbq1xaNAJwLzV_8,6889
 petpal/cli/cli_plot_tacs.py,sha256=XycaYQQl9Jp5jqDp3QXOlVT2sXHYYpYSraEArxsfJec,6479
-petpal/cli/cli_preproc.py,sha256=y5YvRliXC3zNY8oBJgTkzwPH9YNwWnEUWgiltUAv7AU,21850
+petpal/cli/cli_preproc.py,sha256=Wy0Y_2hTXg_Xyg2F7xOA93B4X4wzKX6QJDkB5mSikOI,23607
 petpal/cli/cli_pvc.py,sha256=DC0JZ6p1pkc5BDgQ006bi9y0Mz32ENrjUaOtSvFobP4,3967
 petpal/cli/cli_reference_tissue_models.py,sha256=18BlKN4rMehyFbdq_yr88oztqR99_gBtWKImhwf7CQY,13283
 petpal/cli/cli_stats.py,sha256=Mqbzc9yYo9SVAfxzaYbAbk7EheW472l1PY_ptlD1DOI,2050
@@ -44,7 +44,7 @@ petpal/preproc/motion_target.py,sha256=_OJp3NoYcyD3Ke3wl2KbfOhbJ6dp6ZduR9LLz0rIa
 petpal/preproc/partial_volume_corrections.py,sha256=J06j_Y_lhj3b3b9M5FbB2r2EPWQvoymG3GRUffSlYdE,6799
 petpal/preproc/regional_tac_extraction.py,sha256=ZXo2u-EAUg5wZj7GGYLMEaOAfLv8OCOR-Gd0xvih6Y4,22358
 petpal/preproc/register.py,sha256=NKg8mt_XMGa5HBdxYZh3sMu_KMJ0W41VHlX4Zl8wlyE,14171
-petpal/preproc/segmentation_tools.py,sha256=Xi1ZnBs3sp23MHWPPOLjuXi6qp4-igwIPXFJ4B_Yzsk,27186
+petpal/preproc/segmentation_tools.py,sha256=CDD0NWV23rkNB56HjZGLsO4HbV61O57KsNzQsR4d06g,29106
 petpal/preproc/standard_uptake_value.py,sha256=YJIt0fl3fwMLl0tRYHpPPprMTaN4Q5JjQ5dx_CQX1nI,7494
 petpal/preproc/symmetric_geometric_transfer_matrix.py,sha256=Sr5qMTiNC76ZRKiGG5So7fceV_Lr0ql7UybO_kJgmNo,20360
 petpal/utils/__init__.py,sha256=PlxBIKUtNvtSFnNZqz8myszOysaYzS8nSILMK4haVGg,412
@@ -65,8 +65,8 @@ petpal/visualizations/graphical_plots.py,sha256=ZCKUeLX2TAQscuHjA4bzlFm1bACHIyCw
 petpal/visualizations/image_visualization.py,sha256=Ob6TD4Q0pIrxi0m9SznK1TRWbX1Ea9Pt4wNMdRrTfTs,9124
 petpal/visualizations/qc_plots.py,sha256=iaCPe-LWWyM3OZzDPZodHZhP-z5fRdpUgaH7QS9VxPM,1243
 petpal/visualizations/tac_plots.py,sha256=zSGdptL-EnqhfDViAX8LFunln5a1b-NJ5ft7ZDcxQ38,15116
-petpal-0.5.10.dist-info/METADATA,sha256=QadOtBFu8TbKNi-WDticmziZXkREKywO0pP4F-AxAcI,2618
-petpal-0.5.10.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
-petpal-0.5.10.dist-info/entry_points.txt,sha256=0SZmyXqBxKzQg2eerDA16n2BdUEXyixEm0_AUo2dFns,653
-petpal-0.5.10.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
-petpal-0.5.10.dist-info/RECORD,,
+petpal-0.6.0.dist-info/METADATA,sha256=L5uinIwYmIUU0Ie_1wiLf-9TDeUeJrq6cHUuVXKSIq0,2617
+petpal-0.6.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+petpal-0.6.0.dist-info/entry_points.txt,sha256=0SZmyXqBxKzQg2eerDA16n2BdUEXyixEm0_AUo2dFns,653
+petpal-0.6.0.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
+petpal-0.6.0.dist-info/RECORD,,