setiastrosuitepro 1.6.2.post1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of setiastrosuitepro might be problematic.
- setiastro/__init__.py +2 -0
- setiastro/data/SASP_data.fits +0 -0
- setiastro/data/catalogs/List_of_Galaxies_with_Distances_Gly.csv +488 -0
- setiastro/data/catalogs/astrobin_filters.csv +890 -0
- setiastro/data/catalogs/astrobin_filters_page1_local.csv +51 -0
- setiastro/data/catalogs/cali2.csv +63 -0
- setiastro/data/catalogs/cali2color.csv +65 -0
- setiastro/data/catalogs/celestial_catalog - original.csv +16471 -0
- setiastro/data/catalogs/celestial_catalog.csv +24031 -0
- setiastro/data/catalogs/detected_stars.csv +24784 -0
- setiastro/data/catalogs/fits_header_data.csv +46 -0
- setiastro/data/catalogs/test.csv +8 -0
- setiastro/data/catalogs/updated_celestial_catalog.csv +16471 -0
- setiastro/images/Astro_Spikes.png +0 -0
- setiastro/images/Background_startup.jpg +0 -0
- setiastro/images/HRDiagram.png +0 -0
- setiastro/images/LExtract.png +0 -0
- setiastro/images/LInsert.png +0 -0
- setiastro/images/Oxygenation-atm-2.svg.png +0 -0
- setiastro/images/RGB080604.png +0 -0
- setiastro/images/abeicon.png +0 -0
- setiastro/images/aberration.png +0 -0
- setiastro/images/andromedatry.png +0 -0
- setiastro/images/andromedatry_satellited.png +0 -0
- setiastro/images/annotated.png +0 -0
- setiastro/images/aperture.png +0 -0
- setiastro/images/astrosuite.ico +0 -0
- setiastro/images/astrosuite.png +0 -0
- setiastro/images/astrosuitepro.icns +0 -0
- setiastro/images/astrosuitepro.ico +0 -0
- setiastro/images/astrosuitepro.png +0 -0
- setiastro/images/background.png +0 -0
- setiastro/images/background2.png +0 -0
- setiastro/images/benchmark.png +0 -0
- setiastro/images/big_moon_stabilizer_timeline.png +0 -0
- setiastro/images/big_moon_stabilizer_timeline_clean.png +0 -0
- setiastro/images/blaster.png +0 -0
- setiastro/images/blink.png +0 -0
- setiastro/images/clahe.png +0 -0
- setiastro/images/collage.png +0 -0
- setiastro/images/colorwheel.png +0 -0
- setiastro/images/contsub.png +0 -0
- setiastro/images/convo.png +0 -0
- setiastro/images/copyslot.png +0 -0
- setiastro/images/cosmic.png +0 -0
- setiastro/images/cosmicsat.png +0 -0
- setiastro/images/crop1.png +0 -0
- setiastro/images/cropicon.png +0 -0
- setiastro/images/curves.png +0 -0
- setiastro/images/cvs.png +0 -0
- setiastro/images/debayer.png +0 -0
- setiastro/images/denoise_cnn_custom.png +0 -0
- setiastro/images/denoise_cnn_graph.png +0 -0
- setiastro/images/disk.png +0 -0
- setiastro/images/dse.png +0 -0
- setiastro/images/exoicon.png +0 -0
- setiastro/images/eye.png +0 -0
- setiastro/images/fliphorizontal.png +0 -0
- setiastro/images/flipvertical.png +0 -0
- setiastro/images/font.png +0 -0
- setiastro/images/freqsep.png +0 -0
- setiastro/images/functionbundle.png +0 -0
- setiastro/images/graxpert.png +0 -0
- setiastro/images/green.png +0 -0
- setiastro/images/gridicon.png +0 -0
- setiastro/images/halo.png +0 -0
- setiastro/images/hdr.png +0 -0
- setiastro/images/histogram.png +0 -0
- setiastro/images/hubble.png +0 -0
- setiastro/images/imagecombine.png +0 -0
- setiastro/images/invert.png +0 -0
- setiastro/images/isophote.png +0 -0
- setiastro/images/isophote_demo_figure.png +0 -0
- setiastro/images/isophote_demo_image.png +0 -0
- setiastro/images/isophote_demo_model.png +0 -0
- setiastro/images/isophote_demo_residual.png +0 -0
- setiastro/images/jwstpupil.png +0 -0
- setiastro/images/linearfit.png +0 -0
- setiastro/images/livestacking.png +0 -0
- setiastro/images/mask.png +0 -0
- setiastro/images/maskapply.png +0 -0
- setiastro/images/maskcreate.png +0 -0
- setiastro/images/maskremove.png +0 -0
- setiastro/images/morpho.png +0 -0
- setiastro/images/mosaic.png +0 -0
- setiastro/images/multiscale_decomp.png +0 -0
- setiastro/images/nbtorgb.png +0 -0
- setiastro/images/neutral.png +0 -0
- setiastro/images/nuke.png +0 -0
- setiastro/images/openfile.png +0 -0
- setiastro/images/pedestal.png +0 -0
- setiastro/images/pen.png +0 -0
- setiastro/images/pixelmath.png +0 -0
- setiastro/images/platesolve.png +0 -0
- setiastro/images/ppp.png +0 -0
- setiastro/images/pro.png +0 -0
- setiastro/images/project.png +0 -0
- setiastro/images/psf.png +0 -0
- setiastro/images/redo.png +0 -0
- setiastro/images/redoicon.png +0 -0
- setiastro/images/rescale.png +0 -0
- setiastro/images/rgbalign.png +0 -0
- setiastro/images/rgbcombo.png +0 -0
- setiastro/images/rgbextract.png +0 -0
- setiastro/images/rotate180.png +0 -0
- setiastro/images/rotateclockwise.png +0 -0
- setiastro/images/rotatecounterclockwise.png +0 -0
- setiastro/images/satellite.png +0 -0
- setiastro/images/script.png +0 -0
- setiastro/images/selectivecolor.png +0 -0
- setiastro/images/simbad.png +0 -0
- setiastro/images/slot0.png +0 -0
- setiastro/images/slot1.png +0 -0
- setiastro/images/slot2.png +0 -0
- setiastro/images/slot3.png +0 -0
- setiastro/images/slot4.png +0 -0
- setiastro/images/slot5.png +0 -0
- setiastro/images/slot6.png +0 -0
- setiastro/images/slot7.png +0 -0
- setiastro/images/slot8.png +0 -0
- setiastro/images/slot9.png +0 -0
- setiastro/images/spcc.png +0 -0
- setiastro/images/spin_precession_vs_lunar_distance.png +0 -0
- setiastro/images/spinner.gif +0 -0
- setiastro/images/stacking.png +0 -0
- setiastro/images/staradd.png +0 -0
- setiastro/images/staralign.png +0 -0
- setiastro/images/starnet.png +0 -0
- setiastro/images/starregistration.png +0 -0
- setiastro/images/starspike.png +0 -0
- setiastro/images/starstretch.png +0 -0
- setiastro/images/statstretch.png +0 -0
- setiastro/images/supernova.png +0 -0
- setiastro/images/uhs.png +0 -0
- setiastro/images/undoicon.png +0 -0
- setiastro/images/upscale.png +0 -0
- setiastro/images/viewbundle.png +0 -0
- setiastro/images/whitebalance.png +0 -0
- setiastro/images/wimi_icon_256x256.png +0 -0
- setiastro/images/wimilogo.png +0 -0
- setiastro/images/wims.png +0 -0
- setiastro/images/wrench_icon.png +0 -0
- setiastro/images/xisfliberator.png +0 -0
- setiastro/qml/ResourceMonitor.qml +126 -0
- setiastro/saspro/__init__.py +20 -0
- setiastro/saspro/__main__.py +945 -0
- setiastro/saspro/_generated/__init__.py +7 -0
- setiastro/saspro/_generated/build_info.py +3 -0
- setiastro/saspro/abe.py +1346 -0
- setiastro/saspro/abe_preset.py +196 -0
- setiastro/saspro/aberration_ai.py +694 -0
- setiastro/saspro/aberration_ai_preset.py +224 -0
- setiastro/saspro/accel_installer.py +218 -0
- setiastro/saspro/accel_workers.py +30 -0
- setiastro/saspro/add_stars.py +624 -0
- setiastro/saspro/astrobin_exporter.py +1010 -0
- setiastro/saspro/astrospike.py +153 -0
- setiastro/saspro/astrospike_python.py +1841 -0
- setiastro/saspro/autostretch.py +198 -0
- setiastro/saspro/backgroundneutral.py +602 -0
- setiastro/saspro/batch_convert.py +328 -0
- setiastro/saspro/batch_renamer.py +522 -0
- setiastro/saspro/blemish_blaster.py +491 -0
- setiastro/saspro/blink_comparator_pro.py +2926 -0
- setiastro/saspro/bundles.py +61 -0
- setiastro/saspro/bundles_dock.py +114 -0
- setiastro/saspro/cheat_sheet.py +213 -0
- setiastro/saspro/clahe.py +368 -0
- setiastro/saspro/comet_stacking.py +1442 -0
- setiastro/saspro/common_tr.py +107 -0
- setiastro/saspro/config.py +38 -0
- setiastro/saspro/config_bootstrap.py +40 -0
- setiastro/saspro/config_manager.py +316 -0
- setiastro/saspro/continuum_subtract.py +1617 -0
- setiastro/saspro/convo.py +1400 -0
- setiastro/saspro/convo_preset.py +414 -0
- setiastro/saspro/copyastro.py +190 -0
- setiastro/saspro/cosmicclarity.py +1589 -0
- setiastro/saspro/cosmicclarity_preset.py +407 -0
- setiastro/saspro/crop_dialog_pro.py +973 -0
- setiastro/saspro/crop_preset.py +189 -0
- setiastro/saspro/curve_editor_pro.py +2562 -0
- setiastro/saspro/curves_preset.py +375 -0
- setiastro/saspro/debayer.py +673 -0
- setiastro/saspro/debug_utils.py +29 -0
- setiastro/saspro/dnd_mime.py +35 -0
- setiastro/saspro/doc_manager.py +2664 -0
- setiastro/saspro/exoplanet_detector.py +2166 -0
- setiastro/saspro/file_utils.py +284 -0
- setiastro/saspro/fitsmodifier.py +748 -0
- setiastro/saspro/fix_bom.py +32 -0
- setiastro/saspro/free_torch_memory.py +48 -0
- setiastro/saspro/frequency_separation.py +1349 -0
- setiastro/saspro/function_bundle.py +1596 -0
- setiastro/saspro/generate_translations.py +3092 -0
- setiastro/saspro/ghs_dialog_pro.py +663 -0
- setiastro/saspro/ghs_preset.py +284 -0
- setiastro/saspro/graxpert.py +637 -0
- setiastro/saspro/graxpert_preset.py +287 -0
- setiastro/saspro/gui/__init__.py +0 -0
- setiastro/saspro/gui/main_window.py +8810 -0
- setiastro/saspro/gui/mixins/__init__.py +33 -0
- setiastro/saspro/gui/mixins/dock_mixin.py +362 -0
- setiastro/saspro/gui/mixins/file_mixin.py +450 -0
- setiastro/saspro/gui/mixins/geometry_mixin.py +403 -0
- setiastro/saspro/gui/mixins/header_mixin.py +441 -0
- setiastro/saspro/gui/mixins/mask_mixin.py +421 -0
- setiastro/saspro/gui/mixins/menu_mixin.py +389 -0
- setiastro/saspro/gui/mixins/theme_mixin.py +367 -0
- setiastro/saspro/gui/mixins/toolbar_mixin.py +1457 -0
- setiastro/saspro/gui/mixins/update_mixin.py +309 -0
- setiastro/saspro/gui/mixins/view_mixin.py +435 -0
- setiastro/saspro/gui/statistics_dialog.py +47 -0
- setiastro/saspro/halobgon.py +488 -0
- setiastro/saspro/header_viewer.py +448 -0
- setiastro/saspro/headless_utils.py +88 -0
- setiastro/saspro/histogram.py +756 -0
- setiastro/saspro/history_explorer.py +941 -0
- setiastro/saspro/i18n.py +168 -0
- setiastro/saspro/image_combine.py +417 -0
- setiastro/saspro/image_peeker_pro.py +1604 -0
- setiastro/saspro/imageops/__init__.py +37 -0
- setiastro/saspro/imageops/mdi_snap.py +292 -0
- setiastro/saspro/imageops/scnr.py +36 -0
- setiastro/saspro/imageops/starbasedwhitebalance.py +210 -0
- setiastro/saspro/imageops/stretch.py +236 -0
- setiastro/saspro/isophote.py +1182 -0
- setiastro/saspro/layers.py +208 -0
- setiastro/saspro/layers_dock.py +714 -0
- setiastro/saspro/lazy_imports.py +193 -0
- setiastro/saspro/legacy/__init__.py +2 -0
- setiastro/saspro/legacy/image_manager.py +2226 -0
- setiastro/saspro/legacy/numba_utils.py +3676 -0
- setiastro/saspro/legacy/xisf.py +1071 -0
- setiastro/saspro/linear_fit.py +537 -0
- setiastro/saspro/live_stacking.py +1841 -0
- setiastro/saspro/log_bus.py +5 -0
- setiastro/saspro/logging_config.py +460 -0
- setiastro/saspro/luminancerecombine.py +309 -0
- setiastro/saspro/main_helpers.py +201 -0
- setiastro/saspro/mask_creation.py +931 -0
- setiastro/saspro/masks_core.py +56 -0
- setiastro/saspro/mdi_widgets.py +353 -0
- setiastro/saspro/memory_utils.py +666 -0
- setiastro/saspro/metadata_patcher.py +75 -0
- setiastro/saspro/mfdeconv.py +3831 -0
- setiastro/saspro/mfdeconv_earlystop.py +71 -0
- setiastro/saspro/mfdeconvcudnn.py +3263 -0
- setiastro/saspro/mfdeconvsport.py +2382 -0
- setiastro/saspro/minorbodycatalog.py +567 -0
- setiastro/saspro/morphology.py +407 -0
- setiastro/saspro/multiscale_decomp.py +1293 -0
- setiastro/saspro/nbtorgb_stars.py +541 -0
- setiastro/saspro/numba_utils.py +3145 -0
- setiastro/saspro/numba_warmup.py +141 -0
- setiastro/saspro/ops/__init__.py +9 -0
- setiastro/saspro/ops/command_help_dialog.py +623 -0
- setiastro/saspro/ops/command_runner.py +217 -0
- setiastro/saspro/ops/commands.py +1594 -0
- setiastro/saspro/ops/script_editor.py +1102 -0
- setiastro/saspro/ops/scripts.py +1473 -0
- setiastro/saspro/ops/settings.py +637 -0
- setiastro/saspro/parallel_utils.py +554 -0
- setiastro/saspro/pedestal.py +121 -0
- setiastro/saspro/perfect_palette_picker.py +1071 -0
- setiastro/saspro/pipeline.py +110 -0
- setiastro/saspro/pixelmath.py +1604 -0
- setiastro/saspro/plate_solver.py +2445 -0
- setiastro/saspro/project_io.py +797 -0
- setiastro/saspro/psf_utils.py +136 -0
- setiastro/saspro/psf_viewer.py +549 -0
- setiastro/saspro/pyi_rthook_astroquery.py +95 -0
- setiastro/saspro/remove_green.py +331 -0
- setiastro/saspro/remove_stars.py +1599 -0
- setiastro/saspro/remove_stars_preset.py +404 -0
- setiastro/saspro/resources.py +501 -0
- setiastro/saspro/rgb_combination.py +208 -0
- setiastro/saspro/rgb_extract.py +19 -0
- setiastro/saspro/rgbalign.py +723 -0
- setiastro/saspro/runtime_imports.py +7 -0
- setiastro/saspro/runtime_torch.py +754 -0
- setiastro/saspro/save_options.py +73 -0
- setiastro/saspro/selective_color.py +1552 -0
- setiastro/saspro/sfcc.py +1472 -0
- setiastro/saspro/shortcuts.py +3043 -0
- setiastro/saspro/signature_insert.py +1102 -0
- setiastro/saspro/stacking_suite.py +18470 -0
- setiastro/saspro/star_alignment.py +7435 -0
- setiastro/saspro/star_alignment_preset.py +329 -0
- setiastro/saspro/star_metrics.py +49 -0
- setiastro/saspro/star_spikes.py +765 -0
- setiastro/saspro/star_stretch.py +507 -0
- setiastro/saspro/stat_stretch.py +538 -0
- setiastro/saspro/status_log_dock.py +78 -0
- setiastro/saspro/subwindow.py +3328 -0
- setiastro/saspro/supernovaasteroidhunter.py +1719 -0
- setiastro/saspro/swap_manager.py +99 -0
- setiastro/saspro/torch_backend.py +89 -0
- setiastro/saspro/torch_rejection.py +434 -0
- setiastro/saspro/translations/all_source_strings.json +3654 -0
- setiastro/saspro/translations/ar_translations.py +3865 -0
- setiastro/saspro/translations/de_translations.py +3749 -0
- setiastro/saspro/translations/es_translations.py +3939 -0
- setiastro/saspro/translations/fr_translations.py +3858 -0
- setiastro/saspro/translations/hi_translations.py +3571 -0
- setiastro/saspro/translations/integrate_translations.py +270 -0
- setiastro/saspro/translations/it_translations.py +3678 -0
- setiastro/saspro/translations/ja_translations.py +3601 -0
- setiastro/saspro/translations/pt_translations.py +3869 -0
- setiastro/saspro/translations/ru_translations.py +2848 -0
- setiastro/saspro/translations/saspro_ar.qm +0 -0
- setiastro/saspro/translations/saspro_ar.ts +255 -0
- setiastro/saspro/translations/saspro_de.qm +0 -0
- setiastro/saspro/translations/saspro_de.ts +253 -0
- setiastro/saspro/translations/saspro_es.qm +0 -0
- setiastro/saspro/translations/saspro_es.ts +12520 -0
- setiastro/saspro/translations/saspro_fr.qm +0 -0
- setiastro/saspro/translations/saspro_fr.ts +12514 -0
- setiastro/saspro/translations/saspro_hi.qm +0 -0
- setiastro/saspro/translations/saspro_hi.ts +257 -0
- setiastro/saspro/translations/saspro_it.qm +0 -0
- setiastro/saspro/translations/saspro_it.ts +12520 -0
- setiastro/saspro/translations/saspro_ja.qm +0 -0
- setiastro/saspro/translations/saspro_ja.ts +257 -0
- setiastro/saspro/translations/saspro_pt.qm +0 -0
- setiastro/saspro/translations/saspro_pt.ts +257 -0
- setiastro/saspro/translations/saspro_ru.qm +0 -0
- setiastro/saspro/translations/saspro_ru.ts +237 -0
- setiastro/saspro/translations/saspro_sw.qm +0 -0
- setiastro/saspro/translations/saspro_sw.ts +257 -0
- setiastro/saspro/translations/saspro_uk.qm +0 -0
- setiastro/saspro/translations/saspro_uk.ts +10771 -0
- setiastro/saspro/translations/saspro_zh.qm +0 -0
- setiastro/saspro/translations/saspro_zh.ts +12520 -0
- setiastro/saspro/translations/sw_translations.py +3671 -0
- setiastro/saspro/translations/uk_translations.py +3700 -0
- setiastro/saspro/translations/zh_translations.py +3675 -0
- setiastro/saspro/versioning.py +77 -0
- setiastro/saspro/view_bundle.py +1558 -0
- setiastro/saspro/wavescale_hdr.py +645 -0
- setiastro/saspro/wavescale_hdr_preset.py +101 -0
- setiastro/saspro/wavescalede.py +680 -0
- setiastro/saspro/wavescalede_preset.py +230 -0
- setiastro/saspro/wcs_update.py +374 -0
- setiastro/saspro/whitebalance.py +492 -0
- setiastro/saspro/widgets/__init__.py +48 -0
- setiastro/saspro/widgets/common_utilities.py +306 -0
- setiastro/saspro/widgets/graphics_views.py +122 -0
- setiastro/saspro/widgets/image_utils.py +518 -0
- setiastro/saspro/widgets/minigame/game.js +986 -0
- setiastro/saspro/widgets/minigame/index.html +53 -0
- setiastro/saspro/widgets/minigame/style.css +241 -0
- setiastro/saspro/widgets/preview_dialogs.py +280 -0
- setiastro/saspro/widgets/resource_monitor.py +237 -0
- setiastro/saspro/widgets/spinboxes.py +275 -0
- setiastro/saspro/widgets/themed_buttons.py +13 -0
- setiastro/saspro/widgets/wavelet_utils.py +331 -0
- setiastro/saspro/wimi.py +7996 -0
- setiastro/saspro/wims.py +578 -0
- setiastro/saspro/window_shelf.py +185 -0
- setiastro/saspro/xisf.py +1123 -0
- setiastrosuitepro-1.6.2.post1.dist-info/METADATA +278 -0
- setiastrosuitepro-1.6.2.post1.dist-info/RECORD +367 -0
- setiastrosuitepro-1.6.2.post1.dist-info/WHEEL +4 -0
- setiastrosuitepro-1.6.2.post1.dist-info/entry_points.txt +6 -0
- setiastrosuitepro-1.6.2.post1.dist-info/licenses/LICENSE +674 -0
- setiastrosuitepro-1.6.2.post1.dist-info/licenses/license.txt +2580 -0
@@ -0,0 +1,554 @@
"""
Parallelization utilities for Seti Astro Suite Pro.

This module provides standardized parallelization helpers to ensure consistent
behavior across the codebase. It addresses optimization recommendation #4
from OTTIMIZZAZIONI_PROPOSTE.md.

Key features:
- Centralized worker count calculation
- Consistent ThreadPoolExecutor/ProcessPoolExecutor usage
- Memory-aware parallelization
- Task-type specific worker allocation

Usage:
    from setiastro.saspro.parallel_utils import (
        get_optimal_workers,
        get_io_workers,
        get_cpu_workers,
        run_in_thread_pool,
        run_in_process_pool,
    )

    # For CPU-bound tasks
    workers = get_cpu_workers()

    # For I/O-bound tasks
    workers = get_io_workers()

    # Run parallel tasks
    results = run_in_thread_pool(process_image, images, max_workers=workers)
"""

from __future__ import annotations

import os
import sys
import gc
import logging
from concurrent.futures import (
    ThreadPoolExecutor,
    ProcessPoolExecutor,
    Future,
    as_completed,
)
from typing import (
    Callable,
    TypeVar,
    Iterable,
    Iterator,
    Optional,
    Any,
    List,
    Tuple,
    Union,
)
from functools import wraps
from contextlib import contextmanager

logger = logging.getLogger(__name__)

T = TypeVar('T')
R = TypeVar('R')


# ---------------------------------------------------------------------------
# Constants and Defaults
# ---------------------------------------------------------------------------

# Maximum workers to prevent over-subscription
MAX_WORKERS_CAP = 32

# Default I/O worker multiplier (I/O-bound tasks can use more threads)
IO_WORKER_MULTIPLIER = 2

# Minimum workers
MIN_WORKERS = 1

# Memory threshold for reducing workers (in bytes, ~1 GB)
MEMORY_THRESHOLD_LOW = 1 * 1024 * 1024 * 1024


# ---------------------------------------------------------------------------
# Worker Count Calculation
# ---------------------------------------------------------------------------

def get_cpu_count() -> int:
    """
    Get the number of available CPU cores.

    Falls back to a conservative value if detection fails.

    Returns:
        Number of CPU cores (at least 1)
    """
    try:
        count = os.cpu_count()
        return max(1, count) if count else 4
    except Exception:
        return 4


def get_available_memory() -> int:
    """
    Get available system memory in bytes.

    Returns:
        Available memory in bytes, or 0 if unable to determine
    """
    try:
        import psutil
        return psutil.virtual_memory().available
    except ImportError:
        return 0
    except Exception:
        return 0


def is_memory_constrained(threshold: int = MEMORY_THRESHOLD_LOW) -> bool:
    """
    Check if the system is running low on memory.

    Args:
        threshold: Memory threshold in bytes

    Returns:
        True if available memory is below threshold
    """
    available = get_available_memory()
    if available == 0:
        return False  # Can't determine, assume OK
    return available < threshold


def get_optimal_workers(
    task_count: Optional[int] = None,
    max_cap: int = MAX_WORKERS_CAP,
    min_workers: int = MIN_WORKERS,
    memory_aware: bool = True,
    io_bound: bool = False,
) -> int:
    """
    Calculate the optimal number of workers for parallel execution.

    This function considers:
    - Available CPU cores
    - Task count (no point having more workers than tasks)
    - System memory constraints
    - Task type (I/O-bound vs CPU-bound)

    Args:
        task_count: Number of tasks to process (None for default)
        max_cap: Maximum workers cap
        min_workers: Minimum workers to use
        memory_aware: Reduce workers if memory is low
        io_bound: True for I/O-bound tasks (allows more workers)

    Returns:
        Optimal number of workers

    Example:
        >>> workers = get_optimal_workers(task_count=100, io_bound=True)
        >>> with ThreadPoolExecutor(max_workers=workers) as pool:
        ...     results = list(pool.map(process, items))
    """
    cpu_count = get_cpu_count()

    # Start with CPU count
    if io_bound:
        # I/O-bound tasks can use more threads (they spend time waiting)
        workers = min(cpu_count * IO_WORKER_MULTIPLIER, max_cap)
    else:
        # CPU-bound tasks should not exceed core count
        workers = min(cpu_count, max_cap)

    # Don't exceed task count
    if task_count is not None and task_count > 0:
        workers = min(workers, task_count)

    # Reduce if memory constrained
    if memory_aware and is_memory_constrained():
        workers = max(min_workers, workers // 2)
        logger.debug("Memory constrained, reduced workers to %d", workers)

    # Ensure within bounds
    return max(min_workers, min(workers, max_cap))


def get_cpu_workers(task_count: Optional[int] = None) -> int:
    """
    Get optimal worker count for CPU-bound tasks.

    Args:
        task_count: Number of tasks to process

    Returns:
        Optimal worker count
    """
    return get_optimal_workers(
        task_count=task_count,
        max_cap=MAX_WORKERS_CAP,
        io_bound=False,
    )


def get_io_workers(task_count: Optional[int] = None) -> int:
    """
    Get optimal worker count for I/O-bound tasks.

    Args:
        task_count: Number of tasks to process

    Returns:
        Optimal worker count (typically higher than CPU workers)
    """
    return get_optimal_workers(
        task_count=task_count,
        max_cap=MAX_WORKERS_CAP * 2,  # Allow more for I/O
        io_bound=True,
    )


def get_image_processing_workers(
    image_count: int,
    image_size_mb: float = 100.0,
) -> int:
    """
    Get optimal worker count for image processing tasks.

    Takes into account typical memory usage per image.

    Args:
        image_count: Number of images to process
        image_size_mb: Estimated size per image in MB

    Returns:
        Optimal worker count
    """
    cpu_count = get_cpu_count()

    # Estimate memory needed per worker
    available_mb = get_available_memory() / (1024 * 1024)
    if available_mb > 0:
        # Reserve 2GB for system, rest for workers
        usable_mb = max(0, available_mb - 2048)
        memory_limited_workers = max(1, int(usable_mb / image_size_mb))
    else:
        memory_limited_workers = cpu_count

    # Use the more restrictive limit
    workers = min(cpu_count, memory_limited_workers, image_count)

    return max(1, workers)

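# (Illustrative sketch, not part of the original module: how the worker-count
# helpers above might combine in practice. `n_frames` and the sizes here are
# hypothetical values, not package data.)
#
#     n_frames = 48  # subframes queued for calibration (hypothetical)
#     workers = get_image_processing_workers(n_frames, image_size_mb=250.0)
#     # With ~34 GB free: usable ~32 GB / 250 MB -> ~131 memory-limited
#     # workers, so on a 16-core machine the CPU count (16) is the binding
#     # limit and 16 workers are returned.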

# ---------------------------------------------------------------------------
# Parallel Execution Helpers
# ---------------------------------------------------------------------------

def run_in_thread_pool(
    func: Callable[[T], R],
    items: Iterable[T],
    max_workers: Optional[int] = None,
    progress_callback: Optional[Callable[[int, int], None]] = None,
) -> List[R]:
    """
    Execute a function across items using ThreadPoolExecutor.

    Best for I/O-bound tasks (file operations, network requests).

    Args:
        func: Function to apply to each item
        items: Items to process
        max_workers: Max parallel workers (auto-calculated if None)
        progress_callback: Optional callback(completed, total)

    Returns:
        List of results in original order

    Example:
        >>> def load_image(path):
        ...     return Image.open(path)
        >>> images = run_in_thread_pool(load_image, file_paths, max_workers=8)
    """
    items_list = list(items)
    total = len(items_list)

    if total == 0:
        return []

    if max_workers is None:
        max_workers = get_io_workers(total)

    results: List[Optional[R]] = [None] * total
    completed = 0

    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        # Submit all tasks with their indices
        future_to_idx = {
            executor.submit(func, item): idx
            for idx, item in enumerate(items_list)
        }

        for future in as_completed(future_to_idx):
            idx = future_to_idx[future]
            try:
                results[idx] = future.result()
            except Exception as e:
                logger.error("Task %d failed: %s", idx, e)
                raise

            completed += 1
            if progress_callback:
                progress_callback(completed, total)

    return results  # type: ignore


def run_in_process_pool(
    func: Callable[[T], R],
    items: Iterable[T],
    max_workers: Optional[int] = None,
    progress_callback: Optional[Callable[[int, int], None]] = None,
) -> List[R]:
    """
    Execute a function across items using ProcessPoolExecutor.

    Best for CPU-bound tasks that benefit from true parallelism.
    Note: func and items must be picklable.

    Args:
        func: Function to apply to each item (must be picklable)
        items: Items to process (must be picklable)
        max_workers: Max parallel workers (auto-calculated if None)
        progress_callback: Optional callback(completed, total)

    Returns:
        List of results in original order

    Example:
        >>> def heavy_compute(data):
        ...     return np.fft.fft2(data)
        >>> results = run_in_process_pool(heavy_compute, data_arrays)
    """
    items_list = list(items)
    total = len(items_list)

    if total == 0:
        return []

    if max_workers is None:
        max_workers = get_cpu_workers(total)

    results: List[Optional[R]] = [None] * total
    completed = 0

    with ProcessPoolExecutor(max_workers=max_workers) as executor:
        future_to_idx = {
            executor.submit(func, item): idx
            for idx, item in enumerate(items_list)
        }

        for future in as_completed(future_to_idx):
            idx = future_to_idx[future]
            try:
                results[idx] = future.result()
            except Exception as e:
                logger.error("Task %d failed: %s", idx, e)
                raise

            completed += 1
            if progress_callback:
                progress_callback(completed, total)

    return results  # type: ignore


@contextmanager
def thread_pool(max_workers: Optional[int] = None, io_bound: bool = True):
    """
    Context manager for ThreadPoolExecutor with automatic worker count.

    Args:
        max_workers: Max workers (auto-calculated if None)
        io_bound: Whether tasks are I/O-bound

    Yields:
        ThreadPoolExecutor instance

    Example:
        >>> with thread_pool(io_bound=True) as pool:
        ...     futures = [pool.submit(load_file, f) for f in files]
        ...     results = [f.result() for f in futures]
    """
    if max_workers is None:
        max_workers = get_io_workers() if io_bound else get_cpu_workers()

    executor = ThreadPoolExecutor(max_workers=max_workers)
    try:
        yield executor
    finally:
        executor.shutdown(wait=True)


@contextmanager
def process_pool(max_workers: Optional[int] = None):
    """
    Context manager for ProcessPoolExecutor with automatic worker count.

    Args:
        max_workers: Max workers (auto-calculated if None)

    Yields:
        ProcessPoolExecutor instance

    Example:
        >>> with process_pool() as pool:
        ...     futures = [pool.submit(compute, data) for data in datasets]
        ...     results = [f.result() for f in futures]
    """
    if max_workers is None:
        max_workers = get_cpu_workers()

    executor = ProcessPoolExecutor(max_workers=max_workers)
    try:
        yield executor
    finally:
        executor.shutdown(wait=True)


# ---------------------------------------------------------------------------
# Batch Processing
# ---------------------------------------------------------------------------

def batch_items(items: List[T], batch_size: int) -> Iterator[List[T]]:
    """
    Split items into batches of specified size.

    Args:
        items: List of items to batch
        batch_size: Maximum items per batch

    Yields:
        Batches of items

    Example:
        >>> for batch in batch_items(images, batch_size=10):
        ...     process_batch(batch)
    """
    for i in range(0, len(items), batch_size):
        yield items[i:i + batch_size]


def run_in_batches(
    func: Callable[[List[T]], List[R]],
    items: List[T],
    batch_size: int,
    max_workers: Optional[int] = None,
    progress_callback: Optional[Callable[[int, int], None]] = None,
) -> List[R]:
    """
    Process items in batches using parallel execution.

    Useful for reducing overhead when processing many small items.

    Args:
        func: Function that processes a batch and returns list of results
        items: All items to process
        batch_size: Items per batch
        max_workers: Max parallel workers
        progress_callback: Optional callback(completed_items, total_items)

    Returns:
        Flattened list of all results
    """
    batches = list(batch_items(items, batch_size))
    total_items = len(items)
    completed_items = 0
    all_results: List[R] = []

    if max_workers is None:
        max_workers = get_cpu_workers(len(batches))

    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        future_to_batch = {
            executor.submit(func, batch): batch
            for batch in batches
        }

        for future in as_completed(future_to_batch):
            batch = future_to_batch[future]
            try:
                batch_results = future.result()
                all_results.extend(batch_results)
            except Exception as e:
                logger.error("Batch processing failed: %s", e)
                raise

            completed_items += len(batch)
            if progress_callback:
                progress_callback(completed_items, total_items)

    return all_results


# ---------------------------------------------------------------------------
# Cleanup and Memory Management
# ---------------------------------------------------------------------------

def cleanup_after_parallel(force_gc: bool = True) -> None:
    """
    Clean up after parallel processing.

    Should be called after large parallel operations to free memory.

    Args:
        force_gc: Whether to force garbage collection
    """
    if force_gc:
        gc.collect()


# ---------------------------------------------------------------------------
# Module exports
# ---------------------------------------------------------------------------

__all__ = [
    # Constants
    'MAX_WORKERS_CAP',
    'IO_WORKER_MULTIPLIER',
    'MIN_WORKERS',

    # Worker calculation
    'get_cpu_count',
    'get_available_memory',
    'is_memory_constrained',
    'get_optimal_workers',
    'get_cpu_workers',
    'get_io_workers',
    'get_image_processing_workers',

    # Execution helpers
    'run_in_thread_pool',
    'run_in_process_pool',
    'thread_pool',
    'process_pool',

    # Batch processing
    'batch_items',
    'run_in_batches',

    # Cleanup
    'cleanup_after_parallel',
]
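A short sketch of how the batching helpers compose end to end, assuming a
caller that averages groups of frames; `stack_batch` and `frames` are
hypothetical names for this illustration, not package API:

    import numpy as np
    from setiastro.saspro.parallel_utils import (
        run_in_batches, get_cpu_workers, cleanup_after_parallel,
    )

    # Placeholder input: 32 blank frames standing in for real subframes.
    frames = [np.zeros((100, 100), dtype=np.float32) for _ in range(32)]

    def stack_batch(batch):
        # Average each batch of frames into a single frame.
        return [np.mean(np.stack(batch), axis=0)]

    means = run_in_batches(
        stack_batch, frames, batch_size=8,
        max_workers=get_cpu_workers(),
        progress_callback=lambda done, total: print(f"{done}/{total} frames"),
    )
    cleanup_after_parallel()  # force a GC pass after the large parallel run

Note that run_in_batches dispatches via ThreadPoolExecutor, so unlike
run_in_process_pool the batch function does not need to be picklable.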
@@ -0,0 +1,121 @@
# pro/pedestal.py
from __future__ import annotations
import numpy as np

try:
    import cv2  # not required
    # just for mask helpers if you later want it
except Exception:
    cv2 = None


def _as_float01(img: np.ndarray) -> np.ndarray:
    """Return float32 image; compress crazy-high float ranges to ~[0..1]."""
    a = np.asarray(img)
    if a.dtype != np.float32:
        a = a.astype(np.float32, copy=False)
    # If upstream keeps images normalized, this is a noop.
    # If someone slipped a >1.0 range, compress gently so 'min-subtract' behaves.
    if a.size:
        mx = float(a.max())
        if mx > 5.0:
            a = a / mx
    return a


def _remove_pedestal_array(a: np.ndarray) -> np.ndarray:
    """
    Subtract per-channel minimum. Preserves shape (2D or 3D).
    Result is clipped to [0,1] for safety.
    """
    a = _as_float01(a)

    if a.ndim == 2:
        mn = float(a.min()) if a.size else 0.0
        out = a - mn
    else:
        # H, W, C
        out = np.empty_like(a, dtype=np.float32)
        for c in range(a.shape[2]):
            ch = a[..., c]
            mn = float(ch.min()) if ch.size else 0.0
            out[..., c] = ch - mn

    return np.clip(out, 0.0, 1.0).astype(np.float32, copy=False)


def remove_pedestal(main, target_doc=None):
    """
    Subtract per-channel minimum on the *currently active* content:
      - If a Preview/ROI tab is active, operate on that ROI document.
      - Otherwise, operate on the full base document.
    Creates a single undo step via doc.apply_edit (ROI docs manage their
    own preview undo; base docs go into the full undo stack).
    """
    doc = target_doc

    # Prefer DocManager's view-aware resolution
    dm = getattr(main, "doc_manager", None) or getattr(main, "docman", None)

    if doc is None and dm is not None:
        # 1) Try to resolve from the currently active view (ROI-aware)
        vw = None
        try:
            if hasattr(dm, "_active_view_widget") and callable(dm._active_view_widget):
                vw = dm._active_view_widget()
        except Exception:
            vw = None

        if vw is not None and hasattr(dm, "get_document_for_view"):
            try:
                candidate = dm.get_document_for_view(vw)
                if candidate is not None:
                    doc = candidate
            except Exception:
                doc = None

        # 2) If that failed, fall back to whatever DM thinks is active
        if doc is None and hasattr(dm, "get_active_document"):
            try:
                doc = dm.get_active_document()
            except Exception:
                doc = None

    # 3) Last resort: legacy _active_doc on the main window
    if doc is None:
        ad = getattr(main, "_active_doc", None)
        if callable(ad):
            doc = ad()
        else:
            doc = ad

    if doc is None or getattr(doc, "image", None) is None:
        # Quiet exit if nothing to do.
        return

    # Optional debug so you can confirm it hits ROI when Preview is active
    try:
        is_roi = bool(getattr(doc, "_roi", None)) or bool(getattr(doc, "_roi_info", None))
        print(f"[Pedestal] target_doc={doc!r}, type={type(doc)}, is_roi={is_roi}")
    except Exception:
        pass

    # ---- actual operation ----
    try:
        src = np.asarray(doc.image)
        out = _remove_pedestal_array(src)

        meta = {
            "step_name": "Pedestal Removal",
            "bit_depth": "32-bit floating point",
            "is_mono": (out.ndim == 2),
        }
        doc.apply_edit(out, metadata=meta, step_name="Pedestal Removal")

        if hasattr(main, "_log"):
            main._log("Pedestal Removal")
    except Exception as e:
        try:
            from PyQt6.QtWidgets import QMessageBox
            QMessageBox.critical(main, "Pedestal Removal", f"Failed:\n{e}")
        except Exception:
            pass