datalab-platform 0.0.1.dev0__py3-none-any.whl → 1.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- datalab/__init__.py +35 -2
- datalab/adapters_metadata/__init__.py +31 -0
- datalab/adapters_metadata/base_adapter.py +316 -0
- datalab/adapters_metadata/common.py +422 -0
- datalab/adapters_metadata/geometry_adapter.py +98 -0
- datalab/adapters_metadata/table_adapter.py +84 -0
- datalab/adapters_plotpy/__init__.py +54 -0
- datalab/adapters_plotpy/annotations.py +124 -0
- datalab/adapters_plotpy/base.py +110 -0
- datalab/adapters_plotpy/converters.py +86 -0
- datalab/adapters_plotpy/factories.py +80 -0
- datalab/adapters_plotpy/objects/__init__.py +0 -0
- datalab/adapters_plotpy/objects/base.py +197 -0
- datalab/adapters_plotpy/objects/image.py +157 -0
- datalab/adapters_plotpy/objects/scalar.py +565 -0
- datalab/adapters_plotpy/objects/signal.py +264 -0
- datalab/adapters_plotpy/roi/__init__.py +0 -0
- datalab/adapters_plotpy/roi/base.py +146 -0
- datalab/adapters_plotpy/roi/factory.py +93 -0
- datalab/adapters_plotpy/roi/image.py +207 -0
- datalab/adapters_plotpy/roi/signal.py +72 -0
- datalab/app.py +98 -0
- datalab/config.py +817 -0
- datalab/control/__init__.py +0 -0
- datalab/control/baseproxy.py +776 -0
- datalab/control/proxy.py +343 -0
- datalab/control/remote.py +1005 -0
- datalab/data/doc/DataLab_en.pdf +0 -0
- datalab/data/doc/DataLab_fr.pdf +0 -0
- datalab/data/icons/analysis/delete_results.svg +109 -0
- datalab/data/icons/analysis/fw1e2.svg +156 -0
- datalab/data/icons/analysis/fwhm.svg +156 -0
- datalab/data/icons/analysis/histogram.svg +49 -0
- datalab/data/icons/analysis/peak_detect.svg +160 -0
- datalab/data/icons/analysis/plot_results.svg +151 -0
- datalab/data/icons/analysis/show_results.svg +83 -0
- datalab/data/icons/analysis/stats.svg +49 -0
- datalab/data/icons/analysis.svg +120 -0
- datalab/data/icons/apply.svg +3 -0
- datalab/data/icons/check_all.svg +15 -0
- datalab/data/icons/collapse.svg +44 -0
- datalab/data/icons/collapse_selection.svg +63 -0
- datalab/data/icons/console.svg +101 -0
- datalab/data/icons/create/1d-normal.svg +8 -0
- datalab/data/icons/create/1d-poisson.svg +9 -0
- datalab/data/icons/create/1d-uniform.svg +8 -0
- datalab/data/icons/create/1d-zero.svg +57 -0
- datalab/data/icons/create/2d-gaussian.svg +56 -0
- datalab/data/icons/create/2d-normal.svg +38 -0
- datalab/data/icons/create/2d-poisson.svg +38 -0
- datalab/data/icons/create/2d-ramp.svg +90 -0
- datalab/data/icons/create/2d-sinc.svg +62 -0
- datalab/data/icons/create/2d-uniform.svg +38 -0
- datalab/data/icons/create/2d-zero.svg +13 -0
- datalab/data/icons/create/checkerboard.svg +39 -0
- datalab/data/icons/create/cosine.svg +12 -0
- datalab/data/icons/create/exponential.svg +55 -0
- datalab/data/icons/create/gaussian.svg +12 -0
- datalab/data/icons/create/grating.svg +29 -0
- datalab/data/icons/create/linear_chirp.svg +7 -0
- datalab/data/icons/create/logistic.svg +7 -0
- datalab/data/icons/create/lorentzian.svg +12 -0
- datalab/data/icons/create/planck.svg +12 -0
- datalab/data/icons/create/polynomial.svg +7 -0
- datalab/data/icons/create/pulse.svg +12 -0
- datalab/data/icons/create/ring.svg +18 -0
- datalab/data/icons/create/sawtooth.svg +7 -0
- datalab/data/icons/create/siemens.svg +35 -0
- datalab/data/icons/create/sinc.svg +12 -0
- datalab/data/icons/create/sine.svg +7 -0
- datalab/data/icons/create/square.svg +7 -0
- datalab/data/icons/create/square_pulse.svg +7 -0
- datalab/data/icons/create/step.svg +7 -0
- datalab/data/icons/create/step_pulse.svg +12 -0
- datalab/data/icons/create/triangle.svg +7 -0
- datalab/data/icons/create/voigt.svg +12 -0
- datalab/data/icons/edit/annotations.svg +72 -0
- datalab/data/icons/edit/annotations_copy.svg +114 -0
- datalab/data/icons/edit/annotations_delete.svg +83 -0
- datalab/data/icons/edit/annotations_edit.svg +98 -0
- datalab/data/icons/edit/annotations_export.svg +85 -0
- datalab/data/icons/edit/annotations_import.svg +85 -0
- datalab/data/icons/edit/annotations_paste.svg +100 -0
- datalab/data/icons/edit/copy_titles.svg +109 -0
- datalab/data/icons/edit/delete.svg +84 -0
- datalab/data/icons/edit/delete_all.svg +214 -0
- datalab/data/icons/edit/duplicate.svg +64 -0
- datalab/data/icons/edit/goto_source.svg +60 -0
- datalab/data/icons/edit/metadata.svg +60 -0
- datalab/data/icons/edit/metadata_add.svg +80 -0
- datalab/data/icons/edit/metadata_copy.svg +96 -0
- datalab/data/icons/edit/metadata_delete.svg +62 -0
- datalab/data/icons/edit/metadata_export.svg +68 -0
- datalab/data/icons/edit/metadata_import.svg +68 -0
- datalab/data/icons/edit/metadata_paste.svg +79 -0
- datalab/data/icons/edit/move_down.svg +55 -0
- datalab/data/icons/edit/move_up.svg +54 -0
- datalab/data/icons/edit/new_group.svg +76 -0
- datalab/data/icons/edit/recompute.svg +60 -0
- datalab/data/icons/edit/rename.svg +49 -0
- datalab/data/icons/edit.svg +16 -0
- datalab/data/icons/expand.svg +44 -0
- datalab/data/icons/expand_selection.svg +63 -0
- datalab/data/icons/fit/cdf_fit.svg +56 -0
- datalab/data/icons/fit/exponential_fit.svg +55 -0
- datalab/data/icons/fit/gaussian_fit.svg +62 -0
- datalab/data/icons/fit/interactive_fit.svg +101 -0
- datalab/data/icons/fit/linear_fit.svg +57 -0
- datalab/data/icons/fit/lorentzian_fit.svg +209 -0
- datalab/data/icons/fit/multigaussian_fit.svg +85 -0
- datalab/data/icons/fit/multilorentzian_fit.svg +85 -0
- datalab/data/icons/fit/piecewiseexponential_fit.svg +209 -0
- datalab/data/icons/fit/planckian_fit.svg +62 -0
- datalab/data/icons/fit/polynomial_fit.svg +59 -0
- datalab/data/icons/fit/sigmoid_fit.svg +56 -0
- datalab/data/icons/fit/sinusoidal_fit.svg +72 -0
- datalab/data/icons/fit/twohalfgaussian_fit.svg +63 -0
- datalab/data/icons/fit/voigt_fit.svg +57 -0
- datalab/data/icons/group.svg +56 -0
- datalab/data/icons/h5/h5array.svg +59 -0
- datalab/data/icons/h5/h5attrs.svg +75 -0
- datalab/data/icons/h5/h5browser.svg +133 -0
- datalab/data/icons/h5/h5file.svg +69 -0
- datalab/data/icons/h5/h5group.svg +49 -0
- datalab/data/icons/h5/h5scalar.svg +1 -0
- datalab/data/icons/help_pdf.svg +46 -0
- datalab/data/icons/history.svg +7 -0
- datalab/data/icons/image.svg +135 -0
- datalab/data/icons/io/fileopen_directory.svg +60 -0
- datalab/data/icons/io/fileopen_h5.svg +84 -0
- datalab/data/icons/io/fileopen_ima.svg +187 -0
- datalab/data/icons/io/fileopen_py.svg +123 -0
- datalab/data/icons/io/fileopen_sig.svg +138 -0
- datalab/data/icons/io/filesave_h5.svg +97 -0
- datalab/data/icons/io/filesave_ima.svg +200 -0
- datalab/data/icons/io/filesave_py.svg +136 -0
- datalab/data/icons/io/filesave_sig.svg +151 -0
- datalab/data/icons/io/import_text.svg +144 -0
- datalab/data/icons/io/save_to_directory.svg +134 -0
- datalab/data/icons/io.svg +84 -0
- datalab/data/icons/libre-camera-flash-off.svg +1 -0
- datalab/data/icons/libre-camera-flash-on.svg +1 -0
- datalab/data/icons/libre-gui-about.svg +1 -0
- datalab/data/icons/libre-gui-action-delete.svg +1 -0
- datalab/data/icons/libre-gui-add.svg +1 -0
- datalab/data/icons/libre-gui-arrow-down.svg +1 -0
- datalab/data/icons/libre-gui-arrow-left.svg +1 -0
- datalab/data/icons/libre-gui-arrow-right.svg +1 -0
- datalab/data/icons/libre-gui-arrow-up.svg +1 -0
- datalab/data/icons/libre-gui-close.svg +40 -0
- datalab/data/icons/libre-gui-cogs.svg +1 -0
- datalab/data/icons/libre-gui-globe.svg +1 -0
- datalab/data/icons/libre-gui-help.svg +1 -0
- datalab/data/icons/libre-gui-link.svg +1 -0
- datalab/data/icons/libre-gui-menu.svg +1 -0
- datalab/data/icons/libre-gui-pencil.svg +1 -0
- datalab/data/icons/libre-gui-plugin.svg +1 -0
- datalab/data/icons/libre-gui-questions.svg +1 -0
- datalab/data/icons/libre-gui-settings.svg +1 -0
- datalab/data/icons/libre-gui-unlink.svg +1 -0
- datalab/data/icons/libre-tech-ram.svg +1 -0
- datalab/data/icons/libre-toolbox.svg +1 -0
- datalab/data/icons/logs.svg +1 -0
- datalab/data/icons/markers.svg +74 -0
- datalab/data/icons/menu.svg +13 -0
- datalab/data/icons/new_ima.svg +148 -0
- datalab/data/icons/new_sig.svg +123 -0
- datalab/data/icons/operations/abs.svg +116 -0
- datalab/data/icons/operations/arithmetic.svg +123 -0
- datalab/data/icons/operations/average.svg +124 -0
- datalab/data/icons/operations/complex_from_magnitude_phase.svg +116 -0
- datalab/data/icons/operations/complex_from_real_imag.svg +124 -0
- datalab/data/icons/operations/constant.svg +116 -0
- datalab/data/icons/operations/constant_add.svg +109 -0
- datalab/data/icons/operations/constant_divide.svg +109 -0
- datalab/data/icons/operations/constant_multiply.svg +109 -0
- datalab/data/icons/operations/constant_subtract.svg +109 -0
- datalab/data/icons/operations/convert_dtype.svg +117 -0
- datalab/data/icons/operations/convolution.svg +46 -0
- datalab/data/icons/operations/deconvolution.svg +57 -0
- datalab/data/icons/operations/derivative.svg +127 -0
- datalab/data/icons/operations/difference.svg +52 -0
- datalab/data/icons/operations/division.svg +139 -0
- datalab/data/icons/operations/exp.svg +116 -0
- datalab/data/icons/operations/flip_horizontally.svg +69 -0
- datalab/data/icons/operations/flip_vertically.svg +74 -0
- datalab/data/icons/operations/im.svg +124 -0
- datalab/data/icons/operations/integral.svg +50 -0
- datalab/data/icons/operations/inverse.svg +143 -0
- datalab/data/icons/operations/log10.svg +109 -0
- datalab/data/icons/operations/phase.svg +116 -0
- datalab/data/icons/operations/power.svg +118 -0
- datalab/data/icons/operations/product.svg +124 -0
- datalab/data/icons/operations/profile.svg +379 -0
- datalab/data/icons/operations/profile_average.svg +399 -0
- datalab/data/icons/operations/profile_radial.svg +261 -0
- datalab/data/icons/operations/profile_segment.svg +262 -0
- datalab/data/icons/operations/quadratic_difference.svg +84 -0
- datalab/data/icons/operations/re.svg +124 -0
- datalab/data/icons/operations/rotate_left.svg +72 -0
- datalab/data/icons/operations/rotate_right.svg +72 -0
- datalab/data/icons/operations/signals_to_image.svg +314 -0
- datalab/data/icons/operations/sqrt.svg +110 -0
- datalab/data/icons/operations/std.svg +124 -0
- datalab/data/icons/operations/sum.svg +102 -0
- datalab/data/icons/play_demo.svg +9 -0
- datalab/data/icons/processing/axis_transform.svg +62 -0
- datalab/data/icons/processing/bandpass.svg +79 -0
- datalab/data/icons/processing/bandstop.svg +71 -0
- datalab/data/icons/processing/binning.svg +126 -0
- datalab/data/icons/processing/clip.svg +119 -0
- datalab/data/icons/processing/detrending.svg +173 -0
- datalab/data/icons/processing/distribute_on_grid.svg +769 -0
- datalab/data/icons/processing/edge_detection.svg +46 -0
- datalab/data/icons/processing/erase.svg +1 -0
- datalab/data/icons/processing/exposure.svg +143 -0
- datalab/data/icons/processing/fourier.svg +104 -0
- datalab/data/icons/processing/highpass.svg +59 -0
- datalab/data/icons/processing/interpolation.svg +71 -0
- datalab/data/icons/processing/level_adjustment.svg +70 -0
- datalab/data/icons/processing/lowpass.svg +60 -0
- datalab/data/icons/processing/morphology.svg +49 -0
- datalab/data/icons/processing/noise_addition.svg +114 -0
- datalab/data/icons/processing/noise_reduction.svg +38 -0
- datalab/data/icons/processing/normalize.svg +84 -0
- datalab/data/icons/processing/offset_correction.svg +131 -0
- datalab/data/icons/processing/resampling1d.svg +101 -0
- datalab/data/icons/processing/resampling2d.svg +240 -0
- datalab/data/icons/processing/reset_positions.svg +185 -0
- datalab/data/icons/processing/resize.svg +9 -0
- datalab/data/icons/processing/reverse_signal_x.svg +171 -0
- datalab/data/icons/processing/stability.svg +11 -0
- datalab/data/icons/processing/swap_x_y.svg +65 -0
- datalab/data/icons/processing/thresholding.svg +63 -0
- datalab/data/icons/processing/windowing.svg +45 -0
- datalab/data/icons/properties.svg +26 -0
- datalab/data/icons/reset.svg +9 -0
- datalab/data/icons/restore.svg +40 -0
- datalab/data/icons/roi/roi.svg +76 -0
- datalab/data/icons/roi/roi_coordinate.svg +78 -0
- datalab/data/icons/roi/roi_copy.svg +112 -0
- datalab/data/icons/roi/roi_delete.svg +81 -0
- datalab/data/icons/roi/roi_export.svg +87 -0
- datalab/data/icons/roi/roi_graphical.svg +78 -0
- datalab/data/icons/roi/roi_grid.svg +67 -0
- datalab/data/icons/roi/roi_ima.svg +188 -0
- datalab/data/icons/roi/roi_import.svg +87 -0
- datalab/data/icons/roi/roi_new.svg +81 -0
- datalab/data/icons/roi/roi_new_circle.svg +95 -0
- datalab/data/icons/roi/roi_new_polygon.svg +110 -0
- datalab/data/icons/roi/roi_new_rectangle.svg +70 -0
- datalab/data/icons/roi/roi_paste.svg +98 -0
- datalab/data/icons/roi/roi_sig.svg +124 -0
- datalab/data/icons/shapes.svg +134 -0
- datalab/data/icons/signal.svg +103 -0
- datalab/data/icons/table.svg +85 -0
- datalab/data/icons/table_unavailable.svg +102 -0
- datalab/data/icons/to_signal.svg +124 -0
- datalab/data/icons/tour/next.svg +44 -0
- datalab/data/icons/tour/previous.svg +44 -0
- datalab/data/icons/tour/rewind.svg +51 -0
- datalab/data/icons/tour/stop.svg +47 -0
- datalab/data/icons/tour/tour.svg +16 -0
- datalab/data/icons/uncheck_all.svg +78 -0
- datalab/data/icons/view/curve_antialiasing.svg +50 -0
- datalab/data/icons/view/new_window.svg +98 -0
- datalab/data/icons/view/refresh-auto.svg +57 -0
- datalab/data/icons/view/refresh-manual.svg +51 -0
- datalab/data/icons/view/reset_curve_styles.svg +96 -0
- datalab/data/icons/view/show_first.svg +55 -0
- datalab/data/icons/view/show_titles.svg +46 -0
- datalab/data/icons/visualization.svg +51 -0
- datalab/data/logo/DataLab-Banner-150.png +0 -0
- datalab/data/logo/DataLab-Banner-200.png +0 -0
- datalab/data/logo/DataLab-Banner2-100.png +0 -0
- datalab/data/logo/DataLab-Splash.png +0 -0
- datalab/data/logo/DataLab-watermark.png +0 -0
- datalab/data/logo/DataLab.svg +83 -0
- datalab/data/tests/reordering_test.h5 +0 -0
- datalab/data/tutorials/fabry_perot/fabry-perot1.jpg +0 -0
- datalab/data/tutorials/fabry_perot/fabry-perot2.jpg +0 -0
- datalab/data/tutorials/laser_beam/TEM00_z_13.jpg +0 -0
- datalab/data/tutorials/laser_beam/TEM00_z_18.jpg +0 -0
- datalab/data/tutorials/laser_beam/TEM00_z_23.jpg +0 -0
- datalab/data/tutorials/laser_beam/TEM00_z_30.jpg +0 -0
- datalab/data/tutorials/laser_beam/TEM00_z_35.jpg +0 -0
- datalab/data/tutorials/laser_beam/TEM00_z_40.jpg +0 -0
- datalab/data/tutorials/laser_beam/TEM00_z_45.jpg +0 -0
- datalab/data/tutorials/laser_beam/TEM00_z_50.jpg +0 -0
- datalab/data/tutorials/laser_beam/TEM00_z_55.jpg +0 -0
- datalab/data/tutorials/laser_beam/TEM00_z_60.jpg +0 -0
- datalab/data/tutorials/laser_beam/TEM00_z_65.jpg +0 -0
- datalab/data/tutorials/laser_beam/TEM00_z_70.jpg +0 -0
- datalab/data/tutorials/laser_beam/TEM00_z_75.jpg +0 -0
- datalab/data/tutorials/laser_beam/TEM00_z_80.jpg +0 -0
- datalab/env.py +542 -0
- datalab/gui/__init__.py +89 -0
- datalab/gui/actionhandler.py +1701 -0
- datalab/gui/docks.py +473 -0
- datalab/gui/h5io.py +150 -0
- datalab/gui/macroeditor.py +310 -0
- datalab/gui/main.py +2081 -0
- datalab/gui/newobject.py +217 -0
- datalab/gui/objectview.py +766 -0
- datalab/gui/panel/__init__.py +48 -0
- datalab/gui/panel/base.py +3254 -0
- datalab/gui/panel/image.py +157 -0
- datalab/gui/panel/macro.py +607 -0
- datalab/gui/panel/signal.py +164 -0
- datalab/gui/plothandler.py +800 -0
- datalab/gui/processor/__init__.py +84 -0
- datalab/gui/processor/base.py +2456 -0
- datalab/gui/processor/catcher.py +75 -0
- datalab/gui/processor/image.py +1214 -0
- datalab/gui/processor/signal.py +755 -0
- datalab/gui/profiledialog.py +333 -0
- datalab/gui/roieditor.py +633 -0
- datalab/gui/roigrideditor.py +208 -0
- datalab/gui/settings.py +612 -0
- datalab/gui/tour.py +908 -0
- datalab/h5/__init__.py +12 -0
- datalab/h5/common.py +314 -0
- datalab/h5/generic.py +580 -0
- datalab/h5/native.py +39 -0
- datalab/h5/utils.py +95 -0
- datalab/objectmodel.py +640 -0
- datalab/plugins/_readme_.txt +9 -0
- datalab/plugins/datalab_imageformats.py +175 -0
- datalab/plugins/datalab_testdata.py +190 -0
- datalab/plugins.py +355 -0
- datalab/tests/__init__.py +199 -0
- datalab/tests/backbone/__init__.py +1 -0
- datalab/tests/backbone/config_unit_test.py +170 -0
- datalab/tests/backbone/config_versioning_unit_test.py +34 -0
- datalab/tests/backbone/dictlistserial_app_test.py +38 -0
- datalab/tests/backbone/errorcatcher_unit_test.py +69 -0
- datalab/tests/backbone/errormsgbox_unit_test.py +50 -0
- datalab/tests/backbone/execenv_unit.py +262 -0
- datalab/tests/backbone/loadtest_gdi.py +147 -0
- datalab/tests/backbone/long_callback.py +96 -0
- datalab/tests/backbone/main_app_test.py +137 -0
- datalab/tests/backbone/memory_leak.py +43 -0
- datalab/tests/backbone/procisolation1_unit.py +128 -0
- datalab/tests/backbone/procisolation2_unit.py +171 -0
- datalab/tests/backbone/procisolation_unit_test.py +22 -0
- datalab/tests/backbone/profiling_app.py +27 -0
- datalab/tests/backbone/strings_unit_test.py +65 -0
- datalab/tests/backbone/title_formatting_unit_test.py +82 -0
- datalab/tests/conftest.py +131 -0
- datalab/tests/features/__init__.py +1 -0
- datalab/tests/features/applauncher/__init__.py +1 -0
- datalab/tests/features/applauncher/launcher1_app_test.py +28 -0
- datalab/tests/features/applauncher/launcher2_app_test.py +30 -0
- datalab/tests/features/common/__init__.py +1 -0
- datalab/tests/features/common/add_metadata_app_test.py +134 -0
- datalab/tests/features/common/add_metadata_unit_test.py +267 -0
- datalab/tests/features/common/annotations_management_unit_test.py +152 -0
- datalab/tests/features/common/auto_analysis_recompute_unit_test.py +240 -0
- datalab/tests/features/common/createobject_unit_test.py +50 -0
- datalab/tests/features/common/geometry_results_app_test.py +135 -0
- datalab/tests/features/common/interactive_processing_test.py +1109 -0
- datalab/tests/features/common/io_app_test.py +75 -0
- datalab/tests/features/common/large_results_app_test.py +187 -0
- datalab/tests/features/common/metadata_all_patterns_test.py +103 -0
- datalab/tests/features/common/metadata_app_test.py +139 -0
- datalab/tests/features/common/metadata_io_unit_test.py +60 -0
- datalab/tests/features/common/misc_app_test.py +236 -0
- datalab/tests/features/common/multiple_geometry_results_unit_test.py +122 -0
- datalab/tests/features/common/multiple_table_results_unit_test.py +64 -0
- datalab/tests/features/common/operation_modes_app_test.py +392 -0
- datalab/tests/features/common/plot_results_app_test.py +278 -0
- datalab/tests/features/common/reorder_app_test.py +75 -0
- datalab/tests/features/common/result_deletion_unit_test.py +96 -0
- datalab/tests/features/common/result_merged_label_unit_test.py +154 -0
- datalab/tests/features/common/result_shape_settings_unit_test.py +223 -0
- datalab/tests/features/common/roi_plotitem_unit_test.py +64 -0
- datalab/tests/features/common/roieditor_unit_test.py +102 -0
- datalab/tests/features/common/save_to_dir_app_test.py +163 -0
- datalab/tests/features/common/save_to_dir_unit_test.py +474 -0
- datalab/tests/features/common/stat_app_test.py +40 -0
- datalab/tests/features/common/stats_tools_unit_test.py +77 -0
- datalab/tests/features/common/table_results_app_test.py +52 -0
- datalab/tests/features/common/textimport_unit_test.py +131 -0
- datalab/tests/features/common/uuid_preservation_test.py +281 -0
- datalab/tests/features/common/worker_unit_test.py +402 -0
- datalab/tests/features/control/__init__.py +1 -0
- datalab/tests/features/control/connect_dialog.py +28 -0
- datalab/tests/features/control/embedded1_unit_test.py +304 -0
- datalab/tests/features/control/embedded2_unit_test.py +52 -0
- datalab/tests/features/control/remoteclient_app_test.py +219 -0
- datalab/tests/features/control/remoteclient_unit.py +75 -0
- datalab/tests/features/control/simpleclient_unit_test.py +321 -0
- datalab/tests/features/hdf5/__init__.py +1 -0
- datalab/tests/features/hdf5/h5browser1_unit_test.py +31 -0
- datalab/tests/features/hdf5/h5browser2_unit.py +55 -0
- datalab/tests/features/hdf5/h5browser_app_test.py +77 -0
- datalab/tests/features/hdf5/h5import_app_test.py +25 -0
- datalab/tests/features/hdf5/h5importer_app_test.py +34 -0
- datalab/tests/features/image/__init__.py +1 -0
- datalab/tests/features/image/annotations_app_test.py +28 -0
- datalab/tests/features/image/annotations_unit_test.py +80 -0
- datalab/tests/features/image/average_app_test.py +46 -0
- datalab/tests/features/image/background_dialog_test.py +70 -0
- datalab/tests/features/image/blobs_app_test.py +50 -0
- datalab/tests/features/image/contour_app_test.py +42 -0
- datalab/tests/features/image/contour_fabryperot_app_test.py +51 -0
- datalab/tests/features/image/denoise_app_test.py +31 -0
- datalab/tests/features/image/distribute_on_grid_app_test.py +95 -0
- datalab/tests/features/image/edges_app_test.py +31 -0
- datalab/tests/features/image/erase_app_test.py +21 -0
- datalab/tests/features/image/fft2d_app_test.py +27 -0
- datalab/tests/features/image/flatfield_app_test.py +40 -0
- datalab/tests/features/image/geometry_transform_unit_test.py +396 -0
- datalab/tests/features/image/imagetools_app_test.py +51 -0
- datalab/tests/features/image/imagetools_unit_test.py +27 -0
- datalab/tests/features/image/load_app_test.py +73 -0
- datalab/tests/features/image/morph_app_test.py +32 -0
- datalab/tests/features/image/offsetcorrection_app_test.py +30 -0
- datalab/tests/features/image/peak2d_app_test.py +53 -0
- datalab/tests/features/image/profile_app_test.py +73 -0
- datalab/tests/features/image/profile_dialog_test.py +56 -0
- datalab/tests/features/image/roi_app_test.py +98 -0
- datalab/tests/features/image/roi_circ_app_test.py +62 -0
- datalab/tests/features/image/roi_manipulation_app_test.py +268 -0
- datalab/tests/features/image/roigrid_unit_test.py +60 -0
- datalab/tests/features/image/side_by_side_app_test.py +52 -0
- datalab/tests/features/macro/__init__.py +1 -0
- datalab/tests/features/macro/macro_app_test.py +28 -0
- datalab/tests/features/macro/macroeditor_unit_test.py +102 -0
- datalab/tests/features/signal/__init__.py +1 -0
- datalab/tests/features/signal/baseline_dialog_test.py +53 -0
- datalab/tests/features/signal/deltax_dialog_unit_test.py +34 -0
- datalab/tests/features/signal/fft1d_app_test.py +26 -0
- datalab/tests/features/signal/filter_app_test.py +44 -0
- datalab/tests/features/signal/fitdialog_unit_test.py +50 -0
- datalab/tests/features/signal/interpolation_app_test.py +110 -0
- datalab/tests/features/signal/loadbigsignal_app_test.py +80 -0
- datalab/tests/features/signal/multiple_rois_unit_test.py +132 -0
- datalab/tests/features/signal/pulse_features_app_test.py +118 -0
- datalab/tests/features/signal/pulse_features_roi_app_test.py +55 -0
- datalab/tests/features/signal/roi_app_test.py +78 -0
- datalab/tests/features/signal/roi_manipulation_app_test.py +261 -0
- datalab/tests/features/signal/select_xy_cursor_unit_test.py +46 -0
- datalab/tests/features/signal/signalpeakdetection_dialog_test.py +33 -0
- datalab/tests/features/signal/signals_to_image_app_test.py +98 -0
- datalab/tests/features/signal/xarray_compat_app_test.py +128 -0
- datalab/tests/features/tour_unit_test.py +22 -0
- datalab/tests/features/utilities/__init__.py +1 -0
- datalab/tests/features/utilities/installconf_unit_test.py +21 -0
- datalab/tests/features/utilities/logview_app_test.py +21 -0
- datalab/tests/features/utilities/logview_error.py +24 -0
- datalab/tests/features/utilities/logview_unit_test.py +21 -0
- datalab/tests/features/utilities/memstatus_app_test.py +42 -0
- datalab/tests/features/utilities/settings_unit_test.py +88 -0
- datalab/tests/scenarios/__init__.py +1 -0
- datalab/tests/scenarios/beautiful_app.py +121 -0
- datalab/tests/scenarios/common.py +463 -0
- datalab/tests/scenarios/demo.py +212 -0
- datalab/tests/scenarios/example_app_test.py +47 -0
- datalab/tests/scenarios/scenario_h5_app_test.py +75 -0
- datalab/tests/scenarios/scenario_ima1_app_test.py +34 -0
- datalab/tests/scenarios/scenario_ima2_app_test.py +34 -0
- datalab/tests/scenarios/scenario_mac_app_test.py +58 -0
- datalab/tests/scenarios/scenario_sig1_app_test.py +36 -0
- datalab/tests/scenarios/scenario_sig2_app_test.py +35 -0
- datalab/utils/__init__.py +1 -0
- datalab/utils/conf.py +304 -0
- datalab/utils/dephash.py +105 -0
- datalab/utils/qthelpers.py +633 -0
- datalab/utils/strings.py +34 -0
- datalab/utils/tests.py +0 -0
- datalab/widgets/__init__.py +1 -0
- datalab/widgets/connection.py +138 -0
- datalab/widgets/filedialog.py +91 -0
- datalab/widgets/fileviewer.py +84 -0
- datalab/widgets/fitdialog.py +788 -0
- datalab/widgets/h5browser.py +1048 -0
- datalab/widgets/imagebackground.py +111 -0
- datalab/widgets/instconfviewer.py +175 -0
- datalab/widgets/logviewer.py +80 -0
- datalab/widgets/signalbaseline.py +90 -0
- datalab/widgets/signalcursor.py +208 -0
- datalab/widgets/signaldeltax.py +151 -0
- datalab/widgets/signalpeak.py +199 -0
- datalab/widgets/status.py +249 -0
- datalab/widgets/textimport.py +786 -0
- datalab/widgets/warningerror.py +223 -0
- datalab/widgets/wizard.py +286 -0
- datalab_platform-1.0.1.dist-info/METADATA +121 -0
- datalab_platform-1.0.1.dist-info/RECORD +494 -0
- datalab_platform-0.0.1.dev0.dist-info/METADATA +0 -67
- datalab_platform-0.0.1.dev0.dist-info/RECORD +0 -7
- {datalab_platform-0.0.1.dev0.dist-info → datalab_platform-1.0.1.dist-info}/WHEEL +0 -0
- {datalab_platform-0.0.1.dev0.dist-info → datalab_platform-1.0.1.dist-info}/entry_points.txt +0 -0
- {datalab_platform-0.0.1.dev0.dist-info → datalab_platform-1.0.1.dist-info}/licenses/LICENSE +0 -0
- {datalab_platform-0.0.1.dev0.dist-info → datalab_platform-1.0.1.dist-info}/top_level.txt +0 -0
datalab/gui/processor/base.py
@@ -0,0 +1,2456 @@
+# Copyright (c) DataLab Platform Developers, BSD 3-Clause license, see LICENSE file.
+
+"""
+.. Base processor object (see parent package :mod:`datalab.gui.processor`)
+"""
+
+# pylint: disable=invalid-name  # Allows short reference names like x, y, ...
+
+from __future__ import annotations
+
+import abc
+import multiprocessing
+import time
+import warnings
+from dataclasses import asdict, dataclass
+from enum import Enum, auto
+from multiprocessing.pool import Pool
+from typing import TYPE_CHECKING, Any, Callable, Generic, Literal, Optional
+
+import guidata.dataset as gds
+import numpy as np
+from qtpy import QtCore as QC
+from qtpy import QtWidgets as QW
+from sigima.config import options as sigima_options
+from sigima.enums import Interpolation1DMethod
+from sigima.objects import (
+    GeometryResult,
+    ImageObj,
+    SignalObj,
+    TableResult,
+    TypeROI,
+    TypeROIParam,
+    concat_geometries,
+)
+from sigima.proc.decorator import is_computation_function
+from sigima.tools.signal.interpolation import interpolate
+
+from datalab import env
+from datalab.adapters_metadata import (
+    GeometryAdapter,
+    ResultData,
+    TableAdapter,
+    show_resultdata,
+)
+from datalab.config import Conf, _
+from datalab.gui.processor.catcher import CompOut, wng_err_func
+from datalab.objectmodel import get_short_id, get_uuid, patch_title_with_ids
+from datalab.utils.qthelpers import create_progress_bar, qt_try_except
+from datalab.widgets.warningerror import show_warning_error
+
+if TYPE_CHECKING:
+    from multiprocessing.pool import AsyncResult
+
+    from plotpy.plot import PlotWidget
+
+    from datalab.gui.panel.image import ImagePanel
+    from datalab.gui.panel.signal import SignalPanel
+
+
+@dataclass
+class ProcessingParameters:
+    """Processing parameters stored in object metadata.
+
+    Attributes:
+        func_name: Processing function name
+        pattern: Processing pattern ("1-to-1", "n-to-1", or "2-to-1")
+        param: Processing parameter dataset (optional, for 1-to-1 only)
+        source_uuid: Source object UUID (for 1-to-1 pattern)
+        source_uuids: Source object UUIDs (for n-to-1 and 2-to-1 patterns)
+    """
+
+    func_name: str
+    pattern: str
+    param: gds.DataSet | None = None
+    source_uuid: str | None = None
+    source_uuids: list[str] | None = None
+
+    def set_param_from_json(self, param_json: str | list[str]) -> None:
+        """Set the param attribute from a JSON string or list of JSON strings.
+
+        Args:
+            param_json: JSON string or list of JSON strings representing the parameters
+        """
+        try:
+            if isinstance(param_json, list):
+                # Handle list of JSON strings
+                self.param = [gds.json_to_dataset(p) for p in param_json]
+            else:
+                # Handle single JSON string
+                self.param = gds.json_to_dataset(param_json)
+        except Exception:  # pylint: disable=broad-except
+            warnings.warn(_("Failed to deserialize processing parameters from JSON."))
+            self.param = None
+
+    def to_dict(self) -> dict[str, Any]:
+        """Convert ProcessingParameters to a dictionary.
+
+        Returns:
+            Dictionary representation of ProcessingParameters, ignoring None values.
+        """
+        pp_dict = {k: v for k, v in asdict(self).items() if v is not None}
+        param = pp_dict.pop("param", None)
+        if param is not None:
+            if isinstance(param, list):
+                # Handle list of DataSet objects
+                pp_dict["param_json"] = [gds.dataset_to_json(p) for p in param]
+            else:
+                # Handle single DataSet object
+                pp_dict["param_json"] = gds.dataset_to_json(param)
+        return pp_dict
+
+    @classmethod
+    def from_dict(cls, data: dict[str, Any]) -> ProcessingParameters:
+        """Create ProcessingParameters from a dictionary.
+
+        Args:
+            data: Dictionary representation of ProcessingParameters
+        """
+        instance = cls("", "")  # Temporary values
+        for key, value in data.items():
+            if key == "param_json":
+                instance.set_param_from_json(value)
+            else:
+                setattr(instance, key, value)
+        return instance
+
+
+# Metadata options for storing processing parameters (DataLab-specific)
+PROCESSING_PARAMETERS_OPTION = "processing_parameters"  # Transformation history
+ANALYSIS_PARAMETERS_OPTION = "analysis_parameters"  # Analysis operation (1-to-0)
+
+
+def extract_processing_parameters(
+    obj: SignalObj | ImageObj,
+) -> ProcessingParameters | None:
+    """Extract processing parameters from object metadata.
+
+    This extracts transformation history (1-to-1, n-to-1, 2-to-1, 1-to-n operations).
+    For analysis operations (1-to-0), use extract_analysis_parameters instead.
+
+    Args:
+        obj: Signal or Image object
+
+    Returns:
+        ProcessingParameters instance if processing metadata exists, None otherwise.
+    """
+    try:
+        pp_dict = obj.get_metadata_option(PROCESSING_PARAMETERS_OPTION)
+    except ValueError:
+        return None
+    return ProcessingParameters.from_dict(pp_dict)
+
+
+def extract_analysis_parameters(
+    obj: SignalObj | ImageObj,
+) -> ProcessingParameters | None:
+    """Extract analysis parameters from object metadata.
+
+    This extracts analysis operation parameters (1-to-0 pattern only).
+    For transformation history, use extract_processing_parameters instead.
+
+    Args:
+        obj: Signal or Image object
+
+    Returns:
+        ProcessingParameters instance if analysis metadata exists, None otherwise.
+    """
+    try:
+        pp_dict = obj.get_metadata_option(ANALYSIS_PARAMETERS_OPTION)
+    except ValueError:
+        return None
+    return ProcessingParameters.from_dict(pp_dict)
+
+
+def insert_processing_parameters(
+    obj: SignalObj | ImageObj,
+    pp: ProcessingParameters,
+) -> None:
+    """Insert processing parameters into object metadata.
+
+    This stores transformation history (1-to-1, n-to-1, 2-to-1, 1-to-n) or
+    analysis parameters (1-to-0) in separate metadata options to avoid overwriting
+    transformation history when performing analysis operations.
+
+    Args:
+        obj: Signal or Image object
+        pp: ProcessingParameters instance containing all processing metadata
+    """
+    if pp.pattern == "1-to-0":
+        # Store analysis parameters separately to preserve transformation history
+        obj.set_metadata_option(ANALYSIS_PARAMETERS_OPTION, pp.to_dict())
+    else:
+        # Store transformation history
+        obj.set_metadata_option(PROCESSING_PARAMETERS_OPTION, pp.to_dict())
+
+
+# Enable multiprocessing support for Windows, with frozen executable (e.g. PyInstaller)
+multiprocessing.freeze_support()
+
+# Set start method to 'spawn' for Linux (default is 'fork' which is not safe here
+# because of the use of Qt and multithreading) - for other OS, the default is
+# 'spawn' anyway
+try:
+    multiprocessing.set_start_method("spawn")
+except RuntimeError:
+    # This exception is raised if the method is already set (this may happen because
+    # this module is imported more than once, e.g. when running tests)
+    pass
+
+
+COMPUTATION_TIP = _(
+    "DataLab relies on various libraries to perform the computation. During the "
+    "computation, errors may occur because of the data (e.g. division by zero, "
+    "unexpected data type, etc.) or because of the libraries (e.g. memory error, "
+    "etc.). If you encounter an error, before reporting it, please ensure that "
+    "the computation is correct, by checking the data and the parameters."
+)
+
+
+POOL: Pool | None = None
+
+
+def run_with_env(func: Callable, args: tuple, env_json: str) -> CompOut:
+    """Wrapper to apply environment config before calling func
+
+    Args:
+        func: function to call
+        args: function arguments
+        env_json: environment configuration string (as returned by ``sigima_options.get_env()``)
+
+    Returns:
+        Computation output object containing the result, error message,
+        or warning message.
+    """
+    sigima_options.set_env(env_json)
+    sigima_options.ensure_loaded_from_env()  # reload options from the environment
+    return wng_err_func(func, args)
+
+
+class WorkerState(Enum):
+    """Worker states for computation lifecycle."""
+
+    IDLE = auto()  # Ready to start new computation
+    STARTING = auto()  # Computation starting (prevents race conditions)
+    RUNNING = auto()  # Computation in progress
+    FINISHED = auto()  # Computation completed, result available
+
+
+class WorkerStateMachine:
+    """State machine for managing worker computation lifecycle.
+
+    This class handles state transitions for worker computations,
+    ensuring valid state flow and preventing invalid operations.
+    """
+
+    def __init__(self) -> None:
+        """Initialize the state machine in IDLE state."""
+        self._current_state = WorkerState.IDLE
+
+    @property
+    def current_state(self) -> WorkerState:
+        """Get the current state.
+
+        Returns:
+            Current WorkerState.
+        """
+        return self._current_state
+
+    def transition_to(self, target_state: WorkerState) -> None:
+        """Transition to the specified target state.
+
+        Args:
+            target_state: The state to transition to.
+
+        Raises:
+            ValueError: If the transition is not valid from the current state.
+        """
+        # Define valid state transitions
+        valid_transitions = {
+            WorkerState.IDLE: {WorkerState.STARTING},
+            WorkerState.STARTING: {WorkerState.RUNNING},
+            WorkerState.RUNNING: {WorkerState.FINISHED},
+            WorkerState.FINISHED: {WorkerState.IDLE},
+        }
+
+        # Allow transitions to the same state (no-op)
+        if target_state == self._current_state:
+            return
+
+        # Check if transition is valid
+        allowed_targets = valid_transitions.get(self._current_state, set())
+        if target_state not in allowed_targets:
+            raise ValueError(
+                f"Invalid transition from {self._current_state} to {target_state}. "
+                f"Valid transitions: {allowed_targets}"
+            )
+
+        self._current_state = target_state
+
+    def reset_to_idle(self) -> None:
+        """Reset state to IDLE unconditionally.
+
+        This is used for restart/cancel operations where we need
+        to force the state back to IDLE regardless of current state.
+        """
+        self._current_state = WorkerState.IDLE
+
+
+
|
|
308
|
+
class Worker:
|
|
309
|
+
"""Multiprocessing worker, to run long-running tasks in a separate process"""
|
|
310
|
+
|
|
311
|
+
def __init__(self) -> None:
|
|
312
|
+
self.asyncresult: AsyncResult = None
|
|
313
|
+
self.state_machine = WorkerStateMachine()
|
|
314
|
+
|
|
315
|
+
@staticmethod
|
|
316
|
+
def create_pool() -> None:
|
|
317
|
+
"""Create multiprocessing pool"""
|
|
318
|
+
global POOL # pylint: disable=global-statement
|
|
319
|
+
# Create a pool with one process
|
|
320
|
+
POOL = Pool(processes=1) # pylint: disable=not-callable,consider-using-with
|
|
321
|
+
|
|
322
|
+
@staticmethod
|
|
323
|
+
def terminate_pool(wait: bool = False) -> None:
|
|
324
|
+
"""Terminate multiprocessing pool.
|
|
325
|
+
|
|
326
|
+
Args:
|
|
327
|
+
wait: wait for all tasks to finish. Defaults to False.
|
|
328
|
+
"""
|
|
329
|
+
global POOL # pylint: disable=global-statement
|
|
330
|
+
if POOL is not None:
|
|
331
|
+
if wait:
|
|
332
|
+
# Close the pool properly (wait for all tasks to finish)
|
|
333
|
+
POOL.close()
|
|
334
|
+
else:
|
|
335
|
+
# Terminate the pool and stop the timer
|
|
336
|
+
POOL.terminate()
|
|
337
|
+
POOL.join()
|
|
338
|
+
POOL = None
|
|
339
|
+
|
|
340
|
+
def restart_pool(self) -> None:
|
|
341
|
+
"""Terminate and recreate the pool"""
|
|
342
|
+
# Terminate the process and stop the timer
|
|
343
|
+
Worker.terminate_pool(wait=False)
|
|
344
|
+
# Recreate the pool for the next computation
|
|
345
|
+
Worker.create_pool()
|
|
346
|
+
# Reset worker state after pool restart
|
|
347
|
+
self.asyncresult = None
|
|
348
|
+
self.state_machine.reset_to_idle()
|
|
349
|
+
|
|
350
|
+
def run(self, func: Callable, args: tuple[Any]) -> None:
|
|
351
|
+
"""Run computation.
|
|
352
|
+
|
|
353
|
+
Args:
|
|
354
|
+
func: function to run
|
|
355
|
+
args: arguments
|
|
356
|
+
|
|
357
|
+
Raises:
|
|
358
|
+
ValueError: If not in IDLE state or pool is not available.
|
|
359
|
+
"""
|
|
360
|
+
# Check if we can start computation
|
|
361
|
+
if self.state_machine.current_state != WorkerState.IDLE:
|
|
362
|
+
current_state = self.state_machine.current_state
|
|
363
|
+
raise ValueError(f"Cannot start computation from {current_state} state")
|
|
364
|
+
|
|
365
|
+
# Transition to starting state
|
|
366
|
+
self.state_machine.transition_to(WorkerState.STARTING)
|
|
367
|
+
|
|
368
|
+
global POOL # pylint: disable=global-statement,global-variable-not-assigned
|
|
369
|
+
if POOL is None:
|
|
370
|
+
raise ValueError("Multiprocessing pool is not available")
|
|
371
|
+
|
|
372
|
+
# Start the computation
|
|
373
|
+
env_json = sigima_options.get_env()
|
|
374
|
+
self.asyncresult = POOL.apply_async(run_with_env, (func, args, env_json))
|
|
375
|
+
|
|
376
|
+
# Transition to running state
|
|
377
|
+
self.state_machine.transition_to(WorkerState.RUNNING)
|
|
378
|
+
|
|
379
|
+
def restart(self) -> None:
|
|
380
|
+
"""Restart/cancel current computation"""
|
|
381
|
+
current_state = self.state_machine.current_state
|
|
382
|
+
|
|
383
|
+
if current_state == WorkerState.IDLE:
|
|
384
|
+
return # Already idle, nothing to restart
|
|
385
|
+
if current_state == WorkerState.STARTING:
|
|
386
|
+
# If we're still starting, just go back to idle
|
|
387
|
+
self.asyncresult = None
|
|
388
|
+
elif current_state == WorkerState.RUNNING:
|
|
389
|
+
# Cancel the running computation - use restart_pool for consistency
|
|
390
|
+
self.restart_pool()
|
|
391
|
+
return # restart_pool already handles state reset
|
|
392
|
+
if current_state == WorkerState.FINISHED:
|
|
393
|
+
# Clean up and go to idle
|
|
394
|
+
self.asyncresult = None
|
|
395
|
+
|
|
396
|
+
# Let state machine handle the transition to idle
|
|
397
|
+
self.state_machine.reset_to_idle()
|
|
398
|
+
|
|
399
|
+
def close(self) -> None:
|
|
400
|
+
"""Close worker: close pool properly and wait for all tasks to finish"""
|
|
401
|
+
# Close multiprocessing Pool properly, but only if no computation is running,
|
|
402
|
+
# to avoid blocking the GUI at exit (so, when wait=True, we wait for the
|
|
403
|
+
# task to finish before closing the pool but there is actually no task running,
|
|
404
|
+
# so the pool is closed immediately but *properly*)
|
|
405
|
+
Worker.terminate_pool(wait=self.asyncresult is None)
|
|
406
|
+
|
|
407
|
+
def is_computation_finished(self) -> bool:
|
|
408
|
+
"""Return True if computation is finished.
|
|
409
|
+
|
|
410
|
+
Returns:
|
|
411
|
+
bool: True if computation is finished
|
|
412
|
+
"""
|
|
413
|
+
current_state = self.state_machine.current_state
|
|
414
|
+
|
|
415
|
+
if current_state == WorkerState.IDLE:
|
|
416
|
+
return True # No computation has been started
|
|
417
|
+
if current_state == WorkerState.STARTING:
|
|
418
|
+
return False # Computation is starting, not finished yet
|
|
419
|
+
if current_state == WorkerState.FINISHED:
|
|
420
|
+
return True # Already finished
|
|
421
|
+
if current_state == WorkerState.RUNNING:
|
|
422
|
+
if self.asyncresult is None:
|
|
423
|
+
return False # Should not happen, but defensive
|
|
424
|
+
finished = self.asyncresult.ready()
|
|
425
|
+
if finished:
|
|
426
|
+
# Transition to finished state
|
|
427
|
+
self.state_machine.transition_to(WorkerState.FINISHED)
|
|
428
|
+
return finished
|
|
429
|
+
raise ValueError(f"Invalid worker state: {current_state}")
|
|
430
|
+
|
|
431
|
+
def get_result(self) -> CompOut:
|
|
432
|
+
"""Return computation result.
|
|
433
|
+
|
|
434
|
+
Returns:
|
|
435
|
+
CompOut: computation result
|
|
436
|
+
|
|
437
|
+
Raises:
|
|
438
|
+
ValueError: If not in FINISHED state or no result available.
|
|
439
|
+
"""
|
|
440
|
+
# Check if we can get result
|
|
441
|
+
if self.state_machine.current_state != WorkerState.FINISHED:
|
|
442
|
+
current_state = self.state_machine.current_state
|
|
443
|
+
raise ValueError(f"Cannot get result from {current_state} state")
|
|
444
|
+
|
|
445
|
+
if self.asyncresult is None:
|
|
446
|
+
raise ValueError("No result available")
|
|
447
|
+
|
|
448
|
+
# Get result and clean up (ensure cleanup happens even if exception occurs)
|
|
449
|
+
try:
|
|
450
|
+
result = self.asyncresult.get()
|
|
451
|
+
return result
|
|
452
|
+
finally:
|
|
453
|
+
# Always clean up, even if get() raises an exception
|
|
454
|
+
self.asyncresult = None
|
|
455
|
+
self.state_machine.transition_to(WorkerState.IDLE)
|
|
456
|
+
|
|
457
|
+
def has_result_available(self) -> bool:
|
|
458
|
+
"""Check if computation finished successfully and result is available.
|
|
459
|
+
|
|
460
|
+
Returns:
|
|
461
|
+
True if computation completed successfully and result can be retrieved.
|
|
462
|
+
"""
|
|
463
|
+
return self.state_machine.current_state == WorkerState.FINISHED
|
|
464
|
+
|
|
465
|
+
|
+def is_pairwise_mode() -> bool:
+    """Return True if operation mode is pairwise.
+
+    Returns:
+        bool: True if operation mode is pairwise
+    """
+    state = Conf.proc.operation_mode.get() == "pairwise"
+    return state
+
+
+@dataclass
+class ComputingFeature:
+    """Computing feature dataclass.
+
+    Args:
+        pattern: pattern
+        function: function
+        paramclass: parameter class
+        title: title
+        icon_name: icon name
+        comment: comment
+        edit: whether to edit the parameters
+        obj2_name: name of the second object
+        skip_xarray_compat: whether to skip X-array compatibility check for this feature
+    """
+
+    pattern: Literal["1_to_1", "1_to_0", "1_to_n", "n_to_1", "2_to_1"]
+    function: Optional[Callable] = None
+    paramclass: Optional[type[gds.DataSet]] = None
+    title: Optional[str] = None
+    icon_name: Optional[str] = None
+    comment: Optional[str] = None
+    edit: Optional[bool] = None
+    obj2_name: Optional[str] = None
+    skip_xarray_compat: Optional[bool] = None
+
+    def __post_init__(self):
+        """Validate the function after initialization."""
+        if self.function is not None and not is_computation_function(self.function):
+            raise ValueError(
+                f"'{self.function.__name__}' is not a valid computation function."
+            )
+
+    @property
+    def name(self) -> str:
+        """Return the name of the computing feature."""
+        if self.function is None:
+            raise ValueError(
+                "ComputingFeature must have a 'function' to derive its name."
+            )
+        return self.function.__name__
+
+    @property
+    def action_title(self) -> str:
+        """Return the action title of the computing feature."""
+        title = self.title
+        if (
+            self.paramclass is not None and (self.edit is None or self.edit)
+        ) or self.pattern == "1_to_0":
+            title += "..."
+        return title
+
+
529
|
+
class BaseProcessor(QC.QObject, Generic[TypeROI, TypeROIParam]):
|
|
530
|
+
"""Object handling data processing: operations, processing, analysis.
|
|
531
|
+
|
|
532
|
+
Args:
|
|
533
|
+
panel: panel
|
|
534
|
+
plotwidget: plot widget
|
|
535
|
+
"""
|
|
536
|
+
|
|
537
|
+
SIG_ADD_SHAPE = QC.Signal(str)
|
|
538
|
+
PARAM_DEFAULTS: dict[str, gds.DataSet] = {}
|
|
539
|
+
|
|
540
|
+
def __init__(self, panel: SignalPanel | ImagePanel, plotwidget: PlotWidget):
|
|
541
|
+
super().__init__()
|
|
542
|
+
self.panel = panel
|
|
543
|
+
self.mainwindow = panel.mainwindow
|
|
544
|
+
self.plotwidget = plotwidget
|
|
545
|
+
self.worker: Worker | None = None
|
|
546
|
+
self.set_process_isolation_enabled(Conf.main.process_isolation_enabled.get())
|
|
547
|
+
self.computing_registry: dict[str, ComputingFeature] = {}
|
|
548
|
+
self.register_computations()
|
|
549
|
+
|
|
550
|
+
def close(self):
|
|
551
|
+
"""Close processor properly"""
|
|
552
|
+
if self.worker is not None:
|
|
553
|
+
self.worker.close()
|
|
554
|
+
self.worker = None
|
|
555
|
+
|
|
556
|
+
def set_process_isolation_enabled(self, enabled: bool) -> None:
|
|
557
|
+
"""Set process isolation enabled.
|
|
558
|
+
|
|
559
|
+
Args:
|
|
560
|
+
enabled: enabled
|
|
561
|
+
"""
|
|
562
|
+
if enabled:
|
|
563
|
+
if self.worker is None:
|
|
564
|
+
self.worker = Worker()
|
|
565
|
+
Worker.create_pool()
|
|
566
|
+
else:
|
|
567
|
+
if self.worker is not None:
|
|
568
|
+
self.worker.terminate_pool()
|
|
569
|
+
self.worker = None
|
|
570
|
+
|
|
571
|
+
def _is_signal_panel(self) -> bool:
|
|
572
|
+
"""Check if the current panel is a signal panel.
|
|
573
|
+
|
|
574
|
+
Returns:
|
|
575
|
+
True if processing signals, False if processing images
|
|
576
|
+
"""
|
|
577
|
+
return self.panel.PARAMCLASS == SignalObj
|
|
578
|
+
|
|
579
|
+
def _check_signal_xarray_compatibility(
|
|
580
|
+
self, signals: list[SignalObj], progress: QW.QProgressDialog | None = None
|
|
581
|
+
) -> tuple[list[SignalObj], bool] | None:
|
|
582
|
+
"""Check X-array compatibility for multiple signals and handle conflicts.
|
|
583
|
+
|
|
584
|
+
Args:
|
|
585
|
+
signals: List of signal objects to check
|
|
586
|
+
progress: Progress dialog (if method is called from a long-running task,
|
|
587
|
+
we need to handle the progress dialog: the dialog will show up after a
|
|
588
|
+
short delay on top of the message box if we don't handle it here)
|
|
589
|
+
|
|
590
|
+
Returns:
|
|
591
|
+
Tuple of (signals, yes_to_all_selected) where signals is the list of
|
|
592
|
+
signals (potentially with interpolated signals) and yes_to_all_selected
|
|
593
|
+
is True if user chose "Yes to All". Returns None if user canceled.
|
|
594
|
+
"""
|
|
595
|
+
if not self._is_signal_panel() or len(signals) <= 1:
|
|
596
|
+
return signals, False
|
|
597
|
+
|
|
598
|
+
initial_duration = 0
|
|
599
|
+
if progress is not None:
|
|
600
|
+
initial_duration = progress.minimumDuration()
|
|
601
|
+
# Set progress dialog minimum duration to a very high value to effectively
|
|
602
|
+
# hide it if it shows up (we handle the dialog manually here)
|
|
603
|
+
progress.setMinimumDuration(2000000)
|
|
604
|
+
QW.QApplication.processEvents()
|
|
605
|
+
|
|
606
|
+
# Get X arrays for comparison
|
|
607
|
+
x_arrays = [sig.x for sig in signals]
|
|
608
|
+
|
|
609
|
+
# Check if all X arrays are identical
|
|
610
|
+
x_arrays_identical = True
|
|
611
|
+
if len(x_arrays) > 1:
|
|
612
|
+
# Compare sizes first
|
|
613
|
+
sizes = [len(x) for x in x_arrays]
|
|
614
|
+
if len(set(sizes)) > 1:
|
|
615
|
+
x_arrays_identical = False
|
|
616
|
+
else:
|
|
617
|
+
# Same sizes - check if xmin and xmax are also the same
|
|
618
|
+
xmins = [x.min() for x in x_arrays]
|
|
619
|
+
xmaxs = [x.max() for x in x_arrays]
|
|
620
|
+
# Use relative tolerance for floating point comparison
|
|
621
|
+
if not (
|
|
622
|
+
np.allclose(xmins, xmins[0], rtol=1e-12)
|
|
623
|
+
and np.allclose(xmaxs, xmaxs[0], rtol=1e-12)
|
|
624
|
+
):
|
|
625
|
+
x_arrays_identical = False
|
|
626
|
+
|
|
627
|
+
# If X arrays are identical, proceed normally
|
|
628
|
+
if x_arrays_identical:
|
|
629
|
+
if initial_duration > 0:
|
|
630
|
+
# Restore initial progress dialog duration
|
|
631
|
+
progress.setMinimumDuration(initial_duration)
|
|
632
|
+
return signals, False
|
|
633
|
+
|
|
634
|
+
# X arrays differ - handle based on configuration
|
|
635
|
+
behavior = Conf.proc.xarray_compat_behavior.get("ask")
|
|
636
|
+
yes_to_all_selected = False
|
|
637
|
+
|
|
638
|
+
if behavior == "ask" and not env.execenv.unattended:
|
|
639
|
+
# Create custom message box with "Yes to All" option
|
|
640
|
+
msg_box = QW.QMessageBox(self.mainwindow)
|
|
641
|
+
msg_box.setWindowTitle(_("X-array incompatibility"))
|
|
642
|
+
msg_box.setText(
|
|
643
|
+
_(
|
|
644
|
+
"The selected signals have different X arrays.\n\n"
|
|
645
|
+
"To perform the computation, signals need to be interpolated "
|
|
646
|
+
"to match a common X array.\n\n"
|
|
647
|
+
"Do you want to continue with automatic interpolation?"
|
|
648
|
+
)
|
|
649
|
+
)
|
|
650
|
+
msg_box.setIcon(QW.QMessageBox.Icon.Question)
|
|
651
|
+
|
|
652
|
+
# Add custom buttons
|
|
653
|
+
msg_box.addButton(_("Yes"), QW.QMessageBox.ButtonRole.YesRole)
|
|
654
|
+
yes_all_button = msg_box.addButton(
|
|
655
|
+
_("Yes to All"), QW.QMessageBox.ButtonRole.YesRole
|
|
656
|
+
)
|
|
657
|
+
no_button = msg_box.addButton(_("No"), QW.QMessageBox.ButtonRole.NoRole)
|
|
658
|
+
msg_box.setDefaultButton(no_button)
|
|
659
|
+
|
|
660
|
+
# Execute dialog and get user choice
|
|
661
|
+
msg_box.exec()
|
|
662
|
+
clicked_button = msg_box.clickedButton()
|
|
663
|
+
|
|
664
|
+
if clicked_button == no_button:
|
|
665
|
+
return None
|
|
666
|
+
if clicked_button == yes_all_button:
|
|
667
|
+
yes_to_all_selected = True
|
|
668
|
+
|
|
669
|
+
# Perform interpolation to the smallest X array
|
|
670
|
+
sizes = [len(x) for x in x_arrays]
|
|
671
|
+
min_size_idx = np.argmin(sizes)
|
|
672
|
+
target_x = x_arrays[min_size_idx]
|
|
673
|
+
|
|
674
|
+
interpolated_signals = []
|
|
675
|
+
for i, sig in enumerate(signals):
|
|
676
|
+
if i == min_size_idx:
|
|
677
|
+
# Keep the target signal as-is
|
|
678
|
+
interpolated_signals.append(sig)
|
|
679
|
+
else:
|
|
680
|
+
# Create interpolated copy
|
|
681
|
+
interpolated_sig = sig.copy(
|
|
682
|
+
title=f"{sig.title} (interpolated)", all_metadata=True
|
|
683
|
+
)
|
|
684
|
+
x_orig, y_orig = sig.x, sig.y
|
|
685
|
+
|
|
686
|
+
# Interpolate using linear method (safe default)
|
|
687
|
+
y_new = interpolate(
|
|
688
|
+
x_orig,
|
|
689
|
+
y_orig,
|
|
690
|
+
target_x,
|
|
691
|
+
Interpolation1DMethod.LINEAR,
|
|
692
|
+
fill_value=None,
|
|
693
|
+
)
|
|
694
|
+
|
|
695
|
+
interpolated_sig.set_xydata(target_x, y_new)
|
|
696
|
+
interpolated_signals.append(interpolated_sig)
|
|
697
|
+
|
|
698
|
+
signals = interpolated_signals
|
|
699
|
+
|
|
700
|
+
if initial_duration > 0:
|
|
701
|
+
# Restore initial progress dialog duration
|
|
702
|
+
progress.setMinimumDuration(initial_duration)
|
|
703
|
+
|
|
704
|
+
return signals, yes_to_all_selected
|
|
705
|
+
|
+    def _add_object_to_appropriate_panel(
+        self,
+        new_obj: SignalObj | ImageObj,
+        group_id: str | None = None,
+        use_group_for_non_native: bool = True,
+    ) -> None:
+        """Add object to the appropriate panel based on its type.
+
+        For native objects (e.g., SignalObj in Signal panel, ImageObj in Image panel),
+        adds to the current panel. For non-native objects (e.g., ImageObj created in
+        Signal panel), adds to the target panel via mainwindow.
+
+        Args:
+            new_obj: Object to add
+            group_id: Group ID to add the object to (optional)
+            use_group_for_non_native: If True, use group_id even for non-native objects.
+                If False, non-native objects are added to default group. Set to False when
+                group_id is from the source panel and object goes to a different panel.
+        """
+        is_new_obj_native = isinstance(new_obj, self.panel.PARAMCLASS)
+        if is_new_obj_native:
+            self.panel.add_object(new_obj, group_id=group_id)
+        else:
+            if use_group_for_non_native:
+                self.panel.mainwindow.add_object(new_obj, group_id=group_id)
+            else:
+                self.panel.mainwindow.add_object(new_obj)
+
+    def _create_group_for_result(
+        self, new_obj: SignalObj | ImageObj, group_name: str
+    ) -> str:
+        """Create a group in the appropriate panel for the result object.
+
+        For native objects, creates group in current panel. For non-native objects,
+        creates group in the target panel.
+
+        Args:
+            new_obj: Result object to determine target panel
+            group_name: Name for the new group
+
+        Returns:
+            UUID of the created group
+        """
+        is_new_obj_native = isinstance(new_obj, self.panel.PARAMCLASS)
+        if is_new_obj_native:
+            return get_uuid(self.panel.add_group(group_name))
+        # Create group in target panel for non-native objects
+        target_panel = (
+            self.panel.mainwindow.signalpanel
+            if isinstance(new_obj, SignalObj)
+            else self.panel.mainwindow.imagepanel
+        )
+        return get_uuid(target_panel.add_group(group_name))
+
+    @abc.abstractmethod
+    def register_operations(self) -> None:
+        """Register operations."""
+
+    @abc.abstractmethod
+    def register_processing(self) -> None:
+        """Register processing functions."""
+
+    @abc.abstractmethod
+    def register_analysis(self) -> None:
+        """Register analysis functions."""
+
+    def register_computations(self) -> None:
+        """Register computations."""
+        self.register_operations()
+        self.register_processing()
+        self.register_analysis()
+
+    # pylint: disable=unused-argument
+    def postprocess_1_to_0_result(
+        self, obj: SignalObj | ImageObj, result: GeometryResult | TableResult
+    ) -> bool:
+        """Post-process results from 1-to-0 operations (hook method).
+
+        This method is called after a 1-to-0 computation function has been executed
+        and the result has been added to the object's metadata. Subclasses can
+        override this method to perform additional processing on the result.
+
+        Args:
+            obj: The object that was analyzed
+            result: The analysis result (GeometryResult or TableResult)
+
+        Returns:
+            True if the object was modified and needs a plot refresh, False otherwise
+        """
+        # Default implementation does nothing and needs no refresh
+        return False
+
+    def has_param_defaults(self, paramclass: type[gds.DataSet]) -> bool:
+        """Return True if parameter defaults are available.
+
+        Args:
+            paramclass: parameter class
+
+        Returns:
+            bool: True if parameter defaults are available
+        """
+        return paramclass.__name__ in self.PARAM_DEFAULTS
+
+    def update_param_defaults(self, param: gds.DataSet) -> None:
+        """Update parameter defaults.
+
+        Args:
+            param: parameters
+        """
+        key = param.__class__.__name__
+        pdefaults = self.PARAM_DEFAULTS.get(key)
+        if pdefaults is not None:
+            gds.update_dataset(param, pdefaults)
+        self.PARAM_DEFAULTS[key] = param
+
+    def init_param(
+        self,
+        param: gds.DataSet,
+        paramclass: type[gds.DataSet],
+        title: str,
+        comment: str | None = None,
+    ) -> tuple[bool, gds.DataSet]:
+        """Initialize processing parameters.
+
+        Args:
+            param: parameter
+            paramclass: parameter class
+            title: title
+            comment: comment
+
+        Returns:
+            Tuple (edit, param) where edit is True if parameters have been edited,
+            False otherwise.
+        """
+        edit = param is None
+        if edit:
+            param = paramclass(title, comment)
+            self.update_param_defaults(param)
+        if hasattr(param, "update_from_obj"):
+            obj = self.panel.objview.get_sel_objects(include_groups=True)[0]
+            param.update_from_obj(obj)
+        return edit, param
+
+    def handle_output(
+        self, compout: CompOut, context: str, progress: QW.QProgressDialog
+    ) -> SignalObj | ImageObj | GeometryResult | TableResult | None:
|
|
852
|
+
"""Handle computation output: if error, display error message,
|
|
853
|
+
if warning, display warning message.
|
|
854
|
+
|
|
855
|
+
Args:
|
|
856
|
+
compout: computation output
|
|
857
|
+
context: context (e.g. "Computing: Gaussian filter")
|
|
858
|
+
progress: progress dialog
|
|
859
|
+
|
|
860
|
+
Returns:
|
|
861
|
+
Output object: a signal or image object, or a geometry/table result object,
|
|
862
|
+
or None if error
|
|
863
|
+
"""
|
|
864
|
+
if compout.error_msg or compout.warning_msg:
|
|
865
|
+
mindur = progress.minimumDuration()
|
|
866
|
+
progress.setMinimumDuration(1000000)
|
|
867
|
+
if compout.error_msg:
|
|
868
|
+
show_warning_error(
|
|
869
|
+
self.panel, "error", context, compout.error_msg, COMPUTATION_TIP
|
|
870
|
+
)
|
|
871
|
+
if compout.warning_msg:
|
|
872
|
+
show_warning_error(self.panel, "warning", context, compout.warning_msg)
|
|
873
|
+
progress.setMinimumDuration(mindur)
|
|
874
|
+
if compout.error_msg:
|
|
875
|
+
return None
|
|
876
|
+
result = compout.result
|
|
877
|
+
return result
|
|
878
|
+
|
|
879
|
+
def _merge_geometry_results_for_n_to_1(
|
|
880
|
+
self, result_obj: SignalObj | ImageObj, src_obj_list: list[SignalObj | ImageObj]
|
|
881
|
+
) -> None:
|
|
882
|
+
"""Merge geometry results from source objects into the result object.
|
|
883
|
+
|
|
884
|
+
This method handles geometry result merging for n_to_1 operations when
|
|
885
|
+
keep_results is enabled, providing a clean alternative to monkey patching.
|
|
886
|
+
|
|
887
|
+
Args:
|
|
888
|
+
result_obj: The result object from the computation
|
|
889
|
+
src_obj_list: The list of source objects used in the computation
|
|
890
|
+
"""
|
|
891
|
+
# Only merge if keep_results is enabled and we have multiple source objects
|
|
892
|
+
if not Conf.proc.keep_results.get() or len(src_obj_list) <= 1:
|
|
893
|
+
return
|
|
894
|
+
|
|
895
|
+
# Group geometry results by title for merging
|
|
896
|
+
geometry_by_title = {}
|
|
897
|
+
|
|
898
|
+
# Collect all geometry results from all source objects
|
|
899
|
+
for src_obj in src_obj_list:
|
|
900
|
+
for geom_adapter in GeometryAdapter.iterate_from_obj(src_obj):
|
|
901
|
+
title = geom_adapter.title
|
|
902
|
+
if title not in geometry_by_title:
|
|
903
|
+
geometry_by_title[title] = []
|
|
904
|
+
geometry_by_title[title].append(geom_adapter.result)
|
|
905
|
+
|
|
906
|
+
# Only proceed if we have geometry results to merge
|
|
907
|
+
if not geometry_by_title:
|
|
908
|
+
return
|
|
909
|
+
|
|
910
|
+
# Remove any existing geometry results from the result object
|
|
911
|
+
result_keys_to_remove = []
|
|
912
|
+
for key in result_obj.metadata.keys():
|
|
913
|
+
if GeometryAdapter.match(key, result_obj.metadata[key]):
|
|
914
|
+
result_keys_to_remove.append(key)
|
|
915
|
+
|
|
916
|
+
for key in result_keys_to_remove:
|
|
917
|
+
result_obj.metadata.pop(key, None)
|
|
918
|
+
|
|
919
|
+
# Merge and add back concatenated geometry results
|
|
920
|
+
for title, geometries in geometry_by_title.items():
|
|
921
|
+
if len(geometries) > 1:
|
|
922
|
+
# Concatenate multiple geometry results
|
|
923
|
+
merged_geometry = concat_geometries(title, geometries)
|
|
924
|
+
adapter = GeometryAdapter(merged_geometry)
|
|
925
|
+
adapter.add_to(result_obj)
|
|
926
|
+
elif len(geometries) == 1:
|
|
927
|
+
# Just one geometry result, add it
|
|
928
|
+
adapter = GeometryAdapter(geometries[0])
|
|
929
|
+
adapter.add_to(result_obj)
|
|
930
|
+
|
|
931
|
+
def _handle_keep_results(self, result_obj: SignalObj | ImageObj) -> None:
|
|
932
|
+
"""Handle keep_results logic by removing all results if keep_results is False.
|
|
933
|
+
|
|
934
|
+
This method implements the logic that was previously in Sigima's dst_1_to_1,
|
|
935
|
+
dst_n_to_1, and dst_2_to_1 functions, where results were deleted from the
|
|
936
|
+
destination object when keep_results was False.
|
|
937
|
+
|
|
938
|
+
Args:
|
|
939
|
+
result_obj: The result object from the computation
|
|
940
|
+
"""
|
|
941
|
+
if not Conf.proc.keep_results.get():
|
|
942
|
+
# Remove all table and geometry results when keep_results is disabled
|
|
943
|
+
TableAdapter.remove_all_from(result_obj)
|
|
944
|
+
GeometryAdapter.remove_all_from(result_obj)
|
|
945
|
+
|
|
946
|
+
def auto_recompute_analysis(self, obj: SignalObj | ImageObj) -> None:
|
|
947
|
+
"""Automatically recompute analysis (1-to-0) operations after data changes.
|
|
948
|
+
|
|
949
|
+
This method checks if the object has 1-to-0 analysis parameters (analysis
|
|
950
|
+
operations like statistics, measurements, etc.) and automatically recomputes
|
|
951
|
+
the analysis to update the results based on the modified data.
|
|
952
|
+
|
|
953
|
+
This should be called after:
|
|
954
|
+
- ROI modifications (which change the data to be analyzed)
|
|
955
|
+
- Data transformations via recompute_1_to_1 (which modify data in-place)
|
|
956
|
+
|
|
957
|
+
Note: Should be called explicitly after ROI modifications, not during
|
|
958
|
+
selection changes, to avoid interfering with the ROI change detection
|
|
959
|
+
mechanism used by the mask refresh system.
|
|
960
|
+
|
|
961
|
+
Args:
|
|
962
|
+
obj: The object whose data was modified
|
|
963
|
+
"""
|
|
964
|
+
# Check if object has 1-to-0 analysis parameters (analysis operations)
|
|
965
|
+
proc_params = extract_analysis_parameters(obj)
|
|
966
|
+
if proc_params is None or proc_params.pattern != "1-to-0":
|
|
967
|
+
return
|
|
968
|
+
|
|
969
|
+
# Get the parameter from processing parameters
|
|
970
|
+
param = proc_params.param
|
|
971
|
+
|
|
972
|
+
# Get the actual function from the function name
|
|
973
|
+
feature = self.get_feature(proc_params.func_name)
|
|
974
|
+
|
|
975
|
+
# Recompute the analysis operation silently
|
|
976
|
+
with Conf.proc.show_result_dialog.temp(False):
|
|
977
|
+
self.compute_1_to_0(feature.function, param, edit=False)
|
|
978
|
+
|
|
979
|
+
# Update the view
|
|
980
|
+
obj_uuid = get_uuid(obj)
|
|
981
|
+
self.panel.objview.update_item(obj_uuid)
|
|
982
|
+
self.panel.refresh_plot(obj_uuid, update_items=True, force=True)
|
|
983
|
+
|
|
984
|
+
def __exec_func(
|
|
985
|
+
self,
|
|
986
|
+
func: Callable,
|
|
987
|
+
args: tuple,
|
|
988
|
+
progress: QW.QProgressDialog,
|
|
989
|
+
) -> CompOut | None:
|
|
990
|
+
"""Execute function, eventually in a separate process.
|
|
991
|
+
|
|
992
|
+
Args:
|
|
993
|
+
func: function to execute
|
|
994
|
+
args: function arguments
|
|
995
|
+
progress: progress dialog
|
|
996
|
+
|
|
997
|
+
Returns:
|
|
998
|
+
Computation output object or None if canceled
|
|
999
|
+
"""
|
|
1000
|
+
QW.QApplication.processEvents()
|
|
1001
|
+
if not progress.wasCanceled():
|
|
1002
|
+
if self.worker is None:
|
|
1003
|
+
# No process isolation: run function directly
|
|
1004
|
+
return wng_err_func(func, args)
|
|
1005
|
+
# Process isolation: run function in a separate process
|
|
1006
|
+
self.worker.run(func, args)
|
|
1007
|
+
while not self.worker.is_computation_finished():
|
|
1008
|
+
QW.QApplication.processEvents()
|
|
1009
|
+
time.sleep(0) # Just yields to other threads - no forced delay
|
|
1010
|
+
if progress.wasCanceled(): # User canceled the operation
|
|
1011
|
+
self.worker.restart() # Cancel computation and reset to idle
|
|
1012
|
+
break
|
|
1013
|
+
# Only get result if computation actually finished (not canceled)
|
|
1014
|
+
if self.worker.has_result_available():
|
|
1015
|
+
return self.worker.get_result()
|
|
1016
|
+
return None
|
|
1017
|
+
|
|
1018
|
+
def recompute_1_to_1(
|
|
1019
|
+
self,
|
|
1020
|
+
func_name: str,
|
|
1021
|
+
obj: SignalObj | ImageObj,
|
|
1022
|
+
param: gds.DataSet | None = None,
|
|
1023
|
+
) -> SignalObj | ImageObj | None:
|
|
1024
|
+
"""Recompute a 1-to-1 processing operation without adding result to panel.
|
|
1025
|
+
|
|
1026
|
+
This method is specifically designed for the interactive re-processing feature
|
|
1027
|
+
where we want to update an existing object in-place. It executes the processing
|
|
1028
|
+
with full multiprocessing support (allowing cancellation) but returns the result
|
|
1029
|
+
without adding it to the panel.
|
|
1030
|
+
|
|
1031
|
+
Args:
|
|
1032
|
+
func_name: Name of the processing function
|
|
1033
|
+
obj: Source object to process
|
|
1034
|
+
param: Processing parameters (optional)
|
|
1035
|
+
|
|
1036
|
+
Returns:
|
|
1037
|
+
New processed object (not added to panel), or None if cancelled or error
|
|
1038
|
+
|
|
1039
|
+
Raises:
|
|
1040
|
+
ValueError: If function is not found in registry
|
|
1041
|
+
"""
|
|
1042
|
+
# Get the function from the registry
|
|
1043
|
+
try:
|
|
1044
|
+
feature = self.get_feature(func_name)
|
|
1045
|
+
except ValueError as exc:
|
|
1046
|
+
raise ValueError(f"Function '{func_name}' not found in registry") from exc
|
|
1047
|
+
|
|
1048
|
+
func = feature.function
|
|
1049
|
+
|
|
1050
|
+
# Create progress dialog with short delay so it appears for long computations
|
|
1051
|
+
with create_progress_bar(self.panel, _("Recomputing..."), max_=1) as progress:
|
|
1052
|
+
progress.setValue(0)
|
|
1053
|
+
progress.setLabelText(_("Processing object with updated parameters..."))
|
|
1054
|
+
|
|
1055
|
+
# Execute with multiprocessing support
|
|
1056
|
+
args = (obj, param) if param is not None else (obj,)
|
|
1057
|
+
comp_out = self.__exec_func(func, args, progress)
|
|
1058
|
+
|
|
1059
|
+
if comp_out is None: # Cancelled by user
|
|
1060
|
+
return None
|
|
1061
|
+
|
|
1062
|
+
# Handle the output
|
|
1063
|
+
new_obj = self.handle_output(comp_out, _("Recomputing"), progress)
|
|
1064
|
+
|
|
1065
|
+
if new_obj is None:
|
|
1066
|
+
return None
|
|
1067
|
+
|
|
1068
|
+
# Handle keep_results logic
|
|
1069
|
+
if isinstance(new_obj, (SignalObj, ImageObj)):
|
|
1070
|
+
self._handle_keep_results(new_obj)
|
|
1071
|
+
|
|
1072
|
+
patch_title_with_ids(new_obj, [obj], get_short_id)
|
|
1073
|
+
return new_obj
|
|
1074
|
+
|
|
1075
|
+
def _compute_1_to_1_subroutine(
|
|
1076
|
+
self, funcs: list[Callable], params: list, title: str
|
|
1077
|
+
) -> None:
|
|
1078
|
+
"""Generic subroutine for 1-to-1 processing.
|
|
1079
|
+
|
|
1080
|
+
Args:
|
|
1081
|
+
funcs: list of functions to execute
|
|
1082
|
+
params: list of parameters
|
|
1083
|
+
title: title of progress bar
|
|
1084
|
+
"""
|
|
1085
|
+
assert len(funcs) == len(params)
|
|
1086
|
+
objs = self.panel.objview.get_sel_objects(include_groups=True)
|
|
1087
|
+
grps = self.panel.objview.get_sel_groups()
|
|
1088
|
+
n_glob = len(objs) * len(params)
|
|
1089
|
+
new_gids = {}
|
|
1090
|
+
with create_progress_bar(self.panel, title, max_=n_glob) as progress:
|
|
1091
|
+
for i_row, obj in enumerate(objs):
|
|
1092
|
+
for i_param, (param, func) in enumerate(zip(params, funcs)):
|
|
1093
|
+
name = func.__name__
|
|
1094
|
+
pvalue = (i_row + 1) * (i_param + 1)
|
|
1095
|
+
pvalue = 0 if pvalue == 1 else pvalue
|
|
1096
|
+
i_title = f"{title} ({pvalue}/{n_glob})"
|
|
1097
|
+
progress.setLabelText(i_title)
|
|
1098
|
+
progress.setValue(pvalue)
|
|
1099
|
+
args = (obj,) if param is None else (obj, param)
|
|
1100
|
+
result = self.__exec_func(func, args, progress)
|
|
1101
|
+
if result is None:
|
|
1102
|
+
break
|
|
1103
|
+
new_obj = self.handle_output(
|
|
1104
|
+
result, _("Computing: %s") % i_title, progress
|
|
1105
|
+
)
|
|
1106
|
+
if new_obj is None:
|
|
1107
|
+
continue
|
|
1108
|
+
assert isinstance(new_obj, (SignalObj, ImageObj))
|
|
1109
|
+
|
|
1110
|
+
patch_title_with_ids(new_obj, [obj], get_short_id)
|
|
1111
|
+
|
|
1112
|
+
# Handle keep_results logic for 1_to_1 operations
|
|
1113
|
+
self._handle_keep_results(new_obj)
|
|
1114
|
+
|
|
1115
|
+
# Store processing metadata for interactive re-processing
|
|
1116
|
+
pp = ProcessingParameters(
|
|
1117
|
+
func_name=name,
|
|
1118
|
+
pattern="1-to-1",
|
|
1119
|
+
param=param,
|
|
1120
|
+
source_uuid=get_uuid(obj),
|
|
1121
|
+
)
|
|
1122
|
+
insert_processing_parameters(new_obj, pp)
|
|
1123
|
+
|
|
1124
|
+
new_gid = None
|
|
1125
|
+
if grps:
|
|
1126
|
+
# If groups are selected, then it means that there is no
|
|
1127
|
+
# individual object selected: we work on groups only
|
|
1128
|
+
old_gid = self.panel.objmodel.get_object_group_id(obj)
|
|
1129
|
+
new_gid = new_gids.get(old_gid)
|
|
1130
|
+
if new_gid is None:
|
|
1131
|
+
# Create a new group for each selected group
|
|
1132
|
+
old_g = self.panel.objmodel.get_group(old_gid)
|
|
1133
|
+
new_gid = self._create_group_for_result(
|
|
1134
|
+
new_obj, f"{name}({get_short_id(old_g)})"
|
|
1135
|
+
)
|
|
1136
|
+
new_gids[old_gid] = new_gid
|
|
1137
|
+
self._add_object_to_appropriate_panel(
|
|
1138
|
+
new_obj, group_id=new_gid, use_group_for_non_native=True
|
|
1139
|
+
)
|
|
1140
|
+
# Select newly created groups, if any
|
|
1141
|
+
for group_id in new_gids.values():
|
|
1142
|
+
self.panel.objview.set_current_item_id(group_id, extend=True)
|
|
1143
|
+
|
|
1144
|
+
def __get_src_grps_gids_objs_nbobj_valid(
|
|
1145
|
+
self, min_group_nb: int
|
|
1146
|
+
) -> tuple[list, list, dict, int]:
|
|
1147
|
+
"""In pairwise mode only: get source groups, group ids, objects,
|
|
1148
|
+
and number of objects. Check if the number of objects is valid.
|
|
1149
|
+
|
|
1150
|
+
Args:
|
|
1151
|
+
min_group_nb: minimum number of groups (typically, 2 for `n1` functions
|
|
1152
|
+
and 1 for `n1n` functions)
|
|
1153
|
+
|
|
1154
|
+
Returns:
|
|
1155
|
+
Tuple (source groups, group ids, objects, number of objects, valid)
|
|
1156
|
+
"""
|
|
1157
|
+
# In pairwise mode, we need to create a new object for each pair of objects
|
|
1158
|
+
objs = self.panel.objview.get_sel_objects(include_groups=True)
|
|
1159
|
+
objmodel = self.panel.objmodel
|
|
1160
|
+
src_grps = sorted(
|
|
1161
|
+
{objmodel.get_group_from_object(obj) for obj in objs},
|
|
1162
|
+
key=objmodel.get_number,
|
|
1163
|
+
)
|
|
1164
|
+
src_gids = [get_uuid(grp) for grp in src_grps]
|
|
1165
|
+
|
|
1166
|
+
# [src_objs dictionary] keys: old group id, values: list of old objects
|
|
1167
|
+
src_objs: dict[str, list[SignalObj | ImageObj]] = {}
|
|
1168
|
+
for src_gid in src_gids:
|
|
1169
|
+
src_objs[src_gid] = [
|
|
1170
|
+
obj for obj in objs if objmodel.get_object_group_id(obj) == src_gid
|
|
1171
|
+
]
|
|
1172
|
+
|
|
1173
|
+
nbobj = len(src_objs[src_gids[0]])
|
|
1174
|
+
|
|
1175
|
+
valid = len(src_grps) >= min_group_nb
|
|
1176
|
+
if not valid:
|
|
1177
|
+
# In pairwise mode, we need selected objects in at least two groups.
|
|
1178
|
+
if env.execenv.unattended:
|
|
1179
|
+
raise ValueError(
|
|
1180
|
+
"Pairwise mode: objects must be selected in at least two groups"
|
|
1181
|
+
)
|
|
1182
|
+
QW.QMessageBox.warning(
|
|
1183
|
+
self.mainwindow,
|
|
1184
|
+
_("Warning"),
|
|
1185
|
+
_(
|
|
1186
|
+
"In pairwise mode, you need to select objects "
|
|
1187
|
+
"in at least two groups."
|
|
1188
|
+
),
|
|
1189
|
+
)
|
|
1190
|
+
if valid:
|
|
1191
|
+
valid = all(len(src_objs[src_gid]) == nbobj for src_gid in src_gids)
|
|
1192
|
+
if not valid:
|
|
1193
|
+
if env.execenv.unattended:
|
|
1194
|
+
raise ValueError(
|
|
1195
|
+
"Pairwise mode: invalid number of objects in each group"
|
|
1196
|
+
)
|
|
1197
|
+
QW.QMessageBox.warning(
|
|
1198
|
+
self.mainwindow,
|
|
1199
|
+
_("Warning"),
|
|
1200
|
+
_(
|
|
1201
|
+
"In pairwise mode, you need to select "
|
|
1202
|
+
"the same number of objects in each group."
|
|
1203
|
+
),
|
|
1204
|
+
)
|
|
1205
|
+
return src_grps, src_gids, src_objs, nbobj, valid
|
|
1206
|
+
|
|
1207
|
+
def compute_1_to_1(
|
|
1208
|
+
self,
|
|
1209
|
+
func: Callable,
|
|
1210
|
+
param: gds.DataSet | None = None,
|
|
1211
|
+
paramclass: type[gds.DataSet] | None = None,
|
|
1212
|
+
title: str | None = None,
|
|
1213
|
+
comment: str | None = None,
|
|
1214
|
+
edit: bool | None = None,
|
|
1215
|
+
) -> None:
|
|
1216
|
+
"""Generic processing method: 1 object in → 1 object out.
|
|
1217
|
+
|
|
1218
|
+
Applies a function independently to each selected object in the active panel.
|
|
1219
|
+
The result of each computation is a new object appended to the same panel.
|
|
1220
|
+
|
|
1221
|
+
Args:
|
|
1222
|
+
func: Function to execute, that takes either `(dst_obj, src_obj)` or
|
|
1223
|
+
`(dst_obj, src_obj, param)` as arguments, where `dst_obj` is the output
|
|
1224
|
+
object, `src_obj` is the input object, and `param` is an optional
|
|
1225
|
+
parameter set.
|
|
1226
|
+
param: Optional parameter instance.
|
|
1227
|
+
paramclass: Optional parameter class for editing.
|
|
1228
|
+
title: Optional progress bar title.
|
|
1229
|
+
comment: Optional comment for parameter dialog.
|
|
1230
|
+
edit: Whether to open the parameter editor before execution.
|
|
1231
|
+
|
|
1232
|
+
.. note::
|
|
1233
|
+
With k selected objects, the method produces k outputs (one per input).
|
|
1234
|
+
|
|
1235
|
+
.. note::
|
|
1236
|
+
This method does not support pairwise mode.
|
|
1237
|
+
"""
|
|
1238
|
+
if (edit is None or param is None) and paramclass is not None:
|
|
1239
|
+
old_edit = edit
|
|
1240
|
+
edit, param = self.init_param(param, paramclass, title, comment)
|
|
1241
|
+
if old_edit is not None:
|
|
1242
|
+
edit = old_edit
|
|
1243
|
+
if param is not None:
|
|
1244
|
+
if edit and not param.edit(parent=self.mainwindow):
|
|
1245
|
+
return
|
|
1246
|
+
self._compute_1_to_1_subroutine([func], [param], title)
|
|
1247
|
+
|
|
1248
|
+
def compute_multiple_1_to_1(
|
|
1249
|
+
self,
|
|
1250
|
+
funcs: list[Callable],
|
|
1251
|
+
params: list[gds.DataSet] | None = None,
|
|
1252
|
+
title: str | None = None,
|
|
1253
|
+
edit: bool | None = None,
|
|
1254
|
+
) -> None:
|
|
1255
|
+
"""Generic processing method: 1 object in → n objects out.
|
|
1256
|
+
|
|
1257
|
+
Applies multiple functions to each selected object, generating multiple
|
|
1258
|
+
outputs per object. The resulting objects are appended to the active panel.
|
|
1259
|
+
|
|
1260
|
+
Args:
|
|
1261
|
+
funcs: List of functions to apply. Each function takes either
|
|
1262
|
+
`(dst_obj, src_obj)` or `(dst_obj, src_obj, param)` as arguments,
|
|
1263
|
+
where `dst_obj` is the output object, `src_obj` is the input object,
|
|
1264
|
+
and `param` is an optional parameter set.
|
|
1265
|
+
params: List of parameter instances corresponding to each function.
|
|
1266
|
+
title: Optional progress bar title.
|
|
1267
|
+
edit: Whether to open the parameter editor before execution.
|
|
1268
|
+
|
|
1269
|
+
.. note::
|
|
1270
|
+
With k selected objects and n outputs per function,
|
|
1271
|
+
the method produces k × n outputs.
|
|
1272
|
+
|
|
1273
|
+
.. note::
|
|
1274
|
+
This method does not support pairwise mode.
|
|
1275
|
+
"""
|
|
1276
|
+
if params is None:
|
|
1277
|
+
params = [None] * len(funcs)
|
|
1278
|
+
else:
|
|
1279
|
+
group = gds.DataSetGroup(params, title=_("Parameters"))
|
|
1280
|
+
if edit and not group.edit(parent=self.mainwindow):
|
|
1281
|
+
return
|
|
1282
|
+
if len(funcs) != len(params):
|
|
1283
|
+
raise ValueError("Number of functions must match number of parameters")
|
|
1284
|
+
self._compute_1_to_1_subroutine(funcs, params, title)
|
|
1285
|
+
|
|
1286
|
+
def compute_1_to_n(
|
|
1287
|
+
self,
|
|
1288
|
+
func: Callable,
|
|
1289
|
+
params: list[gds.DataSet],
|
|
1290
|
+
title: str | None = None,
|
|
1291
|
+
edit: bool | None = None,
|
|
1292
|
+
) -> None:
|
|
1293
|
+
"""Generic processing method: 1 object in → n objects out.
|
|
1294
|
+
|
|
1295
|
+
Applies a single function to each selected object, with n different parameters
|
|
1296
|
+
set, thus generating n outputs per object. The resulting objects are appended to
|
|
1297
|
+
the active panel.
|
|
1298
|
+
|
|
1299
|
+
Args:
|
|
1300
|
+
func: Single function to apply, that takes either `(dst_obj, src_obj)`
|
|
1301
|
+
or `(dst_obj, src_obj, param)` as arguments,
|
|
1302
|
+
where `dst_obj` is the output object, `src_obj` is the input object,
|
|
1303
|
+
and `param` is an optional parameter set.
|
|
1304
|
+
params: List of parameter instances.
|
|
1305
|
+
title: Optional progress bar title.
|
|
1306
|
+
edit: Whether to open the parameter editor before execution.
|
|
1307
|
+
|
|
1308
|
+
.. note::
|
|
1309
|
+
With k selected objects and n parameter sets,
|
|
1310
|
+
the method produces k × n outputs.
|
|
1311
|
+
|
|
1312
|
+
.. note::
|
|
1313
|
+
This method does not support pairwise mode.
|
|
1314
|
+
"""
|
|
1315
|
+
assert params is not None
|
|
1316
|
+
if edit:
|
|
1317
|
+
group = gds.DataSetGroup(params, title=_("Parameters"))
|
|
1318
|
+
if not group.edit(parent=self.mainwindow):
|
|
1319
|
+
return
|
|
1320
|
+
self._compute_1_to_1_subroutine([func] * len(params), params, title)
|
|
1321
|
+
|
|
1322
|
+
def compute_1_to_0(
|
|
1323
|
+
self,
|
|
1324
|
+
func: Callable,
|
|
1325
|
+
param: gds.DataSet | None = None,
|
|
1326
|
+
paramclass: type[gds.DataSet] | None = None,
|
|
1327
|
+
title: str | None = None,
|
|
1328
|
+
comment: str | None = None,
|
|
1329
|
+
edit: bool | None = None,
|
|
1330
|
+
) -> ResultData:
|
|
1331
|
+
"""Generic processing method: 1 object in → no object out.
|
|
1332
|
+
|
|
1333
|
+
Applies a function to each selected object, returning metadata or measurement
|
|
1334
|
+
results (e.g. peak coordinates, statistical properties) without generating
|
|
1335
|
+
new objects. Results are stored in the object's metadata and returned as a
|
|
1336
|
+
ResultData instance.
|
|
1337
|
+
|
|
1338
|
+
Args:
|
|
1339
|
+
func: Function to execute, that takes either `(obj)` or `(obj, param)` as
|
|
1340
|
+
arguments, where `obj` is the input object and `param` is an optional
|
|
1341
|
+
parameter set.
|
|
1342
|
+
param: Optional parameter instance.
|
|
1343
|
+
paramclass: Optional parameter class for editing.
|
|
1344
|
+
title: Optional progress bar title.
|
|
1345
|
+
comment: Optional comment for parameter dialog.
|
|
1346
|
+
edit: Whether to open the parameter editor before execution.
|
|
1347
|
+
|
|
1348
|
+
Returns:
|
|
1349
|
+
ResultData instance containing the results for all processed objects.
|
|
1350
|
+
|
|
1351
|
+
.. note::
|
|
1352
|
+
With k selected objects, the method performs k analyses and produces
|
|
1353
|
+
no output objects.
|
|
1354
|
+
|
|
1355
|
+
.. note::
|
|
1356
|
+
This method does not support pairwise mode.
|
|
1357
|
+
"""
|
|
1358
|
+
if (edit is None or param is None) and paramclass is not None:
|
|
1359
|
+
edit, param = self.init_param(param, paramclass, title, comment)
|
|
1360
|
+
if param is not None:
|
|
1361
|
+
if edit and not param.edit(parent=self.mainwindow):
|
|
1362
|
+
return None
|
|
1363
|
+
objs = self.panel.objview.get_sel_objects(include_groups=True)
|
|
1364
|
+
current_obj = self.panel.objview.get_current_object()
|
|
1365
|
+
title = func.__name__ if title is None else title
|
|
1366
|
+
refresh_needed = False
|
|
1367
|
+
with create_progress_bar(self.panel, title, max_=len(objs)) as progress:
|
|
1368
|
+
rdata = ResultData()
|
|
1369
|
+
for idx, obj in enumerate(objs):
|
|
1370
|
+
pvalue = idx + 1
|
|
1371
|
+
pvalue = 0 if pvalue == 1 else pvalue
|
|
1372
|
+
progress.setValue(pvalue)
|
|
1373
|
+
args = (obj,) if param is None else (obj, param)
|
|
1374
|
+
|
|
1375
|
+
# Execute function
|
|
1376
|
+
compout = self.__exec_func(func, args, progress)
|
|
1377
|
+
if compout is None:
|
|
1378
|
+
break
|
|
1379
|
+
result = self.handle_output(
|
|
1380
|
+
compout, _("Computing: %s") % title, progress
|
|
1381
|
+
)
|
|
1382
|
+
if result is None:
|
|
1383
|
+
continue
|
|
1384
|
+
|
|
1385
|
+
# Using the adapters:
|
|
1386
|
+
if isinstance(result, GeometryResult):
|
|
1387
|
+
adapter = GeometryAdapter(result)
|
|
1388
|
+
elif isinstance(result, TableResult):
|
|
1389
|
+
adapter = TableAdapter(result)
|
|
1390
|
+
else:
|
|
1391
|
+
# For "compute 1 to 0" functions, the result is either a
|
|
1392
|
+
# GeometryResult or TableResult:
|
|
1393
|
+
raise TypeError("Unsupported result type")
|
|
1394
|
+
|
|
1395
|
+
# Add result shape to object's metadata
|
|
1396
|
+
# Pass function name for better parameter context in the Analysis tab
|
|
1397
|
+
adapter.add_to(obj, param)
|
|
1398
|
+
|
|
1399
|
+
# Store processing parameters for auto-recompute on ROI change
|
|
1400
|
+
# This enables automatic recalculation when ROI is modified
|
|
1401
|
+
# Analysis parameters (1-to-0) are stored separately from
|
|
1402
|
+
# transformation history to avoid overwriting the processing chain
|
|
1403
|
+
# when analyzing objects.
|
|
1404
|
+
pp = ProcessingParameters(
|
|
1405
|
+
func_name=func.__name__,
|
|
1406
|
+
pattern="1-to-0",
|
|
1407
|
+
param=param,
|
|
1408
|
+
source_uuid=get_uuid(obj),
|
|
1409
|
+
)
|
|
1410
|
+
insert_processing_parameters(obj, pp)
|
|
1411
|
+
|
|
1412
|
+
# Apply processor-specific post-processing on the result
|
|
1413
|
+
refresh_needed |= self.postprocess_1_to_0_result(obj, result)
|
|
1414
|
+
|
|
1415
|
+
# Append result to result data for later display
|
|
1416
|
+
rdata.append(adapter, obj)
|
|
1417
|
+
|
|
1418
|
+
if obj is current_obj:
|
|
1419
|
+
self.panel.selection_changed(update_items=True)
|
|
1420
|
+
else:
|
|
1421
|
+
self.panel.refresh_plot(get_uuid(obj), True, False)
|
|
1422
|
+
|
|
1423
|
+
# Refresh plot if post-processing modified any objects (e.g., ROI creation)
|
|
1424
|
+
if refresh_needed:
|
|
1425
|
+
self.panel.refresh_plot("selected", only_visible=False, only_existing=True)
|
|
1426
|
+
|
|
1427
|
+
if rdata and Conf.proc.show_result_dialog.get():
|
|
1428
|
+
show_resultdata(self.mainwindow, rdata, f"{objs[0].PREFIX}_results")
|
|
1429
|
+
return rdata
|
|
1430
|
+
|
|
1431
|
+
def compute_n_to_1(
|
|
1432
|
+
self,
|
|
1433
|
+
func: Callable,
|
|
1434
|
+
param: gds.DataSet | None = None,
|
|
1435
|
+
paramclass: type[gds.DataSet] | None = None,
|
|
1436
|
+
title: str | None = None,
|
|
1437
|
+
comment: str | None = None,
|
|
1438
|
+
edit: bool | None = None,
|
|
1439
|
+
) -> None:
|
|
1440
|
+
"""Generic processing method: n objects in → 1 object out.
|
|
1441
|
+
|
|
1442
|
+
Aggregates multiple selected objects into a single result using the provided
|
|
1443
|
+
function. In pairwise mode, applies the function to object pairs (grouped by
|
|
1444
|
+
index) and generates one output per pair.
|
|
1445
|
+
|
|
1446
|
+
Args:
|
|
1447
|
+
func: Function to apply, that takes either `(dst_obj, src_obj_list)` or
|
|
1448
|
+
`(dst_obj, src_obj_list, param)` as arguments, where `dst_obj` is the
|
|
1449
|
+
output object, `src_obj_list` is the input object list,
|
|
1450
|
+
and `param` is an optional parameter set.
|
|
1451
|
+
param: Optional parameter instance.
|
|
1452
|
+
paramclass: Optional parameter class for editing.
|
|
1453
|
+
title: Optional progress bar title.
|
|
1454
|
+
comment: Optional comment for parameter dialog.
|
|
1455
|
+
edit: Whether to open the parameter editor before execution.
|
|
1456
|
+
|
|
1457
|
+
.. note::
|
|
1458
|
+
With n selected objects:
|
|
1459
|
+
|
|
1460
|
+
- in default mode, produces 1 output.
|
|
1461
|
+
- in pairwise mode, produces n outputs (one per pair).
|
|
1462
|
+
"""
|
|
1463
|
+
if (edit is None or param is None) and paramclass is not None:
|
|
1464
|
+
edit, param = self.init_param(param, paramclass, title, comment)
|
|
1465
|
+
if param is not None:
|
|
1466
|
+
if edit and not param.edit(parent=self.mainwindow):
|
|
1467
|
+
return
|
|
1468
|
+
|
|
1469
|
+
objs = self.panel.objview.get_sel_objects(include_groups=True)
|
|
1470
|
+
objmodel = self.panel.objmodel
|
|
1471
|
+
pairwise = is_pairwise_mode()
|
|
1472
|
+
name = func.__name__
|
|
1473
|
+
|
|
1474
|
+
if pairwise:
|
|
1475
|
+
src_grps, src_gids, src_objs, _nbobj, valid = (
|
|
1476
|
+
self.__get_src_grps_gids_objs_nbobj_valid(min_group_nb=2)
|
|
1477
|
+
)
|
|
1478
|
+
if not valid:
|
|
1479
|
+
return
|
|
1480
|
+
dst_gname = (
|
|
1481
|
+
f"{name}({','.join([get_short_id(grp) for grp in src_grps])})|pairwise"
|
|
1482
|
+
)
|
|
1483
|
+
group_exclusive = len(self.panel.objview.get_sel_groups()) != 0
|
|
1484
|
+
if not group_exclusive:
|
|
1485
|
+
# This is not a group exclusive selection
|
|
1486
|
+
dst_gname += "[...]"
|
|
1487
|
+
# Delay group creation until after first result to determine target panel
|
|
1488
|
+
dst_gid = None
|
|
1489
|
+
n_pairs = len(src_objs[src_gids[0]])
|
|
1490
|
+
max_i_pair = min(
|
|
1491
|
+
n_pairs, max(len(src_objs[get_uuid(grp)]) for grp in src_grps)
|
|
1492
|
+
)
|
|
1493
|
+
# Track "Yes to All" choice for this compute operation
|
|
1494
|
+
auto_interpolate_for_operation = False
|
|
1495
|
+
|
|
1496
|
+
with create_progress_bar(self.panel, title, max_=n_pairs) as progress:
|
|
1497
|
+
for i_pair, src_obj1 in enumerate(src_objs[src_gids[0]][:max_i_pair]):
|
|
1498
|
+
progress.setValue(i_pair + 1)
|
|
1499
|
+
progress.setLabelText(title)
|
|
1500
|
+
src_objs_pair = [src_obj1]
|
|
1501
|
+
for src_gid in src_gids[1:]:
|
|
1502
|
+
src_obj = src_objs[src_gid][i_pair]
|
|
1503
|
+
src_objs_pair.append(src_obj)
|
|
1504
|
+
|
|
1505
|
+
# Check signal x-array compatibility for n-to-1 operations
|
|
1506
|
+
if auto_interpolate_for_operation:
|
|
1507
|
+
# "Yes to All" selected, automatically interpolate
|
|
1508
|
+
# by temporarily changing the configuration
|
|
1509
|
+
with Conf.proc.xarray_compat_behavior.temp("interpolate"):
|
|
1510
|
+
result = self._check_signal_xarray_compatibility(
|
|
1511
|
+
src_objs_pair, progress=progress
|
|
1512
|
+
)
|
|
1513
|
+
else:
|
|
1514
|
+
# Normal compatibility check with dialog
|
|
1515
|
+
result = self._check_signal_xarray_compatibility(
|
|
1516
|
+
src_objs_pair, progress=progress
|
|
1517
|
+
)
|
|
1518
|
+
|
|
1519
|
+
if result is None:
|
|
1520
|
+
# User canceled or compatibility check failed
|
|
1521
|
+
return
|
|
1522
|
+
|
|
1523
|
+
checked_objs, yes_to_all_selected = result
|
|
1524
|
+
if yes_to_all_selected:
|
|
1525
|
+
auto_interpolate_for_operation = True
|
|
1526
|
+
|
|
1527
|
+
src_objs_pair = checked_objs
|
|
1528
|
+
if param is None:
|
|
1529
|
+
args = (src_objs_pair,)
|
|
1530
|
+
else:
|
|
1531
|
+
args = (src_objs_pair, param)
|
|
1532
|
+
result = self.__exec_func(func, args, progress)
|
|
1533
|
+
if result is None:
|
|
1534
|
+
break
|
|
1535
|
+
new_obj = self.handle_output(
|
|
1536
|
+
result, _("Calculating: %s") % title, progress
|
|
1537
|
+
)
|
|
1538
|
+
if new_obj is None:
|
|
1539
|
+
break
|
|
1540
|
+
assert isinstance(new_obj, (SignalObj, ImageObj))
|
|
1541
|
+
|
|
1542
|
+
patch_title_with_ids(new_obj, src_objs_pair, get_short_id)
|
|
1543
|
+
|
|
1544
|
+
# Handle keep_results and geometry result merging
|
|
1545
|
+
self._handle_keep_results(new_obj)
|
|
1546
|
+
self._merge_geometry_results_for_n_to_1(new_obj, src_objs_pair)
|
|
1547
|
+
|
|
1548
|
+
# Store lightweight processing metadata (non-interactive)
|
|
1549
|
+
proc_params = ProcessingParameters(
|
|
1550
|
+
func_name=name,
|
|
1551
|
+
pattern="n-to-1",
|
|
1552
|
+
param=param,
|
|
1553
|
+
source_uuids=[get_uuid(obj) for obj in src_objs_pair],
|
|
1554
|
+
)
|
|
1555
|
+
insert_processing_parameters(new_obj, proc_params)
|
|
1556
|
+
|
|
1557
|
+
# Create destination group on first result, in appropriate panel
|
|
1558
|
+
if dst_gid is None:
|
|
1559
|
+
dst_gid = self._create_group_for_result(new_obj, dst_gname)
|
|
1560
|
+
|
|
1561
|
+
self._add_object_to_appropriate_panel(new_obj, group_id=dst_gid)
|
|
1562
|
+
|
|
1563
|
+
else:
|
|
1564
|
+
# In single operand mode, we create a single object for all selected objects
|
|
1565
|
+
|
|
1566
|
+
# [src_objs dictionary] keys: old group id, values: list of old objects
|
|
1567
|
+
src_objs: dict[str, list[SignalObj | ImageObj]] = {}
|
|
1568
|
+
|
|
1569
|
+
grps = self.panel.objview.get_sel_groups()
|
|
1570
|
+
dst_group_name = None
|
|
1571
|
+
if grps:
|
|
1572
|
+
# (Group exclusive selection)
|
|
1573
|
+
# At least one group is selected: create a new group
|
|
1574
|
+
dst_gname = f"{name}({','.join([get_uuid(grp) for grp in grps])})"
|
|
1575
|
+
# Delay group creation until after first result
|
|
1576
|
+
dst_gid = None
|
|
1577
|
+
dst_group_name = dst_gname # Store name for later use
|
|
1578
|
+
else:
|
|
1579
|
+
# (Object exclusive selection)
|
|
1580
|
+
# No group is selected: use each object's group
|
|
1581
|
+
dst_gid = None
|
|
1582
|
+
|
|
1583
|
+
for src_obj in objs:
|
|
1584
|
+
src_gid = objmodel.get_object_group_id(src_obj)
|
|
1585
|
+
src_objs.setdefault(src_gid, []).append(src_obj)
|
|
1586
|
+
|
|
1587
|
+
# Track "Yes to All" choice for this compute operation
|
|
1588
|
+
auto_interpolate_for_operation = False
|
|
1589
|
+
|
|
1590
|
+
with create_progress_bar(self.panel, title, max_=len(objs)) as progress:
|
|
1591
|
+
progress.setValue(0)
|
|
1592
|
+
progress.setLabelText(title)
|
|
1593
|
+
for src_gid, src_obj_list in src_objs.items():
|
|
1594
|
+
# Check signal x-array compatibility for n-to-1 operations
|
|
1595
|
+
if auto_interpolate_for_operation:
|
|
1596
|
+
# "Yes to All" selected, automatically interpolate
|
|
1597
|
+
with Conf.proc.xarray_compat_behavior.temp("interpolate"):
|
|
1598
|
+
result = self._check_signal_xarray_compatibility(
|
|
1599
|
+
src_obj_list, progress=progress
|
|
1600
|
+
)
|
|
1601
|
+
else:
|
|
1602
|
+
# Normal compatibility check with dialog
|
|
1603
|
+
result = self._check_signal_xarray_compatibility(
|
|
1604
|
+
src_obj_list, progress=progress
|
|
1605
|
+
)
|
|
1606
|
+
|
|
1607
|
+
if result is None:
|
|
1608
|
+
# User canceled or compatibility check failed
|
|
1609
|
+
return
|
|
1610
|
+
|
|
1611
|
+
checked_objs, yes_to_all_selected = result
|
|
1612
|
+
if yes_to_all_selected:
|
|
1613
|
+
auto_interpolate_for_operation = True
|
|
1614
|
+
|
|
1615
|
+
src_obj_list = checked_objs
|
|
1616
|
+
|
|
1617
|
+
if param is None:
|
|
1618
|
+
args = (src_obj_list,)
|
|
1619
|
+
else:
|
|
1620
|
+
args = (src_obj_list, param)
|
|
1621
|
+
result = self.__exec_func(func, args, progress)
|
|
1622
|
+
if result is None:
|
|
1623
|
+
break
|
|
1624
|
+
new_obj = self.handle_output(
|
|
1625
|
+
result, _("Calculating: %s") % title, progress
|
|
1626
|
+
)
|
|
1627
|
+
if new_obj is None:
|
|
1628
|
+
break
|
|
1629
|
+
assert isinstance(new_obj, (SignalObj, ImageObj))
|
|
1630
|
+
|
|
1631
|
+
group_id = dst_gid if dst_gid is not None else src_gid
|
|
1632
|
+
patch_title_with_ids(new_obj, src_obj_list, get_short_id)
|
|
1633
|
+
|
|
1634
|
+
# Handle keep_results and geometry result merging
|
|
1635
|
+
self._handle_keep_results(new_obj)
|
|
1636
|
+
self._merge_geometry_results_for_n_to_1(new_obj, src_obj_list)
|
|
1637
|
+
|
|
1638
|
+
# Store lightweight processing metadata (non-interactive)
|
|
1639
|
+
proc_params = ProcessingParameters(
|
|
1640
|
+
func_name=name,
|
|
1641
|
+
pattern="n-to-1",
|
|
1642
|
+
param=param,
|
|
1643
|
+
source_uuids=[get_uuid(obj) for obj in src_obj_list],
|
|
1644
|
+
)
|
|
1645
|
+
insert_processing_parameters(new_obj, proc_params)
|
|
1646
|
+
|
|
1647
|
+
# Create destination group on first result, in appropriate panel
|
|
1648
|
+
use_group_for_non_native = False
|
|
1649
|
+
if dst_gid is None and dst_group_name is not None:
|
|
1650
|
+
dst_gid = self._create_group_for_result(new_obj, dst_group_name)
|
|
1651
|
+
group_id = dst_gid
|
|
1652
|
+
use_group_for_non_native = True
|
|
1653
|
+
|
|
1654
|
+
self._add_object_to_appropriate_panel(
|
|
1655
|
+
new_obj,
|
|
1656
|
+
group_id=group_id,
|
|
1657
|
+
use_group_for_non_native=use_group_for_non_native,
|
|
1658
|
+
)
|
|
1659
|
+
|
|
1660
|
+
# Select newly created group, if any
|
|
1661
|
+
if dst_gid is not None:
|
|
1662
|
+
self.panel.objview.set_current_item_id(dst_gid)
|
|
1663
|
+
|
|
1664
|
+
def compute_2_to_1(
|
|
1665
|
+
self,
|
|
1666
|
+
obj2: SignalObj | ImageObj | list[SignalObj | ImageObj] | None,
|
|
1667
|
+
obj2_name: str,
|
|
1668
|
+
func: Callable,
|
|
1669
|
+
param: gds.DataSet | None = None,
|
|
1670
|
+
paramclass: type[gds.DataSet] | None = None,
|
|
1671
|
+
title: str | None = None,
|
|
1672
|
+
comment: str | None = None,
|
|
1673
|
+
edit: bool | None = None,
|
|
1674
|
+
skip_xarray_compat: bool | None = None,
|
|
1675
|
+
) -> None:
|
|
1676
|
+
"""Generic processing method: binary operation 1+1 → 1.
|
|
1677
|
+
|
|
1678
|
+
Applies a binary function between each selected object and a second operand.
|
|
1679
|
+
Supports both single operand mode (same operand for all objects)
|
|
1680
|
+
and pairwise mode (one-to-one matching between two object lists).
|
|
1681
|
+
|
|
1682
|
+
Args:
|
|
1683
|
+
obj2: Second operand (single object or list for pairwise mode).
|
|
1684
|
+
obj2_name: Display name for the second operand (used in selection dialog).
|
|
1685
|
+
func: Function to apply, that takes either `(dst_obj, src_obj1, src_obj2)`
|
|
1686
|
+
or `(dst_obj, src_obj1, src_obj2, param)` as arguments, where
|
|
1687
|
+
`dst_obj` is the output object, `src_obj1` is the first input object,
|
|
1688
|
+
`src_obj2` is the second input object (operand), and `param` is an
|
|
1689
|
+
optional parameter set.
|
|
1690
|
+
param: Optional parameter instance.
|
|
1691
|
+
paramclass: Optional parameter class for editing.
|
|
1692
|
+
title: Optional progress bar title.
|
|
1693
|
+
comment: Optional comment for parameter dialog.
|
|
1694
|
+
edit: Whether to open the parameter editor before execution.
|
|
1695
|
+
skip_xarray_compat: If True, skip x-array compatibility checks
|
|
1696
|
+
(only for signal panels).
|
|
1697
|
+
|
|
1698
|
+
.. note::
|
|
1699
|
+
With k selected objects:
|
|
1700
|
+
|
|
1701
|
+
- in single operand mode and 1 secondary object: produces k outputs.
|
|
1702
|
+
- in pairwise mode with k secondary objects: produces k outputs
|
|
1703
|
+
(one per pair).
|
|
1704
|
+
"""
|
|
1705
|
+
if (edit is None or param is None) and paramclass is not None:
|
|
1706
|
+
edit, param = self.init_param(param, paramclass, title, comment)
|
|
1707
|
+
if param is not None:
|
|
1708
|
+
if edit and not param.edit(parent=self.mainwindow):
|
|
1709
|
+
return
|
|
1710
|
+
|
|
1711
|
+
objs = self.panel.objview.get_sel_objects(include_groups=True)
|
|
1712
|
+
objmodel = self.panel.objmodel
|
|
1713
|
+
pairwise = is_pairwise_mode()
|
|
1714
|
+
name = func.__name__
|
|
1715
|
+
|
|
1716
|
+
if obj2 is None:
|
|
1717
|
+
objs2 = []
|
|
1718
|
+
elif isinstance(obj2, list):
|
|
1719
|
+
objs2 = obj2
|
|
1720
|
+
assert pairwise
|
|
1721
|
+
else:
|
|
1722
|
+
objs2 = [obj2]
|
|
1723
|
+
|
|
1724
|
+
dlg_title = _("Select %s") % obj2_name
|
|
1725
|
+
|
|
1726
|
+
if pairwise:
|
|
1727
|
+
group_exclusive = len(self.panel.objview.get_sel_groups()) != 0
|
|
1728
|
+
|
|
1729
|
+
src_grps, src_gids, src_objs, nbobj, valid = (
|
|
1730
|
+
self.__get_src_grps_gids_objs_nbobj_valid(min_group_nb=1)
|
|
1731
|
+
)
|
|
1732
|
+
if not valid:
|
|
1733
|
+
return
|
|
1734
|
+
if not objs2:
|
|
1735
|
+
objs2 = self.panel.get_objects_with_dialog(
|
|
1736
|
+
dlg_title,
|
|
1737
|
+
_(
|
|
1738
|
+
"<u>Note:</u> operation mode is <i>pairwise</i>: "
|
|
1739
|
+
"%s object(s) expected (i.e. as many as in the first group)"
|
|
1740
|
+
)
|
|
1741
|
+
% nbobj,
|
|
1742
|
+
nbobj,
|
|
1743
|
+
)
|
|
1744
|
+
if objs2 is None:
|
|
1745
|
+
return
|
|
1746
|
+
|
|
1747
|
+
n_pairs = len(src_objs[src_gids[0]])
|
|
1748
|
+
max_i_pair = min(
|
|
1749
|
+
n_pairs, max(len(src_objs[get_uuid(grp)]) for grp in src_grps)
|
|
1750
|
+
)
|
|
1751
|
+
grp2_id = objmodel.get_object_group_id(objs2[0])
|
|
1752
|
+
grp2 = objmodel.get_group(grp2_id)
|
|
1753
|
+
|
|
1754
|
+
# Initialize pair mapping for potential interpolations
|
|
1755
|
+
pair_maps = {}
|
|
1756
|
+
|
|
1757
|
+
# Check x-array compatibility for signal processing (pairwise mode)
|
|
1758
|
+
if self._is_signal_panel() and not skip_xarray_compat:
|
|
1759
|
+
# Check compatibility between objects from both groups
|
|
1760
|
+
all_pairs = []
|
|
1761
|
+
for src_gid in src_gids:
|
|
1762
|
+
for i_pair in range(max_i_pair):
|
|
1763
|
+
src_obj1 = src_objs[src_gid][i_pair]
|
|
1764
|
+
src_obj2 = objs2[i_pair]
|
|
1765
|
+
if isinstance(src_obj1, SignalObj) and isinstance(
|
|
1766
|
+
src_obj2, SignalObj
|
|
1767
|
+
):
|
|
1768
|
+
all_pairs.append((src_obj1, src_obj2))
|
|
1769
|
+
|
|
1770
|
+
# Track "Yes to All" choice for this compute operation
|
|
1771
|
+
auto_interpolate_for_operation = False
|
|
1772
|
+
|
|
1773
|
+
# Check all pairs for compatibility and create interpolation maps
|
|
1774
|
+
for src_obj1, src_obj2 in all_pairs:
|
|
1775
|
+
if auto_interpolate_for_operation:
|
|
1776
|
+
# "Yes to All" selected, automatically interpolate
|
|
1777
|
+
with Conf.proc.xarray_compat_behavior.temp("interpolate"):
|
|
1778
|
+
result = self._check_signal_xarray_compatibility(
|
|
1779
|
+
[src_obj1, src_obj2]
|
|
1780
|
+
)
|
|
1781
|
+
else:
|
|
1782
|
+
# Normal compatibility check with dialog
|
|
1783
|
+
result = self._check_signal_xarray_compatibility(
|
|
1784
|
+
[src_obj1, src_obj2]
|
|
1785
|
+
)
|
|
1786
|
+
|
|
1787
|
+
if result is None:
|
|
1788
|
+
return # User cancelled or error occurred
|
|
1789
|
+
|
|
1790
|
+
checked_pair, yes_to_all_selected = result
|
|
1791
|
+
if yes_to_all_selected:
|
|
1792
|
+
auto_interpolate_for_operation = True
|
|
1793
|
+
|
|
1794
|
+
# Store mapping for this specific pair
|
|
1795
|
+
pair_maps[(src_obj1, src_obj2)] = checked_pair
|
|
1796
|
+
|
|
1797
|
+
with create_progress_bar(self.panel, title, max_=len(src_gids)) as progress:
|
|
1798
|
+
for i_group, src_gid in enumerate(src_gids):
|
|
1799
|
+
progress.setValue(i_group + 1)
|
|
1800
|
+
progress.setLabelText(title)
|
|
1801
|
+
if group_exclusive:
|
|
1802
|
+
# This is a group exclusive selection
|
|
1803
|
+
src_grp = objmodel.get_group(src_gid)
|
|
1804
|
+
grp_short_ids = [get_uuid(grp) for grp in (src_grp, grp2)]
|
|
1805
|
+
dst_gname = f"{name}({','.join(grp_short_ids)})|pairwise"
|
|
1806
|
+
else:
|
|
1807
|
+
dst_gname = f"{name}[...]"
|
|
1808
|
+
# Delay group creation until after first result
|
|
1809
|
+
dst_gid = None
|
|
1810
|
+
for i_pair in range(max_i_pair):
|
|
1811
|
+
orig_obj1, orig_obj2 = src_objs[src_gid][i_pair], objs2[i_pair]
|
|
1812
|
+
|
|
1813
|
+
# Use interpolated signals if available, keep original refs
|
|
1814
|
+
actual_obj1, actual_obj2 = orig_obj1, orig_obj2
|
|
1815
|
+
if (orig_obj1, orig_obj2) in pair_maps:
|
|
1816
|
+
interpolated_pair = pair_maps[(orig_obj1, orig_obj2)]
|
|
1817
|
+
actual_obj1 = interpolated_pair[0]
|
|
1818
|
+
actual_obj2 = interpolated_pair[1]
|
|
1819
|
+
|
|
1820
|
+
args = [actual_obj1, actual_obj2]
|
|
1821
|
+
if param is not None:
|
|
1822
|
+
args.append(param)
|
|
1823
|
+
result = self.__exec_func(func, tuple(args), progress)
|
|
1824
|
+
if result is None:
|
|
1825
|
+
break
|
|
1826
|
+
new_obj = self.handle_output(
|
|
1827
|
+
result, _("Calculating: %s") % title, progress
|
|
1828
|
+
)
|
|
1829
|
+
if new_obj is None:
|
|
1830
|
+
continue
|
|
1831
|
+
assert isinstance(new_obj, (SignalObj, ImageObj))
|
|
1832
|
+
|
|
1833
|
+
# Use original objects for title generation
|
|
1834
|
+
patch_title_with_ids(
|
|
1835
|
+
new_obj, [orig_obj1, orig_obj2], get_short_id
|
|
1836
|
+
)
|
|
1837
|
+
|
|
1838
|
+
# Handle keep_results logic for 2_to_1 operations
|
|
1839
|
+
self._handle_keep_results(new_obj)
|
|
1840
|
+
|
|
1841
|
+
# Store lightweight processing metadata (non-interactive)
|
|
1842
|
+
proc_params = ProcessingParameters(
|
|
1843
|
+
func_name=name,
|
|
1844
|
+
pattern="2-to-1",
|
|
1845
|
+
param=param,
|
|
1846
|
+
source_uuids=[
|
|
1847
|
+
get_uuid(orig_obj1),
|
|
1848
|
+
get_uuid(orig_obj2),
|
|
1849
|
+
],
|
|
1850
|
+
)
|
|
1851
|
+
insert_processing_parameters(new_obj, proc_params)
|
|
1852
|
+
|
|
1853
|
+
# Create destination group on first result, in appropriate panel
|
|
1854
|
+
if dst_gid is None:
|
|
1855
|
+
dst_gid = self._create_group_for_result(new_obj, dst_gname)
|
|
1856
|
+
|
|
1857
|
+
self._add_object_to_appropriate_panel(new_obj, group_id=dst_gid)
|
|
1858
|
+
|
|
1859
|
+
else:
|
|
1860
|
+
if not objs2:
|
|
1861
|
+
objs2 = self.panel.get_objects_with_dialog(
|
|
1862
|
+
dlg_title,
|
|
1863
|
+
_(
|
|
1864
|
+
"<u>Note:</u> operation mode is <i>single operand</i>: "
|
|
1865
|
+
"1 object expected"
|
|
1866
|
+
),
|
|
1867
|
+
)
|
|
1868
|
+
if objs2 is None:
|
|
1869
|
+
return
|
|
1870
|
+
obj2 = objs2[0]
|
|
1871
|
+
|
|
1872
|
+
# Initialize signal mapping for potential interpolations
|
|
1873
|
+
signal_map = {}
|
|
1874
|
+
|
|
1875
|
+
# Check x-array compatibility for signal processing (single operand mode)
|
|
1876
|
+
orig_obj2 = obj2 # Keep reference to original obj2 for title generation
|
|
1877
|
+
if (
|
|
1878
|
+
self._is_signal_panel()
|
|
1879
|
+
and isinstance(obj2, SignalObj)
|
|
1880
|
+
and not skip_xarray_compat
|
|
1881
|
+
):
|
|
1882
|
+
signal_objs = [obj for obj in objs if isinstance(obj, SignalObj)]
|
|
1883
|
+
if signal_objs:
|
|
1884
|
+
# Check compatibility and get potentially interpolated signals
|
|
1885
|
+
result = self._check_signal_xarray_compatibility(
|
|
1886
|
+
signal_objs + [obj2]
|
|
1887
|
+
)
|
|
1888
|
+
if result is None:
|
|
1889
|
+
return # User cancelled or error occurred
|
|
1890
|
+
|
|
1891
|
+
checked_objs, _yes_to_all_selected = result
|
|
1892
|
+
# Note: In single operand mode, "Yes to All" doesn't apply
|
|
1893
|
+
# since there's only one compatibility check
|
|
1894
|
+
|
|
1895
|
+
# Replace obj2 with the potentially interpolated version
|
|
1896
|
+
obj2 = checked_objs[-1] # obj2 was added last
|
|
1897
|
+
|
|
1898
|
+
# Create a mapping of original to interpolated signals
|
|
1899
|
+
for orig_obj, checked_obj in zip(signal_objs, checked_objs[:-1]):
|
|
1900
|
+
signal_map[orig_obj] = checked_obj
|
|
1901
|
+
|
|
1902
|
+
with create_progress_bar(self.panel, title, max_=len(objs)) as progress:
|
|
1903
|
+
for index, obj in enumerate(objs):
|
|
1904
|
+
progress.setValue(index + 1)
|
|
1905
|
+
progress.setLabelText(title)
|
|
1906
|
+
|
|
1907
|
+
# Use interpolated signal if available
|
|
1908
|
+
actual_obj = obj
|
|
1909
|
+
if (
|
|
1910
|
+
self._is_signal_panel()
|
|
1911
|
+
and isinstance(obj, SignalObj)
|
|
1912
|
+
and obj in signal_map
|
|
1913
|
+
):
|
|
1914
|
+
actual_obj = signal_map[obj]
|
|
1915
|
+
|
|
1916
|
+
args = (
|
|
1917
|
+
(actual_obj, obj2)
|
|
1918
|
+
if param is None
|
|
1919
|
+
else (actual_obj, obj2, param)
|
|
1920
|
+
)
|
|
1921
|
+
result = self.__exec_func(func, args, progress)
|
|
1922
|
+
if result is None:
|
|
1923
|
+
break
|
|
1924
|
+
new_obj = self.handle_output(
|
|
1925
|
+
result, _("Calculating: %s") % title, progress
|
|
1926
|
+
)
|
|
1927
|
+
if new_obj is None:
|
|
1928
|
+
continue
|
|
1929
|
+
assert isinstance(new_obj, (SignalObj, ImageObj))
|
|
1930
|
+
|
|
1931
|
+
group_id = objmodel.get_object_group_id(obj)
|
|
1932
|
+
# Use original objects for title generation
|
|
1933
|
+
patch_title_with_ids(new_obj, [obj, orig_obj2], get_short_id)
|
|
1934
|
+
|
|
1935
|
+
# Handle keep_results logic for 2_to_1 operations
|
|
1936
|
+
self._handle_keep_results(new_obj)
|
|
1937
|
+
|
|
1938
|
+
# Store lightweight processing metadata (non-interactive)
|
|
1939
|
+
proc_params = ProcessingParameters(
|
|
1940
|
+
func_name=name,
|
|
1941
|
+
pattern="2-to-1",
|
|
1942
|
+
param=param,
|
|
1943
|
+
source_uuids=[
|
|
1944
|
+
get_uuid(obj),
|
|
1945
|
+
get_uuid(orig_obj2),
|
|
1946
|
+
],
|
|
1947
|
+
)
|
|
1948
|
+
insert_processing_parameters(new_obj, proc_params)
|
|
1949
|
+
|
|
1950
|
+
# group_id is from source panel, don't use for non-native objects
|
|
1951
|
+
self._add_object_to_appropriate_panel(
|
|
1952
|
+
new_obj, group_id=group_id, use_group_for_non_native=False
|
|
1953
|
+
)
|
|
1954
|
+
|
|
1955
|
+
def register_1_to_1(
|
|
1956
|
+
self,
|
|
1957
|
+
function: Callable,
|
|
1958
|
+
title: str,
|
|
1959
|
+
paramclass: type[gds.DataSet] | None = None,
|
|
1960
|
+
icon_name: str | None = None,
|
|
1961
|
+
comment: str | None = None,
|
|
1962
|
+
edit: bool | None = None,
|
|
1963
|
+
) -> ComputingFeature:
|
|
1964
|
+
"""Register a 1-to-1 processing function.
|
|
1965
|
+
|
|
1966
|
+
The `register_1_to_1` method is used to register a function that takes one
|
|
1967
|
+
object as input and produces one object as output. The function is called
|
|
1968
|
+
with the input object and an optional parameter set. The result of the
|
|
1969
|
+
function is returned.
|
|
1970
|
+
|
|
1971
|
+
Args:
|
|
1972
|
+
function: function to register
|
|
1973
|
+
title: title of the function
|
|
1974
|
+
paramclass: parameter class. Defaults to None.
|
|
1975
|
+
icon_name: icon name. Defaults to None.
|
|
1976
|
+
comment: comment. Defaults to None.
|
|
1977
|
+
edit: whether to open the parameter editor before execution.
|
|
1978
|
+
|
|
1979
|
+
Returns:
|
|
1980
|
+
Registered feature.
|
|
1981
|
+
"""
|
|
1982
|
+
feature = ComputingFeature(
|
|
1983
|
+
pattern="1_to_1",
|
|
1984
|
+
function=function,
|
|
1985
|
+
title=title,
|
|
1986
|
+
paramclass=paramclass,
|
|
1987
|
+
icon_name=icon_name,
|
|
1988
|
+
comment=comment,
|
|
1989
|
+
edit=edit,
|
|
1990
|
+
)
|
|
1991
|
+
self.add_feature(feature)
|
|
1992
|
+
return feature
|
|
1993
|
+
|
|
1994
|
+
def register_1_to_0(
|
|
1995
|
+
self,
|
|
1996
|
+
function: Callable,
|
|
1997
|
+
title: str,
|
|
1998
|
+
paramclass: type[gds.DataSet] | None = None,
|
|
1999
|
+
icon_name: str | None = None,
|
|
2000
|
+
comment: str | None = None,
|
|
2001
|
+
edit: bool | None = None,
|
|
2002
|
+
) -> ComputingFeature:
|
|
2003
|
+
"""Register a 1-to-0 processing function.
|
|
2004
|
+
|
|
2005
|
+
The function takes one object as input and produces no output.
|
|
2006
|
+
The function is called with the input object and an optional parameter set.
|
|
2007
|
+
The result of the function is returned.
|
|
2008
|
+
|
|
2009
|
+
Args:
|
|
2010
|
+
function: function to register
|
|
2011
|
+
title: title of the function
|
|
2012
|
+
paramclass: parameter class. Defaults to None.
|
|
2013
|
+
icon_name: icon name. Defaults to None.
|
|
2014
|
+
comment: comment. Defaults to None.
|
|
2015
|
+
edit: whether to open the parameter editor before execution.
|
|
2016
|
+
|
|
2017
|
+
Returns:
|
|
2018
|
+
Registered feature.
|
|
2019
|
+
"""
|
|
2020
|
+
feature = ComputingFeature(
|
|
2021
|
+
pattern="1_to_0",
|
|
2022
|
+
function=function,
|
|
2023
|
+
title=title,
|
|
2024
|
+
paramclass=paramclass,
|
|
2025
|
+
icon_name=icon_name,
|
|
2026
|
+
comment=comment,
|
|
2027
|
+
edit=edit,
|
|
2028
|
+
)
|
|
2029
|
+
self.add_feature(feature)
|
|
2030
|
+
return feature
|
|
2031
|
+
|
|
2032
|
+
def register_1_to_n(
|
|
2033
|
+
self, function: Callable, title: str, icon_name: str | None = None
|
|
2034
|
+
) -> ComputingFeature:
|
|
2035
|
+
"""Register a 1-to-n processing function.
|
|
2036
|
+
|
|
2037
|
+
The function takes one object as input and produces multiple objects as output.
|
|
2038
|
+
The function is called with the input object and an optional parameter set.
|
|
2039
|
+
The result of the function is returned.
|
|
2040
|
+
|
|
2041
|
+
Args:
|
|
2042
|
+
function: function to register
|
|
2043
|
+
title: title of the function
|
|
2044
|
+
icon_name: icon name. Defaults to None.
|
|
2045
|
+
|
|
2046
|
+
Returns:
|
|
2047
|
+
Registered feature.
|
|
2048
|
+
"""
|
|
2049
|
+
feature = ComputingFeature(
|
|
2050
|
+
pattern="1_to_n",
|
|
2051
|
+
function=function,
|
|
2052
|
+
title=title,
|
|
2053
|
+
icon_name=icon_name,
|
|
2054
|
+
)
|
|
2055
|
+
self.add_feature(feature)
|
|
2056
|
+
return feature
|
|
2057
|
+
|
|
2058
|
+
def register_n_to_1(
|
|
2059
|
+
self,
|
|
2060
|
+
function: Callable,
|
|
2061
|
+
title: str,
|
|
2062
|
+
paramclass: type[gds.DataSet] | None = None,
|
|
2063
|
+
icon_name: str | None = None,
|
|
2064
|
+
comment: str | None = None,
|
|
2065
|
+
edit: bool | None = None,
|
|
2066
|
+
) -> ComputingFeature:
|
|
2067
|
+
"""Register a n-to-1 processing function.
|
|
2068
|
+
|
|
2069
|
+
The function takes multiple objects as input and produces one object as output.
|
|
2070
|
+
The function is called with the input objects and an optional parameter set.
|
|
2071
|
+
The result of the function is returned.
|
|
2072
|
+
|
|
2073
|
+
Args:
|
|
2074
|
+
function: function to register
|
|
2075
|
+
title: title of the function
|
|
2076
|
+
paramclass: parameter class. Defaults to None.
|
|
2077
|
+
icon_name: icon name. Defaults to None.
|
|
2078
|
+
comment: comment. Defaults to None.
|
|
2079
|
+
edit: whether to open the parameter editor before execution.
|
|
2080
|
+
|
|
2081
|
+
Returns:
|
|
2082
|
+
Registered feature.
|
|
2083
|
+
"""
|
|
2084
|
+
feature = ComputingFeature(
|
|
2085
|
+
pattern="n_to_1",
|
|
2086
|
+
function=function,
|
|
2087
|
+
title=title,
|
|
2088
|
+
paramclass=paramclass,
|
|
2089
|
+
icon_name=icon_name,
|
|
2090
|
+
comment=comment,
|
|
2091
|
+
edit=edit,
|
|
2092
|
+
)
|
|
2093
|
+
self.add_feature(feature)
|
|
2094
|
+
return feature
|
|
2095
|
+
|
|
2096
|
+
+    def register_2_to_1(
+        self,
+        function: Callable,
+        title: str,
+        paramclass: type[gds.DataSet] | None = None,
+        icon_name: str | None = None,
+        comment: str | None = None,
+        edit: bool | None = None,
+        obj2_name: str | None = None,
+        skip_xarray_compat: bool | None = None,
+    ) -> ComputingFeature:
+        """Register a 2-to-1 processing function.
+
+        The function takes two objects as input and produces one object as output.
+        The function is called with the input objects and an optional parameter set.
+        The result of the function is returned.
+
+        Args:
+            function: function to register
+            title: title of the function
+            paramclass: parameter class. Defaults to None.
+            icon_name: icon name. Defaults to None.
+            comment: comment. Defaults to None.
+            edit: whether to open the parameter editor before execution.
+            obj2_name: name of the second object. Defaults to None.
+            skip_xarray_compat: whether to skip X-array compatibility check.
+                Defaults to None. Set to True for operations like interpolation where
+                different X-arrays are expected and desired.
+
+        Returns:
+            Registered feature.
+        """
+        feature = ComputingFeature(
+            pattern="2_to_1",
+            function=function,
+            title=title,
+            paramclass=paramclass,
+            icon_name=icon_name,
+            comment=comment,
+            edit=edit,
+            obj2_name=obj2_name,
+            skip_xarray_compat=skip_xarray_compat,
+        )
+        self.add_feature(feature)
+        return feature
+
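To illustrate the two knobs specific to the 2-to-1 pattern (`obj2_name` and `skip_xarray_compat`), a hedged sketch; `proc` and `resample_onto` are assumptions, not part of the diff.

    # Hedged sketch -- `proc` and `resample_onto` are hypothetical.
    proc.register_2_to_1(
        resample_onto,
        title="Resample onto reference",
        obj2_name="Reference signal",  # label used when prompting for the second operand
        skip_xarray_compat=True,       # different X-arrays are expected here, skip the check
    )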
+    def add_feature(self, feature: ComputingFeature) -> None:
+        """Add a computing feature to the registry.
+
+        Args:
+            feature: ComputingFeature instance to add.
+        """
+        self.computing_registry[feature.function] = feature
+
+    def get_feature(self, function_or_name: Callable | str) -> ComputingFeature:
+        """Get a computing feature by name or function.
+
+        Args:
+            function_or_name: Name of the feature or the function itself.
+
+        Returns:
+            Computing feature instance.
+        """
+        try:
+            return self.computing_registry[function_or_name]
+        except KeyError as exc:
+            for _func, feature in self.computing_registry.items():
+                if feature.name == function_or_name:
+                    return feature
+            raise ValueError(f"Unknown computing feature: {function_or_name}") from exc
+
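`get_feature` first tries the argument as a registry key (the registered callable) and then falls back to matching the feature name. A hedged lookup sketch, with `proc` and `compute_stats` hypothetical:

    # Hedged sketch -- `proc` and `compute_stats` are hypothetical.
    feat_by_func = proc.get_feature(compute_stats)      # lookup by registered callable
    feat_by_name = proc.get_feature(feat_by_func.name)  # lookup by registry name
    assert feat_by_func is feat_by_name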
+    @qt_try_except()
+    def run_feature(
+        self,
+        key: str | Callable | ComputingFeature,
+        *args,
+        **kwargs,
+    ) -> ResultData | list[SignalObj | ImageObj] | SignalObj | ImageObj | None:
+        """Run a computing feature that has been previously registered.
+
+        This method is a generic dispatcher for all compute methods.
+        It uses the central registry to find the appropriate compute method
+        based on the pattern (`1_to_1`, `1_to_0`, `n_to_1`, `2_to_1`, `1_to_n`).
+        It then calls the appropriate compute method with the provided arguments.
+
+        Depending on the pattern, this method can take different arguments:
+
+        .. code-block:: python
+
+            import sigima.proc.signal as sips
+            import sigima.params
+
+            proc = win.signalpanel.processor  # where `win` is DataLab's main window
+
+            # For patterns `1_to_1`, `1_to_0`, `n_to_1`:
+            proc.run_feature(sips.normalize)
+            param = sigima.params.MovingAverageParam(n=3)
+            proc.run_feature(sips.moving_average, param)
+            proc.run_feature(computation_function, param, edit=False)
+
+            # For pattern `2_to_1`:
+            proc.run_feature(sips.difference, obj2)
+            param = sigima.params.InterpolationParam(method="cubic")
+            proc.run_feature(sips.interpolation, obj2, param)
+
+            # For pattern `1_to_n`:
+            params = roi.to_params(obj)
+            proc.run_feature(sips.extract_roi, params=params)
+
+        Args:
+            key: The key to look up in the registry. It can be a string, a callable,
+                or a ComputingFeature instance.
+            *args: Positional arguments to pass to the compute method.
+            **kwargs: Keyword arguments to pass to the compute method.
+
+        Returns:
+            The result of the computation or None.
+        """
+        if not isinstance(key, ComputingFeature):
+            feature = self.get_feature(key)
+        else:
+            feature = key
+
+        # Some keyword parameters may be overridden
+        edit = kwargs.pop("edit", feature.edit)
+        title = kwargs.pop("title", feature.title)
+        comment = kwargs.pop("comment", feature.comment)
+
+        pattern = feature.pattern
+
+        if pattern in {"1_to_1", "1_to_0", "n_to_1"}:
+            compute_method = getattr(self, f"compute_{pattern}")
+            param = kwargs.pop("param", args[0] if args else None)
+            assert isinstance(param, (gds.DataSet, type(None))), (
+                f"For pattern '{pattern}', 'param' must be a DataSet or None"
+            )
+            return compute_method(
+                feature.function,
+                param=param,
+                paramclass=feature.paramclass,
+                title=title,
+                comment=comment,
+                edit=edit,
+            )
+        if pattern == "2_to_1":
+            obj2 = kwargs.pop("obj2", args[0] if args else None)
+            assert isinstance(obj2, (SignalObj, ImageObj, list, type(None))), (
+                "For pattern '2_to_1', 'obj2' must be a SignalObj, ImageObj, "
+                "list of SignalObj/ImageObj, or None"
+            )
+            param = kwargs.pop("param", args[1] if args and len(args) > 1 else None)
+            assert isinstance(param, (gds.DataSet, type(None))), (
+                "For pattern '2_to_1', 'param' must be a DataSet or None"
+            )
+            return self.compute_2_to_1(
+                obj2,
+                feature.obj2_name or _("Second operand"),
+                feature.function,
+                param=param,
+                paramclass=feature.paramclass,
+                title=title,
+                comment=comment,
+                edit=edit,
+                skip_xarray_compat=feature.skip_xarray_compat,
+            )
+        if pattern == "1_to_n":
+            params = kwargs.get("params", args[0] if args else [])
+            if not isinstance(params, list) or any(
+                not isinstance(param, gds.DataSet) for param in params
+            ):
+                raise ValueError(
+                    "For pattern '1_to_n', 'params' must be "
+                    "a list of DataSet or a DataSetGroup"
+                )
+            return self.compute_1_to_n(
+                feature.function,
+                params=params,
+                title=title,
+                edit=edit,
+            )
+        raise ValueError(f"Unsupported compute pattern: {pattern}")
+
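Note that `edit`, `title` and `comment` passed as keyword arguments override the values stored at registration time (they are popped before dispatch). A hedged sketch, with `proc` and `compute_stats` hypothetical:

    # Hedged sketch -- `proc` and `compute_stats` are hypothetical.
    # Run without opening the parameter editor and with a custom result title:
    proc.run_feature(compute_stats, edit=False, title="Stats (batch mode)")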
+    # ------Data Processing-------------------------------------------------------------
+
+    @qt_try_except()
+    def compute_roi_extraction(self, roi: TypeROI | None = None) -> None:
+        """Extract Region Of Interest (ROI) from data with:
+
+        - :py:func:`sigima.proc.image.compute_extract_roi` for single ROI
+        - :py:func:`sigima.proc.image.compute_extract_rois` for multiple ROIs"""
+        # Expected behavior:
+        # -----------------
+        # * If `roi` is provided and not empty, skip the ROI dialog
+        # * If first selected obj has a ROI, use this ROI as default but open
+        #   ROI Editor dialog anyway
+        # * If multiple objs are selected, then apply the first obj ROI to all
+        if roi is None or roi.is_empty():
+            roi = self.edit_roi_graphically(mode="extract")
+            if roi is None or roi.is_empty():
+                return
+        obj = self.panel.objview.get_sel_objects(include_groups=True)[0]
+        params = roi.to_params(obj)
+        if Conf.proc.extract_roi_singleobj.get() and len(params) > 1:
+            # Extract multiple ROIs into a single object (remove all the ROIs),
+            # if the "Extract all ROIs into a single image object"
+            # option is checked and if there is more than one ROI
+            self._extract_multiple_roi_in_single_object(params)
+        else:
+            # Extract each ROI into a separate object (keep the ROI in the case of
+            # a circular ROI), if the "Extract all ROIs into a single image object"
+            # option is not checked or if there is only one ROI (See Issue #31)
+            self.run_feature("extract_roi", params=params, edit=False)
+
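Whether several ROIs end up merged into one object or split into separate objects is driven by the `Conf.proc.extract_roi_singleobj` option read above. A hedged sketch, assuming the option object exposes a `set()` counterpart to the `get()` used in the code, and `proc` is a processor instance:

    # Hedged sketch -- assumes Conf.proc.extract_roi_singleobj has a set() setter.
    Conf.proc.extract_roi_singleobj.set(True)  # merge all ROIs into a single object
    proc.compute_roi_extraction()              # opens the ROI editor, then extracts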
+    @abc.abstractmethod
+    @qt_try_except()
+    def _extract_multiple_roi_in_single_object(
+        self, params: list[TypeROIParam]
+    ) -> None:
+        """Extract multiple Regions Of Interest (ROIs) from data in a single object"""
+
+    # ------Analysis-------------------------------------------------------------------
+
+    def edit_roi_graphically(
+        self, mode: Literal["apply", "extract", "define"] = "apply"
+    ) -> TypeROI | None:
+        """Define Region Of Interest (ROI).
+
+        Args:
+            mode: Mode of operation, either "apply" (define ROI, then apply it to
+                selected objects), "extract" (define ROI, then extract data from it),
+                or "define" (define ROI without applying or extracting).
+
+        Returns:
+            ROI object or None if ROI dialog has been canceled.
+        """
+        assert mode in ("apply", "extract", "define"), (
+            f"Invalid mode: {mode}. Must be either 'apply', 'extract' or 'define'."
+        )
+        # Expected behavior:
+        # -----------------
+        # * If first selected obj has a ROI, use this ROI as default but open
+        #   ROI Editor dialog anyway
+        # * If multiple objs are selected, then apply the first obj ROI to all
+        results = self.panel.get_roi_editor_output(mode=mode)
+        if results is None:
+            return None
+        edited_roi, modified = results
+        objs = self.panel.objview.get_sel_objects(include_groups=True)
+        obj = objs[-1]
+        params = edited_roi.to_params(obj)
+        group = gds.DataSetGroup(params, title=_("Regions of Interest"))
+        if (
+            env.execenv.unattended  # Unattended mode (automated unit tests)
+            or edited_roi.is_empty()  # No ROI has been defined
+            or group.edit(parent=self.mainwindow)  # ROI dialog has been accepted
+        ):
+            if modified:
+                # If ROI has been modified, save ROI (not in "extract" mode)
+                if edited_roi.is_empty() and mode != "define":
+                    # If ROI is empty, remove it from all selected objects
+                    # (not in "define" mode because the ROI is just defined and used
+                    # in a processing function for example: it's not bound to any
+                    # object yet)
+                    for obj_i in objs:
+                        obj_i.roi = None
+                else:
+                    edited_roi = edited_roi.__class__.from_params(obj, params)
+                    if mode == "apply":
+                        # Apply ROI to all selected objects
+                        for obj_i in objs:
+                            obj_i.roi = edited_roi
+                self.SIG_ADD_SHAPE.emit(get_uuid(obj))
+                self.panel.selection_changed(update_items=True)
+                self.panel.refresh_plot(
+                    "selected",
+                    update_items=True,
+                    only_visible=False,
+                    only_existing=True,
+                )
+                # Auto-recompute analysis operations for objects with modified ROIs
+                if mode == "apply":
+                    for obj_i in objs:
+                        self.auto_recompute_analysis(obj_i)
+        return edited_roi
+
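The "define" mode returns a ROI without binding it to any object, which is useful for feeding a processing function directly, as the diff itself does in `compute_roi_extraction`. A hedged sketch (`proc` and `obj` are assumptions):

    # Hedged sketch -- `proc` and `obj` are hypothetical.
    roi = proc.edit_roi_graphically(mode="define")
    if roi is not None and not roi.is_empty():
        params = roi.to_params(obj)  # convert the ROI to parameter datasets
        proc.run_feature("extract_roi", params=params, edit=False)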
+    def edit_roi_numerically(self) -> TypeROI:
+        """Edit Regions Of Interest (ROIs) numerically.
+
+        Opens a dialog to edit the parameters of the selected ROIs.
+        If no ROIs are selected, it will prompt the user to select ROIs.
+
+        Returns:
+            The edited ROI object if the dialog is accepted, otherwise the original ROI.
+        """
+        obj = self.panel.objview.get_sel_objects()[0]
+        assert obj.roi is not None, _("No ROI selected for editing.")
+        params = obj.roi.to_params(obj)
+        group = gds.DataSetGroup(params, title=_("Regions of Interest"))
+        if group.edit(parent=self.mainwindow):
+            edited_roi = obj.roi.__class__.from_params(obj, params)
+            obj.roi = edited_roi
+            self.SIG_ADD_SHAPE.emit(get_uuid(obj))
+            self.panel.refresh_plot(
+                "selected",
+                update_items=True,
+                only_visible=False,
+                only_existing=True,
+            )
+            # Auto-recompute analysis operations after ROI modification
+            self.auto_recompute_analysis(obj)
+            return edited_roi
+        return obj.roi
+
+    def delete_regions_of_interest(self) -> None:
+        """Delete Regions Of Interest"""
+        if (
+            env.execenv.unattended
+            or QW.QMessageBox.question(
+                self.mainwindow,
+                _("Remove all ROIs"),
+                _("Are you sure you want to remove all ROIs?"),
+            )
+            == QW.QMessageBox.Yes
+        ):
+            modified_objs = []
+            for obj in self.panel.objview.get_sel_objects():
+                if obj.roi is not None:
+                    obj.roi = None
+                    modified_objs.append(obj)
+            self.panel.selection_changed(update_items=True)
+            # Auto-recompute analysis operations after ROI deletion
+            for obj in modified_objs:
+                self.auto_recompute_analysis(obj)
+
+    def delete_single_roi(self, roi_index: int) -> None:
+        """Delete a single ROI by index
+
+        Args:
+            roi_index: Index of the ROI to remove
+        """
+        obj = self.panel.objview.get_sel_objects()[0]
+        if obj.roi is not None and 0 <= roi_index < len(obj.roi.single_rois):
+            roi_title = obj.roi.get_single_roi_title(roi_index)
+            if (
+                env.execenv.unattended
+                or QW.QMessageBox.question(
+                    self.mainwindow,
+                    _("Remove ROI"),
+                    _("Are you sure you want to remove ROI '%s'?") % roi_title,
+                )
+                == QW.QMessageBox.Yes
+            ):
+                obj.roi.single_rois.pop(roi_index)
+                # If no ROIs left, set roi to None
+                if len(obj.roi.single_rois) == 0:
+                    obj.roi = None
+                obj.mark_roi_as_changed()
+                # Auto-recompute analysis operations after ROI modification
+                # (must be done BEFORE selection_changed to avoid stale results)
+                self.auto_recompute_analysis(obj)
+                self.panel.selection_changed(update_items=True)