darfix 4.2.0__py3-none-any.whl → 4.3.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- darfix/core/data_selection.py +11 -2
- darfix/core/dataset.py +72 -157
- darfix/core/grainplot.py +44 -56
- darfix/core/{imageStack.py → image_stack.py} +9 -15
- darfix/core/moment_types.py +6 -0
- darfix/core/{noiseremoval.py → noise_removal.py} +25 -24
- darfix/core/noise_removal_type.py +14 -0
- darfix/core/positioners.py +6 -0
- darfix/core/rocking_curves.py +6 -3
- darfix/core/rocking_curves_map.py +1 -1
- darfix/core/{shiftcorrection.py → shift_correction.py} +1 -2
- darfix/core/state_of_operation.py +7 -46
- darfix/core/utils.py +0 -39
- darfix/dtypes.py +1 -9
- darfix/gui/{binningWidget.py → binning_widget.py} +2 -29
- darfix/gui/{blindSourceSeparationWidget.py → blind_source_separation_widget.py} +4 -16
- darfix/gui/{chooseDimensions.py → choose_dimensions.py} +1 -1
- darfix/gui/concatenate_scans.py +4 -4
- darfix/gui/data_selection/{hdf5_data_selection_widgets.py → hdf5_dataset_selection_widget.py} +3 -56
- darfix/gui/data_selection/line_edits.py +54 -8
- darfix/gui/data_selection/scan_selection_widgets.py +24 -11
- darfix/gui/data_selection/utils.py +11 -0
- darfix/gui/data_selection/{WorkingDirSelectionWidget.py → working_dir_selection_widget.py} +15 -14
- darfix/gui/{dimensionsWidget.py → dimensions_widget.py} +1 -1
- darfix/gui/{displayComponentsWidget.py → display_components_widget.py} +1 -1
- darfix/gui/{filterByDimension.py → filter_by_dimension.py} +1 -1
- darfix/gui/{grainplot/dimensionRangeSlider2D.py → grain_plot/dimension_range_slider_2d.py} +2 -2
- darfix/gui/{grainplot/grainPlotWidget.py → grain_plot/grain_plot_widget.py} +1 -1
- darfix/gui/{grainplot/mosaicityWidget.py → grain_plot/mosaicity_widget.py} +21 -23
- darfix/gui/{magnificationWidget.py → magnification_widget.py} +1 -1
- darfix/gui/{noiseremoval → noise_removal}/noise_removal_widget.py +12 -16
- darfix/gui/{noiseremoval → noise_removal}/operation_list_widget.py +2 -2
- darfix/gui/{noiseremoval → noise_removal}/parameters_widget.py +6 -6
- darfix/gui/{PCAWidget.py → pca_widget.py} +2 -4
- darfix/gui/{projectionWidget.py → projection_widget.py} +1 -1
- darfix/gui/rocking_curves/{rockingCurvesPlot.py → rocking_curves_plot.py} +13 -13
- darfix/gui/rocking_curves/{rockingCurvesWidget.py → rocking_curves_widget.py} +10 -18
- darfix/gui/{roiSelectionWidget.py → roi_selection_widget.py} +9 -101
- darfix/gui/{shiftcorrection/shiftCorrectionWidget.py → shift_correction/shift_correction_widget.py} +4 -7
- darfix/gui/utils/data_path_completer.py +7 -7
- darfix/gui/utils/data_path_selection.py +4 -4
- darfix/gui/utils/{rangeSlider.py → range_slider.py} +1 -1
- darfix/gui/{weakBeamWidget.py → weak_beam_widget.py} +13 -28
- darfix/gui/{zSumWidget.py → zsum_widget.py} +1 -2
- darfix/main.py +19 -3
- darfix/processing/rocking_curves.py +12 -13
- darfix/tasks/binning.py +6 -17
- darfix/tasks/blind_source_separation.py +121 -0
- darfix/tasks/blindsourceseparation.py +8 -131
- darfix/tasks/copy.py +0 -2
- darfix/tasks/data_partition.py +39 -0
- darfix/tasks/datapartition.py +8 -50
- darfix/tasks/dimension_definition.py +197 -0
- darfix/tasks/dimensiondefinition.py +8 -197
- darfix/tasks/grain_plot.py +93 -0
- darfix/tasks/grainplot.py +8 -103
- darfix/tasks/hdf5_data_selection.py +5 -11
- darfix/tasks/hdf5_scans_concatenation.py +4 -4
- darfix/tasks/noise_removal.py +88 -0
- darfix/tasks/noiseremoval.py +8 -86
- darfix/tasks/pca.py +1 -3
- darfix/tasks/projection.py +1 -6
- darfix/tasks/rocking_curves.py +10 -5
- darfix/tasks/roi.py +0 -2
- darfix/tasks/shift_correction.py +45 -0
- darfix/tasks/shiftcorrection.py +8 -43
- darfix/tasks/transformation.py +0 -2
- darfix/tasks/weak_beam.py +71 -0
- darfix/tasks/weakbeam.py +8 -67
- darfix/tasks/zsum.py +1 -1
- darfix/tests/conftest.py +1 -1
- darfix/tests/gui/test_data_path_completer.py +4 -4
- darfix/tests/gui/test_dimension_range_slider_2d.py +2 -2
- darfix/tests/gui/test_range_slider_with_spinboxes.py +1 -1
- darfix/tests/orange/test_ewoks.py +13 -9
- darfix/tests/orange/widgets/test_hdf5_data_selection.py +93 -0
- darfix/tests/tasks/test_data_copy.py +0 -2
- darfix/tests/tasks/{test_dimensiondefinition.py → test_dimension_definition.py} +1 -1
- darfix/tests/tasks/test_weak_beam.py +9 -0
- darfix/tests/test_components_matching.py +2 -2
- darfix/tests/test_dataset.py +2 -28
- darfix/tests/test_dimension.py +1 -1
- darfix/tests/test_generate_grain_maps_nxdict.py +4 -5
- darfix/tests/test_image_operations.py +4 -4
- darfix/tests/test_image_registration.py +17 -17
- darfix/tests/test_image_stack.py +2 -13
- darfix/tests/test_mask.py +1 -1
- darfix/tests/test_moments.py +2 -2
- darfix/tests/test_rocking_curves.py +1 -3
- darfix/tests/test_shift.py +7 -7
- darfix/tests/test_workflow.py +4 -4
- darfix/tests/test_zsum.py +3 -6
- {darfix-4.2.0.dist-info → darfix-4.3.0.dist-info}/METADATA +5 -3
- {darfix-4.2.0.dist-info → darfix-4.3.0.dist-info}/RECORD +141 -135
- orangecontrib/darfix/widgets/__init__.py +10 -1
- orangecontrib/darfix/widgets/binning.py +3 -3
- orangecontrib/darfix/widgets/blindsourceseparation.py +4 -6
- orangecontrib/darfix/widgets/concatenateHDF5.py +1 -1
- orangecontrib/darfix/widgets/datacopy.py +1 -1
- orangecontrib/darfix/widgets/datapartition.py +7 -102
- orangecontrib/darfix/widgets/{datasetWidgetBase.py → dataset_widget_base.py} +17 -5
- orangecontrib/darfix/widgets/dimensions.py +6 -6
- orangecontrib/darfix/widgets/grainplot.py +3 -3
- orangecontrib/darfix/widgets/hdf5dataselection.py +34 -14
- orangecontrib/darfix/widgets/metadata.py +2 -2
- orangecontrib/darfix/widgets/noiseremoval.py +4 -4
- orangecontrib/darfix/widgets/{operationWidgetBase.py → operation_widget_base.py} +2 -2
- orangecontrib/darfix/widgets/pca.py +2 -2
- orangecontrib/darfix/widgets/projection.py +2 -2
- orangecontrib/darfix/widgets/rockingcurves.py +5 -2
- orangecontrib/darfix/widgets/roiselection.py +24 -106
- orangecontrib/darfix/widgets/rsmhistogram.py +2 -2
- orangecontrib/darfix/widgets/shiftcorrection.py +3 -3
- orangecontrib/darfix/widgets/transformation.py +4 -4
- orangecontrib/darfix/widgets/weakbeam.py +20 -103
- orangecontrib/darfix/widgets/zsum.py +3 -5
- darfix/gui/dataPartitionWidget.py +0 -167
- darfix/gui/data_selection/DataSelectionBase.py +0 -59
- darfix/tests/tasks/test_datapartition.py +0 -52
- /darfix/core/{componentsMatching.py → components_matching.py} +0 -0
- /darfix/core/{datapathfinder.py → data_path_finder.py} +0 -0
- /darfix/core/{imageRegistration.py → image_registration.py} +0 -0
- /darfix/gui/{grainplot → grain_plot}/__init__.py +0 -0
- /darfix/gui/{grainplot → grain_plot}/_oridist_toolbar_buttons.py +0 -0
- /darfix/gui/{grainplot → grain_plot}/flashlight.py +0 -0
- /darfix/gui/{grainplot → grain_plot}/flashlight_mode_action.py +0 -0
- /darfix/gui/{grainplot → grain_plot}/oridist_toolbar.py +0 -0
- /darfix/gui/{grainplot → grain_plot}/utils.py +0 -0
- /darfix/gui/{metadataWidget.py → metadata_widget.py} +0 -0
- /darfix/gui/{operationProcess.py → parallel/operation_process.py} +0 -0
- /darfix/gui/{operationThread.py → parallel/operation_thread.py} +0 -0
- /darfix/gui/rocking_curves/{fitComboBox.py → fit_combobox.py} +0 -0
- /darfix/gui/{roiLimitsToolbar.py → roi_limits_toolbar.py} +0 -0
- /darfix/gui/{rsmHistogramWidget.py → rsm_histogram_widget.py} +0 -0
- /darfix/gui/{rsmWidget.py → rsm_widget.py} +0 -0
- /darfix/gui/{shiftcorrection → shift_correction}/__init__.py +0 -0
- /darfix/gui/{shiftcorrection/shiftInput.py → shift_correction/shift_input.py} +0 -0
- /darfix/gui/utils/{standardButtonBox.py → standard_buttonbox.py} +0 -0
- /darfix/processing/{imageOperations.py → image_operations.py} +0 -0
- /darfix/tests/{test_datapathfinder.py → test_data_path_finder.py} +0 -0
- {darfix-4.2.0.dist-info → darfix-4.3.0.dist-info}/WHEEL +0 -0
- {darfix-4.2.0.dist-info → darfix-4.3.0.dist-info}/entry_points.txt +0 -0
- {darfix-4.2.0.dist-info → darfix-4.3.0.dist-info}/licenses/LICENSE +0 -0
- {darfix-4.2.0.dist-info → darfix-4.3.0.dist-info}/top_level.txt +0 -0
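Most of the changes in 4.3.0 are module renames from camelCase to snake_case. For the old `darfix.tasks` module names, thin shims are kept that re-export the new implementation and emit a `DeprecationWarning` (the `darfix/tasks/blindsourceseparation.py` hunk below shows the pattern). A minimal migration sketch, assuming no other API change in the re-exported class:

    import warnings

    # 4.2 spelling: still importable in 4.3 through the shim module, but warns.
    with warnings.catch_warnings():
        warnings.simplefilter("ignore", DeprecationWarning)
        from darfix.tasks.blindsourceseparation import BlindSourceSeparation  # noqa: F401

    # 4.3 spelling: the snake_case module holds the actual implementation.
    from darfix.tasks.blind_source_separation import BlindSourceSeparation  # noqa: F811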
@@ -3,13 +3,13 @@ from __future__ import annotations
 import h5py
 from silx.gui import qt
 
-from darfix.core.
-from darfix.core.
-from darfix.core.
-from darfix.core.
-from darfix.core.
-from darfix.core.
-from darfix.core.
+from darfix.core.data_path_finder import DETECTOR_KEYWORD
+from darfix.core.data_path_finder import EXISTING_KEYWORDS
+from darfix.core.data_path_finder import FIRST_SCAN_KEYWORD
+from darfix.core.data_path_finder import LAST_SCAN_KEYWORD
+from darfix.core.data_path_finder import SCAN_KEYWORD
+from darfix.core.data_path_finder import get_first_group
+from darfix.core.data_path_finder import get_last_group
 
 
 class _TreeBuilder:
@@ -10,10 +10,10 @@ from silx.gui import qt
 from silx.gui.dialog.DatasetDialog import DatasetDialog
 from silx.gui.dialog.GroupDialog import GroupDialog
 
-from darfix.core.
-from darfix.core.
-from darfix.core.
-from darfix.core.
+from darfix.core.data_path_finder import DataPathFinder
+from darfix.core.data_path_finder import UnsolvablePatternError
+from darfix.core.data_path_finder import get_first_group
+from darfix.core.data_path_finder import get_last_group
 from darfix.gui.utils.data_path_completer import DataPathLineEditWithCompleter
 from darfix.gui.utils.message import unable_to_search_inside_hdf5_file
 
darfix/gui/weak_beam_widget.py CHANGED

@@ -6,23 +6,16 @@ from silx.gui.colors import Colormap
 from silx.gui.plot import Plot2D
 
 import darfix
+from darfix import dtypes
+from darfix.core.moment_types import MomentType
 from darfix.gui.utils.custom_doublespinbox import createCustomDoubleSpinBox
 
-from ..core.transformation import Transformation
-
 
 class WeakBeamWidget(qt.QMainWindow):
     """
     Widget to recover weak beam to obtain dislocations.
     """
 
-    sigValidate = qt.Signal()
-    """Emit when user validate weak beam (ok pressed)"""
-    sigApplyThreshold = qt.Signal()
-    """Emit when user ask to apply a threshold"""
-    sigNValueChanged = qt.Signal()
-    """Emit when N value has changed"""
-
     def __init__(self, parent=None):
         qt.QMainWindow.__init__(self, parent)
 
@@ -30,9 +23,6 @@ class WeakBeamWidget(qt.QMainWindow):
         layout = qt.QGridLayout()
 
         self._nLE = createCustomDoubleSpinBox()
-        _buttons = qt.QDialogButtonBox(parent=self)
-        self._okB = _buttons.addButton(_buttons.Ok)
-        self._applyThresholdB = _buttons.addButton(_buttons.Apply)
 
         self._plot = Plot2D()
         self._plot.setDefaultColormap(
@@ -42,19 +32,13 @@
             )
         )
         layout.addWidget(
-            qt.QLabel("
+            qt.QLabel("Threshold of X times the standart deviation : "), 0, 0
         )
         layout.addWidget(self._nLE, 0, 1)
         layout.addWidget(self._plot, 1, 0, 1, 2)
-        layout.addWidget(_buttons, 2, 0, 1, 2)
        widget.setLayout(layout)
         self.setCentralWidget(widget)
 
-        # connect signal / slot
-        self._applyThresholdB.clicked.connect(self.sigApplyThreshold)
-        self._okB.clicked.connect(self.sigValidate)
-        self._nLE.editingFinished.connect(self.sigNValueChanged)
-
         # set up
         self.nvalue = 1
 
@@ -66,14 +50,19 @@
     def nvalue(self, nvalue: float):
         self._nLE.setValue(nvalue)
 
-    def
-
-
+    def updateDataset(self, dataset: dtypes.Dataset):
+        imgDataset = dataset.dataset
+        center_of_mass = imgDataset.moments_dims[0][MomentType.COM]
+        transformation = imgDataset.transformation
         self._plot.clear()
         if transformation is None:
-            self._plot.addImage(
+            self._plot.addImage(
+                center_of_mass,
+                xlabel="pixels",
+                ylabel="pixels",
+            )
         else:
-            if
+            if transformation.rotate:
                 center_of_mass = numpy.rot90(center_of_mass, 3)
             self._plot.addImage(
                 center_of_mass,
@@ -82,7 +71,3 @@
                 xlabel=transformation.label,
                 ylabel=transformation.label,
             )
-
-    def setProcessingButtonsEnabled(self, enabled):
-        self._applyThresholdB.setEnabled(enabled)
-        self._okB.setEnabled(enabled)
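The widget's Ok/Apply buttons and their signals are gone; the plot is now refreshed through `updateDataset`, which expects a `dtypes.Dataset` whose moments have already been computed. A minimal, hypothetical usage sketch (`processed_dataset` is a placeholder for an image dataset produced earlier in a workflow):

    from silx.gui import qt

    from darfix import dtypes
    from darfix.gui.weak_beam_widget import WeakBeamWidget

    app = qt.QApplication([])
    widget = WeakBeamWidget()
    widget.nvalue = 1.5  # threshold of 1.5 x the standard deviation
    # `processed_dataset` is assumed to have its moments already computed.
    widget.updateDataset(dtypes.Dataset(dataset=processed_dataset))
    widget.show()
    app.exec_()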
darfix/gui/zsum_widget.py CHANGED

@@ -7,7 +7,7 @@ from silx.gui.plot.StackView import StackViewMainWindow
 import darfix
 
 from ..dtypes import Dataset
-from .
+from .filter_by_dimension import FilterByDimensionWidget
 
 
 class ZSumWidget(qt.QMainWindow):
@@ -38,7 +38,6 @@
 
     def setDataset(self, dataset: Dataset):
         self.dataset = dataset.dataset
-        self.indices = dataset.indices
         self._selector.setDimensions(self.dataset.dims)
         self._sv.setGraphTitle(self.dataset.title)
 
darfix/main.py CHANGED

@@ -1,18 +1,33 @@
 import importlib.metadata
+import os
+import signal
 import sys
 from argparse import ArgumentParser
 
 from silx import config
 
 try:
-    from ewoksorange.canvas.main import arg_parser
     from ewoksorange.canvas.main import main as ewoksorange_main
+    from ewoksorange.gui.canvas.main import arg_parser
 except ImportError as e:
     error_msg = f"ERROR: {e.msg}.\n"
     error_msg += "To use `darfix` command, please use the full installation of darfix:\npip install darfix[full]\n"
     sys.stdout.write(error_msg)
     exit()
 
+__CTRL_C_PRESSED_ONCE = False
+
+
+def __handle_ctrl_c(*args):
+    global __CTRL_C_PRESSED_ONCE
+    if not __CTRL_C_PRESSED_ONCE:
+        __CTRL_C_PRESSED_ONCE = True
+        sys.stdout.write("\nPress CTRL+C again to force kill app.\n")
+    else:
+        # harakiri
+        sys.stdout.write("\nApp killed by user.\n")
+        os.kill(os.getpid(), signal.SIGKILL)
+
 
 def main(argv=None):
 
@@ -43,8 +58,9 @@ def main(argv=None):
     if options.version:
         print(f"Darfix version: {importlib.metadata.version('darfix')}")
         return
-
-
+
+    signal.signal(signal.SIGINT, __handle_ctrl_c)
+
     ewoksorange_main()
 
 
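The `darfix` launcher now installs a two-step SIGINT handler right before starting the Orange canvas: the first Ctrl+C only warns, the second force-kills the process. The same pattern in isolation, with a sleep loop standing in for the Qt event loop:

    import os
    import signal
    import sys
    import time

    _pressed_once = False

    def _handle_sigint(*_args):
        global _pressed_once
        if not _pressed_once:
            _pressed_once = True
            sys.stdout.write("\nPress CTRL+C again to force kill app.\n")
        else:
            os.kill(os.getpid(), signal.SIGKILL)

    signal.signal(signal.SIGINT, _handle_sigint)
    while True:
        time.sleep(1)  # stands in for the blocking ewoksorange_main() call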
darfix/processing/rocking_curves.py CHANGED

@@ -265,7 +265,7 @@ def _fit_xd_rocking_curve(
     :param x_values: 2D array: K dimensions, N points (float64)
 
     :return: Tuple with:
-        - the fitted gaussian : 1D array of N points (float64)
+        - the fitted gaussian : 1D array of N points (float64) (Only non-zero value of original `y_values` are fitted)
         - fit parameters : 1D array of length len_maps
     """
     if method is None:
@@ -277,17 +277,14 @@ def _fit_xd_rocking_curve(
 
     ptp_y = numpy.ptp(y_values)
 
-
-    y_not_zero_mask = y_values > 0
+    y_zeros = numpy.zeros_like(y_values)
 
     if ptp_y <= thresh:
         # Ptp under threshold
-        return
+        return y_zeros, numpy.full(len_maps, numpy.nan)
 
     if len(y_values) < len_maps:
-        return
-
-    ptp_y = numpy.ptp(y_values)
+        return y_zeros, numpy.full(len_maps, numpy.nan)
 
     p0 = p0_function(com, fwhm, ptp_y)
 
@@ -300,6 +297,7 @@ def _fit_xd_rocking_curve(
         max_y=vmax,
     )
 
+    y_not_zero_mask = y_values > 0
     x_values_masked = x_values[:, y_not_zero_mask]
 
     try:
@@ -316,11 +314,12 @@ def _fit_xd_rocking_curve(
             f"Encountered the following error while fitting rocking curves: '{e}'"
         )
         _logger.debug(f"p0 : \n{p0}\nbounds : \n{bounds}")
-        return
+        return y_zeros, numpy.full(len_maps, numpy.nan)
 
-
+    y_fitted = y_zeros
+    y_fitted[y_not_zero_mask] = gaussian_function(x_values_masked, *fit_params)
 
-    return
+    return y_fitted, fit_params
 
 
 def fit_1d_rocking_curve(
@@ -334,7 +333,7 @@ def fit_1d_rocking_curve(
     :param x_values: 1D array of N float64
 
     :return: Tuple with:
-        - the fitted gaussian : 1D array of float64
+        - the fitted gaussian : 1D array of float64 (Only non-zero value of original `y_values` are fitted)
         - fit parameters : 1D array of length len(MAPS_1D)
     """
     # _fit_xd_rocking_curve expect array with shape (N_dims, N_points)
@@ -367,7 +366,7 @@ def fit_2d_rocking_curve(
     :param x_values: 2D array (2,N) of float64
 
     :return: Tuple with:
-        - the fitted gaussian : 1D array of N float64
+        - the fitted gaussian : 1D array of N float64 (Only non-zero value of original `y_values` are fitted)
         - fit parameters : 1D array of length len(MAPS_2D)
     """
     return _fit_xd_rocking_curve(
@@ -393,7 +392,7 @@ def fit_3d_rocking_curve(
     :param x_values: 2D array (3,N) of float64
 
     Return: Tuple with:
-        - the fitted gaussian : 1D array of N float64
+        - the fitted gaussian : 1D array of N float64 (Only non-zero value of original `y_values` are fitted)
         - fit parameters : 1D array of length len(MAPS_3D)
     """
     return _fit_xd_rocking_curve(
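The fit helpers no longer return `None` on failure: a flat curve, too few points or an optimizer error now yield an all-zeros fitted curve together with all-NaN fit parameters, so callers always receive a tuple. A sketch of checking the new contract with `fit_1d_rocking_curve` (the exact parameter names and order are not shown in this diff, so the `(y_values, x_values)` call below is an assumption):

    import numpy

    from darfix.processing.rocking_curves import fit_1d_rocking_curve

    x_values = numpy.linspace(-1.0, 1.0, 50)
    y_values = numpy.exp(-(x_values**2) / 0.1)  # synthetic rocking curve
    # Assumed argument order; check the actual signature in the module.
    y_fitted, fit_params = fit_1d_rocking_curve(y_values, x_values)
    if numpy.all(numpy.isnan(fit_params)):
        print("fit failed: flat curve, too few points, or optimizer error")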
darfix/tasks/binning.py CHANGED

@@ -1,11 +1,8 @@
 from __future__ import annotations
 
-import os
-
 import numpy
+import tqdm
 from ewokscore import Task
-from ewokscore.missing_data import MISSING_DATA
-from ewokscore.missing_data import MissingData
 from ewokscore.model import BaseInputModel
 from pydantic import ConfigDict
 from skimage.transform import rescale
@@ -19,8 +16,6 @@ class Inputs(BaseInputModel):
     """ Input dataset containing a stack of images """
     scale: float
     """Factor to rescale images of the dataset."""
-    output_dir: str | MissingData = MISSING_DATA
-    """ Output directory where the binned data will be saved. If not set, use the input dataset directory."""
 
 
 class Binning(
@@ -34,33 +29,27 @@ class Binning(
         input_dataset: Dataset = self.inputs.dataset
         dataset = input_dataset.dataset
 
-        if len(dataset.data.shape) >= 4:
-            # TODO: Is this expected ? Or should it be fixed for higher dimensionality ?
-            raise ValueError("Binning cannot only be applied to 4D datasets or higher")
-
         scale = self.inputs.scale
-        output_dir = self.get_input_value("output_dir", dataset.dir)
-        os.makedirs(output_dir, exist_ok=True)
 
         # rescale data
         new_data = None
-        for i, image in enumerate(
+        for i, image in enumerate(
+            tqdm.tqdm(dataset.as_array3d(), desc="Binning", total=dataset.nframes)
+        ):
             simage = rescale(image, scale, anti_aliasing=True, preserve_range=True)
             if new_data is None:
                 new_data = numpy.empty(
-                    (
+                    (dataset.nframes,) + simage.shape, dtype=dataset.data.dtype
                 )
             new_data[i] = simage
             if self.cancelled:
                 # if cancelled then self.outputs.dataset will be MISSING_DATA
                 return
 
-        new_dataset = dataset.copy(
+        new_dataset = dataset.copy(new_data=new_data)
 
         self.outputs.dataset = Dataset(
             dataset=new_dataset,
-            indices=input_dataset.indices,
-            bg_indices=input_dataset.bg_indices,
             bg_dataset=input_dataset.bg_dataset,
         )
 
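The `output_dir` input and the dimensionality check are gone; `Binning` now streams the frames through `as_array3d()` with a progress bar and keeps the rescaled stack in memory. A minimal sketch of running the task with ewokscore (`input_dataset` is a placeholder for a `darfix.dtypes.Dataset` produced by an upstream data-selection task):

    from darfix.tasks.binning import Binning

    task = Binning(inputs={"dataset": input_dataset, "scale": 0.5})
    task.execute()
    binned = task.outputs.dataset  # darfix.dtypes.Dataset with rescaled frames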
darfix/tasks/blind_source_separation.py ADDED

@@ -0,0 +1,121 @@
+from __future__ import annotations
+
+import os
+from enum import Enum as _Enum
+
+from ewokscore import Task
+from ewokscore.missing_data import MISSING_DATA
+from ewokscore.missing_data import MissingData
+from ewokscore.model import BaseInputModel
+from pydantic import ConfigDict
+from pydantic import Field
+
+from darfix import dtypes
+from darfix.io.utils import write_components
+
+
+class Method(_Enum):
+    """
+    Different blind source separation approaches that can be applied
+    """
+
+    PCA = "PCA"
+
+    NICA = "NICA"
+
+    NMF = "NMF"
+
+    NICA_NMF = "NICA_NMF"
+
+    @staticmethod
+    def _descriptions() -> dict:
+        return {
+            Method.PCA: (
+                "The process of computing the principal components \n"
+                "and using them to perform a change of basis on the data"
+            ),
+            Method.NICA: "Find components independent from each other and non-negative",
+            Method.NMF: (
+                "Non-negative matrix factorization factorizes the data matrix into \n"
+                "two matrices, with the property that all three matrices have no negative elements"
+            ),
+            Method.NICA_NMF: "Apply Non-negative ICA followed by NMF",
+        }
+
+    @staticmethod
+    def get_description(method) -> str:
+        method = Method(method)
+        if method in Method._descriptions():
+            return Method._descriptions()[method]
+        else:
+            raise NotImplementedError
+
+
+class Inputs(BaseInputModel):
+    model_config = ConfigDict(use_attribute_docstrings=True)
+    dataset: dtypes.Dataset
+    """ Input dataset containing a stack of images """
+    method: Method
+    "Method to use for blind source separation"
+    n_comp: int | MissingData = Field(
+        default=MISSING_DATA, description="Number of components to extract"
+    )
+    save: bool | MissingData = MISSING_DATA
+    processing_order: int | MissingData = MISSING_DATA
+
+
+class BlindSourceSeparation(
+    Task,
+    input_model=Inputs,
+    output_names=["dataset", "comp", "W"],
+):
+    """Perform blind source separation on a Darfix dataset.
+    Blind source separation (BSS) comprises all techniques that try to decouple a set of source signals from a set of mixed signals with unknown (or very little) information.
+
+    Supported methods are PCA, NICA, NMF and NICA_NMF.
+
+    Related article : https://pmc.ncbi.nlm.nih.gov/articles/PMC10161887/#sec3.3.3
+
+    """
+
+    def run(self):
+        if not isinstance(self.inputs.dataset, dtypes.Dataset):
+            raise TypeError(
+                f"'dataset' input should be an instance of Dataset. Got {type(self.inputs.dataset)}."
+            )
+        dataset = self.inputs.dataset.dataset
+        bg_dataset = self.inputs.dataset.bg_dataset
+
+        n_comp = self.get_input_value("n_comp", None)
+        method = Method(self.inputs.method)
+        if method == Method.PCA:
+            comp, W = dataset.pca(n_comp)
+        elif method == Method.NICA:
+            comp, W = dataset.nica(n_comp)
+        elif method == Method.NMF:
+            comp, W = dataset.nmf(n_comp)
+        elif method == Method.NICA_NMF:
+            comp, W = dataset.nica_nmf(n_comp)
+        else:
+            raise ValueError("BSS method not managed")
+        n_comp = comp.shape[0]
+        shape = dataset.frame_shape
+        comp = comp.reshape(n_comp, shape[0], shape[1])
+
+        if self.get_input_value("save", False):
+            write_components(
+                h5file=os.path.join(dataset.dir, "components.h5"),
+                entry="entry",
+                dimensions=dataset.dims.to_dict(),
+                W=W,
+                data=comp,
+                values=dataset.get_dimensions_values(),
+                processing_order=self.get_input_value("processing_order", 0),
+            )
+        self.outputs.dataset = dtypes.Dataset(
+            dataset=dataset,
+            bg_dataset=bg_dataset,
+        )
+
+        self.outputs.W = W
+        self.outputs.comp = comp
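The implementation moved here from darfix/tasks/blindsourceseparation.py, dropping the old `indices`/`bg_indices` handling along the way; the task still exposes the separated components and the mixing matrix as outputs. A minimal sketch, with `input_dataset` again a placeholder for an upstream `darfix.dtypes.Dataset`:

    from darfix.tasks.blind_source_separation import BlindSourceSeparation, Method

    print(Method.get_description(Method.NMF))

    task = BlindSourceSeparation(
        inputs={"dataset": input_dataset, "method": Method.NICA_NMF, "n_comp": 3}
    )
    task.execute()
    components = task.outputs.comp  # shape (n_comp, frame_height, frame_width)
    mixing = task.outputs.W         # per-frame mixing weights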
darfix/tasks/blindsourceseparation.py CHANGED

@@ -1,133 +1,10 @@
[The previous 133-line implementation (the Method enum, the Inputs model and the BlindSourceSeparation task, including its indices/bg_indices handling) is removed from this module; it now lives in darfix/tasks/blind_source_separation.py as shown above. Only a deprecation shim remains:]
+import warnings
+
+from .blind_source_separation import BlindSourceSeparation  # noqa: F401
+
+warnings.warn(
+    f"The '{__name__}' module is deprecated and will be removed in a future release. "
+    "Please replace module name `blindsourceseparation` by `blind_source_separation`",
+    DeprecationWarning,
+    stacklevel=2,
+)
darfix/tasks/copy.py CHANGED

@@ -24,7 +24,5 @@ class DataCopy(
             raise dtypes.DatasetTypeError(dataset)
         self.outputs.dataset = dtypes.Dataset(
             dataset=copy.deepcopy(dataset.dataset),
-            indices=copy.deepcopy(dataset.indices),
-            bg_indices=copy.deepcopy(dataset.bg_indices),
             bg_dataset=copy.deepcopy(dataset.bg_dataset),
         )
darfix/tasks/data_partition.py ADDED

@@ -0,0 +1,39 @@
+from __future__ import annotations
+
+import warnings
+
+from ewokscore import Task
+from ewokscore.missing_data import MISSING_DATA
+from ewokscore.missing_data import MissingData
+from ewokscore.model import BaseInputModel
+from pydantic import ConfigDict
+
+from darfix import dtypes
+
+
+class Inputs(BaseInputModel):
+    model_config = ConfigDict(use_attribute_docstrings=True)
+    dataset: dtypes.Dataset
+    bins: int | MissingData = MISSING_DATA
+    filter_bottom_bin_idx: int | MissingData = MISSING_DATA
+    filter_top_bin_idx: int | MissingData = MISSING_DATA
+
+
+class DataPartition(
+    Task,
+    input_model=Inputs,
+    output_names=["dataset"],
+):
+    """
+    :deprecated: Deprecated task to be removed in 5.0.
+    """
+
+    def run(self):
+        warnings.warn(
+            "`DataPartition` is a legacy task and is poorly tested in Darfix. Before 4.3, it might induce weird behaviour in the next tasks of the workflow."
+            "In 4.3, the low intensity filtering is deactivate and the task does not modify the dataset. This just does nothing in order to not break compatibility with existing workflows."
+            "In 5.0, the task will be removed.",
+            DeprecationWarning,
+        )
+        # Just a pass through
+        self.outputs.dataset = self.inputs.dataset