darfix 4.2.0-py3-none-any.whl → 4.3.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- darfix/core/data_selection.py +11 -2
- darfix/core/dataset.py +72 -157
- darfix/core/grainplot.py +44 -56
- darfix/core/{imageStack.py → image_stack.py} +9 -15
- darfix/core/moment_types.py +6 -0
- darfix/core/{noiseremoval.py → noise_removal.py} +25 -24
- darfix/core/noise_removal_type.py +14 -0
- darfix/core/positioners.py +6 -0
- darfix/core/rocking_curves.py +6 -3
- darfix/core/rocking_curves_map.py +1 -1
- darfix/core/{shiftcorrection.py → shift_correction.py} +1 -2
- darfix/core/state_of_operation.py +7 -46
- darfix/core/utils.py +0 -39
- darfix/dtypes.py +1 -9
- darfix/gui/{binningWidget.py → binning_widget.py} +2 -29
- darfix/gui/{blindSourceSeparationWidget.py → blind_source_separation_widget.py} +4 -16
- darfix/gui/{chooseDimensions.py → choose_dimensions.py} +1 -1
- darfix/gui/concatenate_scans.py +4 -4
- darfix/gui/data_selection/{hdf5_data_selection_widgets.py → hdf5_dataset_selection_widget.py} +3 -56
- darfix/gui/data_selection/line_edits.py +54 -8
- darfix/gui/data_selection/scan_selection_widgets.py +24 -11
- darfix/gui/data_selection/utils.py +11 -0
- darfix/gui/data_selection/{WorkingDirSelectionWidget.py → working_dir_selection_widget.py} +15 -14
- darfix/gui/{dimensionsWidget.py → dimensions_widget.py} +1 -1
- darfix/gui/{displayComponentsWidget.py → display_components_widget.py} +1 -1
- darfix/gui/{filterByDimension.py → filter_by_dimension.py} +1 -1
- darfix/gui/{grainplot/dimensionRangeSlider2D.py → grain_plot/dimension_range_slider_2d.py} +2 -2
- darfix/gui/{grainplot/grainPlotWidget.py → grain_plot/grain_plot_widget.py} +1 -1
- darfix/gui/{grainplot/mosaicityWidget.py → grain_plot/mosaicity_widget.py} +21 -23
- darfix/gui/{magnificationWidget.py → magnification_widget.py} +1 -1
- darfix/gui/{noiseremoval → noise_removal}/noise_removal_widget.py +12 -16
- darfix/gui/{noiseremoval → noise_removal}/operation_list_widget.py +2 -2
- darfix/gui/{noiseremoval → noise_removal}/parameters_widget.py +6 -6
- darfix/gui/{PCAWidget.py → pca_widget.py} +2 -4
- darfix/gui/{projectionWidget.py → projection_widget.py} +1 -1
- darfix/gui/rocking_curves/{rockingCurvesPlot.py → rocking_curves_plot.py} +13 -13
- darfix/gui/rocking_curves/{rockingCurvesWidget.py → rocking_curves_widget.py} +10 -18
- darfix/gui/{roiSelectionWidget.py → roi_selection_widget.py} +9 -101
- darfix/gui/{shiftcorrection/shiftCorrectionWidget.py → shift_correction/shift_correction_widget.py} +4 -7
- darfix/gui/utils/data_path_completer.py +7 -7
- darfix/gui/utils/data_path_selection.py +4 -4
- darfix/gui/utils/{rangeSlider.py → range_slider.py} +1 -1
- darfix/gui/{weakBeamWidget.py → weak_beam_widget.py} +13 -28
- darfix/gui/{zSumWidget.py → zsum_widget.py} +1 -2
- darfix/main.py +19 -3
- darfix/processing/rocking_curves.py +12 -13
- darfix/tasks/binning.py +6 -17
- darfix/tasks/blind_source_separation.py +121 -0
- darfix/tasks/blindsourceseparation.py +8 -131
- darfix/tasks/copy.py +0 -2
- darfix/tasks/data_partition.py +39 -0
- darfix/tasks/datapartition.py +8 -50
- darfix/tasks/dimension_definition.py +197 -0
- darfix/tasks/dimensiondefinition.py +8 -197
- darfix/tasks/grain_plot.py +93 -0
- darfix/tasks/grainplot.py +8 -103
- darfix/tasks/hdf5_data_selection.py +5 -11
- darfix/tasks/hdf5_scans_concatenation.py +4 -4
- darfix/tasks/noise_removal.py +88 -0
- darfix/tasks/noiseremoval.py +8 -86
- darfix/tasks/pca.py +1 -3
- darfix/tasks/projection.py +1 -6
- darfix/tasks/rocking_curves.py +10 -5
- darfix/tasks/roi.py +0 -2
- darfix/tasks/shift_correction.py +45 -0
- darfix/tasks/shiftcorrection.py +8 -43
- darfix/tasks/transformation.py +0 -2
- darfix/tasks/weak_beam.py +71 -0
- darfix/tasks/weakbeam.py +8 -67
- darfix/tasks/zsum.py +1 -1
- darfix/tests/conftest.py +1 -1
- darfix/tests/gui/test_data_path_completer.py +4 -4
- darfix/tests/gui/test_dimension_range_slider_2d.py +2 -2
- darfix/tests/gui/test_range_slider_with_spinboxes.py +1 -1
- darfix/tests/orange/test_ewoks.py +13 -9
- darfix/tests/orange/widgets/test_hdf5_data_selection.py +93 -0
- darfix/tests/tasks/test_data_copy.py +0 -2
- darfix/tests/tasks/{test_dimensiondefinition.py → test_dimension_definition.py} +1 -1
- darfix/tests/tasks/test_weak_beam.py +9 -0
- darfix/tests/test_components_matching.py +2 -2
- darfix/tests/test_dataset.py +2 -28
- darfix/tests/test_dimension.py +1 -1
- darfix/tests/test_generate_grain_maps_nxdict.py +4 -5
- darfix/tests/test_image_operations.py +4 -4
- darfix/tests/test_image_registration.py +17 -17
- darfix/tests/test_image_stack.py +2 -13
- darfix/tests/test_mask.py +1 -1
- darfix/tests/test_moments.py +2 -2
- darfix/tests/test_rocking_curves.py +1 -3
- darfix/tests/test_shift.py +7 -7
- darfix/tests/test_workflow.py +4 -4
- darfix/tests/test_zsum.py +3 -6
- {darfix-4.2.0.dist-info → darfix-4.3.0.dist-info}/METADATA +5 -3
- {darfix-4.2.0.dist-info → darfix-4.3.0.dist-info}/RECORD +141 -135
- orangecontrib/darfix/widgets/__init__.py +10 -1
- orangecontrib/darfix/widgets/binning.py +3 -3
- orangecontrib/darfix/widgets/blindsourceseparation.py +4 -6
- orangecontrib/darfix/widgets/concatenateHDF5.py +1 -1
- orangecontrib/darfix/widgets/datacopy.py +1 -1
- orangecontrib/darfix/widgets/datapartition.py +7 -102
- orangecontrib/darfix/widgets/{datasetWidgetBase.py → dataset_widget_base.py} +17 -5
- orangecontrib/darfix/widgets/dimensions.py +6 -6
- orangecontrib/darfix/widgets/grainplot.py +3 -3
- orangecontrib/darfix/widgets/hdf5dataselection.py +34 -14
- orangecontrib/darfix/widgets/metadata.py +2 -2
- orangecontrib/darfix/widgets/noiseremoval.py +4 -4
- orangecontrib/darfix/widgets/{operationWidgetBase.py → operation_widget_base.py} +2 -2
- orangecontrib/darfix/widgets/pca.py +2 -2
- orangecontrib/darfix/widgets/projection.py +2 -2
- orangecontrib/darfix/widgets/rockingcurves.py +5 -2
- orangecontrib/darfix/widgets/roiselection.py +24 -106
- orangecontrib/darfix/widgets/rsmhistogram.py +2 -2
- orangecontrib/darfix/widgets/shiftcorrection.py +3 -3
- orangecontrib/darfix/widgets/transformation.py +4 -4
- orangecontrib/darfix/widgets/weakbeam.py +20 -103
- orangecontrib/darfix/widgets/zsum.py +3 -5
- darfix/gui/dataPartitionWidget.py +0 -167
- darfix/gui/data_selection/DataSelectionBase.py +0 -59
- darfix/tests/tasks/test_datapartition.py +0 -52
- /darfix/core/{componentsMatching.py → components_matching.py} +0 -0
- /darfix/core/{datapathfinder.py → data_path_finder.py} +0 -0
- /darfix/core/{imageRegistration.py → image_registration.py} +0 -0
- /darfix/gui/{grainplot → grain_plot}/__init__.py +0 -0
- /darfix/gui/{grainplot → grain_plot}/_oridist_toolbar_buttons.py +0 -0
- /darfix/gui/{grainplot → grain_plot}/flashlight.py +0 -0
- /darfix/gui/{grainplot → grain_plot}/flashlight_mode_action.py +0 -0
- /darfix/gui/{grainplot → grain_plot}/oridist_toolbar.py +0 -0
- /darfix/gui/{grainplot → grain_plot}/utils.py +0 -0
- /darfix/gui/{metadataWidget.py → metadata_widget.py} +0 -0
- /darfix/gui/{operationProcess.py → parallel/operation_process.py} +0 -0
- /darfix/gui/{operationThread.py → parallel/operation_thread.py} +0 -0
- /darfix/gui/rocking_curves/{fitComboBox.py → fit_combobox.py} +0 -0
- /darfix/gui/{roiLimitsToolbar.py → roi_limits_toolbar.py} +0 -0
- /darfix/gui/{rsmHistogramWidget.py → rsm_histogram_widget.py} +0 -0
- /darfix/gui/{rsmWidget.py → rsm_widget.py} +0 -0
- /darfix/gui/{shiftcorrection → shift_correction}/__init__.py +0 -0
- /darfix/gui/{shiftcorrection/shiftInput.py → shift_correction/shift_input.py} +0 -0
- /darfix/gui/utils/{standardButtonBox.py → standard_buttonbox.py} +0 -0
- /darfix/processing/{imageOperations.py → image_operations.py} +0 -0
- /darfix/tests/{test_datapathfinder.py → test_data_path_finder.py} +0 -0
- {darfix-4.2.0.dist-info → darfix-4.3.0.dist-info}/WHEEL +0 -0
- {darfix-4.2.0.dist-info → darfix-4.3.0.dist-info}/entry_points.txt +0 -0
- {darfix-4.2.0.dist-info → darfix-4.3.0.dist-info}/licenses/LICENSE +0 -0
- {darfix-4.2.0.dist-info → darfix-4.3.0.dist-info}/top_level.txt +0 -0
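
Most of the renames listed above move camelCase modules to snake_case names, so downstream imports need to be updated. A minimal before/after sketch based on the rename list; for these two modules the imported symbols are unchanged, as the darfix/core/dataset.py diff below shows:

```python
# darfix 4.2.0
from darfix.core.imageRegistration import shift_detection
from darfix.core.imageStack import ImageStack

# darfix 4.3.0
from darfix.core.image_registration import shift_detection
from darfix.core.image_stack import ImageStack
```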
darfix/core/data_selection.py
CHANGED
@@ -4,6 +4,7 @@ import urllib.request
 from typing import Optional
 from typing import Union
 
+from esrf_pathlib import ESRFPath
 from silx.io.url import DataUrl
 
 from darfix.core.dataset import ImageDataset
@@ -26,7 +27,6 @@ def load_process_data(
     :param detector_url: detector_url to be loaded.
     :param metadata_url: path to the scan metadata for HDF5 containing positioner information in order to load metadata for non-edf files
     """
-    indices = li_indices = None
     root_dir_specified = bool(root_dir)
 
     if isinstance(detector_url, DataUrl):
@@ -87,9 +87,18 @@ def load_process_data(
 
     assert dataset.data is not None and dataset.data.size > 0, "No data was loaded!"
 
-    return dataset,
+    return dataset, bg_dataset
 
 
 def _get_root_dir(filename: str) -> str:
     url = urllib.parse.urlparse(filename, scheme="file")
     return os.path.dirname(urllib.request.url2pathname(url.path))
+
+
+def get_default_output_directory(raw_data_file: str) -> str:
+    esrf_raw_data_file = ESRFPath(raw_data_file)
+    try:
+        return str(esrf_raw_data_file.processed_dataset_path)
+    except AttributeError:
+        # Not an ESRF path : Default directory is cwd.
+        return os.getcwd()
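
The new `get_default_output_directory` helper added above derives a default processing directory from an ESRF-style raw-data path via `ESRFPath.processed_dataset_path`, and falls back to the current working directory for any other path. A minimal usage sketch; the example path is hypothetical and the exact PROCESSED_DATA mapping depends on `esrf_pathlib`:

```python
from darfix.core.data_selection import get_default_output_directory

# Hypothetical ESRF-style raw data file; real beamline layouts may differ.
raw = "/data/visitor/ma1234/id06/20240101/RAW_DATA/sample/sample_0001/sample_0001.h5"
print(get_default_output_directory(raw))        # e.g. the matching PROCESSED_DATA directory
print(get_default_output_directory("scan.h5"))  # not an ESRF path -> current working directory
```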
darfix/core/dataset.py
CHANGED
@@ -2,6 +2,7 @@ from __future__ import annotations
 
 import logging
 import os
+import threading
 import warnings
 from pathlib import Path
 from typing import Literal
@@ -22,9 +23,9 @@ from sklearn.exceptions import ConvergenceWarning
 from darfix import __version__
 from darfix.core.dimension import AcquisitionDims
 from darfix.core.dimension import find_dimensions_from_metadata
-from darfix.core.imageRegistration import apply_opencv_shift
-from darfix.core.imageRegistration import shift_detection
-from darfix.core.imageStack import ImageStack
+from darfix.core.image_registration import apply_opencv_shift
+from darfix.core.image_registration import shift_detection
+from darfix.core.image_stack import ImageStack
 from darfix.core.mapping import calculate_RSM_histogram
 from darfix.core.mapping import compute_magnification
 from darfix.core.mapping import compute_moments
@@ -33,15 +34,14 @@ from darfix.core.positioners import Positioners
 from darfix.core.rocking_curves import fit_rocking_curve_parallel
 from darfix.core.roi import apply_2D_ROI
 from darfix.core.roi import apply_3D_ROI
-from darfix.core.state_of_operation import Operation
-from darfix.core.state_of_operation import StateOfOperations
 from darfix.core.utils import NoDimensionsError
 from darfix.core.utils import TooManyDimensionsForRockingCurvesError
 from darfix.decomposition.nica import NICA
 from darfix.io import utils as io_utils_legacy
-from darfix.processing.imageOperations import threshold_removal
+from darfix.processing.image_operations import threshold_removal
 
 from ..math import Vector3D
+from .moment_types import MomentsPerDimension
 from .moment_types import MomentType
 from .transformation import Transformation
 
@@ -99,7 +99,7 @@ class ImageDataset(ImageStack):
         title: str | None = None,
         metadata_url: DataUrl | str | None = None,
         positioners: Positioners | None = None,
-        moments:
+        moments: MomentsPerDimension = {},
     ):
 
         if raw_data is None:
@@ -108,7 +108,6 @@ class ImageDataset(ImageStack):
 
         self._frames_intensity = []
         self.moments_dims = moments
-        self.state_of_operations = StateOfOperations()
         self._dir = _dir
         self._transformation = transformation
         self._title = title or ""
@@ -135,18 +134,6 @@ class ImageDataset(ImageStack):
             # No metadata in Dark dataset for instance
             self._positioners = None
 
-    def stop_operation(self, operation):
-        """
-        Method used for cases where threads are created to apply functions to the dataset.
-        If method is called, the flag concerning the stop is set to 0 so that if the concerned
-        operation is running in another thread it knows to stop.
-
-        :param int operation: operation to stop
-        :type int: Union[int, `Operation`]
-        """
-        if self.state_of_operations.is_running(operation):
-            self.state_of_operations.stop(operation)
-
     @property
     def transformation(self) -> Transformation:
         return self._transformation
@@ -175,6 +162,10 @@ class ImageDataset(ImageStack):
             return {}
         return self._positioners.data
 
+    @property
+    def positioners(self) -> Positioners:
+        return self._positioners
+
     def compute_frames_intensity(self, kernel=(3, 3), sigma=20):
         """
         Returns for every image a number representing its intensity. This number
@@ -183,20 +174,17 @@ class ImageDataset(ImageStack):
         _logger.info("Computing intensity per frame")
         io_utils_legacy.advancement_display(0, self.nframes, "Computing intensity")
         frames_intensity = []
-        [... 11 removed lines elided in the rendered diff ...]
-            )
-        self._frames_intensity = frames_intensity
-        return frames_intensity
+        for i in range(self.nframes):
+            import cv2
+
+            frames_intensity += [
+                cv2.GaussianBlur(self.as_array3d()[i], kernel, sigma).var()
+            ]
+            io_utils_legacy.advancement_display(
+                i + 1, self.nframes, "Computing intensity"
+            )
+        self._frames_intensity = frames_intensity
+        return frames_intensity
 
     def partition_by_intensity(
         self,
@@ -277,8 +265,8 @@ class ImageDataset(ImageStack):
         )
         self.__dims = _dims
 
-    def zsum(self,
-        data = self.get_filtered_data(
+    def zsum(self, dimension=None) -> numpy.ndarray:
+        data = self.get_filtered_data(dimension)
         return data.sum(axis=0)
 
     def reshape_data(self):
@@ -307,31 +295,27 @@ class ImageDataset(ImageStack):
         """
         self.__dims = find_dimensions_from_metadata(self.metadata_dict, tolerance)
 
-    def get_metadata_values(self, key: str
+    def get_metadata_values(self, key: str) -> numpy.ndarray:
         if key not in self.metadata_dict:
             # The key does not exist -> return an array of the desired size and with NaN value
             return numpy.full(self.nframes, numpy.nan)
-
-            return self.metadata_dict[key]
-        return self.metadata_dict[key][indices]
+        return self.metadata_dict[key]
 
-    def get_dimensions_values(self
+    def get_dimensions_values(self):
        """
         Returns all the metadata values of the dimensions.
         The values are assumed to be numbers.
 
         :returns: array_like
         """
-        if not self._dimensions_values
+        if not self._dimensions_values:
             for dimension in self.__dims.values():
                 self._dimensions_values[dimension.name] = self.get_metadata_values(
-                    key=dimension.name
+                    key=dimension.name
                 )
         return self._dimensions_values
 
-    def apply_roi(
-        self, origin=None, size=None, center=None, indices=None, roi_dir=None
-    ):
+    def apply_roi(self, origin=None, size=None, center=None, roi_dir=None):
         """
         Applies a region of interest to the data.
 
@@ -341,14 +325,9 @@ class ImageDataset(ImageStack):
         :type origin: Union[2d vector, None]
         :type center: Union[2d vector, None]
         :type size: Union[2d vector, None]
-        :param indices: Indices of the images to apply background subtraction.
-            If None, the roi is applied to all the data.
-        :type indices: Union[None, array_like]
         :param roi_dir: Directory path for the new dataset
         :type roi_dir: str
         :returns: dataset with data with roi applied.
-            Note: To preserve consistence of shape between images, if `indices`
-            is not None, only the data modified is returned.
         :rtype: Dataset
         """
 
@@ -383,16 +362,14 @@ class ImageDataset(ImageStack):
         return self.data.sum(axis_array)
 
     def find_shift(
-        self, selected_axis: int | None = None, steps=50
+        self, selected_axis: int | None = None, steps=50
     ) -> tuple[float, float]:
         """
         Find shift of the data or part of it.
 
         :param int selected_axis: specify one motor (axis). The method try to find shift along this axis.
-        :param array_like indices: Boolean index list with True in the images to apply the shift to.
         :returns: tuple with shift x and y
         """
-        # TODO : indices are not used. Maybe it is possible to use it for performance or maybe it is better to simply remove it.
         data = (
             self.z_sum_along_axis(selected_axis)
             if selected_axis is not None
@@ -406,7 +383,6 @@ class ImageDataset(ImageStack):
         shift: tuple[float, float],
         axis: int | None = None,
         shift_approach: str | None = None,
-        indices=None,
     ):
         """
         Apply shift of the data or part of it and save new data into disk.
@@ -414,7 +390,6 @@ class ImageDataset(ImageStack):
         :param array_like shift: Shift per frame.
         :param int axis: Select one axis (motor) where to apply the shift. If None, the shift id not applied to one axis but to the whole stack.
         :param Literal['fft', 'linear'] shift_approach: Method to use to apply the shift.
-        :param array_like indices: Boolean index list with True in the images to apply the shift to.
             If None, the hot pixel removal is applied to all the data.
         """
         assert len(shift) > 0, "Shift list can't be empty"
@@ -445,9 +420,6 @@ class ImageDataset(ImageStack):
             if numpy.isclose(shift[0], 0) and numpy.isclose(shift[1], 0):
                 # No need to apply a (0,0) shift
                 continue
-            if indices is not None and img_index not in indices:
-                # Not really optimized but the simplest way i found to take indices into account.
-                continue
             data[img_index] = apply_opencv_shift(
                 data[img_index],
                 shift,
@@ -458,7 +430,6 @@ class ImageDataset(ImageStack):
         self,
         selected_axis: int | None = None,
         steps=100,
-        indices=None,
         shift_approach: str = "linear",
     ):
         """
@@ -467,16 +438,17 @@ class ImageDataset(ImageStack):
         :param int selected_axis: specify one motor (axis). The method try to find shift along this axis.
         :param float h_step: See `core.imageRegistration.shift_detection`
         :param Union['fft', 'linear'] shift_approach: Method to use to apply the shift.
-        :param array_like indices: Indices of the images to find and apply the shift to.
         :param Literal['fft', 'linear'] shift_approach: Method to use to apply the shift.
         """
-        shift = self.find_shift(selected_axis, steps
-        self.apply_shift(
-            shift, selected_axis, indices=indices, shift_approach=shift_approach
-        )
+        shift = self.find_shift(selected_axis, steps)
+        self.apply_shift(shift, selected_axis, shift_approach=shift_approach)
 
     def _waterfall_nmf(
-        self,
+        self,
+        num_components,
+        iterations,
+        vstep=None,
+        hstep=None,
     ):
         """
         This method is used as a way to improve the speed of convergence of
@@ -492,7 +464,6 @@ class ImageDataset(ImageStack):
         :param int num_components: Number of components to find.
         :param array_like iterations: Array with number of iterations per step of the waterfall.
             The size of the array sets the size of the waterfall.
-        :param Union[None, array_like] indices: If not None, apply method only to indices of data.
         """
 
         from skimage.transform import resize
@@ -523,7 +494,7 @@ class ImageDataset(ImageStack):
 
         return H, W
 
-    def pca(self, num_components=None,
+    def pca(self, num_components=None, return_vals=False):
         """
         Compute Principal Component Analysis on the data.
         The method, first converts, if not already, the data into an hdf5 file object
@@ -533,8 +504,6 @@ class ImageDataset(ImageStack):
             If None, it uses the minimum between the number of images and the
             number of pixels.
         :type num_components: Union[None, int]
-        :param indices: If not None, apply method only to indices of data, defaults to None
-        :type indices: Union[None, array_like], optional
         :param return_vals: If True, returns only the singular values of PCA, else returns
             the components and the mixing matrix, defaults to False
         :type return_vals: bool, optional
@@ -546,7 +515,7 @@ class ImageDataset(ImageStack):
 
         model = decomposition.PCA(n_components=num_components)
 
-        W = model.fit_transform(self.as_array2d(
+        W = model.fit_transform(self.as_array2d())
 
         H, vals, W = model.components_, model.singular_values_, W
 
@@ -558,7 +527,6 @@ class ImageDataset(ImageStack):
         chunksize=None,
         num_iter=500,
         error_step=None,
-        indices=None,
     ):
         """
         Compute Non-negative Independent Component Analysis on the data.
@@ -572,8 +540,6 @@ class ImageDataset(ImageStack):
         :type num_iter: int, optional
         :param error_step: If not None, find the error every error_step and compares it
             to check for convergence. TODO: not able for huge datasets.
-        :param indices: If not None, apply method only to indices of data, defaults to None
-        :type indices: Union[None, array_like], optional
 
         :return: (H, W): The components matrix and the mixing matrix.
         """
@@ -582,7 +548,6 @@ class ImageDataset(ImageStack):
             self.as_array2d(),
             num_components,
             chunksize,
-            indices=indices,
         )
         model.fit_transform(max_iter=num_iter, error_step=error_step)
         return numpy.abs(model.H), numpy.abs(model.W)
@@ -597,7 +562,6 @@ class ImageDataset(ImageStack):
         W=None,
         vstep=100,
         hstep=1000,
-        indices=None,
         init=None,
     ):
         """
@@ -621,8 +585,6 @@ class ImageDataset(ImageStack):
         :type H: Union[None, array_like], optional
         :param W: Init matrix for W of shape (n_features, n_components), defaults to None
         :type W: Union[None, array_like], optional
-        :param indices: If not None, apply method only to indices of data, defaults to None
-        :type indices: Union[None, array_like], optional
 
         :return: (H, W): The components matrix and the mixing matrix.
         """
@@ -632,7 +594,7 @@ class ImageDataset(ImageStack):
         model = decomposition.NMF(
             n_components=num_components, init=init, max_iter=num_iter
         )
-        X = self.as_array2d(
+        X = self.as_array2d()
 
         if numpy.any(X[:, :] < 0):
             _logger.warning("Setting negative values to 0 to compute NMF")
@@ -655,13 +617,12 @@ class ImageDataset(ImageStack):
         error_step=None,
         vstep=100,
         hstep=1000,
-        indices=None,
     ):
         """
         Applies both NICA and NMF to the data. The init H and W for NMF are the
         result of NICA.
         """
-        H, W = self.nica(num_components, chunksize, num_iter
+        H, W = self.nica(num_components, chunksize, num_iter)
 
         # Initial NMF factorization: X = F0 * G0
         W = numpy.abs(W)
@@ -676,27 +637,23 @@ class ImageDataset(ImageStack):
             W,
             vstep,
             hstep,
-            indices=indices,
             init="custom",
         )
 
     def apply_moments(
         self,
-        indices=None,
     ):
         """
         Compute the COM, FWHM, skewness and kurtosis of the data for very dimension.
 
-        :param indices: If not None, apply method only to indices of data, defaults to None
-        :type indices: Union[None, array_like], optional
         """
 
         if not self.dims.ndim:
             raise NoDimensionsError("apply_moments")
         for axis, dim in self.dims.items():
             # Get motor values per image of the stack
-            values = self.get_dimensions_values(
-            mean, fwhm, skew, kurt = compute_moments(values, self.as_array3d(
+            values = self.get_dimensions_values()[dim.name]
+            mean, fwhm, skew, kurt = compute_moments(values, self.as_array3d())
             self.moments_dims[axis] = {
                 MomentType.COM: mean,
                 MomentType.FWHM: fwhm,
@@ -708,17 +665,12 @@ class ImageDataset(ImageStack):
 
     def apply_fit(
         self,
-        indices=None,
         int_thresh: float | None = 15.0,
         method: str | None = None,
-
+        abort_event: threading.Event = threading.Event(),
     ) -> Tuple[ImageDataset, numpy.ndarray]:
         """
         Fits the data around axis 0 and saves the new data into disk.
-
-        :param indices: Indices of the images to fit.
-            If None, the fit is done to all the data.
-        :type indices: Union[None, array_like]
         :param int_thresh: see `mapping.fit_pixel`
         :type int_thresh: Union[None, float]
         :returns: dataset with data of same size as `self.data` but with the
@@ -727,46 +679,32 @@ class ImageDataset(ImageStack):
         """
         if not self.dims.ndim:
             raise NoDimensionsError("apply_fit")
-        if indices is None:
-            indices = Ellipsis
 
-
+        if self.dims.ndim == 1:
+            dim0 = self.dims.get(0)
+            motor_values = self.metadata_dict[dim0.name].ravel()
+        elif self.dims.ndim <= 3:
 
-
-
-            # Fit can only be done if rocking curves are at least of size 3
-            if len(self.data) < 3:
-                raise ValueError(
-                    f"Fit can only be done if dataset has not at least 3 dimensions. Got {len(data)}"
-                )
+            ndim = self.dims.ndim
+            dimension_names = [self.dims.get(dim_idx).name for dim_idx in range(ndim)]
 
-        [... 15 removed lines elided in the rendered diff ...]
-        else:
-            raise TooManyDimensionsForRockingCurvesError()
-
-        data[indices], maps = fit_rocking_curve_parallel(
-            data=self.as_array3d(indices),
-            motor_values=motor_values,
-            thresh=int_thresh,
-            method=method,
-        )
+            motor_values = [
+                self.metadata_dict[dim_name].ravel() for dim_name in dimension_names
+            ]
+
+        else:
+            raise TooManyDimensionsForRockingCurvesError()
+
+        data, maps = fit_rocking_curve_parallel(
+            data=self.as_array3d(),
+            motor_values=motor_values,
+            thresh=int_thresh,
+            method=method,
+            abort_event=abort_event,
+        )
 
         return (
-
+            self.copy(new_data=data),
             maps,
         )
 
@@ -823,7 +761,7 @@ class ImageDataset(ImageStack):
         )
         self.transformation = Transformation(kind, x, y, rotate)
 
-    def project_data(self, dimension: Sequence[int]
+    def project_data(self, dimension: Sequence[int]):
         """
         Applies a projection to the data.
         The new Dataset will have the same size as the chosen dimension, where
@@ -831,9 +769,6 @@ class ImageDataset(ImageStack):
 
         :param dimension: Dimensions to project the data onto
         :type dimension: array_like
-        :param indices: Indices of the images to use for the projection.
-            If None, the projection is done using all data.
-        :type indices: Union[None, array_like]
         :param str _dir: Directory filename to save the new data
         """
 
@@ -846,7 +781,7 @@ class ImageDataset(ImageStack):
         dim = self.dims.get(dimension[0])
         data = []
         for i in range(dim.size):
-            _sum = self.zsum(
+            _sum = self.zsum(dimension=[dimension[0], i])
             if len(_sum):
                 data += [_sum]
         data = numpy.array(data)
@@ -860,7 +795,7 @@ class ImageDataset(ImageStack):
         data = []
         for i in range(dim1.size):
             for j in range(dim2.size):
-                _sum = self.zsum(
+                _sum = self.zsum(dimension=[dimension, [i, j]])
                 if len(_sum):
                     data += [_sum]
         data = numpy.array(data)
@@ -906,35 +841,15 @@ class ImageDataset(ImageStack):
             E=energy,
         )
 
-    def
-        from darfix.tasks.binning import Binning  # avoid cyclic import
-
-        _dir = self.dir if _dir is None else _dir
-        task = Binning(
-            inputs={
-                "dataset": self,
-                "output_dir": _dir,
-                "scale": scale,
-            }
-        )
-        task.run()
-        return task.outputs.dataset
-
-    def recover_weak_beam(self, n, indices=None):
+    def recover_weak_beam(self, n):
         """
         Set to zero all pixels higher than n times the standard deviation across the stack dimension
 
         :param n: Increase or decrease the top threshold by this fixed value.
         :type n: float
-        :param indices: Indices of the images to use for the filtering.
-            If None, the filtering is done using all data.
-        :type indices: Union[None, array_like]
         """
-        std = numpy.std(self.as_array3d(
-
-        dataset = self.copy(new_data=self.data.copy())
-        threshold_removal(dataset.as_array3d(indices), top=n * std)
-        return dataset
+        std = numpy.std(self.as_array3d(), axis=0)
+        threshold_removal(self.as_array3d(), top=n * std)
 
     @staticmethod
     def load(file: str):