modacor 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- modacor/__init__.py +30 -0
- modacor/dataclasses/__init__.py +0 -0
- modacor/dataclasses/basedata.py +973 -0
- modacor/dataclasses/databundle.py +23 -0
- modacor/dataclasses/helpers.py +45 -0
- modacor/dataclasses/messagehandler.py +75 -0
- modacor/dataclasses/process_step.py +233 -0
- modacor/dataclasses/process_step_describer.py +146 -0
- modacor/dataclasses/processing_data.py +59 -0
- modacor/dataclasses/trace_event.py +118 -0
- modacor/dataclasses/uncertainty_tools.py +132 -0
- modacor/dataclasses/validators.py +84 -0
- modacor/debug/pipeline_tracer.py +548 -0
- modacor/io/__init__.py +33 -0
- modacor/io/csv/__init__.py +0 -0
- modacor/io/csv/csv_sink.py +114 -0
- modacor/io/csv/csv_source.py +210 -0
- modacor/io/hdf/__init__.py +27 -0
- modacor/io/hdf/hdf_source.py +120 -0
- modacor/io/io_sink.py +41 -0
- modacor/io/io_sinks.py +61 -0
- modacor/io/io_source.py +164 -0
- modacor/io/io_sources.py +208 -0
- modacor/io/processing_path.py +113 -0
- modacor/io/tiled/__init__.py +16 -0
- modacor/io/tiled/tiled_source.py +403 -0
- modacor/io/yaml/__init__.py +27 -0
- modacor/io/yaml/yaml_source.py +116 -0
- modacor/modules/__init__.py +53 -0
- modacor/modules/base_modules/__init__.py +0 -0
- modacor/modules/base_modules/append_processing_data.py +329 -0
- modacor/modules/base_modules/append_sink.py +141 -0
- modacor/modules/base_modules/append_source.py +181 -0
- modacor/modules/base_modules/bitwise_or_masks.py +113 -0
- modacor/modules/base_modules/combine_uncertainties.py +120 -0
- modacor/modules/base_modules/combine_uncertainties_max.py +105 -0
- modacor/modules/base_modules/divide.py +82 -0
- modacor/modules/base_modules/find_scale_factor1d.py +373 -0
- modacor/modules/base_modules/multiply.py +77 -0
- modacor/modules/base_modules/multiply_databundles.py +73 -0
- modacor/modules/base_modules/poisson_uncertainties.py +69 -0
- modacor/modules/base_modules/reduce_dimensionality.py +252 -0
- modacor/modules/base_modules/sink_processing_data.py +80 -0
- modacor/modules/base_modules/subtract.py +80 -0
- modacor/modules/base_modules/subtract_databundles.py +67 -0
- modacor/modules/base_modules/units_label_update.py +66 -0
- modacor/modules/instrument_modules/__init__.py +0 -0
- modacor/modules/instrument_modules/readme.md +9 -0
- modacor/modules/technique_modules/__init__.py +0 -0
- modacor/modules/technique_modules/scattering/__init__.py +0 -0
- modacor/modules/technique_modules/scattering/geometry_helpers.py +114 -0
- modacor/modules/technique_modules/scattering/index_pixels.py +492 -0
- modacor/modules/technique_modules/scattering/indexed_averager.py +628 -0
- modacor/modules/technique_modules/scattering/pixel_coordinates_3d.py +417 -0
- modacor/modules/technique_modules/scattering/solid_angle_correction.py +63 -0
- modacor/modules/technique_modules/scattering/xs_geometry.py +571 -0
- modacor/modules/technique_modules/scattering/xs_geometry_from_pixel_coordinates.py +293 -0
- modacor/runner/__init__.py +0 -0
- modacor/runner/pipeline.py +749 -0
- modacor/runner/process_step_registry.py +224 -0
- modacor/tests/__init__.py +27 -0
- modacor/tests/dataclasses/test_basedata.py +519 -0
- modacor/tests/dataclasses/test_basedata_operations.py +439 -0
- modacor/tests/dataclasses/test_basedata_to_base_units.py +57 -0
- modacor/tests/dataclasses/test_process_step_describer.py +73 -0
- modacor/tests/dataclasses/test_processstep.py +282 -0
- modacor/tests/debug/test_tracing_integration.py +188 -0
- modacor/tests/integration/__init__.py +0 -0
- modacor/tests/integration/test_pipeline_run.py +238 -0
- modacor/tests/io/__init__.py +27 -0
- modacor/tests/io/csv/__init__.py +0 -0
- modacor/tests/io/csv/test_csv_source.py +156 -0
- modacor/tests/io/hdf/__init__.py +27 -0
- modacor/tests/io/hdf/test_hdf_source.py +92 -0
- modacor/tests/io/test_io_sources.py +119 -0
- modacor/tests/io/tiled/__init__.py +12 -0
- modacor/tests/io/tiled/test_tiled_source.py +120 -0
- modacor/tests/io/yaml/__init__.py +27 -0
- modacor/tests/io/yaml/static_data_example.yaml +26 -0
- modacor/tests/io/yaml/test_yaml_source.py +47 -0
- modacor/tests/modules/__init__.py +27 -0
- modacor/tests/modules/base_modules/__init__.py +27 -0
- modacor/tests/modules/base_modules/test_append_processing_data.py +219 -0
- modacor/tests/modules/base_modules/test_append_sink.py +76 -0
- modacor/tests/modules/base_modules/test_append_source.py +180 -0
- modacor/tests/modules/base_modules/test_bitwise_or_masks.py +264 -0
- modacor/tests/modules/base_modules/test_combine_uncertainties.py +105 -0
- modacor/tests/modules/base_modules/test_combine_uncertainties_max.py +109 -0
- modacor/tests/modules/base_modules/test_divide.py +140 -0
- modacor/tests/modules/base_modules/test_find_scale_factor1d.py +220 -0
- modacor/tests/modules/base_modules/test_multiply.py +113 -0
- modacor/tests/modules/base_modules/test_multiply_databundles.py +136 -0
- modacor/tests/modules/base_modules/test_poisson_uncertainties.py +61 -0
- modacor/tests/modules/base_modules/test_reduce_dimensionality.py +358 -0
- modacor/tests/modules/base_modules/test_sink_processing_data.py +119 -0
- modacor/tests/modules/base_modules/test_subtract.py +111 -0
- modacor/tests/modules/base_modules/test_subtract_databundles.py +136 -0
- modacor/tests/modules/base_modules/test_units_label_update.py +91 -0
- modacor/tests/modules/technique_modules/__init__.py +0 -0
- modacor/tests/modules/technique_modules/scattering/__init__.py +0 -0
- modacor/tests/modules/technique_modules/scattering/test_geometry_helpers.py +198 -0
- modacor/tests/modules/technique_modules/scattering/test_index_pixels.py +426 -0
- modacor/tests/modules/technique_modules/scattering/test_indexed_averaging.py +559 -0
- modacor/tests/modules/technique_modules/scattering/test_pixel_coordinates_3d.py +282 -0
- modacor/tests/modules/technique_modules/scattering/test_xs_geometry_from_pixel_coordinates.py +224 -0
- modacor/tests/modules/technique_modules/scattering/test_xsgeometry.py +635 -0
- modacor/tests/requirements.txt +12 -0
- modacor/tests/runner/test_pipeline.py +438 -0
- modacor/tests/runner/test_process_step_registry.py +65 -0
- modacor/tests/test_import.py +43 -0
- modacor/tests/test_modacor.py +17 -0
- modacor/tests/test_units.py +79 -0
- modacor/units.py +97 -0
- modacor-1.0.0.dist-info/METADATA +482 -0
- modacor-1.0.0.dist-info/RECORD +120 -0
- modacor-1.0.0.dist-info/WHEEL +5 -0
- modacor-1.0.0.dist-info/licenses/AUTHORS.md +11 -0
- modacor-1.0.0.dist-info/licenses/LICENSE +11 -0
- modacor-1.0.0.dist-info/licenses/LICENSE.txt +11 -0
- modacor-1.0.0.dist-info/top_level.txt +1 -0
modacor/modules/base_modules/bitwise_or_masks.py
@@ -0,0 +1,113 @@
# SPDX-License-Identifier: BSD-3-Clause
# /usr/bin/env python3
# -*- coding: utf-8 -*-

from __future__ import annotations

__coding__ = "utf-8"
__authors__ = ["Brian R. Pauw"]  # add names to the list as appropriate
__copyright__ = "Copyright 2026, The MoDaCor team"
__date__ = "09/01/2026"
__status__ = "Development"  # "Development", "Production"
# end of header and standard imports

__all__ = ["BitwiseOrMasks"]
__version__ = "20260109.3"

from pathlib import Path

import numpy as np

from modacor.dataclasses.basedata import BaseData
from modacor.dataclasses.databundle import DataBundle
from modacor.dataclasses.process_step import ProcessStep
from modacor.dataclasses.process_step_describer import ProcessStepDescriber


class BitwiseOrMasks(ProcessStep):
    """
    Bitwise-OR one or more mask BaseData entries into a target mask BaseData entry
    within the same DataBundle.

    MoDaCor's Masks are 32-bit integer bitfields (NeXus convention). This step updates the
    target mask in-place and preserves reason bits.
    """

    documentation = ProcessStepDescriber(
        calling_name="Combine masks within one DataBundle (bitwise OR)",
        calling_id="BitwiseOrMasksInBundle",
        calling_module_path=Path(__file__),
        calling_version=__version__,
        required_data_keys=["mask"],
        modifies={"mask": ["signal"]},
        arguments={
            "with_processing_keys": {
                "type": list,
                "required": True,
                "default": ["sample"],
                "doc": "Single processing key identifying the DataBundle to update.",
            },
            "target_mask_key": {
                "type": str,
                "default": "mask",
                "doc": "BaseData key for the target mask inside the DataBundle.",
            },
            "source_mask_keys": {
                "type": list,
                "required": True,
                "default": [],
                "doc": "List of BaseData keys to OR into the target mask.",
            },
        },
        step_keywords=["mask", "bitmask", "bitwise", "or", "databundle"],
        step_doc="Combine multiple mask arrays stored as different BaseData keys in the same DataBundle.",
        step_reference="NeXus mask bit-field convention (NXdata/NXdetector masks)",
        step_note="""
        Configuration:
            with_processing_keys: [sample]    # required, single databundle key
            target_mask_key: mask             # optional, default: mask
            source_mask_keys: [bs_mask, ...]  # required, one or more

        Performs:
            target_mask |= source_mask  (in-place, for each source)
        """,
    )

    @staticmethod
    def _require_int(arr: np.ndarray, name: str) -> None:
        assert np.issubdtype(arr.dtype, np.integer), f"{name} must be an integer mask, got {arr.dtype}."

    def calculate(self) -> dict[str, DataBundle]:
        cfg = self.configuration

        keys = self._normalised_processing_keys()
        assert len(keys) == 1, "BitwiseOrMasks requires a single databundle processing key."
        processing_key = keys[0]
        target_key = cfg.get("target_mask_key", "mask")
        source_keys = cfg["source_mask_keys"]

        assert isinstance(source_keys, list) and source_keys, "source_mask_keys must be a non-empty list."

        bundle = self.processing_data[processing_key]
        target_bd: BaseData = bundle[target_key]
        tgt = target_bd.signal

        self._require_int(tgt, f"{processing_key}::{target_key}")  # noqa: E231

        # Canonicalize target to uint32 once (needed for NeXus-style 32-bit bitfields)
        if tgt.dtype != np.uint32:
            tgt = tgt.astype(np.uint32, copy=True)  # one-time allocation
            target_bd.signal = tgt

        for sk in source_keys:
            src_bd: BaseData = bundle[sk]
            src = src_bd.signal
            self._require_int(src, f"{processing_key}::{sk}")  # noqa: E231

            # Convert only if needed (uint8/int16/etc -> uint32)
            src_u32 = src if src.dtype == np.uint32 else src.astype(np.uint32, copy=False)

            # In-place OR; NumPy handles broadcasting or raises
            np.bitwise_or(tgt, src_u32, out=tgt)

        return {processing_key: bundle}
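
The core of BitwiseOrMasks.calculate reduces to integer dtype checks plus an in-place np.bitwise_or. A minimal standalone sketch of that logic, on plain ndarrays and without the ProcessStep or DataBundle machinery (the helper name or_masks is illustrative only, not part of the package):

import numpy as np

def or_masks(target: np.ndarray, *sources: np.ndarray) -> np.ndarray:
    """Bitwise-OR integer masks into a uint32 target, mirroring the step above."""
    if not np.issubdtype(target.dtype, np.integer):
        raise TypeError(f"target must be an integer mask, got {target.dtype}")
    # canonicalize once to the 32-bit bitfield dtype
    out = target if target.dtype == np.uint32 else target.astype(np.uint32, copy=True)
    for src in sources:
        if not np.issubdtype(src.dtype, np.integer):
            raise TypeError(f"source must be an integer mask, got {src.dtype}")
        np.bitwise_or(out, src.astype(np.uint32, copy=False), out=out)
    return out

# e.g. a detector-gap mask (bit 0 set) OR-ed with a beamstop mask (bit 1 set):
combined = or_masks(np.array([[0, 1], [0, 0]], dtype=np.int32),
                    np.array([[2, 0], [2, 0]], dtype=np.uint8))
# combined == [[2, 1], [2, 0]] as uint32; reason bits from both inputs survive

Because the combination is an in-place OR on a uint32 target, each source's reason bits are preserved rather than overwritten, which is the point of the NeXus bit-field convention cited in step_reference.
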
modacor/modules/base_modules/combine_uncertainties.py
@@ -0,0 +1,120 @@
# SPDX-License-Identifier: BSD-3-Clause
# /usr/bin/env python3
# -*- coding: utf-8 -*-

from __future__ import annotations

__coding__ = "utf-8"
__authors__ = ["Brian R. Pauw"]  # add names to the list as appropriate
__copyright__ = "Copyright 2026, The MoDaCor team"
__date__ = "20/01/2026"
__status__ = "Development"  # "Development", "Production"
# end of header and standard imports

__all__ = ["CombineUncertainties"]
__version__ = "20260120.1"

from pathlib import Path

from modacor.dataclasses.databundle import DataBundle
from modacor.dataclasses.process_step import ProcessStep
from modacor.dataclasses.process_step_describer import ProcessStepDescriber
from modacor.dataclasses.uncertainty_tools import (
    combine_uncertainty_keys,
    normalize_uncertainty_combinations,
    quadrature_aggregator,
)


class CombineUncertainties(ProcessStep):
    """Combine multiple uncertainty entries on a :class:`~modacor.dataclasses.basedata.BaseData` element.

    The configured combinations are evaluated as root-sum-of-squares of the listed one-sigma
    uncertainties. Each combination writes (or overwrites) the target uncertainty key.

    Example configuration::

        combinations:
            stat_total: ["poisson", "readout"]
            geometry: ["pixel_index_slow", "pixel_index_fast"]

    The example above will create/update the keys ``"stat_total"`` and ``"geometry"`` on the
    target BaseData, combining the referenced uncertainties in quadrature.
    """

    documentation = ProcessStepDescriber(
        calling_name="Combine uncertainties in quadrature",
        calling_id="CombineUncertainties",
        calling_module_path=Path(__file__),
        calling_version=__version__,
        required_data_keys=["signal"],
        modifies={"signal": ["uncertainties"]},
        arguments={
            "target_basedata_key": {
                "type": str,
                "default": "signal",
                "doc": "Name of the BaseData entry within each DataBundle to modify (default: 'signal').",
            },
            "combinations": {
                "type": dict,
                "required": True,
                "default": {},
                "doc": "Mapping of output uncertainty key to an iterable of source keys to combine.",
            },
            "drop_source_keys": {
                "type": bool,
                "default": False,
                "doc": "Remove source uncertainty keys after combination (default: False).",
            },
            "ignore_missing": {
                "type": bool,
                "default": False,
                "doc": (
                    "If True, missing source keys are ignored (combinations use the available ones). "
                    "If all listed keys are missing, the combination is skipped."
                ),
            },
        },
        step_keywords=["uncertainties", "combine", "quadrature", "propagation"],
        step_doc="Combine selected uncertainties in quadrature and expose the result under new keys.",
        step_reference="DOI 10.1088/0953-8984/25/38/383201",
        step_note=(
            "Designed for SAXS/SANS pipelines where uncertainties such as Poisson, readout noise, "
            "and flat-field corrections are stored separately (see the MOUSE notebook examples)."
        ),
    )

    # ------------------------------------------------------------------
    # Helpers
    # ------------------------------------------------------------------

    def calculate(self) -> dict[str, DataBundle]:
        combinations_raw = self.configuration.get("combinations", {})
        combinations = normalize_uncertainty_combinations(combinations_raw)
        if not combinations:
            raise ValueError("CombineUncertainties requires a non-empty 'combinations' mapping in its configuration.")

        target_basedata_key = str(self.configuration.get("target_basedata_key", "signal"))
        drop_sources = bool(self.configuration.get("drop_source_keys", False))
        ignore_missing = bool(self.configuration.get("ignore_missing", False))

        output: dict[str, DataBundle] = {}

        for processing_key in self._normalised_processing_keys():
            databundle: DataBundle = self.processing_data.get(processing_key)
            if target_basedata_key not in databundle:
                raise KeyError(f"DataBundle '{processing_key}' does not contain BaseData '{target_basedata_key}'.")

            combine_uncertainty_keys(
                basedata=databundle[target_basedata_key],
                combinations=combinations,
                aggregator=quadrature_aggregator,
                drop_sources=drop_sources,
                ignore_missing=ignore_missing,
                logger=self.logger,
                target_name=f"BaseData '{target_basedata_key}' in DataBundle '{processing_key}'",
            )

            output[processing_key] = databundle

        return output
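
The class docstring describes each combination as a root-sum-of-squares of the listed one-sigma uncertainties. A minimal sketch of that quadrature rule in plain NumPy (the actual aggregation is delegated to quadrature_aggregator from modacor.dataclasses.uncertainty_tools, whose implementation is not shown in this diff; combine_in_quadrature below is an illustrative name only):

import numpy as np

def combine_in_quadrature(*uncertainties: np.ndarray) -> np.ndarray:
    """Root-sum-of-squares of independent one-sigma uncertainty arrays."""
    stacked = np.stack([np.asarray(u, dtype=float) for u in uncertainties])
    return np.sqrt(np.sum(stacked**2, axis=0))

# as in the docstring's "stat_total: [poisson, readout]" combination:
poisson = np.array([3.0, 4.0])
readout = np.array([4.0, 3.0])
stat_total = combine_in_quadrature(poisson, readout)  # -> [5.0, 5.0]
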
modacor/modules/base_modules/combine_uncertainties_max.py
@@ -0,0 +1,105 @@
# SPDX-License-Identifier: BSD-3-Clause
# /usr/bin/env python3
# -*- coding: utf-8 -*-

from __future__ import annotations

__coding__ = "utf-8"
__authors__ = ["Brian R. Pauw"]  # add names to the list as appropriate
__copyright__ = "Copyright 2026, The MoDaCor team"
__date__ = "20/01/2026"
__status__ = "Development"  # "Development", "Production"
# end of header and standard imports

__all__ = ["CombineUncertaintiesMax"]
__version__ = "20260120.1"

from pathlib import Path

from modacor.dataclasses.databundle import DataBundle
from modacor.dataclasses.process_step import ProcessStep
from modacor.dataclasses.process_step_describer import ProcessStepDescriber
from modacor.dataclasses.uncertainty_tools import (
    combine_uncertainty_keys,
    maximum_aggregator,
    normalize_uncertainty_combinations,
)


class CombineUncertaintiesMax(ProcessStep):
    """Combine uncertainties by taking the element-wise maximum across selected keys."""

    documentation = ProcessStepDescriber(
        calling_name="Combine uncertainties by maximum",
        calling_id="CombineUncertaintiesMax",
        calling_module_path=Path(__file__),
        calling_version=__version__,
        required_data_keys=["signal"],
        modifies={"signal": ["uncertainties"]},
        arguments={
            "target_basedata_key": {
                "type": str,
                "default": "signal",
                "doc": "Name of the BaseData entry within each DataBundle to modify (default: 'signal').",
            },
            "combinations": {
                "type": dict,
                "required": True,
                "default": {},
                "doc": "Mapping of output uncertainty key to an iterable of source keys to combine.",
            },
            "drop_source_keys": {
                "type": bool,
                "default": False,
                "doc": "Remove source uncertainty keys after combination (default: False).",
            },
            "ignore_missing": {
                "type": bool,
                "default": False,
                "doc": (
                    "If True, missing source keys are ignored. "
                    "If all listed keys are missing, the combination is skipped."
                ),
            },
        },
        step_keywords=["uncertainties", "combine", "maximum", "propagation"],
        step_doc="Select the maximum absolute uncertainty among configured source keys.",
        step_reference="DOI 10.1088/0953-8984/25/38/383201",
        step_note=(
            "Useful when systematic contributions must be bounded by the most conservative estimate, "
            "mirroring needs in certain MOUSE workflows."
        ),
    )

    def calculate(self) -> dict[str, DataBundle]:
        combinations_raw = self.configuration.get("combinations", {})
        combinations = normalize_uncertainty_combinations(combinations_raw)
        if not combinations:
            raise ValueError(
                "CombineUncertaintiesMax requires a non-empty 'combinations' mapping in its configuration."
            )

        target_basedata_key = str(self.configuration.get("target_basedata_key", "signal"))
        drop_sources = bool(self.configuration.get("drop_source_keys", False))
        ignore_missing = bool(self.configuration.get("ignore_missing", False))

        output: dict[str, DataBundle] = {}

        for processing_key in self._normalised_processing_keys():
            databundle: DataBundle = self.processing_data.get(processing_key)
            if target_basedata_key not in databundle:
                raise KeyError(f"DataBundle '{processing_key}' does not contain BaseData '{target_basedata_key}'.")

            combine_uncertainty_keys(
                basedata=databundle[target_basedata_key],
                combinations=combinations,
                aggregator=maximum_aggregator,
                drop_sources=drop_sources,
                ignore_missing=ignore_missing,
                logger=self.logger,
                target_name=f"BaseData '{target_basedata_key}' in DataBundle '{processing_key}'",
            )

            output[processing_key] = databundle

        return output
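
For comparison with the quadrature variant above, a minimal sketch of the element-wise maximum rule described by this step's step_doc (plain NumPy; the real work is done by maximum_aggregator from modacor.dataclasses.uncertainty_tools, not shown in this diff, and combine_by_maximum is an illustrative name only):

import numpy as np

def combine_by_maximum(*uncertainties: np.ndarray) -> np.ndarray:
    """Element-wise maximum of the absolute one-sigma uncertainties."""
    stacked = np.abs(np.stack([np.asarray(u, dtype=float) for u in uncertainties]))
    return np.max(stacked, axis=0)

# picks the most conservative estimate per element:
sem = np.array([0.8, 2.5])
propagated = np.array([1.2, 1.0])
bounded = combine_by_maximum(sem, propagated)  # -> [1.2, 2.5]
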
modacor/modules/base_modules/divide.py
@@ -0,0 +1,82 @@
# SPDX-License-Identifier: BSD-3-Clause
# /usr/bin/env python3
# -*- coding: utf-8 -*-

from __future__ import annotations

__coding__ = "utf-8"
__authors__ = ["Brian R. Pauw", "Armin Moser"]  # add names to the list as appropriate
__copyright__ = "Copyright 2025, The MoDaCor team"
__date__ = "29/10/2025"
__status__ = "Development"  # "Development", "Production"
# end of header and standard imports

__all__ = ["Divide"]
__version__ = "20251029.1"

from pathlib import Path

# from modacor import ureg
# from modacor.dataclasss.basedata import BaseData
from modacor.dataclasses.databundle import DataBundle
from modacor.dataclasses.helpers import basedata_from_sources
from modacor.dataclasses.process_step import ProcessStep
from modacor.dataclasses.process_step_describer import ProcessStepDescriber


class Divide(ProcessStep):
    """
    Divide DataBundle by a BaseData from an IoSource
    """

    documentation = ProcessStepDescriber(
        calling_name="Divide by IoSource data",
        calling_id="DivideBySourceData",
        calling_module_path=Path(__file__),
        calling_version=__version__,
        required_data_keys=["signal"],
        modifies={"signal": ["signal", "uncertainties", "units"]},
        arguments={
            "divisor_source": {
                "type": str,
                "default": None,
                "doc": "IoSources key for the divisor signal.",
            },
            "divisor_units_source": {
                "type": str,
                "default": None,
                "doc": "IoSources key for divisor units metadata.",
            },
            "divisor_uncertainties_sources": {
                "type": dict,
                "default": {},
                "doc": "Mapping of uncertainty name to IoSources key.",
            },
        },
        step_keywords=["divide", "scalar", "array"],
        step_doc="Divide a DataBundle element by a divisor loaded from a data source",
        step_reference="DOI 10.1088/0953-8984/25/38/383201",
        step_note="""This loads a scalar (value, units and uncertainty)
        from an IOSource and applies it to the data signal""",
    )

    def calculate(self) -> dict[str, DataBundle]:
        # build up the divisor BaseData object from the IoSources

        divisor = basedata_from_sources(
            io_sources=self.io_sources,
            signal_source=self.configuration.get("divisor_source"),
            units_source=self.configuration.get("divisor_units_source", None),
            uncertainty_sources=self.configuration.get("divisor_uncertainties_sources", {}),
        )

        output: dict[str, DataBundle] = {}

        # actual work happens here:
        for key in self._normalised_processing_keys():
            databundle = self.processing_data.get(key)
            # divide the data
            # Rely on BaseData.__truediv__ for units + uncertainty propagation
            databundle["signal"] /= divisor
            output[key] = databundle
        return output
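
The calculate method above defers units and uncertainty handling to BaseData.__truediv__, which is defined in modacor/dataclasses/basedata.py and not shown in this diff. As a hedged illustration only, the standard first-order propagation rule for a quotient of independent quantities looks like the sketch below; whether BaseData applies exactly this rule cannot be confirmed from this diff, and divide_with_uncertainty is not a package function:

import numpy as np

def divide_with_uncertainty(a, sigma_a, b, sigma_b):
    """First-order uncertainty propagation for q = a / b with independent inputs."""
    q = a / b
    # relative one-sigma uncertainties add in quadrature for a quotient
    sigma_q = np.abs(q) * np.sqrt((sigma_a / a) ** 2 + (sigma_b / b) ** 2)
    return q, sigma_q

q, sq = divide_with_uncertainty(np.array([10.0]), np.array([0.5]),
                                np.array([2.0]), np.array([0.1]))
# q == [5.0], sq is approx. [0.354]
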