modacor 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- modacor/__init__.py +30 -0
- modacor/dataclasses/__init__.py +0 -0
- modacor/dataclasses/basedata.py +973 -0
- modacor/dataclasses/databundle.py +23 -0
- modacor/dataclasses/helpers.py +45 -0
- modacor/dataclasses/messagehandler.py +75 -0
- modacor/dataclasses/process_step.py +233 -0
- modacor/dataclasses/process_step_describer.py +146 -0
- modacor/dataclasses/processing_data.py +59 -0
- modacor/dataclasses/trace_event.py +118 -0
- modacor/dataclasses/uncertainty_tools.py +132 -0
- modacor/dataclasses/validators.py +84 -0
- modacor/debug/pipeline_tracer.py +548 -0
- modacor/io/__init__.py +33 -0
- modacor/io/csv/__init__.py +0 -0
- modacor/io/csv/csv_sink.py +114 -0
- modacor/io/csv/csv_source.py +210 -0
- modacor/io/hdf/__init__.py +27 -0
- modacor/io/hdf/hdf_source.py +120 -0
- modacor/io/io_sink.py +41 -0
- modacor/io/io_sinks.py +61 -0
- modacor/io/io_source.py +164 -0
- modacor/io/io_sources.py +208 -0
- modacor/io/processing_path.py +113 -0
- modacor/io/tiled/__init__.py +16 -0
- modacor/io/tiled/tiled_source.py +403 -0
- modacor/io/yaml/__init__.py +27 -0
- modacor/io/yaml/yaml_source.py +116 -0
- modacor/modules/__init__.py +53 -0
- modacor/modules/base_modules/__init__.py +0 -0
- modacor/modules/base_modules/append_processing_data.py +329 -0
- modacor/modules/base_modules/append_sink.py +141 -0
- modacor/modules/base_modules/append_source.py +181 -0
- modacor/modules/base_modules/bitwise_or_masks.py +113 -0
- modacor/modules/base_modules/combine_uncertainties.py +120 -0
- modacor/modules/base_modules/combine_uncertainties_max.py +105 -0
- modacor/modules/base_modules/divide.py +82 -0
- modacor/modules/base_modules/find_scale_factor1d.py +373 -0
- modacor/modules/base_modules/multiply.py +77 -0
- modacor/modules/base_modules/multiply_databundles.py +73 -0
- modacor/modules/base_modules/poisson_uncertainties.py +69 -0
- modacor/modules/base_modules/reduce_dimensionality.py +252 -0
- modacor/modules/base_modules/sink_processing_data.py +80 -0
- modacor/modules/base_modules/subtract.py +80 -0
- modacor/modules/base_modules/subtract_databundles.py +67 -0
- modacor/modules/base_modules/units_label_update.py +66 -0
- modacor/modules/instrument_modules/__init__.py +0 -0
- modacor/modules/instrument_modules/readme.md +9 -0
- modacor/modules/technique_modules/__init__.py +0 -0
- modacor/modules/technique_modules/scattering/__init__.py +0 -0
- modacor/modules/technique_modules/scattering/geometry_helpers.py +114 -0
- modacor/modules/technique_modules/scattering/index_pixels.py +492 -0
- modacor/modules/technique_modules/scattering/indexed_averager.py +628 -0
- modacor/modules/technique_modules/scattering/pixel_coordinates_3d.py +417 -0
- modacor/modules/technique_modules/scattering/solid_angle_correction.py +63 -0
- modacor/modules/technique_modules/scattering/xs_geometry.py +571 -0
- modacor/modules/technique_modules/scattering/xs_geometry_from_pixel_coordinates.py +293 -0
- modacor/runner/__init__.py +0 -0
- modacor/runner/pipeline.py +749 -0
- modacor/runner/process_step_registry.py +224 -0
- modacor/tests/__init__.py +27 -0
- modacor/tests/dataclasses/test_basedata.py +519 -0
- modacor/tests/dataclasses/test_basedata_operations.py +439 -0
- modacor/tests/dataclasses/test_basedata_to_base_units.py +57 -0
- modacor/tests/dataclasses/test_process_step_describer.py +73 -0
- modacor/tests/dataclasses/test_processstep.py +282 -0
- modacor/tests/debug/test_tracing_integration.py +188 -0
- modacor/tests/integration/__init__.py +0 -0
- modacor/tests/integration/test_pipeline_run.py +238 -0
- modacor/tests/io/__init__.py +27 -0
- modacor/tests/io/csv/__init__.py +0 -0
- modacor/tests/io/csv/test_csv_source.py +156 -0
- modacor/tests/io/hdf/__init__.py +27 -0
- modacor/tests/io/hdf/test_hdf_source.py +92 -0
- modacor/tests/io/test_io_sources.py +119 -0
- modacor/tests/io/tiled/__init__.py +12 -0
- modacor/tests/io/tiled/test_tiled_source.py +120 -0
- modacor/tests/io/yaml/__init__.py +27 -0
- modacor/tests/io/yaml/static_data_example.yaml +26 -0
- modacor/tests/io/yaml/test_yaml_source.py +47 -0
- modacor/tests/modules/__init__.py +27 -0
- modacor/tests/modules/base_modules/__init__.py +27 -0
- modacor/tests/modules/base_modules/test_append_processing_data.py +219 -0
- modacor/tests/modules/base_modules/test_append_sink.py +76 -0
- modacor/tests/modules/base_modules/test_append_source.py +180 -0
- modacor/tests/modules/base_modules/test_bitwise_or_masks.py +264 -0
- modacor/tests/modules/base_modules/test_combine_uncertainties.py +105 -0
- modacor/tests/modules/base_modules/test_combine_uncertainties_max.py +109 -0
- modacor/tests/modules/base_modules/test_divide.py +140 -0
- modacor/tests/modules/base_modules/test_find_scale_factor1d.py +220 -0
- modacor/tests/modules/base_modules/test_multiply.py +113 -0
- modacor/tests/modules/base_modules/test_multiply_databundles.py +136 -0
- modacor/tests/modules/base_modules/test_poisson_uncertainties.py +61 -0
- modacor/tests/modules/base_modules/test_reduce_dimensionality.py +358 -0
- modacor/tests/modules/base_modules/test_sink_processing_data.py +119 -0
- modacor/tests/modules/base_modules/test_subtract.py +111 -0
- modacor/tests/modules/base_modules/test_subtract_databundles.py +136 -0
- modacor/tests/modules/base_modules/test_units_label_update.py +91 -0
- modacor/tests/modules/technique_modules/__init__.py +0 -0
- modacor/tests/modules/technique_modules/scattering/__init__.py +0 -0
- modacor/tests/modules/technique_modules/scattering/test_geometry_helpers.py +198 -0
- modacor/tests/modules/technique_modules/scattering/test_index_pixels.py +426 -0
- modacor/tests/modules/technique_modules/scattering/test_indexed_averaging.py +559 -0
- modacor/tests/modules/technique_modules/scattering/test_pixel_coordinates_3d.py +282 -0
- modacor/tests/modules/technique_modules/scattering/test_xs_geometry_from_pixel_coordinates.py +224 -0
- modacor/tests/modules/technique_modules/scattering/test_xsgeometry.py +635 -0
- modacor/tests/requirements.txt +12 -0
- modacor/tests/runner/test_pipeline.py +438 -0
- modacor/tests/runner/test_process_step_registry.py +65 -0
- modacor/tests/test_import.py +43 -0
- modacor/tests/test_modacor.py +17 -0
- modacor/tests/test_units.py +79 -0
- modacor/units.py +97 -0
- modacor-1.0.0.dist-info/METADATA +482 -0
- modacor-1.0.0.dist-info/RECORD +120 -0
- modacor-1.0.0.dist-info/WHEEL +5 -0
- modacor-1.0.0.dist-info/licenses/AUTHORS.md +11 -0
- modacor-1.0.0.dist-info/licenses/LICENSE +11 -0
- modacor-1.0.0.dist-info/licenses/LICENSE.txt +11 -0
- modacor-1.0.0.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,140 @@
|
|
|
1
|
+
# SPDX-License-Identifier: BSD-3-Clause
|
|
2
|
+
# /usr/bin/env python3
|
|
3
|
+
# -*- coding: utf-8 -*-
|
|
4
|
+
|
|
5
|
+
from __future__ import annotations
|
|
6
|
+
|
|
7
|
+
__coding__ = "utf-8"
|
|
8
|
+
__authors__ = ["Brian R. Pauw"] # add names to the list as appropriate
|
|
9
|
+
__copyright__ = "Copyright 2025, The MoDaCor team"
|
|
10
|
+
__date__ = "16/11/2025"
|
|
11
|
+
__status__ = "Development" # "Development", "Production"
|
|
12
|
+
# end of header and standard imports
|
|
13
|
+
|
|
14
|
+
import unittest
|
|
15
|
+
|
|
16
|
+
import numpy as np
|
|
17
|
+
|
|
18
|
+
import modacor.modules.base_modules.divide as divide_module
|
|
19
|
+
from modacor import ureg
|
|
20
|
+
from modacor.dataclasses.basedata import BaseData
|
|
21
|
+
from modacor.dataclasses.databundle import DataBundle
|
|
22
|
+
from modacor.dataclasses.processing_data import ProcessingData
|
|
23
|
+
from modacor.io.io_sources import IoSources
|
|
24
|
+
|
|
25
|
+
# The processing step under test
|
|
26
|
+
from modacor.modules.base_modules.divide import Divide
|
|
27
|
+
|
|
28
|
+
TEST_IO_SOURCES = IoSources()
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
class TestDivideProcessingStep(unittest.TestCase):
    """Testing class for modacor/modules/base_modules/divide.py"""

    def setUp(self):
        # 2x3 dividend so that shape/broadcasting mistakes cannot hide.
        dividend = np.array([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]], dtype=float)

        self.test_processing_data = ProcessingData()
        self.test_basedata = BaseData(
            signal=dividend,
            units=ureg.Unit("count"),
            uncertainties={"u": np.full_like(dividend, 0.1)},  # absolute 1-sigma
        )
        self.test_data_bundle = DataBundle(signal=self.test_basedata)
        self.test_processing_data["bundle"] = self.test_data_bundle

        # Scalar divisor with its own uncertainty; "propagate_to_all" so it
        # combines with any uncertainty key of the dividend.
        self.divisor = BaseData(
            signal=2.0,
            units=ureg.Unit("second"),
            uncertainties={"propagate_to_all": np.array(0.2, dtype=float)},
        )

        # Reference result from the already-tested BaseData.__truediv__.
        self.expected_result = self.test_basedata / self.divisor

        # Swap in a stub so the step "loads" our known divisor.
        self._orig_basedata_from_sources = divide_module.basedata_from_sources
        divide_module.basedata_from_sources = self._fake_basedata_from_sources

    def tearDown(self):
        # Undo the module-level monkeypatch so other tests see the real helper.
        divide_module.basedata_from_sources = self._orig_basedata_from_sources

    # ------------------------------------------------------------------ #
    # Helpers
    # ------------------------------------------------------------------ #

    def _fake_basedata_from_sources(self, io_sources, signal_source, units_source=None, uncertainty_sources=None):
        """
        Fake basedata_from_sources that ignores its inputs and returns
        the pre-constructed self.divisor.
        """
        return self.divisor

    def _check_result(self, result_bd: BaseData) -> None:
        """Assert signal, uncertainties and units agree with expected_result."""
        np.testing.assert_allclose(result_bd.signal, self.expected_result.signal)
        for key, expected_unc in self.expected_result.uncertainties.items():
            np.testing.assert_allclose(result_bd.uncertainties[key], expected_unc)
        self.assertEqual(result_bd.units, self.expected_result.units)

    # ------------------------------------------------------------------ #
    # Actual tests
    # ------------------------------------------------------------------ #

    def test_divide_calculation(self):
        """
        Divide.calculate() should divide the DataBundle's BaseData by the divisor
        returned from basedata_from_sources, using BaseData.__truediv__ semantics.
        """
        divide_step = Divide(io_sources=TEST_IO_SOURCES)
        divide_step.modify_config_by_kwargs(
            with_processing_keys=["bundle"],
            divisor_source="dummy",  # ignored by our fake basedata_from_sources
        )
        divide_step.processing_data = self.test_processing_data

        divide_step.calculate()

        result_bd: BaseData = self.test_processing_data["bundle"]["signal"]
        self._check_result(result_bd)
        # Explicit unit check: pint should derive count / second.
        self.assertEqual(result_bd.units, ureg.Unit("count / second"))

    def test_divide_execution_via_call(self):
        """
        Divide.__call__ should behave like in other ProcessSteps:
        calling the object with ProcessingData runs the step and updates in-place.
        """
        divide_step = Divide(io_sources=TEST_IO_SOURCES)
        divide_step.modify_config_by_kwargs(
            with_processing_keys=["bundle"],
            divisor_source="dummy",
        )

        # Execute via __call__
        divide_step(self.test_processing_data)

        self._check_result(self.test_processing_data["bundle"]["signal"])
|
|
@@ -0,0 +1,220 @@
|
|
|
1
|
+
# SPDX-License-Identifier: BSD-3-Clause
|
|
2
|
+
# /usr/bin/env python3
|
|
3
|
+
# -*- coding: utf-8 -*-
|
|
4
|
+
|
|
5
|
+
from __future__ import annotations
|
|
6
|
+
|
|
7
|
+
__coding__ = "utf-8"
|
|
8
|
+
__authors__ = ["Brian R. Pauw"] # add names to the list as appropriate
|
|
9
|
+
__copyright__ = "Copyright 2025, The MoDaCor team"
|
|
10
|
+
__date__ = "12/12/2025"
|
|
11
|
+
__status__ = "Development" # "Development", "Production"
|
|
12
|
+
# end of header and standard imports
|
|
13
|
+
|
|
14
|
+
import numpy as np
|
|
15
|
+
import pytest
|
|
16
|
+
|
|
17
|
+
from modacor.dataclasses.basedata import BaseData
|
|
18
|
+
from modacor.dataclasses.databundle import DataBundle
|
|
19
|
+
from modacor.dataclasses.processing_data import ProcessingData
|
|
20
|
+
from modacor.io.io_sources import IoSources
|
|
21
|
+
from modacor.modules.base_modules.find_scale_factor1d import FindScaleFactor1D
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
def _broadcast_to_like(value: float | np.ndarray, template: np.ndarray) -> np.ndarray:
    """
    Expand *value* to a float array shaped like *template*.

    Scalars and single-element arrays are broadcast to the full template
    shape; larger arrays are returned as float arrays unchanged.
    """
    if np.isscalar(value):
        return np.full_like(template, float(value), dtype=float)
    arr = np.asarray(value, dtype=float)
    if arr.size == 1:
        return np.full_like(template, float(arr.ravel()[0]), dtype=float)
    return arr


def _make_1d_bd(
    arr: np.ndarray,
    *,
    units: str,
    sigma: float | np.ndarray = 0.02,
    weights: float | np.ndarray = 1.0,
    rank_of_data: int = 1,
) -> BaseData:
    """
    Construct a 1D BaseData with uniform-or-given uncertainties and weights.

    Parameters
    ----------
    arr:
        Signal values; converted to a float array.
    units:
        Unit string for the signal.
    sigma:
        Scalar or per-point absolute 1-sigma uncertainty; broadcast to arr's shape.
    weights:
        Scalar or per-point fitting weight; broadcast to arr's shape.
    rank_of_data:
        Rank tag for the BaseData; uncertainties are attached only for rank 1.
    """
    arr = np.asarray(arr, dtype=float)

    # The scalar-expansion logic is identical for sigma and weights, so it is
    # shared via _broadcast_to_like instead of being duplicated inline.
    sig_arr = _broadcast_to_like(sigma, arr)
    w_arr = _broadcast_to_like(weights, arr)

    return BaseData(
        signal=arr,
        units=units,
        uncertainties={"propagate_to_all": sig_arr} if rank_of_data == 1 else {},
        weights=w_arr,
        axes=[],
        rank_of_data=rank_of_data,
    )
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
def _make_curve_bundle(
    x: np.ndarray,
    y: np.ndarray,
    *,
    x_units: str = "1/nm",
    y_units: str = "dimensionless",
    sigma_y: float | np.ndarray = 0.02,
    weights_y: float | np.ndarray = 1.0,
) -> DataBundle:
    """
    Build a DataBundle compatible with FindScaleFactor1D's new contract:
      - independent axis as databundle key "Q"
      - dependent as databundle key "signal"

    Parameters
    ----------
    x, y:
        Independent / dependent 1D data; converted to float arrays.
    x_units, y_units:
        Unit strings for the respective BaseData objects.
    sigma_y:
        Scalar or per-point absolute 1-sigma uncertainty on y.
    weights_y:
        Scalar or per-point fitting weight on y.
    """
    x_arr = np.asarray(x, dtype=float)
    y_arr = np.asarray(y, dtype=float)

    # Expand scalars against the *float* y array: np.full_like on the raw
    # input would inherit an integer dtype from integer y and silently
    # truncate values such as 0.02 to 0.
    if np.isscalar(sigma_y):
        sigma_arr = np.full_like(y_arr, float(sigma_y))
    else:
        sigma_arr = np.asarray(sigma_y, dtype=float)

    if np.isscalar(weights_y):
        weights_arr = np.full_like(y_arr, float(weights_y))
    else:
        weights_arr = np.asarray(weights_y, dtype=float)

    x_bd = BaseData(signal=x_arr, units=x_units, rank_of_data=1)
    y_bd = BaseData(
        signal=y_arr,
        units=y_units,
        uncertainties={"propagate_to_all": sigma_arr},
        weights=weights_arr,
        rank_of_data=1,
    )
    return DataBundle({"signal": y_bd, "Q": x_bd})
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
def _run_step(pd: ProcessingData, cfg: dict) -> None:
    """Configure a fresh FindScaleFactor1D with *cfg* and execute it on *pd*."""
    scale_step = FindScaleFactor1D(io_sources=IoSources())
    scale_step.modify_config_by_dict(cfg)
    scale_step.execute(pd)
|
|
91
|
+
|
|
92
|
+
|
|
93
|
+
def test_find_scale_factor_scale_only_perfect_overlap():
    """A pure scale factor between perfectly overlapping curves is recovered."""
    q = np.linspace(0.0, 10.0, 500)
    working_curve = np.sin(q) + 0.2
    expected_scale = 2.5

    pd = ProcessingData()
    pd["work"] = _make_curve_bundle(q, working_curve, sigma_y=0.01)
    pd["ref"] = _make_curve_bundle(q, expected_scale * working_curve, sigma_y=0.01)

    config = {
        "with_processing_keys": ["work", "ref"],
        "fit_background": False,
        "fit_min_val": 2.0,
        "fit_max_val": 8.0,
        "fit_val_units": "1/nm",
        "require_overlap": True,
        "robust_loss": "linear",
        "use_basedata_weights": True,
        "independent_axis_key": "Q",
        "signal_key": "signal",
    }
    _run_step(pd, config)

    sf_bd = pd["work"]["scale_factor"]
    assert float(sf_bd.signal.item()) == pytest.approx(expected_scale, rel=1e-3, abs=1e-3)

    # A scalar uncertainty must be attached to the fitted scale factor.
    assert "propagate_to_all" in sf_bd.uncertainties
    assert sf_bd.uncertainties["propagate_to_all"].size == 1
|
|
125
|
+
|
|
126
|
+
|
|
127
|
+
def test_find_scale_factor_scale_and_background_mismatched_axes_robust():
    """Scale and background on mismatched axes are recovered despite outliers."""
    q_work = np.linspace(0.0, 10.0, 700)
    q_ref = np.linspace(1.0, 9.0, 400)

    work_curve = np.exp(-0.2 * q_work) + 0.1 * np.cos(3 * q_work)
    expected_scale = 1.7
    expected_bg = 0.35

    # Project the work curve onto the reference axis, then scale + offset.
    reference = expected_scale * np.interp(q_ref, q_work, work_curve) + expected_bg

    # Inject two gross outliers that the huber loss should down-weight.
    reference[50] += 5.0
    reference[120] -= 4.0

    pd = ProcessingData()
    pd["work"] = _make_curve_bundle(q_work, work_curve, sigma_y=0.02)
    pd["ref"] = _make_curve_bundle(q_ref, reference, sigma_y=0.02)

    _run_step(
        pd,
        {
            "with_processing_keys": ["work", "ref"],
            "fit_background": True,
            "fit_min_val": 2.0,
            "fit_max_val": 8.0,
            "fit_val_units": "1/nm",
            "require_overlap": True,
            "interpolation_kind": "linear",
            "robust_loss": "huber",
            "robust_fscale": 1.0,
            "use_basedata_weights": True,
            "independent_axis_key": "Q",
            "signal_key": "signal",
        },
    )

    fitted_scale = float(pd["work"]["scale_factor"].signal.item())
    fitted_bg = float(pd["work"]["scale_background"].signal.item())

    assert fitted_scale == pytest.approx(expected_scale, rel=3e-2, abs=3e-2)
    assert fitted_bg == pytest.approx(expected_bg, rel=3e-2, abs=3e-2)
|
|
169
|
+
|
|
170
|
+
|
|
171
|
+
def test_find_scale_factor_raises_on_no_overlap_when_required():
    """Disjoint axis ranges must raise when require_overlap is set."""
    q_work = np.linspace(0.0, 1.0, 200)
    q_ref = np.linspace(2.0, 3.0, 200)

    pd = ProcessingData()
    pd["work"] = _make_curve_bundle(q_work, np.ones_like(q_work), sigma_y=0.01)
    pd["ref"] = _make_curve_bundle(q_ref, np.ones_like(q_ref) * 2.0, sigma_y=0.01)

    config = {
        "with_processing_keys": ["work", "ref"],
        "require_overlap": True,
        "independent_axis_key": "Q",
        "signal_key": "signal",
    }
    with pytest.raises(ValueError, match="No overlap"):
        _run_step(pd, config)
|
|
189
|
+
|
|
190
|
+
|
|
191
|
+
def test_find_scale_factor_weights_have_effect():
    """Per-point weights carried by the BaseData are honoured by the fit."""
    q = np.linspace(0.0, 10.0, 600)
    working_curve = 0.5 + np.sin(q)
    expected_scale = 3.0

    # Strongly down-weight the upper half of the reference curve; the scale
    # factor must still be recovered from the remaining points.
    reference_weights = np.ones_like(q)
    reference_weights[q > 5.0] = 0.1

    pd = ProcessingData()
    pd["work"] = _make_curve_bundle(q, working_curve, sigma_y=0.01, weights_y=1.0)
    pd["ref"] = _make_curve_bundle(
        q, expected_scale * working_curve, sigma_y=0.01, weights_y=reference_weights
    )

    _run_step(
        pd,
        {
            "with_processing_keys": ["work", "ref"],
            "fit_background": False,
            "fit_min_val": 0.5,
            "fit_max_val": 9.5,
            "fit_val_units": "1/nm",
            "robust_loss": "linear",
            "use_basedata_weights": True,
            "independent_axis_key": "Q",
            "signal_key": "signal",
        },
    )

    fitted_scale = float(pd["work"]["scale_factor"].signal.item())
    assert fitted_scale == pytest.approx(expected_scale, rel=1e-3, abs=1e-3)
|
|
@@ -0,0 +1,113 @@
|
|
|
1
|
+
# SPDX-License-Identifier: BSD-3-Clause
|
|
2
|
+
# /usr/bin/env python3
|
|
3
|
+
# -*- coding: utf-8 -*-
|
|
4
|
+
|
|
5
|
+
from __future__ import annotations
|
|
6
|
+
|
|
7
|
+
__coding__ = "utf-8"
|
|
8
|
+
__authors__ = ["Brian R. Pauw"] # add names to the list as appropriate
|
|
9
|
+
__copyright__ = "Copyright 2025, The MoDaCor team"
|
|
10
|
+
__date__ = "16/11/2025"
|
|
11
|
+
__status__ = "Development" # "Development", "Production"
|
|
12
|
+
# end of header and standard imports
|
|
13
|
+
|
|
14
|
+
import unittest
|
|
15
|
+
|
|
16
|
+
import numpy as np
|
|
17
|
+
|
|
18
|
+
import modacor.modules.base_modules.multiply as multiply_module
|
|
19
|
+
from modacor import ureg
|
|
20
|
+
from modacor.dataclasses.basedata import BaseData
|
|
21
|
+
from modacor.dataclasses.databundle import DataBundle
|
|
22
|
+
from modacor.dataclasses.processing_data import ProcessingData
|
|
23
|
+
from modacor.io.io_sources import IoSources
|
|
24
|
+
from modacor.modules.base_modules.multiply import Multiply
|
|
25
|
+
|
|
26
|
+
TEST_IO_SOURCES = IoSources()
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
class TestMultiplyProcessingStep(unittest.TestCase):
    """Testing class for modacor/modules/base_modules/multiply.py"""

    def setUp(self):
        # 2x3 multiplicand so that broadcasting mistakes cannot hide.
        multiplicand = np.array([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]], dtype=float)

        self.test_processing_data = ProcessingData()
        self.base = BaseData(
            signal=multiplicand,
            units=ureg.Unit("count"),
            uncertainties={"u": np.full_like(multiplicand, 0.1)},  # absolute 1-sigma
        )
        self.test_data_bundle = DataBundle(signal=self.base)
        self.test_processing_data["bundle"] = self.test_data_bundle

        # Scalar factor with its own unit and uncertainty.
        self.factor = BaseData(
            signal=2.0,
            units=ureg.Unit("second"),
            uncertainties={"propagate_to_all": np.array(0.2, dtype=float)},
        )

        # Reference result via the already-tested BaseData.__mul__.
        self.expected_result = self.base * self.factor

        # Swap in a stub so the step "loads" our known factor.
        self._orig_basedata_from_sources = multiply_module.basedata_from_sources
        multiply_module.basedata_from_sources = self._fake_basedata_from_sources

    def tearDown(self):
        # Undo the module-level monkeypatch so other tests see the real helper.
        multiply_module.basedata_from_sources = self._orig_basedata_from_sources

    def _fake_basedata_from_sources(self, io_sources, signal_source, units_source=None, uncertainty_sources=None):
        """Fake basedata_from_sources that always returns self.factor."""
        return self.factor

    def _check_result(self, result_bd: BaseData) -> None:
        """Assert signal, uncertainties and units agree with expected_result."""
        np.testing.assert_allclose(result_bd.signal, self.expected_result.signal)
        for key, expected_unc in self.expected_result.uncertainties.items():
            np.testing.assert_allclose(result_bd.uncertainties[key], expected_unc)
        self.assertEqual(result_bd.units, self.expected_result.units)

    def test_multiply_calculation(self):
        """
        Multiply.calculate() should multiply the DataBundle's BaseData by the factor
        returned from basedata_from_sources, using BaseData.__mul__ semantics.
        """
        step = Multiply(io_sources=TEST_IO_SOURCES)
        step.modify_config_by_kwargs(
            with_processing_keys=["bundle"],
            multiplier_source="dummy",  # ignored by fake basedata_from_sources
        )
        step.processing_data = self.test_processing_data

        step.calculate()

        self._check_result(self.test_processing_data["bundle"]["signal"])

    def test_multiply_execution_via_call(self):
        """
        Multiply.__call__ should run the step and update ProcessingData in-place.
        """
        step = Multiply(io_sources=TEST_IO_SOURCES)
        step.modify_config_by_kwargs(
            with_processing_keys=["bundle"],
            multiplier_source="dummy",
        )

        step(self.test_processing_data)

        self._check_result(self.test_processing_data["bundle"]["signal"])
|
|
@@ -0,0 +1,136 @@
|
|
|
1
|
+
# SPDX-License-Identifier: BSD-3-Clause
|
|
2
|
+
# /usr/bin/env python3
|
|
3
|
+
# -*- coding: utf-8 -*-
|
|
4
|
+
|
|
5
|
+
from __future__ import annotations
|
|
6
|
+
|
|
7
|
+
__coding__ = "utf-8"
|
|
8
|
+
__authors__ = ["Brian R. Pauw"] # add names to the list as appropriate
|
|
9
|
+
__copyright__ = "Copyright 2025, The MoDaCor team"
|
|
10
|
+
__date__ = "12/12/2025"
|
|
11
|
+
__status__ = "Development" # "Development", "Production"
|
|
12
|
+
# end of header and standard imports
|
|
13
|
+
|
|
14
|
+
import unittest
|
|
15
|
+
|
|
16
|
+
import numpy as np
|
|
17
|
+
|
|
18
|
+
from modacor import ureg
|
|
19
|
+
from modacor.dataclasses.basedata import BaseData
|
|
20
|
+
from modacor.dataclasses.databundle import DataBundle
|
|
21
|
+
from modacor.dataclasses.processing_data import ProcessingData
|
|
22
|
+
from modacor.io.io_sources import IoSources
|
|
23
|
+
from modacor.modules.base_modules.multiply_databundles import MultiplyDatabundles
|
|
24
|
+
|
|
25
|
+
TEST_IO_SOURCES = IoSources()
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
class TestMultiplyDatabundles(unittest.TestCase):
    """Testing class for modacor/modules/base_modules/multiply_databundles.py"""

    def setUp(self):
        # Two 2x3 operands with distinct values and absolute 1-sigma uncertainties.
        signal_a = np.array([[10.0, 20.0, 30.0], [40.0, 50.0, 60.0]], dtype=float)
        signal_b = np.array([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]], dtype=float)

        self.bd1 = BaseData(
            signal=signal_a,
            units=ureg.Unit("count"),
            uncertainties={"u": np.full_like(signal_a, 0.5)},
        )
        self.bd2 = BaseData(
            signal=signal_b,
            units=ureg.Unit("count"),
            uncertainties={"u": np.full_like(signal_b, 0.2)},
        )

        self.bundle1 = DataBundle(signal=self.bd1)
        self.bundle2 = DataBundle(signal=self.bd2)

        self.processing_data = ProcessingData()
        self.processing_data["bundle1"] = self.bundle1
        self.processing_data["bundle2"] = self.bundle2

        # Reference result via the already-tested BaseData.__mul__.
        self.expected_result = self.bd1 * self.bd2

    # ------------------------------------------------------------------ #
    # Helpers
    # ------------------------------------------------------------------ #

    def _check_result(self, result_bd: BaseData) -> None:
        """Assert signal, uncertainties and units agree with expected_result."""
        np.testing.assert_allclose(result_bd.signal, self.expected_result.signal)
        for key, expected_unc in self.expected_result.uncertainties.items():
            np.testing.assert_allclose(result_bd.uncertainties[key], expected_unc)
        self.assertEqual(result_bd.units, self.expected_result.units)

    # ------------------------------------------------------------------ #
    # Tests
    # ------------------------------------------------------------------ #

    def test_multiply_databundles_calculation(self):
        """
        MultiplyDatabundles.calculate() should multiply the second DataBundle's
        signal with the first, using BaseData.__mul__ semantics.
        """
        step = MultiplyDatabundles(io_sources=TEST_IO_SOURCES)
        step.modify_config_by_kwargs(
            with_processing_keys=["bundle1", "bundle2"],
        )
        step.processing_data = self.processing_data

        output = step.calculate()

        # Only the multiplicand key should be in output.
        self.assertEqual(list(output.keys()), ["bundle1"])

        self._check_result(self.processing_data["bundle1"]["signal"])

    def test_multiply_databundles_execution_via_call(self):
        """
        MultiplyDatabundles.__call__ should run the step and update ProcessingData in-place.
        """
        # Fresh ProcessingData so the other test's in-place edit cannot interfere.
        fresh_pd = ProcessingData()
        fresh_pd["bundle1"] = DataBundle(signal=self.bd1)
        fresh_pd["bundle2"] = DataBundle(signal=self.bd2)

        step = MultiplyDatabundles(io_sources=TEST_IO_SOURCES)
        step.modify_config_by_kwargs(
            with_processing_keys=["bundle1", "bundle2"],
        )

        step(fresh_pd)

        self._check_result(fresh_pd["bundle1"]["signal"])

    def test_requires_exactly_two_keys(self):
        """
        MultiplyDatabundles should assert if 'with_processing_keys' does not
        contain exactly two keys.
        """
        step = MultiplyDatabundles(io_sources=TEST_IO_SOURCES)
        # Only one key -> should trigger the assertion in calculate().
        step.modify_config_by_kwargs(
            with_processing_keys=["bundle1"],
        )
        step.processing_data = self.processing_data

        with self.assertRaises(AssertionError):
            step.calculate()
|
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
# SPDX-License-Identifier: BSD-3-Clause
|
|
2
|
+
# /usr/bin/env python3
|
|
3
|
+
# -*- coding: utf-8 -*-
|
|
4
|
+
|
|
5
|
+
from __future__ import annotations
|
|
6
|
+
|
|
7
|
+
__coding__ = "utf-8"
|
|
8
|
+
__authors__ = ["Brian R. Pauw"] # add names to the list as appropriate
|
|
9
|
+
__copyright__ = "Copyright 2025, The MoDaCor team"
|
|
10
|
+
__date__ = "15/11/2025"
|
|
11
|
+
__status__ = "Development" # "Development", "Production"
|
|
12
|
+
# end of header and standard imports
|
|
13
|
+
|
|
14
|
+
import unittest
|
|
15
|
+
|
|
16
|
+
import numpy as np
|
|
17
|
+
|
|
18
|
+
from modacor import ureg
|
|
19
|
+
from modacor.dataclasses.basedata import BaseData
|
|
20
|
+
from modacor.dataclasses.databundle import DataBundle
|
|
21
|
+
from modacor.dataclasses.processing_data import ProcessingData
|
|
22
|
+
from modacor.io.io_sources import IoSources
|
|
23
|
+
from modacor.modules.base_modules.poisson_uncertainties import PoissonUncertainties
|
|
24
|
+
|
|
25
|
+
# import h5py
|
|
26
|
+
|
|
27
|
+
TEST_IO_SOURCES = IoSources()
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
class TestPoissonUncertainties(unittest.TestCase):
    """Testing class for modacor/modules/base_modules/poisson_uncertainties.py"""

    def setUp(self):
        # 10x10 count data starting at zero, so the variance floor is exercised.
        counts = np.arange(0, 100).reshape((10, 10))
        self.test_processing_data = ProcessingData()
        self.test_data = BaseData(signal=counts, units=ureg.Unit("count"))
        self.test_data_bundle = DataBundle(signal=self.test_data)
        self.test_processing_data["bundle"] = self.test_data_bundle

    def _configured_step(self):
        """Return a PoissonUncertainties step wired to the 'bundle' key."""
        step = PoissonUncertainties(io_sources=TEST_IO_SOURCES)
        step.modify_config_by_kwargs(with_processing_keys=["bundle"])
        return step

    def test_poisson_calculation(self):
        step = self._configured_step()
        step.processing_data = self.test_processing_data
        step.calculate()
        self.assertIn("Poisson", self.test_processing_data["bundle"]["signal"].variances)

    def test_poisson_execution(self):
        step = self._configured_step()
        step(self.test_processing_data)
        self.assertIn("Poisson", self.test_processing_data["bundle"]["signal"].variances)

    def test_poisson_result_values(self):
        step = self._configured_step()
        step(self.test_processing_data)
        # Poisson variance equals the counts, floored at 1 for zero counts.
        expected_variances = np.arange(0, 100).reshape((10, 10)).astype(float).clip(min=1)
        actual_variances = self.test_processing_data["bundle"]["signal"].variances["Poisson"]
        np.testing.assert_allclose(expected_variances, actual_variances)
|