modacor 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- modacor/__init__.py +30 -0
- modacor/dataclasses/__init__.py +0 -0
- modacor/dataclasses/basedata.py +973 -0
- modacor/dataclasses/databundle.py +23 -0
- modacor/dataclasses/helpers.py +45 -0
- modacor/dataclasses/messagehandler.py +75 -0
- modacor/dataclasses/process_step.py +233 -0
- modacor/dataclasses/process_step_describer.py +146 -0
- modacor/dataclasses/processing_data.py +59 -0
- modacor/dataclasses/trace_event.py +118 -0
- modacor/dataclasses/uncertainty_tools.py +132 -0
- modacor/dataclasses/validators.py +84 -0
- modacor/debug/pipeline_tracer.py +548 -0
- modacor/io/__init__.py +33 -0
- modacor/io/csv/__init__.py +0 -0
- modacor/io/csv/csv_sink.py +114 -0
- modacor/io/csv/csv_source.py +210 -0
- modacor/io/hdf/__init__.py +27 -0
- modacor/io/hdf/hdf_source.py +120 -0
- modacor/io/io_sink.py +41 -0
- modacor/io/io_sinks.py +61 -0
- modacor/io/io_source.py +164 -0
- modacor/io/io_sources.py +208 -0
- modacor/io/processing_path.py +113 -0
- modacor/io/tiled/__init__.py +16 -0
- modacor/io/tiled/tiled_source.py +403 -0
- modacor/io/yaml/__init__.py +27 -0
- modacor/io/yaml/yaml_source.py +116 -0
- modacor/modules/__init__.py +53 -0
- modacor/modules/base_modules/__init__.py +0 -0
- modacor/modules/base_modules/append_processing_data.py +329 -0
- modacor/modules/base_modules/append_sink.py +141 -0
- modacor/modules/base_modules/append_source.py +181 -0
- modacor/modules/base_modules/bitwise_or_masks.py +113 -0
- modacor/modules/base_modules/combine_uncertainties.py +120 -0
- modacor/modules/base_modules/combine_uncertainties_max.py +105 -0
- modacor/modules/base_modules/divide.py +82 -0
- modacor/modules/base_modules/find_scale_factor1d.py +373 -0
- modacor/modules/base_modules/multiply.py +77 -0
- modacor/modules/base_modules/multiply_databundles.py +73 -0
- modacor/modules/base_modules/poisson_uncertainties.py +69 -0
- modacor/modules/base_modules/reduce_dimensionality.py +252 -0
- modacor/modules/base_modules/sink_processing_data.py +80 -0
- modacor/modules/base_modules/subtract.py +80 -0
- modacor/modules/base_modules/subtract_databundles.py +67 -0
- modacor/modules/base_modules/units_label_update.py +66 -0
- modacor/modules/instrument_modules/__init__.py +0 -0
- modacor/modules/instrument_modules/readme.md +9 -0
- modacor/modules/technique_modules/__init__.py +0 -0
- modacor/modules/technique_modules/scattering/__init__.py +0 -0
- modacor/modules/technique_modules/scattering/geometry_helpers.py +114 -0
- modacor/modules/technique_modules/scattering/index_pixels.py +492 -0
- modacor/modules/technique_modules/scattering/indexed_averager.py +628 -0
- modacor/modules/technique_modules/scattering/pixel_coordinates_3d.py +417 -0
- modacor/modules/technique_modules/scattering/solid_angle_correction.py +63 -0
- modacor/modules/technique_modules/scattering/xs_geometry.py +571 -0
- modacor/modules/technique_modules/scattering/xs_geometry_from_pixel_coordinates.py +293 -0
- modacor/runner/__init__.py +0 -0
- modacor/runner/pipeline.py +749 -0
- modacor/runner/process_step_registry.py +224 -0
- modacor/tests/__init__.py +27 -0
- modacor/tests/dataclasses/test_basedata.py +519 -0
- modacor/tests/dataclasses/test_basedata_operations.py +439 -0
- modacor/tests/dataclasses/test_basedata_to_base_units.py +57 -0
- modacor/tests/dataclasses/test_process_step_describer.py +73 -0
- modacor/tests/dataclasses/test_processstep.py +282 -0
- modacor/tests/debug/test_tracing_integration.py +188 -0
- modacor/tests/integration/__init__.py +0 -0
- modacor/tests/integration/test_pipeline_run.py +238 -0
- modacor/tests/io/__init__.py +27 -0
- modacor/tests/io/csv/__init__.py +0 -0
- modacor/tests/io/csv/test_csv_source.py +156 -0
- modacor/tests/io/hdf/__init__.py +27 -0
- modacor/tests/io/hdf/test_hdf_source.py +92 -0
- modacor/tests/io/test_io_sources.py +119 -0
- modacor/tests/io/tiled/__init__.py +12 -0
- modacor/tests/io/tiled/test_tiled_source.py +120 -0
- modacor/tests/io/yaml/__init__.py +27 -0
- modacor/tests/io/yaml/static_data_example.yaml +26 -0
- modacor/tests/io/yaml/test_yaml_source.py +47 -0
- modacor/tests/modules/__init__.py +27 -0
- modacor/tests/modules/base_modules/__init__.py +27 -0
- modacor/tests/modules/base_modules/test_append_processing_data.py +219 -0
- modacor/tests/modules/base_modules/test_append_sink.py +76 -0
- modacor/tests/modules/base_modules/test_append_source.py +180 -0
- modacor/tests/modules/base_modules/test_bitwise_or_masks.py +264 -0
- modacor/tests/modules/base_modules/test_combine_uncertainties.py +105 -0
- modacor/tests/modules/base_modules/test_combine_uncertainties_max.py +109 -0
- modacor/tests/modules/base_modules/test_divide.py +140 -0
- modacor/tests/modules/base_modules/test_find_scale_factor1d.py +220 -0
- modacor/tests/modules/base_modules/test_multiply.py +113 -0
- modacor/tests/modules/base_modules/test_multiply_databundles.py +136 -0
- modacor/tests/modules/base_modules/test_poisson_uncertainties.py +61 -0
- modacor/tests/modules/base_modules/test_reduce_dimensionality.py +358 -0
- modacor/tests/modules/base_modules/test_sink_processing_data.py +119 -0
- modacor/tests/modules/base_modules/test_subtract.py +111 -0
- modacor/tests/modules/base_modules/test_subtract_databundles.py +136 -0
- modacor/tests/modules/base_modules/test_units_label_update.py +91 -0
- modacor/tests/modules/technique_modules/__init__.py +0 -0
- modacor/tests/modules/technique_modules/scattering/__init__.py +0 -0
- modacor/tests/modules/technique_modules/scattering/test_geometry_helpers.py +198 -0
- modacor/tests/modules/technique_modules/scattering/test_index_pixels.py +426 -0
- modacor/tests/modules/technique_modules/scattering/test_indexed_averaging.py +559 -0
- modacor/tests/modules/technique_modules/scattering/test_pixel_coordinates_3d.py +282 -0
- modacor/tests/modules/technique_modules/scattering/test_xs_geometry_from_pixel_coordinates.py +224 -0
- modacor/tests/modules/technique_modules/scattering/test_xsgeometry.py +635 -0
- modacor/tests/requirements.txt +12 -0
- modacor/tests/runner/test_pipeline.py +438 -0
- modacor/tests/runner/test_process_step_registry.py +65 -0
- modacor/tests/test_import.py +43 -0
- modacor/tests/test_modacor.py +17 -0
- modacor/tests/test_units.py +79 -0
- modacor/units.py +97 -0
- modacor-1.0.0.dist-info/METADATA +482 -0
- modacor-1.0.0.dist-info/RECORD +120 -0
- modacor-1.0.0.dist-info/WHEEL +5 -0
- modacor-1.0.0.dist-info/licenses/AUTHORS.md +11 -0
- modacor-1.0.0.dist-info/licenses/LICENSE +11 -0
- modacor-1.0.0.dist-info/licenses/LICENSE.txt +11 -0
- modacor-1.0.0.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,180 @@
|
|
|
1
|
+
# SPDX-License-Identifier: BSD-3-Clause
|
|
2
|
+
# /usr/bin/env python3
|
|
3
|
+
# -*- coding: utf-8 -*-
|
|
4
|
+
|
|
5
|
+
from __future__ import annotations
|
|
6
|
+
|
|
7
|
+
__coding__ = "utf-8"
|
|
8
|
+
__authors__ = ["Brian R. Pauw"] # add names to the list as appropriate
|
|
9
|
+
__copyright__ = "Copyright 2025, The MoDaCor team"
|
|
10
|
+
__date__ = "30/11/2025"
|
|
11
|
+
__status__ = "Development" # "Development", "Production"
|
|
12
|
+
# end of header and standard imports
|
|
13
|
+
|
|
14
|
+
import sys
|
|
15
|
+
import types
|
|
16
|
+
from typing import List, Tuple
|
|
17
|
+
|
|
18
|
+
import pytest
|
|
19
|
+
|
|
20
|
+
from modacor.io.io_sources import IoSource, IoSources
|
|
21
|
+
from modacor.modules.base_modules.append_source import AppendSource
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
def _install_dummy_iosource_module(monkeypatch) -> Tuple[str, List[tuple]]:
    """
    Register a throwaway loader module in sys.modules for the duration of a test.

    Returns a pair of:

    - the dotted path to the loader callable inside that module
    - a list that accumulates every (source_reference, resource_location)
      pair the loader was invoked with, for later assertions
    """
    module_name = "modacor.tests.dummy_iosource_module"
    dummy_module = types.ModuleType(module_name)
    recorded_calls: List[tuple] = []

    class DummySource(IoSource):
        """
        Minimal IoSource subclass used only for testing.

        __init__ is overridden and deliberately does *not* call
        super().__init__, so the test stays independent of IoSource's real
        constructor signature. Only the attributes that
        IoSources.register_source needs (source_reference,
        resource_location) are provided.
        """

        def __init__(self, ref: str, loc: str) -> None:
            self.source_reference = ref
            self.resource_location = loc

    def dummy_loader(*, source_reference: str, resource_location: str, **kwargs) -> IoSource:
        """
        Stand-in for the real loader signature used by AppendSource.

        Records each invocation and hands back a fresh DummySource.
        """
        recorded_calls.append((source_reference, resource_location))
        return DummySource(source_reference, resource_location)

    # AppendSource._resolve_iosource_callable looks this attribute name up
    # on the module, so it must match the dotted path returned below.
    dummy_module.DummyLoader = dummy_loader  # type: ignore[attr-defined]

    # Make the module importable via import_module(); monkeypatch undoes this.
    monkeypatch.setitem(sys.modules, module_name, dummy_module)

    return module_name + ".DummyLoader", recorded_calls
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
def _make_append_source_instance() -> AppendSource:
    """
    Build a bare AppendSource, skipping ProcessStep.__init__ entirely.

    The tests below only touch 'configuration' and 'io_sources', so an
    uninitialised instance from __new__ suffices and keeps the tests
    decoupled from ProcessStep's constructor signature.
    """
    return AppendSource.__new__(AppendSource)
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
def test_append_single_source(monkeypatch):
    """A single identifier/location pair is loaded and registered once."""
    dotted_path, recorded_calls = _install_dummy_iosource_module(monkeypatch)

    step = _make_append_source_instance()
    step.io_sources = IoSources()
    step.configuration = {
        "source_identifier": "sample_source",
        "source_location": "/tmp/sample.dat",
        "iosource_module": dotted_path,
    }

    outcome = step.calculate()

    # No databundles modified
    assert outcome == {}

    # The new source should be present
    assert "sample_source" in step.io_sources.defined_sources

    # Loader should be called exactly once with the expected args
    assert recorded_calls == [("sample_source", "/tmp/sample.dat")]
|
|
101
|
+
|
|
102
|
+
|
|
103
|
+
def test_append_multiple_sources(monkeypatch):
    """Parallel lists of identifiers/locations are appended pairwise, in order."""
    dotted_path, recorded_calls = _install_dummy_iosource_module(monkeypatch)

    step = _make_append_source_instance()
    step.io_sources = IoSources()
    step.configuration = {
        "source_identifier": ["src1", "src2"],
        "source_location": ["/tmp/file1.dat", "/tmp/file2.dat"],
        "iosource_module": dotted_path,
    }

    outcome = step.calculate()

    # Still no databundles modified
    assert outcome == {}

    # Both sources should be present
    for identifier in ("src1", "src2"):
        assert identifier in step.io_sources.defined_sources

    # Loader should have been called twice in order
    assert recorded_calls == [
        ("src1", "/tmp/file1.dat"),
        ("src2", "/tmp/file2.dat"),
    ]
|
|
128
|
+
|
|
129
|
+
|
|
130
|
+
def test_mismatched_source_lengths_raises(monkeypatch):
    """Unequal identifier/location list lengths must raise a ValueError."""
    dotted_path, _ = _install_dummy_iosource_module(monkeypatch)

    step = _make_append_source_instance()
    step.io_sources = IoSources()
    # Two identifiers but only one location: an invalid configuration.
    step.configuration = {
        "source_identifier": ["src1", "src2"],
        "source_location": ["/tmp/only_one.dat"],
        "iosource_module": dotted_path,
    }

    with pytest.raises(ValueError, match="counts must match"):
        step.calculate()
|
|
143
|
+
|
|
144
|
+
|
|
145
|
+
def test_existing_source_is_not_overwritten(monkeypatch):
    """An already-registered identifier must not be replaced by calculate()."""
    dotted_path, recorded_calls = _install_dummy_iosource_module(monkeypatch)

    step = _make_append_source_instance()
    step.io_sources = IoSources()

    class PreExistingSource(IoSource):
        """
        Pre-registered IoSource subclass for testing overwrite behaviour.
        """

        def __init__(self, ref: str, loc: str) -> None:
            self.source_reference = ref
            self.resource_location = loc

    # Pre-register a source with identifier "existing"
    step.io_sources.register_source(PreExistingSource(ref="existing", loc="/tmp/original.dat"))

    # Configuration attempts to append a source with the same identifier
    step.configuration = {
        "source_identifier": "existing",
        "source_location": "/tmp/new_location.dat",
        "iosource_module": dotted_path,
    }

    outcome = step.calculate()

    # No databundles modified
    assert outcome == {}

    # The identifier should still be present…
    assert "existing" in step.io_sources.defined_sources

    # …but the loader should not have been called at all,
    # since AppendSource checks defined_sources before appending.
    assert recorded_calls == []
|
|
@@ -0,0 +1,264 @@
|
|
|
1
|
+
# SPDX-License-Identifier: BSD-3-Clause
|
|
2
|
+
# /usr/bin/env python3
|
|
3
|
+
# -*- coding: utf-8 -*-
|
|
4
|
+
|
|
5
|
+
from __future__ import annotations
|
|
6
|
+
|
|
7
|
+
__coding__ = "utf-8"
|
|
8
|
+
__authors__ = ["Brian R. Pauw"] # add names to the list as appropriate
|
|
9
|
+
__copyright__ = "Copyright 2026, The MoDaCor team"
|
|
10
|
+
__date__ = "09/01/2026"
|
|
11
|
+
__status__ = "Development" # "Development", "Production"
|
|
12
|
+
# end of header and standard imports
|
|
13
|
+
|
|
14
|
+
import unittest
|
|
15
|
+
|
|
16
|
+
import numpy as np
|
|
17
|
+
|
|
18
|
+
from modacor import ureg
|
|
19
|
+
from modacor.dataclasses.basedata import BaseData
|
|
20
|
+
from modacor.dataclasses.databundle import DataBundle
|
|
21
|
+
from modacor.dataclasses.processing_data import ProcessingData
|
|
22
|
+
from modacor.io.io_sources import IoSources
|
|
23
|
+
|
|
24
|
+
# module under test
|
|
25
|
+
from modacor.modules.base_modules.bitwise_or_masks import BitwiseOrMasks
|
|
26
|
+
|
|
27
|
+
TEST_IO_SOURCES = IoSources()
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
class TestBitwiseOrMasksProcessingStep(unittest.TestCase):
    """Testing class for modacor/modules/base_modules/bitwise_or_masks.py"""

    @staticmethod
    def _bundle(target_mask: np.ndarray, source_mask: np.ndarray) -> DataBundle:
        """Pack a target and a source mask array into one DataBundle under the
        keys the step configuration refers to ("mask" and "bs_mask")."""
        return DataBundle(
            mask=BaseData(signal=target_mask, units=ureg.dimensionless, uncertainties={}),
            bs_mask=BaseData(signal=source_mask, units=ureg.dimensionless, uncertainties={}),
        )

    def setUp(self):
        self.test_processing_data = ProcessingData()
        target = np.zeros((2, 3), dtype=np.uint32)
        source = np.array([[4, 0, 0], [0, 32, 0]], dtype=np.uint32)
        self.test_processing_data["sample"] = self._bundle(target, source)

    def _make_step(self, *, target="mask", sources=None) -> BitwiseOrMasks:
        """Return a configured BitwiseOrMasks step bound to the current
        self.test_processing_data."""
        step = BitwiseOrMasks(io_sources=TEST_IO_SOURCES)
        step.configuration = {
            "with_processing_keys": ["sample"],
            "target_mask_key": target,
            "source_mask_keys": list(sources or []),
        }
        step.processing_data = self.test_processing_data
        return step

    # ------------------------------------------------------------------ #
    # Actual tests
    # ------------------------------------------------------------------ #

    def test_bitwise_or_masks_calculate_inplace_when_target_is_uint32(self):
        """
        If the target is already uint32, the OR should be performed truly in-place
        (target array object remains the same).
        """
        step = self._make_step(sources=["bs_mask"])
        original_array = self.test_processing_data["sample"]["mask"].signal

        step.calculate()

        result = self.test_processing_data["sample"]["mask"].signal
        self.assertIs(result, original_array)  # in-place, no replacement
        np.testing.assert_array_equal(
            result, np.array([[4, 0, 0], [0, 32, 0]], dtype=np.uint32)
        )

    def test_bitwise_or_masks_execution_via_call(self):
        """
        __call__ should run the step and update processing_data in-place.
        """
        step = BitwiseOrMasks(io_sources=TEST_IO_SOURCES)
        step.configuration = {
            "with_processing_keys": ["sample"],
            "target_mask_key": "mask",
            "source_mask_keys": ["bs_mask"],
        }

        step(self.test_processing_data)

        np.testing.assert_array_equal(
            self.test_processing_data["sample"]["mask"].signal,
            np.array([[4, 0, 0], [0, 32, 0]], dtype=np.uint32),
        )

    def test_uint8_source_is_converted_and_or_applied(self):
        """
        A uint8 mask source should be accepted and widened to uint32 only if needed.
        """
        self.test_processing_data = ProcessingData()
        # uint8 source containing bit 2 and bit 5
        self.test_processing_data["sample"] = self._bundle(
            np.zeros((2, 3), dtype=np.uint32),
            np.array([[4, 0, 0], [0, 32, 0]], dtype=np.uint8),
        )

        step = self._make_step(sources=["bs_mask"])
        step.calculate()

        result = self.test_processing_data["sample"]["mask"].signal
        self.assertEqual(result.dtype, np.uint32)  # canonical dtype
        np.testing.assert_array_equal(
            result, np.array([[4, 0, 0], [0, 32, 0]], dtype=np.uint32)
        )

    def test_target_non_uint32_is_upcast_to_uint32_once(self):
        """
        If the target mask isn't uint32 (e.g. int64), the step should convert it to uint32
        (one-time allocation) and then OR into that.
        """
        self.test_processing_data = ProcessingData()
        self.test_processing_data["sample"] = self._bundle(
            np.zeros((2, 3), dtype=np.int64),
            np.array([[1, 0, 0], [0, 2, 0]], dtype=np.uint32),
        )

        step = self._make_step(sources=["bs_mask"])
        original_array = self.test_processing_data["sample"]["mask"].signal
        step.calculate()

        result = self.test_processing_data["sample"]["mask"].signal
        self.assertEqual(result.dtype, np.uint32)
        self.assertIsNot(result, original_array)  # replacement happened due to upcast
        np.testing.assert_array_equal(
            result, np.array([[1, 0, 0], [0, 2, 0]], dtype=np.uint32)
        )

    def test_mixed_signed_int_preserves_high_bit_after_uint32_canonicalization(self):
        """
        OR in bit 31 (0x80000000), stored as negative in int32.
        After canonicalization, target is uint32 and should contain 0x80000000.
        """
        self.test_processing_data = ProcessingData()
        source = np.array([np.int32(-2147483648)], dtype=np.int32)  # 0x80000000
        self.test_processing_data["sample"] = self._bundle(
            np.zeros((1,), dtype=np.uint32), source
        )

        step = self._make_step(sources=["bs_mask"])
        step.calculate()

        result = self.test_processing_data["sample"]["mask"].signal
        self.assertEqual(result.dtype, np.uint32)
        self.assertEqual(int(result[0]), 0x80000000)

        # source unchanged
        self.assertEqual(source.dtype, np.int32)
        self.assertEqual(int(source[0]), -2147483648)

    def test_rejects_non_integer_masks(self):
        """
        Float masks should be rejected (masks must be integer dtype).
        """
        self.test_processing_data = ProcessingData()
        self.test_processing_data["sample"] = self._bundle(
            np.zeros((2, 3), dtype=np.uint32),
            np.zeros((2, 3), dtype=float),
        )

        step = self._make_step(sources=["bs_mask"])

        with self.assertRaises(AssertionError):
            step.calculate()

    def test_broadcasting_and_shape_errors(self):
        """
        Broadcasting is handled by NumPy; incompatible shapes should raise ValueError.
        """
        # broadcastable (2,3) |= (2,1)
        self.test_processing_data = ProcessingData()
        self.test_processing_data["sample"] = self._bundle(
            np.zeros((2, 3), dtype=np.uint32),
            np.array([[1], [2]], dtype=np.uint8),  # now explicitly uint8
        )

        step = self._make_step(sources=["bs_mask"])
        step.calculate()

        np.testing.assert_array_equal(
            self.test_processing_data["sample"]["mask"].signal,
            np.array([[1, 1, 1], [2, 2, 2]], dtype=np.uint32),
        )

        # incompatible -> ValueError from NumPy ufunc broadcasting rules (especially with out=)
        self.test_processing_data = ProcessingData()
        self.test_processing_data["sample"] = self._bundle(
            np.zeros((2, 3), dtype=np.uint32),
            np.zeros((4,), dtype=np.uint8),
        )

        incompatible_step = self._make_step(sources=["bs_mask"])

        with self.assertRaises(ValueError):
            incompatible_step.calculate()
|
|
@@ -0,0 +1,105 @@
|
|
|
1
|
+
# SPDX-License-Identifier: BSD-3-Clause
|
|
2
|
+
# /usr/bin/env python3
|
|
3
|
+
# -*- coding: utf-8 -*-
|
|
4
|
+
|
|
5
|
+
from __future__ import annotations
|
|
6
|
+
|
|
7
|
+
__coding__ = "utf-8"
|
|
8
|
+
__authors__ = ["Brian R. Pauw"] # add names to the list as appropriate
|
|
9
|
+
__copyright__ = "Copyright 2026, The MoDaCor team"
|
|
10
|
+
__date__ = "20/01/2026"
|
|
11
|
+
__status__ = "Development" # "Development", "Production"
|
|
12
|
+
# end of header and standard imports
|
|
13
|
+
|
|
14
|
+
import numpy as np
|
|
15
|
+
import pytest
|
|
16
|
+
|
|
17
|
+
from modacor import ureg
|
|
18
|
+
from modacor.dataclasses.basedata import BaseData
|
|
19
|
+
from modacor.dataclasses.databundle import DataBundle
|
|
20
|
+
from modacor.dataclasses.processing_data import ProcessingData
|
|
21
|
+
from modacor.modules.base_modules.combine_uncertainties import CombineUncertainties
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
def _make_processing_data() -> ProcessingData:
    """Build a ProcessingData with one 'sample' bundle whose signal carries
    three named uncertainty maps (poisson, readout, background)."""
    signal = np.array([[10.0, 12.0], [8.0, 9.0]], dtype=float)
    uncertainty_maps = {
        "poisson": np.full_like(signal, 0.1),
        "readout": np.full_like(signal, 0.2),
        "background": np.full_like(signal, 0.05),
    }

    bundle = DataBundle(
        signal=BaseData(
            signal=signal,
            units=ureg.Unit("count"),
            uncertainties=uncertainty_maps,
        )
    )

    processing_data = ProcessingData()
    processing_data["sample"] = bundle
    return processing_data
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
def test_combine_uncertainties_creates_new_key():
    """Quadrature combination adds the new key and keeps the source keys."""
    processing_data = _make_processing_data()
    step = CombineUncertainties()
    step.modify_config_by_kwargs(
        with_processing_keys=["sample"],
        combinations={"stat_total": ["poisson", "readout"]},
    )

    step(processing_data)

    bd = processing_data["sample"]["signal"]
    # sqrt(0.1^2 + 0.2^2) at every pixel
    expected = np.full_like(bd.signal, np.sqrt(0.1**2 + 0.2**2))
    np.testing.assert_allclose(bd.uncertainties["stat_total"], expected)
    for source_key in ("poisson", "readout"):
        assert source_key in bd.uncertainties
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
def test_combine_uncertainties_can_drop_sources():
    """With drop_source_keys, combined inputs vanish; untouched keys remain."""
    processing_data = _make_processing_data()
    step = CombineUncertainties()
    step.modify_config_by_kwargs(
        with_processing_keys=["sample"],
        combinations={"sigma_total": ["poisson", "readout"]},
        drop_source_keys=True,
    )

    step(processing_data)

    remaining = processing_data["sample"]["signal"].uncertainties
    assert "sigma_total" in remaining
    for dropped_key in ("poisson", "readout"):
        assert dropped_key not in remaining
    assert "background" in remaining
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
def test_combine_uncertainties_missing_sources_error():
    """A combination naming a missing uncertainty key raises KeyError."""
    processing_data = _make_processing_data()
    step = CombineUncertainties()
    step.modify_config_by_kwargs(
        with_processing_keys=["sample"],
        combinations={"sigma_total": ["poisson", "absent_key"]},
    )

    with pytest.raises(KeyError):
        step(processing_data)
|
|
91
|
+
|
|
92
|
+
|
|
93
|
+
def test_combine_uncertainties_ignore_missing_uses_available():
    """With ignore_missing, only the present keys enter the combination."""
    processing_data = _make_processing_data()
    step = CombineUncertainties()
    step.modify_config_by_kwargs(
        with_processing_keys=["sample"],
        combinations={"stat_total": ["poisson", "absent_key"]},
        ignore_missing=True,
    )

    step(processing_data)

    uncertainties = processing_data["sample"]["signal"].uncertainties
    # Only "poisson" existed, so the result must equal it exactly.
    np.testing.assert_allclose(uncertainties["stat_total"], uncertainties["poisson"])
|
|
@@ -0,0 +1,109 @@
|
|
|
1
|
+
# SPDX-License-Identifier: BSD-3-Clause
|
|
2
|
+
# /usr/bin/env python3
|
|
3
|
+
# -*- coding: utf-8 -*-
|
|
4
|
+
|
|
5
|
+
from __future__ import annotations
|
|
6
|
+
|
|
7
|
+
__coding__ = "utf-8"
|
|
8
|
+
__authors__ = ["Brian R. Pauw"] # add names to the list as appropriate
|
|
9
|
+
__copyright__ = "Copyright 2026, The MoDaCor team"
|
|
10
|
+
__date__ = "20/01/2026"
|
|
11
|
+
__status__ = "Development" # "Development", "Production"
|
|
12
|
+
# end of header and standard imports
|
|
13
|
+
|
|
14
|
+
import numpy as np
|
|
15
|
+
import pytest
|
|
16
|
+
|
|
17
|
+
from modacor import ureg
|
|
18
|
+
from modacor.dataclasses.basedata import BaseData
|
|
19
|
+
from modacor.dataclasses.databundle import DataBundle
|
|
20
|
+
from modacor.dataclasses.processing_data import ProcessingData
|
|
21
|
+
from modacor.modules.base_modules.combine_uncertainties_max import CombineUncertaintiesMax
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
def _make_processing_data() -> ProcessingData:
    """Build a ProcessingData with one 'sample' bundle; 'readout' varies per
    pixel so the maximum combination is non-trivial."""
    signal = np.array([[10.0, 12.0], [8.0, 9.0]], dtype=float)
    uncertainty_maps = {
        "poisson": np.full_like(signal, 0.1),
        "readout": np.array([[0.2, 0.05], [0.15, 0.25]]),
        "background": np.full_like(signal, 0.05),
    }

    bundle = DataBundle(
        signal=BaseData(
            signal=signal,
            units=ureg.Unit("count"),
            uncertainties=uncertainty_maps,
        )
    )

    processing_data = ProcessingData()
    processing_data["sample"] = bundle
    return processing_data
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
def test_combine_uncertainties_maximum():
    """The combined map is the element-wise maximum over all named sources."""
    processing_data = _make_processing_data()
    step = CombineUncertaintiesMax()
    step.modify_config_by_kwargs(
        with_processing_keys=["sample"],
        combinations={"sigma_max": ["poisson", "readout", "background"]},
    )

    step(processing_data)

    bd = processing_data["sample"]["signal"]
    uncertainties = bd.uncertainties
    # Element-wise maximum over the three source maps (all signal-shaped).
    expected = np.maximum(
        np.maximum(uncertainties["poisson"], uncertainties["readout"]),
        uncertainties["background"],
    )
    np.testing.assert_allclose(uncertainties["sigma_max"], expected)
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
def test_combine_uncertainties_max_drop_sources():
    """With drop_source_keys, combined inputs vanish; untouched keys remain."""
    processing_data = _make_processing_data()
    step = CombineUncertaintiesMax()
    step.modify_config_by_kwargs(
        with_processing_keys=["sample"],
        combinations={"sigma_max": ["poisson", "readout"]},
        drop_source_keys=True,
    )

    step(processing_data)

    remaining = processing_data["sample"]["signal"].uncertainties
    assert "sigma_max" in remaining
    for dropped_key in ("poisson", "readout"):
        assert dropped_key not in remaining
    assert "background" in remaining
|
|
83
|
+
|
|
84
|
+
|
|
85
|
+
def test_combine_uncertainties_max_missing_sources_error():
    """A combination naming a missing uncertainty key raises KeyError."""
    processing_data = _make_processing_data()
    step = CombineUncertaintiesMax()
    step.modify_config_by_kwargs(
        with_processing_keys=["sample"],
        combinations={"sigma_max": ["poisson", "absent_key"]},
    )

    with pytest.raises(KeyError):
        step(processing_data)
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
def test_combine_uncertainties_max_ignore_missing():
    """With ignore_missing, only the present keys enter the combination."""
    processing_data = _make_processing_data()
    step = CombineUncertaintiesMax()
    step.modify_config_by_kwargs(
        with_processing_keys=["sample"],
        combinations={"sigma_max": ["poisson", "absent_key"]},
        ignore_missing=True,
    )

    step(processing_data)

    uncertainties = processing_data["sample"]["signal"].uncertainties
    # Only "poisson" existed, so the maximum must equal it exactly.
    np.testing.assert_allclose(uncertainties["sigma_max"], uncertainties["poisson"])
|