modacor 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- modacor/__init__.py +30 -0
- modacor/dataclasses/__init__.py +0 -0
- modacor/dataclasses/basedata.py +973 -0
- modacor/dataclasses/databundle.py +23 -0
- modacor/dataclasses/helpers.py +45 -0
- modacor/dataclasses/messagehandler.py +75 -0
- modacor/dataclasses/process_step.py +233 -0
- modacor/dataclasses/process_step_describer.py +146 -0
- modacor/dataclasses/processing_data.py +59 -0
- modacor/dataclasses/trace_event.py +118 -0
- modacor/dataclasses/uncertainty_tools.py +132 -0
- modacor/dataclasses/validators.py +84 -0
- modacor/debug/pipeline_tracer.py +548 -0
- modacor/io/__init__.py +33 -0
- modacor/io/csv/__init__.py +0 -0
- modacor/io/csv/csv_sink.py +114 -0
- modacor/io/csv/csv_source.py +210 -0
- modacor/io/hdf/__init__.py +27 -0
- modacor/io/hdf/hdf_source.py +120 -0
- modacor/io/io_sink.py +41 -0
- modacor/io/io_sinks.py +61 -0
- modacor/io/io_source.py +164 -0
- modacor/io/io_sources.py +208 -0
- modacor/io/processing_path.py +113 -0
- modacor/io/tiled/__init__.py +16 -0
- modacor/io/tiled/tiled_source.py +403 -0
- modacor/io/yaml/__init__.py +27 -0
- modacor/io/yaml/yaml_source.py +116 -0
- modacor/modules/__init__.py +53 -0
- modacor/modules/base_modules/__init__.py +0 -0
- modacor/modules/base_modules/append_processing_data.py +329 -0
- modacor/modules/base_modules/append_sink.py +141 -0
- modacor/modules/base_modules/append_source.py +181 -0
- modacor/modules/base_modules/bitwise_or_masks.py +113 -0
- modacor/modules/base_modules/combine_uncertainties.py +120 -0
- modacor/modules/base_modules/combine_uncertainties_max.py +105 -0
- modacor/modules/base_modules/divide.py +82 -0
- modacor/modules/base_modules/find_scale_factor1d.py +373 -0
- modacor/modules/base_modules/multiply.py +77 -0
- modacor/modules/base_modules/multiply_databundles.py +73 -0
- modacor/modules/base_modules/poisson_uncertainties.py +69 -0
- modacor/modules/base_modules/reduce_dimensionality.py +252 -0
- modacor/modules/base_modules/sink_processing_data.py +80 -0
- modacor/modules/base_modules/subtract.py +80 -0
- modacor/modules/base_modules/subtract_databundles.py +67 -0
- modacor/modules/base_modules/units_label_update.py +66 -0
- modacor/modules/instrument_modules/__init__.py +0 -0
- modacor/modules/instrument_modules/readme.md +9 -0
- modacor/modules/technique_modules/__init__.py +0 -0
- modacor/modules/technique_modules/scattering/__init__.py +0 -0
- modacor/modules/technique_modules/scattering/geometry_helpers.py +114 -0
- modacor/modules/technique_modules/scattering/index_pixels.py +492 -0
- modacor/modules/technique_modules/scattering/indexed_averager.py +628 -0
- modacor/modules/technique_modules/scattering/pixel_coordinates_3d.py +417 -0
- modacor/modules/technique_modules/scattering/solid_angle_correction.py +63 -0
- modacor/modules/technique_modules/scattering/xs_geometry.py +571 -0
- modacor/modules/technique_modules/scattering/xs_geometry_from_pixel_coordinates.py +293 -0
- modacor/runner/__init__.py +0 -0
- modacor/runner/pipeline.py +749 -0
- modacor/runner/process_step_registry.py +224 -0
- modacor/tests/__init__.py +27 -0
- modacor/tests/dataclasses/test_basedata.py +519 -0
- modacor/tests/dataclasses/test_basedata_operations.py +439 -0
- modacor/tests/dataclasses/test_basedata_to_base_units.py +57 -0
- modacor/tests/dataclasses/test_process_step_describer.py +73 -0
- modacor/tests/dataclasses/test_processstep.py +282 -0
- modacor/tests/debug/test_tracing_integration.py +188 -0
- modacor/tests/integration/__init__.py +0 -0
- modacor/tests/integration/test_pipeline_run.py +238 -0
- modacor/tests/io/__init__.py +27 -0
- modacor/tests/io/csv/__init__.py +0 -0
- modacor/tests/io/csv/test_csv_source.py +156 -0
- modacor/tests/io/hdf/__init__.py +27 -0
- modacor/tests/io/hdf/test_hdf_source.py +92 -0
- modacor/tests/io/test_io_sources.py +119 -0
- modacor/tests/io/tiled/__init__.py +12 -0
- modacor/tests/io/tiled/test_tiled_source.py +120 -0
- modacor/tests/io/yaml/__init__.py +27 -0
- modacor/tests/io/yaml/static_data_example.yaml +26 -0
- modacor/tests/io/yaml/test_yaml_source.py +47 -0
- modacor/tests/modules/__init__.py +27 -0
- modacor/tests/modules/base_modules/__init__.py +27 -0
- modacor/tests/modules/base_modules/test_append_processing_data.py +219 -0
- modacor/tests/modules/base_modules/test_append_sink.py +76 -0
- modacor/tests/modules/base_modules/test_append_source.py +180 -0
- modacor/tests/modules/base_modules/test_bitwise_or_masks.py +264 -0
- modacor/tests/modules/base_modules/test_combine_uncertainties.py +105 -0
- modacor/tests/modules/base_modules/test_combine_uncertainties_max.py +109 -0
- modacor/tests/modules/base_modules/test_divide.py +140 -0
- modacor/tests/modules/base_modules/test_find_scale_factor1d.py +220 -0
- modacor/tests/modules/base_modules/test_multiply.py +113 -0
- modacor/tests/modules/base_modules/test_multiply_databundles.py +136 -0
- modacor/tests/modules/base_modules/test_poisson_uncertainties.py +61 -0
- modacor/tests/modules/base_modules/test_reduce_dimensionality.py +358 -0
- modacor/tests/modules/base_modules/test_sink_processing_data.py +119 -0
- modacor/tests/modules/base_modules/test_subtract.py +111 -0
- modacor/tests/modules/base_modules/test_subtract_databundles.py +136 -0
- modacor/tests/modules/base_modules/test_units_label_update.py +91 -0
- modacor/tests/modules/technique_modules/__init__.py +0 -0
- modacor/tests/modules/technique_modules/scattering/__init__.py +0 -0
- modacor/tests/modules/technique_modules/scattering/test_geometry_helpers.py +198 -0
- modacor/tests/modules/technique_modules/scattering/test_index_pixels.py +426 -0
- modacor/tests/modules/technique_modules/scattering/test_indexed_averaging.py +559 -0
- modacor/tests/modules/technique_modules/scattering/test_pixel_coordinates_3d.py +282 -0
- modacor/tests/modules/technique_modules/scattering/test_xs_geometry_from_pixel_coordinates.py +224 -0
- modacor/tests/modules/technique_modules/scattering/test_xsgeometry.py +635 -0
- modacor/tests/requirements.txt +12 -0
- modacor/tests/runner/test_pipeline.py +438 -0
- modacor/tests/runner/test_process_step_registry.py +65 -0
- modacor/tests/test_import.py +43 -0
- modacor/tests/test_modacor.py +17 -0
- modacor/tests/test_units.py +79 -0
- modacor/units.py +97 -0
- modacor-1.0.0.dist-info/METADATA +482 -0
- modacor-1.0.0.dist-info/RECORD +120 -0
- modacor-1.0.0.dist-info/WHEEL +5 -0
- modacor-1.0.0.dist-info/licenses/AUTHORS.md +11 -0
- modacor-1.0.0.dist-info/licenses/LICENSE +11 -0
- modacor-1.0.0.dist-info/licenses/LICENSE.txt +11 -0
- modacor-1.0.0.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,118 @@
|
|
|
1
|
+
# SPDX-License-Identifier: BSD-3-Clause
|
|
2
|
+
#!/usr/bin/env python3
|
|
3
|
+
# -*- coding: utf-8 -*-
|
|
4
|
+
|
|
5
|
+
from __future__ import annotations
|
|
6
|
+
|
|
7
|
+
__coding__ = "utf-8"
|
|
8
|
+
__authors__ = ["Brian R. Pauw"] # add names to the list as appropriate
|
|
9
|
+
__copyright__ = "Copyright 2025, The MoDaCor team"
|
|
10
|
+
__date__ = "13/12/2025"
|
|
11
|
+
__status__ = "Development" # "Development", "Production"
|
|
12
|
+
__version__ = "20251213.1"
|
|
13
|
+
|
|
14
|
+
__all__ = ["TraceEvent"]
|
|
15
|
+
|
|
16
|
+
import json
|
|
17
|
+
from hashlib import sha256
|
|
18
|
+
from typing import Any
|
|
19
|
+
|
|
20
|
+
from attrs import define, field, validators
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def _to_jsonable(value: Any) -> Any:
|
|
24
|
+
"""
|
|
25
|
+
Convert arbitrary objects into a JSON-serializable structure.
|
|
26
|
+
|
|
27
|
+
Rules:
|
|
28
|
+
- dict keys become strings
|
|
29
|
+
- tuples/sets become lists
|
|
30
|
+
- unknown objects become str(value)
|
|
31
|
+
"""
|
|
32
|
+
if value is None or isinstance(value, (str, int, float, bool)):
|
|
33
|
+
return value
|
|
34
|
+
|
|
35
|
+
if isinstance(value, dict):
|
|
36
|
+
return {str(k): _to_jsonable(v) for k, v in value.items()}
|
|
37
|
+
|
|
38
|
+
if isinstance(value, (list, tuple, set)):
|
|
39
|
+
return [_to_jsonable(v) for v in value]
|
|
40
|
+
|
|
41
|
+
# Common numpy-like scalars without importing numpy
|
|
42
|
+
if hasattr(value, "item") and callable(getattr(value, "item")):
|
|
43
|
+
try:
|
|
44
|
+
return _to_jsonable(value.item())
|
|
45
|
+
except Exception:
|
|
46
|
+
pass
|
|
47
|
+
|
|
48
|
+
return str(value)
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
def _stable_hash_dict(d: dict[str, Any]) -> str:
    """
    Return a stable (order-independent) SHA-256 content hash of *d*.

    The dict is first reduced to a JSON-friendly structure, then serialized
    with sorted keys and compact separators so that two dicts with equal
    content but different insertion order hash identically.
    """
    jsonable = _to_jsonable(d)
    canonical = json.dumps(
        jsonable,
        sort_keys=True,
        separators=(",", ":"),
        ensure_ascii=False,
    )
    digest = sha256(canonical.encode("utf-8"))
    return digest.hexdigest()
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
@define(frozen=True, slots=True)
class TraceEvent:
    """
    A small, UI-friendly trace record for a single executed step.

    Intended to be embedded into Pipeline.to_spec() so graph viewers can show:
    - configuration used by the step
    - what changed (units/dimensionality/shape/NaNs/etc.)
    - optional human messages (later)

    Notes
    -----
    Keep this JSON-friendly and lightweight: no arrays, no heavy objects.
    """

    # identity of the executed step and the module that implements it
    step_id: str
    module: str
    label: str = ""

    # provenance: where the module lives and which version produced the trace
    module_path: str = ""
    version: str = ""

    # step ids this step depends on (upstream edges in the pipeline graph)
    requires_steps: tuple[str, ...] = field(factory=tuple)

    # configuration as used for execution (JSON-friendly)
    config: dict[str, Any] = field(factory=dict)

    # computed stable hash of config
    config_hash: str = field(init=False)

    # dataset key -> { "diff": [...], "prev": {...} | None, "now": {...} }
    # Use a simple key like "sample.signal" or "sample_background.signal"
    datasets: dict[str, Any] = field(factory=dict)

    # reserved for later (MessageHandler, timing, etc.)
    messages: list[dict[str, Any]] = field(factory=list)

    # wall-clock runtime for this step execution (seconds)
    # NOTE(review): the validator accepts float or None only — an int duration
    # (e.g. a plain 0) would be rejected; confirm callers always pass a float.
    duration_s: float | None = field(default=None, validator=validators.optional(validators.instance_of(float)))

    def __attrs_post_init__(self) -> None:
        """Derive ``config_hash`` from ``config`` after attrs initialisation."""
        # object.__setattr__ is required because the class is frozen.
        object.__setattr__(self, "config_hash", _stable_hash_dict(self.config))

    def to_dict(self) -> dict[str, Any]:
        """
        JSON-serializable representation suitable for Pipeline.to_spec().
        """
        return {
            "step_id": self.step_id,
            "module": self.module,
            "label": self.label,
            "module_path": self.module_path,
            "version": self.version,
            "requires_steps": list(self.requires_steps),
            "config": _to_jsonable(self.config),
            "config_hash": self.config_hash,
            "duration_s": self.duration_s,
            "datasets": _to_jsonable(self.datasets),
            "messages": _to_jsonable(self.messages),
        }
|
|
@@ -0,0 +1,132 @@
|
|
|
1
|
+
# SPDX-License-Identifier: BSD-3-Clause
|
|
2
|
+
#!/usr/bin/env python3
|
|
3
|
+
# -*- coding: utf-8 -*-
|
|
4
|
+
|
|
5
|
+
from __future__ import annotations
|
|
6
|
+
|
|
7
|
+
__coding__ = "utf-8"
|
|
8
|
+
__authors__ = ["Brian R. Pauw"] # add names to the list as appropriate
|
|
9
|
+
__copyright__ = "Copyright 2026, The MoDaCor team"
|
|
10
|
+
__date__ = "20/01/2026"
|
|
11
|
+
__status__ = "Development" # "Development", "Production"
|
|
12
|
+
# end of header and standard imports
|
|
13
|
+
|
|
14
|
+
__all__ = [
|
|
15
|
+
"normalize_uncertainty_combinations",
|
|
16
|
+
"combine_uncertainty_keys",
|
|
17
|
+
"quadrature_aggregator",
|
|
18
|
+
"maximum_aggregator",
|
|
19
|
+
]
|
|
20
|
+
|
|
21
|
+
from collections.abc import Callable, Iterable, Mapping
|
|
22
|
+
from typing import Any
|
|
23
|
+
|
|
24
|
+
import numpy as np
|
|
25
|
+
|
|
26
|
+
from .basedata import BaseData
|
|
27
|
+
from .messagehandler import MessageHandler
|
|
28
|
+
|
|
29
|
+
Aggregator = Callable[[list[np.ndarray], tuple[int, ...]], np.ndarray]
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
def normalize_uncertainty_combinations(raw: Mapping[str, Any] | None) -> dict[str, tuple[str, ...]]:
    """Normalise combination configuration into deterministic tuples.

    Accepts ``None`` (yields an empty mapping) or a mapping of destination
    key -> source key(s). A bare string counts as a single source; any other
    iterable is stripped, emptied of blanks, and frozen into a tuple.

    Raises
    ------
    TypeError
        When *raw* is not a mapping, or an entry is neither a string nor an
        iterable.
    ValueError
        When a destination key is blank, or an entry yields no source keys.
    """
    if raw is None:
        return {}
    if not isinstance(raw, Mapping):
        raise TypeError("'combinations' must be a mapping of output key -> iterable of source keys.")

    result: dict[str, tuple[str, ...]] = {}
    for key, spec in raw.items():
        if isinstance(spec, str):
            sources = (spec,)
        elif isinstance(spec, Iterable):
            stripped_items = (str(item).strip() for item in spec)
            sources = tuple(item for item in stripped_items if item)
        else:
            raise TypeError("Each combinations entry must be a string or iterable of strings.")

        destination = str(key).strip()
        if not destination:
            raise ValueError("Combination keys must be non-empty strings.")
        if not sources:
            raise ValueError(f"Combination '{destination}' must list at least one source uncertainty key.")
        result[destination] = sources
    return result
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
def quadrature_aggregator(uncertainties: list[np.ndarray], shape: tuple[int, ...]) -> np.ndarray:
    """Combine absolute uncertainties via root-sum-of-squares.

    Each input is broadcast to *shape* before squaring, so scalars and
    lower-dimensional arrays are accepted.

    Raises
    ------
    RuntimeError
        When *uncertainties* is empty.
    """
    if not uncertainties:
        raise RuntimeError("Cannot compute quadrature of an empty sequence.")
    broadcast = [np.broadcast_to(np.asarray(sigma, dtype=float), shape) for sigma in uncertainties]
    return np.sqrt(np.sum(np.square(np.stack(broadcast)), axis=0))
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
def maximum_aggregator(uncertainties: list[np.ndarray], shape: tuple[int, ...]) -> np.ndarray:
    """Combine absolute uncertainties by taking the element-wise maximum.

    Each input is broadcast to *shape* before comparison.

    Raises
    ------
    RuntimeError
        When *uncertainties* is empty.
    """
    if not uncertainties:
        raise RuntimeError("Cannot compute maximum of an empty sequence.")
    combined = np.broadcast_to(np.asarray(uncertainties[0], dtype=float), shape).copy()
    for sigma in uncertainties[1:]:
        np.maximum(combined, np.broadcast_to(np.asarray(sigma, dtype=float), shape), out=combined)
    return combined
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
def combine_uncertainty_keys(
    *,
    basedata: BaseData,
    combinations: Mapping[str, tuple[str, ...]],
    aggregator: Aggregator,
    drop_sources: bool,
    ignore_missing: bool,
    logger: MessageHandler | None = None,
    target_name: str = "",
) -> None:
    """Apply configured combinations to ``basedata.uncertainties`` in-place.

    For each ``destination -> sources`` entry, the present source arrays are
    fed to *aggregator* (broadcast target is the signal shape) and the result
    stored under the destination key. Combinations are applied in mapping
    order, so a later entry may consume an earlier entry's output.

    Parameters
    ----------
    basedata:
        Object whose ``uncertainties`` dict is updated in place.
    combinations:
        Destination key -> tuple of source uncertainty keys.
    aggregator:
        Callable reducing a list of arrays (given the target shape) to one.
    drop_sources:
        When True, consumed source keys are removed afterwards — except keys
        that were themselves (re)created as destinations.
    ignore_missing:
        When True, absent source keys are tolerated; a combination with no
        present sources at all is skipped (with a debug log if available).
    logger:
        Optional message handler for debug output.
    target_name:
        Human-readable name used in error messages.

    Raises
    ------
    KeyError
        A source key is absent and *ignore_missing* is False.
    RuntimeError
        No source keys are present and *ignore_missing* is False.
    """
    shape = basedata.signal.shape
    created: set[str] = set()
    consumed: set[str] = set()

    for dest_key, source_keys in combinations.items():
        found = [key for key in source_keys if key in basedata.uncertainties]
        absent = [key for key in source_keys if key not in basedata.uncertainties]
        target_descr = target_name or "BaseData"

        if absent and not ignore_missing:
            missing_formatted = ", ".join(sorted(absent))
            raise KeyError(
                f"Missing uncertainties {{{missing_formatted}}} required for '{dest_key}' on {target_descr}."
            )

        if not found:
            if not ignore_missing:
                raise RuntimeError(
                    f"No uncertainties available to combine for destination '{dest_key}' on {target_descr}."
                )
            if logger is not None:
                logger.debug(
                    "Skipping destination '%s' – none of the source keys were present.",
                    dest_key,
                )
            continue

        values = [basedata.uncertainties[key] for key in found]
        basedata.uncertainties[dest_key] = aggregator(values, shape)
        created.add(dest_key)
        if drop_sources:
            consumed.update(found)

    if drop_sources:
        # Never delete a key that a combination just (re)wrote.
        for key in consumed - created:
            basedata.uncertainties.pop(key, None)
|
|
@@ -0,0 +1,84 @@
|
|
|
1
|
+
# SPDX-License-Identifier: BSD-3-Clause
|
|
2
|
+
#!/usr/bin/env python3
|
|
3
|
+
# -*- coding: utf-8 -*-
|
|
4
|
+
|
|
5
|
+
from __future__ import annotations
|
|
6
|
+
|
|
7
|
+
__coding__ = "utf-8"
|
|
8
|
+
__authors__ = ["Jérôme Kieffer", "Brian R. Pauw"] # add names to the list as appropriate
|
|
9
|
+
__copyright__ = "Copyright 2025, The MoDaCor team"
|
|
10
|
+
__date__ = "16/11/2025"
|
|
11
|
+
__status__ = "Development" # "Development", "Production"
|
|
12
|
+
# end of header and standard imports
|
|
13
|
+
|
|
14
|
+
from numbers import Integral
|
|
15
|
+
from typing import Any, Type
|
|
16
|
+
|
|
17
|
+
import numpy as np
|
|
18
|
+
|
|
19
|
+
from modacor import ureg
|
|
20
|
+
|
|
21
|
+
from .databundle import DataBundle
|
|
22
|
+
from .messagehandler import MessageHandler
|
|
23
|
+
|
|
24
|
+
# from .scatteringdata import ScatteringData
|
|
25
|
+
_dummy_handler = MessageHandler()
|
|
26
|
+
|
|
27
|
+
__all__ = [
|
|
28
|
+
"check_data_element_and_units",
|
|
29
|
+
"is_list_of_ints",
|
|
30
|
+
]
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def is_list_of_ints(instance: Type, attribute: str, value: Any):
    """
    Check if the value is a list of integers.

    The signature follows the attrs validator convention
    ``(instance, attribute, value)``; *instance* and *attribute* are accepted
    but not inspected. Only a real ``list`` qualifies (tuples and other
    sequences do not), and every element must be an integral number.
    """
    return isinstance(value, list) and all(isinstance(element, Integral) for element in value)
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
def check_data(
    data: DataBundle,
    data_element_name: str | None = None,
    required_unit: ureg.Unit | None = None,
    logger: MessageHandler = _dummy_handler,
) -> bool:
    """
    Check that the required data element is present in the DataBundle object.

    Parameters
    ----------
    data:
        Object to inspect; anything that is not a DataBundle fails the check.
    data_element_name:
        Key expected in ``data.data``. When None, only the type of ``data``
        is checked.
    required_unit:
        Unit the element's ``internal_units`` must compare equal to.
        NOTE(review): this comparison also runs when ``required_unit`` is
        None, so a present element then only passes if its
        ``internal_units == None`` — confirm that is intended.
    logger:
        Message handler used to report failures (defaults to a no-frills
        module-level handler).

    Returns
    -------
    bool
        True when all requested checks pass, False otherwise.
    """
    if not isinstance(data, DataBundle):
        return False
    if data_element_name is not None:
        # Fetch the element once via walrus and test for presence.
        if (intensity_object := data.data.get(data_element_name, None)) is None:
            logger.error(f"{data_element_name} is required.")
            return False
        if not (intensity_object.internal_units == required_unit):
            logger.error(f"{data_element_name} should have units of {required_unit}.")
            return False
    return True
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
def arrays_are_equal_shape(
    array1: np.ndarray,
    array2: np.ndarray,
) -> bool:
    """
    Check if two arrays have the same shape.

    Returns True exactly when the ``shape`` tuples of both arrays match.
    """
    return array1.shape == array2.shape
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
def check_data_element_and_units(
    data: DataBundle,
    data_element_name: str,
    required_unit: ureg.Unit,
    logger: MessageHandler,
) -> bool:
    """
    Check that the required data element is present with the correct units in the DataBundle object.

    Thin convenience wrapper around :func:`check_data` with every check
    requested explicitly.
    """
    return check_data(
        data,
        data_element_name=data_element_name,
        required_unit=required_unit,
        logger=logger,
    )
|