modacor 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (120) hide show
  1. modacor/__init__.py +30 -0
  2. modacor/dataclasses/__init__.py +0 -0
  3. modacor/dataclasses/basedata.py +973 -0
  4. modacor/dataclasses/databundle.py +23 -0
  5. modacor/dataclasses/helpers.py +45 -0
  6. modacor/dataclasses/messagehandler.py +75 -0
  7. modacor/dataclasses/process_step.py +233 -0
  8. modacor/dataclasses/process_step_describer.py +146 -0
  9. modacor/dataclasses/processing_data.py +59 -0
  10. modacor/dataclasses/trace_event.py +118 -0
  11. modacor/dataclasses/uncertainty_tools.py +132 -0
  12. modacor/dataclasses/validators.py +84 -0
  13. modacor/debug/pipeline_tracer.py +548 -0
  14. modacor/io/__init__.py +33 -0
  15. modacor/io/csv/__init__.py +0 -0
  16. modacor/io/csv/csv_sink.py +114 -0
  17. modacor/io/csv/csv_source.py +210 -0
  18. modacor/io/hdf/__init__.py +27 -0
  19. modacor/io/hdf/hdf_source.py +120 -0
  20. modacor/io/io_sink.py +41 -0
  21. modacor/io/io_sinks.py +61 -0
  22. modacor/io/io_source.py +164 -0
  23. modacor/io/io_sources.py +208 -0
  24. modacor/io/processing_path.py +113 -0
  25. modacor/io/tiled/__init__.py +16 -0
  26. modacor/io/tiled/tiled_source.py +403 -0
  27. modacor/io/yaml/__init__.py +27 -0
  28. modacor/io/yaml/yaml_source.py +116 -0
  29. modacor/modules/__init__.py +53 -0
  30. modacor/modules/base_modules/__init__.py +0 -0
  31. modacor/modules/base_modules/append_processing_data.py +329 -0
  32. modacor/modules/base_modules/append_sink.py +141 -0
  33. modacor/modules/base_modules/append_source.py +181 -0
  34. modacor/modules/base_modules/bitwise_or_masks.py +113 -0
  35. modacor/modules/base_modules/combine_uncertainties.py +120 -0
  36. modacor/modules/base_modules/combine_uncertainties_max.py +105 -0
  37. modacor/modules/base_modules/divide.py +82 -0
  38. modacor/modules/base_modules/find_scale_factor1d.py +373 -0
  39. modacor/modules/base_modules/multiply.py +77 -0
  40. modacor/modules/base_modules/multiply_databundles.py +73 -0
  41. modacor/modules/base_modules/poisson_uncertainties.py +69 -0
  42. modacor/modules/base_modules/reduce_dimensionality.py +252 -0
  43. modacor/modules/base_modules/sink_processing_data.py +80 -0
  44. modacor/modules/base_modules/subtract.py +80 -0
  45. modacor/modules/base_modules/subtract_databundles.py +67 -0
  46. modacor/modules/base_modules/units_label_update.py +66 -0
  47. modacor/modules/instrument_modules/__init__.py +0 -0
  48. modacor/modules/instrument_modules/readme.md +9 -0
  49. modacor/modules/technique_modules/__init__.py +0 -0
  50. modacor/modules/technique_modules/scattering/__init__.py +0 -0
  51. modacor/modules/technique_modules/scattering/geometry_helpers.py +114 -0
  52. modacor/modules/technique_modules/scattering/index_pixels.py +492 -0
  53. modacor/modules/technique_modules/scattering/indexed_averager.py +628 -0
  54. modacor/modules/technique_modules/scattering/pixel_coordinates_3d.py +417 -0
  55. modacor/modules/technique_modules/scattering/solid_angle_correction.py +63 -0
  56. modacor/modules/technique_modules/scattering/xs_geometry.py +571 -0
  57. modacor/modules/technique_modules/scattering/xs_geometry_from_pixel_coordinates.py +293 -0
  58. modacor/runner/__init__.py +0 -0
  59. modacor/runner/pipeline.py +749 -0
  60. modacor/runner/process_step_registry.py +224 -0
  61. modacor/tests/__init__.py +27 -0
  62. modacor/tests/dataclasses/test_basedata.py +519 -0
  63. modacor/tests/dataclasses/test_basedata_operations.py +439 -0
  64. modacor/tests/dataclasses/test_basedata_to_base_units.py +57 -0
  65. modacor/tests/dataclasses/test_process_step_describer.py +73 -0
  66. modacor/tests/dataclasses/test_processstep.py +282 -0
  67. modacor/tests/debug/test_tracing_integration.py +188 -0
  68. modacor/tests/integration/__init__.py +0 -0
  69. modacor/tests/integration/test_pipeline_run.py +238 -0
  70. modacor/tests/io/__init__.py +27 -0
  71. modacor/tests/io/csv/__init__.py +0 -0
  72. modacor/tests/io/csv/test_csv_source.py +156 -0
  73. modacor/tests/io/hdf/__init__.py +27 -0
  74. modacor/tests/io/hdf/test_hdf_source.py +92 -0
  75. modacor/tests/io/test_io_sources.py +119 -0
  76. modacor/tests/io/tiled/__init__.py +12 -0
  77. modacor/tests/io/tiled/test_tiled_source.py +120 -0
  78. modacor/tests/io/yaml/__init__.py +27 -0
  79. modacor/tests/io/yaml/static_data_example.yaml +26 -0
  80. modacor/tests/io/yaml/test_yaml_source.py +47 -0
  81. modacor/tests/modules/__init__.py +27 -0
  82. modacor/tests/modules/base_modules/__init__.py +27 -0
  83. modacor/tests/modules/base_modules/test_append_processing_data.py +219 -0
  84. modacor/tests/modules/base_modules/test_append_sink.py +76 -0
  85. modacor/tests/modules/base_modules/test_append_source.py +180 -0
  86. modacor/tests/modules/base_modules/test_bitwise_or_masks.py +264 -0
  87. modacor/tests/modules/base_modules/test_combine_uncertainties.py +105 -0
  88. modacor/tests/modules/base_modules/test_combine_uncertainties_max.py +109 -0
  89. modacor/tests/modules/base_modules/test_divide.py +140 -0
  90. modacor/tests/modules/base_modules/test_find_scale_factor1d.py +220 -0
  91. modacor/tests/modules/base_modules/test_multiply.py +113 -0
  92. modacor/tests/modules/base_modules/test_multiply_databundles.py +136 -0
  93. modacor/tests/modules/base_modules/test_poisson_uncertainties.py +61 -0
  94. modacor/tests/modules/base_modules/test_reduce_dimensionality.py +358 -0
  95. modacor/tests/modules/base_modules/test_sink_processing_data.py +119 -0
  96. modacor/tests/modules/base_modules/test_subtract.py +111 -0
  97. modacor/tests/modules/base_modules/test_subtract_databundles.py +136 -0
  98. modacor/tests/modules/base_modules/test_units_label_update.py +91 -0
  99. modacor/tests/modules/technique_modules/__init__.py +0 -0
  100. modacor/tests/modules/technique_modules/scattering/__init__.py +0 -0
  101. modacor/tests/modules/technique_modules/scattering/test_geometry_helpers.py +198 -0
  102. modacor/tests/modules/technique_modules/scattering/test_index_pixels.py +426 -0
  103. modacor/tests/modules/technique_modules/scattering/test_indexed_averaging.py +559 -0
  104. modacor/tests/modules/technique_modules/scattering/test_pixel_coordinates_3d.py +282 -0
  105. modacor/tests/modules/technique_modules/scattering/test_xs_geometry_from_pixel_coordinates.py +224 -0
  106. modacor/tests/modules/technique_modules/scattering/test_xsgeometry.py +635 -0
  107. modacor/tests/requirements.txt +12 -0
  108. modacor/tests/runner/test_pipeline.py +438 -0
  109. modacor/tests/runner/test_process_step_registry.py +65 -0
  110. modacor/tests/test_import.py +43 -0
  111. modacor/tests/test_modacor.py +17 -0
  112. modacor/tests/test_units.py +79 -0
  113. modacor/units.py +97 -0
  114. modacor-1.0.0.dist-info/METADATA +482 -0
  115. modacor-1.0.0.dist-info/RECORD +120 -0
  116. modacor-1.0.0.dist-info/WHEEL +5 -0
  117. modacor-1.0.0.dist-info/licenses/AUTHORS.md +11 -0
  118. modacor-1.0.0.dist-info/licenses/LICENSE +11 -0
  119. modacor-1.0.0.dist-info/licenses/LICENSE.txt +11 -0
  120. modacor-1.0.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,403 @@
1
+ # SPDX-License-Identifier: BSD-3-Clause
2
+ # /usr/bin/env python3
3
+ # -*- coding: utf-8 -*-
4
+
5
+ from __future__ import annotations
6
+
7
+ __coding__ = "utf-8"
8
+ __authors__ = ["Brian R. Pauw"] # add names to the list as appropriate
9
+ __copyright__ = "Copyright 2026, The MoDaCor team"
10
+ __date__ = "20/01/2026"
11
+ __status__ = "Development" # "Development", "Production"
12
+ # end of header and standard imports
13
+
14
+ __all__ = ["TiledSource"]
15
+
16
+ from collections.abc import Sequence
17
+ from typing import Any, Optional
18
+
19
+ import numpy as np
20
+ from attrs import define, field
21
+
22
+ from modacor.dataclasses.messagehandler import MessageHandler
23
+ from modacor.io.io_source import ArraySlice
24
+
25
+ from ..io_source import IoSource
26
+
27
+ _TILED_IMPORT_ERROR = (
28
+ "TiledSource requires the 'tiled' dependency. Install the optional extra with "
29
+ "'pip install modacor[tiled]' or otherwise ensure 'tiled' is available."
30
+ )
31
+
32
+
33
+ def _normalise_path_tokens(path: str | Sequence[str] | None) -> tuple[str, ...]:
34
+ if path is None:
35
+ return ()
36
+ if isinstance(path, str):
37
+ tokens = path.split("/")
38
+ else:
39
+ tokens = []
40
+ for element in path:
41
+ tokens.extend(str(element).split("/"))
42
+ return tuple(part for part in (token.strip() for token in tokens) if part)
43
+
44
+
45
@define(kw_only=True)
class TiledSource(IoSource):
    """
    IoSource implementation backed by a Tiled data service.

    Parameters
    ----------
    resource_location:
        Connection descriptor for the Tiled service. Supported formats:

        - ``"profile:my-profile"`` or ``"profile://my-profile"``
          → connects via :func:`tiled.client.from_profile`.
        - Any other string (including http/https URLs)
          → connects via :func:`tiled.client.from_uri`.
        - A mapping with one of the keys ``{"profile", "uri", "from_profile", "from_uri"}``.
          Additional keys ``"kwargs"`` or ``"connection_kwargs"`` may provide dicts merged into the
          call.
        - A mapping containing ``"client"`` or ``"node"`` with a pre-constructed Tiled node; in that
          case no connection attempt is made.

        When ``root_node`` is supplied, ``resource_location`` is optional and ignored.

    root_node:
        Existing Tiled node to use as the root. Supplying this allows using in-memory catalogues or
        stubbed objects without importing ``tiled`` at module import time.

    iosource_method_kwargs:
        Optional keywords to control the connection. Recognised entries:

        - ``base_item_path`` or ``base_path``: prefix applied to every ``data_key`` before resolving in
          the Tiled tree.
        - ``connection_kwargs``: nested dict merged into the connection call.
        - Any remaining items are forwarded directly to ``from_uri``/``from_profile``.

    Notes
    -----
    Data and metadata are cached per resolved path when retrieved without an explicit slice to reduce
    repeated network round-trips. Sliced reads bypass the cache.
    """

    resource_location: str | dict[str, Any] | None = field(default=None)
    root_node: Any | None = field(default=None, repr=False)

    # Internal state; all populated in __attrs_post_init__ or lazily.
    logger: MessageHandler = field(init=False)
    # Resolved root node (either ``root_node`` or a freshly connected client).
    _root_node: Any = field(init=False, default=None, repr=False)
    # Path prefix tokens prepended to every data_key during resolution.
    _base_path: tuple[str, ...] = field(init=False, factory=tuple, repr=False)
    # Per-path caches: resolved nodes, full (unsliced) arrays, attributes,
    # and shape/dtype structure information.
    _node_cache: dict[str, Any] = field(init=False, factory=dict, repr=False)
    _data_cache: dict[str, np.ndarray] = field(init=False, factory=dict, repr=False)
    _attribute_cache: dict[str, dict[str, Any]] = field(init=False, factory=dict, repr=False)
    _structure_cache: dict[str, dict[str, Any]] = field(init=False, factory=dict, repr=False)

    def __attrs_post_init__(self) -> None:
        # Set up logging, extract the base path prefix, merge connection
        # keyword dicts, then connect (unless an explicit root node was given).
        self.logger = MessageHandler(level=self.logging_level, name="TiledSource")

        method_kwargs = dict(self.iosource_method_kwargs or {})
        # base path may come from iosource_method_kwargs or, as a fallback,
        # from a resource_location mapping.
        base_path_setting = (
            method_kwargs.pop("base_item_path", None)
            or method_kwargs.pop("base_path", None)
            or _extract_from_mapping(self.resource_location, ("base_item_path", "base_path"))
        )
        self._base_path = _normalise_path_tokens(base_path_setting)

        connection_kwargs = method_kwargs.pop("connection_kwargs", {})
        if not isinstance(connection_kwargs, dict):
            raise TypeError("connection_kwargs must be a dictionary if provided.")
        # Remaining method kwargs plus nested connection_kwargs are all
        # forwarded to the tiled connection call.
        method_kwargs.update(connection_kwargs)

        if self.root_node is not None:
            self._root_node = self.root_node
        else:
            self._root_node = self._connect(resource_location=self.resource_location, connection_kwargs=method_kwargs)

        if self._root_node is None:
            raise ValueError("TiledSource requires either a root_node or a valid resource_location to connect to.")

    # ------------------------------------------------------------------
    # IoSource API
    # ------------------------------------------------------------------

    def get_data(self, data_key: str, load_slice: ArraySlice = ...) -> np.ndarray:
        """Read the array at *data_key*, optionally sliced.

        Unsliced reads are cached per resolved path; sliced reads always go
        to the service. If the node rejects the ``slice`` keyword with a
        TypeError, the full dataset is fetched instead (with a warning).

        Raises ``KeyError`` if the path does not resolve to a readable node.
        """
        key_path, _ = self._split_key(data_key)

        if load_slice is Ellipsis or load_slice is None:
            # NOTE(review): the cached ndarray is returned directly, so a
            # caller mutating it would corrupt the cache — confirm callers
            # treat returned arrays as read-only.
            if key_path in self._data_cache:
                return self._data_cache[key_path]
            slice_arg: Optional[ArraySlice] = None
        else:
            slice_arg = self._prepare_slice(load_slice)

        node = self._resolve_node(key_path)
        read_kwargs = {}
        if slice_arg is not None:
            read_kwargs["slice"] = slice_arg

        try:
            data_obj = node.read(**read_kwargs)
        except TypeError as exc:
            # Some node types do not accept a ``slice`` keyword; degrade to
            # a full read rather than failing.
            if slice_arg is not None:
                self.logger.warning(
                    "Slice %s not supported for '%s' (%s); fetching complete dataset instead.",
                    slice_arg,
                    key_path,
                    exc,
                )
                data_obj = node.read()
            else:
                raise
        except AttributeError as exc:
            # Node has no ``read`` — e.g. the path points at a container.
            raise KeyError(f"Path '{key_path}' does not resolve to a readable Tiled node.") from exc

        array = self._to_numpy(data_obj)

        if slice_arg is None:
            # Only complete datasets are cached; also record shape/dtype.
            self._data_cache[key_path] = array
            self._update_structure_cache(key_path, array)

        return array

    def get_data_shape(self, data_key: str) -> tuple[int, ...]:
        """Return the shape of the dataset at *data_key*, or ``()`` if unknown.

        Checks the structure cache, then the node itself, then any fully
        cached array data.
        """
        key_path, _ = self._split_key(data_key)
        cached = self._structure_cache.get(key_path)
        if cached and cached.get("shape"):
            return cached["shape"]

        node = self._resolve_node(key_path)
        shape = self._extract_shape(node)
        if shape:
            self._structure_cache.setdefault(key_path, {})["shape"] = shape
            return shape

        if key_path in self._data_cache:
            shape = tuple(self._data_cache[key_path].shape)
            self._structure_cache.setdefault(key_path, {})["shape"] = shape
            return shape
        return ()

    def get_data_dtype(self, data_key: str) -> np.dtype | None:
        """Return the dtype of the dataset at *data_key*, or ``None`` if unknown.

        Lookup order mirrors :meth:`get_data_shape`: structure cache, node
        introspection, then cached array data.
        """
        key_path, _ = self._split_key(data_key)
        cached = self._structure_cache.get(key_path)
        if cached and cached.get("dtype") is not None:
            return cached["dtype"]

        node = self._resolve_node(key_path)
        dtype = self._extract_dtype(node)
        if dtype is not None:
            self._structure_cache.setdefault(key_path, {})["dtype"] = dtype
            return dtype

        if key_path in self._data_cache:
            dtype = self._data_cache[key_path].dtype
            self._structure_cache.setdefault(key_path, {})["dtype"] = dtype
            return dtype
        return None

    def get_data_attributes(self, data_key: str) -> dict[str, Any]:
        """Return the attribute dict for *data_key* (cached per path)."""
        key_path, _ = self._split_key(data_key)
        if key_path in self._attribute_cache:
            return self._attribute_cache[key_path]

        node = self._resolve_node(key_path)
        attributes = self._extract_attributes(node)
        self._attribute_cache[key_path] = attributes
        return attributes

    def get_static_metadata(self, data_key: str) -> Any:
        """Return node metadata, or a single attribute if the key contains ``@``.

        ``"path@attr"`` returns that one attribute value (or ``None`` if
        absent); a plain path returns the node's ``metadata`` object.
        """
        key_path, attribute = self._split_key(data_key)
        if attribute is None:
            node = self._resolve_node(key_path)
            metadata = getattr(node, "metadata", None)
            return metadata

        attributes = self.get_data_attributes(key_path)
        return attributes.get(attribute)

    # ------------------------------------------------------------------
    # Internal helpers
    # ------------------------------------------------------------------

    def _connect(self, resource_location: str | dict[str, Any] | None, connection_kwargs: dict[str, Any]) -> Any:
        """Create a Tiled client/node from *resource_location*.

        Returns ``None`` when no location was given. Raises ``ImportError``
        when tiled is unavailable, ``TypeError``/``ValueError`` for
        malformed descriptors.
        """
        if resource_location is None:
            return None

        # Import within the method to keep the module importable without tiled installed
        try:
            from tiled.client import from_profile, from_uri
        except ImportError as exc:  # noqa: PERF203 - explicit, user-facing error message
            self.logger.error(_TILED_IMPORT_ERROR)
            raise ImportError(_TILED_IMPORT_ERROR) from exc

        if isinstance(resource_location, dict):
            # Work on a copy so the caller's mapping is not mutated.
            location_map = dict(resource_location)
            explicit_client = location_map.pop("client", None) or location_map.pop("node", None)
            if explicit_client is not None:
                return explicit_client

            # NOTE(review): if "kwargs" is present and truthy, the ``or``
            # short-circuits and a co-present "connection_kwargs" key is not
            # popped, so it later trips the "Unsupported keys" error below —
            # confirm whether supplying both should be supported.
            extra_kwargs = location_map.pop("kwargs", {}) or location_map.pop("connection_kwargs", {})
            if extra_kwargs and not isinstance(extra_kwargs, dict):
                raise TypeError("kwargs/connection_kwargs in resource_location must be a dictionary if provided.")
            # Explicit connection_kwargs take precedence over mapping-supplied ones.
            merged_kwargs = {**(extra_kwargs or {}), **connection_kwargs}

            if "uri" in location_map:
                return from_uri(location_map["uri"], **merged_kwargs)
            if "from_uri" in location_map:
                return from_uri(location_map["from_uri"], **merged_kwargs)
            if "profile" in location_map:
                return from_profile(location_map["profile"], **merged_kwargs)
            if "from_profile" in location_map:
                return from_profile(location_map["from_profile"], **merged_kwargs)

            if not location_map:
                raise ValueError("resource_location mapping did not contain a recognised connection descriptor.")
            raise ValueError(
                f"Unsupported keys in resource_location mapping for TiledSource: {', '.join(sorted(location_map))}"
            )

        if not isinstance(resource_location, str):
            raise TypeError("resource_location must be a string, mapping, or None.")

        location_str = resource_location.strip()
        merged_kwargs = dict(connection_kwargs)

        # "profile://name" and "profile:name" both select a local profile;
        # anything else is treated as a URI.
        if location_str.startswith("profile://"):
            profile_name = location_str[len("profile://") :]
            return from_profile(profile_name, **merged_kwargs)
        if location_str.startswith("profile:"):
            profile_name = location_str.split(":", 1)[1]
            return from_profile(profile_name, **merged_kwargs)

        return from_uri(location_str, **merged_kwargs)

    def _split_key(self, data_key: str) -> tuple[str, Optional[str]]:
        """Split ``"path@attr"`` into ``(path, attr)``; attr is None if no ``@``.

        Uses the last ``@`` so attribute names themselves may not contain one.
        """
        if "@" in data_key:
            base, attribute = data_key.rsplit("@", 1)
            return base.strip(), attribute.strip()
        return data_key.strip(), None

    def _resolve_node(self, data_key: str) -> Any:
        """Walk the Tiled tree to the node at *data_key* (base path applied).

        Resolved nodes are cached by their full token path. Raises
        ``KeyError`` when any path component cannot be traversed.
        """
        if data_key == "":
            return self._root_node

        tokens = self._base_path + _normalise_path_tokens(data_key)
        cache_key = "/".join(tokens)
        if cache_key in self._node_cache:
            return self._node_cache[cache_key]

        node = self._root_node
        try:
            for token in tokens:
                node = node[token]
        except (KeyError, TypeError, AttributeError) as exc:
            raise KeyError(f"Path '{data_key}' could not be resolved in the Tiled tree.") from exc

        self._node_cache[cache_key] = node
        return node

    def _prepare_slice(self, load_slice: ArraySlice) -> ArraySlice:
        """Normalise a slice argument (copies tuples, passes others through)."""
        if isinstance(load_slice, tuple):
            return tuple(load_slice)
        return load_slice

    def _to_numpy(self, data_obj: Any) -> np.ndarray:
        """Best-effort conversion of a read result to ``np.ndarray``.

        Tries, in order: already-an-ndarray, ``to_records`` (dataframes),
        ``to_numpy``, a ``values`` attribute, and finally ``np.asarray``.
        """
        if isinstance(data_obj, np.ndarray):
            return data_obj

        to_records = getattr(data_obj, "to_records", None)
        if callable(to_records):
            try:
                records = to_records(index=False)
                return np.asarray(records)
            except Exception:  # noqa: BLE001 - best-effort conversion
                pass

        to_numpy = getattr(data_obj, "to_numpy", None)
        if callable(to_numpy):
            try:
                array = to_numpy()
                if isinstance(array, np.ndarray):
                    return array
                return np.asarray(array)
            except TypeError:
                pass

        values = getattr(data_obj, "values", None)
        if values is not None:
            try:
                return np.asarray(values)
            except Exception:  # noqa: BLE001
                pass

        return np.asarray(data_obj)

    def _extract_shape(self, node: Any) -> tuple[int, ...]:
        """Read a shape from the node or its structure; ``()`` if unavailable."""
        shape = getattr(node, "shape", None)
        if shape is not None:
            return tuple(shape)

        structure = self._call_structure(node)
        if structure is not None:
            shape_attr = getattr(structure, "shape", None)
            if shape_attr is not None:
                return tuple(shape_attr)
        return ()

    def _extract_dtype(self, node: Any) -> np.dtype | None:
        """Read a dtype from the node or its structure; ``None`` if unavailable."""
        dtype = getattr(node, "dtype", None)
        if dtype is not None:
            try:
                return np.dtype(dtype)
            except TypeError:
                return None

        structure = self._call_structure(node)
        if structure is not None:
            dtype_attr = getattr(structure, "dtype", None)
            if dtype_attr is not None:
                try:
                    return np.dtype(dtype_attr)
                except TypeError:
                    return None
        return None

    def _extract_attributes(self, node: Any) -> dict[str, Any]:
        """Collect attributes from node ``metadata`` (or its ``attrs`` sub-dict)
        and from a node-level ``attrs`` dict, with the latter taking precedence.
        """
        attributes: dict[str, Any] = {}
        metadata = getattr(node, "metadata", None)
        if isinstance(metadata, dict):
            if isinstance(metadata.get("attrs"), dict):
                attributes.update(metadata["attrs"])
            else:
                attributes.update(metadata)

        attrs_obj = getattr(node, "attrs", None)
        if isinstance(attrs_obj, dict):
            attributes.update(attrs_obj)

        return attributes

    def _update_structure_cache(self, key_path: str, array: np.ndarray) -> None:
        """Record *array*'s shape and dtype in the structure cache."""
        entry = self._structure_cache.setdefault(key_path, {})
        entry["shape"] = tuple(array.shape)
        entry["dtype"] = array.dtype

    def _call_structure(self, node: Any) -> Any:
        """Call the node's ``structure()`` if present; ``None`` on any failure."""
        structure = getattr(node, "structure", None)
        if callable(structure):
            try:
                return structure()
            except Exception as exc:  # noqa: BLE001
                self.logger.debug("Failed to obtain structure for node %s: %s", node, exc)
                return None
        return None
395
+
396
+
397
+ def _extract_from_mapping(mapping: str | dict[str, Any] | None, keys: Sequence[str]) -> str | Sequence[str] | None:
398
+ if not isinstance(mapping, dict):
399
+ return None
400
+ for key in keys:
401
+ if key in mapping:
402
+ return mapping[key]
403
+ return None
@@ -0,0 +1,27 @@
1
+ # SPDX-License-Identifier: BSD-3-Clause
2
+ # Copyright 2025 MoDaCor Authors
3
+ #
4
+ # Redistribution and use in source and binary forms, with or without modification,
5
+ # are permitted provided that the following conditions are met:
6
+ # 1. Redistributions of source code must retain the above copyright notice, this
7
+ # list of conditions and the following disclaimer.
8
+ # 2. Redistributions in binary form must reproduce the above copyright notice,
9
+ # this list of conditions and the following disclaimer in the documentation
10
+ # and/or other materials provided with the distribution.
11
+ # 3. Neither the name of the copyright holder nor the names of its contributors
12
+ # may be used to endorse or promote products derived from this software without
13
+ # specific prior written permission.
14
 + # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
15
+ # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
16
+ # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
17
+ # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
18
+ # ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
19
+ # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
20
+ # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
21
+ # ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22
+ # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
23
+ # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24
+
25
+ __license__ = "BSD-3-Clause"
26
+ __copyright__ = "Copyright 2025 MoDaCor Authors"
27
+ __status__ = "Alpha"
@@ -0,0 +1,116 @@
1
+ # SPDX-License-Identifier: BSD-3-Clause
2
+ # /usr/bin/env python3
3
+ # -*- coding: utf-8 -*-
4
+
5
+ from __future__ import annotations
6
+
7
+ __coding__ = "utf-8"
8
+ __authors__ = ["Brian R. Pauw"]
9
+ __copyright__ = "Copyright 2025, The MoDaCor team"
10
+ __date__ = "06/06/2025"
11
+ __status__ = "Development" # "Development", "Production"
12
+ # end of header and standard imports
13
+
14
+ __all__ = ["YAMLSource"]
15
+
16
+ from logging import WARNING
17
+ from pathlib import Path
18
+ from typing import Any
19
+
20
+ import numpy as np
21
+ import yaml
22
+ from attrs import define, field, validators
23
+
24
+ from modacor.dataclasses.messagehandler import MessageHandler
25
+ from modacor.io.io_source import ArraySlice
26
+
27
+ from ..io_source import IoSource
28
+
29
+
30
def get_from_nested_dict_by_path(data, path):
    """
    Get a value from a nested dictionary using a slash-separated path.

    Parameters
    ----------
    data:
        Nested mapping to descend into.
    path:
        Slash-separated key path, e.g. ``"a/b/c"``. Leading, trailing and
        doubled slashes are ignored, so ``"/a//b/"`` resolves like
        ``"a/b"``. An empty path returns *data* unchanged.

    Raises
    ------
    KeyError
        If any component of the path is missing in *data*.
    """
    # Skip empty segments: previously "" or "a//b" crashed with a spurious
    # KeyError('') even though the docstring promised slash-stripping.
    for key in path.strip("/").split("/"):
        if key:
            data = data[key]
    return data
39
+
40
+
41
@define(kw_only=True)
class YAMLSource(IoSource):
    """
    This IoSource is used to load and make experiment metadata available to
    the processing pipeline modules.
    It can be filled in with information such as wavelength,
    geometry and other relevant information which is needed in multiple
    processing steps.
    The metadata is loaded from a YAML file with mappings; the file path is
    set in the configuration via ``resource_location``.
    Numeric entries are returned as float arrays; other entries (strings,
    tags) are available via :meth:`get_static_metadata`.
    """

    # Path to the YAML file holding the metadata.
    resource_location: Path = field(converter=Path, validator=validators.instance_of((Path)))
    # Parsed YAML content, populated by _preload().
    _yaml_data: dict[str, Any] = field(factory=dict, validator=validators.instance_of(dict))
    # Cache for values requested as numeric arrays.
    _data_cache: dict[str, np.ndarray] = field(factory=dict, validator=validators.instance_of(dict))
    _file_path: Path | None = field(default=None, validator=validators.optional(validators.instance_of(Path)))
    # Cache for non-numeric metadata such as strings and tags.
    _static_metadata_cache: dict[str, Any] = field(factory=dict, validator=validators.instance_of(dict))
    logging_level: int = field(default=WARNING, validator=validators.instance_of(int))
    logger: MessageHandler = field(init=False)

    def __attrs_post_init__(self):
        self.logger = MessageHandler(level=self.logging_level, name="YAMLSource")
        self._file_path = Path(self.resource_location) if self.resource_location is not None else None
        self._data_cache = {}  # for values that are float
        self._static_metadata_cache = {}  # for other elements such as strings and tags
        self._preload()  # load the yaml data immediately

    def _preload(self) -> None:
        """
        Load static metadata from the YAML file into ``_yaml_data``.

        Raises
        ------
        FileNotFoundError
            If ``resource_location`` does not point to an existing file.
        """
        # Explicit raise instead of the previous ``assert``: asserts are
        # stripped under ``python -O``, and the assert's message operand was
        # the (None) return value of an eager ``logger.error`` call.
        if self._file_path is None or not self._file_path.is_file():
            message = f"Static metadata file {self._file_path} does not exist."
            self.logger.error(message)
            raise FileNotFoundError(message)
        with open(self._file_path, "r") as f:
            # safe_load returns None for an empty document; guard so an
            # empty file yields an empty metadata dict instead of a crash.
            self._yaml_data.update(yaml.safe_load(f) or {})

    def get_static_metadata(self, data_key: str) -> Any:
        """Return the (arbitrary) value at *data_key*, or None if missing."""
        try:
            return get_from_nested_dict_by_path(self._yaml_data, data_key)
        except KeyError as e:
            self.logger.error(f"Static metadata key '{data_key}' not in YAML data: {e}")
            return None

    def get_data(self, data_key: str, load_slice: ArraySlice = ...) -> np.ndarray:
        """
        Return the value at *data_key* as a float ndarray, optionally sliced.

        The raw YAML value is cached on first access; the default slice
        (``...``) returns the full array.
        """
        if data_key not in self._data_cache:
            self.logger.info(f"Data key '{data_key}' not in static metadata cache yet.")
            # try to convert from the yaml data into an np.asarray
            self._data_cache.update({data_key: self.get_static_metadata(data_key)})

        return np.asarray(self._data_cache.get(data_key), dtype=float)[load_slice]

    def get_data_shape(self, data_key: str) -> tuple[int, ...]:
        """
        Return the shape of cached data for *data_key*, or ``()`` if the key
        has not been loaded via :meth:`get_data` yet.
        """
        if data_key in self._data_cache:
            return np.asarray(self._data_cache.get(data_key)).shape
        return ()

    def get_data_dtype(self, data_key: str) -> np.dtype | None:
        """
        Return the dtype of cached data for *data_key*, or ``None`` if the
        key has not been loaded via :meth:`get_data` yet.
        """
        if data_key in self._data_cache:
            return np.asarray(self._data_cache.get(data_key)).dtype
        return None

    def get_data_attributes(self, data_key):
        # not implemented for YAML, so just call the superclass method
        return super().get_data_attributes(data_key)
@@ -0,0 +1,53 @@
1
+ # SPDX-License-Identifier: BSD-3-Clause
2
+ # /usr/bin/env python3
3
+ # -*- coding: utf-8 -*-
4
+
5
+ from __future__ import annotations
6
+
7
+ __coding__ = "utf-8"
8
+ __authors__ = ["Brian R. Pauw"] # add names to the list as appropriate
9
+ __copyright__ = "Copyright 2025, The MoDaCor team"
10
+ __date__ = "25/11/2025"
11
+ __status__ = "Development" # "Development", "Production"
12
+ # end of header and standard imports
13
+
14
+ # official steps are imported here for ease
15
+ from modacor.modules.base_modules.bitwise_or_masks import BitwiseOrMasks
16
+ from modacor.modules.base_modules.combine_uncertainties import CombineUncertainties
17
+ from modacor.modules.base_modules.combine_uncertainties_max import CombineUncertaintiesMax
18
+ from modacor.modules.base_modules.divide import Divide
19
+ from modacor.modules.base_modules.find_scale_factor1d import FindScaleFactor1D
20
+ from modacor.modules.base_modules.multiply import Multiply
21
+ from modacor.modules.base_modules.multiply_databundles import MultiplyDatabundles
22
+ from modacor.modules.base_modules.poisson_uncertainties import PoissonUncertainties
23
+ from modacor.modules.base_modules.reduce_dimensionality import ReduceDimensionality
24
+ from modacor.modules.base_modules.subtract import Subtract
25
+ from modacor.modules.base_modules.subtract_databundles import SubtractDatabundles
26
+ from modacor.modules.technique_modules.scattering.index_pixels import IndexPixels
27
+ from modacor.modules.technique_modules.scattering.indexed_averager import IndexedAverager
28
+ from modacor.modules.technique_modules.scattering.pixel_coordinates_3d import PixelCoordinates3D
29
+ from modacor.modules.technique_modules.scattering.solid_angle_correction import SolidAngleCorrection
30
+ from modacor.modules.technique_modules.scattering.xs_geometry import XSGeometry
31
+ from modacor.modules.technique_modules.scattering.xs_geometry_from_pixel_coordinates import (
32
+ XSGeometryFromPixelCoordinates,
33
+ )
34
+
35
+ __all__ = [
36
+ "BitwiseOrMasks",
37
+ "CombineUncertainties",
38
+ "CombineUncertaintiesMax",
39
+ "Divide",
40
+ "IndexPixels",
41
+ "IndexedAverager",
42
+ "FindScaleFactor1D",
43
+ "Multiply",
44
+ "MultiplyDatabundles",
45
+ "PixelCoordinates3D",
46
+ "PoissonUncertainties",
47
+ "ReduceDimensionality",
48
+ "SolidAngleCorrection",
49
+ "SubtractDatabundles",
50
+ "Subtract",
51
+ "XSGeometry",
52
+ "XSGeometryFromPixelCoordinates",
53
+ ]
File without changes