essreduce 25.2.4__py3-none-any.whl → 25.2.6__py3-none-any.whl

This diff shows the changes between two publicly released versions of this package, as they appear in their public registry, and is provided for informational purposes only.
ess/reduce/live/raw.py CHANGED
@@ -15,14 +15,12 @@ options:
  - `'xy_plane'`: Project the data onto the x-y plane, i.e., perpendicular to the beam.
  - `'cylinder_mantle_z'`: Project the data onto the mantle of a cylinder aligned with the
    z-axis.
- - `LogicalView`: Not a projection in the traditional sense, but a way to select and
-   flatten dimensions of the data.
+ - A callable, e.g., to select and flatten dimensions of the data.
  """

  from __future__ import annotations

  from collections.abc import Callable, Sequence
- from dataclasses import dataclass, field
  from math import ceil
  from typing import Literal, NewType

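For illustration, a plain function can now play the role that the removed LogicalView dataclass used to fill. A minimal sketch of such a callable projection, assuming a detector whose single flat dimension can be folded into hypothetical 'layer' and 'pixel' dimensions:

import scipp as sc

def select_first_layer(da: sc.DataArray) -> sc.DataArray:
    # Fold the flat detector dimension into hypothetical 'layer'/'pixel' dims,
    # keep only the first layer, and return a copy, mirroring what LogicalView
    # used to do via its fold/select options.
    folded = da.fold(da.dim, sizes={'layer': 4, 'pixel': 256})
    return folded['layer', 0].copy()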
@@ -140,44 +138,6 @@ class Histogrammer:
          return self._hist(replicated, coords=self._coords) / self.replicas


- @dataclass
- class LogicalView:
-     """
-     Logical view of a multi-dimensional detector.
-
-     Instances can be used as a "projection" function for a detector view.
-
-     Parameters
-     ----------
-     fold:
-         Dimensions to fold. This is useful is the raw data has a single dimension that
-         corresponds to multiple dimensions in the logical view.
-     transpose:
-         Dimensions to transpose. This is useful for reordering dimensions.
-     select:
-         Dimensions with associated index to select from the data. This extracts a slice
-         of the data for each given dimension.
-     flatten:
-         Dimensions to flatten.
-     """
-
-     fold: dict[str, int] | None = None
-     transpose: tuple[str, ...] | None = None
-     select: dict[str, int] = field(default_factory=dict)
-     flatten: dict[str, list[str]] = field(default_factory=dict)
-
-     def __call__(self, da: sc.DataArray) -> sc.DataArray:
-         if self.fold is not None:
-             da = da.fold(da.dim, sizes=self.fold)
-         if self.transpose is not None:
-             da = da.transpose(self.transpose)
-         for dim, index in self.select.items():
-             da = da[dim, index]
-         for to, dims in self.flatten.items():
-             da = da.flatten(dims, to=to)
-         return da.copy()
-
-
  class Detector:
      def __init__(self, detector_number: sc.Variable):
          self._data = sc.DataArray(
@@ -295,7 +255,7 @@ class RollingDetectorView(Detector):
          else:
              indices = sc.ones(sizes=self.data.sizes, dtype='int32', unit=None)
              indices = sc.cumsum(indices, mode='exclusive')
-         if isinstance(self._projection, LogicalView):
+         if self._projection is not None:
              indices = self._projection(indices)
          return roi.ROIFilter(indices=indices, norm=norm)

@@ -357,17 +317,23 @@ class RollingDetectorView(Detector):
          )

      @staticmethod
-     def from_detector_and_logical_view(
-         detector: CalibratedDetector[SampleRun],
-         window: RollingDetectorViewWindow,
-         projection: LogicalView,
-     ) -> RollingDetectorView:
-         """Helper for constructing via a Sciline workflow."""
-         return RollingDetectorView(
-             detector_number=detector.coords['detector_number'],
-             window=window,
-             projection=projection,
-         )
+     def from_detector_with_projection(
+         projection: Callable[[sc.DataArray], sc.DataArray] | None,
+     ) -> Callable[
+         [CalibratedDetector[SampleRun], RollingDetectorViewWindow], RollingDetectorView
+     ]:
+         def factory(
+             detector: CalibratedDetector[SampleRun],
+             window: RollingDetectorViewWindow,
+         ) -> RollingDetectorView:
+             """Helper for constructing via a Sciline workflow."""
+             return RollingDetectorView(
+                 detector_number=detector.coords['detector_number'],
+                 window=window,
+                 projection=projection,
+             )
+
+         return factory

      @staticmethod
      def from_nexus(
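A sketch of how the returned factory might be wired up, assuming wf is a Sciline pipeline that can already provide CalibratedDetector[SampleRun] and RollingDetectorViewWindow, and reusing the hypothetical select_first_layer callable from above:

factory = RollingDetectorView.from_detector_with_projection(projection=select_first_layer)
wf.insert(factory)  # Sciline reads the closure's parameter and return annotations
view = wf.compute(RollingDetectorView)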
@@ -375,7 +341,9 @@ class RollingDetectorView(Detector):
          *,
          detector_name: str,
          window: int,
-         projection: Literal['xy_plane', 'cylinder_mantle_z'] | LogicalView,
+         projection: Literal['xy_plane', 'cylinder_mantle_z']
+         | Callable[[sc.DataArray], sc.DataArray]
+         | None = None,
          resolution: dict[str, int] | None = None,
          pixel_noise: Literal['cylindrical'] | sc.Variable | None = None,
      ) -> RollingDetectorView:
@@ -396,10 +364,12 @@ class RollingDetectorView(Detector):
              Size of the rolling window.
          projection:
              Projection to use for the detector data. This can be a string selecting a
-             predefined projection or a LogicalView instance.
+             predefined projection or a function that takes a DataArray and returns a
+             DataArray. The predefined projections are 'xy_plane' and
+             'cylinder_mantle_z'.
          resolution:
-             Resolution to use for histogramming the detector data. Not required when the
-             projection is a LogicalView.
+             Resolution to use for histogramming the detector data. Only required for
+             'xy_plane' and 'cylinder_mantle_z' projections.
          pixel_noise:
              Noise to add to the pixel positions. This can be a scalar value to add
              Gaussian noise to the pixel positions or the string 'cylindrical' to add
@@ -413,13 +383,7 @@ class RollingDetectorView(Detector):
          noise_replica_count = 16
          wf = GenericNeXusWorkflow(run_types=[SampleRun], monitor_types=[])
          wf[RollingDetectorViewWindow] = window
-         if isinstance(projection, LogicalView):
-             wf[LogicalView] = projection
-             wf[NeXusTransformation[snx.NXdetector, SampleRun]] = NeXusTransformation[
-                 snx.NXdetector, SampleRun
-             ](sc.scalar(1))
-             wf.insert(RollingDetectorView.from_detector_and_logical_view)
-         elif projection == 'cylinder_mantle_z':
+         if projection == 'cylinder_mantle_z':
              wf.insert(make_cylinder_mantle_coords)
              wf.insert(RollingDetectorView.from_detector_and_histogrammer)
              wf[DetectorViewResolution] = resolution
@@ -428,7 +392,12 @@ class RollingDetectorView(Detector):
              wf.insert(RollingDetectorView.from_detector_and_histogrammer)
              wf[DetectorViewResolution] = resolution
          else:
-             raise ValueError(f"Invalid {projection=}.")
+             wf[NeXusTransformation[snx.NXdetector, SampleRun]] = NeXusTransformation[
+                 snx.NXdetector, SampleRun
+             ](sc.scalar(1))
+             wf.insert(
+                 RollingDetectorView.from_detector_with_projection(projection=projection)
+             )
          if isinstance(pixel_noise, sc.Variable):
              wf.insert(gaussian_position_noise)
              wf[PositionNoiseSigma] = pixel_noise
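Putting the from_nexus changes together, two hedged usage sketches; the file path, detector name, and resolution keys below are hypothetical:

# Predefined projection: histogrammed onto the x-y plane, resolution required.
xy_view = RollingDetectorView.from_nexus(
    'experiment.nxs',
    detector_name='mantle_detector',
    window=20,
    projection='xy_plane',
    resolution={'y': 128, 'x': 128},
)

# Callable projection (e.g. the select_first_layer sketch above); no resolution needed.
logical_view = RollingDetectorView.from_nexus(
    'experiment.nxs',
    detector_name='mantle_detector',
    window=20,
    projection=select_first_layer,
)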
ess/reduce/nexus/_nexus_loader.py CHANGED
@@ -180,10 +180,15 @@ def _attempt_to_open_without_locking(
      # HDF5 tracks file locking flags internally within a single process.
      # If the same file is opened multiple times, we can get a flag mismatch.
      # We can try opening without locking, maybe this matches the original flags.
-     if "file locking flag values don't match" in err.args[0]:
-         return True
-     if "file locking 'ignore disabled locks' flag values don't match" in err.args[0]:
-         return True
+     error_message = err.args[0]
+     if isinstance(error_message, str):
+         if "file locking flag values don't match" in error_message:
+             return True
+         if (
+             "file locking 'ignore disabled locks' flag values don't match"
+             in error_message
+         ):
+             return True
      return False


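The added isinstance guard matters because err.args[0] is not guaranteed to be a string (for a plain OSError it can be an errno int, for example). A self-contained sketch of the same idea, using a hypothetical helper name:

def _is_file_locking_mismatch(err: OSError) -> bool:
    # args[0] may be an int or missing entirely, so check the type before
    # doing substring matching on the HDF5 error message.
    message = err.args[0] if err.args else None
    return isinstance(message, str) and "file locking" in message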
ess/reduce/streaming.py CHANGED
@@ -68,7 +68,18 @@ class Accumulator(ABC, Generic[T]):
      def _do_push(self, value: T) -> None: ...

      @property
-     @abstractmethod
+     def is_empty(self) -> bool:
+         """
+         Check if the accumulator is empty.
+
+         Returns
+         -------
+         :
+             True if the accumulator is empty, False otherwise.
+         """
+         return False
+
+     @property
      def value(self) -> T:
          """
          Get the accumulated value.
@@ -77,6 +88,24 @@ class Accumulator(ABC, Generic[T]):
          -------
          :
              Accumulated value.
+
+         Raises
+         ------
+         ValueError
+             If the accumulator is empty.
+         """
+         if self.is_empty:
+             raise ValueError("Cannot get value from empty accumulator")
+         return self._get_value()
+
+     @abstractmethod
+     def _get_value(self) -> T:
+         """Return the accumulated value, assuming it exists."""
+
+     @abstractmethod
+     def clear(self) -> None:
+         """
+         Clear the accumulator, resetting it to its initial state.
          """


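With the reworked base class, subclasses implement _do_push, _get_value, and clear, and may override is_empty (the default returns False); value itself now performs the empty check. A minimal hypothetical subclass as a sketch:

from typing import Any

from ess.reduce.streaming import Accumulator


class CountAccumulator(Accumulator[int]):
    """Hypothetical accumulator that simply counts pushed values."""

    def __init__(self, **kwargs: Any) -> None:
        super().__init__(**kwargs)
        self._count = 0

    def _do_push(self, value: int) -> None:
        self._count += 1

    def _get_value(self) -> int:
        return self._count

    def clear(self) -> None:
        self._count = 0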
@@ -92,7 +121,10 @@ class EternalAccumulator(Accumulator[T]):
          self._value: T | None = None

      @property
-     def value(self) -> T:
+     def is_empty(self) -> bool:
+         return self._value is None
+
+     def _get_value(self) -> T:
          return deepcopy(self._value)

      def _do_push(self, value: T) -> None:
@@ -101,6 +133,10 @@ class EternalAccumulator(Accumulator[T]):
          else:
              self._value += value

+     def clear(self) -> None:
+         """Clear the accumulated value."""
+         self._value = None
+

  class RollingAccumulator(Accumulator[T]):
      """
@@ -121,7 +157,10 @@ class RollingAccumulator(Accumulator[T]):
          self._values: list[T] = []

      @property
-     def value(self) -> T:
+     def is_empty(self) -> bool:
+         return len(self._values) == 0
+
+     def _get_value(self) -> T:
          # Naive and potentially slow implementation if values and/or window are large!
          return sc.reduce(self._values).sum()

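The practical effect of the new contract, sketched with RollingAccumulator and assuming the push() entry point and window= argument from earlier releases:

import scipp as sc
from ess.reduce.streaming import RollingAccumulator

acc = RollingAccumulator(window=3)
assert acc.is_empty              # acc.value would raise ValueError here
acc.push(sc.scalar(1.0))
assert not acc.is_empty
assert sc.identical(acc.value, sc.scalar(1.0))
acc.clear()                      # back to the initial, empty state
assert acc.is_empty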
@@ -130,6 +169,68 @@ class RollingAccumulator(Accumulator[T]):
          if len(self._values) > self._window:
              self._values.pop(0)

+     def clear(self) -> None:
+         """Clear the accumulated values."""
+         self._values = []
+
+
+ class MinAccumulator(Accumulator):
+     """Keeps the minimum value seen so far.
+
+     Only supports scalar values.
+     """
+
+     def __init__(self, **kwargs: Any) -> None:
+         super().__init__(**kwargs)
+         self._cur_min: sc.Variable | None = None
+
+     def _do_push(self, value: sc.Variable) -> None:
+         if self._cur_min is None:
+             self._cur_min = value
+         else:
+             self._cur_min = min(self._cur_min, value)
+
+     @property
+     def is_empty(self) -> bool:
+         """Check if the accumulator has collected a minimum value."""
+         return self._cur_min is None
+
+     def _get_value(self) -> Any:
+         return self._cur_min
+
+     def clear(self) -> None:
+         """Clear the accumulated minimum value."""
+         self._cur_min = None
+
+
+ class MaxAccumulator(Accumulator):
+     """Keeps the maximum value seen so far.
+
+     Only supports scalar values.
+     """
+
+     def __init__(self, **kwargs: Any) -> None:
+         super().__init__(**kwargs)
+         self._cur_max: sc.Variable | None = None
+
+     @property
+     def is_empty(self) -> bool:
+         """Check if the accumulator has collected a maximum value."""
+         return self._cur_max is None
+
+     def _do_push(self, value: sc.Variable) -> None:
+         if self._cur_max is None:
+             self._cur_max = value
+         else:
+             self._cur_max = max(self._cur_max, value)
+
+     def _get_value(self) -> sc.Variable | None:
+         return self._cur_max
+
+     def clear(self) -> None:
+         """Clear the accumulated maximum value."""
+         self._cur_max = None
+


  class StreamProcessor:
      """
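A short sketch of the new scalar accumulators, again assuming the inherited push() entry point:

import scipp as sc
from ess.reduce.streaming import MaxAccumulator, MinAccumulator

lowest, highest = MinAccumulator(), MaxAccumulator()
for value in (sc.scalar(3.0), sc.scalar(1.0), sc.scalar(2.0)):
    lowest.push(value)
    highest.push(value)
assert sc.identical(lowest.value, sc.scalar(1.0))
assert sc.identical(highest.value, sc.scalar(3.0))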
@@ -299,6 +400,16 @@ class StreamProcessor:
              self._finalize_workflow[key] = self._accumulators[key].value
          return self._finalize_workflow.compute(self._target_keys)

+     def clear(self) -> None:
+         """
+         Clear all accumulators, resetting them to their initial state.
+
+         This is useful for restarting a streaming computation without
+         creating a new StreamProcessor instance.
+         """
+         for accumulator in self._accumulators.values():
+             accumulator.clear()
+


  def _find_descendants(
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: essreduce
- Version: 25.2.4
+ Version: 25.2.6
  Summary: Common data reduction tools for the ESS facility
  Author: Scipp contributors
  License: BSD 3-Clause License
@@ -3,16 +3,16 @@ ess/reduce/data.py,sha256=vaoeAJ6EpK1YghOiAALLdWiW17TgUnnnt0H-RGiGzXk,3756
  ess/reduce/logging.py,sha256=6n8Czq4LZ3OK9ENlKsWSI1M3KvKv6_HSoUiV4__IUlU,357
  ess/reduce/parameter.py,sha256=4sCfoKOI2HuO_Q7JLH_jAXnEOFANSn5P3NdaOBzhJxc,4635
  ess/reduce/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- ess/reduce/streaming.py,sha256=ffFiWpq9AK1GOfRG-rlvT_Gz7HMBnE4FD0qPZiej4Gg,10306
+ ess/reduce/streaming.py,sha256=_zFaT9vZQ4HZ1g9qbCQxetenITdqGZMPhyNNffMpGKw,13244
  ess/reduce/ui.py,sha256=zmorAbDwX1cU3ygDT--OP58o0qU7OBcmJz03jPeYSLA,10884
  ess/reduce/uncertainty.py,sha256=LR4O6ApB6Z-W9gC_XW0ajupl8yFG-du0eee1AX_R-gk,6990
  ess/reduce/workflow.py,sha256=sL34T_2Cjl_8iFlegujxI9VyOUwo6erVC8pOXnfWgYw,3060
  ess/reduce/live/__init__.py,sha256=jPQVhihRVNtEDrE20PoKkclKV2aBF1lS7cCHootgFgI,204
- ess/reduce/live/raw.py,sha256=pzXsPZQERtUm5tabTXjxd-XHH4WDDP13TTBG0lGPcqg,25262
+ ess/reduce/live/raw.py,sha256=hyWkDJ0WYE2TS12dVxpRUh6RkzcUJL0bVDd4JjTidi0,24217
  ess/reduce/live/roi.py,sha256=Hs-pW98k41WU6Kl3UQ41kQawk80c2QNOQ_WNctLzDPE,3795
  ess/reduce/live/workflow.py,sha256=bsbwvTqPhRO6mC__3b7MgU7DWwAnOvGvG-t2n22EKq8,4285
  ess/reduce/nexus/__init__.py,sha256=59bxKkNYg8DYcSykNvH6nCa5SYchJC4SbgZEKhkNdYc,967
- ess/reduce/nexus/_nexus_loader.py,sha256=Y8ILMFEP9KxVfyEMGSFKoZZS79DIs0niRqI2Lq2TqZk,19720
+ ess/reduce/nexus/_nexus_loader.py,sha256=NpBTBjWLKSnW0B1EarFdt2QxV0X7QBQLk2dSJq2ZuOw,19847
  ess/reduce/nexus/json_generator.py,sha256=ME2Xn8L7Oi3uHJk9ZZdCRQTRX-OV_wh9-DJn07Alplk,2529
  ess/reduce/nexus/json_nexus.py,sha256=QrVc0p424nZ5dHX9gebAJppTw6lGZq9404P_OFl1giA,10282
  ess/reduce/nexus/types.py,sha256=15XcHbNbOfnAYjWXzzKyYDVNyNixRnP0hJ-Q2duwMWE,9896
@@ -36,9 +36,9 @@ ess/reduce/widgets/_spinner.py,sha256=2VY4Fhfa7HMXox2O7UbofcdKsYG-AJGrsgGJB85nDX
  ess/reduce/widgets/_string_widget.py,sha256=iPAdfANyXHf-nkfhgkyH6gQDklia0LebLTmwi3m-iYQ,1482
  ess/reduce/widgets/_switchable_widget.py,sha256=fjKz99SKLhIF1BLgGVBSKKn3Lu_jYBwDYGeAjbJY3Q8,2390
  ess/reduce/widgets/_vector_widget.py,sha256=aTaBqCFHZQhrIoX6-sSqFWCPePEW8HQt5kUio8jP1t8,1203
- essreduce-25.2.4.dist-info/LICENSE,sha256=nVEiume4Qj6jMYfSRjHTM2jtJ4FGu0g-5Sdh7osfEYw,1553
- essreduce-25.2.4.dist-info/METADATA,sha256=4esgkOAUN-XvPn_1HnmisKvJvG5RA0UGFUrmVQPyg50,3708
- essreduce-25.2.4.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
- essreduce-25.2.4.dist-info/entry_points.txt,sha256=PMZOIYzCifHMTe4pK3HbhxUwxjFaZizYlLD0td4Isb0,66
- essreduce-25.2.4.dist-info/top_level.txt,sha256=0JxTCgMKPLKtp14wb1-RKisQPQWX7i96innZNvHBr-s,4
- essreduce-25.2.4.dist-info/RECORD,,
+ essreduce-25.2.6.dist-info/LICENSE,sha256=nVEiume4Qj6jMYfSRjHTM2jtJ4FGu0g-5Sdh7osfEYw,1553
+ essreduce-25.2.6.dist-info/METADATA,sha256=Gz0Tmn2PFemCCTqJEHbPOMPGLl0ssONSerL6spFiYa4,3708
+ essreduce-25.2.6.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
+ essreduce-25.2.6.dist-info/entry_points.txt,sha256=PMZOIYzCifHMTe4pK3HbhxUwxjFaZizYlLD0td4Isb0,66
+ essreduce-25.2.6.dist-info/top_level.txt,sha256=0JxTCgMKPLKtp14wb1-RKisQPQWX7i96innZNvHBr-s,4
+ essreduce-25.2.6.dist-info/RECORD,,
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (75.8.0)
+ Generator: setuptools (75.8.2)
  Root-Is-Purelib: true
  Tag: py3-none-any