essreduce 25.11.6__py3-none-any.whl → 25.12.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ess/reduce/live/raw.py +3 -1
- ess/reduce/live/roi.py +191 -7
- ess/reduce/time_of_flight/eto_to_tof.py +14 -10
- ess/reduce/time_of_flight/fakes.py +6 -13
- ess/reduce/time_of_flight/lut.py +25 -23
- ess/reduce/time_of_flight/types.py +34 -5
- ess/reduce/time_of_flight/workflow.py +24 -1
- {essreduce-25.11.6.dist-info → essreduce-25.12.0.dist-info}/METADATA +4 -3
- {essreduce-25.11.6.dist-info → essreduce-25.12.0.dist-info}/RECORD +13 -13
- {essreduce-25.11.6.dist-info → essreduce-25.12.0.dist-info}/WHEEL +0 -0
- {essreduce-25.11.6.dist-info → essreduce-25.12.0.dist-info}/entry_points.txt +0 -0
- {essreduce-25.11.6.dist-info → essreduce-25.12.0.dist-info}/licenses/LICENSE +0 -0
- {essreduce-25.11.6.dist-info → essreduce-25.12.0.dist-info}/top_level.txt +0 -0
ess/reduce/live/raw.py
CHANGED
@@ -434,7 +434,9 @@ class RollingDetectorView(Detector):
         indices = sc.cumsum(indices, mode='exclusive')
         if self._projection is not None:
             indices = self._projection(indices)
-        return roi.ROIFilter(
+        return roi.ROIFilter(
+            indices=indices, norm=norm, spatial_dims=self._detector_number.dims
+        )

     def transform_weights(
         self,
ess/reduce/live/roi.py
CHANGED
@@ -4,10 +4,16 @@

 from __future__ import annotations

+from collections.abc import Sequence
 from typing import TypeVar

 import numpy as np
 import scipp as sc
+from matplotlib.path import Path
+
+# Type for polygon vertices: either scipp Variable (coord-based) or sequence of
+# floats (index-based)
+PolygonVertices = sc.Variable | Sequence[float]


 def select_indices_in_intervals(
@@ -40,11 +46,137 @@ def select_indices_in_intervals(
     return indices.rename_dims({indices.dim: out_dim})


+def _get_polygon_axis_data(
+    axis_name: str,
+    vertices: PolygonVertices,
+    indices: sc.DataArray,
+) -> tuple[np.ndarray, sc.Variable]:
+    """
+    Get polygon vertices and corresponding coordinates for one axis.
+
+    Parameters
+    ----------
+    axis_name:
+        Name of the axis (dimension or coordinate name).
+    vertices:
+        Polygon vertices for this axis - either a scipp Variable (coord-based)
+        or a sequence of floats (index-based).
+    indices:
+        DataArray with indices to select from.
+
+    Returns
+    -------
+    :
+        Tuple of (polygon_vertices_array, coordinate_values).
+    """
+    if isinstance(vertices, sc.Variable):
+        # Coord-based: use named coordinate from indices
+        if axis_name not in indices.coords:
+            raise KeyError(
+                f"Coordinate '{axis_name}' not found in indices. "
+                f"Available coordinates: {list(indices.coords.keys())}"
+            )
+        coords = indices.coords[axis_name]
+        if indices.coords.is_edges(axis_name):
+            coords = sc.midpoints(coords, dim=axis_name)
+        # Validate units match
+        if vertices.unit != coords.unit:
+            raise sc.UnitError(
+                f"Unit mismatch for '{axis_name}': "
+                f"polygon has unit '{vertices.unit}' "
+                f"but coordinates have unit '{coords.unit}'"
+            )
+        return vertices.values, coords
+    else:
+        # Index-based: use dimension indices as coordinates
+        if axis_name not in indices.sizes:
+            raise KeyError(
+                f"Dimension '{axis_name}' not found in indices. "
+                f"Available dimensions: {list(indices.dims)}"
+            )
+        # Create coordinate from dimension indices
+        coords = sc.arange(axis_name, indices.sizes[axis_name], dtype='float64')
+        return np.asarray(vertices, dtype='float64'), coords
+
+
+def select_indices_in_polygon(
+    polygon: dict[str, PolygonVertices],
+    indices: sc.DataArray,
+) -> sc.Variable:
+    """
+    Return subset of indices that fall within the polygon.
+
+    Parameters
+    ----------
+    polygon:
+        Polygon vertices as a dict mapping axis names to 1-D arrays of vertex
+        positions. Must contain exactly two entries. Each entry can be either:
+
+        - A scipp Variable for coordinate-based selection (uses named coordinates
+          from the indices DataArray, with unit validation)
+        - A sequence of floats for index-based selection (uses dimension indices
+          as coordinates, no unit handling)
+
+        The two axes can independently use either mode.
+    indices:
+        DataArray with indices to select from. For coordinate-based axes, must have
+        coordinates matching the axis names. For index-based axes, must have
+        dimensions matching the axis names.
+
+    Returns
+    -------
+    :
+        Variable with selected indices.
+    """
+    out_dim = 'index'
+
+    if len(polygon) != 2:
+        raise ValueError(
+            f"Polygon must have exactly two coordinate arrays, got {len(polygon)}"
+        )
+
+    # Get the two axis names from the polygon dict
+    axis_a, axis_b = polygon.keys()
+
+    # Get polygon vertices and coordinates for each axis
+    poly_a, a_coords = _get_polygon_axis_data(axis_a, polygon[axis_a], indices)
+    poly_b, b_coords = _get_polygon_axis_data(axis_b, polygon[axis_b], indices)
+
+    # Extract polygon vertices as 2D array
+    vertices_2d = np.column_stack([poly_a, poly_b])
+
+    # Broadcast coordinates to match indices shape and flatten
+    a_flat = sc.broadcast(a_coords, sizes=indices.sizes).values.flatten()
+    b_flat = sc.broadcast(b_coords, sizes=indices.sizes).values.flatten()
+    points = np.column_stack([a_flat, b_flat])
+
+    # Use matplotlib Path for point-in-polygon test
+    polygon_path = Path(vertices_2d)
+    mask = polygon_path.contains_points(points)
+
+    # Get indices that are inside the polygon
+    all_indices = indices.data.flatten(to=out_dim)
+
+    # Apply mask first, then concat if binned (mask is per-bin, not per-index)
+    sc_mask = sc.array(dims=[out_dim], values=mask)
+    selected = all_indices[sc_mask]
+
+    if selected.bins is not None:
+        selected = selected.bins.concat().value
+        selected = selected.rename_dims({selected.dim: out_dim})
+
+    return sc.array(dims=[out_dim], values=selected.values, dtype='int32', unit=None)
+
+
 T = TypeVar('T', sc.DataArray, sc.Variable)


 def apply_selection(
-    data: T,
+    data: T,
+    *,
+    selection: sc.Variable,
+    norm: float = 1.0,
+    spatial_dims: tuple[str, ...] | None = None,
 ) -> tuple[T, sc.Variable]:
     """
     Apply selection to data.
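
The new `select_indices_in_polygon` lets the two polygon axes independently use coordinate-based or index-based vertices. Below is a minimal, hedged sketch of how it might be called; the 4×3 detector layout, dimension names, and values are invented for illustration and are not taken from the package's documentation.

```python
import scipp as sc

from ess.reduce.live import roi

# Hypothetical 4x3 detector: one flat index per (y, x) pixel, with a physical
# 'x' coordinate in metres and no coordinate for 'y'.
sizes = {'y': 4, 'x': 3}
indices = sc.DataArray(
    sc.arange('detector_number', 4 * 3, unit=None).fold(
        dim='detector_number', sizes=sizes
    ),
    coords={'x': sc.linspace('x', 0.0, 1.0, num=3, unit='m')},
)

# Mixed-mode polygon: 'x' is coordinate-based (scipp Variable, unit-checked),
# 'y' is index-based (plain floats interpreted as dimension indices).
polygon = {
    'x': sc.array(dims=['vertex'], values=[0.0, 1.0, 1.0, 0.0], unit='m'),
    'y': [0.5, 0.5, 2.5, 2.5],
}
selected = roi.select_indices_in_polygon(polygon=polygon, indices=indices)
print(selected.values)  # flat detector indices whose pixel centres fall inside
```
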
@@ -58,6 +190,9 @@ def apply_selection(
     norm:
         Normalization factor to apply to the selected data. This is used for cases where
        indices may be selected multiple times.
+    spatial_dims:
+        Dimensions to flatten into 'detector_number'. If None, all dims are flattened.
+        For dense data like (time, x, y), pass ('x', 'y') to preserve time.

     Returns
     -------
@@ -65,16 +200,25 @@ def apply_selection(
         Filtered data and scale factor.
     """
     indices, counts = np.unique(selection.values, return_counts=True)
-    if
-
-
-
+    if spatial_dims is None:
+        dims_to_flatten = data.dims
+    else:
+        dims_to_flatten = tuple(d for d in data.dims if d in spatial_dims)
+    if len(dims_to_flatten) > 0 and data.dims != ('detector_number',):
+        data = data.flatten(dims=dims_to_flatten, to='detector_number')
+    scale = sc.array(dims=['detector_number'], values=counts) / norm
+    return data['detector_number', indices], scale


 class ROIFilter:
     """Filter for selecting a region of interest (ROI)."""

-    def __init__(
+    def __init__(
+        self,
+        indices: sc.Variable | sc.DataArray,
+        norm: float = 1.0,
+        spatial_dims: tuple[str, ...] | None = None,
+    ) -> None:
         """
         Create a new ROI filter.

@@ -86,15 +230,50 @@ class ROIFilter:
             2-D array. Each element in the array may correspond to a single index (when
             there is no projection) or a list of indices that were projected into an
             output pixel.
+        spatial_dims:
+            Dimensions of the detector that should be flattened when applying the
+            filter. If None, defaults to indices.dims. For projections where indices
+            represent a subset of the detector, this should be set to the full
+            detector dimensions.
         """
         self._indices = indices
         self._selection = sc.array(dims=['index'], values=[])
         self._norm = norm
+        self._spatial_dims = spatial_dims if spatial_dims is not None else indices.dims

     def set_roi_from_intervals(self, intervals: sc.DataGroup) -> None:
         """Set the ROI from (typically 1 or 2) intervals."""
         self._selection = select_indices_in_intervals(intervals, self._indices)

+    def set_roi_from_polygon(self, polygon: dict[str, PolygonVertices]) -> None:
+        """
+        Set the ROI from polygon vertices.
+
+        Parameters
+        ----------
+        polygon:
+            Polygon vertices as a dict mapping axis names to 1-D arrays of vertex
+            positions. Must contain exactly two entries. Each entry can be either:
+
+            - A scipp Variable for coordinate-based selection (uses named coordinates
+              from the indices DataArray, with unit validation)
+            - A sequence of floats for index-based selection (uses dimension indices
+              as coordinates, no unit handling)
+
+            The two axes can independently use either mode.
+        """
+        if not isinstance(self._indices, sc.DataArray):
+            raise TypeError("Polygon ROI requires indices to be a DataArray")
+        self._selection = select_indices_in_polygon(
+            polygon=polygon,
+            indices=self._indices,
+        )
+
+    @property
+    def spatial_dims(self) -> tuple[str, ...]:
+        """Dimensions that define the spatial extent of the ROI."""
+        return self._spatial_dims
+
     def apply(self, data: T) -> tuple[T, sc.Variable]:
         """
         Apply the ROI filter to data.
@@ -113,4 +292,9 @@ class ROIFilter:
         :
             Filtered data and scale factor.
         """
-        return apply_selection(
+        return apply_selection(
+            data,
+            selection=self._selection,
+            norm=self._norm,
+            spatial_dims=self.spatial_dims,
+        )
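
Putting the roi.py pieces together, here is a hedged usage sketch of the extended `ROIFilter`; the 8×8 screen layout and the dense (time, y, x) data are made up for illustration. `spatial_dims` controls which dimensions of the incoming data are flattened into 'detector_number' when the filter is applied (so a 'time' dimension survives), and `set_roi_from_polygon` feeds the new polygon selection.

```python
import scipp as sc

from ess.reduce.live import roi

# Hypothetical projected view: 'indices' maps each (y, x) screen pixel to the
# detector_number indices that project into it (here a trivial 1:1 mapping).
sizes = {'y': 8, 'x': 8}
indices = sc.DataArray(
    sc.arange('detector_number', 64, unit=None).fold(
        dim='detector_number', sizes=sizes
    )
)

# spatial_dims tells apply() which dims of the incoming data to flatten into
# 'detector_number'; other dims (e.g. 'time') are preserved.
filt = roi.ROIFilter(indices=indices, norm=1.0, spatial_dims=('y', 'x'))
# Index-based polygon on both axes (plain floats, no units).
filt.set_roi_from_polygon({'x': [1.5, 6.5, 6.5, 1.5], 'y': [1.5, 1.5, 6.5, 6.5]})

data = sc.DataArray(sc.ones(dims=['time', 'y', 'x'], shape=[2, 8, 8]))
selected, scale = filt.apply(data)
print(selected.sizes)  # {'time': 2, 'detector_number': <pixels inside the ROI>}
```
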
ess/reduce/time_of_flight/eto_to_tof.py
CHANGED

@@ -96,14 +96,14 @@ class TofInterpolator:


 def _time_of_flight_data_histogram(
-    da: sc.DataArray, lookup:
+    da: sc.DataArray, lookup: TimeOfFlightLookupTable, ltotal: sc.Variable
 ) -> sc.DataArray:
     # In NeXus, 'time_of_flight' is the canonical name in NXmonitor, but in some files,
     # it may be called 'tof' or 'frame_time'.
     key = next(iter(set(da.coords.keys()) & {"time_of_flight", "tof", "frame_time"}))
     raw_eto = da.coords[key].to(dtype=float, copy=False)
     eto_unit = raw_eto.unit
-    pulse_period = lookup.
+    pulse_period = lookup.pulse_period.to(unit=eto_unit)

     # In histogram mode, because there is a wrap around at the end of the pulse, we
     # need to insert a bin edge at that exact location to avoid having the last bin
@@ -117,7 +117,9 @@ def _time_of_flight_data_histogram(
     etos = rebinned.coords[key]

     # Create linear interpolator
-    interp = TofInterpolator(
+    interp = TofInterpolator(
+        lookup.array, distance_unit=ltotal.unit, time_unit=eto_unit
+    )

     # Compute time-of-flight of the bin edges using the interpolator
     tofs = interp(
@@ -199,7 +201,7 @@ def _guess_pulse_stride_offset(

 def _prepare_tof_interpolation_inputs(
     da: sc.DataArray,
-    lookup:
+    lookup: TimeOfFlightLookupTable,
     ltotal: sc.Variable,
     pulse_stride_offset: int | None,
 ) -> dict:
@@ -227,15 +229,17 @@ def _prepare_tof_interpolation_inputs(
     eto_unit = elem_unit(etos)

     # Create linear interpolator
-    interp = TofInterpolator(
+    interp = TofInterpolator(
+        lookup.array, distance_unit=ltotal.unit, time_unit=eto_unit
+    )

     # Operate on events (broadcast distances to all events)
     ltotal = sc.bins_like(etos, ltotal).bins.constituents["data"]
     etos = etos.bins.constituents["data"]

     pulse_index = None
-    pulse_period = lookup.
-    pulse_stride = lookup.
+    pulse_period = lookup.pulse_period.to(unit=eto_unit)
+    pulse_stride = lookup.pulse_stride

     if pulse_stride > 1:
         # Compute a pulse index for every event: it is the index of the pulse within a
@@ -291,7 +295,7 @@ def _prepare_tof_interpolation_inputs(

 def _time_of_flight_data_events(
     da: sc.DataArray,
-    lookup:
+    lookup: TimeOfFlightLookupTable,
     ltotal: sc.Variable,
     pulse_stride_offset: int | None,
 ) -> sc.DataArray:
@@ -375,7 +379,7 @@ def monitor_ltotal_from_straight_line_approximation(

 def _compute_tof_data(
     da: sc.DataArray,
-    lookup:
+    lookup: TimeOfFlightLookupTable,
     ltotal: sc.Variable,
     pulse_stride_offset: int,
 ) -> sc.DataArray:
@@ -503,7 +507,7 @@ def detector_time_of_arrival_data(
     result = detector_data.bins.assign_coords(
         toa=sc.bins(**parts, validate_indices=False)
     )
-    return result
+    return ToaDetector[RunType](result)


 def providers() -> tuple[Callable]:
ess/reduce/time_of_flight/fakes.py
CHANGED

@@ -48,20 +48,13 @@ class FakeBeamline:
         self.source = source(pulses=self.npulses)

         # Convert the choppers to tof.Chopper
-        self.choppers = [
-
-
-
-
-                else tof_pkg.Clockwise,
-                open=ch.slit_begin,
-                close=ch.slit_end,
-                phase=ch.phase if ch.frequency.value > 0.0 else -ch.phase,
-                distance=sc.norm(ch.axle_position - source_position),
-                name=name,
+        self.choppers = []
+        for name, ch in choppers.items():
+            chop = tof_pkg.Chopper.from_diskchopper(ch, name=name)
+            chop.distance = sc.norm(
+                ch.axle_position - source_position.to(unit=ch.axle_position.unit)
             )
-
-        ]
+            self.choppers.append(chop)

         # Add detectors
         self.monitors = [
ess/reduce/time_of_flight/lut.py
CHANGED
@@ -43,6 +43,8 @@ class SimulationResults:
         For a ``tof`` simulation, this is just the position of the detector where the
         events are recorded. For a ``McStas`` simulation, this is the distance between
         the source and the event monitor.
+    choppers:
+        The parameters of the choppers used in the simulation (if any).
     """

     time_of_arrival: sc.Variable
@@ -50,6 +52,7 @@ class SimulationResults:
     wavelength: sc.Variable
     weight: sc.Variable
     distance: sc.Variable
+    choppers: DiskChoppers[AnyRun] | None = None


 NumberOfSimulatedNeutrons = NewType("NumberOfSimulatedNeutrons", int)
@@ -363,20 +366,26 @@ def make_tof_lookup_table(
                 [table.coords["event_time_offset"], frame_period],
                 dim='event_time_offset',
             ),
-            "pulse_period": pulse_period,
-            "pulse_stride": sc.scalar(pulse_stride, unit=None),
-            "distance_resolution": table.coords["distance"][1]
-            - table.coords["distance"][0],
-            "time_resolution": table.coords["event_time_offset"][1]
-            - table.coords["event_time_offset"][0],
-            "error_threshold": sc.scalar(error_threshold),
         },
     )

     # In-place masking for better performance
     _mask_large_uncertainty(table, error_threshold)

-    return TimeOfFlightLookupTable(
+    return TimeOfFlightLookupTable(
+        array=table,
+        pulse_period=pulse_period,
+        pulse_stride=pulse_stride,
+        distance_resolution=table.coords["distance"][1] - table.coords["distance"][0],
+        time_resolution=table.coords["event_time_offset"][1]
+        - table.coords["event_time_offset"][0],
+        error_threshold=error_threshold,
+        choppers=sc.DataGroup(
+            {k: sc.DataGroup(ch.as_dict()) for k, ch in simulation.choppers.items()}
+        )
+        if simulation.choppers is not None
+        else None,
+    )


 def simulate_chopper_cascade_using_tof(
@@ -412,22 +421,14 @@ def simulate_chopper_cascade_using_tof(
     """
     import tof

-    tof_choppers = [
-
-
-
-
-            else tof.Clockwise,
-            open=ch.slit_begin,
-            close=ch.slit_end,
-            phase=ch.phase if ch.frequency.value > 0.0 else -ch.phase,
-            distance=sc.norm(
-                ch.axle_position - source_position.to(unit=ch.axle_position.unit)
-            ),
-            name=name,
+    tof_choppers = []
+    for name, ch in choppers.items():
+        chop = tof.Chopper.from_diskchopper(ch, name=name)
+        chop.distance = sc.norm(
+            ch.axle_position - source_position.to(unit=ch.axle_position.unit)
         )
-
-
+        tof_choppers.append(chop)
+
     source = tof.Source(
         facility=facility, neutrons=neutrons, pulses=pulse_stride, seed=seed
     )
@@ -454,6 +455,7 @@ def simulate_chopper_cascade_using_tof(
         wavelength=events.coords["wavelength"],
         weight=events.data,
         distance=furthest_chopper.distance,
+        choppers=choppers,
     )

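
Because `make_tof_lookup_table` now attaches the simulation's chopper settings to the returned table (as nested `sc.DataGroup`s), downstream code can inspect which chopper configuration a cached table was generated with. A small sketch of such a consumer; `lut` is assumed to be a `TimeOfFlightLookupTable` returned by the workflow or loaded from disk, and the helper name is hypothetical.

```python
def describe_choppers(lut) -> None:
    # 'lut' is assumed to be a TimeOfFlightLookupTable; 'choppers' is None for
    # tables generated without chopper information (e.g. old saved tables).
    if lut.choppers is None:
        print("table was generated without chopper information")
        return
    for name, params in lut.choppers.items():  # one nested sc.DataGroup per chopper
        print(name, dict(params))
```
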
ess/reduce/time_of_flight/types.py
CHANGED

@@ -1,7 +1,9 @@
 # SPDX-License-Identifier: BSD-3-Clause
 # Copyright (c) 2025 Scipp contributors (https://github.com/scipp)

-from
+from dataclasses import asdict, dataclass
+from pathlib import Path
+from typing import Any, NewType

 import sciline as sl
 import scipp as sc
@@ -12,10 +14,37 @@ TimeOfFlightLookupTableFilename = NewType("TimeOfFlightLookupTableFilename", str
 """Filename of the time-of-flight lookup table."""


-
-
-
-
+@dataclass
+class TimeOfFlightLookupTable:
+    """
+    Lookup table giving time-of-flight as a function of distance and time of arrival.
+    """
+
+    array: sc.DataArray
+    """The lookup table data array that maps (distance, time_of_arrival) to
+    time_of_flight."""
+    pulse_period: sc.Variable
+    """Pulse period of the neutron source."""
+    pulse_stride: int
+    """Pulse stride used when generating the lookup table."""
+    distance_resolution: sc.Variable
+    """Resolution of the distance coordinate in the lookup table."""
+    time_resolution: sc.Variable
+    """Resolution of the time_of_arrival coordinate in the lookup table."""
+    error_threshold: float
+    """The table is masked with NaNs in regions where the standard deviation of the
+    time-of-flight is above this threshold."""
+    choppers: sc.DataGroup | None = None
+    """Chopper parameters used when generating the lookup table, if any. This is made
+    optional so we can still support old lookup tables without chopper info."""
+
+    def save_hdf5(self, filename: str | Path) -> None:
+        """Save the lookup table to an HDF5 file."""
+        sc.DataGroup(asdict(self)).save_hdf5(filename)
+
+    def plot(self, *args, **kwargs) -> Any:
+        """Plot the data array of the lookup table."""
+        return self.array.plot(*args, **kwargs)


 PulseStrideOffset = NewType("PulseStrideOffset", int | None)
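
A hedged sketch of the new dataclass in isolation, based on the fields above; the table contents, coordinate ranges, and chopper values are placeholders, not numbers from the package.

```python
import scipp as sc

from ess.reduce.time_of_flight.types import TimeOfFlightLookupTable

# Hypothetical tiny table: tof as a function of (distance, event_time_offset).
table = sc.DataArray(
    sc.zeros(dims=['distance', 'event_time_offset'], shape=[2, 3], unit='us'),
    coords={
        'distance': sc.linspace('distance', 20.0, 40.0, num=2, unit='m'),
        'event_time_offset': sc.linspace(
            'event_time_offset', 0.0, 71.0, num=3, unit='ms'
        ),
    },
)
lut = TimeOfFlightLookupTable(
    array=table,
    pulse_period=sc.scalar(71.0, unit='ms'),
    pulse_stride=1,
    distance_resolution=sc.scalar(20.0, unit='m'),
    time_resolution=sc.scalar(35.5, unit='ms'),
    error_threshold=0.1,
    choppers=sc.DataGroup(
        {'chopper_1': sc.DataGroup({'frequency': sc.scalar(14.0, unit='Hz')})}
    ),
)
lut.save_hdf5('lookup_table.h5')  # serialized as a DataGroup of the dataclass fields
```
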
ess/reduce/time_of_flight/workflow.py
CHANGED

@@ -17,7 +17,30 @@ from .types import (
 def load_tof_lookup_table(
     filename: TimeOfFlightLookupTableFilename,
 ) -> TimeOfFlightLookupTable:
-
+    """Load a time-of-flight lookup table from an HDF5 file."""
+    table = sc.io.load_hdf5(filename)
+
+    # Support old format where the metadata were stored as coordinates of the DataArray.
+    # Note that no chopper info was saved in the old format.
+    if isinstance(table, sc.DataArray):
+        table = {
+            "array": table.drop_coords(
+                [
+                    "pulse_period",
+                    "pulse_stride",
+                    "distance_resolution",
+                    "time_resolution",
+                    "error_threshold",
+                ]
+            ),
+            "pulse_period": table.coords["pulse_period"],
+            "pulse_stride": table.coords["pulse_stride"].value,
+            "distance_resolution": table.coords["distance_resolution"],
+            "time_resolution": table.coords["time_resolution"],
+            "error_threshold": table.coords["error_threshold"].value,
+        }
+
+    return TimeOfFlightLookupTable(**table)


 def GenericTofWorkflow(
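
A short usage sketch of the loader, assuming it is imported from ess.reduce.time_of_flight.workflow and that 'lookup_table.h5' was previously written with save_hdf5; old-format tables (a DataArray with the metadata stored as coordinates) load as well, just without chopper information.

```python
from ess.reduce.time_of_flight.workflow import load_tof_lookup_table

lut = load_tof_lookup_table("lookup_table.h5")  # hypothetical filename
print(type(lut).__name__)              # TimeOfFlightLookupTable
print(lut.pulse_stride, lut.pulse_period)
print(lut.choppers)                    # None for old-format tables (no chopper info saved)
```
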
{essreduce-25.11.6.dist-info → essreduce-25.12.0.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: essreduce
-Version: 25.
+Version: 25.12.0
 Summary: Common data reduction tools for the ESS facility
 Author: Scipp contributors
 License-Expression: BSD-3-Clause
@@ -22,15 +22,16 @@ Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: sciline>=25.11.0
 Requires-Dist: scipp>=25.04.0
-Requires-Dist: scippneutron>=25.
+Requires-Dist: scippneutron>=25.11.1
 Requires-Dist: scippnexus>=25.06.0
 Provides-Extra: test
 Requires-Dist: ipywidgets>=8.1; extra == "test"
+Requires-Dist: matplotlib>=3.10.7; extra == "test"
 Requires-Dist: numba>=0.59; extra == "test"
 Requires-Dist: pooch>=1.5; extra == "test"
 Requires-Dist: pytest>=7.0; extra == "test"
 Requires-Dist: scipy>=1.14; extra == "test"
-Requires-Dist: tof>=25.
+Requires-Dist: tof>=25.11.1; extra == "test"
 Dynamic: license-file

 [](CODE_OF_CONDUCT.md)
{essreduce-25.11.6.dist-info → essreduce-25.12.0.dist-info}/RECORD
CHANGED

@@ -10,8 +10,8 @@ ess/reduce/workflow.py,sha256=738-lcdgsORYfQ4A0UTk2IgnbVxC3jBdpscpaOFIpdc,3114
 ess/reduce/data/__init__.py,sha256=uDtqkmKA_Zwtj6II25zntz9T812XhdCn3tktYev4uyY,486
 ess/reduce/data/_registry.py,sha256=ngJMzP-AuMN0EKtws5vYSEPsv_Bn3TZjjIvNUKWQDeA,13992
 ess/reduce/live/__init__.py,sha256=jPQVhihRVNtEDrE20PoKkclKV2aBF1lS7cCHootgFgI,204
-ess/reduce/live/raw.py,sha256=
-ess/reduce/live/roi.py,sha256=
+ess/reduce/live/raw.py,sha256=z3JzKl1tOH51z1PWT3MJERactSFRXrNI_MBmpAHX71g,31094
+ess/reduce/live/roi.py,sha256=t65SfGtCtb8r-f4hkfg2I02CEnOp6Hh5Tv9qOqPOeK0,10588
 ess/reduce/live/workflow.py,sha256=bsbwvTqPhRO6mC__3b7MgU7DWwAnOvGvG-t2n22EKq8,4285
 ess/reduce/nexus/__init__.py,sha256=xXc982vZqRba4jR4z5hA2iim17Z7niw4KlS1aLFbn1Q,1107
 ess/reduce/nexus/_nexus_loader.py,sha256=5J26y_t-kabj0ik0jf3OLSYda3lDLDQhvPd2_ro7Q_0,23927
@@ -21,14 +21,14 @@ ess/reduce/nexus/types.py,sha256=g5oBBEYPH7urF1tDP0tqXtixhQN8JDpe8vmiKrPiUW0,932
 ess/reduce/nexus/workflow.py,sha256=KRzG_flkAGNCkwDGwhTjX3h3Hi4GexMEz84trMw7HIg,24944
 ess/reduce/scripts/grow_nexus.py,sha256=hET3h06M0xlJd62E3palNLFvJMyNax2kK4XyJcOhl-I,3387
 ess/reduce/time_of_flight/__init__.py,sha256=jn8x9rZ6PzyP_wK8ACd3cg9rOpDAu_IqHyTNSeKfVn0,1461
-ess/reduce/time_of_flight/eto_to_tof.py,sha256=
-ess/reduce/time_of_flight/fakes.py,sha256=
+ess/reduce/time_of_flight/eto_to_tof.py,sha256=Zh7jxah1hwHY7_O9XNmYD3IfJz6mYUKo5vtr74NWUUQ,18236
+ess/reduce/time_of_flight/fakes.py,sha256=BqpO56PQyO9ua7QlZw6xXMAPBrqjKZEM_jc-VB83CyE,4289
 ess/reduce/time_of_flight/interpolator_numba.py,sha256=wh2YS3j2rOu30v1Ok3xNHcwS7t8eEtZyZvbfXOCtgrQ,3835
 ess/reduce/time_of_flight/interpolator_scipy.py,sha256=_InoAPuMm2qhJKZQBAHOGRFqtvvuQ8TStoN7j_YgS4M,1853
-ess/reduce/time_of_flight/lut.py,sha256=
+ess/reduce/time_of_flight/lut.py,sha256=lle3Kl4AV0Z9-nxT3XwfhZS2DzHYYn0KRpUvNDlNuOk,18812
 ess/reduce/time_of_flight/resample.py,sha256=Opmi-JA4zNH725l9VB99U4O9UlM37f5ACTCGtwBcows,3718
-ess/reduce/time_of_flight/types.py,sha256=
-ess/reduce/time_of_flight/workflow.py,sha256=
+ess/reduce/time_of_flight/types.py,sha256=v7oUWY2tX1FL1FceK7EIOtRnMJevWD-kdBK04t10vlY,3082
+ess/reduce/time_of_flight/workflow.py,sha256=iaCHqY5-CxxUDrgbnOuECJm81QZZl-j0_ihXE4NaAUM,3129
 ess/reduce/widgets/__init__.py,sha256=SoSHBv8Dc3QXV9HUvPhjSYWMwKTGYZLpsWwsShIO97Q,5325
 ess/reduce/widgets/_base.py,sha256=_wN3FOlXgx_u0c-A_3yyoIH-SdUvDENGgquh9S-h5GI,4852
 ess/reduce/widgets/_binedges_widget.py,sha256=ZCQsGjYHnJr9GFUn7NjoZc1CdsnAzm_fMzyF-fTKKVY,2785
@@ -41,9 +41,9 @@ ess/reduce/widgets/_spinner.py,sha256=2VY4Fhfa7HMXox2O7UbofcdKsYG-AJGrsgGJB85nDX
 ess/reduce/widgets/_string_widget.py,sha256=iPAdfANyXHf-nkfhgkyH6gQDklia0LebLTmwi3m-iYQ,1482
 ess/reduce/widgets/_switchable_widget.py,sha256=fjKz99SKLhIF1BLgGVBSKKn3Lu_jYBwDYGeAjbJY3Q8,2390
 ess/reduce/widgets/_vector_widget.py,sha256=aTaBqCFHZQhrIoX6-sSqFWCPePEW8HQt5kUio8jP1t8,1203
-essreduce-25.
-essreduce-25.
-essreduce-25.
-essreduce-25.
-essreduce-25.
-essreduce-25.
+essreduce-25.12.0.dist-info/licenses/LICENSE,sha256=nVEiume4Qj6jMYfSRjHTM2jtJ4FGu0g-5Sdh7osfEYw,1553
+essreduce-25.12.0.dist-info/METADATA,sha256=ZsK4VI14-0O1zyzUlUAZS3PuqygkEgO7wsc5yIvjWyI,1988
+essreduce-25.12.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+essreduce-25.12.0.dist-info/entry_points.txt,sha256=PMZOIYzCifHMTe4pK3HbhxUwxjFaZizYlLD0td4Isb0,66
+essreduce-25.12.0.dist-info/top_level.txt,sha256=0JxTCgMKPLKtp14wb1-RKisQPQWX7i96innZNvHBr-s,4
+essreduce-25.12.0.dist-info/RECORD,,
File without changes
File without changes
File without changes
File without changes