essreduce-25.4.1-py3-none-any.whl → essreduce-25.5.1-py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as they appear in their public registry, and is provided for informational purposes only.
@@ -8,111 +8,82 @@ from numba import njit, prange
  def interpolate(
      x: np.ndarray,
      y: np.ndarray,
-     z: np.ndarray,
      values: np.ndarray,
      xp: np.ndarray,
      yp: np.ndarray,
-     zp: np.ndarray,
+     xoffset: np.ndarray | None,
+     deltax: float,
      fill_value: float,
      out: np.ndarray,
  ):
      """
-     Linear interpolation of data on a 3D regular grid.
+     Linear interpolation of data on a 2D regular grid.
 
      Parameters
      ----------
      x:
-         1D array of grid edges along the x-axis. They must be linspaced.
+         1D array of grid edges along the x-axis (size nx). They must be linspaced.
      y:
-         1D array of grid edges along the y-axis. They must be linspaced.
-     z:
-         1D array of grid edges along the z-axis. They must be linspaced.
+         1D array of grid edges along the y-axis (size ny). They must be linspaced.
      values:
-         3D array of values on the grid. The shape must be (nz, ny, nx).
+         2D array of values on the grid. The shape must be (ny, nx).
      xp:
          1D array of x-coordinates where to interpolate (size N).
      yp:
          1D array of y-coordinates where to interpolate (size N).
-     zp:
-         1D array of z-coordinates where to interpolate (size N).
+     xoffset:
+         1D array of integer offsets to apply to the x-coordinates (size N).
+     deltax:
+         Multiplier to apply to the integer offsets (i.e. the step size).
      fill_value:
          Value to use for points outside of the grid.
      out:
          1D array where the interpolated values will be stored (size N).
      """
-     if not (len(xp) == len(yp) == len(zp) == len(out)):
+     if not (len(xp) == len(yp) == len(out)):
          raise ValueError("Interpolator: all input arrays must have the same size.")
 
      nx = len(x)
      ny = len(y)
-     nz = len(z)
      npoints = len(xp)
      xmin = x[0]
      xmax = x[nx - 1]
      ymin = y[0]
      ymax = y[ny - 1]
-     zmin = z[0]
-     zmax = z[nz - 1]
      dx = x[1] - xmin
      dy = y[1] - ymin
-     dz = z[1] - zmin
 
      one_over_dx = 1.0 / dx
      one_over_dy = 1.0 / dy
-     one_over_dz = 1.0 / dz
-     norm = one_over_dx * one_over_dy * one_over_dz
+     norm = one_over_dx * one_over_dy
 
      for i in prange(npoints):
-         xx = xp[i]
+         xx = xp[i] + (xoffset[i] * deltax if xoffset is not None else 0.0)
          yy = yp[i]
-         zz = zp[i]
-
-         if (
-             (xx < xmin)
-             or (xx > xmax)
-             or (yy < ymin)
-             or (yy > ymax)
-             or (zz < zmin)
-             or (zz > zmax)
-         ):
+
+         if (xx < xmin) or (xx > xmax) or (yy < ymin) or (yy > ymax):
              out[i] = fill_value
 
          else:
              ix = nx - 2 if xx == xmax else int((xx - xmin) * one_over_dx)
             iy = ny - 2 if yy == ymax else int((yy - ymin) * one_over_dy)
-             iz = nz - 2 if zz == zmax else int((zz - zmin) * one_over_dz)
 
              x1 = x[ix]
              x2 = x[ix + 1]
              y1 = y[iy]
              y2 = y[iy + 1]
-             z1 = z[iz]
-             z2 = z[iz + 1]
-
-             a111 = values[iz, iy, ix]
-             a211 = values[iz, iy, ix + 1]
-             a121 = values[iz, iy + 1, ix]
-             a221 = values[iz, iy + 1, ix + 1]
-             a112 = values[iz + 1, iy, ix]
-             a212 = values[iz + 1, iy, ix + 1]
-             a122 = values[iz + 1, iy + 1, ix]
-             a222 = values[iz + 1, iy + 1, ix + 1]
+
+             a11 = values[iy, ix]
+             a21 = values[iy, ix + 1]
+             a12 = values[iy + 1, ix]
+             a22 = values[iy + 1, ix + 1]
 
              x2mxx = x2 - xx
              xxmx1 = xx - x1
-             y2myy = y2 - yy
-             yymy1 = yy - y1
+
              out[i] = (
-                 (z2 - zz)
-                 * (
-                     y2myy * (x2mxx * a111 + xxmx1 * a211)
-                     + yymy1 * (x2mxx * a121 + xxmx1 * a221)
-                 )
-                 + (zz - z1)
-                 * (
-                     y2myy * (x2mxx * a112 + xxmx1 * a212)
-                     + yymy1 * (x2mxx * a122 + xxmx1 * a222)
-                 )
+                 (y2 - yy) * (x2mxx * a11 + xxmx1 * a21)
+                 + (yy - y1) * (x2mxx * a12 + xxmx1 * a22)
              ) * norm
 
 
@@ -121,12 +92,11 @@ class Interpolator:
          self,
          time_edges: np.ndarray,
          distance_edges: np.ndarray,
-         pulse_edges: np.ndarray,
          values: np.ndarray,
          fill_value: float = np.nan,
      ):
          """
-         Interpolator for 3D regular grid data (Numba implementation).
+         Interpolator for 2D regular grid data (Numba implementation).
 
          Parameters
          ----------
@@ -134,31 +104,32 @@ class Interpolator:
              1D array of time edges.
          distance_edges:
              1D array of distance edges.
-         pulse_edges:
-             1D array of pulse edges.
          values:
-             3D array of values on the grid. The shape must be (nz, ny, nx).
+             2D array of values on the grid. The shape must be (ny, nx).
          fill_value:
              Value to use for points outside of the grid.
          """
          self.time_edges = time_edges
          self.distance_edges = distance_edges
-         self.pulse_edges = pulse_edges
          self.values = values
          self.fill_value = fill_value
 
      def __call__(
-         self, times: np.ndarray, distances: np.ndarray, pulse_indices: np.ndarray
+         self,
+         times: np.ndarray,
+         distances: np.ndarray,
+         pulse_period: float = 0.0,
+         pulse_index: np.ndarray | None = None,
      ) -> np.ndarray:
          out = np.empty_like(times)
          interpolate(
              x=self.time_edges,
              y=self.distance_edges,
-             z=self.pulse_edges,
              values=self.values,
              xp=times,
              yp=distances,
-             zp=pulse_indices,
+             xoffset=pulse_index,
+             deltax=pulse_period,
              fill_value=self.fill_value,
              out=out,
          )
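
The rewritten kernel is a standard bilinear interpolation on a regular grid, with the optional xoffset * deltax shift folded into the x-coordinate before the lookup. For reference, a plain-NumPy sketch of what a single in-bounds point evaluates to; the helper and variable names below are illustrative and not part of the package:

    import numpy as np

    def bilinear_point(x, y, values, xx, yy):
        # Assumes evenly spaced grid edges and an in-bounds point (xx, yy).
        dx, dy = x[1] - x[0], y[1] - y[0]
        ix = min(int((xx - x[0]) / dx), len(x) - 2)
        iy = min(int((yy - y[0]) / dy), len(y) - 2)
        x1, x2, y1, y2 = x[ix], x[ix + 1], y[iy], y[iy + 1]
        a11, a21 = values[iy, ix], values[iy, ix + 1]
        a12, a22 = values[iy + 1, ix], values[iy + 1, ix + 1]
        return (
            (y2 - yy) * ((x2 - xx) * a11 + (xx - x1) * a21)
            + (yy - y1) * ((x2 - xx) * a12 + (xx - x1) * a22)
        ) / (dx * dy)

    # Bilinear interpolation is exact for a bilinear function such as x * y:
    x = np.linspace(0.0, 1.0, 11)
    y = np.linspace(0.0, 1.0, 6)
    values = np.outer(y, x)  # shape (ny, nx), values[iy, ix] = y[iy] * x[ix]
    assert abs(bilinear_point(x, y, values, 0.37, 0.52) - 0.37 * 0.52) < 1e-12
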
@@ -9,7 +9,6 @@ class Interpolator:
          self,
          time_edges: np.ndarray,
          distance_edges: np.ndarray,
-         pulse_edges: np.ndarray,
          values: np.ndarray,
          method: str = "linear",
          bounds_error: bool = False,
@@ -17,18 +16,16 @@ class Interpolator:
          **kwargs,
      ):
          """
-         Interpolator for 3D regular grid data (SciPy implementation).
+         Interpolator for 2D regular grid data (SciPy implementation).
 
          Parameters
          ----------
          time_edges:
-             1D array of time edges.
+             1D array of time edges (length N_time).
          distance_edges:
-             1D array of distance edges.
-         pulse_edges:
-             1D array of pulse edges.
+             1D array of distance edges (length N_dist).
          values:
-             3D array of values on the grid. The shape must be (nz, ny, nx).
+             2D array of values on the grid. The shape must be (N_dist, N_time).
          method:
              Method of interpolation. Default is "linear".
          bounds_error:
@@ -42,11 +39,7 @@ class Interpolator:
          from scipy.interpolate import RegularGridInterpolator
 
          self._interp = RegularGridInterpolator(
-             (
-                 pulse_edges,
-                 distance_edges,
-                 time_edges,
-             ),
+             (distance_edges, time_edges),
              values,
              method=method,
              bounds_error=bounds_error,
@@ -55,6 +48,12 @@ class Interpolator:
          )
 
      def __call__(
-         self, times: np.ndarray, distances: np.ndarray, pulse_indices: np.ndarray
+         self,
+         times: np.ndarray,
+         distances: np.ndarray,
+         pulse_period: float = 0.0,
+         pulse_index: np.ndarray | None = None,
      ) -> np.ndarray:
-         return self._interp((pulse_indices, distances, times))
+         if pulse_index is not None:
+             times = times + (pulse_index * pulse_period)
+         return self._interp((distances, times))
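
Both implementations now expose the same two-dimensional call and fold the former pulse dimension into the time axis as pulse_index * pulse_period. A minimal usage sketch of the new signature; the module path follows the wheel's RECORD, while grid sizes, units and data values are illustrative assumptions:

    import numpy as np
    from ess.reduce.time_of_flight.interpolator_scipy import Interpolator

    time_edges = np.linspace(0.0, 142_000.0, 201)         # spans two pulse periods (illustrative)
    distance_edges = np.linspace(60.0, 80.0, 51)          # Ltotal edges (illustrative)
    values = np.random.default_rng(0).random((51, 201))   # shape (N_dist, N_time)

    interp = Interpolator(time_edges, distance_edges, values)

    times = np.array([1_000.0, 2_500.0])
    distances = np.array([65.0, 72.0])
    # Events from later pulses in a stride are shifted by pulse_index * pulse_period
    # before the 2D lookup, replacing the old third (pulse) grid dimension.
    tof = interp(times, distances, pulse_period=71_000.0, pulse_index=np.array([0, 1]))
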
@@ -3,9 +3,11 @@
  from collections.abc import Mapping
 
  import scipp as sc
+ import scippnexus as snx
  from scippneutron.chopper import DiskChopper
 
- from .types import SimulationResults
+ from ..nexus.types import Choppers, Position, SampleRun
+ from .types import NumberOfSimulatedNeutrons, SimulationResults
 
 
  def simulate_beamline(
@@ -82,3 +84,25 @@ def simulate_beamline(
          weight=events.data,
          distance=furthest_chopper.distance,
      )
+
+
+ def simulate_chopper_cascade_using_tof(
+     choppers: Choppers[SampleRun],
+     neutrons: NumberOfSimulatedNeutrons,
+     source_position: Position[snx.NXsource, SampleRun],
+ ) -> SimulationResults:
+     """
+     Simulate neutrons traveling through the chopper cascade using the ``tof`` package.
+
+     Parameters
+     ----------
+     choppers:
+         Chopper settings.
+     neutrons:
+         Number of neutrons to simulate.
+     source_position:
+         Position of the source.
+     """
+     return simulate_beamline(
+         choppers=choppers, neutrons=neutrons, source_position=source_position
+     )
@@ -4,12 +4,10 @@
  from dataclasses import dataclass
  from typing import NewType
 
+ import sciline as sl
  import scipp as sc
 
- Ltotal = NewType("Ltotal", sc.Variable)
- """
- Total length of the flight path from the source to the detector.
- """
+ from ..nexus.types import MonitorType, RunType
 
 
  @dataclass
@@ -48,6 +46,12 @@ class SimulationResults:
      distance: sc.Variable
 
 
+ NumberOfSimulatedNeutrons = NewType("NumberOfSimulatedNeutrons", int)
+ """
+ Number of neutrons simulated in the simulation that is used to create the lookup table.
+ This is typically a large number, e.g., 1e6 or 1e7.
+ """
+
  LtotalRange = NewType("LtotalRange", tuple[sc.Variable, sc.Variable])
  """
  Range (min, max) of the total length of the flight path from the source to the detector.
@@ -80,6 +84,10 @@ resolution in the lookup table will be at least the supplied value here, but may
  smaller if the pulse period is not an integer multiple of the time resolution.
  """
 
+ TimeOfFlightLookupTableFilename = NewType("TimeOfFlightLookupTableFilename", str)
+ """Filename of the time-of-flight lookup table."""
+
+
  TimeOfFlightLookupTable = NewType("TimeOfFlightLookupTable", sc.DataArray)
  """
  Lookup table giving time-of-flight as a function of distance and time of arrival.
@@ -107,30 +115,58 @@ When pulse-skipping, the offset of the first pulse in the stride. This is typica
  zero but can be a small integer < pulse_stride. If None, a guess is made.
  """
 
- RawData = NewType("RawData", sc.DataArray)
- """
- Raw detector data loaded from a NeXus file, e.g., NXdetector containing NXevent_data.
- """
 
- TofData = NewType("TofData", sc.DataArray)
- """
- Detector data with time-of-flight coordinate.
- """
+ class DetectorLtotal(sl.Scope[RunType, sc.Variable], sc.Variable):
+     """Total path length of neutrons from source to detector (L1 + L2)."""
 
- ResampledTofData = NewType("ResampledTofData", sc.DataArray)
- """
- Histogrammed detector data with time-of-flight coordinate, that has been resampled.
 
- Histogrammed data that has been converted to `tof` will typically have
- unsorted bin edges (due to either wrapping of `time_of_flight` or wavelength
- overlap between subframes).
- We thus resample the data to ensure that the bin edges are sorted.
- It makes use of the ``to_events`` helper which generates a number of events in each
- bin with a uniform distribution. The new events are then histogrammed using a set of
- sorted bin edges to yield a new histogram with sorted bin edges.
+ class MonitorLtotal(sl.Scope[RunType, MonitorType, sc.Variable], sc.Variable):
+     """Total path length of neutrons from source to monitor."""
 
- WARNING:
- This function is highly experimental, has limitations and should be used with
- caution. It is a workaround to the issue that rebinning data with unsorted bin
- edges is not supported in scipp.
- """
+
+ class DetectorTofData(sl.Scope[RunType, sc.DataArray], sc.DataArray):
+     """Detector data with time-of-flight coordinate."""
+
+
+ class MonitorTofData(sl.Scope[RunType, MonitorType, sc.DataArray], sc.DataArray):
+     """Monitor data with time-of-flight coordinate."""
+
+
+ class ResampledDetectorTofData(sl.Scope[RunType, sc.DataArray], sc.DataArray):
+     """
+     Histogrammed detector data with time-of-flight coordinate, that has been resampled.
+
+     Histogrammed data that has been converted to `tof` will typically have
+     unsorted bin edges (due to either wrapping of `time_of_flight` or wavelength
+     overlap between subframes).
+     We thus resample the data to ensure that the bin edges are sorted.
+     It makes use of the ``to_events`` helper which generates a number of events in each
+     bin with a uniform distribution. The new events are then histogrammed using a set of
+     sorted bin edges to yield a new histogram with sorted bin edges.
+
+     WARNING:
+     This function is highly experimental, has limitations and should be used with
+     caution. It is a workaround to the issue that rebinning data with unsorted bin
+     edges is not supported in scipp.
+     """
+
+
+ class ResampledMonitorTofData(
+     sl.Scope[RunType, MonitorType, sc.DataArray], sc.DataArray
+ ):
+     """
+     Histogrammed monitor data with time-of-flight coordinate, that has been resampled.
+
+     Histogrammed data that has been converted to `tof` will typically have
+     unsorted bin edges (due to either wrapping of `time_of_flight` or wavelength
+     overlap between subframes).
+     We thus resample the data to ensure that the bin edges are sorted.
+     It makes use of the ``to_events`` helper which generates a number of events in each
+     bin with a uniform distribution. The new events are then histogrammed using a set of
+     sorted bin edges to yield a new histogram with sorted bin edges.
+
+     WARNING:
+     This function is highly experimental, has limitations and should be used with
+     caution. It is a workaround to the issue that rebinning data with unsorted bin
+     edges is not supported in scipp.
+     """
@@ -0,0 +1,94 @@
+ # SPDX-License-Identifier: BSD-3-Clause
+ # Copyright (c) 2025 Scipp contributors (https://github.com/scipp)
+ from collections.abc import Iterable
+ from enum import Enum, auto
+
+ import sciline
+ import scipp as sc
+
+ from ..nexus import GenericNeXusWorkflow
+ from ..utils import prune_type_vars
+ from . import eto_to_tof, simulation
+ from .types import TimeOfFlightLookupTable, TimeOfFlightLookupTableFilename
+
+
+ class TofLutProvider(Enum):
+     """Provider for the time-of-flight lookup table."""
+
+     FILE = auto()  # From file
+     TOF = auto()  # Computed with 'tof' package from chopper settings
+     MCSTAS = auto()  # McStas simulation (not implemented yet)
+
+
+ def load_tof_lookup_table(
+     filename: TimeOfFlightLookupTableFilename,
+ ) -> TimeOfFlightLookupTable:
+     return TimeOfFlightLookupTable(sc.io.load_hdf5(filename))
+
+
+ def GenericTofWorkflow(
+     *,
+     run_types: Iterable[sciline.typing.Key] | None = None,
+     monitor_types: Iterable[sciline.typing.Key] | None = None,
+     tof_lut_provider: TofLutProvider = TofLutProvider.FILE,
+ ) -> sciline.Pipeline:
+     """
+     Generic workflow for computing the neutron time-of-flight for detector and monitor
+     data.
+     This workflow builds on the ``GenericNeXusWorkflow`` and computes time-of-flight
+     from a lookup table that is created from the chopper settings, detector Ltotal and
+     the neutron time-of-arrival.
+
+     It is possible to limit which run types and monitor types
+     are supported by the returned workflow.
+     This is useful to reduce the size of the workflow and make it easier to inspect.
+     Make sure to add *all* required run types and monitor types when using this feature.
+
+     Attention
+     ---------
+     Filtering by run type and monitor type does not work with nested type vars.
+     E.g., if you have a type like ``Outer[Inner[RunType]]``, this type and its
+     provider will be removed.
+
+     Parameters
+     ----------
+     run_types:
+         List of run types to include in the workflow. If not provided, all run types
+         are included.
+         Must be a possible value of :class:`ess.reduce.nexus.types.RunType`.
+     monitor_types:
+         List of monitor types to include in the workflow. If not provided, all monitor
+         types are included.
+         Must be a possible value of :class:`ess.reduce.nexus.types.MonitorType`.
+     tof_lut_provider:
+         Specifies how the time-of-flight lookup table is provided:
+         - FILE: Read from a file
+         - TOF: Computed from chopper settings using the 'tof' package
+         - MCSTAS: From McStas simulation (not implemented yet)
+
+     Returns
+     -------
+     :
+         The workflow.
+     """
+     wf = GenericNeXusWorkflow(run_types=run_types, monitor_types=monitor_types)
+
+     for provider in eto_to_tof.providers():
+         wf.insert(provider)
+
+     if tof_lut_provider == TofLutProvider.FILE:
+         wf.insert(load_tof_lookup_table)
+     else:
+         wf.insert(eto_to_tof.compute_tof_lookup_table)
+         if tof_lut_provider == TofLutProvider.TOF:
+             wf.insert(simulation.simulate_chopper_cascade_using_tof)
+         if tof_lut_provider == TofLutProvider.MCSTAS:
+             raise NotImplementedError("McStas simulation not implemented yet")
+
+     for key, value in eto_to_tof.default_parameters().items():
+         wf[key] = value
+
+     if run_types is not None or monitor_types is not None:
+         prune_type_vars(wf, run_types=run_types, monitor_types=monitor_types)
+
+     return wf
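
A sketch of how the new factory might be used; the filename and parameter values are illustrative assumptions, and the final compute call is indicative only since it additionally needs the usual NeXus inputs from GenericNeXusWorkflow:

    from ess.reduce.nexus.types import SampleRun
    from ess.reduce.time_of_flight.types import TimeOfFlightLookupTableFilename
    from ess.reduce.time_of_flight.workflow import GenericTofWorkflow, TofLutProvider

    # Default: read the lookup table from a file.
    wf = GenericTofWorkflow(run_types=[SampleRun], tof_lut_provider=TofLutProvider.FILE)
    wf[TimeOfFlightLookupTableFilename] = "tof_lookup.h5"  # illustrative filename

    # Alternative: compute the table from the chopper settings via the 'tof' package
    # (requires NumberOfSimulatedNeutrons and the chopper/source inputs to be set):
    # wf = GenericTofWorkflow(run_types=[SampleRun], tof_lut_provider=TofLutProvider.TOF)

    # Detector data with a time-of-flight coordinate is then requested per run type:
    # tof_data = wf.compute(DetectorTofData[SampleRun])
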
ess/reduce/utils.py ADDED
@@ -0,0 +1,36 @@
+ # SPDX-License-Identifier: BSD-3-Clause
+ # Copyright (c) 2025 Scipp contributors (https://github.com/scipp)
+
+ from collections.abc import Iterable
+ from typing import Any
+
+ import sciline
+
+ from .nexus.types import MonitorType, RunType
+
+
+ def prune_type_vars(
+     workflow: sciline.Pipeline,
+     *,
+     run_types: Iterable[sciline.typing.Key] | None,
+     monitor_types: Iterable[sciline.typing.Key] | None,
+ ) -> None:
+     # Remove all nodes that use a run type or monitor types that is
+     # not listed in the function arguments.
+     excluded_run_types = excluded_type_args(RunType, run_types)
+     excluded_monitor_types = excluded_type_args(MonitorType, monitor_types)
+     excluded_types = excluded_run_types | excluded_monitor_types
+
+     graph = workflow.underlying_graph
+     to_remove = [
+         node for node in graph if excluded_types & set(getattr(node, "__args__", set()))
+     ]
+     graph.remove_nodes_from(to_remove)
+
+
+ def excluded_type_args(
+     type_var: Any, keep: Iterable[sciline.typing.Key] | None
+ ) -> set[sciline.typing.Key]:
+     if keep is None:
+         return set()
+     return set(type_var.__constraints__) - set(keep)
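
The pruning keys off the constraints of the run/monitor type variables: everything not explicitly kept is excluded, and graph nodes whose type arguments intersect the excluded set are dropped. A self-contained illustration of excluded_type_args semantics using a hypothetical type variable standing in for the package's RunType:

    from typing import NewType, TypeVar

    Sample = NewType("Sample", int)          # hypothetical stand-ins for the
    Background = NewType("Background", int)  # package's run types
    Run = TypeVar("Run", Sample, Background)

    # Keeping only Sample excludes Background, so any node parametrized with
    # Background would be removed from the workflow graph.
    assert set(Run.__constraints__) - {Sample} == {Background}
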
ess/reduce/workflow.py CHANGED
@@ -53,12 +53,14 @@ def get_typical_outputs(pipeline: Pipeline) -> tuple[Key, ...]:
      if (typical_outputs := getattr(pipeline, "typical_outputs", None)) is None:
          graph = pipeline.underlying_graph
          sink_nodes = [node for node, degree in graph.out_degree if degree == 0]
-         return sorted(_with_pretty_names(sink_nodes))
+         return sorted(_with_pretty_names(sink_nodes), key=lambda x: x[0])
      return _with_pretty_names(typical_outputs)
 
 
  def get_possible_outputs(pipeline: Pipeline) -> tuple[Key, ...]:
-     return sorted(_with_pretty_names(tuple(pipeline.underlying_graph.nodes)))
+     return sorted(
+         _with_pretty_names(tuple(pipeline.underlying_graph.nodes)), key=lambda x: x[0]
+     )
 
 
  def _with_pretty_names(outputs: Sequence[Key]) -> tuple[tuple[str, Key], ...]:
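
With the explicit key, sorting compares only the pretty-name element of each (name, key) pair, so the sciline keys themselves (types) are never compared. A minimal illustration with made-up pairs:

    pairs = [("Two", int), ("One", str)]  # made-up (pretty_name, key) pairs
    assert [name for name, _ in sorted(pairs, key=lambda x: x[0])] == ["One", "Two"]
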
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: essreduce
- Version: 25.4.1
+ Version: 25.5.1
  Summary: Common data reduction tools for the ESS facility
  Author: Scipp contributors
  License: BSD 3-Clause License
@@ -61,7 +61,7 @@ Requires-Dist: numba; extra == "test"
  Requires-Dist: pooch; extra == "test"
  Requires-Dist: pytest; extra == "test"
  Requires-Dist: scipy>=1.7.0; extra == "test"
- Requires-Dist: tof>=25.01.2; extra == "test"
+ Requires-Dist: tof>=25.05.0; extra == "test"
  Dynamic: license-file
 
  [![Contributor Covenant](https://img.shields.io/badge/Contributor%20Covenant-2.1-4baaaa.svg)](CODE_OF_CONDUCT.md)
@@ -6,7 +6,8 @@ ess/reduce/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  ess/reduce/streaming.py,sha256=TBttQV5WdSpUKh38J0pdv53seMWtUFswxd6-ltaZb_M,17403
  ess/reduce/ui.py,sha256=zmorAbDwX1cU3ygDT--OP58o0qU7OBcmJz03jPeYSLA,10884
  ess/reduce/uncertainty.py,sha256=LR4O6ApB6Z-W9gC_XW0ajupl8yFG-du0eee1AX_R-gk,6990
- ess/reduce/workflow.py,sha256=sL34T_2Cjl_8iFlegujxI9VyOUwo6erVC8pOXnfWgYw,3060
+ ess/reduce/utils.py,sha256=RBAfJRNil6JjVF-jPaxeL0ssEEfPBBQEZ3ObEorpDLo,1132
+ ess/reduce/workflow.py,sha256=738-lcdgsORYfQ4A0UTk2IgnbVxC3jBdpscpaOFIpdc,3114
  ess/reduce/live/__init__.py,sha256=jPQVhihRVNtEDrE20PoKkclKV2aBF1lS7cCHootgFgI,204
  ess/reduce/live/raw.py,sha256=66qV0G2rP8gK5tXuk-syTlDLE2jT3ehfmSnET7Xzfd0,24392
  ess/reduce/live/roi.py,sha256=Hs-pW98k41WU6Kl3UQ41kQawk80c2QNOQ_WNctLzDPE,3795
@@ -15,17 +16,18 @@ ess/reduce/nexus/__init__.py,sha256=59bxKkNYg8DYcSykNvH6nCa5SYchJC4SbgZEKhkNdYc,
  ess/reduce/nexus/_nexus_loader.py,sha256=5N48AMJx1AaFZb6WZPPbVKUlXyFMVVtZrn7Bae57O3A,19842
  ess/reduce/nexus/json_generator.py,sha256=ME2Xn8L7Oi3uHJk9ZZdCRQTRX-OV_wh9-DJn07Alplk,2529
  ess/reduce/nexus/json_nexus.py,sha256=QrVc0p424nZ5dHX9gebAJppTw6lGZq9404P_OFl1giA,10282
- ess/reduce/nexus/types.py,sha256=vTQD4oQ5JKBHAYy9LWFICSo-dhVi3wX5IinMgjRDtF8,9806
- ess/reduce/nexus/workflow.py,sha256=EiD6-58eGwoN5fbo47UTZy_oYFitCbwlIH-xqDOSp4c,24326
+ ess/reduce/nexus/types.py,sha256=-pj8PpHu7QJrjrOOQ-VD9QhYh1L92onkX4xtmY3mgXE,9645
+ ess/reduce/nexus/workflow.py,sha256=hqK58dsr8KtQn065-PS4Eiyir90qnZ3GQNyNDupOg4I,23084
  ess/reduce/scripts/grow_nexus.py,sha256=hET3h06M0xlJd62E3palNLFvJMyNax2kK4XyJcOhl-I,3387
- ess/reduce/time_of_flight/__init__.py,sha256=TSHfyoROwFhM2k3jHzamw3zeb0OQOaiuvgCgDEPEQ_g,1097
- ess/reduce/time_of_flight/eto_to_tof.py,sha256=Nq2gx7aejoZ_ExLTr9I6KZMqDxCKAx1PpGHslpNXkKU,25271
- ess/reduce/time_of_flight/fakes.py,sha256=REyHkJsSSq2_l5UOtpsv2aKkhCuro_i3KpVsxxITbW0,4470
- ess/reduce/time_of_flight/interpolator_numba.py,sha256=AgB2R8iw-IOb3YXLWTQVBflhWq5qgb7aqfvDExwLRW8,4682
- ess/reduce/time_of_flight/interpolator_scipy.py,sha256=sRJj2ncBiUMv6g9h-MJzI9xyY0Ir0degpAv6FIeSMBw,1834
- ess/reduce/time_of_flight/simulation.py,sha256=cIF_nWkLQlcWUCW2_wvWBU2ocg_8CSfOnfkoqdLdUgs,2923
+ ess/reduce/time_of_flight/__init__.py,sha256=UxMvY4aFkhZQmIbGSo4FBpBGRD2wDJbczLMVqcEhCSg,1583
+ ess/reduce/time_of_flight/eto_to_tof.py,sha256=JCu7C3AmJnB7GDJrL76oPjgxGesp67nct9xXRp3O8E4,28204
+ ess/reduce/time_of_flight/fakes.py,sha256=0gtbSX3ZQilaM4ZP5dMr3fqbnhpyoVsZX2YEb8GgREE,4489
+ ess/reduce/time_of_flight/interpolator_numba.py,sha256=wh2YS3j2rOu30v1Ok3xNHcwS7t8eEtZyZvbfXOCtgrQ,3835
+ ess/reduce/time_of_flight/interpolator_scipy.py,sha256=_InoAPuMm2qhJKZQBAHOGRFqtvvuQ8TStoN7j_YgS4M,1853
+ ess/reduce/time_of_flight/simulation.py,sha256=nJe-pkVvG9V6VdfB0_HyVYQoOSNJSMo_QydCHHW1dqM,3624
  ess/reduce/time_of_flight/to_events.py,sha256=w9mHpnWd3vwN2ouob-GK_1NPrTjCaOzPuC2QuEey-m0,4342
- ess/reduce/time_of_flight/types.py,sha256=Iv1XGLbrZ9bD4CPAVhsIPkAaB46YC7l7yf5XweljLqk,5047
+ ess/reduce/time_of_flight/types.py,sha256=OQeMYNN7QinXs_HDcoE6kkh_xNcyD0dEJWtnHQy5-uA,6675
+ ess/reduce/time_of_flight/workflow.py,sha256=ooSVwbL0hPBBVCfuTsAVaGEENs9P4kDN-FlH42NVzJQ,3427
  ess/reduce/widgets/__init__.py,sha256=SoSHBv8Dc3QXV9HUvPhjSYWMwKTGYZLpsWwsShIO97Q,5325
  ess/reduce/widgets/_base.py,sha256=_wN3FOlXgx_u0c-A_3yyoIH-SdUvDENGgquh9S-h5GI,4852
  ess/reduce/widgets/_binedges_widget.py,sha256=ZCQsGjYHnJr9GFUn7NjoZc1CdsnAzm_fMzyF-fTKKVY,2785
@@ -38,9 +40,9 @@ ess/reduce/widgets/_spinner.py,sha256=2VY4Fhfa7HMXox2O7UbofcdKsYG-AJGrsgGJB85nDX
  ess/reduce/widgets/_string_widget.py,sha256=iPAdfANyXHf-nkfhgkyH6gQDklia0LebLTmwi3m-iYQ,1482
  ess/reduce/widgets/_switchable_widget.py,sha256=fjKz99SKLhIF1BLgGVBSKKn3Lu_jYBwDYGeAjbJY3Q8,2390
  ess/reduce/widgets/_vector_widget.py,sha256=aTaBqCFHZQhrIoX6-sSqFWCPePEW8HQt5kUio8jP1t8,1203
- essreduce-25.4.1.dist-info/licenses/LICENSE,sha256=nVEiume4Qj6jMYfSRjHTM2jtJ4FGu0g-5Sdh7osfEYw,1553
- essreduce-25.4.1.dist-info/METADATA,sha256=_E84IwG_gnTsMoorflvf6T4K5oJB7IjpsUUTB4bhVh8,3768
- essreduce-25.4.1.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
- essreduce-25.4.1.dist-info/entry_points.txt,sha256=PMZOIYzCifHMTe4pK3HbhxUwxjFaZizYlLD0td4Isb0,66
- essreduce-25.4.1.dist-info/top_level.txt,sha256=0JxTCgMKPLKtp14wb1-RKisQPQWX7i96innZNvHBr-s,4
- essreduce-25.4.1.dist-info/RECORD,,
+ essreduce-25.5.1.dist-info/licenses/LICENSE,sha256=nVEiume4Qj6jMYfSRjHTM2jtJ4FGu0g-5Sdh7osfEYw,1553
+ essreduce-25.5.1.dist-info/METADATA,sha256=8VVFEjGw_n02K81UZs5IexKEOuN4IyNfaQybrzKn4j0,3768
+ essreduce-25.5.1.dist-info/WHEEL,sha256=DnLRTWE75wApRYVsjgc6wsVswC54sMSJhAEd4xhDpBk,91
+ essreduce-25.5.1.dist-info/entry_points.txt,sha256=PMZOIYzCifHMTe4pK3HbhxUwxjFaZizYlLD0td4Isb0,66
+ essreduce-25.5.1.dist-info/top_level.txt,sha256=0JxTCgMKPLKtp14wb1-RKisQPQWX7i96innZNvHBr-s,4
+ essreduce-25.5.1.dist-info/RECORD,,
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (78.1.0)
+ Generator: setuptools (80.4.0)
  Root-Is-Purelib: true
  Tag: py3-none-any