essreduce 25.3.1__py3-none-any.whl → 25.4.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ess/reduce/live/raw.py CHANGED
@@ -380,7 +380,8 @@ class RollingDetectorView(Detector):
380
380
  pixel_noise = sc.scalar(0.0, unit='m')
381
381
  noise_replica_count = 0
382
382
  else:
383
- noise_replica_count = 16
383
+ # Unclear what a good number is, could be made configurable.
384
+ noise_replica_count = 4
384
385
  wf = GenericNeXusWorkflow(run_types=[SampleRun], monitor_types=[])
385
386
  wf[RollingDetectorViewWindow] = window
386
387
  if projection == 'cylinder_mantle_z':
@@ -606,9 +607,14 @@ def position_with_noisy_replicas(
606
607
  noise_dim = position_noise.dim
607
608
  size = position.size * replicas
608
609
  # "Paint" the short array of noise on top of the (replicated) position data.
609
- noise = sc.concat(
610
- [position_noise] * ceil(size / position_noise.size), dim=noise_dim
611
- )[:size].fold(dim=noise_dim, sizes={'replica': replicas, **position.sizes})
610
+ noise = (
611
+ sc.broadcast(
612
+ position_noise,
613
+ sizes={'dummy': ceil(size / position_noise.size), **position_noise.sizes},
614
+ )
615
+ .flatten(to=noise_dim)[:size]
616
+ .fold(dim=noise_dim, sizes={'replica': replicas, **position.sizes})
617
+ )
612
618
  return sc.concat([position, noise + position], dim='replica')
613
619
 
614
620
 
ess/reduce/nexus/types.py CHANGED
@@ -198,19 +198,15 @@ NeXusDetectorName = NeXusName[snx.NXdetector]
198
198
  """Name of a detector (bank) in a NeXus file."""
199
199
 
200
200
 
201
- class NeXusComponent(
202
- sciline.ScopeTwoParams[Component, RunType, sc.DataGroup], sc.DataGroup
203
- ):
201
+ class NeXusComponent(sciline.Scope[Component, RunType, sc.DataGroup], sc.DataGroup):
204
202
  """Raw data from a NeXus component."""
205
203
 
206
204
 
207
- class AllNeXusComponents(
208
- sciline.ScopeTwoParams[Component, RunType, sc.DataGroup], sc.DataGroup
209
- ):
205
+ class AllNeXusComponents(sciline.Scope[Component, RunType, sc.DataGroup], sc.DataGroup):
210
206
  """Raw data from all NeXus components of one class."""
211
207
 
212
208
 
213
- class NeXusData(sciline.ScopeTwoParams[Component, RunType, sc.DataArray], sc.DataArray):
209
+ class NeXusData(sciline.Scope[Component, RunType, sc.DataArray], sc.DataArray):
214
210
  """
215
211
  Data array loaded from an NXevent_data or NXdata group.
216
212
 
@@ -218,7 +214,7 @@ class NeXusData(sciline.ScopeTwoParams[Component, RunType, sc.DataArray], sc.Dat
218
214
  """
219
215
 
220
216
 
221
- class Position(sciline.ScopeTwoParams[Component, RunType, sc.Variable], sc.Variable):
217
+ class Position(sciline.Scope[Component, RunType, sc.Variable], sc.Variable):
222
218
  """Position of a component such as source, sample, monitor, or detector."""
223
219
 
224
220
 
@@ -227,7 +223,7 @@ class DetectorPositionOffset(sciline.Scope[RunType, sc.Variable], sc.Variable):
227
223
 
228
224
 
229
225
  class MonitorPositionOffset(
230
- sciline.ScopeTwoParams[RunType, MonitorType, sc.Variable], sc.Variable
226
+ sciline.Scope[RunType, MonitorType, sc.Variable], sc.Variable
231
227
  ):
232
228
  """Offset for the monitor position, added to base position."""
233
229
 
@@ -241,7 +237,7 @@ class CalibratedBeamline(sciline.Scope[RunType, sc.DataArray], sc.DataArray):
241
237
 
242
238
 
243
239
  class CalibratedMonitor(
244
- sciline.ScopeTwoParams[RunType, MonitorType, sc.DataArray], sc.DataArray
240
+ sciline.Scope[RunType, MonitorType, sc.DataArray], sc.DataArray
245
241
  ):
246
242
  """Calibrated data from a monitor."""
247
243
 
@@ -250,9 +246,7 @@ class DetectorData(sciline.Scope[RunType, sc.DataArray], sc.DataArray):
250
246
  """Calibrated detector merged with neutron event or histogram data."""
251
247
 
252
248
 
253
- class MonitorData(
254
- sciline.ScopeTwoParams[RunType, MonitorType, sc.DataArray], sc.DataArray
255
- ):
249
+ class MonitorData(sciline.Scope[RunType, MonitorType, sc.DataArray], sc.DataArray):
256
250
  """Calibrated monitor merged with neutron event or histogram data."""
257
251
 
258
252
 
@@ -314,7 +308,7 @@ class NeXusDataLocationSpec(NeXusLocationSpec, Generic[Component, RunType]):
314
308
 
315
309
 
316
310
  class NeXusTransformationChain(
317
- sciline.ScopeTwoParams[Component, RunType, snx.TransformationChain],
311
+ sciline.Scope[Component, RunType, snx.TransformationChain],
318
312
  snx.TransformationChain,
319
313
  ): ...
320
314
 
@@ -14,6 +14,10 @@ import scipp as sc
14
14
  from scipp._scipp.core import _bins_no_validate
15
15
  from scippneutron._utils import elem_unit
16
16
 
17
+ try:
18
+ from .interpolator_numba import Interpolator as InterpolatorImpl
19
+ except ImportError:
20
+ from .interpolator_scipy import Interpolator as InterpolatorImpl
17
21
  from .to_events import to_events
18
22
  from .types import (
19
23
  DistanceResolution,
@@ -286,12 +290,6 @@ def compute_tof_lookup_table(
286
290
 
287
291
  class TofInterpolator:
288
292
  def __init__(self, lookup: sc.DataArray, distance_unit: str, time_unit: str):
289
- from scipy.interpolate import RegularGridInterpolator
290
-
291
- # TODO: to make use of multi-threading, we could write our own interpolator.
292
- # This should be simple enough as we are making the bins linspace, so computing
293
- # bin indices is fast.
294
-
295
293
  self._distance_unit = distance_unit
296
294
  self._time_unit = time_unit
297
295
 
@@ -307,20 +305,26 @@ class TofInterpolator:
307
305
  # monotonic, so we cannot use e.g. [-0.5, 0.5, 0.5, 1.5] in the case of
308
306
  # pulse_stride=2. Instead we use [-0.25, 0.25, 0.75, 1.25].
309
307
  base_grid = np.arange(float(lookup.sizes["pulse"]))
310
- self._interpolator = RegularGridInterpolator(
311
- (
312
- np.sort(np.concatenate([base_grid - 0.25, base_grid + 0.25])),
313
- lookup.coords["distance"].to(unit=distance_unit, copy=False).values,
314
- lookup.coords["event_time_offset"]
315
- .to(unit=self._time_unit, copy=False)
316
- .values,
317
- ),
318
- np.repeat(
308
+ self._pulse_edges = np.sort(
309
+ np.concatenate([base_grid - 0.25, base_grid + 0.25])
310
+ )
311
+
312
+ self._time_edges = (
313
+ lookup.coords["event_time_offset"]
314
+ .to(unit=self._time_unit, copy=False)
315
+ .values
316
+ )
317
+ self._distance_edges = (
318
+ lookup.coords["distance"].to(unit=distance_unit, copy=False).values
319
+ )
320
+
321
+ self._interpolator = InterpolatorImpl(
322
+ time_edges=self._time_edges,
323
+ distance_edges=self._distance_edges,
324
+ pulse_edges=self._pulse_edges,
325
+ values=np.repeat(
319
326
  lookup.data.to(unit=self._time_unit, copy=False).values, 2, axis=0
320
327
  ),
321
- method="linear",
322
- bounds_error=False,
323
- fill_value=np.nan,
324
328
  )
325
329
 
326
330
  def __call__(
@@ -348,24 +352,12 @@ class TofInterpolator:
348
352
  pulse_index = pulse_index.values
349
353
  ltotal = ltotal.values
350
354
  event_time_offset = event_time_offset.values
351
- # Check bounds for pulse_index and ltotal.
352
- # We do not check the event_time_offset dimension because histogrammed monitors
353
- # often have binning which can be anything (does not necessarily stop at 71ms).
354
- # Raising an error here would be too restrictive, and warnings would add noise
355
- # to the workflows.
356
- for i, (name, values) in enumerate(
357
- {'pulse_index': pulse_index, 'ltotal': ltotal}.items()
358
- ):
359
- vmin = self._interpolator.grid[i][0]
360
- vmax = self._interpolator.grid[i][-1]
361
- if np.any(values < vmin) or np.any(values > vmax):
362
- raise ValueError(
363
- "Some requested values are outside of lookup table bounds for "
364
- f"axis {i}: {name}, min: {vmin}, max: {vmax}."
365
- )
355
+
366
356
  return sc.array(
367
357
  dims=out_dims,
368
- values=self._interpolator((pulse_index, ltotal, event_time_offset)),
358
+ values=self._interpolator(
359
+ times=event_time_offset, distances=ltotal, pulse_indices=pulse_index
360
+ ),
369
361
  unit=self._time_unit,
370
362
  )
371
363
 
@@ -379,7 +371,8 @@ def _time_of_flight_data_histogram(
379
371
  # In NeXus, 'time_of_flight' is the canonical name in NXmonitor, but in some files,
380
372
  # it may be called 'tof'.
381
373
  key = next(iter(set(da.coords.keys()) & {"time_of_flight", "tof"}))
382
- eto_unit = da.coords[key].unit
374
+ raw_eto = da.coords[key].to(dtype=float, copy=False)
375
+ eto_unit = raw_eto.unit
383
376
  pulse_period = pulse_period.to(unit=eto_unit)
384
377
 
385
378
  # In histogram mode, because there is a wrap around at the end of the pulse, we
@@ -387,9 +380,7 @@ def _time_of_flight_data_histogram(
387
380
  # with one finite left edge and a NaN right edge (it becomes NaN as it would be
388
381
  # outside the range of the lookup table).
389
382
  new_bins = sc.sort(
390
- sc.concat(
391
- [da.coords[key], sc.scalar(0.0, unit=eto_unit), pulse_period], dim=key
392
- ),
383
+ sc.concat([raw_eto, sc.scalar(0.0, unit=eto_unit), pulse_period], dim=key),
393
384
  key=key,
394
385
  )
395
386
  rebinned = da.rebin({key: new_bins})
@@ -416,7 +407,11 @@ def _time_of_flight_data_histogram(
416
407
  interp = TofInterpolator(lookup, distance_unit=ltotal.unit, time_unit=eto_unit)
417
408
 
418
409
  # Compute time-of-flight of the bin edges using the interpolator
419
- tofs = interp(pulse_index=pulse_index, ltotal=ltotal, event_time_offset=etos)
410
+ tofs = interp(
411
+ pulse_index=pulse_index,
412
+ ltotal=ltotal.broadcast(sizes=etos.sizes),
413
+ event_time_offset=etos,
414
+ )
420
415
 
421
416
  return rebinned.assign_coords(tof=tofs)
422
417
 
@@ -487,7 +482,7 @@ def _time_of_flight_data_events(
487
482
  pulse_stride: int,
488
483
  pulse_stride_offset: int,
489
484
  ) -> sc.DataArray:
490
- etos = da.bins.coords["event_time_offset"]
485
+ etos = da.bins.coords["event_time_offset"].to(dtype=float, copy=False)
491
486
  eto_unit = elem_unit(etos)
492
487
 
493
488
  # Create linear interpolator
@@ -621,17 +616,21 @@ def resample_tof_data(da: TofData) -> ResampledTofData:
621
616
  Histogrammed data with the time-of-flight coordinate.
622
617
  """
623
618
  dim = next(iter(set(da.dims) & {"time_of_flight", "tof"}))
624
- events = to_events(da.rename_dims({dim: "tof"}), "event")
619
+ data = da.rename_dims({dim: "tof"}).drop_coords(
620
+ [name for name in da.coords if name != "tof"]
621
+ )
622
+ events = to_events(data, "event")
625
623
 
626
624
  # Define a new bin width, close to the original bin width.
627
625
  # TODO: this could be a workflow parameter
628
626
  coord = da.coords["tof"]
629
627
  bin_width = (coord[dim, 1:] - coord[dim, :-1]).nanmedian()
630
628
  rehist = events.hist(tof=bin_width)
631
- for key, var in da.coords.items():
632
- if dim not in var.dims:
633
- rehist.coords[key] = var
634
- return ResampledTofData(rehist)
629
+ return ResampledTofData(
630
+ rehist.assign_coords(
631
+ {key: var for key, var in da.coords.items() if dim not in var.dims}
632
+ )
633
+ )
635
634
 
636
635
 
637
636
  def default_parameters() -> dict:
@@ -23,6 +23,7 @@ class FakeBeamline:
23
23
  events_per_pulse: int = 200000,
24
24
  seed: int | None = None,
25
25
  source: Callable | None = None,
26
+ source_position: sc.Variable | None = None,
26
27
  ):
27
28
  import math
28
29
 
@@ -32,6 +33,8 @@ class FakeBeamline:
32
33
  self.frequency = pulse.frequency
33
34
  self.npulses = math.ceil((run_length * self.frequency).to(unit="").value)
34
35
  self.events_per_pulse = events_per_pulse
36
+ if source_position is None:
37
+ source_position = sc.vector([0, 0, 0], unit='m')
35
38
 
36
39
  # Create a source
37
40
  if source is None:
@@ -54,7 +57,7 @@ class FakeBeamline:
54
57
  open=ch.slit_begin,
55
58
  close=ch.slit_end,
56
59
  phase=abs(ch.phase),
57
- distance=ch.axle_position.fields.z,
60
+ distance=sc.norm(ch.axle_position - source_position),
58
61
  name=name,
59
62
  )
60
63
  for name, ch in choppers.items()
@@ -117,3 +120,7 @@ def pulse_skipping_choppers():
117
120
  radius=sc.scalar(30.0, unit="cm"),
118
121
  ),
119
122
  }
123
+
124
+
125
+ def source_position():
126
+ return sc.vector([0, 0, 0], unit='m')
@@ -0,0 +1,165 @@
1
+ # SPDX-License-Identifier: BSD-3-Clause
2
+ # Copyright (c) 2025 Scipp contributors (https://github.com/scipp)
3
+ import numpy as np
4
+ from numba import njit, prange
5
+
6
+
7
@njit(boundscheck=False, cache=True, fastmath=False, parallel=True)
def interpolate(
    x: np.ndarray,
    y: np.ndarray,
    z: np.ndarray,
    values: np.ndarray,
    xp: np.ndarray,
    yp: np.ndarray,
    zp: np.ndarray,
    fill_value: float,
    out: np.ndarray,
):
    """
    Linear (trilinear) interpolation of data on a 3D regular grid.

    The points are processed in a ``prange`` loop, so the work is spread over
    Numba's thread pool; each iteration writes only ``out[i]``, so there are
    no cross-iteration dependencies. ``fastmath`` is kept off so NaN
    ``fill_value`` propagation stays well-defined.

    Parameters
    ----------
    x:
        1D array of grid edges along the x-axis. They must be linspaced
        (the cell index is computed from a single bin width, not searched).
    y:
        1D array of grid edges along the y-axis. They must be linspaced.
    z:
        1D array of grid edges along the z-axis. They must be linspaced.
    values:
        3D array of values on the grid. The shape must be (nz, ny, nx).
    xp:
        1D array of x-coordinates where to interpolate (size N).
    yp:
        1D array of y-coordinates where to interpolate (size N).
    zp:
        1D array of z-coordinates where to interpolate (size N).
    fill_value:
        Value to use for points outside of the grid.
    out:
        1D array where the interpolated values will be stored (size N).

    Raises
    ------
    ValueError
        If ``xp``, ``yp``, ``zp`` and ``out`` do not all have the same size.
    """
    if not (len(xp) == len(yp) == len(zp) == len(out)):
        raise ValueError("Interpolator: all input arrays must have the same size.")

    nx = len(x)
    ny = len(y)
    nz = len(z)
    npoints = len(xp)
    xmin = x[0]
    xmax = x[nx - 1]
    ymin = y[0]
    ymax = y[ny - 1]
    zmin = z[0]
    zmax = z[nz - 1]
    # Edges are assumed linspaced, so a single bin width per axis suffices.
    dx = x[1] - xmin
    dy = y[1] - ymin
    dz = z[1] - zmin

    # Hoist the divisions out of the hot loop; `norm` folds the 1/(dx*dy*dz)
    # normalization of the trilinear weights into one final multiply.
    one_over_dx = 1.0 / dx
    one_over_dy = 1.0 / dy
    one_over_dz = 1.0 / dz
    norm = one_over_dx * one_over_dy * one_over_dz

    for i in prange(npoints):
        xx = xp[i]
        yy = yp[i]
        zz = zp[i]

        # Out-of-domain points get the fill value instead of extrapolating.
        if (
            (xx < xmin)
            or (xx > xmax)
            or (yy < ymin)
            or (yy > ymax)
            or (zz < zmin)
            or (zz > zmax)
        ):
            out[i] = fill_value

        else:
            # Cell index from the linspaced edges; a point exactly on the
            # upper edge is clamped into the last cell (index n-2).
            ix = nx - 2 if xx == xmax else int((xx - xmin) * one_over_dx)
            iy = ny - 2 if yy == ymax else int((yy - ymin) * one_over_dy)
            iz = nz - 2 if zz == zmax else int((zz - zmin) * one_over_dz)

            x1 = x[ix]
            x2 = x[ix + 1]
            y1 = y[iy]
            y2 = y[iy + 1]
            z1 = z[iz]
            z2 = z[iz + 1]

            # The 8 corner values of the enclosing cell; note the (z, y, x)
            # storage order of `values`.
            a111 = values[iz, iy, ix]
            a211 = values[iz, iy, ix + 1]
            a121 = values[iz, iy + 1, ix]
            a221 = values[iz, iy + 1, ix + 1]
            a112 = values[iz + 1, iy, ix]
            a212 = values[iz + 1, iy, ix + 1]
            a122 = values[iz + 1, iy + 1, ix]
            a222 = values[iz + 1, iy + 1, ix + 1]

            # Standard trilinear blend: interpolate in x, then y, then z,
            # using un-normalized edge distances and one final `norm` factor.
            x2mxx = x2 - xx
            xxmx1 = xx - x1
            y2myy = y2 - yy
            yymy1 = yy - y1
            out[i] = (
                (z2 - zz)
                * (
                    y2myy * (x2mxx * a111 + xxmx1 * a211)
                    + yymy1 * (x2mxx * a121 + xxmx1 * a221)
                )
                + (zz - z1)
                * (
                    y2myy * (x2mxx * a112 + xxmx1 * a212)
                    + yymy1 * (x2mxx * a122 + xxmx1 * a222)
                )
            ) * norm
117
+
118
+
119
class Interpolator:
    def __init__(
        self,
        time_edges: np.ndarray,
        distance_edges: np.ndarray,
        pulse_edges: np.ndarray,
        values: np.ndarray,
        fill_value: float = np.nan,
    ):
        """
        Interpolator for 3D regular grid data (Numba implementation).

        Parameters
        ----------
        time_edges:
            1D array of time edges. Must be linspaced (the kernel derives cell
            indices from a single bin width).
        distance_edges:
            1D array of distance edges. Must be linspaced.
        pulse_edges:
            1D array of pulse edges. Must be linspaced.
        values:
            3D array of values on the grid. The shape must be
            (len(pulse_edges), len(distance_edges), len(time_edges)).
        fill_value:
            Value to use for points outside of the grid.
        """
        self.time_edges = time_edges
        self.distance_edges = distance_edges
        self.pulse_edges = pulse_edges
        self.values = values
        self.fill_value = fill_value

    def __call__(
        self, times: np.ndarray, distances: np.ndarray, pulse_indices: np.ndarray
    ) -> np.ndarray:
        """
        Interpolate at the given sample points.

        Parameters
        ----------
        times:
            1D array of time coordinates (size N).
        distances:
            1D array of distance coordinates (size N).
        pulse_indices:
            1D array of pulse coordinates (size N).

        Returns
        -------
        :
            1D array (size N) of interpolated values, with ``fill_value`` for
            points outside the grid.
        """
        # Allocate the output with the dtype of the grid values rather than
        # that of `times`: plain `np.empty_like(times)` would inherit an
        # integer dtype from integer inputs, silently truncating interpolated
        # values and being unable to hold a NaN fill value. The SciPy backend
        # returns floats in that case, so this keeps both implementations
        # consistent.
        out = np.empty_like(times, dtype=self.values.dtype)
        interpolate(
            x=self.time_edges,
            y=self.distance_edges,
            z=self.pulse_edges,
            values=self.values,
            xp=times,
            yp=distances,
            zp=pulse_indices,
            fill_value=self.fill_value,
            out=out,
        )
        return out
@@ -0,0 +1,60 @@
1
+ # SPDX-License-Identifier: BSD-3-Clause
2
+ # Copyright (c) 2025 Scipp contributors (https://github.com/scipp)
3
+
4
+ import numpy as np
5
+
6
+
7
class Interpolator:
    def __init__(
        self,
        time_edges: np.ndarray,
        distance_edges: np.ndarray,
        pulse_edges: np.ndarray,
        values: np.ndarray,
        method: str = "linear",
        bounds_error: bool = False,
        fill_value: float = np.nan,
        **kwargs,
    ):
        """
        Interpolator for 3D regular grid data (SciPy implementation).

        Thin wrapper around ``scipy.interpolate.RegularGridInterpolator`` with
        the grid axes ordered as (pulse, distance, time).

        Parameters
        ----------
        time_edges:
            1D array of time edges.
        distance_edges:
            1D array of distance edges.
        pulse_edges:
            1D array of pulse edges.
        values:
            3D array of values on the grid. The shape must be
            (len(pulse_edges), len(distance_edges), len(time_edges)).
        method:
            Method of interpolation. Default is "linear".
        bounds_error:
            If True, when interpolated values are requested outside of the domain,
            a ValueError is raised. If False, fill_value is used.
        fill_value:
            Value to use for points outside of the grid.
        kwargs:
            Additional arguments to pass to scipy.interpolate.RegularGridInterpolator.
        """
        # Imported lazily so this module can be imported without SciPy present.
        from scipy.interpolate import RegularGridInterpolator

        grid_axes = (pulse_edges, distance_edges, time_edges)
        self._interp = RegularGridInterpolator(
            grid_axes,
            values,
            method=method,
            bounds_error=bounds_error,
            fill_value=fill_value,
            **kwargs,
        )

    def __call__(
        self, times: np.ndarray, distances: np.ndarray, pulse_indices: np.ndarray
    ) -> np.ndarray:
        """Evaluate the interpolator at the given (pulse, distance, time) points."""
        sample_points = (pulse_indices, distances, times)
        return self._interp(sample_points)
@@ -9,7 +9,9 @@ from .types import SimulationResults
9
9
 
10
10
 
11
11
  def simulate_beamline(
12
+ *,
12
13
  choppers: Mapping[str, DiskChopper],
14
+ source_position: sc.Variable,
13
15
  neutrons: int = 1_000_000,
14
16
  pulses: int = 1,
15
17
  seed: int | None = None,
@@ -25,6 +27,9 @@ def simulate_beamline(
25
27
  A dict of DiskChopper objects representing the choppers in the beamline. See
26
28
  https://scipp.github.io/scippneutron/user-guide/chopper/processing-nexus-choppers.html#Build-DiskChopper
27
29
  for more information.
30
+ source_position:
31
+ A scalar variable with ``dtype=vector3`` that defines the source position.
32
+ Must be in the same coordinate system as the choppers' axle positions.
28
33
  neutrons:
29
34
  Number of neutrons to simulate.
30
35
  pulses:
@@ -45,7 +50,9 @@ def simulate_beamline(
45
50
  open=ch.slit_begin,
46
51
  close=ch.slit_end,
47
52
  phase=abs(ch.phase),
48
- distance=ch.axle_position.fields.z,
53
+ distance=sc.norm(
54
+ ch.axle_position - source_position.to(unit=ch.axle_position.unit)
55
+ ),
49
56
  name=name,
50
57
  )
51
58
  for name, ch in choppers.items()
@@ -54,7 +61,7 @@ def simulate_beamline(
54
61
  if not tof_choppers:
55
62
  events = source.data.squeeze().flatten(to='event')
56
63
  return SimulationResults(
57
- time_of_arrival=events.coords["time"],
64
+ time_of_arrival=events.coords["birth_time"],
58
65
  speed=events.coords["speed"],
59
66
  wavelength=events.coords["wavelength"],
60
67
  weight=events.data,
@@ -34,7 +34,7 @@ def to_events(
34
34
  rng = np.random.default_rng()
35
35
  event_coords = {}
36
36
  edge_dims = []
37
- midp_dims = set()
37
+ midp_dims = set(da.dims)
38
38
  midp_coord_names = []
39
39
  # Separate bin-edge and midpoints coords
40
40
  for name in da.coords:
@@ -43,9 +43,9 @@ def to_events(
43
43
  if is_edges:
44
44
  if name in dims:
45
45
  edge_dims.append(name)
46
+ midp_dims -= {name}
46
47
  else:
47
48
  midp_coord_names.append(name)
48
- midp_dims.update(set(dims))
49
49
 
50
50
  edge_sizes = {dim: da.sizes[da.coords[dim].dim] for dim in edge_dims}
51
51
  for dim in edge_dims:
@@ -1,6 +1,6 @@
1
- Metadata-Version: 2.2
1
+ Metadata-Version: 2.4
2
2
  Name: essreduce
3
- Version: 25.3.1
3
+ Version: 25.4.1
4
4
  Summary: Common data reduction tools for the ESS facility
5
5
  Author: Scipp contributors
6
6
  License: BSD 3-Clause License
@@ -51,16 +51,18 @@ Classifier: Typing :: Typed
51
51
  Requires-Python: >=3.10
52
52
  Description-Content-Type: text/markdown
53
53
  License-File: LICENSE
54
- Requires-Dist: sciline>=24.06.2
54
+ Requires-Dist: sciline>=25.04.1
55
55
  Requires-Dist: scipp>=25.01.0
56
56
  Requires-Dist: scippneutron>=25.02.0
57
57
  Requires-Dist: scippnexus>=24.11.0
58
58
  Provides-Extra: test
59
59
  Requires-Dist: ipywidgets; extra == "test"
60
+ Requires-Dist: numba; extra == "test"
60
61
  Requires-Dist: pooch; extra == "test"
61
62
  Requires-Dist: pytest; extra == "test"
62
63
  Requires-Dist: scipy>=1.7.0; extra == "test"
63
64
  Requires-Dist: tof>=25.01.2; extra == "test"
65
+ Dynamic: license-file
64
66
 
65
67
  [![Contributor Covenant](https://img.shields.io/badge/Contributor%20Covenant-2.1-4baaaa.svg)](CODE_OF_CONDUCT.md)
66
68
  [![PyPI badge](http://img.shields.io/pypi/v/essreduce.svg)](https://pypi.python.org/pypi/essreduce)
@@ -8,21 +8,23 @@ ess/reduce/ui.py,sha256=zmorAbDwX1cU3ygDT--OP58o0qU7OBcmJz03jPeYSLA,10884
8
8
  ess/reduce/uncertainty.py,sha256=LR4O6ApB6Z-W9gC_XW0ajupl8yFG-du0eee1AX_R-gk,6990
9
9
  ess/reduce/workflow.py,sha256=sL34T_2Cjl_8iFlegujxI9VyOUwo6erVC8pOXnfWgYw,3060
10
10
  ess/reduce/live/__init__.py,sha256=jPQVhihRVNtEDrE20PoKkclKV2aBF1lS7cCHootgFgI,204
11
- ess/reduce/live/raw.py,sha256=hyWkDJ0WYE2TS12dVxpRUh6RkzcUJL0bVDd4JjTidi0,24217
11
+ ess/reduce/live/raw.py,sha256=66qV0G2rP8gK5tXuk-syTlDLE2jT3ehfmSnET7Xzfd0,24392
12
12
  ess/reduce/live/roi.py,sha256=Hs-pW98k41WU6Kl3UQ41kQawk80c2QNOQ_WNctLzDPE,3795
13
13
  ess/reduce/live/workflow.py,sha256=bsbwvTqPhRO6mC__3b7MgU7DWwAnOvGvG-t2n22EKq8,4285
14
14
  ess/reduce/nexus/__init__.py,sha256=59bxKkNYg8DYcSykNvH6nCa5SYchJC4SbgZEKhkNdYc,967
15
15
  ess/reduce/nexus/_nexus_loader.py,sha256=5N48AMJx1AaFZb6WZPPbVKUlXyFMVVtZrn7Bae57O3A,19842
16
16
  ess/reduce/nexus/json_generator.py,sha256=ME2Xn8L7Oi3uHJk9ZZdCRQTRX-OV_wh9-DJn07Alplk,2529
17
17
  ess/reduce/nexus/json_nexus.py,sha256=QrVc0p424nZ5dHX9gebAJppTw6lGZq9404P_OFl1giA,10282
18
- ess/reduce/nexus/types.py,sha256=15XcHbNbOfnAYjWXzzKyYDVNyNixRnP0hJ-Q2duwMWE,9896
18
+ ess/reduce/nexus/types.py,sha256=vTQD4oQ5JKBHAYy9LWFICSo-dhVi3wX5IinMgjRDtF8,9806
19
19
  ess/reduce/nexus/workflow.py,sha256=EiD6-58eGwoN5fbo47UTZy_oYFitCbwlIH-xqDOSp4c,24326
20
20
  ess/reduce/scripts/grow_nexus.py,sha256=hET3h06M0xlJd62E3palNLFvJMyNax2kK4XyJcOhl-I,3387
21
21
  ess/reduce/time_of_flight/__init__.py,sha256=TSHfyoROwFhM2k3jHzamw3zeb0OQOaiuvgCgDEPEQ_g,1097
22
- ess/reduce/time_of_flight/eto_to_tof.py,sha256=4itpEB2Vb5-6HvgFfKIV0_-l8zS4UJgB5uBp12L6cls,25827
23
- ess/reduce/time_of_flight/fakes.py,sha256=rlBgceFVbHIhP_xPyUzYVf-2wEu--G8hA-kxPzAnPbM,4236
24
- ess/reduce/time_of_flight/simulation.py,sha256=CireE9m9kFbUXhGUeY2L3SoMy7kpqopxKj__h4tSKzo,2614
25
- ess/reduce/time_of_flight/to_events.py,sha256=_5CcUOWvguDcK8uo2pPZWzXnWoiZhC1w-zF8xysaIvU,4339
22
+ ess/reduce/time_of_flight/eto_to_tof.py,sha256=Nq2gx7aejoZ_ExLTr9I6KZMqDxCKAx1PpGHslpNXkKU,25271
23
+ ess/reduce/time_of_flight/fakes.py,sha256=REyHkJsSSq2_l5UOtpsv2aKkhCuro_i3KpVsxxITbW0,4470
24
+ ess/reduce/time_of_flight/interpolator_numba.py,sha256=AgB2R8iw-IOb3YXLWTQVBflhWq5qgb7aqfvDExwLRW8,4682
25
+ ess/reduce/time_of_flight/interpolator_scipy.py,sha256=sRJj2ncBiUMv6g9h-MJzI9xyY0Ir0degpAv6FIeSMBw,1834
26
+ ess/reduce/time_of_flight/simulation.py,sha256=cIF_nWkLQlcWUCW2_wvWBU2ocg_8CSfOnfkoqdLdUgs,2923
27
+ ess/reduce/time_of_flight/to_events.py,sha256=w9mHpnWd3vwN2ouob-GK_1NPrTjCaOzPuC2QuEey-m0,4342
26
28
  ess/reduce/time_of_flight/types.py,sha256=Iv1XGLbrZ9bD4CPAVhsIPkAaB46YC7l7yf5XweljLqk,5047
27
29
  ess/reduce/widgets/__init__.py,sha256=SoSHBv8Dc3QXV9HUvPhjSYWMwKTGYZLpsWwsShIO97Q,5325
28
30
  ess/reduce/widgets/_base.py,sha256=_wN3FOlXgx_u0c-A_3yyoIH-SdUvDENGgquh9S-h5GI,4852
@@ -36,9 +38,9 @@ ess/reduce/widgets/_spinner.py,sha256=2VY4Fhfa7HMXox2O7UbofcdKsYG-AJGrsgGJB85nDX
36
38
  ess/reduce/widgets/_string_widget.py,sha256=iPAdfANyXHf-nkfhgkyH6gQDklia0LebLTmwi3m-iYQ,1482
37
39
  ess/reduce/widgets/_switchable_widget.py,sha256=fjKz99SKLhIF1BLgGVBSKKn3Lu_jYBwDYGeAjbJY3Q8,2390
38
40
  ess/reduce/widgets/_vector_widget.py,sha256=aTaBqCFHZQhrIoX6-sSqFWCPePEW8HQt5kUio8jP1t8,1203
39
- essreduce-25.3.1.dist-info/LICENSE,sha256=nVEiume4Qj6jMYfSRjHTM2jtJ4FGu0g-5Sdh7osfEYw,1553
40
- essreduce-25.3.1.dist-info/METADATA,sha256=xzclNXH4P_JVuujgIOjGtTw_7OQtH5s2e4_FlwUWDBs,3708
41
- essreduce-25.3.1.dist-info/WHEEL,sha256=52BFRY2Up02UkjOa29eZOS2VxUrpPORXg1pkohGGUS8,91
42
- essreduce-25.3.1.dist-info/entry_points.txt,sha256=PMZOIYzCifHMTe4pK3HbhxUwxjFaZizYlLD0td4Isb0,66
43
- essreduce-25.3.1.dist-info/top_level.txt,sha256=0JxTCgMKPLKtp14wb1-RKisQPQWX7i96innZNvHBr-s,4
44
- essreduce-25.3.1.dist-info/RECORD,,
41
+ essreduce-25.4.1.dist-info/licenses/LICENSE,sha256=nVEiume4Qj6jMYfSRjHTM2jtJ4FGu0g-5Sdh7osfEYw,1553
42
+ essreduce-25.4.1.dist-info/METADATA,sha256=_E84IwG_gnTsMoorflvf6T4K5oJB7IjpsUUTB4bhVh8,3768
43
+ essreduce-25.4.1.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
44
+ essreduce-25.4.1.dist-info/entry_points.txt,sha256=PMZOIYzCifHMTe4pK3HbhxUwxjFaZizYlLD0td4Isb0,66
45
+ essreduce-25.4.1.dist-info/top_level.txt,sha256=0JxTCgMKPLKtp14wb1-RKisQPQWX7i96innZNvHBr-s,4
46
+ essreduce-25.4.1.dist-info/RECORD,,
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: setuptools (76.0.0)
2
+ Generator: setuptools (78.1.0)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
5
5