essreduce-25.5.2-py3-none-any.whl → essreduce-25.5.3-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
--- essreduce-25.5.2/ess/reduce/time_of_flight/__init__.py
+++ essreduce-25.5.3/ess/reduce/time_of_flight/__init__.py
@@ -6,14 +6,8 @@ Utilities for computing real neutron time-of-flight from chopper settings and
 neutron time-of-arrival at the detectors.
 """
 
-from .eto_to_tof import (
-    default_parameters,
-    providers,
-    resample_detector_time_of_flight_data,
-    resample_monitor_time_of_flight_data,
-)
+from .eto_to_tof import default_parameters, providers
 from .simulation import simulate_beamline
-from .to_events import to_events
 from .types import (
     DetectorLtotal,
     DetectorTofData,
@@ -25,8 +19,6 @@ from .types import (
     PulsePeriod,
     PulseStride,
     PulseStrideOffset,
-    ResampledDetectorTofData,
-    ResampledMonitorTofData,
     SimulationResults,
     TimeOfFlightLookupTable,
     TimeOfFlightLookupTableFilename,
@@ -37,17 +29,17 @@ from .workflow import GenericTofWorkflow, TofLutProvider
 __all__ = [
     "DetectorLtotal",
     "DetectorTofData",
+    "DetectorTofData",
     "DistanceResolution",
     "GenericTofWorkflow",
     "LookupTableRelativeErrorThreshold",
     "LtotalRange",
     "MonitorLtotal",
     "MonitorTofData",
+    "MonitorTofData",
     "PulsePeriod",
     "PulseStride",
     "PulseStrideOffset",
-    "ResampledDetectorTofData",
-    "ResampledMonitorTofData",
     "SimulationResults",
     "TimeOfFlightLookupTable",
     "TimeOfFlightLookupTableFilename",
@@ -55,8 +47,5 @@ __all__ = [
     "TofLutProvider",
     "default_parameters",
     "providers",
-    "resample_detector_time_of_flight_data",
-    "resample_monitor_time_of_flight_data",
     "simulate_beamline",
-    "to_events",
 ]
--- essreduce-25.5.2/ess/reduce/time_of_flight/eto_to_tof.py
+++ essreduce-25.5.3/ess/reduce/time_of_flight/eto_to_tof.py
@@ -27,7 +27,7 @@ from ..nexus.types import (
     MonitorType,
     RunType,
 )
-from .to_events import to_events
+from .resample import rebin_strictly_increasing
 from .types import (
     DetectorLtotal,
     DetectorTofData,
@@ -39,8 +39,6 @@ from .types import (
     PulsePeriod,
     PulseStride,
     PulseStrideOffset,
-    ResampledDetectorTofData,
-    ResampledMonitorTofData,
     SimulationResults,
     TimeOfFlightLookupTable,
     TimeResolution,
@@ -586,7 +584,8 @@ def _compute_tof_data(
     pulse_stride_offset: int,
 ) -> sc.DataArray:
     if da.bins is None:
-        return _time_of_flight_data_histogram(da=da, lookup=lookup, ltotal=ltotal)
+        data = _time_of_flight_data_histogram(da=da, lookup=lookup, ltotal=ltotal)
+        return rebin_strictly_increasing(data, dim='tof')
     else:
         return _time_of_flight_data_events(
             da=da,
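
With this change, the histogram branch of `_compute_tof_data` always pipes its result through the new `rebin_strictly_increasing` (added in `resample.py` below), so sorted `tof` bin edges no longer require the opt-in resampling providers that 25.5.2 shipped. A rough migration sketch; the workflow construction (`wf`) is assumed and not part of this diff, and `SampleRun` is assumed to come from `ess.reduce.nexus.types`:

    # Sketch only: GenericTofWorkflow and DetectorTofData are in this
    # package's public API; the workflow setup itself is not shown here.
    from ess.reduce.time_of_flight import DetectorTofData, GenericTofWorkflow

    # 25.5.2: manually insert resample_detector_time_of_flight_data into the
    # workflow, then compute ResampledDetectorTofData[SampleRun].
    # 25.5.3: the histogram path resamples internally, so simply:
    # result = wf.compute(DetectorTofData[SampleRun])
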
@@ -664,62 +663,6 @@ def monitor_time_of_flight_data(
     )
 
 
-def _resample_tof_data(da: sc.DataArray) -> sc.DataArray:
-    """
-    Histogrammed data that has been converted to `tof` will typically have
-    unsorted bin edges (due to either wrapping of `time_of_flight` or wavelength
-    overlap between subframes).
-    This function re-histograms the data to ensure that the bin edges are sorted.
-    It makes use of the ``to_events`` helper which generates a number of events in each
-    bin with a uniform distribution. The new events are then histogrammed using a set of
-    sorted bin edges.
-
-    WARNING:
-    This function is highly experimental, has limitations and should be used with
-    caution. It is a workaround to the issue that rebinning data with unsorted bin
-    edges is not supported in scipp.
-    As such, this function is not part of the default set of providers, and needs to be
-    inserted manually into the workflow.
-
-    Parameters
-    ----------
-    da:
-        Histogrammed data with the time-of-flight coordinate.
-    """
-    dim = next(iter(set(da.dims) & {"time_of_flight", "tof"}))
-    data = da.rename_dims({dim: "tof"}).drop_coords(
-        [name for name in da.coords if name != "tof"]
-    )
-    events = to_events(data, "event")
-
-    # Define a new bin width, close to the original bin width.
-    # TODO: this could be a workflow parameter
-    coord = da.coords["tof"]
-    bin_width = (coord[dim, 1:] - coord[dim, :-1]).nanmedian()
-    rehist = events.hist(tof=bin_width)
-    return rehist.assign_coords(
-        {key: var for key, var in da.coords.items() if dim not in var.dims}
-    )
-
-
-def resample_detector_time_of_flight_data(
-    da: DetectorTofData[RunType],
-) -> ResampledDetectorTofData[RunType]:
-    """
-    Resample the detector time-of-flight data to ensure that the bin edges are sorted.
-    """
-    return ResampledDetectorTofData(_resample_tof_data(da))
-
-
-def resample_monitor_time_of_flight_data(
-    da: MonitorTofData[RunType, MonitorType],
-) -> ResampledMonitorTofData[RunType, MonitorType]:
-    """
-    Resample the monitor time-of-flight data to ensure that the bin edges are sorted.
-    """
-    return ResampledMonitorTofData(_resample_tof_data(da))
-
-
 def default_parameters() -> dict:
     """
     Default parameters of the time-of-flight workflow.
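
For context before the new module: a minimal illustration (invented numbers, not from the package) of the problem that both the resampler removed above and `resample.py` below address. Frame wrapping leaves a histogram's `tof` bin edges with more than one increasing run, which `sc.rebin` rejects:

    import scipp as sc

    # Bin edges with two strictly increasing runs, as after frame wrapping.
    edges = sc.array(dims=['tof'], values=[5.0, 7.0, 9.0, 2.0, 4.0, 6.0], unit='ms')
    da = sc.DataArray(
        sc.array(dims=['tof'], values=[1.0, 2.0, 0.0, 4.0, 5.0], unit='counts'),
        coords={'tof': edges},
    )
    target = sc.linspace('tof', 2.0, 10.0, num=5, unit='ms')
    # sc.rebin(da, {'tof': target})  # raises: input bin edges are not sorted
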
--- /dev/null
+++ essreduce-25.5.3/ess/reduce/time_of_flight/resample.py
@@ -0,0 +1,97 @@
+# SPDX-License-Identifier: BSD-3-Clause
+# Copyright (c) 2025 Scipp contributors (https://github.com/scipp)
+
+
+import numpy as np
+import scipp as sc
+
+
+def find_strictly_increasing_sections(var: sc.Variable) -> list[slice]:
+    """
+    Find strictly increasing sections in a coordinate dimension (minimum length 2).
+
+    Parameters
+    ----------
+    var:
+        The variable to analyze, which should be one-dimensional.
+
+    Returns
+    -------
+    sections:
+        Slice objects that can be used to extract strictly increasing sections.
+    """
+    values = var.values
+    finite = np.isfinite(values)
+    increasing = (np.sign(np.diff(values)) > 0) & finite[:-1] & finite[1:]
+    # 1 marks the start of an increasing section, -1 marks the end
+    transitions = np.diff(np.concatenate(([False], increasing, [False])).astype(int))
+    section_starts = np.where(transitions == 1)[0]
+    section_ends = np.where(transitions == -1)[0] + np.array(1)
+    return [
+        slice(start, end)
+        for start, end in zip(section_starts, section_ends, strict=True)
+        if end - start >= 2  # Ensure section has at least 2 points
+    ]
+
+
+def get_min_max(
+    var: sc.Variable, *, dim: str, slices: list[slice]
+) -> tuple[sc.Variable, sc.Variable]:
+    if not slices:
+        raise ValueError("No strictly increasing sections found.")
+    combined = sc.concat([var[dim, slice] for slice in slices], dim)
+    return combined.min(), combined.max()
+
+
+def make_regular_grid(
+    var: sc.Variable, *, dim: str, slices: list[slice]
+) -> sc.Variable:
+    """
+    Create a regular grid variable based on the min and max of the slices.
+
+    The grid is constructed such that it includes the minimum and maximum values
+    of the strictly increasing sections, with a step size equal to the difference
+    between the first two values of the section with the minimum start value (which is
+    not necessarily the first section).
+    """
+    min_val, max_val = get_min_max(var, dim=dim, slices=slices)
+    first: sc.Variable | None = None
+    for s in slices:
+        first = var[dim, s]
+        if sc.identical(first[0], min_val):
+            break
+    if first is None:
+        # This should not happen if slices are correctly identified and passed from
+        # find_strictly_increasing_sections.
+        raise ValueError("Section is not strictly increasing.")
+    step = first[1] - first[0]
+    return sc.arange(
+        dim=dim,
+        start=min_val.value,
+        stop=max_val.value + step.value,  # Ensure the last bin edge is included
+        step=step.value,
+        unit=step.unit,
+        dtype=step.dtype,
+    )
+
+
+def rebin_strictly_increasing(da: sc.DataArray, dim: str) -> sc.DataArray:
+    """
+    Find strictly monotonic sections in a coordinate dimension and rebin the data array
+    into a regular grid based on these sections.
+    """
+    # Ensure the dimension is named like the coordinate.
+    da = da.rename_dims({da.coords[dim].dim: dim})
+    slices = find_strictly_increasing_sections(da.coords[dim])
+    if len(slices) == 1:
+        return da[dim, slices[0]]
+    if not slices:
+        raise ValueError("No strictly increasing sections found.")
+    if da.coords[dim].dtype not in (sc.DType.float64, sc.DType.float32):
+        # rebin does not like integer coords.
+        da = da.assign_coords({dim: da.coords[dim].to(dtype='float64')})
+    # Slices refer to the indices in the coord, which are bin edges. For slicing data
+    # we need to stop at the last index minus one.
+    sections = [da[dim, section.start : section.stop - 1] for section in slices]
+    edges = make_regular_grid(da.coords[dim], dim=dim, slices=slices)
+    return sc.reduce([sc.rebin(section, {dim: edges}) for section in sections]).sum()
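
A quick sketch of how the new module behaves on the kind of wrapped edges shown earlier (invented numbers again): `find_strictly_increasing_sections` yields one slice per increasing run of edges, and `rebin_strictly_increasing` rebins every run onto a single regular grid and sums the pieces, conserving the total counts:

    import scipp as sc
    from ess.reduce.time_of_flight.resample import (
        find_strictly_increasing_sections,
        rebin_strictly_increasing,
    )

    edges = sc.array(dims=['tof'], values=[5.0, 7.0, 9.0, 2.0, 4.0, 6.0], unit='ms')
    da = sc.DataArray(
        sc.array(dims=['tof'], values=[1.0, 2.0, 0.0, 4.0, 5.0], unit='counts'),
        coords={'tof': edges},
    )
    print(find_strictly_increasing_sections(edges))
    # [slice(0, 3, None), slice(3, 6, None)]
    out = rebin_strictly_increasing(da, dim='tof')
    print(out.coords['tof'].values)  # regular grid: [ 2.  4.  6.  8. 10.]
    print(out.sum().value)           # 12.0, the original total
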
--- essreduce-25.5.2/ess/reduce/time_of_flight/types.py
+++ essreduce-25.5.3/ess/reduce/time_of_flight/types.py
@@ -130,43 +130,3 @@ class DetectorTofData(sl.Scope[RunType, sc.DataArray], sc.DataArray):
 
 class MonitorTofData(sl.Scope[RunType, MonitorType, sc.DataArray], sc.DataArray):
     """Monitor data with time-of-flight coordinate."""
-
-
-class ResampledDetectorTofData(sl.Scope[RunType, sc.DataArray], sc.DataArray):
-    """
-    Histogrammed detector data with time-of-flight coordinate, that has been resampled.
-
-    Histogrammed data that has been converted to `tof` will typically have
-    unsorted bin edges (due to either wrapping of `time_of_flight` or wavelength
-    overlap between subframes).
-    We thus resample the data to ensure that the bin edges are sorted.
-    It makes use of the ``to_events`` helper which generates a number of events in each
-    bin with a uniform distribution. The new events are then histogrammed using a set of
-    sorted bin edges to yield a new histogram with sorted bin edges.
-
-    WARNING:
-    This function is highly experimental, has limitations and should be used with
-    caution. It is a workaround to the issue that rebinning data with unsorted bin
-    edges is not supported in scipp.
-    """
-
-
-class ResampledMonitorTofData(
-    sl.Scope[RunType, MonitorType, sc.DataArray], sc.DataArray
-):
-    """
-    Histogrammed monitor data with time-of-flight coordinate, that has been resampled.
-
-    Histogrammed data that has been converted to `tof` will typically have
-    unsorted bin edges (due to either wrapping of `time_of_flight` or wavelength
-    overlap between subframes).
-    We thus resample the data to ensure that the bin edges are sorted.
-    It makes use of the ``to_events`` helper which generates a number of events in each
-    bin with a uniform distribution. The new events are then histogrammed using a set of
-    sorted bin edges to yield a new histogram with sorted bin edges.
-
-    WARNING:
-    This function is highly experimental, has limitations and should be used with
-    caution. It is a workaround to the issue that rebinning data with unsorted bin
-    edges is not supported in scipp.
-    """
--- essreduce-25.5.2/ess/reduce/time_of_flight/workflow.py
+++ essreduce-25.5.3/ess/reduce/time_of_flight/workflow.py
@@ -34,6 +34,7 @@ def GenericTofWorkflow(
    """
    Generic workflow for computing the neutron time-of-flight for detector and monitor
    data.
+
    This workflow builds on the ``GenericNeXusWorkflow`` and computes time-of-flight
    from a lookup table that is created from the chopper settings, detector Ltotal and
    the neutron time-of-arrival.
--- essreduce-25.5.2.dist-info/METADATA
+++ essreduce-25.5.3.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: essreduce
-Version: 25.5.2
+Version: 25.5.3
 Summary: Common data reduction tools for the ESS facility
 Author: Scipp contributors
 License: BSD 3-Clause License
--- essreduce-25.5.2.dist-info/RECORD
+++ essreduce-25.5.3.dist-info/RECORD
@@ -18,15 +18,15 @@ ess/reduce/nexus/json_nexus.py,sha256=QrVc0p424nZ5dHX9gebAJppTw6lGZq9404P_OFl1gi
 ess/reduce/nexus/types.py,sha256=DE82JnbgpTlQnt7UN2a2Gur2N9QupV3CDL9j4Iy4lsE,9178
 ess/reduce/nexus/workflow.py,sha256=Ytt80-muk5EiXmip890ahb_m5DQqlTGRQUyaTVXRNzo,24568
 ess/reduce/scripts/grow_nexus.py,sha256=hET3h06M0xlJd62E3palNLFvJMyNax2kK4XyJcOhl-I,3387
-ess/reduce/time_of_flight/__init__.py,sha256=UxMvY4aFkhZQmIbGSo4FBpBGRD2wDJbczLMVqcEhCSg,1583
-ess/reduce/time_of_flight/eto_to_tof.py,sha256=JCu7C3AmJnB7GDJrL76oPjgxGesp67nct9xXRp3O8E4,28204
+ess/reduce/time_of_flight/__init__.py,sha256=jbvLcVgODURuweuicrsDqEYqiL_GNJa_t4c5ik344Ro,1269
+ess/reduce/time_of_flight/eto_to_tof.py,sha256=FYujFQSqDoxOLiVbNId4YcpuhKmNdvtBMUr9nK5poIk,26070
 ess/reduce/time_of_flight/fakes.py,sha256=0gtbSX3ZQilaM4ZP5dMr3fqbnhpyoVsZX2YEb8GgREE,4489
 ess/reduce/time_of_flight/interpolator_numba.py,sha256=wh2YS3j2rOu30v1Ok3xNHcwS7t8eEtZyZvbfXOCtgrQ,3835
 ess/reduce/time_of_flight/interpolator_scipy.py,sha256=_InoAPuMm2qhJKZQBAHOGRFqtvvuQ8TStoN7j_YgS4M,1853
+ess/reduce/time_of_flight/resample.py,sha256=Opmi-JA4zNH725l9VB99U4O9UlM37f5ACTCGtwBcows,3718
 ess/reduce/time_of_flight/simulation.py,sha256=vo-zjG6t-PLetv2_nj4dhMSTEyTQ1MsrhlM2XkhOtf8,3632
-ess/reduce/time_of_flight/to_events.py,sha256=w9mHpnWd3vwN2ouob-GK_1NPrTjCaOzPuC2QuEey-m0,4342
-ess/reduce/time_of_flight/types.py,sha256=OQeMYNN7QinXs_HDcoE6kkh_xNcyD0dEJWtnHQy5-uA,6675
-ess/reduce/time_of_flight/workflow.py,sha256=mwNEXwvOnm-M8n4G9Cau1Vuyqzo4qMrhCbZ-S5vszW4,3181
+ess/reduce/time_of_flight/types.py,sha256=LJlK_5u5yeFj2TLnz-LI3eApkp8vEg5q8OncHkmHjj8,4844
+ess/reduce/time_of_flight/workflow.py,sha256=BAIMeA1bSJlS6JSG7r2srVdtBsAK6VD0DuOiYZuQvNg,3182
 ess/reduce/widgets/__init__.py,sha256=SoSHBv8Dc3QXV9HUvPhjSYWMwKTGYZLpsWwsShIO97Q,5325
 ess/reduce/widgets/_base.py,sha256=_wN3FOlXgx_u0c-A_3yyoIH-SdUvDENGgquh9S-h5GI,4852
 ess/reduce/widgets/_binedges_widget.py,sha256=ZCQsGjYHnJr9GFUn7NjoZc1CdsnAzm_fMzyF-fTKKVY,2785
@@ -39,9 +39,9 @@ ess/reduce/widgets/_spinner.py,sha256=2VY4Fhfa7HMXox2O7UbofcdKsYG-AJGrsgGJB85nDX
 ess/reduce/widgets/_string_widget.py,sha256=iPAdfANyXHf-nkfhgkyH6gQDklia0LebLTmwi3m-iYQ,1482
 ess/reduce/widgets/_switchable_widget.py,sha256=fjKz99SKLhIF1BLgGVBSKKn3Lu_jYBwDYGeAjbJY3Q8,2390
 ess/reduce/widgets/_vector_widget.py,sha256=aTaBqCFHZQhrIoX6-sSqFWCPePEW8HQt5kUio8jP1t8,1203
-essreduce-25.5.2.dist-info/licenses/LICENSE,sha256=nVEiume4Qj6jMYfSRjHTM2jtJ4FGu0g-5Sdh7osfEYw,1553
-essreduce-25.5.2.dist-info/METADATA,sha256=LNEHFWDdswQNdu6s_5_eVHlmkVz35siN2fvirZ0KhFI,3768
-essreduce-25.5.2.dist-info/WHEEL,sha256=zaaOINJESkSfm_4HQVc5ssNzHCPXhJm0kEUakpsEHaU,91
-essreduce-25.5.2.dist-info/entry_points.txt,sha256=PMZOIYzCifHMTe4pK3HbhxUwxjFaZizYlLD0td4Isb0,66
-essreduce-25.5.2.dist-info/top_level.txt,sha256=0JxTCgMKPLKtp14wb1-RKisQPQWX7i96innZNvHBr-s,4
-essreduce-25.5.2.dist-info/RECORD,,
+essreduce-25.5.3.dist-info/licenses/LICENSE,sha256=nVEiume4Qj6jMYfSRjHTM2jtJ4FGu0g-5Sdh7osfEYw,1553
+essreduce-25.5.3.dist-info/METADATA,sha256=WNGTtuRJz8G7G3mFV0sWWQy5ByR1uzJ3iDQi7hVyYps,3768
+essreduce-25.5.3.dist-info/WHEEL,sha256=zaaOINJESkSfm_4HQVc5ssNzHCPXhJm0kEUakpsEHaU,91
+essreduce-25.5.3.dist-info/entry_points.txt,sha256=PMZOIYzCifHMTe4pK3HbhxUwxjFaZizYlLD0td4Isb0,66
+essreduce-25.5.3.dist-info/top_level.txt,sha256=0JxTCgMKPLKtp14wb1-RKisQPQWX7i96innZNvHBr-s,4
+essreduce-25.5.3.dist-info/RECORD,,
--- essreduce-25.5.2/ess/reduce/time_of_flight/to_events.py
+++ /dev/null
@@ -1,111 +0,0 @@
-# SPDX-License-Identifier: BSD-3-Clause
-# Copyright (c) 2025 Scipp contributors (https://github.com/scipp)
-
-from functools import reduce
-
-import numpy as np
-import scipp as sc
-
-
-def to_events(
-    da: sc.DataArray, event_dim: str, events_per_bin: int = 500
-) -> sc.DataArray:
-    """
-    Convert a histogrammed data array to an event list.
-    The generated events have a uniform distribution within each bin.
-    Each dimension with a bin-edge coordinate is converted to an event coordinate.
-    The contract is that if we re-histogram the event list with the same bin edges,
-    we should get the original counts back.
-    Masks on non-bin-edge dimensions are preserved.
-    If there are masks on bin-edge dimensions, the masked values are zeroed out in the
-    original data before the conversion to events.
-
-    Parameters
-    ----------
-    da:
-        DataArray to convert to events.
-    event_dim:
-        Name of the new event dimension.
-    events_per_bin:
-        Number of events to generate per bin.
-    """
-    if da.bins is not None:
-        raise ValueError("Cannot convert a binned DataArray to events.")
-    rng = np.random.default_rng()
-    event_coords = {}
-    edge_dims = []
-    midp_dims = set(da.dims)
-    midp_coord_names = []
-    # Separate bin-edge and midpoints coords
-    for name in da.coords:
-        dims = da.coords[name].dims
-        is_edges = False if not dims else da.coords.is_edges(name)
-        if is_edges:
-            if name in dims:
-                edge_dims.append(name)
-                midp_dims -= {name}
-        else:
-            midp_coord_names.append(name)
-
-    edge_sizes = {dim: da.sizes[da.coords[dim].dim] for dim in edge_dims}
-    for dim in edge_dims:
-        coord = da.coords[dim]
-        left = sc.broadcast(coord[dim, :-1], sizes=edge_sizes).values
-        right = sc.broadcast(coord[dim, 1:], sizes=edge_sizes).values
-
-        # The numpy.random.uniform function below does not support NaNs, so we need to
-        # replace them with zeros, and then replace them back after the random numbers
-        # have been generated.
-        nans = np.isnan(left) | np.isnan(right)
-        left = np.where(nans, 0.0, left)
-        right = np.where(nans, 0.0, right)
-        # Ensure left <= right
-        left, right = np.minimum(left, right), np.maximum(left, right)
-
-        # In each bin, we generate a number of events with a uniform distribution.
-        events = rng.uniform(
-            left, right, size=(events_per_bin, *list(edge_sizes.values()))
-        )
-        events[..., nans] = np.nan
-        event_coords[dim] = sc.array(
-            dims=[event_dim, *edge_dims], values=events, unit=coord.unit
-        )
-
-    # Find and apply masks that are on a bin-edge dimension
-    event_masks = {}
-    other_masks = {}
-    edge_dims_set = set(edge_dims)
-    for key, mask in da.masks.items():
-        if set(mask.dims) & edge_dims_set:
-            event_masks[key] = mask
-        else:
-            other_masks[key] = mask
-
-    data = da.data
-    if event_masks:
-        inv_mask = (~reduce(lambda a, b: a | b, event_masks.values())).to(dtype=int)
-        inv_mask.unit = ""
-        data = data * inv_mask
-
-    # Create the data counts, which are the original counts divided by the number of
-    # events per bin
-    sizes = {event_dim: events_per_bin} | da.sizes
-    val = sc.broadcast(sc.values(data) / float(events_per_bin), sizes=sizes)
-    kwargs = {"dims": sizes.keys(), "values": val.values, "unit": data.unit}
-    if data.variances is not None:
-        # Note here that all the events are correlated.
-        # If we later histogram the events with different edges than the original
-        # histogram, then neighboring bins will be correlated, and the error obtained
-        # will be too small. It is however not clear what can be done to improve this.
-        kwargs["variances"] = sc.broadcast(
-            sc.variances(data) / float(events_per_bin), sizes=sizes
-        ).values
-    new_data = sc.array(**kwargs)
-
-    new = sc.DataArray(data=new_data, coords=event_coords)
-    new = new.transpose((*midp_dims, *edge_dims, event_dim)).flatten(
-        dims=[*edge_dims, event_dim], to=event_dim
-    )
-    return new.assign_coords(
-        {dim: da.coords[dim].copy() for dim in midp_coord_names}
-    ).assign_masks({key: mask.copy() for key, mask in other_masks.items()})
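
For reference, the removed helper's contract, per its docstring, was that re-histogramming the generated events with the original bin edges reproduces the original counts. A sketch of that round trip (runs only against essreduce <= 25.5.2, where the module still exists):

    import scipp as sc
    from ess.reduce.time_of_flight.to_events import to_events  # <= 25.5.2 only

    hist = sc.DataArray(
        sc.array(dims=['tof'], values=[10.0, 20.0], unit='counts'),
        coords={'tof': sc.array(dims=['tof'], values=[0.0, 1.0, 2.0], unit='ms')},
    )
    events = to_events(hist, 'event')  # 500 uniform events per bin by default
    roundtrip = events.hist(tof=hist.coords['tof'])
    assert sc.allclose(roundtrip.data, hist.data)
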