essreduce 24.11.3__py3-none-any.whl → 25.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ess/reduce/__init__.py CHANGED
@@ -4,7 +4,7 @@
4
4
 
5
5
  import importlib.metadata
6
6
 
7
- from . import nexus, uncertainty
7
+ from . import nexus, uncertainty, time_of_flight
8
8
 
9
9
  try:
10
10
  __version__ = importlib.metadata.version("essreduce")
@@ -13,4 +13,4 @@ except importlib.metadata.PackageNotFoundError:
13
13
 
14
14
  del importlib
15
15
 
16
- __all__ = ['nexus', 'uncertainty']
16
+ __all__ = ["nexus", "uncertainty", "time_of_flight"]
ess/reduce/live/raw.py CHANGED
@@ -111,6 +111,19 @@ class LogicalView:
111
111
  Logical view of a multi-dimensional detector.
112
112
 
113
113
  Instances can be used as a "projection" function for a detector view.
114
+
115
+ Parameters
116
+ ----------
117
+ fold:
118
 + Dimensions to fold. This is useful if the raw data has a single dimension that
119
+ corresponds to multiple dimensions in the logical view.
120
+ transpose:
121
+ Dimensions to transpose. This is useful for reordering dimensions.
122
+ select:
123
+ Dimensions with associated index to select from the data. This extracts a slice
124
+ of the data for each given dimension.
125
+ flatten:
126
+ Dimensions to flatten.
114
127
  """
115
128
 
116
129
  fold: dict[str, int] | None = None
@@ -281,11 +294,13 @@ class RollingDetectorView(Detector):
281
294
  noise_replica_count = 0
282
295
  else:
283
296
  noise_replica_count = 4
284
- wf = GenericNeXusWorkflow()
297
+ wf = GenericNeXusWorkflow(run_types=[SampleRun], monitor_types=[])
285
298
  wf[RollingDetectorViewWindow] = window
286
299
  if isinstance(projection, LogicalView):
287
300
  wf[LogicalView] = projection
288
- wf[NeXusTransformation[snx.NXdetector, SampleRun]] = sc.scalar(1)
301
+ wf[NeXusTransformation[snx.NXdetector, SampleRun]] = NeXusTransformation[
302
+ snx.NXdetector, SampleRun
303
+ ](sc.scalar(1))
289
304
  wf.insert(RollingDetectorView.from_detector_and_logical_view)
290
305
  elif projection == 'cylinder_mantle_z':
291
306
  wf.insert(make_cylinder_mantle_coords)
@@ -3,6 +3,7 @@
3
3
 
4
4
  """NeXus loaders."""
5
5
 
6
+ import errno
6
7
  from collections.abc import Generator, Mapping
7
8
  from contextlib import AbstractContextManager, contextmanager, nullcontext
8
9
  from dataclasses import dataclass
@@ -91,6 +92,8 @@ def compute_component_position(dg: sc.DataGroup) -> sc.DataGroup:
91
92
  def _open_nexus_file(
92
93
  file_path: FilePath | NeXusFile | NeXusGroup,
93
94
  definitions: Mapping | None | NoNewDefinitionsType = NoNewDefinitions,
95
+ *,
96
+ locking: bool | None = None,
94
97
  ) -> AbstractContextManager[snx.Group]:
95
98
  if isinstance(file_path, getattr(NeXusGroup, '__supertype__', type(None))):
96
99
  if (
@@ -101,9 +104,42 @@ def _open_nexus_file(
101
104
  "Cannot apply new definitions to open nexus file or nexus group."
102
105
  )
103
106
  return nullcontext(file_path)
107
+
108
+ try:
109
+ return _open_nexus_file_from_path(file_path, definitions, locking=locking)
110
+ except OSError as err:
111
+ if err.errno == errno.EROFS:
112
+ # Failed to open because the filesystem is read-only.
113
+ # (According to https://www.ioplex.com/%7Emiallen/errcmpp.html
114
+ # this error code is universal.)
115
+ #
116
+ # On ESS machines, this happens for network filesystems of data that was
117
+ # ingested into SciCat, including raw data.
118
+ # In this case, it is safe to open the file without locking because:
119
+ # - For raw files, they were written on a separate machine and are synced
120
+ # with the one running reduction software. So there cannot be concurrent
121
+ # write and read accesses to the same file on the same filesystem.
122
+ # The ground truth on the filesystem used by the file writer is protected
123
+ # and cannot be corrupted by our reader.
124
+ # - For processed data, the file was copied to the read-only filesystem.
125
+ # So the copy we are opening was not written by HDF5 directly and thus
126
+ # locking has no effect anyway.
127
+ #
128
+ # When running on user machines, disabling locking can potentially corrupt
129
+ # files. But the risk is minimal because very few users will have read-only
130
+ # filesystems and do concurrent reads and writes.
131
+ return _open_nexus_file_from_path(file_path, definitions, locking=False)
132
+ raise
133
+
134
+
135
+ def _open_nexus_file_from_path(
136
+ file_path: FilePath,
137
+ definitions: Mapping | None | NoNewDefinitionsType,
138
+ **kwargs: object,
139
+ ) -> AbstractContextManager[snx.Group]:
104
140
  if definitions is NoNewDefinitions:
105
- return snx.File(file_path)
106
- return snx.File(file_path, definitions=definitions)
141
+ return snx.File(file_path, **kwargs)
142
+ return snx.File(file_path, definitions=definitions, **kwargs)
107
143
 
108
144
 
109
145
  @contextmanager
ess/reduce/nexus/types.py CHANGED
@@ -78,7 +78,17 @@ RunType = TypeVar(
78
78
  TransmissionRun[BackgroundRun],
79
79
  VanadiumRun,
80
80
  )
81
- """TypeVar used for specifying BackgroundRun, EmptyBeamRun or SampleRun"""
81
+ """TypeVar for specifying what run some data belongs to.
82
+
83
+ Possible values:
84
+
85
+ - :class:`BackgroundRun`
86
+ - :class:`EmptyBeamRun`
87
+ - :class:`SampleRun`
88
+ - :class:`TransmissionRun`
89
+ - :class:`VanadiumRun`
90
+ """
91
+
82
92
 
83
93
  # 1.2 Monitor types
84
94
  Monitor1 = NewType('Monitor1', int)
@@ -97,6 +107,16 @@ IncidentMonitor = NewType('IncidentMonitor', int)
97
107
  """Incident monitor"""
98
108
  TransmissionMonitor = NewType('TransmissionMonitor', int)
99
109
  """Transmission monitor"""
110
+ FrameMonitor0 = NewType('FrameMonitor', int)
111
+ """Frame monitor number 0"""
112
+ FrameMonitor1 = NewType('FrameMonitor', int)
113
+ """Frame monitor number 1"""
114
+ FrameMonitor2 = NewType('FrameMonitor', int)
115
+ """Frame monitor number 2"""
116
+ FrameMonitor3 = NewType('FrameMonitor', int)
117
+ """Frame monitor number 3"""
118
+ CaveMonitor = NewType('CaveMonitor', int)
119
+ """A monitor located in the instrument cave"""
100
120
  MonitorType = TypeVar(
101
121
  'MonitorType',
102
122
  Monitor1,
@@ -107,8 +127,31 @@ MonitorType = TypeVar(
107
127
  Monitor6,
108
128
  IncidentMonitor,
109
129
  TransmissionMonitor,
130
+ FrameMonitor0,
131
+ FrameMonitor1,
132
+ FrameMonitor2,
133
+ FrameMonitor3,
134
+ CaveMonitor,
110
135
  )
111
- """TypeVar used for specifying the monitor type such as Incident or Transmission"""
136
+ """TypeVar for specifying what monitor some data belongs to.
137
+
138
+ Possible values:
139
+
140
+ - :class:`Monitor1`
141
+ - :class:`Monitor2`
142
+ - :class:`Monitor3`
143
+ - :class:`Monitor4`
144
+ - :class:`Monitor5`
145
+ - :class:`Monitor6`
146
+ - :class:`IncidentMonitor`
147
+ - :class:`TransmissionMonitor`
148
+ - :class:`FrameMonitor0`
149
+ - :class:`FrameMonitor1`
150
+ - :class:`FrameMonitor2`
151
+ - :class:`FrameMonitor3`
152
+ - :class:`CaveMonitor`
153
+ """
154
+
112
155
 
113
156
  Component = TypeVar(
114
157
  'Component',
@@ -125,6 +168,11 @@ Component = TypeVar(
125
168
  Monitor6,
126
169
  IncidentMonitor,
127
170
  TransmissionMonitor,
171
+ FrameMonitor0,
172
+ FrameMonitor1,
173
+ FrameMonitor2,
174
+ FrameMonitor3,
175
+ CaveMonitor,
128
176
  )
129
177
  UniqueComponent = TypeVar('UniqueComponent', snx.NXsample, snx.NXsource)
130
178
  """Components that can be identified by their type as there will only be one."""
@@ -3,12 +3,12 @@
3
3
 
4
4
  """Workflow and workflow components for interacting with NeXus files."""
5
5
 
6
- from collections.abc import Sequence
6
+ from collections.abc import Iterable
7
7
  from copy import deepcopy
8
8
  from typing import Any
9
9
 
10
- import networkx as nx
11
10
  import sciline
11
+ import sciline.typing
12
12
  import scipp as sc
13
13
  import scippnexus as snx
14
14
  from scipp.constants import g
@@ -649,30 +649,39 @@ def LoadDetectorWorkflow() -> sciline.Pipeline:
649
649
 
650
650
  def GenericNeXusWorkflow(
651
651
  *,
652
- run_types: Sequence[sciline.typing.Key] | None = None,
653
- monitor_types: Sequence[sciline.typing.Key] | None = None,
652
+ run_types: Iterable[sciline.typing.Key] | None = None,
653
+ monitor_types: Iterable[sciline.typing.Key] | None = None,
654
654
  ) -> sciline.Pipeline:
655
655
  """
656
656
  Generic workflow for loading detector and monitor data from a NeXus file.
657
657
 
658
+ It is possible to limit which run types and monitor types
659
+ are supported by the returned workflow.
660
+ This is useful to reduce the size of the workflow and make it easier to inspect.
661
+ Make sure to add *all* required run types and monitor types when using this feature.
662
+
663
+ Attention
664
+ ---------
665
+ Filtering by run type and monitor type does not work with nested type vars.
666
+ E.g., if you have a type like ``Outer[Inner[RunType]]``, this type and its
667
+ provider will be removed.
668
+
658
669
  Parameters
659
670
  ----------
660
671
  run_types:
661
672
  List of run types to include in the workflow. If not provided, all run types
662
- are included. It is recommended to specify run types to avoid creating very
663
- large workflows.
673
+ are included.
674
+ Must be a possible value of :class:`ess.reduce.nexus.types.RunType`.
664
675
  monitor_types:
665
676
  List of monitor types to include in the workflow. If not provided, all monitor
666
- types are included. It is recommended to specify monitor types to avoid creating
667
- very large workflows.
677
+ types are included.
678
+ Must be a possible value of :class:`ess.reduce.nexus.types.MonitorType`.
668
679
 
669
680
  Returns
670
681
  -------
671
682
  :
672
683
  The workflow.
673
684
  """
674
- if monitor_types is not None and run_types is None:
675
- raise ValueError("run_types must be specified if monitor_types is specified")
676
685
  wf = sciline.Pipeline(
677
686
  (
678
687
  *_common_providers,
@@ -685,16 +694,34 @@ def GenericNeXusWorkflow(
685
694
  wf[DetectorBankSizes] = DetectorBankSizes({})
686
695
  wf[PreopenNeXusFile] = PreopenNeXusFile(False)
687
696
 
688
- g = wf.underlying_graph
689
- ancestors = set()
690
- # DetectorData and MonitorData are the "final" outputs, so finding and removing all
691
- # their ancestors is what we need to strip unused run and monitor types.
692
- for rt in run_types or ():
693
- ancestors |= nx.ancestors(g, DetectorData[rt])
694
- ancestors.add(DetectorData[rt])
695
- for mt in monitor_types or ():
696
- ancestors |= nx.ancestors(g, MonitorData[rt, mt])
697
- ancestors.add(MonitorData[rt, mt])
698
- if run_types is not None:
699
- g.remove_nodes_from(set(g.nodes) - ancestors)
697
+ if run_types is not None or monitor_types is not None:
698
+ _prune_type_vars(wf, run_types=run_types, monitor_types=monitor_types)
699
+
700
700
  return wf
701
+
702
+
703
+ def _prune_type_vars(
704
+ workflow: sciline.Pipeline,
705
+ *,
706
+ run_types: Iterable[sciline.typing.Key] | None,
707
+ monitor_types: Iterable[sciline.typing.Key] | None,
708
+ ) -> None:
709
+ # Remove all nodes that use a run type or monitor types that is
710
+ # not listed in the function arguments.
711
+ excluded_run_types = _excluded_type_args(RunType, run_types)
712
+ excluded_monitor_types = _excluded_type_args(MonitorType, monitor_types)
713
+ excluded_types = excluded_run_types | excluded_monitor_types
714
+
715
+ graph = workflow.underlying_graph
716
+ to_remove = [
717
+ node for node in graph if excluded_types & set(getattr(node, "__args__", set()))
718
+ ]
719
+ graph.remove_nodes_from(to_remove)
720
+
721
+
722
+ def _excluded_type_args(
723
+ type_var: Any, keep: Iterable[sciline.typing.Key] | None
724
+ ) -> set[sciline.typing.Key]:
725
+ if keep is None:
726
+ return set()
727
+ return set(type_var.__constraints__) - set(keep)
ess/reduce/parameter.py CHANGED
@@ -71,7 +71,12 @@ class ParamWithOptions(Parameter[T]):
71
71
 
72
72
  @classmethod
73
73
  def from_enum(cls: type[C], t: type[T], default: T) -> C:
74
- return cls(name=str(t), description=t.__doc__, options=t, default=default)
74
+ return cls(
75
+ name=t.__name__,
76
+ description=t.__doc__,
77
+ options=t.__members__,
78
+ default=default,
79
+ )
75
80
 
76
81
 
77
82
  @dataclass
@@ -0,0 +1,59 @@
1
+ # SPDX-License-Identifier: BSD-3-Clause
2
 + # Copyright (c) 2025 Scipp contributors (https://github.com/scipp)
3
+
4
+ """
5
+ Utilities for computing real neutron time-of-flight from chopper settings and
6
+ neutron time-of-arrival at the detectors.
7
+ """
8
+
9
+ from .toa_to_tof import default_parameters, resample_tof_data, providers, TofWorkflow
10
+ from .simulation import simulate_beamline
11
+ from .types import (
12
+ DistanceResolution,
13
+ FrameFoldedTimeOfArrival,
14
+ FramePeriod,
15
+ LookupTableRelativeErrorThreshold,
16
+ Ltotal,
17
+ LtotalRange,
18
+ MaskedTimeOfFlightLookupTable,
19
+ PivotTimeAtDetector,
20
+ PulsePeriod,
21
+ PulseStride,
22
+ PulseStrideOffset,
23
+ RawData,
24
+ ResampledTofData,
25
+ SimulationResults,
26
+ TimeOfArrivalMinusPivotTimeModuloPeriod,
27
+ TimeOfFlightLookupTable,
28
+ TofData,
29
+ UnwrappedTimeOfArrival,
30
+ UnwrappedTimeOfArrivalMinusPivotTime,
31
+ )
32
+
33
+
34
+ __all__ = [
35
+ "DistanceResolution",
36
+ "FrameFoldedTimeOfArrival",
37
+ "FramePeriod",
38
+ "LookupTableRelativeErrorThreshold",
39
+ "Ltotal",
40
+ "LtotalRange",
41
+ "MaskedTimeOfFlightLookupTable",
42
+ "PivotTimeAtDetector",
43
+ "PulsePeriod",
44
+ "PulseStride",
45
+ "PulseStrideOffset",
46
+ "RawData",
47
+ "ResampledTofData",
48
+ "SimulationResults",
49
+ "TimeOfArrivalMinusPivotTimeModuloPeriod",
50
+ "TimeOfFlightLookupTable",
51
+ "TofData",
52
+ "TofWorkflow",
53
+ "UnwrappedTimeOfArrival",
54
+ "UnwrappedTimeOfArrivalMinusPivotTime",
55
+ "default_parameters",
56
+ "providers",
57
+ "resample_tof_data",
58
+ "simulate_beamline",
59
+ ]
@@ -0,0 +1,240 @@
1
+ # SPDX-License-Identifier: BSD-3-Clause
2
+ # Copyright (c) 2025 Scipp contributors (https://github.com/scipp)
3
+ """
4
+ A fake time-of-flight neutron beamline for documentation and testing.
5
+
6
+ This provides detector event data in a structure as typically provided in a NeXus file,
7
+ with event_time_offset and event_time_zero information.
8
+ """
9
+
10
+ from collections.abc import Callable
11
+
12
+ import numpy as np
13
+ import scipp as sc
14
+ from scippneutron.chopper import DiskChopper
15
+
16
+
17
+ class FakeBeamline:
18
+ def __init__(
19
+ self,
20
+ choppers: dict[str, DiskChopper],
21
+ monitors: dict[str, sc.Variable],
22
+ run_length: sc.Variable,
23
+ events_per_pulse: int = 200000,
24
+ source: Callable | None = None,
25
+ ):
26
+ import math
27
+
28
+ import tof as tof_pkg
29
+ from tof.facilities.ess_pulse import pulse
30
+
31
+ self.frequency = pulse.frequency
32
+ self.npulses = math.ceil((run_length * self.frequency).to(unit="").value)
33
+ self.events_per_pulse = events_per_pulse
34
+
35
+ # Create a source
36
+ if source is None:
37
+ self.source = tof_pkg.Source(
38
+ facility="ess", neutrons=self.events_per_pulse, pulses=self.npulses
39
+ )
40
+ else:
41
+ self.source = source(pulses=self.npulses)
42
+
43
+ # Convert the choppers to tof.Chopper
44
+ self.choppers = [
45
+ tof_pkg.Chopper(
46
+ frequency=abs(ch.frequency),
47
+ direction=tof_pkg.AntiClockwise
48
+ if (ch.frequency.value > 0.0)
49
+ else tof_pkg.Clockwise,
50
+ open=ch.slit_begin,
51
+ close=ch.slit_end,
52
+ phase=abs(ch.phase),
53
+ distance=ch.axle_position.fields.z,
54
+ name=name,
55
+ )
56
+ for name, ch in choppers.items()
57
+ ]
58
+
59
+ # Add detectors
60
+ self.monitors = [
61
+ tof_pkg.Detector(distance=distance, name=key)
62
+ for key, distance in monitors.items()
63
+ ]
64
+
65
+ # Propagate the neutrons
66
+ self.model = tof_pkg.Model(
67
+ source=self.source, choppers=self.choppers, detectors=self.monitors
68
+ )
69
+ self.model_result = self.model.run()
70
+
71
+ def get_monitor(self, name: str) -> sc.DataGroup:
72
+ # Create some fake pulse time zero
73
+ start = sc.datetime("2024-01-01T12:00:00.000000")
74
+ period = sc.reciprocal(self.frequency)
75
+
76
+ detector = self.model_result.detectors[name]
77
+ raw_data = detector.data.flatten(to="event")
78
+ # Select only the neutrons that make it to the detector
79
+ raw_data = raw_data[~raw_data.masks["blocked_by_others"]].copy()
80
+ raw_data.coords["Ltotal"] = detector.distance
81
+
82
+ # Format the data in a way that resembles data loaded from NeXus
83
+ event_data = raw_data.copy(deep=False)
84
+ dt = period.to(unit="us")
85
+ event_time_zero = (dt * (event_data.coords["toa"] // dt)).to(dtype=int) + start
86
+ raw_data.coords["event_time_zero"] = event_time_zero
87
+ event_data.coords["event_time_zero"] = event_time_zero
88
+ event_data.coords["event_time_offset"] = (
89
+ event_data.coords.pop("toa").to(unit="s") % period
90
+ )
91
+ del event_data.coords["tof"]
92
+ del event_data.coords["speed"]
93
+ del event_data.coords["time"]
94
+ del event_data.coords["wavelength"]
95
+
96
+ return (
97
+ event_data.group("event_time_zero").rename_dims(event_time_zero="pulse"),
98
+ raw_data.group("event_time_zero").rename_dims(event_time_zero="pulse"),
99
+ )
100
+
101
+
102
+ wfm1_chopper = DiskChopper(
103
+ frequency=sc.scalar(-70.0, unit="Hz"),
104
+ beam_position=sc.scalar(0.0, unit="deg"),
105
+ phase=sc.scalar(-47.10, unit="deg"),
106
+ axle_position=sc.vector(value=[0, 0, 6.6], unit="m"),
107
+ slit_begin=sc.array(
108
+ dims=["cutout"],
109
+ values=np.array([83.71, 140.49, 193.26, 242.32, 287.91, 330.3]) + 15.0,
110
+ unit="deg",
111
+ ),
112
+ slit_end=sc.array(
113
+ dims=["cutout"],
114
+ values=np.array([94.7, 155.79, 212.56, 265.33, 314.37, 360.0]) + 15.0,
115
+ unit="deg",
116
+ ),
117
+ slit_height=sc.scalar(10.0, unit="cm"),
118
+ radius=sc.scalar(30.0, unit="cm"),
119
+ )
120
+
121
+ wfm2_chopper = DiskChopper(
122
+ frequency=sc.scalar(-70.0, unit="Hz"),
123
+ beam_position=sc.scalar(0.0, unit="deg"),
124
+ phase=sc.scalar(-76.76, unit="deg"),
125
+ axle_position=sc.vector(value=[0, 0, 7.1], unit="m"),
126
+ slit_begin=sc.array(
127
+ dims=["cutout"],
128
+ values=np.array([65.04, 126.1, 182.88, 235.67, 284.73, 330.32]) + 15.0,
129
+ unit="deg",
130
+ ),
131
+ slit_end=sc.array(
132
+ dims=["cutout"],
133
+ values=np.array([76.03, 141.4, 202.18, 254.97, 307.74, 360.0]) + 15.0,
134
+ unit="deg",
135
+ ),
136
+ slit_height=sc.scalar(10.0, unit="cm"),
137
+ radius=sc.scalar(30.0, unit="cm"),
138
+ )
139
+
140
+ foc1_chopper = DiskChopper(
141
+ frequency=sc.scalar(-56.0, unit="Hz"),
142
+ beam_position=sc.scalar(0.0, unit="deg"),
143
+ phase=sc.scalar(-62.40, unit="deg"),
144
+ axle_position=sc.vector(value=[0, 0, 8.8], unit="m"),
145
+ slit_begin=sc.array(
146
+ dims=["cutout"],
147
+ values=np.array([74.6, 139.6, 194.3, 245.3, 294.8, 347.2]),
148
+ unit="deg",
149
+ ),
150
+ slit_end=sc.array(
151
+ dims=["cutout"],
152
+ values=np.array([95.2, 162.8, 216.1, 263.1, 310.5, 371.6]),
153
+ unit="deg",
154
+ ),
155
+ slit_height=sc.scalar(10.0, unit="cm"),
156
+ radius=sc.scalar(30.0, unit="cm"),
157
+ )
158
+
159
+ foc2_chopper = DiskChopper(
160
+ frequency=sc.scalar(-28.0, unit="Hz"),
161
+ beam_position=sc.scalar(0.0, unit="deg"),
162
+ phase=sc.scalar(-12.27, unit="deg"),
163
+ axle_position=sc.vector(value=[0, 0, 15.9], unit="m"),
164
+ slit_begin=sc.array(
165
+ dims=["cutout"],
166
+ values=np.array([98.0, 154.0, 206.8, 255.0, 299.0, 344.65]),
167
+ unit="deg",
168
+ ),
169
+ slit_end=sc.array(
170
+ dims=["cutout"],
171
+ values=np.array([134.6, 190.06, 237.01, 280.88, 323.56, 373.76]),
172
+ unit="deg",
173
+ ),
174
+ slit_height=sc.scalar(10.0, unit="cm"),
175
+ radius=sc.scalar(30.0, unit="cm"),
176
+ )
177
+
178
+ pol_chopper = DiskChopper(
179
+ frequency=sc.scalar(-14.0, unit="Hz"),
180
+ beam_position=sc.scalar(0.0, unit="deg"),
181
+ phase=sc.scalar(0.0, unit="deg"),
182
+ axle_position=sc.vector(value=[0, 0, 17.0], unit="m"),
183
+ slit_begin=sc.array(
184
+ dims=["cutout"],
185
+ values=np.array([40.0]),
186
+ unit="deg",
187
+ ),
188
+ slit_end=sc.array(
189
+ dims=["cutout"],
190
+ values=np.array([240.0]),
191
+ unit="deg",
192
+ ),
193
+ slit_height=sc.scalar(10.0, unit="cm"),
194
+ radius=sc.scalar(30.0, unit="cm"),
195
+ )
196
+
197
+ pulse_skipping = DiskChopper(
198
+ frequency=sc.scalar(-7.0, unit="Hz"),
199
+ beam_position=sc.scalar(0.0, unit="deg"),
200
+ phase=sc.scalar(0.0, unit="deg"),
201
+ axle_position=sc.vector(value=[0, 0, 30.0], unit="m"),
202
+ slit_begin=sc.array(
203
+ dims=["cutout"],
204
+ values=np.array([40.0]),
205
+ unit="deg",
206
+ ),
207
+ slit_end=sc.array(
208
+ dims=["cutout"],
209
+ values=np.array([140.0]),
210
+ unit="deg",
211
+ ),
212
+ slit_height=sc.scalar(10.0, unit="cm"),
213
+ radius=sc.scalar(30.0, unit="cm"),
214
+ )
215
+
216
+
217
+ def wfm_choppers():
218
+ return {
219
+ "wfm1": wfm1_chopper,
220
+ "wfm2": wfm2_chopper,
221
+ "foc1": foc1_chopper,
222
+ "foc2": foc2_chopper,
223
+ "pol": pol_chopper,
224
+ }
225
+
226
+
227
+ def psc_choppers():
228
+ return {
229
+ name: DiskChopper(
230
+ frequency=ch.frequency,
231
+ beam_position=ch.beam_position,
232
+ phase=ch.phase,
233
+ axle_position=ch.axle_position,
234
+ slit_begin=ch.slit_begin[0:1],
235
+ slit_end=ch.slit_end[0:1],
236
+ slit_height=ch.slit_height[0:1],
237
+ radius=ch.radius,
238
+ )
239
+ for name, ch in wfm_choppers().items()
240
+ }
@@ -0,0 +1,74 @@
1
+ # SPDX-License-Identifier: BSD-3-Clause
2
+ # Copyright (c) 2025 Scipp contributors (https://github.com/scipp)
3
+ from collections.abc import Mapping
4
+
5
+ import scipp as sc
6
+ from scippneutron.chopper import DiskChopper
7
+
8
+ from .types import SimulationResults
9
+
10
+
11
+ def simulate_beamline(
12
+ choppers: Mapping[str, DiskChopper],
13
+ neutrons: int = 1_000_000,
14
+ seed: int | None = None,
15
+ facility: str = 'ess',
16
+ ) -> SimulationResults:
17
+ """
18
+ Simulate a pulse of neutrons propagating through a chopper cascade using the
19
+ ``tof`` package (https://tof.readthedocs.io).
20
+
21
+ Parameters
22
+ ----------
23
+ choppers:
24
+ A dict of DiskChopper objects representing the choppers in the beamline. See
25
+ https://scipp.github.io/scippneutron/user-guide/chopper/processing-nexus-choppers.html#Build-DiskChopper
26
+ for more information.
27
+ neutrons:
28
+ Number of neutrons to simulate.
29
+ seed:
30
+ Seed for the random number generator used in the simulation.
31
+ facility:
32
+ Facility where the experiment is performed.
33
+ """
34
+ import tof
35
+
36
+ tof_choppers = [
37
+ tof.Chopper(
38
+ frequency=abs(ch.frequency),
39
+ direction=tof.AntiClockwise
40
+ if (ch.frequency.value > 0.0)
41
+ else tof.Clockwise,
42
+ open=ch.slit_begin,
43
+ close=ch.slit_end,
44
+ phase=abs(ch.phase),
45
+ distance=ch.axle_position.fields.z,
46
+ name=name,
47
+ )
48
+ for name, ch in choppers.items()
49
+ ]
50
+ source = tof.Source(facility=facility, neutrons=neutrons, seed=seed)
51
+ if not tof_choppers:
52
+ events = source.data.squeeze()
53
+ return SimulationResults(
54
+ time_of_arrival=events.coords["time"],
55
+ speed=events.coords["speed"],
56
+ wavelength=events.coords["wavelength"],
57
+ weight=events.data,
58
+ distance=0.0 * sc.units.m,
59
+ )
60
+ model = tof.Model(source=source, choppers=tof_choppers)
61
+ results = model.run()
62
+ # Find name of the furthest chopper in tof_choppers
63
+ furthest_chopper = max(tof_choppers, key=lambda c: c.distance)
64
+ events = results[furthest_chopper.name].data.squeeze()
65
+ events = events[
66
+ ~(events.masks["blocked_by_others"] | events.masks["blocked_by_me"])
67
+ ]
68
+ return SimulationResults(
69
+ time_of_arrival=events.coords["toa"],
70
+ speed=events.coords["speed"],
71
+ wavelength=events.coords["wavelength"],
72
+ weight=events.data,
73
+ distance=furthest_chopper.distance,
74
+ )