essreduce 25.1.0-py3-none-any.whl → 25.2.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -21,6 +21,7 @@ class FakeBeamline:
         monitors: dict[str, sc.Variable],
         run_length: sc.Variable,
         events_per_pulse: int = 200000,
+        seed: int | None = None,
         source: Callable | None = None,
     ):
         import math
@@ -35,7 +36,10 @@ class FakeBeamline:
         # Create a source
         if source is None:
             self.source = tof_pkg.Source(
-                facility="ess", neutrons=self.events_per_pulse, pulses=self.npulses
+                facility="ess",
+                neutrons=self.events_per_pulse,
+                pulses=self.npulses,
+                seed=seed,
             )
         else:
             self.source = source(pulses=self.npulses)
@@ -69,34 +73,10 @@ class FakeBeamline:
         self.model_result = self.model.run()
 
     def get_monitor(self, name: str) -> sc.DataGroup:
-        # Create some fake pulse time zero
-        start = sc.datetime("2024-01-01T12:00:00.000000")
-        period = sc.reciprocal(self.frequency)
-
-        detector = self.model_result.detectors[name]
-        raw_data = detector.data.flatten(to="event")
-        # Select only the neutrons that make it to the detector
+        nx_event_data = self.model_result.to_nxevent_data(name)
+        raw_data = self.model_result.detectors[name].data.flatten(to="event")
         raw_data = raw_data[~raw_data.masks["blocked_by_others"]].copy()
-        raw_data.coords["Ltotal"] = detector.distance
-
-        # Format the data in a way that resembles data loaded from NeXus
-        event_data = raw_data.copy(deep=False)
-        dt = period.to(unit="us")
-        event_time_zero = (dt * (event_data.coords["toa"] // dt)).to(dtype=int) + start
-        raw_data.coords["event_time_zero"] = event_time_zero
-        event_data.coords["event_time_zero"] = event_time_zero
-        event_data.coords["event_time_offset"] = (
-            event_data.coords.pop("toa").to(unit="s") % period
-        )
-        del event_data.coords["tof"]
-        del event_data.coords["speed"]
-        del event_data.coords["time"]
-        del event_data.coords["wavelength"]
-
-        return (
-            event_data.group("event_time_zero").rename_dims(event_time_zero="pulse"),
-            raw_data.group("event_time_zero").rename_dims(event_time_zero="pulse"),
-        )
+        return nx_event_data, raw_data
 
 
 wfm1_chopper = DiskChopper(
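
Note: the hand-rolled NeXus-style event formatting removed above is replaced by tof's to_nxevent_data helper, and get_monitor now returns the NeXus-shaped events together with the unwrapped raw events. A minimal usage sketch; the constructor arguments below are hypothetical placeholders, not the full FakeBeamline signature:

    import scipp as sc

    fb = FakeBeamline(
        monitors={"monitor1": sc.scalar(30.0, unit="m")},  # hypothetical monitor
        run_length=sc.scalar(1.0, unit="s"),
        events_per_pulse=100_000,
        seed=42,  # new in 25.2.0: makes the fake source data reproducible
    )
    nx_event_data, raw_data = fb.get_monitor("monitor1")
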
@@ -194,25 +174,6 @@ pol_chopper = DiskChopper(
     radius=sc.scalar(30.0, unit="cm"),
 )
 
-pulse_skipping = DiskChopper(
-    frequency=sc.scalar(-7.0, unit="Hz"),
-    beam_position=sc.scalar(0.0, unit="deg"),
-    phase=sc.scalar(0.0, unit="deg"),
-    axle_position=sc.vector(value=[0, 0, 30.0], unit="m"),
-    slit_begin=sc.array(
-        dims=["cutout"],
-        values=np.array([40.0]),
-        unit="deg",
-    ),
-    slit_end=sc.array(
-        dims=["cutout"],
-        values=np.array([140.0]),
-        unit="deg",
-    ),
-    slit_height=sc.scalar(10.0, unit="cm"),
-    radius=sc.scalar(30.0, unit="cm"),
-)
-
 
 def wfm_choppers():
     return {
@@ -238,3 +199,24 @@ def psc_choppers():
         )
         for name, ch in wfm_choppers().items()
     }
+
+
+def pulse_skipping_chopper():
+    return DiskChopper(
+        frequency=sc.scalar(-7.0, unit="Hz"),
+        beam_position=sc.scalar(0.0, unit="deg"),
+        phase=sc.scalar(0.0, unit="deg"),
+        axle_position=sc.vector(value=[0, 0, 30.0], unit="m"),
+        slit_begin=sc.array(
+            dims=["cutout"],
+            values=np.array([40.0]),
+            unit="deg",
+        ),
+        slit_end=sc.array(
+            dims=["cutout"],
+            values=np.array([140.0]),
+            unit="deg",
+        ),
+        slit_height=sc.scalar(10.0, unit="cm"),
+        radius=sc.scalar(30.0, unit="cm"),
+    )
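
Note: the module-level pulse_skipping DiskChopper constant becomes a pulse_skipping_chopper() factory, matching the existing wfm_choppers()/psc_choppers() style so that each caller builds a fresh instance instead of sharing one module-level object. Sketch of the migration for downstream code:

    # 25.1.0: shared module-level instance
    chopper = pulse_skipping
    # 25.2.0: each call returns a new DiskChopper
    chopper = pulse_skipping_chopper()
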
@@ -11,6 +11,7 @@ from .types import SimulationResults
 def simulate_beamline(
     choppers: Mapping[str, DiskChopper],
     neutrons: int = 1_000_000,
+    pulses: int = 1,
     seed: int | None = None,
     facility: str = 'ess',
 ) -> SimulationResults:
@@ -26,6 +27,8 @@ def simulate_beamline(
         for more information.
     neutrons:
         Number of neutrons to simulate.
+    pulses:
+        Number of pulses to simulate.
     seed:
         Seed for the random number generator used in the simulation.
     facility:
@@ -47,9 +50,9 @@ def simulate_beamline(
         )
         for name, ch in choppers.items()
     ]
-    source = tof.Source(facility=facility, neutrons=neutrons, seed=seed)
+    source = tof.Source(facility=facility, neutrons=neutrons, pulses=pulses, seed=seed)
     if not tof_choppers:
-        events = source.data.squeeze()
+        events = source.data.squeeze().flatten(to='event')
         return SimulationResults(
             time_of_arrival=events.coords["time"],
             speed=events.coords["speed"],
@@ -61,7 +64,7 @@ def simulate_beamline(
     results = model.run()
     # Find name of the furthest chopper in tof_choppers
    furthest_chopper = max(tof_choppers, key=lambda c: c.distance)
-    events = results[furthest_chopper.name].data.squeeze()
+    events = results[furthest_chopper.name].data.squeeze().flatten(to='event')
     events = events[
         ~(events.masks["blocked_by_others"] | events.masks["blocked_by_me"])
     ]
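
Note: simulate_beamline gains a pulses argument that is forwarded to tof.Source; because extra pulses add a pulse dimension to the event data, the results are now flattened to a single 'event' dimension before the masks are applied. A hedged usage sketch, where choppers stands in for a Mapping[str, DiskChopper] as in the signature:

    results = simulate_beamline(
        choppers=choppers,
        neutrons=500_000,
        pulses=2,  # new in 25.2.0; the default of 1 keeps the old behaviour
        seed=1234,
        facility='ess',
    )
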
@@ -34,30 +34,37 @@ def to_events(
     rng = np.random.default_rng()
     event_coords = {}
     edge_dims = []
-    midp_dims = []
+    midp_dims = set()
+    midp_coord_names = []
     # Separate bin-edge and midpoints coords
-    for dim in da.dims:
-        if da.coords.is_edges(dim):
-            edge_dims.append(dim)
+    for name in da.coords:
+        dims = da.coords[name].dims
+        is_edges = False if not dims else da.coords.is_edges(name)
+        if is_edges:
+            if name in dims:
+                edge_dims.append(name)
         else:
-            midp_dims.append(dim)
+            midp_coord_names.append(name)
+            midp_dims.update(set(dims))
 
-    edge_sizes = {dim: da.sizes[dim] for dim in edge_dims}
+    edge_sizes = {dim: da.sizes[da.coords[dim].dim] for dim in edge_dims}
     for dim in edge_dims:
         coord = da.coords[dim]
-        low = sc.broadcast(coord[dim, :-1], sizes=edge_sizes).values
-        high = sc.broadcast(coord[dim, 1:], sizes=edge_sizes).values
+        left = sc.broadcast(coord[dim, :-1], sizes=edge_sizes).values
+        right = sc.broadcast(coord[dim, 1:], sizes=edge_sizes).values
 
         # The numpy.random.uniform function below does not support NaNs, so we need to
         # replace them with zeros, and then replace them back after the random numbers
         # have been generated.
-        nans = np.isnan(low) | np.isnan(high)
-        low = np.where(nans, 0.0, low)
-        high = np.where(nans, 0.0, high)
+        nans = np.isnan(left) | np.isnan(right)
+        left = np.where(nans, 0.0, left)
+        right = np.where(nans, 0.0, right)
+        # Ensure left <= right
+        left, right = np.minimum(left, right), np.maximum(left, right)
 
         # In each bin, we generate a number of events with a uniform distribution.
         events = rng.uniform(
-            low, high, size=(events_per_bin, *list(edge_sizes.values()))
+            left, right, size=(events_per_bin, *list(edge_sizes.values()))
         )
         events[..., nans] = np.nan
         event_coords[dim] = sc.array(
@@ -77,20 +84,20 @@ def to_events(
     data = da.data
     if event_masks:
         inv_mask = (~reduce(lambda a, b: a | b, event_masks.values())).to(dtype=int)
-        inv_mask.unit = ''
+        inv_mask.unit = ""
         data = data * inv_mask
 
     # Create the data counts, which are the original counts divided by the number of
     # events per bin
     sizes = {event_dim: events_per_bin} | da.sizes
     val = sc.broadcast(sc.values(data) / float(events_per_bin), sizes=sizes)
-    kwargs = {'dims': sizes.keys(), 'values': val.values, 'unit': data.unit}
+    kwargs = {"dims": sizes.keys(), "values": val.values, "unit": data.unit}
     if data.variances is not None:
         # Note here that all the events are correlated.
         # If we later histogram the events with different edges than the original
         # histogram, then neighboring bins will be correlated, and the error obtained
         # will be too small. It is however not clear what can be done to improve this.
-        kwargs['variances'] = sc.broadcast(
+        kwargs["variances"] = sc.broadcast(
             sc.variances(data) / float(events_per_bin), sizes=sizes
         ).values
     new_data = sc.array(**kwargs)
@@ -100,5 +107,5 @@ def to_events(
         dims=[*edge_dims, event_dim], to=event_dim
     )
     return new.assign_coords(
-        {dim: da.coords[dim].copy() for dim in midp_dims}
+        {dim: da.coords[dim].copy() for dim in midp_coord_names}
     ).assign_masks({key: mask.copy() for key, mask in other_masks.items()})
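
Note: to_events now iterates over da.coords rather than da.dims, so non-dimension and zero-dimensional midpoint coordinates are classified and preserved, and a new element-wise min/max guard enforces the left <= right ordering that the uniform sampler expects, which makes descending bin edges safe. A standalone sketch of that guard, using numpy only and independent of essreduce:

    import numpy as np

    rng = np.random.default_rng(0)
    left = np.array([2.0, 3.0])   # descending bin edges: left > right
    right = np.array([1.0, 2.0])
    # Swap element-wise so that left <= right before sampling.
    left, right = np.minimum(left, right), np.maximum(left, right)
    samples = rng.uniform(left, right, size=(5, 2))  # 5 events per bin, 2 bins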