ophyd-async 0.10.1__py3-none-any.whl → 0.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. ophyd_async/_version.py +2 -2
  2. ophyd_async/core/__init__.py +12 -1
  3. ophyd_async/core/_derived_signal.py +68 -22
  4. ophyd_async/core/_derived_signal_backend.py +46 -24
  5. ophyd_async/core/_detector.py +3 -3
  6. ophyd_async/core/_device.py +24 -16
  7. ophyd_async/core/_flyer.py +35 -1
  8. ophyd_async/core/_hdf_dataset.py +11 -10
  9. ophyd_async/core/_signal.py +43 -28
  10. ophyd_async/core/_table.py +3 -3
  11. ophyd_async/core/_utils.py +25 -0
  12. ophyd_async/core/_yaml_settings.py +3 -3
  13. ophyd_async/epics/adandor/__init__.py +7 -1
  14. ophyd_async/epics/adandor/_andor_controller.py +5 -8
  15. ophyd_async/epics/adandor/_andor_io.py +12 -19
  16. ophyd_async/epics/adcore/_hdf_writer.py +12 -19
  17. ophyd_async/epics/eiger/_odin_io.py +4 -2
  18. ophyd_async/epics/motor.py +46 -96
  19. ophyd_async/epics/pmac/__init__.py +3 -0
  20. ophyd_async/epics/pmac/_pmac_io.py +100 -0
  21. ophyd_async/fastcs/eiger/__init__.py +1 -2
  22. ophyd_async/fastcs/eiger/_eiger.py +3 -9
  23. ophyd_async/fastcs/panda/_trigger.py +4 -4
  24. ophyd_async/fastcs/panda/_writer.py +15 -13
  25. ophyd_async/sim/__init__.py +1 -2
  26. ophyd_async/sim/_blob_detector_writer.py +6 -12
  27. ophyd_async/sim/_mirror_horizontal.py +3 -2
  28. ophyd_async/sim/_mirror_vertical.py +1 -0
  29. ophyd_async/sim/_motor.py +13 -43
  30. {ophyd_async-0.10.1.dist-info → ophyd_async-0.11.dist-info}/METADATA +2 -2
  31. {ophyd_async-0.10.1.dist-info → ophyd_async-0.11.dist-info}/RECORD +34 -32
  32. {ophyd_async-0.10.1.dist-info → ophyd_async-0.11.dist-info}/WHEEL +0 -0
  33. {ophyd_async-0.10.1.dist-info → ophyd_async-0.11.dist-info}/licenses/LICENSE +0 -0
  34. {ophyd_async-0.10.1.dist-info → ophyd_async-0.11.dist-info}/top_level.txt +0 -0
ophyd_async/_version.py CHANGED
@@ -17,5 +17,5 @@ __version__: str
 __version_tuple__: VERSION_TUPLE
 version_tuple: VERSION_TUPLE
 
-__version__ = version = '0.10.1'
-__version_tuple__ = version_tuple = (0, 10, 1)
+__version__ = version = '0.11'
+__version_tuple__ = version_tuple = (0, 11)
ophyd_async/core/__init__.py CHANGED
@@ -16,7 +16,7 @@ from ._detector import (
 )
 from ._device import Device, DeviceConnector, DeviceVector, init_devices
 from ._device_filler import DeviceFiller
-from ._flyer import FlyerController, StandardFlyer
+from ._flyer import FlyerController, FlyMotorInfo, StandardFlyer
 from ._hdf_dataset import HDFDatasetDescription, HDFDocumentComposer
 from ._log import config_ophyd_async_logging
 from ._mock_signal_backend import MockSignalBackend
@@ -56,11 +56,14 @@ from ._signal import (
     soft_signal_rw,
     wait_for_value,
     walk_config_signals,
+    walk_devices,
     walk_rw_signals,
+    walk_signal_sources,
 )
 from ._signal_backend import (
     Array1D,
     DTypeScalar_co,
+    Primitive,
     SignalBackend,
     SignalDatatype,
     SignalDatatypeT,
@@ -75,6 +78,7 @@ from ._utils import (
     DEFAULT_TIMEOUT,
     CalculatableTimeout,
     Callback,
+    ConfinedModel,
     EnumTypes,
     LazyMock,
     NotConnected,
@@ -83,6 +87,7 @@ from ._utils import (
     SubsetEnum,
     SupersetEnum,
     WatcherUpdate,
+    error_if_none,
     gather_dict,
     get_dtype,
     get_enum_cls,
@@ -128,6 +133,7 @@ __all__ = [
     "EnumTypes",
     "Table",
     "SignalMetadata",
+    "Primitive",
     # Soft signal
     "SoftSignalBackend",
     "soft_signal_r_and_setter",
@@ -143,6 +149,8 @@ __all__ = [
     "set_and_wait_for_other_value",
     "walk_rw_signals",
     "walk_config_signals",
+    "walk_devices",
+    "walk_signal_sources",
     # Readable
     "StandardReadable",
     "StandardReadableFormat",
@@ -168,6 +176,7 @@ __all__ = [
     "HDFDocumentComposer",
     # Flyer
     "StandardFlyer",
+    "FlyMotorInfo",
     "FlyerController",
     # Settings
     "Settings",
@@ -179,8 +188,10 @@ __all__ = [
     "CalculatableTimeout",
     "DEFAULT_TIMEOUT",
     "Callback",
+    "ConfinedModel",
     "NotConnected",
     "Reference",
+    "error_if_none",
     "gather_dict",
     "get_dtype",
     "get_enum_cls",
ophyd_async/core/_derived_signal.py CHANGED
@@ -11,7 +11,7 @@ from ._derived_signal_backend import (
 )
 from ._device import Device
 from ._signal import Signal, SignalR, SignalRW, SignalT, SignalW
-from ._signal_backend import SignalDatatypeT
+from ._signal_backend import Primitive, SignalDatatypeT
 
 
 class DerivedSignalFactory(Generic[TransformT]):
@@ -23,18 +23,31 @@ class DerivedSignalFactory(Generic[TransformT]):
     :param set_derived:
         An optional async function that takes the output of
         `transform_cls.raw_to_derived` and applies it to the raw devices.
-    :param raw_and_transform_devices:
-        Devices whose values will be passed as parameters to the `transform_cls`,
-        and as arguments to `transform_cls.raw_to_derived`.
+    :param raw_and_transform_devices_and_constants:
+        Devices and Constants whose values will be passed as parameters
+        to the `transform_cls`, and as arguments to `transform_cls.raw_to_derived`.
     """
 
     def __init__(
         self,
         transform_cls: type[TransformT],
         set_derived: Callable[..., Awaitable[None]] | None = None,
-        **raw_and_transform_devices,
+        **raw_and_transform_devices_and_constants,
     ):
         self._set_derived = set_derived
+        _raw_and_transform_devices, _raw_and_transform_constants = (
+            {
+                k: v
+                for k, v in raw_and_transform_devices_and_constants.items()
+                if isinstance(v, Device)
+            },
+            {
+                k: v
+                for k, v in raw_and_transform_devices_and_constants.items()
+                if isinstance(v, Primitive)
+            },
+        )
+
         # Check the raw and transform devices match the input arguments of the Transform
         if transform_cls is not Transform:
             # Populate expected parameters and types
@@ -48,26 +61,42 @@ class DerivedSignalFactory(Generic[TransformT]):
             }
 
             # Populate received parameters and types
-            # Use Signal datatype, or Locatable datatype, or set type as None
+            # Use Primitive's type, Signal's datatype,
+            # Locatable's datatype, or set type as None
             received = {
-                k: v.datatype if isinstance(v, Signal) else get_locatable_type(v)
-                for k, v in raw_and_transform_devices.items()
+                **{
+                    k: v.datatype if isinstance(v, Signal) else get_locatable_type(v)
+                    for k, v in _raw_and_transform_devices.items()
+                },
+                **{k: type(v) for k, v in _raw_and_transform_constants.items()},
            }
 
             if expected != received:
                 msg = (
-                    f"Expected devices to be passed as keyword arguments "
+                    f"Expected the following to be passed as keyword arguments "
                     f"{expected}, got {received}"
                 )
                 raise TypeError(msg)
         self._set_derived_takes_dict = (
             is_typeddict(_get_first_arg_datatype(set_derived)) if set_derived else False
         )
+
+        _raw_constants, _transform_constants = _partition_by_keys(
+            _raw_and_transform_constants, set(transform_cls.model_fields)
+        )
+
+        _raw_devices, _transform_devices = _partition_by_keys(
+            _raw_and_transform_devices, set(transform_cls.model_fields)
+        )
+
         self._transformer = SignalTransformer(
             transform_cls,
             set_derived,
             self._set_derived_takes_dict,
-            **raw_and_transform_devices,
+            _raw_devices,
+            _raw_constants,
+            _transform_devices,
+            _transform_constants,
         )
 
     def _make_signal(
@@ -177,7 +206,7 @@ def _get_first_arg_datatype(
 def _make_factory(
     raw_to_derived: Callable[..., SignalDatatypeT] | None = None,
     set_derived: Callable[[SignalDatatypeT], Awaitable[None]] | None = None,
-    raw_devices: dict[str, Device] | None = None,
+    raw_devices_and_constants: dict[str, Device | Primitive] | None = None,
 ) -> DerivedSignalFactory:
     if raw_to_derived:
 
@@ -190,7 +219,9 @@
         DerivedTransform.raw_to_derived.__annotations__ = get_type_hints(raw_to_derived)
 
         return DerivedSignalFactory(
-            DerivedTransform, set_derived=set_derived, **(raw_devices or {})
+            DerivedTransform,
+            set_derived=set_derived,
+            **(raw_devices_and_constants or {}),
         )
     else:
         return DerivedSignalFactory(Transform, set_derived=set_derived)
@@ -200,7 +231,7 @@ def derived_signal_r(
     raw_to_derived: Callable[..., SignalDatatypeT],
     derived_units: str | None = None,
     derived_precision: int | None = None,
-    **raw_devices: Device,
+    **raw_devices_and_constants: Device | Primitive,
 ) -> SignalR[SignalDatatypeT]:
     """Create a read only derived signal.
 
@@ -209,11 +240,14 @@
         returns the derived value.
     :param derived_units: Engineering units for the derived signal
     :param derived_precision: Number of digits after the decimal place to display
-    :param raw_devices:
-        A dictionary of Devices to provide the values for raw_to_derived. The names
-        of these arguments must match the arguments of raw_to_derived.
+    :param raw_devices_and_constants:
+        A dictionary of Devices and Constants to provide the values for raw_to_derived.
+        The names of these arguments must match the arguments of raw_to_derived.
     """
-    factory = _make_factory(raw_to_derived=raw_to_derived, raw_devices=raw_devices)
+    factory = _make_factory(
+        raw_to_derived=raw_to_derived,
+        raw_devices_and_constants=raw_devices_and_constants,
+    )
     return factory.derived_signal_r(
         datatype=_get_return_datatype(raw_to_derived),
         name="value",
@@ -227,7 +261,7 @@ def derived_signal_rw(
     set_derived: Callable[[SignalDatatypeT], Awaitable[None]],
     derived_units: str | None = None,
     derived_precision: int | None = None,
-    **raw_devices: Device,
+    **raw_devices_and_constants: Device | Primitive,
 ) -> SignalRW[SignalDatatypeT]:
     """Create a read-write derived signal.
 
@@ -239,9 +273,9 @@
         either be an async function, or return an [](#AsyncStatus)
     :param derived_units: Engineering units for the derived signal
     :param derived_precision: Number of digits after the decimal place to display
-    :param raw_devices:
-        A dictionary of Devices to provide the values for raw_to_derived. The names
-        of these arguments must match the arguments of raw_to_derived.
+    :param raw_devices_and_constants:
+        A dictionary of Devices and Constants to provide the values for raw_to_derived.
+        The names of these arguments must match the arguments of raw_to_derived.
     """
     raw_to_derived_datatype = _get_return_datatype(raw_to_derived)
     set_derived_datatype = _get_first_arg_datatype(set_derived)
@@ -253,7 +287,9 @@
         raise TypeError(msg)
 
     factory = _make_factory(
-        raw_to_derived=raw_to_derived, set_derived=set_derived, raw_devices=raw_devices
+        raw_to_derived=raw_to_derived,
+        set_derived=set_derived,
+        raw_devices_and_constants=raw_devices_and_constants,
     )
     return factory.derived_signal_rw(
         datatype=raw_to_derived_datatype,
@@ -297,3 +333,13 @@ def get_locatable_type(obj: object) -> type | None:
     if args:
         return args[0]
     return None
+
+
+def _partition_by_keys(data: dict, keys: set) -> tuple[dict, dict]:
+    group_excluded, group_included = {}, {}
+    for k, v in data.items():
+        if k in keys:
+            group_included[k] = v
+        else:
+            group_excluded[k] = v
+    return group_excluded, group_included
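
The user-facing effect of this file is that derived_signal_r / derived_signal_rw now accept plain Primitive constants alongside Devices in their keyword arguments, matched by name against the arguments of raw_to_derived. A minimal sketch under that reading of the diff (the scaled function and the gain constant are illustrative, not part of the package):

from ophyd_async.core import derived_signal_r, soft_signal_rw

raw = soft_signal_rw(float, initial_value=3.0, name="raw")


def scaled(raw: float, gain: float) -> float:
    # Derived value computed from one raw Signal and one constant.
    return raw * gain


# gain is passed as a constant rather than a Device (new in 0.11 per this file).
scaled_signal = derived_signal_r(scaled, raw=raw, gain=2.0)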
ophyd_async/core/_derived_signal_backend.py CHANGED
@@ -7,17 +7,23 @@ from typing import TYPE_CHECKING, Any, Generic, TypeVar
 
 from bluesky.protocols import Location, Reading, Subscribable
 from event_model import DataKey
-from pydantic import BaseModel
 
 from ._protocol import AsyncLocatable, AsyncReadable
 from ._signal_backend import SignalBackend, SignalDatatypeT, make_datakey, make_metadata
-from ._utils import Callback, T, gather_dict, merge_gathered_dicts
+from ._utils import (
+    Callback,
+    ConfinedModel,
+    T,
+    error_if_none,
+    gather_dict,
+    merge_gathered_dicts,
+)
 
 RawT = TypeVar("RawT")
 DerivedT = TypeVar("DerivedT")
 
 
-class Transform(BaseModel, Generic[RawT, DerivedT]):
+class Transform(ConfinedModel, Generic[RawT, DerivedT]):
     """Baseclass for bidirectional transforms for Derived Signals.
 
     Subclass and add:
@@ -77,15 +83,20 @@ class SignalTransformer(Generic[TransformT]):
         transform_cls: type[TransformT],
         set_derived: Callable[..., Awaitable[None]] | None,
         set_derived_takes_dict: bool,
-        **raw_and_transform_devices,
+        raw_devices,
+        raw_constants,
+        transform_devices,
+        transform_constants,
     ):
         self._transform_cls = transform_cls
         self._set_derived = set_derived
         self._set_derived_takes_dict = set_derived_takes_dict
-        self._transform_devices = {
-            k: raw_and_transform_devices.pop(k) for k in transform_cls.model_fields
-        }
-        self._raw_devices = raw_and_transform_devices
+
+        self._transform_devices = transform_devices
+        self._transform_constants = transform_constants
+        self._raw_devices = raw_devices
+        self._raw_constants = raw_constants
+
         self._derived_callbacks: dict[str, Callback[Reading]] = {}
         self._cached_readings: dict[str, Reading] | None = None
 
@@ -124,7 +135,7 @@
             k: transform_readings[sig.name]["value"]
             for k, sig in self.transform_readables.items()
         }
-        return self._transform_cls(**transform_args)
+        return self._transform_cls(**(transform_args | self._transform_constants))
 
     def _make_derived_readings(
         self, raw_and_transform_readings: dict[str, Reading]
@@ -142,10 +153,15 @@
         transform = self._make_transform_from_readings(raw_and_transform_readings)
         # Create the raw values from the rest then calculate the derived readings
         # using the transform
+        # Extend dictionary with values of any Constants passed as arguments
         raw_values = {
-            k: raw_and_transform_readings[sig.name]["value"]
-            for k, sig in self._raw_devices.items()
+            **{
+                k: raw_and_transform_readings[sig.name]["value"]
+                for k, sig in self._raw_devices.items()
+            },
+            **self._raw_constants,
         }
+
         derived_readings = {
             name: Reading(
                 value=derived, timestamp=timestamp, alarm_severity=alarm_severity
@@ -175,13 +191,15 @@
         return {k: v["value"] for k, v in derived_readings.items()}
 
     def _update_cached_reading(self, value: dict[str, Reading]):
-        if self._cached_readings is None:
-            msg = "Cannot update cached reading as it has not been initialised"
-            raise RuntimeError(msg)
-        self._cached_readings.update(value)
+        _cached_readings = error_if_none(
+            self._cached_readings,
+            "Cannot update cached reading as it has not been initialised",
+        )
+
+        _cached_readings.update(value)
         if self._complete_cached_reading():
             # We've got a complete set of values, callback on them
-            derived_readings = self._make_derived_readings(self._cached_readings)
+            derived_readings = self._make_derived_readings(_cached_readings)
             for name, callback in self._derived_callbacks.items():
                 callback(derived_readings[name])
 
@@ -228,18 +246,19 @@
         }
 
     async def set_derived(self, name: str, value: Any):
-        if self._set_derived is None:
-            msg = "Cannot put as no set_derived method given"
-            raise RuntimeError(msg)
+        _set_derived = error_if_none(
+            self._set_derived,
+            "Cannot put as no set_derived method given",
+        )
         if self._set_derived_takes_dict:
             # Need to get the other derived values and update the one that's changing
             derived = await self.get_locations()
             setpoints = {k: v["setpoint"] for k, v in derived.items()}
             setpoints[name] = value
-            await self._set_derived(setpoints)
+            await _set_derived(setpoints)
         else:
             # Only one derived signal, so pass it directly
-            await self._set_derived(value)
+            await _set_derived(value)
 
 
 class DerivedSignalBackend(SignalBackend[SignalDatatypeT]):
@@ -275,9 +294,12 @@
         if wait is False:
             msg = "Cannot put with wait=False"
             raise RuntimeError(msg)
-        if value is None:
-            msg = "Must be given a value to put"
-            raise RuntimeError(msg)
+
+        value = error_if_none(
+            value,
+            "Must be given a value to put",
+        )
+
        await self.transformer.set_derived(self.name, value)
 
     async def get_datakey(self, source: str) -> DataKey:
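
Several `if x is None: raise RuntimeError(msg)` blocks across core are replaced by the new error_if_none helper. A minimal sketch of the calling pattern as it is used above; the exact exception type is an assumption based on the code it replaces:

from ophyd_async.core import error_if_none

value: float | None = 3.14
narrowed = error_if_none(value, "Must be given a value to put")  # returns 3.14 unchanged

try:
    error_if_none(None, "Must be given a value to put")
except Exception as exc:  # the replaced code raised RuntimeError, so presumably this does too
    print(type(exc).__name__, exc)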
ophyd_async/core/_detector.py CHANGED
@@ -23,13 +23,13 @@ from bluesky.protocols import (
     WritesStreamAssets,
 )
 from event_model import DataKey
-from pydantic import BaseModel, Field, NonNegativeInt, PositiveInt, computed_field
+from pydantic import Field, NonNegativeInt, PositiveInt, computed_field
 
 from ._device import Device, DeviceConnector
 from ._protocol import AsyncConfigurable, AsyncReadable
 from ._signal import SignalR
 from ._status import AsyncStatus, WatchableAsyncStatus
-from ._utils import DEFAULT_TIMEOUT, WatcherUpdate, merge_gathered_dicts
+from ._utils import DEFAULT_TIMEOUT, ConfinedModel, WatcherUpdate, merge_gathered_dicts
 
 
 class DetectorTrigger(Enum):
@@ -48,7 +48,7 @@ class DetectorTrigger(Enum):
     """Expect a series of variable width external gate signals"""
 
 
-class TriggerInfo(BaseModel):
+class TriggerInfo(ConfinedModel):
     """Minimal set of information required to setup triggering on a detector."""
 
     number_of_events: NonNegativeInt | list[NonNegativeInt] = Field(default=1)
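
TriggerInfo keeps its fields but now derives from ConfinedModel, presumably a constrained pydantic BaseModel subclass living in _utils; its exact configuration is not shown in this diff. Construction is unchanged for the field shown here:

from ophyd_async.core import TriggerInfo

info = TriggerInfo(number_of_events=10)
print(info.number_of_events)  # 10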
ophyd_async/core/_device.py CHANGED
@@ -10,7 +10,13 @@ from typing import Any, TypeVar
 from bluesky.protocols import HasName
 from bluesky.run_engine import call_in_bluesky_event_loop, in_bluesky_event_loop
 
-from ._utils import DEFAULT_TIMEOUT, LazyMock, NotConnected, wait_for_connection
+from ._utils import (
+    DEFAULT_TIMEOUT,
+    LazyMock,
+    NotConnected,
+    error_if_none,
+    wait_for_connection,
+)
 
 
 class DeviceConnector:
@@ -148,6 +154,10 @@ class Device(HasName):
         elif name not in _not_device_attrs and isinstance(value, Device):
             value.parent = self
             self._child_devices[name] = value
+            # And if the name is set, then set the name of all children,
+            # including the child
+            if self._name:
+                self.set_name(self._name)
         # ...and avoiding the super call as we know it resolves to `object`
         return object.__setattr__(self, name, value)
 
@@ -171,12 +181,11 @@ class Device(HasName):
         :param force_reconnect:
             If True, force a reconnect even if the last connect succeeded.
         """
-        if not hasattr(self, "_connector"):
-            msg = (
-                f"{self}: doesn't have attribute `_connector`,"
-                " did you call `super().__init__` in your `__init__` method?"
-            )
-            raise RuntimeError(msg)
+        connector = error_if_none(
+            getattr(self, "_connector", None),
+            f"{self}: doesn't have attribute `_connector`,"
+            f" did you call `super().__init__` in your `__init__` method?",
+        )
         if mock:
             # Always connect in mock mode serially
             if isinstance(mock, LazyMock):
@@ -185,7 +194,7 @@ class Device(HasName):
             elif not self._mock:
                 # Make one
                 self._mock = LazyMock()
-            await self._connector.connect_mock(self, self._mock)
+            await connector.connect_mock(self, self._mock)
         else:
             # Try to cache the connect in real mode
             can_use_previous_connect = (
@@ -195,13 +204,13 @@ class Device(HasName):
             )
             if force_reconnect or not can_use_previous_connect:
                 self._mock = None
-                coro = self._connector.connect_real(self, timeout, force_reconnect)
+                coro = connector.connect_real(self, timeout, force_reconnect)
                 self._connect_task = asyncio.create_task(coro)
-            if not self._connect_task:
-                msg = "Connect task not created, this shouldn't happen"
-                raise RuntimeError(msg)
+            connect_task = error_if_none(
+                self._connect_task, "Connect task not created, this shouldn't happen"
+            )
             # Wait for it to complete
-            await self._connect_task
+            await connect_task
 
 
 _not_device_attrs = {
@@ -289,9 +298,8 @@ class DeviceProcessor:
             raise ValueError
         except ValueError:
             _, _, tb = sys.exc_info()
-            if not tb:
-                msg = "Can't get traceback, this shouldn't happen"
-                raise RuntimeError(msg)  # noqa: B904
+            tb = error_if_none(tb, "Can't get traceback, this shouldn't happen")
+
             caller_frame = tb.tb_frame
             while caller_frame.f_locals.get("self", None) is self:
                 caller_frame = caller_frame.f_back
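
The __setattr__ change above means a child Device assigned after its parent has already been named is renamed immediately, rather than waiting for another set_name call. A minimal sketch; the exact child name depends on the configured separator, so the final comment is indicative only:

from ophyd_async.core import Device, soft_signal_rw

parent = Device(name="parent")
# New in 0.11 per the hunk above: parent already has a name, so assigning a
# child re-runs set_name and the child is named straight away.
parent.child = soft_signal_rw(float)
print(parent.child.name)  # e.g. "parent-child" with the default separator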
ophyd_async/core/_flyer.py CHANGED
@@ -2,10 +2,11 @@ from abc import ABC, abstractmethod
 from typing import Any, Generic
 
 from bluesky.protocols import Flyable, Preparable, Stageable
+from pydantic import Field
 
 from ._device import Device
 from ._status import AsyncStatus
-from ._utils import T
+from ._utils import CALCULATE_TIMEOUT, CalculatableTimeout, ConfinedModel, T
 
 
 class FlyerController(ABC, Generic[T]):
@@ -31,6 +32,39 @@ class FlyerController(ABC, Generic[T]):
         """Stop flying and wait everything to be stopped."""
 
 
+class FlyMotorInfo(ConfinedModel):
+    """Minimal set of information required to fly a motor."""
+
+    start_position: float = Field(frozen=True)
+    """Absolute position of the motor once it finishes accelerating to desired
+    velocity, in motor EGUs"""
+
+    end_position: float = Field(frozen=True)
+    """Absolute position of the motor once it begins decelerating from desired
+    velocity, in EGUs"""
+
+    time_for_move: float = Field(frozen=True, gt=0)
+    """Time taken for the motor to get from start_position to end_position, excluding
+    run-up and run-down, in seconds."""
+
+    timeout: CalculatableTimeout = Field(frozen=True, default=CALCULATE_TIMEOUT)
+    """Maximum time for the complete motor move, including run up and run down.
+    Defaults to `time_for_move` + run up and run down times + 10s."""
+
+    @property
+    def velocity(self) -> float:
+        """Calculate the velocity of the constant velocity phase."""
+        return (self.end_position - self.start_position) / self.time_for_move
+
+    def ramp_up_start_pos(self, acceleration_time: float) -> float:
+        """Calculate the start position with run-up distance added on."""
+        return self.start_position - acceleration_time * self.velocity / 2
+
+    def ramp_down_end_pos(self, acceleration_time: float) -> float:
+        """Calculate the end position with run-down distance added on."""
+        return self.end_position + acceleration_time * self.velocity / 2
+
+
 class StandardFlyer(
     Device,
     Stageable,
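
FlyMotorInfo is now part of ophyd_async.core (it is added to the package __all__ above). A quick worked example of the helpers defined on the class; the numbers are illustrative:

from ophyd_async.core import FlyMotorInfo

info = FlyMotorInfo(start_position=0.0, end_position=10.0, time_for_move=5.0)

print(info.velocity)                # (10.0 - 0.0) / 5.0 == 2.0 EGU/s
print(info.ramp_up_start_pos(0.5))  # 0.0 - 0.5 * 2.0 / 2 == -0.5
print(info.ramp_down_end_pos(0.5))  # 10.0 + 0.5 * 2.0 / 2 == 10.5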
ophyd_async/core/_hdf_dataset.py CHANGED
@@ -2,17 +2,18 @@ from collections.abc import Iterator
 from pathlib import Path
 from urllib.parse import urlunparse
 
+from bluesky.protocols import StreamAsset
 from event_model import (  # type: ignore
     ComposeStreamResource,
     ComposeStreamResourceBundle,
-    StreamDatum,
     StreamRange,
-    StreamResource,
 )
-from pydantic import BaseModel, Field
+from pydantic import Field
 
+from ._utils import ConfinedModel
 
-class HDFDatasetDescription(BaseModel):
+
+class HDFDatasetDescription(ConfinedModel):
     """A description of the type and shape of a dataset in an HDF file."""
 
     data_key: str
@@ -79,12 +80,12 @@ class HDFDocumentComposer:
             for ds in datasets
         ]
 
-    def stream_resources(self) -> Iterator[StreamResource]:
-        for bundle in self._bundles:
-            yield bundle.stream_resource_doc
+    def make_stream_docs(self, indices_written: int) -> Iterator[StreamAsset]:
+        # TODO: fail if we get dropped frames
+        if indices_written and not self._last_emitted:
+            for bundle in self._bundles:
+                yield "stream_resource", bundle.stream_resource_doc
 
-    def stream_data(self, indices_written: int) -> Iterator[StreamDatum]:
-        # Indices are relative to resource
         if indices_written > self._last_emitted:
             indices: StreamRange = {
                 "start": self._last_emitted,
@@ -92,4 +93,4 @@
             }
             self._last_emitted = indices_written
             for bundle in self._bundles:
-                yield "stream_datum", bundle.compose_stream_datum(indices)
+                yield "stream_datum", bundle.compose_stream_datum(indices)