ophyd-async 0.10.0a2-py3-none-any.whl → 0.10.0a4-py3-none-any.whl

This diff shows the changes between publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
Files changed (61)
  1. ophyd_async/_version.py +2 -2
  2. ophyd_async/core/__init__.py +4 -2
  3. ophyd_async/core/_derived_signal.py +42 -14
  4. ophyd_async/core/_derived_signal_backend.py +4 -4
  5. ophyd_async/core/_detector.py +71 -57
  6. ophyd_async/core/_device.py +3 -3
  7. ophyd_async/core/_hdf_dataset.py +1 -5
  8. ophyd_async/core/_providers.py +0 -8
  9. ophyd_async/core/_readable.py +13 -1
  10. ophyd_async/core/_signal.py +21 -5
  11. ophyd_async/core/_signal_backend.py +18 -8
  12. ophyd_async/core/_utils.py +31 -14
  13. ophyd_async/epics/adandor/_andor_controller.py +1 -1
  14. ophyd_async/epics/adaravis/_aravis_controller.py +2 -2
  15. ophyd_async/epics/adcore/_core_detector.py +2 -2
  16. ophyd_async/epics/adcore/_core_io.py +3 -3
  17. ophyd_async/epics/adcore/_core_logic.py +3 -3
  18. ophyd_async/epics/adcore/_core_writer.py +22 -29
  19. ophyd_async/epics/adcore/_hdf_writer.py +17 -15
  20. ophyd_async/epics/adcore/_jpeg_writer.py +1 -3
  21. ophyd_async/epics/adcore/_tiff_writer.py +1 -3
  22. ophyd_async/epics/adcore/_utils.py +11 -2
  23. ophyd_async/epics/adkinetix/_kinetix_controller.py +1 -1
  24. ophyd_async/epics/adpilatus/_pilatus.py +1 -1
  25. ophyd_async/epics/adpilatus/_pilatus_controller.py +6 -13
  26. ophyd_async/epics/adpilatus/_pilatus_io.py +1 -1
  27. ophyd_async/epics/advimba/_vimba_controller.py +1 -1
  28. ophyd_async/epics/core/_aioca.py +2 -2
  29. ophyd_async/epics/core/_p4p.py +1 -1
  30. ophyd_async/epics/core/_pvi_connector.py +5 -3
  31. ophyd_async/epics/core/_util.py +21 -13
  32. ophyd_async/epics/eiger/__init__.py +2 -4
  33. ophyd_async/epics/eiger/_odin_io.py +58 -36
  34. ophyd_async/epics/motor.py +3 -2
  35. ophyd_async/epics/testing/_example_ioc.py +1 -0
  36. ophyd_async/epics/testing/test_records.db +5 -0
  37. ophyd_async/fastcs/eiger/__init__.py +13 -0
  38. ophyd_async/{epics → fastcs}/eiger/_eiger.py +15 -6
  39. ophyd_async/{epics → fastcs}/eiger/_eiger_controller.py +17 -27
  40. ophyd_async/fastcs/eiger/_eiger_io.py +54 -0
  41. ophyd_async/fastcs/panda/_block.py +2 -0
  42. ophyd_async/fastcs/panda/_hdf_panda.py +0 -1
  43. ophyd_async/fastcs/panda/_writer.py +23 -22
  44. ophyd_async/plan_stubs/_fly.py +2 -2
  45. ophyd_async/sim/_blob_detector.py +0 -1
  46. ophyd_async/sim/_blob_detector_controller.py +1 -1
  47. ophyd_async/sim/_blob_detector_writer.py +15 -19
  48. ophyd_async/sim/_motor.py +2 -2
  49. ophyd_async/sim/_pattern_generator.py +2 -0
  50. ophyd_async/tango/core/_base_device.py +2 -1
  51. ophyd_async/tango/core/_converters.py +2 -6
  52. ophyd_async/tango/core/_signal.py +8 -8
  53. ophyd_async/tango/core/_tango_transport.py +12 -12
  54. ophyd_async/tango/demo/_tango/_servers.py +0 -1
  55. ophyd_async/tango/testing/_one_of_everything.py +2 -2
  56. {ophyd_async-0.10.0a2.dist-info → ophyd_async-0.10.0a4.dist-info}/METADATA +1 -1
  57. {ophyd_async-0.10.0a2.dist-info → ophyd_async-0.10.0a4.dist-info}/RECORD +60 -59
  58. {ophyd_async-0.10.0a2.dist-info → ophyd_async-0.10.0a4.dist-info}/WHEEL +1 -1
  59. ophyd_async/epics/eiger/_eiger_io.py +0 -42
  60. {ophyd_async-0.10.0a2.dist-info → ophyd_async-0.10.0a4.dist-info}/licenses/LICENSE +0 -0
  61. {ophyd_async-0.10.0a2.dist-info → ophyd_async-0.10.0a4.dist-info}/top_level.txt +0 -0
ophyd_async/_version.py CHANGED
@@ -17,5 +17,5 @@ __version__: str
  __version_tuple__: VERSION_TUPLE
  version_tuple: VERSION_TUPLE

- __version__ = version = '0.10.0a2'
- __version_tuple__ = version_tuple = (0, 10, 0)
+ __version__ = version = '0.10.0a4'
+ __version_tuple__ = version_tuple = (0, 10, 0, 'a4')
ophyd_async/core/__init__.py CHANGED
@@ -26,7 +26,6 @@ from ._providers import (
  AutoIncrementingPathProvider,
  DatasetDescriber,
  FilenameProvider,
- NameProvider,
  PathInfo,
  PathProvider,
  StaticFilenameProvider,
@@ -76,11 +75,13 @@ from ._utils import (
  DEFAULT_TIMEOUT,
  CalculatableTimeout,
  Callback,
+ EnumTypes,
  LazyMock,
  NotConnected,
  Reference,
  StrictEnum,
  SubsetEnum,
+ SupersetEnum,
  WatcherUpdate,
  gather_dict,
  get_dtype,
@@ -123,6 +124,8 @@ __all__ = [
  "Array1D",
  "StrictEnum",
  "SubsetEnum",
+ "SupersetEnum",
+ "EnumTypes",
  "Table",
  "SignalMetadata",
  # Soft signal
@@ -160,7 +163,6 @@ __all__ = [
  "AutoIncrementFilenameProvider",
  "UUIDFilenameProvider",
  # Datatset
- "NameProvider",
  "DatasetDescriber",
  "HDFDatasetDescription",
  "HDFDocumentComposer",
ophyd_async/core/_derived_signal.py CHANGED
@@ -1,5 +1,7 @@
  from collections.abc import Awaitable, Callable
- from typing import Any, Generic, get_type_hints
+ from typing import Any, Generic, get_args, get_origin, get_type_hints, is_typeddict
+
+ from bluesky.protocols import Locatable

  from ._derived_signal_backend import (
  DerivedSignalBackend,
@@ -8,7 +10,7 @@ from ._derived_signal_backend import (
  TransformT,
  )
  from ._device import Device
- from ._signal import SignalR, SignalRW, SignalT, SignalW
+ from ._signal import Signal, SignalR, SignalRW, SignalT, SignalW
  from ._signal_backend import SignalDatatypeT


@@ -35,24 +37,36 @@ class DerivedSignalFactory(Generic[TransformT]):
  self._set_derived = set_derived
  # Check the raw and transform devices match the input arguments of the Transform
  if transform_cls is not Transform:
- expected = list(transform_cls.model_fields) + [
- x
- for x in get_type_hints(transform_cls.raw_to_derived)
- if x not in ["self", "return"]
- ]
- if set(expected) != set(raw_and_transform_devices):
+ # Populate expected parameters and types
+ expected = {
+ **{k: f.annotation for k, f in transform_cls.model_fields.items()},
+ **{
+ k: v
+ for k, v in get_type_hints(transform_cls.raw_to_derived).items()
+ if k not in {"self", "return"}
+ },
+ }
+
+ # Populate received parameters and types
+ # Use Signal datatype, or Locatable datatype, or set type as None
+ received = {
+ k: v.datatype if isinstance(v, Signal) else get_locatable_type(v)
+ for k, v in raw_and_transform_devices.items()
+ }
+
+ if expected != received:
  msg = (
- f"Expected devices to be passed as keyword arguments {expected}, "
- f"got {list(raw_and_transform_devices)}"
+ f"Expected devices to be passed as keyword arguments "
+ f"{expected}, got {received}"
  )
  raise TypeError(msg)
- set_derived_datatype = (
- _get_first_arg_datatype(set_derived) if set_derived else None
+ self._set_derived_takes_dict = (
+ is_typeddict(_get_first_arg_datatype(set_derived)) if set_derived else False
  )
  self._transformer = SignalTransformer(
  transform_cls,
  set_derived,
- set_derived_datatype,
+ self._set_derived_takes_dict,
  **raw_and_transform_devices,
  )

@@ -75,7 +89,7 @@ class DerivedSignalFactory(Generic[TransformT]):
  f"{signal_cls.__name__}s"
  )
  raise ValueError(msg)
- if issubclass(signal_cls, SignalRW):
+ if issubclass(signal_cls, SignalRW) and self._set_derived_takes_dict:
  self._transformer.raw_locatables # noqa: B018
  backend = DerivedSignalBackend(
  datatype, name, self._transformer, units, precision
@@ -269,3 +283,17 @@ def derived_signal_w(
  units=derived_units,
  precision=derived_precision,
  )
+
+
+ def get_locatable_type(obj: object) -> type | None:
+ """Extract datatype from Locatable parent class.
+
+ :param obj: Object with possible Locatable inheritance
+ :return: Type hint associated with Locatable, or None if not found.
+ """
+ for base in getattr(obj.__class__, "__orig_bases__", []):
+ if get_origin(base) is Locatable:
+ args = get_args(base)
+ if args:
+ return args[0]
+ return None
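Example: a sketch of what `get_locatable_type` extracts, with a hypothetical mover. Note the helper lives in the private `_derived_signal` module and is not added to the public `ophyd_async.core` namespace in this release:

```python
from bluesky.protocols import Locatable, Location

from ophyd_async.core._derived_signal import get_locatable_type  # private module


# Parametrising Locatable[float] records the datatype in __orig_bases__,
# which get_locatable_type inspects.
class Mover(Locatable[float]):
    async def locate(self) -> Location[float]:
        return Location(setpoint=0.0, readback=0.0)


assert get_locatable_type(Mover()) is float
```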
ophyd_async/core/_derived_signal_backend.py CHANGED
@@ -3,7 +3,7 @@ from __future__ import annotations
  import asyncio
  from collections.abc import Awaitable, Callable, Mapping
  from functools import cached_property
- from typing import TYPE_CHECKING, Any, Generic, TypeVar, is_typeddict
+ from typing import TYPE_CHECKING, Any, Generic, TypeVar

  from bluesky.protocols import Location, Reading, Subscribable
  from event_model import DataKey
@@ -76,12 +76,12 @@ class SignalTransformer(Generic[TransformT]):
  self,
  transform_cls: type[TransformT],
  set_derived: Callable[..., Awaitable[None]] | None,
- set_derived_datatype: type | None,
+ set_derived_takes_dict: bool,
  **raw_and_transform_devices,
  ):
  self._transform_cls = transform_cls
  self._set_derived = set_derived
- self._need_dict = is_typeddict(set_derived_datatype)
+ self._set_derived_takes_dict = set_derived_takes_dict
  self._transform_devices = {
  k: raw_and_transform_devices.pop(k) for k in transform_cls.model_fields
  }
@@ -229,7 +229,7 @@ class SignalTransformer(Generic[TransformT]):
  if self._set_derived is None:
  msg = "Cannot put as no set_derived method given"
  raise RuntimeError(msg)
- if self._need_dict:
+ if self._set_derived_takes_dict:
  # Need to get the other derived values and update the one that's changing
  derived = await self.get_locations()
  setpoints = {k: v["setpoint"] for k, v in derived.items()}
ophyd_async/core/_detector.py CHANGED
@@ -23,7 +23,7 @@ from bluesky.protocols import (
  WritesStreamAssets,
  )
  from event_model import DataKey
- from pydantic import BaseModel, Field, NonNegativeInt, computed_field
+ from pydantic import BaseModel, Field, NonNegativeInt, PositiveInt, computed_field

  from ._device import Device, DeviceConnector
  from ._protocol import AsyncConfigurable, AsyncReadable
@@ -51,49 +51,52 @@ class DetectorTrigger(Enum):
  class TriggerInfo(BaseModel):
  """Minimal set of information required to setup triggering on a detector."""

- number_of_triggers: NonNegativeInt | list[NonNegativeInt] = Field(default=1)
- """Number of triggers that will be sent, (0 means infinite).
+ number_of_events: NonNegativeInt | list[NonNegativeInt] = Field(default=1)
+ """Number of events that will be processed, (0 means infinite).

  Can be:
  - A single integer or
- - A list of integers for multiple triggers
+ - A list of integers for multiple events

- Example for tomography: ``TriggerInfo(number=[2,3,100,3])``.
- This would trigger:
+ Example for tomography: ``TriggerInfo(number_of_events=[2,3,100,3])``.
+ This would process:

- - 2 times for dark field images
- - 3 times for initial flat field images
- - 100 times for projections
- - 3 times for final flat field images
+ - 2 events for dark field images
+ - 3 events for initial flat field images
+ - 100 events for projections
+ - 3 events for final flat field images
  """

  trigger: DetectorTrigger = Field(default=DetectorTrigger.INTERNAL)
  """Sort of triggers that will be sent"""

  deadtime: float = Field(default=0.0, ge=0)
- """What is the minimum deadtime between triggers"""
+ """What is the minimum deadtime between exposures"""

  livetime: float | None = Field(default=None, ge=0)
- """What is the maximum high time of the triggers"""
-
- frame_timeout: float | None = Field(default=None, gt=0)
- """What is the maximum timeout on waiting for a frame"""
-
- multiplier: int = 1
- """How many triggers make up a single StreamDatum index, to allow multiple frames
- from a faster detector to be zipped with a single frame from a slow detector
- e.g. if num=10 and multiplier=5 then the detector will take 10 frames,
- but publish 2 indices, and describe() will show a shape of (5, h, w)
+ """What is the maximum high time of the exposures"""
+
+ exposure_timeout: float | None = Field(default=None, gt=0)
+ """What is the maximum timeout on waiting for an exposure"""
+
+ exposures_per_event: PositiveInt = 1
+ """The number of exposures that are grouped into a single StreamDatum index.
+ A exposures_per_event > 1 can be useful to have exposures from a faster detector
+ able to be zipped with a single exposure from a slower detector. E.g. if
+ number_of_events=10 and exposures_per_event=5 then the detector will take
+ 10 exposures, but publish 2 StreamDatum indices, and describe() will show a
+ shape of (5, h, w) for each.
+ Default is 1.
  """

  @computed_field
  @cached_property
- def total_number_of_triggers(self) -> int:
+ def total_number_of_exposures(self) -> int:
  return (
- sum(self.number_of_triggers)
- if isinstance(self.number_of_triggers, list)
- else self.number_of_triggers
- )
+ sum(self.number_of_events)
+ if isinstance(self.number_of_events, list)
+ else self.number_of_events
+ ) * self.exposures_per_event


  class DetectorController(ABC):
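Migration sketch for the renamed `TriggerInfo` fields (old names in comments); the arithmetic follows the new `total_number_of_exposures` definition above:

```python
from ophyd_async.core import DetectorTrigger, TriggerInfo

info = TriggerInfo(
    number_of_events=[2, 3, 100, 3],  # was number_of_triggers
    exposures_per_event=5,            # was multiplier
    exposure_timeout=10.0,            # was frame_timeout
    trigger=DetectorTrigger.INTERNAL,
)
# The total is now scaled by exposures_per_event
assert info.total_number_of_exposures == (2 + 3 + 100 + 3) * 5
```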
@@ -127,16 +130,17 @@
  """Logic for making detector write data to somewhere persistent (e.g. HDF5 file)."""

  @abstractmethod
- async def open(self, multiplier: int = 1) -> dict[str, DataKey]:
+ async def open(
+ self, name: str, exposures_per_event: PositiveInt = 1
+ ) -> dict[str, DataKey]:
  """Open writer and wait for it to be ready for data.

- :param multiplier:
+ :param exposures_per_event:
  Each StreamDatum index corresponds to this many written exposures
  :return: Output for ``describe()``
  """

- @property
- def hints(self) -> Hints:
+ def get_hints(self, name: str) -> Hints:
  """The hints to be used for the detector."""
  return {}

@@ -153,7 +157,9 @@
  """Yield the index of each frame (or equivalent data point) as it is written."""

  @abstractmethod
- def collect_stream_docs(self, indices_written: int) -> AsyncIterator[StreamAsset]:
+ def collect_stream_docs(
+ self, name: str, indices_written: int
+ ) -> AsyncIterator[StreamAsset]:
  """Create Stream docs up to given number written."""

  @abstractmethod
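Skeleton of a writer against the new interface, assuming the remaining abstract methods (`get_indices_written`, `observe_indices_written`, `close`) are unchanged; the in-memory behaviour is purely illustrative:

```python
from collections.abc import AsyncGenerator, AsyncIterator

from bluesky.protocols import Hints, StreamAsset
from event_model import DataKey
from ophyd_async.core import DetectorWriter


class InMemoryWriter(DetectorWriter):
    """Hypothetical writer showing the 0.10.0a4 signatures."""

    async def open(
        self, name: str, exposures_per_event: int = 1
    ) -> dict[str, DataKey]:
        # `name` is new; `exposures_per_event` replaces `multiplier`
        self._indices_written = 0
        return {}

    def get_hints(self, name: str) -> Hints:
        # Replaces the old `hints` property
        return {"fields": [name]}

    async def get_indices_written(self) -> int:
        return self._indices_written

    async def observe_indices_written(
        self, timeout: float
    ) -> AsyncGenerator[int, None]:
        yield self._indices_written

    async def collect_stream_docs(
        self, name: str, indices_written: int
    ) -> AsyncIterator[StreamAsset]:
        return
        yield  # empty async generator

    async def close(self) -> None:
        pass
```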
@@ -215,11 +221,11 @@ class StandardDetector(
  self._watchers: list[Callable] = []
  self._fly_status: WatchableAsyncStatus | None = None
  self._fly_start: float | None = None
- self._frames_to_complete: int = 0
- # Represents the total number of frames that will have been completed at the
+ self._events_to_complete: int = 0
+ # Represents the total number of exposures that will have been completed at the
  # end of the next `complete`.
- self._completable_frames: int = 0
- self._number_of_triggers_iter: Iterator[int] | None = None
+ self._completable_exposures: int = 0
+ self._number_of_events_iter: Iterator[int] | None = None
  self._initial_frame: int = 0
  super().__init__(name, connector=connector)

@@ -269,15 +275,19 @@ class StandardDetector(
  if self._trigger_info is None:
  await self.prepare(
  TriggerInfo(
- number_of_triggers=1,
+ number_of_events=1,
  trigger=DetectorTrigger.INTERNAL,
  )
  )
-
  trigger_info = _ensure_trigger_info_exists(self._trigger_info)
  if trigger_info.trigger is not DetectorTrigger.INTERNAL:
  msg = "The trigger method can only be called with INTERNAL triggering"
  raise ValueError(msg)
+ if trigger_info.number_of_events != 1:
+ raise ValueError(
+ "Triggering is not supported for multiple events, the detector was "
+ f"prepared with number_of_events={trigger_info.number_of_events}."
+ )

  # Arm the detector and wait for it to finish.
  indices_written = await self._writer.get_indices_written()
@@ -312,14 +322,16 @@
  raise ValueError(msg)
  elif not value.deadtime:
  value.deadtime = self._controller.get_deadtime(value.livetime)
- self._number_of_triggers_iter = iter(
- value.number_of_triggers
- if isinstance(value.number_of_triggers, list)
- else [value.number_of_triggers]
- )
- self._describe, _ = await asyncio.gather(
- self._writer.open(value.multiplier), self._controller.prepare(value)
+ self._trigger_info = value
+ self._number_of_events_iter = iter(
+ value.number_of_events
+ if isinstance(value.number_of_events, list)
+ else [value.number_of_events]
  )
+
+ await self._controller.prepare(value)
+ self._describe = await self._writer.open(self.name, value.exposures_per_event)
+
  self._initial_frame = await self._writer.get_indices_written()
  if value.trigger != DetectorTrigger.INTERNAL:
  await self._controller.arm()
@@ -327,25 +339,27 @@

  @AsyncStatus.wrap
  async def kickoff(self):
- if self._trigger_info is None or self._number_of_triggers_iter is None:
+ if self._trigger_info is None or self._number_of_events_iter is None:
  raise RuntimeError("Prepare must be called before kickoff!")
  if self._trigger_info.trigger == DetectorTrigger.INTERNAL:
  await self._controller.arm()
  self._fly_start = time.monotonic()
  try:
- self._frames_to_complete = next(self._number_of_triggers_iter)
- self._completable_frames += self._frames_to_complete
+ self._events_to_complete = next(self._number_of_events_iter)
+ self._completable_exposures += (
+ self._events_to_complete * self._trigger_info.exposures_per_event
+ )
  except StopIteration as err:
  raise RuntimeError(
  f"Kickoff called more than the configured number of "
- f"{self._trigger_info.total_number_of_triggers} iteration(s)!"
+ f"{self._trigger_info.total_number_of_exposures} iteration(s)!"
  ) from err

  @WatchableAsyncStatus.wrap
  async def complete(self):
  trigger_info = _ensure_trigger_info_exists(self._trigger_info)
  indices_written = self._writer.observe_indices_written(
- trigger_info.frame_timeout
+ trigger_info.exposure_timeout
  or (
  DEFAULT_TIMEOUT
  + (trigger_info.livetime or 0)
@@ -358,21 +372,21 @@
  name=self.name,
  current=index,
  initial=self._initial_frame,
- target=self._frames_to_complete,
+ target=self._events_to_complete,
  unit="",
  precision=0,
  time_elapsed=time.monotonic() - self._fly_start
  if self._fly_start
  else None,
  )
- if index >= self._frames_to_complete:
+ if index >= self._events_to_complete:
  break
  finally:
  await indices_written.aclose()
- if self._completable_frames >= trigger_info.total_number_of_triggers:
- self._completable_frames = 0
- self._frames_to_complete = 0
- self._number_of_triggers_iter = None
+ if self._completable_exposures >= trigger_info.total_number_of_exposures:
+ self._completable_exposures = 0
+ self._events_to_complete = 0
+ self._number_of_events_iter = None
  await self._controller.wait_for_idle()

  async def describe_collect(self) -> dict[str, DataKey]:
@@ -386,7 +400,7 @@
  # retrieved for step scans.
  if index is None:
  index = await self._writer.get_indices_written()
- async for doc in self._writer.collect_stream_docs(index):
+ async for doc in self._writer.collect_stream_docs(self.name, index):
  yield doc

  async def get_index(self) -> int:
@@ -394,4 +408,4 @@

  @property
  def hints(self) -> Hints:
- return self._writer.hints
+ return self._writer.get_hints(self.name)
ophyd_async/core/_device.py CHANGED
@@ -346,10 +346,10 @@ class DeviceProcessor:


  def init_devices(
- set_name=True,
+ set_name: bool = True,
  child_name_separator: str = "-",
- connect=True,
- mock=False,
+ connect: bool = True,
+ mock: bool = False,
  timeout: float = 10.0,
  ):
  """Auto initialize top level Device instances: to be used as a context manager.
ophyd_async/core/_hdf_dataset.py CHANGED
@@ -2,7 +2,7 @@ from collections.abc import Iterator
  from pathlib import Path
  from urllib.parse import urlunparse

- from event_model import (
+ from event_model import ( # type: ignore
  ComposeStreamResource,
  ComposeStreamResourceBundle,
  StreamDatum,
@@ -34,9 +34,6 @@ class HDFDatasetDescription(BaseModel):
  chunk_shape: tuple[int, ...]
  """The explicit chunk size written to disk"""

- multiplier: int = 1
- """Won't be used soon."""
-

  SLICE_NAME = "AD_HDF5_SWMR_SLICE"

@@ -74,7 +71,6 @@ class HDFDocumentComposer:
  data_key=ds.data_key,
  parameters={
  "dataset": ds.dataset,
- "multiplier": ds.multiplier,
  "chunk_shape": ds.chunk_shape,
  },
  uid=None,
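With `multiplier` gone, a dataset description reduces to the on-disk layout. A sketch, assuming the remaining fields of the model (`data_key`, `dataset`, `shape`, `dtype_numpy`, `chunk_shape`) are unchanged; the values are illustrative:

```python
from ophyd_async.core import HDFDatasetDescription

ds = HDFDatasetDescription(
    data_key="det",               # data_key in the descriptor document
    dataset="/entry/data/data",   # dataset path within the HDF5 file
    shape=(1024, 1024),           # shape of a single exposure
    dtype_numpy="<u2",            # little-endian 16-bit unsigned
    chunk_shape=(1, 1024, 1024),  # explicit chunk size written to disk
)
```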
ophyd_async/core/_providers.py CHANGED
@@ -220,14 +220,6 @@ class YMDPathProvider(PathProvider):
  )


- class NameProvider(Protocol):
- """Base class for callable classes providing data keys."""
-
- @abstractmethod
- def __call__(self) -> str:
- """Get the name to be used as a data_key in the descriptor document."""
-
-
  class DatasetDescriber(Protocol):
  """For describing datasets in file writing."""

ophyd_async/core/_readable.py CHANGED
@@ -179,8 +179,20 @@ class StandardReadable(

  yield

- # Set symmetric difference operator gives all newly added keys
+ # Set symmetric difference operator gives all newly added keys.
  new_dict = dict(self.children())
+ for key, value in new_dict.items():
+ # Check if key already exists in dict_copy and if the value has changed.
+ if key in dict_copy and value != dict_copy[key]:
+ error_msg = (
+ f"Duplicate readable device found: '{key}' in {value.parent}. "
+ "Derived class must not redefine a readable. "
+ "See: https://github.com/bluesky/ophyd-async/issues/848. "
+ "If this functionality is required, please raise an issue: "
+ "https://github.com/bluesky/ophyd-async"
+ )
+ raise KeyError(error_msg)
+
  new_keys = dict_copy.keys() ^ new_dict.keys()
  new_values = [new_dict[key] for key in new_keys]
ophyd_async/core/_signal.py CHANGED
@@ -100,6 +100,11 @@ class Signal(Device, Generic[SignalDatatypeT]):
  """
  return self._connector.backend.source(self.name, read=True)

+ @property
+ def datatype(self) -> type[SignalDatatypeT] | None:
+ """Returns the datatype of the signal."""
+ return self._connector.backend.datatype
+

  SignalT = TypeVar("SignalT", bound=Signal)

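The new property in action with a soft signal:

```python
from ophyd_async.core import soft_signal_rw

sig = soft_signal_rw(float, name="sig")
assert sig.datatype is float  # read-only view of the backend datatype
```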
@@ -431,8 +436,8 @@
  Call subscribe_value on all the signals at the start, and clear_sub on
  it at the end.
  :param timeout:
- If given, how long to wait for each updated value in seconds. If an
- update is not produced in this time then raise asyncio.TimeoutError.
+ If given, how long to wait for ANY updated value from shared queue in seconds.
+ If an update is not produced in this time then raise asyncio.TimeoutError.
  :param done_status:
  If this status is complete, stop observing and make the iterator return.
  If it raises an exception then this exception will be raised by the
@@ -454,8 +459,10 @@
  q: asyncio.Queue[tuple[SignalR[SignalDatatypeT], SignalDatatypeT] | Status] = (
  asyncio.Queue()
  )
-
+ # dict to store signal subscription to remove it later
  cbs: dict[SignalR, Callback] = {}
+
+ # subscribe signal to update queue and fill cbs dict
  for signal in signals:

  def queue_value(value: SignalDatatypeT, signal=signal):
@@ -468,6 +475,7 @@
  done_status.add_callback(q.put_nowait)
  overall_deadline = time.monotonic() + done_timeout if done_timeout else None
  try:
+ last_item = ()
  while True:
  if overall_deadline and time.monotonic() >= overall_deadline:
  raise asyncio.TimeoutError(
@@ -476,14 +484,22 @@
  f"timeout {done_timeout}s"
  )
  iteration_timeout = _get_iteration_timeout(timeout, overall_deadline)
- item = await asyncio.wait_for(q.get(), iteration_timeout)
+ try:
+ item = await asyncio.wait_for(q.get(), iteration_timeout)
+ except asyncio.TimeoutError as exc:
+ raise asyncio.TimeoutError(
+ f"Timeout Error while waiting {iteration_timeout}s to update "
+ f"{[signal.source for signal in signals]}. "
+ f"Last observed signal and value were {last_item}"
+ ) from exc
  if done_status and item is done_status:
  if exc := done_status.exception():
  raise exc
  else:
  break
  else:
- yield cast(tuple[SignalR[SignalDatatypeT], SignalDatatypeT], item)
+ last_item = cast(tuple[SignalR[SignalDatatypeT], SignalDatatypeT], item)
+ yield last_item
  finally:
  for signal, cb in cbs.items():
  signal.clear_sub(cb)
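A sketch of the richer timeout behaviour: when no update arrives in time, the raised `asyncio.TimeoutError` now names the signal sources and the last observed item. Soft signals publish their initial values immediately, after which nothing updates, so this times out:

```python
import asyncio

from ophyd_async.core import observe_signals_value, soft_signal_rw


async def watch() -> None:
    a = soft_signal_rw(int, initial_value=0, name="a")
    b = soft_signal_rw(int, initial_value=0, name="b")
    try:
        async for signal, value in observe_signals_value(a, b, timeout=0.1):
            print(signal.source, value)
    except asyncio.TimeoutError as exc:
        print(exc)  # includes sources and the last observed (signal, value)


asyncio.run(watch())
```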
ophyd_async/core/_signal_backend.py CHANGED
@@ -6,8 +6,16 @@ import numpy as np
  from bluesky.protocols import Reading
  from event_model import DataKey, Dtype, Limits

+ from ophyd_async.core._utils import (
+ Callback,
+ EnumTypes,
+ StrictEnum,
+ SubsetEnum,
+ SupersetEnum,
+ get_enum_cls,
+ )
+
  from ._table import Table
- from ._utils import Callback, StrictEnum, get_enum_cls

  DTypeScalar_co = TypeVar("DTypeScalar_co", covariant=True, bound=np.generic)
  """A numpy dtype like [](#numpy.float64)."""
@@ -24,7 +32,7 @@ E.g. `Array1D[np.float64]` is a 1D numpy array of 64-bit floats.
  Primitive = bool | int | float | str
  SignalDatatype = (
  Primitive
- | StrictEnum
+ | EnumTypes
  | Array1D[np.bool_]
  | Array1D[np.int8]
  | Array1D[np.uint8]
@@ -39,16 +47,18 @@ SignalDatatype = (
  | np.ndarray
  | Sequence[str]
  | Sequence[StrictEnum]
+ | Sequence[SubsetEnum]
+ | Sequence[SupersetEnum]
  | Table
  )
  """The supported [](#Signal) datatypes:

  - A python primitive [](#bool), [](#int), [](#float), [](#str)
- - A [](#StrictEnum) or [](#SubsetEnum) subclass
+ - An [](#EnumTypes) subclass
  - A fixed datatype [](#Array1D) of numpy bool, signed and unsigned integers or float
  - A [](#numpy.ndarray) which can change dimensions and datatype at runtime
  - A sequence of [](#str)
- - A sequence of [](#StrictEnum) or [](#SubsetEnum) subclass
+ - A sequence of [](#EnumTypes) subclasses
  - A [](#Table) subclass
  """
  # TODO: These typevars will not be needed when we drop python 3.11
@@ -58,7 +68,7 @@ PrimitiveT = TypeVar("PrimitiveT", bound=Primitive)
  SignalDatatypeT = TypeVar("SignalDatatypeT", bound=SignalDatatype)
  """A typevar for a [](#SignalDatatype)."""
  SignalDatatypeV = TypeVar("SignalDatatypeV", bound=SignalDatatype)
- EnumT = TypeVar("EnumT", bound=StrictEnum)
+ EnumT = TypeVar("EnumT", bound=EnumTypes)
  TableT = TypeVar("TableT", bound=Table)


@@ -136,7 +146,7 @@ def _datakey_dtype(datatype: type[SignalDatatype]) -> Dtype:
  or issubclass(datatype, Table)
  ):
  return "array"
- elif issubclass(datatype, StrictEnum):
+ elif issubclass(datatype, EnumTypes):
  return "string"
  elif issubclass(datatype, Primitive):
  return _primitive_dtype[datatype]
@@ -153,7 +163,7 @@ def _datakey_dtype_numpy(
  elif (
  get_origin(datatype) is Sequence
  or datatype is str
- or issubclass(datatype, StrictEnum)
+ or issubclass(datatype, EnumTypes)
  ):
  # TODO: use np.dtypes.StringDType when we can use in structured arrays
  # https://github.com/numpy/numpy/issues/25693
@@ -167,7 +177,7 @@


  def _datakey_shape(value: SignalDatatype) -> list[int]:
- if type(value) in _primitive_dtype or isinstance(value, StrictEnum):
+ if type(value) in _primitive_dtype or isinstance(value, EnumTypes):
  return []
  elif isinstance(value, np.ndarray):
  return list(value.shape)
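Example: the `SignalDatatype` union now admits sequences of every enum base, so a signal can carry a list of `SubsetEnum` (or `SupersetEnum`) members; the enum here is hypothetical:

```python
from collections.abc import Sequence

from ophyd_async.core import SubsetEnum, soft_signal_rw


class Filter(SubsetEnum):  # hypothetical choices
    IN = "In"
    OUT = "Out"


filters = soft_signal_rw(Sequence[Filter], initial_value=[Filter.IN], name="filters")
```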