ophyd-async 0.9.0a1__py3-none-any.whl → 0.10.0a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (157)
  1. ophyd_async/__init__.py +5 -8
  2. ophyd_async/_docs_parser.py +12 -0
  3. ophyd_async/_version.py +9 -4
  4. ophyd_async/core/__init__.py +102 -74
  5. ophyd_async/core/_derived_signal.py +271 -0
  6. ophyd_async/core/_derived_signal_backend.py +300 -0
  7. ophyd_async/core/_detector.py +158 -153
  8. ophyd_async/core/_device.py +143 -115
  9. ophyd_async/core/_device_filler.py +82 -9
  10. ophyd_async/core/_flyer.py +16 -7
  11. ophyd_async/core/_hdf_dataset.py +29 -22
  12. ophyd_async/core/_log.py +14 -23
  13. ophyd_async/core/_mock_signal_backend.py +11 -3
  14. ophyd_async/core/_protocol.py +65 -45
  15. ophyd_async/core/_providers.py +28 -9
  16. ophyd_async/core/_readable.py +74 -58
  17. ophyd_async/core/_settings.py +113 -0
  18. ophyd_async/core/_signal.py +304 -174
  19. ophyd_async/core/_signal_backend.py +60 -14
  20. ophyd_async/core/_soft_signal_backend.py +18 -12
  21. ophyd_async/core/_status.py +72 -24
  22. ophyd_async/core/_table.py +54 -17
  23. ophyd_async/core/_utils.py +101 -52
  24. ophyd_async/core/_yaml_settings.py +66 -0
  25. ophyd_async/epics/__init__.py +1 -0
  26. ophyd_async/epics/adandor/__init__.py +9 -0
  27. ophyd_async/epics/adandor/_andor.py +45 -0
  28. ophyd_async/epics/adandor/_andor_controller.py +51 -0
  29. ophyd_async/epics/adandor/_andor_io.py +34 -0
  30. ophyd_async/epics/adaravis/__init__.py +8 -1
  31. ophyd_async/epics/adaravis/_aravis.py +23 -41
  32. ophyd_async/epics/adaravis/_aravis_controller.py +23 -55
  33. ophyd_async/epics/adaravis/_aravis_io.py +13 -28
  34. ophyd_async/epics/adcore/__init__.py +36 -14
  35. ophyd_async/epics/adcore/_core_detector.py +81 -0
  36. ophyd_async/epics/adcore/_core_io.py +145 -95
  37. ophyd_async/epics/adcore/_core_logic.py +179 -88
  38. ophyd_async/epics/adcore/_core_writer.py +223 -0
  39. ophyd_async/epics/adcore/_hdf_writer.py +51 -92
  40. ophyd_async/epics/adcore/_jpeg_writer.py +26 -0
  41. ophyd_async/epics/adcore/_single_trigger.py +6 -5
  42. ophyd_async/epics/adcore/_tiff_writer.py +26 -0
  43. ophyd_async/epics/adcore/_utils.py +3 -2
  44. ophyd_async/epics/adkinetix/__init__.py +2 -1
  45. ophyd_async/epics/adkinetix/_kinetix.py +32 -27
  46. ophyd_async/epics/adkinetix/_kinetix_controller.py +11 -21
  47. ophyd_async/epics/adkinetix/_kinetix_io.py +12 -13
  48. ophyd_async/epics/adpilatus/__init__.py +7 -2
  49. ophyd_async/epics/adpilatus/_pilatus.py +28 -40
  50. ophyd_async/epics/adpilatus/_pilatus_controller.py +25 -22
  51. ophyd_async/epics/adpilatus/_pilatus_io.py +11 -9
  52. ophyd_async/epics/adsimdetector/__init__.py +8 -1
  53. ophyd_async/epics/adsimdetector/_sim.py +22 -16
  54. ophyd_async/epics/adsimdetector/_sim_controller.py +9 -43
  55. ophyd_async/epics/adsimdetector/_sim_io.py +10 -0
  56. ophyd_async/epics/advimba/__init__.py +10 -1
  57. ophyd_async/epics/advimba/_vimba.py +26 -25
  58. ophyd_async/epics/advimba/_vimba_controller.py +12 -24
  59. ophyd_async/epics/advimba/_vimba_io.py +23 -28
  60. ophyd_async/epics/core/_aioca.py +66 -30
  61. ophyd_async/epics/core/_epics_connector.py +4 -0
  62. ophyd_async/epics/core/_epics_device.py +2 -0
  63. ophyd_async/epics/core/_p4p.py +50 -18
  64. ophyd_async/epics/core/_pvi_connector.py +65 -8
  65. ophyd_async/epics/core/_signal.py +51 -51
  66. ophyd_async/epics/core/_util.py +5 -5
  67. ophyd_async/epics/demo/__init__.py +11 -49
  68. ophyd_async/epics/demo/__main__.py +31 -0
  69. ophyd_async/epics/demo/_ioc.py +32 -0
  70. ophyd_async/epics/demo/_motor.py +82 -0
  71. ophyd_async/epics/demo/_point_detector.py +42 -0
  72. ophyd_async/epics/demo/_point_detector_channel.py +22 -0
  73. ophyd_async/epics/demo/_stage.py +15 -0
  74. ophyd_async/epics/demo/{mover.db → motor.db} +2 -1
  75. ophyd_async/epics/demo/point_detector.db +59 -0
  76. ophyd_async/epics/demo/point_detector_channel.db +21 -0
  77. ophyd_async/epics/eiger/_eiger.py +1 -3
  78. ophyd_async/epics/eiger/_eiger_controller.py +11 -4
  79. ophyd_async/epics/eiger/_eiger_io.py +2 -0
  80. ophyd_async/epics/eiger/_odin_io.py +1 -2
  81. ophyd_async/epics/motor.py +83 -38
  82. ophyd_async/epics/signal.py +4 -1
  83. ophyd_async/epics/testing/__init__.py +14 -14
  84. ophyd_async/epics/testing/_example_ioc.py +68 -73
  85. ophyd_async/epics/testing/_utils.py +19 -44
  86. ophyd_async/epics/testing/test_records.db +16 -0
  87. ophyd_async/epics/testing/test_records_pva.db +17 -16
  88. ophyd_async/fastcs/__init__.py +1 -0
  89. ophyd_async/fastcs/core.py +6 -0
  90. ophyd_async/fastcs/odin/__init__.py +1 -0
  91. ophyd_async/fastcs/panda/__init__.py +8 -8
  92. ophyd_async/fastcs/panda/_block.py +29 -9
  93. ophyd_async/fastcs/panda/_control.py +12 -2
  94. ophyd_async/fastcs/panda/_hdf_panda.py +5 -1
  95. ophyd_async/fastcs/panda/_table.py +13 -7
  96. ophyd_async/fastcs/panda/_trigger.py +23 -9
  97. ophyd_async/fastcs/panda/_writer.py +27 -30
  98. ophyd_async/plan_stubs/__init__.py +16 -0
  99. ophyd_async/plan_stubs/_ensure_connected.py +12 -17
  100. ophyd_async/plan_stubs/_fly.py +3 -5
  101. ophyd_async/plan_stubs/_nd_attributes.py +9 -5
  102. ophyd_async/plan_stubs/_panda.py +14 -0
  103. ophyd_async/plan_stubs/_settings.py +152 -0
  104. ophyd_async/plan_stubs/_utils.py +3 -0
  105. ophyd_async/plan_stubs/_wait_for_awaitable.py +13 -0
  106. ophyd_async/sim/__init__.py +29 -0
  107. ophyd_async/sim/__main__.py +43 -0
  108. ophyd_async/sim/_blob_detector.py +33 -0
  109. ophyd_async/sim/_blob_detector_controller.py +48 -0
  110. ophyd_async/sim/_blob_detector_writer.py +105 -0
  111. ophyd_async/sim/_mirror_horizontal.py +46 -0
  112. ophyd_async/sim/_mirror_vertical.py +74 -0
  113. ophyd_async/sim/_motor.py +233 -0
  114. ophyd_async/sim/_pattern_generator.py +124 -0
  115. ophyd_async/sim/_point_detector.py +86 -0
  116. ophyd_async/sim/_stage.py +19 -0
  117. ophyd_async/tango/__init__.py +1 -0
  118. ophyd_async/tango/core/__init__.py +6 -1
  119. ophyd_async/tango/core/_base_device.py +41 -33
  120. ophyd_async/tango/core/_converters.py +81 -0
  121. ophyd_async/tango/core/_signal.py +21 -33
  122. ophyd_async/tango/core/_tango_readable.py +2 -19
  123. ophyd_async/tango/core/_tango_transport.py +148 -74
  124. ophyd_async/tango/core/_utils.py +47 -0
  125. ophyd_async/tango/demo/_counter.py +2 -0
  126. ophyd_async/tango/demo/_detector.py +2 -0
  127. ophyd_async/tango/demo/_mover.py +10 -6
  128. ophyd_async/tango/demo/_tango/_servers.py +4 -0
  129. ophyd_async/tango/testing/__init__.py +6 -0
  130. ophyd_async/tango/testing/_one_of_everything.py +200 -0
  131. ophyd_async/testing/__init__.py +48 -7
  132. ophyd_async/testing/__pytest_assert_rewrite.py +4 -0
  133. ophyd_async/testing/_assert.py +200 -96
  134. ophyd_async/testing/_mock_signal_utils.py +59 -73
  135. ophyd_async/testing/_one_of_everything.py +146 -0
  136. ophyd_async/testing/_single_derived.py +87 -0
  137. ophyd_async/testing/_utils.py +3 -0
  138. {ophyd_async-0.9.0a1.dist-info → ophyd_async-0.10.0a1.dist-info}/METADATA +25 -26
  139. ophyd_async-0.10.0a1.dist-info/RECORD +149 -0
  140. {ophyd_async-0.9.0a1.dist-info → ophyd_async-0.10.0a1.dist-info}/WHEEL +1 -1
  141. ophyd_async/core/_device_save_loader.py +0 -274
  142. ophyd_async/epics/demo/_mover.py +0 -95
  143. ophyd_async/epics/demo/_sensor.py +0 -37
  144. ophyd_async/epics/demo/sensor.db +0 -19
  145. ophyd_async/fastcs/panda/_utils.py +0 -16
  146. ophyd_async/sim/demo/__init__.py +0 -19
  147. ophyd_async/sim/demo/_pattern_detector/__init__.py +0 -13
  148. ophyd_async/sim/demo/_pattern_detector/_pattern_detector.py +0 -42
  149. ophyd_async/sim/demo/_pattern_detector/_pattern_detector_controller.py +0 -62
  150. ophyd_async/sim/demo/_pattern_detector/_pattern_detector_writer.py +0 -41
  151. ophyd_async/sim/demo/_pattern_detector/_pattern_generator.py +0 -207
  152. ophyd_async/sim/demo/_sim_motor.py +0 -107
  153. ophyd_async/sim/testing/__init__.py +0 -0
  154. ophyd_async-0.9.0a1.dist-info/RECORD +0 -119
  155. ophyd_async-0.9.0a1.dist-info/entry_points.txt +0 -2
  156. {ophyd_async-0.9.0a1.dist-info → ophyd_async-0.10.0a1.dist-info/licenses}/LICENSE +0 -0
  157. {ophyd_async-0.9.0a1.dist-info → ophyd_async-0.10.0a1.dist-info}/top_level.txt +0 -0

ophyd_async/core/_utils.py
@@ -11,10 +11,12 @@ from unittest.mock import Mock
 import numpy as np
 
 T = TypeVar("T")
+V = TypeVar("V")
 P = ParamSpec("P")
 Callback = Callable[[T], None]
 DEFAULT_TIMEOUT = 10.0
-ErrorText = str | Mapping[str, Exception]
+
+logger = logging.getLogger("ophyd_async")
 
 
 class StrictEnumMeta(EnumMeta):
@@ -27,18 +29,33 @@ class StrictEnumMeta(EnumMeta):
 
 
 class StrictEnum(str, Enum, metaclass=StrictEnumMeta):
-    """All members should exist in the Backend, and there will be no extras"""
+    """All members should exist in the Backend, and there will be no extras."""
 
 
 class SubsetEnumMeta(StrictEnumMeta):
     def __call__(self, value, *args, **kwargs): # type: ignore
+        """Return given value if it is a string and not a member of the enum.
+
+        If the value is not a string or is an enum member, default enum behavior
+        is applied. Type checking will complain if provided arbitrary string.
+
+        Returns:
+            Union[str, SubsetEnum]: If the value is a string and not a member of the
+            enum, the string is returned as is. Otherwise, the corresponding enum
+            member is returned.
+
+        Raises:
+            ValueError: If the value is not a string and cannot be converted to an enum
+            member.
+
+        """
         if isinstance(value, str) and not isinstance(value, self):
             return value
         return super().__call__(value, *args, **kwargs)
 
 
 class SubsetEnum(StrictEnum, metaclass=SubsetEnumMeta):
-    """All members should exist in the Backend, but there may be extras"""
+    """All members should exist in the Backend, but there may be extras."""
 
 
 CALCULATE_TIMEOUT = "CALCULATE_TIMEOUT"
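
The `SubsetEnumMeta.__call__` documented above changes lookup semantics: plain strings are passed through untouched rather than coerced to members. A minimal sketch of that behaviour (the `Colour` enum here is hypothetical, for illustration only):

```python
from ophyd_async.core import SubsetEnum


class Colour(SubsetEnum):  # hypothetical example enum
    RED = "Red"


Colour(Colour.RED)  # -> <Colour.RED: 'Red'>: existing members use normal Enum lookup
Colour("Green")     # -> "Green": any other string is passed through unchanged
Colour(3)           # raises ValueError: non-strings fall back to normal Enum behaviour
```

This is what lets a `SubsetEnum`-typed signal accept extra choices that only exist in the backend, as its docstring above describes.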
@@ -53,22 +70,16 @@ CalculatableTimeout = float | None | Literal["CALCULATE_TIMEOUT"]
 
 
 class NotConnected(Exception):
-    """Exception to be raised if a `Device.connect` is cancelled"""
+    """Exception to be raised if a `Device.connect` is cancelled.
 
-    _indent_width = "    "
+    :param errors:
+        Mapping of device name to Exception or another NotConnected.
+        Alternatively a string with the signal error text.
+    """
 
-    def __init__(self, errors: ErrorText):
-        """
-        NotConnected holds a mapping of device/signal names to
-        errors.
-
-        Parameters
-        ----------
-        errors: ErrorText
-            Mapping of device name to Exception or another NotConnected.
-            Alternatively a string with the signal error text.
-        """
+    _indent_width = "    "
 
+    def __init__(self, errors: str | Mapping[str, Exception]):
         self._errors = errors
 
     @property
@@ -94,7 +105,7 @@ class NotConnected(Exception):
     def format_error_string(self, indent="") -> str:
         if not isinstance(self._errors, dict) and not isinstance(self._errors, str):
             raise RuntimeError(
-                f"Unexpected type `{type(self._errors)}` " "expected `str` or `dict`"
+                f"Unexpected type `{type(self._errors)}` expected `str` or `dict`"
             )
 
         if isinstance(self._errors, str):
@@ -114,7 +125,7 @@ class NotConnected(Exception):
     ) -> NotConnected:
         for name, exception in exceptions.items():
             if not isinstance(exception, NotConnected):
-                logging.exception(
+                logger.exception(
                     f"device `{name}` raised unexpected exception "
                     f"{type(exception).__name__}",
                     exc_info=exception,
@@ -124,21 +135,38 @@ class NotConnected(Exception):
 
 @dataclass(frozen=True)
 class WatcherUpdate(Generic[T]):
-    """A dataclass such that, when expanded, it provides the kwargs for a watcher"""
+    """A dataclass such that, when expanded, it provides the kwargs for a watcher."""
 
     current: T
+    """The current value, where it currently is."""
+
     initial: T
+    """The initial value, where it was when it started."""
+
     target: T
+    """The target value, where it will be when it finishes."""
+
     name: str | None = None
+    """An optional name for the device, if available."""
+
     unit: str | None = None
+    """Units of the value, if applicable."""
+
     precision: float | None = None
+    """How many decimal places the value should be displayed to."""
+
     fraction: float | None = None
+    """The fraction of the way between initial and target."""
+
     time_elapsed: float | None = None
+    """The time elapsed since the start of the operation."""
+
     time_remaining: float | None = None
+    """The time remaining until the operation completes."""
 
 
 async def wait_for_connection(**coros: Awaitable[None]):
-    """Call many underlying signals, accumulating exceptions and returning them
+    """Call many underlying signals, accumulating exceptions and returning them.
 
     Expected kwargs should be a mapping of names to coroutine tasks to execute.
     """
@@ -162,12 +190,15 @@ async def wait_for_connection(**coros: Awaitable[None]):
 
 
 def get_dtype(datatype: type) -> np.dtype:
-    """Get the runtime dtype from a numpy ndarray type annotation
+    """Get the runtime dtype from a numpy ndarray type annotation.
 
+    ```python
     >>> from ophyd_async.core import Array1D
     >>> import numpy as np
     >>> get_dtype(Array1D[np.int8])
     dtype('int8')
+
+    ```
     """
     if not get_origin(datatype) == np.ndarray:
         raise TypeError(f"Expected Array1D[dtype], got {datatype}")
@@ -177,33 +208,46 @@ def get_dtype(datatype: type) -> np.dtype:
 
 
 def get_enum_cls(datatype: type | None) -> type[StrictEnum] | None:
-    """Get the runtime dtype from a numpy ndarray type annotation
-
-    >>> import numpy.typing as npt
-    >>> import numpy as np
-    >>> get_dtype(npt.NDArray[np.int8])
-    dtype('int8')
+    """Get the enum class from a datatype.
+
+    :raises TypeError: if type is not a [](#StrictEnum) or [](#SubsetEnum) subclass
+    ```python
+    >>> from ophyd_async.core import StrictEnum
+    >>> from collections.abc import Sequence
+    >>> class MyEnum(StrictEnum):
+    ...     A = "A value"
+    >>> get_enum_cls(str)
+    >>> get_enum_cls(MyEnum)
+    <enum 'MyEnum'>
+    >>> get_enum_cls(Sequence[MyEnum])
+    <enum 'MyEnum'>
+
+    ```
     """
     if get_origin(datatype) is Sequence:
         datatype = get_args(datatype)[0]
     if datatype and issubclass(datatype, Enum):
         if not issubclass(datatype, StrictEnum):
             raise TypeError(
-                f"{datatype} should inherit from .SubsetEnum "
+                f"{datatype} should inherit from ophyd_async.core.SubsetEnum "
                 "or ophyd_async.core.StrictEnum"
             )
         return datatype
+    return None
 
 
 def get_unique(values: dict[str, T], types: str) -> T:
-    """If all values are the same, return that value, otherwise raise TypeError
+    """If all values are the same, return that value, otherwise raise TypeError.
 
+    ```python
     >>> get_unique({"a": 1, "b": 1}, "integers")
     1
     >>> get_unique({"a": 1, "b": 2}, "integers")
     Traceback (most recent call last):
     ...
     TypeError: Differing integers: a has 1, b has 2
+
+    ```
     """
     set_values = set(values.values())
     if len(set_values) != 1:
@@ -217,9 +261,12 @@ async def merge_gathered_dicts(
 ) -> dict[str, T]:
     """Merge dictionaries produced by a sequence of coroutines.
 
-    Can be used for merging ``read()`` or ``describe``. For instance::
+    Can be used for merging `read()` or `describe()`.
 
-        combined_read = await merge_gathered_dicts(s.read() for s in signals)
+    :example:
+    ```python
+    combined_read = await merge_gathered_dicts(s.read() for s in signals)
+    ```
     """
     ret: dict[str, T] = {}
     for result in await asyncio.gather(*coros):
@@ -227,21 +274,18 @@ async def merge_gathered_dicts(
         ret.update(result)
     return ret
 
-async def gather_list(coros: Iterable[Awaitable[T]]) -> list[T]:
-    return await asyncio.gather(*coros)
+async def gather_dict(coros: Mapping[T, Awaitable[V]]) -> dict[T, V]:
+    """Take named coros and return a dict of their name to their return value."""
+    values = await asyncio.gather(*coros.values())
+    return dict(zip(coros, values, strict=True))
 
 
 def in_micros(t: float) -> int:
-    """
-    Converts between a positive number of seconds and an equivalent
-    number of microseconds.
-
-    Args:
-        t (float): A time in seconds
-    Raises:
-        ValueError: if t < 0
-    Returns:
-        t (int): A time in microseconds, rounded up to the nearest whole microsecond,
+    """Convert between a seconds and microseconds.
+
+    :param t: A time in seconds
+    :return: A time in microseconds, rounded up to the nearest whole microsecond
+    :raises ValueError: if t < 0
     """
    if t < 0:
        raise ValueError(f"Expected a positive time in seconds, got {t!r}")
@@ -252,6 +296,7 @@ def get_origin_class(annotatation: Any) -> type | None:
     origin = get_origin(annotatation) or annotatation
     if isinstance(origin, type):
         return origin
+    return None
 
 
 class Reference(Generic[T]):
@@ -259,16 +304,16 @@ class Reference(Generic[T]):
 
     Used to opt out of the naming/parent-child relationship of `Device`.
 
-    For example::
-
-        class DeviceWithRefToSignal(Device):
-            def __init__(self, signal: SignalRW[int]):
-                self.signal_ref = Reference(signal)
-                super().__init__()
-
-            def set(self, value) -> AsyncStatus:
-                return self.signal_ref().set(value + 1)
+    :example:
+    ```python
+    class DeviceWithRefToSignal(Device):
+        def __init__(self, signal: SignalRW[int]):
+            self.signal_ref = Reference(signal)
+            super().__init__()
 
+        def set(self, value) -> AsyncStatus:
+            return self.signal_ref().set(value + 1)
+    ```
     """
 
     def __init__(self, obj: T):
@@ -287,6 +332,7 @@ class LazyMock:
     constructed so that when the leaf is created, so are its parents.
     Any calls to the child are then accessible from the parent mock.
 
+    ```python
     >>> parent = LazyMock()
     >>> child = parent.child("child")
     >>> child_mock = child()
@@ -295,6 +341,8 @@ class LazyMock:
     >>> parent_mock = parent()
     >>> parent_mock.mock_calls
     [call.child()]
+
+    ```
     """
 
     def __init__(self, name: str = "", parent: LazyMock | None = None) -> None:
@@ -303,6 +351,7 @@ class LazyMock:
         self._mock: Mock | None = None
 
     def child(self, name: str) -> LazyMock:
+        """Return a child of this LazyMock with the given name."""
         return LazyMock(name, self)
 
     def __call__(self) -> Mock:

ophyd_async/core/_yaml_settings.py
@@ -0,0 +1,66 @@
+import warnings
+from enum import Enum
+from pathlib import Path
+from typing import Any
+
+import numpy as np
+import numpy.typing as npt
+import yaml
+from pydantic import BaseModel
+
+from ._settings import SettingsProvider
+
+
+def ndarray_representer(dumper: yaml.Dumper, array: npt.NDArray[Any]) -> yaml.Node:
+    return dumper.represent_sequence(
+        "tag:yaml.org,2002:seq", array.tolist(), flow_style=True
+    )
+
+
+def pydantic_model_abstraction_representer(
+    dumper: yaml.Dumper, model: BaseModel
+) -> yaml.Node:
+    return dumper.represent_data(model.model_dump(mode="python"))
+
+
+def enum_representer(dumper: yaml.Dumper, enum: Enum) -> yaml.Node:
+    return dumper.represent_data(enum.value)
+
+
+class YamlSettingsProvider(SettingsProvider):
+    """For providing settings from yaml to signals."""
+
+    def __init__(self, directory: Path | str):
+        self._directory = Path(directory)
+
+    def _file_path(self, name: str) -> Path:
+        return self._directory / (name + ".yaml")
+
+    async def store(self, name: str, data: dict[str, Any]):
+        yaml.add_representer(np.ndarray, ndarray_representer, Dumper=yaml.Dumper)
+        yaml.add_multi_representer(
+            BaseModel,
+            pydantic_model_abstraction_representer,
+            Dumper=yaml.Dumper,
+        )
+        yaml.add_multi_representer(Enum, enum_representer, Dumper=yaml.Dumper)
+        with open(self._file_path(name), "w") as file:
+            yaml.dump(data, file)
+
+    async def retrieve(self, name: str) -> dict[str, Any]:
+        with open(self._file_path(name)) as file:
+            data = yaml.full_load(file)
+        if isinstance(data, list):
+            warnings.warn(
+                DeprecationWarning(
+                    "Found old save file. Re-save your yaml settings file "
+                    f"{self._file_path(name)} using "
+                    "ophyd_async.plan_stubs.store_settings"
+                ),
+                stacklevel=2,
+            )
+            merge = {}
+            for d in data:
+                merge.update(d)
+            return merge
+        return data
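
As a rough usage sketch of the new provider (the directory, device name, and the re-export from `ophyd_async.core` are assumptions; otherwise import it from the module above), `store()` and `retrieve()` round-trip a plain dict keyed by signal name:

```python
import asyncio

from ophyd_async.core import YamlSettingsProvider  # assumed re-export; see module above


async def round_trip() -> None:
    provider = YamlSettingsProvider("/tmp/settings")  # hypothetical directory
    # store() writes /tmp/settings/pilatus.yaml; retrieve() reads it back
    await provider.store("pilatus", {"gain": 2.5, "mode": "Auto"})
    assert await provider.retrieve("pilatus") == {"gain": 2.5, "mode": "Auto"}


asyncio.run(round_trip())
```

In practice these dicts come from the `SettingsProvider` machinery in `_settings.py` and the `plan_stubs/_settings.py` stubs listed in the file summary above.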

ophyd_async/epics/__init__.py
@@ -0,0 +1 @@
+"""EPICS support for Signals, and Devices that use them."""

ophyd_async/epics/adandor/__init__.py
@@ -0,0 +1,9 @@
+from ._andor import Andor2Detector
+from ._andor_controller import Andor2Controller
+from ._andor_io import Andor2DriverIO
+
+__all__ = [
+    "Andor2Detector",
+    "Andor2Controller",
+    "Andor2DriverIO",
+]

ophyd_async/epics/adandor/_andor.py
@@ -0,0 +1,45 @@
+from collections.abc import Sequence
+
+from ophyd_async.core import PathProvider
+from ophyd_async.core._signal import SignalR
+from ophyd_async.epics import adcore
+
+from ._andor_controller import Andor2Controller
+from ._andor_io import Andor2DriverIO
+
+
+class Andor2Detector(adcore.AreaDetector[Andor2Controller]):
+    """Andor 2 area detector device (CCD detector 56fps with full chip readout).
+
+    Andor model:DU897_BV.
+    """
+
+    def __init__(
+        self,
+        prefix: str,
+        path_provider: PathProvider,
+        drv_suffix="cam1:",
+        writer_cls: type[adcore.ADWriter] = adcore.ADHDFWriter,
+        fileio_suffix: str | None = None,
+        name: str = "",
+        config_sigs: Sequence[SignalR] = (),
+        plugins: dict[str, adcore.NDPluginBaseIO] | None = None,
+    ):
+        driver = Andor2DriverIO(prefix + drv_suffix)
+        controller = Andor2Controller(driver)
+
+        writer = writer_cls.with_io(
+            prefix,
+            path_provider,
+            dataset_source=driver,
+            fileio_suffix=fileio_suffix,
+            plugins=plugins,
+        )
+
+        super().__init__(
+            controller=controller,
+            writer=writer,
+            plugins=plugins,
+            name=name,
+            config_sigs=config_sigs,
+        )

ophyd_async/epics/adandor/_andor_controller.py
@@ -0,0 +1,51 @@
+import asyncio
+
+from ophyd_async.core import (
+    DetectorTrigger,
+    TriggerInfo,
+)
+from ophyd_async.epics import adcore
+
+from ._andor_io import Andor2DriverIO, Andor2TriggerMode
+
+_MIN_DEAD_TIME = 0.1
+_MAX_NUM_IMAGE = 999_999
+
+
+class Andor2Controller(adcore.ADBaseController[Andor2DriverIO]):
+    """For controlling the Andor 2 detector."""
+
+    def __init__(
+        self,
+        driver: Andor2DriverIO,
+        good_states: frozenset[adcore.ADState] = adcore.DEFAULT_GOOD_STATES,
+    ) -> None:
+        super().__init__(driver, good_states=good_states)
+
+    def get_deadtime(self, exposure: float | None) -> float:
+        return _MIN_DEAD_TIME + (exposure or 0)
+
+    async def prepare(self, trigger_info: TriggerInfo):
+        await self.set_exposure_time_and_acquire_period_if_supplied(
+            trigger_info.livetime
+        )
+        await asyncio.gather(
+            self.driver.trigger_mode.set(self._get_trigger_mode(trigger_info.trigger)),
+            self.driver.num_images.set(
+                trigger_info.total_number_of_triggers or _MAX_NUM_IMAGE
+            ),
+            self.driver.image_mode.set(adcore.ADImageMode.MULTIPLE),
+        )
+
+    def _get_trigger_mode(self, trigger: DetectorTrigger) -> Andor2TriggerMode:
+        supported_trigger_types = {
+            DetectorTrigger.INTERNAL: Andor2TriggerMode.INTERNAL,
+            DetectorTrigger.EDGE_TRIGGER: Andor2TriggerMode.EXT_TRIGGER,
+        }
+        if trigger not in supported_trigger_types:
+            raise ValueError(
+                f"{self.__class__.__name__} only supports the following trigger "
+                f"types: {supported_trigger_types} but was asked to "
+                f"use {trigger}"
+            )
+        return supported_trigger_types[trigger]

ophyd_async/epics/adandor/_andor_io.py
@@ -0,0 +1,34 @@
+from ophyd_async.core import StrictEnum, SubsetEnum
+from ophyd_async.epics.adcore import ADBaseIO
+from ophyd_async.epics.core import (
+    epics_signal_r,
+    epics_signal_rw,
+)
+
+
+class Andor2TriggerMode(StrictEnum):
+    INTERNAL = "Internal"
+    EXT_TRIGGER = "External"
+    EXT_START = "External Start"
+    EXT_EXPOSURE = "External Exposure"
+    EXT_FVP = "External FVP"
+    SOFTWARE = "Software"
+
+
+class Andor2DataType(SubsetEnum):
+    UINT16 = "UInt16"
+    UINT32 = "UInt32"
+    FLOAT32 = "Float32"
+    FLOAT64 = "Float64"
+
+
+class Andor2DriverIO(ADBaseIO):
+    """Epics pv for andor model:DU897_BV as deployed on p99."""
+
+    def __init__(self, prefix: str, name: str = "") -> None:
+        super().__init__(prefix, name=name)
+        self.trigger_mode = epics_signal_rw(Andor2TriggerMode, prefix + "TriggerMode")
+        self.data_type = epics_signal_r(Andor2DataType, prefix + "DataType_RBV") # type: ignore
+        self.andor_accumulate_period = epics_signal_r(
+            float, prefix + "AndorAccumulatePeriod_RBV"
+        )

ophyd_async/epics/adaravis/__init__.py
@@ -1,9 +1,16 @@
+"""Support for the ADAravis areaDetector driver.
+
+https://github.com/areaDetector/ADAravis
+"""
+
 from ._aravis import AravisDetector
 from ._aravis_controller import AravisController
-from ._aravis_io import AravisDriverIO
+from ._aravis_io import AravisDriverIO, AravisTriggerMode, AravisTriggerSource
 
 __all__ = [
     "AravisDetector",
     "AravisController",
     "AravisDriverIO",
+    "AravisTriggerMode",
+    "AravisTriggerSource",
 ]

ophyd_async/epics/adaravis/_aravis.py
@@ -1,61 +1,43 @@
-from typing import get_args
+from collections.abc import Sequence
 
-from bluesky.protocols import HasHints, Hints
-
-from ophyd_async.core import PathProvider, StandardDetector
+from ophyd_async.core import PathProvider, SignalR
 from ophyd_async.epics import adcore
 
 from ._aravis_controller import AravisController
 from ._aravis_io import AravisDriverIO
 
 
-class AravisDetector(StandardDetector, HasHints):
-    """
-    Ophyd-async implementation of an ADAravis Detector.
+class AravisDetector(adcore.AreaDetector[AravisController]):
+    """Implementation of an ADAravis Detector.
+
     The detector may be configured for an external trigger on a GPIO port,
     which must be done prior to preparing the detector
     """
 
-    _controller: AravisController
-    _writer: adcore.ADHDFWriter
-
     def __init__(
         self,
         prefix: str,
         path_provider: PathProvider,
         drv_suffix="cam1:",
-        hdf_suffix="HDF1:",
-        name="",
-        gpio_number: AravisController.GPIO_NUMBER = 1,
+        writer_cls: type[adcore.ADWriter] = adcore.ADHDFWriter,
+        fileio_suffix: str | None = None,
+        name: str = "",
+        config_sigs: Sequence[SignalR] = (),
+        plugins: dict[str, adcore.NDPluginBaseIO] | None = None,
    ):
-        self.drv = AravisDriverIO(prefix + drv_suffix)
-        self.hdf = adcore.NDFileHDFIO(prefix + hdf_suffix)
-
+        driver = AravisDriverIO(prefix + drv_suffix)
+        controller = AravisController(driver)
+        writer = writer_cls.with_io(
+            prefix,
+            path_provider,
+            dataset_source=driver,
+            fileio_suffix=fileio_suffix,
+            plugins=plugins,
+        )
         super().__init__(
-            AravisController(self.drv, gpio_number=gpio_number),
-            adcore.ADHDFWriter(
-                self.hdf,
-                path_provider,
-                lambda: self.name,
-                adcore.ADBaseDatasetDescriber(self.drv),
-            ),
-            config_sigs=(self.drv.acquire_time,),
+            controller=controller,
+            writer=writer,
+            plugins=plugins,
             name=name,
+            config_sigs=config_sigs,
         )
-
-    def get_external_trigger_gpio(self):
-        return self._controller.gpio_number
-
-    def set_external_trigger_gpio(self, gpio_number: AravisController.GPIO_NUMBER):
-        supported_gpio_numbers = get_args(AravisController.GPIO_NUMBER)
-        if gpio_number not in supported_gpio_numbers:
-            raise ValueError(
-                f"{self.__class__.__name__} only supports the following GPIO "
-                f"indices: {supported_gpio_numbers} but was asked to "
-                f"use {gpio_number}"
-            )
-        self._controller.gpio_number = gpio_number
-
-    @property
-    def hints(self) -> Hints:
-        return self._writer.hints
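
The constructor above drops the old `hdf_suffix`/`gpio_number` arguments in favour of a pluggable `writer_cls`. A hedged construction sketch (the prefix, directory, and suffix values are hypothetical; `StaticPathProvider`/`StaticFilenameProvider` are the stock providers from `ophyd_async.core`, assumed unchanged in this release):

```python
from pathlib import Path

from ophyd_async.core import StaticFilenameProvider, StaticPathProvider
from ophyd_async.epics import adcore
from ophyd_async.epics.adaravis import AravisDetector

# Hypothetical data directory and PV prefix, for illustration only
path_provider = StaticPathProvider(StaticFilenameProvider("aravis"), Path("/data"))
det = AravisDetector(
    "BL01T-EA-DET-01:",
    path_provider,
    drv_suffix="cam1:",
    writer_cls=adcore.ADHDFWriter,  # default; other adcore.ADWriter subclasses can be passed
    fileio_suffix="HDF1:",          # replaces the old hdf_suffix argument
    name="det",
)
```

The `get_external_trigger_gpio`/`set_external_trigger_gpio` helpers have been removed from the wrapper along with the `gpio_number` argument.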