ophyd-async 0.9.0a1__py3-none-any.whl → 0.9.0a2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (97) hide show
  1. ophyd_async/_version.py +1 -1
  2. ophyd_async/core/__init__.py +13 -20
  3. ophyd_async/core/_detector.py +61 -37
  4. ophyd_async/core/_device.py +102 -80
  5. ophyd_async/core/_device_filler.py +17 -8
  6. ophyd_async/core/_flyer.py +2 -2
  7. ophyd_async/core/_readable.py +30 -23
  8. ophyd_async/core/_settings.py +104 -0
  9. ophyd_async/core/_signal.py +55 -17
  10. ophyd_async/core/_signal_backend.py +4 -1
  11. ophyd_async/core/_soft_signal_backend.py +2 -1
  12. ophyd_async/core/_table.py +18 -10
  13. ophyd_async/core/_utils.py +5 -3
  14. ophyd_async/core/_yaml_settings.py +64 -0
  15. ophyd_async/epics/adandor/__init__.py +9 -0
  16. ophyd_async/epics/adandor/_andor.py +45 -0
  17. ophyd_async/epics/adandor/_andor_controller.py +49 -0
  18. ophyd_async/epics/adandor/_andor_io.py +36 -0
  19. ophyd_async/epics/adaravis/__init__.py +3 -1
  20. ophyd_async/epics/adaravis/_aravis.py +23 -37
  21. ophyd_async/epics/adaravis/_aravis_controller.py +13 -22
  22. ophyd_async/epics/adcore/__init__.py +15 -8
  23. ophyd_async/epics/adcore/_core_detector.py +41 -0
  24. ophyd_async/epics/adcore/_core_io.py +35 -10
  25. ophyd_async/epics/adcore/_core_logic.py +98 -86
  26. ophyd_async/epics/adcore/_core_writer.py +219 -0
  27. ophyd_async/epics/adcore/_hdf_writer.py +38 -62
  28. ophyd_async/epics/adcore/_jpeg_writer.py +26 -0
  29. ophyd_async/epics/adcore/_single_trigger.py +4 -3
  30. ophyd_async/epics/adcore/_tiff_writer.py +26 -0
  31. ophyd_async/epics/adcore/_utils.py +2 -1
  32. ophyd_async/epics/adkinetix/_kinetix.py +29 -24
  33. ophyd_async/epics/adkinetix/_kinetix_controller.py +9 -21
  34. ophyd_async/epics/adpilatus/__init__.py +2 -2
  35. ophyd_async/epics/adpilatus/_pilatus.py +27 -39
  36. ophyd_async/epics/adpilatus/_pilatus_controller.py +44 -22
  37. ophyd_async/epics/adsimdetector/__init__.py +3 -3
  38. ophyd_async/epics/adsimdetector/_sim.py +33 -17
  39. ophyd_async/epics/advimba/_vimba.py +23 -23
  40. ophyd_async/epics/advimba/_vimba_controller.py +10 -24
  41. ophyd_async/epics/core/_aioca.py +31 -14
  42. ophyd_async/epics/core/_p4p.py +40 -16
  43. ophyd_async/epics/core/_util.py +1 -1
  44. ophyd_async/epics/motor.py +18 -10
  45. ophyd_async/epics/sim/_ioc.py +29 -0
  46. ophyd_async/epics/{demo → sim}/_mover.py +10 -4
  47. ophyd_async/epics/testing/__init__.py +14 -14
  48. ophyd_async/epics/testing/_example_ioc.py +48 -65
  49. ophyd_async/epics/testing/_utils.py +17 -45
  50. ophyd_async/epics/testing/test_records.db +8 -0
  51. ophyd_async/fastcs/panda/__init__.py +0 -2
  52. ophyd_async/fastcs/panda/_control.py +7 -2
  53. ophyd_async/fastcs/panda/_hdf_panda.py +3 -1
  54. ophyd_async/fastcs/panda/_table.py +4 -1
  55. ophyd_async/plan_stubs/__init__.py +14 -0
  56. ophyd_async/plan_stubs/_ensure_connected.py +11 -17
  57. ophyd_async/plan_stubs/_fly.py +1 -1
  58. ophyd_async/plan_stubs/_nd_attributes.py +7 -5
  59. ophyd_async/plan_stubs/_panda.py +13 -0
  60. ophyd_async/plan_stubs/_settings.py +125 -0
  61. ophyd_async/plan_stubs/_wait_for_awaitable.py +13 -0
  62. ophyd_async/sim/__init__.py +19 -0
  63. ophyd_async/sim/{demo/_pattern_detector → _pattern_detector}/_pattern_detector_controller.py +9 -2
  64. ophyd_async/sim/{demo/_pattern_detector → _pattern_detector}/_pattern_generator.py +13 -6
  65. ophyd_async/tango/core/_signal.py +3 -1
  66. ophyd_async/tango/core/_tango_transport.py +12 -14
  67. ophyd_async/tango/{demo → sim}/_mover.py +5 -2
  68. ophyd_async/testing/__init__.py +19 -0
  69. ophyd_async/testing/__pytest_assert_rewrite.py +4 -0
  70. ophyd_async/testing/_assert.py +88 -40
  71. ophyd_async/testing/_mock_signal_utils.py +3 -3
  72. ophyd_async/testing/_one_of_everything.py +126 -0
  73. {ophyd_async-0.9.0a1.dist-info → ophyd_async-0.9.0a2.dist-info}/METADATA +2 -2
  74. ophyd_async-0.9.0a2.dist-info/RECORD +129 -0
  75. {ophyd_async-0.9.0a1.dist-info → ophyd_async-0.9.0a2.dist-info}/WHEEL +1 -1
  76. ophyd_async/core/_device_save_loader.py +0 -274
  77. ophyd_async/epics/adsimdetector/_sim_controller.py +0 -51
  78. ophyd_async/fastcs/panda/_utils.py +0 -16
  79. ophyd_async/sim/demo/__init__.py +0 -19
  80. ophyd_async/sim/testing/__init__.py +0 -0
  81. ophyd_async-0.9.0a1.dist-info/RECORD +0 -119
  82. ophyd_async-0.9.0a1.dist-info/entry_points.txt +0 -2
  83. /ophyd_async/epics/{demo → sim}/__init__.py +0 -0
  84. /ophyd_async/epics/{demo → sim}/_sensor.py +0 -0
  85. /ophyd_async/epics/{demo → sim}/mover.db +0 -0
  86. /ophyd_async/epics/{demo → sim}/sensor.db +0 -0
  87. /ophyd_async/sim/{demo/_pattern_detector → _pattern_detector}/__init__.py +0 -0
  88. /ophyd_async/sim/{demo/_pattern_detector → _pattern_detector}/_pattern_detector.py +0 -0
  89. /ophyd_async/sim/{demo/_pattern_detector → _pattern_detector}/_pattern_detector_writer.py +0 -0
  90. /ophyd_async/sim/{demo/_sim_motor.py → _sim_motor.py} +0 -0
  91. /ophyd_async/tango/{demo → sim}/__init__.py +0 -0
  92. /ophyd_async/tango/{demo → sim}/_counter.py +0 -0
  93. /ophyd_async/tango/{demo → sim}/_detector.py +0 -0
  94. /ophyd_async/tango/{demo → sim}/_tango/__init__.py +0 -0
  95. /ophyd_async/tango/{demo → sim}/_tango/_servers.py +0 -0
  96. {ophyd_async-0.9.0a1.dist-info → ophyd_async-0.9.0a2.dist-info}/LICENSE +0 -0
  97. {ophyd_async-0.9.0a1.dist-info → ophyd_async-0.9.0a2.dist-info}/top_level.txt +0 -0
@@ -123,29 +123,31 @@ class StandardReadable(
123
123
  # we want to combine them when they are Sequences, and ensure they are
124
124
  # identical when string values.
125
125
  for key, value in new_hint.hints.items():
126
+ # fail early for unknown types
126
127
  if isinstance(value, str):
127
128
  if key in hints:
128
- assert (
129
- hints[key] == value # type: ignore[literal-required]
130
- ), f"Hints key {key} value may not be overridden"
129
+ if hints[key] != value:
130
+ msg = f"Hints key {key} value may not be overridden"
131
+ raise RuntimeError(msg)
131
132
  else:
132
133
  hints[key] = value # type: ignore[literal-required]
133
134
  elif isinstance(value, Sequence):
134
135
  if key in hints:
135
136
  for new_val in value:
136
- assert (
137
- new_val not in hints[key] # type: ignore[literal-required]
138
- ), f"Hint {key} {new_val} overrides existing hint"
137
+ if new_val in hints[key]:
138
+ msg = f"Hint {key} {new_val} overrides existing hint"
139
+ raise RuntimeError(msg)
139
140
  hints[key] = ( # type: ignore[literal-required]
140
141
  hints[key] + value # type: ignore[literal-required]
141
142
  )
142
143
  else:
143
144
  hints[key] = value # type: ignore[literal-required]
144
145
  else:
145
- raise TypeError(
146
- f"{new_hint.name}: Unknown type for value '{value}' "
146
+ msg = (
147
+ f"{new_hint.name}: Unknown type for value '{value}'"
147
148
  f" for key '{key}'"
148
149
  )
150
+ raise TypeError(msg)
149
151
 
150
152
  return hints
151
153
 
@@ -204,6 +206,11 @@ class StandardReadable(
204
206
  `StandardReadableFormat` documentation
205
207
  """
206
208
 
209
+ def assert_device_is_signalr(device: Device) -> SignalR:
210
+ if not isinstance(device, SignalR):
211
+ raise TypeError(f"{device} is not a SignalR")
212
+ return device
213
+
207
214
  for device in devices:
208
215
  match format:
209
216
  case StandardReadableFormat.CHILD:
@@ -218,24 +225,24 @@ class StandardReadable(
218
225
  if isinstance(device, HasHints):
219
226
  self._has_hints += (device,)
220
227
  case StandardReadableFormat.CONFIG_SIGNAL:
221
- assert isinstance(device, SignalR), f"{device} is not a SignalR"
222
- self._describe_config_funcs += (device.describe,)
223
- self._read_config_funcs += (device.read,)
228
+ signalr_device = assert_device_is_signalr(device=device)
229
+ self._describe_config_funcs += (signalr_device.describe,)
230
+ self._read_config_funcs += (signalr_device.read,)
224
231
  case StandardReadableFormat.HINTED_SIGNAL:
225
- assert isinstance(device, SignalR), f"{device} is not a SignalR"
226
- self._describe_funcs += (device.describe,)
227
- self._read_funcs += (device.read,)
228
- self._stageables += (device,)
229
- self._has_hints += (_HintsFromName(device),)
232
+ signalr_device = assert_device_is_signalr(device=device)
233
+ self._describe_funcs += (signalr_device.describe,)
234
+ self._read_funcs += (signalr_device.read,)
235
+ self._stageables += (signalr_device,)
236
+ self._has_hints += (_HintsFromName(signalr_device),)
230
237
  case StandardReadableFormat.UNCACHED_SIGNAL:
231
- assert isinstance(device, SignalR), f"{device} is not a SignalR"
232
- self._describe_funcs += (device.describe,)
233
- self._read_funcs += (_UncachedRead(device),)
238
+ signalr_device = assert_device_is_signalr(device=device)
239
+ self._describe_funcs += (signalr_device.describe,)
240
+ self._read_funcs += (_UncachedRead(signalr_device),)
234
241
  case StandardReadableFormat.HINTED_UNCACHED_SIGNAL:
235
- assert isinstance(device, SignalR), f"{device} is not a SignalR"
236
- self._describe_funcs += (device.describe,)
237
- self._read_funcs += (_UncachedRead(device),)
238
- self._has_hints += (_HintsFromName(device),)
242
+ signalr_device = assert_device_is_signalr(device=device)
243
+ self._describe_funcs += (signalr_device.describe,)
244
+ self._read_funcs += (_UncachedRead(signalr_device),)
245
+ self._has_hints += (_HintsFromName(signalr_device),)
239
246
 
240
247
 
241
248
  class _UncachedRead:
@@ -0,0 +1,104 @@
1
+ from __future__ import annotations
2
+
3
+ from abc import abstractmethod
4
+ from collections.abc import Callable, Iterator, MutableMapping
5
+ from typing import Any, Generic
6
+
7
+ from ._device import Device, DeviceT
8
+ from ._signal import SignalRW
9
+ from ._signal_backend import SignalDatatypeT
10
+
11
+
12
+ class Settings(MutableMapping[SignalRW[Any], Any], Generic[DeviceT]):
13
+ def __init__(
14
+ self, device: DeviceT, settings: MutableMapping[SignalRW, Any] | None = None
15
+ ):
16
+ self.device = device
17
+ self._settings = {}
18
+ self.update(settings or {})
19
+
20
+ def __getitem__(self, key: SignalRW[SignalDatatypeT]) -> SignalDatatypeT:
21
+ return self._settings[key]
22
+
23
+ def _is_in_device(self, device: Device) -> bool:
24
+ while device.parent and device.parent is not self.device:
25
+ # While we have a parent that is not the right device
26
+ # continue searching up the tree
27
+ device = device.parent
28
+ return device.parent is self.device
29
+
30
+ def __setitem__(
31
+ self, key: SignalRW[SignalDatatypeT], value: SignalDatatypeT | None
32
+ ) -> None:
33
+ # Check the types on entry to dict to make sure we can't accidentally
34
+ # add a non-signal type
35
+ if not isinstance(key, SignalRW):
36
+ raise TypeError(f"Expected SignalRW, got {key}")
37
+ if not self._is_in_device(key):
38
+ raise KeyError(f"Signal {key} is not a child of {self.device}")
39
+ self._settings[key] = value
40
+
41
+ def __delitem__(self, key: SignalRW) -> None:
42
+ del self._settings[key]
43
+
44
+ def __iter__(self) -> Iterator[SignalRW]:
45
+ yield from iter(self._settings)
46
+
47
+ def __len__(self) -> int:
48
+ return len(self._settings)
49
+
50
+ def __or__(self, other: MutableMapping[SignalRW, Any]) -> Settings[DeviceT]:
51
+ """Create a new Settings that is the union of self overridden by other.
52
+
53
+ For example::
54
+
55
+ settings1 = Settings(device, {device.sig1: 1, device.sig2: 2})
56
+ settings2 = Settings(device, {device.sig1: 10, device.sig3: 3})
57
+ settings = settings1 | settings2
58
+ assert dict(settings) == {
59
+ device.sig1: 10,
60
+ device.sig2: 2,
61
+ device.sig3: 3,
62
+ }
63
+ """
64
+ if isinstance(other, Settings) and not self._is_in_device(other.device):
65
+ raise ValueError(f"{other.device} is not a child of {self.device}")
66
+ return Settings(self.device, self._settings | dict(other))
67
+
68
+ def partition(
69
+ self, predicate: Callable[[SignalRW], bool]
70
+ ) -> tuple[Settings[DeviceT], Settings[DeviceT]]:
71
+ """Partition into two Settings based on a predicate.
72
+
73
+ Parameters
74
+ ----------
75
+ predicate
76
+ Callable that takes each signal, and returns a boolean to say if it
77
+ should be in the first returned Settings
78
+
79
+ Returns
80
+ -------
81
+ (where_true, where_false)
82
+
83
+ For example::
84
+
85
+ settings = Settings(device, {device.special: 1, device.sig: 2})
86
+ specials, others = settings.partition(lambda sig: "special" in sig.name)
87
+ """
88
+ where_true, where_false = Settings(self.device), Settings(self.device)
89
+ for signal, value in self.items():
90
+ dest = where_true if predicate(signal) else where_false
91
+ dest[signal] = value
92
+ return where_true, where_false
93
+
94
+
95
+ class SettingsProvider:
96
+ @abstractmethod
97
+ async def store(self, name: str, data: dict[str, Any]):
98
+ """Store the data, associating it with the given name."""
99
+ ...
100
+
101
+ @abstractmethod
102
+ async def retrieve(self, name: str) -> dict[str, Any]:
103
+ """Retrieve the data associated with the given name."""
104
+ ...
@@ -4,7 +4,7 @@ import asyncio
4
4
  import functools
5
5
  import time
6
6
  from collections.abc import AsyncGenerator, Awaitable, Callable
7
- from typing import Generic, cast
7
+ from typing import Any, Generic, cast
8
8
 
9
9
  from bluesky.protocols import (
10
10
  Locatable,
@@ -97,30 +97,35 @@ class Signal(Device, Generic[SignalDatatypeT]):
97
97
 
98
98
 
99
99
  class _SignalCache(Generic[SignalDatatypeT]):
100
- def __init__(self, backend: SignalBackend[SignalDatatypeT], signal: Signal):
101
- self._signal = signal
100
+ def __init__(self, backend: SignalBackend[SignalDatatypeT], signal: Signal) -> None:
101
+ self._signal: Signal[Any] = signal
102
102
  self._staged = False
103
103
  self._listeners: dict[Callback, bool] = {}
104
104
  self._valid = asyncio.Event()
105
105
  self._reading: Reading[SignalDatatypeT] | None = None
106
- self.backend = backend
106
+ self.backend: SignalBackend[SignalDatatypeT] = backend
107
107
  signal.log.debug(f"Making subscription on source {signal.source}")
108
108
  backend.set_callback(self._callback)
109
109
 
110
- def close(self):
110
+ def close(self) -> None:
111
111
  self.backend.set_callback(None)
112
112
  self._signal.log.debug(f"Closing subscription on source {self._signal.source}")
113
113
 
114
+ def _ensure_reading(self) -> Reading[SignalDatatypeT]:
115
+ if not self._reading:
116
+ msg = "Monitor not working"
117
+ raise RuntimeError(msg)
118
+ return self._reading
119
+
114
120
  async def get_reading(self) -> Reading[SignalDatatypeT]:
115
121
  await self._valid.wait()
116
- assert self._reading is not None, "Monitor not working"
117
- return self._reading
122
+ return self._ensure_reading()
118
123
 
119
124
  async def get_value(self) -> SignalDatatypeT:
120
- reading = await self.get_reading()
125
+ reading: Reading[SignalDatatypeT] = await self.get_reading()
121
126
  return reading["value"]
122
127
 
123
- def _callback(self, reading: Reading[SignalDatatypeT]):
128
+ def _callback(self, reading: Reading[SignalDatatypeT]) -> None:
124
129
  self._signal.log.debug(
125
130
  f"Updated subscription: reading of source {self._signal.source} changed "
126
131
  f"from {self._reading} to {reading}"
@@ -134,12 +139,10 @@ class _SignalCache(Generic[SignalDatatypeT]):
134
139
  self,
135
140
  function: Callback[dict[str, Reading[SignalDatatypeT]] | SignalDatatypeT],
136
141
  want_value: bool,
137
- ):
138
- assert self._reading, "Monitor not working"
139
- if want_value:
140
- function(self._reading["value"])
141
- else:
142
- function({self._signal.name: self._reading})
142
+ ) -> None:
143
+ function(self._ensure_reading()["value"]) if want_value else function(
144
+ {self._signal.name: self._ensure_reading()}
145
+ )
143
146
 
144
147
  def subscribe(self, function: Callback, want_value: bool) -> None:
145
148
  self._listeners[function] = want_value
@@ -150,7 +153,7 @@ class _SignalCache(Generic[SignalDatatypeT]):
150
153
  self._listeners.pop(function)
151
154
  return self._staged or bool(self._listeners)
152
155
 
153
- def set_staged(self, staged: bool):
156
+ def set_staged(self, staged: bool) -> bool:
154
157
  self._staged = staged
155
158
  return self._staged or bool(self._listeners)
156
159
 
@@ -167,7 +170,10 @@ class SignalR(Signal[SignalDatatypeT], AsyncReadable, AsyncStageable, Subscribab
167
170
  if cached is None:
168
171
  cached = self._cache is not None
169
172
  if cached:
170
- assert self._cache, f"{self.source} not being monitored"
173
+ if not self._cache:
174
+ msg = f"{self.source} not being monitored"
175
+ raise RuntimeError(msg)
176
+ # assert self._cache, f"{self.source} not being monitored"
171
177
  return self._cache
172
178
  else:
173
179
  return self._connector.backend
@@ -598,3 +604,35 @@ async def set_and_wait_for_value(
598
604
  status_timeout,
599
605
  wait_for_set_completion,
600
606
  )
607
+
608
+
609
+ def walk_rw_signals(device: Device, path_prefix: str = "") -> dict[str, SignalRW[Any]]:
610
+ """Retrieve all SignalRWs from a device.
611
+
612
+ Stores retrieved signals with their dotted attribute paths in a dictionary. Used as
613
+ part of saving and loading a device.
614
+
615
+ Parameters
616
+ ----------
617
+ device : Device
618
+ Ophyd device to retrieve read-write signals from.
619
+
620
+ path_prefix : str
621
+ For internal use, leave blank when calling the method.
622
+
623
+ Returns
624
+ -------
625
+ SignalRWs : dict
626
+ A dictionary matching the string attribute path of a SignalRW with the
627
+ signal itself.
628
+
629
+ """
630
+ signals: dict[str, SignalRW[Any]] = {}
631
+
632
+ for attr_name, attr in device.children():
633
+ dot_path = f"{path_prefix}{attr_name}"
634
+ if type(attr) is SignalRW:
635
+ signals[dot_path] = attr
636
+ attr_signals = walk_rw_signals(attr, path_prefix=dot_path + ".")
637
+ signals.update(attr_signals)
638
+ return signals
@@ -10,7 +10,10 @@ from ._table import Table
10
10
  from ._utils import Callback, StrictEnum, T
11
11
 
12
12
  DTypeScalar_co = TypeVar("DTypeScalar_co", covariant=True, bound=np.generic)
13
- Array1D = np.ndarray[tuple[int], np.dtype[DTypeScalar_co]]
13
+ # To be a 1D array shape should really be tuple[int], but np.array()
14
+ # currently produces tuple[int, ...] even when it has 1D input args
15
+ # https://github.com/numpy/numpy/issues/28077#issuecomment-2566485178
16
+ Array1D = np.ndarray[tuple[int, ...], np.dtype[DTypeScalar_co]]
14
17
  Primitive = bool | int | float | str
15
18
  # NOTE: if you change this union then update the docs to match
16
19
  SignalDatatype = (
@@ -175,7 +175,8 @@ class SoftSignalBackend(SignalBackend[SignalDatatypeT]):
175
175
  return self.reading["value"]
176
176
 
177
177
  def set_callback(self, callback: Callback[Reading[SignalDatatypeT]] | None) -> None:
178
+ if callback and self.callback:
179
+ raise RuntimeError("Cannot set a callback when one is already set")
178
180
  if callback:
179
- assert not self.callback, "Cannot set a callback when one is already set"
180
181
  callback(self.reading)
181
182
  self.callback = callback
@@ -39,6 +39,11 @@ class Table(BaseModel):
39
39
  # so it is strictly checked against the BaseModel we are supplied.
40
40
  model_config = ConfigDict(extra="allow")
41
41
 
42
+ # Add an init method to match the above model config, otherwise the type
43
+ # checker will not think we can pass arbitrary kwargs into the base class init
44
+ def __init__(self, **kwargs):
45
+ super().__init__(**kwargs)
46
+
42
47
  @classmethod
43
48
  def __init_subclass__(cls):
44
49
  # But forbid extra in subclasses so it gets validated
@@ -78,9 +83,6 @@ class Table(BaseModel):
78
83
  }
79
84
  )
80
85
 
81
- def __eq__(self, value: object) -> bool:
82
- return super().__eq__(value)
83
-
84
86
  def numpy_dtype(self) -> np.dtype:
85
87
  dtype = []
86
88
  for k, v in self:
@@ -99,8 +101,10 @@ class Table(BaseModel):
99
101
  v = v[selection]
100
102
  if array is None:
101
103
  array = np.empty(v.shape, dtype=self.numpy_dtype())
102
- array[k] = v
103
- assert array is not None
104
+ array[k] = v # type: ignore
105
+ if array is None:
106
+ msg = "No arrays found in table"
107
+ raise ValueError(msg)
104
108
  return array
105
109
 
106
110
  @model_validator(mode="before")
@@ -123,10 +127,12 @@ class Table(BaseModel):
123
127
  # Convert to correct dtype, but only if we don't lose precision
124
128
  # as a result
125
129
  cast_value = np.array(data_value).astype(expected_dtype)
126
- assert np.array_equal(data_value, cast_value), (
127
- f"{field_name}: Cannot cast {data_value} to {expected_dtype} "
128
- "without losing precision"
129
- )
130
+ if not np.array_equal(data_value, cast_value):
131
+ msg = (
132
+ f"{field_name}: Cannot cast {data_value} to {expected_dtype} "
133
+ "without losing precision"
134
+ )
135
+ raise ValueError(msg)
130
136
  data_dict[field_name] = cast_value
131
137
  return data_dict
132
138
 
@@ -135,7 +141,9 @@ class Table(BaseModel):
135
141
  lengths: dict[int, set[str]] = {}
136
142
  for field_name, field_value in self:
137
143
  lengths.setdefault(len(field_value), set()).add(field_name)
138
- assert len(lengths) <= 1, f"Columns should be same length, got {lengths=}"
144
+ if len(lengths) > 1:
145
+ msg = f"Columns should be same length, got {lengths=}"
146
+ raise ValueError(msg)
139
147
  return self
140
148
 
141
149
  def __len__(self) -> int:
@@ -16,6 +16,8 @@ Callback = Callable[[T], None]
16
16
  DEFAULT_TIMEOUT = 10.0
17
17
  ErrorText = str | Mapping[str, Exception]
18
18
 
19
+ logger = logging.getLogger("ophyd_async")
20
+
19
21
 
20
22
  class StrictEnumMeta(EnumMeta):
21
23
  def __new__(metacls, *args, **kwargs):
@@ -94,7 +96,7 @@ class NotConnected(Exception):
94
96
  def format_error_string(self, indent="") -> str:
95
97
  if not isinstance(self._errors, dict) and not isinstance(self._errors, str):
96
98
  raise RuntimeError(
97
- f"Unexpected type `{type(self._errors)}` " "expected `str` or `dict`"
99
+ f"Unexpected type `{type(self._errors)}` expected `str` or `dict`"
98
100
  )
99
101
 
100
102
  if isinstance(self._errors, str):
@@ -114,7 +116,7 @@ class NotConnected(Exception):
114
116
  ) -> NotConnected:
115
117
  for name, exception in exceptions.items():
116
118
  if not isinstance(exception, NotConnected):
117
- logging.exception(
119
+ logger.exception(
118
120
  f"device `{name}` raised unexpected exception "
119
121
  f"{type(exception).__name__}",
120
122
  exc_info=exception,
@@ -189,7 +191,7 @@ def get_enum_cls(datatype: type | None) -> type[StrictEnum] | None:
189
191
  if datatype and issubclass(datatype, Enum):
190
192
  if not issubclass(datatype, StrictEnum):
191
193
  raise TypeError(
192
- f"{datatype} should inherit from .SubsetEnum "
194
+ f"{datatype} should inherit from ophyd_async.core.SubsetEnum "
193
195
  "or ophyd_async.core.StrictEnum"
194
196
  )
195
197
  return datatype
@@ -0,0 +1,64 @@
1
+ import warnings
2
+ from enum import Enum
3
+ from pathlib import Path
4
+ from typing import Any
5
+
6
+ import numpy as np
7
+ import numpy.typing as npt
8
+ import yaml
9
+ from pydantic import BaseModel
10
+
11
+ from ._settings import SettingsProvider
12
+
13
+
14
+ def ndarray_representer(dumper: yaml.Dumper, array: npt.NDArray[Any]) -> yaml.Node:
15
+ return dumper.represent_sequence(
16
+ "tag:yaml.org,2002:seq", array.tolist(), flow_style=True
17
+ )
18
+
19
+
20
+ def pydantic_model_abstraction_representer(
21
+ dumper: yaml.Dumper, model: BaseModel
22
+ ) -> yaml.Node:
23
+ return dumper.represent_data(model.model_dump(mode="python"))
24
+
25
+
26
+ def enum_representer(dumper: yaml.Dumper, enum: Enum) -> yaml.Node:
27
+ return dumper.represent_data(enum.value)
28
+
29
+
30
+ class YamlSettingsProvider(SettingsProvider):
31
+ def __init__(self, directory: Path | str):
32
+ self._directory = Path(directory)
33
+
34
+ def _file_path(self, name: str) -> Path:
35
+ return self._directory / (name + ".yaml")
36
+
37
+ async def store(self, name: str, data: dict[str, Any]):
38
+ yaml.add_representer(np.ndarray, ndarray_representer, Dumper=yaml.Dumper)
39
+ yaml.add_multi_representer(
40
+ BaseModel,
41
+ pydantic_model_abstraction_representer,
42
+ Dumper=yaml.Dumper,
43
+ )
44
+ yaml.add_multi_representer(Enum, enum_representer, Dumper=yaml.Dumper)
45
+ with open(self._file_path(name), "w") as file:
46
+ yaml.dump(data, file)
47
+
48
+ async def retrieve(self, name: str) -> dict[str, Any]:
49
+ with open(self._file_path(name)) as file:
50
+ data = yaml.full_load(file)
51
+ if isinstance(data, list):
52
+ warnings.warn(
53
+ DeprecationWarning(
54
+ "Found old save file. Re-save your yaml settings file "
55
+ f"{self._file_path(name)} using "
56
+ "ophyd_async.plan_stubs.store_settings"
57
+ ),
58
+ stacklevel=2,
59
+ )
60
+ merge = {}
61
+ for d in data:
62
+ merge.update(d)
63
+ return merge
64
+ return data
@@ -0,0 +1,9 @@
1
+ from ._andor import Andor2Detector
2
+ from ._andor_controller import Andor2Controller
3
+ from ._andor_io import Andor2DriverIO
4
+
5
+ __all__ = [
6
+ "Andor2Detector",
7
+ "Andor2Controller",
8
+ "Andor2DriverIO",
9
+ ]
@@ -0,0 +1,45 @@
1
+ from collections.abc import Sequence
2
+
3
+ from ophyd_async.core import PathProvider
4
+ from ophyd_async.core._signal import SignalR
5
+ from ophyd_async.epics import adcore
6
+
7
+ from ._andor_controller import Andor2Controller
8
+ from ._andor_io import Andor2DriverIO
9
+
10
+
11
+ class Andor2Detector(adcore.AreaDetector[Andor2Controller]):
12
+ """
13
+ Andor 2 area detector device (CCD detector 56fps with full chip readout).
14
+ Andor model: DU897_BV.
15
+ """
16
+
17
+ def __init__(
18
+ self,
19
+ prefix: str,
20
+ path_provider: PathProvider,
21
+ drv_suffix="cam1:",
22
+ writer_cls: type[adcore.ADWriter] = adcore.ADHDFWriter,
23
+ fileio_suffix: str | None = None,
24
+ name: str = "",
25
+ config_sigs: Sequence[SignalR] = (),
26
+ plugins: dict[str, adcore.NDPluginBaseIO] | None = None,
27
+ ):
28
+ driver = Andor2DriverIO(prefix + drv_suffix)
29
+ controller = Andor2Controller(driver)
30
+
31
+ writer = writer_cls.with_io(
32
+ prefix,
33
+ path_provider,
34
+ dataset_source=driver,
35
+ fileio_suffix=fileio_suffix,
36
+ plugins=plugins,
37
+ )
38
+
39
+ super().__init__(
40
+ controller=controller,
41
+ writer=writer,
42
+ plugins=plugins,
43
+ name=name,
44
+ config_sigs=config_sigs,
45
+ )
@@ -0,0 +1,49 @@
1
+ import asyncio
2
+
3
+ from ophyd_async.core import (
4
+ DetectorTrigger,
5
+ TriggerInfo,
6
+ )
7
+ from ophyd_async.epics import adcore
8
+
9
+ from ._andor_io import Andor2DriverIO, Andor2TriggerMode
10
+
11
+ _MIN_DEAD_TIME = 0.1
12
+ _MAX_NUM_IMAGE = 999_999
13
+
14
+
15
+ class Andor2Controller(adcore.ADBaseController[Andor2DriverIO]):
16
+ def __init__(
17
+ self,
18
+ driver: Andor2DriverIO,
19
+ good_states: frozenset[adcore.DetectorState] = adcore.DEFAULT_GOOD_STATES,
20
+ ) -> None:
21
+ super().__init__(driver, good_states=good_states)
22
+
23
+ def get_deadtime(self, exposure: float | None) -> float:
24
+ return _MIN_DEAD_TIME + (exposure or 0)
25
+
26
+ async def prepare(self, trigger_info: TriggerInfo):
27
+ await self.set_exposure_time_and_acquire_period_if_supplied(
28
+ trigger_info.livetime
29
+ )
30
+ await asyncio.gather(
31
+ self.driver.trigger_mode.set(self._get_trigger_mode(trigger_info.trigger)),
32
+ self.driver.num_images.set(
33
+ trigger_info.total_number_of_triggers or _MAX_NUM_IMAGE
34
+ ),
35
+ self.driver.image_mode.set(adcore.ImageMode.MULTIPLE),
36
+ )
37
+
38
+ def _get_trigger_mode(self, trigger: DetectorTrigger) -> Andor2TriggerMode:
39
+ supported_trigger_types = {
40
+ DetectorTrigger.INTERNAL: Andor2TriggerMode.INTERNAL,
41
+ DetectorTrigger.EDGE_TRIGGER: Andor2TriggerMode.EXT_TRIGGER,
42
+ }
43
+ if trigger not in supported_trigger_types:
44
+ raise ValueError(
45
+ f"{self.__class__.__name__} only supports the following trigger "
46
+ f"types: {supported_trigger_types} but was asked to "
47
+ f"use {trigger}"
48
+ )
49
+ return supported_trigger_types[trigger]
@@ -0,0 +1,36 @@
1
+ from ophyd_async.core import StrictEnum, SubsetEnum
2
+ from ophyd_async.epics.adcore import ADBaseIO
3
+ from ophyd_async.epics.core import (
4
+ epics_signal_r,
5
+ epics_signal_rw,
6
+ )
7
+
8
+
9
+ class Andor2TriggerMode(StrictEnum):
10
+ INTERNAL = "Internal"
11
+ EXT_TRIGGER = "External"
12
+ EXT_START = "External Start"
13
+ EXT_EXPOSURE = "External Exposure"
14
+ EXT_FVP = "External FVP"
15
+ SOFTWARE = "Software"
16
+
17
+
18
+ class Andor2DataType(SubsetEnum):
19
+ UINT16 = "UInt16"
20
+ UINT32 = "UInt32"
21
+ FLOAT32 = "Float32"
22
+ FLOAT64 = "Float64"
23
+
24
+
25
+ class Andor2DriverIO(ADBaseIO):
26
+ """
27
+ Epics pv for andor model:DU897_BV as deployed on p99
28
+ """
29
+
30
+ def __init__(self, prefix: str, name: str = "") -> None:
31
+ super().__init__(prefix, name=name)
32
+ self.trigger_mode = epics_signal_rw(Andor2TriggerMode, prefix + "TriggerMode")
33
+ self.data_type = epics_signal_r(Andor2DataType, prefix + "DataType_RBV")
34
+ self.andor_accumulate_period = epics_signal_r(
35
+ float, prefix + "AndorAccumulatePeriod_RBV"
36
+ )
@@ -1,9 +1,11 @@
1
1
  from ._aravis import AravisDetector
2
2
  from ._aravis_controller import AravisController
3
- from ._aravis_io import AravisDriverIO
3
+ from ._aravis_io import AravisDriverIO, AravisTriggerMode, AravisTriggerSource
4
4
 
5
5
  __all__ = [
6
6
  "AravisDetector",
7
7
  "AravisController",
8
8
  "AravisDriverIO",
9
+ "AravisTriggerMode",
10
+ "AravisTriggerSource",
9
11
  ]