ophyd-async 0.7.0a1__py3-none-any.whl → 0.8.0a3__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to their public registry, and is provided for informational purposes only.
Files changed (83)
  1. ophyd_async/_version.py +2 -2
  2. ophyd_async/core/__init__.py +30 -9
  3. ophyd_async/core/_detector.py +5 -10
  4. ophyd_async/core/_device.py +146 -67
  5. ophyd_async/core/_device_filler.py +269 -0
  6. ophyd_async/core/_device_save_loader.py +6 -7
  7. ophyd_async/core/_mock_signal_backend.py +32 -40
  8. ophyd_async/core/_mock_signal_utils.py +22 -16
  9. ophyd_async/core/_protocol.py +28 -8
  10. ophyd_async/core/_readable.py +133 -134
  11. ophyd_async/core/_signal.py +140 -152
  12. ophyd_async/core/_signal_backend.py +131 -64
  13. ophyd_async/core/_soft_signal_backend.py +125 -194
  14. ophyd_async/core/_status.py +22 -6
  15. ophyd_async/core/_table.py +97 -100
  16. ophyd_async/core/_utils.py +79 -18
  17. ophyd_async/epics/adaravis/_aravis_controller.py +2 -2
  18. ophyd_async/epics/adaravis/_aravis_io.py +8 -6
  19. ophyd_async/epics/adcore/_core_io.py +5 -7
  20. ophyd_async/epics/adcore/_hdf_writer.py +2 -2
  21. ophyd_async/epics/adcore/_single_trigger.py +4 -9
  22. ophyd_async/epics/adcore/_utils.py +15 -10
  23. ophyd_async/epics/adkinetix/__init__.py +2 -1
  24. ophyd_async/epics/adkinetix/_kinetix_controller.py +6 -3
  25. ophyd_async/epics/adkinetix/_kinetix_io.py +4 -5
  26. ophyd_async/epics/adpilatus/_pilatus_controller.py +2 -2
  27. ophyd_async/epics/adpilatus/_pilatus_io.py +3 -4
  28. ophyd_async/epics/adsimdetector/_sim_controller.py +2 -2
  29. ophyd_async/epics/advimba/__init__.py +4 -1
  30. ophyd_async/epics/advimba/_vimba_controller.py +6 -3
  31. ophyd_async/epics/advimba/_vimba_io.py +8 -9
  32. ophyd_async/epics/core/__init__.py +26 -0
  33. ophyd_async/epics/core/_aioca.py +323 -0
  34. ophyd_async/epics/core/_epics_connector.py +53 -0
  35. ophyd_async/epics/core/_epics_device.py +13 -0
  36. ophyd_async/epics/core/_p4p.py +382 -0
  37. ophyd_async/epics/core/_pvi_connector.py +92 -0
  38. ophyd_async/epics/core/_signal.py +171 -0
  39. ophyd_async/epics/core/_util.py +61 -0
  40. ophyd_async/epics/demo/_mover.py +4 -5
  41. ophyd_async/epics/demo/_sensor.py +14 -13
  42. ophyd_async/epics/eiger/_eiger.py +1 -2
  43. ophyd_async/epics/eiger/_eiger_controller.py +1 -1
  44. ophyd_async/epics/eiger/_eiger_io.py +3 -5
  45. ophyd_async/epics/eiger/_odin_io.py +5 -5
  46. ophyd_async/epics/motor.py +4 -5
  47. ophyd_async/epics/signal.py +11 -0
  48. ophyd_async/fastcs/core.py +9 -0
  49. ophyd_async/fastcs/panda/__init__.py +4 -4
  50. ophyd_async/fastcs/panda/_block.py +23 -11
  51. ophyd_async/fastcs/panda/_control.py +3 -5
  52. ophyd_async/fastcs/panda/_hdf_panda.py +5 -19
  53. ophyd_async/fastcs/panda/_table.py +29 -51
  54. ophyd_async/fastcs/panda/_trigger.py +8 -8
  55. ophyd_async/fastcs/panda/_writer.py +4 -7
  56. ophyd_async/plan_stubs/_ensure_connected.py +3 -1
  57. ophyd_async/plan_stubs/_fly.py +2 -2
  58. ophyd_async/plan_stubs/_nd_attributes.py +5 -4
  59. ophyd_async/py.typed +0 -0
  60. ophyd_async/sim/demo/_pattern_detector/_pattern_detector_controller.py +1 -2
  61. ophyd_async/sim/demo/_sim_motor.py +3 -4
  62. ophyd_async/tango/__init__.py +2 -4
  63. ophyd_async/tango/base_devices/_base_device.py +76 -144
  64. ophyd_async/tango/demo/_counter.py +8 -18
  65. ophyd_async/tango/demo/_mover.py +5 -6
  66. ophyd_async/tango/signal/__init__.py +2 -4
  67. ophyd_async/tango/signal/_signal.py +29 -50
  68. ophyd_async/tango/signal/_tango_transport.py +38 -40
  69. {ophyd_async-0.7.0a1.dist-info → ophyd_async-0.8.0a3.dist-info}/METADATA +8 -12
  70. ophyd_async-0.8.0a3.dist-info/RECORD +112 -0
  71. {ophyd_async-0.7.0a1.dist-info → ophyd_async-0.8.0a3.dist-info}/WHEEL +1 -1
  72. ophyd_async/epics/pvi/__init__.py +0 -3
  73. ophyd_async/epics/pvi/_pvi.py +0 -338
  74. ophyd_async/epics/signal/__init__.py +0 -21
  75. ophyd_async/epics/signal/_aioca.py +0 -378
  76. ophyd_async/epics/signal/_common.py +0 -57
  77. ophyd_async/epics/signal/_epics_transport.py +0 -34
  78. ophyd_async/epics/signal/_p4p.py +0 -518
  79. ophyd_async/epics/signal/_signal.py +0 -114
  80. ophyd_async-0.7.0a1.dist-info/RECORD +0 -108
  81. {ophyd_async-0.7.0a1.dist-info → ophyd_async-0.8.0a3.dist-info}/LICENSE +0 -0
  82. {ophyd_async-0.7.0a1.dist-info → ophyd_async-0.8.0a3.dist-info}/entry_points.txt +0 -0
  83. {ophyd_async-0.7.0a1.dist-info → ophyd_async-0.8.0a3.dist-info}/top_level.txt +0 -0
ophyd_async/core/_table.py
@@ -1,8 +1,13 @@
-from enum import Enum
-from typing import TypeVar, get_args, get_origin
+from __future__ import annotations
+
+from collections.abc import Sequence
+from typing import Annotated, Any, TypeVar, get_origin
 
 import numpy as np
-from pydantic import BaseModel, ConfigDict, model_validator
+from pydantic import BaseModel, ConfigDict, Field, model_validator
+from pydantic_numpy.helper.annotation import NpArrayPydanticAnnotation
+
+from ._utils import get_dtype
 
 TableSubclass = TypeVar("TableSubclass", bound="Table")
 
@@ -17,34 +22,38 @@ def _concat(value1, value2):
 class Table(BaseModel):
     """An abstraction of a Table of str to numpy array."""
 
-    model_config = ConfigDict(validate_assignment=True, strict=False)
-
-    @staticmethod
-    def row(cls: type[TableSubclass], **kwargs) -> TableSubclass:  # type: ignore
-        arrayified_kwargs = {}
-        for field_name, field_value in cls.model_fields.items():
-            value = kwargs.pop(field_name)
-            if field_value.default_factory is None:
-                raise ValueError(
-                    "`Table` models should have default factories for their "
-                    "mutable empty columns."
-                )
-            default_array = field_value.default_factory()
-            if isinstance(default_array, np.ndarray):
-                arrayified_kwargs[field_name] = np.array(
-                    [value], dtype=default_array.dtype
-                )
-            elif issubclass(type(value), Enum) and isinstance(value, str):
-                arrayified_kwargs[field_name] = [value]
+    # You can use Table in 2 ways:
+    # 1. Table(**whatever_pva_gives_us) when pvi adds a Signal to a Device that is not
+    #    type hinted
+    # 2. MyTable(**whatever_pva_gives_us) where the Signal is type hinted
+    #
+    # For 1 we want extra="allow" so it is passed through as is. There are no base class
+    # fields, only "extra" fields, so they must be allowed. For 2 we want extra="forbid"
+    # so it is strictly checked against the BaseModel we are supplied.
+    model_config = ConfigDict(extra="allow")
+
+    @classmethod
+    def __init_subclass__(cls):
+        # But forbit extra in subclasses so it gets validated
+        cls.model_config = ConfigDict(validate_assignment=True, extra="forbid")
+        # Change fields to have the correct annotations
+        for k, anno in cls.__annotations__.items():
+            if get_origin(anno) is np.ndarray:
+                dtype = get_dtype(anno)
+                new_anno = Annotated[
+                    anno,
+                    NpArrayPydanticAnnotation.factory(
+                        data_type=dtype.type, dimensions=1, strict_data_typing=False
+                    ),
+                    Field(
+                        default_factory=lambda dtype=dtype: np.array([], dtype=dtype)
+                    ),
+                ]
+            elif get_origin(anno) is Sequence:
+                new_anno = Annotated[anno, Field(default_factory=list)]
            else:
-                raise TypeError(
-                    "Row column should be numpy arrays or sequence of string `Enum`."
-                )
-        if kwargs:
-            raise TypeError(
-                f"Unexpected keyword arguments {kwargs.keys()} for {cls.__name__}."
-            )
-        return cls(**arrayified_kwargs)
+                raise TypeError(f"Cannot use annotation {anno} in a Table")
+            cls.__annotations__[k] = new_anno
 
     def __add__(self, right: TableSubclass) -> TableSubclass:
         """Concatenate the arrays in field values."""
@@ -64,83 +73,71 @@ class Table(BaseModel):
             }
         )
 
+    def __eq__(self, value: object) -> bool:
+        return super().__eq__(value)
+
     def numpy_dtype(self) -> np.dtype:
         dtype = []
-        for field_name, field_value in self.model_fields.items():
-            if np.ndarray in (
-                get_origin(field_value.annotation),
-                field_value.annotation,
-            ):
-                dtype.append((field_name, getattr(self, field_name).dtype))
+        for k, v in self:
+            if isinstance(v, np.ndarray):
+                dtype.append((k, v.dtype))
            else:
-                enum_type = get_args(field_value.annotation)[0]
-                assert issubclass(enum_type, Enum)
-                enum_values = [element.value for element in enum_type]
-                max_length_in_enum = max(len(value) for value in enum_values)
-                dtype.append((field_name, np.dtype(f"<U{max_length_in_enum}")))
-
+                # TODO: use np.dtypes.StringDType when we can use in structured arrays
+                # https://github.com/numpy/numpy/issues/25693
+                dtype.append((k, np.dtype("S40")))
         return np.dtype(dtype)
 
-    def numpy_table(self):
-        # It would be nice to be able to use np.transpose for this,
-        # but it defaults to the largest dtype for everything.
-        dtype = self.numpy_dtype()
-        transposed_list = [
-            np.array(tuple(row), dtype=dtype)
-            for row in zip(*self.numpy_columns(), strict=False)
-        ]
-        transposed = np.array(transposed_list, dtype=dtype)
-        return transposed
-
-    def numpy_columns(self) -> list[np.ndarray]:
-        """Columns in the table can be lists of string enums or numpy arrays.
-
-        This method returns the columns, converting the string enums to numpy arrays.
-        """
-
-        columns = []
-        for field_name, field_value in self.model_fields.items():
-            if np.ndarray in (
-                get_origin(field_value.annotation),
-                field_value.annotation,
+    def numpy_table(self, selection: slice | None = None) -> np.ndarray:
+        array = None
+        for k, v in self:
+            if selection:
+                v = v[selection]
+            if array is None:
+                array = np.empty(v.shape, dtype=self.numpy_dtype())
+            array[k] = v
+        assert array is not None
+        return array
+
+    @model_validator(mode="before")
+    @classmethod
+    def validate_array_dtypes(cls, data: Any) -> Any:
+        if isinstance(data, dict):
+            data_dict = data
+        elif isinstance(data, Table):
+            data_dict = data.model_dump()
+        else:
+            raise AssertionError(f"Cannot construct Table from {data}")
+        for field_name, field_value in cls.model_fields.items():
+            if (
+                get_origin(field_value.annotation) is np.ndarray
+                and field_value.annotation
+                and field_name in data_dict
            ):
-                columns.append(getattr(self, field_name))
-            else:
-                enum_type = get_args(field_value.annotation)[0]
-                assert issubclass(enum_type, Enum)
-                enum_values = [element.value for element in enum_type]
-                max_length_in_enum = max(len(value) for value in enum_values)
-                dtype = np.dtype(f"<U{max_length_in_enum}")
-
-                columns.append(
-                    np.array(
-                        [enum.value for enum in getattr(self, field_name)], dtype=dtype
-                    )
+                data_value = data_dict[field_name]
+                expected_dtype = get_dtype(field_value.annotation)
+                # Convert to correct dtype, but only if we don't lose precision
+                # as a result
+                cast_value = np.array(data_value).astype(expected_dtype)
+                assert np.array_equal(data_value, cast_value), (
+                    f"{field_name}: Cannot cast {data_value} to {expected_dtype} "
+                    "without losing precision"
                )
-
-        return columns
+                data_dict[field_name] = cast_value
+        return data_dict
 
     @model_validator(mode="after")
-    def validate_arrays(self) -> "Table":
-        first_length = len(next(iter(self))[1])
-        assert all(
-            len(field_value) == first_length for _, field_value in self
-        ), "Rows should all be of equal size."
-
-        if not all(
-            # Checks if the values are numpy subtypes if the array is a numpy array,
-            # or if the value is a string enum.
-            np.issubdtype(getattr(self, field_name).dtype, default_array.dtype)
-            if isinstance(
-                default_array := self.model_fields[field_name].default_factory(),  # type: ignore
-                np.ndarray,
-            )
-            else issubclass(get_args(field_value.annotation)[0], Enum)
-            for field_name, field_value in self.model_fields.items()
-        ):
-            raise ValueError(
-                f"Cannot construct a `{type(self).__name__}`, "
-                "some rows have incorrect types."
-            )
-
+    def validate_lengths(self) -> Table:
+        lengths: dict[int, set[str]] = {}
+        for field_name, field_value in self:
+            lengths.setdefault(len(field_value), set()).add(field_name)
+        assert len(lengths) <= 1, f"Columns should be same length, got {lengths=}"
        return self
+
+    def __len__(self) -> int:
+        return len(next(iter(self))[1])
+
+    def __getitem__(self, item: int | slice) -> np.ndarray:
+        if isinstance(item, int):
+            return self.numpy_table(slice(item, item + 1))
+        else:
+            return self.numpy_table(item)
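The new __init_subclass__ hook above means a Table subclass only declares column annotations: Array1D columns get an empty-array default plus dtype checking, Sequence columns default to an empty list, and length validation, __len__ and row slicing come from the base class. A minimal sketch of what that looks like in practice, with hypothetical column names:

```python
from collections.abc import Sequence

import numpy as np

from ophyd_async.core import Array1D, StrictEnum, Table


class Direction(StrictEnum):
    # Hypothetical enum column values
    forward = "Forward"
    reverse = "Reverse"


class ScanTable(Table):
    # ndarray columns get an empty-array default and dtype checking,
    # Sequence columns default to []
    position: Array1D[np.float64]
    direction: Sequence[Direction]


t = ScanTable(
    position=np.array([0.0, 0.5, 1.0]),
    direction=[Direction.forward, Direction.reverse, Direction.forward],
)
assert len(t) == 3  # __len__ is the common column length
row = t[0]          # __getitem__ returns a structured numpy array of one row
```

Construction goes through the validate_array_dtypes validator, so values that cannot be cast to the declared dtype without losing precision are rejected, and validate_lengths requires all columns to be the same length.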
ophyd_async/core/_utils.py
@@ -2,25 +2,43 @@ from __future__ import annotations
 
 import asyncio
 import logging
-from collections.abc import Awaitable, Callable, Iterable
+from collections.abc import Awaitable, Callable, Iterable, Sequence
 from dataclasses import dataclass
-from typing import Generic, Literal, ParamSpec, TypeVar, get_origin
+from enum import Enum, EnumMeta
+from typing import (
+    Any,
+    Generic,
+    Literal,
+    ParamSpec,
+    TypeVar,
+    get_args,
+    get_origin,
+)
 
 import numpy as np
-from bluesky.protocols import Reading
-from pydantic import BaseModel
 
 T = TypeVar("T")
 P = ParamSpec("P")
 Callback = Callable[[T], None]
-
-#: A function that will be called with the Reading and value when the
-#: monitor updates
-ReadingValueCallback = Callable[[Reading, T], None]
 DEFAULT_TIMEOUT = 10.0
 ErrorText = str | dict[str, Exception]
 
 
+class StrictEnum(str, Enum):
+    """All members should exist in the Backend, and there will be no extras"""
+
+
+class SubsetEnumMeta(EnumMeta):
+    def __call__(self, value, *args, **kwargs):  # type: ignore
+        if isinstance(value, str) and not isinstance(value, self):
+            return value
+        return super().__call__(value, *args, **kwargs)
+
+
+class SubsetEnum(StrictEnum, metaclass=SubsetEnumMeta):
+    """All members should exist in the Backend, but there may be extras"""
+
+
 CALCULATE_TIMEOUT = "CALCULATE_TIMEOUT"
 """Sentinel used to implement ``myfunc(timeout=CalculateTimeout)``
 
@@ -119,7 +137,22 @@ async def wait_for_connection(**coros: Awaitable[None]):
        raise NotConnected(exceptions)
 
 
-def get_dtype(typ: type) -> np.dtype | None:
+def get_dtype(datatype: type) -> np.dtype:
+    """Get the runtime dtype from a numpy ndarray type annotation
+
+    >>> from ophyd_async.core import Array1D
+    >>> import numpy as np
+    >>> get_dtype(Array1D[np.int8])
+    dtype('int8')
+    """
+    if not get_origin(datatype) == np.ndarray:
+        raise TypeError(f"Expected Array1D[dtype], got {datatype}")
+    # datatype = numpy.ndarray[typing.Any, numpy.dtype[numpy.float64]]
+    # so extract numpy.float64 from it
+    return np.dtype(get_args(get_args(datatype)[1])[0])
+
+
+def get_enum_cls(datatype: type | None) -> type[StrictEnum] | None:
     """Get the runtime dtype from a numpy ndarray type annotation
 
     >>> import numpy.typing as npt
@@ -127,11 +160,15 @@ def get_dtype(typ: type) -> np.dtype | None:
     >>> get_dtype(npt.NDArray[np.int8])
     dtype('int8')
     """
-    if getattr(typ, "__origin__", None) == np.ndarray:
-        # datatype = numpy.ndarray[typing.Any, numpy.dtype[numpy.float64]]
-        # so extract numpy.float64 from it
-        return np.dtype(typ.__args__[1].__args__[0])  # type: ignore
-    return None
+    if get_origin(datatype) is Sequence:
+        datatype = get_args(datatype)[0]
+    if datatype and issubclass(datatype, Enum):
+        if not issubclass(datatype, StrictEnum):
+            raise TypeError(
+                f"{datatype} should inherit from .SubsetEnum "
+                "or ophyd_async.core.StrictEnum"
+            )
+        return datatype
 
 
 def get_unique(values: dict[str, T], types: str) -> T:
@@ -187,7 +224,31 @@ def in_micros(t: float) -> int:
     return int(np.ceil(t * 1e6))
 
 
-def is_pydantic_model(datatype) -> bool:
-    while origin := get_origin(datatype):
-        datatype = origin
-    return datatype and issubclass(datatype, BaseModel)
+def get_origin_class(annotatation: Any) -> type | None:
+    origin = get_origin(annotatation) or annotatation
+    if isinstance(origin, type):
+        return origin
+
+
+class Reference(Generic[T]):
+    """Hide an object behind a reference.
+
+    Used to opt out of the naming/parent-child relationship of `Device`.
+
+    For example::
+
+        class DeviceWithRefToSignal(Device):
+            def __init__(self, signal: SignalRW[int]):
+                self.signal_ref = Reference(signal)
+                super().__init__()
+
+            def set(self, value) -> AsyncStatus:
+                return self.signal_ref().set(value + 1)
+
+    """
+
+    def __init__(self, obj: T):
+        self._obj = obj
+
+    def __call__(self) -> T:
+        return self._obj
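The StrictEnum and SubsetEnum classes defined above replace both the old SubsetEnum["..."] subscription syntax and the plain (str, Enum) bases used in the EPICS modules further down this diff. A short sketch of the behaviour that follows from SubsetEnumMeta.__call__, using illustrative enum names:

```python
from ophyd_async.core import StrictEnum, SubsetEnum


class TriggerSource(SubsetEnum):
    # Declared members must exist on the backend...
    freerun = "Freerun"
    line1 = "Line1"


# ...but plain strings are passed through unchanged rather than raising
# ValueError, so extra backend-supplied choices are tolerated
assert TriggerSource("Freerun") == "Freerun"
assert TriggerSource("Software") == "Software"


class TriggerMode(StrictEnum):
    # A StrictEnum must match the backend's choices exactly; plain
    # (str, Enum) classes are rejected by get_enum_cls() with a TypeError
    on = "On"
    off = "Off"
```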
ophyd_async/epics/adaravis/_aravis_controller.py
@@ -2,12 +2,12 @@ import asyncio
 from typing import Literal
 
 from ophyd_async.core import (
+    AsyncStatus,
     DetectorController,
     DetectorTrigger,
     TriggerInfo,
     set_and_wait_for_value,
 )
-from ophyd_async.core._status import AsyncStatus
 from ophyd_async.epics import adcore
 
 from ._aravis_io import AravisDriverIO, AravisTriggerMode, AravisTriggerSource
@@ -69,7 +69,7 @@ class AravisController(DetectorController):
                 f"use {trigger}"
             )
         if trigger == DetectorTrigger.internal:
-            return AravisTriggerMode.off, "Freerun"
+            return AravisTriggerMode.off, AravisTriggerSource.freerun
         else:
             return (AravisTriggerMode.on, f"Line{self.gpio_number}")  # type: ignore
 
ophyd_async/epics/adaravis/_aravis_io.py
@@ -1,11 +1,9 @@
-from enum import Enum
-
-from ophyd_async.core import SubsetEnum
+from ophyd_async.core import StrictEnum, SubsetEnum
 from ophyd_async.epics import adcore
-from ophyd_async.epics.signal import epics_signal_rw_rbv
+from ophyd_async.epics.core import epics_signal_rw_rbv
 
 
-class AravisTriggerMode(str, Enum):
+class AravisTriggerMode(StrictEnum):
     """GigEVision GenICAM standard: on=externally triggered"""
 
     on = "On"
@@ -19,7 +17,11 @@ class AravisTriggerMode(str, Enum):
 To prevent requiring one Enum class per possible configuration, we set as this Enum
 but read from the underlying signal as a str.
 """
-AravisTriggerSource = SubsetEnum["Freerun", "Line1"]
+
+
+class AravisTriggerSource(SubsetEnum):
+    freerun = "Freerun"
+    line1 = "Line1"
 
 
 class AravisDriverIO(adcore.ADBaseIO):
ophyd_async/epics/adcore/_core_io.py
@@ -1,7 +1,5 @@
-from enum import Enum
-
-from ophyd_async.core import Device
-from ophyd_async.epics.signal import (
+from ophyd_async.core import Device, StrictEnum
+from ophyd_async.epics.core import (
     epics_signal_r,
     epics_signal_rw,
     epics_signal_rw_rbv,
@@ -10,7 +8,7 @@ from ophyd_async.epics.signal import (
 from ._utils import ADBaseDataType, FileWriteMode, ImageMode
 
 
-class Callback(str, Enum):
+class Callback(StrictEnum):
     Enable = "Enable"
     Disable = "Disable"
 
@@ -68,7 +66,7 @@ class NDPluginStatsIO(NDPluginBaseIO):
         super().__init__(prefix, name)
 
 
-class DetectorState(str, Enum):
+class DetectorState(StrictEnum):
     """
     Default set of states of an AreaDetector driver.
     See definition in ADApp/ADSrc/ADDriver.h in https://github.com/areaDetector/ADCore
@@ -100,7 +98,7 @@ class ADBaseIO(NDArrayBaseIO):
         super().__init__(prefix, name=name)
 
 
-class Compression(str, Enum):
+class Compression(StrictEnum):
     none = "None"
     nbit = "N-bit"
     szip = "szip"
ophyd_async/epics/adcore/_hdf_writer.py
@@ -134,9 +134,9 @@ class ADHDFWriter(DetectorWriter):
         describe = {
             ds.data_key: DataKey(
                 source=self.hdf.full_file_name.source,
-                shape=outer_shape + tuple(ds.shape),
+                shape=list(outer_shape + tuple(ds.shape)),
                 dtype="array" if ds.shape else "number",
-                dtype_numpy=ds.dtype_numpy,  # type: ignore
+                dtype_numpy=ds.dtype_numpy,
                 external="STREAM:",
             )
             for ds in self._datasets
ophyd_async/epics/adcore/_single_trigger.py
@@ -3,13 +3,8 @@ from collections.abc import Sequence
 
 from bluesky.protocols import Triggerable
 
-from ophyd_async.core import (
-    AsyncStatus,
-    ConfigSignal,
-    HintedSignal,
-    SignalR,
-    StandardReadable,
-)
+from ophyd_async.core import AsyncStatus, SignalR, StandardReadable
+from ophyd_async.core import StandardReadableFormat as Format
 
 from ._core_io import ADBaseIO, NDPluginBaseIO
 from ._utils import ImageMode
@@ -28,10 +23,10 @@ class SingleTriggerDetector(StandardReadable, Triggerable):
 
         self.add_readables(
             [self.drv.array_counter, *read_uncached],
-            wrapper=HintedSignal.uncached,
+            Format.HINTED_UNCACHED_SIGNAL,
         )
 
-        self.add_readables([self.drv.acquire_time], wrapper=ConfigSignal)
+        self.add_readables([self.drv.acquire_time], Format.CONFIG_SIGNAL)
 
         super().__init__(name=name)
 
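As the hunk above shows, the wrapper=ConfigSignal / HintedSignal keyword arguments are replaced by passing a StandardReadableFormat member positionally to add_readables. A sketch of the new pattern in a user-defined readable, with hypothetical device and PV names:

```python
from ophyd_async.core import StandardReadable
from ophyd_async.core import StandardReadableFormat as Format
from ophyd_async.epics.core import epics_signal_r, epics_signal_rw


class MyPointDetector(StandardReadable):
    def __init__(self, prefix: str, name: str = "") -> None:
        self.counts = epics_signal_r(float, prefix + "Counts_RBV")
        self.exposure = epics_signal_rw(float, prefix + "Exposure")
        # Read per point, without caching the monitor, and hint for plotting
        self.add_readables([self.counts], Format.HINTED_UNCACHED_SIGNAL)
        # Read once per run into the descriptor's configuration
        self.add_readables([self.exposure], Format.CONFIG_SIGNAL)
        super().__init__(name=name)
```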
ophyd_async/epics/adcore/_utils.py
@@ -1,11 +1,16 @@
 from dataclasses import dataclass
-from enum import Enum
 
-from ophyd_async.core import DEFAULT_TIMEOUT, SignalRW, T, wait_for_value
-from ophyd_async.core._signal import SignalR
+from ophyd_async.core import (
+    DEFAULT_TIMEOUT,
+    SignalDatatypeT,
+    SignalR,
+    SignalRW,
+    StrictEnum,
+    wait_for_value,
+)
 
 
-class ADBaseDataType(str, Enum):
+class ADBaseDataType(StrictEnum):
     Int8 = "Int8"
     UInt8 = "UInt8"
     Int16 = "Int16"
@@ -73,25 +78,25 @@ def convert_param_dtype_to_np(datatype: str) -> str:
     return np_datatype
 
 
-class FileWriteMode(str, Enum):
+class FileWriteMode(StrictEnum):
     single = "Single"
     capture = "Capture"
     stream = "Stream"
 
 
-class ImageMode(str, Enum):
+class ImageMode(StrictEnum):
     single = "Single"
     multiple = "Multiple"
     continuous = "Continuous"
 
 
-class NDAttributeDataType(str, Enum):
+class NDAttributeDataType(StrictEnum):
     INT = "INT"
     DOUBLE = "DOUBLE"
     STRING = "STRING"
 
 
-class NDAttributePvDbrType(str, Enum):
+class NDAttributePvDbrType(StrictEnum):
     DBR_SHORT = "DBR_SHORT"
     DBR_ENUM = "DBR_ENUM"
     DBR_INT = "DBR_INT"
@@ -122,8 +127,8 @@ class NDAttributeParam:
 
 
 async def stop_busy_record(
-    signal: SignalRW[T],
-    value: T,
+    signal: SignalRW[SignalDatatypeT],
+    value: SignalDatatypeT,
     timeout: float = DEFAULT_TIMEOUT,
     status_timeout: float | None = None,
 ) -> None:
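stop_busy_record is now generic over SignalDatatypeT rather than the unconstrained T, so the value argument has to match the signal's datatype. A small usage sketch, assuming the helper stays re-exported at the adcore package level:

```python
from ophyd_async.core import SignalRW
from ophyd_async.epics.adcore import stop_busy_record


async def stop_acquiring(acquire: SignalRW[bool]) -> None:
    # A type checker now flags e.g. passing a str here, because the value
    # must be the same SignalDatatypeT as the signal it is written to
    await stop_busy_record(acquire, False, timeout=1)
```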
ophyd_async/epics/adkinetix/__init__.py
@@ -1,9 +1,10 @@
 from ._kinetix import KinetixDetector
 from ._kinetix_controller import KinetixController
-from ._kinetix_io import KinetixDriverIO
+from ._kinetix_io import KinetixDriverIO, KinetixTriggerMode
 
 __all__ = [
     "KinetixDetector",
     "KinetixController",
     "KinetixDriverIO",
+    "KinetixTriggerMode",
 ]
ophyd_async/epics/adkinetix/_kinetix_controller.py
@@ -1,8 +1,11 @@
 import asyncio
 
-from ophyd_async.core import DetectorController, DetectorTrigger
-from ophyd_async.core._detector import TriggerInfo
-from ophyd_async.core._status import AsyncStatus
+from ophyd_async.core import (
+    AsyncStatus,
+    DetectorController,
+    DetectorTrigger,
+    TriggerInfo,
+)
 from ophyd_async.epics import adcore
 
 from ._kinetix_io import KinetixDriverIO, KinetixTriggerMode
ophyd_async/epics/adkinetix/_kinetix_io.py
@@ -1,16 +1,15 @@
-from enum import Enum
-
+from ophyd_async.core import StrictEnum
 from ophyd_async.epics import adcore
-from ophyd_async.epics.signal import epics_signal_rw_rbv
+from ophyd_async.epics.core import epics_signal_rw_rbv
 
 
-class KinetixTriggerMode(str, Enum):
+class KinetixTriggerMode(StrictEnum):
     internal = "Internal"
     edge = "Rising Edge"
     gate = "Exp. Gate"
 
 
-class KinetixReadoutMode(str, Enum):
+class KinetixReadoutMode(StrictEnum):
     sensitivity = 1
     speed = 2
     dynamic_range = 3
ophyd_async/epics/adpilatus/_pilatus_controller.py
@@ -2,12 +2,12 @@ import asyncio
 
 from ophyd_async.core import (
     DEFAULT_TIMEOUT,
+    AsyncStatus,
     DetectorController,
     DetectorTrigger,
+    TriggerInfo,
     wait_for_value,
 )
-from ophyd_async.core._detector import TriggerInfo
-from ophyd_async.core._status import AsyncStatus
 from ophyd_async.epics import adcore
 
 from ._pilatus_io import PilatusDriverIO, PilatusTriggerMode
ophyd_async/epics/adpilatus/_pilatus_io.py
@@ -1,10 +1,9 @@
-from enum import Enum
-
+from ophyd_async.core import StrictEnum
 from ophyd_async.epics import adcore
-from ophyd_async.epics.signal import epics_signal_r, epics_signal_rw_rbv
+from ophyd_async.epics.core import epics_signal_r, epics_signal_rw_rbv
 
 
-class PilatusTriggerMode(str, Enum):
+class PilatusTriggerMode(StrictEnum):
     internal = "Internal"
     ext_enable = "Ext. Enable"
     ext_trigger = "Ext. Trigger"
ophyd_async/epics/adsimdetector/_sim_controller.py
@@ -2,11 +2,11 @@ import asyncio
 
 from ophyd_async.core import (
     DEFAULT_TIMEOUT,
+    AsyncStatus,
     DetectorController,
     DetectorTrigger,
+    TriggerInfo,
 )
-from ophyd_async.core._detector import TriggerInfo
-from ophyd_async.core._status import AsyncStatus
 from ophyd_async.epics import adcore
 
 
ophyd_async/epics/advimba/__init__.py
@@ -1,9 +1,12 @@
 from ._vimba import VimbaDetector
 from ._vimba_controller import VimbaController
-from ._vimba_io import VimbaDriverIO
+from ._vimba_io import VimbaDriverIO, VimbaExposeOutMode, VimbaOnOff, VimbaTriggerSource
 
 __all__ = [
     "VimbaDetector",
     "VimbaController",
     "VimbaDriverIO",
+    "VimbaExposeOutMode",
+    "VimbaOnOff",
+    "VimbaTriggerSource",
 ]