ophyd-async 0.7.0__py3-none-any.whl → 0.8.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (92)
  1. ophyd_async/_version.py +2 -2
  2. ophyd_async/core/__init__.py +34 -9
  3. ophyd_async/core/_detector.py +5 -10
  4. ophyd_async/core/_device.py +170 -68
  5. ophyd_async/core/_device_filler.py +269 -0
  6. ophyd_async/core/_device_save_loader.py +6 -7
  7. ophyd_async/core/_mock_signal_backend.py +35 -40
  8. ophyd_async/core/_mock_signal_utils.py +25 -16
  9. ophyd_async/core/_protocol.py +28 -8
  10. ophyd_async/core/_readable.py +133 -134
  11. ophyd_async/core/_signal.py +219 -163
  12. ophyd_async/core/_signal_backend.py +131 -64
  13. ophyd_async/core/_soft_signal_backend.py +131 -194
  14. ophyd_async/core/_status.py +22 -6
  15. ophyd_async/core/_table.py +102 -100
  16. ophyd_async/core/_utils.py +143 -32
  17. ophyd_async/epics/adaravis/_aravis_controller.py +2 -2
  18. ophyd_async/epics/adaravis/_aravis_io.py +8 -6
  19. ophyd_async/epics/adcore/_core_io.py +5 -7
  20. ophyd_async/epics/adcore/_core_logic.py +3 -1
  21. ophyd_async/epics/adcore/_hdf_writer.py +2 -2
  22. ophyd_async/epics/adcore/_single_trigger.py +6 -10
  23. ophyd_async/epics/adcore/_utils.py +15 -10
  24. ophyd_async/epics/adkinetix/__init__.py +2 -1
  25. ophyd_async/epics/adkinetix/_kinetix_controller.py +6 -3
  26. ophyd_async/epics/adkinetix/_kinetix_io.py +4 -5
  27. ophyd_async/epics/adpilatus/_pilatus_controller.py +2 -2
  28. ophyd_async/epics/adpilatus/_pilatus_io.py +3 -4
  29. ophyd_async/epics/adsimdetector/_sim_controller.py +2 -2
  30. ophyd_async/epics/advimba/__init__.py +4 -1
  31. ophyd_async/epics/advimba/_vimba_controller.py +6 -3
  32. ophyd_async/epics/advimba/_vimba_io.py +8 -9
  33. ophyd_async/epics/core/__init__.py +26 -0
  34. ophyd_async/epics/core/_aioca.py +323 -0
  35. ophyd_async/epics/core/_epics_connector.py +53 -0
  36. ophyd_async/epics/core/_epics_device.py +13 -0
  37. ophyd_async/epics/core/_p4p.py +383 -0
  38. ophyd_async/epics/core/_pvi_connector.py +91 -0
  39. ophyd_async/epics/core/_signal.py +171 -0
  40. ophyd_async/epics/core/_util.py +61 -0
  41. ophyd_async/epics/demo/_mover.py +4 -5
  42. ophyd_async/epics/demo/_sensor.py +14 -13
  43. ophyd_async/epics/eiger/_eiger.py +1 -2
  44. ophyd_async/epics/eiger/_eiger_controller.py +7 -2
  45. ophyd_async/epics/eiger/_eiger_io.py +3 -5
  46. ophyd_async/epics/eiger/_odin_io.py +5 -5
  47. ophyd_async/epics/motor.py +4 -5
  48. ophyd_async/epics/signal.py +11 -0
  49. ophyd_async/epics/testing/__init__.py +24 -0
  50. ophyd_async/epics/testing/_example_ioc.py +105 -0
  51. ophyd_async/epics/testing/_utils.py +78 -0
  52. ophyd_async/epics/testing/test_records.db +152 -0
  53. ophyd_async/epics/testing/test_records_pva.db +177 -0
  54. ophyd_async/fastcs/core.py +9 -0
  55. ophyd_async/fastcs/panda/__init__.py +4 -4
  56. ophyd_async/fastcs/panda/_block.py +18 -13
  57. ophyd_async/fastcs/panda/_control.py +3 -5
  58. ophyd_async/fastcs/panda/_hdf_panda.py +5 -19
  59. ophyd_async/fastcs/panda/_table.py +30 -52
  60. ophyd_async/fastcs/panda/_trigger.py +8 -8
  61. ophyd_async/fastcs/panda/_writer.py +2 -5
  62. ophyd_async/plan_stubs/_ensure_connected.py +20 -13
  63. ophyd_async/plan_stubs/_fly.py +2 -2
  64. ophyd_async/plan_stubs/_nd_attributes.py +5 -4
  65. ophyd_async/py.typed +0 -0
  66. ophyd_async/sim/demo/_pattern_detector/_pattern_detector_controller.py +1 -2
  67. ophyd_async/sim/demo/_sim_motor.py +3 -4
  68. ophyd_async/tango/__init__.py +0 -45
  69. ophyd_async/tango/{signal → core}/__init__.py +9 -6
  70. ophyd_async/tango/core/_base_device.py +132 -0
  71. ophyd_async/tango/{signal → core}/_signal.py +42 -53
  72. ophyd_async/tango/{base_devices → core}/_tango_readable.py +3 -4
  73. ophyd_async/tango/{signal → core}/_tango_transport.py +38 -40
  74. ophyd_async/tango/demo/_counter.py +12 -23
  75. ophyd_async/tango/demo/_mover.py +13 -13
  76. {ophyd_async-0.7.0.dist-info → ophyd_async-0.8.0.dist-info}/METADATA +52 -55
  77. ophyd_async-0.8.0.dist-info/RECORD +116 -0
  78. {ophyd_async-0.7.0.dist-info → ophyd_async-0.8.0.dist-info}/WHEEL +1 -1
  79. ophyd_async/epics/pvi/__init__.py +0 -3
  80. ophyd_async/epics/pvi/_pvi.py +0 -338
  81. ophyd_async/epics/signal/__init__.py +0 -21
  82. ophyd_async/epics/signal/_aioca.py +0 -378
  83. ophyd_async/epics/signal/_common.py +0 -57
  84. ophyd_async/epics/signal/_epics_transport.py +0 -34
  85. ophyd_async/epics/signal/_p4p.py +0 -518
  86. ophyd_async/epics/signal/_signal.py +0 -114
  87. ophyd_async/tango/base_devices/__init__.py +0 -4
  88. ophyd_async/tango/base_devices/_base_device.py +0 -225
  89. ophyd_async-0.7.0.dist-info/RECORD +0 -108
  90. {ophyd_async-0.7.0.dist-info → ophyd_async-0.8.0.dist-info}/LICENSE +0 -0
  91. {ophyd_async-0.7.0.dist-info → ophyd_async-0.8.0.dist-info}/entry_points.txt +0 -0
  92. {ophyd_async-0.7.0.dist-info → ophyd_async-0.8.0.dist-info}/top_level.txt +0 -0
ophyd_async/core/_signal_backend.py
@@ -1,97 +1,164 @@
 from abc import abstractmethod
-from typing import (
-    TYPE_CHECKING,
-    Any,
-    ClassVar,
-    Generic,
-    Literal,
-)
+from collections.abc import Sequence
+from typing import Generic, TypedDict, TypeVar, get_origin
 
+import numpy as np
 from bluesky.protocols import Reading
-from event_model import DataKey
-
-from ._utils import DEFAULT_TIMEOUT, ReadingValueCallback, T
+from event_model import DataKey, Dtype, Limits
+
+from ._table import Table
+from ._utils import Callback, StrictEnum, T
+
+DTypeScalar_co = TypeVar("DTypeScalar_co", covariant=True, bound=np.generic)
+Array1D = np.ndarray[tuple[int], np.dtype[DTypeScalar_co]]
+Primitive = bool | int | float | str
+# NOTE: if you change this union then update the docs to match
+SignalDatatype = (
+    Primitive
+    | Array1D[np.bool_]
+    | Array1D[np.int8]
+    | Array1D[np.uint8]
+    | Array1D[np.int16]
+    | Array1D[np.uint16]
+    | Array1D[np.int32]
+    | Array1D[np.uint32]
+    | Array1D[np.int64]
+    | Array1D[np.uint64]
+    | Array1D[np.float32]
+    | Array1D[np.float64]
+    | np.ndarray
+    | StrictEnum
+    | Sequence[str]
+    | Sequence[StrictEnum]
+    | Table
+)
+# TODO: These typevars will not be needed when we drop python 3.11
+# as you can do MyConverter[SignalType: SignalTypeUnion]:
+# rather than MyConverter(Generic[SignalType])
+PrimitiveT = TypeVar("PrimitiveT", bound=Primitive)
+SignalDatatypeT = TypeVar("SignalDatatypeT", bound=SignalDatatype)
+SignalDatatypeV = TypeVar("SignalDatatypeV", bound=SignalDatatype)
+EnumT = TypeVar("EnumT", bound=StrictEnum)
+TableT = TypeVar("TableT", bound=Table)
 
 
-class SignalBackend(Generic[T]):
+class SignalBackend(Generic[SignalDatatypeT]):
     """A read/write/monitor backend for a Signals"""
 
-    #: Datatype of the signal value
-    datatype: type[T] | None = None
+    def __init__(self, datatype: type[SignalDatatypeT] | None):
+        self.datatype = datatype
 
-    @classmethod
     @abstractmethod
-    def datatype_allowed(cls, dtype: Any) -> bool:
-        """Check if a given datatype is acceptable for this signal backend."""
+    def source(self, name: str, read: bool) -> str:
+        """Return source of signal.
 
-    #: Like ca://PV_PREFIX:SIGNAL
-    @abstractmethod
-    def source(self, name: str) -> str:
-        """Return source of signal. Signals may pass a name to the backend, which can be
-        used or discarded."""
+        Signals may pass a name to the backend, which can be used or discarded.
+        """
 
     @abstractmethod
-    async def connect(self, timeout: float = DEFAULT_TIMEOUT):
+    async def connect(self, timeout: float):
         """Connect to underlying hardware"""
 
     @abstractmethod
-    async def put(self, value: T | None, wait=True, timeout=None):
-        """Put a value to the PV, if wait then wait for completion for up to timeout"""
+    async def put(self, value: SignalDatatypeT | None, wait: bool):
+        """Put a value to the PV, if wait then wait for completion"""
 
     @abstractmethod
     async def get_datakey(self, source: str) -> DataKey:
         """Metadata like source, dtype, shape, precision, units"""
 
     @abstractmethod
-    async def get_reading(self) -> Reading:
+    async def get_reading(self) -> Reading[SignalDatatypeT]:
         """The current value, timestamp and severity"""
 
     @abstractmethod
-    async def get_value(self) -> T:
+    async def get_value(self) -> SignalDatatypeT:
         """The current value"""
 
     @abstractmethod
-    async def get_setpoint(self) -> T:
+    async def get_setpoint(self) -> SignalDatatypeT:
         """The point that a signal was requested to move to."""
 
     @abstractmethod
-    def set_callback(self, callback: ReadingValueCallback[T] | None) -> None:
+    def set_callback(self, callback: Callback[T] | None) -> None:
         """Observe changes to the current value, timestamp and severity"""
 
 
-class _RuntimeSubsetEnumMeta(type):
-    def __str__(cls):
-        if hasattr(cls, "choices"):
-            return f"SubsetEnum{list(cls.choices)}"  # type: ignore
-        return "SubsetEnum"
-
-    def __getitem__(cls, _choices):
-        if isinstance(_choices, str):
-            _choices = (_choices,)
-        else:
-            if not isinstance(_choices, tuple) or not all(
-                isinstance(c, str) for c in _choices
-            ):
-                raise TypeError(
-                    "Choices must be a str or a tuple of str, " f"not {type(_choices)}."
-                )
-        if len(set(_choices)) != len(_choices):
-            raise TypeError("Duplicate elements in runtime enum choices.")
-
-        class _RuntimeSubsetEnum(cls):
-            choices = _choices
-
-        return _RuntimeSubsetEnum
-
-
-class RuntimeSubsetEnum(metaclass=_RuntimeSubsetEnumMeta):
-    choices: ClassVar[tuple[str, ...]]
-
-    def __init__(self):
-        raise RuntimeError("SubsetEnum cannot be instantiated")
-
-
-if TYPE_CHECKING:
-    SubsetEnum = Literal
-else:
-    SubsetEnum = RuntimeSubsetEnum
+_primitive_dtype: dict[type[Primitive], Dtype] = {
+    bool: "boolean",
+    int: "integer",
+    float: "number",
+    str: "string",
+}
+
+
+class SignalMetadata(TypedDict, total=False):
+    limits: Limits
+    choices: list[str]
+    precision: int
+    units: str
+
+
+def _datakey_dtype(datatype: type[SignalDatatype]) -> Dtype:
+    if (
+        datatype is np.ndarray
+        or get_origin(datatype) in (Sequence, np.ndarray)
+        or issubclass(datatype, Table)
+    ):
+        return "array"
+    elif issubclass(datatype, StrictEnum):
+        return "string"
+    elif issubclass(datatype, Primitive):
+        return _primitive_dtype[datatype]
+    else:
+        raise TypeError(f"Can't make dtype for {datatype}")
+
+
+def _datakey_dtype_numpy(
+    datatype: type[SignalDatatypeT], value: SignalDatatypeT
+) -> np.dtype:
+    if isinstance(value, np.ndarray):
+        # The value already has a dtype, use that
+        return value.dtype
+    elif (
+        get_origin(datatype) is Sequence
+        or datatype is str
+        or issubclass(datatype, StrictEnum)
+    ):
+        # TODO: use np.dtypes.StringDType when we can use in structured arrays
+        # https://github.com/numpy/numpy/issues/25693
+        return np.dtype("S40")
+    elif isinstance(value, Table):
+        return value.numpy_dtype()
+    elif issubclass(datatype, Primitive):
+        return np.dtype(datatype)
+    else:
+        raise TypeError(f"Can't make dtype_numpy for {datatype}")
+
+
+def _datakey_shape(value: SignalDatatype) -> list[int]:
+    if type(value) in _primitive_dtype or isinstance(value, StrictEnum):
+        return []
+    elif isinstance(value, np.ndarray):
+        return list(value.shape)
+    elif isinstance(value, Sequence | Table):
+        return [len(value)]
+    else:
+        raise TypeError(f"Can't make shape for {value}")
+
+
+def make_datakey(
+    datatype: type[SignalDatatypeT],
+    value: SignalDatatypeT,
+    source: str,
+    metadata: SignalMetadata,
+) -> DataKey:
+    dtn = _datakey_dtype_numpy(datatype, value)
+    return DataKey(
+        dtype=_datakey_dtype(datatype),
+        shape=_datakey_shape(value),
+        # Ignore until https://github.com/bluesky/event-model/issues/308
+        dtype_numpy=dtn.descr if len(dtn.descr) > 1 else dtn.str,  # type: ignore
+        source=source,
+        **metadata,
+    )
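
The rewritten `_signal_backend.py` replaces the `SubsetEnum` machinery with an explicit `SignalDatatype` union and a `make_datakey()` helper that builds an event-model `DataKey` from a datatype, value, source and metadata. A minimal usage sketch follows, assuming the private module path shown in the file list above (the public re-exports from `ophyd_async.core` may differ):

```python
# Hedged sketch of the new DataKey helper shown in the hunk above.
# The import path is an assumption taken from the file list; values such as
# "<f8"/"<f4" depend on platform endianness.
import numpy as np

from ophyd_async.core._signal_backend import Array1D, SignalMetadata, make_datakey

# Scalar float -> dtype "number", empty shape, dtype_numpy like "<f8"
scalar_key = make_datakey(
    float, 3.14, "soft://temperature", SignalMetadata(units="K", precision=2)
)

# 1-D float32 array -> dtype "array", shape [5], dtype_numpy like "<f4"
array_key = make_datakey(
    Array1D[np.float32],
    np.zeros(5, dtype=np.float32),
    "soft://waveform",
    SignalMetadata(),
)
```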
ophyd_async/core/_soft_signal_backend.py
@@ -1,244 +1,181 @@
 from __future__ import annotations
 
-import inspect
 import time
-from collections import abc
-from enum import Enum
-from typing import Generic, cast, get_origin
+from abc import abstractmethod
+from collections.abc import Sequence
+from dataclasses import dataclass
+from functools import lru_cache
+from typing import Any, Generic, get_args, get_origin
 
 import numpy as np
 from bluesky.protocols import Reading
 from event_model import DataKey
-from event_model.documents.event_descriptor import Dtype
-from pydantic import BaseModel
-from typing_extensions import TypedDict
 
 from ._signal_backend import (
-    RuntimeSubsetEnum,
+    Array1D,
+    EnumT,
+    Primitive,
+    PrimitiveT,
     SignalBackend,
+    SignalDatatype,
+    SignalDatatypeT,
+    SignalMetadata,
+    TableT,
+    make_datakey,
 )
-from ._utils import (
-    DEFAULT_TIMEOUT,
-    ReadingValueCallback,
-    T,
-    get_dtype,
-    is_pydantic_model,
-)
-
-primitive_dtypes: dict[type, Dtype] = {
-    str: "string",
-    int: "integer",
-    float: "number",
-    bool: "boolean",
-}
+from ._table import Table
+from ._utils import Callback, get_dtype, get_enum_cls
 
 
-class SignalMetadata(TypedDict):
-    units: str | None
-    precision: int | None
+class SoftConverter(Generic[SignalDatatypeT]):
+    # This is Any -> SignalDatatypeT because we support coercing
+    # value types to SignalDatatype to allow people to do things like
+    # SignalRW[Enum].set("enum value")
+    @abstractmethod
+    def write_value(self, value: Any) -> SignalDatatypeT: ...
 
 
-class SoftConverter(Generic[T]):
-    def value(self, value: T) -> T:
-        return value
+@dataclass
+class PrimitiveSoftConverter(SoftConverter[PrimitiveT]):
+    datatype: type[PrimitiveT]
 
-    def write_value(self, value: T) -> T:
-        return value
+    def write_value(self, value: Any) -> PrimitiveT:
+        return self.datatype(value) if value else self.datatype()
 
-    def reading(self, value: T, timestamp: float, severity: int) -> Reading:
-        return Reading(
-            value=value,
-            timestamp=timestamp,
-            alarm_severity=-1 if severity > 2 else severity,
-        )
 
-    def get_datakey(self, source: str, value, **metadata) -> DataKey:
-        dk: DataKey = {"source": source, "shape": [], **metadata}  # type: ignore
-        dtype = type(value)
-        if np.issubdtype(dtype, np.integer):
-            dtype = int
-        elif np.issubdtype(dtype, np.floating):
-            dtype = float
-        assert (
-            dtype in primitive_dtypes
-        ), f"invalid converter for value of type {type(value)}"
-        dk["dtype"] = primitive_dtypes[dtype]
-        # type ignore until https://github.com/bluesky/event-model/issues/308
-        try:
-            dk["dtype_numpy"] = np.dtype(dtype).descr[0][1]  # type: ignore
-        except TypeError:
-            dk["dtype_numpy"] = ""  # type: ignore
-        return dk
-
-    def make_initial_value(self, datatype: type[T] | None) -> T:
-        if datatype is None:
-            return cast(T, None)
-
-        return datatype()
-
-
-class SoftArrayConverter(SoftConverter):
-    def get_datakey(self, source: str, value, **metadata) -> DataKey:
-        dtype_numpy = ""
-        if isinstance(value, list):
-            if len(value) > 0:
-                dtype_numpy = np.dtype(type(value[0])).descr[0][1]
-        else:
-            dtype_numpy = np.dtype(value.dtype).descr[0][1]
+class SequenceStrSoftConverter(SoftConverter[Sequence[str]]):
+    def write_value(self, value: Any) -> Sequence[str]:
+        return [str(v) for v in value] if value else []
 
-        return {
-            "source": source,
-            "dtype": "array",
-            "dtype_numpy": dtype_numpy,  # type: ignore
-            "shape": [len(value)],
-            **metadata,
-        }
 
-    def make_initial_value(self, datatype: type[T] | None) -> T:
-        if datatype is None:
-            return cast(T, None)
+@dataclass
+class SequenceEnumSoftConverter(SoftConverter[Sequence[EnumT]]):
+    datatype: type[EnumT]
 
-        if get_origin(datatype) == abc.Sequence:
-            return cast(T, [])
+    def write_value(self, value: Any) -> Sequence[EnumT]:
+        return [self.datatype(v) for v in value] if value else []
 
-        return cast(T, datatype(shape=0))  # type: ignore
 
+@dataclass
+class NDArraySoftConverter(SoftConverter[Array1D]):
+    datatype: np.dtype | None = None
 
-class SoftEnumConverter(SoftConverter):
-    choices: tuple[str, ...]
-
-    def __init__(self, datatype: RuntimeSubsetEnum | type[Enum]):
-        if issubclass(datatype, Enum):  # type: ignore
-            self.choices = tuple(v.value for v in datatype)
-        else:
-            self.choices = datatype.choices
-
-    def write_value(self, value: Enum | str) -> str:
-        return value  # type: ignore
+    def write_value(self, value: Any) -> Array1D:
+        return np.array(() if value is None else value, dtype=self.datatype)
 
-    def get_datakey(self, source: str, value, **metadata) -> DataKey:
-        return {
-            "source": source,
-            "dtype": "string",
-            # type ignore until https://github.com/bluesky/event-model/issues/308
-            "dtype_numpy": "|S40",  # type: ignore
-            "shape": [],
-            "choices": self.choices,
-            **metadata,
-        }
 
-    def make_initial_value(self, datatype: type[T] | None) -> T:
-        if datatype is None:
-            return cast(T, None)
+@dataclass
+class EnumSoftConverter(SoftConverter[EnumT]):
+    datatype: type[EnumT]
 
-        if issubclass(datatype, Enum):
-            return cast(T, list(datatype.__members__.values())[0])  # type: ignore
-        return cast(T, self.choices[0])
+    def write_value(self, value: Any) -> EnumT:
+        return (
+            self.datatype(value)
+            if value
+            else list(self.datatype.__members__.values())[0]
+        )
 
 
-class SoftPydanticModelConverter(SoftConverter):
-    def __init__(self, datatype: type[BaseModel]):
-        self.datatype = datatype
+@dataclass
+class TableSoftConverter(SoftConverter[TableT]):
+    datatype: type[TableT]
 
-    def write_value(self, value):
+    def write_value(self, value: Any) -> TableT:
         if isinstance(value, dict):
             return self.datatype(**value)
-        return value
-
-
-def make_converter(datatype):
-    is_array = get_dtype(datatype) is not None
-    is_sequence = get_origin(datatype) == abc.Sequence
-    is_enum = inspect.isclass(datatype) and (
-        issubclass(datatype, Enum) or issubclass(datatype, RuntimeSubsetEnum)
-    )
-
-    if is_array or is_sequence:
-        return SoftArrayConverter()
-    if is_enum:
-        return SoftEnumConverter(datatype)  # type: ignore
-    if is_pydantic_model(datatype):
-        return SoftPydanticModelConverter(datatype)  # type: ignore
-
-    return SoftConverter()
-
-
-class SoftSignalBackend(SignalBackend[T]):
+        elif isinstance(value, self.datatype):
+            return value
+        elif value is None:
+            return self.datatype()
+        else:
+            raise TypeError(f"Cannot convert {value} to {self.datatype}")
+
+
+@lru_cache
+def make_converter(datatype: type[SignalDatatype]) -> SoftConverter:
+    enum_cls = get_enum_cls(datatype)
+    if datatype == Sequence[str]:
+        return SequenceStrSoftConverter()
+    elif get_origin(datatype) == Sequence and enum_cls:
+        return SequenceEnumSoftConverter(enum_cls)
+    elif datatype is np.ndarray:
+        return NDArraySoftConverter()
+    elif get_origin(datatype) == np.ndarray:
+        if datatype not in get_args(SignalDatatype):
+            raise TypeError(f"Expected Array1D[dtype], got {datatype}")
+        return NDArraySoftConverter(get_dtype(datatype))
+    elif enum_cls:
+        return EnumSoftConverter(enum_cls)
+    elif issubclass(datatype, Table):
+        return TableSoftConverter(datatype)
+    elif issubclass(datatype, Primitive):
+        return PrimitiveSoftConverter(datatype)
+    raise TypeError(f"Can't make converter for {datatype}")
+
+
+class SoftSignalBackend(SignalBackend[SignalDatatypeT]):
     """An backend to a soft Signal, for test signals see ``MockSignalBackend``."""
 
-    _value: T
-    _initial_value: T | None
-    _timestamp: float
-    _severity: int
-
-    @classmethod
-    def datatype_allowed(cls, dtype: type) -> bool:
-        return True  # Any value allowed in a soft signal
-
     def __init__(
         self,
-        datatype: type[T] | None,
-        initial_value: T | None = None,
-        metadata: SignalMetadata = None,  # type: ignore
-    ) -> None:
-        self.datatype = datatype
-        self._initial_value = initial_value
-        self._metadata = metadata or {}
-        self.converter: SoftConverter = make_converter(datatype)
-        if self._initial_value is None:
-            self._initial_value = self.converter.make_initial_value(self.datatype)
-        else:
-            self._initial_value = self.converter.write_value(self._initial_value)  # type: ignore
-
-        self.callback: ReadingValueCallback[T] | None = None
-        self._severity = 0
-        self.set_value(self._initial_value)  # type: ignore
+        datatype: type[SignalDatatypeT] | None,
+        initial_value: SignalDatatypeT | None = None,
+        units: str | None = None,
+        precision: int | None = None,
+    ):
+        # Create the right converter for the datatype
+        self.converter = make_converter(datatype or float)
+        # Add the extra static metadata to the dictionary
+        self.metadata: SignalMetadata = {}
+        if units is not None:
+            self.metadata["units"] = units
+        if precision is not None:
+            self.metadata["precision"] = precision
+        if enum_cls := get_enum_cls(datatype):
+            self.metadata["choices"] = [v.value for v in enum_cls]
+        # Create and set the initial value
+        self.initial_value = self.converter.write_value(initial_value)
+        self.reading: Reading[SignalDatatypeT]
+        self.callback: Callback[Reading[SignalDatatypeT]] | None = None
+        self.set_value(self.initial_value)
+        super().__init__(datatype)
+
+    def set_value(self, value: SignalDatatypeT):
+        self.reading = Reading(
+            value=self.converter.write_value(value),
+            timestamp=time.monotonic(),
+            alarm_severity=0,
+        )
+        if self.callback:
+            self.callback(self.reading)
 
-    def source(self, name: str) -> str:
+    def source(self, name: str, read: bool) -> str:
         return f"soft://{name}"
 
-    async def connect(self, timeout: float = DEFAULT_TIMEOUT) -> None:
-        """Connection isn't required for soft signals."""
+    async def connect(self, timeout: float):
         pass
 
-    async def put(self, value: T | None, wait=True, timeout=None):
-        write_value = (
-            self.converter.write_value(value)
-            if value is not None
-            else self._initial_value
-        )
-
-        self.set_value(write_value)  # type: ignore
-
-    def set_value(self, value: T):
-        """Method to bypass asynchronous logic."""
-        self._value = value
-        self._timestamp = time.monotonic()
-        reading: Reading = self.converter.reading(
-            self._value, self._timestamp, self._severity
-        )
-
-        if self.callback:
-            self.callback(reading, self._value)
+    async def put(self, value: SignalDatatypeT | None, wait: bool) -> None:
+        write_value = self.initial_value if value is None else value
+        self.set_value(write_value)
 
     async def get_datakey(self, source: str) -> DataKey:
-        return self.converter.get_datakey(source, self._value, **self._metadata)
+        return make_datakey(
+            self.datatype or float, self.reading["value"], source, self.metadata
+        )
 
-    async def get_reading(self) -> Reading:
-        return self.converter.reading(self._value, self._timestamp, self._severity)
+    async def get_reading(self) -> Reading[SignalDatatypeT]:
+        return self.reading
 
-    async def get_value(self) -> T:
-        return self.converter.value(self._value)
+    async def get_value(self) -> SignalDatatypeT:
+        return self.reading["value"]
 
-    async def get_setpoint(self) -> T:
-        """For a soft signal, the setpoint and readback values are the same."""
-        return await self.get_value()
+    async def get_setpoint(self) -> SignalDatatypeT:
+        # For a soft signal, the setpoint and readback values are the same.
+        return self.reading["value"]
 
-    def set_callback(self, callback: ReadingValueCallback[T] | None) -> None:
+    def set_callback(self, callback: Callback[Reading[SignalDatatypeT]] | None) -> None:
         if callback:
             assert not self.callback, "Cannot set a callback when one is already set"
-            reading: Reading = self.converter.reading(
-                self._value, self._timestamp, self._severity
-            )
-            callback(reading, self._value)
+            callback(self.reading)
         self.callback = callback
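
The reworked `SoftSignalBackend` now takes `units` and `precision` keywords instead of a `SignalMetadata` dict, stores a single `Reading`, and requires `wait` on `put()`. A short usage sketch under that new signature; the import path is an assumption based on the file list, and `ophyd_async.core` may re-export the class:

```python
# Hedged sketch exercising the reworked SoftSignalBackend shown above.
# The module path is an assumption taken from the file list.
import asyncio

from ophyd_async.core._soft_signal_backend import SoftSignalBackend


async def main() -> None:
    # Soft float signal with static metadata baked into its DataKey
    backend = SoftSignalBackend(float, initial_value=1.0, units="mm", precision=3)
    await backend.connect(timeout=1.0)
    await backend.put(2.5, wait=True)  # wait is now a required argument
    print(await backend.get_value())  # 2.5
    print(await backend.get_datakey("soft://x"))  # includes units and precision


asyncio.run(main())
```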