ophyd-async 0.7.0__py3-none-any.whl → 0.8.0a2__py3-none-any.whl

This diff shows the content changes between publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (70)
  1. ophyd_async/_version.py +2 -2
  2. ophyd_async/core/__init__.py +23 -8
  3. ophyd_async/core/_detector.py +5 -10
  4. ophyd_async/core/_device.py +139 -66
  5. ophyd_async/core/_device_filler.py +191 -0
  6. ophyd_async/core/_device_save_loader.py +6 -7
  7. ophyd_async/core/_mock_signal_backend.py +32 -40
  8. ophyd_async/core/_mock_signal_utils.py +22 -16
  9. ophyd_async/core/_protocol.py +28 -8
  10. ophyd_async/core/_readable.py +5 -5
  11. ophyd_async/core/_signal.py +140 -152
  12. ophyd_async/core/_signal_backend.py +131 -64
  13. ophyd_async/core/_soft_signal_backend.py +125 -194
  14. ophyd_async/core/_status.py +22 -6
  15. ophyd_async/core/_table.py +97 -100
  16. ophyd_async/core/_utils.py +71 -18
  17. ophyd_async/epics/adaravis/_aravis_controller.py +2 -2
  18. ophyd_async/epics/adaravis/_aravis_io.py +7 -5
  19. ophyd_async/epics/adcore/_core_io.py +4 -6
  20. ophyd_async/epics/adcore/_hdf_writer.py +2 -2
  21. ophyd_async/epics/adcore/_utils.py +15 -10
  22. ophyd_async/epics/adkinetix/__init__.py +2 -1
  23. ophyd_async/epics/adkinetix/_kinetix_controller.py +6 -3
  24. ophyd_async/epics/adkinetix/_kinetix_io.py +3 -4
  25. ophyd_async/epics/adpilatus/_pilatus_controller.py +2 -2
  26. ophyd_async/epics/adpilatus/_pilatus_io.py +2 -3
  27. ophyd_async/epics/adsimdetector/_sim_controller.py +2 -2
  28. ophyd_async/epics/advimba/__init__.py +4 -1
  29. ophyd_async/epics/advimba/_vimba_controller.py +6 -3
  30. ophyd_async/epics/advimba/_vimba_io.py +7 -8
  31. ophyd_async/epics/demo/_sensor.py +8 -4
  32. ophyd_async/epics/eiger/_eiger.py +1 -2
  33. ophyd_async/epics/eiger/_eiger_controller.py +1 -1
  34. ophyd_async/epics/eiger/_eiger_io.py +2 -4
  35. ophyd_async/epics/eiger/_odin_io.py +4 -4
  36. ophyd_async/epics/pvi/__init__.py +2 -2
  37. ophyd_async/epics/pvi/_pvi.py +56 -321
  38. ophyd_async/epics/signal/__init__.py +3 -4
  39. ophyd_async/epics/signal/_aioca.py +184 -236
  40. ophyd_async/epics/signal/_common.py +35 -49
  41. ophyd_async/epics/signal/_p4p.py +254 -387
  42. ophyd_async/epics/signal/_signal.py +63 -21
  43. ophyd_async/fastcs/core.py +9 -0
  44. ophyd_async/fastcs/panda/__init__.py +4 -4
  45. ophyd_async/fastcs/panda/_block.py +18 -13
  46. ophyd_async/fastcs/panda/_control.py +3 -5
  47. ophyd_async/fastcs/panda/_hdf_panda.py +5 -19
  48. ophyd_async/fastcs/panda/_table.py +29 -51
  49. ophyd_async/fastcs/panda/_trigger.py +8 -8
  50. ophyd_async/fastcs/panda/_writer.py +2 -5
  51. ophyd_async/plan_stubs/_ensure_connected.py +3 -1
  52. ophyd_async/plan_stubs/_fly.py +2 -2
  53. ophyd_async/plan_stubs/_nd_attributes.py +5 -4
  54. ophyd_async/py.typed +0 -0
  55. ophyd_async/sim/demo/_pattern_detector/_pattern_detector_controller.py +1 -2
  56. ophyd_async/tango/__init__.py +2 -4
  57. ophyd_async/tango/base_devices/_base_device.py +76 -143
  58. ophyd_async/tango/demo/_counter.py +2 -2
  59. ophyd_async/tango/demo/_mover.py +2 -2
  60. ophyd_async/tango/signal/__init__.py +2 -4
  61. ophyd_async/tango/signal/_signal.py +29 -50
  62. ophyd_async/tango/signal/_tango_transport.py +38 -40
  63. {ophyd_async-0.7.0.dist-info → ophyd_async-0.8.0a2.dist-info}/METADATA +8 -12
  64. ophyd_async-0.8.0a2.dist-info/RECORD +110 -0
  65. {ophyd_async-0.7.0.dist-info → ophyd_async-0.8.0a2.dist-info}/WHEEL +1 -1
  66. ophyd_async/epics/signal/_epics_transport.py +0 -34
  67. ophyd_async-0.7.0.dist-info/RECORD +0 -108
  68. {ophyd_async-0.7.0.dist-info → ophyd_async-0.8.0a2.dist-info}/LICENSE +0 -0
  69. {ophyd_async-0.7.0.dist-info → ophyd_async-0.8.0a2.dist-info}/entry_points.txt +0 -0
  70. {ophyd_async-0.7.0.dist-info → ophyd_async-0.8.0a2.dist-info}/top_level.txt +0 -0
--- ophyd_async/epics/signal/_p4p.py (0.7.0)
+++ ophyd_async/epics/signal/_p4p.py (0.8.0a2)
@@ -1,198 +1,107 @@
+from __future__ import annotations
+
 import asyncio
 import atexit
-import inspect
 import logging
-import time
-from collections.abc import Sequence
-from dataclasses import dataclass
-from enum import Enum
+from collections.abc import Mapping, Sequence
 from math import isnan, nan
-from typing import Any, get_origin
+from typing import Any, Generic
 
 import numpy as np
 from bluesky.protocols import Reading
-from event_model import DataKey
-from event_model.documents.event_descriptor import Dtype
+from event_model import DataKey, Limits, LimitsRange
 from p4p import Value
 from p4p.client.asyncio import Context, Subscription
 from pydantic import BaseModel
 
 from ophyd_async.core import (
-    DEFAULT_TIMEOUT,
+    Array1D,
+    Callback,
     NotConnected,
-    ReadingValueCallback,
-    RuntimeSubsetEnum,
     SignalBackend,
-    T,
-    get_dtype,
+    SignalDatatype,
+    SignalDatatypeT,
+    SignalMetadata,
+    StrictEnum,
+    Table,
+    get_enum_cls,
     get_unique,
-    is_pydantic_model,
+    make_datakey,
     wait_for_connection,
 )
 
-from ._common import LimitPair, Limits, common_meta, get_supported_values
-
-# https://mdavidsaver.github.io/p4p/values.html
-specifier_to_dtype: dict[str, Dtype] = {
-    "?": "integer",  # bool
-    "b": "integer",  # int8
-    "B": "integer",  # uint8
-    "h": "integer",  # int16
-    "H": "integer",  # uint16
-    "i": "integer",  # int32
-    "I": "integer",  # uint32
-    "l": "integer",  # int64
-    "L": "integer",  # uint64
-    "f": "number",  # float32
-    "d": "number",  # float64
-    "s": "string",
-}
-
-specifier_to_np_dtype: dict[str, str] = {
-    "?": "<i2",  # bool
-    "b": "|i1",  # int8
-    "B": "|u1",  # uint8
-    "h": "<i2",  # int16
-    "H": "<u2",  # uint16
-    "i": "<i4",  # int32
-    "I": "<u4",  # uint32
-    "l": "<i8",  # int64
-    "L": "<u8",  # uint64
-    "f": "<f4",  # float32
-    "d": "<f8",  # float64
-    "s": "|S40",
-}
-
-
-def _data_key_from_value(
-    source: str,
-    value: Value,
-    *,
-    shape: list[int] | None = None,
-    choices: list[str] | None = None,
-    dtype: Dtype | None = None,
-) -> DataKey:
-    """
-    Args:
-        value (Value): Description of the the return type of a DB record
-        shape: Optional override shape when len(shape) > 1
-        choices: Optional list of enum choices to pass as metadata in the datakey
-        dtype: Optional override dtype when AugmentedValue is ambiguous, e.g. booleans
-
-    Returns:
-        DataKey: A rich DataKey describing the DB record
-    """
-    shape = shape or []
-    type_code = value.type().aspy("value")
-
-    dtype = dtype or specifier_to_dtype[type_code]
-
-    try:
-        if isinstance(type_code, tuple):
-            dtype_numpy = ""
-            if type_code[1] == "enum_t":
-                if dtype == "boolean":
-                    dtype_numpy = "<i2"
-                else:
-                    for item in type_code[2]:
-                        if item[0] == "choices":
-                            dtype_numpy = specifier_to_np_dtype[item[1][1]]
-        elif not type_code.startswith("a"):
-            dtype_numpy = specifier_to_np_dtype[type_code]
-        else:
-            # Array type, use typecode of internal element
-            dtype_numpy = specifier_to_np_dtype[type_code[1]]
-    except KeyError:
-        # Case where we can't determine dtype string from value
-        dtype_numpy = ""
-
-    display_data = getattr(value, "display", None)
-
-    d = DataKey(
-        source=source,
-        dtype=dtype,
-        # type ignore until https://github.com/bluesky/event-model/issues/308
-        dtype_numpy=dtype_numpy,  # type: ignore
-        shape=shape,
-    )
-    if display_data is not None:
-        for key in common_meta:
-            attr = getattr(display_data, key, nan)
-            if isinstance(attr, str) or not isnan(attr):
-                d[key] = attr
-
-    if choices is not None:
-        # type ignore until https://github.com/bluesky/event-model/issues/309
-        d["choices"] = choices  # type: ignore
-
-    if limits := _limits_from_value(value):
-        # type ignore until https://github.com/bluesky/event-model/issues/309
-        d["limits"] = limits  # type: ignore
+from ._common import format_datatype, get_supported_values
 
-    return d
 
-
-def _limits_from_value(value: Value) -> Limits:
+def _limits_from_value(value: Any) -> Limits:
     def get_limits(
         substucture_name: str, low_name: str = "limitLow", high_name: str = "limitHigh"
-    ) -> LimitPair:
+    ) -> LimitsRange | None:
         substructure = getattr(value, substucture_name, None)
         low = getattr(substructure, low_name, nan)
         high = getattr(substructure, high_name, nan)
-        return LimitPair(
-            low=None if isnan(low) else low, high=None if isnan(high) else high
-        )
-
-    return Limits(
-        alarm=get_limits("valueAlarm", "lowAlarmLimit", "highAlarmLimit"),
-        control=get_limits("control"),
-        display=get_limits("display"),
-        warning=get_limits("valueAlarm", "lowWarningLimit", "highWarningLimit"),
-    )
-
+        if not (isnan(low) and isnan(high)):
+            return LimitsRange(
+                low=None if isnan(low) else low,
+                high=None if isnan(high) else high,
+            )
 
-class PvaConverter:
-    def write_value(self, value):
-        return value
+    limits = Limits()
+    if limits_range := get_limits("valueAlarm", "lowAlarmLimit", "highAlarmLimit"):
+        limits["alarm"] = limits_range
+    if limits_range := get_limits("control"):
+        limits["control"] = limits_range
+    if limits_range := get_limits("display"):
+        limits["display"] = limits_range
+    if limits_range := get_limits("valueAlarm", "lowWarningLimit", "highWarningLimit"):
+        limits["warning"] = limits_range
+    return limits
+
+
+def _metadata_from_value(datatype: type[SignalDatatype], value: Any) -> SignalMetadata:
+    metadata = SignalMetadata()
+    value_data: Any = getattr(value, "value", None)
+    display_data: Any = getattr(value, "display", None)
+    if hasattr(display_data, "units"):
+        metadata["units"] = display_data.units
+    if hasattr(display_data, "precision") and not isnan(display_data.precision):
+        metadata["precision"] = display_data.precision
+    if limits := _limits_from_value(value):
+        metadata["limits"] = limits
+    # Get choices from display or value
+    if datatype is str or issubclass(datatype, StrictEnum):
+        if hasattr(display_data, "choices"):
+            metadata["choices"] = display_data.choices
+        elif hasattr(value_data, "choices"):
+            metadata["choices"] = value_data.choices
+    return metadata
 
-    def value(self, value):
-        return value["value"]
 
-    def reading(self, value) -> Reading:
-        ts = value["timeStamp"]
-        sv = value["alarm"]["severity"]
-        return {
-            "value": self.value(value),
-            "timestamp": ts["secondsPastEpoch"] + ts["nanoseconds"] * 1e-9,
-            "alarm_severity": -1 if sv > 2 else sv,
-        }
+class PvaConverter(Generic[SignalDatatypeT]):
+    value_fields = ("value",)
+    reading_fields = ("alarm", "timeStamp")
 
-    def get_datakey(self, source: str, value) -> DataKey:
-        return _data_key_from_value(source, value)
+    def __init__(self, datatype: type[SignalDatatypeT]):
+        self.datatype = datatype
 
-    def metadata_fields(self) -> list[str]:
-        """
-        Fields to request from PVA for metadata.
-        """
-        return ["alarm", "timeStamp"]
+    def value(self, value: Any) -> SignalDatatypeT:
+        # for channel access ca_xxx classes, this
+        # invokes __pos__ operator to return an instance of
+        # the builtin base class
+        return value["value"]
 
-    def value_fields(self) -> list[str]:
-        """
-        Fields to request from PVA for the value.
-        """
-        return ["value"]
+    def write_value(self, value: Any) -> Any:
+        # The pva library will do the conversion for us
+        return value
 
 
-class PvaArrayConverter(PvaConverter):
-    def get_datakey(self, source: str, value) -> DataKey:
-        return _data_key_from_value(
-            source, value, dtype="array", shape=[len(value["value"])]
-        )
+class DisconnectedPvaConverter(PvaConverter):
+    def __getattribute__(self, __name: str) -> Any:
+        raise NotImplementedError("No PV has been set as connect() has not been called")
 
 
-class PvaNDArrayConverter(PvaConverter):
-    def metadata_fields(self) -> list[str]:
-        return super().metadata_fields() + ["dimension"]
+class PvaNDArrayConverter(PvaConverter[SignalDatatypeT]):
+    value_fields = ("value", "dimension")
 
     def _get_dimensions(self, value) -> list[int]:
         dimensions: list[Value] = value["dimension"]
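
Note on the hunk above: the DataKey-building helpers are replaced by `_limits_from_value`/`_metadata_from_value`, and `PvaConverter` slims down to class-level field tuples plus a stored `datatype`. Below is a minimal, self-contained sketch of that new converter shape; the class names here are stand-ins rather than the package's own, and only the request-string formatting matches the diff exactly.

    from typing import Any, Generic, TypeVar

    T = TypeVar("T")


    class Converter(Generic[T]):
        # Class-level tuples replace the old value_fields()/metadata_fields()
        # methods; they are concatenated into one PVA request string.
        value_fields: tuple[str, ...] = ("value",)
        reading_fields: tuple[str, ...] = ("alarm", "timeStamp")

        def __init__(self, datatype: type[T]):
            # Each converter now carries the Python datatype it produces
            self.datatype = datatype


    class NDArrayConverter(Converter[Any]):
        # Extra field needed to reshape the flat array, as in PvaNDArrayConverter
        value_fields = ("value", "dimension")


    def request_string(fields: tuple[str, ...]) -> str:
        # Same formatting as _pva_request_string in this diff
        return f"field({','.join(fields)})"


    c = NDArrayConverter(object)
    assert request_string(c.value_fields + c.reading_fields) == (
        "field(value,dimension,alarm,timeStamp)"
    )
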
@@ -205,243 +114,206 @@ class PvaNDArrayConverter(PvaConverter):
         # last index changing fastest.
         return dims[::-1]
 
-    def value(self, value):
+    def value(self, value: Any) -> SignalDatatypeT:
         dims = self._get_dimensions(value)
         return value["value"].reshape(dims)
 
-    def get_datakey(self, source: str, value) -> DataKey:
-        dims = self._get_dimensions(value)
-        return _data_key_from_value(source, value, dtype="array", shape=dims)
-
-    def write_value(self, value):
+    def write_value(self, value: Any) -> Any:
         # No clear use-case for writing directly to an NDArray, and some
         # complexities around flattening to 1-D - e.g. dimension-order.
         # Don't support this for now.
         raise TypeError("Writing to NDArray not supported")
 
 
-@dataclass
-class PvaEnumConverter(PvaConverter):
-    """To prevent issues when a signal is restarted and returns with different enum
-    values or orders, we put treat an Enum signal as a string, and cache the
-    choices on this class.
-    """
-
-    def __init__(self, choices: dict[str, str]):
-        self.choices = tuple(choices.values())
+class PvaEnumConverter(PvaConverter[str]):
+    def __init__(
+        self, datatype: type[str] = str, supported_values: Mapping[str, str] = {}
+    ):
+        self.supported_values = supported_values
+        super().__init__(datatype)
 
-    def write_value(self, value: Enum | str):
-        if isinstance(value, Enum):
-            return value.value
+    def value(self, value: Any) -> str:
+        str_value = value["value"]["choices"][value["value"]["index"]]
+        if self.supported_values:
+            return self.supported_values[str_value]
         else:
-            return value
-
-    def value(self, value):
-        return self.choices[value["value"]["index"]]
+            return str_value
 
-    def get_datakey(self, source: str, value) -> DataKey:
-        return _data_key_from_value(
-            source, value, choices=list(self.choices), dtype="string"
-        )
 
+class PvaEnumBoolConverter(PvaConverter[bool]):
+    def __init__(self):
+        super().__init__(bool)
 
-class PvaEmumBoolConverter(PvaConverter):
-    def value(self, value):
+    def value(self, value: Any) -> bool:
         return bool(value["value"]["index"])
 
-    def get_datakey(self, source: str, value) -> DataKey:
-        return _data_key_from_value(source, value, dtype="boolean")
-
-
-class PvaTableConverter(PvaConverter):
-    def value(self, value):
-        return value["value"].todict()
 
-    def get_datakey(self, source: str, value) -> DataKey:
-        # This is wrong, but defer until we know how to actually describe a table
-        return _data_key_from_value(source, value, dtype="object")  # type: ignore
+class PvaTableConverter(PvaConverter[Table]):
+    def value(self, value) -> Table:
+        return self.datatype(**value["value"].todict())
 
-
-class PvaPydanticModelConverter(PvaConverter):
-    def __init__(self, datatype: BaseModel):
-        self.datatype = datatype
-
-    def value(self, value: Value):
-        return self.datatype(**value.todict())  # type: ignore
-
-    def write_value(self, value: BaseModel | dict[str, Any]):
-        if isinstance(value, self.datatype):  # type: ignore
-            return value.model_dump(mode="python")  # type: ignore
+    def write_value(self, value: BaseModel | dict[str, Any]) -> Any:
+        if isinstance(value, self.datatype):
+            return value.model_dump(mode="python")
         return value
 
 
-class PvaDictConverter(PvaConverter):
-    def reading(self, value) -> Reading:
-        ts = time.time()
-        value = value.todict()
-        # Alarm severity is vacuously 0 for a table
-        return {"value": value, "timestamp": ts, "alarm_severity": 0}
-
-    def value(self, value: Value):
-        return value.todict()
-
-    def get_datakey(self, source: str, value) -> DataKey:
-        raise NotImplementedError("Describing Dict signals not currently supported")
-
-    def metadata_fields(self) -> list[str]:
-        """
-        Fields to request from PVA for metadata.
-        """
-        return []
-
-    def value_fields(self) -> list[str]:
-        """
-        Fields to request from PVA for the value.
-        """
-        return []
+# https://mdavidsaver.github.io/p4p/values.html
+_datatype_converter_from_typeid: dict[
+    tuple[str, str], tuple[type[SignalDatatype], type[PvaConverter]]
+] = {
+    ("epics:nt/NTScalar:1.0", "?"): (bool, PvaConverter),
+    ("epics:nt/NTScalar:1.0", "b"): (int, PvaConverter),
+    ("epics:nt/NTScalar:1.0", "B"): (int, PvaConverter),
+    ("epics:nt/NTScalar:1.0", "h"): (int, PvaConverter),
+    ("epics:nt/NTScalar:1.0", "H"): (int, PvaConverter),
+    ("epics:nt/NTScalar:1.0", "i"): (int, PvaConverter),
+    ("epics:nt/NTScalar:1.0", "I"): (int, PvaConverter),
+    ("epics:nt/NTScalar:1.0", "l"): (int, PvaConverter),
+    ("epics:nt/NTScalar:1.0", "L"): (int, PvaConverter),
+    ("epics:nt/NTScalar:1.0", "f"): (float, PvaConverter),
+    ("epics:nt/NTScalar:1.0", "d"): (float, PvaConverter),
+    ("epics:nt/NTScalar:1.0", "s"): (str, PvaConverter),
+    ("epics:nt/NTEnum:1.0", "S"): (str, PvaEnumConverter),
+    ("epics:nt/NTScalarArray:1.0", "a?"): (Array1D[np.bool_], PvaConverter),
+    ("epics:nt/NTScalarArray:1.0", "ab"): (Array1D[np.int8], PvaConverter),
+    ("epics:nt/NTScalarArray:1.0", "aB"): (Array1D[np.uint8], PvaConverter),
+    ("epics:nt/NTScalarArray:1.0", "ah"): (Array1D[np.int16], PvaConverter),
+    ("epics:nt/NTScalarArray:1.0", "aH"): (Array1D[np.uint16], PvaConverter),
+    ("epics:nt/NTScalarArray:1.0", "ai"): (Array1D[np.int32], PvaConverter),
+    ("epics:nt/NTScalarArray:1.0", "aI"): (Array1D[np.uint32], PvaConverter),
+    ("epics:nt/NTScalarArray:1.0", "al"): (Array1D[np.int64], PvaConverter),
+    ("epics:nt/NTScalarArray:1.0", "aL"): (Array1D[np.uint64], PvaConverter),
+    ("epics:nt/NTScalarArray:1.0", "af"): (Array1D[np.float32], PvaConverter),
+    ("epics:nt/NTScalarArray:1.0", "ad"): (Array1D[np.float64], PvaConverter),
+    ("epics:nt/NTScalarArray:1.0", "as"): (Sequence[str], PvaConverter),
+    ("epics:nt/NTTable:1.0", "S"): (Table, PvaTableConverter),
+    ("epics:nt/NTNDArray:1.0", "v"): (np.ndarray, PvaNDArrayConverter),
+}
 
 
-class DisconnectedPvaConverter(PvaConverter):
-    def __getattribute__(self, __name: str) -> Any:
-        raise NotImplementedError("No PV has been set as connect() has not been called")
+def _get_specifier(value: Value):
+    typ = value.type("value").aspy()
+    if isinstance(typ, tuple):
+        return typ[0]
+    else:
+        return str(typ)
 
 
 def make_converter(datatype: type | None, values: dict[str, Any]) -> PvaConverter:
     pv = list(values)[0]
     typeid = get_unique({k: v.getID() for k, v in values.items()}, "typeids")
-    typ = get_unique(
-        {k: type(v.get("value")) for k, v in values.items()}, "value types"
+    specifier = get_unique(
+        {k: _get_specifier(v) for k, v in values.items()},
+        "value type specifiers",
     )
-    if "NTScalarArray" in typeid and typ is list:
-        # Waveform of strings, check we wanted this
-        if datatype and datatype != Sequence[str]:
-            raise TypeError(f"{pv} has type [str] not {datatype.__name__}")
-        return PvaArrayConverter()
-    elif "NTScalarArray" in typeid or "NTNDArray" in typeid:
-        pv_dtype = get_unique(
-            {k: v["value"].dtype for k, v in values.items()}, "dtypes"
-        )
-        # This is an array
-        if datatype:
-            # Check we wanted an array of this type
-            dtype = get_dtype(datatype)
-            if not dtype:
-                raise TypeError(f"{pv} has type [{pv_dtype}] not {datatype.__name__}")
-            if dtype != pv_dtype:
-                raise TypeError(f"{pv} has type [{pv_dtype}] not [{dtype}]")
-        if "NTNDArray" in typeid:
-            return PvaNDArrayConverter()
-        else:
-            return PvaArrayConverter()
-    elif "NTEnum" in typeid and datatype is bool:
-        # Wanted a bool, but database represents as an enum
-        pv_choices_len = get_unique(
+    # Infer a datatype and converter from the typeid and specifier
+    inferred_datatype, converter_cls = _datatype_converter_from_typeid[
+        (typeid, specifier)
+    ]
+    # Some override cases
+    if datatype is bool and typeid == "epics:nt/NTEnum:1.0":
+        # Database can't do bools, so are often representated as enums of len 2
+        pv_num_choices = get_unique(
            {k: len(v["value"]["choices"]) for k, v in values.items()},
            "number of choices",
        )
-        if pv_choices_len != 2:
-            raise TypeError(f"{pv} has {pv_choices_len} choices, can't map to bool")
-        return PvaEmumBoolConverter()
-    elif "NTEnum" in typeid:
-        # This is an Enum
+        if pv_num_choices != 2:
+            raise TypeError(f"{pv} has {pv_num_choices} choices, can't map to bool")
+        return PvaEnumBoolConverter()
+    elif typeid == "epics:nt/NTEnum:1.0":
         pv_choices = get_unique(
            {k: tuple(v["value"]["choices"]) for k, v in values.items()}, "choices"
        )
-        return PvaEnumConverter(get_supported_values(pv, datatype, pv_choices))
-    elif "NTScalar" in typeid:
-        if (
-            typ is str
-            and inspect.isclass(datatype)
-            and issubclass(datatype, RuntimeSubsetEnum)
-        ):
+        if enum_cls := get_enum_cls(datatype):
+            # We were given an enum class, so make class from that
             return PvaEnumConverter(
-                get_supported_values(pv, datatype, datatype.choices)  # type: ignore
-            )
-        elif datatype and not issubclass(typ, datatype):
-            # Allow int signals to represent float records when prec is 0
-            is_prec_zero_float = typ is float and (
-                get_unique(
-                    {k: v["display"]["precision"] for k, v in values.items()},
-                    "precision",
-                )
-                == 0
+                supported_values=get_supported_values(pv, enum_cls, pv_choices)
            )
-            if not (datatype is int and is_prec_zero_float):
-                raise TypeError(f"{pv} has type {typ.__name__} not {datatype.__name__}")
-        return PvaConverter()
-    elif "NTTable" in typeid:
-        if is_pydantic_model(datatype):
-            return PvaPydanticModelConverter(datatype)  # type: ignore
-        return PvaTableConverter()
-    elif "structure" in typeid:
-        return PvaDictConverter()
-    else:
-        raise TypeError(f"{pv}: Unsupported typeid {typeid}")
-
-
-class PvaSignalBackend(SignalBackend[T]):
-    _ctxt: Context | None = None
-
-    _ALLOWED_DATATYPES = (
-        bool,
-        int,
-        float,
-        str,
-        Sequence,
-        np.ndarray,
-        Enum,
-        RuntimeSubsetEnum,
-        BaseModel,
-        dict,
+        elif datatype in (None, str):
+            # Still use the Enum converter, but make choices from what it has
+            return PvaEnumConverter()
+    elif (
+        inferred_datatype is float
+        and datatype is int
+        and get_unique(
+            {k: v["display"]["precision"] for k, v in values.items()}, "precision"
+        )
+        == 0
+    ):
+        # Allow int signals to represent float records when prec is 0
+        return PvaConverter(int)
+    elif inferred_datatype is str and (enum_cls := get_enum_cls(datatype)):
+        # Allow strings to be used as enums until QSRV supports this
+        return PvaConverter(str)
+    elif inferred_datatype is Table and datatype and issubclass(datatype, Table):
+        # Use a custom table class
+        return PvaTableConverter(datatype)
+    elif datatype in (None, inferred_datatype):
+        # If datatype matches what we are given then allow it and use inferred converter
+        return converter_cls(inferred_datatype)
+    raise TypeError(
+        f"{pv} with inferred datatype {format_datatype(inferred_datatype)}"
+        f" from {typeid=} {specifier=}"
+        f" cannot be coerced to {format_datatype(datatype)}"
    )
 
-    @classmethod
-    def datatype_allowed(cls, dtype: Any) -> bool:
-        stripped_origin = get_origin(dtype) or dtype
-        if dtype is None:
-            return True
-        return inspect.isclass(stripped_origin) and issubclass(
-            stripped_origin, cls._ALLOWED_DATATYPES
-        )
 
-    def __init__(self, datatype: type[T] | None, read_pv: str, write_pv: str):
-        self.datatype = datatype
-        if not PvaSignalBackend.datatype_allowed(self.datatype):
-            raise TypeError(f"Given datatype {self.datatype} unsupported in PVA.")
+_context: Context | None = None
+
+
+def context() -> Context:
+    global _context
+    if _context is None:
+        _context = Context("pva", nt=False)
+
+        @atexit.register
+        def _del_ctxt():
+            # If we don't do this we get messages like this on close:
+            #   Error in sys.excepthook:
+            #   Original exception was:
+            global _context
+            del _context
 
+    return _context
+
+
+async def pvget_with_timeout(pv: str, timeout: float) -> Any:
+    try:
+        return await asyncio.wait_for(context().get(pv), timeout=timeout)
+    except asyncio.TimeoutError as exc:
+        logging.debug(f"signal pva://{pv} timed out", exc_info=True)
+        raise NotConnected(f"pva://{pv}") from exc
+
+
+def _pva_request_string(fields: Sequence[str]) -> str:
+    """Converts a list of requested fields into a PVA request string which can be
+    passed to p4p.
+    """
+    return f"field({','.join(fields)})"
+
+
+class PvaSignalBackend(SignalBackend[SignalDatatypeT]):
+    def __init__(
+        self,
+        datatype: type[SignalDatatypeT] | None,
+        read_pv: str = "",
+        write_pv: str = "",
+    ):
         self.read_pv = read_pv
         self.write_pv = write_pv
+        self.converter: PvaConverter = DisconnectedPvaConverter(float)
         self.initial_values: dict[str, Any] = {}
-        self.converter: PvaConverter = DisconnectedPvaConverter()
         self.subscription: Subscription | None = None
+        super().__init__(datatype)
 
-    def source(self, name: str):
-        return f"pva://{self.read_pv}"
-
-    @property
-    def ctxt(self) -> Context:
-        if PvaSignalBackend._ctxt is None:
-            PvaSignalBackend._ctxt = Context("pva", nt=False)
+    def source(self, name: str, read: bool):
+        return f"pva://{self.read_pv if read else self.write_pv}"
 
-            @atexit.register
-            def _del_ctxt():
-                # If we don't do this we get messages like this on close:
-                #   Error in sys.excepthook:
-                #   Original exception was:
-                PvaSignalBackend._ctxt = None
-
-        return PvaSignalBackend._ctxt
-
-    async def _store_initial_value(self, pv, timeout: float = DEFAULT_TIMEOUT):
-        try:
-            self.initial_values[pv] = await asyncio.wait_for(
-                self.ctxt.get(pv), timeout=timeout
-            )
-        except asyncio.TimeoutError as exc:
-            logging.debug(f"signal pva://{pv} timed out", exc_info=True)
-            raise NotConnected(f"pva://{pv}") from exc
+    async def _store_initial_value(self, pv: str, timeout: float):
+        self.initial_values[pv] = await pvget_with_timeout(pv, timeout)
 
-    async def connect(self, timeout: float = DEFAULT_TIMEOUT):
+    async def connect(self, timeout: float):
         if self.read_pv != self.write_pv:
             # Different, need to connect both
             await wait_for_connection(
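
Note on the hunk above: `make_converter` stops substring-matching typeids and instead resolves the pair `(typeid, specifier)` through a single lookup table, with a few override cases (bool-as-enum, precision-0 floats read as int, custom `Table` subclasses) layered on top. A hedged, self-contained sketch of the dispatch idea; the dict below is a stand-in for a few rows of `_datatype_converter_from_typeid`, which also carries a converter class per entry.

    import numpy as np

    # Stand-in rows keyed by (normative typeid, p4p "value" field specifier)
    _lookup: dict[tuple[str, str], type] = {
        ("epics:nt/NTScalar:1.0", "d"): float,
        ("epics:nt/NTScalar:1.0", "i"): int,
        ("epics:nt/NTScalarArray:1.0", "ad"): np.ndarray,  # really Array1D[np.float64]
    }


    def infer_datatype(typeid: str, specifier: str) -> type:
        try:
            return _lookup[(typeid, specifier)]
        except KeyError:
            # Unknown combinations simply have no entry in the table
            raise TypeError(f"Unsupported {typeid=} {specifier=}") from None


    assert infer_datatype("epics:nt/NTScalar:1.0", "d") is float
    assert infer_datatype("epics:nt/NTScalarArray:1.0", "ad") is np.ndarray
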
@@ -453,66 +325,61 @@ class PvaSignalBackend(SignalBackend[T]):
         await self._store_initial_value(self.read_pv, timeout=timeout)
         self.converter = make_converter(self.datatype, self.initial_values)
 
-    async def put(self, value: T | None, wait=True, timeout=None):
+    def _make_reading(self, value: Any) -> Reading[SignalDatatypeT]:
+        ts = value["timeStamp"]
+        sv = value["alarm"]["severity"]
+        return {
+            "value": self.converter.value(value),
+            "timestamp": ts["secondsPastEpoch"] + ts["nanoseconds"] * 1e-9,
+            "alarm_severity": -1 if sv > 2 else sv,
+        }
+
+    async def put(self, value: SignalDatatypeT | None, wait: bool):
         if value is None:
             write_value = self.initial_values[self.write_pv]
         else:
             write_value = self.converter.write_value(value)
-        coro = self.ctxt.put(self.write_pv, {"value": write_value}, wait=wait)
-        try:
-            await asyncio.wait_for(coro, timeout)
-        except asyncio.TimeoutError as exc:
-            logging.debug(
-                f"signal pva://{self.write_pv} timed out \
-                put value: {write_value}",
-                exc_info=True,
-            )
-            raise NotConnected(f"pva://{self.write_pv}") from exc
+        await context().put(self.write_pv, {"value": write_value}, wait=wait)
 
     async def get_datakey(self, source: str) -> DataKey:
-        value = await self.ctxt.get(self.read_pv)
-        return self.converter.get_datakey(source, value)
-
-    def _pva_request_string(self, fields: list[str]) -> str:
-        """
-        Converts a list of requested fields into a PVA request string which can be
-        passed to p4p.
-        """
-        return f"field({','.join(fields)})"
+        value = await context().get(self.read_pv)
+        metadata = _metadata_from_value(self.converter.datatype, value)
+        return make_datakey(
+            self.converter.datatype, self.converter.value(value), source, metadata
+        )
 
     async def get_reading(self) -> Reading:
-        request: str = self._pva_request_string(
-            self.converter.value_fields() + self.converter.metadata_fields()
+        request = _pva_request_string(
+            self.converter.value_fields + self.converter.reading_fields
        )
-        value = await self.ctxt.get(self.read_pv, request=request)
-        return self.converter.reading(value)
+        value = await context().get(self.read_pv, request=request)
+        return self._make_reading(value)
 
-    async def get_value(self) -> T:
-        request: str = self._pva_request_string(self.converter.value_fields())
-        value = await self.ctxt.get(self.read_pv, request=request)
+    async def get_value(self) -> SignalDatatypeT:
+        request = _pva_request_string(self.converter.value_fields)
+        value = await context().get(self.read_pv, request=request)
         return self.converter.value(value)
 
-    async def get_setpoint(self) -> T:
-        value = await self.ctxt.get(self.write_pv, "field(value)")
+    async def get_setpoint(self) -> SignalDatatypeT:
+        request = _pva_request_string(self.converter.value_fields)
+        value = await context().get(self.write_pv, request=request)
         return self.converter.value(value)
 
-    def set_callback(self, callback: ReadingValueCallback[T] | None) -> None:
+    def set_callback(self, callback: Callback[Reading[SignalDatatypeT]] | None) -> None:
         if callback:
             assert (
                 not self.subscription
             ), "Cannot set a callback when one is already set"
 
             async def async_callback(v):
-                callback(self.converter.reading(v), self.converter.value(v))
+                callback(self._make_reading(v))
 
-            request: str = self._pva_request_string(
-                self.converter.value_fields() + self.converter.metadata_fields()
+            request = _pva_request_string(
+                self.converter.value_fields + self.converter.reading_fields
            )
-
-            self.subscription = self.ctxt.monitor(
+            self.subscription = context().monitor(
                 self.read_pv, async_callback, request=request
            )
-        else:
-            if self.subscription:
-                self.subscription.close()
+        elif self.subscription:
+            self.subscription.close()
             self.subscription = None
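
Note on the net effect for callers: `PvaSignalBackend` now takes the datatype first with the read/write PV names defaulting to empty strings, `connect()` requires an explicit timeout, `source()` takes a `read` flag, and callbacks receive a single `Reading` rather than a `(reading, value)` pair. A hedged usage sketch against a hypothetical PV DEMO:VALUE served over PVA; it imports the private `_p4p` module shown in this diff, so treat it as illustration only.

    import asyncio

    from ophyd_async.epics.signal._p4p import PvaSignalBackend


    async def main() -> None:
        backend = PvaSignalBackend(float, "DEMO:VALUE", "DEMO:VALUE")
        await backend.connect(timeout=10.0)  # timeout no longer defaults
        print(backend.source("demo", read=True))  # -> "pva://DEMO:VALUE"
        print(await backend.get_value())
        # The callback now gets just the Reading; the value is reading["value"]
        backend.set_callback(lambda reading: print(reading["value"]))


    asyncio.run(main())
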