ophyd-async 0.3.1a1__py3-none-any.whl → 0.3.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ophyd_async/_version.py CHANGED
@@ -12,5 +12,5 @@ __version__: str
  __version_tuple__: VERSION_TUPLE
  version_tuple: VERSION_TUPLE

- __version__ = version = '0.3.1a1'
- __version_tuple__ = version_tuple = (0, 3, 1)
+ __version__ = version = '0.3.2'
+ __version_tuple__ = version_tuple = (0, 3, 2)
ophyd_async/core/device_save_loader.py CHANGED
@@ -1,6 +1,5 @@
  from enum import Enum
- from functools import partial
- from typing import Any, Callable, Dict, Generator, List, Optional, Sequence, Union
+ from typing import Any, Callable, Dict, Generator, List, Optional, Sequence

  import numpy as np
  import numpy.typing as npt
@@ -8,13 +7,10 @@ import yaml
  from bluesky.plan_stubs import abs_set, wait
  from bluesky.protocols import Location
  from bluesky.utils import Msg
- from epicscorelibs.ca.dbr import ca_array, ca_float, ca_int, ca_str

  from .device import Device
  from .signal import SignalRW

- CaType = Union[ca_float, ca_int, ca_str, ca_array]
-

  def ndarray_representer(dumper: yaml.Dumper, array: npt.NDArray[Any]) -> yaml.Node:
      return dumper.represent_sequence(
@@ -22,19 +18,6 @@ def ndarray_representer(dumper: yaml.Dumper, array: npt.NDArray[Any]) -> yaml.No
      )


- def ca_dbr_representer(dumper: yaml.Dumper, value: CaType) -> yaml.Node:
-     # if it's an array, just call ndarray_representer...
-     represent_array = partial(ndarray_representer, dumper)
-
-     representers: Dict[CaType, Callable[[CaType], yaml.Node]] = {
-         ca_float: dumper.represent_float,
-         ca_int: dumper.represent_int,
-         ca_str: dumper.represent_str,
-         ca_array: represent_array,
-     }
-     return representers[type(value)](value)
-
-
  class OphydDumper(yaml.Dumper):
      def represent_data(self, data: Any) -> Any:
          if isinstance(data, Enum):
@@ -152,11 +135,6 @@ def save_to_yaml(phases: Sequence[Dict[str, Any]], save_path: str) -> None:

      yaml.add_representer(np.ndarray, ndarray_representer, Dumper=yaml.Dumper)

-     yaml.add_representer(ca_float, ca_dbr_representer, Dumper=yaml.Dumper)
-     yaml.add_representer(ca_int, ca_dbr_representer, Dumper=yaml.Dumper)
-     yaml.add_representer(ca_str, ca_dbr_representer, Dumper=yaml.Dumper)
-     yaml.add_representer(ca_array, ca_dbr_representer, Dumper=yaml.Dumper)
-
      with open(save_path, "w") as file:
          yaml.dump(phases, file, Dumper=OphydDumper, default_flow_style=False)

ophyd_async/core/signal.py CHANGED
@@ -31,7 +31,7 @@ from ophyd_async.protocols import AsyncConfigurable, AsyncReadable, AsyncStageab
  from .async_status import AsyncStatus
  from .device import Device
  from .signal_backend import SignalBackend
- from .soft_signal_backend import SoftSignalBackend
+ from .soft_signal_backend import SignalMetadata, SoftSignalBackend
  from .utils import DEFAULT_TIMEOUT, CalculatableTimeout, CalculateTimeout, Callback, T


@@ -57,7 +57,7 @@ class Signal(Device, Generic[T]):

      def __init__(
          self,
-         backend: SignalBackend[T],
+         backend: Optional[SignalBackend[T]] = None,
          timeout: Optional[float] = DEFAULT_TIMEOUT,
          name: str = "",
      ) -> None:
@@ -66,13 +66,24 @@ class Signal(Device, Generic[T]):
          super().__init__(name)

      async def connect(
-         self, mock=False, timeout=DEFAULT_TIMEOUT, force_reconnect: bool = False
+         self,
+         mock=False,
+         timeout=DEFAULT_TIMEOUT,
+         force_reconnect: bool = False,
+         backend: Optional[SignalBackend[T]] = None,
      ):
+         if backend:
+             if self._initial_backend and backend is not self._initial_backend:
+                 raise ValueError(
+                     "Backend at connection different from initialised one."
+                 )
+             self._backend = backend
          if mock and not isinstance(self._backend, MockSignalBackend):
              # Using a soft backend, look to the initial value
-             self._backend = MockSignalBackend(
-                 initial_backend=self._initial_backend,
-             )
+             self._backend = MockSignalBackend(initial_backend=self._backend)
+
+         if self._backend is None:
+             raise RuntimeError("`connect` called on signal without backend")
          self.log.debug(f"Connecting to {self.source}")
          await self._backend.connect(timeout=timeout)

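The optional backend argument lets a Signal be constructed first and given its backend at connection time. A minimal sketch of this new path, under assumed usage (not taken verbatim from the package's docs), using the SignalRW and SoftSignalBackend classes from this diff:

    async def example() -> None:
        sig = SignalRW()  # backend is now Optional at __init__
        await sig.connect(backend=SoftSignalBackend(int, initial_value=0))
        # Per the diff: reconnecting with a different backend raises ValueError,
        # and connect() with no backend ever supplied raises RuntimeError.
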
@@ -261,9 +272,17 @@ def soft_signal_rw(
      datatype: Optional[Type[T]] = None,
      initial_value: Optional[T] = None,
      name: str = "",
+     units: str | None = None,
+     precision: int | None = None,
  ) -> SignalRW[T]:
-     """Creates a read-writable Signal with a SoftSignalBackend"""
-     signal = SignalRW(SoftSignalBackend(datatype, initial_value), name=name)
+     """Creates a read-writable Signal with a SoftSignalBackend.
+     May pass metadata, which are propagated into describe.
+     """
+     metadata = SignalMetadata(units=units, precision=precision)
+     signal = SignalRW(
+         SoftSignalBackend(datatype, initial_value, metadata=metadata),
+         name=name,
+     )
      return signal

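A usage sketch for the new metadata parameters (signal name and values are illustrative, assuming the usual public re-export from ophyd_async.core):

    temperature = soft_signal_rw(
        float, initial_value=20.0, name="temperature", units="K", precision=2
    )
    # The datakey returned by describe() now carries "units": "K", "precision": 2
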
@@ -271,27 +290,31 @@ def soft_signal_r_and_setter(
      datatype: Optional[Type[T]] = None,
      initial_value: Optional[T] = None,
      name: str = "",
+     units: str | None = None,
+     precision: int | None = None,
  ) -> Tuple[SignalR[T], Callable[[T], None]]:
      """Returns a tuple of a read-only Signal and a callable through
-     which the signal can be internally modified within the device. Use
-     soft_signal_rw if you want a device that is externally modifiable
+     which the signal can be internally modified within the device.
+     May pass metadata, which are propagated into describe.
+     Use soft_signal_rw if you want a device that is externally modifiable
      """
-     backend = SoftSignalBackend(datatype, initial_value)
+     metadata = SignalMetadata(units=units, precision=precision)
+     backend = SoftSignalBackend(datatype, initial_value, metadata=metadata)
      signal = SignalR(backend, name=name)

      return (signal, backend.set_value)


  def _generate_assert_error_msg(
-     name: str, expected_result: str, actuall_result: str
+     name: str, expected_result: str, actual_result: str
  ) -> str:
      WARNING = "\033[93m"
      FAIL = "\033[91m"
      ENDC = "\033[0m"
      return (
          f"Expected {WARNING}{name}{ENDC} to produce"
-         + f"\n{FAIL}{actuall_result}{ENDC}"
-         + f"\nbut actually got \n{FAIL}{expected_result}{ENDC}"
+         + f"\n{FAIL}{expected_result}{ENDC}"
+         + f"\nbut actually got \n{FAIL}{actual_result}{ENDC}"
      )


@@ -313,7 +336,9 @@ async def assert_value(signal: SignalR[T], value: Any) -> None:
      """
      actual_value = await signal.get_value()
      assert actual_value == value, _generate_assert_error_msg(
-         signal.name, value, actual_value
+         name=signal.name,
+         expected_result=value,
+         actual_result=actual_value,
      )


@@ -338,7 +363,9 @@ async def assert_reading(
      """
      actual_reading = await readable.read()
      assert expected_reading == actual_reading, _generate_assert_error_msg(
-         readable.name, expected_reading, actual_reading
+         name=readable.name,
+         expected_result=expected_reading,
+         actual_result=actual_reading,
      )


@@ -364,7 +391,9 @@ async def assert_configuration(
      """
      actual_configurable = await configurable.read_configuration()
      assert configuration == actual_configurable, _generate_assert_error_msg(
-         configurable.name, configuration, actual_configurable
+         name=configurable.name,
+         expected_result=configuration,
+         actual_result=actual_configurable,
      )


@@ -386,11 +415,15 @@ def assert_emitted(docs: Mapping[str, list[dict]], **numbers: int):
              resource=1, datum=1, event=1, stop=1)
      """
      assert list(docs) == list(numbers), _generate_assert_error_msg(
-         "documents", list(numbers), list(docs)
+         name="documents",
+         expected_result=list(numbers),
+         actual_result=list(docs),
      )
      actual_numbers = {name: len(d) for name, d in docs.items()}
      assert actual_numbers == numbers, _generate_assert_error_msg(
-         "emitted", numbers, actual_numbers
+         name="emitted",
+         expected_result=numbers,
+         actual_result=actual_numbers,
      )

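The docstring fragment above shows the intended call shape; for example, assuming docs was collected from a run by a subscribed callback:

    assert_emitted(docs, start=1, descriptor=1, event=3, stop=1)
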
ophyd_async/core/soft_signal_backend.py CHANGED
@@ -5,7 +5,7 @@ import time
  from collections import abc
  from dataclasses import dataclass
  from enum import Enum
- from typing import Dict, Generic, Optional, Type, Union, cast, get_origin
+ from typing import Dict, Generic, Optional, Type, TypedDict, Union, cast, get_origin

  import numpy as np
  from bluesky.protocols import DataKey, Dtype, Reading
@@ -21,6 +21,11 @@ primitive_dtypes: Dict[type, Dtype] = {
  }


+ class SignalMetadata(TypedDict):
+     units: str | None = None
+     precision: int | None = None
+
+
  class SoftConverter(Generic[T]):
      def value(self, value: T) -> T:
          return value
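A small sketch of how SignalMetadata flows into the backend (assumed usage; the dtype "number" follows from primitive_dtypes[float]):

    backend = SoftSignalBackend(
        float,
        initial_value=0.0,
        metadata=SignalMetadata(units="mm", precision=3),
    )
    # await backend.get_datakey("soft://x") now merges the metadata, roughly:
    # {"source": "soft://x", "shape": [], "units": "mm", "precision": 3,
    #  "dtype": "number"}
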
@@ -35,7 +40,8 @@ class SoftConverter(Generic[T]):
              alarm_severity=-1 if severity > 2 else severity,
          )

-     def get_datakey(self, source: str, value) -> DataKey:
+     def get_datakey(self, source: str, value, **metadata) -> DataKey:
+         dk = {"source": source, "shape": [], **metadata}
          dtype = type(value)
          if np.issubdtype(dtype, np.integer):
              dtype = int
@@ -44,8 +50,8 @@ class SoftConverter(Generic[T]):
          assert (
              dtype in primitive_dtypes
          ), f"invalid converter for value of type {type(value)}"
-         dtype_name = primitive_dtypes[dtype]
-         return {"source": source, "dtype": dtype_name, "shape": []}
+         dk["dtype"] = primitive_dtypes[dtype]
+         return dk

      def make_initial_value(self, datatype: Optional[Type[T]]) -> T:
          if datatype is None:
@@ -55,8 +61,8 @@ class SoftConverter(Generic[T]):


  class SoftArrayConverter(SoftConverter):
-     def get_datakey(self, source: str, value) -> DataKey:
-         return {"source": source, "dtype": "array", "shape": [len(value)]}
+     def get_datakey(self, source: str, value, **metadata) -> DataKey:
+         return {"source": source, "dtype": "array", "shape": [len(value)], **metadata}

      def make_initial_value(self, datatype: Optional[Type[T]]) -> T:
          if datatype is None:
@@ -78,9 +84,15 @@ class SoftEnumConverter(SoftConverter):
          else:
              return self.enum_class(value)

-     def get_datakey(self, source: str, value) -> DataKey:
+     def get_datakey(self, source: str, value, **metadata) -> DataKey:
          choices = [e.value for e in self.enum_class]
-         return {"source": source, "dtype": "string", "shape": [], "choices": choices}  # type: ignore
+         return {
+             "source": source,
+             "dtype": "string",
+             "shape": [],
+             "choices": choices,
+             **metadata,
+         }

      def make_initial_value(self, datatype: Optional[Type[T]]) -> T:
          if datatype is None:
@@ -114,9 +126,11 @@ class SoftSignalBackend(SignalBackend[T]):
          self,
          datatype: Optional[Type[T]],
          initial_value: Optional[T] = None,
+         metadata: SignalMetadata = None,
      ) -> None:
          self.datatype = datatype
          self._initial_value = initial_value
+         self._metadata = metadata or {}
          self.converter: SoftConverter = make_converter(datatype)
          if self._initial_value is None:
              self._initial_value = self.converter.make_initial_value(self.datatype)
@@ -155,7 +169,7 @@ class SoftSignalBackend(SignalBackend[T]):
              self.callback(reading, self._value)

      async def get_datakey(self, source: str) -> DataKey:
-         return self.converter.get_datakey(source, self._value)
+         return self.converter.get_datakey(source, self._value, **self._metadata)

      async def get_reading(self) -> Reading:
          return self.converter.reading(self._value, self._timestamp, self._severity)
ophyd_async/epics/_backend/_aioca.py CHANGED
@@ -2,8 +2,10 @@ import logging
  import sys
  from dataclasses import dataclass
  from enum import Enum
- from typing import Any, Dict, Optional, Sequence, Type, Union
+ from math import isnan, nan
+ from typing import Any, Dict, List, Optional, Type, Union

+ import numpy as np
  from aioca import (
      FORMAT_CTRL,
      FORMAT_RAW,
@@ -28,7 +30,7 @@ from ophyd_async.core import (
  )
  from ophyd_async.core.utils import DEFAULT_TIMEOUT, NotConnected

- from .common import get_supported_values
+ from .common import LimitPair, Limits, common_meta, get_supported_values

  dbr_to_dtype: Dict[Dbr, Dtype] = {
      dbr.DBR_STRING: "string",
@@ -40,6 +42,66 @@ dbr_to_dtype: Dict[Dbr, Dtype] = {
  }


+ def _data_key_from_augmented_value(
+     value: AugmentedValue,
+     *,
+     choices: Optional[List[str]] = None,
+     dtype: Optional[str] = None,
+ ) -> DataKey:
+     """Use the return value of get with FORMAT_CTRL to construct a DataKey
+     describing the signal. See docstring of AugmentedValue for expected
+     value fields by DBR type.
+
+     Args:
+         value (AugmentedValue): Description of the the return type of a DB record
+         choices: Optional list of enum choices to pass as metadata in the datakey
+         dtype: Optional override dtype when AugmentedValue is ambiguous, e.g. booleans
+
+     Returns:
+         DataKey: A rich DataKey describing the DB record
+     """
+     source = f"ca://{value.name}"
+     assert value.ok, f"Error reading {source}: {value}"
+
+     scalar = value.element_count == 1
+     dtype = dtype or dbr_to_dtype[value.datatype]
+
+     d = DataKey(
+         source=source,
+         dtype=dtype if scalar else "array",
+         # strictly value.element_count >= len(value)
+         shape=[] if scalar else [len(value)],
+     )
+     for key in common_meta:
+         attr = getattr(value, key, nan)
+         if isinstance(attr, str) or not isnan(attr):
+             d[key] = attr
+
+     if choices is not None:
+         d["choices"] = choices
+
+     if limits := _limits_from_augmented_value(value):
+         d["limits"] = limits
+
+     return d
+
+
+ def _limits_from_augmented_value(value: AugmentedValue) -> Limits:
+     def get_limits(limit: str) -> LimitPair:
+         low = getattr(value, f"lower_{limit}_limit", nan)
+         high = getattr(value, f"upper_{limit}_limit", nan)
+         return LimitPair(
+             low=None if isnan(low) else low, high=None if isnan(high) else high
+         )
+
+     return Limits(
+         alarm=get_limits("alarm"),
+         control=get_limits("ctrl"),
+         display=get_limits("disp"),
+         warning=get_limits("warning"),
+     )
+
+
  @dataclass
  class CaConverter:
      read_dbr: Optional[Dbr]
@@ -49,7 +111,10 @@ class CaConverter:
          return value

      def value(self, value: AugmentedValue):
-         return value
+         # for channel access ca_xxx classes, this
+         # invokes __pos__ operator to return an instance of
+         # the builtin base class
+         return +value

      def reading(self, value: AugmentedValue):
          return {
@@ -58,8 +123,8 @@ class CaConverter:
              "alarm_severity": -1 if value.severity > 2 else value.severity,
          }

-     def get_datakey(self, source: str, value: AugmentedValue) -> DataKey:
-         return {"source": source, "dtype": dbr_to_dtype[value.datatype], "shape": []}
+     def get_datakey(self, value: AugmentedValue) -> DataKey:
+         return _data_key_from_augmented_value(value)


  class CaLongStrConverter(CaConverter):
@@ -73,12 +138,17 @@ class CaLongStrConverter(CaConverter):


  class CaArrayConverter(CaConverter):
-     def get_datakey(self, source: str, value: AugmentedValue) -> DataKey:
-         return {"source": source, "dtype": "array", "shape": [len(value)]}
+     def value(self, value: AugmentedValue):
+         return np.array(value, copy=False)


  @dataclass
  class CaEnumConverter(CaConverter):
+     """To prevent issues when a signal is restarted and returns with different enum
+     values or orders, we put treat an Enum signal as a string, and cache the
+     choices on this class.
+     """
+
      choices: dict[str, str]

      def write_value(self, value: Union[Enum, str]):
@@ -90,13 +160,18 @@ class CaEnumConverter(CaConverter):
      def value(self, value: AugmentedValue):
          return self.choices[value]

-     def get_datakey(self, source: str, value: AugmentedValue) -> DataKey:
-         return {
-             "source": source,
-             "dtype": "string",
-             "shape": [],
-             "choices": list(self.choices),
-         }
+     def get_datakey(self, value: AugmentedValue) -> DataKey:
+         # Sometimes DBR_TYPE returns as String, must pass choices still
+         return _data_key_from_augmented_value(value, choices=list(self.choices.keys()))
+
+
+ @dataclass
+ class CaBoolConverter(CaConverter):
+     def value(self, value: AugmentedValue) -> bool:
+         return bool(value)
+
+     def get_datakey(self, value: AugmentedValue) -> DataKey:
+         return _data_key_from_augmented_value(value, dtype="bool")


  class DisconnectedCaConverter(CaConverter):
@@ -115,8 +190,10 @@ def make_converter(
          return CaLongStrConverter()
      elif is_array and pv_dbr == dbr.DBR_STRING:
          # Waveform of strings, check we wanted this
-         if datatype and datatype != Sequence[str]:
-             raise TypeError(f"{pv} has type [str] not {datatype.__name__}")
+         if datatype:
+             datatype_dtype = get_dtype(datatype)
+             if not datatype_dtype or not np.can_cast(datatype_dtype, np.str_):
+                 raise TypeError(f"{pv} has type [str] not {datatype.__name__}")
          return CaArrayConverter(pv_dbr, None)
      elif is_array:
          pv_dtype = get_unique({k: v.dtype for k, v in values.items()}, "dtypes")
@@ -136,7 +213,7 @@ def make_converter(
          )
          if pv_choices_len != 2:
              raise TypeError(f"{pv} has {pv_choices_len} choices, can't map to bool")
-         return CaConverter(dbr.DBR_SHORT, dbr.DBR_SHORT)
+         return CaBoolConverter(dbr.DBR_SHORT, dbr.DBR_SHORT)
      elif pv_dbr == dbr.DBR_ENUM:
          # This is an Enum
          pv_choices = get_unique(
@@ -224,7 +301,7 @@ class CaSignalBackend(SignalBackend[T]):

      async def get_datakey(self, source: str) -> DataKey:
          value = await self._caget(FORMAT_CTRL)
-         return self.converter.get_datakey(source, value)
+         return self.converter.get_datakey(value)

      async def get_reading(self) -> Reading:
          value = await self._caget(FORMAT_TIME)
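Illustrative only: for a scalar DBR_DOUBLE PV carrying display metadata, _data_key_from_augmented_value now produces a DataKey of roughly this shape (PV name and all field values hypothetical):

    {
        "source": "ca://SIM:TEMP",
        "dtype": "number",
        "shape": [],
        "units": "K",
        "precision": 2,
        "limits": {
            "alarm": {"low": None, "high": None},
            "control": {"low": 0.0, "high": 100.0},
            "display": {"low": 0.0, "high": 100.0},
            "warning": {"low": 10.0, "high": 90.0},
        },
    }
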
ophyd_async/epics/_backend/_p4p.py CHANGED
@@ -4,6 +4,7 @@ import logging
  import time
  from dataclasses import dataclass
  from enum import Enum
+ from math import isnan, nan
  from typing import Any, Dict, List, Optional, Sequence, Type, Union

  from bluesky.protocols import DataKey, Dtype, Reading
@@ -20,7 +21,7 @@ from ophyd_async.core import (
  )
  from ophyd_async.core.utils import DEFAULT_TIMEOUT, NotConnected

- from .common import get_supported_values
+ from .common import LimitPair, Limits, common_meta, get_supported_values

  # https://mdavidsaver.github.io/p4p/values.html
  specifier_to_dtype: Dict[str, Dtype] = {
@@ -39,6 +40,67 @@ specifier_to_dtype: Dict[str, Dtype] = {
  }


+ def _data_key_from_value(
+     source: str,
+     value: Value,
+     *,
+     shape: Optional[list[int]] = None,
+     choices: Optional[list[str]] = None,
+     dtype: Optional[str] = None,
+ ) -> DataKey:
+     """
+     Args:
+         value (Value): Description of the the return type of a DB record
+         shape: Optional override shape when len(shape) > 1
+         choices: Optional list of enum choices to pass as metadata in the datakey
+         dtype: Optional override dtype when AugmentedValue is ambiguous, e.g. booleans
+
+     Returns:
+         DataKey: A rich DataKey describing the DB record
+     """
+     shape = shape or []
+     dtype = dtype or specifier_to_dtype[value.type().aspy("value")]
+     display_data = getattr(value, "display", None)
+
+     d = DataKey(
+         source=source,
+         dtype=dtype,
+         shape=shape,
+     )
+     if display_data is not None:
+         for key in common_meta:
+             attr = getattr(display_data, key, nan)
+             if isinstance(attr, str) or not isnan(attr):
+                 d[key] = attr
+
+     if choices is not None:
+         d["choices"] = choices
+
+     if limits := _limits_from_value(value):
+         d["limits"] = limits
+
+     return d
+
+
+ def _limits_from_value(value: Value) -> Limits:
+     def get_limits(
+         substucture_name: str, low_name: str = "limitLow", high_name: str = "limitHigh"
+     ) -> LimitPair:
+         substructure = getattr(value, substucture_name, None)
+         low = getattr(substructure, low_name, nan)
+         high = getattr(substructure, high_name, nan)
+         return LimitPair(
+             low=None if isnan(low) else low, high=None if isnan(high) else high
+         )
+
+     return Limits(
+         alarm=get_limits("valueAlarm", "lowAlarmLimit", "highAlarmLimit"),
+         control=get_limits("control"),
+         display=get_limits("display"),
+         warning=get_limits("valueAlarm", "lowWarningLimit", "highWarningLimit"),
+     )
+
+
  class PvaConverter:
      def write_value(self, value):
          return value
@@ -56,8 +118,7 @@ class PvaConverter:
          }

      def get_datakey(self, source: str, value) -> DataKey:
-         dtype = specifier_to_dtype[value.type().aspy("value")]
-         return {"source": source, "dtype": dtype, "shape": []}
+         return _data_key_from_value(source, value)

      def metadata_fields(self) -> List[str]:
          """
@@ -74,7 +135,9 @@ class PvaConverter:

  class PvaArrayConverter(PvaConverter):
      def get_datakey(self, source: str, value) -> DataKey:
-         return {"source": source, "dtype": "array", "shape": [len(value["value"])]}
+         return _data_key_from_value(
+             source, value, dtype="array", shape=[len(value["value"])]
+         )


  class PvaNDArrayConverter(PvaConverter):
@@ -98,7 +161,7 @@ class PvaNDArrayConverter(PvaConverter):

      def get_datakey(self, source: str, value) -> DataKey:
          dims = self._get_dimensions(value)
-         return {"source": source, "dtype": "array", "shape": dims}
+         return _data_key_from_value(source, value, dtype="array", shape=dims)

      def write_value(self, value):
          # No clear use-case for writing directly to an NDArray, and some
@@ -109,6 +172,11 @@ class PvaNDArrayConverter(PvaConverter):

  @dataclass
  class PvaEnumConverter(PvaConverter):
+     """To prevent issues when a signal is restarted and returns with different enum
+     values or orders, we put treat an Enum signal as a string, and cache the
+     choices on this class.
+     """
+
      def __init__(self, choices: dict[str, str]):
          self.choices = tuple(choices.values())

@@ -122,20 +190,17 @@ class PvaEnumConverter(PvaConverter):
          return self.choices[value["value"]["index"]]

      def get_datakey(self, source: str, value) -> DataKey:
-         return {
-             "source": source,
-             "dtype": "string",
-             "shape": [],
-             "choices": list(self.choices),
-         }
+         return _data_key_from_value(
+             source, value, choices=list(self.choices), dtype="string"
+         )


- class PvaEnumBoolConverter(PvaConverter):
+ class PvaEmumBoolConverter(PvaConverter):
      def value(self, value):
-         return value["value"]["index"]
+         return bool(value["value"]["index"])

      def get_datakey(self, source: str, value) -> DataKey:
-         return {"source": source, "dtype": "integer", "shape": []}
+         return _data_key_from_value(source, value, dtype="bool")


  class PvaTableConverter(PvaConverter):
@@ -144,7 +209,7 @@ class PvaTableConverter(PvaConverter):

      def get_datakey(self, source: str, value) -> DataKey:
          # This is wrong, but defer until we know how to actually describe a table
-         return {"source": source, "dtype": "object", "shape": []}  # type: ignore
+         return _data_key_from_value(source, value, dtype="object")


  class PvaDictConverter(PvaConverter):
@@ -213,7 +278,7 @@ def make_converter(datatype: Optional[Type], values: Dict[str, Any]) -> PvaConve
          )
          if pv_choices_len != 2:
              raise TypeError(f"{pv} has {pv_choices_len} choices, can't map to bool")
-         return PvaEnumBoolConverter()
+         return PvaEmumBoolConverter()
      elif "NTEnum" in typeid:
          # This is an Enum
          pv_choices = get_unique(
ophyd_async/epics/_backend/common.py CHANGED
@@ -1,5 +1,28 @@
  from enum import Enum
- from typing import Dict, Optional, Tuple, Type
+ from typing import Dict, Optional, Tuple, Type, TypedDict
+
+ common_meta = {
+     "units",
+     "precision",
+ }
+
+
+ class LimitPair(TypedDict):
+     high: float | None
+     low: float | None
+
+     def __bool__(self) -> bool:
+         return self.low is None and self.high is None
+
+
+ class Limits(TypedDict):
+     alarm: LimitPair
+     control: LimitPair
+     display: LimitPair
+     warning: LimitPair
+
+     def __bool__(self) -> bool:
+         return any(self.alarm, self.control, self.display, self.warning)


  def get_supported_values(
ophyd_async/epics/pvi/pvi.py CHANGED
@@ -179,7 +179,15 @@ def _mock_common_blocks(device: Device, stripped_type: Optional[Type] = None):
          sub_device_2 = device_cls(SoftSignalBackend(signal_dtype))
          sub_device = DeviceVector({1: sub_device_1, 2: sub_device_2})
      else:
-         sub_device = DeviceVector({1: device_cls(), 2: device_cls()})
+         if hasattr(device, device_name):
+             sub_device = getattr(device, device_name)
+         else:
+             sub_device = DeviceVector(
+                 {
+                     1: device_cls(),
+                     2: device_cls(),
+                 }
+             )

      for sub_device_in_vector in sub_device.values():
          _mock_common_blocks(sub_device_in_vector, stripped_type=device_cls)
@@ -296,7 +304,9 @@ async def fill_pvi_entries(


  def create_children_from_annotations(
-     device: Device, included_optional_fields: Tuple[str, ...] = ()
+     device: Device,
+     included_optional_fields: Tuple[str, ...] = (),
+     device_vectors: Optional[Dict[str, int]] = None,
  ):
      """For intializing blocks at __init__ of ``device``."""
      for name, device_type in get_type_hints(type(device)).items():
@@ -307,12 +317,22 @@ def create_children_from_annotations(
              continue
          is_device_vector, device_type = _strip_device_vector(device_type)
          if (
-             is_device_vector
+             (is_device_vector and (not device_vectors or name not in device_vectors))
              or ((origin := get_origin(device_type)) and issubclass(origin, Signal))
              or (isclass(device_type) and issubclass(device_type, Signal))
          ):
              continue

-         sub_device = device_type()
-         setattr(device, name, sub_device)
-         create_children_from_annotations(sub_device)
+         if is_device_vector:
+             n_device_vector = DeviceVector(
+                 {i: device_type() for i in range(1, device_vectors[name] + 1)}
+             )
+             setattr(device, name, n_device_vector)
+             for sub_device in n_device_vector.values():
+                 create_children_from_annotations(
+                     sub_device, device_vectors=device_vectors
+                 )
+         else:
+             sub_device = device_type()
+             setattr(device, name, sub_device)
+             create_children_from_annotations(sub_device, device_vectors=device_vectors)
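A hypothetical call showing the new device_vectors argument, which pre-sizes a DeviceVector of children instead of leaving it to later PVI introspection (block name and count invented for illustration):

    # Creates device.pulse = DeviceVector({1: PulseBlock(), ..., 4: PulseBlock()})
    create_children_from_annotations(device, device_vectors={"pulse": 4})
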
ophyd_async/panda/_common_blocks.py CHANGED
@@ -3,7 +3,7 @@ from __future__ import annotations
  from enum import Enum

  from ophyd_async.core import Device, DeviceVector, SignalR, SignalRW
- from ophyd_async.panda._table import SeqTable
+ from ophyd_async.panda._table import DatasetTable, SeqTable


  class DataBlock(Device):
@@ -14,6 +14,7 @@ class DataBlock(Device):
      num_captured: SignalR[int]
      capture: SignalRW[bool]
      flush_period: SignalRW[float]
+     datasets: SignalR[DatasetTable]


  class PulseBlock(Device):
ophyd_async/panda/_hdf_panda.py CHANGED
@@ -28,9 +28,7 @@ class HDFPanda(CommonPandaBlocks, StandardDetector):
          create_children_from_annotations(self)
          controller = PandaPcapController(pcap=self.pcap)
          writer = PandaHDFWriter(
-             prefix=prefix,
              directory_provider=directory_provider,
-             name_provider=lambda: name,
              panda_device=self,
          )
          super().__init__(
ophyd_async/panda/_table.py CHANGED
@@ -6,6 +6,16 @@ import numpy as np
  import numpy.typing as npt


+ class PandaHdf5DatasetType(str, Enum):
+     FLOAT_64 = "float64"
+     UINT_32 = "uint32"
+
+
+ class DatasetTable(TypedDict):
+     name: npt.NDArray[np.str_]
+     hdf5_type: Sequence[PandaHdf5DatasetType]
+
+
  class SeqTrigger(str, Enum):
      IMMEDIATE = "Immediate"
      BITA_0 = "BITA=0"
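A construction sketch for the new table type (dataset names invented for illustration):

    import numpy as np

    table = DatasetTable(
        name=np.array(["COUNTER1.OUT.Value", "INENC1.VAL.Value"]),
        hdf5_type=[PandaHdf5DatasetType.FLOAT_64, PandaHdf5DatasetType.UINT_32],
    )
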
ophyd_async/panda/writers/_hdf_writer.py CHANGED
@@ -1,8 +1,6 @@
  import asyncio
- from dataclasses import dataclass
- from enum import Enum
  from pathlib import Path
- from typing import Any, AsyncGenerator, AsyncIterator, Dict, List, Optional
+ from typing import AsyncGenerator, AsyncIterator, Dict, List, Optional

  from bluesky.protocols import DataKey, StreamAsset
  from p4p.client.thread import Context
@@ -10,98 +8,25 @@ from p4p.client.thread import Context
  from ophyd_async.core import (
      DEFAULT_TIMEOUT,
      DetectorWriter,
-     Device,
      DirectoryProvider,
-     NameProvider,
-     SignalR,
      wait_for_value,
  )
  from ophyd_async.core.signal import observe_value
- from ophyd_async.panda import CommonPandaBlocks

+ from .._common_blocks import CommonPandaBlocks
  from ._panda_hdf_file import _HDFDataset, _HDFFile


- class Capture(str, Enum):
-     # Capture signals for the HDF Panda
-     No = "No"
-     Value = "Value"
-     Diff = "Diff"
-     Sum = "Sum"
-     Mean = "Mean"
-     Min = "Min"
-     Max = "Max"
-     MinMax = "Min Max"
-     MinMaxMean = "Min Max Mean"
-
-
- def get_capture_signals(
-     block: Device, path_prefix: Optional[str] = ""
- ) -> Dict[str, SignalR]:
-     """Get dict mapping a capture signal's name to the signal itself"""
-     if not path_prefix:
-         path_prefix = ""
-     signals: Dict[str, SignalR[Any]] = {}
-     for attr_name, attr in block.children():
-         # Capture signals end in _capture, but num_capture is a red herring
-         if attr_name == "num_capture":
-             continue
-         dot_path = f"{path_prefix}{attr_name}"
-         if isinstance(attr, SignalR) and attr_name.endswith("_capture"):
-             signals[dot_path] = attr
-         attr_signals = get_capture_signals(attr, path_prefix=dot_path + ".")
-         signals.update(attr_signals)
-     return signals
-
-
- @dataclass
- class CaptureSignalWrapper:
-     signal: SignalR
-     capture_type: Capture
-
-
- # This should return a dictionary which contains a dict, containing the Capture
- # signal object, and the value of that signal
- async def get_signals_marked_for_capture(
-     capture_signals: Dict[str, SignalR],
- ) -> Dict[str, CaptureSignalWrapper]:
-     # Read signals to see if they should be captured
-     do_read = [signal.get_value() for signal in capture_signals.values()]
-
-     signal_values = await asyncio.gather(*do_read)
-
-     assert len(signal_values) == len(
-         capture_signals
-     ), "Length of read signals are different to length of signals"
-
-     signals_to_capture: Dict[str, CaptureSignalWrapper] = {}
-     for signal_path, signal_object, signal_value in zip(
-         capture_signals.keys(), capture_signals.values(), signal_values
-     ):
-         signal_path = signal_path.replace("_capture", "")
-         if (signal_value in iter(Capture)) and (signal_value != Capture.No):
-             signals_to_capture[signal_path] = CaptureSignalWrapper(
-                 signal_object,
-                 signal_value,
-             )
-
-     return signals_to_capture
-
-
  class PandaHDFWriter(DetectorWriter):
      _ctxt: Optional[Context] = None

      def __init__(
          self,
-         prefix: str,
          directory_provider: DirectoryProvider,
-         name_provider: NameProvider,
          panda_device: CommonPandaBlocks,
      ) -> None:
          self.panda_device = panda_device
-         self._prefix = prefix
          self._directory_provider = directory_provider
-         self._name_provider = name_provider
          self._datasets: List[_HDFDataset] = []
          self._file: Optional[_HDFFile] = None
          self._multiplier = 1
@@ -110,14 +35,9 @@ class PandaHDFWriter(DetectorWriter):
      async def open(self, multiplier: int = 1) -> Dict[str, DataKey]:
          """Retrieve and get descriptor of all PandA signals marked for capture"""

-         # Get capture PVs by looking at panda. Gives mapping of dotted attribute path
-         # to Signal object
-         self.capture_signals = get_capture_signals(self.panda_device)
-
          # Ensure flushes are immediate
          await self.panda_device.data.flush_period.set(0)

-         to_capture = await get_signals_marked_for_capture(self.capture_signals)
          self._file = None
          info = self._directory_provider()
          # Set the initial values
@@ -133,36 +53,21 @@ class PandaHDFWriter(DetectorWriter):

          # Wait for it to start, stashing the status that tells us when it finishes
          await self.panda_device.data.capture.set(True)
-         name = self._name_provider()
          if multiplier > 1:
              raise ValueError(
                  "All PandA datasets should be scalar, multiplier should be 1"
              )
-         self._datasets = []
-         for attribute_path, capture_signal in to_capture.items():
-             split_path = attribute_path.split(".")
-             signal_name = split_path[-1]
-             # Get block names from numbered blocks, eg INENC[1]
-             block_name = (
-                 f"{split_path[-3]}{split_path[-2]}"
-                 if split_path[-2].isnumeric()
-                 else split_path[-2]
-             )

-             for suffix in capture_signal.capture_type.split(" "):
-                 self._datasets.append(
-                     _HDFDataset(
-                         name,
-                         block_name,
-                         f"{name}-{block_name}-{signal_name}-{suffix}",
-                         f"{block_name}-{signal_name}".upper() + f"-{suffix}",
-                         [1],
-                         multiplier=1,
-                     )
-                 )
+         return await self._describe()

+     async def _describe(self) -> Dict[str, DataKey]:
+         """
+         Return a describe based on the datasets PV
+         """
+
+         await self._update_datasets()
          describe = {
-             ds.name: DataKey(
+             ds.data_key: DataKey(
                  source=self.panda_device.data.hdf_directory.source,
                  shape=ds.shape,
                  dtype="array" if ds.shape != [1] else "number",
@@ -172,6 +77,18 @@ class PandaHDFWriter(DetectorWriter):
          }
          return describe

+     async def _update_datasets(self) -> None:
+         """
+         Load data from the datasets PV on the panda, update internal
+         representation of datasets that the panda will write.
+         """
+
+         capture_table = await self.panda_device.data.datasets.get_value()
+         self._datasets = [
+             _HDFDataset(dataset_name, "/" + dataset_name, [1], multiplier=1)
+             for dataset_name in capture_table["name"]
+         ]
+
      # Next few functions are exactly the same as AD writer. Could move as default
      # StandardDetector behavior
      async def wait_for_index(
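The net effect, as a hedged sketch: describe() is now driven entirely by the PandA's own datasets PV rather than by walking *_capture signals. Assuming a hypothetical table row named "COUNTER1.OUT.Value", _update_datasets would build

    _HDFDataset("COUNTER1.OUT.Value", "/COUNTER1.OUT.Value", [1], multiplier=1)

and describe() would return an entry keyed "COUNTER1.OUT.Value" with shape [1] and dtype "number".
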
ophyd_async/panda/writers/_panda_hdf_file.py CHANGED
@@ -9,10 +9,8 @@ from ophyd_async.core import DirectoryInfo

  @dataclass
  class _HDFDataset:
-     device_name: str
-     block: str
-     name: str
-     path: str
+     data_key: str
+     internal_path: str
      shape: List[int]
      multiplier: int

@@ -29,12 +27,10 @@ class _HDFFile:
          compose_stream_resource(
              spec="AD_HDF5_SWMR_SLICE",
              root=str(directory_info.root),
-             data_key=ds.name,
+             data_key=ds.data_key,
              resource_path=(f"{str(directory_info.root)}/{full_file_name}"),
              resource_kwargs={
-                 "name": ds.name,
-                 "block": ds.block,
-                 "path": ds.path,
+                 "path": ds.internal_path,
                  "multiplier": ds.multiplier,
                  "timestamps": "/entry/instrument/NDAttributes/NDArrayTimeStamp",
              },
ophyd_async-{0.3.1a1 → 0.3.2}.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: ophyd-async
- Version: 0.3.1a1
+ Version: 0.3.2
  Summary: Asynchronous Bluesky hardware abstraction code, compatible with control systems like EPICS and Tango
  Author-email: Tom Cobb <tom.cobb@diamond.ac.uk>
  License: BSD 3-Clause License
@@ -41,7 +41,7 @@ Requires-Python: >=3.10
  Description-Content-Type: text/markdown
  License-File: LICENSE
  Requires-Dist: networkx >=2.0
- Requires-Dist: numpy
+ Requires-Dist: numpy <2.0.0
  Requires-Dist: packaging
  Requires-Dist: pint
  Requires-Dist: bluesky >=1.13.0a3
ophyd_async-{0.3.1a1 → 0.3.2}.dist-info/RECORD RENAMED
@@ -1,6 +1,6 @@
  ophyd_async/__init__.py,sha256=v-rRiDOgZ3sQSMQKq0vgUQZvpeOkoHFXissAx6Ktg84,61
  ophyd_async/__main__.py,sha256=G-Zcv_G9zK7Nhx6o5L5w-wyhMxdl_WgyMELu8IMFqAE,328
- ophyd_async/_version.py,sha256=39a0mpBwpK0E4r9y1sT8m6uZMtoy7wbaTItax3Zomyc,413
+ ophyd_async/_version.py,sha256=9jP8Fo8egXoMs_T3DFqSuJYg4n9o9mnwYubl_hnut4k,411
  ophyd_async/log.py,sha256=DbMjt0bkfUOLHIinZYt0Q0FHZmCXXi5x8y0uFiEmqoQ,3587
  ophyd_async/protocols.py,sha256=EF2W9nfElV-0QNMYrX1zusL1PqDJR3kNsjlalR29j0I,3412
  ophyd_async/core/__init__.py,sha256=znjVeRfrDVJbGLEkUczeKMW46kV6HDrlE4lV0SqvZt4,2952
@@ -8,20 +8,20 @@ ophyd_async/core/_providers.py,sha256=LrlTMPHKXWOPVkpAOw-pqBq0kip-c3C9ZZPoFfiaV4
  ophyd_async/core/async_status.py,sha256=9TOgOXIAuH62RDo5t-Y5GdjrJ76d_6TFlBxYv-5_a88,4367
  ophyd_async/core/detector.py,sha256=NMX8y_yiViHbv3CaJ7LxzXYkH6tCWI3LocpQ3w4lGEQ,11176
  ophyd_async/core/device.py,sha256=280zFnLCoiMZAA-Dh1_AjUSnhxUfKYGgj4H_2S1njOA,7086
- ophyd_async/core/device_save_loader.py,sha256=RXA3dPUPihAR2ZGDStlGiA-TAsr_xqL0snsCjMsMnfA,9138
+ ophyd_async/core/device_save_loader.py,sha256=EK7FB5oWiLI_s2KZ1BNDQ2AUKVbFLlimMl0vXfsjcgo,8223
  ophyd_async/core/flyer.py,sha256=bIjzBkrl8HVAlKgsZ_FF0WL69Qvksyzp9ZWmTLl8Yrw,2304
  ophyd_async/core/mock_signal_backend.py,sha256=Ug6jK72wm9vM6EueoUrYgcXtiFzdPUEISRe86LdyYKc,2844
  ophyd_async/core/mock_signal_utils.py,sha256=LE8VxNq3jfaTePnHHpZpKCi1vwKi8EIg-g1jfw-Q5bQ,4726
- ophyd_async/core/signal.py,sha256=FbTb5qDPLhVxEbh6gimqXfkZwcqB4ymHTEYVXZVZYrk,16456
+ ophyd_async/core/signal.py,sha256=hPpMcdq7zx6HOkyQCJrMD5F3uLBEJJTfwL6DsbcTELo,17601
  ophyd_async/core/signal_backend.py,sha256=fT3q0WED3JHmNKYCs7PzDLCK4cUPVin3wQjDNPdHqAY,1525
- ophyd_async/core/soft_signal_backend.py,sha256=56zvcEi4c8n1yYbafTbp7X0VhSkhoehm3L8RBhu2fik,5596
+ ophyd_async/core/soft_signal_backend.py,sha256=n1Wp0jzgKTkQPlDnlUIr8Bm4JUwWc97dXiOWGjpwf4s,5926
  ophyd_async/core/standard_readable.py,sha256=fhq_WAZtLYWrw6DvvrFRYRAPOUP2_IcX4qLucoEEeOg,9049
  ophyd_async/core/utils.py,sha256=3oZcXNqAUHX4ZWMBH5gSuK6cFWEhSkZ9GSDYv0pf8jc,5783
  ophyd_async/epics/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  ophyd_async/epics/_backend/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- ophyd_async/epics/_backend/_aioca.py,sha256=cpPNZmRMi7FnAh2-3ec5uklLVFOqsmEmpI1nh5Ud1Ls,8794
- ophyd_async/epics/_backend/_p4p.py,sha256=lIKx7kQ2o8h3M4wPwrq8JBo8xuDnasJxpDH2ATVBY78,12257
- ophyd_async/epics/_backend/common.py,sha256=16mAuxDwA3eZFjUW8DHMabaW3CtEI0Qe8DLpP2xlW7Y,814
+ ophyd_async/epics/_backend/_aioca.py,sha256=YsKHGaHtYjBte5Tz-AftTENaeV6UGRnonX-1UmAQGqg,11319
+ ophyd_async/epics/_backend/_p4p.py,sha256=oCT8MeVWlhmsxZ8YRSrelrY8W3NvfpXcMlfAKL_AUNY,14331
+ ophyd_async/epics/_backend/common.py,sha256=VDL4hiSn-C_yF3-PZoc46IXnmOHyrClNwIwLzEvx9Ww,1259
  ophyd_async/epics/areadetector/__init__.py,sha256=ViKzx-wUxkRyNR33wfpL11QB97o0J47_KMyI2C_NphI,510
  ophyd_async/epics/areadetector/aravis.py,sha256=YklN4V0loqUQBs4swVX304N49JIGPvrNOk8iA5EWofg,2127
  ophyd_async/epics/areadetector/kinetix.py,sha256=7rE2MLnz9DEmeiN9pCekDfpXuZ2DErnMajRp_9eoLZY,1359
@@ -54,20 +54,20 @@ ophyd_async/epics/demo/sensor.db,sha256=AVtiydrdtwAz2EFurO2Ult9SSRtre3r0akOBbL98
  ophyd_async/epics/motion/__init__.py,sha256=tnmVRIwKa9PdN_xonJdAUD04UpEceh-hoD7XI62yDB0,46
  ophyd_async/epics/motion/motor.py,sha256=G8cc-okSXJ6s2fGxRO155xm7PrBbVImBmBMRWts895k,3630
  ophyd_async/epics/pvi/__init__.py,sha256=TbOQNY4enQWgtr1T7x129vpo2p7FIFlr8cyZqqv5Lk4,158
- ophyd_async/epics/pvi/pvi.py,sha256=PJdY3rCRyIQbsbHDru-TJ-IVOItyaQwCQKAC0Widu6A,11363
+ ophyd_async/epics/pvi/pvi.py,sha256=Kc3klnA9F82h_p2atFYXe-wFO9OzN5TV69Tc56tD2do,12204
  ophyd_async/epics/signal/__init__.py,sha256=JXKBSGpRL9y3auh27JRxsqDn_rBOXpJjtd4nCuDOX2g,261
  ophyd_async/epics/signal/_epics_transport.py,sha256=DEIL0iYUAWssysVEgWGu1fHSM1l-ATV2kjUgPtDN9LY,858
  ophyd_async/epics/signal/signal.py,sha256=M8ZVG_zLdYJfroCRX-u_w8c3yIhswSRw8e3RkW2szio,3166
  ophyd_async/panda/__init__.py,sha256=FuSnvp-RtdA0X4RcHEF0nTiXymRts2MNdFmF_1_i41w,775
- ophyd_async/panda/_common_blocks.py,sha256=n0PPc1rar43oDSIA-yNubTc8fR5YCW1tyjQU58whsg0,1038
- ophyd_async/panda/_hdf_panda.py,sha256=TWzBnyJcLmltQyOr5nXbCAZdVRqY633ogBX6pY06p3g,1375
+ ophyd_async/panda/_common_blocks.py,sha256=OrA_mSJslxuhjIAD8Liye1-1yTl3DihqFDBXvJGlPFs,1088
+ ophyd_async/panda/_hdf_panda.py,sha256=LWWQErG_cxM7zWqKG8c40O55ZpfTu3F70d4jVvRaeMo,1308
  ophyd_async/panda/_panda_controller.py,sha256=dIqcjmaIHVrki8UXSoDx46kk6I2Lhpe2o3sXNg5f-RQ,1238
- ophyd_async/panda/_table.py,sha256=dLoRP4zYNOkD_s0Vkp2wVYAwkjVG8nNdf8-FaXOTfPo,5655
+ ophyd_async/panda/_table.py,sha256=keCGT66y91feO_MD3a6aMsgx27JuLYWLrAkl0lRXUKY,5854
  ophyd_async/panda/_trigger.py,sha256=tBH8uq_4o1ASG9yofVxq3tjf5v8LPzniDTRL4yjramI,1195
  ophyd_async/panda/_utils.py,sha256=VHW5kPVISyEkmse_qQcyisBkkEwMO6GG2Ago-CH1AFA,487
  ophyd_async/panda/writers/__init__.py,sha256=xy7BguVQG4HNIDBfKPjMj0KQo1tptC9LbCpEuMcVGaM,70
- ophyd_async/panda/writers/_hdf_writer.py,sha256=vnyIg3JmlzMIIq75o0IDMfGzBm_GJAhOUisAZE_0cyg,7597
- ophyd_async/panda/writers/_panda_hdf_file.py,sha256=42iHaTax4JjOBpNC7d4nkNL9SM14OTnFPTIcXv2jg-4,1759
+ ophyd_async/panda/writers/_hdf_writer.py,sha256=SP71y2-LTKhlNIFxLIddmtjmrg6MDwGvuwQ7X-5OP98,4698
+ ophyd_async/panda/writers/_panda_hdf_file.py,sha256=WnGvNWuLe4KljhlmBLM4Y0HKSJCIBNWOwXxioPkGu6Y,1673
  ophyd_async/plan_stubs/__init__.py,sha256=nO9ELG9J7fYwfVTVRWVorz4kffeszYpwk1ROh6Ha--w,405
  ophyd_async/plan_stubs/ensure_connected.py,sha256=1MkDu8UqVRPHLnW9IXRn-QvKiG8-rCV8T4KDbjf9K6w,557
  ophyd_async/plan_stubs/fly.py,sha256=fQwBeLw57-NeBsroVxKDa8kpuu6fgTWYWimbsatCL28,4999
@@ -78,9 +78,9 @@ ophyd_async/sim/sim_pattern_detector_writer.py,sha256=ESpcVyHd1TP7Cojznv2hJAwLin
  ophyd_async/sim/sim_pattern_generator.py,sha256=L4jTnEVUFBRXIWq_UMHqx00YDdbGO2pjo_IuuVwpzXE,1258
  ophyd_async/sim/demo/__init__.py,sha256=9mxKpslrL89cfSj4g3og8Br3O--pMj3hhWZS-Xu6kyA,56
  ophyd_async/sim/demo/sim_motor.py,sha256=a2p5wnHXjF-V5zOFai7jnszk4kbGmrZRnUqBtkOgEfQ,3733
- ophyd_async-0.3.1a1.dist-info/LICENSE,sha256=pU5shZcsvWgz701EbT7yjFZ8rMvZcWgRH54CRt8ld_c,1517
- ophyd_async-0.3.1a1.dist-info/METADATA,sha256=UzvVcuHH--IPgAEy0SCsfaBBUXHs6QcDJkzuw4l3eDs,6287
- ophyd_async-0.3.1a1.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
- ophyd_async-0.3.1a1.dist-info/entry_points.txt,sha256=O0YNJTEufO0w9BozXi-JurTy2U1_o0ypeCgJLQ727Jk,58
- ophyd_async-0.3.1a1.dist-info/top_level.txt,sha256=-hjorMsv5Rmjo3qrgqhjpal1N6kW5vMxZO3lD4iEaXs,12
- ophyd_async-0.3.1a1.dist-info/RECORD,,
+ ophyd_async-0.3.2.dist-info/LICENSE,sha256=pU5shZcsvWgz701EbT7yjFZ8rMvZcWgRH54CRt8ld_c,1517
+ ophyd_async-0.3.2.dist-info/METADATA,sha256=Jb_3qY-9kWt64Ko26THH3TIjLgYf_2dzEccB1cGa2q4,6292
+ ophyd_async-0.3.2.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+ ophyd_async-0.3.2.dist-info/entry_points.txt,sha256=O0YNJTEufO0w9BozXi-JurTy2U1_o0ypeCgJLQ727Jk,58
+ ophyd_async-0.3.2.dist-info/top_level.txt,sha256=-hjorMsv5Rmjo3qrgqhjpal1N6kW5vMxZO3lD4iEaXs,12
+ ophyd_async-0.3.2.dist-info/RECORD,,