ophyd-async 0.3rc2__py3-none-any.whl → 0.3.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ophyd_async/_version.py CHANGED
@@ -12,5 +12,5 @@ __version__: str
12
12
  __version_tuple__: VERSION_TUPLE
13
13
  version_tuple: VERSION_TUPLE
14
14
 
15
- __version__ = version = '0.3rc2'
16
- __version_tuple__ = version_tuple = (0, 3)
15
+ __version__ = version = '0.3.1'
16
+ __version_tuple__ = version_tuple = (0, 3, 1)
@@ -63,6 +63,8 @@ class TriggerInfo:
63
63
  deadtime: float
64
64
  #: What is the maximum high time of the triggers
65
65
  livetime: float
66
+ #: What is the maximum timeout on waiting for a frame
67
+ frame_timeout: float | None = None
66
68
 
67
69
 
68
70
  class DetectorControl(ABC):
@@ -162,7 +164,6 @@ class StandardDetector(
162
164
  writer: DetectorWriter,
163
165
  config_sigs: Sequence[AsyncReadable] = (),
164
166
  name: str = "",
165
- writer_timeout: float = DEFAULT_TIMEOUT,
166
167
  ) -> None:
167
168
  """
168
169
  Constructor
@@ -173,16 +174,11 @@ class StandardDetector(
173
174
  config_sigs: Signals to read when describe and read
174
175
  configuration are called. Defaults to ().
175
176
  name: Device name. Defaults to "".
176
- writer_timeout: Timeout for frame writing to start, if the
177
- timeout is reached, ophyd-async assumes the detector
178
- has a problem and raises an error.
179
- Defaults to DEFAULT_TIMEOUT.
180
177
  """
181
178
  self._controller = controller
182
179
  self._writer = writer
183
180
  self._describe: Dict[str, DataKey] = {}
184
181
  self._config_sigs = list(config_sigs)
185
- self._frame_writing_timeout = writer_timeout
186
182
  # For prepare
187
183
  self._arm_status: Optional[AsyncStatus] = None
188
184
  self._trigger_info: Optional[TriggerInfo] = None
@@ -245,17 +241,21 @@ class StandardDetector(
245
241
 
246
242
  @AsyncStatus.wrap
247
243
  async def trigger(self) -> None:
244
+ # set default trigger_info
245
+ self._trigger_info = TriggerInfo(
246
+ num=1, trigger=DetectorTrigger.internal, deadtime=0.0, livetime=0.0
247
+ )
248
248
  # Arm the detector and wait for it to finish.
249
249
  indices_written = await self.writer.get_indices_written()
250
250
  written_status = await self.controller.arm(
251
- num=1,
252
- trigger=DetectorTrigger.internal,
251
+ num=self._trigger_info.num,
252
+ trigger=self._trigger_info.trigger,
253
253
  )
254
254
  await written_status
255
255
  end_observation = indices_written + 1
256
256
 
257
257
  async for index in self.writer.observe_indices_written(
258
- self._frame_writing_timeout
258
+ DEFAULT_TIMEOUT + self._trigger_info.livetime + self._trigger_info.deadtime
259
259
  ):
260
260
  if index >= end_observation:
261
261
  break
@@ -309,7 +309,12 @@ class StandardDetector(
309
309
  assert self._arm_status, "Prepare not run"
310
310
  assert self._trigger_info
311
311
  async for index in self.writer.observe_indices_written(
312
- self._frame_writing_timeout
312
+ self._trigger_info.frame_timeout
313
+ or (
314
+ DEFAULT_TIMEOUT
315
+ + self._trigger_info.livetime
316
+ + self._trigger_info.deadtime
317
+ )
313
318
  ):
314
319
  yield WatcherUpdate(
315
320
  name=self.name,
@@ -1,6 +1,5 @@
1
1
  from enum import Enum
2
- from functools import partial
3
- from typing import Any, Callable, Dict, Generator, List, Optional, Sequence, Union
2
+ from typing import Any, Callable, Dict, Generator, List, Optional, Sequence
4
3
 
5
4
  import numpy as np
6
5
  import numpy.typing as npt
@@ -8,13 +7,10 @@ import yaml
8
7
  from bluesky.plan_stubs import abs_set, wait
9
8
  from bluesky.protocols import Location
10
9
  from bluesky.utils import Msg
11
- from epicscorelibs.ca.dbr import ca_array, ca_float, ca_int, ca_str
12
10
 
13
11
  from .device import Device
14
12
  from .signal import SignalRW
15
13
 
16
- CaType = Union[ca_float, ca_int, ca_str, ca_array]
17
-
18
14
 
19
15
  def ndarray_representer(dumper: yaml.Dumper, array: npt.NDArray[Any]) -> yaml.Node:
20
16
  return dumper.represent_sequence(
@@ -22,19 +18,6 @@ def ndarray_representer(dumper: yaml.Dumper, array: npt.NDArray[Any]) -> yaml.No
22
18
  )
23
19
 
24
20
 
25
- def ca_dbr_representer(dumper: yaml.Dumper, value: CaType) -> yaml.Node:
26
- # if it's an array, just call ndarray_representer...
27
- represent_array = partial(ndarray_representer, dumper)
28
-
29
- representers: Dict[CaType, Callable[[CaType], yaml.Node]] = {
30
- ca_float: dumper.represent_float,
31
- ca_int: dumper.represent_int,
32
- ca_str: dumper.represent_str,
33
- ca_array: represent_array,
34
- }
35
- return representers[type(value)](value)
36
-
37
-
38
21
  class OphydDumper(yaml.Dumper):
39
22
  def represent_data(self, data: Any) -> Any:
40
23
  if isinstance(data, Enum):
@@ -152,11 +135,6 @@ def save_to_yaml(phases: Sequence[Dict[str, Any]], save_path: str) -> None:
152
135
 
153
136
  yaml.add_representer(np.ndarray, ndarray_representer, Dumper=yaml.Dumper)
154
137
 
155
- yaml.add_representer(ca_float, ca_dbr_representer, Dumper=yaml.Dumper)
156
- yaml.add_representer(ca_int, ca_dbr_representer, Dumper=yaml.Dumper)
157
- yaml.add_representer(ca_str, ca_dbr_representer, Dumper=yaml.Dumper)
158
- yaml.add_representer(ca_array, ca_dbr_representer, Dumper=yaml.Dumper)
159
-
160
138
  with open(save_path, "w") as file:
161
139
  yaml.dump(phases, file, Dumper=OphydDumper, default_flow_style=False)
162
140
 
@@ -79,8 +79,22 @@ class _SetValuesIterator:
79
79
  return next_value
80
80
 
81
81
  def __del__(self):
82
- if self.require_all_consumed and self.index != len(list(self.values)):
83
- raise AssertionError("Not all values have been consumed.")
82
+ if self.require_all_consumed:
83
+ # Values is cast to a list here because the user has supplied
84
+ # require_all_consumed=True, we can therefore assume they
85
+ # supplied a finite list.
86
+ # In the case of require_all_consumed=False, an infinite
87
+ # iterble is permitted
88
+ values = list(self.values)
89
+ if self.index != len(values):
90
+ # Report the values consumed and the values yet to be
91
+ # consumed
92
+ consumed = values[0 : self.index]
93
+ to_be_consumed = values[self.index :]
94
+ raise AssertionError(
95
+ f"{self.signal.name}: {consumed} were consumed "
96
+ f"but {to_be_consumed} were not consumed"
97
+ )
84
98
 
85
99
 
86
100
  def set_mock_values(
@@ -57,7 +57,7 @@ class Signal(Device, Generic[T]):
57
57
 
58
58
  def __init__(
59
59
  self,
60
- backend: SignalBackend[T],
60
+ backend: Optional[SignalBackend[T]] = None,
61
61
  timeout: Optional[float] = DEFAULT_TIMEOUT,
62
62
  name: str = "",
63
63
  ) -> None:
@@ -66,13 +66,24 @@ class Signal(Device, Generic[T]):
66
66
  super().__init__(name)
67
67
 
68
68
  async def connect(
69
- self, mock=False, timeout=DEFAULT_TIMEOUT, force_reconnect: bool = False
69
+ self,
70
+ mock=False,
71
+ timeout=DEFAULT_TIMEOUT,
72
+ force_reconnect: bool = False,
73
+ backend: Optional[SignalBackend[T]] = None,
70
74
  ):
75
+ if backend:
76
+ if self._initial_backend and backend is not self._initial_backend:
77
+ raise ValueError(
78
+ "Backend at connection different from initialised one."
79
+ )
80
+ self._backend = backend
71
81
  if mock and not isinstance(self._backend, MockSignalBackend):
72
82
  # Using a soft backend, look to the initial value
73
- self._backend = MockSignalBackend(
74
- initial_backend=self._initial_backend,
75
- )
83
+ self._backend = MockSignalBackend(initial_backend=self._backend)
84
+
85
+ if self._backend is None:
86
+ raise RuntimeError("`connect` called on signal without backend")
76
87
  self.log.debug(f"Connecting to {self.source}")
77
88
  await self._backend.connect(timeout=timeout)
78
89
 
@@ -283,15 +294,15 @@ def soft_signal_r_and_setter(
283
294
 
284
295
 
285
296
  def _generate_assert_error_msg(
286
- name: str, expected_result: str, actuall_result: str
297
+ name: str, expected_result: str, actual_result: str
287
298
  ) -> str:
288
299
  WARNING = "\033[93m"
289
300
  FAIL = "\033[91m"
290
301
  ENDC = "\033[0m"
291
302
  return (
292
303
  f"Expected {WARNING}{name}{ENDC} to produce"
293
- + f"\n{FAIL}{actuall_result}{ENDC}"
294
- + f"\nbut actually got \n{FAIL}{expected_result}{ENDC}"
304
+ + f"\n{FAIL}{expected_result}{ENDC}"
305
+ + f"\nbut actually got \n{FAIL}{actual_result}{ENDC}"
295
306
  )
296
307
 
297
308
 
@@ -313,7 +324,9 @@ async def assert_value(signal: SignalR[T], value: Any) -> None:
313
324
  """
314
325
  actual_value = await signal.get_value()
315
326
  assert actual_value == value, _generate_assert_error_msg(
316
- signal.name, value, actual_value
327
+ name=signal.name,
328
+ expected_result=value,
329
+ actual_result=actual_value,
317
330
  )
318
331
 
319
332
 
@@ -338,7 +351,9 @@ async def assert_reading(
338
351
  """
339
352
  actual_reading = await readable.read()
340
353
  assert expected_reading == actual_reading, _generate_assert_error_msg(
341
- readable.name, expected_reading, actual_reading
354
+ name=readable.name,
355
+ expected_result=expected_reading,
356
+ actual_result=actual_reading,
342
357
  )
343
358
 
344
359
 
@@ -364,7 +379,9 @@ async def assert_configuration(
364
379
  """
365
380
  actual_configurable = await configurable.read_configuration()
366
381
  assert configuration == actual_configurable, _generate_assert_error_msg(
367
- configurable.name, configuration, actual_configurable
382
+ name=configurable.name,
383
+ expected_result=configuration,
384
+ actual_result=actual_configurable,
368
385
  )
369
386
 
370
387
 
@@ -386,11 +403,15 @@ def assert_emitted(docs: Mapping[str, list[dict]], **numbers: int):
386
403
  resource=1, datum=1, event=1, stop=1)
387
404
  """
388
405
  assert list(docs) == list(numbers), _generate_assert_error_msg(
389
- "documents", list(numbers), list(docs)
406
+ name="documents",
407
+ expected_result=list(numbers),
408
+ actual_result=list(docs),
390
409
  )
391
410
  actual_numbers = {name: len(d) for name, d in docs.items()}
392
411
  assert actual_numbers == numbers, _generate_assert_error_msg(
393
- "emitted", numbers, actual_numbers
412
+ name="emitted",
413
+ expected_result=numbers,
414
+ actual_result=actual_numbers,
394
415
  )
395
416
 
396
417
 
@@ -254,6 +254,8 @@ class HintedSignal(HasHints, AsyncReadable):
254
254
 
255
255
  @property
256
256
  def hints(self) -> Hints:
257
+ if self.signal.name == "":
258
+ return {"fields": []}
257
259
  return {"fields": [self.signal.name]}
258
260
 
259
261
  @classmethod
@@ -2,8 +2,10 @@ import logging
2
2
  import sys
3
3
  from dataclasses import dataclass
4
4
  from enum import Enum
5
- from typing import Any, Dict, Optional, Sequence, Type, Union
5
+ from math import isnan, nan
6
+ from typing import Any, Dict, List, Optional, Type, Union
6
7
 
8
+ import numpy as np
7
9
  from aioca import (
8
10
  FORMAT_CTRL,
9
11
  FORMAT_RAW,
@@ -28,7 +30,7 @@ from ophyd_async.core import (
28
30
  )
29
31
  from ophyd_async.core.utils import DEFAULT_TIMEOUT, NotConnected
30
32
 
31
- from .common import get_supported_values
33
+ from .common import LimitPair, Limits, common_meta, get_supported_values
32
34
 
33
35
  dbr_to_dtype: Dict[Dbr, Dtype] = {
34
36
  dbr.DBR_STRING: "string",
@@ -40,6 +42,64 @@ dbr_to_dtype: Dict[Dbr, Dtype] = {
40
42
  }
41
43
 
42
44
 
45
+ def _data_key_from_augmented_value(
46
+ value: AugmentedValue,
47
+ *,
48
+ choices: Optional[List[str]] = None,
49
+ dtype: Optional[str] = None,
50
+ ) -> DataKey:
51
+ """Use the return value of get with FORMAT_CTRL to construct a DataKey
52
+ describing the signal. See docstring of AugmentedValue for expected
53
+ value fields by DBR type.
54
+
55
+ Args:
56
+ value (AugmentedValue): Description of the the return type of a DB record
57
+ choices: Optional list of enum choices to pass as metadata in the datakey
58
+ dtype: Optional override dtype when AugmentedValue is ambiguous, e.g. booleans
59
+
60
+ Returns:
61
+ DataKey: A rich DataKey describing the DB record
62
+ """
63
+ source = f"ca://{value.name}"
64
+ assert value.ok, f"Error reading {source}: {value}"
65
+
66
+ scalar = value.element_count == 1
67
+ dtype = dtype or dbr_to_dtype[value.datatype]
68
+
69
+ d = DataKey(
70
+ source=source,
71
+ dtype=dtype if scalar else "array",
72
+ # strictly value.element_count >= len(value)
73
+ shape=[] if scalar else [len(value)],
74
+ )
75
+ for key in common_meta:
76
+ attr = getattr(value, key, nan)
77
+ if isinstance(attr, str) or not isnan(attr):
78
+ d[key] = attr
79
+
80
+ if choices is not None:
81
+ d["choices"] = choices
82
+
83
+ if limits := _limits_from_augmented_value(value):
84
+ d["limits"] = limits
85
+
86
+ return d
87
+
88
+
89
+ def _limits_from_augmented_value(value: AugmentedValue) -> Limits:
90
+ def get_limits(limit: str) -> LimitPair:
91
+ low = getattr(value, f"lower_{limit}_limit", None)
92
+ high = getattr(value, f"upper_{limit}_limit", None)
93
+ return LimitPair(low=low, high=high)
94
+
95
+ return Limits(
96
+ alarm=get_limits("alarm"),
97
+ control=get_limits("ctrl"),
98
+ display=get_limits("disp"),
99
+ warning=get_limits("warning"),
100
+ )
101
+
102
+
43
103
  @dataclass
44
104
  class CaConverter:
45
105
  read_dbr: Optional[Dbr]
@@ -49,7 +109,10 @@ class CaConverter:
49
109
  return value
50
110
 
51
111
  def value(self, value: AugmentedValue):
52
- return value
112
+ # for channel access ca_xxx classes, this
113
+ # invokes __pos__ operator to return an instance of
114
+ # the builtin base class
115
+ return +value
53
116
 
54
117
  def reading(self, value: AugmentedValue):
55
118
  return {
@@ -58,8 +121,8 @@ class CaConverter:
58
121
  "alarm_severity": -1 if value.severity > 2 else value.severity,
59
122
  }
60
123
 
61
- def get_datakey(self, source: str, value: AugmentedValue) -> DataKey:
62
- return {"source": source, "dtype": dbr_to_dtype[value.datatype], "shape": []}
124
+ def get_datakey(self, value: AugmentedValue) -> DataKey:
125
+ return _data_key_from_augmented_value(value)
63
126
 
64
127
 
65
128
  class CaLongStrConverter(CaConverter):
@@ -73,12 +136,17 @@ class CaLongStrConverter(CaConverter):
73
136
 
74
137
 
75
138
  class CaArrayConverter(CaConverter):
76
- def get_datakey(self, source: str, value: AugmentedValue) -> DataKey:
77
- return {"source": source, "dtype": "array", "shape": [len(value)]}
139
+ def value(self, value: AugmentedValue):
140
+ return np.array(value, copy=False)
78
141
 
79
142
 
80
143
  @dataclass
81
144
  class CaEnumConverter(CaConverter):
145
+ """To prevent issues when a signal is restarted and returns with different enum
146
+ values or orders, we treat an Enum signal as a string, and cache the
147
+ choices on this class.
148
+ """
149
+
82
150
  choices: dict[str, str]
83
151
 
84
152
  def write_value(self, value: Union[Enum, str]):
@@ -90,13 +158,18 @@ class CaEnumConverter(CaConverter):
90
158
  def value(self, value: AugmentedValue):
91
159
  return self.choices[value]
92
160
 
93
- def get_datakey(self, source: str, value: AugmentedValue) -> DataKey:
94
- return {
95
- "source": source,
96
- "dtype": "string",
97
- "shape": [],
98
- "choices": list(self.choices),
99
- }
161
+ def get_datakey(self, value: AugmentedValue) -> DataKey:
162
+ # Sometimes DBR_TYPE returns as String, must pass choices still
163
+ return _data_key_from_augmented_value(value, choices=list(self.choices.keys()))
164
+
165
+
166
+ @dataclass
167
+ class CaBoolConverter(CaConverter):
168
+ def value(self, value: AugmentedValue) -> bool:
169
+ return bool(value)
170
+
171
+ def get_datakey(self, value: AugmentedValue) -> DataKey:
172
+ return _data_key_from_augmented_value(value, dtype="bool")
100
173
 
101
174
 
102
175
  class DisconnectedCaConverter(CaConverter):
@@ -115,8 +188,10 @@ def make_converter(
115
188
  return CaLongStrConverter()
116
189
  elif is_array and pv_dbr == dbr.DBR_STRING:
117
190
  # Waveform of strings, check we wanted this
118
- if datatype and datatype != Sequence[str]:
119
- raise TypeError(f"{pv} has type [str] not {datatype.__name__}")
191
+ if datatype:
192
+ datatype_dtype = get_dtype(datatype)
193
+ if not datatype_dtype or not np.can_cast(datatype_dtype, np.str_):
194
+ raise TypeError(f"{pv} has type [str] not {datatype.__name__}")
120
195
  return CaArrayConverter(pv_dbr, None)
121
196
  elif is_array:
122
197
  pv_dtype = get_unique({k: v.dtype for k, v in values.items()}, "dtypes")
@@ -136,7 +211,7 @@ def make_converter(
136
211
  )
137
212
  if pv_choices_len != 2:
138
213
  raise TypeError(f"{pv} has {pv_choices_len} choices, can't map to bool")
139
- return CaConverter(dbr.DBR_SHORT, dbr.DBR_SHORT)
214
+ return CaBoolConverter(dbr.DBR_SHORT, dbr.DBR_SHORT)
140
215
  elif pv_dbr == dbr.DBR_ENUM:
141
216
  # This is an Enum
142
217
  pv_choices = get_unique(
@@ -224,7 +299,7 @@ class CaSignalBackend(SignalBackend[T]):
224
299
 
225
300
  async def get_datakey(self, source: str) -> DataKey:
226
301
  value = await self._caget(FORMAT_CTRL)
227
- return self.converter.get_datakey(source, value)
302
+ return self.converter.get_datakey(value)
228
303
 
229
304
  async def get_reading(self) -> Reading:
230
305
  value = await self._caget(FORMAT_TIME)
@@ -4,6 +4,7 @@ import logging
4
4
  import time
5
5
  from dataclasses import dataclass
6
6
  from enum import Enum
7
+ from math import isnan, nan
7
8
  from typing import Any, Dict, List, Optional, Sequence, Type, Union
8
9
 
9
10
  from bluesky.protocols import DataKey, Dtype, Reading
@@ -20,7 +21,7 @@ from ophyd_async.core import (
20
21
  )
21
22
  from ophyd_async.core.utils import DEFAULT_TIMEOUT, NotConnected
22
23
 
23
- from .common import get_supported_values
24
+ from .common import LimitPair, Limits, common_meta, get_supported_values
24
25
 
25
26
  # https://mdavidsaver.github.io/p4p/values.html
26
27
  specifier_to_dtype: Dict[str, Dtype] = {
@@ -39,6 +40,67 @@ specifier_to_dtype: Dict[str, Dtype] = {
39
40
  }
40
41
 
41
42
 
43
+ def _data_key_from_value(
44
+ source: str,
45
+ value: Value,
46
+ *,
47
+ shape: Optional[list[int]] = None,
48
+ choices: Optional[list[str]] = None,
49
+ dtype: Optional[str] = None,
50
+ ) -> DataKey:
51
+ """
52
+ Args:
53
+ value (Value): Description of the the return type of a DB record
54
+ shape: Optional override shape when len(shape) > 1
55
+ choices: Optional list of enum choices to pass as metadata in the datakey
56
+ dtype: Optional override dtype when AugmentedValue is ambiguous, e.g. booleans
57
+
58
+ Returns:
59
+ DataKey: A rich DataKey describing the DB record
60
+ """
61
+ shape = shape or []
62
+ dtype = dtype or specifier_to_dtype[value.type().aspy("value")]
63
+ display_data = getattr(value, "display", None)
64
+
65
+ d = DataKey(
66
+ source=source,
67
+ dtype=dtype,
68
+ shape=shape,
69
+ )
70
+ if display_data is not None:
71
+ for key in common_meta:
72
+ attr = getattr(display_data, key, nan)
73
+ if isinstance(attr, str) or not isnan(attr):
74
+ d[key] = attr
75
+
76
+ if choices is not None:
77
+ d["choices"] = choices
78
+
79
+ if limits := _limits_from_value(value):
80
+ d["limits"] = limits
81
+
82
+ return d
83
+
84
+
85
+ def _limits_from_value(value: Value) -> Limits:
86
+ def get_limits(
87
+ substucture_name: str, low_name: str = "limitLow", high_name: str = "limitHigh"
88
+ ) -> LimitPair:
89
+ substructure = getattr(value, substucture_name, None)
90
+ low = getattr(substructure, low_name, nan)
91
+ high = getattr(substructure, high_name, nan)
92
+ return LimitPair(
93
+ low=None if isnan(low) else low, high=None if isnan(high) else high
94
+ )
95
+
96
+ return Limits(
97
+ alarm=get_limits("valueAlarm", "lowAlarmLimit", "highAlarmLimit"),
98
+ control=get_limits("control"),
99
+ display=get_limits("display"),
100
+ warning=get_limits("valueAlarm", "lowWarningLimit", "highWarningLimit"),
101
+ )
102
+
103
+
42
104
  class PvaConverter:
43
105
  def write_value(self, value):
44
106
  return value
@@ -56,8 +118,7 @@ class PvaConverter:
56
118
  }
57
119
 
58
120
  def get_datakey(self, source: str, value) -> DataKey:
59
- dtype = specifier_to_dtype[value.type().aspy("value")]
60
- return {"source": source, "dtype": dtype, "shape": []}
121
+ return _data_key_from_value(source, value)
61
122
 
62
123
  def metadata_fields(self) -> List[str]:
63
124
  """
@@ -74,7 +135,9 @@ class PvaConverter:
74
135
 
75
136
  class PvaArrayConverter(PvaConverter):
76
137
  def get_datakey(self, source: str, value) -> DataKey:
77
- return {"source": source, "dtype": "array", "shape": [len(value["value"])]}
138
+ return _data_key_from_value(
139
+ source, value, dtype="array", shape=[len(value["value"])]
140
+ )
78
141
 
79
142
 
80
143
  class PvaNDArrayConverter(PvaConverter):
@@ -98,7 +161,7 @@ class PvaNDArrayConverter(PvaConverter):
98
161
 
99
162
  def get_datakey(self, source: str, value) -> DataKey:
100
163
  dims = self._get_dimensions(value)
101
- return {"source": source, "dtype": "array", "shape": dims}
164
+ return _data_key_from_value(source, value, dtype="array", shape=dims)
102
165
 
103
166
  def write_value(self, value):
104
167
  # No clear use-case for writing directly to an NDArray, and some
@@ -109,6 +172,11 @@ class PvaNDArrayConverter(PvaConverter):
109
172
 
110
173
  @dataclass
111
174
  class PvaEnumConverter(PvaConverter):
175
+ """To prevent issues when a signal is restarted and returns with different enum
176
+ values or orders, we put treat an Enum signal as a string, and cache the
177
+ choices on this class.
178
+ """
179
+
112
180
  def __init__(self, choices: dict[str, str]):
113
181
  self.choices = tuple(choices.values())
114
182
 
@@ -122,20 +190,17 @@ class PvaEnumConverter(PvaConverter):
122
190
  return self.choices[value["value"]["index"]]
123
191
 
124
192
  def get_datakey(self, source: str, value) -> DataKey:
125
- return {
126
- "source": source,
127
- "dtype": "string",
128
- "shape": [],
129
- "choices": list(self.choices),
130
- }
193
+ return _data_key_from_value(
194
+ source, value, choices=list(self.choices), dtype="string"
195
+ )
131
196
 
132
197
 
133
- class PvaEnumBoolConverter(PvaConverter):
198
+ class PvaEmumBoolConverter(PvaConverter):
134
199
  def value(self, value):
135
- return value["value"]["index"]
200
+ return bool(value["value"]["index"])
136
201
 
137
202
  def get_datakey(self, source: str, value) -> DataKey:
138
- return {"source": source, "dtype": "integer", "shape": []}
203
+ return _data_key_from_value(source, value, dtype="bool")
139
204
 
140
205
 
141
206
  class PvaTableConverter(PvaConverter):
@@ -144,7 +209,7 @@ class PvaTableConverter(PvaConverter):
144
209
 
145
210
  def get_datakey(self, source: str, value) -> DataKey:
146
211
  # This is wrong, but defer until we know how to actually describe a table
147
- return {"source": source, "dtype": "object", "shape": []} # type: ignore
212
+ return _data_key_from_value(source, value, dtype="object")
148
213
 
149
214
 
150
215
  class PvaDictConverter(PvaConverter):
@@ -213,7 +278,7 @@ def make_converter(datatype: Optional[Type], values: Dict[str, Any]) -> PvaConve
213
278
  )
214
279
  if pv_choices_len != 2:
215
280
  raise TypeError(f"{pv} has {pv_choices_len} choices, can't map to bool")
216
- return PvaEnumBoolConverter()
281
+ return PvaEmumBoolConverter()
217
282
  elif "NTEnum" in typeid:
218
283
  # This is an Enum
219
284
  pv_choices = get_unique(
@@ -1,5 +1,28 @@
1
1
  from enum import Enum
2
- from typing import Dict, Optional, Tuple, Type
2
+ from typing import Dict, Optional, Tuple, Type, TypedDict
3
+
4
+ common_meta = {
5
+ "units",
6
+ "precision",
7
+ }
8
+
9
+
10
+ class LimitPair(TypedDict):
11
+ high: float | None
12
+ low: float | None
13
+
14
+ def __bool__(self) -> bool:
15
+ return self.low is None and self.high is None
16
+
17
+
18
+ class Limits(TypedDict):
19
+ alarm: LimitPair
20
+ control: LimitPair
21
+ display: LimitPair
22
+ warning: LimitPair
23
+
24
+ def __bool__(self) -> bool:
25
+ return any(self.alarm, self.control, self.display, self.warning)
3
26
 
4
27
 
5
28
  def get_supported_values(
@@ -1,6 +1,7 @@
1
1
  import asyncio
2
2
  from typing import Optional
3
3
 
4
+ from ophyd_async.core import DEFAULT_TIMEOUT, wait_for_value
4
5
  from ophyd_async.core.async_status import AsyncStatus
5
6
  from ophyd_async.core.detector import DetectorControl, DetectorTrigger
6
7
  from ophyd_async.epics.areadetector.drivers.ad_base import (
@@ -23,14 +24,13 @@ class PilatusController(DetectorControl):
23
24
  def __init__(
24
25
  self,
25
26
  driver: PilatusDriver,
27
+ readout_time: float,
26
28
  ) -> None:
27
29
  self._drv = driver
30
+ self._readout_time = readout_time
28
31
 
29
32
  def get_deadtime(self, exposure: float) -> float:
30
- # Cite: https://media.dectris.com/User_Manual-PILATUS2-V1_4.pdf
31
- """The required minimum time difference between ExpPeriod and ExpTime
32
- (readout time) is 2.28 ms"""
33
- return 2.28e-3
33
+ return self._readout_time
34
34
 
35
35
  async def arm(
36
36
  self,
@@ -45,7 +45,20 @@ class PilatusController(DetectorControl):
45
45
  self._drv.num_images.set(999_999 if num == 0 else num),
46
46
  self._drv.image_mode.set(ImageMode.multiple),
47
47
  )
48
- return await start_acquiring_driver_and_ensure_status(self._drv)
48
+
49
+ # Standard arm the detector and wait for the acquire PV to be True
50
+ idle_status = await start_acquiring_driver_and_ensure_status(self._drv)
51
+
52
+ # The pilatus has an additional PV that goes True when the camserver
53
+ # is actually ready. Should wait for that too or we risk dropping
54
+ # a frame
55
+ await wait_for_value(
56
+ self._drv.armed_for_triggers,
57
+ True,
58
+ timeout=DEFAULT_TIMEOUT,
59
+ )
60
+
61
+ return idle_status
49
62
 
50
63
  @classmethod
51
64
  def _get_trigger_mode(cls, trigger: DetectorTrigger) -> PilatusTriggerMode:
@@ -1,7 +1,6 @@
1
1
  from enum import Enum
2
2
 
3
- from ophyd_async.epics.signal.signal import epics_signal_rw_rbv
4
-
3
+ from ...signal import epics_signal_r, epics_signal_rw_rbv
5
4
  from .ad_base import ADBase
6
5
 
7
6
 
@@ -18,4 +17,5 @@ class PilatusDriver(ADBase):
18
17
  self.trigger_mode = epics_signal_rw_rbv(
19
18
  PilatusTriggerMode, prefix + "TriggerMode"
20
19
  )
20
+ self.armed_for_triggers = epics_signal_r(bool, prefix + "Armed")
21
21
  super().__init__(prefix, name)
@@ -1,3 +1,5 @@
1
+ from enum import Enum
2
+
1
3
  from bluesky.protocols import Hints
2
4
 
3
5
  from ophyd_async.core import DirectoryProvider
@@ -11,6 +13,20 @@ from ophyd_async.epics.areadetector.writers.hdf_writer import HDFWriter
11
13
  from ophyd_async.epics.areadetector.writers.nd_file_hdf import NDFileHDF
12
14
 
13
15
 
16
+ #: Cite: https://media.dectris.com/User_Manual-PILATUS2-V1_4.pdf
17
+ #: The required minimum time difference between ExpPeriod and ExpTime
18
+ #: (readout time) is 2.28 ms
19
+ #: We provide an option to override for newer Pilatus models
20
+ class PilatusReadoutTime(float, Enum):
21
+ """Pilatus readout time per model in seconds"""
22
+
23
+ # Cite: https://media.dectris.com/User_Manual-PILATUS2-V1_4.pdf
24
+ pilatus2 = 2.28e-3
25
+
26
+ # Cite: https://media.dectris.com/user-manual-pilatus3-2020.pdf
27
+ pilatus3 = 0.95e-3
28
+
29
+
14
30
  class PilatusDetector(StandardDetector):
15
31
  """A Pilatus StandardDetector writing HDF files"""
16
32
 
@@ -21,15 +37,16 @@ class PilatusDetector(StandardDetector):
21
37
  self,
22
38
  prefix: str,
23
39
  directory_provider: DirectoryProvider,
24
- drv_suffix="cam1:",
25
- hdf_suffix="HDF1:",
26
- name="",
40
+ readout_time: PilatusReadoutTime = PilatusReadoutTime.pilatus3,
41
+ drv_suffix: str = "cam1:",
42
+ hdf_suffix: str = "HDF1:",
43
+ name: str = "",
27
44
  ):
28
45
  self.drv = PilatusDriver(prefix + drv_suffix)
29
46
  self.hdf = NDFileHDF(prefix + hdf_suffix)
30
47
 
31
48
  super().__init__(
32
- PilatusController(self.drv),
49
+ PilatusController(self.drv, readout_time=readout_time.value),
33
50
  HDFWriter(
34
51
  self.hdf,
35
52
  directory_provider,
@@ -179,7 +179,15 @@ def _mock_common_blocks(device: Device, stripped_type: Optional[Type] = None):
179
179
  sub_device_2 = device_cls(SoftSignalBackend(signal_dtype))
180
180
  sub_device = DeviceVector({1: sub_device_1, 2: sub_device_2})
181
181
  else:
182
- sub_device = DeviceVector({1: device_cls(), 2: device_cls()})
182
+ if hasattr(device, device_name):
183
+ sub_device = getattr(device, device_name)
184
+ else:
185
+ sub_device = DeviceVector(
186
+ {
187
+ 1: device_cls(),
188
+ 2: device_cls(),
189
+ }
190
+ )
183
191
 
184
192
  for sub_device_in_vector in sub_device.values():
185
193
  _mock_common_blocks(sub_device_in_vector, stripped_type=device_cls)
@@ -296,7 +304,9 @@ async def fill_pvi_entries(
296
304
 
297
305
 
298
306
  def create_children_from_annotations(
299
- device: Device, included_optional_fields: Tuple[str, ...] = ()
307
+ device: Device,
308
+ included_optional_fields: Tuple[str, ...] = (),
309
+ device_vectors: Optional[Dict[str, int]] = None,
300
310
  ):
301
311
  """For intializing blocks at __init__ of ``device``."""
302
312
  for name, device_type in get_type_hints(type(device)).items():
@@ -307,12 +317,22 @@ def create_children_from_annotations(
307
317
  continue
308
318
  is_device_vector, device_type = _strip_device_vector(device_type)
309
319
  if (
310
- is_device_vector
320
+ (is_device_vector and (not device_vectors or name not in device_vectors))
311
321
  or ((origin := get_origin(device_type)) and issubclass(origin, Signal))
312
322
  or (isclass(device_type) and issubclass(device_type, Signal))
313
323
  ):
314
324
  continue
315
325
 
316
- sub_device = device_type()
317
- setattr(device, name, sub_device)
318
- create_children_from_annotations(sub_device)
326
+ if is_device_vector:
327
+ n_device_vector = DeviceVector(
328
+ {i: device_type() for i in range(1, device_vectors[name] + 1)}
329
+ )
330
+ setattr(device, name, n_device_vector)
331
+ for sub_device in n_device_vector.values():
332
+ create_children_from_annotations(
333
+ sub_device, device_vectors=device_vectors
334
+ )
335
+ else:
336
+ sub_device = device_type()
337
+ setattr(device, name, sub_device)
338
+ create_children_from_annotations(sub_device, device_vectors=device_vectors)
@@ -38,7 +38,6 @@ class HDFPanda(CommonPandaBlocks, StandardDetector):
38
38
  writer=writer,
39
39
  config_sigs=config_sigs,
40
40
  name=name,
41
- writer_timeout=DEFAULT_TIMEOUT,
42
41
  )
43
42
 
44
43
  async def connect(
@@ -18,6 +18,7 @@ def prepare_static_seq_table_flyer_and_detectors_with_same_trigger(
18
18
  shutter_time: float,
19
19
  repeats: int = 1,
20
20
  period: float = 0.0,
21
+ frame_timeout: float | None = None,
21
22
  ):
22
23
  """Prepare a hardware triggered flyable and one or more detectors.
23
24
 
@@ -39,6 +40,7 @@ def prepare_static_seq_table_flyer_and_detectors_with_same_trigger(
39
40
  trigger=DetectorTrigger.constant_gate,
40
41
  deadtime=deadtime,
41
42
  livetime=exposure,
43
+ frame_timeout=frame_timeout,
42
44
  )
43
45
  trigger_time = number_of_frames * (exposure + deadtime)
44
46
  pre_delay = max(period - 2 * shutter_time - trigger_time, 0)
@@ -120,6 +122,7 @@ def time_resolved_fly_and_collect_with_static_seq_table(
120
122
  shutter_time: float,
121
123
  repeats: int = 1,
122
124
  period: float = 0.0,
125
+ frame_timeout: float | None = None,
123
126
  ):
124
127
  """Run a scan wth a flyer and multiple detectors.
125
128
 
@@ -144,6 +147,7 @@ def time_resolved_fly_and_collect_with_static_seq_table(
144
147
  shutter_time=shutter_time,
145
148
  repeats=repeats,
146
149
  period=period,
150
+ frame_timeout=frame_timeout,
147
151
  )
148
152
  # Run the fly scan
149
153
  yield from fly_and_collect(stream_name, flyer, detectors)
@@ -16,7 +16,6 @@ class SimPatternDetector(StandardDetector):
16
16
  path: Path,
17
17
  config_sigs: Sequence[AsyncReadable] = [],
18
18
  name: str = "sim_pattern_detector",
19
- writer_timeout: float = 1,
20
19
  ) -> None:
21
20
  self.directory_provider: DirectoryProvider = StaticDirectoryProvider(path)
22
21
  self.pattern_generator = PatternGenerator()
@@ -33,5 +32,4 @@ class SimPatternDetector(StandardDetector):
33
32
  writer=writer,
34
33
  config_sigs=config_sigs,
35
34
  name=name,
36
- writer_timeout=writer_timeout,
37
35
  )
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: ophyd-async
3
- Version: 0.3rc2
3
+ Version: 0.3.1
4
4
  Summary: Asynchronous Bluesky hardware abstraction code, compatible with control systems like EPICS and Tango
5
5
  Author-email: Tom Cobb <tom.cobb@diamond.ac.uk>
6
6
  License: BSD 3-Clause License
@@ -96,7 +96,7 @@ Requires-Dist: p4p ; extra == 'pva'
96
96
 
97
97
  # ophyd-async
98
98
 
99
- Asynchronous Bluesky hardware abstraction code, compatible with control systems like EPICS and Tango
99
+ Asynchronous Bluesky hardware abstraction code, compatible with control systems like EPICS and Tango.
100
100
 
101
101
  | Source | <https://github.com/bluesky/ophyd-async> |
102
102
  | :-----------: | :-----------------------------------------------: |
@@ -1,31 +1,31 @@
1
1
  ophyd_async/__init__.py,sha256=v-rRiDOgZ3sQSMQKq0vgUQZvpeOkoHFXissAx6Ktg84,61
2
2
  ophyd_async/__main__.py,sha256=G-Zcv_G9zK7Nhx6o5L5w-wyhMxdl_WgyMELu8IMFqAE,328
3
- ophyd_async/_version.py,sha256=Q6NCoABIZDCaoLXsuU8odM9VJngCLNAvP46RIhMvPN0,409
3
+ ophyd_async/_version.py,sha256=HzPz9rq3s1AiZXregKlqKaJJ2wGMtvH_a3V9la9CnpM,411
4
4
  ophyd_async/log.py,sha256=DbMjt0bkfUOLHIinZYt0Q0FHZmCXXi5x8y0uFiEmqoQ,3587
5
5
  ophyd_async/protocols.py,sha256=EF2W9nfElV-0QNMYrX1zusL1PqDJR3kNsjlalR29j0I,3412
6
6
  ophyd_async/core/__init__.py,sha256=znjVeRfrDVJbGLEkUczeKMW46kV6HDrlE4lV0SqvZt4,2952
7
7
  ophyd_async/core/_providers.py,sha256=LrlTMPHKXWOPVkpAOw-pqBq0kip-c3C9ZZPoFfiaV4M,2212
8
8
  ophyd_async/core/async_status.py,sha256=9TOgOXIAuH62RDo5t-Y5GdjrJ76d_6TFlBxYv-5_a88,4367
9
- ophyd_async/core/detector.py,sha256=8mdLKphirgit5CVCklJI9eHqKKiCz4CYs9BElo10-lc,11007
9
+ ophyd_async/core/detector.py,sha256=NMX8y_yiViHbv3CaJ7LxzXYkH6tCWI3LocpQ3w4lGEQ,11176
10
10
  ophyd_async/core/device.py,sha256=280zFnLCoiMZAA-Dh1_AjUSnhxUfKYGgj4H_2S1njOA,7086
11
- ophyd_async/core/device_save_loader.py,sha256=RXA3dPUPihAR2ZGDStlGiA-TAsr_xqL0snsCjMsMnfA,9138
11
+ ophyd_async/core/device_save_loader.py,sha256=EK7FB5oWiLI_s2KZ1BNDQ2AUKVbFLlimMl0vXfsjcgo,8223
12
12
  ophyd_async/core/flyer.py,sha256=bIjzBkrl8HVAlKgsZ_FF0WL69Qvksyzp9ZWmTLl8Yrw,2304
13
13
  ophyd_async/core/mock_signal_backend.py,sha256=Ug6jK72wm9vM6EueoUrYgcXtiFzdPUEISRe86LdyYKc,2844
14
- ophyd_async/core/mock_signal_utils.py,sha256=bF8MVZA1j9zCmS2tBbgUdfwNmcHniHixnmotjd0g7hs,4083
15
- ophyd_async/core/signal.py,sha256=FbTb5qDPLhVxEbh6gimqXfkZwcqB4ymHTEYVXZVZYrk,16456
14
+ ophyd_async/core/mock_signal_utils.py,sha256=LE8VxNq3jfaTePnHHpZpKCi1vwKi8EIg-g1jfw-Q5bQ,4726
15
+ ophyd_async/core/signal.py,sha256=_sEe__VbP7fZfdOG2NjiTMZRFHQ0ckSmwhq1qiAEiXM,17144
16
16
  ophyd_async/core/signal_backend.py,sha256=fT3q0WED3JHmNKYCs7PzDLCK4cUPVin3wQjDNPdHqAY,1525
17
17
  ophyd_async/core/soft_signal_backend.py,sha256=56zvcEi4c8n1yYbafTbp7X0VhSkhoehm3L8RBhu2fik,5596
18
- ophyd_async/core/standard_readable.py,sha256=uVG3vs3s7-Kzg5dRCtT4I2mhZPqwVGYy2dxNmaOpDVU,8980
18
+ ophyd_async/core/standard_readable.py,sha256=fhq_WAZtLYWrw6DvvrFRYRAPOUP2_IcX4qLucoEEeOg,9049
19
19
  ophyd_async/core/utils.py,sha256=3oZcXNqAUHX4ZWMBH5gSuK6cFWEhSkZ9GSDYv0pf8jc,5783
20
20
  ophyd_async/epics/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
21
21
  ophyd_async/epics/_backend/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
22
- ophyd_async/epics/_backend/_aioca.py,sha256=cpPNZmRMi7FnAh2-3ec5uklLVFOqsmEmpI1nh5Ud1Ls,8794
23
- ophyd_async/epics/_backend/_p4p.py,sha256=lIKx7kQ2o8h3M4wPwrq8JBo8xuDnasJxpDH2ATVBY78,12257
24
- ophyd_async/epics/_backend/common.py,sha256=16mAuxDwA3eZFjUW8DHMabaW3CtEI0Qe8DLpP2xlW7Y,814
22
+ ophyd_async/epics/_backend/_aioca.py,sha256=dDwzjR8Xez7qF50b0cH3f0lAteaFMlQu2HLpBtkmz5c,11250
23
+ ophyd_async/epics/_backend/_p4p.py,sha256=oCT8MeVWlhmsxZ8YRSrelrY8W3NvfpXcMlfAKL_AUNY,14331
24
+ ophyd_async/epics/_backend/common.py,sha256=VDL4hiSn-C_yF3-PZoc46IXnmOHyrClNwIwLzEvx9Ww,1259
25
25
  ophyd_async/epics/areadetector/__init__.py,sha256=ViKzx-wUxkRyNR33wfpL11QB97o0J47_KMyI2C_NphI,510
26
26
  ophyd_async/epics/areadetector/aravis.py,sha256=YklN4V0loqUQBs4swVX304N49JIGPvrNOk8iA5EWofg,2127
27
27
  ophyd_async/epics/areadetector/kinetix.py,sha256=7rE2MLnz9DEmeiN9pCekDfpXuZ2DErnMajRp_9eoLZY,1359
28
- ophyd_async/epics/areadetector/pilatus.py,sha256=ki-BOBCEIiUD2wAtmujBIB1eX-nbXB4yMLJK_Q3opRM,1398
28
+ ophyd_async/epics/areadetector/pilatus.py,sha256=hs3v8QUIwTHNg7i1mRSg9SbIIsoUZg90OxJ740gEKpo,2044
29
29
  ophyd_async/epics/areadetector/single_trigger_det.py,sha256=U92dqhioIfnve3jtCThq9gXBCdEzzqzY4ezk6rZV19g,1182
30
30
  ophyd_async/epics/areadetector/utils.py,sha256=p66UbVdKRFj6Sm1Qvm23kmlVyBMMqIvXFxA3x17YnSk,2824
31
31
  ophyd_async/epics/areadetector/vimba.py,sha256=IxG8KLzfb84iLtzf6ZoX9JikqZLP49lwkWu33bkDV9Y,1291
@@ -33,13 +33,13 @@ ophyd_async/epics/areadetector/controllers/__init__.py,sha256=af58ci7X2z2s_FyUwR
33
33
  ophyd_async/epics/areadetector/controllers/ad_sim_controller.py,sha256=mthZ6WxajMEgUKptq3bnkIctbLhjzTagV66i1auB8cg,1587
34
34
  ophyd_async/epics/areadetector/controllers/aravis_controller.py,sha256=CIfnZdq_NobO_UMC2TJoAfUEP9GlzZg5z5bz6Dn1DxY,2669
35
35
  ophyd_async/epics/areadetector/controllers/kinetix_controller.py,sha256=9QmydX85QOXfQL_UX49M9EQ2b2hUZPVzLxgGQn-A9Oc,1611
36
- ophyd_async/epics/areadetector/controllers/pilatus_controller.py,sha256=cd1CKkaXlwkpQ0I1VL7nN0U8R4VweTsa08WhvHYI4nY,2243
36
+ ophyd_async/epics/areadetector/controllers/pilatus_controller.py,sha256=jJdY97JATz-bWtEPtmnyQ-Zyjvhy2M78i0wUuXO2dtE,2617
37
37
  ophyd_async/epics/areadetector/controllers/vimba_controller.py,sha256=Eh4Hr9rWgq1mKvE93JzgixntjPHxF3_07GTFqiOdZqE,2123
38
38
  ophyd_async/epics/areadetector/drivers/__init__.py,sha256=-Ib0Lz4fFQQmB7K0uFxMDvAerkLxadMQERH7lNAvrs4,495
39
39
  ophyd_async/epics/areadetector/drivers/ad_base.py,sha256=18WFAiWEUg0H2LcvTQHrKYj2wThGafQzDpiyAWki6vo,3411
40
40
  ophyd_async/epics/areadetector/drivers/aravis_driver.py,sha256=PmIygsVNoxxYHvZZzFAbAm2DXmXFc13nAzL_DJB6YSU,1464
41
41
  ophyd_async/epics/areadetector/drivers/kinetix_driver.py,sha256=yIV23BkGBJ4i0VskLiLL7AFbadCCR6Ch1UwUDJ9r2YM,743
42
- ophyd_async/epics/areadetector/drivers/pilatus_driver.py,sha256=0DsUu9vAPXDa2v8_V0f_kPjBtLu3y4_EkmFfFjYO4Gk,553
42
+ ophyd_async/epics/areadetector/drivers/pilatus_driver.py,sha256=0DBBuiR_FtwzVVdDW0ifdSrdKZtnprWuy87g66o8RlQ,619
43
43
  ophyd_async/epics/areadetector/drivers/vimba_driver.py,sha256=J54VtWkOklfbSqZYxGWH1e6Uzm9_Gph_ZbCf9Zax0LU,1713
44
44
  ophyd_async/epics/areadetector/writers/__init__.py,sha256=tpPcrYd1hs8WS7C0gmCnR2EBwjE5RzCljI7WwZ2V_LM,191
45
45
  ophyd_async/epics/areadetector/writers/_hdfdataset.py,sha256=E0C9VgsPyY35h7k0mvcIhjsIVNavApLxizqNWlM388w,167
@@ -54,13 +54,13 @@ ophyd_async/epics/demo/sensor.db,sha256=AVtiydrdtwAz2EFurO2Ult9SSRtre3r0akOBbL98
54
54
  ophyd_async/epics/motion/__init__.py,sha256=tnmVRIwKa9PdN_xonJdAUD04UpEceh-hoD7XI62yDB0,46
55
55
  ophyd_async/epics/motion/motor.py,sha256=G8cc-okSXJ6s2fGxRO155xm7PrBbVImBmBMRWts895k,3630
56
56
  ophyd_async/epics/pvi/__init__.py,sha256=TbOQNY4enQWgtr1T7x129vpo2p7FIFlr8cyZqqv5Lk4,158
57
- ophyd_async/epics/pvi/pvi.py,sha256=PJdY3rCRyIQbsbHDru-TJ-IVOItyaQwCQKAC0Widu6A,11363
57
+ ophyd_async/epics/pvi/pvi.py,sha256=Kc3klnA9F82h_p2atFYXe-wFO9OzN5TV69Tc56tD2do,12204
58
58
  ophyd_async/epics/signal/__init__.py,sha256=JXKBSGpRL9y3auh27JRxsqDn_rBOXpJjtd4nCuDOX2g,261
59
59
  ophyd_async/epics/signal/_epics_transport.py,sha256=DEIL0iYUAWssysVEgWGu1fHSM1l-ATV2kjUgPtDN9LY,858
60
60
  ophyd_async/epics/signal/signal.py,sha256=M8ZVG_zLdYJfroCRX-u_w8c3yIhswSRw8e3RkW2szio,3166
61
61
  ophyd_async/panda/__init__.py,sha256=FuSnvp-RtdA0X4RcHEF0nTiXymRts2MNdFmF_1_i41w,775
62
62
  ophyd_async/panda/_common_blocks.py,sha256=n0PPc1rar43oDSIA-yNubTc8fR5YCW1tyjQU58whsg0,1038
63
- ophyd_async/panda/_hdf_panda.py,sha256=QjfZyYos0ZBlIqBiZ5UbyEd_wuh_cGzwV8QE9jvLiIY,1419
63
+ ophyd_async/panda/_hdf_panda.py,sha256=TWzBnyJcLmltQyOr5nXbCAZdVRqY633ogBX6pY06p3g,1375
64
64
  ophyd_async/panda/_panda_controller.py,sha256=dIqcjmaIHVrki8UXSoDx46kk6I2Lhpe2o3sXNg5f-RQ,1238
65
65
  ophyd_async/panda/_table.py,sha256=dLoRP4zYNOkD_s0Vkp2wVYAwkjVG8nNdf8-FaXOTfPo,5655
66
66
  ophyd_async/panda/_trigger.py,sha256=tBH8uq_4o1ASG9yofVxq3tjf5v8LPzniDTRL4yjramI,1195
@@ -70,17 +70,17 @@ ophyd_async/panda/writers/_hdf_writer.py,sha256=vnyIg3JmlzMIIq75o0IDMfGzBm_GJAhO
70
70
  ophyd_async/panda/writers/_panda_hdf_file.py,sha256=42iHaTax4JjOBpNC7d4nkNL9SM14OTnFPTIcXv2jg-4,1759
71
71
  ophyd_async/plan_stubs/__init__.py,sha256=nO9ELG9J7fYwfVTVRWVorz4kffeszYpwk1ROh6Ha--w,405
72
72
  ophyd_async/plan_stubs/ensure_connected.py,sha256=1MkDu8UqVRPHLnW9IXRn-QvKiG8-rCV8T4KDbjf9K6w,557
73
- ophyd_async/plan_stubs/fly.py,sha256=nl8XLoY7hvlam6H3zl4NcPRUiEJ3xIjopHEfA0ehTDg,4845
73
+ ophyd_async/plan_stubs/fly.py,sha256=fQwBeLw57-NeBsroVxKDa8kpuu6fgTWYWimbsatCL28,4999
74
74
  ophyd_async/sim/__init__.py,sha256=ScjH1g7FMo5yPACfJRZE6xGBWCHU4bKDzNQk1tqObnA,366
75
75
  ophyd_async/sim/pattern_generator.py,sha256=pvSk2zb82D08j2jiKAMqMAfRohGnYd_rpjUraLrCD6c,10640
76
76
  ophyd_async/sim/sim_pattern_detector_control.py,sha256=Ypz8IuRYAY2J243IhVbNyGr_Z-XtpJZ1qxma6NR3TgM,1838
77
77
  ophyd_async/sim/sim_pattern_detector_writer.py,sha256=ESpcVyHd1TP7Cojznv2hJAwLinu3XbgAiVKfX12FCII,1237
78
- ophyd_async/sim/sim_pattern_generator.py,sha256=fbcwWxTPYKLK33OzIY15vGylnonOO8HIudz1y_56GZU,1336
78
+ ophyd_async/sim/sim_pattern_generator.py,sha256=L4jTnEVUFBRXIWq_UMHqx00YDdbGO2pjo_IuuVwpzXE,1258
79
79
  ophyd_async/sim/demo/__init__.py,sha256=9mxKpslrL89cfSj4g3og8Br3O--pMj3hhWZS-Xu6kyA,56
80
80
  ophyd_async/sim/demo/sim_motor.py,sha256=a2p5wnHXjF-V5zOFai7jnszk4kbGmrZRnUqBtkOgEfQ,3733
81
- ophyd_async-0.3rc2.dist-info/LICENSE,sha256=pU5shZcsvWgz701EbT7yjFZ8rMvZcWgRH54CRt8ld_c,1517
82
- ophyd_async-0.3rc2.dist-info/METADATA,sha256=TRb7FrOHb4YrISGe-fkGY_W3kkrnBvioFJsgErNLmo8,6285
83
- ophyd_async-0.3rc2.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
84
- ophyd_async-0.3rc2.dist-info/entry_points.txt,sha256=O0YNJTEufO0w9BozXi-JurTy2U1_o0ypeCgJLQ727Jk,58
85
- ophyd_async-0.3rc2.dist-info/top_level.txt,sha256=-hjorMsv5Rmjo3qrgqhjpal1N6kW5vMxZO3lD4iEaXs,12
86
- ophyd_async-0.3rc2.dist-info/RECORD,,
81
+ ophyd_async-0.3.1.dist-info/LICENSE,sha256=pU5shZcsvWgz701EbT7yjFZ8rMvZcWgRH54CRt8ld_c,1517
82
+ ophyd_async-0.3.1.dist-info/METADATA,sha256=1xYSDLCGtf6W32lecy26DElymc-bVECyyKjtKhAU-Eg,6285
83
+ ophyd_async-0.3.1.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
84
+ ophyd_async-0.3.1.dist-info/entry_points.txt,sha256=O0YNJTEufO0w9BozXi-JurTy2U1_o0ypeCgJLQ727Jk,58
85
+ ophyd_async-0.3.1.dist-info/top_level.txt,sha256=-hjorMsv5Rmjo3qrgqhjpal1N6kW5vMxZO3lD4iEaXs,12
86
+ ophyd_async-0.3.1.dist-info/RECORD,,