ophyd-async 0.3a1__py3-none-any.whl → 0.3a3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (69)
  1. ophyd_async/__init__.py +1 -4
  2. ophyd_async/_version.py +1 -1
  3. ophyd_async/core/__init__.py +23 -3
  4. ophyd_async/core/_providers.py +3 -1
  5. ophyd_async/core/detector.py +72 -46
  6. ophyd_async/core/device.py +8 -0
  7. ophyd_async/core/flyer.py +12 -21
  8. ophyd_async/core/signal.py +134 -20
  9. ophyd_async/core/signal_backend.py +6 -3
  10. ophyd_async/core/sim_signal_backend.py +32 -20
  11. ophyd_async/core/standard_readable.py +212 -23
  12. ophyd_async/core/utils.py +18 -1
  13. ophyd_async/epics/_backend/_aioca.py +17 -15
  14. ophyd_async/epics/_backend/_p4p.py +34 -25
  15. ophyd_async/epics/_backend/common.py +16 -11
  16. ophyd_async/epics/areadetector/__init__.py +8 -0
  17. ophyd_async/epics/areadetector/aravis.py +67 -0
  18. ophyd_async/epics/areadetector/controllers/__init__.py +2 -1
  19. ophyd_async/epics/areadetector/controllers/aravis_controller.py +73 -0
  20. ophyd_async/epics/areadetector/controllers/kinetix_controller.py +49 -0
  21. ophyd_async/epics/areadetector/controllers/pilatus_controller.py +36 -24
  22. ophyd_async/epics/areadetector/controllers/vimba_controller.py +66 -0
  23. ophyd_async/epics/areadetector/drivers/__init__.py +6 -0
  24. ophyd_async/epics/areadetector/drivers/aravis_driver.py +154 -0
  25. ophyd_async/epics/areadetector/drivers/kinetix_driver.py +24 -0
  26. ophyd_async/epics/areadetector/drivers/pilatus_driver.py +4 -4
  27. ophyd_async/epics/areadetector/drivers/vimba_driver.py +58 -0
  28. ophyd_async/epics/areadetector/kinetix.py +46 -0
  29. ophyd_async/epics/areadetector/pilatus.py +45 -0
  30. ophyd_async/epics/areadetector/single_trigger_det.py +14 -6
  31. ophyd_async/epics/areadetector/vimba.py +43 -0
  32. ophyd_async/epics/areadetector/writers/_hdffile.py +4 -4
  33. ophyd_async/epics/areadetector/writers/hdf_writer.py +12 -4
  34. ophyd_async/epics/areadetector/writers/nd_file_hdf.py +1 -0
  35. ophyd_async/epics/demo/__init__.py +45 -18
  36. ophyd_async/epics/motion/motor.py +24 -19
  37. ophyd_async/epics/pvi/__init__.py +3 -0
  38. ophyd_async/epics/pvi/pvi.py +318 -0
  39. ophyd_async/epics/signal/signal.py +26 -9
  40. ophyd_async/log.py +130 -0
  41. ophyd_async/panda/__init__.py +17 -6
  42. ophyd_async/panda/_common_blocks.py +49 -0
  43. ophyd_async/panda/_hdf_panda.py +48 -0
  44. ophyd_async/panda/{panda_controller.py → _panda_controller.py} +3 -7
  45. ophyd_async/panda/_trigger.py +39 -0
  46. ophyd_async/panda/writers/__init__.py +3 -0
  47. ophyd_async/panda/writers/_hdf_writer.py +220 -0
  48. ophyd_async/panda/writers/_panda_hdf_file.py +58 -0
  49. ophyd_async/planstubs/__init__.py +5 -0
  50. ophyd_async/planstubs/prepare_trigger_and_dets.py +57 -0
  51. ophyd_async/protocols.py +96 -0
  52. ophyd_async/sim/__init__.py +11 -0
  53. ophyd_async/sim/demo/__init__.py +3 -0
  54. ophyd_async/sim/demo/sim_motor.py +118 -0
  55. ophyd_async/sim/pattern_generator.py +318 -0
  56. ophyd_async/sim/sim_pattern_detector_control.py +55 -0
  57. ophyd_async/sim/sim_pattern_detector_writer.py +34 -0
  58. ophyd_async/sim/sim_pattern_generator.py +37 -0
  59. {ophyd_async-0.3a1.dist-info → ophyd_async-0.3a3.dist-info}/METADATA +30 -69
  60. ophyd_async-0.3a3.dist-info/RECORD +83 -0
  61. ophyd_async/epics/pvi.py +0 -70
  62. ophyd_async/panda/panda.py +0 -241
  63. ophyd_async-0.3a1.dist-info/RECORD +0 -56
  64. /ophyd_async/panda/{table.py → _table.py} +0 -0
  65. /ophyd_async/panda/{utils.py → _utils.py} +0 -0
  66. {ophyd_async-0.3a1.dist-info → ophyd_async-0.3a3.dist-info}/LICENSE +0 -0
  67. {ophyd_async-0.3a1.dist-info → ophyd_async-0.3a3.dist-info}/WHEEL +0 -0
  68. {ophyd_async-0.3a1.dist-info → ophyd_async-0.3a3.dist-info}/entry_points.txt +0 -0
  69. {ophyd_async-0.3a1.dist-info → ophyd_async-0.3a3.dist-info}/top_level.txt +0 -0
ophyd_async/epics/pvi/pvi.py ADDED
@@ -0,0 +1,318 @@
+ import re
+ from dataclasses import dataclass
+ from inspect import isclass
+ from typing import (
+     Any,
+     Callable,
+     Dict,
+     FrozenSet,
+     Literal,
+     Optional,
+     Tuple,
+     Type,
+     TypeVar,
+     Union,
+     get_args,
+     get_origin,
+     get_type_hints,
+ )
+
+ from ophyd_async.core import Device, DeviceVector, SimSignalBackend
+ from ophyd_async.core.signal import Signal
+ from ophyd_async.core.utils import DEFAULT_TIMEOUT
+ from ophyd_async.epics._backend._p4p import PvaSignalBackend
+ from ophyd_async.epics.signal.signal import (
+     epics_signal_r,
+     epics_signal_rw,
+     epics_signal_w,
+     epics_signal_x,
+ )
+
+ T = TypeVar("T")
+ Access = FrozenSet[
+     Union[Literal["r"], Literal["w"], Literal["rw"], Literal["x"], Literal["d"]]
+ ]
+
+
+ def _strip_number_from_string(string: str) -> Tuple[str, Optional[int]]:
+     match = re.match(r"(.*?)(\d*)$", string)
+     assert match
+
+     name = match.group(1)
+     number = match.group(2) or None
+     if number:
+         number = int(number)
+     return name, number
+
+
+ def _split_subscript(tp: T) -> Union[Tuple[Any, Tuple[Any]], Tuple[T, None]]:
+     """Split a subscripted type into the its origin and args.
+
+     If `tp` is not a subscripted type, then just return the type and None as args.
+
+     """
+     if get_origin(tp) is not None:
+         return get_origin(tp), get_args(tp)
+
+     return tp, None
+
+
+ def _strip_union(field: Union[Union[T], T]) -> Tuple[T, bool]:
+     if get_origin(field) is Union:
+         args = get_args(field)
+         is_optional = type(None) in args
+         for arg in args:
+             if arg is not type(None):
+                 return arg, is_optional
+     return field, False
+
+
+ def _strip_device_vector(field: Union[Type[Device]]) -> Tuple[bool, Type[Device]]:
+     if get_origin(field) is DeviceVector:
+         return True, get_args(field)[0]
+     return False, field
+
+
+ @dataclass
+ class PVIEntry:
+     """
+     A dataclass to represent a single entry in the PVI table.
+     This could either be a signal or a sub-table.
+     """
+
+     sub_entries: Dict[str, Union[Dict[int, "PVIEntry"], "PVIEntry"]]
+     pvi_pv: Optional[str] = None
+     device: Optional[Device] = None
+     common_device_type: Optional[Type[Device]] = None
+
+
+ def _verify_common_blocks(entry: PVIEntry, common_device: Type[Device]):
+     if not entry.sub_entries:
+         return
+     common_sub_devices = get_type_hints(common_device)
+     for sub_name, sub_device in common_sub_devices.items():
+         if sub_name in ("_name", "parent"):
+             continue
+         assert entry.sub_entries
+         device_t, is_optional = _strip_union(sub_device)
+         if sub_name not in entry.sub_entries and not is_optional:
+             raise RuntimeError(
+                 f"sub device `{sub_name}:{type(sub_device)}` " "was not provided by pvi"
+             )
+         if isinstance(entry.sub_entries[sub_name], dict):
+             for sub_sub_entry in entry.sub_entries[sub_name].values():  # type: ignore
+                 _verify_common_blocks(sub_sub_entry, sub_device)  # type: ignore
+         else:
+             _verify_common_blocks(
+                 entry.sub_entries[sub_name],
+                 sub_device,  # type: ignore
+             )
+
+
+ _pvi_mapping: Dict[FrozenSet[str], Callable[..., Signal]] = {
+     frozenset({"r", "w"}): lambda dtype, read_pv, write_pv: epics_signal_rw(
+         dtype, "pva://" + read_pv, "pva://" + write_pv
+     ),
+     frozenset({"rw"}): lambda dtype, read_write_pv: epics_signal_rw(
+         dtype, "pva://" + read_write_pv, write_pv="pva://" + read_write_pv
+     ),
+     frozenset({"r"}): lambda dtype, read_pv: epics_signal_r(dtype, "pva://" + read_pv),
+     frozenset({"w"}): lambda dtype, write_pv: epics_signal_w(
+         dtype, "pva://" + write_pv
+     ),
+     frozenset({"x"}): lambda _, write_pv: epics_signal_x("pva://" + write_pv),
+ }
+
+
+ def _parse_type(
+     is_pvi_table: bool,
+     number_suffix: Optional[int],
+     common_device_type: Optional[Type[Device]],
+ ):
+     if common_device_type:
+         # pre-defined type
+         device_cls, _ = _strip_union(common_device_type)
+         is_device_vector, device_cls = _strip_device_vector(device_cls)
+         device_cls, device_args = _split_subscript(device_cls)
+         assert issubclass(device_cls, Device)
+
+         is_signal = issubclass(device_cls, Signal)
+         signal_dtype = device_args[0] if device_args is not None else None
+
+     elif is_pvi_table:
+         # is a block, we can make it a DeviceVector if it ends in a number
+         is_device_vector = number_suffix is not None
+         is_signal = False
+         signal_dtype = None
+         device_cls = Device
+     else:
+         # is a signal, signals aren't stored in DeviceVectors unless
+         # they're defined as such in the common_device_type
+         is_device_vector = False
+         is_signal = True
+         signal_dtype = None
+         device_cls = Signal
+
+     return is_device_vector, is_signal, signal_dtype, device_cls
+
+
+ def _sim_common_blocks(device: Device, stripped_type: Optional[Type] = None):
+     device_t = stripped_type or type(device)
+     sub_devices = (
+         (field, field_type)
+         for field, field_type in get_type_hints(device_t).items()
+         if field not in ("_name", "parent")
+     )
+
+     for device_name, device_cls in sub_devices:
+         device_cls, _ = _strip_union(device_cls)
+         is_device_vector, device_cls = _strip_device_vector(device_cls)
+         device_cls, device_args = _split_subscript(device_cls)
+         assert issubclass(device_cls, Device)
+
+         is_signal = issubclass(device_cls, Signal)
+         signal_dtype = device_args[0] if device_args is not None else None
+
+         if is_device_vector:
+             if is_signal:
+                 sub_device_1 = device_cls(SimSignalBackend(signal_dtype))
+                 sub_device_2 = device_cls(SimSignalBackend(signal_dtype))
+                 sub_device = DeviceVector({1: sub_device_1, 2: sub_device_2})
+             else:
+                 sub_device = DeviceVector({1: device_cls(), 2: device_cls()})
+
+                 for sub_device_in_vector in sub_device.values():
+                     _sim_common_blocks(sub_device_in_vector, stripped_type=device_cls)
+
+             for value in sub_device.values():
+                 value.parent = sub_device
+         else:
+             if is_signal:
+                 sub_device = device_cls(SimSignalBackend(signal_dtype))
+             else:
+                 sub_device = getattr(device, device_name, device_cls())
+                 _sim_common_blocks(sub_device, stripped_type=device_cls)
+
+         setattr(device, device_name, sub_device)
+         sub_device.parent = device
+
+
+ async def _get_pvi_entries(entry: PVIEntry, timeout=DEFAULT_TIMEOUT):
+     if not entry.pvi_pv or not entry.pvi_pv.endswith(":PVI"):
+         raise RuntimeError("Top level entry must be a pvi table")
+
+     pvi_table_signal_backend: PvaSignalBackend = PvaSignalBackend(
+         None, entry.pvi_pv, entry.pvi_pv
+     )
+     await pvi_table_signal_backend.connect(
+         timeout=timeout
+     )  # create table signal backend
+
+     pva_table = (await pvi_table_signal_backend.get_value())["pvi"]
+     common_device_type_hints = (
+         get_type_hints(entry.common_device_type) if entry.common_device_type else {}
+     )
+
+     for sub_name, pva_entries in pva_table.items():
+         pvs = list(pva_entries.values())
+         is_pvi_table = len(pvs) == 1 and pvs[0].endswith(":PVI")
+         sub_name_split, sub_number_split = _strip_number_from_string(sub_name)
+         is_device_vector, is_signal, signal_dtype, device_type = _parse_type(
+             is_pvi_table,
+             sub_number_split,
+             common_device_type_hints.get(sub_name_split),
+         )
+         if is_signal:
+             device = _pvi_mapping[frozenset(pva_entries.keys())](signal_dtype, *pvs)
+         else:
+             device = getattr(entry.device, sub_name, device_type())
+
+         sub_entry = PVIEntry(
+             device=device, common_device_type=device_type, sub_entries={}
+         )
+
+         if is_device_vector:
+             # If device vector then we store sub_name -> {sub_number -> sub_entry}
+             # and aggregate into `DeviceVector` in `_set_device_attributes`
+             sub_number_split = 1 if sub_number_split is None else sub_number_split
+             if sub_name_split not in entry.sub_entries:
+                 entry.sub_entries[sub_name_split] = {}
+             entry.sub_entries[sub_name_split][sub_number_split] = sub_entry  # type: ignore
+         else:
+             entry.sub_entries[sub_name] = sub_entry
+
+         if is_pvi_table:
+             sub_entry.pvi_pv = pvs[0]
+             await _get_pvi_entries(sub_entry)
+
+     if entry.common_device_type:
+         _verify_common_blocks(entry, entry.common_device_type)
+
+
+ def _set_device_attributes(entry: PVIEntry):
+     for sub_name, sub_entry in entry.sub_entries.items():
+         if isinstance(sub_entry, dict):
+             sub_device = DeviceVector()  # type: ignore
+             for key, device_vector_sub_entry in sub_entry.items():
+                 sub_device[key] = device_vector_sub_entry.device
+                 if device_vector_sub_entry.pvi_pv:
+                     _set_device_attributes(device_vector_sub_entry)
+                 # Set the device vector entry to have the device vector as a parent
+                 device_vector_sub_entry.device.parent = sub_device  # type: ignore
+         else:
+             sub_device = sub_entry.device  # type: ignore
+             if sub_entry.pvi_pv:
+                 _set_device_attributes(sub_entry)
+
+         sub_device.parent = entry.device
+         setattr(entry.device, sub_name, sub_device)
+
+
+ async def fill_pvi_entries(
+     device: Device, root_pv: str, timeout=DEFAULT_TIMEOUT, sim=False
+ ):
+     """
+     Fills a ``device`` with signals from a the ``root_pvi:PVI`` table.
+
+     If the device names match with parent devices of ``device`` then types are used.
+     """
+     if sim:
+         # set up sim signals for the common annotations
+         _sim_common_blocks(device)
+     else:
+         # check the pvi table for devices and fill the device with them
+         root_entry = PVIEntry(
+             pvi_pv=root_pv,
+             device=device,
+             common_device_type=type(device),
+             sub_entries={},
+         )
+         await _get_pvi_entries(root_entry, timeout=timeout)
+         _set_device_attributes(root_entry)
+
+     # We call set name now the parent field has been set in all of the
+     # introspect-initialized devices. This will recursively set the names.
+     device.set_name(device.name)
+
+
+ def create_children_from_annotations(
+     device: Device, included_optional_fields: Tuple[str, ...] = ()
+ ):
+     """For intializing blocks at __init__ of ``device``."""
+     for name, device_type in get_type_hints(type(device)).items():
+         if name in ("_name", "parent"):
+             continue
+         device_type, is_optional = _strip_union(device_type)
+         if is_optional and name not in included_optional_fields:
+             continue
+         is_device_vector, device_type = _strip_device_vector(device_type)
+         if (
+             is_device_vector
+             or ((origin := get_origin(device_type)) and issubclass(origin, Signal))
+             or (isclass(device_type) and issubclass(device_type, Signal))
+         ):
+             continue
+
+         sub_device = device_type()
+         setattr(device, name, sub_device)
+         create_children_from_annotations(sub_device)
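Taken together, create_children_from_annotations and fill_pvi_entries give a two-step recipe: build the statically annotated child blocks at __init__, then let the PVI table (or sim backends) supply the signals at connect. Below is a minimal sketch of that pattern; MyBlock, MyPVIDevice and the PV prefix are hypothetical, and the structure simply mirrors the HDFPanda.connect() shown further down this diff.

    from ophyd_async.core import DEFAULT_TIMEOUT, Device, DeviceVector, SignalRW
    from ophyd_async.epics.pvi import create_children_from_annotations, fill_pvi_entries

    class MyBlock(Device):
        # signal annotations are matched by name against the PVI table
        value: SignalRW[float]

    class MyPVIDevice(Device):
        # numbered blocks (block1, block2, ...) are gathered into a DeviceVector
        block: DeviceVector[MyBlock]

        def __init__(self, prefix: str, name: str = ""):
            self._prefix = prefix
            # eagerly create the plain (non-signal, non-vector) child blocks
            create_children_from_annotations(self)
            super().__init__(name=name)

        async def connect(self, sim: bool = False, timeout: float = DEFAULT_TIMEOUT) -> None:
            # introspect "<prefix>PVI", or build SimSignalBackends when sim=True
            await fill_pvi_entries(self, self._prefix + "PVI", timeout=timeout, sim=sim)
            await super().connect(sim=sim, timeout=timeout)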
ophyd_async/epics/signal/signal.py
@@ -41,7 +41,7 @@ def _make_backend(
 
 
  def epics_signal_rw(
-     datatype: Type[T], read_pv: str, write_pv: Optional[str] = None
+     datatype: Type[T], read_pv: str, write_pv: Optional[str] = None, name: str = ""
  ) -> SignalRW[T]:
      """Create a `SignalRW` backed by 1 or 2 EPICS PVs
 
@@ -55,24 +55,41 @@ def epics_signal_rw(
          If given, use this PV to write to, otherwise use read_pv
      """
      backend = _make_backend(datatype, read_pv, write_pv or read_pv)
-     return SignalRW(backend)
+     return SignalRW(backend, name=name)
 
 
- def epics_signal_r(datatype: Type[T], read_pv: str) -> SignalR[T]:
-     """Create a `SignalR` backed by 1 EPICS PV
+ def epics_signal_rw_rbv(
+     datatype: Type[T], write_pv: str, read_suffix: str = "_RBV", name: str = ""
+ ) -> SignalRW[T]:
+     """Create a `SignalRW` backed by 1 or 2 EPICS PVs, with a suffix on the readback pv
 
      Parameters
      ----------
      datatype:
          Check that the PV is of this type
+     write_pv:
+         The PV to write to
+     read_suffix:
+         Append this suffix to the write pv to create the readback pv
+     """
+     return epics_signal_rw(datatype, f"{write_pv}{read_suffix}", write_pv, name)
+
+
+ def epics_signal_r(datatype: Type[T], read_pv: str, name: str = "") -> SignalR[T]:
+     """Create a `SignalR` backed by 1 EPICS PV
+
+     Parameters
+     ----------
+     datatype
+         Check that the PV is of this type
      read_pv:
          The PV to read and monitor
      """
      backend = _make_backend(datatype, read_pv, read_pv)
-     return SignalR(backend)
+     return SignalR(backend, name=name)
 
 
- def epics_signal_w(datatype: Type[T], write_pv: str) -> SignalW[T]:
+ def epics_signal_w(datatype: Type[T], write_pv: str, name: str = "") -> SignalW[T]:
      """Create a `SignalW` backed by 1 EPICS PVs
 
      Parameters
@@ -83,10 +100,10 @@ def epics_signal_w(datatype: Type[T], write_pv: str) -> SignalW[T]:
          The PV to write to
      """
      backend = _make_backend(datatype, write_pv, write_pv)
-     return SignalW(backend)
+     return SignalW(backend, name=name)
 
 
- def epics_signal_x(write_pv: str) -> SignalX:
+ def epics_signal_x(write_pv: str, name: str = "") -> SignalX:
      """Create a `SignalX` backed by 1 EPICS PVs
 
      Parameters
@@ -95,4 +112,4 @@ def epics_signal_x(write_pv: str) -> SignalX:
          The PV to write its initial value to on trigger
      """
      backend: SignalBackend = _make_backend(None, write_pv, write_pv)
-     return SignalX(backend)
+     return SignalX(backend, name=name)
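All of the epics_signal_* factories now accept an optional name, and the new epics_signal_rw_rbv builds a setpoint/readback pair from a single write PV plus a suffix. A short usage sketch with made-up PV names (the factory names and signatures are exactly those in the hunk above; only the PVs are invented):

    from ophyd_async.epics.signal.signal import (
        epics_signal_r,
        epics_signal_rw,
        epics_signal_rw_rbv,
    )

    # PV names below are placeholders
    exposure = epics_signal_rw(float, "SIM:EXPOSURE", name="exposure")
    # readback PV defaults to the write PV plus "_RBV", i.e. "SIM:GAIN_RBV" here
    gain = epics_signal_rw_rbv(float, "SIM:GAIN", name="gain")
    temperature = epics_signal_r(float, "SIM:TEMPERATURE", name="temperature")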
ophyd_async/log.py ADDED
@@ -0,0 +1,130 @@
+ import logging
+ import sys
+
+ import colorlog
+
+ __all__ = (
+     "config_ophyd_async_logging",
+     "logger",
+     "set_handler",
+ )
+
+ DEFAULT_FORMAT = (
+     "%(log_color)s[%(levelname)1.1s %(asctime)s.%(msecs)03d "
+     "%(module)s:%(lineno)d] %(message)s"
+ )
+
+ DEFAULT_DATE_FORMAT = "%y%m%d %H:%M:%S"
+
+ DEFAULT_LOG_COLORS = {
+     "DEBUG": "cyan",
+     "INFO": "green",
+     "WARNING": "yellow",
+     "ERROR": "red",
+     "CRITICAL": "red,bg_white",
+ }
+
+
+ class ColoredFormatterWithDeviceName(colorlog.ColoredFormatter):
+     def format(self, record):
+         message = super().format(record)
+         if hasattr(record, "ophyd_async_device_name"):
+             message = f"[{record.ophyd_async_device_name}]{message}"
+         return message
+
+
+ def _validate_level(level) -> int:
+     """
+     Return an int for level comparison
+     """
+     if isinstance(level, int):
+         levelno = level
+     elif isinstance(level, str):
+         levelno = logging.getLevelName(level)
+
+     if isinstance(levelno, int):
+         return levelno
+     else:
+         raise ValueError(
+             "Your level is illegal, please use "
+             "'CRITICAL', 'FATAL', 'ERROR', 'WARNING', 'INFO', or 'DEBUG'."
+         )
+
+
+ logger = logging.getLogger("ophyd_async")
+
+ current_handler = None  # overwritten below
+
+
+ def config_ophyd_async_logging(
+     file=sys.stdout,
+     fmt=DEFAULT_FORMAT,
+     datefmt=DEFAULT_DATE_FORMAT,
+     color=True,
+     level="WARNING",
+ ):
+     """
+     Set a new handler on the ``logging.getLogger('ophyd_async')`` logger.
+     If this is called more than once, the handler from the previous invocation
+     is removed (if still present) and replaced.
+
+     Parameters
+     ----------
+     file : object with ``write`` method or filename string
+         Default is ``sys.stdout``.
+     fmt : Overall logging format
+     datefmt : string
+         Date format. Default is ``'%H:%M:%S'``.
+     color : boolean
+         Use ANSI color codes. True by default.
+     level : str or int
+         Python logging level, given as string or corresponding integer.
+         Default is 'WARNING'.
+     Returns
+     -------
+     handler : logging.Handler
+         The handler, which has already been added to the 'ophyd_async' logger.
+     Examples
+     --------
+     Log to a file.
+     config_ophyd_async_logging(file='/tmp/what_is_happening.txt')
+     Include the date along with the time. (The log messages will always include
+     microseconds, which are configured separately, not as part of 'datefmt'.)
+     config_ophyd_async_logging(datefmt="%Y-%m-%d %H:%M:%S")
+     Turn off ANSI color codes.
+     config_ophyd_async_logging(color=False)
+     Increase verbosity: show level DEBUG or higher.
+     config_ophyd_async_logging(level='DEBUG')
+     """
+     global current_handler
+
+     if isinstance(file, str):
+         handler = logging.FileHandler(file)
+         formatter = ColoredFormatterWithDeviceName(
+             fmt=fmt, datefmt=datefmt, no_color=True
+         )
+     else:
+         handler = colorlog.StreamHandler(file)
+         formatter = ColoredFormatterWithDeviceName(
+             fmt=fmt, datefmt=datefmt, log_colors=DEFAULT_LOG_COLORS, no_color=color
+         )
+
+     levelno = _validate_level(level)
+     handler.setFormatter(formatter)
+     handler.setLevel(levelno)
+
+     if current_handler in logger.handlers:
+         logger.removeHandler(current_handler)
+     logger.addHandler(handler)
+
+     current_handler = handler
+
+     if logger.getEffectiveLevel() > levelno:
+         logger.setLevel(levelno)
+     try:
+         return handler
+     finally:
+         handler.close()
+
+
+ set_handler = config_ophyd_async_logging  # for back-compat
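The new module installs a configurable handler on the "ophyd_async" logger, replacing any handler from a previous call, with set_handler kept as an alias. A minimal sketch of the call styles documented in the docstring above:

    from ophyd_async.log import config_ophyd_async_logging, logger

    # stream handler on stdout, DEBUG and above, coloured output
    config_ophyd_async_logging(level="DEBUG")
    logger.debug("handler installed")

    # or write to a file instead (colour codes are disabled for file output)
    config_ophyd_async_logging(file="/tmp/ophyd_async.log")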
ophyd_async/panda/__init__.py
@@ -1,19 +1,27 @@
- from .panda import PandA, PcapBlock, PulseBlock, PVIEntry, SeqBlock, SeqTable
- from .panda_controller import PandaPcapController
- from .table import (
+ from ._common_blocks import (
+     CommonPandaBlocks,
+     DataBlock,
+     PcapBlock,
+     PulseBlock,
+     SeqBlock,
+     TimeUnits,
+ )
+ from ._hdf_panda import HDFPanda
+ from ._panda_controller import PandaPcapController
+ from ._table import (
      SeqTable,
      SeqTableRow,
      SeqTrigger,
      seq_table_from_arrays,
      seq_table_from_rows,
  )
- from .utils import phase_sorter
+ from ._utils import phase_sorter
 
  __all__ = [
-     "PandA",
+     "CommonPandaBlocks",
+     "HDFPanda",
      "PcapBlock",
      "PulseBlock",
-     "PVIEntry",
      "seq_table_from_arrays",
      "seq_table_from_rows",
      "SeqBlock",
@@ -22,4 +30,7 @@ __all__ = [
      "SeqTrigger",
      "phase_sorter",
      "PandaPcapController",
+     "TimeUnits",
+     "DataBlock",
+     "CommonPandABlocks",
  ]
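The public surface of ophyd_async.panda changes with this release: PandA and PVIEntry are no longer exported, while HDFPanda, CommonPandaBlocks, DataBlock and TimeUnits are added. In practice the import change looks roughly like this (a sketch, not an exhaustive migration guide):

    # 0.3a1
    from ophyd_async.panda import PandA

    # 0.3a3
    from ophyd_async.panda import CommonPandaBlocks, HDFPanda, PandaPcapController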
ophyd_async/panda/_common_blocks.py ADDED
@@ -0,0 +1,49 @@
+ from __future__ import annotations
+
+ from enum import Enum
+
+ from ophyd_async.core import Device, DeviceVector, SignalR, SignalRW
+ from ophyd_async.panda._table import SeqTable
+
+
+ class DataBlock(Device):
+     # In future we may decide to make hdf_* optional
+     hdf_directory: SignalRW[str]
+     hdf_file_name: SignalRW[str]
+     num_capture: SignalRW[int]
+     num_captured: SignalR[int]
+     capture: SignalRW[bool]
+     flush_period: SignalRW[float]
+
+
+ class PulseBlock(Device):
+     delay: SignalRW[float]
+     width: SignalRW[float]
+
+
+ class TimeUnits(str, Enum):
+     min = "min"
+     s = "s"
+     ms = "ms"
+     us = "us"
+
+
+ class SeqBlock(Device):
+     table: SignalRW[SeqTable]
+     active: SignalRW[bool]
+     repeats: SignalRW[int]
+     prescale: SignalRW[float]
+     prescale_units: SignalRW[TimeUnits]
+     enable: SignalRW[str]
+
+
+ class PcapBlock(Device):
+     active: SignalR[bool]
+     arm: SignalRW[bool]
+
+
+ class CommonPandaBlocks(Device):
+     pulse: DeviceVector[PulseBlock]
+     seq: DeviceVector[SeqBlock]
+     pcap: PcapBlock
+     data: DataBlock
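These blocks are purely declarative: the SignalR/SignalRW annotations are what fill_pvi_entries matches against the PandA's PVI table at connect time, and the DeviceVector annotations collect the numbered blocks (seq1, seq2, ...). A sketch of how the resulting structure reads once connected; panda is assumed to be a connected HDFPanda (defined in the next file):

    async def report(panda):
        # panda: a connected HDFPanda
        captured = await panda.data.num_captured.get_value()  # SignalR[int]
        await panda.data.flush_period.set(1.0)                # SignalRW[float]
        seq1_active = await panda.seq[1].active.get_value()   # DeviceVector[SeqBlock]
        return captured, seq1_active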
ophyd_async/panda/_hdf_panda.py ADDED
@@ -0,0 +1,48 @@
+ from __future__ import annotations
+
+ from typing import Sequence
+
+ from ophyd_async.core import (
+     DEFAULT_TIMEOUT,
+     DirectoryProvider,
+     SignalR,
+     StandardDetector,
+ )
+ from ophyd_async.epics.pvi import create_children_from_annotations, fill_pvi_entries
+
+ from ._common_blocks import CommonPandaBlocks
+ from ._panda_controller import PandaPcapController
+ from .writers._hdf_writer import PandaHDFWriter
+
+
+ class HDFPanda(CommonPandaBlocks, StandardDetector):
+     def __init__(
+         self,
+         prefix: str,
+         directory_provider: DirectoryProvider,
+         config_sigs: Sequence[SignalR] = (),
+         name: str = "",
+     ):
+         self._prefix = prefix
+
+         create_children_from_annotations(self)
+         controller = PandaPcapController(pcap=self.pcap)
+         writer = PandaHDFWriter(
+             prefix=prefix,
+             directory_provider=directory_provider,
+             name_provider=lambda: name,
+             panda_device=self,
+         )
+         super().__init__(
+             controller=controller,
+             writer=writer,
+             config_sigs=config_sigs,
+             name=name,
+             writer_timeout=DEFAULT_TIMEOUT,
+         )
+
+     async def connect(
+         self, sim: bool = False, timeout: float = DEFAULT_TIMEOUT
+     ) -> None:
+         await fill_pvi_entries(self, self._prefix + "PVI", timeout=timeout, sim=sim)
+         await super().connect(sim=sim, timeout=timeout)
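HDFPanda composes the declarative blocks with a PandaPcapController and a PandaHDFWriter, and defers signal creation to fill_pvi_entries at connect time. A hedged construction sketch; it assumes StaticDirectoryProvider and DeviceCollector are available from ophyd_async.core with the signatures used below, and the PV prefix is invented:

    from ophyd_async.core import DeviceCollector, StaticDirectoryProvider
    from ophyd_async.panda import HDFPanda

    # assumed signature: StaticDirectoryProvider(directory_path, filename_prefix)
    provider = StaticDirectoryProvider("/data/panda", "panda_hdf")

    async def make_panda():
        # sim=True routes fill_pvi_entries to sim backends instead of PVI introspection
        async with DeviceCollector(sim=True):
            panda = HDFPanda("PANDA1:", directory_provider=provider, name="panda")
        return panda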
ophyd_async/panda/{panda_controller.py → _panda_controller.py}
@@ -7,15 +7,11 @@ from ophyd_async.core import (
      DetectorTrigger,
      wait_for_value,
  )
-
- from .panda import PcapBlock
+ from ophyd_async.panda import PcapBlock
 
 
  class PandaPcapController(DetectorControl):
-     def __init__(
-         self,
-         pcap: PcapBlock,
-     ) -> None:
+     def __init__(self, pcap: PcapBlock) -> None:
          self.pcap = pcap
 
      def get_deadtime(self, exposure: float) -> float:
@@ -35,7 +31,7 @@ class PandaPcapController(DetectorControl):
          await wait_for_value(self.pcap.active, True, timeout=1)
          return AsyncStatus(wait_for_value(self.pcap.active, False, timeout=None))
 
-     async def disarm(self):
+     async def disarm(self) -> AsyncStatus:
          await asyncio.gather(self.pcap.arm.set(False))
          await wait_for_value(self.pcap.active, False, timeout=1)
          return AsyncStatus(wait_for_value(self.pcap.active, False, timeout=None))
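The controller changes themselves are small (a compacted constructor, an absolute import, and a return annotation on disarm), but the hunk shows the intended call pattern. A short sketch, assuming panda is a connected HDFPanda as above:

    from ophyd_async.panda import PandaPcapController

    async def stop_capture(panda):
        controller = PandaPcapController(pcap=panda.pcap)
        status = await controller.disarm()  # sets pcap.arm False, waits for active to clear
        await status                        # resolves once pcap.active reads False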