ophyd-async 0.3.4a1__py3-none-any.whl → 0.4.0__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
- ophyd_async/_version.py +2 -2
- ophyd_async/core/__init__.py +20 -8
- ophyd_async/core/_providers.py +186 -24
- ophyd_async/core/detector.py +14 -15
- ophyd_async/core/device.py +18 -6
- ophyd_async/core/signal.py +32 -8
- ophyd_async/core/soft_signal_backend.py +21 -6
- ophyd_async/epics/_backend/_aioca.py +3 -0
- ophyd_async/epics/_backend/_p4p.py +50 -2
- ophyd_async/epics/_backend/common.py +3 -1
- ophyd_async/epics/areadetector/aravis.py +3 -3
- ophyd_async/epics/areadetector/controllers/aravis_controller.py +1 -0
- ophyd_async/epics/areadetector/drivers/ad_base.py +3 -2
- ophyd_async/epics/areadetector/kinetix.py +3 -3
- ophyd_async/epics/areadetector/pilatus.py +3 -3
- ophyd_async/epics/areadetector/vimba.py +3 -3
- ophyd_async/epics/areadetector/writers/__init__.py +2 -2
- ophyd_async/epics/areadetector/writers/general_hdffile.py +97 -0
- ophyd_async/epics/areadetector/writers/hdf_writer.py +27 -10
- ophyd_async/epics/areadetector/writers/nd_file_hdf.py +3 -0
- ophyd_async/epics/areadetector/writers/nd_plugin.py +30 -0
- ophyd_async/epics/demo/demo_ad_sim_detector.py +3 -3
- ophyd_async/epics/motion/motor.py +132 -2
- ophyd_async/panda/__init__.py +15 -1
- ophyd_async/panda/_common_blocks.py +22 -1
- ophyd_async/panda/_hdf_panda.py +5 -3
- ophyd_async/panda/_table.py +20 -18
- ophyd_async/panda/_trigger.py +62 -7
- ophyd_async/panda/writers/_hdf_writer.py +17 -8
- ophyd_async/plan_stubs/ensure_connected.py +7 -2
- ophyd_async/plan_stubs/fly.py +58 -7
- ophyd_async/sim/pattern_generator.py +71 -182
- ophyd_async/sim/sim_pattern_detector_control.py +3 -3
- ophyd_async/sim/sim_pattern_detector_writer.py +9 -5
- ophyd_async/sim/sim_pattern_generator.py +12 -5
- {ophyd_async-0.3.4a1.dist-info → ophyd_async-0.4.0.dist-info}/METADATA +7 -2
- {ophyd_async-0.3.4a1.dist-info → ophyd_async-0.4.0.dist-info}/RECORD +41 -43
- {ophyd_async-0.3.4a1.dist-info → ophyd_async-0.4.0.dist-info}/WHEEL +1 -1
- ophyd_async/epics/areadetector/writers/_hdfdataset.py +0 -10
- ophyd_async/epics/areadetector/writers/_hdffile.py +0 -54
- ophyd_async/panda/writers/_panda_hdf_file.py +0 -54
- {ophyd_async-0.3.4a1.dist-info → ophyd_async-0.4.0.dist-info}/LICENSE +0 -0
- {ophyd_async-0.3.4a1.dist-info → ophyd_async-0.4.0.dist-info}/entry_points.txt +0 -0
- {ophyd_async-0.3.4a1.dist-info → ophyd_async-0.4.0.dist-info}/top_level.txt +0 -0
ophyd_async/_version.py
CHANGED
ophyd_async/core/__init__.py
CHANGED
@@ -1,9 +1,15 @@
 from ._providers import (
-    …
-    …
+    AutoIncrementFilenameProvider,
+    AutoIncrementingPathProvider,
+    FilenameProvider,
     NameProvider,
+    PathInfo,
+    PathProvider,
     ShapeProvider,
-    …
+    StaticFilenameProvider,
+    StaticPathProvider,
+    UUIDFilenameProvider,
+    YMDPathProvider,
 )
 from .async_status import AsyncStatus, WatchableAsyncStatus
 from .detector import (
@@ -69,6 +75,8 @@ from .utils import (

 __all__ = [
     "AsyncStatus",
+    "AutoIncrementFilenameProvider",
+    "AutoIncrementingPathProvider",
     "CalculatableTimeout",
     "CalculateTimeout",
     "Callback",
@@ -80,16 +88,16 @@ __all__ = [
     "Device",
     "DeviceCollector",
     "DeviceVector",
-    "…
-    "DirectoryProvider",
+    "FilenameProvider",
     "HardwareTriggeredFlyable",
     "HintedSignal",
     "MockSignalBackend",
     "NameProvider",
     "NotConnected",
+    "PathInfo",
+    "PathProvider",
     "ReadingValueCallback",
     "RuntimeSubsetEnum",
-    "SubsetEnum",
     "ShapeProvider",
     "Signal",
     "SignalBackend",
@@ -100,14 +108,18 @@ __all__ = [
     "SoftSignalBackend",
     "StandardDetector",
     "StandardReadable",
-    "…
+    "StaticFilenameProvider",
+    "StaticPathProvider",
+    "SubsetEnum",
     "T",
     "TriggerInfo",
     "TriggerLogic",
+    "UUIDFilenameProvider",
     "WatchableAsyncStatus",
+    "YMDPathProvider",
+    # Lower-cased imports
     "assert_configuration",
     "assert_emitted",
-    "assert_mock_put_called_with",
     "assert_reading",
     "assert_value",
     "callback_on_mock_put",
ophyd_async/core/_providers.py
CHANGED
@@ -1,11 +1,15 @@
+import os
+import uuid
 from abc import abstractmethod
+from collections.abc import Callable
 from dataclasses import dataclass
+from datetime import date
 from pathlib import Path
-from typing import Optional, Protocol
+from typing import List, Optional, Protocol


 @dataclass
-class …
+class PathInfo:
     """
     Information about where and how to write a file.

@@ -17,43 +21,201 @@ class DirectoryInfo:

     :param root: Path of a root directory, relevant only for the file writer
     :param resource_dir: Directory into which files should be written, relative to root
-    :param …
-    :param …
+    :param filename: Base filename to use generated by FilenameProvider, w/o extension
+    :param create_dir_depth: Optional depth of directories to create if they do not
+        exist
     """

     root: Path
     resource_dir: Path
-    …
-    …
+    filename: str
+    create_dir_depth: int = 0


-class …
+class FilenameProvider(Protocol):
     @abstractmethod
-    def __call__(self) -> …
+    def __call__(self) -> str:
+        """Get a filename to use for output data, w/o extension"""
+
+
+class PathProvider(Protocol):
+    _filename_provider: FilenameProvider
+
+    @abstractmethod
+    def __call__(self, device_name: Optional[str] = None) -> PathInfo:
         """Get the current directory to write files into"""


-class …
+class StaticFilenameProvider(FilenameProvider):
+    def __init__(self, filename: str):
+        self._static_filename = filename
+
+    def __call__(self) -> str:
+        return self._static_filename
+
+
+class UUIDFilenameProvider(FilenameProvider):
     def __init__(
         self,
-        …
-        …
-        …
-        …
+        uuid_call_func: Callable = uuid.uuid4,
+        uuid_call_args: Optional[List] = None,
+    ):
+        self._uuid_call_func = uuid_call_func
+        self._uuid_call_args = uuid_call_args or []
+
+    def __call__(self) -> str:
+        if (
+            self._uuid_call_func in [uuid.uuid3, uuid.uuid5]
+            and len(self._uuid_call_args) < 2
+        ):
+            raise ValueError(
+                f"To use {self._uuid_call_func} to generate UUID filenames,"
+                " UUID namespace and name must be passed as args!"
+            )
+
+        uuid_str = self._uuid_call_func(*self._uuid_call_args)
+        return f"{uuid_str}"
+
+
+class AutoIncrementFilenameProvider(FilenameProvider):
+    def __init__(
+        self,
+        base_filename: str = "",
+        max_digits: int = 5,
+        starting_value: int = 0,
+        increment: int = 1,
+        inc_delimeter: str = "_",
+    ):
+        self._base_filename = base_filename
+        self._max_digits = max_digits
+        self._current_value = starting_value
+        self._increment = increment
+        self._inc_delimeter = inc_delimeter
+
+    def __call__(self):
+        if len(str(self._current_value)) > self._max_digits:
+            raise ValueError(
+                f"Auto incrementing filename counter \
+                  exceeded maximum of {self._max_digits} digits!"
+            )
+
+        padded_counter = f"{self._current_value:0{self._max_digits}}"
+
+        filename = f"{self._base_filename}{self._inc_delimeter}{padded_counter}"
+
+        self._current_value += self._increment
+        return filename
+
+
+class StaticPathProvider(PathProvider):
+    def __init__(
+        self,
+        filename_provider: FilenameProvider,
+        directory_path: Path,
+        resource_dir: Path = Path("."),
+        create_dir_depth: int = 0,
     ) -> None:
-        …
-        …
-        …
-        …
-        …
-        …
+        self._filename_provider = filename_provider
+        self._directory_path = directory_path
+        self._resource_dir = resource_dir
+        self._create_dir_depth = create_dir_depth
+
+    def __call__(self, device_name: Optional[str] = None) -> PathInfo:
+        filename = self._filename_provider()
+
+        return PathInfo(
+            root=self._directory_path,
+            resource_dir=self._resource_dir,
+            filename=filename,
+            create_dir_depth=self._create_dir_depth,
+        )
+
+
+class AutoIncrementingPathProvider(PathProvider):
+    def __init__(
+        self,
+        filename_provider: FilenameProvider,
+        directory_path: Path,
+        create_dir_depth: int = 0,
+        max_digits: int = 5,
+        starting_value: int = 0,
+        num_calls_per_inc: int = 1,
+        increment: int = 1,
+        inc_delimeter: str = "_",
+        base_name: str = None,
+    ) -> None:
+        self._filename_provider = filename_provider
+        self._directory_path = directory_path
+        self._create_dir_depth = create_dir_depth
+        self._base_name = base_name
+        self._starting_value = starting_value
+        self._current_value = starting_value
+        self._num_calls_per_inc = num_calls_per_inc
+        self._inc_counter = 0
+        self._max_digits = max_digits
+        self._increment = increment
+        self._inc_delimeter = inc_delimeter
+
+    def __call__(self, device_name: Optional[str] = None) -> PathInfo:
+        filename = self._filename_provider()
+
+        padded_counter = f"{self._current_value:0{self._max_digits}}"
+
+        resource_dir = str(padded_counter)
+        if self._base_name is not None:
+            resource_dir = f"{self._base_name}{self._inc_delimeter}{padded_counter}"
+        elif device_name is not None:
+            resource_dir = f"{device_name}{self._inc_delimeter}{padded_counter}"
+
+        self._inc_counter += 1
+        if self._inc_counter == self._num_calls_per_inc:
+            self._inc_counter = 0
+            self._current_value += self._increment
+
+        return PathInfo(
+            root=self._directory_path,
             resource_dir=resource_dir,
-            …
-            …
+            filename=filename,
+            create_dir_depth=self._create_dir_depth,
         )

-…
-…
+
+class YMDPathProvider(PathProvider):
+    def __init__(
+        self,
+        filename_provider: FilenameProvider,
+        directory_path: Path,
+        create_dir_depth: int = -3,  # Default to -3 to create YMD dirs
+        device_name_as_base_dir: bool = False,
+    ) -> None:
+        self._filename_provider = filename_provider
+        self._directory_path = Path(directory_path)
+        self._create_dir_depth = create_dir_depth
+        self._device_name_as_base_dir = device_name_as_base_dir
+
+    def __call__(self, device_name: Optional[str] = None) -> PathInfo:
+        sep = os.path.sep
+        current_date = date.today().strftime(f"%Y{sep}%m{sep}%d")
+        if device_name is None:
+            resource_dir = current_date
+        elif self._device_name_as_base_dir:
+            resource_dir = os.path.join(
+                current_date,
+                device_name,
+            )
+        else:
+            resource_dir = os.path.join(
+                device_name,
+                current_date,
+            )
+
+        filename = self._filename_provider()
+        return PathInfo(
+            root=self._directory_path,
+            resource_dir=resource_dir,
+            filename=filename,
+            create_dir_depth=self._create_dir_depth,
+        )


 class NameProvider(Protocol):
@@ -64,5 +226,5 @@ class NameProvider(Protocol):

 class ShapeProvider(Protocol):
     @abstractmethod
-    async def __call__(self) -> …
+    async def __call__(self) -> tuple:
         """Get the shape of the data collection"""
ophyd_async/core/detector.py
CHANGED
@@ -3,7 +3,6 @@
 import asyncio
 import time
 from abc import ABC, abstractmethod
-from dataclasses import dataclass
 from enum import Enum
 from typing import (
     AsyncGenerator,
@@ -28,6 +27,7 @@ from bluesky.protocols import (
     Triggerable,
     WritesStreamAssets,
 )
+from pydantic import BaseModel, Field

 from ophyd_async.protocols import AsyncConfigurable, AsyncReadable

@@ -51,20 +51,19 @@ class DetectorTrigger(str, Enum):
     variable_gate = "variable_gate"


-…
-class TriggerInfo:
+class TriggerInfo(BaseModel):
     """Minimal set of information required to setup triggering on a detector"""

-    #: Number of triggers that will be sent
-    …
+    #: Number of triggers that will be sent, 0 means infinite
+    number: int = Field(gt=0)
     #: Sort of triggers that will be sent
-    trigger: DetectorTrigger
+    trigger: DetectorTrigger = Field()
     #: What is the minimum deadtime between triggers
-    deadtime: float
+    deadtime: float = Field(ge=0)
     #: What is the maximum high time of the triggers
-    livetime: float
+    livetime: float = Field(ge=0)
     #: What is the maximum timeout on waiting for a frame
-    frame_timeout: float | None = None
+    frame_timeout: float | None = Field(None, gt=0)


 class DetectorControl(ABC):
@@ -243,12 +242,12 @@ class StandardDetector(
     async def trigger(self) -> None:
         # set default trigger_info
         self._trigger_info = TriggerInfo(
-            …
+            number=1, trigger=DetectorTrigger.internal, deadtime=0.0, livetime=0.0
         )
         # Arm the detector and wait for it to finish.
         indices_written = await self.writer.get_indices_written()
         written_status = await self.controller.arm(
-            num=self._trigger_info.…
+            num=self._trigger_info.number,
             trigger=self._trigger_info.trigger,
         )
         await written_status
@@ -285,7 +284,7 @@ class StandardDetector(
         assert type(value) is TriggerInfo
         self._trigger_info = value
         self._initial_frame = await self.writer.get_indices_written()
-        self._last_frame = self._initial_frame + self._trigger_info.…
+        self._last_frame = self._initial_frame + self._trigger_info.number

         required = self.controller.get_deadtime(self._trigger_info.livetime)
         assert required <= self._trigger_info.deadtime, (
@@ -293,7 +292,7 @@ class StandardDetector(
             f"but trigger logic provides only {self._trigger_info.deadtime}s"
         )
         self._arm_status = await self.controller.arm(
-            num=self._trigger_info.…
+            num=self._trigger_info.number,
             trigger=self._trigger_info.trigger,
             exposure=self._trigger_info.livetime,
         )
@@ -320,12 +319,12 @@ class StandardDetector(
                 name=self.name,
                 current=index,
                 initial=self._initial_frame,
-                target=self._trigger_info.…
+                target=self._trigger_info.number,
                 unit="",
                 precision=0,
                 time_elapsed=time.monotonic() - self._fly_start,
             )
-            if index >= self._trigger_info.…
+            if index >= self._trigger_info.number:
                 break

     async def describe_collect(self) -> Dict[str, DataKey]:
ophyd_async/core/device.py
CHANGED
@@ -33,7 +33,10 @@ class Device(HasName):
     parent: Optional["Device"] = None
     # None if connect hasn't started, a Task if it has
     _connect_task: Optional[asyncio.Task] = None
-    …
+
+    # Used to check if the previous connect was mocked,
+    # if the next mock value differs then we fail
+    _previous_connect_was_mock = None

     def __init__(self, name: str = "") -> None:
         self.set_name(name)
@@ -90,11 +93,21 @@
         timeout:
             Time to wait before failing with a TimeoutError.
         """
+
+        if (
+            self._previous_connect_was_mock is not None
+            and self._previous_connect_was_mock != mock
+        ):
+            raise RuntimeError(
+                f"`connect(mock={mock})` called on a `Device` where the previous "
+                f"connect was `mock={self._previous_connect_was_mock}`. Changing mock "
+                "value between connects is not permitted."
+            )
+        self._previous_connect_was_mock = mock
+
         # If previous connect with same args has started and not errored, can use it
-        can_use_previous_connect = (
-            self._connect_task
-            and not (self._connect_task.done() and self._connect_task.exception())
-            and self._connect_mock_arg == mock
+        can_use_previous_connect = self._connect_task and not (
+            self._connect_task.done() and self._connect_task.exception()
         )
         if force_reconnect or not can_use_previous_connect:
             # Kick off a connection
@@ -105,7 +118,6 @@
                 for name, child_device in self.children()
             }
             self._connect_task = asyncio.create_task(wait_for_connection(**coros))
-            self._connect_mock_arg = mock

         assert self._connect_task, "Connect task not created, this shouldn't happen"
         # Wait for it to complete
ophyd_async/core/signal.py
CHANGED
@@ -62,7 +62,7 @@ class Signal(Device, Generic[T]):
         name: str = "",
     ) -> None:
         self._timeout = timeout
-        self.…
+        self._backend = backend
         super().__init__(name)

     async def connect(
@@ -73,19 +73,43 @@
         backend: Optional[SignalBackend[T]] = None,
     ):
         if backend:
-            if self.…
-                raise ValueError(
-                    …
-                )
+            if self._backend and backend is not self._backend:
+                raise ValueError("Backend at connection different from previous one.")
+
             self._backend = backend
-        if …
+        if (
+            self._previous_connect_was_mock is not None
+            and self._previous_connect_was_mock != mock
+        ):
+            raise RuntimeError(
+                f"`connect(mock={mock})` called on a `Signal` where the previous "
+                f"connect was `mock={self._previous_connect_was_mock}`. Changing mock "
+                "value between connects is not permitted."
+            )
+        self._previous_connect_was_mock = mock
+
+        if mock and not issubclass(type(self._backend), MockSignalBackend):
             # Using a soft backend, look to the initial value
             self._backend = MockSignalBackend(initial_backend=self._backend)

         if self._backend is None:
             raise RuntimeError("`connect` called on signal without backend")
-        …
-        …
+
+        can_use_previous_connection: bool = self._connect_task is not None and not (
+            self._connect_task.done() and self._connect_task.exception()
+        )
+
+        if force_reconnect or not can_use_previous_connection:
+            self.log.debug(f"Connecting to {self.source}")
+            self._connect_task = asyncio.create_task(
+                self._backend.connect(timeout=timeout)
+            )
+        else:
+            self.log.debug(f"Reusing previous connection to {self.source}")
+        assert (
+            self._connect_task
+        ), "this assert is for type analysis and will never fail"
+        await self._connect_task

     @property
     def source(self) -> str:
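Signal.connect now carries the same mock guard and additionally caches its backend connect task, reusing it when a previous connect succeeded or is still in flight. A sketch with a soft signal, assuming the soft_signal_rw helper takes (datatype, initial_value, name):

import asyncio

from ophyd_async.core import soft_signal_rw


async def main():
    sig = soft_signal_rw(float, 0.0, name="sig")
    await sig.connect()            # starts the backend connect task
    await sig.connect()            # reuses the previous, still-healthy task
    try:
        await sig.connect(mock=True)
    except RuntimeError as err:
        print(err)                 # mock value changed between connects


asyncio.run(main())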
ophyd_async/core/soft_signal_backend.py
CHANGED
@@ -10,7 +10,6 @@ from typing import (
     Optional,
     Tuple,
     Type,
-    TypedDict,
     Union,
     cast,
     get_origin,
@@ -18,6 +17,7 @@ from typing import (

 import numpy as np
 from bluesky.protocols import DataKey, Dtype, Reading
+from typing_extensions import TypedDict

 from .signal_backend import RuntimeSubsetEnum, SignalBackend
 from .utils import DEFAULT_TIMEOUT, ReadingValueCallback, T, get_dtype
@@ -60,6 +60,10 @@ class SoftConverter(Generic[T]):
             dtype in primitive_dtypes
         ), f"invalid converter for value of type {type(value)}"
         dk["dtype"] = primitive_dtypes[dtype]
+        try:
+            dk["dtype_numpy"] = np.dtype(dtype).descr[0][1]
+        except TypeError:
+            dk["dtype_numpy"] = ""
         return dk

     def make_initial_value(self, datatype: Optional[Type[T]]) -> T:
@@ -71,7 +75,20 @@ class SoftConverter(Generic[T]):

 class SoftArrayConverter(SoftConverter):
     def get_datakey(self, source: str, value, **metadata) -> DataKey:
-        …
+        dtype_numpy = ""
+        if isinstance(value, list):
+            if len(value) > 0:
+                dtype_numpy = np.dtype(type(value[0])).descr[0][1]
+        else:
+            dtype_numpy = np.dtype(value.dtype).descr[0][1]
+
+        return {
+            "source": source,
+            "dtype": "array",
+            "dtype_numpy": dtype_numpy,
+            "shape": [len(value)],
+            **metadata,
+        }

     def make_initial_value(self, datatype: Optional[Type[T]]) -> T:
         if datatype is None:
@@ -93,15 +110,13 @@ class SoftEnumConverter(SoftConverter):
         self.choices = datatype.choices

     def write_value(self, value: Union[Enum, str]) -> str:
-        …
-            return value.value
-        else: # Runtime enum
-            return value
+        return value

     def get_datakey(self, source: str, value, **metadata) -> DataKey:
         return {
             "source": source,
             "dtype": "string",
+            "dtype_numpy": "|S40",
             "shape": [],
             "choices": self.choices,
             **metadata,
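The new dtype_numpy entry added to the data keys above is numpy's array-interface type string for the value's dtype; for example (output shown for a little-endian machine):

import numpy as np

print(np.dtype(float).descr[0][1])     # <f8
print(np.dtype(np.int32).descr[0][1])  # <i4
print(np.dtype(bool).descr[0][1])      # |b1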
ophyd_async/epics/_backend/_aioca.py
CHANGED
@@ -66,9 +66,12 @@ def _data_key_from_augmented_value(
     scalar = value.element_count == 1
     dtype = dtype or dbr_to_dtype[value.datatype]

+    dtype_numpy = np.dtype(dbr.DbrCodeToType[value.datatype].dtype).descr[0][1]
+
     d = DataKey(
         source=source,
         dtype=dtype if scalar else "array",
+        dtype_numpy=dtype_numpy,
         # strictly value.element_count >= len(value)
         shape=[] if scalar else [len(value)],
     )