nbs-bl 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- nbs_bl/__init__.py +15 -0
- nbs_bl/beamline.py +450 -0
- nbs_bl/configuration.py +838 -0
- nbs_bl/detectors.py +89 -0
- nbs_bl/devices/__init__.py +12 -0
- nbs_bl/devices/detectors.py +154 -0
- nbs_bl/devices/motors.py +242 -0
- nbs_bl/devices/sampleholders.py +360 -0
- nbs_bl/devices/shutters.py +120 -0
- nbs_bl/devices/slits.py +51 -0
- nbs_bl/gGrEqns.py +171 -0
- nbs_bl/geometry/__init__.py +0 -0
- nbs_bl/geometry/affine.py +197 -0
- nbs_bl/geometry/bars.py +189 -0
- nbs_bl/geometry/frames.py +534 -0
- nbs_bl/geometry/linalg.py +138 -0
- nbs_bl/geometry/polygons.py +56 -0
- nbs_bl/help.py +126 -0
- nbs_bl/hw.py +270 -0
- nbs_bl/load.py +113 -0
- nbs_bl/motors.py +19 -0
- nbs_bl/planStatus.py +5 -0
- nbs_bl/plans/__init__.py +8 -0
- nbs_bl/plans/batches.py +174 -0
- nbs_bl/plans/conditions.py +77 -0
- nbs_bl/plans/flyscan_base.py +180 -0
- nbs_bl/plans/groups.py +55 -0
- nbs_bl/plans/maximizers.py +423 -0
- nbs_bl/plans/metaplans.py +179 -0
- nbs_bl/plans/plan_stubs.py +246 -0
- nbs_bl/plans/preprocessors.py +160 -0
- nbs_bl/plans/scan_base.py +58 -0
- nbs_bl/plans/scan_decorators.py +524 -0
- nbs_bl/plans/scans.py +145 -0
- nbs_bl/plans/suspenders.py +87 -0
- nbs_bl/plans/time_estimation.py +168 -0
- nbs_bl/plans/xas.py +123 -0
- nbs_bl/printing.py +221 -0
- nbs_bl/qt/models/beamline.py +11 -0
- nbs_bl/qt/models/energy.py +53 -0
- nbs_bl/qt/widgets/energy.py +225 -0
- nbs_bl/queueserver.py +249 -0
- nbs_bl/redisDevice.py +96 -0
- nbs_bl/run_engine.py +63 -0
- nbs_bl/samples.py +130 -0
- nbs_bl/settings.py +68 -0
- nbs_bl/shutters.py +39 -0
- nbs_bl/sim/__init__.py +2 -0
- nbs_bl/sim/config/polphase.nc +0 -0
- nbs_bl/sim/energy.py +403 -0
- nbs_bl/sim/manipulator.py +14 -0
- nbs_bl/sim/utils.py +36 -0
- nbs_bl/startup.py +27 -0
- nbs_bl/status.py +114 -0
- nbs_bl/tests/__init__.py +0 -0
- nbs_bl/tests/modify_regions.py +160 -0
- nbs_bl/tests/test_frames.py +99 -0
- nbs_bl/tests/test_panels.py +69 -0
- nbs_bl/utils.py +235 -0
- nbs_bl-0.2.0.dist-info/METADATA +71 -0
- nbs_bl-0.2.0.dist-info/RECORD +64 -0
- nbs_bl-0.2.0.dist-info/WHEEL +4 -0
- nbs_bl-0.2.0.dist-info/entry_points.txt +2 -0
- nbs_bl-0.2.0.dist-info/licenses/LICENSE +13 -0
nbs_bl/plans/batches.py
ADDED
@@ -0,0 +1,174 @@
"""
Tools to manage "batches" of runs.

The goal is to provide tools to easily group more than one run into a larger
unit that can be atomically retrieved for analysis.
"""

import uuid
from functools import partial
from itertools import count

from bluesky import Msg
from bluesky.plan_stubs import open_run, mv, trigger_and_read
from bluesky.preprocessors import set_run_key_wrapper, subs_wrapper, msg_mutator
from ophyd import Device, Signal, Component as Cpt

# do not leak imports or helpers
__all__ = ["setup_batch"]


class RunMd(Device):
    """A helper synthetic device to read per-run batch data from."""

    uid = Cpt(Signal, value="", kind="hinted")
    comment = Cpt(Signal, value="", kind="normal")
    index = Cpt(Signal, value=0, kind="hinted")


def setup_batch(batch_md, *, comment_function=None):
    """
    Set up a "batch" run.

    This will create an additional run, on top of any wrapped runs, that
    includes *batch_md* flattened into the start document, a key `'purpose'`
    with the value `"batch header"`, and a key `'batch_uid'` with a generated
    uid.

    The primary event stream of this run will include the keys `'step_uid'`,
    `'step_comment'`, and `'step_index'` extracted from the "batched" runs.

    Each wrapped run will have the key `'batch_md'` with the *batch_md* as the
    value, `'batch_uid'` with the generated uid as the value, and
    `'batch_index'` with the running count of runs in this batch (starting from
    0). If the start documents already contain any of these keys the user
    values will be respected (but do this at your own risk).

    Parameters
    ----------
    batch_md : dict[str, Any]
        Needs to be insertable into a start document.

    comment_function : Optional[Callable[Start, str]]
        A function to extract a string comment from a start document. If
        this raises it will kill the scan.

        If not specified, defaults to `f"step {index}"`.

    Yields
    ------
    msg : Msg
        To open a run for the "header" run.

    Returns
    -------
    add_to_batch : GeneratorFunction[plan] -> Any
        This wraps the inner plan in the batch.

        Whatever the inner plan returns (if anything) will be returned by the
        wrapper.

    close_batch : Callable -> None
        Yield from this plan to close the batch (emit a stop document for the
        "header" run).

        Only run this once!

    Examples
    --------
    Typical usage::

        def batch(batch_md, *, N=5, comment_function=None):
            add_to_batch, close_batch = yield from setup_batch(
                batch_md, comment_function=comment_function
            )
            for j in range(N):
                yield from add_to_batch(inner_plan())
            yield from close_batch()

    """
    # do not mutate the input!
    batch_md = dict(batch_md)
    batch_md.pop("batch_uid", None)
    md = RunMd(name="step")
    run_index = count()
    batch_uid = str(uuid.uuid4())

    srk_wrapper = partial(set_run_key_wrapper, run=f"batch_leader-{batch_uid}")

    yield from srk_wrapper(
        open_run(md={**batch_md, "purpose": "batch header", "batch_uid": batch_uid})
    )

    def enrich_metadata(msg):
        if msg.command != "open_run":
            return msg
        # TODO maybe force these?
        msg.kwargs.setdefault("batch_md", batch_md)
        msg.kwargs.setdefault("batch_index", next(run_index))
        msg.kwargs.setdefault("batch_uid", batch_uid)
        return msg

    def add_to_batch(inner):
        """
        Wrap a plan to be included in the batch.

        This function is bound to the batch that created it via closures.

        Parameters
        ----------
        inner : plan
            The plan to wrap. This may create any number of runs.
        """
        starts = []
        ret = yield from subs_wrapper(
            msg_mutator(inner, enrich_metadata),
            {"start": [lambda name, doc: starts.append(doc)]},
        )
        for start in starts:
            j = start["batch_index"]
            comment = (
                comment_function(start) if comment_function is not None else f"step {j}"
            )
            yield from mv(
                *(md.uid, start["uid"]),
                *(md.index, j),
                *(md.comment, comment),
            )

            yield from srk_wrapper(trigger_and_read([md]))
        # return whatever the wrapped plan returned
        return ret

    def close_batch(exit_status=None, reason=None):
        """
        Close the "header" run.

        This function is bound to the batch that created it via closures.

        .. warning ::

            Only run this once!

        Parameters
        ----------
        exit_status : {None, 'success', 'abort', 'fail'}
            The exit status to report in the Stop document
        reason : str, optional
            Long-form description of why the run ended

        Yields
        ------
        msg : Msg
            Msg('close_run')

        """
        return (
            yield Msg(
                "close_run",
                exit_status=exit_status,
                reason=reason,
                run=f"batch_leader-{batch_uid}",
            )
        )

    return add_to_batch, close_batch
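A minimal sketch of how setup_batch might be driven from a custom plan. It assumes an existing RunEngine; `det`, `bp.count`, and the metadata keys are placeholders, and the import path simply mirrors this package layout::

    from bluesky import RunEngine
    import bluesky.plans as bp
    from ophyd.sim import det  # simulated detector, stands in for a real one

    from nbs_bl.plans.batches import setup_batch


    def batched_counts(batch_md, *, n_steps=3):
        # Open the "header" run and receive the two closure-bound helpers.
        add_to_batch, close_batch = yield from setup_batch(batch_md)
        for _ in range(n_steps):
            # Each wrapped run gets batch_md / batch_uid / batch_index
            # injected into its start document and is recorded in the
            # header run's primary stream.
            yield from add_to_batch(bp.count([det]))
        # Emit the stop document for the header run exactly once.
        yield from close_batch()


    RE = RunEngine({})
    RE(batched_counts({"proposal": "demo"}))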
nbs_bl/plans/conditions.py
ADDED
@@ -0,0 +1,77 @@
from ..help import add_to_condition_list
from bluesky.plan_stubs import rd
from bluesky.protocols import Readable


@add_to_condition_list
def is_signal_below(sig: Readable, val: float):
    """
    Check if a readable object is below a threshold value.

    Parameters
    ----------
    sig : Readable
        Any object that implements the Readable protocol (can be read with rd()).
    val : float
        The threshold value to compare against.

    Returns
    -------
    bool
        True if the signal is below the threshold.
    """
    reading = yield from rd(sig)
    return reading < val


@add_to_condition_list
def is_signal_equals(sig: Readable, val: float):
    """
    Check if a readable object equals a target value.

    Parameters
    ----------
    sig : Readable
        Any object that implements the Readable protocol (can be read with rd()).
    val : float
        The target value to compare against.

    Returns
    -------
    bool
        True if the signal equals the target.
    """
    reading = yield from rd(sig)
    return reading == val


@add_to_condition_list
def is_signal_above(sig: Readable, val: float):
    """
    Check if a readable object is above a threshold value.

    Parameters
    ----------
    sig : Readable
        Any object that implements the Readable protocol (can be read with rd()).
    val : float
        The threshold value to compare against.

    Returns
    -------
    bool
        True if the signal is above the threshold.
    """
    reading = yield from rd(sig)
    return reading > val
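A short sketch of how these condition plans can be composed into a polling plan. `bps.sleep` is the standard bluesky sleep stub; `ring_current` is a hypothetical stand-in signal, not something defined by this package::

    import bluesky.plan_stubs as bps
    from ophyd import Signal

    from nbs_bl.plans.conditions import is_signal_above

    # Hypothetical readback; a real beamline would use an EPICS-backed signal.
    ring_current = Signal(name="ring_current", value=400.0)


    def wait_for_beam(threshold=350.0, poll_s=5.0):
        """Loop until ring_current reads above threshold."""
        while True:
            ok = yield from is_signal_above(ring_current, threshold)
            if ok:
                return
            yield from bps.sleep(poll_s)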
nbs_bl/plans/flyscan_base.py
ADDED
@@ -0,0 +1,180 @@
from bluesky.protocols import Readable, Flyable
from bluesky.utils import get_hinted_fields
import bluesky.preprocessors as bpp
from bluesky.plan_stubs import trigger_and_read
from warnings import warn
from .plan_stubs import call_obj

from bluesky.utils import Msg, ensure_generator, short_uid as _short_uid, single_gen
from bluesky.preprocessors import plan_mutator
from typing import Optional


def flystream_during_wrapper(plan, flyers):
    """
    Kickoff and collect "flyer" (asynchronously collect) objects during runs.

    This is a preprocessor that inserts messages immediately after a run is
    opened and before it is closed.

    Parameters
    ----------
    plan : iterable or iterator
        a generator, list, or similar containing `Msg` objects
    flyers : collection
        objects that support the flyer interface

    Yields
    ------
    msg : Msg
        messages from plan with 'kickoff', 'wait' and 'collect' messages
        inserted

    See Also
    --------
    :func:`bluesky.plans.fly`
    """
    grp1 = _short_uid("flyers-kickoff")
    grp2 = _short_uid("flyers-complete")
    kickoff_msgs = [Msg("kickoff", flyer, group=grp1) for flyer in flyers]
    complete_msgs = [Msg("complete", flyer, group=grp2) for flyer in flyers]
    collect_msgs = [Msg("collect", flyer) for flyer in flyers]
    if flyers:
        # If there are any flyers, insert a 'wait' Msg after kickoff, complete
        kickoff_msgs += [Msg("wait", None, group=grp1)]
        complete_msgs += [Msg("wait", None, group=grp2)]

    def insert_after_open(msg):
        if msg.command == "open_run":

            def new_gen():
                yield from ensure_generator(kickoff_msgs)

            return single_gen(msg), new_gen()
        else:
            return None, None

    def insert_before_close(msg):
        if msg.command == "close_run":

            def new_gen():
                yield from ensure_generator(complete_msgs)
                yield from ensure_generator(collect_msgs)
                yield msg

            return new_gen(), None
        else:
            return None, None

    # Apply nested mutations.
    plan1 = plan_mutator(plan, insert_after_open)
    plan2 = plan_mutator(plan1, insert_before_close)
    return (yield from plan2)


def fly_scan(
    detectors,
    motor,
    start,
    stop,
    *args,
    md: Optional[dict] = None,
    period: Optional[float] = None,
    stream: bool = True,
    **kwargs,
):
    """
    Perform a fly scan over the specified motor range.

    Parameters
    ----------
    detectors : list
        List of detectors to use for the scan
    motor : ophyd.Device
        Motor to be scanned
    start : float
        Starting position of the scan
    stop : float
        Ending position of the scan
    *args : float, optional
        Additional scan parameters in groups of three:
        start1, stop1, speed1[, start2, stop2, speed2, ...]
        This allows for multiple trajectory segments in a single scan
    md : dict, optional
        Metadata dictionary to be included with the scan
    period : float, optional
        Time period between data points. If None, uses the detector's default period
    stream : bool, optional
        If True, continuously stream data from detectors during the scan.
        If False, collect data only at specified points. Default is True

    Returns
    -------
    uid : str
        Unique identifier for the scan

    Notes
    -----
    When stream=True, detectors will continuously collect data during motor movement,
    providing higher time resolution but potentially more data volume.
    When stream=False, data is collected only at specific points, reducing data volume
    but potentially missing intermediate states.

    Examples
    --------
    # Simple scan with one trajectory, default motor speed
    >>> fly_scan([det], motor, 0, 10)

    # Multi-segment scan with different speeds
    >>> fly_scan([det], motor, 0, 10, 2, 10, 20, 5)
    # This will scan from 0->10 at speed 2, then 10->20 at speed 5
    """

    md = md or {}

    flyers = [d for d in detectors + [motor] if isinstance(d, Flyable)]
    readers = [d for d in detectors + [motor] if isinstance(d, Readable)]

    _md = {
        "detectors": [det.name for det in readers],
        "motors": [motor.name],
        "plan_args": {
            "detectors": list(map(repr, detectors)),
            "args": [repr(motor), start, stop] + [a for a in args],
        },
        "plan_name": "fly_scan",
        "hints": {},
    }
    _md.update(md or {})

    x_fields = get_hinted_fields(motor)
    default_dimensions = [(x_fields, "primary")]

    default_hints = {}
    if len(x_fields) > 0:
        default_hints.update(dimensions=default_dimensions)

    _md["hints"] = default_hints
    _md["hints"].update(md.get("hints", {}) or {})

    if period is not None:
        for d in readers:
            try:
                if hasattr(d, "set_exposure"):
                    yield from call_obj(d, "set_exposure", period)
            except RuntimeError as ex:
                warn(repr(ex), RuntimeWarning)

    yield from call_obj(motor, "preflight", start, stop, *args, **kwargs)

    @bpp.stage_decorator(readers)
    @bpp.run_decorator(md=_md)
    def inner_flyscan():
        status = yield from call_obj(motor, "fly")

        while not status.done:
            yield from trigger_and_read(readers)

        yield from call_obj(motor, "land")

    if stream:
        return (yield from flystream_during_wrapper(inner_flyscan(), flyers))
    else:
        return (yield from inner_flyscan())
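To show flystream_during_wrapper on its own, here is a hedged sketch that streams a flyer alongside an ordinary step scan; `TrivialFlyer` from ophyd.sim stands in for any object implementing kickoff/complete/collect::

    import bluesky.plans as bp
    from ophyd.sim import det, motor, TrivialFlyer

    from nbs_bl.plans.flyscan_base import flystream_during_wrapper

    flyer = TrivialFlyer()


    def stepped_scan_with_flyer():
        # 'kickoff' and a matching 'wait' are injected right after open_run;
        # 'complete', 'wait', and 'collect' are injected just before close_run.
        yield from flystream_during_wrapper(
            bp.scan([det], motor, -1, 1, 11), [flyer]
        )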
nbs_bl/plans/groups.py
ADDED
@@ -0,0 +1,55 @@
import uuid
from functools import wraps
from bluesky.preprocessors import inject_md_wrapper
from .preprocessors import wrap_metadata, merge_func


def repeat(func):
    @merge_func(func)
    def inner(*args, repeat: int = 1, **kwargs):
        """
        Parameters
        ----------
        repeat : int, optional
            The number of times to repeat the entire scan
        """
        if repeat > 1:
            repeat_uid = str(uuid.uuid4())
            return_list = []
            for i in range(repeat):
                repeat_md = {"repeat": {"uid": repeat_uid, "len": repeat, "index": i}}
                r = yield from wrap_metadata(repeat_md)(func)(*args, **kwargs)
                return_list.append(r)
            return return_list
        else:
            return (yield from func(*args, **kwargs))

    return inner


def group(groupname):
    def decorator(func):
        @merge_func(func)
        def inner(*args, **kwargs):
            md = {"group_md": {"uid": str(uuid.uuid4()), "name": groupname}}
            return (yield from inject_md_wrapper(func(*args, **kwargs), md))

        return inner

    return decorator


def simple_1d_sequence_factory(length, label, device=None):
    sq = {"shape": length, "label": label, "uid": str(uuid.uuid4())}
    if device is not None:
        sq["device_name"] = device.name
    index = iter(range(length))

    def add_to_sequence(plan, value):
        return (
            yield from inject_md_wrapper(
                plan, {"sequence": {**sq, "index": next(index), "value": value}}
            )
        )

    return add_to_sequence