mccode-plumber 0.11.0.tar.gz → 0.12.0.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {mccode_plumber-0.11.0/src/mccode_plumber.egg-info → mccode_plumber-0.12.0}/PKG-INFO +3 -2
- {mccode_plumber-0.11.0 → mccode_plumber-0.12.0}/pyproject.toml +5 -1
- {mccode_plumber-0.11.0 → mccode_plumber-0.12.0}/src/mccode_plumber/epics.py +53 -6
- {mccode_plumber-0.11.0 → mccode_plumber-0.12.0}/src/mccode_plumber/file_writer_control/WorkerJobPool.py +1 -1
- {mccode_plumber-0.11.0 → mccode_plumber-0.12.0}/src/mccode_plumber/forwarder.py +10 -5
- mccode_plumber-0.12.0/src/mccode_plumber/kafka.py +68 -0
- mccode_plumber-0.12.0/src/mccode_plumber/manage/__init__.py +26 -0
- mccode_plumber-0.12.0/src/mccode_plumber/manage/efu.py +133 -0
- mccode_plumber-0.12.0/src/mccode_plumber/manage/ensure.py +73 -0
- mccode_plumber-0.12.0/src/mccode_plumber/manage/epics.py +33 -0
- mccode_plumber-0.12.0/src/mccode_plumber/manage/forwarder.py +79 -0
- mccode_plumber-0.12.0/src/mccode_plumber/manage/manager.py +113 -0
- mccode_plumber-0.12.0/src/mccode_plumber/manage/orchestrate.py +430 -0
- mccode_plumber-0.12.0/src/mccode_plumber/manage/writer.py +60 -0
- {mccode_plumber-0.11.0 → mccode_plumber-0.12.0}/src/mccode_plumber/writer.py +64 -29
- {mccode_plumber-0.11.0 → mccode_plumber-0.12.0/src/mccode_plumber.egg-info}/PKG-INFO +3 -2
- {mccode_plumber-0.11.0 → mccode_plumber-0.12.0}/src/mccode_plumber.egg-info/SOURCES.txt +8 -0
- {mccode_plumber-0.11.0 → mccode_plumber-0.12.0}/src/mccode_plumber.egg-info/entry_points.txt +3 -0
- {mccode_plumber-0.11.0 → mccode_plumber-0.12.0}/src/mccode_plumber.egg-info/requires.txt +2 -1
- mccode_plumber-0.11.0/src/mccode_plumber/kafka.py +0 -29
- {mccode_plumber-0.11.0 → mccode_plumber-0.12.0}/.github/workflows/pip.yml +0 -0
- {mccode_plumber-0.11.0 → mccode_plumber-0.12.0}/.github/workflows/wheels.yml +0 -0
- {mccode_plumber-0.11.0 → mccode_plumber-0.12.0}/.gitignore +0 -0
- {mccode_plumber-0.11.0 → mccode_plumber-0.12.0}/README.md +0 -0
- {mccode_plumber-0.11.0 → mccode_plumber-0.12.0}/setup.cfg +0 -0
- {mccode_plumber-0.11.0 → mccode_plumber-0.12.0}/src/mccode_plumber/__init__.py +0 -0
- {mccode_plumber-0.11.0 → mccode_plumber-0.12.0}/src/mccode_plumber/conductor.py +0 -0
- {mccode_plumber-0.11.0 → mccode_plumber-0.12.0}/src/mccode_plumber/file_writer_control/CommandChannel.py +0 -0
- {mccode_plumber-0.11.0 → mccode_plumber-0.12.0}/src/mccode_plumber/file_writer_control/CommandHandler.py +0 -0
- {mccode_plumber-0.11.0 → mccode_plumber-0.12.0}/src/mccode_plumber/file_writer_control/CommandStatus.py +0 -0
- {mccode_plumber-0.11.0 → mccode_plumber-0.12.0}/src/mccode_plumber/file_writer_control/InThreadStatusTracker.py +0 -0
- {mccode_plumber-0.11.0 → mccode_plumber-0.12.0}/src/mccode_plumber/file_writer_control/JobHandler.py +0 -0
- {mccode_plumber-0.11.0 → mccode_plumber-0.12.0}/src/mccode_plumber/file_writer_control/JobStatus.py +0 -0
- {mccode_plumber-0.11.0 → mccode_plumber-0.12.0}/src/mccode_plumber/file_writer_control/KafkaTopicUrl.py +0 -0
- {mccode_plumber-0.11.0 → mccode_plumber-0.12.0}/src/mccode_plumber/file_writer_control/StateExtractor.py +0 -0
- {mccode_plumber-0.11.0 → mccode_plumber-0.12.0}/src/mccode_plumber/file_writer_control/WorkerFinder.py +0 -0
- {mccode_plumber-0.11.0 → mccode_plumber-0.12.0}/src/mccode_plumber/file_writer_control/WorkerStatus.py +0 -0
- {mccode_plumber-0.11.0 → mccode_plumber-0.12.0}/src/mccode_plumber/file_writer_control/WriteJob.py +0 -0
- {mccode_plumber-0.11.0 → mccode_plumber-0.12.0}/src/mccode_plumber/file_writer_control/__init__.py +0 -0
- {mccode_plumber-0.11.0 → mccode_plumber-0.12.0}/src/mccode_plumber/mccode.py +0 -0
- {mccode_plumber-0.11.0 → mccode_plumber-0.12.0}/src/mccode_plumber/splitrun.py +0 -0
- {mccode_plumber-0.11.0 → mccode_plumber-0.12.0}/src/mccode_plumber/utils.py +0 -0
- {mccode_plumber-0.11.0 → mccode_plumber-0.12.0}/src/mccode_plumber.egg-info/dependency_links.txt +0 -0
- {mccode_plumber-0.11.0 → mccode_plumber-0.12.0}/src/mccode_plumber.egg-info/top_level.txt +0 -0
- {mccode_plumber-0.11.0 → mccode_plumber-0.12.0}/tests/test_epics.py +0 -0
- {mccode_plumber-0.11.0 → mccode_plumber-0.12.0}/tests/test_splitrun.py +0 -0
- {mccode_plumber-0.11.0 → mccode_plumber-0.12.0}/tests/test_writer.py +0 -0
{mccode_plumber-0.11.0/src/mccode_plumber.egg-info → mccode_plumber-0.12.0}/PKG-INFO
@@ -1,16 +1,17 @@
 Metadata-Version: 2.4
 Name: mccode-plumber
-Version: 0.11.0
+Version: 0.12.0
 Author-email: Gregory Tucker <gregory.tucker@ess.eu>
 Classifier: License :: OSI Approved :: BSD License
 Description-Content-Type: text/markdown
 Requires-Dist: p4p
 Requires-Dist: kafka-python>=2.2.11
 Requires-Dist: ess-streaming-data-types>=0.14.0
-Requires-Dist: restage>=0.7.
+Requires-Dist: restage>=0.7.2
 Requires-Dist: mccode-to-kafka>=0.2.2
 Requires-Dist: moreniius>=0.4.0
 Requires-Dist: icecream
+Requires-Dist: ephemeral-port-reserve

 # McCode Plumber
 Setup, run, and teardown the infrastructure for splitrun McCode scans sending data through Kafka into NeXus
{mccode_plumber-0.11.0 → mccode_plumber-0.12.0}/pyproject.toml
@@ -8,10 +8,11 @@ dependencies = [
     'p4p',
     'kafka-python>=2.2.11',
     'ess-streaming-data-types>=0.14.0',
-    'restage>=0.7.
+    'restage>=0.7.2',
     'mccode-to-kafka>=0.2.2',
     'moreniius>=0.4.0',
     'icecream',
+    'ephemeral-port-reserve',
 ]
 readme = "README.md"
 authors = [
@@ -25,6 +26,7 @@ dynamic = ['version']
 [project.scripts]
 mp-splitrun = 'mccode_plumber.splitrun:main'
 mp-epics = 'mccode_plumber.epics:run'
+mp-epics-strings = 'mccode_plumber.epics:run_strings'
 mp-epics-update = 'mccode_plumber.epics:update'
 mp-forwarder-setup = 'mccode_plumber.forwarder:setup'
 mp-forwarder-teardown = 'mccode_plumber.forwarder:teardown'
@@ -36,6 +38,8 @@ mp-writer-kill = 'mccode_plumber.writer:kill_job'
 mp-writer-killall = 'mccode_plumber.writer:kill_all'
 mp-register-topics = 'mccode_plumber.kafka:register_topics'
 mp-insert-hdf5-instr = 'mccode_plumber.mccode:insert'
+mp-nexus-splitrun = 'mccode_plumber.manage.orchestrate:main'
+mp-nexus-services = 'mccode_plumber.manage.orchestrate:services'

 [tool.setuptools_scm]

{mccode_plumber-0.11.0 → mccode_plumber-0.12.0}/src/mccode_plumber/epics.py
@@ -5,10 +5,9 @@ from p4p.server.thread import SharedPV
 from pathlib import Path
 from typing import Union

-
-def convert_instr_parameters_to_nt(parameters):
+def instr_par_to_nt_primitive(parameters):
     from mccode_antlr.common.expression import DataType, ShapeType
-    out =
+    out = []
     for p in parameters:
         expr = p.value
         if expr.is_str:
@@ -21,7 +20,37 @@ def convert_instr_parameters_to_nt(parameters):
             raise ValueError(f"Unknown parameter type {expr.data_type}")
         if expr.shape_type == ShapeType.vector:
             t, d = 'a' + t, [d]
-        out
+        out.append((p.name, t, d))
+    return out
+
+def instr_par_nt_to_strings(parameters):
+    return [f'{n}:{t}:{d}'.replace(' ', '') for n, t, d in instr_par_to_nt_primitive(parameters)]
+
+def strings_to_instr_par_nt(strings):
+    out = []
+    for string in strings:
+        name, t, dstr = string.split(':')
+        trans = None
+        if 'i' in t:
+            trans = int
+        elif 'd' in t:
+            trans = float
+        elif 's' in t:
+            trans = str
+        else:
+            ValueError(f"Unknown type in {string}")
+        if t.startswith('a'):
+            d = [trans(x) for x in dstr.translate(str.maketrans(',', ' ', '[]')).split()]
+        else:
+            d = trans(dstr)
+        out.append((name, t, d))
+    return out
+
+def convert_strings_to_nt(strings):
+    return {n: NTScalar(t).wrap(d) for n, t, d in strings_to_instr_par_nt(strings)}
+
+def convert_instr_parameters_to_nt(parameters):
+    out = {n: NTScalar(t).wrap(d) for n, t, d in instr_par_to_nt_primitive(parameters)}
     return out


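The new `name:type-char:default` string encoding round-trips NTScalar parameter definitions through a CLI-safe form: `instr_par_nt_to_strings` serializes instrument parameters, `strings_to_instr_par_nt` parses them back, and `convert_strings_to_nt` wraps the results as PV initial values. Note the unknown-type branch constructs a `ValueError` without raising it, so unrecognized type characters fall through with `trans = None`. A minimal sketch of the parsing direction, assuming 0.12.0 is installed (parameter names are illustrative):

    from mccode_plumber.epics import strings_to_instr_par_nt, convert_strings_to_nt

    # 'i' -> int, 'd' -> float, 's' -> str; a leading 'a' marks an array type
    strings = ['power:d:2.0', 'nslits:i:3', 'sample:s:vanadium', 'offsets:ad:[0.1,0.2]']
    print(strings_to_instr_par_nt(strings))
    # [('power', 'd', 2.0), ('nslits', 'i', 3), ('sample', 's', 'vanadium'), ('offsets', 'ad', [0.1, 0.2])]

    pvs = convert_strings_to_nt(strings)  # {name: NTScalar(type).wrap(default), ...}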
@@ -67,14 +96,16 @@ def parse_args():
     return parameters, args


-def main(names: dict[str, NTScalar], prefix: str = None):
+def main(names: dict[str, NTScalar], prefix: str = None, filename_required: bool = True):
     provider = StaticProvider('mailbox')  # 'mailbox' is an arbitrary name

+    if filename_required and 'mcpl_filename' not in names:
+        names['mcpl_filename'] = NTScalar('s').wrap('')
+
     pvs = []  # we must keep a reference in order to keep the Handler from being collected
     for name, value in names.items():
         pv = SharedPV(initial=value, handler=MailboxHandler())
         provider.add(f'{prefix}{name}' if prefix else name, pv)
-        print(f'Add mailbox for {prefix}{name}')
         pvs.append(pv)

     print(f'Start mailbox server for {len(pvs)} PVs with prefix {prefix}')
@@ -135,5 +166,21 @@ def update():
     ctx.disconnect()


+def get_strings_parser():
+    from argparse import ArgumentParser
+    from mccode_plumber import __version__
+    p = ArgumentParser()
+    p.add_argument('strings', type=str, nargs='+', help='The string encoded NTScalars to read, each name:type-char:default')
+    p.add_argument('-p', '--prefix', type=str, help='The EPICS PV prefix to use', default='mcstas:')
+    p.add_argument('-v', '--version', action='version', version=__version__)
+    return p
+
+
+def run_strings():
+    args = get_strings_parser().parse_args()
+    main(convert_strings_to_nt(args.strings), prefix=args.prefix)
+
+
+
 if __name__ == '__main__':
     run()
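The new `mp-epics-strings` entry point drives `main()` from these encoded strings, so a mailbox server can be restarted without re-parsing the original instrument. A hedged sketch of the equivalent Python call (prefix and parameters are illustrative; `main()` serves the PVs until interrupted):

    from mccode_plumber.epics import get_strings_parser, convert_strings_to_nt, main

    # Equivalent CLI: mp-epics-strings --prefix demo: power:d:2.0 nslits:i:3
    args = get_strings_parser().parse_args(['--prefix', 'demo:', 'power:d:2.0', 'nslits:i:3'])
    main(convert_strings_to_nt(args.strings), prefix=args.prefix)
    # serves demo:power, demo:nslits, and the auto-added demo:mcpl_filename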
{mccode_plumber-0.11.0 → mccode_plumber-0.12.0}/src/mccode_plumber/file_writer_control/WorkerJobPool.py
@@ -19,7 +19,7 @@ class WorkerJobPool(WorkerFinder):
         self,
         job_topic_url: str,
         command_topic_url: str,
-        max_message_size: int =
+        max_message_size: int = 104857600,  # matching the default for Kafka -- previously was 2x larger
         kafka_config: Dict[str, str] = {},
     ):
         """
{mccode_plumber-0.11.0 → mccode_plumber-0.12.0}/src/mccode_plumber/forwarder.py
@@ -59,6 +59,15 @@ def reset_forwarder(pvs: list[dict], config=None, prefix=None, topic=None):
     return pvs


+def forwarder_partial_streams(prefix, topic, parameters):
+    names = [p.name for p in parameters]
+    if 'mcpl_filename' not in names:
+        names.append("mcpl_filename")
+    # Minimal information used by the forwarder for stream setup:
+    partial = [dict(source=f'{prefix}{n}', module='f144', topic=topic) for n in names]
+    return partial
+
+
 def parse_registrar_args():
     from argparse import ArgumentParser
     from .mccode import get_mccode_instr_parameters
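`forwarder_partial_streams` factors the stream description out of argument parsing so the new manage module can reuse it. A sketch of the records it produces, with a stand-in for `InstrumentParameter` (only the `.name` attribute is used here):

    from mccode_plumber.forwarder import forwarder_partial_streams

    class Par:  # stand-in for mccode_antlr's InstrumentParameter
        def __init__(self, name):
            self.name = name

    print(forwarder_partial_streams('mcstas:', 'instrument_params', [Par('power')]))
    # [{'source': 'mcstas:power', 'module': 'f144', 'topic': 'instrument_params'},
    #  {'source': 'mcstas:mcpl_filename', 'module': 'f144', 'topic': 'instrument_params'}]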
@@ -72,11 +81,7 @@ def parse_registrar_args():
     parser.add_argument('-v', '--version', action='version', version=__version__)

     args = parser.parse_args()
-
-    if 'mcpl_filename' not in parameter_names:
-        parameter_names.append('mcpl_filename')
-    # the forwarder only cares about: "source", "module", "topic"
-    params = [{'source': f'{args.prefix}{name}', 'module': 'f144', 'topic': args.topic} for name in parameter_names]
+    params = forwarder_partial_streams(args.prefix, args.topic, get_mccode_instr_parameters(args.instrument))
     return params, args


mccode_plumber-0.12.0/src/mccode_plumber/kafka.py (new file)
@@ -0,0 +1,68 @@
+from enum import Enum
+
+
+class KafkaTopic(Enum):
+    CREATED = 1
+    EXISTS = 2
+    ERROR = 3
+    UNKNOWN = 4
+
+
+def all_exist(topic_enums):
+    if any(not isinstance(v, KafkaTopic) for v in topic_enums):
+        raise ValueError('Only KafkaTopic enumerated values supported')
+    return all(v == KafkaTopic.EXISTS or v == KafkaTopic.CREATED for v in topic_enums)
+
+
+def parse_kafka_topic_args():
+    from argparse import ArgumentParser
+    from mccode_plumber import __version__
+    parser = ArgumentParser(description="Prepare the named Kafka broker to host one or more topics")
+    parser.add_argument('-b', '--broker', type=str, help='The Kafka broker server to interact with')
+    parser.add_argument('topic', nargs="+", type=str, help='The Kafka topic(s) to register')
+    parser.add_argument('-q', '--quiet', action='store_true', help='Quiet (positive) failure')
+    parser.add_argument('-v', '--version', action='version', version=__version__)
+
+    args = parser.parse_args()
+    return args
+
+
+def register_kafka_topics(broker: str, topics: list[str]):
+    from confluent_kafka.admin import AdminClient, NewTopic
+    client = AdminClient({"bootstrap.servers": broker})
+    config = {
+        # 'cleanup.policy': 'delete',
+        # 'delete.retention.ms': 60000,
+        'max.message.bytes': 104857600,
+        # 'retention.bytes': 10737418240,
+        # 'retention.ms': 30000,
+        # 'segment.bytes': 104857600,
+        # 'segment.ms': 60000
+    }
+    new_ts = [NewTopic(t, num_partitions=1, replication_factor=1, config=config) for t in topics]
+    futures = client.create_topics(new_ts)
+    results = {}
+    for topic, future in futures.items():
+        try:
+            future.result()
+            results[topic] = KafkaTopic.CREATED
+        except Exception as e:
+            from confluent_kafka.error import KafkaError
+            if e.args[0] == KafkaError.TOPIC_ALREADY_EXISTS:
+                results[topic] = KafkaTopic.EXISTS
+            else:
+                results[topic] = e.args[0]
+    return results
+
+
+def register_topics():
+    args = parse_kafka_topic_args()
+    results = register_kafka_topics(args.broker, args.topic)
+    if not args.quiet:
+        for topic, result in results.items():
+            if result == KafkaTopic.CREATED:
+                print(f'Created topic {topic}')
+            elif result == KafkaTopic.EXISTS:
+                print(f'Topic {topic} already exists')
+            else:
+                print(f'Failed to register topic "{topic}"? {result}')
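The rewritten kafka module returns per-topic outcomes instead of only printing them, which is what lets the new manage module verify its topics programmatically. A minimal sketch, assuming a broker at localhost:9092 and illustrative topic names:

    from mccode_plumber.kafka import register_kafka_topics, all_exist, KafkaTopic

    results = register_kafka_topics('localhost:9092', ['freia_ev44', 'instrument_params'])
    # Failures store the KafkaError itself, which would make all_exist() raise,
    # so confirm every value is a KafkaTopic before asking if all topics are usable:
    ok = all(isinstance(r, KafkaTopic) for r in results.values()) and all_exist(results.values())
    print('all topics ready' if ok else f'problems: {results}')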
mccode_plumber-0.12.0/src/mccode_plumber/manage/__init__.py (new file)
@@ -0,0 +1,26 @@
+from .manager import Manager
+from .efu import EventFormationUnit
+from .epics import EPICSMailbox
+from .forwarder import Forwarder
+from .writer import KafkaToNexus
+from .ensure import (
+    ensure_accessible_directory, ensure_accessible_file, ensure_executable,
+    ensure_readable_directory, ensure_readable_file,
+    ensure_writable_directory, ensure_writable_file
+)
+
+
+__all__ = (
+    "Manager",
+    "EventFormationUnit",
+    "EPICSMailbox",
+    "Forwarder",
+    "KafkaToNexus",
+    "ensure_accessible_directory",
+    "ensure_accessible_file",
+    "ensure_executable",
+    "ensure_readable_directory",
+    "ensure_readable_file",
+    "ensure_writable_directory",
+    "ensure_writable_file",
+)
mccode_plumber-0.12.0/src/mccode_plumber/manage/efu.py (new file)
@@ -0,0 +1,133 @@
+from __future__ import annotations
+from dataclasses import dataclass, field
+from pathlib import Path
+from ephemeral_port_reserve import reserve
+from .manager import Manager
+from .ensure import ensure_readable_file, ensure_executable
+
+@dataclass
+class EventFormationUnitConfig:
+    name: str
+    binary: Path
+    config: Path
+    calibration: Path
+    topic: str
+    samples_topic: str
+    port: int
+    monitor_every: int
+    monitor_consecutive: int
+
+    @classmethod
+    def from_dict(cls, data: dict):
+        required = ('binary', 'config', 'calibration', 'topic', 'port')
+        if any(req not in data for req in required):
+            msg = [req for req in required if req not in data]
+            msg = ', '.join(msg)
+            raise ValueError(f"Missing required value{'' if len(msg)==1 else 's'}: {msg}")
+        binary = ensure_readable_file(data['binary'])
+        config = ensure_readable_file(data['config'])
+        calibration = ensure_readable_file(data['calibration'])
+        topic = data['topic']
+        port = int(data['port'])
+        monitor_every = int(data.get('monitor_every', 1000))
+        monitor_consecutive = int(data.get('monitor_consecutive', 2))
+        name = data.get('name', binary.stem)
+        samples_topic = data.get('samples_topic', f'{topic}_samples')
+        return cls(name, binary, config, calibration, topic, samples_topic, port, monitor_every, monitor_consecutive)
+
+    def to_dict(self):
+        d = {
+            'name': self.name,
+            'binary': self.binary.as_posix(),
+            'config': self.config.as_posix(),
+            'calibration': self.calibration.as_posix(),
+            'topic': self.topic,
+            'samples_topic': self.samples_topic,
+            'port': self.port,
+            'monitor_every': self.monitor_every,
+            'monitor_consecutive': self.monitor_consecutive,
+        }
+        return d
+
+    def to_cli_str(self):
+        from json import dumps
+        return dumps(self.to_dict()).translate(str.maketrans(',', ';', ' {}"'))
+
+    @classmethod
+    def from_cli_str(cls, cli_str: str):
+        data = {k: v for k, v in [z.split(':') for z in [x for x in cli_str.split(';')]]}
+        return cls.from_dict(data)
+
+
+
+@dataclass
+class EventFormationUnit(Manager):
+    """
+    Command and control of an Event Formation Unit
+
+    Properties
+    ----------
+    binary: the full path to a binary file which is the EFU
+    config: the full path to its configuration JSON file
+    calibration: the full path to its calibration JSON file
+    broker: the domain name or IP and port of the Kafka broker
+    topic: the EV44 detector data Kafka stream topic
+    samples_topic: the raw AR51 detector data Kafka stream topic
+    port: the UDP port at which the EFU will listen for Readout messages
+    command: the TCP port the EFU will use to listen for command messages, e.g. EXIT
+    monitor_every: For every `monitor_every`th Readout packet
+    monitor_consecutive: Send `monitor_consecutive` raw packets to `samples_topic`
+    """
+    binary: Path
+    config: Path
+    calibration: Path
+    broker: str | None = None
+    topic: str | None = None
+    samples_topic: str | None = None
+    port: int = 9000
+    command: int = field(default_factory=reserve)
+    monitor_every: int = 1000
+    monitor_consecutive: int = 2
+
+    def __post_init__(self):
+        self.binary = ensure_executable(self.binary)
+        self.config = ensure_readable_file(self.config)
+        self.calibration = ensure_readable_file(self.calibration)
+        if self.broker is None:
+            self.broker = 'localhost:9092'
+        if self.topic is None:
+            self.topic = self.binary.stem
+        if self.samples_topic is None:
+            self.samples_topic = f'{self.topic}_samples'
+
+    def __run_command__(self):
+        argv = [self.binary.as_posix(),
+                '-b', self.broker,
+                '-t', self.topic,
+                '--ar51_topic', self.samples_topic,
+                '--file', self.config.as_posix(),
+                '--calibration', self.calibration.as_posix(),
+                '--port', str(self.port),
+                '--cmdport', str(self.command),
+                '--monitor_every', str(self.monitor_every),
+                '--monitor_consecutive', str(self.monitor_consecutive),
+                '--nohwcheck']
+        return argv
+
+    def finalize(self):
+        import socket
+        message = f"Check your system status manager whether {self.binary} is active."
+        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
+            try:
+                sock.settimeout(1.0)
+                sock.connect(('localhost', self.command))
+                sock.sendall(bytes("EXIT\n", "utf-8"))
+                received = str(sock.recv(1024), "utf-8")
+            except TimeoutError:
+                print(f"Communication timed out, is the EFU running? {message}")
+                return
+            except ConnectionRefusedError:
+                # the server is already dead or was not started?
+                received = '<OK>'
+        if received.strip() != "<OK>":
+            print(f"EFU responded '{received.strip()}' when asked to exit. {message}")
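`to_cli_str` and `from_cli_str` squeeze the JSON form of an `EventFormationUnitConfig` into a single shell-safe token by swapping commas for semicolons and stripping spaces, braces, and quotes. A sketch of the forward direction with illustrative paths (`from_cli_str` feeds `from_dict`, which additionally verifies the files exist):

    from pathlib import Path
    from mccode_plumber.manage.efu import EventFormationUnitConfig

    cfg = EventFormationUnitConfig(
        name='freia', binary=Path('/usr/bin/efu'), config=Path('/etc/efu/freia.json'),
        calibration=Path('/etc/efu/calib.json'), topic='freia_ev44',
        samples_topic='freia_ev44_samples', port=9000, monitor_every=1000, monitor_consecutive=2,
    )
    print(cfg.to_cli_str())
    # name:freia;binary:/usr/bin/efu;config:/etc/efu/freia.json;...;port:9000;...
    # The encoding assumes no value contains ':', ';', ',', '{', '}', '"', or spaces.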
mccode_plumber-0.12.0/src/mccode_plumber/manage/ensure.py (new file)
@@ -0,0 +1,73 @@
+from __future__ import annotations
+from pathlib import Path
+from os import access, R_OK, W_OK, X_OK
+
+def message(mode) -> str:
+    return {R_OK: 'readable', W_OK: 'writable', X_OK: 'executable'}.get(mode, 'unknown')
+
+def ensure_executable(path: str | Path) -> Path:
+    from shutil import which
+    found = which(path)
+    if found is None:
+        raise FileNotFoundError(path)
+    return Path(found)
+
+def ensure_accessible_file(path: str | Path, mode, must_exist=True) -> Path:
+    if isinstance(path, str):
+        path = Path(path)
+    if not isinstance(path, Path):
+        raise ValueError(f'{path} is not a Path object')
+    if must_exist:
+        if not path.exists():
+            raise ValueError(f'{path} does not exist')
+        if not path.is_file():
+            raise ValueError(f'{path} is not a file')
+        if not access(path, mode):
+            raise ValueError(f'{path} is not {message(mode)}')
+    return path
+
+def ensure_accessible_directory(path: str | Path, mode) -> Path:
+    if isinstance(path, str):
+        path = Path(path)
+    if not isinstance(path, Path):
+        raise ValueError(f'{path} is not a Path object')
+    if not path.exists():
+        raise ValueError(f'{path} does not exist')
+    if not path.is_dir():
+        raise ValueError(f'{path} is not a directory')
+    if not access(path, mode):
+        raise ValueError(f'{path} is not a {message(mode)} directory')
+    return path
+
+def ensure_readable_file(path: str | Path) -> Path:
+    return ensure_accessible_file(path, R_OK)
+
+def ensure_writable_file(path: str | Path) -> Path:
+    if not isinstance(path, Path):
+        path = Path(path)
+    return (
+        ensure_accessible_directory(path.parent, W_OK)
+        and ensure_accessible_file(path, W_OK, must_exist=False)
+    )
+
+def ensure_readable_directory(path: str | Path) -> Path:
+    return ensure_accessible_directory(path, R_OK)
+
+def ensure_writable_directory(path: str | Path) -> Path:
+    return ensure_accessible_directory(path, W_OK)
+
+
+def ensure_path(path: str | Path, access_type, is_dir: bool = False) -> Path:
+    if isinstance(path, str):
+        path = Path(path)
+    if not isinstance(path, Path):
+        raise ValueError(f'{path} is not a Path object')
+    if not path.exists():
+        raise ValueError(f'{path} does not exist')
+    if is_dir and not path.is_dir():
+        raise ValueError(f'{path} is not a directory')
+    if not is_dir and not path.is_file():
+        raise ValueError(f'{path} is not a file')
+    if not access(path, access_type):
+        raise ValueError(f'{path} does not support {access_type}')
+    return path
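The `ensure_*` helpers normalize str/Path input and fail early with a uniform `ValueError`, so the process managers can validate their inputs at construction time. A runnable sketch:

    from pathlib import Path
    from tempfile import TemporaryDirectory
    from mccode_plumber.manage.ensure import ensure_executable, ensure_writable_file

    print(ensure_executable('python3'))  # resolved via shutil.which

    with TemporaryDirectory() as d:
        # the file need not exist yet; only its parent directory must be writable
        print(ensure_writable_file(Path(d) / 'scan.nxs'))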
mccode_plumber-0.12.0/src/mccode_plumber/manage/epics.py (new file)
@@ -0,0 +1,33 @@
+from __future__ import annotations
+from dataclasses import dataclass, field
+from pathlib import Path
+from mccode_antlr.common import InstrumentParameter
+from .manager import Manager
+from .ensure import ensure_executable
+
+@dataclass
+class EPICSMailbox(Manager):
+    """
+    Command and control of an EPICS Mailbox server for an instrument
+
+    Parameters
+    ----------
+    parameters: the instrument parameters which define the PV values
+    prefix: a PV value prefix to use with all instrument-defined parameters
+    strings: optional list of NT parameter information to configure the
+             mailbox when the instrument parameters are not available for
+             use in determining the same information.
+    """
+    parameters: tuple[InstrumentParameter, ...]
+    prefix: str
+    strings: list[str] = field(default_factory=list)
+    _command: Path = field(default_factory=lambda: Path('mp-epics-strings'))
+
+    def __post_init__(self):
+        from mccode_plumber.epics import instr_par_nt_to_strings
+        self._command = ensure_executable(self._command)
+        if not len(self.strings):
+            self.strings = instr_par_nt_to_strings(self.parameters)
+
+    def __run_command__(self) -> list[str]:
+        return [self._command.as_posix(), '--prefix', self.prefix] + self.strings
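`EPICSMailbox` launches the new `mp-epics-strings` entry point as a child process rather than importing `epics.main` directly, keeping the PV server out of the orchestrating process. A hedged construction sketch, assuming the package's console scripts are on PATH and that the `Manager` base (defined in manage/manager.py, not shown in this diff) adds no required fields:

    from mccode_plumber.manage import EPICSMailbox

    # With no InstrumentParameter objects at hand, pass the string encoding directly:
    box = EPICSMailbox(parameters=(), prefix='mcstas:', strings=['power:d:2.0'])
    print(box.__run_command__())
    # ['/path/to/mp-epics-strings', '--prefix', 'mcstas:', 'power:d:2.0']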
mccode_plumber-0.12.0/src/mccode_plumber/manage/forwarder.py (new file)
@@ -0,0 +1,79 @@
+from __future__ import annotations
+from dataclasses import dataclass, field
+from pathlib import Path
+from .manager import Manager
+from .ensure import ensure_executable
+
+
+@dataclass
+class Forwarder(Manager):
+    """
+    Manage the execution of a Forwarder to send EPICS PV updates to Kafka
+
+    Parameters
+    ----------
+    broker: the name or address and port of the broker to which updated
+        EPICS values will be sent, once configured. (localhost:9092)
+    config: the broker and topic used for configuring the forwarder
+        (localhost:9092/ForwardConfig)
+    status: the broker and topic used for forwarder status messages
+        (localhost:9092/ForwardStatus)
+    retrieve: Retrieve values from Kafka at configuration (False == don't)
+    verbosity: Control if (Trace, Debug, Warning, Error, or Critical) messages
+        should be printed to STDOUT
+
+    Note
+    ----
+    `config` and `status` can be provided as _only_ their topic if they use the same
+    broker as PV updates. In such a case, there will be no '/' character in their input
+    value and `lambda value: f'{broker}/{value}'` will replace them.
+
+    """
+    broker: str | None = None
+    config: str | None = None
+    status: str | None = None
+    retrieve: bool = False
+    verbosity: str | None = None
+    _command: Path = field(default_factory=lambda: Path('forwarder-launch'))
+
+    def __post_init__(self):
+        from mccode_plumber.kafka import register_kafka_topics, all_exist
+        self._command = ensure_executable(self._command)
+        if self.broker is None:
+            self.broker = 'localhost:9092'
+        if self.config is None:
+            self.config = 'ForwardConfig'
+        if self.status is None:
+            self.status = 'ForwardStatus'
+        if '/' not in self.config:
+            self.config = f'{self.broker}/{self.config}'
+        if '/' not in self.status:
+            self.status = f'{self.broker}/{self.status}'
+
+        for broker_topic in (self.config, self.status):
+            b, t = broker_topic.split('/')
+            res = register_kafka_topics(b, [t])
+            if not all_exist(res.values()):
+                raise RuntimeError(f'Missing Kafka topics? {res}')
+
+
+    def __run_command__(self) -> list[str]:
+        args = [
+            self._command.as_posix(),
+            '--config-topic', self.config,
+            '--status-topic', self.status,
+            '--output-broker', self.broker,
+        ]
+        if not self.retrieve:
+            args.append('--skip-retrieval')
+        if (v := forwarder_verbosity(self.verbosity)) is not None:
+            args.extend(['-v', v])
+        return args
+
+
+def forwarder_verbosity(v):
+    if isinstance(v, str):
+        for k in ('Trace', 'Debug', 'Warning', 'Error', 'Critical'):
+            if k.lower() == v.lower():
+                return k
+    return None
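`forwarder_verbosity` normalizes case-insensitive input to the exact level names the forwarder accepts and returns None for anything else, in which case no `-v` flag is passed:

    from mccode_plumber.manage.forwarder import forwarder_verbosity

    assert forwarder_verbosity('debug') == 'Debug'
    assert forwarder_verbosity('TRACE') == 'Trace'
    assert forwarder_verbosity('info') is None  # unsupported level
    assert forwarder_verbosity(None) is None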