mccode-plumber 0.11.1__py3-none-any.whl → 0.13.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
mccode_plumber/epics.py CHANGED
@@ -5,10 +5,9 @@ from p4p.server.thread import SharedPV
5
5
  from pathlib import Path
6
6
  from typing import Union
7
7
 
8
-
9
- def convert_instr_parameters_to_nt(parameters):
8
+ def instr_par_to_nt_primitive(parameters):
10
9
  from mccode_antlr.common.expression import DataType, ShapeType
11
- out = {}
10
+ out = []
12
11
  for p in parameters:
13
12
  expr = p.value
14
13
  if expr.is_str:
@@ -21,7 +20,37 @@ def convert_instr_parameters_to_nt(parameters):
21
20
  raise ValueError(f"Unknown parameter type {expr.data_type}")
22
21
  if expr.shape_type == ShapeType.vector:
23
22
  t, d = 'a' + t, [d]
24
- out[p.name] = NTScalar(t).wrap(expr.value if expr.has_value else d)
23
+ out.append((p.name, t, d))
24
+ return out
25
+
26
def instr_par_nt_to_strings(parameters):
    """Encode instrument parameters as compact 'name:type:default' strings.

    Whitespace is stripped so each entry survives command-line transport.
    """
    triples = instr_par_to_nt_primitive(parameters)
    return [f'{name}:{code}:{default}'.replace(' ', '') for name, code, default in triples]
28
+
29
def strings_to_instr_par_nt(strings):
    """Decode 'name:type:default' strings (from ``instr_par_nt_to_strings``)
    back into (name, type-code, default-value) tuples.

    Parameters
    ----------
    strings:
        Iterable of 'name:t:default' strings, where t is an NTScalar type code
        ('i', 'd', or 's', optionally prefixed by 'a' for array types) and
        default is the scalar value or a bracketed, comma-separated list.

    Returns
    -------
    list of (name, type-code, converted-default) tuples; defaults become
    int, float, or str values (or a list thereof for array types).

    Raises
    ------
    ValueError
        If a type code contains none of 'i', 'd', or 's'.
    """
    out = []
    for string in strings:
        # Split at most twice so a *string default* may itself contain ':'
        name, t, dstr = string.split(':', 2)
        if 'i' in t:
            trans = int
        elif 'd' in t:
            trans = float
        elif 's' in t:
            trans = str
        else:
            # BUG FIX: the exception was previously constructed but never
            # raised, which later produced a confusing `None(...)` TypeError.
            raise ValueError(f"Unknown type in {string}")
        if t.startswith('a'):
            # Strip brackets, turn commas into separators, convert elements.
            d = [trans(x) for x in dstr.translate(str.maketrans(',', ' ', '[]')).split()]
        else:
            d = trans(dstr)
        out.append((name, t, d))
    return out
48
+
49
def convert_strings_to_nt(strings):
    """Build a {name: wrapped NTScalar} mapping from string-encoded parameters."""
    result = {}
    for name, code, default in strings_to_instr_par_nt(strings):
        result[name] = NTScalar(code).wrap(default)
    return result
51
+
52
def convert_instr_parameters_to_nt(parameters):
    """Build a {name: wrapped NTScalar} mapping directly from instrument parameters."""
    return {name: NTScalar(code).wrap(default)
            for name, code, default in instr_par_to_nt_primitive(parameters)}
26
55
 
27
56
 
@@ -67,14 +96,16 @@ def parse_args():
67
96
  return parameters, args
68
97
 
69
98
 
70
- def main(names: dict[str, NTScalar], prefix: str = None):
99
+ def main(names: dict[str, NTScalar], prefix: str = None, filename_required: bool = True):
71
100
  provider = StaticProvider('mailbox') # 'mailbox' is an arbitrary name
72
101
 
102
+ if filename_required and 'mcpl_filename' not in names:
103
+ names['mcpl_filename'] = NTScalar('s').wrap('')
104
+
73
105
  pvs = [] # we must keep a reference in order to keep the Handler from being collected
74
106
  for name, value in names.items():
75
107
  pv = SharedPV(initial=value, handler=MailboxHandler())
76
108
  provider.add(f'{prefix}{name}' if prefix else name, pv)
77
- print(f'Add mailbox for {prefix}{name}')
78
109
  pvs.append(pv)
79
110
 
80
111
  print(f'Start mailbox server for {len(pvs)} PVs with prefix {prefix}')
@@ -135,5 +166,21 @@ def update():
135
166
  ctx.disconnect()
136
167
 
137
168
 
169
def get_strings_parser():
    """Build the argument parser for the string-encoded EPICS mailbox entry point."""
    from argparse import ArgumentParser
    from mccode_plumber import __version__
    parser = ArgumentParser()
    parser.add_argument('strings', type=str, nargs='+',
                        help='The string encoded NTScalars to read, each name:type-char:default')
    parser.add_argument('-p', '--prefix', type=str,
                        help='The EPICS PV prefix to use', default='mcstas:')
    parser.add_argument('-v', '--version', action='version', version=__version__)
    return parser
177
+
178
+
179
def run_strings():
    """Entry point: serve string-encoded NTScalar PVs via the EPICS mailbox."""
    parsed = get_strings_parser().parse_args()
    pvs = convert_strings_to_nt(parsed.strings)
    main(pvs, prefix=parsed.prefix)
182
+
183
+
184
+
138
185
  if __name__ == '__main__':
139
186
  run()
@@ -19,7 +19,7 @@ class WorkerJobPool(WorkerFinder):
19
19
  self,
20
20
  job_topic_url: str,
21
21
  command_topic_url: str,
22
- max_message_size: int = 1048576 * 200,
22
+ max_message_size: int = 104857600, # matching the default for Kafka -- previously was 2x larger
23
23
  kafka_config: Dict[str, str] = {},
24
24
  ):
25
25
  """
@@ -59,6 +59,15 @@ def reset_forwarder(pvs: list[dict], config=None, prefix=None, topic=None):
59
59
  return pvs
60
60
 
61
61
 
62
def forwarder_partial_streams(prefix, topic, parameters):
    """Minimal per-PV stream records ('source', 'module', 'topic') for the forwarder.

    An 'mcpl_filename' entry is always present: one is appended when the
    instrument parameters do not already define it.
    """
    names = [parameter.name for parameter in parameters]
    if 'mcpl_filename' not in names:
        names.append("mcpl_filename")
    return [{'source': f'{prefix}{name}', 'module': 'f144', 'topic': topic}
            for name in names]
69
+
70
+
62
71
  def parse_registrar_args():
63
72
  from argparse import ArgumentParser
64
73
  from .mccode import get_mccode_instr_parameters
@@ -72,11 +81,7 @@ def parse_registrar_args():
72
81
  parser.add_argument('-v', '--version', action='version', version=__version__)
73
82
 
74
83
  args = parser.parse_args()
75
- parameter_names = [p.name for p in get_mccode_instr_parameters(args.instrument)]
76
- if 'mcpl_filename' not in parameter_names:
77
- parameter_names.append('mcpl_filename')
78
- # the forwarder only cares about: "source", "module", "topic"
79
- params = [{'source': f'{args.prefix}{name}', 'module': 'f144', 'topic': args.topic} for name in parameter_names]
84
+ params = forwarder_partial_streams(args.prefix, args.topic, get_mccode_instr_parameters(args.instrument))
80
85
  return params, args
81
86
 
82
87
 
mccode_plumber/kafka.py CHANGED
@@ -1,3 +1,19 @@
1
+ from enum import Enum
2
+
3
+
4
class KafkaTopic(Enum):
    """Outcome of a Kafka topic-registration attempt (see register_kafka_topics)."""
    CREATED = 1  # topic was newly created on the broker
    EXISTS = 2   # topic already existed on the broker
    ERROR = 3    # NOTE(review): appears reserved; register_kafka_topics currently stores the raw KafkaError instead
    UNKNOWN = 4  # NOTE(review): not produced by any visible code path here
9
+
10
+
11
def all_exist(topic_enums):
    """True when every value is KafkaTopic.EXISTS or KafkaTopic.CREATED.

    Raises ValueError if any value is not a KafkaTopic member.
    """
    values = list(topic_enums)
    if not all(isinstance(value, KafkaTopic) for value in values):
        raise ValueError('Only KafkaTopic enumerated values supported')
    acceptable = (KafkaTopic.EXISTS, KafkaTopic.CREATED)
    return all(value in acceptable for value in values)
15
+
16
+
1
17
  def parse_kafka_topic_args():
2
18
  from argparse import ArgumentParser
3
19
  from mccode_plumber import __version__
@@ -11,19 +27,42 @@ def parse_kafka_topic_args():
11
27
  return args
12
28
 
13
29
 
14
- def register_topics():
30
def register_kafka_topics(broker: str, topics: list[str]):
    """Create the named topics on *broker* and report a status per topic.

    Returns
    -------
    dict mapping each topic name to KafkaTopic.CREATED, KafkaTopic.EXISTS,
    or the underlying KafkaError when creation failed for any other reason.
    """
    from confluent_kafka.admin import AdminClient, NewTopic
    client = AdminClient({"bootstrap.servers": broker})
    # Only the message-size limit is overridden; other topic settings
    # (cleanup, retention, segment sizing, ...) stay at broker defaults.
    config = {
        'max.message.bytes': 104857600,
    }
    requested = [NewTopic(name, num_partitions=1, replication_factor=1, config=config)
                 for name in topics]
    results = {}
    for name, future in client.create_topics(requested).items():
        try:
            future.result()
        except Exception as error:
            from confluent_kafka.error import KafkaError
            code = error.args[0]
            results[name] = KafkaTopic.EXISTS if code == KafkaError.TOPIC_ALREADY_EXISTS else code
        else:
            results[name] = KafkaTopic.CREATED
    return results
56
+
57
+
58
def register_topics():
    """CLI entry point: create the requested Kafka topics and report outcomes."""
    args = parse_kafka_topic_args()
    results = register_kafka_topics(args.broker, args.topic)
    if args.quiet:
        # Suppress all per-topic reporting when asked to be quiet.
        return
    for topic, result in results.items():
        if result == KafkaTopic.CREATED:
            print(f'Created topic {topic}')
        elif result == KafkaTopic.EXISTS:
            print(f'Topic {topic} already exists')
        else:
            print(f'Failed to register topic "{topic}"? {result}')
@@ -0,0 +1,26 @@
1
+ from .manager import Manager
2
+ from .efu import EventFormationUnit
3
+ from .epics import EPICSMailbox
4
+ from .forwarder import Forwarder
5
+ from .writer import KafkaToNexus
6
+ from .ensure import (
7
+ ensure_accessible_directory, ensure_accessible_file, ensure_executable,
8
+ ensure_readable_directory, ensure_readable_file,
9
+ ensure_writable_directory, ensure_writable_file
10
+ )
11
+
12
+
13
+ __all__ = (
14
+ "Manager",
15
+ "EventFormationUnit",
16
+ "EPICSMailbox",
17
+ "Forwarder",
18
+ "KafkaToNexus",
19
+ "ensure_accessible_directory",
20
+ "ensure_accessible_file",
21
+ "ensure_executable",
22
+ "ensure_readable_directory",
23
+ "ensure_readable_file",
24
+ "ensure_writable_directory",
25
+ "ensure_writable_file",
26
+ )
@@ -0,0 +1,142 @@
1
+ from __future__ import annotations
2
+ from dataclasses import dataclass, field
3
+ from pathlib import Path
4
+ from ephemeral_port_reserve import reserve
5
+ from .manager import Manager
6
+ from .ensure import ensure_readable_file, ensure_executable
7
+
8
@dataclass
class EventFormationUnitConfig:
    """Validated configuration for launching an Event Formation Unit.

    Attributes map directly to EFU command-line options; `binary`, `config`
    and `calibration` are checked for readability when constructed via
    `from_dict` / `from_cli_str`.
    """
    name: str
    binary: Path
    config: Path
    calibration: Path
    topic: str
    samples_topic: str
    port: int
    monitor_every: int
    monitor_consecutive: int

    @classmethod
    def from_dict(cls, data: dict):
        """Build a config from a dict; optional keys fall back to defaults.

        Raises
        ------
        ValueError
            If any of the required keys is absent.
        """
        required = ('binary', 'config', 'calibration', 'topic', 'port')
        missing = [req for req in required if req not in data]
        if missing:
            # BUG FIX: pluralize by the number of missing keys -- previously
            # the length of the *joined string* was tested, so a single
            # missing key still produced "values".
            plural = '' if len(missing) == 1 else 's'
            raise ValueError(f"Missing required value{plural}: {', '.join(missing)}")
        binary = ensure_readable_file(data['binary'])
        config = ensure_readable_file(data['config'])
        calibration = ensure_readable_file(data['calibration'])
        topic = data['topic']
        port = int(data['port'])
        monitor_every = int(data.get('monitor_every', 1000))
        monitor_consecutive = int(data.get('monitor_consecutive', 2))
        name = data.get('name', binary.stem)
        samples_topic = data.get('samples_topic', f'{topic}_samples')
        return cls(name, binary, config, calibration, topic, samples_topic, port, monitor_every, monitor_consecutive)

    def to_dict(self):
        """Return a plain-dict representation with paths as POSIX strings."""
        return {
            'name': self.name,
            'binary': self.binary.as_posix(),
            'config': self.config.as_posix(),
            'calibration': self.calibration.as_posix(),
            'topic': self.topic,
            'samples_topic': self.samples_topic,
            'port': self.port,
            'monitor_every': self.monitor_every,
            'monitor_consecutive': self.monitor_consecutive,
        }

    def to_cli_str(self):
        """Serialize as 'key:value,key:value,...' for command-line transport."""
        from json import dumps
        # Dropping ' {}"' from the JSON text leaves exactly 'k:v,k:v,...'
        return dumps(self.to_dict()).translate(str.maketrans('', '', ' {}"'))

    @classmethod
    def from_cli_str(cls, cli_str: str):
        """Split a command-line argument string into a EventFormationUnitConfig

        Note
        ----
        A command-line argument string is expected to have the following format:
        "arg1:value1,arg2:value2,...,argN:valueN"
        That is, key-value pairs separated from each other by commas and internally
        by a colon. This allows for _values_ which contain colons, but not keys.
        """
        data = dict(pair.split(':', maxsplit=1) for pair in cli_str.split(','))
        return cls.from_dict(data)
69
+
70
+
71
+
72
@dataclass
class EventFormationUnit(Manager):
    """
    Command and control of an Event Formation Unit

    Properties
    ----------
    binary: the full path to a binary file which is the EFU
    config: the full path to its configuration JSON file
    calibration: the full path to its calibration JSON file
    broker: the domain name or IP and port of the Kafka broker
    topic: the EV44 detector data Kafka stream topic
    samples_topic: the raw AR51 detector data Kafka stream topic
    port: the UDP port at which the EFU will listen for Readout messages
    command: the TCP port the EFU will use to listen for command messages, e.g. EXIT
    monitor_every: For every `monitor_every`th Readout packet
    monitor_consecutive: Send `monitor_consecutive` raw packets to `samples_topic`
    """
    binary: Path
    config: Path
    calibration: Path
    broker: str | None = None
    topic: str | None = None
    samples_topic: str | None = None
    port: int = 9000
    # A free TCP port is reserved at instance-creation time for command/control
    command: int = field(default_factory=reserve)
    monitor_every: int = 1000
    monitor_consecutive: int = 2

    def __post_init__(self):
        # Validate the executable and input files early, then derive defaults
        # that depend on other fields (broker address, topic names).
        self.binary = ensure_executable(self.binary)
        self.config = ensure_readable_file(self.config)
        self.calibration = ensure_readable_file(self.calibration)
        if self.broker is None:
            self.broker = 'localhost:9092'
        if self.topic is None:
            # default topic is the binary's filename without its suffix
            self.topic = self.binary.stem
        if self.samples_topic is None:
            self.samples_topic = f'{self.topic}_samples'

    def __run_command__(self):
        """Assemble the EFU argv; numeric options are stringified for Popen."""
        argv = [self.binary.as_posix(),
                '-b', self.broker,
                '-t', self.topic,
                '--ar51_topic', self.samples_topic,
                '--file', self.config.as_posix(),
                '--calibration', self.calibration.as_posix(),
                '--port', str(self.port),
                '--cmdport', str(self.command),
                '--monitor_every', str(self.monitor_every),
                '--monitor_consecutive', str(self.monitor_consecutive),
                '--nohwcheck']
        return argv

    def finalize(self):
        """Ask a running EFU to exit via its TCP command port.

        Prints a diagnostic (but does not raise) if the EFU cannot be reached
        or responds with anything other than '<OK>'.
        """
        import socket
        message = f"Check your system status manager whether {self.binary} is active."
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
            try:
                sock.settimeout(1.0)
                sock.connect(('localhost', self.command))
                sock.sendall(bytes("EXIT\n", "utf-8"))
                received = str(sock.recv(1024), "utf-8")
            except TimeoutError:
                print(f"Communication timed out, is the EFU running? {message}")
                return
            except ConnectionRefusedError:
                # the server is already dead or was not started?
                received = '<OK>'
            if received.strip() != "<OK>":
                print(f"EFU responded '{received.strip()}' when asked to exit. {message}")
@@ -0,0 +1,73 @@
1
+ from __future__ import annotations
2
+ from pathlib import Path
3
+ from os import access, R_OK, W_OK, X_OK
4
+
5
def message(mode) -> str:
    """Human-readable adjective for an ``os.access`` mode constant."""
    names = {R_OK: 'readable', W_OK: 'writable', X_OK: 'executable'}
    return names.get(mode, 'unknown')
7
+
8
+ def ensure_executable(path: str| Path) -> Path:
9
+ from shutil import which
10
+ found = which(path)
11
+ if found is None:
12
+ raise FileNotFoundError(path)
13
+ return Path(found)
14
+
15
+ def ensure_accessible_file(path: str| Path, mode, must_exist=True) -> Path:
16
+ if isinstance(path, str):
17
+ path = Path(path)
18
+ if not isinstance(path, Path):
19
+ raise ValueError(f'{path} is not a Path object')
20
+ if must_exist:
21
+ if not path.exists():
22
+ raise ValueError(f'{path} does not exist')
23
+ if not path.is_file():
24
+ raise ValueError(f'{path} is not a file')
25
+ if not access(path, mode):
26
+ raise ValueError(f'{path} is not {message(mode)}')
27
+ return path
28
+
29
+ def ensure_accessible_directory(path: str| Path, mode) -> Path:
30
+ if isinstance(path, str):
31
+ path = Path(path)
32
+ if not isinstance(path, Path):
33
+ raise ValueError(f'{path} is not a Path object')
34
+ if not path.exists():
35
+ raise ValueError(f'{path} does not exist')
36
+ if not path.is_dir():
37
+ raise ValueError(f'{path} is not a directory')
38
+ if not access(path, mode):
39
+ raise ValueError(f'{path} is not a {message(mode)} directory')
40
+ return path
41
+
42
def ensure_readable_file(path: str| Path) -> Path:
    """Return *path* as a Path after checking it is an existing, readable file."""
    return ensure_accessible_file(path, R_OK)
44
+
45
def ensure_writable_file(path: str| Path) -> Path:
    """Return *path* as a Path after verifying it can be written.

    The file itself need not exist, but its parent directory must be a
    writable directory; if the file exists it must be a writable file.
    Raises ValueError for any failed check.
    """
    if not isinstance(path, Path):
        path = Path(path)
    # Previously these two checks were chained with `and`, silently relying
    # on the first call's Path being truthy so the second operand was
    # returned. Sequential calls make the check order and return explicit.
    ensure_accessible_directory(path.parent, W_OK)
    return ensure_accessible_file(path, W_OK, must_exist=False)
52
+
53
def ensure_readable_directory(path: str| Path) -> Path:
    """Return *path* as a Path after checking it is an existing, readable directory."""
    return ensure_accessible_directory(path, R_OK)
55
+
56
def ensure_writable_directory(path: str| Path) -> Path:
    """Return *path* as a Path after checking it is an existing, writable directory."""
    return ensure_accessible_directory(path, W_OK)
58
+
59
+
60
+ def ensure_path(path: str| Path, access_type, is_dir: bool = False) -> Path:
61
+ if isinstance(path, str):
62
+ path = Path(path)
63
+ if not isinstance(path, Path):
64
+ raise ValueError(f'{path} is not a Path object')
65
+ if not path.exists():
66
+ raise ValueError(f'{path} does not exist')
67
+ if is_dir and not path.is_dir():
68
+ raise ValueError(f'{path} is not a directory')
69
+ if not is_dir and not path.is_file():
70
+ raise ValueError(f'{path} is not a file')
71
+ if not access(path, access_type):
72
+ raise ValueError(f'{path} does not support {access_type}')
73
+ return path
@@ -0,0 +1,33 @@
1
+ from __future__ import annotations
2
+ from dataclasses import dataclass, field
3
+ from pathlib import Path
4
+ from mccode_antlr.common import InstrumentParameter
5
+ from .manager import Manager
6
+ from .ensure import ensure_executable
7
+
8
@dataclass
class EPICSMailbox(Manager):
    """
    Command and control of an EPICS Mailbox server for an instrument

    Parameters
    ----------
    parameters: the instrument parameters which define the PV values
    prefix: a PV value prefix to use with all instrument-defined parameters
    strings: optional list of NT parameter information to configure the
        mailbox when the instrument parameters are not available for
        use in determining the same information.
    """
    parameters: tuple[InstrumentParameter, ...]
    prefix: str
    strings: list[str] = field(default_factory=list)
    _command: Path = field(default_factory=lambda: Path('mp-epics-strings'))

    def __post_init__(self):
        from mccode_plumber.epics import instr_par_nt_to_strings
        # Resolve the mailbox executable on PATH before anything else.
        self._command = ensure_executable(self._command)
        # Fall back to encoding the instrument parameters when no explicit
        # string descriptions were provided.
        if not self.strings:
            self.strings = instr_par_nt_to_strings(self.parameters)

    def __run_command__(self) -> list[str]:
        argv = [self._command.as_posix(), '--prefix', self.prefix]
        argv.extend(self.strings)
        return argv
@@ -0,0 +1,79 @@
1
+ from __future__ import annotations
2
+ from dataclasses import dataclass, field
3
+ from pathlib import Path
4
+ from .manager import Manager
5
+ from .ensure import ensure_executable
6
+
7
+
8
@dataclass
class Forwarder(Manager):
    """
    Manage the execution of a Forwarder to send EPICS PV updates to Kafka

    Parameters
    ----------
    broker: the name or address and port of the broker to which updated
        EPICS values will be sent, once configured. (localhost:9092)
    config: the broker and topic used for configuring the forwarder
        (localhost:9092/ForwardConfig)
    status: the broker and topic used for forwarder status messages
        (localhost:9092/ForwardStatus)
    retrieve: Retrieve values from Kafka at configuration (False == don't)
    verbosity: Control if (Trace, Debug, Warning, Error, or Critical) messages
        should be printed to STDOUT

    Note
    ----
    `config` and `status` can be provided as _only_ their topic if they use the same
    broker as PV updates. In such a case, there will be no '/' character in their input
    value and `lambda value = f'{broker}/{value}'` will replace them.

    """
    broker: str | None = None
    config: str | None = None
    status: str | None = None
    retrieve: bool = False
    verbosity: str | None = None
    _command: Path = field(default_factory=lambda: Path('forwarder-launch'))

    def __post_init__(self):
        from mccode_plumber.kafka import register_kafka_topics, all_exist
        # Resolve the forwarder executable, then fill in defaults and expand
        # bare topic names into 'broker/topic' form.
        self._command =ensure_executable(self._command)
        if self.broker is None:
            self.broker = 'localhost:9092'
        if self.config is None:
            self.config = 'ForwardConfig'
        if self.status is None:
            self.status = 'ForwardStatus'
        if '/' not in self.config:
            self.config = f'{self.broker}/{self.config}'
        if '/' not in self.status:
            self.status = f'{self.broker}/{self.status}'

        # Ensure the config/status topics exist on their brokers before launch;
        # all_exist raises ValueError if registration returned a raw error object.
        for broker_topic in (self.config, self.status):
            b, t = broker_topic.split('/')
            res = register_kafka_topics(b, [t])
            if not all_exist(res.values()):
                raise RuntimeError(f'Missing Kafka topics? {res}')


    def __run_command__(self) -> list[str]:
        # Assemble the forwarder-launch argv from the configured endpoints.
        args = [
            self._command.as_posix(),
            '--config-topic', self.config,
            '--status-topic', self.status,
            '--output-broker', self.broker,
        ]
        if not self.retrieve:
            args.append('--skip-retrieval')
        # Only pass -v when the verbosity string matches a known level.
        if (v:=forwarder_verbosity(self.verbosity)) is not None:
            args.extend(['-v', v])
        return args
72
+
73
+
74
def forwarder_verbosity(v):
    """Normalize a verbosity string to the forwarder's expected capitalization.

    Returns one of 'Trace', 'Debug', 'Warning', 'Error', 'Critical' on a
    case-insensitive match, otherwise None (including for non-string input).
    """
    if not isinstance(v, str):
        return None
    lowered = v.lower()
    for level in ('Trace', 'Debug', 'Warning', 'Error', 'Critical'):
        if level.lower() == lowered:
            return level
    return None
@@ -0,0 +1,113 @@
1
+ from __future__ import annotations
2
+ from dataclasses import dataclass
3
+ from pathlib import Path
4
+ from multiprocessing import Process, Pipe
5
+ from multiprocessing.connection import Connection
6
+ from enum import Enum
7
+ from colorama import Fore, Back, Style
8
+
9
class IOType(Enum):
    """Tag identifying which stream of a managed child process a line came from."""
    stdout = 1
    stderr = 2
12
+
13
+
14
@dataclass
class Manager:
    """
    Command and control of a process

    Properties
    ----------
    _process: a multiprocessing.Process instance, which is undefined for a short
        period during instance creation inside the `start` class method
    _connection: the parent end of the Pipe used to receive child output
    """
    name: str
    style: Style
    _process: Process | None
    _connection: Connection | None

    def __run_command__(self) -> list[str]:
        # Subclasses return the argv list (or a shell string) to execute.
        pass

    def finalize(self):
        # Subclasses may perform a graceful shutdown before terminate().
        pass

    @classmethod
    def fieldnames(cls) -> list[str]:
        """Names of this dataclass's fields, in declaration order."""
        from dataclasses import fields
        return [field.name for field in fields(cls)]

    @classmethod
    def start(cls, **config):
        """Construct a manager from keyword config and launch its child process.

        Raises ValueError if *config* contains keys that are not fields of *cls*.
        """
        names = cls.fieldnames()
        kwargs = {k: config[k] for k in names if k in config}
        if any(k not in names for k in config):
            raise ValueError(f'{config} expected to contain only {names}')
        if '_process' not in kwargs:
            kwargs['_process'] = None
        if '_connection' not in kwargs:
            kwargs['_connection'] = None
        if 'name' not in kwargs:
            kwargs['name'] = 'Managed process'
        if 'style' not in kwargs:
            kwargs['style'] = Fore.WHITE + Back.BLACK
        manager = cls(**kwargs)
        manager._connection, child_conn = Pipe()
        manager._process = Process(target=manager.run, args=(child_conn,))
        manager._process.start()
        return manager

    def stop(self):
        """Gracefully finalize, then terminate the child process."""
        self.finalize()
        self._process.terminate()

    def poll(self):
        """Drain and print queued child output; return True while the child is alive."""
        from sys import stderr
        attn = Fore.BLACK + Back.RED + Style.BRIGHT
        # check for anything received on our end of the connection
        while self._connection.poll():
            # examine what was returned:
            try:
                ret = self._connection.recv()
            except EOFError:
                print(f'{attn}{self.name}: [unexpected halt]{Style.RESET_ALL}')
                return False
            if len(ret) == 2:
                t, line = ret
                line = f'{self.style}{self.name}:{Style.RESET_ALL} {line}'
                if t == IOType.stdout:
                    print(line, end='')
                else:
                    print(line, file=stderr, end='')
            else:
                print(f'{attn}{self.name}: [unknown received data on connection]{Style.RESET_ALL}')
        return self._process.is_alive()

    def run(self, conn):
        """Child-process entry point: run the command, forwarding output over *conn*.

        Each message sent on *conn* is an (IOType, str) tuple.
        """
        from subprocess import Popen, PIPE
        from select import select
        argv = self.__run_command__()

        shell = isinstance(argv, str)
        conn.send((IOType.stdout, f'Starting {argv if shell else " ".join(argv)}\n'))
        process = Popen(argv, shell=shell, stdout=PIPE, stderr=PIPE, bufsize=1, universal_newlines=True, )
        out, err = process.stdout.fileno(), process.stderr.fileno()
        check = [process.stdout, process.stderr]
        while process.poll() is None:
            r, w, x = select(check, [], check, 0.5,)
            for stream in r:
                if stream.fileno() == out:
                    conn.send((IOType.stdout, process.stdout.readline()))
                elif stream.fileno() == err:
                    conn.send((IOType.stderr, process.stderr.readline()))
            for stream in x:
                if stream.fileno() == out:
                    conn.send((IOType.stdout, "EXCEPTION ON STDOUT"))
                elif stream.fileno() == err:
                    conn.send((IOType.stderr, "EXCEPTION ON STDERR"))
        # Process finished, but the buffers may still contain data.
        # BUG FIX: the original wrapped these sends in an un-consumed `map`
        # (so they never executed) and passed two positional arguments to
        # `conn.send`, which accepts a single picklable object.
        for stream in check:
            kind = IOType.stdout if stream.fileno() == out else IOType.stderr
            for line in stream.readlines():
                conn.send((kind, line))