mccode-plumber 0.14.5-py3-none-any.whl → 0.15.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
mccode_plumber/epics.py CHANGED
@@ -1,4 +1,6 @@
  #!/usr/bin/env python3
+ from __future__ import annotations
+
  from p4p.nt import NTScalar
  from p4p.server import Server, StaticProvider
  from p4p.server.thread import SharedPV
@@ -96,7 +98,7 @@ def parse_args():
      return parameters, args


- def main(names: dict[str, NTScalar], prefix: str = None, filename_required: bool = True):
+ def main(names: dict[str, NTScalar], prefix: str | None = None, filename_required: bool = True):
      provider = StaticProvider('mailbox') # 'mailbox' is an arbitrary name

      if filename_required and 'mcpl_filename' not in names:
@@ -118,7 +120,7 @@ def run():
      main(parameters, prefix=args.prefix)


- def start(parameters, prefix: str = None):
+ def start(parameters, prefix: str | None = None):
      from multiprocessing import Process
      proc = Process(target=main, args=(parameters, prefix))
      proc.start()
@@ -74,8 +74,8 @@ class CommandChannel(object):
          :param command_topic_url: The url of the Kafka topic to where the file-writer status/command messages are published.
          """
          kafka_address = KafkaTopicUrl(command_topic_url)
-         self.status_queue = Queue()
-         self.to_thread_queue = Queue()
+         self.status_queue: Queue = Queue()
+         self.to_thread_queue: Queue = Queue()
          thread_kwargs = {
              "host_port": kafka_address.host_port,
              "topic": kafka_address.topic,
@@ -32,7 +32,10 @@ class CommandHandler:
          """
          :return: True if the command completed successfully. False otherwise.
          """
-         current_state = self.command_channel.get_command(self.command_id).state
+         command = self.command_channel.get_command(self.command_id)
+         if command is None:
+             return False
+         current_state = command.state
          if current_state == CommandState.ERROR:
              raise RuntimeError(
                  f'Command failed with error message "{self.get_message()}".'
@@ -52,7 +55,10 @@ class CommandHandler:
          return command.message

      def set_timeout(self, new_timeout: timedelta):
-         self.command_channel.get_command(self.command_id).timeout = new_timeout
+         if command := self.command_channel.get_command(self.command_id):
+             command.timeout = new_timeout

      def get_timeout(self):
-         return self.command_channel.get_command(self.command_id).timeout
+         if command := self.command_channel.get_command(self.command_id):
+             return command.timeout
+         return None
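The timeout accessors above now guard against get_command returning None instead of assuming a command is always found. As a minimal standalone sketch of that assignment-expression guard (Command, lookup and set_timeout are illustrative stand-ins, not package code):

    from datetime import timedelta
    from typing import Optional

    class Command:
        def __init__(self) -> None:
            self.timeout = timedelta(seconds=10)

    def lookup(commands: dict[str, Command], command_id: str) -> Optional[Command]:
        # returns None when the id is unknown, mirroring get_command's contract
        return commands.get(command_id)

    def set_timeout(commands: dict[str, Command], command_id: str, new_timeout: timedelta) -> None:
        # the walrus operator binds and tests in one step; the body is skipped when lookup returns None
        if command := lookup(commands, command_id):
            command.timeout = new_timeout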
@@ -1,3 +1,5 @@
+ from __future__ import annotations
+
  from datetime import datetime, timedelta
  from enum import Enum, auto
  from typing import Optional
@@ -18,7 +20,7 @@ class CommandState(Enum):
      SUCCESS = auto()


- class CommandStatus(object):
+ class CommandStatus:
      """
      The status of a command.
      """
@@ -35,11 +37,11 @@ class CommandStatus(object):
          self._last_update = datetime.now()
          self._state = CommandState.NO_COMMAND
          self._message = ""
-         self._response_code = None
+         self._response_code: int | None = None

-     def __eq__(self, other_status: "CommandStatus"):
+     def __eq__(self, other_status):
          if not isinstance(other_status, CommandStatus):
-             raise NotImplementedError
+             return NotImplemented
          return (
              other_status.command_id == self.command_id
              and other_status.job_id == self.job_id
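Both status classes in this release return the NotImplemented sentinel from __eq__ instead of raising NotImplementedError; returning the sentinel lets Python try the reflected comparison on the other operand and then fall back to identity, so comparing against an unrelated type yields False rather than an exception. A minimal standalone sketch (not package code):

    class Status:
        def __init__(self, command_id: str) -> None:
            self.command_id = command_id

        def __eq__(self, other):
            if not isinstance(other, Status):
                return NotImplemented  # let Python try the other operand, then fall back to identity
            return self.command_id == other.command_id

    print(Status("a") == Status("a"))  # True
    print(Status("a") == 42)           # False, no exception raised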
@@ -1,3 +1,5 @@
+ from __future__ import annotations
+
  from datetime import datetime, timedelta
  from enum import Enum, auto
  from typing import Dict, Optional
@@ -48,7 +50,7 @@ class JobStatus:
          if new_status.message:
              self._message = new_status.message
          self._service_id = new_status.service_id
-         self._file_name = new_status.file_name
+         self._file_name = new_status.file_name or ""
          self._last_update = new_status.last_update
          self._metadata = new_status.metadata

@@ -61,7 +63,7 @@ class JobStatus:
              self.state != JobState.DONE
              and self.state != JobState.ERROR
              and self.state != JobState.TIMEOUT
-             and current_time - self.last_update > self._timeout
+             and self._timeout and current_time - self.last_update > self._timeout
          ):
              self._state = JobState.TIMEOUT
              self._last_update = current_time
@@ -106,8 +108,13 @@ class JobStatus:
          """
          return self._state

+     @state.setter
+     def state(self, new_state: JobState) -> None:
+         self._state = new_state
+         self._last_update = datetime.now()
+
      @property
-     def file_name(self) -> str:
+     def file_name(self) -> str | None:
          """
          The file name of the job. None if the file name is not known.
          """
@@ -120,11 +127,6 @@ class JobStatus:
          self._file_name = new_file_name
          self._last_update = datetime.now()

-     @state.setter
-     def state(self, new_state: JobState) -> None:
-         self._state = new_state
-         self._last_update = datetime.now()
-
      @property
      def message(self) -> str:
          """
@@ -132,6 +134,12 @@ class JobStatus:
          """
          return self._message

+     @message.setter
+     def message(self, new_message: str) -> None:
+         if new_message:
+             self._message = new_message
+             self._last_update = datetime.now()
+
      @property
      def metadata(self) -> Optional[Dict]:
          return self._metadata
@@ -140,8 +148,3 @@ class JobStatus:
      def metadata(self, metadata: Dict) -> None:
          self._metadata = metadata

-     @message.setter
-     def message(self, new_message: str) -> None:
-         if new_message:
-             self._message = new_message
-             self._last_update = datetime.now()
@@ -1,3 +1,5 @@
+ from __future__ import annotations
+
  import uuid
  from datetime import datetime
  from typing import Dict, List, Optional
@@ -84,7 +86,8 @@ class WorkerFinderBase:
          :param job_id: The job identifier of the currently running file-writer job.
          :return: A CommandHandler instance for (more) easily checking the outcome of the "abort" command.
          """
-         return self.try_send_stop_time(service_id, job_id, 0)
+         from datetime import datetime, UTC
+         return self.try_send_stop_time(service_id, job_id, datetime.fromtimestamp(0, UTC))

      def list_known_workers(self) -> List[WorkerStatus]:
          """
@@ -115,7 +118,7 @@ class WorkerFinderBase:
              return JobState.UNAVAILABLE
          return current_job.state

-     def get_job_status(self, job_id: str) -> JobStatus:
+     def get_job_status(self, job_id: str) -> JobStatus | None:
          """
          Get the full (known) status of a specific job.
          :param job_id: The (unique) identifier of the job that we are trying to find the status of.
@@ -63,8 +63,7 @@ class WorkerJobPool(WorkerFinder):
          """
          self.command_channel.add_job_id(job.job_id)
          self.command_channel.add_command_id(job.job_id, job.job_id)
-         self.command_channel.get_command(
-             job.job_id
-         ).state = CommandState.WAITING_RESPONSE
+         if command := self.command_channel.get_command(job.job_id):
+             command.state = CommandState.WAITING_RESPONSE
          self._send_pool_message(job.get_start_message())
          return CommandHandler(self.command_channel, job.job_id)
@@ -16,7 +16,7 @@ class WorkerState(Enum):
      UNAVAILABLE = auto()


- class WorkerStatus(object):
+ class WorkerStatus:
      """
      Contains general status information about a worker.
      """
@@ -27,9 +27,9 @@ class WorkerStatus(object):
          self._timeout = timeout
          self._state = WorkerState.UNAVAILABLE

-     def __eq__(self, other_status: "WorkerStatus") -> bool:
+     def __eq__(self, other_status) -> bool:
          if not isinstance(other_status, WorkerStatus):
-             raise NotImplementedError
+             return NotImplemented
          return (
              self.service_id == other_status.service_id
              and self.state == other_status.state
@@ -55,18 +55,11 @@ class WorkerStatus(object):
          """
          if (
              self.state != WorkerState.UNAVAILABLE
-             and current_time - self.last_update > self._timeout
+             and self._timeout and current_time - self.last_update > self._timeout
          ):
              self._state = WorkerState.UNAVAILABLE
              self._last_update = current_time

-     @property
-     def state(self) -> WorkerState:
-         """
-         The current state of the worker.
-         """
-         return self._state
-
      @property
      def service_id(self) -> str:
          """
@@ -82,7 +75,13 @@ class WorkerStatus(object):
          """
          return self._last_update

+     @property
+     def state(self) -> WorkerState:
+         return self._state
+
      @state.setter
      def state(self, new_state: WorkerState):
          self._last_update = datetime.now()
          self._state = new_state
+
+
@@ -1,3 +1,5 @@
+ from __future__ import annotations
+
  import uuid
  from datetime import datetime, timedelta

@@ -15,7 +17,7 @@ class WriteJob:
          file_name: str,
          broker: str,
          start_time: datetime,
-         stop_time: datetime = None,
+         stop_time: datetime | None = None,
          job_id="",
          instrument_name: str = "",
          run_name: str = "",
@@ -33,10 +35,7 @@ class WriteJob:
          else:
              self.job_id = str(uuid.uuid1())
          self.start = start_time
-         if stop_time is None:
-             self.stop = self.start + timedelta(days=365.25 * 10)
-         else:
-             self.stop = stop_time
+         self.stop = stop_time or self.start + timedelta(days=365.25 * 10)
          self._service_id = ""
          self.broker = broker
          self.instrument_name = instrument_name
mccode_plumber/kafka.py CHANGED
@@ -33,7 +33,7 @@ def register_kafka_topics(broker: str, topics: list[str]):
      config = {
          # 'cleanup.policy': 'delete',
          # 'delete.retention.ms': 60000,
-         'max.message.bytes': 104857600,
+         'max.message.bytes': '104857600',
          # 'retention.bytes': 10737418240,
          # 'retention.ms': 30000,
          # 'segment.bytes': 104857600,
@@ -21,9 +21,10 @@ class EventFormationUnitConfig:
      def from_dict(cls, data: dict):
          required = ('binary', 'config', 'calibration', 'topic', 'port')
          if any(req not in data for req in required):
-             msg = [req for req in required if req not in data]
-             msg = ', '.join(msg)
-             raise ValueError(f"Missing required value{'' if len(msg)==1 else 's'}: {msg}")
+             missing = [req for req in required if req not in data]
+             msg = ', '.join(missing)
+             val = f"value{'' if len(missing) == 1 else 's'}"
+             raise ValueError(f"Missing required {val}: {msg}")
          binary = ensure_readable_file(data['binary'])
          config = ensure_readable_file(data['config'])
          calibration = ensure_readable_file(data['calibration'])
@@ -7,7 +7,23 @@ def message(mode) -> str:

  def ensure_executable(path: str| Path) -> Path:
      from shutil import which
-     found = which(path)
+     import os
+     p = Path(path)
+     # If the path exists as given, accept it (handles absolute and relative files)
+     if p.exists():
+         return p
+
+     # On Windows try PATHEXT extensions for provided path (handles .py etc.)
+     if os.name == "nt":
+         pathext = os.environ.get("PATHEXT", ".COM;.EXE;.BAT;.CMD;.PY;.PYW").split(
+             os.pathsep)
+         for ext in pathext:
+             candidate = Path(str(p) + ext)
+             if candidate.exists():
+                 return candidate
+
+     # Fallback to shutil.which (searches PATH and PATHEXT)
+     found = which(str(path))
      if found is None:
          raise FileNotFoundError(path)
      return Path(found)
@@ -35,20 +35,26 @@ class Forwarder(Manager):
      retrieve: bool = False
      verbosity: str | None = None
      _command: Path = field(default_factory=lambda: Path('forwarder-launch'))
+     _broker: str = field(default='localhost:9092')
+     _config: str = field(default='ForwardConfig')
+     _status: str = field(default='ForwardStatus')

      def __post_init__(self):
          from mccode_plumber.kafka import register_kafka_topics, all_exist
          self._command =ensure_executable(self._command)
          if self.broker is None:
-             self.broker = 'localhost:9092'
+             self.broker = self._broker
          if self.config is None:
-             self.config = 'ForwardConfig'
+             self.config = self._config
          if self.status is None:
-             self.status = 'ForwardStatus'
+             self.status = self._status
          if '/' not in self.config:
              self.config = f'{self.broker}/{self.config}'
          if '/' not in self.status:
              self.status = f'{self.broker}/{self.status}'
+         self._broker = self.broker
+         self._config = self.config
+         self._status = self.status

          for broker_topic in (self.config, self.status):
              b, t = broker_topic.split('/')
@@ -58,11 +64,11 @@ class Forwarder(Manager):


      def __run_command__(self) -> list[str]:
-         args = [
+         args: list[str] = [
              self._command.as_posix(),
-             '--config-topic', self.config,
-             '--status-topic', self.status,
-             '--output-broker', self.broker,
+             '--config-topic', self._config,
+             '--status-topic', self._status,
+             '--output-broker', self._broker,
          ]
          if not self.retrieve:
              args.append('--skip-retrieval')
@@ -1,10 +1,11 @@
  from __future__ import annotations
  from dataclasses import dataclass
- from pathlib import Path
- from multiprocessing import Process, Pipe
- from multiprocessing.connection import Connection
+ from subprocess import Popen, PIPE
+ from threading import Thread
  from enum import Enum
  from colorama import Fore, Back, Style
+ from colorama.ansi import AnsiStyle
+

  class IOType(Enum):
      stdout = 1
@@ -18,16 +19,16 @@ class Manager:

      Properties
      ----------
-     _process: a multiprocessing.Process instance, which is undefined for a short
-         period during instance creation inside the `start` class method
+     _process: a subprocess.Popen instance
      """
      name: str
-     style: Style
-     _process: Process | None
-     _connection: Connection | None
+     style: AnsiStyle
+     _process: Popen | None
+     _stdout_thread: Thread | None
+     _stderr_thread: Thread | None

      def __run_command__(self) -> list[str]:
-         pass
+         return []

      def finalize(self):
          pass
@@ -37,77 +38,86 @@ class Manager:
          from dataclasses import fields
          return [field.name for field in fields(cls)]

+     def _read_stream(self, stream, io_type: IOType):
+         """Read lines from stream and print them until EOF.
+
+         This replaces the previous behaviour of sending lines over a
+         multiprocessing Connection. Printing directly from the reader
+         threads is sufficient because the manager previously only used
+         the connection to relay subprocess stdout/stderr back to the
+         parent process for display.
+         """
+         try:
+             for line in iter(stream.readline, ''):
+                 if not line:
+                     break
+                 # format and print the line, preserving original behaviour
+                 formatted = f'{self.style}{self.name}:{Style.RESET_ALL} {line}'
+                 if io_type == IOType.stdout:
+                     print(formatted, end='')
+                 else:
+                     from sys import stderr
+                     print(formatted, file=stderr, end='')
+         except ValueError:
+             pass # stream closed
+         finally:
+             try:
+                 stream.close()
+             except Exception:
+                 pass
+
      @classmethod
      def start(cls, **config):
          names = cls.fieldnames()
          kwargs = {k: config[k] for k in names if k in config}
          if any(k not in names for k in config):
              raise ValueError(f'{config} expected to contain only {names}')
-         if '_process' not in kwargs:
-             kwargs['_process'] = None
-         if '_connection' not in kwargs:
-             kwargs['_connection'] = None
+         for p in ('_process', '_stdout_thread', '_stderr_thread'):
+             if p not in kwargs:
+                 kwargs[p] = None
          if 'name' not in kwargs:
              kwargs['name'] = 'Managed process'
          if 'style' not in kwargs:
              kwargs['style'] = Fore.WHITE + Back.BLACK
+
          manager = cls(**kwargs)
-         manager._connection, child_conn = Pipe()
-         manager._process = Process(target=manager.run, args=(child_conn,))
-         manager._process.start()
+
+         argv = manager.__run_command__()
+         shell = isinstance(argv, str)
+         # announce start directly instead of sending via a Connection
+         print(f'Starting {argv if shell else " ".join(argv)}')
+
+         manager._process = Popen(
+             argv, shell=shell, stdout=PIPE, stderr=PIPE, bufsize=1,
+             universal_newlines=True,
+         )
+         manager._stdout_thread = Thread(
+             target=manager._read_stream,
+             args=(manager._process.stdout, IOType.stdout),
+             daemon=True,
+         )
+         manager._stderr_thread = Thread(
+             target=manager._read_stream,
+             args=(manager._process.stderr, IOType.stderr),
+             daemon=True,
+         )
+         manager._stdout_thread.start()
+         manager._stderr_thread.start()
          return manager

      def stop(self):
          self.finalize()
-         self._process.terminate()
+         if self._process:
+             self._process.terminate()
+             self._process.wait()

      def poll(self):
-         from sys import stderr
-         attn = Fore.BLACK + Back.RED + Style.BRIGHT
-         # check for anything received on our end of the connection
-         while self._connection.poll():
-             # examine what was returned:
-             try:
-                 ret = self._connection.recv()
-             except EOFError:
-                 print(f'{attn}{self.name}: [unexpected halt]{Style.RESET_ALL}')
-                 return False
-             if len(ret) == 2:
-                 t, line = ret
-                 line = f'{self.style}{self.name}:{Style.RESET_ALL} {line}'
-                 if t == IOType.stdout:
-                     print(line, end='')
-                 else:
-                     print(line, file=stderr, end='')
-             else:
-                 print(f'{attn}{self.name}: [unknown received data on connection]{Style.RESET_ALL}')
-         return self._process.is_alive()
-
-     def run(self, conn):
-         from subprocess import Popen, PIPE
-         from select import select
-         argv = self.__run_command__()
+         """Check whether the managed process is still running.

-         shell = isinstance(argv, str)
-         conn.send((IOType.stdout, f'Starting {argv if shell else " ".join(argv)}\n'))
-         process = Popen(argv, shell=shell, stdout=PIPE, stderr=PIPE, bufsize=1, universal_newlines=True, )
-         out, err = process.stdout.fileno(), process.stderr.fileno()
-         check = [process.stdout, process.stderr]
-         while process.poll() is None:
-             r, w, x = select(check, [], check, 0.5,)
-             for stream in r:
-                 if stream.fileno() == out:
-                     conn.send((IOType.stdout, process.stdout.readline()))
-                 elif stream.fileno() == err:
-                     conn.send((IOType.stderr, process.stderr.readline()))
-             for stream in x:
-                 if stream.fileno() == out:
-                     conn.send((IOType.stdout, "EXCEPTION ON STDOUT"))
-                 elif stream.fileno() == err:
-                     conn.send((IOType.stderr, "EXCEPTION ON STDERR"))
-         # Process finished, but the buffers may still contain data:
-         for stream in check:
-             if stream.fileno() == out:
-                 map(lambda line: conn.send(IOType.stdout, line), stream.readlines())
-             elif stream.fileno() == err:
-                 map(lambda line: conn.send(IOType.stderr, line), stream.readlines())
+         Previously this drained and printed any messages received over a
+         multiprocessing Connection. Reader threads now handle printing,
+         so poll only needs to report process liveness.
+         """
+         if not self._process:
+             return False
+         return self._process.poll() is None
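The Manager rewrite above swaps the multiprocessing Pipe/select relay for a subprocess.Popen whose stdout and stderr are drained by two daemon reader threads. A minimal standalone sketch of that pattern (illustrative command and names, not the package class):

    from subprocess import Popen, PIPE
    from threading import Thread

    def _relay(stream, label: str) -> None:
        # runs in a daemon thread; prints each line until the stream reaches EOF
        for line in iter(stream.readline, ''):
            print(f'{label}: {line}', end='')
        stream.close()

    # any long-running command works here; 'ping -c 3 localhost' assumes a Unix-like ping
    proc = Popen(['ping', '-c', '3', 'localhost'],
                 stdout=PIPE, stderr=PIPE, bufsize=1, universal_newlines=True)
    for name, stream in (('out', proc.stdout), ('err', proc.stderr)):
        Thread(target=_relay, args=(stream, name), daemon=True).start()

    print('still running' if proc.poll() is None else 'finished')  # liveness check, like Manager.poll
    proc.wait()                                                     # block until the child exits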
@@ -17,17 +17,17 @@ TOPICS = {
  }
  PREFIX = 'mcstas:'

- def guess_instr_config(name: str):
+ def guess_instr_config(name: str) -> Path:
      guess = f'/event-formation-unit/configs/{name}/configs/{name}.json'
      return ensure_readable_file(Path(guess))


- def guess_instr_calibration(name: str):
+ def guess_instr_calibration(name: str) -> Path:
      guess = f'/event-formation-unit/configs/{name}/configs/{name}nullcalib.json'
      return ensure_readable_file(Path(guess))


- def guess_instr_efu(name: str):
+ def guess_instr_efu(name: str) -> Path:
      guess = name.split('_')[0].split('.')[0].split('-')[0].lower()
      return ensure_executable(Path(guess))

@@ -193,8 +193,10 @@ def efu_parameter(s: str):
      # likely to be needed. Finally, the config file can also be supplied to change, e.g.,
      # number of pixels or rings, etc.
      parts = s.split(',')
-     data = {'topic': TOPICS['event'], 'port': 9000, 'binary': ensure_executable(parts[0]),}
-     data['name'] = data['binary'].stem
+     binary: Path = ensure_executable(parts[0])
+     data : dict[str, int | str | Path] = {
+         'topic': TOPICS['event'], 'port': 9000, 'binary': binary, 'name': binary.stem
+     }

      if len(parts) > 1 and (len(parts) > 2 or not parts[1].isnumeric()):
          data['calibration'] = parts[1]
@@ -1,3 +1,8 @@
+ from __future__ import annotations
+
+ from typing import Union
+
+
  def make_parser():
      from mccode_plumber import __version__
      from restage.splitrun import make_splitrun_parser
@@ -16,7 +21,7 @@ def monitors_to_kafka_callback_with_arguments(
  ):
      from mccode_to_kafka.sender import send_histograms

-     partial_kwargs = {'broker': broker}
+     partial_kwargs: dict[str, Union[str,list[str]]] = {'broker': broker}
      if topic is not None and source is not None and names is not None and len(names) > 1:
          raise ValueError("Cannot specify both topic/source and multiple names simultaneously.")

mccode_plumber/writer.py CHANGED
@@ -137,7 +137,7 @@ def insert_events_in_nexus_structure(ns: dict, config: dict):
      return ns


- def get_writer_pool(broker: str = None, job: str = None, command: str = None):
+ def get_writer_pool(broker: str | None = None, job: str | None = None, command: str | None = None):
      from .file_writer_control import WorkerJobPool
      print(f'Create a Writer pool for {broker=} {job=} {command=}')
      pool = WorkerJobPool(f"{broker}/{job}", f"{broker}/{command}")
@@ -151,17 +151,17 @@ def make_define_nexus_structure():
      def define_nexus_structure(
              instr: Path | str,
              pvs: list[dict],
-             title: str = None,
-             event_stream: dict[str, str] = None,
+             title: str | None = None,
+             event_stream: dict[str, str] | None = None,
              file: Path | None = None,
              func: Callable[[Instr], dict] | None = None,
              binary: Path | None = None,
-             origin: str = None):
+             origin: str | None = None):
          import json
          from .mccode import get_mcstas_instr
          if file is not None and file.exists():
-             with open(file, 'r') as file:
-                 nexus_structure = json.load(file)
+             with open(file, 'r') as f:
+                 nexus_structure = json.load(f)
          elif func is not None:
              nexus_structure = func(get_mcstas_instr(instr))
          elif binary is not None and binary.exists():
@@ -173,7 +173,7 @@ def make_define_nexus_structure():
          else:
              nexus_structure = default_nexus_structure(get_mcstas_instr(instr), origin=origin)
          nexus_structure = add_pvs_to_nexus_structure(nexus_structure, pvs)
-         nexus_structure = add_title_to_nexus_structure(nexus_structure, title)
+         nexus_structure = add_title_to_nexus_structure(nexus_structure, title or 'Unknown title')
          # nexus_structure = insert_events_in_nexus_structure(nexus_structure, event_stream)
          return nexus_structure
      return define_nexus_structure
@@ -1,17 +1,25 @@
  Metadata-Version: 2.4
  Name: mccode-plumber
- Version: 0.14.5
+ Version: 0.15.1
  Author-email: Gregory Tucker <gregory.tucker@ess.eu>
  Classifier: License :: OSI Approved :: BSD License
+ Classifier: Programming Language :: Python :: 3
+ Classifier: Programming Language :: Python :: 3 :: Only
+ Classifier: Programming Language :: Python :: 3.11
+ Classifier: Programming Language :: Python :: 3.12
+ Classifier: Programming Language :: Python :: 3.13
  Description-Content-Type: text/markdown
  Requires-Dist: p4p
  Requires-Dist: kafka-python>=2.2.11
  Requires-Dist: ess-streaming-data-types>=0.14.0
- Requires-Dist: restage>=0.9.1
+ Requires-Dist: restage>=0.10.1
  Requires-Dist: mccode-to-kafka>=0.3.1
- Requires-Dist: moreniius>=0.6.2
+ Requires-Dist: moreniius>=0.6.3
  Requires-Dist: icecream
  Requires-Dist: ephemeral-port-reserve
+ Provides-Extra: test
+ Requires-Dist: pytest; extra == "test"
+ Requires-Dist: niess>=0.1.4; extra == "test"

  # McCode Plumber
  Setup, run, and teardown the infrastructure for splitrun McCode scans sending data through Kafka into NeXus
@@ -0,0 +1,36 @@
+ mccode_plumber/__init__.py,sha256=ZQMwyPQ6GyoEkKXj7aWETPqhXfwUbDOD5EpvlXj1c18,148
+ mccode_plumber/conductor.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ mccode_plumber/epics.py,sha256=BqkT4A0i1VRMshZU-L0NlJy3BoVDcpHkoKm6gnVB4C8,6217
+ mccode_plumber/epics_watcher.py,sha256=Jiz761A5NfoUzJ6ZzBGg8_BewFTmHDo5qYgh_DZHx_4,3973
+ mccode_plumber/forwarder.py,sha256=yzjb--r8M6vNSsWvvOi5IjXqpRn8MvT13cv89ezNEeU,3994
+ mccode_plumber/kafka.py,sha256=1BVTsGmiFJPkl-5RVJbkLm68AKRCp2NyngPjMcSutBs,2501
+ mccode_plumber/mccode.py,sha256=vc4Gb5WhdOfuDRjEejU3Flps0C2A60sfFbMrxmKLhn0,2189
+ mccode_plumber/splitrun.py,sha256=E9O88_GnoZ6vdmtgqW2kcHhg0ytJ_FH-oQDtMbSzbAI,2067
+ mccode_plumber/utils.py,sha256=E8NoGo_3Z-pPEpzicVGSWfQOX8p3eR-GxElT33-kX5U,2167
+ mccode_plumber/writer.py,sha256=10Y6r6s9oXWM76HYJcBSW_a0em-l_E9WJ8y22vn_-JA,21240
+ mccode_plumber/file_writer_control/CommandChannel.py,sha256=Ge5O71tZqpPmgcyYVcQe6XnxkE9r71Tf3he1ndvOFlQ,9038
+ mccode_plumber/file_writer_control/CommandHandler.py,sha256=LAmr6SpriPfVJd6C38XZ2TsSoO2Dh0iSIzyo8Q3gfrc,2376
+ mccode_plumber/file_writer_control/CommandStatus.py,sha256=acvS-KTH4hYD4xNw3i3FX0d4IukheWvKBmZ--rh1aj0,4536
+ mccode_plumber/file_writer_control/InThreadStatusTracker.py,sha256=CT20ch2Kv3VPvrM6reNMKfdH3Lbi4y6J_JXV10RALXM,10162
+ mccode_plumber/file_writer_control/JobHandler.py,sha256=KKBTaXWSzZpfuj2ak1kboRkXKmuSuOLmT3KHwUqNHLc,4220
+ mccode_plumber/file_writer_control/JobStatus.py,sha256=_hALDNF8ohi0KeSmG1g2BKoMB5d-ZjJN8_D7u5Q7t2c,4484
+ mccode_plumber/file_writer_control/KafkaTopicUrl.py,sha256=dG6Aj2tYo1UGZferxyvURUAr77z9dhBN_SRzizzt1Vo,736
+ mccode_plumber/file_writer_control/StateExtractor.py,sha256=DmHYtfzUuEX4kkqOKSw2Q2CWagvS2XJX5R3I6fBBglU,2011
+ mccode_plumber/file_writer_control/WorkerFinder.py,sha256=sy2jBwAYGddc29LwFCmAusrMcSKQuHJBTKeoHf8ga6A,6075
+ mccode_plumber/file_writer_control/WorkerJobPool.py,sha256=F5L1AxMr9V9brjCrJ9MMmeMeLuE2qvubXB6neyMz2KU,2941
+ mccode_plumber/file_writer_control/WorkerStatus.py,sha256=07ua29ORq31SdTUWs2GIXzuDih_3xpuPyX6Wyq7Zfw4,2753
+ mccode_plumber/file_writer_control/WriteJob.py,sha256=rmzG8Nbktq8f1y6MDeRzBSfT8bjNCeelSCPRqHQJ4KQ,2695
+ mccode_plumber/file_writer_control/__init__.py,sha256=Wp8A7JOB0vgpwAbvI9ik5cBlZa6lY-cq8dxYGRTRs0M,285
+ mccode_plumber/manage/__init__.py,sha256=l-ZiKPjvahBP1oG-iwLHWU5m5YbZbyWVtmiZst5Wluo,693
+ mccode_plumber/manage/efu.py,sha256=y4V-LBQ9Xt5YATEfW0XkZ9KcPGo230-5BIemthSaMD8,5613
+ mccode_plumber/manage/ensure.py,sha256=rS1kjUGhbks42_jsAZ2GjnhzJmpYqn59h6y3HJgshLs,3166
+ mccode_plumber/manage/epics.py,sha256=oQt_hL-7KgtI9Kmwyw8L7a7RxQe7YJWRbmHnKIqMLpQ,1310
+ mccode_plumber/manage/forwarder.py,sha256=stDKEVVfepCOqs5vBCVepaO3d63KJT-6okxooIl9dGs,3136
+ mccode_plumber/manage/manager.py,sha256=hkvtH-brtn1IXQB6vN9VB1UF95nSrs48uiIebS48wJI,3876
+ mccode_plumber/manage/orchestrate.py,sha256=Xqu0mm1eBzwf_pRYJzomk0YtnRtlBeQE5dvw78IuqiE,17985
+ mccode_plumber/manage/writer.py,sha256=SEv1U14L01Y9-BcaJKPei4Ah2LFfwexDy9FTjpvtSEs,2245
+ mccode_plumber-0.15.1.dist-info/METADATA,sha256=S6rNqxvR7-kvA6JZ9vCbC1CsntBynSgRciR0kJENu0g,957
+ mccode_plumber-0.15.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ mccode_plumber-0.15.1.dist-info/entry_points.txt,sha256=k3LDo_9HG2v4-IgMYlNukphdMmaAT6zkJZYaB1zJh3c,900
+ mccode_plumber-0.15.1.dist-info/top_level.txt,sha256=kCCIpYtKHCKWxiPEqX9J1UaGEm-ze0Qb-cemBCEPhDA,15
+ mccode_plumber-0.15.1.dist-info/RECORD,,
@@ -1,36 +0,0 @@
- mccode_plumber/__init__.py,sha256=ZQMwyPQ6GyoEkKXj7aWETPqhXfwUbDOD5EpvlXj1c18,148
- mccode_plumber/conductor.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- mccode_plumber/epics.py,sha256=SvZQN_vmI0NCGaIykIOQLO3tbgv79AMm60zGCmFQ56s,6167
- mccode_plumber/epics_watcher.py,sha256=Jiz761A5NfoUzJ6ZzBGg8_BewFTmHDo5qYgh_DZHx_4,3973
- mccode_plumber/forwarder.py,sha256=yzjb--r8M6vNSsWvvOi5IjXqpRn8MvT13cv89ezNEeU,3994
- mccode_plumber/kafka.py,sha256=Q3oAuk7c-PS7b7zWSDhOR3dOO8R6Q8Y3HPI7K-Kt-RU,2499
- mccode_plumber/mccode.py,sha256=vc4Gb5WhdOfuDRjEejU3Flps0C2A60sfFbMrxmKLhn0,2189
- mccode_plumber/splitrun.py,sha256=K5VetcWpw8Aa-4d2BQLyshkwXkiVdzzJRB1mr2eNyP0,1971
- mccode_plumber/utils.py,sha256=E8NoGo_3Z-pPEpzicVGSWfQOX8p3eR-GxElT33-kX5U,2167
- mccode_plumber/writer.py,sha256=YN2I_gvoXn8XkHX_BVbdvCGhJC1aWbwU9VtiYFrcGS0,21185
- mccode_plumber/file_writer_control/CommandChannel.py,sha256=U32mvk8Ctlw8vg38_ULP__-UAG2funWIkhEFo2xahiY,9024
- mccode_plumber/file_writer_control/CommandHandler.py,sha256=4dPsMyBYx5hKuiVw5HoPMOFJ0QFxmrI3XK3TA_a8ZGE,2207
- mccode_plumber/file_writer_control/CommandStatus.py,sha256=biNK58vY1H-USYAl9uB_JNQyMohBj1cwJwmXktndd4E,4517
- mccode_plumber/file_writer_control/InThreadStatusTracker.py,sha256=CT20ch2Kv3VPvrM6reNMKfdH3Lbi4y6J_JXV10RALXM,10162
- mccode_plumber/file_writer_control/JobHandler.py,sha256=KKBTaXWSzZpfuj2ak1kboRkXKmuSuOLmT3KHwUqNHLc,4220
- mccode_plumber/file_writer_control/JobStatus.py,sha256=ncLYEcTAOdrIASQvY0jJAX3fC4qF9tyzLavm5YUOhlQ,4416
- mccode_plumber/file_writer_control/KafkaTopicUrl.py,sha256=dG6Aj2tYo1UGZferxyvURUAr77z9dhBN_SRzizzt1Vo,736
- mccode_plumber/file_writer_control/StateExtractor.py,sha256=DmHYtfzUuEX4kkqOKSw2Q2CWagvS2XJX5R3I6fBBglU,2011
- mccode_plumber/file_writer_control/WorkerFinder.py,sha256=nhItfO9_o8UAlvNyCSV6OvbcftHRKGpe48U2PasWsQ0,5960
- mccode_plumber/file_writer_control/WorkerJobPool.py,sha256=YVjSLsSULa6aDzGkA0GJIjv7kN9SuS1ipxdjwT2EGk0,2928
- mccode_plumber/file_writer_control/WorkerStatus.py,sha256=uw1q-Pvf1o2hxpMIPVwdtnAXLcO7VeEpcZ3wKjxxdsk,2826
- mccode_plumber/file_writer_control/WriteJob.py,sha256=-2tQvfajctf6Bn19c9hT9N1lOP0uTo7SZRMnXV1A-aA,2721
- mccode_plumber/file_writer_control/__init__.py,sha256=Wp8A7JOB0vgpwAbvI9ik5cBlZa6lY-cq8dxYGRTRs0M,285
- mccode_plumber/manage/__init__.py,sha256=l-ZiKPjvahBP1oG-iwLHWU5m5YbZbyWVtmiZst5Wluo,693
- mccode_plumber/manage/efu.py,sha256=yzsQ4cpsIvh2GbrQ9rwL_cGAW3SEoEBAdWOTnj2ia74,5572
- mccode_plumber/manage/ensure.py,sha256=0HaxcHYzvcDs6hBvBI39EZsIDt_rVA9CjHm5_yvOZcs,2601
- mccode_plumber/manage/epics.py,sha256=oQt_hL-7KgtI9Kmwyw8L7a7RxQe7YJWRbmHnKIqMLpQ,1310
- mccode_plumber/manage/forwarder.py,sha256=YYvHaOJ4djBXM7PFF2NBbnNDO6nnrwNOfSHaVOh16Ro,2876
- mccode_plumber/manage/manager.py,sha256=zzrduroUL-jwQ9BrPTdAm1IW4dGv5bi8-ieIQ6qiQ6M,4141
- mccode_plumber/manage/orchestrate.py,sha256=BvtkoBwWuW6wFncblfQA_XmQnONvkhLm0Apx3bFJ_84,17910
- mccode_plumber/manage/writer.py,sha256=SEv1U14L01Y9-BcaJKPei4Ah2LFfwexDy9FTjpvtSEs,2245
- mccode_plumber-0.14.5.dist-info/METADATA,sha256=7uxHwllR4jvupGljOnKcMN7H5nRLlwF5atGQPjDH4MI,594
- mccode_plumber-0.14.5.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- mccode_plumber-0.14.5.dist-info/entry_points.txt,sha256=k3LDo_9HG2v4-IgMYlNukphdMmaAT6zkJZYaB1zJh3c,900
- mccode_plumber-0.14.5.dist-info/top_level.txt,sha256=kCCIpYtKHCKWxiPEqX9J1UaGEm-ze0Qb-cemBCEPhDA,15
- mccode_plumber-0.14.5.dist-info/RECORD,,