mccode-plumber 0.11.1-py3-none-any.whl → 0.13.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,435 @@
+ from __future__ import annotations
+
+ from pathlib import Path
+ from datetime import datetime, timezone
+ from mccode_antlr.common import InstrumentParameter
+ from mccode_antlr.instr import Instr
+ from mccode_plumber.manage import ensure_readable_file, ensure_writable_file, ensure_executable
+ from mccode_plumber.manage.efu import EventFormationUnitConfig
+
+ TOPICS = {
+     'parameter': 'SimulatedParameters',
+     'event': 'SimulatedEvents',
+     'config': 'ForwardConfig',
+     'status': 'ForwardStatus',
+     'command': 'WriterCommand',
+     'pool': 'WriterPool',
+ }
+ PREFIX = 'mcstas:'
+
+ def guess_instr_config(name: str):
+     guess = f'/event-formation-unit/configs/{name}/configs/{name}.json'
+     return ensure_readable_file(Path(guess))
+
+
+ def guess_instr_calibration(name: str):
+     guess = f'/event-formation-unit/configs/{name}/configs/{name}nullcalib.json'
+     return ensure_readable_file(Path(guess))
+
+
+ def guess_instr_efu(name: str):
+     guess = name.split('_')[0].split('.')[0].split('-')[0].lower()
+     return ensure_executable(Path(guess))
+
+
+ def register_topics(broker: str, topics: list[str]):
+     """Ensure that topics are registered in the Kafka broker."""
+     from mccode_plumber.kafka import register_kafka_topics, all_exist
+     res = register_kafka_topics(broker, topics)
+     if not all_exist(res.values()):
+         raise RuntimeError(f'Missing Kafka topics? {res}')
+
+
+ def augment_structure(
+     parameters: tuple[InstrumentParameter, ...],
+     structure: dict,
+     title: str,
+ ):
+     """Helper to add stream JSON entries for Instr parameters to a NexusStructure
+
+     Parameters
+     ----------
+     parameters : tuple[InstrumentParameter, ...]
+         Instrument runtime parameters
+     structure : dict
+         NexusStructure JSON representing the instrument
+     title : str
+         Informative string about the simulation, to be inserted into the structure
+     """
+     from mccode_plumber.writer import (
+         add_title_to_nexus_structure, add_pvs_to_nexus_structure,
+         construct_writer_pv_dicts_from_parameters,
+     )
+     pvs = construct_writer_pv_dicts_from_parameters(parameters, PREFIX, TOPICS['parameter'])
+     data = add_pvs_to_nexus_structure(structure, pvs)
+     data = add_title_to_nexus_structure(data, title)
+     return data
+
+
+ def stop_writer(broker, job_id, timeout):
+     from time import sleep
+     from datetime import timedelta
+     from mccode_plumber.file_writer_control import WorkerJobPool
+     from mccode_plumber.file_writer_control.JobStatus import JobState
+     # The process is now told to switch to a job-specific 'control' topic,
+     # so we should send the stop command there. This is the 'command_topic_url'?
+     def back_stop(job_topic, command_topic):
+         job_topic_url = f"{broker}/{job_topic}"
+         command_topic_url = f"{broker}/{command_topic}"
+         pool = WorkerJobPool(job_topic_url, command_topic_url)
+         sleep(1)
+         pool.try_send_stop_now(None, job_id)
+         state = pool.get_job_state(job_id)
+         give_up = datetime.now() + timedelta(seconds=timeout)
+         while state not in (JobState.DONE, JobState.ERROR, JobState.TIMEOUT) and datetime.now() < give_up:
+             sleep(1)
+             state = pool.get_job_state(job_id)
+         return state
+
+     jstate = back_stop(TOPICS['pool'], TOPICS['command'])
+     if jstate != JobState.DONE:
+         print(f'Done trying to stop {job_id} -> {jstate}')
+
+
+ def start_writer(start_time: datetime,
+                  structure: dict,
+                  filename: Path,
+                  broker: str,
+                  timeout: float):
+     from uuid import uuid1
+     from mccode_plumber.writer import writer_start
+     job_id = str(uuid1())
+     success = False
+     name = filename.name
+     try:
+         print(f"Starting {job_id} from {start_time} for file {name} under kafka-to-nexus' working directory")
+         start, handler = writer_start(
+             start_time.isoformat(), structure, filename=name,
+             stop_time_string=None,
+             broker=broker, job_topic=TOPICS['pool'], command_topic=TOPICS['command'],
+             control_topic=TOPICS['command'],  # don't switch topics
+             timeout=timeout, job_id=job_id, wait=False
+         )
+         # success = start.is_done()  # this causes an infinite hang?
+         success = True
+     except RuntimeError as e:
+         if job_id in str(e):
+             # starting the job failed, so try to kill it
+             print(f"Starting {job_id} failed! Error: {e}")
+             stop_writer(broker, job_id, timeout)
+
+     return job_id, success
+
+
+ def get_topics_iter(data: list | tuple):
+     topics = set()
+     for entry in data:
+         if isinstance(entry, dict):
+             topics.update(get_topics_dict(entry))
+         elif isinstance(entry, (list, tuple)):
+             topics.update(get_topics_iter(entry))
+     return topics
+
+
+ def get_topics_dict(data: dict):
+     topics = set()
+     for k, v in data.items():
+         if isinstance(v, dict):
+             topics.update(get_topics_dict(v))
+         elif isinstance(v, (list, tuple)):
+             topics.update(get_topics_iter(list(v)))
+         elif k == 'topic':
+             topics.add(v)
+     return topics
+
+
+ def get_topics_json(data: dict) -> list[str]:
+     """Traverse a loaded JSON object and return the found list of topic names"""
+     return list(get_topics_dict(data))
+
+
+ def load_file_json(file: str | Path):
+     from json import load
+     file = ensure_readable_file(file)
+     with file.open('r') as f:
+         return load(f)
+
+
+ def get_instr_name_and_parameters(file: str | Path):
+     file = ensure_readable_file(file)
+     if file.suffix == '.h5':
+         # Shortcut loading the whole Instr:
+         import h5py
+         from mccode_antlr.io.hdf5 import HDF5IO
+         with h5py.File(file, 'r', driver='core', backing_store=False) as f:
+             name = f.attrs['name']
+             parameters = HDF5IO.load(f['parameters'])
+         return name, parameters
+     elif file.suffix == '.instr':
+         # No shortcuts
+         from mccode_antlr.loader import load_mcstas_instr
+         instr = load_mcstas_instr(file)
+         return instr.name, instr.parameters
+     elif file.suffix.lower() == '.json':
+         # No shortcuts, but much faster
+         from mccode_antlr.io.json import load_json
+         instr = load_json(file)
+         return instr.name, instr.parameters
+
+     raise ValueError('Unsupported file extension')
+
+
+ def efu_parameter(s: str):
+     if ':' in s:
+         # with any ':' we require fully specified
+         # name:{name},binary:{binary},config:{config_path},calibration:{calibration_path},topic:{topic},port:{port}
+         # what about spaces? or windows-style paths with C:/...
+         return EventFormationUnitConfig.from_cli_str(s)
+     # otherwise, allow an abbreviated format utilizing guesses
+     # Expected format is now:
+     # {efu_binary}[,{calibration/file}[,{config/file}]][,{port}]
+     # That is, if you specify --efu, you must give its binary path and should
+     # give its port. The calibration/file determines pixel calculations, so is more
+     # likely to be needed. Finally, the config file can also be supplied to change, e.g.,
+     # number of pixels or rings, etc.
+     parts = s.split(',')
+     data = {'topic': TOPICS['event'], 'port': 9000, 'binary': ensure_executable(parts[0])}
+     data['name'] = data['binary'].stem
+
+     if len(parts) > 1 and (len(parts) > 2 or not parts[1].isnumeric()):
+         data['calibration'] = parts[1]
+     else:
+         data['calibration'] = guess_instr_calibration(data['name'])
+     if len(parts) > 2 and (len(parts) > 3 or not parts[2].isnumeric()):
+         data['config'] = parts[2]
+     else:
+         data['config'] = guess_instr_config(data['name'])
+     if len(parts) > 1 and parts[-1].isnumeric():
+         data['port'] = int(parts[-1])
+
+     return EventFormationUnitConfig.from_dict(data)
+
+
+ def make_services_parser():
+     from mccode_plumber import __version__
+     from argparse import ArgumentParser
+     parser = ArgumentParser('mp-nexus-services')
+     a = parser.add_argument
+     a('instrument', type=str, help='Instrument .instr or .h5 file')
+     a('-v', '--version', action='version', version=__version__)
+     # No need to specify the broker, or monitor source or topic names
+     a('-b', '--broker', type=str, default=None, help='Kafka broker for all services', metavar='address:port')
+     a('--efu', type=efu_parameter, action='append', default=None, help='Configuration of one EFU, repeatable', metavar='name,calibration,config,port')
+     a('--writer-working-dir', type=str, default=None, help='Working directory for kafka-to-nexus')
+     a('--writer-verbosity', type=str, default=None, help='Verbose output type (trace, debug, warning, error, critical)')
+     a('--forwarder-verbosity', type=str, default=None, help='Verbose output type (trace, debug, warning, error, critical)')
+     return parser
+
+
+ def services():
+     args = make_services_parser().parse_args()
+     instr_name, instr_parameters = get_instr_name_and_parameters(args.instrument)
+     kwargs = {
+         'instr_name': instr_name,
+         'instr_parameters': instr_parameters,
+         'broker': args.broker or 'localhost:9092',
+         'efu': args.efu,
+         'work': args.writer_working_dir,
+         'verbosity_writer': args.writer_verbosity,
+         'verbosity_forwarder': args.forwarder_verbosity,
+     }
+     load_in_wait_load_out(**kwargs)
+
+
+ def load_in_wait_load_out(
+     instr_name: str,
+     instr_parameters: tuple[InstrumentParameter, ...],
+     broker: str,
+     efu: list[EventFormationUnitConfig] | None,
+     work: str | None = None,
+     manage: bool = True,
+     verbosity_writer: str | None = None,
+     verbosity_forwarder: str | None = None,
+ ):
+     import signal
+     from time import sleep
+     from colorama import Fore, Back, Style
+     from mccode_plumber.manage import (
+         EventFormationUnit, EPICSMailbox, Forwarder, KafkaToNexus
+     )
+     from mccode_plumber.manage.forwarder import forwarder_verbosity
+     from mccode_plumber.manage.writer import writer_verbosity
+
+     # Start up services if they should be managed locally
+     if manage:
+         if efu is None:
+             data = {
+                 'name': instr_name,
+                 'binary': guess_instr_efu(instr_name),
+                 'config': guess_instr_config(name=instr_name),
+                 'calibration': guess_instr_calibration(name=instr_name),
+                 'topic': TOPICS['event'],
+                 'port': 9000
+             }
+             if any('port' in p.name for p in instr_parameters):
+                 from mccode_antlr.common.expression import DataType
+                 port_parameter = next(
+                     p for p in instr_parameters if 'port' in p.name)
+                 if port_parameter.value.has_value and port_parameter.value.data_type == DataType.int:
+                     # the instrument parameter has a default, which is an integer
+                     data['port'] = port_parameter.value.value
+             efu = [EventFormationUnitConfig.from_dict(data)]
+         things = tuple(
+             EventFormationUnit.start(
+                 style=Fore.BLUE, broker=broker, **x.to_dict()
+             ) for x in efu) + (
+             Forwarder.start(
+                 name='FWD',
+                 style=Fore.GREEN,
+                 broker=broker,
+                 config=TOPICS['config'],
+                 status=TOPICS['status'],
+                 verbosity=forwarder_verbosity(verbosity_forwarder),
+             ),
+             EPICSMailbox.start(
+                 name='MBX',
+                 style=Fore.YELLOW + Back.LIGHTCYAN_EX,
+                 parameters=instr_parameters,
+                 prefix=PREFIX,
+             ),
+             KafkaToNexus.start(
+                 name='K2N',
+                 style=Fore.RED + Style.DIM,
+                 broker=broker,
+                 work=work,
+                 command=TOPICS['command'],
+                 pool=TOPICS['pool'],
+                 verbosity=writer_verbosity(verbosity_writer),
+             ),
+         )
+     else:
+         things = ()
+
+     # Ensure stream topics exist
+     register_topics(broker, list(TOPICS.values()))
+
+     def signal_handler(signum, frame):
+         if signum == signal.SIGINT:
+             print('Done waiting, following SIGINT')
+             for service in things:
+                 service.stop()
+             exit(0)
+         else:
+             print(f'Received signal {signum}, ignoring')
+
+     signal.signal(signal.SIGINT, signal_handler)
+     print(
+         Fore.YELLOW + Back.LIGHTGREEN_EX + Style.BRIGHT
+         + "\tYou can now run 'mp-nexus-splitrun' in another process"
+         + " (Press CTRL+C to exit)." + Style.RESET_ALL
+     )
+     # signal.pause()
+     while all(service.poll() for service in things):
+         # Try to grab and print any updates
+         sleep(0.01)
+     # If we reach here, one or more services have _already_ stopped
+     for service in things:
+         if not service.poll():
+             print(f'{service.name} exited unexpectedly')
+         service.stop()
+
+
+ def make_splitrun_nexus_parser():
+     from mccode_plumber import __version__
+     from restage.splitrun import make_splitrun_parser
+     parser = make_splitrun_parser()
+     parser.prog = 'mp-nexus-splitrun'
+     parser.add_argument('-v', '--version', action='version', version=__version__)
+     # No need to specify the broker, or monitor source or topic names
+     parser.add_argument('--structure', type=str, default=None, help='NeXus Structure JSON path')
+     parser.add_argument('--structure-out', type=str, default=None, help='Output configured structure JSON path')
+     parser.add_argument('--nexus-file', type=str, default=None, help='Output NeXus file path')
+     return parser
+
+
+ def main():
+     from mccode_plumber.mccode import get_mcstas_instr
+     from restage.splitrun import parse_splitrun
+     from mccode_plumber.splitrun import monitors_to_kafka_callback_with_arguments
+     args, parameters, precision = parse_splitrun(make_splitrun_nexus_parser())
+     instr = get_mcstas_instr(args.instrument[0])
+
+     structure = load_file_json(args.structure if args.structure else Path(args.instrument[0]).with_suffix('.json'))
+     broker = 'localhost:9092'
+     monitor_source = 'mccode-to-kafka'
+     callback_topics = get_topics_json(structure)  # all structure-topics might be monitor topics?
+     if len(callback_topics):
+         print(f'register {callback_topics}')
+         register_topics(broker, callback_topics)  # ensure the topics are known to Kafka
+     else:
+         print('no callback topics registered')
+
+     callback, callback_args = monitors_to_kafka_callback_with_arguments(broker, monitor_source, callback_topics)
+     splitrun_kwargs = {
+         'args': args, 'parameters': parameters, 'precision': precision,
+         'callback': callback, 'callback_arguments': callback_args,
+     }
+     kwargs = {
+         'nexus_file': args.nexus_file, 'structure_out': args.structure_out
+     }
+     for k in list(kwargs.keys()) + ['structure']:
+         delattr(args, k)
+     return orchestrate(instr, structure, broker, splitrun_kwargs, **kwargs)
+
+
+ def orchestrate(
+     instr: Instr,
+     structure,
+     broker: str,
+     splitrun_kwargs: dict,
+     nexus_file: str | None = None,
+     structure_out: str | None = None,
+ ):
+     from datetime import datetime, timezone
+     from restage.splitrun import splitrun_args
+     from mccode_plumber.forwarder import (
+         forwarder_partial_streams, configure_forwarder, reset_forwarder
+     )
+     now = datetime.now(timezone.utc)
+     title = f'{instr.name} simulation {now}: {splitrun_kwargs["args"]}'
+     # kafka-to-nexus will strip off the root part of this path and put the remaining
+     # location and filename under _its_ working directory.
+     # Since it doesn't seem to create missing folders, we need to ensure we only
+     # provide the file stem.
+     filename = ensure_writable_file(nexus_file or f'{instr.name}_{now:%y%m%dT%H%M%S}.h5')
+
+     # Tell the forwarder what to forward
+     partial_streams = forwarder_partial_streams(PREFIX, TOPICS['parameter'], instr.parameters)
+     forwarder_config = f"{broker}/{TOPICS['config']}"
+     configure_forwarder(partial_streams, forwarder_config, PREFIX, TOPICS['parameter'])
+
+     # Create a file-writer job
+     structure = augment_structure(instr.parameters, structure, title)
+     if structure_out:
+         from json import dump
+         with open(structure_out, 'w') as f:
+             dump(structure, f)
+
+     job_id, success = start_writer(now, structure, filename, broker, 30.0)
+     if success:
+         print("Writer job started -- start the simulation")
+         # Do the actual simulation, calling into restage.splitrun after parsing,
+         # using the provided callbacks to send monitor data to Kafka
+         splitrun_args(instr, **splitrun_kwargs)
+         print("Splitrun simulation finished -- informing file-writer to stop")
+         # Wait for the file-writer to finish its job (possibly kill it)
+         stop_writer(broker, job_id, 20.0)
+     # De-register the forwarder topics
+     reset_forwarder(partial_streams, forwarder_config, PREFIX, TOPICS['parameter'])
+     # Verify that the file has been written?
+     # This only works if the file-writer was started in the same directory :(
+     # ensure_readable_file(filename)
+     if filename.exists():
+         print(f'Finished writing {filename}')
+     else:
+         print(f'{filename} not found, check file-writer working directory')
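Worth noting in the new module: `get_topics_json` decides which Kafka topics `mp-nexus-splitrun` registers before a run. As a reference point, here is a self-contained sketch of the same traversal over an illustrative (not real) NexusStructure fragment:

```python
# Sketch of the topic harvesting done by get_topics_dict/get_topics_iter above:
# recursively walk a NexusStructure-like dict and collect every value stored
# under a 'topic' key. The sample structure below is illustrative only.
def collect_topics(node):
    if isinstance(node, dict):
        for key, value in node.items():
            if key == 'topic':
                yield value
            else:
                yield from collect_topics(value)
    elif isinstance(node, (list, tuple)):
        for item in node:
            yield from collect_topics(item)


structure = {'children': [
    {'module': 'ev44', 'config': {'topic': 'SimulatedEvents', 'source': 'efu'}},
    {'module': 'f144', 'config': {'topic': 'SimulatedParameters', 'source': 'mcstas:speed'}},
]}
print(sorted(set(collect_topics(structure))))  # ['SimulatedEvents', 'SimulatedParameters']
```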
@@ -0,0 +1,60 @@
+ from __future__ import annotations
+ from dataclasses import dataclass, field
+ from pathlib import Path
+ from .manager import Manager
+ from .ensure import ensure_writable_directory, ensure_executable
+
+
+ @dataclass
+ class KafkaToNexus(Manager):
+     """
+     Manage the execution of a kafka-to-nexus file writer
+
+     Parameters
+     ----------
+     broker: the name or address and port of the broker containing the needed
+         command and job topics (localhost:9092)
+     work: the working directory for file output (`Path()`)
+     command: the topic used for receiving commands (WriterCommand)
+     pool: the topic used for receiving jobs as part of a pool (WriterPool)
+     verbosity: the level of output to print to STDOUT, any of
+         (trace, debug, info, warning, error, critical)
+     """
+     broker: str
+     command: str
+     pool: str
+     work: Path | None = None
+     verbosity: str | None = None
+     _command: Path = field(default_factory=lambda: Path('kafka-to-nexus'))
+
+     def __post_init__(self):
+         from mccode_plumber.kafka import register_kafka_topics, all_exist
+         self._command = ensure_executable(self._command)
+         self.work = ensure_writable_directory(self.work or Path()).resolve()
+         res = register_kafka_topics(self.broker, [self.command, self.pool])
+         if not all_exist(res.values()):
+             raise RuntimeError(f'Missing Kafka topics? {res}')
+
+     def __run_command__(self) -> list[str]:
+         args = [
+             self._command.as_posix(),
+             '--brokers', f"{self.broker},{self.broker}",
+             '--command-status-topic', self.command,
+             '--job-pool-topic', self.pool,
+             # '--service-name', 'mpw',
+             f'--hdf-output-prefix={self.work}/',
+             '--kafka-error-timeout', '10s',
+             '--kafka-metadata-max-timeout', '10s',
+             '--time-before-start', '10s',
+         ]
+         if (v := writer_verbosity(self.verbosity)) is not None:
+             args.extend(['--verbosity', v])
+         return args
+
+
+ def writer_verbosity(v):
+     if isinstance(v, str):
+         for k in ('critical', 'error', 'warning', 'info', 'debug', 'trace'):
+             if k.lower() == v.lower():
+                 return k
+     return None
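For orientation, `__run_command__` above assembles an argv along the following lines. This standalone sketch mirrors it with assumed values for the broker, topics, and working directory:

```python
# Standalone sketch of the kafka-to-nexus argv assembled by __run_command__
# above; the broker address, topic names, and working directory are assumptions.
from pathlib import Path

broker, command, pool = 'localhost:9092', 'WriterCommand', 'WriterPool'
work = Path('/tmp/writer-output')
argv = [
    Path('kafka-to-nexus').as_posix(),
    '--brokers', f'{broker},{broker}',
    '--command-status-topic', command,
    '--job-pool-topic', pool,
    f'--hdf-output-prefix={work}/',
    '--kafka-error-timeout', '10s',
    '--kafka-metadata-max-timeout', '10s',
    '--time-before-start', '10s',
]
print(' '.join(argv))
```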
mccode_plumber/writer.py CHANGED
@@ -139,22 +139,23 @@ def insert_events_in_nexus_structure(ns: dict, config: dict):
  
  def get_writer_pool(broker: str = None, job: str = None, command: str = None):
      from .file_writer_control import WorkerJobPool
+     print(f'Create a Writer pool for {broker=} {job=} {command=}')
      pool = WorkerJobPool(f"{broker}/{job}", f"{broker}/{command}")
      return pool
  
  
  def make_define_nexus_structure():
-     from typing import Union, Callable
+     from typing import Callable
      from mccode_antlr.instr import Instr
  
      def define_nexus_structure(
-             instr: Union[Path, str],
+             instr: Path | str,
              pvs: list[dict],
              title: str = None,
              event_stream: dict[str, str] = None,
-             file: Union[Path, None] = None,
-             func: Union[Callable[[Instr], dict], None] = None,
-             binary: Union[Path, None] = None,
+             file: Path | None = None,
+             func: Callable[[Instr], dict] | None = None,
+             binary: Path | None = None,
              origin: str = None):
          import json
          from .mccode import get_mcstas_instr
@@ -178,13 +179,21 @@ def make_define_nexus_structure():
      return define_nexus_structure
  
  
- def start_pool_writer(start_time_string, structure, filename=None, stop_time_string: str | None = None,
-                       broker: str | None = None, job_topic: str | None = None, command_topic: str | None = None,
-                       wait: bool = False, timeout: float | None = None, job_id: str | None = None):
-     from sys import exit
-     from os import EX_OK, EX_UNAVAILABLE
-     from time import sleep
+ def writer_start(
+     start_time_string,
+     structure,
+     filename,
+     stop_time_string,
+     broker,
+     job_topic,
+     command_topic,
+     control_topic,
+     timeout,
+     wait,
+     job_id,
+ ):
      from json import dumps
+     from time import sleep
      from datetime import datetime, timedelta
      from .file_writer_control import JobHandler, WriteJob, CommandState
  
@@ -201,33 +210,57 @@ def start_pool_writer(start_time_string, structure, filename=None, stop_time_str
      end_time = datetime.now() if wait else None
      if stop_time_string is not None:
          end_time = datetime.fromisoformat(stop_time_string)
-     print(f"write file from {start_time} until {end_time}")
  
-     job = WriteJob(small_string, filename, broker, start_time, end_time, job_id=job_id or "")
+     job = WriteJob(small_string, filename, broker, start_time, end_time,
+                    job_id=job_id or "", control_topic=control_topic)
      # start the job
      start = handler.start_job(job)
+     # Did the job handler accomplish its job of sending the start message?
+     print(f'Writer start {handler.is_done()=} {handler.get_message()=}')
      if timeout is not None:
          try:
              # ensure the start succeeds:
-             zero_time = datetime.now()
+             give_up_time = datetime.now() + timedelta(seconds=timeout)
              while not start.is_done():
-                 if zero_time + timedelta(seconds=timeout) < datetime.now():
+                 state = start.get_state()
+                 if give_up_time < datetime.now():
                      raise RuntimeError(f"Timed out while starting job {job.job_id}")
-                 elif start.get_state() == CommandState.ERROR:
+                 elif state == CommandState.ERROR:
                      raise RuntimeError(f"Starting job {job.job_id} failed with message {start.get_message()}")
                  sleep(1)
          except RuntimeError as e:
-             # raise RuntimeError(e.__str__() + f" The message was: {start.get_message()}")
-             print(f"{e} The message was: {start.get_message()}")
-             exit(EX_UNAVAILABLE)
+             raise RuntimeError(f"{e} The message was: {start.get_message()}")
+     return start, handler
+
+
+ def start_pool_writer(
+     start_time_string,
+     structure,
+     filename=None,
+     stop_time_string: str | None = None,
+     broker: str | None = None,
+     job_topic: str | None = None,
+     command_topic: str | None = None,
+     control_topic: str | None = None,
+     wait: bool = False,
+     timeout: float | None = None,
+     job_id: str | None = None
+ ):
+     from sys import exit
+     from os import EX_OK, EX_UNAVAILABLE
+     from time import sleep
  
-     if wait:
-         try:
+     try:
+         start, handler = writer_start(
+             start_time_string, structure, filename, stop_time_string,
+             broker, job_topic, command_topic, control_topic, timeout, wait, job_id
+         )
+         if wait:
              while not handler.is_done():
                  sleep(1)
-         except RuntimeError as error:
-             print(str(error) + f'Writer failed, producing message:\n{handler.get_message}')
-             exit(EX_UNAVAILABLE)
+     except RuntimeError as error:
+         print(str(error))
+         exit(EX_UNAVAILABLE)
      exit(EX_OK)
  
  
@@ -243,6 +276,7 @@ def get_arg_parser():
      a('-b', '--broker', type=str, help="The Kafka broker server used by the Writer")
      a('-j', '--job', type=str, help='Writer job topic')
      a('-c', '--command', type=str, help='Writer command topic')
+     a('-r', '--control', type=str, help='Active writer job control topic')
      a('--title', type=str, default='scan title for testing', help='Output file title parameter')
      a('--event-source', type=str)
      a('--event-topic', type=str)
@@ -270,7 +304,7 @@ def parameter_description(inst_param):
      return desc
  
  
- def construct_writer_pv_dicts(instr: Union[Path, str], prefix: str, topic: str):
+ def construct_writer_pv_dicts(instr: Path | str, prefix: str, topic: str):
      from .mccode import get_mccode_instr_parameters
      parameters = get_mccode_instr_parameters(instr)
      return construct_writer_pv_dicts_from_parameters(parameters, prefix, topic)
@@ -306,10 +340,11 @@ def print_time():
  
  
  def start_writer():
-     args, parameters, structure = parse_writer_args()
-     return start_pool_writer(args.start_time, structure, args.filename, stop_time_string=args.stop_time,
-                              broker=args.broker, job_topic=args.job, command_topic=args.command,
-                              wait=args.wait, timeout=args.time_out, job_id=args.job_id)
+     a, parameters, structure = parse_writer_args()
+     return start_pool_writer(
+         a.start_time, structure, a.filename, stop_time_string=a.stop_time,
+         broker=a.broker, job_topic=a.job, command_topic=a.command,
+         control_topic=a.control, wait=a.wait, timeout=a.time_out, job_id=a.job_id)
  
  
  def wait_on_writer():
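The practical upshot of this refactor is that `writer_start` can now be driven programmatically, which is how the new services module uses it. A hedged sketch, assuming a reachable broker, a running kafka-to-nexus worker, and the topic names used above:

```python
# Hedged sketch of starting a writer job through the refactored writer_start;
# the broker, topics, and stand-in structure are assumptions, and nothing
# happens without a live kafka-to-nexus worker listening on the pool topic.
from datetime import datetime, timezone
from uuid import uuid1
from mccode_plumber.writer import writer_start

structure = {'children': []}  # stand-in NexusStructure
job_id = str(uuid1())
start, handler = writer_start(
    datetime.now(timezone.utc).isoformat(), structure, filename='example.h5',
    stop_time_string=None, broker='localhost:9092',
    job_topic='WriterPool', command_topic='WriterCommand',
    control_topic='WriterCommand',  # keep the job on one topic, as above
    timeout=30.0, wait=False, job_id=job_id,
)
print(f'started job {job_id}: {start.get_state()}')
```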
@@ -1,16 +1,17 @@
  Metadata-Version: 2.4
  Name: mccode-plumber
- Version: 0.11.1
+ Version: 0.13.0
  Author-email: Gregory Tucker <gregory.tucker@ess.eu>
  Classifier: License :: OSI Approved :: BSD License
  Description-Content-Type: text/markdown
  Requires-Dist: p4p
  Requires-Dist: kafka-python>=2.2.11
  Requires-Dist: ess-streaming-data-types>=0.14.0
- Requires-Dist: restage>=0.7.1
+ Requires-Dist: restage>=0.8.0
  Requires-Dist: mccode-to-kafka>=0.2.2
- Requires-Dist: moreniius>=0.4.0
+ Requires-Dist: moreniius>=0.5.0
  Requires-Dist: icecream
+ Requires-Dist: ephemeral-port-reserve
  
  # McCode Plumber
  Setup, run, and teardown the infrastructure for splitrun McCode scans sending data through Kafka into NeXus
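Of the dependency changes, ephemeral-port-reserve is the only new entry. Where it is used is not visible in this diff, but its API is a single call that reserves a currently-free local port, plausibly for assigning EFU ports. A minimal sketch:

```python
# Minimal sketch of the new ephemeral-port-reserve dependency's API; how
# mccode-plumber itself uses it is not shown in this diff.
from ephemeral_port_reserve import reserve

port = reserve()  # reserve a currently-free port on 127.0.0.1
print(f'an EFU could be told to listen on port {port}')
```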