mccode-plumber 0.7.0__tar.gz → 0.8.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. {mccode_plumber-0.7.0/src/mccode_plumber.egg-info → mccode_plumber-0.8.0}/PKG-INFO +2 -2
  2. {mccode_plumber-0.7.0 → mccode_plumber-0.8.0}/pyproject.toml +4 -1
  3. {mccode_plumber-0.7.0 → mccode_plumber-0.8.0}/src/mccode_plumber/epics.py +25 -11
  4. {mccode_plumber-0.7.0 → mccode_plumber-0.8.0}/src/mccode_plumber/file_writer_control/CommandChannel.py +5 -5
  5. {mccode_plumber-0.7.0 → mccode_plumber-0.8.0}/src/mccode_plumber/file_writer_control/CommandHandler.py +2 -2
  6. {mccode_plumber-0.7.0 → mccode_plumber-0.8.0}/src/mccode_plumber/file_writer_control/InThreadStatusTracker.py +5 -5
  7. {mccode_plumber-0.7.0 → mccode_plumber-0.8.0}/src/mccode_plumber/file_writer_control/JobHandler.py +4 -4
  8. {mccode_plumber-0.7.0 → mccode_plumber-0.8.0}/src/mccode_plumber/file_writer_control/StateExtractor.py +3 -3
  9. {mccode_plumber-0.7.0 → mccode_plumber-0.8.0}/src/mccode_plumber/file_writer_control/WorkerFinder.py +7 -7
  10. {mccode_plumber-0.7.0 → mccode_plumber-0.8.0}/src/mccode_plumber/file_writer_control/WorkerJobPool.py +5 -5
  11. {mccode_plumber-0.7.0 → mccode_plumber-0.8.0}/src/mccode_plumber/splitrun.py +8 -6
  12. {mccode_plumber-0.7.0 → mccode_plumber-0.8.0}/src/mccode_plumber/writer.py +115 -0
  13. {mccode_plumber-0.7.0 → mccode_plumber-0.8.0/src/mccode_plumber.egg-info}/PKG-INFO +2 -2
  14. {mccode_plumber-0.7.0 → mccode_plumber-0.8.0}/src/mccode_plumber.egg-info/entry_points.txt +3 -0
  15. {mccode_plumber-0.7.0 → mccode_plumber-0.8.0}/src/mccode_plumber.egg-info/requires.txt +1 -1
  16. {mccode_plumber-0.7.0 → mccode_plumber-0.8.0}/.github/workflows/pip.yml +0 -0
  17. {mccode_plumber-0.7.0 → mccode_plumber-0.8.0}/.github/workflows/wheels.yml +0 -0
  18. {mccode_plumber-0.7.0 → mccode_plumber-0.8.0}/.gitignore +0 -0
  19. {mccode_plumber-0.7.0 → mccode_plumber-0.8.0}/README.md +0 -0
  20. {mccode_plumber-0.7.0 → mccode_plumber-0.8.0}/setup.cfg +0 -0
  21. {mccode_plumber-0.7.0 → mccode_plumber-0.8.0}/src/mccode_plumber/__init__.py +0 -0
  22. {mccode_plumber-0.7.0 → mccode_plumber-0.8.0}/src/mccode_plumber/conductor.py +0 -0
  23. {mccode_plumber-0.7.0 → mccode_plumber-0.8.0}/src/mccode_plumber/file_writer_control/CommandStatus.py +0 -0
  24. {mccode_plumber-0.7.0 → mccode_plumber-0.8.0}/src/mccode_plumber/file_writer_control/JobStatus.py +0 -0
  25. {mccode_plumber-0.7.0 → mccode_plumber-0.8.0}/src/mccode_plumber/file_writer_control/KafkaTopicUrl.py +0 -0
  26. {mccode_plumber-0.7.0 → mccode_plumber-0.8.0}/src/mccode_plumber/file_writer_control/WorkerStatus.py +0 -0
  27. {mccode_plumber-0.7.0 → mccode_plumber-0.8.0}/src/mccode_plumber/file_writer_control/WriteJob.py +0 -0
  28. {mccode_plumber-0.7.0 → mccode_plumber-0.8.0}/src/mccode_plumber/file_writer_control/__init__.py +0 -0
  29. {mccode_plumber-0.7.0 → mccode_plumber-0.8.0}/src/mccode_plumber/forwarder.py +0 -0
  30. {mccode_plumber-0.7.0 → mccode_plumber-0.8.0}/src/mccode_plumber/kafka.py +0 -0
  31. {mccode_plumber-0.7.0 → mccode_plumber-0.8.0}/src/mccode_plumber/mccode.py +0 -0
  32. {mccode_plumber-0.7.0 → mccode_plumber-0.8.0}/src/mccode_plumber/utils.py +0 -0
  33. {mccode_plumber-0.7.0 → mccode_plumber-0.8.0}/src/mccode_plumber.egg-info/SOURCES.txt +0 -0
  34. {mccode_plumber-0.7.0 → mccode_plumber-0.8.0}/src/mccode_plumber.egg-info/dependency_links.txt +0 -0
  35. {mccode_plumber-0.7.0 → mccode_plumber-0.8.0}/src/mccode_plumber.egg-info/top_level.txt +0 -0
  36. {mccode_plumber-0.7.0 → mccode_plumber-0.8.0}/tests/test_epics.py +0 -0
  37. {mccode_plumber-0.7.0 → mccode_plumber-0.8.0}/tests/test_splitrun.py +0 -0
  38. {mccode_plumber-0.7.0 → mccode_plumber-0.8.0}/tests/test_writer.py +0 -0

PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: mccode-plumber
- Version: 0.7.0
+ Version: 0.8.0
  Author-email: Gregory Tucker <gregory.tucker@ess.eu>
  Classifier: License :: OSI Approved :: BSD License
  Description-Content-Type: text/markdown
@@ -8,7 +8,7 @@ Requires-Dist: p4p
  Requires-Dist: kafka-python>=2.0
  Requires-Dist: ess-streaming-data-types>=0.14.0
  Requires-Dist: restage>=0.4.0
- Requires-Dist: mccode-to-kafka>=0.2.1
+ Requires-Dist: mccode-to-kafka>=0.2.2
  Requires-Dist: moreniius>=0.2.3
  Requires-Dist: icecream
 

pyproject.toml
@@ -9,7 +9,7 @@ dependencies = [
      'kafka-python>=2.0',
      'ess-streaming-data-types>=0.14.0',
      'restage>=0.4.0',
-     'mccode-to-kafka>=0.2.1',
+     'mccode-to-kafka>=0.2.2',
      'moreniius>=0.2.3',
      'icecream',
  ]
@@ -31,6 +31,9 @@ mp-forwarder-teardown = 'mccode_plumber.forwarder:teardown'
  mp-writer-from = 'mccode_plumber.writer:print_time'
  mp-writer-write = 'mccode_plumber.writer:start_writer'
  mp-writer-wait = 'mccode_plumber.writer:wait_on_writer'
+ mp-writer-list = 'mccode_plumber.writer:list_status'
+ mp-writer-kill = 'mccode_plumber.writer:kill_job'
+ mp-writer-killall = 'mccode_plumber.writer:kill_all'
  mp-register-topics = 'mccode_plumber.kafka:register_topics'
  mp-insert-hdf5-instr = 'mccode_plumber.mccode:insert'
 
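Each [project.scripts] entry maps a console-script name to a module:function target, so the three new commands call list_status, kill_job, and kill_all in mccode_plumber.writer (all added later in this diff). As a rough sketch, the shell command and a direct call are equivalent; this assumes an installed 0.8.0 and a reachable Kafka broker, and substitutes sys.argv because the entry-point functions parse it themselves:

    import sys
    from mccode_plumber.writer import list_status

    # Equivalent of running `mp-writer-list --broker localhost:9092` in a shell
    sys.argv = ['mp-writer-list', '--broker', 'localhost:9092']
    list_status()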

src/mccode_plumber/epics.py
@@ -72,6 +72,7 @@ def main(names: dict[str, NTScalar], prefix: str = None):
      for name, value in names.items():
          pv = SharedPV(initial=value, handler=MailboxHandler())
          provider.add(f'{prefix}{name}' if prefix else name, pv)
+         print(f'Add mailbox for {prefix}{name}')
          pvs.append(pv)
 
      print(f'Start mailbox server for {len(pvs)} PVs with prefix {prefix}')
@@ -101,20 +102,33 @@ def update():
      from argparse import ArgumentParser
      from p4p.client.thread import Context
      parser = ArgumentParser(description="Update the mailbox server with new values")
-     parser.add_argument('address', type=str, help='The mailbox address of the value to be updated')
-     parser.add_argument('value', type=str, help='The new value to be assigned to the mailbox')
+     parser.add_argument('address value', type=str, nargs='+', help='The mailbox address and value to be updated')
      args = parser.parse_args()
+     addresses_values = getattr(args, 'address value')
+
+     if len(addresses_values) == 0:
+         parser.print_help()
+         return
+
+     addresses = addresses_values[::2]
+     values = addresses_values[1::2]
+
+     if len(addresses_values) % 2:
+         print(f'Please provide address-value pairs. Provided {addresses=} {values=}')
 
      ctx = Context('pva')
-     pv = ctx.get(args.address, throw=False)
-     if isinstance(pv, float):
-         ctx.put(args.address, float(args.value))
-     elif isinstance(pv, int):
-         ctx.put(args.address, int(args.value))
-     elif isinstance(pv, str):
-         ctx.put(args.address, str(args.value))
-     else:
-         raise ValueError(f'Unknown type {type(pv)} (this is likely a vector that I can not handle yet?)')
+     for address, value in zip(addresses, values):
+         pv = ctx.get(address, throw=False)
+         if isinstance(pv, float):
+             ctx.put(address, float(value))
+         elif isinstance(pv, int):
+             ctx.put(address, int(value))
+         elif isinstance(pv, str):
+             ctx.put(address, str(value))
+         elif isinstance(pv, TimeoutError):
+             print(f'[Timeout] Failed to update {address} with {value} (Unknown to EPICS?)')
+         else:
+             raise ValueError(f'Address {address} has unknown type {type(pv)}')
 
      ctx.disconnect()
 
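The rewritten update() accepts any number of positional tokens and pairs them by even/odd slicing, reporting odd counts instead of silently dropping a value. A standalone sketch of the pairing, with made-up PV names standing in for parsed command-line arguments:

    # Stand-in for the parsed positional arguments of mccode_plumber.epics:update
    addresses_values = ['mailbox:speed', '4200.0', 'mailbox:mode', 'auto']

    addresses = addresses_values[::2]   # even indices: PV addresses
    values = addresses_values[1::2]     # odd indices: values to assign

    if len(addresses_values) % 2:
        print(f'Please provide address-value pairs. Provided {addresses=} {values=}')

    for address, value in zip(addresses, values):
        print(f'{address} <- {value}')
    # mailbox:speed <- 4200.0
    # mailbox:mode <- auto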

src/mccode_plumber/file_writer_control/CommandChannel.py
@@ -7,14 +7,14 @@ from typing import Dict, List, Optional, Union
  from kafka import KafkaConsumer
  from kafka.errors import NoBrokersAvailable
 
- from file_writer_control.CommandStatus import CommandState, CommandStatus
- from file_writer_control.InThreadStatusTracker import (
+ from .CommandStatus import CommandState, CommandStatus
+ from .InThreadStatusTracker import (
      DEAD_ENTITY_TIME_LIMIT,
      InThreadStatusTracker,
  )
- from file_writer_control.JobStatus import JobStatus
- from file_writer_control.KafkaTopicUrl import KafkaTopicUrl
- from file_writer_control.WorkerStatus import WorkerStatus
+ from .JobStatus import JobStatus
+ from .KafkaTopicUrl import KafkaTopicUrl
+ from .WorkerStatus import WorkerStatus
 
 
  def thread_function(

src/mccode_plumber/file_writer_control/CommandHandler.py
@@ -1,7 +1,7 @@
  from datetime import timedelta
 
- from file_writer_control.CommandChannel import CommandChannel
- from file_writer_control.CommandStatus import CommandState
+ from .CommandChannel import CommandChannel
+ from .CommandStatus import CommandState
 
 
  class CommandHandler:

src/mccode_plumber/file_writer_control/InThreadStatusTracker.py
@@ -22,14 +22,14 @@ from streaming_data_types.status_x5f2 import FILE_IDENTIFIER as STAT_IDENTIFIER
  from streaming_data_types.status_x5f2 import StatusMessage
  from streaming_data_types.utils import get_schema
 
- from file_writer_control.CommandStatus import CommandState, CommandStatus
- from file_writer_control.JobStatus import JobState, JobStatus
- from file_writer_control.StateExtractor import (
+ from .CommandStatus import CommandState, CommandStatus
+ from .JobStatus import JobState, JobStatus
+ from .StateExtractor import (
      extract_job_state_from_answer,
      extract_state_from_command_answer,
      extract_worker_state_from_status,
  )
- from file_writer_control.WorkerStatus import WorkerState, WorkerStatus
+ from .WorkerStatus import WorkerState, WorkerStatus
 
  DEAD_ENTITY_TIME_LIMIT = timedelta(hours=1)
 
@@ -223,6 +223,6 @@ class InThreadStatusTracker:
              current_job.state = JobState.ERROR
          else:
              current_job.state = JobState.DONE
-         current_job.metadata = json.loads(stopped.metadata)
+         current_job.metadata = json.loads(stopped.metadata) if stopped.metadata is not None else None
          current_job.message = stopped.message
          self.known_workers[stopped.service_id].state = WorkerState.IDLE
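The added guard matters because json.loads accepts only str, bytes, or bytearray, so a stop message without metadata would otherwise raise inside the tracker. A quick illustration:

    import json

    print(json.loads('{"bytes": 1024}'))  # {'bytes': 1024}
    try:
        json.loads(None)
    except TypeError as err:
        print(f'TypeError: {err}')  # the JSON object must be str, bytes or bytearray, not NoneType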

src/mccode_plumber/file_writer_control/JobHandler.py
@@ -1,9 +1,9 @@
  from datetime import datetime
 
- from file_writer_control.CommandHandler import CommandHandler
- from file_writer_control.JobStatus import JobState
- from file_writer_control.WorkerFinder import WorkerFinder
- from file_writer_control.WriteJob import WriteJob
+ from .CommandHandler import CommandHandler
+ from .JobStatus import JobState
+ from .WorkerFinder import WorkerFinder
+ from .WriteJob import WriteJob
 
 
  class JobHandler:

src/mccode_plumber/file_writer_control/StateExtractor.py
@@ -8,9 +8,9 @@ from streaming_data_types.action_response_answ import (
  )
  from streaming_data_types.status_x5f2 import StatusMessage
 
- from file_writer_control.CommandStatus import CommandState
- from file_writer_control.JobStatus import JobState
- from file_writer_control.WorkerStatus import WorkerState
+ from .CommandStatus import CommandState
+ from .JobStatus import JobState
+ from .WorkerStatus import WorkerState
 
 
  def extract_worker_state_from_status(status: StatusMessage) -> WorkerState:

src/mccode_plumber/file_writer_control/WorkerFinder.py
@@ -6,13 +6,13 @@ from kafka import KafkaProducer
  from kafka.errors import NoBrokersAvailable
  from streaming_data_types.run_stop_6s4t import serialise_6s4t as serialise_stop
 
- from file_writer_control.CommandChannel import CommandChannel
- from file_writer_control.CommandHandler import CommandHandler
- from file_writer_control.CommandStatus import CommandStatus
- from file_writer_control.JobStatus import JobState, JobStatus
- from file_writer_control.KafkaTopicUrl import KafkaTopicUrl
- from file_writer_control.WorkerStatus import WorkerStatus
- from file_writer_control.WriteJob import WriteJob
+ from .CommandChannel import CommandChannel
+ from .CommandHandler import CommandHandler
+ from .CommandStatus import CommandStatus
+ from .JobStatus import JobState, JobStatus
+ from .KafkaTopicUrl import KafkaTopicUrl
+ from .WorkerStatus import WorkerStatus
+ from .WriteJob import WriteJob
 
 
  class WorkerFinderBase:

src/mccode_plumber/file_writer_control/WorkerJobPool.py
@@ -3,11 +3,11 @@ from typing import Dict
  from kafka import KafkaProducer
  from kafka.errors import NoBrokersAvailable
 
- from file_writer_control.CommandHandler import CommandHandler
- from file_writer_control.CommandStatus import CommandState
- from file_writer_control.KafkaTopicUrl import KafkaTopicUrl
- from file_writer_control.WorkerFinder import WorkerFinder
- from file_writer_control.WriteJob import WriteJob
+ from .CommandHandler import CommandHandler
+ from .CommandStatus import CommandState
+ from .KafkaTopicUrl import KafkaTopicUrl
+ from .WorkerFinder import WorkerFinder
+ from .WriteJob import WriteJob
 
 
  class WorkerJobPool(WorkerFinder):
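The file_writer_control hunks above are all the same fix: the vendored package previously imported its own modules through the top-level name file_writer_control, which resolves only when an independent copy sits on sys.path; the relative form binds to the copy vendored under mccode_plumber regardless. A small check, assuming mccode-plumber 0.8.0 is installed and the standalone file-writer-control package is not:

    import importlib.util

    # The vendored subpackage resolves through its parent package...
    print(importlib.util.find_spec('mccode_plumber.file_writer_control') is not None)  # True
    # ...while the bare top-level name need not exist at all
    print(importlib.util.find_spec('file_writer_control'))  # None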

src/mccode_plumber/splitrun.py
@@ -4,18 +4,20 @@ def make_parser():
      parser.prog = 'mp-splitrun'
      parser.add_argument('--broker', type=str, help='The Kafka broker to send monitors to', default=None)
      parser.add_argument('--source', type=str, help='The Kafka source name to use for monitors', default=None)
+     parser.add_argument('--topic', type=str, help='The Kafka topic name(s) to use for monitors', default=None, action='append')
      return parser
 
 
- def monitors_to_kafka_callback_with_arguments(broker: str, source: str):
-     from functools import partial
+ def monitors_to_kafka_callback_with_arguments(broker: str, source: str, topics: list[str]):
      from mccode_to_kafka.sender import send_histograms
 
+     partial_kwargs = {'broker': broker, 'source': source}
+     if topics is not None and len(topics) > 0:
+         partial_kwargs['names'] = topics
+
      def callback(*args, **kwargs):
-         print(f'monitors to kafka callback called with {args} and {kwargs}')
-         return send_histograms(*args, broker=broker, source=source, **kwargs)
+         return send_histograms(*args, **partial_kwargs, **kwargs)
 
-     # return partial(send_histograms, broker=broker, source=source), {'dir': 'root'}
      return callback, {'dir': 'root'}
 
 
@@ -24,5 +26,5 @@ def main():
      from restage.splitrun import splitrun_args, parse_splitrun
      args, parameters, precision = parse_splitrun(make_parser())
      instr = get_mcstas_instr(args.instrument[0])
-     callback, callback_args = monitors_to_kafka_callback_with_arguments(args.broker, args.source)
+     callback, callback_args = monitors_to_kafka_callback_with_arguments(args.broker, args.source, args.topic)
      return splitrun_args(instr, parameters, precision, args, callback=callback, callback_arguments=callback_args)
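The callback now assembles its keyword arguments once, adding 'names' only when --topic was supplied, and forwards them to send_histograms on every call; the debug print and the commented-out partial() path are gone. A minimal sketch of the closure pattern, with a stand-in for mccode_to_kafka.sender.send_histograms:

    def send_histograms(*args, **kwargs):
        # Stand-in: just show what the real sender would receive
        print(f'send_histograms{args} {kwargs}')

    def make_callback(broker, source, topics):
        partial_kwargs = {'broker': broker, 'source': source}
        if topics:  # only pass monitor names when some were given
            partial_kwargs['names'] = topics

        def callback(*args, **kwargs):
            return send_histograms(*args, **partial_kwargs, **kwargs)

        return callback, {'dir': 'root'}

    callback, callback_args = make_callback('localhost:9092', 'mcstas', ['mon0'])
    callback('/path/to/scan/0', **callback_args)
    # send_histograms('/path/to/scan/0',) {'broker': 'localhost:9092', 'source': 'mcstas', 'names': ['mon0'], 'dir': 'root'}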

src/mccode_plumber/writer.py
@@ -4,6 +4,8 @@ from pathlib import Path
  from typing import Union, Callable
  from mccode_antlr.instr import Instr
 
+ from .file_writer_control import WorkerJobPool
+
 
  def _is_group(x, group):
      """Is a (dict) object a (NeXus) group with the specified name?"""
@@ -334,3 +336,116 @@ def wait_on_writer():
          # raise RuntimeError(e.__str__() + f" The message was: {stop.get_message()}")
          exit(EX_UNAVAILABLE)
      exit(EX_OK)
+
+
+ def kill_job():
+     import time
+     from argparse import ArgumentParser
+     from .file_writer_control import WorkerJobPool
+     parser = ArgumentParser()
+     parser.add_argument('-b', '--broker', help="Kafka broker", default='localhost:9092', type=str)
+     parser.add_argument('-c', '--command', help="Writer command topic", default="WriterCommand", type=str)
+     parser.add_argument('-t', '--topic', help='Writer job topic', default='WriterJobs', type=str)
+     parser.add_argument('-s', '--sleep', help='Post pool creation sleep time (s)', default=1, type=int)
+     parser.add_argument('service_id', type=str, help='Writer service id to stop')
+     parser.add_argument('job_id', type=str, help='Writer job id to stop')
+
+     args = parser.parse_args()
+     pool = WorkerJobPool(f'{args.broker}/{args.topic}', f'{args.broker}/{args.command}')
+     time.sleep(args.sleep)
+     pool.try_send_stop_now(args.service_id, args.job_id)
+
+
+ def print_columns(titles: list | tuple, values: list[list | tuple] | tuple[list | tuple, ...]):
+     if not len(values) or not len(titles):
+         return
+     widths = [len(str(x)) for x in titles]
+     for row in values:
+         for i, v in enumerate(row):
+             n = len(str(v))
+             if n > widths[i]:
+                 widths[i] = n
+     w_format = ''.join([f'{{:{n + 1:d}s}}' for n in widths])
+     print(w_format.format(*[str(x) for x in titles]))
+     print(w_format.format(*['-' * n for n in widths]))
+     for row in values:
+         print(w_format.format(*[str(x) for x in row]))
+     print()
+
+
+ def print_workers(workers):
+     if len(workers):
+         print("Known workers")
+         print_columns(("Service id", "Current state"),
+                       [(w.service_id, w.state) for w in workers])
+     else:
+         print("No workers")
+
+
+ def print_jobs(jobs):
+     if len(jobs):
+         print("Known jobs")
+         job_info = [(j.service_id, j.job_id, j.state,
+                      j.file_name if j.file_name else j.message) for j in jobs]
+         print_columns(("Service id", "Job id", "Current state", "File name or message"),
+                       job_info)
+     else:
+         print("No jobs")
+
+
+ def print_commands(commands):
+     if len(commands):
+         print("Known commands")
+         print_columns(("Job id", "Command id", "Current state", "Message"),
+                       [(c.job_id, c.command_id, c.state, c.message) for c in commands])
+     else:
+         print("No commands")
+
+
+ def print_current_state(channel: WorkerJobPool):
+     print_workers(channel.list_known_workers())
+     print_jobs(channel.list_known_jobs())
+     print_commands(channel.list_known_commands())
+
+
+ def kill_all():
+     import time
+     from argparse import ArgumentParser
+     from .file_writer_control import WorkerJobPool
+     parser = ArgumentParser()
+     parser.add_argument('-b', '--broker', help="Kafka broker", default='localhost:9092', type=str)
+     parser.add_argument('-c', '--command', help="Writer command topic", default="WriterCommand", type=str)
+     parser.add_argument('-t', '--topic', help='Writer job topic', default='WriterJobs', type=str)
+     parser.add_argument('-s', '--sleep', help='Post pool creation sleep time (s)', default=1, type=int)
+     parser.add_argument('-v', '--verbose', help='Verbose output', action='store_true')
+
+     args = parser.parse_args()
+     pool = WorkerJobPool(f'{args.broker}/{args.topic}', f'{args.broker}/{args.command}')
+     time.sleep(args.sleep)
+     if args.verbose:
+         print_current_state(pool)
+     jobs = pool.list_known_jobs()
+     for job in jobs:
+         print(f'Kill {job.service_id} {job.job_id}')
+         pool.try_send_stop_now(job.service_id, job.job_id)
+         time.sleep(args.sleep)
+     if len(jobs) == 0:
+         print("No jobs")
+
+     if args.verbose:
+         print_current_state(pool)
+
+
+ def list_status():
+     import time
+     from argparse import ArgumentParser
+     parser = ArgumentParser()
+     parser.add_argument('-b', '--broker', help="Kafka broker", default='localhost:9092', type=str)
+     parser.add_argument('-c', '--command', help="Writer command topic", default="WriterCommand", type=str)
+     parser.add_argument('-t', '--topic', help='Writer job topic', default='WriterJobs', type=str)
+     parser.add_argument('-s', '--sleep', type=int, help='Post pool creation sleep time', default=1)
+     args = parser.parse_args()
+     pool = WorkerJobPool(f'{args.broker}/{args.topic}', f'{args.broker}/{args.command}')
+     time.sleep(args.sleep)
+     print_current_state(pool)
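print_columns sizes each column to its widest entry (title or value), underlines the titles, and pads every field by one space. A short demonstration with made-up service and job ids, assuming mccode-plumber 0.8.0 is installed:

    from mccode_plumber.writer import print_columns

    print_columns(('Service id', 'Job id', 'Current state'),
                  [('writer-1', 'a1b2', 'JobState.WRITING'),
                   ('writer-2', 'c3d4', 'JobState.DONE')])
    # Service id Job id Current state
    # ---------- ------ ----------------
    # writer-1   a1b2   JobState.WRITING
    # writer-2   c3d4   JobState.DONE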

src/mccode_plumber.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: mccode-plumber
- Version: 0.7.0
+ Version: 0.8.0
  Author-email: Gregory Tucker <gregory.tucker@ess.eu>
  Classifier: License :: OSI Approved :: BSD License
  Description-Content-Type: text/markdown
@@ -8,7 +8,7 @@ Requires-Dist: p4p
  Requires-Dist: kafka-python>=2.0
  Requires-Dist: ess-streaming-data-types>=0.14.0
  Requires-Dist: restage>=0.4.0
- Requires-Dist: mccode-to-kafka>=0.2.1
+ Requires-Dist: mccode-to-kafka>=0.2.2
  Requires-Dist: moreniius>=0.2.3
  Requires-Dist: icecream
 

src/mccode_plumber.egg-info/entry_points.txt
@@ -7,5 +7,8 @@ mp-insert-hdf5-instr = mccode_plumber.mccode:insert
  mp-register-topics = mccode_plumber.kafka:register_topics
  mp-splitrun = mccode_plumber.splitrun:main
  mp-writer-from = mccode_plumber.writer:print_time
+ mp-writer-kill = mccode_plumber.writer:kill_job
+ mp-writer-killall = mccode_plumber.writer:kill_all
+ mp-writer-list = mccode_plumber.writer:list_status
  mp-writer-wait = mccode_plumber.writer:wait_on_writer
  mp-writer-write = mccode_plumber.writer:start_writer

src/mccode_plumber.egg-info/requires.txt
@@ -2,6 +2,6 @@ p4p
  kafka-python>=2.0
  ess-streaming-data-types>=0.14.0
  restage>=0.4.0
- mccode-to-kafka>=0.2.1
+ mccode-to-kafka>=0.2.2
  moreniius>=0.2.3
  icecream