mccode-plumber 0.6.0__py3-none-any.whl → 0.7.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mccode_plumber/epics.py +25 -11
- mccode_plumber/file_writer_control/CommandChannel.py +236 -0
- mccode_plumber/file_writer_control/CommandHandler.py +58 -0
- mccode_plumber/file_writer_control/CommandStatus.py +151 -0
- mccode_plumber/file_writer_control/InThreadStatusTracker.py +228 -0
- mccode_plumber/file_writer_control/JobHandler.py +102 -0
- mccode_plumber/file_writer_control/JobStatus.py +147 -0
- mccode_plumber/file_writer_control/KafkaTopicUrl.py +22 -0
- mccode_plumber/file_writer_control/StateExtractor.py +58 -0
- mccode_plumber/file_writer_control/WorkerFinder.py +139 -0
- mccode_plumber/file_writer_control/WorkerJobPool.py +70 -0
- mccode_plumber/file_writer_control/WorkerStatus.py +88 -0
- mccode_plumber/file_writer_control/WriteJob.py +83 -0
- mccode_plumber/file_writer_control/__init__.py +13 -0
- mccode_plumber/writer.py +3 -3
- {mccode_plumber-0.6.0.dist-info → mccode_plumber-0.7.1.dist-info}/METADATA +3 -2
- mccode_plumber-0.7.1.dist-info/RECORD +27 -0
- mccode_plumber-0.6.0.dist-info/RECORD +0 -14
- {mccode_plumber-0.6.0.dist-info → mccode_plumber-0.7.1.dist-info}/WHEEL +0 -0
- {mccode_plumber-0.6.0.dist-info → mccode_plumber-0.7.1.dist-info}/entry_points.txt +0 -0
- {mccode_plumber-0.6.0.dist-info → mccode_plumber-0.7.1.dist-info}/top_level.txt +0 -0
mccode_plumber/file_writer_control/WorkerFinder.py
ADDED
@@ -0,0 +1,139 @@
+import uuid
+from datetime import datetime
+from typing import Dict, List, Optional
+
+from kafka import KafkaProducer
+from kafka.errors import NoBrokersAvailable
+from streaming_data_types.run_stop_6s4t import serialise_6s4t as serialise_stop
+
+from file_writer_control.CommandChannel import CommandChannel
+from file_writer_control.CommandHandler import CommandHandler
+from file_writer_control.CommandStatus import CommandStatus
+from file_writer_control.JobStatus import JobState, JobStatus
+from file_writer_control.KafkaTopicUrl import KafkaTopicUrl
+from file_writer_control.WorkerStatus import WorkerStatus
+from file_writer_control.WriteJob import WriteJob
+
+
+class WorkerFinderBase:
+    def __init__(
+        self,
+        command_topic: str,
+        command_channel: CommandChannel,
+        message_producer: KafkaProducer,
+    ):
+        """
+        Constructor.
+        """
+        self.command_channel = command_channel
+        self.command_topic = command_topic
+        self.message_producer = message_producer
+
+    def send_command(self, message: bytes):
+        """
+        Send a message (command) to the file-writer "command"-topic.
+        :param message: The command/message as binary data.
+        """
+        self.message_producer.send(self.command_topic, message)
+
+    def try_start_job(self, job: WriteJob) -> CommandHandler:
+        """
+        Attempts to start a file-writing job. This function is not blocking. No guarantees are given that the job will
+        be successfully started.
+        .. note:: This class must be implemented by the classes inheriting from this one.
+        :param job: The file-writing job to be started.
+        :return: A CommandHandler instance for (more) easily checking the outcome of attempting to start a write job.
+        """
+        raise NotImplementedError("Not implemented in base class.")
+
+    def try_send_stop_time(
+        self, service_id: Optional[str], job_id: str, stop_time: datetime
+    ) -> CommandHandler:
+        """
+        Sends a "set stop time" message to a file-writer running a job as identified by the parameters.
+        This function is not blocking. No guarantees are given that this command will be followed.
+        :param service_id: The (optional) service identifier of the file-writer to receive the command.
+        :param job_id: The job identifier of the currently running file-writer job.
+        :param stop_time: The new stop time.
+        :return: A CommandHandler instance for (more) easily checking the outcome of setting a new stop time.
+        """
+        command_id = str(uuid.uuid1())
+        message = serialise_stop(
+            job_id=job_id,
+            service_id=service_id,
+            command_id=command_id,
+            stop_time=stop_time,
+        )
+        self.command_channel.add_command_id(job_id=job_id, command_id=command_id)
+        self.send_command(message)
+        return CommandHandler(self.command_channel, command_id)
+
+    def try_send_stop_now(
+        self, service_id: Optional[str], job_id: str
+    ) -> CommandHandler:
+        """
+        See documentation for `try_send_abort()`.
+        """
+        return self.try_send_abort(service_id, job_id)
+
+    def try_send_abort(self, service_id: Optional[str], job_id: str) -> CommandHandler:
+        """
+        Sends a "abort" message to a file-writer running a job as identified by the parameters of this function.
+        This function is not blocking. No guarantees are given that this command will be followed.
+        :param service_id: The (optional) service identifier of the file-writer to receive the command.
+        :param job_id: The job identifier of the currently running file-writer job.
+        :return: A CommandHandler instance for (more) easily checking the outcome of the "abort" command.
+        """
+        return self.try_send_stop_time(service_id, job_id, 0)
+
+    def list_known_workers(self) -> List[WorkerStatus]:
+        """
+        :return: A list of the (known) status of the workers publishing status updates to the configured command topic.
+        """
+        return self.command_channel.list_workers()
+
+    def list_known_jobs(self) -> List[JobStatus]:
+        """
+        :return: A list of the (known) jobs and their status as published on the configured command topic.
+        """
+        return self.command_channel.list_jobs()
+
+    def list_known_commands(self) -> List[CommandStatus]:
+        """
+        :return: A list of the (known) commands and their outcomes as published on the configured command topic.
+        """
+        return self.command_channel.list_commands()
+
+    def get_job_state(self, job_id: str) -> JobState:
+        """
+        Get the state of a specific job.
+        :param job_id: The (unique) identifier of the job that we are trying to find the state of.
+        :return: The state of the job if known, JobState.UNAVAILABLE if job is not known.
+        """
+        current_job = self.command_channel.get_job(job_id)
+        if current_job is None:
+            return JobState.UNAVAILABLE
+        return current_job.state
+
+    def get_job_status(self, job_id: str) -> JobStatus:
+        """
+        Get the full (known) status of a specific job.
+        :param job_id: The (unique) identifier of the job that we are trying to find the status of.
+        :return: The status of the job if known. None if it is not.
+        """
+        return self.command_channel.get_job(job_id)
+
+
+class WorkerFinder(WorkerFinderBase):
+    def __init__(self, command_topic_url: str, kafka_config: Dict[str, str] = {}):
+        temp_cmd_ch = CommandChannel(command_topic_url, kafka_config=kafka_config)
+        command_url = KafkaTopicUrl(command_topic_url)
+        try:
+            temp_producer = KafkaProducer(
+                bootstrap_servers=[command_url.host_port], **kafka_config
+            )
+        except NoBrokersAvailable as e:
+            raise NoBrokersAvailable(
+                f'Unable to find brokers (or connect to brokers) on address: "{command_url.host_port}"'
+            ) from e
+        super().__init__(command_url.topic, temp_cmd_ch, temp_producer)
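For orientation, a minimal sketch (not taken from the package) of how the WorkerFinder/WorkerFinderBase API above could be driven. The broker address, topic name, and job identifier are placeholders, a reachable Kafka broker is assumed, and the import path assumes the vendored package resolves as mccode_plumber.file_writer_control, as the writer.py change further below suggests.

from mccode_plumber.file_writer_control.WorkerFinder import WorkerFinder
from mccode_plumber.file_writer_control.JobStatus import JobState

# Placeholder command topic in the "<host:port>/<topic>" form parsed by KafkaTopicUrl.
finder = WorkerFinder("localhost:9092/writer_commands")

# Report what the command channel has observed so far; these calls do not block.
for worker in finder.list_known_workers():
    print(worker.service_id, worker.state)

job_id = "00000000-0000-1000-8000-000000000000"  # placeholder job identifier
if finder.get_job_state(job_id) != JobState.UNAVAILABLE:
    # Ask the writer handling this job to stop immediately; the returned
    # CommandHandler can later be inspected for the outcome.
    handler = finder.try_send_stop_now(None, job_id)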
mccode_plumber/file_writer_control/WorkerJobPool.py
ADDED
@@ -0,0 +1,70 @@
+from typing import Dict
+
+from kafka import KafkaProducer
+from kafka.errors import NoBrokersAvailable
+
+from file_writer_control.CommandHandler import CommandHandler
+from file_writer_control.CommandStatus import CommandState
+from file_writer_control.KafkaTopicUrl import KafkaTopicUrl
+from file_writer_control.WorkerFinder import WorkerFinder
+from file_writer_control.WriteJob import WriteJob
+
+
+class WorkerJobPool(WorkerFinder):
+    """
+    A child of WorkerFinder intended for use with "worker pool" style of starting a file-writing job.
+    """
+
+    def __init__(
+        self,
+        job_topic_url: str,
+        command_topic_url: str,
+        max_message_size: int = 1048576 * 200,
+        kafka_config: Dict[str, str] = {},
+    ):
+        """
+        :param job_topic_url: The Kafka topic that the available file-writers are listening to for write jobs.
+        :param command_topic_url: The Kafka topic that a file-writer uses to send status updates to and receive direct
+        commands from.
+        :param max_message_size: The maximum message (actually "request") size.
+        """
+        super().__init__(command_topic_url, kafka_config=kafka_config)
+        self._job_pool = KafkaTopicUrl(job_topic_url)
+        self._max_message_size = max_message_size
+        try:
+            self._pool_producer = KafkaProducer(
+                bootstrap_servers=[self._job_pool.host_port],
+                max_request_size=max_message_size,
+                buffer_memory=max_message_size,
+                **kafka_config,
+            )
+        except NoBrokersAvailable as e:
+            raise NoBrokersAvailable(
+                f'Unable to find brokers (or connect to brokers) on address: "{self._job_pool.host_port}"'
+            ) from e
+
+    def _send_pool_message(self, message: bytes):
+        """
+        Send a message to the Kafka topic that is configured as the job-pool topic.
+        .. note:: If the file-writer has been configured properly, it will only accept start-job messages to this topic.
+        :param message: The binary data of the message.
+        """
+        if len(message) >= self._max_message_size:
+            raise RuntimeError(
+                f"Unable to send Kafka message as message size is too large ({len(message)} vs"
+                f"{self._max_message_size} bytes). Increase max message size with the 'max_message_size'"
+                f"constructor argument."
+            )
+        self._pool_producer.send(self._job_pool.topic, message)
+
+    def try_start_job(self, job: WriteJob) -> CommandHandler:
+        """
+        See base class for documentation.
+        """
+        self.command_channel.add_job_id(job.job_id)
+        self.command_channel.add_command_id(job.job_id, job.job_id)
+        self.command_channel.get_command(
+            job.job_id
+        ).state = CommandState.WAITING_RESPONSE
+        self._send_pool_message(job.get_start_message())
+        return CommandHandler(self.command_channel, job.job_id)
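A sketch of constructing the pool defined above, mirroring how get_writer_pool in writer.py (changed further below) builds it. The broker address and topic names are placeholders, a running Kafka broker is assumed, and the import path again assumes the vendored mccode_plumber.file_writer_control package.

from mccode_plumber.file_writer_control import WorkerJobPool

broker = "localhost:9092"  # placeholder
pool = WorkerJobPool(
    f"{broker}/writer_jobs",      # job_topic_url: idle file-writers pick up start messages here
    f"{broker}/writer_commands",  # command_topic_url: status updates and direct commands
    max_message_size=1048576 * 200,
)
# pool.try_start_job(...) serialises a WriteJob (defined below) onto the job-pool
# topic and returns a CommandHandler; a start message at or above max_message_size
# raises RuntimeError before anything is sent.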
mccode_plumber/file_writer_control/WorkerStatus.py
ADDED
@@ -0,0 +1,88 @@
+from datetime import datetime, timedelta
+from enum import Enum, auto
+from typing import Optional
+
+DEFAULT_TIMEOUT = timedelta(seconds=15)
+
+
+class WorkerState(Enum):
+    """
+    The state of a worker (i.e. a file-writer instance).
+    """
+
+    IDLE = auto()
+    WRITING = auto()
+    UNKNOWN = auto()
+    UNAVAILABLE = auto()
+
+
+class WorkerStatus(object):
+    """
+    Contains general status information about a worker.
+    """
+
+    def __init__(self, service_id: str, timeout: Optional[timedelta] = DEFAULT_TIMEOUT):
+        self._last_update = datetime.now()
+        self._service_id = service_id
+        self._timeout = timeout
+        self._state = WorkerState.UNAVAILABLE
+
+    def __eq__(self, other_status: "WorkerStatus") -> bool:
+        if not isinstance(other_status, WorkerStatus):
+            raise NotImplementedError
+        return (
+            self.service_id == other_status.service_id
+            and self.state == other_status.state
+        )
+
+    def update_status(self, new_status: "WorkerStatus"):
+        """
+        Updates the status/state of this instance of the WorkerStatus class using another instance.
+        .. note:: The service identifier of both this instance and the other one must be identical.
+        :param new_status: The other instance of the WorkerStatus class.
+        """
+        if new_status.service_id != self.service_id:
+            raise RuntimeError(
+                f"Service id of status update is not correct ({self.service_id} vs {new_status.service_id})"
+            )
+        self._state = new_status.state
+        self._last_update = new_status.last_update
+
+    def check_if_outdated(self, current_time: datetime):
+        """
+        Given the current time, state and the time of the last update: Have we lost the connection?
+        :param current_time: The current time
+        """
+        if (
+            self.state != WorkerState.UNAVAILABLE
+            and current_time - self.last_update > self._timeout
+        ):
+            self._state = WorkerState.UNAVAILABLE
+            self._last_update = current_time
+
+    @property
+    def state(self) -> WorkerState:
+        """
+        The current state of the worker.
+        """
+        return self._state
+
+    @property
+    def service_id(self) -> str:
+        """
+        The service identifier of the worker that this instance of the WorkerState class represent.
+        """
+        return self._service_id
+
+    @property
+    def last_update(self) -> datetime:
+        """
+        The local time stamp of the last update of the status of the file-writer instance that this instance of the
+        WorkerStatus class represents.
+        """
+        return self._last_update
+
+    @state.setter
+    def state(self, new_state: WorkerState):
+        self._last_update = datetime.now()
+        self._state = new_state
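Because WorkerStatus has no Kafka dependencies, its timeout behaviour can be illustrated directly. A sketch with a made-up service identifier; the import path once more assumes the vendored package layout.

from datetime import datetime, timedelta

from mccode_plumber.file_writer_control.WorkerStatus import WorkerState, WorkerStatus

status = WorkerStatus("filewriter-1", timeout=timedelta(seconds=15))
status.state = WorkerState.IDLE  # the setter also refreshes last_update

# Within the timeout window the state is left alone ...
status.check_if_outdated(datetime.now() + timedelta(seconds=5))
assert status.state == WorkerState.IDLE

# ... but once more than 15 s pass without an update, the worker is treated as lost.
status.check_if_outdated(datetime.now() + timedelta(seconds=30))
assert status.state == WorkerState.UNAVAILABLE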
mccode_plumber/file_writer_control/WriteJob.py
ADDED
@@ -0,0 +1,83 @@
+import uuid
+from datetime import datetime, timedelta
+
+from streaming_data_types import serialise_pl72
+
+
+class WriteJob:
+    """
+    Represents a file-writer write job (before it has been started).
+    """
+
+    def __init__(
+        self,
+        nexus_structure: str,
+        file_name: str,
+        broker: str,
+        start_time: datetime,
+        stop_time: datetime = None,
+        job_id="",
+        instrument_name: str = "",
+        run_name: str = "",
+        metadata: str = "",
+        control_topic: str = "",
+    ):
+        self.structure = nexus_structure
+        self.file = file_name
+        if job_id:
+            try:
+                uuid.UUID(job_id)
+                self.job_id = job_id
+            except ValueError as e:
+                raise RuntimeError("Job ID should be a valid UUID (v1).") from e
+        else:
+            self.job_id = str(uuid.uuid1())
+        self.start = start_time
+        if stop_time is None:
+            self.stop = self.start + timedelta(days=365.25 * 10)
+        else:
+            self.stop = stop_time
+        self._service_id = ""
+        self.broker = broker
+        self.instrument_name = instrument_name
+        self.run_name = run_name
+        self.metadata = metadata
+        self.control_topic = control_topic
+
+    def generate_new_job_id(self):
+        """
+        Generate a new job id. Should be called if an attempt at starting this write job fails and another attempt is made.
+        """
+        self.job_id = str(uuid.uuid1())
+
+    @property
+    def service_id(self) -> str:
+        """
+        The service identifier that should process this job. Defaults to an empty string.
+        .. note:: Must be set if job is to be processed by a specific file-writer instance.
+        """
+        return self._service_id
+
+    @service_id.setter
+    def service_id(self, new_service_id: str):
+        self._service_id = new_service_id
+
+    def get_start_message(self) -> bytes:
+        """
+        Generate the (flatbuffer) start message that will start this job. If you are sending the message to a specific
+        file-writer instance, you have to set the service_id property first or the message will fail to start the job.
+        :return: A flatbuffer message that holds the necessary information for starting a write job.
+        """
+        return serialise_pl72(
+            self.job_id,
+            self.file,
+            self.start,
+            self.stop,
+            nexus_structure=self.structure,
+            service_id=self.service_id,
+            broker=self.broker,
+            instrument_name=self.instrument_name,
+            run_name=self.run_name,
+            metadata=self.metadata,
+            control_topic=self.control_topic,
+        )
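A sketch of building a WriteJob from the constructor above; the NeXus structure, file name, and broker are placeholders, and serialising the start message relies on ess-streaming-data-types (now declared in METADATA below).

from datetime import datetime, timedelta

from mccode_plumber.file_writer_control import WriteJob

job = WriteJob(
    nexus_structure='{"children": []}',  # placeholder NeXus structure (JSON string)
    file_name="run_0001.nxs",            # placeholder output file
    broker="localhost:9092",             # broker the file-writer streams data from
    start_time=datetime.now(),
    stop_time=datetime.now() + timedelta(minutes=10),
)

# Omitting job_id generates a fresh UUID (v1); omitting stop_time defaults the
# stop to roughly ten years after start_time.
print(job.job_id)

# Pack the job into the pl72 flatbuffer start message understood by the file-writer.
start_message = job.get_start_message()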
mccode_plumber/file_writer_control/__init__.py
ADDED
@@ -0,0 +1,13 @@
+from .CommandStatus import CommandState
+from .JobHandler import JobHandler
+from .JobStatus import JobState
+from .WorkerJobPool import WorkerJobPool
+from .WriteJob import WriteJob
+
+__all__ = [
+    "JobHandler",
+    "WorkerJobPool",
+    "WriteJob",
+    "CommandState",
+    "JobState",
+]
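Putting the exported names together, a rough end-to-end sketch of how this vendored package gets used (compare start_pool_writer in writer.py below). Broker, topics, and the job itself are placeholders, and only the API visible in this diff is exercised; JobHandler is also exported, but its interface (JobHandler.py) is not reproduced here.

from datetime import datetime, timedelta
from time import sleep

from mccode_plumber.file_writer_control import JobState, WorkerJobPool, WriteJob

broker = "localhost:9092"  # placeholder
pool = WorkerJobPool(f"{broker}/writer_jobs", f"{broker}/writer_commands")

job = WriteJob('{"children": []}', "run_0001.nxs", broker, datetime.now(),
               stop_time=datetime.now() + timedelta(minutes=1))
handler = pool.try_start_job(job)  # non-blocking; returns a CommandHandler

# Poll the command channel until the job shows up (give up after ~30 s).
for _ in range(30):
    if pool.get_job_state(job.job_id) != JobState.UNAVAILABLE:
        break
    sleep(1)
print(pool.get_job_state(job.job_id))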
mccode_plumber/writer.py
CHANGED
@@ -138,7 +138,7 @@ def insert_events_in_nexus_structure(ns: dict, config: dict):
 
 
 def get_writer_pool(broker: str = None, job: str = None, command: str = None):
-    from file_writer_control import WorkerJobPool
+    from .file_writer_control import WorkerJobPool
     pool = WorkerJobPool(f"{broker}/{job}", f"{broker}/{command}")
     return pool
 
@@ -175,7 +175,7 @@ def start_pool_writer(start_time_string, structure, filename=None, stop_time_str
     from time import sleep
     from json import dumps
     from datetime import datetime, timedelta
-    from file_writer_control import JobHandler, WriteJob, CommandState
+    from .file_writer_control import JobHandler, WriteJob, CommandState
 
     start_time = datetime.fromisoformat(start_time_string)
     if filename is None:
@@ -301,7 +301,7 @@ def wait_on_writer():
     from os import EX_OK, EX_UNAVAILABLE
     from time import sleep
     from datetime import datetime, timedelta
-    from file_writer_control import JobHandler, CommandState
+    from .file_writer_control import JobHandler, CommandState
 
     from argparse import ArgumentParser
     parser = ArgumentParser()
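The only change to writer.py is that the three file_writer_control imports become relative, so they resolve to the copy bundled under mccode_plumber (see the new RECORD entries below). From user code the pool is still obtained the same way; a sketch with placeholder broker and topic names:

from mccode_plumber.writer import get_writer_pool

# Resolves WorkerJobPool from the bundled mccode_plumber.file_writer_control package.
pool = get_writer_pool(broker="localhost:9092", job="writer_jobs", command="writer_commands")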
{mccode_plumber-0.6.0.dist-info → mccode_plumber-0.7.1.dist-info}/METADATA
CHANGED
@@ -1,11 +1,12 @@
 Metadata-Version: 2.4
 Name: mccode-plumber
-Version: 0.6.0
+Version: 0.7.1
 Author-email: Gregory Tucker <gregory.tucker@ess.eu>
 Classifier: License :: OSI Approved :: BSD License
 Description-Content-Type: text/markdown
 Requires-Dist: p4p
-Requires-Dist:
+Requires-Dist: kafka-python>=2.0
+Requires-Dist: ess-streaming-data-types>=0.14.0
 Requires-Dist: restage>=0.4.0
 Requires-Dist: mccode-to-kafka>=0.2.1
 Requires-Dist: moreniius>=0.2.3
mccode_plumber-0.7.1.dist-info/RECORD
ADDED
@@ -0,0 +1,27 @@
+mccode_plumber/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+mccode_plumber/conductor.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+mccode_plumber/epics.py,sha256=5j_ictmDloxUGbGNe2ESlkSDQsPknzgPAbUsoZEUWO0,4441
+mccode_plumber/forwarder.py,sha256=1PdW91qLksDnA41-gOFMtgSae4wDdSLVvgAlrF23Uks,3764
+mccode_plumber/kafka.py,sha256=8fHTy2Zzk638XXj29pfrk-7fUxafuse9OYWmaRgvV7M,1195
+mccode_plumber/mccode.py,sha256=WXlu89Cs6a3s0aKtfRGuzqcOWoFZtdZWibLgBQnaXCI,2210
+mccode_plumber/splitrun.py,sha256=MsR1cUvoayQGNCJNl5fIPG92Wfcy9V_8QNO_ITQTvtA,1295
+mccode_plumber/utils.py,sha256=E8NoGo_3Z-pPEpzicVGSWfQOX8p3eR-GxElT33-kX5U,2167
+mccode_plumber/writer.py,sha256=TSWw7Q1mlXcwOx4OuvNGN-yyB_X10Vmt1KywA79RmBs,16359
+mccode_plumber/file_writer_control/CommandChannel.py,sha256=rrQxmQ6VJsl9OC5n45XkL3g7MXtICT_8tsv_xuGdIKI,9119
+mccode_plumber/file_writer_control/CommandHandler.py,sha256=ETPGNiD3umt2XazCWztoaO6qgtilBzrB1N1EH5EAKUw,2245
+mccode_plumber/file_writer_control/CommandStatus.py,sha256=biNK58vY1H-USYAl9uB_JNQyMohBj1cwJwmXktndd4E,4517
+mccode_plumber/file_writer_control/InThreadStatusTracker.py,sha256=sLb8v1JBs3QSWXu-_kIznku3h_ZqVNwx0o9-RoxaKu4,10196
+mccode_plumber/file_writer_control/JobHandler.py,sha256=H6fr0O6lcGw4EKRUuR9Ak4gk5Rknozru0PGaNxNITeg,4296
+mccode_plumber/file_writer_control/JobStatus.py,sha256=ncLYEcTAOdrIASQvY0jJAX3fC4qF9tyzLavm5YUOhlQ,4416
+mccode_plumber/file_writer_control/KafkaTopicUrl.py,sha256=dG6Aj2tYo1UGZferxyvURUAr77z9dhBN_SRzizzt1Vo,736
+mccode_plumber/file_writer_control/StateExtractor.py,sha256=FaroeEuZCGTIi7kEIxeS7QHjnD7CxU1ONvKRQqRCmrc,2068
+mccode_plumber/file_writer_control/WorkerFinder.py,sha256=vMv-_YQSdchGDo5TAHQfJODWsatjCyCbLiCwtJJGZi4,6093
+mccode_plumber/file_writer_control/WorkerJobPool.py,sha256=2P6qYNgRtcolSO910a2BejoZyJf4H2ZiSfsVgYzwq8g,2966
+mccode_plumber/file_writer_control/WorkerStatus.py,sha256=uw1q-Pvf1o2hxpMIPVwdtnAXLcO7VeEpcZ3wKjxxdsk,2826
+mccode_plumber/file_writer_control/WriteJob.py,sha256=-2tQvfajctf6Bn19c9hT9N1lOP0uTo7SZRMnXV1A-aA,2721
+mccode_plumber/file_writer_control/__init__.py,sha256=Wp8A7JOB0vgpwAbvI9ik5cBlZa6lY-cq8dxYGRTRs0M,285
+mccode_plumber-0.7.1.dist-info/METADATA,sha256=MJPyAqAG4OUUFWy5hs-s9fZfzo9z7qQny3cUsNcUMss,552
+mccode_plumber-0.7.1.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+mccode_plumber-0.7.1.dist-info/entry_points.txt,sha256=V-ULMAQo7W1O3CLd39iSJcRWNjx9p4OuzEC_8y3T-Zs,520
+mccode_plumber-0.7.1.dist-info/top_level.txt,sha256=kCCIpYtKHCKWxiPEqX9J1UaGEm-ze0Qb-cemBCEPhDA,15
+mccode_plumber-0.7.1.dist-info/RECORD,,
mccode_plumber-0.6.0.dist-info/RECORD
REMOVED
@@ -1,14 +0,0 @@
-mccode_plumber/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-mccode_plumber/conductor.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-mccode_plumber/epics.py,sha256=THzech5V1UCLIN9u1r3GiEKxUVCDkYVXhfOQV71aQn4,3989
-mccode_plumber/forwarder.py,sha256=1PdW91qLksDnA41-gOFMtgSae4wDdSLVvgAlrF23Uks,3764
-mccode_plumber/kafka.py,sha256=8fHTy2Zzk638XXj29pfrk-7fUxafuse9OYWmaRgvV7M,1195
-mccode_plumber/mccode.py,sha256=WXlu89Cs6a3s0aKtfRGuzqcOWoFZtdZWibLgBQnaXCI,2210
-mccode_plumber/splitrun.py,sha256=MsR1cUvoayQGNCJNl5fIPG92Wfcy9V_8QNO_ITQTvtA,1295
-mccode_plumber/utils.py,sha256=E8NoGo_3Z-pPEpzicVGSWfQOX8p3eR-GxElT33-kX5U,2167
-mccode_plumber/writer.py,sha256=-gdh6fyOtr6mBhm5AZOs4x2D-bo4_zRSgGkNk-X0ez8,16356
-mccode_plumber-0.6.0.dist-info/METADATA,sha256=7NnOPT9uLE_ogTmdR3pSx0-L3T1oUNj9j4cep7pT4LU,513
-mccode_plumber-0.6.0.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
-mccode_plumber-0.6.0.dist-info/entry_points.txt,sha256=V-ULMAQo7W1O3CLd39iSJcRWNjx9p4OuzEC_8y3T-Zs,520
-mccode_plumber-0.6.0.dist-info/top_level.txt,sha256=kCCIpYtKHCKWxiPEqX9J1UaGEm-ze0Qb-cemBCEPhDA,15
-mccode_plumber-0.6.0.dist-info/RECORD,,
{mccode_plumber-0.6.0.dist-info → mccode_plumber-0.7.1.dist-info}/WHEEL
File without changes

{mccode_plumber-0.6.0.dist-info → mccode_plumber-0.7.1.dist-info}/entry_points.txt
File without changes

{mccode_plumber-0.6.0.dist-info → mccode_plumber-0.7.1.dist-info}/top_level.txt
File without changes