wool 0.1rc20__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- wool/__init__.py +122 -0
- wool/_context.py +29 -0
- wool/_protobuf/worker.py +26 -0
- wool/_resource_pool.py +376 -0
- wool/_typing.py +7 -0
- wool/_undefined.py +11 -0
- wool/_work.py +554 -0
- wool/core/__init__.py +0 -0
- wool/core/discovery/__init__.py +0 -0
- wool/core/discovery/base.py +249 -0
- wool/core/discovery/lan.py +534 -0
- wool/core/discovery/local.py +822 -0
- wool/core/loadbalancer/__init__.py +0 -0
- wool/core/loadbalancer/base.py +125 -0
- wool/core/loadbalancer/roundrobin.py +101 -0
- wool/core/protobuf/__init__.py +18 -0
- wool/core/protobuf/exception.py +3 -0
- wool/core/protobuf/task.py +11 -0
- wool/core/protobuf/task_pb2.py +42 -0
- wool/core/protobuf/task_pb2.pyi +43 -0
- wool/core/protobuf/task_pb2_grpc.py +24 -0
- wool/core/protobuf/worker.py +26 -0
- wool/core/protobuf/worker_pb2.py +53 -0
- wool/core/protobuf/worker_pb2.pyi +65 -0
- wool/core/protobuf/worker_pb2_grpc.py +141 -0
- wool/core/typing.py +22 -0
- wool/core/worker/__init__.py +0 -0
- wool/core/worker/base.py +300 -0
- wool/core/worker/connection.py +250 -0
- wool/core/worker/local.py +148 -0
- wool/core/worker/pool.py +386 -0
- wool/core/worker/process.py +249 -0
- wool/core/worker/proxy.py +427 -0
- wool/core/worker/service.py +231 -0
- wool-0.1rc20.dist-info/METADATA +463 -0
- wool-0.1rc20.dist-info/RECORD +38 -0
- wool-0.1rc20.dist-info/WHEEL +4 -0
- wool-0.1rc20.dist-info/entry_points.txt +2 -0
|
File without changes
|
|
@@ -0,0 +1,125 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from types import MappingProxyType
|
|
4
|
+
from typing import TYPE_CHECKING
|
|
5
|
+
from typing import AsyncIterator
|
|
6
|
+
from typing import Callable
|
|
7
|
+
from typing import Final
|
|
8
|
+
from typing import Protocol
|
|
9
|
+
from typing import TypeAlias
|
|
10
|
+
from typing import runtime_checkable
|
|
11
|
+
|
|
12
|
+
from wool._resource_pool import Resource
|
|
13
|
+
from wool.core.discovery.base import WorkerInfo
|
|
14
|
+
from wool.core.worker.connection import WorkerConnection
|
|
15
|
+
|
|
16
|
+
if TYPE_CHECKING:
|
|
17
|
+
from wool._work import WoolTask
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
# public
# Zero-argument callable producing a ``Resource`` that wraps a
# ``WorkerConnection``. Load balancers invoke the factory once per dispatch
# attempt and enter the returned resource as an async context manager (see
# ``RoundRobinLoadBalancer.dispatch``).
ConnectionResourceFactory: TypeAlias = Callable[[], Resource[WorkerConnection]]
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
# public
|
|
25
|
+
class NoWorkersAvailable(Exception):
    """Signals that a task could not be dispatched to any worker.

    Raised either when the worker pool contains no workers at all, or when
    every known worker was attempted and none could accept the task.
    """
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
# public
|
|
34
|
+
@runtime_checkable
class LoadBalancerLike(Protocol):
    """Structural interface for task-dispatching load balancers.

    An implementation receives a :class:`LoadBalancerContext` on every call,
    which supplies the workers and their connection resource factories.
    Because all pool-specific state lives in the context, a single load
    balancer object can serve any number of worker pools independently.

    ``dispatch`` accepts a :class:`WoolTask` and produces an async iterator
    over the results the chosen worker streams back.
    """

    async def dispatch(
        self,
        task: WoolTask,
        *,
        context: LoadBalancerContext,
        timeout: float | None = None,
    ) -> AsyncIterator: ...
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
# public
|
|
57
|
+
class LoadBalancerContext:
    """Per-pool load-balancing state container.

    Holds the set of known workers for one worker pool together with the
    factories that produce connection resources for them, so a single load
    balancer instance can serve several pools without their state bleeding
    into each other.
    """

    _workers: Final[dict[WorkerInfo, ConnectionResourceFactory]]

    def __init__(self):
        # Insertion-ordered registry of worker -> connection resource factory.
        self._workers = {}

    @property
    def workers(self) -> MappingProxyType[WorkerInfo, ConnectionResourceFactory]:
        """Read-only live view of this context's worker registry.

        :returns:
            An immutable mapping proxy over the internal registry. Mutations
            made through ``add_worker``/``update_worker``/``remove_worker``
            are visible through the returned view.
        """
        return MappingProxyType(self._workers)

    def add_worker(
        self,
        worker_info: WorkerInfo,
        connection_resource_factory: ConnectionResourceFactory,
    ):
        """Register a worker (or overwrite an existing registration).

        :param worker_info:
            Information about the worker to add.
        :param connection_resource_factory:
            Factory producing connection resources for this worker.
        """
        self._workers[worker_info] = connection_resource_factory

    def update_worker(
        self,
        worker_info: WorkerInfo,
        connection_resource_factory: ConnectionResourceFactory,
        *,
        upsert: bool = False,
    ):
        """Replace a registered worker's connection resource factory.

        :param worker_info:
            Worker whose factory should be replaced. Unknown workers are
            ignored unless ``upsert`` is set.
        :param connection_resource_factory:
            Replacement factory for this worker.
        :param upsert:
            When true, register the worker even if it was not already
            present in the context.
        """
        if not upsert and worker_info not in self._workers:
            return
        self._workers[worker_info] = connection_resource_factory

    def remove_worker(self, worker_info: WorkerInfo):
        """Drop a worker from this context; unknown workers are a no-op.

        :param worker_info:
            Worker to deregister.
        """
        self._workers.pop(worker_info, None)
|
|
@@ -0,0 +1,101 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import itertools
|
|
4
|
+
from typing import TYPE_CHECKING
|
|
5
|
+
from typing import AsyncIterator
|
|
6
|
+
from typing import Final
|
|
7
|
+
|
|
8
|
+
from wool.core.worker.connection import TransientRpcError
|
|
9
|
+
|
|
10
|
+
from .base import LoadBalancerContext
|
|
11
|
+
from .base import LoadBalancerLike
|
|
12
|
+
from .base import NoWorkersAvailable
|
|
13
|
+
|
|
14
|
+
if TYPE_CHECKING:
|
|
15
|
+
from wool._work import WoolTask
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
# public
|
|
19
|
+
class RoundRobinLoadBalancer(LoadBalancerLike):
    """Round-robin load balancer for distributing tasks across workers.

    Distributes tasks evenly across available workers using a simple round-robin
    algorithm. Workers are managed through :class:`LoadBalancerContext` instances
    passed to the dispatch method, enabling a single load balancer to service
    multiple worker pools with independent state.

    Automatically handles worker failures by trying the next worker in the
    round-robin cycle. Workers that encounter transient errors remain in the
    context, while workers that fail with non-transient errors are removed from
    the context's worker list.
    """

    # Per-context cursor into that context's worker mapping; lets one balancer
    # instance track an independent rotation position for each pool it serves.
    # NOTE(review): entries are never removed, so a long-lived balancer serving
    # many short-lived contexts retains one int per context.
    _index: Final[dict[LoadBalancerContext, int]]

    def __init__(self):
        self._index = {}

    async def dispatch(
        self,
        task: WoolTask,
        *,
        context: LoadBalancerContext,
        timeout: float | None = None,
    ) -> AsyncIterator:
        """Dispatch a task to the next available worker using round-robin.

        Tries workers in one round-robin cycle until dispatch succeeds.
        Workers that fail to schedule the task with a non-transient error are
        removed from the context's worker list.

        :param task:
            The :class:`WoolTask` instance to dispatch to the worker.
        :param context:
            The :class:`LoadBalancerContext` containing workers to dispatch to.
        :param timeout:
            Timeout in seconds for each dispatch attempt. If ``None``, no
            timeout is applied.
        :returns:
            A streaming dispatch result that yields worker responses.
        :raises NoWorkersAvailable:
            If no healthy workers are available to schedule the task.
        """
        # uid of the first worker attempted on this call; encountering it a
        # second time means one full cycle was tried without success.
        checkpoint = None

        # Initialize index for this context if not present
        if context not in self._index:
            self._index[context] = 0

        while context.workers:
            # Wrap the cursor when it runs past the end; the worker list may
            # have shrunk since the previous dispatch.
            if self._index[context] >= len(context.workers):
                self._index[context] = 0

            # Pick the (worker, factory) pair at the cursor position without
            # materializing the whole items view.
            worker_info, connection_resource_factory = next(
                itertools.islice(
                    context.workers.items(),
                    self._index[context],
                    self._index[context] + 1,
                )
            )

            if checkpoint is None:
                # Anchor the cycle on the first worker tried.
                checkpoint = worker_info.uid
            elif worker_info.uid == checkpoint:
                # Back at the anchor: every worker was attempted once.
                break

            async with connection_resource_factory() as connection:
                try:
                    result = await connection.dispatch(task, timeout=timeout)
                except TransientRpcError:
                    # Transient failure: keep the worker registered but move
                    # the cursor past it.
                    self._index[context] = self._index[context] + 1
                    continue
                except Exception:
                    # Non-transient failure: evict the worker. The cursor is
                    # deliberately not advanced — removal shifts the next
                    # worker into the current position.
                    context.remove_worker(worker_info)
                    if worker_info.uid == checkpoint:
                        # The cycle anchor was evicted; re-anchor on whichever
                        # worker is tried next.
                        checkpoint = None
                    continue
                else:
                    # Success: advance the cursor so the next dispatch starts
                    # at the following worker, and hand back the stream.
                    self._index[context] = self._index[context] + 1
                    return result

        raise NoWorkersAvailable("No healthy workers available for dispatch")
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
import os
import sys
from typing import Protocol

# The generated worker_pb2 module imports its sibling with a bare
# ``import task_pb2`` (no package prefix), so this package directory must be
# reachable as a top-level import location before the wrapper modules below
# are imported.
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))

# Re-export the wrapper modules; the ``x as x`` form marks them as explicit
# public re-exports for type checkers.
from . import task as task
from . import worker as worker
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class AddServicerToServerProtocol(Protocol):
    """Structural type of the generated ``add_*Servicer_to_server`` helpers.

    Matches callables that take a servicer instance and a gRPC server and
    register the servicer's RPC handlers on that server.
    """

    @staticmethod
    def __call__(servicer, server) -> None: ...
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
# Registry mapping each servicer base class to the protoc-generated function
# that registers it on a gRPC server.
add_to_server: dict[type[worker.WorkerServicer], AddServicerToServerProtocol] = {
    worker.WorkerServicer: worker.add_WorkerServicer_to_server,
}
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
# Re-export the task protobuf messages behind a guard that converts a failed
# import of the generated module into the package's ProtobufImportError.
try:
    from wool.core.protobuf.task_pb2 import Exception
    from wool.core.protobuf.task_pb2 import Result
    from wool.core.protobuf.task_pb2 import Task
    from wool.core.protobuf.task_pb2 import Worker as Worker
except ImportError as e:
    from wool.core.protobuf.exception import ProtobufImportError

    raise ProtobufImportError(e) from e

# NOTE(review): the re-exported ``Exception`` message shadows the builtin
# ``Exception`` inside this module; consumers should import it qualified or
# under an alias.
__all__ = ["Exception", "Result", "Task", "Worker"]
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# NO CHECKED-IN PROTOBUF GENCODE
# source: task.proto
# Protobuf Python Version: 6.31.1
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import runtime_version as _runtime_version
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
_runtime_version.ValidateProtobufRuntimeVersion(
    _runtime_version.Domain.PUBLIC,
    6,
    31,
    1,
    '',
    'task.proto'
)
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()




# Serialized FileDescriptorProto for task.proto (messages: Task, Result,
# Exception, Worker).
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\ntask.proto\x12\x17wool.core.protobuf.task\"s\n\x04Task\x12\n\n\x02id\x18\x01 \x01(\t\x12\x10\n\x08\x63\x61llable\x18\x02 \x01(\x0c\x12\x0c\n\x04\x61rgs\x18\x03 \x01(\x0c\x12\x0e\n\x06kwargs\x18\x04 \x01(\x0c\x12\x0e\n\x06\x63\x61ller\x18\x05 \x01(\t\x12\r\n\x05proxy\x18\x07 \x01(\x0c\x12\x10\n\x08proxy_id\x18\x08 \x01(\t\"\x16\n\x06Result\x12\x0c\n\x04\x64ump\x18\x01 \x01(\x0c\"\x19\n\tException\x12\x0c\n\x04\x64ump\x18\x01 \x01(\x0c\"%\n\x06Worker\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x64\x64ress\x18\x02 \x01(\tb\x06proto3')

_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'task_pb2', _globals)
if not _descriptor._USE_C_DESCRIPTORS:
  DESCRIPTOR._loaded_options = None
  _globals['_TASK']._serialized_start=39
  _globals['_TASK']._serialized_end=154
  _globals['_RESULT']._serialized_start=156
  _globals['_RESULT']._serialized_end=178
  _globals['_EXCEPTION']._serialized_start=180
  _globals['_EXCEPTION']._serialized_end=205
  _globals['_WORKER']._serialized_start=207
  _globals['_WORKER']._serialized_end=244
# @@protoc_insertion_point(module_scope)
|
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
# NOTE(review): machine-generated type stubs for task_pb2 (protoc output);
# regenerate instead of hand-editing.
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from typing import ClassVar as _ClassVar, Optional as _Optional

DESCRIPTOR: _descriptor.FileDescriptor

class Task(_message.Message):
    __slots__ = ("id", "callable", "args", "kwargs", "caller", "proxy", "proxy_id")
    ID_FIELD_NUMBER: _ClassVar[int]
    CALLABLE_FIELD_NUMBER: _ClassVar[int]
    ARGS_FIELD_NUMBER: _ClassVar[int]
    KWARGS_FIELD_NUMBER: _ClassVar[int]
    CALLER_FIELD_NUMBER: _ClassVar[int]
    PROXY_FIELD_NUMBER: _ClassVar[int]
    PROXY_ID_FIELD_NUMBER: _ClassVar[int]
    id: str
    callable: bytes
    args: bytes
    kwargs: bytes
    caller: str
    proxy: bytes
    proxy_id: str
    def __init__(self, id: _Optional[str] = ..., callable: _Optional[bytes] = ..., args: _Optional[bytes] = ..., kwargs: _Optional[bytes] = ..., caller: _Optional[str] = ..., proxy: _Optional[bytes] = ..., proxy_id: _Optional[str] = ...) -> None: ...

class Result(_message.Message):
    __slots__ = ("dump",)
    DUMP_FIELD_NUMBER: _ClassVar[int]
    dump: bytes
    def __init__(self, dump: _Optional[bytes] = ...) -> None: ...

class Exception(_message.Message):
    __slots__ = ("dump",)
    DUMP_FIELD_NUMBER: _ClassVar[int]
    dump: bytes
    def __init__(self, dump: _Optional[bytes] = ...) -> None: ...

class Worker(_message.Message):
    __slots__ = ("id", "address")
    ID_FIELD_NUMBER: _ClassVar[int]
    ADDRESS_FIELD_NUMBER: _ClassVar[int]
    id: str
    address: str
    def __init__(self, id: _Optional[str] = ..., address: _Optional[str] = ...) -> None: ...
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
import warnings


GRPC_GENERATED_VERSION = '1.76.0'
GRPC_VERSION = grpc.__version__
_version_not_supported = False

try:
    from grpc._utilities import first_version_is_lower
    _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION)
except ImportError:
    _version_not_supported = True

# NOTE(review): task.proto appears to declare only messages (no services), so
# this module consists solely of the grpcio version guard below, which fails
# fast at import time when the installed grpcio predates the generator.
if _version_not_supported:
    raise RuntimeError(
        f'The grpc package installed is at version {GRPC_VERSION},'
        + ' but the generated code in task_pb2_grpc.py depends on'
        + f' grpcio>={GRPC_GENERATED_VERSION}.'
        + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}'
        + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.'
    )
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
# Re-export the worker protobuf messages and gRPC bindings behind a guard
# that converts a failed import of the generated modules into the package's
# ProtobufImportError.
try:
    from wool.core.protobuf.worker_pb2 import Ack
    from wool.core.protobuf.worker_pb2 import Nack
    from wool.core.protobuf.worker_pb2 import Response
    from wool.core.protobuf.worker_pb2 import StopRequest
    from wool.core.protobuf.worker_pb2 import Void
    from wool.core.protobuf.worker_pb2 import WorkerInfo
    from wool.core.protobuf.worker_pb2_grpc import WorkerServicer
    from wool.core.protobuf.worker_pb2_grpc import WorkerStub
    from wool.core.protobuf.worker_pb2_grpc import add_WorkerServicer_to_server
except ImportError as e:
    from wool.core.protobuf.exception import ProtobufImportError

    raise ProtobufImportError(e) from e

__all__ = [
    "Ack",
    "Nack",
    "Response",
    "StopRequest",
    "Void",
    "WorkerInfo",
    "WorkerServicer",
    "WorkerStub",
    "add_WorkerServicer_to_server",
]
|
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# NO CHECKED-IN PROTOBUF GENCODE
# source: worker.proto
# Protobuf Python Version: 6.31.1
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import runtime_version as _runtime_version
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
_runtime_version.ValidateProtobufRuntimeVersion(
    _runtime_version.Domain.PUBLIC,
    6,
    31,
    1,
    '',
    'worker.proto'
)
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


# NOTE(review): bare (non-relative) sibling import — resolvable only because
# wool/core/protobuf/__init__.py prepends this directory to sys.path before
# the wrapper modules import worker_pb2.
import task_pb2 as task__pb2


DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0cworker.proto\x12\x19wool.core.protobuf.worker\x1a\ntask.proto\"\xe1\x01\n\x08Response\x12-\n\x03\x61\x63k\x18\x01 \x01(\x0b\x32\x1e.wool.core.protobuf.worker.AckH\x00\x12/\n\x04nack\x18\x02 \x01(\x0b\x32\x1f.wool.core.protobuf.worker.NackH\x00\x12\x31\n\x06result\x18\x03 \x01(\x0b\x32\x1f.wool.core.protobuf.task.ResultH\x00\x12\x37\n\texception\x18\x04 \x01(\x0b\x32\".wool.core.protobuf.task.ExceptionH\x00\x42\t\n\x07payload\"\x05\n\x03\x41\x63k\"\x16\n\x04Nack\x12\x0e\n\x06reason\x18\x01 \x01(\t\"\x1e\n\x0bStopRequest\x12\x0f\n\x07timeout\x18\x01 \x01(\x02\"\xd0\x01\n\nWorkerInfo\x12\x0b\n\x03uid\x18\x01 \x01(\t\x12\x0c\n\x04host\x18\x02 \x01(\t\x12\x0c\n\x04port\x18\x03 \x01(\x05\x12\x0b\n\x03pid\x18\x04 \x01(\x05\x12\x0f\n\x07version\x18\x05 \x01(\t\x12\x0c\n\x04tags\x18\x06 \x03(\t\x12?\n\x05\x65xtra\x18\x07 \x03(\x0b\x32\x30.wool.core.protobuf.worker.WorkerInfo.ExtraEntry\x1a,\n\nExtraEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x06\n\x04Void2\xab\x01\n\x06Worker\x12P\n\x08\x64ispatch\x12\x1d.wool.core.protobuf.task.Task\x1a#.wool.core.protobuf.worker.Response0\x01\x12O\n\x04stop\x12&.wool.core.protobuf.worker.StopRequest\x1a\x1f.wool.core.protobuf.worker.Voidb\x06proto3')

_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'worker_pb2', _globals)
if not _descriptor._USE_C_DESCRIPTORS:
  DESCRIPTOR._loaded_options = None
  _globals['_WORKERINFO_EXTRAENTRY']._loaded_options = None
  _globals['_WORKERINFO_EXTRAENTRY']._serialized_options = b'8\001'
  _globals['_RESPONSE']._serialized_start=56
  _globals['_RESPONSE']._serialized_end=281
  _globals['_ACK']._serialized_start=283
  _globals['_ACK']._serialized_end=288
  _globals['_NACK']._serialized_start=290
  _globals['_NACK']._serialized_end=312
  _globals['_STOPREQUEST']._serialized_start=314
  _globals['_STOPREQUEST']._serialized_end=344
  _globals['_WORKERINFO']._serialized_start=347
  _globals['_WORKERINFO']._serialized_end=555
  _globals['_WORKERINFO_EXTRAENTRY']._serialized_start=511
  _globals['_WORKERINFO_EXTRAENTRY']._serialized_end=555
  _globals['_VOID']._serialized_start=557
  _globals['_VOID']._serialized_end=563
  _globals['_WORKER']._serialized_start=566
  _globals['_WORKER']._serialized_end=737
# @@protoc_insertion_point(module_scope)
|
|
@@ -0,0 +1,65 @@
|
|
|
1
|
+
# NOTE(review): machine-generated type stubs for worker_pb2 (protoc output);
# regenerate instead of hand-editing. The bare ``import task_pb2`` mirrors the
# generated module and presumes the package directory is on the import path.
import task_pb2 as _task_pb2
from google.protobuf.internal import containers as _containers
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from collections.abc import Iterable as _Iterable, Mapping as _Mapping
from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union

DESCRIPTOR: _descriptor.FileDescriptor

class Response(_message.Message):
    __slots__ = ("ack", "nack", "result", "exception")
    ACK_FIELD_NUMBER: _ClassVar[int]
    NACK_FIELD_NUMBER: _ClassVar[int]
    RESULT_FIELD_NUMBER: _ClassVar[int]
    EXCEPTION_FIELD_NUMBER: _ClassVar[int]
    ack: Ack
    nack: Nack
    result: _task_pb2.Result
    exception: _task_pb2.Exception
    def __init__(self, ack: _Optional[_Union[Ack, _Mapping]] = ..., nack: _Optional[_Union[Nack, _Mapping]] = ..., result: _Optional[_Union[_task_pb2.Result, _Mapping]] = ..., exception: _Optional[_Union[_task_pb2.Exception, _Mapping]] = ...) -> None: ...

class Ack(_message.Message):
    __slots__ = ()
    def __init__(self) -> None: ...

class Nack(_message.Message):
    __slots__ = ("reason",)
    REASON_FIELD_NUMBER: _ClassVar[int]
    reason: str
    def __init__(self, reason: _Optional[str] = ...) -> None: ...

class StopRequest(_message.Message):
    __slots__ = ("timeout",)
    TIMEOUT_FIELD_NUMBER: _ClassVar[int]
    timeout: float
    def __init__(self, timeout: _Optional[float] = ...) -> None: ...

class WorkerInfo(_message.Message):
    __slots__ = ("uid", "host", "port", "pid", "version", "tags", "extra")
    class ExtraEntry(_message.Message):
        __slots__ = ("key", "value")
        KEY_FIELD_NUMBER: _ClassVar[int]
        VALUE_FIELD_NUMBER: _ClassVar[int]
        key: str
        value: str
        def __init__(self, key: _Optional[str] = ..., value: _Optional[str] = ...) -> None: ...
    UID_FIELD_NUMBER: _ClassVar[int]
    HOST_FIELD_NUMBER: _ClassVar[int]
    PORT_FIELD_NUMBER: _ClassVar[int]
    PID_FIELD_NUMBER: _ClassVar[int]
    VERSION_FIELD_NUMBER: _ClassVar[int]
    TAGS_FIELD_NUMBER: _ClassVar[int]
    EXTRA_FIELD_NUMBER: _ClassVar[int]
    uid: str
    host: str
    port: int
    pid: int
    version: str
    tags: _containers.RepeatedScalarFieldContainer[str]
    extra: _containers.ScalarMap[str, str]
    def __init__(self, uid: _Optional[str] = ..., host: _Optional[str] = ..., port: _Optional[int] = ..., pid: _Optional[int] = ..., version: _Optional[str] = ..., tags: _Optional[_Iterable[str]] = ..., extra: _Optional[_Mapping[str, str]] = ...) -> None: ...

class Void(_message.Message):
    __slots__ = ()
    def __init__(self) -> None: ...
|
|
@@ -0,0 +1,141 @@
|
|
|
1
|
+
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
import warnings

from . import task_pb2 as task__pb2
from . import worker_pb2 as worker__pb2

GRPC_GENERATED_VERSION = '1.76.0'
GRPC_VERSION = grpc.__version__
_version_not_supported = False

try:
    from grpc._utilities import first_version_is_lower
    _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION)
except ImportError:
    _version_not_supported = True

# Fail fast at import time when the installed grpcio predates the plugin
# version that generated this module.
if _version_not_supported:
    raise RuntimeError(
        f'The grpc package installed is at version {GRPC_VERSION},'
        + ' but the generated code in worker_pb2_grpc.py depends on'
        + f' grpcio>={GRPC_GENERATED_VERSION}.'
        + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}'
        + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.'
    )
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
class WorkerStub(object):
    """Missing associated documentation comment in .proto file."""

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        # dispatch is server-streaming: one Task request in, a stream of
        # Response messages out.
        self.dispatch = channel.unary_stream(
                '/wool.core.protobuf.worker.Worker/dispatch',
                request_serializer=task__pb2.Task.SerializeToString,
                response_deserializer=worker__pb2.Response.FromString,
                _registered_method=True)
        # stop is unary-unary: one StopRequest in, one Void out.
        self.stop = channel.unary_unary(
                '/wool.core.protobuf.worker.Worker/stop',
                request_serializer=worker__pb2.StopRequest.SerializeToString,
                response_deserializer=worker__pb2.Void.FromString,
                _registered_method=True)
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
class WorkerServicer(object):
    """Missing associated documentation comment in .proto file."""

    # Both default handlers reject the RPC as UNIMPLEMENTED; a concrete
    # servicer subclass overrides them with real behavior.
    def dispatch(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def stop(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
# Registers the servicer's dispatch (server-streaming) and stop (unary)
# handlers on the given gRPC server, both as a generic handler and as
# registered method handlers.
def add_WorkerServicer_to_server(servicer, server):
    rpc_method_handlers = {
            'dispatch': grpc.unary_stream_rpc_method_handler(
                    servicer.dispatch,
                    request_deserializer=task__pb2.Task.FromString,
                    response_serializer=worker__pb2.Response.SerializeToString,
            ),
            'stop': grpc.unary_unary_rpc_method_handler(
                    servicer.stop,
                    request_deserializer=worker__pb2.StopRequest.FromString,
                    response_serializer=worker__pb2.Void.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'wool.core.protobuf.worker.Worker', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
    server.add_registered_method_handlers('wool.core.protobuf.worker.Worker', rpc_method_handlers)
|
|
83
|
+
|
|
84
|
+
|
|
85
|
+
# This class is part of an EXPERIMENTAL API.
# NOTE(review): one-shot invocation helpers from grpc.experimental — each call
# takes a target address directly instead of a pre-built channel; see the gRPC
# Python experimental API docs for channel-management semantics.
class Worker(object):
    """Missing associated documentation comment in .proto file."""

    @staticmethod
    def dispatch(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_stream(
            request,
            target,
            '/wool.core.protobuf.worker.Worker/dispatch',
            task__pb2.Task.SerializeToString,
            worker__pb2.Response.FromString,
            options,
            channel_credentials,
            insecure,
            call_credentials,
            compression,
            wait_for_ready,
            timeout,
            metadata,
            _registered_method=True)

    @staticmethod
    def stop(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(
            request,
            target,
            '/wool.core.protobuf.worker.Worker/stop',
            worker__pb2.StopRequest.SerializeToString,
            worker__pb2.Void.FromString,
            options,
            channel_credentials,
            insecure,
            call_credentials,
            compression,
            wait_for_ready,
            timeout,
            metadata,
            _registered_method=True)
|