UncountablePythonSDK 0.0.68__py3-none-any.whl → 0.0.70__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {UncountablePythonSDK-0.0.68.dist-info → UncountablePythonSDK-0.0.70.dist-info}/METADATA +3 -1
- {UncountablePythonSDK-0.0.68.dist-info → UncountablePythonSDK-0.0.70.dist-info}/RECORD +47 -19
- docs/requirements.txt +1 -1
- examples/integration-server/jobs/materials_auto/example_cron.py +18 -0
- examples/integration-server/jobs/materials_auto/profile.yaml +19 -0
- examples/integration-server/pyproject.toml +224 -0
- examples/set_recipe_metadata_file.py +40 -0
- examples/set_recipe_output_file_sdk.py +26 -0
- uncountable/core/environment.py +5 -1
- uncountable/integration/cli.py +1 -0
- uncountable/integration/cron.py +12 -28
- uncountable/integration/db/connect.py +12 -2
- uncountable/integration/db/session.py +25 -0
- uncountable/integration/entrypoint.py +6 -6
- uncountable/integration/executors/generic_upload_executor.py +5 -1
- uncountable/integration/job.py +44 -17
- uncountable/integration/queue_runner/__init__.py +0 -0
- uncountable/integration/queue_runner/command_server/__init__.py +24 -0
- uncountable/integration/queue_runner/command_server/command_client.py +68 -0
- uncountable/integration/queue_runner/command_server/command_server.py +64 -0
- uncountable/integration/queue_runner/command_server/protocol/__init__.py +0 -0
- uncountable/integration/queue_runner/command_server/protocol/command_server.proto +22 -0
- uncountable/integration/queue_runner/command_server/protocol/command_server_pb2.py +40 -0
- uncountable/integration/queue_runner/command_server/protocol/command_server_pb2.pyi +38 -0
- uncountable/integration/queue_runner/command_server/protocol/command_server_pb2_grpc.py +129 -0
- uncountable/integration/queue_runner/command_server/types.py +52 -0
- uncountable/integration/queue_runner/datastore/__init__.py +3 -0
- uncountable/integration/queue_runner/datastore/datastore_sqlite.py +93 -0
- uncountable/integration/queue_runner/datastore/interface.py +19 -0
- uncountable/integration/queue_runner/datastore/model.py +17 -0
- uncountable/integration/queue_runner/job_scheduler.py +119 -0
- uncountable/integration/queue_runner/queue_runner.py +26 -0
- uncountable/integration/queue_runner/types.py +7 -0
- uncountable/integration/queue_runner/worker.py +109 -0
- uncountable/integration/scan_profiles.py +2 -0
- uncountable/integration/scheduler.py +144 -0
- uncountable/integration/webhook_server/entrypoint.py +45 -45
- uncountable/types/__init__.py +4 -0
- uncountable/types/api/recipes/get_recipes_data.py +1 -0
- uncountable/types/api/recipes/set_recipe_output_file.py +46 -0
- uncountable/types/client_base.py +20 -0
- uncountable/types/entity_t.py +2 -0
- uncountable/types/queued_job.py +16 -0
- uncountable/types/queued_job_t.py +107 -0
- uncountable/types/recipe_metadata_t.py +1 -0
- {UncountablePythonSDK-0.0.68.dist-info → UncountablePythonSDK-0.0.70.dist-info}/WHEEL +0 -0
- {UncountablePythonSDK-0.0.68.dist-info → UncountablePythonSDK-0.0.70.dist-info}/top_level.txt +0 -0
|
@@ -165,7 +165,7 @@ def _move_files_post_upload(
|
|
|
165
165
|
filesystem_session.move_files([*success_file_transfers, *failed_file_transfers])
|
|
166
166
|
|
|
167
167
|
|
|
168
|
-
class GenericUploadJob(Job):
|
|
168
|
+
class GenericUploadJob(Job[None]):
|
|
169
169
|
def __init__(
|
|
170
170
|
self,
|
|
171
171
|
data_source: GenericUploadDataSource,
|
|
@@ -177,6 +177,10 @@ class GenericUploadJob(Job):
|
|
|
177
177
|
self.upload_strategy = upload_strategy
|
|
178
178
|
self.data_source = data_source
|
|
179
179
|
|
|
180
|
+
@property
|
|
181
|
+
def payload_type(self) -> type[None]:
|
|
182
|
+
return type(None)
|
|
183
|
+
|
|
180
184
|
def _construct_filesystem_session(self, args: JobArguments) -> FileSystemSession:
|
|
181
185
|
match self.data_source:
|
|
182
186
|
case GenericUploadDataSourceSFTP():
|
uncountable/integration/job.py
CHANGED
|
import functools
import typing
from abc import ABC, abstractmethod
from dataclasses import dataclass

from pkgs.argument_parser import CachedParser
from uncountable.core.async_batch import AsyncBatchProcessor
from uncountable.core.client import Client
from uncountable.integration.telemetry import JobLogger
from uncountable.types import base_t, webhook_job_t
from uncountable.types.job_definition_t import JobDefinition, JobResult, ProfileMetadata


@dataclass(kw_only=True)
class JobArguments:
    """Everything an integration job receives when it is invoked."""

    job_definition: JobDefinition
    profile_metadata: ProfileMetadata
    client: Client
    batch_processor: AsyncBatchProcessor
    logger: JobLogger
    # Raw, still-serialized payload; decoded via Job.get_payload.
    payload: base_t.JsonValue


# only for compatibility: callers may still import CronJobArguments.
CronJobArguments = JobArguments


PT = typing.TypeVar("PT")


class Job(ABC, typing.Generic[PT]):
    """Base class for integration jobs, generic over the payload type PT."""

    _unc_job_registered: bool = False

    @property
    @abstractmethod
    def payload_type(self) -> type[PT]:
        """Concrete type the raw JSON payload is parsed into."""
        ...

    @abstractmethod
    def run_outer(self, args: JobArguments) -> JobResult: ...

    @functools.cached_property
    def _cached_payload_parser(self) -> CachedParser[PT]:
        # Built once per job instance and reused across invocations.
        return CachedParser(self.payload_type)

    def get_payload(self, payload: base_t.JsonValue) -> PT:
        """Parse the raw queued payload into this job's payload type PT."""
        return self._cached_payload_parser.parse_storage(payload)


class CronJob(Job[None]):
    """Scheduled job; carries no payload.

    Fix: parameterized as Job[None] (was bare `Job`) so the declared
    payload type matches payload_type() below and stays consistent with
    other payload-less jobs such as GenericUploadJob(Job[None]).
    """

    @property
    def payload_type(self) -> type[None]:
        return type(None)

    def run_outer(self, args: JobArguments) -> JobResult:
        # CronJobArguments is an alias of JobArguments, kept for compatibility.
        assert isinstance(args, CronJobArguments)
        return self.run(args)

    @abstractmethod
    def run(self, args: JobArguments) -> JobResult:
        """Implemented by concrete cron jobs."""
        ...


WPT = typing.TypeVar("WPT")


class WebhookJob(Job[webhook_job_t.WebhookEventPayload], typing.Generic[WPT]):
    """Webhook-triggered job; WPT is the type of the event's inner `data`."""

    @property
    def payload_type(self) -> type[webhook_job_t.WebhookEventPayload]:
        return webhook_job_t.WebhookEventPayload

    @property
    @abstractmethod
    def webhook_payload_type(self) -> type[WPT]:
        """Concrete type of the webhook event's `data` field."""
        ...

    def run_outer(self, args: JobArguments) -> JobResult:
        webhook_body = self.get_payload(args.payload)
        inner_payload = CachedParser(self.webhook_payload_type).parse_api(
            webhook_body.data
        )
        return self.run(args, inner_payload)

    @abstractmethod
    def run(self, args: JobArguments, payload: WPT) -> JobResult: ...
def register_job(cls: type[Job]) -> type[Job]:
|
|
File without changes
|
|
"""Public surface of the queue runner's command-server package.

Re-exports the client helpers (check_health, send_job_queue_message),
the server entrypoint (serve) and the shared command/exception types so
callers can import everything from this package root.
"""

from .command_client import check_health, send_job_queue_message
from .command_server import serve
from .types import (
    CommandEnqueueJob,
    CommandEnqueueJobResponse,
    CommandQueue,
    CommandServerBadResponse,
    CommandServerException,
    CommandServerTimeout,
    CommandTask,
)

__all__: list[str] = [
    "serve",
    "check_health",
    "send_job_queue_message",
    "CommandEnqueueJob",
    "CommandEnqueueJobResponse",
    "CommandTask",
    "CommandQueue",
    "CommandServerTimeout",
    "CommandServerException",
    "CommandServerBadResponse",
]
|
from contextlib import contextmanager
from typing import Generator

import grpc
import simplejson as json

from pkgs.serialization_util import serialize_for_api
from uncountable.integration.queue_runner.command_server.protocol.command_server_pb2 import (
    CheckHealthRequest,
    CheckHealthResult,
    EnqueueJobRequest,
    EnqueueJobResult,
)
from uncountable.integration.queue_runner.command_server.types import (
    CommandServerBadResponse,
    CommandServerTimeout,
)
from uncountable.types import queued_job_t

from .protocol.command_server_pb2_grpc import CommandServerStub

_LOCAL_RPC_HOST = "localhost"
_DEFAULT_MESSAGE_TIMEOUT_SECS = 2


@contextmanager
def command_server_connection(
    host: str, port: int
) -> Generator[CommandServerStub, None, None]:
    """Yield a CommandServerStub on an insecure channel to host:port.

    Raises CommandServerTimeout when the RPC channel is unreachable.
    """
    try:
        with grpc.insecure_channel(f"{host}:{port}") as channel:
            stub = CommandServerStub(channel)
            yield stub
    except grpc._channel._InactiveRpcError as e:
        # NOTE(review): this catches a private grpc class; the public base
        # is grpc.RpcError — confirm intent before widening the catch.
        raise CommandServerTimeout() from e


def send_job_queue_message(
    *,
    job_ref_name: str,
    payload: queued_job_t.QueuedJobPayload,
    host: str = _LOCAL_RPC_HOST,
    port: int,
) -> str:
    """Enqueue a job on the command server and return its queued-job uuid.

    Fix: default host now reuses _LOCAL_RPC_HOST, consistent with
    check_health (previously a duplicated "localhost" literal).

    Raises CommandServerBadResponse if the server reports the enqueue
    did not succeed, CommandServerTimeout if the server is unreachable.
    """
    with command_server_connection(host=host, port=port) as stub:
        request = EnqueueJobRequest(
            job_ref_name=job_ref_name,
            serialized_payload=json.dumps(serialize_for_api(payload)),
        )

        response = stub.EnqueueJob(request, timeout=_DEFAULT_MESSAGE_TIMEOUT_SECS)

        assert isinstance(response, EnqueueJobResult)
        if not response.successfully_queued:
            raise CommandServerBadResponse("queue operation was not successful")

        return response.queued_job_uuid


def check_health(*, host: str = _LOCAL_RPC_HOST, port: int) -> bool:
    """Return True when the command server answers its health check."""
    with command_server_connection(host=host, port=port) as stub:
        request = CheckHealthRequest()

        response = stub.CheckHealth(request, timeout=_DEFAULT_MESSAGE_TIMEOUT_SECS)

        assert isinstance(response, CheckHealthResult)

        return response.success
|
import asyncio

import simplejson as json
from grpc import aio

from pkgs.argument_parser import CachedParser
from uncountable.core.environment import get_local_admin_server_port
from uncountable.integration.queue_runner.command_server.protocol.command_server_pb2 import (
    CheckHealthRequest,
    CheckHealthResult,
    EnqueueJobRequest,
    EnqueueJobResult,
)
from uncountable.integration.queue_runner.command_server.types import (
    CommandEnqueueJob,
    CommandEnqueueJobResponse,
    CommandQueue,
)
from uncountable.types import queued_job_t

from .protocol.command_server_pb2_grpc import (
    CommandServerServicer,
    add_CommandServerServicer_to_server,
)

# Module-level so the parser is built once and shared by all requests.
queued_job_payload_parser = CachedParser(queued_job_t.QueuedJobPayload)


async def serve(command_queue: CommandQueue) -> None:
    """Run the async gRPC command server until termination.

    Incoming EnqueueJob RPCs are translated into CommandEnqueueJob items
    placed on `command_queue`; the handler then awaits the reply on a
    per-request response queue before answering the RPC.
    """
    server = aio.server()

    # Defined inside serve() so the handler closes over command_queue.
    class CommandServerHandler(CommandServerServicer):
        async def EnqueueJob(
            self, request: EnqueueJobRequest, context: aio.ServicerContext
        ) -> EnqueueJobResult:
            payload_json = json.loads(request.serialized_payload)
            payload = queued_job_payload_parser.parse_api(payload_json)
            # Single-use rendezvous queue for this request's response.
            response_queue: asyncio.Queue[CommandEnqueueJobResponse] = asyncio.Queue()
            await command_queue.put(
                CommandEnqueueJob(
                    job_ref_name=request.job_ref_name,
                    payload=payload,
                    response_queue=response_queue,
                )
            )
            # Blocks until the queue runner posts its response.
            response = await response_queue.get()
            # NOTE(review): successfully_queued is hardcoded True — failures
            # presumably surface as exceptions before this point; confirm.
            result = EnqueueJobResult(
                successfully_queued=True, queued_job_uuid=response.queued_job_uuid
            )
            return result

        async def CheckHealth(
            self, request: CheckHealthRequest, context: aio.ServicerContext
        ) -> CheckHealthResult:
            # Liveness only: reaching this handler means the loop is serving.
            return CheckHealthResult(success=True)

    add_CommandServerServicer_to_server(CommandServerHandler(), server)

    listen_addr = f"[::]:{get_local_admin_server_port()}"

    server.add_insecure_port(listen_addr)

    await server.start()
    await server.wait_for_termination()
|
File without changes
|
|
// Local admin RPC interface exposed by the queue runner.
syntax = "proto3";

service CommandServer {
  // Enqueue a job by reference name with a JSON-serialized payload.
  rpc EnqueueJob(EnqueueJobRequest) returns (EnqueueJobResult) {}
  // Liveness probe.
  rpc CheckHealth(CheckHealthRequest) returns (CheckHealthResult) {}
}

message EnqueueJobRequest {
  string job_ref_name = 1;
  // JSON encoding of a QueuedJobPayload.
  string serialized_payload = 2;
}

message EnqueueJobResult {
  bool successfully_queued = 1;
  string queued_job_uuid = 2;
}

message CheckHealthRequest {}

message CheckHealthResult {
  bool success = 1;
}
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
# ruff: noqa
|
|
2
|
+
# -*- coding: utf-8 -*-
|
|
3
|
+
# Generated by the protocol buffer compiler. DO NOT EDIT!
|
|
4
|
+
# source: uncountable/integration/queue_runner/command_server/protocol/command_server.proto
|
|
5
|
+
# Protobuf Python Version: 4.25.1
|
|
6
|
+
"""Generated protocol buffer code."""
|
|
7
|
+
|
|
8
|
+
from google.protobuf import descriptor as _descriptor
|
|
9
|
+
from google.protobuf import descriptor_pool as _descriptor_pool
|
|
10
|
+
from google.protobuf import symbol_database as _symbol_database
|
|
11
|
+
from google.protobuf.internal import builder as _builder
|
|
12
|
+
# @@protoc_insertion_point(imports)
|
|
13
|
+
|
|
14
|
+
_sym_db = _symbol_database.Default()
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
|
|
18
|
+
b'\nQuncountable/integration/queue_runner/command_server/protocol/command_server.proto"E\n\x11\x45nqueueJobRequest\x12\x14\n\x0cjob_ref_name\x18\x01 \x01(\t\x12\x1a\n\x12serialized_payload\x18\x02 \x01(\t"H\n\x10\x45nqueueJobResult\x12\x1b\n\x13successfully_queued\x18\x01 \x01(\x08\x12\x17\n\x0fqueued_job_uuid\x18\x02 \x01(\t"\x14\n\x12\x43heckHealthRequest"$\n\x11\x43heckHealthResult\x12\x0f\n\x07success\x18\x01 \x01(\x08\x32\x80\x01\n\rCommandServer\x12\x35\n\nEnqueueJob\x12\x12.EnqueueJobRequest\x1a\x11.EnqueueJobResult"\x00\x12\x38\n\x0b\x43heckHealth\x12\x13.CheckHealthRequest\x1a\x12.CheckHealthResult"\x00\x62\x06proto3'
|
|
19
|
+
)
|
|
20
|
+
|
|
21
|
+
_globals = globals()
|
|
22
|
+
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
|
|
23
|
+
_builder.BuildTopDescriptorsAndMessages(
|
|
24
|
+
DESCRIPTOR,
|
|
25
|
+
"uncountable.integration.queue_runner.command_server.protocol.command_server_pb2",
|
|
26
|
+
_globals,
|
|
27
|
+
)
|
|
28
|
+
if _descriptor._USE_C_DESCRIPTORS == False:
|
|
29
|
+
DESCRIPTOR._options = None
|
|
30
|
+
_globals["_ENQUEUEJOBREQUEST"]._serialized_start = 85
|
|
31
|
+
_globals["_ENQUEUEJOBREQUEST"]._serialized_end = 154
|
|
32
|
+
_globals["_ENQUEUEJOBRESULT"]._serialized_start = 156
|
|
33
|
+
_globals["_ENQUEUEJOBRESULT"]._serialized_end = 228
|
|
34
|
+
_globals["_CHECKHEALTHREQUEST"]._serialized_start = 230
|
|
35
|
+
_globals["_CHECKHEALTHREQUEST"]._serialized_end = 250
|
|
36
|
+
_globals["_CHECKHEALTHRESULT"]._serialized_start = 252
|
|
37
|
+
_globals["_CHECKHEALTHRESULT"]._serialized_end = 288
|
|
38
|
+
_globals["_COMMANDSERVER"]._serialized_start = 291
|
|
39
|
+
_globals["_COMMANDSERVER"]._serialized_end = 419
|
|
40
|
+
# @@protoc_insertion_point(module_scope)
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
# ruff: noqa
|
|
2
|
+
from google.protobuf import descriptor as _descriptor
|
|
3
|
+
from google.protobuf import message as _message
|
|
4
|
+
from typing import ClassVar as _ClassVar, Optional as _Optional
|
|
5
|
+
|
|
6
|
+
DESCRIPTOR: _descriptor.FileDescriptor
|
|
7
|
+
|
|
8
|
+
class EnqueueJobRequest(_message.Message):
|
|
9
|
+
__slots__ = ("job_ref_name", "serialized_payload")
|
|
10
|
+
JOB_REF_NAME_FIELD_NUMBER: _ClassVar[int]
|
|
11
|
+
SERIALIZED_PAYLOAD_FIELD_NUMBER: _ClassVar[int]
|
|
12
|
+
job_ref_name: str
|
|
13
|
+
serialized_payload: str
|
|
14
|
+
def __init__(
|
|
15
|
+
self,
|
|
16
|
+
job_ref_name: _Optional[str] = ...,
|
|
17
|
+
serialized_payload: _Optional[str] = ...,
|
|
18
|
+
) -> None: ...
|
|
19
|
+
|
|
20
|
+
class EnqueueJobResult(_message.Message):
|
|
21
|
+
__slots__ = ("successfully_queued", "queued_job_uuid")
|
|
22
|
+
SUCCESSFULLY_QUEUED_FIELD_NUMBER: _ClassVar[int]
|
|
23
|
+
QUEUED_JOB_UUID_FIELD_NUMBER: _ClassVar[int]
|
|
24
|
+
successfully_queued: bool
|
|
25
|
+
queued_job_uuid: str
|
|
26
|
+
def __init__(
|
|
27
|
+
self, successfully_queued: bool = ..., queued_job_uuid: _Optional[str] = ...
|
|
28
|
+
) -> None: ...
|
|
29
|
+
|
|
30
|
+
class CheckHealthRequest(_message.Message):
|
|
31
|
+
__slots__ = ()
|
|
32
|
+
def __init__(self) -> None: ...
|
|
33
|
+
|
|
34
|
+
class CheckHealthResult(_message.Message):
|
|
35
|
+
__slots__ = ("success",)
|
|
36
|
+
SUCCESS_FIELD_NUMBER: _ClassVar[int]
|
|
37
|
+
success: bool
|
|
38
|
+
def __init__(self, success: bool = ...) -> None: ...
|
|
@@ -0,0 +1,129 @@
|
|
|
1
|
+
# mypy: disable-error-code="no-untyped-def"
|
|
2
|
+
# ruff: noqa
|
|
3
|
+
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
|
|
4
|
+
"""Client and server classes corresponding to protobuf-defined services."""
|
|
5
|
+
|
|
6
|
+
import grpc
|
|
7
|
+
|
|
8
|
+
from uncountable.integration.queue_runner.command_server.protocol import (
|
|
9
|
+
command_server_pb2 as uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2,
|
|
10
|
+
)
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class CommandServerStub(object):
|
|
14
|
+
"""Missing associated documentation comment in .proto file."""
|
|
15
|
+
|
|
16
|
+
def __init__(self, channel):
|
|
17
|
+
"""Constructor.
|
|
18
|
+
|
|
19
|
+
Args:
|
|
20
|
+
channel: A grpc.Channel.
|
|
21
|
+
"""
|
|
22
|
+
self.EnqueueJob = channel.unary_unary(
|
|
23
|
+
"/CommandServer/EnqueueJob",
|
|
24
|
+
request_serializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.EnqueueJobRequest.SerializeToString,
|
|
25
|
+
response_deserializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.EnqueueJobResult.FromString,
|
|
26
|
+
)
|
|
27
|
+
self.CheckHealth = channel.unary_unary(
|
|
28
|
+
"/CommandServer/CheckHealth",
|
|
29
|
+
request_serializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.CheckHealthRequest.SerializeToString,
|
|
30
|
+
response_deserializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.CheckHealthResult.FromString,
|
|
31
|
+
)
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
class CommandServerServicer(object):
|
|
35
|
+
"""Missing associated documentation comment in .proto file."""
|
|
36
|
+
|
|
37
|
+
def EnqueueJob(self, request, context):
|
|
38
|
+
"""Missing associated documentation comment in .proto file."""
|
|
39
|
+
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
|
|
40
|
+
context.set_details("Method not implemented!")
|
|
41
|
+
raise NotImplementedError("Method not implemented!")
|
|
42
|
+
|
|
43
|
+
def CheckHealth(self, request, context):
|
|
44
|
+
"""Missing associated documentation comment in .proto file."""
|
|
45
|
+
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
|
|
46
|
+
context.set_details("Method not implemented!")
|
|
47
|
+
raise NotImplementedError("Method not implemented!")
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
def add_CommandServerServicer_to_server(servicer, server):
|
|
51
|
+
rpc_method_handlers = {
|
|
52
|
+
"EnqueueJob": grpc.unary_unary_rpc_method_handler(
|
|
53
|
+
servicer.EnqueueJob,
|
|
54
|
+
request_deserializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.EnqueueJobRequest.FromString,
|
|
55
|
+
response_serializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.EnqueueJobResult.SerializeToString,
|
|
56
|
+
),
|
|
57
|
+
"CheckHealth": grpc.unary_unary_rpc_method_handler(
|
|
58
|
+
servicer.CheckHealth,
|
|
59
|
+
request_deserializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.CheckHealthRequest.FromString,
|
|
60
|
+
response_serializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.CheckHealthResult.SerializeToString,
|
|
61
|
+
),
|
|
62
|
+
}
|
|
63
|
+
generic_handler = grpc.method_handlers_generic_handler(
|
|
64
|
+
"CommandServer", rpc_method_handlers
|
|
65
|
+
)
|
|
66
|
+
server.add_generic_rpc_handlers((generic_handler,))
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
# This class is part of an EXPERIMENTAL API.
|
|
70
|
+
class CommandServer(object):
|
|
71
|
+
"""Missing associated documentation comment in .proto file."""
|
|
72
|
+
|
|
73
|
+
@staticmethod
|
|
74
|
+
def EnqueueJob(
|
|
75
|
+
request,
|
|
76
|
+
target,
|
|
77
|
+
options=(),
|
|
78
|
+
channel_credentials=None,
|
|
79
|
+
call_credentials=None,
|
|
80
|
+
insecure=False,
|
|
81
|
+
compression=None,
|
|
82
|
+
wait_for_ready=None,
|
|
83
|
+
timeout=None,
|
|
84
|
+
metadata=None,
|
|
85
|
+
):
|
|
86
|
+
return grpc.experimental.unary_unary(
|
|
87
|
+
request,
|
|
88
|
+
target,
|
|
89
|
+
"/CommandServer/EnqueueJob",
|
|
90
|
+
uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.EnqueueJobRequest.SerializeToString,
|
|
91
|
+
uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.EnqueueJobResult.FromString,
|
|
92
|
+
options,
|
|
93
|
+
channel_credentials,
|
|
94
|
+
insecure,
|
|
95
|
+
call_credentials,
|
|
96
|
+
compression,
|
|
97
|
+
wait_for_ready,
|
|
98
|
+
timeout,
|
|
99
|
+
metadata,
|
|
100
|
+
)
|
|
101
|
+
|
|
102
|
+
@staticmethod
|
|
103
|
+
def CheckHealth(
|
|
104
|
+
request,
|
|
105
|
+
target,
|
|
106
|
+
options=(),
|
|
107
|
+
channel_credentials=None,
|
|
108
|
+
call_credentials=None,
|
|
109
|
+
insecure=False,
|
|
110
|
+
compression=None,
|
|
111
|
+
wait_for_ready=None,
|
|
112
|
+
timeout=None,
|
|
113
|
+
metadata=None,
|
|
114
|
+
):
|
|
115
|
+
return grpc.experimental.unary_unary(
|
|
116
|
+
request,
|
|
117
|
+
target,
|
|
118
|
+
"/CommandServer/CheckHealth",
|
|
119
|
+
uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.CheckHealthRequest.SerializeToString,
|
|
120
|
+
uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.CheckHealthResult.FromString,
|
|
121
|
+
options,
|
|
122
|
+
channel_credentials,
|
|
123
|
+
insecure,
|
|
124
|
+
call_credentials,
|
|
125
|
+
compression,
|
|
126
|
+
wait_for_ready,
|
|
127
|
+
timeout,
|
|
128
|
+
metadata,
|
|
129
|
+
)
|
|
import asyncio
import typing
from dataclasses import dataclass
from enum import StrEnum

from uncountable.types import queued_job_t


class CommandType(StrEnum):
    ENQUEUE_JOB = "enqueue_job"


# RT: type of the response the command's sender awaits.
RT = typing.TypeVar("RT")


@dataclass(kw_only=True)
class CommandBase(typing.Generic[RT]):
    """A command delivered to the queue runner; the sender awaits an RT
    on response_queue."""

    type: CommandType
    # Single-use queue the handler puts its response onto.
    response_queue: asyncio.Queue[RT]


@dataclass(kw_only=True)
class CommandEnqueueJobResponse:
    queued_job_uuid: str


@dataclass(kw_only=True)
class CommandEnqueueJob(CommandBase[CommandEnqueueJobResponse]):
    type: CommandType = CommandType.ENQUEUE_JOB
    job_ref_name: str
    payload: queued_job_t.QueuedJobPayload
    # Re-declared with the concrete response type for this command.
    response_queue: asyncio.Queue[CommandEnqueueJobResponse]


# Union of all command variants (currently a single variant).
_Command = CommandEnqueueJob


CommandQueue = asyncio.Queue[_Command]

CommandTask = asyncio.Task[_Command]


class CommandServerException(Exception):
    # Root of the command-server exception hierarchy.
    pass


class CommandServerTimeout(CommandServerException):
    pass


class CommandServerBadResponse(CommandServerException):
    pass
|
import uuid
from datetime import datetime, timezone

from sqlalchemy import delete, insert, select, update
from sqlalchemy.engine import Engine

from pkgs.argument_parser import CachedParser
from pkgs.serialization_util import serialize_for_storage
from uncountable.integration.db.session import DBSessionMaker
from uncountable.integration.queue_runner.datastore.interface import Datastore
from uncountable.integration.queue_runner.datastore.model import Base, QueuedJob
from uncountable.types import queued_job_t

# Module-level so the parser is built once and shared.
queued_job_payload_parser = CachedParser(queued_job_t.QueuedJobPayload)


class DatastoreSqlite(Datastore):
    """SQLite-backed persistence for the queue runner's job queue."""

    def __init__(self, session_maker: DBSessionMaker) -> None:
        self.session_maker = session_maker
        super().__init__()

    @classmethod
    def setup(cls, engine: Engine) -> None:
        """Create the queue tables if they do not already exist."""
        Base.metadata.create_all(engine)

    def add_job_to_queue(
        self, job_payload: queued_job_t.QueuedJobPayload, job_ref_name: str
    ) -> queued_job_t.QueuedJob:
        """Persist a new queued job and return its in-memory representation.

        NOTE(review): relies on session_maker's context manager to commit —
        confirm it commits on exit.
        """
        with self.session_maker() as session:
            serialized_payload = serialize_for_storage(job_payload)
            queued_job_uuid = str(uuid.uuid4())
            num_attempts = 0
            submitted_at = datetime.now(timezone.utc)
            # Fix: use .key for every column (num_attempts / submitted_at
            # previously passed raw Column objects, inconsistent with the
            # string keys used for id / job_ref_name / payload).
            insert_stmt = insert(QueuedJob).values({
                QueuedJob.id.key: queued_job_uuid,
                QueuedJob.job_ref_name.key: job_ref_name,
                QueuedJob.payload.key: serialized_payload,
                QueuedJob.num_attempts.key: num_attempts,
                QueuedJob.submitted_at.key: submitted_at,
            })
            session.execute(insert_stmt)
            return queued_job_t.QueuedJob(
                queued_job_uuid=queued_job_uuid,
                job_ref_name=job_ref_name,
                payload=job_payload,
                submitted_at=submitted_at,
                num_attempts=num_attempts,
            )

    def increment_num_attempts(self, queued_job_uuid: str) -> int:
        """Atomically bump the attempt counter and return the new value."""
        with self.session_maker() as session:
            update_stmt = (
                update(QueuedJob)
                .values({QueuedJob.num_attempts.key: QueuedJob.num_attempts + 1})
                .filter(QueuedJob.id == queued_job_uuid)
            )
            session.execute(update_stmt)
            session.flush()
            # IMPROVE: python3.12's sqlite does not support the RETURNING clause
            select_stmt = select(QueuedJob.num_attempts).filter(
                QueuedJob.id == queued_job_uuid
            )
            return int(session.execute(select_stmt).one().num_attempts)

    def remove_job_from_queue(self, queued_job_uuid: str) -> None:
        """Delete the queued job row, if present."""
        with self.session_maker() as session:
            delete_stmt = delete(QueuedJob).filter(QueuedJob.id == queued_job_uuid)
            session.execute(delete_stmt)

    def load_job_queue(self) -> list[queued_job_t.QueuedJob]:
        """Load all persisted jobs, oldest submission first."""
        with self.session_maker() as session:
            select_stmt = select(
                QueuedJob.id,
                QueuedJob.payload,
                QueuedJob.num_attempts,
                QueuedJob.job_ref_name,
                QueuedJob.submitted_at,
            ).order_by(QueuedJob.submitted_at)

            queued_jobs: list[queued_job_t.QueuedJob] = []
            for row in session.execute(select_stmt):
                parsed_payload = queued_job_payload_parser.parse_storage(row.payload)
                queued_jobs.append(
                    queued_job_t.QueuedJob(
                        queued_job_uuid=row.id,
                        job_ref_name=row.job_ref_name,
                        num_attempts=row.num_attempts,
                        submitted_at=row.submitted_at,
                        payload=parsed_payload,
                    )
                )

            return queued_jobs
|