UncountablePythonSDK-0.0.69-py3-none-any.whl → UncountablePythonSDK-0.0.71-py3-none-any.whl

This diff compares the contents of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.

Note: this version of UncountablePythonSDK has been flagged as potentially problematic.

Files changed (39)
  1. {UncountablePythonSDK-0.0.69.dist-info → UncountablePythonSDK-0.0.71.dist-info}/METADATA +3 -1
  2. {UncountablePythonSDK-0.0.69.dist-info → UncountablePythonSDK-0.0.71.dist-info}/RECORD +39 -18
  3. examples/integration-server/jobs/materials_auto/example_cron.py +2 -2
  4. uncountable/core/environment.py +5 -1
  5. uncountable/integration/cli.py +1 -0
  6. uncountable/integration/cron.py +12 -28
  7. uncountable/integration/db/connect.py +12 -2
  8. uncountable/integration/db/session.py +25 -0
  9. uncountable/integration/entrypoint.py +6 -6
  10. uncountable/integration/executors/generic_upload_executor.py +5 -1
  11. uncountable/integration/job.py +44 -17
  12. uncountable/integration/queue_runner/__init__.py +0 -0
  13. uncountable/integration/queue_runner/command_server/__init__.py +24 -0
  14. uncountable/integration/queue_runner/command_server/command_client.py +68 -0
  15. uncountable/integration/queue_runner/command_server/command_server.py +64 -0
  16. uncountable/integration/queue_runner/command_server/protocol/__init__.py +0 -0
  17. uncountable/integration/queue_runner/command_server/protocol/command_server.proto +22 -0
  18. uncountable/integration/queue_runner/command_server/protocol/command_server_pb2.py +40 -0
  19. uncountable/integration/queue_runner/command_server/protocol/command_server_pb2.pyi +38 -0
  20. uncountable/integration/queue_runner/command_server/protocol/command_server_pb2_grpc.py +129 -0
  21. uncountable/integration/queue_runner/command_server/types.py +52 -0
  22. uncountable/integration/queue_runner/datastore/__init__.py +3 -0
  23. uncountable/integration/queue_runner/datastore/datastore_sqlite.py +93 -0
  24. uncountable/integration/queue_runner/datastore/interface.py +19 -0
  25. uncountable/integration/queue_runner/datastore/model.py +17 -0
  26. uncountable/integration/queue_runner/job_scheduler.py +128 -0
  27. uncountable/integration/queue_runner/queue_runner.py +26 -0
  28. uncountable/integration/queue_runner/types.py +7 -0
  29. uncountable/integration/queue_runner/worker.py +109 -0
  30. uncountable/integration/scan_profiles.py +2 -0
  31. uncountable/integration/scheduler.py +40 -3
  32. uncountable/integration/webhook_server/entrypoint.py +27 -31
  33. uncountable/types/__init__.py +2 -0
  34. uncountable/types/api/recipes/get_recipes_data.py +1 -0
  35. uncountable/types/api/recipes/set_recipe_outputs.py +2 -0
  36. uncountable/types/queued_job.py +16 -0
  37. uncountable/types/queued_job_t.py +107 -0
  38. {UncountablePythonSDK-0.0.69.dist-info → UncountablePythonSDK-0.0.71.dist-info}/WHEEL +0 -0
  39. {UncountablePythonSDK-0.0.69.dist-info → UncountablePythonSDK-0.0.71.dist-info}/top_level.txt +0 -0

uncountable/integration/queue_runner/command_server/protocol/command_server_pb2.pyi
@@ -0,0 +1,38 @@
+ # ruff: noqa
+ from google.protobuf import descriptor as _descriptor
+ from google.protobuf import message as _message
+ from typing import ClassVar as _ClassVar, Optional as _Optional
+
+ DESCRIPTOR: _descriptor.FileDescriptor
+
+ class EnqueueJobRequest(_message.Message):
+     __slots__ = ("job_ref_name", "serialized_payload")
+     JOB_REF_NAME_FIELD_NUMBER: _ClassVar[int]
+     SERIALIZED_PAYLOAD_FIELD_NUMBER: _ClassVar[int]
+     job_ref_name: str
+     serialized_payload: str
+     def __init__(
+         self,
+         job_ref_name: _Optional[str] = ...,
+         serialized_payload: _Optional[str] = ...,
+     ) -> None: ...
+
+ class EnqueueJobResult(_message.Message):
+     __slots__ = ("successfully_queued", "queued_job_uuid")
+     SUCCESSFULLY_QUEUED_FIELD_NUMBER: _ClassVar[int]
+     QUEUED_JOB_UUID_FIELD_NUMBER: _ClassVar[int]
+     successfully_queued: bool
+     queued_job_uuid: str
+     def __init__(
+         self, successfully_queued: bool = ..., queued_job_uuid: _Optional[str] = ...
+     ) -> None: ...
+
+ class CheckHealthRequest(_message.Message):
+     __slots__ = ()
+     def __init__(self) -> None: ...
+
+ class CheckHealthResult(_message.Message):
+     __slots__ = ("success",)
+     SUCCESS_FIELD_NUMBER: _ClassVar[int]
+     success: bool
+     def __init__(self, success: bool = ...) -> None: ...
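
These stubs type the four messages the command server exchanges. A minimal sketch of constructing one of them (the module path comes from the diff; the job name is an illustrative value):

    from uncountable.integration.queue_runner.command_server.protocol import (
        command_server_pb2,
    )

    # Both fields are optional proto3 strings.
    request = command_server_pb2.EnqueueJobRequest(
        job_ref_name="example_cron",  # hypothetical job id
        serialized_payload="{}",
    )
    assert request.job_ref_name == "example_cron"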

uncountable/integration/queue_runner/command_server/protocol/command_server_pb2_grpc.py
@@ -0,0 +1,129 @@
+ # mypy: disable-error-code="no-untyped-def"
+ # ruff: noqa
+ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+ """Client and server classes corresponding to protobuf-defined services."""
+
+ import grpc
+
+ from uncountable.integration.queue_runner.command_server.protocol import (
+     command_server_pb2 as uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2,
+ )
+
+
+ class CommandServerStub(object):
+     """Missing associated documentation comment in .proto file."""
+
+     def __init__(self, channel):
+         """Constructor.
+
+         Args:
+             channel: A grpc.Channel.
+         """
+         self.EnqueueJob = channel.unary_unary(
+             "/CommandServer/EnqueueJob",
+             request_serializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.EnqueueJobRequest.SerializeToString,
+             response_deserializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.EnqueueJobResult.FromString,
+         )
+         self.CheckHealth = channel.unary_unary(
+             "/CommandServer/CheckHealth",
+             request_serializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.CheckHealthRequest.SerializeToString,
+             response_deserializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.CheckHealthResult.FromString,
+         )
+
+
+ class CommandServerServicer(object):
+     """Missing associated documentation comment in .proto file."""
+
+     def EnqueueJob(self, request, context):
+         """Missing associated documentation comment in .proto file."""
+         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+         context.set_details("Method not implemented!")
+         raise NotImplementedError("Method not implemented!")
+
+     def CheckHealth(self, request, context):
+         """Missing associated documentation comment in .proto file."""
+         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+         context.set_details("Method not implemented!")
+         raise NotImplementedError("Method not implemented!")
+
+
+ def add_CommandServerServicer_to_server(servicer, server):
+     rpc_method_handlers = {
+         "EnqueueJob": grpc.unary_unary_rpc_method_handler(
+             servicer.EnqueueJob,
+             request_deserializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.EnqueueJobRequest.FromString,
+             response_serializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.EnqueueJobResult.SerializeToString,
+         ),
+         "CheckHealth": grpc.unary_unary_rpc_method_handler(
+             servicer.CheckHealth,
+             request_deserializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.CheckHealthRequest.FromString,
+             response_serializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.CheckHealthResult.SerializeToString,
+         ),
+     }
+     generic_handler = grpc.method_handlers_generic_handler(
+         "CommandServer", rpc_method_handlers
+     )
+     server.add_generic_rpc_handlers((generic_handler,))
+
+
+ # This class is part of an EXPERIMENTAL API.
+ class CommandServer(object):
+     """Missing associated documentation comment in .proto file."""
+
+     @staticmethod
+     def EnqueueJob(
+         request,
+         target,
+         options=(),
+         channel_credentials=None,
+         call_credentials=None,
+         insecure=False,
+         compression=None,
+         wait_for_ready=None,
+         timeout=None,
+         metadata=None,
+     ):
+         return grpc.experimental.unary_unary(
+             request,
+             target,
+             "/CommandServer/EnqueueJob",
+             uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.EnqueueJobRequest.SerializeToString,
+             uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.EnqueueJobResult.FromString,
+             options,
+             channel_credentials,
+             insecure,
+             call_credentials,
+             compression,
+             wait_for_ready,
+             timeout,
+             metadata,
+         )
+
+     @staticmethod
+     def CheckHealth(
+         request,
+         target,
+         options=(),
+         channel_credentials=None,
+         call_credentials=None,
+         insecure=False,
+         compression=None,
+         wait_for_ready=None,
+         timeout=None,
+         metadata=None,
+     ):
+         return grpc.experimental.unary_unary(
+             request,
+             target,
+             "/CommandServer/CheckHealth",
+             uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.CheckHealthRequest.SerializeToString,
+             uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.CheckHealthResult.FromString,
+             options,
+             channel_credentials,
+             insecure,
+             call_credentials,
+             compression,
+             wait_for_ready,
+             timeout,
+             metadata,
+         )
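
The release also ships its own wrapper around this stub (command_client.py, added in the same file list), but the generated client can be exercised directly. A sketch, assuming a CommandServer is listening on localhost:50051 (the address is an assumption; the diff does not show where the server binds):

    import grpc

    from uncountable.integration.queue_runner.command_server.protocol import (
        command_server_pb2,
        command_server_pb2_grpc,
    )

    # Port 50051 is illustrative only.
    with grpc.insecure_channel("localhost:50051") as channel:
        stub = command_server_pb2_grpc.CommandServerStub(channel)
        health = stub.CheckHealth(command_server_pb2.CheckHealthRequest())
        print(health.success)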

uncountable/integration/queue_runner/command_server/types.py
@@ -0,0 +1,52 @@
+ import asyncio
+ import typing
+ from dataclasses import dataclass
+ from enum import StrEnum
+
+ from uncountable.types import queued_job_t
+
+
+ class CommandType(StrEnum):
+     ENQUEUE_JOB = "enqueue_job"
+
+
+ RT = typing.TypeVar("RT")
+
+
+ @dataclass(kw_only=True)
+ class CommandBase(typing.Generic[RT]):
+     type: CommandType
+     response_queue: asyncio.Queue[RT]
+
+
+ @dataclass(kw_only=True)
+ class CommandEnqueueJobResponse:
+     queued_job_uuid: str
+
+
+ @dataclass(kw_only=True)
+ class CommandEnqueueJob(CommandBase[CommandEnqueueJobResponse]):
+     type: CommandType = CommandType.ENQUEUE_JOB
+     job_ref_name: str
+     payload: queued_job_t.QueuedJobPayload
+     response_queue: asyncio.Queue[CommandEnqueueJobResponse]
+
+
+ _Command = CommandEnqueueJob
+
+
+ CommandQueue = asyncio.Queue[_Command]
+
+ CommandTask = asyncio.Task[_Command]
+
+
+ class CommandServerException(Exception):
+     pass
+
+
+ class CommandServerTimeout(CommandServerException):
+     pass
+
+
+ class CommandServerBadResponse(CommandServerException):
+     pass
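
Each command carries its own asyncio.Queue for the reply, so a caller can await the scheduler's response without shared state. A sketch of submitting a command, assuming an already-built QueuedJobPayload and a hypothetical job id:

    import asyncio

    from uncountable.integration.queue_runner.command_server.types import (
        CommandEnqueueJob,
        CommandEnqueueJobResponse,
        CommandQueue,
    )
    from uncountable.types import queued_job_t


    async def enqueue_and_wait(
        command_queue: CommandQueue, payload: queued_job_t.QueuedJobPayload
    ) -> str:
        command = CommandEnqueueJob(
            job_ref_name="example_cron",  # hypothetical job id
            payload=payload,
            response_queue=asyncio.Queue(),
        )
        await command_queue.put(command)
        # The scheduler puts exactly one response on the command's own queue
        # after persisting the job (see job_scheduler.py below).
        response: CommandEnqueueJobResponse = await command.response_queue.get()
        return response.queued_job_uuid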

uncountable/integration/queue_runner/datastore/__init__.py
@@ -0,0 +1,3 @@
+ from .datastore_sqlite import DatastoreSqlite
+
+ __all__: list[str] = ["DatastoreSqlite"]

uncountable/integration/queue_runner/datastore/datastore_sqlite.py
@@ -0,0 +1,93 @@
+ import uuid
+ from datetime import datetime, timezone
+
+ from sqlalchemy import delete, insert, select, update
+ from sqlalchemy.engine import Engine
+
+ from pkgs.argument_parser import CachedParser
+ from pkgs.serialization_util import serialize_for_storage
+ from uncountable.integration.db.session import DBSessionMaker
+ from uncountable.integration.queue_runner.datastore.interface import Datastore
+ from uncountable.integration.queue_runner.datastore.model import Base, QueuedJob
+ from uncountable.types import queued_job_t
+
+ queued_job_payload_parser = CachedParser(queued_job_t.QueuedJobPayload)
+
+
+ class DatastoreSqlite(Datastore):
+     def __init__(self, session_maker: DBSessionMaker) -> None:
+         self.session_maker = session_maker
+         super().__init__()
+
+     @classmethod
+     def setup(cls, engine: Engine) -> None:
+         Base.metadata.create_all(engine)
+
+     def add_job_to_queue(
+         self, job_payload: queued_job_t.QueuedJobPayload, job_ref_name: str
+     ) -> queued_job_t.QueuedJob:
+         with self.session_maker() as session:
+             serialized_payload = serialize_for_storage(job_payload)
+             queued_job_uuid = str(uuid.uuid4())
+             num_attempts = 0
+             submitted_at = datetime.now(timezone.utc)
+             insert_stmt = insert(QueuedJob).values({
+                 QueuedJob.id.key: queued_job_uuid,
+                 QueuedJob.job_ref_name.key: job_ref_name,
+                 QueuedJob.payload.key: serialized_payload,
+                 QueuedJob.num_attempts: num_attempts,
+                 QueuedJob.submitted_at: submitted_at,
+             })
+             session.execute(insert_stmt)
+             return queued_job_t.QueuedJob(
+                 queued_job_uuid=queued_job_uuid,
+                 job_ref_name=job_ref_name,
+                 payload=job_payload,
+                 submitted_at=submitted_at,
+                 num_attempts=num_attempts,
+             )
+
+     def increment_num_attempts(self, queued_job_uuid: str) -> int:
+         with self.session_maker() as session:
+             update_stmt = (
+                 update(QueuedJob)
+                 .values({QueuedJob.num_attempts.key: QueuedJob.num_attempts + 1})
+                 .filter(QueuedJob.id == queued_job_uuid)
+             )
+             session.execute(update_stmt)
+             session.flush()
+             # IMPROVE: python3.12's sqlite does not support the RETURNING clause
+             select_stmt = select(QueuedJob.num_attempts).filter(
+                 QueuedJob.id == queued_job_uuid
+             )
+             return int(session.execute(select_stmt).one().num_attempts)
+
+     def remove_job_from_queue(self, queued_job_uuid: str) -> None:
+         with self.session_maker() as session:
+             delete_stmt = delete(QueuedJob).filter(QueuedJob.id == queued_job_uuid)
+             session.execute(delete_stmt)
+
+     def load_job_queue(self) -> list[queued_job_t.QueuedJob]:
+         with self.session_maker() as session:
+             select_stmt = select(
+                 QueuedJob.id,
+                 QueuedJob.payload,
+                 QueuedJob.num_attempts,
+                 QueuedJob.job_ref_name,
+                 QueuedJob.submitted_at,
+             ).order_by(QueuedJob.submitted_at)
+
+             queued_jobs: list[queued_job_t.QueuedJob] = []
+             for row in session.execute(select_stmt):
+                 parsed_payload = queued_job_payload_parser.parse_storage(row.payload)
+                 queued_jobs.append(
+                     queued_job_t.QueuedJob(
+                         queued_job_uuid=row.id,
+                         job_ref_name=row.job_ref_name,
+                         num_attempts=row.num_attempts,
+                         submitted_at=row.submitted_at,
+                         payload=parsed_payload,
+                     )
+                 )
+
+             return queued_jobs
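
Wiring the store up mirrors what start_scheduler does in job_scheduler.py below. A sketch, with a plain local SQLite engine standing in for the one create_db_engine(IntegrationDBService.RUNNER) would normally provide:

    from sqlalchemy import create_engine

    from uncountable.integration.db.session import get_session_maker
    from uncountable.integration.queue_runner.datastore import DatastoreSqlite

    engine = create_engine("sqlite:///queue.db")  # stand-in engine
    DatastoreSqlite.setup(engine)  # creates the queued_jobs table

    datastore = DatastoreSqlite(get_session_maker(engine))
    print(datastore.load_job_queue())  # [] on a fresh database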

uncountable/integration/queue_runner/datastore/interface.py
@@ -0,0 +1,19 @@
+ from abc import ABC, abstractmethod
+
+ from uncountable.types import queued_job_t
+
+
+ class Datastore(ABC):
+     @abstractmethod
+     def add_job_to_queue(
+         self, job_payload: queued_job_t.QueuedJobPayload, job_ref_name: str
+     ) -> queued_job_t.QueuedJob: ...
+
+     @abstractmethod
+     def remove_job_from_queue(self, queued_job_uuid: str) -> None: ...
+
+     @abstractmethod
+     def increment_num_attempts(self, queued_job_uuid: str) -> int: ...
+
+     @abstractmethod
+     def load_job_queue(self) -> list[queued_job_t.QueuedJob]: ...
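
The interface is small enough that a test double is cheap to write. A hypothetical in-memory implementation, not part of the SDK, satisfying the same contract (QueuedJob is constructed with the same keyword arguments the SQLite store uses):

    import uuid
    from datetime import datetime, timezone

    from uncountable.integration.queue_runner.datastore.interface import Datastore
    from uncountable.types import queued_job_t


    class DatastoreMemory(Datastore):
        """Hypothetical in-memory store for tests; not part of the SDK."""

        def __init__(self) -> None:
            self._jobs: dict[str, queued_job_t.QueuedJob] = {}
            self._attempts: dict[str, int] = {}

        def add_job_to_queue(
            self, job_payload: queued_job_t.QueuedJobPayload, job_ref_name: str
        ) -> queued_job_t.QueuedJob:
            job = queued_job_t.QueuedJob(
                queued_job_uuid=str(uuid.uuid4()),
                job_ref_name=job_ref_name,
                payload=job_payload,
                submitted_at=datetime.now(timezone.utc),
                num_attempts=0,
            )
            self._jobs[job.queued_job_uuid] = job
            return job

        def remove_job_from_queue(self, queued_job_uuid: str) -> None:
            self._jobs.pop(queued_job_uuid, None)
            self._attempts.pop(queued_job_uuid, None)

        def increment_num_attempts(self, queued_job_uuid: str) -> int:
            self._attempts[queued_job_uuid] = self._attempts.get(queued_job_uuid, 0) + 1
            return self._attempts[queued_job_uuid]

        def load_job_queue(self) -> list[queued_job_t.QueuedJob]:
            # Match the SQLite store's submitted_at ordering.
            return sorted(self._jobs.values(), key=lambda job: job.submitted_at)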

uncountable/integration/queue_runner/datastore/model.py
@@ -0,0 +1,17 @@
+ from sqlalchemy import JSON, BigInteger, Column, DateTime, Text
+ from sqlalchemy.orm import declarative_base
+ from sqlalchemy.sql import func
+
+ Base = declarative_base()
+
+
+ class QueuedJob(Base):
+     __tablename__ = "queued_jobs"
+
+     id = Column(Text, primary_key=True)
+     job_ref_name = Column(Text, nullable=False, index=True)
+     submitted_at = Column(
+         DateTime(timezone=True), server_default=func.current_timestamp(), nullable=False
+     )
+     payload = Column(JSON, nullable=False)
+     num_attempts = Column(BigInteger, nullable=False, default=0, server_default="0")

uncountable/integration/queue_runner/job_scheduler.py
@@ -0,0 +1,128 @@
+ import asyncio
+ import typing
+ from concurrent.futures import ProcessPoolExecutor
+ from dataclasses import dataclass
+
+ from opentelemetry.trace import get_current_span
+
+ from uncountable.integration.db.connect import IntegrationDBService, create_db_engine
+ from uncountable.integration.db.session import get_session_maker
+ from uncountable.integration.queue_runner.command_server import (
+     CommandEnqueueJobResponse,
+     CommandQueue,
+     CommandTask,
+ )
+ from uncountable.integration.queue_runner.datastore import DatastoreSqlite
+ from uncountable.integration.queue_runner.datastore.interface import Datastore
+ from uncountable.integration.queue_runner.worker import Worker
+ from uncountable.integration.scan_profiles import load_profiles
+ from uncountable.integration.telemetry import Logger
+ from uncountable.types import job_definition_t, queued_job_t
+
+ from .types import ResultQueue, ResultTask
+
+ _MAX_JOB_WORKERS = 5
+
+
+ @dataclass(kw_only=True, frozen=True)
+ class JobListenerKey:
+     profile_name: str
+     subqueue_name: str = "default"
+
+
+ def _get_job_worker_key(
+     job_definition: job_definition_t.JobDefinition, profile_name: str
+ ) -> JobListenerKey:
+     return JobListenerKey(profile_name=profile_name)
+
+
+ def on_worker_crash(
+     worker_key: JobListenerKey,
+ ) -> typing.Callable[[asyncio.Task], None]:
+     def hook(task: asyncio.Task) -> None:
+         raise Exception(
+             f"worker {worker_key.profile_name}_{worker_key.subqueue_name} crashed unexpectedly"
+         )
+
+     return hook
+
+
+ def _start_workers(
+     process_pool: ProcessPoolExecutor, result_queue: ResultQueue, datastore: Datastore
+ ) -> dict[str, Worker]:
+     profiles = load_profiles()
+     job_queue_worker_lookup: dict[JobListenerKey, Worker] = {}
+     job_worker_lookup: dict[str, Worker] = {}
+     job_definition_lookup: dict[str, job_definition_t.JobDefinition] = {}
+     for profile in profiles:
+         for job_definition in profile.definition.jobs:
+             job_definition_lookup[job_definition.id] = job_definition
+             job_worker_key = _get_job_worker_key(job_definition, profile.name)
+             if job_worker_key not in job_queue_worker_lookup:
+                 worker = Worker(
+                     process_pool=process_pool,
+                     listen_queue=asyncio.Queue(),
+                     result_queue=result_queue,
+                     datastore=datastore,
+                 )
+                 task = asyncio.create_task(worker.run_worker_loop())
+                 task.add_done_callback(on_worker_crash(job_worker_key))
+                 job_queue_worker_lookup[job_worker_key] = worker
+             job_worker_lookup[job_definition.id] = job_queue_worker_lookup[
+                 job_worker_key
+             ]
+     return job_worker_lookup
+
+
+ async def start_scheduler(command_queue: CommandQueue) -> None:
+     logger = Logger(get_current_span())
+     result_queue: ResultQueue = asyncio.Queue()
+     engine = create_db_engine(IntegrationDBService.RUNNER)
+     session_maker = get_session_maker(engine)
+
+     datastore = DatastoreSqlite(session_maker)
+     datastore.setup(engine)
+
+     with ProcessPoolExecutor(max_workers=_MAX_JOB_WORKERS) as process_pool:
+         job_worker_lookup = _start_workers(
+             process_pool, result_queue, datastore=datastore
+         )
+
+         queued_jobs = datastore.load_job_queue()
+
+         async def enqueue_queued_job(queued_job: queued_job_t.QueuedJob) -> None:
+             try:
+                 worker = job_worker_lookup[queued_job.job_ref_name]
+             except KeyError as e:
+                 logger.log_exception(e)
+                 datastore.remove_job_from_queue(queued_job.queued_job_uuid)
+                 return
+             await worker.listen_queue.put(queued_job)
+
+         for queued_job in queued_jobs:
+             await enqueue_queued_job(queued_job)
+
+         result_task: ResultTask = asyncio.create_task(result_queue.get())
+         command_task: CommandTask = asyncio.create_task(command_queue.get())
+         while True:
+             finished, _ = await asyncio.wait(
+                 [result_task, command_task], return_when=asyncio.FIRST_COMPLETED
+             )
+
+             for task in finished:
+                 if task == command_task:
+                     command = command_task.result()
+                     queued_job = datastore.add_job_to_queue(
+                         job_payload=command.payload, job_ref_name=command.job_ref_name
+                     )
+                     await command.response_queue.put(
+                         CommandEnqueueJobResponse(
+                             queued_job_uuid=queued_job.queued_job_uuid
+                         )
+                     )
+                     await enqueue_queued_job(queued_job)
+                     command_task = asyncio.create_task(command_queue.get())
+                 elif task == result_task:
+                     queued_job_result = result_task.result()
+                     datastore.remove_job_from_queue(queued_job_result.queued_job_uuid)
+                     result_task = asyncio.create_task(result_queue.get())
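
The scheduler's main loop multiplexes two queues by wrapping each get() in a task, waiting with FIRST_COMPLETED, and re-arming only the task that fired. A stripped-down sketch of the same pattern in isolation:

    import asyncio


    async def multiplex(queue_a: asyncio.Queue, queue_b: asyncio.Queue) -> None:
        task_a = asyncio.create_task(queue_a.get())
        task_b = asyncio.create_task(queue_b.get())
        while True:
            done, _ = await asyncio.wait(
                [task_a, task_b], return_when=asyncio.FIRST_COMPLETED
            )
            if task_a in done:
                print("a:", task_a.result())
                task_a = asyncio.create_task(queue_a.get())  # re-arm only a
            if task_b in done:
                print("b:", task_b.result())
                task_b = asyncio.create_task(queue_b.get())  # re-arm only b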

uncountable/integration/queue_runner/queue_runner.py
@@ -0,0 +1,26 @@
+ import asyncio
+
+ from uncountable.integration.queue_runner.command_server import serve
+ from uncountable.integration.queue_runner.command_server.types import CommandQueue
+ from uncountable.integration.queue_runner.job_scheduler import start_scheduler
+
+
+ async def queue_runner_loop() -> None:
+     command_queue: CommandQueue = asyncio.Queue()
+
+     command_server = asyncio.create_task(serve(command_queue))
+
+     scheduler = asyncio.create_task(start_scheduler(command_queue))
+
+     await scheduler
+     await command_server
+
+
+ def start_queue_runner() -> None:
+     loop = asyncio.new_event_loop()
+     loop.run_until_complete(queue_runner_loop())
+     loop.close()
+
+
+ if __name__ == "__main__":
+     start_queue_runner()

uncountable/integration/queue_runner/types.py
@@ -0,0 +1,7 @@
+ from asyncio import Queue, Task
+
+ from uncountable.types import queued_job_t
+
+ ListenQueue = Queue[queued_job_t.QueuedJob]
+ ResultQueue = Queue[queued_job_t.QueuedJobResult]
+ ResultTask = Task[queued_job_t.QueuedJobResult]

uncountable/integration/queue_runner/worker.py
@@ -0,0 +1,109 @@
+ import asyncio
+ from concurrent.futures import ProcessPoolExecutor
+ from dataclasses import dataclass
+
+ from uncountable.core.async_batch import AsyncBatchProcessor
+ from uncountable.integration.construct_client import construct_uncountable_client
+ from uncountable.integration.executors.executors import execute_job
+ from uncountable.integration.job import JobArguments
+ from uncountable.integration.queue_runner.datastore.interface import Datastore
+ from uncountable.integration.queue_runner.types import ListenQueue, ResultQueue
+ from uncountable.integration.scan_profiles import load_profiles
+ from uncountable.integration.telemetry import JobLogger, get_otel_tracer
+ from uncountable.types import base_t, job_definition_t, queued_job_t
+
+
+ class Worker:
+     def __init__(
+         self,
+         *,
+         process_pool: ProcessPoolExecutor,
+         listen_queue: ListenQueue,
+         result_queue: ResultQueue,
+         datastore: Datastore,
+     ) -> None:
+         self.process_pool = process_pool
+         self.listen_queue = listen_queue
+         self.result_queue = result_queue
+         self.datastore = datastore
+
+     async def run_worker_loop(self) -> None:
+         while True:
+             queued_job = await self.listen_queue.get()
+             self.datastore.increment_num_attempts(queued_job.queued_job_uuid)
+             loop = asyncio.get_event_loop()
+             result = await loop.run_in_executor(
+                 self.process_pool, run_queued_job, queued_job
+             )
+             assert isinstance(result, job_definition_t.JobResult)
+             await self.result_queue.put(
+                 queued_job_t.QueuedJobResult(
+                     job_result=result, queued_job_uuid=queued_job.queued_job_uuid
+                 )
+             )
+
+
+ @dataclass(kw_only=True)
+ class RegisteredJobDetails:
+     profile_metadata: job_definition_t.ProfileMetadata
+     job_definition: job_definition_t.JobDefinition
+
+
+ def get_registered_job_details(job_ref_name: str) -> RegisteredJobDetails:
+     profiles = load_profiles()
+     for profile in profiles:
+         for job_definition in profile.definition.jobs:
+             if job_definition.id == job_ref_name:
+                 return RegisteredJobDetails(
+                     profile_metadata=job_definition_t.ProfileMetadata(
+                         name=profile.name,
+                         base_url=profile.definition.base_url,
+                         auth_retrieval=profile.definition.auth_retrieval,
+                         client_options=profile.definition.client_options,
+                     ),
+                     job_definition=job_definition,
+                 )
+     raise Exception(f"profile not found for job {job_ref_name}")
+
+
+ def _resolve_queued_job_payload(queued_job: queued_job_t.QueuedJob) -> base_t.JsonValue:
+     match queued_job.payload.invocation_context:
+         case queued_job_t.InvocationContextCron():
+             return None
+         case queued_job_t.InvocationContextManual():
+             return None
+         case queued_job_t.InvocationContextWebhook():
+             return queued_job.payload.invocation_context.webhook_payload
+
+
+ def run_queued_job(
+     queued_job: queued_job_t.QueuedJob,
+ ) -> job_definition_t.JobResult:
+     with get_otel_tracer().start_as_current_span(name="run_queued_job") as span:
+         job_details = get_registered_job_details(queued_job.job_ref_name)
+         job_logger = JobLogger(
+             base_span=span,
+             profile_metadata=job_details.profile_metadata,
+             job_definition=job_details.job_definition,
+         )
+         client = construct_uncountable_client(
+             profile_meta=job_details.profile_metadata, job_logger=job_logger
+         )
+         batch_processor = AsyncBatchProcessor(client=client)
+
+         payload = _resolve_queued_job_payload(queued_job)
+
+         args = JobArguments(
+             job_definition=job_details.job_definition,
+             client=client,
+             batch_processor=batch_processor,
+             profile_metadata=job_details.profile_metadata,
+             logger=job_logger,
+             payload=payload,
+         )
+
+         return execute_job(
+             args=args,
+             profile_metadata=job_details.profile_metadata,
+             job_definition=job_details.job_definition,
+         )
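
run_worker_loop keeps the event loop responsive by pushing each job into the process pool via run_in_executor. The same pattern in a self-contained sketch (cpu_bound stands in for run_queued_job):

    import asyncio
    from concurrent.futures import ProcessPoolExecutor


    def cpu_bound(n: int) -> int:
        # Must be a module-level function so the pool can pickle it.
        return sum(i * i for i in range(n))


    async def main() -> None:
        loop = asyncio.get_running_loop()
        with ProcessPoolExecutor(max_workers=2) as pool:
            # Runs in a child process; the event loop stays free meanwhile.
            result = await loop.run_in_executor(pool, cpu_bound, 10_000)
            print(result)


    if __name__ == "__main__":
        asyncio.run(main())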

uncountable/integration/scan_profiles.py
@@ -1,3 +1,4 @@
+ import functools
  import os
  from dataclasses import dataclass
  from importlib import resources
@@ -14,6 +15,7 @@ class ProfileDetails:
      definition: job_definition_t.ProfileDefinition


+ @functools.cache
  def load_profiles() -> list[ProfileDetails]:
      profiles_module = os.environ["UNC_PROFILES_MODULE"]
      profiles = [
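
The new functools.cache decorator memoizes load_profiles, so repeated calls within a process (the scheduler's _start_workers plus every per-job lookup in worker.py) reuse one scan of the profiles module. The caching behavior, shown with a stand-in function:

    import functools


    @functools.cache
    def load_profiles_stub() -> list[str]:
        # Stand-in for load_profiles(); the body executes only once.
        print("scanning profiles module...")
        return ["profile_a"]

    assert load_profiles_stub() is load_profiles_stub()  # second call is cached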