UncountablePythonSDK 0.0.126__py3-none-any.whl → 0.0.142.dev0__py3-none-any.whl

This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.

Potentially problematic release: this version of UncountablePythonSDK might be problematic.

Files changed (84)
  1. docs/requirements.txt +1 -1
  2. examples/integration-server/jobs/materials_auto/example_cron.py +1 -1
  3. examples/integration-server/jobs/materials_auto/example_instrument.py +68 -38
  4. examples/integration-server/jobs/materials_auto/example_parse.py +140 -0
  5. examples/integration-server/jobs/materials_auto/example_predictions.py +61 -0
  6. examples/integration-server/jobs/materials_auto/example_runsheet_wh.py +57 -16
  7. examples/integration-server/jobs/materials_auto/profile.yaml +18 -0
  8. examples/integration-server/pyproject.toml +4 -4
  9. pkgs/argument_parser/argument_parser.py +20 -1
  10. pkgs/serialization_util/serialization_helpers.py +3 -1
  11. pkgs/type_spec/builder.py +43 -13
  12. pkgs/type_spec/builder_types.py +9 -0
  13. pkgs/type_spec/cross_output_links.py +2 -10
  14. pkgs/type_spec/emit_open_api.py +0 -12
  15. pkgs/type_spec/emit_python.py +72 -11
  16. pkgs/type_spec/emit_typescript.py +2 -2
  17. pkgs/type_spec/emit_typescript_util.py +28 -6
  18. pkgs/type_spec/load_types.py +1 -1
  19. pkgs/type_spec/parts/base.ts.prepart +3 -0
  20. pkgs/type_spec/type_info/emit_type_info.py +27 -3
  21. pkgs/type_spec/value_spec/__main__.py +2 -2
  22. uncountable/core/client.py +10 -3
  23. uncountable/integration/cli.py +89 -2
  24. uncountable/integration/executors/executors.py +1 -2
  25. uncountable/integration/executors/generic_upload_executor.py +1 -1
  26. uncountable/integration/job.py +3 -3
  27. uncountable/integration/queue_runner/command_server/__init__.py +4 -0
  28. uncountable/integration/queue_runner/command_server/command_client.py +63 -0
  29. uncountable/integration/queue_runner/command_server/command_server.py +77 -5
  30. uncountable/integration/queue_runner/command_server/protocol/command_server.proto +33 -0
  31. uncountable/integration/queue_runner/command_server/protocol/command_server_pb2.py +27 -13
  32. uncountable/integration/queue_runner/command_server/protocol/command_server_pb2.pyi +53 -1
  33. uncountable/integration/queue_runner/command_server/protocol/command_server_pb2_grpc.py +135 -0
  34. uncountable/integration/queue_runner/command_server/types.py +44 -1
  35. uncountable/integration/queue_runner/datastore/datastore_sqlite.py +132 -8
  36. uncountable/integration/queue_runner/datastore/interface.py +3 -0
  37. uncountable/integration/queue_runner/datastore/model.py +8 -1
  38. uncountable/integration/queue_runner/job_scheduler.py +78 -3
  39. uncountable/integration/queue_runner/types.py +2 -0
  40. uncountable/integration/queue_runner/worker.py +28 -26
  41. uncountable/integration/scheduler.py +64 -13
  42. uncountable/integration/server.py +36 -6
  43. uncountable/integration/telemetry.py +120 -7
  44. uncountable/integration/webhook_server/entrypoint.py +2 -0
  45. uncountable/types/__init__.py +18 -0
  46. uncountable/types/api/entity/list_aggregate.py +79 -0
  47. uncountable/types/api/entity/list_entities.py +25 -0
  48. uncountable/types/api/entity/set_barcode.py +43 -0
  49. uncountable/types/api/entity/transition_entity_phase.py +2 -1
  50. uncountable/types/api/files/download_file.py +15 -1
  51. uncountable/types/api/integrations/push_notification.py +2 -0
  52. uncountable/types/api/integrations/register_sockets_token.py +41 -0
  53. uncountable/types/api/listing/__init__.py +1 -0
  54. uncountable/types/api/listing/fetch_listing.py +57 -0
  55. uncountable/types/api/notebooks/__init__.py +1 -0
  56. uncountable/types/api/notebooks/add_notebook_content.py +119 -0
  57. uncountable/types/api/outputs/get_output_organization.py +1 -1
  58. uncountable/types/api/recipes/edit_recipe_inputs.py +1 -1
  59. uncountable/types/api/recipes/get_recipes_data.py +29 -0
  60. uncountable/types/api/recipes/lock_recipes.py +2 -1
  61. uncountable/types/api/recipes/set_recipe_total.py +59 -0
  62. uncountable/types/api/recipes/unlock_recipes.py +2 -1
  63. uncountable/types/api/runsheet/export_default_runsheet.py +44 -0
  64. uncountable/types/api/uploader/complete_async_parse.py +4 -0
  65. uncountable/types/async_batch_processor.py +222 -0
  66. uncountable/types/async_batch_t.py +4 -0
  67. uncountable/types/client_base.py +367 -2
  68. uncountable/types/client_config.py +1 -0
  69. uncountable/types/client_config_t.py +10 -0
  70. uncountable/types/entity_t.py +3 -1
  71. uncountable/types/integration_server_t.py +2 -0
  72. uncountable/types/listing.py +46 -0
  73. uncountable/types/listing_t.py +533 -0
  74. uncountable/types/notices.py +8 -0
  75. uncountable/types/notices_t.py +37 -0
  76. uncountable/types/queued_job.py +1 -0
  77. uncountable/types/queued_job_t.py +9 -0
  78. uncountable/types/sockets.py +9 -0
  79. uncountable/types/sockets_t.py +99 -0
  80. uncountable/types/uploader_t.py +3 -2
  81. {uncountablepythonsdk-0.0.126.dist-info → uncountablepythonsdk-0.0.142.dev0.dist-info}/METADATA +4 -2
  82. {uncountablepythonsdk-0.0.126.dist-info → uncountablepythonsdk-0.0.142.dev0.dist-info}/RECORD +84 -68
  83. {uncountablepythonsdk-0.0.126.dist-info → uncountablepythonsdk-0.0.142.dev0.dist-info}/WHEEL +0 -0
  84. {uncountablepythonsdk-0.0.126.dist-info → uncountablepythonsdk-0.0.142.dev0.dist-info}/top_level.txt +0 -0

uncountable/integration/cli.py
@@ -1,4 +1,6 @@
 import argparse
+import json
+from typing import assert_never
 
 from dateutil import tz
 from opentelemetry.trace import get_current_span
@@ -6,8 +8,13 @@ from tabulate import tabulate
 
 from uncountable.core.environment import get_local_admin_server_port
 from uncountable.integration.queue_runner.command_server.command_client import (
+    send_job_cancellation_message,
     send_job_queue_message,
     send_list_queued_jobs_message,
+    send_retry_job_message,
+)
+from uncountable.integration.queue_runner.command_server.types import (
+    CommandCancelJobStatus,
 )
 from uncountable.integration.telemetry import Logger
 from uncountable.types import queued_job_t
@@ -24,12 +31,28 @@ def register_enqueue_job_parser(
         description="Process a job with a given host and job ID",
     )
     run_parser.add_argument("job_id", type=str, help="The ID of the job to process")
+    run_parser.add_argument(
+        "--payload", type=str, help="JSON payload for webhook invocation context"
+    )
 
     def _handle_enqueue_job(args: argparse.Namespace) -> None:
+        invocation_context: queued_job_t.InvocationContext
+
+        if args.payload is not None:
+            try:
+                webhook_payload = json.loads(args.payload)
+                invocation_context = queued_job_t.InvocationContextWebhook(
+                    webhook_payload=webhook_payload
+                )
+            except json.JSONDecodeError as e:
+                raise ValueError(f"Invalid JSON payload: {e}")
+        else:
+            invocation_context = queued_job_t.InvocationContextManual()
+
         send_job_queue_message(
             job_ref_name=args.job_id,
             payload=queued_job_t.QueuedJobPayload(
-                invocation_context=queued_job_t.InvocationContextManual()
+                invocation_context=invocation_context
             ),
             host=args.host,
             port=get_local_admin_server_port(),
@@ -38,6 +61,42 @@ def register_enqueue_job_parser(
     run_parser.set_defaults(func=_handle_enqueue_job)
 
 
+def register_cancel_queued_job_parser(
+    sub_parser_manager: argparse._SubParsersAction,
+    parents: list[argparse.ArgumentParser],
+) -> None:
+    cancel_parser = sub_parser_manager.add_parser(
+        "cancel",
+        parents=parents,
+        help="Cancel a queued job with a given host and queued job UUID",
+        description="Cancel a job with a given host and queued job UUID",
+    )
+    cancel_parser.add_argument(
+        "uuid", type=str, help="The UUID of the queued job to cancel"
+    )
+
+    def _handle_cancel_queued_job(args: argparse.Namespace) -> None:
+        resp = send_job_cancellation_message(
+            queued_job_uuid=args.uuid,
+            host=args.host,
+            port=get_local_admin_server_port(),
+        )
+
+        match resp:
+            case CommandCancelJobStatus.CANCELLED_WITH_RESTART:
+                print(
+                    "Job successfully cancelled. The integration server will restart."
+                )
+            case CommandCancelJobStatus.NO_JOB_FOUND:
+                print("Job not found.")
+            case CommandCancelJobStatus.JOB_ALREADY_COMPLETED:
+                print("Job already completed.")
+            case _:
+                assert_never(resp)
+
+    cancel_parser.set_defaults(func=_handle_cancel_queued_job)
+
+
 def register_list_queued_jobs(
     sub_parser_manager: argparse._SubParsersAction,
     parents: list[argparse.ArgumentParser],
@@ -70,12 +129,13 @@ def register_list_queued_jobs(
             port=get_local_admin_server_port(),
         )
 
-        headers = ["UUID", "Job Ref Name", "Attempts", "Submitted At"]
+        headers = ["UUID", "Job Ref Name", "Attempts", "Status", "Submitted At"]
         rows = [
             [
                 job.uuid,
                 job.job_ref_name,
                 job.num_attempts,
+                job.status,
                 job.submitted_at.ToDatetime(tz.UTC).astimezone(tz.tzlocal()),
             ]
             for job in queued_jobs
@@ -85,6 +145,31 @@ def register_list_queued_jobs(
     list_queued_jobs_parser.set_defaults(func=_handle_list_queued_jobs)
 
 
+def register_retry_job_parser(
+    sub_parser_manager: argparse._SubParsersAction,
+    parents: list[argparse.ArgumentParser],
+) -> None:
+    retry_failed_jobs_parser = sub_parser_manager.add_parser(
+        "retry-job",
+        parents=parents,
+        help="Retry failed job on the integration server",
+        description="Retry failed job on the integration server",
+    )
+
+    retry_failed_jobs_parser.add_argument(
+        "job_uuid", type=str, help="The uuid of the job to retry"
+    )
+
+    def _handle_retry_job(args: argparse.Namespace) -> None:
+        send_retry_job_message(
+            job_uuid=args.job_uuid,
+            host=args.host,
+            port=get_local_admin_server_port(),
+        )
+
+    retry_failed_jobs_parser.set_defaults(func=_handle_retry_job)
+
+
 def main() -> None:
     logger = Logger(get_current_span())
 
@@ -104,7 +189,9 @@ def main() -> None:
     )
 
     register_enqueue_job_parser(subparser_action, parents=[base_parser])
+    register_retry_job_parser(subparser_action, parents=[base_parser])
     register_list_queued_jobs(subparser_action, parents=[base_parser])
+    register_cancel_queued_job_parser(subparser_action, parents=[base_parser])
 
     args = main_parser.parse_args()
     with logger.push_scope(args.command):
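
For orientation, the new --payload flag on the enqueue command above selects between the two invocation contexts defined in uncountable.types.queued_job_t. A minimal illustrative sketch (the payload dict below is a placeholder, not taken from this diff):

from uncountable.types import queued_job_t

# Default when --payload is omitted: a manual invocation context.
manual = queued_job_t.QueuedJobPayload(
    invocation_context=queued_job_t.InvocationContextManual()
)

# With --payload '<json>': the parsed JSON becomes the webhook payload.
webhook = queued_job_t.QueuedJobPayload(
    invocation_context=queued_job_t.InvocationContextWebhook(
        webhook_payload={"example_key": "example_value"}
    )
)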

uncountable/integration/executors/executors.py
@@ -88,7 +88,6 @@ def execute_job(
     job_definition: job_definition_t.JobDefinition,
     profile_metadata: job_definition_t.ProfileMetadata,
     args: JobArguments,
-    job_uuid: str,
 ) -> job_definition_t.JobResult:
     with args.logger.push_scope(job_definition.name) as job_logger:
         job = resolve_executor(job_definition.executor, profile_metadata)
@@ -104,7 +103,7 @@
             run_entity = _create_run_entity(
                 client=args.client,
                 logging_settings=job_definition.logging_settings,
-                job_uuid=job_uuid,
+                job_uuid=args.job_uuid,
             )
             result = job.run_outer(args=args)
         except Exception as e:

uncountable/integration/executors/generic_upload_executor.py
@@ -41,7 +41,7 @@ def _get_extension(filename: str) -> str | None:
 
 def _run_keyword_detection(data: io.BytesIO, keyword: str) -> bool:
     try:
-        text = io.TextIOWrapper(data)
+        text = io.TextIOWrapper(data, encoding="utf-8")
         for line in text:
             if (
                 keyword in line

uncountable/integration/job.py
@@ -28,7 +28,6 @@ from uncountable.integration.secret_retrieval.retrieve_secret import retrieve_se
 from uncountable.integration.telemetry import JobLogger
 from uncountable.types import (
     base_t,
-    entity_t,
     job_definition_t,
     queued_job_t,
     webhook_job_t,
@@ -49,6 +48,7 @@ class JobArguments:
     batch_processor: AsyncBatchProcessor
     logger: JobLogger
     payload: base_t.JsonValue
+    job_uuid: str
 
 
 # only for compatibility:
@@ -254,13 +254,13 @@ class RunsheetWebhookJob(WebhookJob[webhook_job_t.RunsheetWebhookPayload]):
         self,
         *,
         args: JobArguments,
-        entities: list[entity_t.Entity],
+        payload: webhook_job_t.RunsheetWebhookPayload,
     ) -> FileUpload: ...
 
     def run(
         self, args: JobArguments, payload: webhook_job_t.RunsheetWebhookPayload
     ) -> JobResult:
-        runsheet = self.build_runsheet(args=args, entities=payload.entities)
+        runsheet = self.build_runsheet(args=args, payload=payload)
 
         files = args.client.upload_files(file_uploads=[runsheet])
         args.client.complete_async_upload(
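
Note the signature change to RunsheetWebhookJob.build_runsheet above: overrides now receive the full webhook payload rather than a pre-extracted entity list, and the entities remain reachable as payload.entities. A hedged migration sketch, with a placeholder subclass and the FileUpload return annotation elided:

from uncountable.integration.job import JobArguments, RunsheetWebhookJob
from uncountable.types import webhook_job_t


class ExampleRunsheetJob(RunsheetWebhookJob):
    def build_runsheet(
        self,
        *,
        args: JobArguments,
        payload: webhook_job_t.RunsheetWebhookPayload,
    ):  # still returns a FileUpload, as before
        entities = payload.entities  # previously passed in directly as `entities`
        ...  # build and return the runsheet FileUpload here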

uncountable/integration/queue_runner/command_server/__init__.py
@@ -4,6 +4,8 @@ from .types import (
     CommandEnqueueJob,
     CommandEnqueueJobResponse,
     CommandQueue,
+    CommandRetryJob,
+    CommandRetryJobResponse,
     CommandServerBadResponse,
     CommandServerException,
     CommandServerTimeout,
@@ -16,6 +18,8 @@ __all__: list[str] = [
     "send_job_queue_message",
     "CommandEnqueueJob",
     "CommandEnqueueJobResponse",
+    "CommandRetryJob",
+    "CommandRetryJobResponse",
     "CommandTask",
     "CommandQueue",
     "CommandServerTimeout",

uncountable/integration/queue_runner/command_server/command_client.py
@@ -6,14 +6,22 @@ import simplejson as json
 
 from pkgs.serialization_util import serialize_for_api
 from uncountable.integration.queue_runner.command_server.protocol.command_server_pb2 import (
+    CancelJobRequest,
+    CancelJobResult,
+    CancelJobStatus,
     CheckHealthRequest,
     CheckHealthResult,
     EnqueueJobRequest,
     EnqueueJobResult,
     ListQueuedJobsRequest,
     ListQueuedJobsResult,
+    RetryJobRequest,
+    RetryJobResult,
+    VaccuumQueuedJobsRequest,
+    VaccuumQueuedJobsResult,
 )
 from uncountable.integration.queue_runner.command_server.types import (
+    CommandCancelJobStatus,
     CommandServerBadResponse,
     CommandServerTimeout,
 )
@@ -59,6 +67,46 @@ def send_job_queue_message(
         return response.queued_job_uuid
 
 
+def send_job_cancellation_message(
+    *, queued_job_uuid: str, host: str = "localhost", port: int
+) -> CommandCancelJobStatus:
+    with command_server_connection(host=host, port=port) as stub:
+        request = CancelJobRequest(job_uuid=queued_job_uuid)
+
+        response = stub.CancelJob(request, timeout=_DEFAULT_MESSAGE_TIMEOUT_SECS)
+
+        assert isinstance(response, CancelJobResult)
+        match response.status:
+            case CancelJobStatus.NO_JOB_FOUND:
+                return CommandCancelJobStatus.NO_JOB_FOUND
+            case CancelJobStatus.CANCELLED_WITH_RESTART:
+                return CommandCancelJobStatus.CANCELLED_WITH_RESTART
+            case CancelJobStatus.JOB_ALREADY_COMPLETED:
+                return CommandCancelJobStatus.JOB_ALREADY_COMPLETED
+            case _:
+                raise CommandServerBadResponse(f"unknown status: {response.status}")
+
+
+def send_retry_job_message(
+    *,
+    job_uuid: str,
+    host: str = "localhost",
+    port: int,
+) -> str:
+    with command_server_connection(host=host, port=port) as stub:
+        request = RetryJobRequest(uuid=job_uuid)
+
+        try:
+            response = stub.RetryJob(request, timeout=_DEFAULT_MESSAGE_TIMEOUT_SECS)
+            assert isinstance(response, RetryJobResult)
+            if not response.successfully_queued:
+                raise CommandServerBadResponse("queue operation was not successful")
+
+            return response.queued_job_uuid
+        except grpc.RpcError as e:
+            raise ValueError(e.details())  # type: ignore
+
+
 def check_health(*, host: str = _LOCAL_RPC_HOST, port: int) -> bool:
     with command_server_connection(host=host, port=port) as stub:
         request = CheckHealthRequest()
@@ -92,3 +140,18 @@ def send_list_queued_jobs_message(
 
         assert isinstance(response, ListQueuedJobsResult)
         return list(response.queued_jobs)
+
+
+def send_vaccuum_queued_jobs_message(*, host: str = "localhost", port: int) -> None:
+    with command_server_connection(host=host, port=port) as stub:
+        request = VaccuumQueuedJobsRequest()
+
+        try:
+            response = stub.VaccuumQueuedJobs(
+                request, timeout=_DEFAULT_MESSAGE_TIMEOUT_SECS
+            )
+        except grpc.RpcError as e:
+            raise ValueError(e.details())  # type: ignore
+
+        assert isinstance(response, VaccuumQueuedJobsResult)
+        return None
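
The new admin-client helpers above can be driven directly against a locally running integration server. An illustrative sketch (the failed-job UUID is a placeholder):

from uncountable.core.environment import get_local_admin_server_port
from uncountable.integration.queue_runner.command_server.command_client import (
    send_job_cancellation_message,
    send_retry_job_message,
    send_vaccuum_queued_jobs_message,
)
from uncountable.integration.queue_runner.command_server.types import (
    CommandCancelJobStatus,
)

port = get_local_admin_server_port()

# Re-queue a previously failed job; the helper returns the new queued-job UUID.
new_uuid = send_retry_job_message(job_uuid="<failed-job-uuid>", port=port)

# Cancel the re-queued job and branch on the reported status.
status = send_job_cancellation_message(queued_job_uuid=new_uuid, port=port)
if status is CommandCancelJobStatus.CANCELLED_WITH_RESTART:
    print("cancelled; the integration server will restart")

# Ask the server to vacuum its queued-jobs store (retention rules live in the datastore layer).
send_vaccuum_queued_jobs_message(port=port)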

uncountable/integration/queue_runner/command_server/command_server.py
@@ -1,23 +1,39 @@
 import asyncio
+from typing import assert_never
 
+import grpc.aio as grpc_aio
 import simplejson as json
 from google.protobuf.timestamp_pb2 import Timestamp
-from grpc import StatusCode, aio
+from grpc import StatusCode
 
 from pkgs.argument_parser import CachedParser
 from uncountable.core.environment import get_local_admin_server_port
 from uncountable.integration.queue_runner.command_server.protocol.command_server_pb2 import (
+    CancelJobRequest,
+    CancelJobResult,
+    CancelJobStatus,
     CheckHealthRequest,
     CheckHealthResult,
     EnqueueJobRequest,
     EnqueueJobResult,
     ListQueuedJobsRequest,
     ListQueuedJobsResult,
+    RetryJobRequest,
+    RetryJobResult,
+    VaccuumQueuedJobsRequest,
+    VaccuumQueuedJobsResult,
 )
 from uncountable.integration.queue_runner.command_server.types import (
+    CommandCancelJob,
+    CommandCancelJobResponse,
+    CommandCancelJobStatus,
     CommandEnqueueJob,
     CommandEnqueueJobResponse,
     CommandQueue,
+    CommandRetryJob,
+    CommandRetryJobResponse,
+    CommandVaccuumQueuedJobs,
+    CommandVaccuumQueuedJobsResponse,
 )
 from uncountable.integration.queue_runner.datastore import DatastoreSqlite
 from uncountable.types import queued_job_t
@@ -32,11 +48,11 @@ queued_job_payload_parser = CachedParser(queued_job_t.QueuedJobPayload)
 
 
 async def serve(command_queue: CommandQueue, datastore: DatastoreSqlite) -> None:
-    server = aio.server()
+    server = grpc_aio.server()
 
     class CommandServerHandler(CommandServerServicer):
         async def EnqueueJob(
-            self, request: EnqueueJobRequest, context: aio.ServicerContext
+            self, request: EnqueueJobRequest, context: grpc_aio.ServicerContext
         ) -> EnqueueJobResult:
             payload_json = json.loads(request.serialized_payload)
             payload = queued_job_payload_parser.parse_api(payload_json)
@@ -54,13 +70,57 @@ async def serve(command_queue: CommandQueue, datastore: DatastoreSqlite) -> None
             )
             return result
 
+        async def CancelJob(
+            self, request: CancelJobRequest, context: grpc_aio.ServicerContext
+        ) -> CancelJobResult:
+            response_queue: asyncio.Queue[CommandCancelJobResponse] = asyncio.Queue()
+            await command_queue.put(
+                CommandCancelJob(
+                    queued_job_uuid=request.job_uuid,
+                    response_queue=response_queue,
+                )
+            )
+
+            response = await response_queue.get()
+
+            proto_status: CancelJobStatus
+            match response.status:
+                case CommandCancelJobStatus.NO_JOB_FOUND:
+                    proto_status = CancelJobStatus.NO_JOB_FOUND
+                case CommandCancelJobStatus.CANCELLED_WITH_RESTART:
+                    proto_status = CancelJobStatus.CANCELLED_WITH_RESTART
+                case CommandCancelJobStatus.JOB_ALREADY_COMPLETED:
+                    proto_status = CancelJobStatus.JOB_ALREADY_COMPLETED
+                case _:
+                    assert_never(response.status)
+
+            result = CancelJobResult(status=proto_status)
+            return result
+
+        async def RetryJob(
+            self, request: RetryJobRequest, context: grpc_aio.ServicerContext
+        ) -> RetryJobResult:
+            response_queue: asyncio.Queue[CommandRetryJobResponse] = asyncio.Queue()
+            await command_queue.put(
+                CommandRetryJob(
+                    queued_job_uuid=request.uuid, response_queue=response_queue
+                )
+            )
+            response = await response_queue.get()
+            if response.queued_job_uuid is not None:
+                return RetryJobResult(
+                    successfully_queued=True, queued_job_uuid=response.queued_job_uuid
+                )
+            else:
+                return RetryJobResult(successfully_queued=False, queued_job_uuid="")
+
         async def CheckHealth(
-            self, request: CheckHealthRequest, context: aio.ServicerContext
+            self, request: CheckHealthRequest, context: grpc_aio.ServicerContext
         ) -> CheckHealthResult:
             return CheckHealthResult(success=True)
 
         async def ListQueuedJobs(
-            self, request: ListQueuedJobsRequest, context: aio.ServicerContext
+            self, request: ListQueuedJobsRequest, context: grpc_aio.ServicerContext
        ) -> ListQueuedJobsResult:
             if (
                 request.limit < ListQueuedJobsConstants.LIMIT_MIN
@@ -90,10 +150,22 @@ async def serve(command_queue: CommandQueue, datastore: DatastoreSqlite) -> None
                         job_ref_name=item.job_ref_name,
                         num_attempts=item.num_attempts,
                         submitted_at=proto_timestamp,
+                        status=item.status,
                     )
                 )
            return ListQueuedJobsResult(queued_jobs=response_list)
 
+        async def VaccuumQueuedJobs(
+            self, request: VaccuumQueuedJobsRequest, context: grpc_aio.ServicerContext
+        ) -> VaccuumQueuedJobsResult:
+            response_queue: asyncio.Queue[CommandVaccuumQueuedJobsResponse] = (
+                asyncio.Queue()
+            )
+            await command_queue.put(
+                CommandVaccuumQueuedJobs(response_queue=response_queue)
+            )
+            return VaccuumQueuedJobsResult()
+
     add_CommandServerServicer_to_server(CommandServerHandler(), server)
 
     listen_addr = f"[::]:{get_local_admin_server_port()}"

uncountable/integration/queue_runner/command_server/protocol/command_server.proto
@@ -3,8 +3,11 @@ import "google/protobuf/timestamp.proto";
 
 service CommandServer {
   rpc EnqueueJob(EnqueueJobRequest) returns (EnqueueJobResult) {}
+  rpc RetryJob(RetryJobRequest) returns (RetryJobResult) {}
   rpc CheckHealth(CheckHealthRequest) returns (CheckHealthResult) {}
   rpc ListQueuedJobs(ListQueuedJobsRequest) returns (ListQueuedJobsResult) {}
+  rpc VaccuumQueuedJobs(VaccuumQueuedJobsRequest) returns (VaccuumQueuedJobsResult) {}
+  rpc CancelJob(CancelJobRequest) returns (CancelJobResult) {}
 }
 
 message EnqueueJobRequest {
@@ -17,6 +20,21 @@ message EnqueueJobResult {
   string queued_job_uuid = 2;
 }
 
+message RetryJobRequest {
+  string uuid = 1;
+}
+
+message RetryJobResult {
+  bool successfully_queued = 1;
+  string queued_job_uuid = 2;
+}
+
+message VaccuumQueuedJobsRequest {
+}
+
+message VaccuumQueuedJobsResult {
+}
+
 message CheckHealthRequest {}
 
 message CheckHealthResult {
@@ -34,7 +52,22 @@ message ListQueuedJobsResult {
     string job_ref_name = 2;
     int64 num_attempts = 3;
     google.protobuf.Timestamp submitted_at = 4;
+    string status = 5;
   }
 
   repeated ListQueuedJobsResultItem queued_jobs = 1;
 }
+
+message CancelJobRequest {
+  string job_uuid = 1;
+}
+
+enum CancelJobStatus {
+  CANCELLED_WITH_RESTART = 0;
+  NO_JOB_FOUND = 1;
+  JOB_ALREADY_COMPLETED = 2;
+}
+
+message CancelJobResult {
+  CancelJobStatus status = 1;
+}
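
The expanded CommandServer service can also be exercised through the generated stubs. A sketch that assumes the conventional protoc-generated CommandServerStub class and an insecure local channel; the command_client helpers above wrap this and are the intended entry point:

import grpc

from uncountable.core.environment import get_local_admin_server_port
from uncountable.integration.queue_runner.command_server.protocol import (
    command_server_pb2,
    command_server_pb2_grpc,
)

target = f"localhost:{get_local_admin_server_port()}"
with grpc.insecure_channel(target) as channel:
    stub = command_server_pb2_grpc.CommandServerStub(channel)
    result = stub.RetryJob(
        command_server_pb2.RetryJobRequest(uuid="<queued-job-uuid>")
    )
    print(result.successfully_queued, result.queued_job_uuid)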

uncountable/integration/queue_runner/command_server/protocol/command_server_pb2.py
@@ -18,7 +18,7 @@ from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__
 
 
 DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
-    b'\nQuncountable/integration/queue_runner/command_server/protocol/command_server.proto\x1a\x1fgoogle/protobuf/timestamp.proto"E\n\x11\x45nqueueJobRequest\x12\x14\n\x0cjob_ref_name\x18\x01 \x01(\t\x12\x1a\n\x12serialized_payload\x18\x02 \x01(\t"H\n\x10\x45nqueueJobResult\x12\x1b\n\x13successfully_queued\x18\x01 \x01(\x08\x12\x17\n\x0fqueued_job_uuid\x18\x02 \x01(\t"\x14\n\x12\x43heckHealthRequest"$\n\x11\x43heckHealthResult\x12\x0f\n\x07success\x18\x01 \x01(\x08"6\n\x15ListQueuedJobsRequest\x12\x0e\n\x06offset\x18\x01 \x01(\r\x12\r\n\x05limit\x18\x02 \x01(\r"\xe4\x01\n\x14ListQueuedJobsResult\x12\x43\n\x0bqueued_jobs\x18\x01 \x03(\x0b\x32..ListQueuedJobsResult.ListQueuedJobsResultItem\x1a\x86\x01\n\x18ListQueuedJobsResultItem\x12\x0c\n\x04uuid\x18\x01 \x01(\t\x12\x14\n\x0cjob_ref_name\x18\x02 \x01(\t\x12\x14\n\x0cnum_attempts\x18\x03 \x01(\x03\x12\x30\n\x0csubmitted_at\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp2\xc3\x01\n\rCommandServer\x12\x35\n\nEnqueueJob\x12\x12.EnqueueJobRequest\x1a\x11.EnqueueJobResult"\x00\x12\x38\n\x0b\x43heckHealth\x12\x13.CheckHealthRequest\x1a\x12.CheckHealthResult"\x00\x12\x41\n\x0eListQueuedJobs\x12\x16.ListQueuedJobsRequest\x1a\x15.ListQueuedJobsResult"\x00\x62\x06proto3'
+    b'\nQuncountable/integration/queue_runner/command_server/protocol/command_server.proto\x1a\x1fgoogle/protobuf/timestamp.proto"E\n\x11\x45nqueueJobRequest\x12\x14\n\x0cjob_ref_name\x18\x01 \x01(\t\x12\x1a\n\x12serialized_payload\x18\x02 \x01(\t"H\n\x10\x45nqueueJobResult\x12\x1b\n\x13successfully_queued\x18\x01 \x01(\x08\x12\x17\n\x0fqueued_job_uuid\x18\x02 \x01(\t"\x1f\n\x0fRetryJobRequest\x12\x0c\n\x04uuid\x18\x01 \x01(\t"F\n\x0eRetryJobResult\x12\x1b\n\x13successfully_queued\x18\x01 \x01(\x08\x12\x17\n\x0fqueued_job_uuid\x18\x02 \x01(\t"\x1a\n\x18VaccuumQueuedJobsRequest"\x19\n\x17VaccuumQueuedJobsResult"\x14\n\x12\x43heckHealthRequest"$\n\x11\x43heckHealthResult\x12\x0f\n\x07success\x18\x01 \x01(\x08"6\n\x15ListQueuedJobsRequest\x12\x0e\n\x06offset\x18\x01 \x01(\r\x12\r\n\x05limit\x18\x02 \x01(\r"\xf4\x01\n\x14ListQueuedJobsResult\x12\x43\n\x0bqueued_jobs\x18\x01 \x03(\x0b\x32..ListQueuedJobsResult.ListQueuedJobsResultItem\x1a\x96\x01\n\x18ListQueuedJobsResultItem\x12\x0c\n\x04uuid\x18\x01 \x01(\t\x12\x14\n\x0cjob_ref_name\x18\x02 \x01(\t\x12\x14\n\x0cnum_attempts\x18\x03 \x01(\x03\x12\x30\n\x0csubmitted_at\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0e\n\x06status\x18\x05 \x01(\t"$\n\x10\x43\x61ncelJobRequest\x12\x10\n\x08job_uuid\x18\x01 \x01(\t"3\n\x0f\x43\x61ncelJobResult\x12 \n\x06status\x18\x01 \x01(\x0e\x32\x10.CancelJobStatus*Z\n\x0f\x43\x61ncelJobStatus\x12\x1a\n\x16\x43\x41NCELLED_WITH_RESTART\x10\x00\x12\x10\n\x0cNO_JOB_FOUND\x10\x01\x12\x19\n\x15JOB_ALREADY_COMPLETED\x10\x02\x32\xf4\x02\n\rCommandServer\x12\x35\n\nEnqueueJob\x12\x12.EnqueueJobRequest\x1a\x11.EnqueueJobResult"\x00\x12/\n\x08RetryJob\x12\x10.RetryJobRequest\x1a\x0f.RetryJobResult"\x00\x12\x38\n\x0b\x43heckHealth\x12\x13.CheckHealthRequest\x1a\x12.CheckHealthResult"\x00\x12\x41\n\x0eListQueuedJobs\x12\x16.ListQueuedJobsRequest\x1a\x15.ListQueuedJobsResult"\x00\x12J\n\x11VaccuumQueuedJobs\x12\x19.VaccuumQueuedJobsRequest\x1a\x18.VaccuumQueuedJobsResult"\x00\x12\x32\n\tCancelJob\x12\x11.CancelJobRequest\x1a\x10.CancelJobResult"\x00\x62\x06proto3'
 )
 
 _globals = globals()
@@ -30,20 +30,34 @@ _builder.BuildTopDescriptorsAndMessages(
 )
 if _descriptor._USE_C_DESCRIPTORS == False:
     DESCRIPTOR._options = None
+    _globals["_CANCELJOBSTATUS"]._serialized_start = 877
+    _globals["_CANCELJOBSTATUS"]._serialized_end = 967
     _globals["_ENQUEUEJOBREQUEST"]._serialized_start = 118
     _globals["_ENQUEUEJOBREQUEST"]._serialized_end = 187
     _globals["_ENQUEUEJOBRESULT"]._serialized_start = 189
     _globals["_ENQUEUEJOBRESULT"]._serialized_end = 261
-    _globals["_CHECKHEALTHREQUEST"]._serialized_start = 263
-    _globals["_CHECKHEALTHREQUEST"]._serialized_end = 283
-    _globals["_CHECKHEALTHRESULT"]._serialized_start = 285
-    _globals["_CHECKHEALTHRESULT"]._serialized_end = 321
-    _globals["_LISTQUEUEDJOBSREQUEST"]._serialized_start = 323
-    _globals["_LISTQUEUEDJOBSREQUEST"]._serialized_end = 377
-    _globals["_LISTQUEUEDJOBSRESULT"]._serialized_start = 380
-    _globals["_LISTQUEUEDJOBSRESULT"]._serialized_end = 608
-    _globals["_LISTQUEUEDJOBSRESULT_LISTQUEUEDJOBSRESULTITEM"]._serialized_start = 474
-    _globals["_LISTQUEUEDJOBSRESULT_LISTQUEUEDJOBSRESULTITEM"]._serialized_end = 608
-    _globals["_COMMANDSERVER"]._serialized_start = 611
-    _globals["_COMMANDSERVER"]._serialized_end = 806
+    _globals["_RETRYJOBREQUEST"]._serialized_start = 263
+    _globals["_RETRYJOBREQUEST"]._serialized_end = 294
+    _globals["_RETRYJOBRESULT"]._serialized_start = 296
+    _globals["_RETRYJOBRESULT"]._serialized_end = 366
+    _globals["_VACCUUMQUEUEDJOBSREQUEST"]._serialized_start = 368
+    _globals["_VACCUUMQUEUEDJOBSREQUEST"]._serialized_end = 394
+    _globals["_VACCUUMQUEUEDJOBSRESULT"]._serialized_start = 396
+    _globals["_VACCUUMQUEUEDJOBSRESULT"]._serialized_end = 421
+    _globals["_CHECKHEALTHREQUEST"]._serialized_start = 423
+    _globals["_CHECKHEALTHREQUEST"]._serialized_end = 443
+    _globals["_CHECKHEALTHRESULT"]._serialized_start = 445
+    _globals["_CHECKHEALTHRESULT"]._serialized_end = 481
+    _globals["_LISTQUEUEDJOBSREQUEST"]._serialized_start = 483
+    _globals["_LISTQUEUEDJOBSREQUEST"]._serialized_end = 537
+    _globals["_LISTQUEUEDJOBSRESULT"]._serialized_start = 540
+    _globals["_LISTQUEUEDJOBSRESULT"]._serialized_end = 784
+    _globals["_LISTQUEUEDJOBSRESULT_LISTQUEUEDJOBSRESULTITEM"]._serialized_start = 634
+    _globals["_LISTQUEUEDJOBSRESULT_LISTQUEUEDJOBSRESULTITEM"]._serialized_end = 784
+    _globals["_CANCELJOBREQUEST"]._serialized_start = 786
+    _globals["_CANCELJOBREQUEST"]._serialized_end = 822
+    _globals["_CANCELJOBRESULT"]._serialized_start = 824
+    _globals["_CANCELJOBRESULT"]._serialized_end = 875
+    _globals["_COMMANDSERVER"]._serialized_start = 970
+    _globals["_COMMANDSERVER"]._serialized_end = 1342
 # @@protoc_insertion_point(module_scope)

uncountable/integration/queue_runner/command_server/protocol/command_server_pb2.pyi
@@ -1,6 +1,7 @@
 # ruff: noqa
 from google.protobuf import timestamp_pb2 as _timestamp_pb2
 from google.protobuf.internal import containers as _containers
+from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper
 from google.protobuf import descriptor as _descriptor
 from google.protobuf import message as _message
 from typing import (
@@ -13,6 +14,16 @@ from typing import (
 
 DESCRIPTOR: _descriptor.FileDescriptor
 
+class CancelJobStatus(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
+    __slots__ = ()
+    CANCELLED_WITH_RESTART: _ClassVar[CancelJobStatus]
+    NO_JOB_FOUND: _ClassVar[CancelJobStatus]
+    JOB_ALREADY_COMPLETED: _ClassVar[CancelJobStatus]
+
+CANCELLED_WITH_RESTART: CancelJobStatus
+NO_JOB_FOUND: CancelJobStatus
+JOB_ALREADY_COMPLETED: CancelJobStatus
+
 class EnqueueJobRequest(_message.Message):
     __slots__ = ("job_ref_name", "serialized_payload")
     JOB_REF_NAME_FIELD_NUMBER: _ClassVar[int]
@@ -35,6 +46,30 @@ class EnqueueJobResult(_message.Message):
         self, successfully_queued: bool = ..., queued_job_uuid: _Optional[str] = ...
     ) -> None: ...
 
+class RetryJobRequest(_message.Message):
+    __slots__ = ("uuid",)
+    UUID_FIELD_NUMBER: _ClassVar[int]
+    uuid: str
+    def __init__(self, uuid: _Optional[str] = ...) -> None: ...
+
+class RetryJobResult(_message.Message):
+    __slots__ = ("successfully_queued", "queued_job_uuid")
+    SUCCESSFULLY_QUEUED_FIELD_NUMBER: _ClassVar[int]
+    QUEUED_JOB_UUID_FIELD_NUMBER: _ClassVar[int]
+    successfully_queued: bool
+    queued_job_uuid: str
+    def __init__(
+        self, successfully_queued: bool = ..., queued_job_uuid: _Optional[str] = ...
+    ) -> None: ...
+
+class VaccuumQueuedJobsRequest(_message.Message):
+    __slots__ = ()
+    def __init__(self) -> None: ...
+
+class VaccuumQueuedJobsResult(_message.Message):
+    __slots__ = ()
+    def __init__(self) -> None: ...
+
 class CheckHealthRequest(_message.Message):
     __slots__ = ()
     def __init__(self) -> None: ...
@@ -58,21 +93,24 @@ class ListQueuedJobsRequest(_message.Message):
 class ListQueuedJobsResult(_message.Message):
     __slots__ = ("queued_jobs",)
     class ListQueuedJobsResultItem(_message.Message):
-        __slots__ = ("uuid", "job_ref_name", "num_attempts", "submitted_at")
+        __slots__ = ("uuid", "job_ref_name", "num_attempts", "submitted_at", "status")
         UUID_FIELD_NUMBER: _ClassVar[int]
         JOB_REF_NAME_FIELD_NUMBER: _ClassVar[int]
         NUM_ATTEMPTS_FIELD_NUMBER: _ClassVar[int]
         SUBMITTED_AT_FIELD_NUMBER: _ClassVar[int]
+        STATUS_FIELD_NUMBER: _ClassVar[int]
        uuid: str
         job_ref_name: str
         num_attempts: int
         submitted_at: _timestamp_pb2.Timestamp
+        status: str
         def __init__(
             self,
             uuid: _Optional[str] = ...,
             job_ref_name: _Optional[str] = ...,
             num_attempts: _Optional[int] = ...,
             submitted_at: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ...,
+            status: _Optional[str] = ...,
         ) -> None: ...
 
     QUEUED_JOBS_FIELD_NUMBER: _ClassVar[int]
@@ -85,3 +123,17 @@ class ListQueuedJobsResult(_message.Message):
             _Iterable[_Union[ListQueuedJobsResult.ListQueuedJobsResultItem, _Mapping]]
         ] = ...,
     ) -> None: ...
+
+class CancelJobRequest(_message.Message):
+    __slots__ = ("job_uuid",)
+    JOB_UUID_FIELD_NUMBER: _ClassVar[int]
+    job_uuid: str
+    def __init__(self, job_uuid: _Optional[str] = ...) -> None: ...
+
+class CancelJobResult(_message.Message):
+    __slots__ = ("status",)
+    STATUS_FIELD_NUMBER: _ClassVar[int]
+    status: CancelJobStatus
+    def __init__(
+        self, status: _Optional[_Union[CancelJobStatus, str]] = ...
+    ) -> None: ...