UncountablePythonSDK 0.0.126__py3-none-any.whl → 0.0.142.dev0__py3-none-any.whl

This diff compares two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release.


This version of UncountablePythonSDK might be problematic.

Files changed (84):
  1. docs/requirements.txt +1 -1
  2. examples/integration-server/jobs/materials_auto/example_cron.py +1 -1
  3. examples/integration-server/jobs/materials_auto/example_instrument.py +68 -38
  4. examples/integration-server/jobs/materials_auto/example_parse.py +140 -0
  5. examples/integration-server/jobs/materials_auto/example_predictions.py +61 -0
  6. examples/integration-server/jobs/materials_auto/example_runsheet_wh.py +57 -16
  7. examples/integration-server/jobs/materials_auto/profile.yaml +18 -0
  8. examples/integration-server/pyproject.toml +4 -4
  9. pkgs/argument_parser/argument_parser.py +20 -1
  10. pkgs/serialization_util/serialization_helpers.py +3 -1
  11. pkgs/type_spec/builder.py +43 -13
  12. pkgs/type_spec/builder_types.py +9 -0
  13. pkgs/type_spec/cross_output_links.py +2 -10
  14. pkgs/type_spec/emit_open_api.py +0 -12
  15. pkgs/type_spec/emit_python.py +72 -11
  16. pkgs/type_spec/emit_typescript.py +2 -2
  17. pkgs/type_spec/emit_typescript_util.py +28 -6
  18. pkgs/type_spec/load_types.py +1 -1
  19. pkgs/type_spec/parts/base.ts.prepart +3 -0
  20. pkgs/type_spec/type_info/emit_type_info.py +27 -3
  21. pkgs/type_spec/value_spec/__main__.py +2 -2
  22. uncountable/core/client.py +10 -3
  23. uncountable/integration/cli.py +89 -2
  24. uncountable/integration/executors/executors.py +1 -2
  25. uncountable/integration/executors/generic_upload_executor.py +1 -1
  26. uncountable/integration/job.py +3 -3
  27. uncountable/integration/queue_runner/command_server/__init__.py +4 -0
  28. uncountable/integration/queue_runner/command_server/command_client.py +63 -0
  29. uncountable/integration/queue_runner/command_server/command_server.py +77 -5
  30. uncountable/integration/queue_runner/command_server/protocol/command_server.proto +33 -0
  31. uncountable/integration/queue_runner/command_server/protocol/command_server_pb2.py +27 -13
  32. uncountable/integration/queue_runner/command_server/protocol/command_server_pb2.pyi +53 -1
  33. uncountable/integration/queue_runner/command_server/protocol/command_server_pb2_grpc.py +135 -0
  34. uncountable/integration/queue_runner/command_server/types.py +44 -1
  35. uncountable/integration/queue_runner/datastore/datastore_sqlite.py +132 -8
  36. uncountable/integration/queue_runner/datastore/interface.py +3 -0
  37. uncountable/integration/queue_runner/datastore/model.py +8 -1
  38. uncountable/integration/queue_runner/job_scheduler.py +78 -3
  39. uncountable/integration/queue_runner/types.py +2 -0
  40. uncountable/integration/queue_runner/worker.py +28 -26
  41. uncountable/integration/scheduler.py +64 -13
  42. uncountable/integration/server.py +36 -6
  43. uncountable/integration/telemetry.py +120 -7
  44. uncountable/integration/webhook_server/entrypoint.py +2 -0
  45. uncountable/types/__init__.py +18 -0
  46. uncountable/types/api/entity/list_aggregate.py +79 -0
  47. uncountable/types/api/entity/list_entities.py +25 -0
  48. uncountable/types/api/entity/set_barcode.py +43 -0
  49. uncountable/types/api/entity/transition_entity_phase.py +2 -1
  50. uncountable/types/api/files/download_file.py +15 -1
  51. uncountable/types/api/integrations/push_notification.py +2 -0
  52. uncountable/types/api/integrations/register_sockets_token.py +41 -0
  53. uncountable/types/api/listing/__init__.py +1 -0
  54. uncountable/types/api/listing/fetch_listing.py +57 -0
  55. uncountable/types/api/notebooks/__init__.py +1 -0
  56. uncountable/types/api/notebooks/add_notebook_content.py +119 -0
  57. uncountable/types/api/outputs/get_output_organization.py +1 -1
  58. uncountable/types/api/recipes/edit_recipe_inputs.py +1 -1
  59. uncountable/types/api/recipes/get_recipes_data.py +29 -0
  60. uncountable/types/api/recipes/lock_recipes.py +2 -1
  61. uncountable/types/api/recipes/set_recipe_total.py +59 -0
  62. uncountable/types/api/recipes/unlock_recipes.py +2 -1
  63. uncountable/types/api/runsheet/export_default_runsheet.py +44 -0
  64. uncountable/types/api/uploader/complete_async_parse.py +4 -0
  65. uncountable/types/async_batch_processor.py +222 -0
  66. uncountable/types/async_batch_t.py +4 -0
  67. uncountable/types/client_base.py +367 -2
  68. uncountable/types/client_config.py +1 -0
  69. uncountable/types/client_config_t.py +10 -0
  70. uncountable/types/entity_t.py +3 -1
  71. uncountable/types/integration_server_t.py +2 -0
  72. uncountable/types/listing.py +46 -0
  73. uncountable/types/listing_t.py +533 -0
  74. uncountable/types/notices.py +8 -0
  75. uncountable/types/notices_t.py +37 -0
  76. uncountable/types/queued_job.py +1 -0
  77. uncountable/types/queued_job_t.py +9 -0
  78. uncountable/types/sockets.py +9 -0
  79. uncountable/types/sockets_t.py +99 -0
  80. uncountable/types/uploader_t.py +3 -2
  81. {uncountablepythonsdk-0.0.126.dist-info → uncountablepythonsdk-0.0.142.dev0.dist-info}/METADATA +4 -2
  82. {uncountablepythonsdk-0.0.126.dist-info → uncountablepythonsdk-0.0.142.dev0.dist-info}/RECORD +84 -68
  83. {uncountablepythonsdk-0.0.126.dist-info → uncountablepythonsdk-0.0.142.dev0.dist-info}/WHEEL +0 -0
  84. {uncountablepythonsdk-0.0.126.dist-info → uncountablepythonsdk-0.0.142.dev0.dist-info}/top_level.txt +0 -0
uncountable/integration/queue_runner/command_server/protocol/command_server_pb2_grpc.py

@@ -24,6 +24,11 @@ class CommandServerStub(object):
  request_serializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.EnqueueJobRequest.SerializeToString,
  response_deserializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.EnqueueJobResult.FromString,
  )
+ self.RetryJob = channel.unary_unary(
+ "/CommandServer/RetryJob",
+ request_serializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.RetryJobRequest.SerializeToString,
+ response_deserializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.RetryJobResult.FromString,
+ )
  self.CheckHealth = channel.unary_unary(
  "/CommandServer/CheckHealth",
  request_serializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.CheckHealthRequest.SerializeToString,
@@ -34,6 +39,16 @@ class CommandServerStub(object):
  request_serializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.ListQueuedJobsRequest.SerializeToString,
  response_deserializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.ListQueuedJobsResult.FromString,
  )
+ self.VaccuumQueuedJobs = channel.unary_unary(
+ "/CommandServer/VaccuumQueuedJobs",
+ request_serializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.VaccuumQueuedJobsRequest.SerializeToString,
+ response_deserializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.VaccuumQueuedJobsResult.FromString,
+ )
+ self.CancelJob = channel.unary_unary(
+ "/CommandServer/CancelJob",
+ request_serializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.CancelJobRequest.SerializeToString,
+ response_deserializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.CancelJobResult.FromString,
+ )


  class CommandServerServicer(object):
@@ -45,6 +60,12 @@ class CommandServerServicer(object):
  context.set_details("Method not implemented!")
  raise NotImplementedError("Method not implemented!")

+ def RetryJob(self, request, context):
+ """Missing associated documentation comment in .proto file."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
  def CheckHealth(self, request, context):
  """Missing associated documentation comment in .proto file."""
  context.set_code(grpc.StatusCode.UNIMPLEMENTED)
@@ -57,6 +78,18 @@ class CommandServerServicer(object):
  context.set_details("Method not implemented!")
  raise NotImplementedError("Method not implemented!")

+ def VaccuumQueuedJobs(self, request, context):
+ """Missing associated documentation comment in .proto file."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def CancelJob(self, request, context):
+ """Missing associated documentation comment in .proto file."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+

  def add_CommandServerServicer_to_server(servicer, server):
  rpc_method_handlers = {
@@ -65,6 +98,11 @@ def add_CommandServerServicer_to_server(servicer, server):
  request_deserializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.EnqueueJobRequest.FromString,
  response_serializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.EnqueueJobResult.SerializeToString,
  ),
+ "RetryJob": grpc.unary_unary_rpc_method_handler(
+ servicer.RetryJob,
+ request_deserializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.RetryJobRequest.FromString,
+ response_serializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.RetryJobResult.SerializeToString,
+ ),
  "CheckHealth": grpc.unary_unary_rpc_method_handler(
  servicer.CheckHealth,
  request_deserializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.CheckHealthRequest.FromString,
@@ -75,6 +113,16 @@ def add_CommandServerServicer_to_server(servicer, server):
  request_deserializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.ListQueuedJobsRequest.FromString,
  response_serializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.ListQueuedJobsResult.SerializeToString,
  ),
+ "VaccuumQueuedJobs": grpc.unary_unary_rpc_method_handler(
+ servicer.VaccuumQueuedJobs,
+ request_deserializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.VaccuumQueuedJobsRequest.FromString,
+ response_serializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.VaccuumQueuedJobsResult.SerializeToString,
+ ),
+ "CancelJob": grpc.unary_unary_rpc_method_handler(
+ servicer.CancelJob,
+ request_deserializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.CancelJobRequest.FromString,
+ response_serializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.CancelJobResult.SerializeToString,
+ ),
  }
  generic_handler = grpc.method_handlers_generic_handler(
  "CommandServer", rpc_method_handlers
@@ -115,6 +163,35 @@ class CommandServer(object):
  metadata,
  )

+ @staticmethod
+ def RetryJob(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/CommandServer/RetryJob",
+ uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.RetryJobRequest.SerializeToString,
+ uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.RetryJobResult.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
  @staticmethod
  def CheckHealth(
  request,
@@ -172,3 +249,61 @@ class CommandServer(object):
  timeout,
  metadata,
  )
+
+ @staticmethod
+ def VaccuumQueuedJobs(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/CommandServer/VaccuumQueuedJobs",
+ uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.VaccuumQueuedJobsRequest.SerializeToString,
+ uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.VaccuumQueuedJobsResult.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def CancelJob(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/CommandServer/CancelJob",
+ uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.CancelJobRequest.SerializeToString,
+ uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.CancelJobResult.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
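For orientation, a minimal sketch of how a caller might reach the new RetryJob RPC through the regenerated stub above. The channel address and the queued_job_uuid request field are assumptions (the request message fields are not part of this excerpt); the higher-level wrapper in command_client.py is likely the intended entry point.

# Sketch only: exercising the new RetryJob RPC via the generated stub.
# The address and the queued_job_uuid field name are assumptions.
import grpc

from uncountable.integration.queue_runner.command_server.protocol import (
    command_server_pb2,
    command_server_pb2_grpc,
)

with grpc.insecure_channel("localhost:50051") as channel:  # assumed address
    stub = command_server_pb2_grpc.CommandServerStub(channel)
    result = stub.RetryJob(
        command_server_pb2.RetryJobRequest(queued_job_uuid="some-uuid")
    )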
uncountable/integration/queue_runner/command_server/types.py

@@ -6,8 +6,17 @@ from enum import StrEnum
  from uncountable.types import queued_job_t


+ class CommandCancelJobStatus(StrEnum):
+ CANCELLED_WITH_RESTART = "cancelled_with_restart"
+ NO_JOB_FOUND = "no_job_found"
+ JOB_ALREADY_COMPLETED = "job_already_completed"
+
+
  class CommandType(StrEnum):
  ENQUEUE_JOB = "enqueue_job"
+ RETRY_JOB = "retry_job"
+ VACCUUM_QUEUED_JOBS = "vaccuum_queued_jobs"
+ CANCEL_JOB = "cancel_job"


  RT = typing.TypeVar("RT")
@@ -24,6 +33,16 @@ class CommandEnqueueJobResponse:
  queued_job_uuid: str


+ @dataclass(kw_only=True)
+ class CommandRetryJobResponse:
+ queued_job_uuid: str | None
+
+
+ @dataclass(kw_only=True)
+ class CommandVaccuumQueuedJobsResponse:
+ pass
+
+
  @dataclass(kw_only=True)
  class CommandEnqueueJob(CommandBase[CommandEnqueueJobResponse]):
  type: CommandType = CommandType.ENQUEUE_JOB
@@ -32,7 +51,31 @@ class CommandEnqueueJob(CommandBase[CommandEnqueueJobResponse]):
  response_queue: asyncio.Queue[CommandEnqueueJobResponse]


- _Command = CommandEnqueueJob
+ @dataclass(kw_only=True)
+ class CommandRetryJob(CommandBase[CommandRetryJobResponse]):
+ type: CommandType = CommandType.RETRY_JOB
+ queued_job_uuid: str
+
+
+ @dataclass(kw_only=True)
+ class CommandVaccuumQueuedJobs(CommandBase[CommandVaccuumQueuedJobsResponse]):
+ type: CommandType = CommandType.VACCUUM_QUEUED_JOBS
+
+
+ @dataclass(kw_only=True)
+ class CommandCancelJobResponse:
+ status: CommandCancelJobStatus
+
+
+ @dataclass(kw_only=True)
+ class CommandCancelJob(CommandBase[CommandCancelJobResponse]):
+ type: CommandType = CommandType.CANCEL_JOB
+ queued_job_uuid: str
+
+
+ _Command = (
+ CommandEnqueueJob | CommandRetryJob | CommandVaccuumQueuedJobs | CommandCancelJob
+ )


  CommandQueue = asyncio.Queue[_Command]
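To illustrate how these command dataclasses travel over the CommandQueue, here is a minimal sketch of submitting a CommandCancelJob and awaiting its typed response. It assumes CommandBase carries the response_queue field that job_scheduler.py reads further down; the base-class definition itself is not part of this excerpt.

# Sketch only: pushing a cancel command onto the scheduler's command queue.
# Assumes CommandBase exposes a response_queue, as the scheduler code implies.
import asyncio

from uncountable.integration.queue_runner.command_server.types import (
    CommandCancelJob,
    CommandCancelJobResponse,
    CommandCancelJobStatus,
    CommandQueue,
)


async def request_cancel(
    command_queue: CommandQueue, queued_job_uuid: str
) -> CommandCancelJobStatus:
    response_queue: asyncio.Queue[CommandCancelJobResponse] = asyncio.Queue()
    await command_queue.put(
        CommandCancelJob(
            queued_job_uuid=queued_job_uuid,
            response_queue=response_queue,
        )
    )
    # The scheduler's cancel handler puts exactly one response on this queue.
    return (await response_queue.get()).status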
uncountable/integration/queue_runner/datastore/datastore_sqlite.py

@@ -2,7 +2,7 @@ import datetime
  import uuid
  from datetime import UTC

- from sqlalchemy import delete, insert, select, update
+ from sqlalchemy import delete, insert, or_, select, text, update
  from sqlalchemy.engine import Engine

  from pkgs.argument_parser import CachedParser
@@ -14,6 +14,8 @@ from uncountable.types import queued_job_t

  queued_job_payload_parser = CachedParser(queued_job_t.QueuedJobPayload)

+ MAX_QUEUE_WINDOW_DAYS = 30
+

  class DatastoreSqlite(Datastore):
  def __init__(self, session_maker: DBSessionMaker) -> None:
@@ -23,6 +25,17 @@ class DatastoreSqlite(Datastore):
  @classmethod
  def setup(cls, engine: Engine) -> None:
  Base.metadata.create_all(engine)
+ with engine.connect() as connection:
+ if not bool(
+ connection.execute(
+ text(
+ "select exists (select 1 from pragma_table_info('queued_jobs') where name='status');"
+ )
+ ).scalar()
+ ):
+ connection.execute(
+ text("alter table queued_jobs add column status VARCHAR")
+ )

  def add_job_to_queue(
  self, job_payload: queued_job_t.QueuedJobPayload, job_ref_name: str
@@ -36,6 +49,7 @@ class DatastoreSqlite(Datastore):
  QueuedJob.id.key: queued_job_uuid,
  QueuedJob.job_ref_name.key: job_ref_name,
  QueuedJob.payload.key: serialized_payload,
+ QueuedJob.status.key: queued_job_t.JobStatus.QUEUED,
  QueuedJob.num_attempts: num_attempts,
  QueuedJob.submitted_at: submitted_at,
  })
@@ -44,10 +58,48 @@ class DatastoreSqlite(Datastore):
  queued_job_uuid=queued_job_uuid,
  job_ref_name=job_ref_name,
  payload=job_payload,
+ status=queued_job_t.JobStatus.QUEUED,
  submitted_at=submitted_at,
  num_attempts=num_attempts,
  )

+ def retry_job(
+ self,
+ queued_job_uuid: str,
+ ) -> queued_job_t.QueuedJob | None:
+ with self.session_maker() as session:
+ select_stmt = select(
+ QueuedJob.id,
+ QueuedJob.payload,
+ QueuedJob.num_attempts,
+ QueuedJob.job_ref_name,
+ QueuedJob.status,
+ QueuedJob.submitted_at,
+ ).filter(QueuedJob.id == queued_job_uuid)
+ existing_job = session.execute(select_stmt).one_or_none()
+
+ if (
+ existing_job is None
+ or existing_job.status != queued_job_t.JobStatus.FAILED
+ ):
+ return None
+
+ update_stmt = (
+ update(QueuedJob)
+ .values({QueuedJob.status.key: queued_job_t.JobStatus.QUEUED})
+ .filter(QueuedJob.id == queued_job_uuid)
+ )
+ session.execute(update_stmt)
+
+ return queued_job_t.QueuedJob(
+ queued_job_uuid=existing_job.id,
+ job_ref_name=existing_job.job_ref_name,
+ num_attempts=existing_job.num_attempts,
+ status=queued_job_t.JobStatus.QUEUED,
+ submitted_at=existing_job.submitted_at,
+ payload=queued_job_payload_parser.parse_storage(existing_job.payload),
+ )
+
  def increment_num_attempts(self, queued_job_uuid: str) -> int:
  with self.session_maker() as session:
  update_stmt = (
@@ -68,6 +120,17 @@ class DatastoreSqlite(Datastore):
  delete_stmt = delete(QueuedJob).filter(QueuedJob.id == queued_job_uuid)
  session.execute(delete_stmt)

+ def update_job_status(
+ self, queued_job_uuid: str, status: queued_job_t.JobStatus
+ ) -> None:
+ with self.session_maker() as session:
+ update_stmt = (
+ update(QueuedJob)
+ .values({QueuedJob.status.key: status})
+ .filter(QueuedJob.id == queued_job_uuid)
+ )
+ session.execute(update_stmt)
+
  def list_queued_job_metadata(
  self, offset: int = 0, limit: int | None = 100
  ) -> list[queued_job_t.QueuedJobMetadata]:
@@ -77,6 +140,7 @@ class DatastoreSqlite(Datastore):
  QueuedJob.id,
  QueuedJob.job_ref_name,
  QueuedJob.num_attempts,
+ QueuedJob.status,
  QueuedJob.submitted_at,
  )
  .order_by(QueuedJob.submitted_at)
@@ -89,6 +153,7 @@ class DatastoreSqlite(Datastore):
  queued_job_uuid=row.id,
  job_ref_name=row.job_ref_name,
  num_attempts=row.num_attempts,
+ status=row.status or queued_job_t.JobStatus.QUEUED,
  submitted_at=row.submitted_at,
  )
  for row in session.execute(select_statement)
@@ -106,9 +171,16 @@ class DatastoreSqlite(Datastore):
  QueuedJob.payload,
  QueuedJob.num_attempts,
  QueuedJob.job_ref_name,
+ QueuedJob.status,
  QueuedJob.submitted_at,
  )
  .filter(QueuedJob.job_ref_name == job_ref_name)
+ .filter(
+ or_(
+ QueuedJob.status == queued_job_t.JobStatus.QUEUED,
+ QueuedJob.status.is_(None),
+ )
+ )
  .limit(1)
  .order_by(QueuedJob.submitted_at)
  )
@@ -119,6 +191,7 @@ class DatastoreSqlite(Datastore):
  queued_job_uuid=row.id,
  job_ref_name=row.job_ref_name,
  num_attempts=row.num_attempts,
+ status=row.status or queued_job_t.JobStatus.QUEUED,
  submitted_at=row.submitted_at,
  payload=parsed_payload,
  )
@@ -127,13 +200,23 @@ class DatastoreSqlite(Datastore):

  def load_job_queue(self) -> list[queued_job_t.QueuedJob]:
  with self.session_maker() as session:
- select_stmt = select(
- QueuedJob.id,
- QueuedJob.payload,
- QueuedJob.num_attempts,
- QueuedJob.job_ref_name,
- QueuedJob.submitted_at,
- ).order_by(QueuedJob.submitted_at)
+ select_stmt = (
+ select(
+ QueuedJob.id,
+ QueuedJob.payload,
+ QueuedJob.num_attempts,
+ QueuedJob.job_ref_name,
+ QueuedJob.status,
+ QueuedJob.submitted_at,
+ )
+ .filter(
+ or_(
+ QueuedJob.status == queued_job_t.JobStatus.QUEUED,
+ QueuedJob.status.is_(None),
+ )
+ )
+ .order_by(QueuedJob.submitted_at)
+ )

  queued_jobs: list[queued_job_t.QueuedJob] = []
  for row in session.execute(select_stmt):
@@ -143,9 +226,50 @@ class DatastoreSqlite(Datastore):
  queued_job_uuid=row.id,
  job_ref_name=row.job_ref_name,
  num_attempts=row.num_attempts,
+ status=row.status or queued_job_t.JobStatus.QUEUED,
  submitted_at=row.submitted_at,
  payload=parsed_payload,
  )
  )

  return queued_jobs
+
+ def get_queued_job(self, *, uuid: str) -> queued_job_t.QueuedJob | None:
+ with self.session_maker() as session:
+ select_stmt = select(
+ QueuedJob.id,
+ QueuedJob.payload,
+ QueuedJob.num_attempts,
+ QueuedJob.job_ref_name,
+ QueuedJob.status,
+ QueuedJob.submitted_at,
+ ).filter(QueuedJob.id == uuid)
+
+ row = session.execute(select_stmt).one_or_none()
+ return (
+ queued_job_t.QueuedJob(
+ queued_job_uuid=row.id,
+ job_ref_name=row.job_ref_name,
+ num_attempts=row.num_attempts,
+ status=row.status or queued_job_t.JobStatus.QUEUED,
+ submitted_at=row.submitted_at,
+ payload=queued_job_payload_parser.parse_storage(row.payload),
+ )
+ if row is not None
+ else None
+ )
+
+ def vaccuum_queued_jobs(self) -> None:
+ with self.session_maker() as session:
+ delete_stmt = (
+ delete(QueuedJob)
+ .filter(QueuedJob.status == queued_job_t.JobStatus.QUEUED)
+ .filter(
+ QueuedJob.submitted_at
+ <= (
+ datetime.datetime.now(UTC)
+ - datetime.timedelta(days=MAX_QUEUE_WINDOW_DAYS)
+ )
+ )
+ )
+ session.execute(delete_stmt)
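The setup() change above is a hand-rolled SQLite migration: it probes pragma_table_info for the new status column and issues an ALTER TABLE only when the column is missing. A standalone sketch of the same pattern follows; the table, column, and database path here are placeholders, not part of the SDK.

# Sketch of the column-probe-then-ALTER pattern used in DatastoreSqlite.setup().
from sqlalchemy import create_engine, text


def ensure_column(engine, table: str, column: str, ddl_type: str = "VARCHAR") -> None:
    with engine.begin() as connection:  # begin() commits the DDL on exit
        exists = connection.execute(
            text(
                "select exists (select 1 from pragma_table_info(:t) where name=:c)"
            ),
            {"t": table, "c": column},
        ).scalar()
        if not exists:
            connection.execute(
                text(f"alter table {table} add column {column} {ddl_type}")
            )


engine = create_engine("sqlite:///queued_jobs.db")  # placeholder path
ensure_column(engine, "queued_jobs", "status")

Rows created before the migration keep a NULL status, which the read paths above normalize to JobStatus.QUEUED via the "row.status or queued_job_t.JobStatus.QUEUED" fallback.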
uncountable/integration/queue_runner/datastore/interface.py

@@ -27,3 +27,6 @@ class Datastore(ABC):
  def list_queued_job_metadata(
  self, offset: int, limit: int | None
  ) -> list[queued_job_t.QueuedJobMetadata]: ...
+
+ @abstractmethod
+ def get_queued_job(self, *, uuid: str) -> queued_job_t.QueuedJob | None: ...
uncountable/integration/queue_runner/datastore/model.py

@@ -1,7 +1,9 @@
- from sqlalchemy import JSON, BigInteger, Column, DateTime, Text
+ from sqlalchemy import JSON, BigInteger, Column, DateTime, Enum, Text
  from sqlalchemy.orm import declarative_base
  from sqlalchemy.sql import func

+ from uncountable.types import queued_job_t
+
  Base = declarative_base()

@@ -15,3 +17,8 @@ class QueuedJob(Base):
  )
  payload = Column(JSON, nullable=False)
  num_attempts = Column(BigInteger, nullable=False, default=0, server_default="0")
+ status = Column(
+ Enum(queued_job_t.JobStatus, length=None),
+ default=queued_job_t.JobStatus.QUEUED,
+ nullable=True,
+ )
uncountable/integration/queue_runner/job_scheduler.py

@@ -1,5 +1,7 @@
  import asyncio
+ import os
  import sys
+ import threading
  import typing
  from concurrent.futures import ProcessPoolExecutor
  from dataclasses import dataclass
@@ -10,8 +12,16 @@ from uncountable.integration.queue_runner.command_server import (
  CommandEnqueueJob,
  CommandEnqueueJobResponse,
  CommandQueue,
+ CommandRetryJob,
+ CommandRetryJobResponse,
  CommandTask,
  )
+ from uncountable.integration.queue_runner.command_server.types import (
+ CommandCancelJob,
+ CommandCancelJobResponse,
+ CommandCancelJobStatus,
+ CommandVaccuumQueuedJobs,
+ )
  from uncountable.integration.queue_runner.datastore import DatastoreSqlite
  from uncountable.integration.queue_runner.datastore.interface import Datastore
  from uncountable.integration.queue_runner.worker import Worker
@@ -19,7 +29,7 @@ from uncountable.integration.scan_profiles import load_profiles
  from uncountable.integration.telemetry import Logger
  from uncountable.types import job_definition_t, queued_job_t

- from .types import ResultQueue, ResultTask
+ from .types import RESTART_EXIT_CODE, ResultQueue, ResultTask

  _MAX_JOB_WORKERS = 5

@@ -99,7 +109,9 @@ async def start_scheduler(
  worker = job_worker_lookup[queued_job.job_ref_name]
  except KeyError as e:
  logger.log_exception(e)
- datastore.remove_job_from_queue(queued_job.queued_job_uuid)
+ datastore.update_job_status(
+ queued_job.queued_job_uuid, queued_job_t.JobStatus.FAILED
+ )
  return
  await worker.listen_queue.put(queued_job)

@@ -135,6 +147,53 @@ async def start_scheduler(
  CommandEnqueueJobResponse(queued_job_uuid=queued_job_uuid)
  )

+ async def _handle_cancel_job_command(command: CommandCancelJob) -> None:
+ queued_job = datastore.get_queued_job(uuid=command.queued_job_uuid)
+ if queued_job is None:
+ await command.response_queue.put(
+ CommandCancelJobResponse(status=CommandCancelJobStatus.NO_JOB_FOUND)
+ )
+ return
+
+ if queued_job.status == queued_job_t.JobStatus.QUEUED:
+ datastore.remove_job_from_queue(command.queued_job_uuid)
+ await command.response_queue.put(
+ CommandCancelJobResponse(
+ status=CommandCancelJobStatus.CANCELLED_WITH_RESTART
+ )
+ )
+
+ def delayed_exit() -> None:
+ os._exit(RESTART_EXIT_CODE)
+
+ threading.Timer(interval=5, function=delayed_exit).start()
+
+ else:
+ await command.response_queue.put(
+ CommandCancelJobResponse(
+ status=CommandCancelJobStatus.JOB_ALREADY_COMPLETED
+ )
+ )
+
+ async def _handle_retry_job_command(command: CommandRetryJob) -> None:
+ queued_job = datastore.retry_job(command.queued_job_uuid)
+ if queued_job is None:
+ await command.response_queue.put(
+ CommandRetryJobResponse(queued_job_uuid=None)
+ )
+ return
+
+ await enqueue_queued_job(queued_job)
+ await command.response_queue.put(
+ CommandRetryJobResponse(queued_job_uuid=queued_job.queued_job_uuid)
+ )
+
+ def _handle_vaccuum_queued_jobs_command(
+ command: CommandVaccuumQueuedJobs,
+ ) -> None:
+ logger.log_info("Vaccuuming queued jobs...")
+ datastore.vaccuum_queued_jobs()
+
  for queued_job in queued_jobs:
  await enqueue_queued_job(queued_job)

@@ -151,10 +210,26 @@ async def start_scheduler(
  match command:
  case CommandEnqueueJob():
  await _handle_enqueue_job_command(command=command)
+ case CommandRetryJob():
+ await _handle_retry_job_command(command=command)
+ case CommandVaccuumQueuedJobs():
+ _handle_vaccuum_queued_jobs_command(command=command)
+ case CommandCancelJob():
+ await _handle_cancel_job_command(command=command)
  case _:
  typing.assert_never(command)
  command_task = asyncio.create_task(command_queue.get())
  elif task == result_task:
  queued_job_result = result_task.result()
- datastore.remove_job_from_queue(queued_job_result.queued_job_uuid)
+ match queued_job_result.job_result.success:
+ case True:
+ datastore.update_job_status(
+ queued_job_result.queued_job_uuid,
+ queued_job_t.JobStatus.SUCCESS,
+ )
+ case False:
+ datastore.update_job_status(
+ queued_job_result.queued_job_uuid,
+ queued_job_t.JobStatus.FAILED,
+ )
  result_task = asyncio.create_task(result_queue.get())
uncountable/integration/queue_runner/types.py

@@ -5,3 +5,5 @@ from uncountable.types import queued_job_t
  ListenQueue = Queue[queued_job_t.QueuedJob]
  ResultQueue = Queue[queued_job_t.QueuedJobResult]
  ResultTask = Task[queued_job_t.QueuedJobResult]
+
+ RESTART_EXIT_CODE = 147
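RESTART_EXIT_CODE ties into the cancel flow above: after cancelling a queued job the scheduler arms a delayed os._exit(147) so the surrounding process can be restarted cleanly. How the parent process reacts to that exit code is not shown in this diff; a hypothetical supervisor loop might look like this, with the invoked module being an assumption:

# Sketch only: a hypothetical supervisor that restarts the queue runner when it
# exits with RESTART_EXIT_CODE (147). The entry-point module is an assumption.
import subprocess
import sys

RESTART_EXIT_CODE = 147

while True:
    proc = subprocess.run(
        [sys.executable, "-m", "uncountable.integration.queue_runner"]  # assumed entry point
    )
    if proc.returncode != RESTART_EXIT_CODE:
        # Any other exit code is treated as final.
        sys.exit(proc.returncode)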