UncountablePythonSDK 0.0.125__py3-none-any.whl → 0.0.127__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of UncountablePythonSDK might be problematic.

Files changed (48)
  1. examples/integration-server/jobs/materials_auto/example_instrument.py +67 -38
  2. examples/integration-server/jobs/materials_auto/example_predictions.py +61 -0
  3. examples/integration-server/jobs/materials_auto/profile.yaml +9 -0
  4. examples/integration-server/pyproject.toml +3 -3
  5. pkgs/type_spec/builder.py +19 -9
  6. pkgs/type_spec/emit_typescript.py +2 -2
  7. pkgs/type_spec/type_info/emit_type_info.py +14 -1
  8. pkgs/type_spec/value_spec/__main__.py +2 -2
  9. uncountable/integration/cli.py +29 -1
  10. uncountable/integration/executors/executors.py +1 -2
  11. uncountable/integration/executors/generic_upload_executor.py +1 -1
  12. uncountable/integration/job.py +1 -0
  13. uncountable/integration/queue_runner/command_server/__init__.py +4 -0
  14. uncountable/integration/queue_runner/command_server/command_client.py +39 -0
  15. uncountable/integration/queue_runner/command_server/command_server.py +37 -0
  16. uncountable/integration/queue_runner/command_server/protocol/command_server.proto +18 -0
  17. uncountable/integration/queue_runner/command_server/protocol/command_server_pb2.py +21 -13
  18. uncountable/integration/queue_runner/command_server/protocol/command_server_pb2.pyi +28 -1
  19. uncountable/integration/queue_runner/command_server/protocol/command_server_pb2_grpc.py +90 -0
  20. uncountable/integration/queue_runner/command_server/types.py +24 -1
  21. uncountable/integration/queue_runner/datastore/datastore_sqlite.py +107 -8
  22. uncountable/integration/queue_runner/datastore/model.py +8 -1
  23. uncountable/integration/queue_runner/job_scheduler.py +42 -2
  24. uncountable/integration/queue_runner/worker.py +1 -1
  25. uncountable/integration/server.py +36 -6
  26. uncountable/types/__init__.py +8 -0
  27. uncountable/types/api/integrations/register_sockets_token.py +41 -0
  28. uncountable/types/api/recipes/edit_recipe_inputs.py +1 -1
  29. uncountable/types/api/recipes/get_recipe_output_metadata.py +2 -2
  30. uncountable/types/api/recipes/get_recipes_data.py +16 -0
  31. uncountable/types/api/recipes/lock_recipes.py +2 -1
  32. uncountable/types/api/recipes/set_recipe_total.py +59 -0
  33. uncountable/types/api/recipes/unlock_recipes.py +2 -1
  34. uncountable/types/api/uploader/complete_async_parse.py +46 -0
  35. uncountable/types/async_batch_processor.py +124 -0
  36. uncountable/types/async_batch_t.py +2 -0
  37. uncountable/types/client_base.py +76 -0
  38. uncountable/types/entity_t.py +1 -1
  39. uncountable/types/queued_job.py +1 -0
  40. uncountable/types/queued_job_t.py +9 -0
  41. uncountable/types/sockets.py +9 -0
  42. uncountable/types/sockets_t.py +99 -0
  43. uncountable/types/uploader.py +24 -0
  44. uncountable/types/uploader_t.py +222 -0
  45. {uncountablepythonsdk-0.0.125.dist-info → uncountablepythonsdk-0.0.127.dist-info}/METADATA +1 -1
  46. {uncountablepythonsdk-0.0.125.dist-info → uncountablepythonsdk-0.0.127.dist-info}/RECORD +48 -42
  47. {uncountablepythonsdk-0.0.125.dist-info → uncountablepythonsdk-0.0.127.dist-info}/WHEEL +0 -0
  48. {uncountablepythonsdk-0.0.125.dist-info → uncountablepythonsdk-0.0.127.dist-info}/top_level.txt +0 -0
examples/integration-server/jobs/materials_auto/example_instrument.py CHANGED
@@ -1,17 +1,20 @@
+import json
 import time
 from dataclasses import dataclass
 from decimal import Decimal
 
+from pkgs.argument_parser.argument_parser import CachedParser
+from pkgs.serialization_util import serialize_for_api
 from uncountable.integration.job import JobArguments, WebhookJob, register_job
 from uncountable.types import (
     base_t,
-    entity_t,
     identifier_t,
-    integrations_t,
     job_definition_t,
-    publish_realtime_data_t,
+    sockets_t,
 )
-from uncountable.types.client_base import APIRequest
+from uncountable.types.integration_session_t import IntegrationSessionInstrument
+from websockets.sync.client import connect
+from websockets.typing import Data
 
 
 @dataclass(kw_only=True)
@@ -24,45 +27,71 @@ class InstrumentExample(WebhookJob[InstrumentPayload]):
     def run(
         self, args: JobArguments, payload: InstrumentPayload
     ) -> job_definition_t.JobResult:
-        equipment_data = args.client.get_entities_data(
-            entity_type=entity_t.EntityType.EQUIPMENT,
-            entity_ids=[payload.equipment_id],
-        ).entity_details[0]
+        parser: CachedParser[sockets_t.SocketResponse] = CachedParser(
+            sockets_t.SocketResponse  # type:ignore[arg-type]
+        )
 
-        # Load the instrument's connection details from the entity
-        instrument_id = None
-        for field in equipment_data.field_values:
-            if field.field_ref_name == "ins_instrument_id":
-                instrument_id = field.value
+        def parse_message(message: Data) -> sockets_t.SocketEventData | None:
+            try:
+                return parser.parse_api(json.loads(message)).data
+            except ValueError as e:
+                return None
 
-        if instrument_id is None:
-            args.logger.log_error("Could not find instrument ID")
-            return job_definition_t.JobResult(success=False)
-
-        args.logger.log_info(f"Instrument ID: {instrument_id}")
-
-        for i in range(10):
-            req_args = publish_realtime_data_t.Arguments(
-                data_package=integrations_t.DataPackageNumericReading(
-                    value=Decimal(i * 15),
-                    target_entity=entity_t.EntityIdentifier(
-                        identifier_key=identifier_t.IdentifierKeyId(
-                            id=payload.equipment_id
-                        ),
-                        type=entity_t.EntityType.EQUIPMENT,
-                    ),
-                ),
-            )
-            api_request = APIRequest(
-                method=publish_realtime_data_t.ENDPOINT_METHOD,
-                endpoint=publish_realtime_data_t.ENDPOINT_PATH,
-                args=req_args,
+        integration_session = IntegrationSessionInstrument(
+            equipment_key=identifier_t.IdentifierKeyId(id=payload.equipment_id)
+        )
+        registration_info = args.client.register_sockets_token(
+            socket_request=sockets_t.SocketRequestIntegrationSession(
+                integration_session=integration_session
             )
-            args.client.do_request(
-                api_request=api_request, return_type=publish_realtime_data_t.Data
+        ).response
+        token = registration_info.token
+        room_key = registration_info.room_key
+        args.logger.log_info(f"Token: {token}")
+
+        with connect(
+            "ws://host.docker.internal:8765",
+            additional_headers={
+                "Authorization": f"Bearer {token}",
+                "X-UNC-EXTERNAL": "true",
+            },
+        ) as ws:
+            ws.send(
+                json.dumps(
+                    serialize_for_api(
+                        sockets_t.JoinRoomWithTokenSocketClientMessage(token=token)
+                    )
+                )
             )
-            time.sleep(0.75)
+            for i in range(10):
+                args.logger.log_info("Sending reading...")
+                ws.send(
+                    json.dumps(
+                        serialize_for_api(
+                            sockets_t.SendInstrumentReadingClientMessage(
+                                value=Decimal(i * 100), room_key=room_key
+                            )
+                        )
+                    )
+                )
+                time.sleep(0.75)
+
+            while True:
+                message = parse_message(ws.recv())
+                match message:
+                    case sockets_t.UsersInRoomUpdatedEventData():
+                        num_users = len(message.user_ids)
+                        if num_users <= 1:
+                            break
+                        else:
+                            args.logger.log_info(
+                                f"Session still open, {num_users} users in room."
+                            )
+                    case _:
+                        args.logger.log_info("Session still open...")
+                        continue
 
+        args.logger.log_info("Session closed.")
         return job_definition_t.JobResult(success=True)
 
     @property
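
The rewritten example drops the per-reading publish_realtime_data API round-trips in favor of a single websocket session: register a short-lived token, join the room it names, stream readings, then wait until the job is the last user left in the room. A condensed sketch of that handshake, using only the SDK types and the ws://host.docker.internal:8765 endpoint that appear in the diff above (the helper name stream_readings is ours, not part of the SDK):

    import json
    from decimal import Decimal

    from pkgs.serialization_util import serialize_for_api
    from uncountable.types import identifier_t, sockets_t
    from uncountable.types.integration_session_t import IntegrationSessionInstrument
    from websockets.sync.client import connect


    def stream_readings(client, equipment_id, values: list[Decimal]) -> None:
        # Register a socket token scoped to an instrument session for this equipment.
        info = client.register_sockets_token(
            socket_request=sockets_t.SocketRequestIntegrationSession(
                integration_session=IntegrationSessionInstrument(
                    equipment_key=identifier_t.IdentifierKeyId(id=equipment_id)
                )
            )
        ).response
        with connect(
            "ws://host.docker.internal:8765",
            additional_headers={"Authorization": f"Bearer {info.token}"},
        ) as ws:
            # Join the token's room first, then publish readings into it.
            ws.send(json.dumps(serialize_for_api(
                sockets_t.JoinRoomWithTokenSocketClientMessage(token=info.token)
            )))
            for value in values:
                ws.send(json.dumps(serialize_for_api(
                    sockets_t.SendInstrumentReadingClientMessage(
                        value=value, room_key=info.room_key
                    )
                )))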
examples/integration-server/jobs/materials_auto/example_predictions.py ADDED
@@ -0,0 +1,61 @@
+import random
+from dataclasses import dataclass
+from datetime import datetime
+from decimal import Decimal
+
+from uncountable.integration.job import JobArguments, WebhookJob, register_job
+from uncountable.types import (
+    base_t,
+    identifier_t,
+    job_definition_t,
+    recipe_links_t,
+    set_recipe_outputs_t,
+)
+
+
+@dataclass(kw_only=True)
+class PredictionsPayload:
+    output_id: base_t.ObjectId
+    recipe_ids: list[base_t.ObjectId]
+
+
+@register_job
+class PredictionsExample(WebhookJob[PredictionsPayload]):
+    def run(
+        self, args: JobArguments, payload: PredictionsPayload
+    ) -> job_definition_t.JobResult:
+        recipe_data = args.client.get_recipes_data(recipe_ids=payload.recipe_ids)
+        formatted_datetime = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+
+        for recipe in recipe_data.recipes:
+            test_sample_name = f"Predictions Model ({formatted_datetime})"
+            created_recipe_id = args.client.create_recipe(
+                name=test_sample_name,
+                material_family_id=1,
+                workflow_id=1,
+                definition_key=identifier_t.IdentifierKeyRefName(
+                    ref_name="unc_test_sample"
+                ),
+            ).result_id
+            args.client.set_recipe_outputs(
+                output_data=[
+                    set_recipe_outputs_t.RecipeOutputValue(
+                        recipe_id=created_recipe_id,
+                        output_id=payload.output_id,
+                        experiment_num=1,
+                        value_numeric=Decimal(random.random() * 10),
+                    )
+                ]
+            )
+            args.client.create_recipe_link(
+                recipe_from_key=identifier_t.IdentifierKeyId(id=recipe.recipe_id),
+                recipe_to_key=identifier_t.IdentifierKeyId(id=created_recipe_id),
+                link_type=recipe_links_t.RecipeLinkType.CHILD,
+                name=test_sample_name,
+            )
+
+        return job_definition_t.JobResult(success=True)
+
+    @property
+    def webhook_payload_type(self) -> type:
+        return PredictionsPayload
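
For reference, the webhook body this job receives is deserialized into PredictionsPayload, so a trigger request would carry JSON shaped roughly like the dict below; the ids are placeholders, and signature verification is configured separately in profile.yaml:

    # Hypothetical trigger body for the example_predictions webhook;
    # the field names come from PredictionsPayload above, the ids are made up.
    payload = {
        "output_id": 42,
        "recipe_ids": [101, 102, 103],
    }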
examples/integration-server/jobs/materials_auto/profile.yaml CHANGED
@@ -84,3 +84,12 @@ jobs:
     executor:
       type: script
       import_path: example_instrument
+  - id: example_predictions
+    type: webhook
+    name: Webook Predictions
+    signature_key_secret:
+      type: env
+      env_key: WH_PREDICTIONS_SIGNATURE_KEY
+    executor:
+      type: script
+      import_path: example_predictions
examples/integration-server/pyproject.toml CHANGED
@@ -9,7 +9,7 @@ dependencies = [
     "ruff == 0.*",
     "openpyxl == 3.*",
     "more_itertools == 10.*",
-    "types-paramiko ==3.5.0.20250801",
+    "types-paramiko ==4.0.0.20250822",
     "types-openpyxl == 3.*",
     "types-pysftp == 0.*",
     "types-pytz ==2025.*",
@@ -17,7 +17,8 @@ dependencies = [
     "types-simplejson == 3.*",
     "pandas-stubs",
     "xlrd == 2.*",
-    "msgspec == 0.19.*"
+    "msgspec == 0.19.*",
+    "websockets==15.0.1",
 ]
 
 [tool.mypy]
@@ -194,7 +195,6 @@ lint.ignore = [
     "RUF022", # __all__ is not sorted. skip due to isort complication
     "UP017", # use datetime.UTC, TODO add back in
     "UP035", # replacing List with list, TODO add back in
-    "UP038", # isinstance X | Y instead of (X, Y), TODO add back in
     # ## FROM RUFF UPGRADE
     "PLC2701", # private name imports. should add
     "PLR1702", # too many nested blocks -- add with config. skip
pkgs/type_spec/builder.py CHANGED
@@ -308,6 +308,7 @@ class SpecTypeDefn(SpecType):
         self._is_value_to_string = False
         self._is_valid_parameter = True
         self._is_dynamic_allowed = False
+        self._default_extant: PropertyExtant | None = None
         self.ext_info: Any = None
 
     def is_value_converted(self) -> bool:
@@ -340,6 +341,7 @@ class SpecTypeDefn(SpecType):
             "ext_info",
             "label",
             "is_dynamic_allowed",
+            "default_extant",
         ]
         + extra_names,
     )
@@ -351,6 +353,10 @@ class SpecTypeDefn(SpecType):
         assert isinstance(is_dynamic_allowed, bool)
         self._is_dynamic_allowed = is_dynamic_allowed
 
+        default_extant = data.get("default_extant")
+        if default_extant is not None:
+            self._default_extant = PropertyExtant(default_extant)
+
     def _process_property(
         self, builder: SpecBuilder, spec_name: str, data: RawDict
     ) -> SpecProperty:
@@ -369,18 +375,18 @@ class SpecTypeDefn(SpecType):
             ],
         )
         try:
-            extant_type = data.get("extant")
+            extant_type_str = data.get("extant")
+            extant_type = (
+                PropertyExtant(extant_type_str) if extant_type_str is not None else None
+            )
+            extant = extant_type or self._default_extant
             if spec_name.endswith("?"):
-                if extant_type is not None:
+                if extant is not None:
                     raise Exception("cannot specify extant with ?")
                 extant = PropertyExtant.optional
                 name = spec_name[:-1]
             else:
-                extant = (
-                    PropertyExtant.required
-                    if extant_type is None
-                    else PropertyExtant(extant_type)
-                )
+                extant = extant or PropertyExtant.required
                 name = spec_name
 
             property_name_case = self.name_case
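
The new default_extant option lets a type definition supply a fallback extant for all of its properties. The resolution order in _process_property is now: the property's explicit extant, else the type's default_extant, else required; the trailing ? shorthand still forces optional and conflicts with an extant resolved from either source. A minimal restatement of that precedence, assuming the PropertyExtant enum from this module:

    def resolve_extant(
        spec_name: str,
        explicit: PropertyExtant | None,
        default: PropertyExtant | None,
    ) -> tuple[str, PropertyExtant]:
        # Mirrors SpecTypeDefn._process_property: explicit extant wins,
        # then the type-level default, then required.
        extant = explicit or default
        if spec_name.endswith("?"):
            if extant is not None:
                raise Exception("cannot specify extant with ?")
            return spec_name[:-1], PropertyExtant.optional
        return spec_name, extant or PropertyExtant.required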
@@ -1392,9 +1398,13 @@
         self.emit_id_source_enums: set[SpecTypeDefnStringEnum] = set()
 
         this_dir = os.path.dirname(os.path.realpath(__file__))
-        with open(f"{this_dir}/parts/base.py.prepart") as py_base_part:
+        with open(
+            f"{this_dir}/parts/base.py.prepart", encoding="utf-8"
+        ) as py_base_part:
             self.preparts["python"][base_namespace_name] = py_base_part.read()
-        with open(f"{this_dir}/parts/base.ts.prepart") as ts_base_part:
+        with open(
+            f"{this_dir}/parts/base.ts.prepart", encoding="utf-8"
+        ) as ts_base_part:
             self.preparts["typescript"][base_namespace_name] = ts_base_part.read()
 
         base_namespace.types["ObjectId"] = SpecTypeDefnObject(
pkgs/type_spec/emit_typescript.py CHANGED
@@ -285,14 +285,14 @@ export const apiCall = {wrap_call}(
     index_path = f"{config.endpoint_to_routes_output[endpoint.default_endpoint_key]}/{'/'.join(namespace.path[0:-1])}/index.tsx"
     api_name = f"Api{ts_type_name(namespace.path[0 - 1])}"
     if os.path.exists(index_path):
-        with open(index_path) as index:
+        with open(index_path, encoding="utf-8") as index:
             index_data = index.read()
             need_index = index_data.find(api_name) == -1
     else:
         need_index = True
 
     if need_index:
-        with open(index_path, "a") as index:
+        with open(index_path, "a", encoding="utf-8") as index:
             print(f"Updated API Index {index_path}")
             index.write(f'import * as {api_name} from "./{namespace.path[-1]}"\n\n')
             index.write(f"export {{ {api_name} }}\n")
pkgs/type_spec/type_info/emit_type_info.py CHANGED
@@ -41,10 +41,23 @@ def type_path_of(stype: builder.SpecType) -> object: # NamePath
         parts: list[object] = ["$literal"]
         for parameter in stype.parameters:
             assert isinstance(parameter, builder.SpecTypeLiteralWrapper)
+            emit_value = parameter.value
+            if isinstance(parameter.value_type, builder.SpecTypeDefnObject):
+                emit_value = parameter.value
+                assert isinstance(emit_value, (str, bool)), (
+                    f"invalid-literal-value:{emit_value}"
+                )
+            elif isinstance(parameter.value_type, builder.SpecTypeDefnStringEnum):
+                key = parameter.value
+                assert isinstance(key, str)
+                emit_value = parameter.value_type.values[key].value
+            else:
+                raise Exception("unhandled-literal-type")
+
             # This allows expansion to enum literal values later
             parts.append([
                 "$value",
-                parameter.value,
+                emit_value,
                 type_path_of(parameter.value_type),
             ])
         return parts
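
The practical effect is that a literal parameter backed by a string enum now emits the enum's declared value rather than its spec key. For a hypothetical enum member with key red and value "r" in a namespace my_namespace, the emitted $value fragment would change along these lines:

    # Before: the spec key leaked into the emitted type info.
    ["$value", "red", ["my_namespace", "Color"]]
    # After: the enum's wire value is emitted instead.
    ["$value", "r", ["my_namespace", "Color"]]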
pkgs/type_spec/value_spec/__main__.py CHANGED
@@ -20,7 +20,7 @@ The accepted argument type must accept "None", it is not implied.
 """
 
 import sys
-from typing import TypeVar, cast
+from typing import Match, Pattern, TypeVar, cast
 
 import regex as re
 
@@ -56,7 +56,7 @@ class Source:
     def has_more(self) -> bool:
         return self._at < len(self._text)
 
-    def match(self, expression: re.Pattern) -> re.Match | None:
+    def match(self, expression: Pattern[str]) -> Match[str] | None:
         self.skip_space()
         m = expression.match(self._text, self._at)
         if m is not None:
uncountable/integration/cli.py CHANGED
@@ -8,6 +8,7 @@ from uncountable.core.environment import get_local_admin_server_port
 from uncountable.integration.queue_runner.command_server.command_client import (
     send_job_queue_message,
     send_list_queued_jobs_message,
+    send_retry_job_message,
 )
 from uncountable.integration.telemetry import Logger
 from uncountable.types import queued_job_t
@@ -70,12 +71,13 @@ def register_list_queued_jobs(
             port=get_local_admin_server_port(),
         )
 
-        headers = ["UUID", "Job Ref Name", "Attempts", "Submitted At"]
+        headers = ["UUID", "Job Ref Name", "Attempts", "Status", "Submitted At"]
         rows = [
            [
                 job.uuid,
                 job.job_ref_name,
                 job.num_attempts,
+                job.status,
                 job.submitted_at.ToDatetime(tz.UTC).astimezone(tz.tzlocal()),
            ]
            for job in queued_jobs
@@ -85,6 +87,31 @@ def register_list_queued_jobs(
     list_queued_jobs_parser.set_defaults(func=_handle_list_queued_jobs)
 
 
+def register_retry_job_parser(
+    sub_parser_manager: argparse._SubParsersAction,
+    parents: list[argparse.ArgumentParser],
+) -> None:
+    retry_failed_jobs_parser = sub_parser_manager.add_parser(
+        "retry-job",
+        parents=parents,
+        help="Retry failed job on the integration server",
+        description="Retry failed job on the integration server",
+    )
+
+    retry_failed_jobs_parser.add_argument(
+        "job_uuid", type=str, help="The uuid of the job to retry"
+    )
+
+    def _handle_retry_job(args: argparse.Namespace) -> None:
+        send_retry_job_message(
+            job_uuid=args.job_uuid,
+            host=args.host,
+            port=get_local_admin_server_port(),
+        )
+
+    retry_failed_jobs_parser.set_defaults(func=_handle_retry_job)
+
+
 def main() -> None:
     logger = Logger(get_current_span())
 
@@ -104,6 +131,7 @@ def main() -> None:
     )
 
     register_enqueue_job_parser(subparser_action, parents=[base_parser])
+    register_retry_job_parser(subparser_action, parents=[base_parser])
     register_list_queued_jobs(subparser_action, parents=[base_parser])
 
     args = main_parser.parse_args()
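
Once registered, the new retry-job subcommand can also be exercised directly through the client helper it wraps; a minimal sketch, assuming a command server is listening on the local admin port (the uuid is a placeholder):

    from uncountable.core.environment import get_local_admin_server_port
    from uncountable.integration.queue_runner.command_server.command_client import (
        send_retry_job_message,
    )

    # Re-enqueue a failed job by uuid; gRPC errors surface as ValueError and
    # an unsuccessful queue result raises CommandServerBadResponse.
    new_uuid = send_retry_job_message(
        job_uuid="00000000-0000-0000-0000-000000000000",
        host="localhost",
        port=get_local_admin_server_port(),
    )
    print(f"requeued as {new_uuid}")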
uncountable/integration/executors/executors.py CHANGED
@@ -88,7 +88,6 @@ def execute_job(
     job_definition: job_definition_t.JobDefinition,
     profile_metadata: job_definition_t.ProfileMetadata,
     args: JobArguments,
-    job_uuid: str,
 ) -> job_definition_t.JobResult:
     with args.logger.push_scope(job_definition.name) as job_logger:
         job = resolve_executor(job_definition.executor, profile_metadata)
@@ -104,7 +103,7 @@ def execute_job(
             run_entity = _create_run_entity(
                 client=args.client,
                 logging_settings=job_definition.logging_settings,
-                job_uuid=job_uuid,
+                job_uuid=args.job_uuid,
             )
             result = job.run_outer(args=args)
         except Exception as e:
uncountable/integration/executors/generic_upload_executor.py CHANGED
@@ -41,7 +41,7 @@ def _get_extension(filename: str) -> str | None:
 
 def _run_keyword_detection(data: io.BytesIO, keyword: str) -> bool:
     try:
-        text = io.TextIOWrapper(data)
+        text = io.TextIOWrapper(data, encoding="utf-8")
         for line in text:
             if (
                 keyword in line
uncountable/integration/job.py CHANGED
@@ -49,6 +49,7 @@ class JobArguments:
     batch_processor: AsyncBatchProcessor
     logger: JobLogger
     payload: base_t.JsonValue
+    job_uuid: str
 
 
 # only for compatibility:
uncountable/integration/queue_runner/command_server/__init__.py CHANGED
@@ -4,6 +4,8 @@ from .types import (
     CommandEnqueueJob,
     CommandEnqueueJobResponse,
     CommandQueue,
+    CommandRetryJob,
+    CommandRetryJobResponse,
     CommandServerBadResponse,
     CommandServerException,
     CommandServerTimeout,
@@ -16,6 +18,8 @@ __all__: list[str] = [
     "send_job_queue_message",
     "CommandEnqueueJob",
     "CommandEnqueueJobResponse",
+    "CommandRetryJob",
+    "CommandRetryJobResponse",
     "CommandTask",
     "CommandQueue",
     "CommandServerTimeout",
uncountable/integration/queue_runner/command_server/command_client.py CHANGED
@@ -12,6 +12,10 @@ from uncountable.integration.queue_runner.command_server.protocol.command_server
     EnqueueJobResult,
     ListQueuedJobsRequest,
     ListQueuedJobsResult,
+    RetryJobRequest,
+    RetryJobResult,
+    VaccuumQueuedJobsRequest,
+    VaccuumQueuedJobsResult,
 )
 from uncountable.integration.queue_runner.command_server.types import (
     CommandServerBadResponse,
@@ -59,6 +63,26 @@ def send_job_queue_message(
         return response.queued_job_uuid
 
 
+def send_retry_job_message(
+    *,
+    job_uuid: str,
+    host: str = "localhost",
+    port: int,
+) -> str:
+    with command_server_connection(host=host, port=port) as stub:
+        request = RetryJobRequest(uuid=job_uuid)
+
+        try:
+            response = stub.RetryJob(request, timeout=_DEFAULT_MESSAGE_TIMEOUT_SECS)
+            assert isinstance(response, RetryJobResult)
+            if not response.successfully_queued:
+                raise CommandServerBadResponse("queue operation was not successful")
+
+            return response.queued_job_uuid
+        except grpc.RpcError as e:
+            raise ValueError(e.details())  # type: ignore
+
+
 def check_health(*, host: str = _LOCAL_RPC_HOST, port: int) -> bool:
     with command_server_connection(host=host, port=port) as stub:
         request = CheckHealthRequest()
@@ -92,3 +116,18 @@ def send_list_queued_jobs_message(
 
     assert isinstance(response, ListQueuedJobsResult)
     return list(response.queued_jobs)
+
+
+def send_vaccuum_queued_jobs_message(*, host: str = "localhost", port: int) -> None:
+    with command_server_connection(host=host, port=port) as stub:
+        request = VaccuumQueuedJobsRequest()
+
+        try:
+            response = stub.VaccuumQueuedJobs(
+                request, timeout=_DEFAULT_MESSAGE_TIMEOUT_SECS
+            )
+        except grpc.RpcError as e:
+            raise ValueError(e.details())  # type: ignore
+
+    assert isinstance(response, VaccuumQueuedJobsResult)
+    return None
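
The companion send_vaccuum_queued_jobs_message helper (the Vaccuum spelling matches the proto) follows the same pattern but carries no payload and returns None on success; a minimal sketch under the same local-server assumption:

    from uncountable.core.environment import get_local_admin_server_port
    from uncountable.integration.queue_runner.command_server.command_client import (
        send_vaccuum_queued_jobs_message,
    )

    # Ask the command server to vacuum the queued-jobs datastore;
    # gRPC failures surface as ValueError.
    send_vaccuum_queued_jobs_message(
        host="localhost", port=get_local_admin_server_port()
    )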
uncountable/integration/queue_runner/command_server/command_server.py CHANGED
@@ -13,11 +13,19 @@ from uncountable.integration.queue_runner.command_server.protocol.command_server
     EnqueueJobResult,
     ListQueuedJobsRequest,
     ListQueuedJobsResult,
+    RetryJobRequest,
+    RetryJobResult,
+    VaccuumQueuedJobsRequest,
+    VaccuumQueuedJobsResult,
 )
 from uncountable.integration.queue_runner.command_server.types import (
     CommandEnqueueJob,
     CommandEnqueueJobResponse,
     CommandQueue,
+    CommandRetryJob,
+    CommandRetryJobResponse,
+    CommandVaccuumQueuedJobs,
+    CommandVaccuumQueuedJobsResponse,
 )
 from uncountable.integration.queue_runner.datastore import DatastoreSqlite
 from uncountable.types import queued_job_t
@@ -54,6 +62,23 @@ async def serve(command_queue: CommandQueue, datastore: DatastoreSqlite) -> None
             )
             return result
 
+        async def RetryJob(
+            self, request: RetryJobRequest, context: aio.ServicerContext
+        ) -> RetryJobResult:
+            response_queue: asyncio.Queue[CommandRetryJobResponse] = asyncio.Queue()
+            await command_queue.put(
+                CommandRetryJob(
+                    queued_job_uuid=request.uuid, response_queue=response_queue
+                )
+            )
+            response = await response_queue.get()
+            if response.queued_job_uuid is not None:
+                return RetryJobResult(
+                    successfully_queued=True, queued_job_uuid=response.queued_job_uuid
+                )
+            else:
+                return RetryJobResult(successfully_queued=False, queued_job_uuid="")
+
         async def CheckHealth(
             self, request: CheckHealthRequest, context: aio.ServicerContext
         ) -> CheckHealthResult:
@@ -90,10 +115,22 @@ async def serve(command_queue: CommandQueue, datastore: DatastoreSqlite) -> None
                         job_ref_name=item.job_ref_name,
                         num_attempts=item.num_attempts,
                         submitted_at=proto_timestamp,
+                        status=item.status,
                     )
                 )
             return ListQueuedJobsResult(queued_jobs=response_list)
 
+        async def VaccuumQueuedJobs(
+            self, request: VaccuumQueuedJobsRequest, context: aio.ServicerContext
+        ) -> VaccuumQueuedJobsResult:
+            response_queue: asyncio.Queue[CommandVaccuumQueuedJobsResponse] = (
+                asyncio.Queue()
+            )
+            await command_queue.put(
+                CommandVaccuumQueuedJobs(response_queue=response_queue)
+            )
+            return VaccuumQueuedJobsResult()
+
     add_CommandServerServicer_to_server(CommandServerHandler(), server)
 
     listen_addr = f"[::]:{get_local_admin_server_port()}"
uncountable/integration/queue_runner/command_server/protocol/command_server.proto CHANGED
@@ -3,8 +3,10 @@ import "google/protobuf/timestamp.proto";
 
 service CommandServer {
   rpc EnqueueJob(EnqueueJobRequest) returns (EnqueueJobResult) {}
+  rpc RetryJob(RetryJobRequest) returns (RetryJobResult) {}
   rpc CheckHealth(CheckHealthRequest) returns (CheckHealthResult) {}
   rpc ListQueuedJobs(ListQueuedJobsRequest) returns (ListQueuedJobsResult) {}
+  rpc VaccuumQueuedJobs(VaccuumQueuedJobsRequest) returns (VaccuumQueuedJobsResult) {}
 }
 
 message EnqueueJobRequest {
@@ -17,6 +19,21 @@ message EnqueueJobResult {
   string queued_job_uuid = 2;
 }
 
+message RetryJobRequest {
+  string uuid = 1;
+}
+
+message RetryJobResult {
+  bool successfully_queued = 1;
+  string queued_job_uuid = 2;
+}
+
+message VaccuumQueuedJobsRequest {
+}
+
+message VaccuumQueuedJobsResult {
+}
+
 message CheckHealthRequest {}
 
 message CheckHealthResult {
@@ -34,6 +51,7 @@ message ListQueuedJobsResult {
     string job_ref_name = 2;
     int64 num_attempts = 3;
     google.protobuf.Timestamp submitted_at = 4;
+    string status = 5;
   }
 
   repeated ListQueuedJobsResultItem queued_jobs = 1;