UncountablePythonSDK 0.0.126__py3-none-any.whl → 0.0.128__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release: this version of UncountablePythonSDK might be problematic.

Files changed (49)
  1. examples/integration-server/jobs/materials_auto/example_instrument.py +67 -38
  2. examples/integration-server/jobs/materials_auto/example_parse.py +87 -0
  3. examples/integration-server/jobs/materials_auto/example_predictions.py +61 -0
  4. examples/integration-server/jobs/materials_auto/example_runsheet_wh.py +3 -2
  5. examples/integration-server/jobs/materials_auto/profile.yaml +18 -0
  6. examples/integration-server/pyproject.toml +3 -3
  7. pkgs/type_spec/builder.py +19 -9
  8. pkgs/type_spec/emit_typescript.py +2 -2
  9. pkgs/type_spec/type_info/emit_type_info.py +14 -1
  10. pkgs/type_spec/value_spec/__main__.py +2 -2
  11. uncountable/integration/cli.py +29 -1
  12. uncountable/integration/executors/executors.py +1 -2
  13. uncountable/integration/executors/generic_upload_executor.py +1 -1
  14. uncountable/integration/job.py +3 -3
  15. uncountable/integration/queue_runner/command_server/__init__.py +4 -0
  16. uncountable/integration/queue_runner/command_server/command_client.py +39 -0
  17. uncountable/integration/queue_runner/command_server/command_server.py +37 -0
  18. uncountable/integration/queue_runner/command_server/protocol/command_server.proto +18 -0
  19. uncountable/integration/queue_runner/command_server/protocol/command_server_pb2.py +21 -13
  20. uncountable/integration/queue_runner/command_server/protocol/command_server_pb2.pyi +28 -1
  21. uncountable/integration/queue_runner/command_server/protocol/command_server_pb2_grpc.py +90 -0
  22. uncountable/integration/queue_runner/command_server/types.py +24 -1
  23. uncountable/integration/queue_runner/datastore/datastore_sqlite.py +107 -8
  24. uncountable/integration/queue_runner/datastore/model.py +8 -1
  25. uncountable/integration/queue_runner/job_scheduler.py +42 -2
  26. uncountable/integration/queue_runner/worker.py +1 -1
  27. uncountable/integration/server.py +36 -6
  28. uncountable/integration/telemetry.py +41 -7
  29. uncountable/types/__init__.py +4 -0
  30. uncountable/types/api/integrations/register_sockets_token.py +41 -0
  31. uncountable/types/api/recipes/edit_recipe_inputs.py +1 -1
  32. uncountable/types/api/recipes/get_recipes_data.py +16 -0
  33. uncountable/types/api/recipes/lock_recipes.py +2 -1
  34. uncountable/types/api/recipes/set_recipe_total.py +59 -0
  35. uncountable/types/api/recipes/unlock_recipes.py +2 -1
  36. uncountable/types/api/uploader/complete_async_parse.py +4 -0
  37. uncountable/types/async_batch_processor.py +124 -0
  38. uncountable/types/async_batch_t.py +2 -0
  39. uncountable/types/client_base.py +57 -1
  40. uncountable/types/entity_t.py +1 -1
  41. uncountable/types/queued_job.py +1 -0
  42. uncountable/types/queued_job_t.py +9 -0
  43. uncountable/types/sockets.py +9 -0
  44. uncountable/types/sockets_t.py +99 -0
  45. uncountable/types/uploader_t.py +3 -2
  46. {uncountablepythonsdk-0.0.126.dist-info → uncountablepythonsdk-0.0.128.dist-info}/METADATA +1 -1
  47. {uncountablepythonsdk-0.0.126.dist-info → uncountablepythonsdk-0.0.128.dist-info}/RECORD +49 -45
  48. {uncountablepythonsdk-0.0.126.dist-info → uncountablepythonsdk-0.0.128.dist-info}/WHEEL +0 -0
  49. {uncountablepythonsdk-0.0.126.dist-info → uncountablepythonsdk-0.0.128.dist-info}/top_level.txt +0 -0
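
At a glance, the release clusters around a few themes: the instrument example moves from the `publish_realtime_data` HTTP endpoint to a websocket session built on the new `sockets_t` types and `register_sockets_token` API; new webhook examples cover async parse completion and predictions; the queue runner gains a job `status` field plus `retry-job` and vacuum commands on its gRPC command server; and `type_spec` adds a `default_extant` option alongside explicit `encoding="utf-8"` on file handles.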
examples/integration-server/jobs/materials_auto/example_instrument.py CHANGED
@@ -1,17 +1,20 @@
+import json
 import time
 from dataclasses import dataclass
 from decimal import Decimal
 
+from pkgs.argument_parser.argument_parser import CachedParser
+from pkgs.serialization_util import serialize_for_api
 from uncountable.integration.job import JobArguments, WebhookJob, register_job
 from uncountable.types import (
     base_t,
-    entity_t,
     identifier_t,
-    integrations_t,
     job_definition_t,
-    publish_realtime_data_t,
+    sockets_t,
 )
-from uncountable.types.client_base import APIRequest
+from uncountable.types.integration_session_t import IntegrationSessionInstrument
+from websockets.sync.client import connect
+from websockets.typing import Data
 
 
 @dataclass(kw_only=True)
@@ -24,45 +27,71 @@ class InstrumentExample(WebhookJob[InstrumentPayload]):
     def run(
         self, args: JobArguments, payload: InstrumentPayload
     ) -> job_definition_t.JobResult:
-        equipment_data = args.client.get_entities_data(
-            entity_type=entity_t.EntityType.EQUIPMENT,
-            entity_ids=[payload.equipment_id],
-        ).entity_details[0]
+        parser: CachedParser[sockets_t.SocketResponse] = CachedParser(
+            sockets_t.SocketResponse  # type:ignore[arg-type]
+        )
 
-        # Load the instrument's connection details from the entity
-        instrument_id = None
-        for field in equipment_data.field_values:
-            if field.field_ref_name == "ins_instrument_id":
-                instrument_id = field.value
+        def parse_message(message: Data) -> sockets_t.SocketEventData | None:
+            try:
+                return parser.parse_api(json.loads(message)).data
+            except ValueError as e:
+                return None
 
-        if instrument_id is None:
-            args.logger.log_error("Could not find instrument ID")
-            return job_definition_t.JobResult(success=False)
-
-        args.logger.log_info(f"Instrument ID: {instrument_id}")
-
-        for i in range(10):
-            req_args = publish_realtime_data_t.Arguments(
-                data_package=integrations_t.DataPackageNumericReading(
-                    value=Decimal(i * 15),
-                    target_entity=entity_t.EntityIdentifier(
-                        identifier_key=identifier_t.IdentifierKeyId(
-                            id=payload.equipment_id
-                        ),
-                        type=entity_t.EntityType.EQUIPMENT,
-                    ),
-                ),
-            )
-            api_request = APIRequest(
-                method=publish_realtime_data_t.ENDPOINT_METHOD,
-                endpoint=publish_realtime_data_t.ENDPOINT_PATH,
-                args=req_args,
+        integration_session = IntegrationSessionInstrument(
+            equipment_key=identifier_t.IdentifierKeyId(id=payload.equipment_id)
+        )
+        registration_info = args.client.register_sockets_token(
+            socket_request=sockets_t.SocketRequestIntegrationSession(
+                integration_session=integration_session
             )
-            args.client.do_request(
-                api_request=api_request, return_type=publish_realtime_data_t.Data
+        ).response
+        token = registration_info.token
+        room_key = registration_info.room_key
+        args.logger.log_info(f"Token: {token}")
+
+        with connect(
+            "ws://host.docker.internal:8765",
+            additional_headers={
+                "Authorization": f"Bearer {token}",
+                "X-UNC-EXTERNAL": "true",
+            },
+        ) as ws:
+            ws.send(
+                json.dumps(
+                    serialize_for_api(
+                        sockets_t.JoinRoomWithTokenSocketClientMessage(token=token)
+                    )
+                )
             )
-            time.sleep(0.75)
+            for i in range(10):
+                args.logger.log_info("Sending reading...")
+                ws.send(
+                    json.dumps(
+                        serialize_for_api(
+                            sockets_t.SendInstrumentReadingClientMessage(
+                                value=Decimal(i * 100), room_key=room_key
+                            )
+                        )
+                    )
+                )
+                time.sleep(0.75)
+
+            while True:
+                message = parse_message(ws.recv())
+                match message:
+                    case sockets_t.UsersInRoomUpdatedEventData():
+                        num_users = len(message.user_ids)
+                        if num_users <= 1:
+                            break
+                        else:
+                            args.logger.log_info(
+                                f"Session still open, {num_users} users in room."
+                            )
+                    case _:
+                        args.logger.log_info("Session still open...")
+                        continue
 
+        args.logger.log_info("Session closed.")
         return job_definition_t.JobResult(success=True)
 
     @property
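
The rewritten instrument example above boils down to a three-step socket flow: register a session token, join the room it grants, then stream readings. Below is a condensed, non-normative sketch of that flow; `client`, `equipment_id`, and `socket_url` are placeholders (an API client, an equipment object id, and the deployment's socket endpoint), not names fixed by the SDK:

    import json
    from decimal import Decimal

    from pkgs.serialization_util import serialize_for_api
    from uncountable.types import identifier_t, sockets_t
    from uncountable.types.integration_session_t import IntegrationSessionInstrument
    from websockets.sync.client import connect

    # 1. Register a socket token scoped to an instrument session.
    registration = client.register_sockets_token(
        socket_request=sockets_t.SocketRequestIntegrationSession(
            integration_session=IntegrationSessionInstrument(
                equipment_key=identifier_t.IdentifierKeyId(id=equipment_id)
            )
        )
    ).response

    with connect(
        socket_url,  # the example uses "ws://host.docker.internal:8765"
        additional_headers={"Authorization": f"Bearer {registration.token}"},
    ) as ws:
        # 2. Join the room the token grants access to.
        ws.send(
            json.dumps(
                serialize_for_api(
                    sockets_t.JoinRoomWithTokenSocketClientMessage(
                        token=registration.token
                    )
                )
            )
        )
        # 3. Stream numeric readings into the room.
        ws.send(
            json.dumps(
                serialize_for_api(
                    sockets_t.SendInstrumentReadingClientMessage(
                        value=Decimal("42.0"), room_key=registration.room_key
                    )
                )
            )
        )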

examples/integration-server/jobs/materials_auto/example_parse.py ADDED
@@ -0,0 +1,87 @@
+from dataclasses import dataclass
+
+from uncountable.integration.job import JobArguments, WebhookJob, register_job
+from uncountable.types import (
+    base_t,
+    generic_upload_t,
+    identifier_t,
+    job_definition_t,
+    uploader_t,
+)
+
+
+@dataclass(kw_only=True)
+class ParsePayload:
+    async_job_id: base_t.ObjectId
+
+
+@register_job
+class ParseExample(WebhookJob[ParsePayload]):
+    def run(
+        self, args: JobArguments, payload: ParsePayload
+    ) -> job_definition_t.JobResult:
+        dummy_parsed_file_data: list[uploader_t.ParsedFileData] = [
+            uploader_t.ParsedFileData(
+                file_name="my_file_to_upload.xlsx",
+                file_structures=[
+                    uploader_t.DataChannel(
+                        type=uploader_t.StructureElementType.CHANNEL,
+                        channel=uploader_t.TextChannelData(
+                            name="column1",
+                            type=uploader_t.ChannelType.TEXT_CHANNEL,
+                            data=[
+                                uploader_t.StringValue(value="value1"),
+                                uploader_t.StringValue(value="value4"),
+                                uploader_t.StringValue(value="value7"),
+                            ],
+                        ),
+                    ),
+                    uploader_t.DataChannel(
+                        type=uploader_t.StructureElementType.CHANNEL,
+                        channel=uploader_t.TextChannelData(
+                            name="column2",
+                            type=uploader_t.ChannelType.TEXT_CHANNEL,
+                            data=[
+                                uploader_t.StringValue(value="value2"),
+                                uploader_t.StringValue(value="value5"),
+                                uploader_t.StringValue(value="value8"),
+                            ],
+                        ),
+                    ),
+                    uploader_t.DataChannel(
+                        type=uploader_t.StructureElementType.CHANNEL,
+                        channel=uploader_t.TextChannelData(
+                            name="column3",
+                            type=uploader_t.ChannelType.TEXT_CHANNEL,
+                            data=[
+                                uploader_t.StringValue(value="value3"),
+                                uploader_t.StringValue(value="value6"),
+                                uploader_t.StringValue(value="value9"),
+                            ],
+                        ),
+                    ),
+                    uploader_t.HeaderEntry(
+                        type=uploader_t.StructureElementType.HEADER,
+                        value=uploader_t.TextHeaderData(
+                            name="file_source",
+                            type=uploader_t.HeaderType.TEXT_HEADER,
+                            data=uploader_t.StringValue(value="my_file_to_upload.xlsx"),
+                        ),
+                    ),
+                ],
+            )
+        ]
+
+        args.client.complete_async_parse(
+            parsed_file_data=dummy_parsed_file_data,
+            async_job_key=identifier_t.IdentifierKeyId(id=payload.async_job_id),
+            upload_destination=generic_upload_t.UploadDestinationRecipe(
+                recipe_key=identifier_t.IdentifierKeyId(id=1)
+            ),
+        )
+
+        return job_definition_t.JobResult(success=True)
+
+    @property
+    def webhook_payload_type(self) -> type:
+        return ParsePayload
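
Read column-wise, the dummy payload above reconstructs a three-column table, one TextChannelData per column plus one file-level header entry:

    column1  column2  column3
    value1   value2   value3
    value4   value5   value6
    value7   value8   value9

    file_source: my_file_to_upload.xlsx   (TextHeaderData entry)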

examples/integration-server/jobs/materials_auto/example_predictions.py ADDED
@@ -0,0 +1,61 @@
+import random
+from dataclasses import dataclass
+from datetime import datetime
+from decimal import Decimal
+
+from uncountable.integration.job import JobArguments, WebhookJob, register_job
+from uncountable.types import (
+    base_t,
+    identifier_t,
+    job_definition_t,
+    recipe_links_t,
+    set_recipe_outputs_t,
+)
+
+
+@dataclass(kw_only=True)
+class PredictionsPayload:
+    output_id: base_t.ObjectId
+    recipe_ids: list[base_t.ObjectId]
+
+
+@register_job
+class PredictionsExample(WebhookJob[PredictionsPayload]):
+    def run(
+        self, args: JobArguments, payload: PredictionsPayload
+    ) -> job_definition_t.JobResult:
+        recipe_data = args.client.get_recipes_data(recipe_ids=payload.recipe_ids)
+        formatted_datetime = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+
+        for recipe in recipe_data.recipes:
+            test_sample_name = f"Predictions Model ({formatted_datetime})"
+            created_recipe_id = args.client.create_recipe(
+                name=test_sample_name,
+                material_family_id=1,
+                workflow_id=1,
+                definition_key=identifier_t.IdentifierKeyRefName(
+                    ref_name="unc_test_sample"
+                ),
+            ).result_id
+            args.client.set_recipe_outputs(
+                output_data=[
+                    set_recipe_outputs_t.RecipeOutputValue(
+                        recipe_id=created_recipe_id,
+                        output_id=payload.output_id,
+                        experiment_num=1,
+                        value_numeric=Decimal(random.random() * 10),
+                    )
+                ]
+            )
+            args.client.create_recipe_link(
+                recipe_from_key=identifier_t.IdentifierKeyId(id=recipe.recipe_id),
+                recipe_to_key=identifier_t.IdentifierKeyId(id=created_recipe_id),
+                link_type=recipe_links_t.RecipeLinkType.CHILD,
+                name=test_sample_name,
+            )
+
+        return job_definition_t.JobResult(success=True)
+
+    @property
+    def webhook_payload_type(self) -> type:
+        return PredictionsPayload

examples/integration-server/jobs/materials_auto/example_runsheet_wh.py CHANGED
@@ -2,7 +2,7 @@ from io import BytesIO
 
 from uncountable.core.file_upload import DataFileUpload, FileUpload
 from uncountable.integration.job import JobArguments, RunsheetWebhookJob, register_job
-from uncountable.types import entity_t
+from uncountable.types import webhook_job_t
 
 
 @register_job
@@ -11,8 +11,9 @@ class StandardRunsheetGenerator(RunsheetWebhookJob):
         self,
         *,
         args: JobArguments,
-        entities: list[entity_t.Entity],
+        payload: webhook_job_t.RunsheetWebhookPayload,
     ) -> FileUpload:
+        entities = payload.entities
         args.logger.log_info(f"Generating runsheet for {len(entities)} entities")
 
         content = []

examples/integration-server/jobs/materials_auto/profile.yaml CHANGED
@@ -84,3 +84,21 @@ jobs:
     executor:
       type: script
       import_path: example_instrument
+  - id: example_predictions
+    type: webhook
+    name: Webook Predictions
+    signature_key_secret:
+      type: env
+      env_key: WH_PREDICTIONS_SIGNATURE_KEY
+    executor:
+      type: script
+      import_path: example_predictions
+  - id: example_parse
+    type: webhook
+    name: Webhook Parse
+    signature_key_secret:
+      type: env
+      env_key: WH_PARSE_SIGNATURE_KEY
+    executor:
+      type: script
+      import_path: example_parse

examples/integration-server/pyproject.toml CHANGED
@@ -9,7 +9,7 @@ dependencies = [
    "ruff == 0.*",
    "openpyxl == 3.*",
    "more_itertools == 10.*",
-   "types-paramiko ==4.0.0.20250806",
+   "types-paramiko ==4.0.0.20250822",
    "types-openpyxl == 3.*",
    "types-pysftp == 0.*",
    "types-pytz ==2025.*",
@@ -17,7 +17,8 @@ dependencies = [
    "types-simplejson == 3.*",
    "pandas-stubs",
    "xlrd == 2.*",
-   "msgspec == 0.19.*"
+   "msgspec == 0.19.*",
+   "websockets==15.0.1",
 ]
 
 [tool.mypy]
@@ -194,7 +195,6 @@ lint.ignore = [
    "RUF022", # __all__ is not sorted. skip due to isort complication
    "UP017", # use datetime.UTC, TODO add back in
    "UP035", # replacing List with list, TODO add back in
-   "UP038", # isinstance X | Y instead of (X, Y), TODO add back in
    # ## FROM RUFF UPGRADE
    "PLC2701", # private name imports. should add
    "PLR1702", # too many nested blocks -- add with config. skip

pkgs/type_spec/builder.py CHANGED
@@ -308,6 +308,7 @@ class SpecTypeDefn(SpecType):
         self._is_value_to_string = False
         self._is_valid_parameter = True
         self._is_dynamic_allowed = False
+        self._default_extant: PropertyExtant | None = None
         self.ext_info: Any = None
 
     def is_value_converted(self) -> bool:
@@ -340,6 +341,7 @@ class SpecTypeDefn(SpecType):
                 "ext_info",
                 "label",
                 "is_dynamic_allowed",
+                "default_extant",
             ]
             + extra_names,
         )
@@ -351,6 +353,10 @@ class SpecTypeDefn(SpecType):
         assert isinstance(is_dynamic_allowed, bool)
         self._is_dynamic_allowed = is_dynamic_allowed
 
+        default_extant = data.get("default_extant")
+        if default_extant is not None:
+            self._default_extant = PropertyExtant(default_extant)
+
     def _process_property(
         self, builder: SpecBuilder, spec_name: str, data: RawDict
     ) -> SpecProperty:
@@ -369,18 +375,18 @@
             ],
         )
         try:
-            extant_type = data.get("extant")
+            extant_type_str = data.get("extant")
+            extant_type = (
+                PropertyExtant(extant_type_str) if extant_type_str is not None else None
+            )
+            extant = extant_type or self._default_extant
            if spec_name.endswith("?"):
-                if extant_type is not None:
+                if extant is not None:
                    raise Exception("cannot specify extant with ?")
                extant = PropertyExtant.optional
                name = spec_name[:-1]
            else:
-                extant = (
-                    PropertyExtant.required
-                    if extant_type is None
-                    else PropertyExtant(extant_type)
-                )
+                extant = extant or PropertyExtant.required
                name = spec_name
 
            property_name_case = self.name_case
@@ -1392,9 +1398,13 @@
         self.emit_id_source_enums: set[SpecTypeDefnStringEnum] = set()
 
         this_dir = os.path.dirname(os.path.realpath(__file__))
-        with open(f"{this_dir}/parts/base.py.prepart") as py_base_part:
+        with open(
+            f"{this_dir}/parts/base.py.prepart", encoding="utf-8"
+        ) as py_base_part:
             self.preparts["python"][base_namespace_name] = py_base_part.read()
-        with open(f"{this_dir}/parts/base.ts.prepart") as ts_base_part:
+        with open(
+            f"{this_dir}/parts/base.ts.prepart", encoding="utf-8"
+        ) as ts_base_part:
             self.preparts["typescript"][base_namespace_name] = ts_base_part.read()
 
         base_namespace.types["ObjectId"] = SpecTypeDefnObject(
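
The net effect of the `default_extant` additions is a three-level precedence when a property's extant is resolved: an explicit per-property `extant` (or the `?` name suffix) wins, then the type's `default_extant`, then `required`. A minimal sketch of that precedence as a standalone function — `resolve_extant` is a hypothetical helper and the enum values are illustrative; the real logic lives in `SpecTypeDefn._process_property`:

    from enum import Enum


    class PropertyExtant(Enum):
        # Values are illustrative; the real enum is defined in pkgs/type_spec.
        required = "required"
        optional = "optional"


    def resolve_extant(
        spec_name: str,
        explicit: str | None,
        default_extant: PropertyExtant | None,
    ) -> tuple[str, PropertyExtant]:
        """Hypothetical distillation of the precedence added in this release."""
        extant = PropertyExtant(explicit) if explicit is not None else default_extant
        if spec_name.endswith("?"):
            if extant is not None:
                raise Exception("cannot specify extant with ?")
            return spec_name[:-1], PropertyExtant.optional
        return spec_name, extant or PropertyExtant.required


    # "?" forces optional; a type-level default applies when no explicit extant
    # is given; an explicit extant overrides the type-level default.
    assert resolve_extant("name?", None, None)[1] is PropertyExtant.optional
    assert resolve_extant("name", None, PropertyExtant.optional)[1] is PropertyExtant.optional
    assert resolve_extant("name", "required", PropertyExtant.optional)[1] is PropertyExtant.required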

pkgs/type_spec/emit_typescript.py CHANGED
@@ -285,14 +285,14 @@ export const apiCall = {wrap_call}(
     index_path = f"{config.endpoint_to_routes_output[endpoint.default_endpoint_key]}/{'/'.join(namespace.path[0:-1])}/index.tsx"
     api_name = f"Api{ts_type_name(namespace.path[0 - 1])}"
     if os.path.exists(index_path):
-        with open(index_path) as index:
+        with open(index_path, encoding="utf-8") as index:
             index_data = index.read()
             need_index = index_data.find(api_name) == -1
     else:
         need_index = True
 
     if need_index:
-        with open(index_path, "a") as index:
+        with open(index_path, "a", encoding="utf-8") as index:
             print(f"Updated API Index {index_path}")
             index.write(f'import * as {api_name} from "./{namespace.path[-1]}"\n\n')
             index.write(f"export {{ {api_name} }}\n")

pkgs/type_spec/type_info/emit_type_info.py CHANGED
@@ -41,10 +41,23 @@ def type_path_of(stype: builder.SpecType) -> object: # NamePath
         parts: list[object] = ["$literal"]
         for parameter in stype.parameters:
             assert isinstance(parameter, builder.SpecTypeLiteralWrapper)
+            emit_value = parameter.value
+            if isinstance(parameter.value_type, builder.SpecTypeDefnObject):
+                emit_value = parameter.value
+                assert isinstance(emit_value, (str, bool)), (
+                    f"invalid-literal-value:{emit_value}"
+                )
+            elif isinstance(parameter.value_type, builder.SpecTypeDefnStringEnum):
+                key = parameter.value
+                assert isinstance(key, str)
+                emit_value = parameter.value_type.values[key].value
+            else:
+                raise Exception("unhandled-literal-type")
+
             # This allows expansion to enum literal values later
             parts.append([
                 "$value",
-                parameter.value,
+                emit_value,
                 type_path_of(parameter.value_type),
             ])
         return parts

pkgs/type_spec/value_spec/__main__.py CHANGED
@@ -20,7 +20,7 @@ The accepted argument type must accept "None", it is not implied.
 """
 
 import sys
-from typing import TypeVar, cast
+from typing import Match, Pattern, TypeVar, cast
 
 import regex as re
 
@@ -56,7 +56,7 @@ class Source:
     def has_more(self) -> bool:
         return self._at < len(self._text)
 
-    def match(self, expression: re.Pattern) -> re.Match | None:
+    def match(self, expression: Pattern[str]) -> Match[str] | None:
         self.skip_space()
         m = expression.match(self._text, self._at)
         if m is not None:

uncountable/integration/cli.py CHANGED
@@ -8,6 +8,7 @@ from uncountable.core.environment import get_local_admin_server_port
 from uncountable.integration.queue_runner.command_server.command_client import (
     send_job_queue_message,
     send_list_queued_jobs_message,
+    send_retry_job_message,
 )
 from uncountable.integration.telemetry import Logger
 from uncountable.types import queued_job_t
@@ -70,12 +71,13 @@ def register_list_queued_jobs(
             port=get_local_admin_server_port(),
         )
 
-        headers = ["UUID", "Job Ref Name", "Attempts", "Submitted At"]
+        headers = ["UUID", "Job Ref Name", "Attempts", "Status", "Submitted At"]
         rows = [
             [
                 job.uuid,
                 job.job_ref_name,
                 job.num_attempts,
+                job.status,
                 job.submitted_at.ToDatetime(tz.UTC).astimezone(tz.tzlocal()),
             ]
             for job in queued_jobs
@@ -85,6 +87,31 @@
     list_queued_jobs_parser.set_defaults(func=_handle_list_queued_jobs)
 
 
+def register_retry_job_parser(
+    sub_parser_manager: argparse._SubParsersAction,
+    parents: list[argparse.ArgumentParser],
+) -> None:
+    retry_failed_jobs_parser = sub_parser_manager.add_parser(
+        "retry-job",
+        parents=parents,
+        help="Retry failed job on the integration server",
+        description="Retry failed job on the integration server",
+    )
+
+    retry_failed_jobs_parser.add_argument(
+        "job_uuid", type=str, help="The uuid of the job to retry"
+    )
+
+    def _handle_retry_job(args: argparse.Namespace) -> None:
+        send_retry_job_message(
+            job_uuid=args.job_uuid,
+            host=args.host,
+            port=get_local_admin_server_port(),
+        )
+
+    retry_failed_jobs_parser.set_defaults(func=_handle_retry_job)
+
+
 def main() -> None:
     logger = Logger(get_current_span())
 
@@ -104,6 +131,7 @@ def main() -> None:
     )
 
     register_enqueue_job_parser(subparser_action, parents=[base_parser])
+    register_retry_job_parser(subparser_action, parents=[base_parser])
     register_list_queued_jobs(subparser_action, parents=[base_parser])
 
     args = main_parser.parse_args()
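
The new `retry-job` subcommand is a thin wrapper over the gRPC client, so the same retry can be issued programmatically. A small sketch using only calls shown in this diff (the UUID is a placeholder):

    from uncountable.core.environment import get_local_admin_server_port
    from uncountable.integration.queue_runner.command_server.command_client import (
        send_retry_job_message,
    )

    # Re-enqueue a failed job by UUID; raises ValueError if the gRPC call fails
    # (per the error handling in command_client.py below).
    new_uuid = send_retry_job_message(
        job_uuid="00000000-0000-0000-0000-000000000000",  # placeholder UUID
        port=get_local_admin_server_port(),
    )
    print(f"requeued as {new_uuid}")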

uncountable/integration/executors/executors.py CHANGED
@@ -88,7 +88,6 @@ def execute_job(
     job_definition: job_definition_t.JobDefinition,
     profile_metadata: job_definition_t.ProfileMetadata,
     args: JobArguments,
-    job_uuid: str,
 ) -> job_definition_t.JobResult:
     with args.logger.push_scope(job_definition.name) as job_logger:
         job = resolve_executor(job_definition.executor, profile_metadata)
@@ -104,7 +103,7 @@
             run_entity = _create_run_entity(
                 client=args.client,
                 logging_settings=job_definition.logging_settings,
-                job_uuid=job_uuid,
+                job_uuid=args.job_uuid,
             )
             result = job.run_outer(args=args)
         except Exception as e:

uncountable/integration/executors/generic_upload_executor.py CHANGED
@@ -41,7 +41,7 @@ def _get_extension(filename: str) -> str | None:
 
 def _run_keyword_detection(data: io.BytesIO, keyword: str) -> bool:
     try:
-        text = io.TextIOWrapper(data)
+        text = io.TextIOWrapper(data, encoding="utf-8")
         for line in text:
             if (
                 keyword in line

uncountable/integration/job.py CHANGED
@@ -28,7 +28,6 @@ from uncountable.integration.secret_retrieval.retrieve_secret import retrieve_se
 from uncountable.integration.telemetry import JobLogger
 from uncountable.types import (
     base_t,
-    entity_t,
     job_definition_t,
     queued_job_t,
     webhook_job_t,
@@ -49,6 +48,7 @@ class JobArguments:
     batch_processor: AsyncBatchProcessor
     logger: JobLogger
     payload: base_t.JsonValue
+    job_uuid: str
 
 
 # only for compatibility:
@@ -254,13 +254,13 @@ class RunsheetWebhookJob(WebhookJob[webhook_job_t.RunsheetWebhookPayload]):
         self,
         *,
         args: JobArguments,
-        entities: list[entity_t.Entity],
+        payload: webhook_job_t.RunsheetWebhookPayload,
     ) -> FileUpload: ...
 
     def run(
         self, args: JobArguments, payload: webhook_job_t.RunsheetWebhookPayload
     ) -> JobResult:
-        runsheet = self.build_runsheet(args=args, entities=payload.entities)
+        runsheet = self.build_runsheet(args=args, payload=payload)
 
         files = args.client.upload_files(file_uploads=[runsheet])
         args.client.complete_async_upload(

uncountable/integration/queue_runner/command_server/__init__.py CHANGED
@@ -4,6 +4,8 @@ from .types import (
     CommandEnqueueJob,
     CommandEnqueueJobResponse,
     CommandQueue,
+    CommandRetryJob,
+    CommandRetryJobResponse,
     CommandServerBadResponse,
     CommandServerException,
     CommandServerTimeout,
@@ -16,6 +18,8 @@ __all__: list[str] = [
     "send_job_queue_message",
     "CommandEnqueueJob",
     "CommandEnqueueJobResponse",
+    "CommandRetryJob",
+    "CommandRetryJobResponse",
     "CommandTask",
     "CommandQueue",
     "CommandServerTimeout",

uncountable/integration/queue_runner/command_server/command_client.py CHANGED
@@ -12,6 +12,10 @@ from uncountable.integration.queue_runner.command_server.protocol.command_server
     EnqueueJobResult,
     ListQueuedJobsRequest,
     ListQueuedJobsResult,
+    RetryJobRequest,
+    RetryJobResult,
+    VaccuumQueuedJobsRequest,
+    VaccuumQueuedJobsResult,
 )
 from uncountable.integration.queue_runner.command_server.types import (
     CommandServerBadResponse,
@@ -59,6 +63,26 @@ def send_job_queue_message(
         return response.queued_job_uuid
 
 
+def send_retry_job_message(
+    *,
+    job_uuid: str,
+    host: str = "localhost",
+    port: int,
+) -> str:
+    with command_server_connection(host=host, port=port) as stub:
+        request = RetryJobRequest(uuid=job_uuid)
+
+        try:
+            response = stub.RetryJob(request, timeout=_DEFAULT_MESSAGE_TIMEOUT_SECS)
+            assert isinstance(response, RetryJobResult)
+            if not response.successfully_queued:
+                raise CommandServerBadResponse("queue operation was not successful")
+
+            return response.queued_job_uuid
+        except grpc.RpcError as e:
+            raise ValueError(e.details())  # type: ignore
+
+
 def check_health(*, host: str = _LOCAL_RPC_HOST, port: int) -> bool:
     with command_server_connection(host=host, port=port) as stub:
         request = CheckHealthRequest()
@@ -92,3 +116,18 @@ def send_list_queued_jobs_message(
 
     assert isinstance(response, ListQueuedJobsResult)
     return list(response.queued_jobs)
+
+
+def send_vaccuum_queued_jobs_message(*, host: str = "localhost", port: int) -> None:
+    with command_server_connection(host=host, port=port) as stub:
+        request = VaccuumQueuedJobsRequest()
+
+        try:
+            response = stub.VaccuumQueuedJobs(
+                request, timeout=_DEFAULT_MESSAGE_TIMEOUT_SECS
+            )
+        except grpc.RpcError as e:
+            raise ValueError(e.details())  # type: ignore
+
+    assert isinstance(response, VaccuumQueuedJobsResult)
+    return None
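
Both new client helpers surface gRPC failures as `ValueError` built from `e.details()`. A usage sketch for the vacuum helper (what exactly gets vacuumed is implemented in the datastore changes not shown in this hunk; the port helper mirrors the CLI's usage):

    from uncountable.core.environment import get_local_admin_server_port
    from uncountable.integration.queue_runner.command_server.command_client import (
        send_vaccuum_queued_jobs_message,
    )

    try:
        # Ask the queue runner to vacuum queued jobs; returns None on success.
        send_vaccuum_queued_jobs_message(port=get_local_admin_server_port())
    except ValueError as err:
        # Raised when the underlying gRPC call fails (see command_client.py above).
        print(f"vacuum failed: {err}")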