UncountablePythonSDK 0.0.126__py3-none-any.whl → 0.0.128__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only.

Potentially problematic release.

Files changed (49)
  1. examples/integration-server/jobs/materials_auto/example_instrument.py +67 -38
  2. examples/integration-server/jobs/materials_auto/example_parse.py +87 -0
  3. examples/integration-server/jobs/materials_auto/example_predictions.py +61 -0
  4. examples/integration-server/jobs/materials_auto/example_runsheet_wh.py +3 -2
  5. examples/integration-server/jobs/materials_auto/profile.yaml +18 -0
  6. examples/integration-server/pyproject.toml +3 -3
  7. pkgs/type_spec/builder.py +19 -9
  8. pkgs/type_spec/emit_typescript.py +2 -2
  9. pkgs/type_spec/type_info/emit_type_info.py +14 -1
  10. pkgs/type_spec/value_spec/__main__.py +2 -2
  11. uncountable/integration/cli.py +29 -1
  12. uncountable/integration/executors/executors.py +1 -2
  13. uncountable/integration/executors/generic_upload_executor.py +1 -1
  14. uncountable/integration/job.py +3 -3
  15. uncountable/integration/queue_runner/command_server/__init__.py +4 -0
  16. uncountable/integration/queue_runner/command_server/command_client.py +39 -0
  17. uncountable/integration/queue_runner/command_server/command_server.py +37 -0
  18. uncountable/integration/queue_runner/command_server/protocol/command_server.proto +18 -0
  19. uncountable/integration/queue_runner/command_server/protocol/command_server_pb2.py +21 -13
  20. uncountable/integration/queue_runner/command_server/protocol/command_server_pb2.pyi +28 -1
  21. uncountable/integration/queue_runner/command_server/protocol/command_server_pb2_grpc.py +90 -0
  22. uncountable/integration/queue_runner/command_server/types.py +24 -1
  23. uncountable/integration/queue_runner/datastore/datastore_sqlite.py +107 -8
  24. uncountable/integration/queue_runner/datastore/model.py +8 -1
  25. uncountable/integration/queue_runner/job_scheduler.py +42 -2
  26. uncountable/integration/queue_runner/worker.py +1 -1
  27. uncountable/integration/server.py +36 -6
  28. uncountable/integration/telemetry.py +41 -7
  29. uncountable/types/__init__.py +4 -0
  30. uncountable/types/api/integrations/register_sockets_token.py +41 -0
  31. uncountable/types/api/recipes/edit_recipe_inputs.py +1 -1
  32. uncountable/types/api/recipes/get_recipes_data.py +16 -0
  33. uncountable/types/api/recipes/lock_recipes.py +2 -1
  34. uncountable/types/api/recipes/set_recipe_total.py +59 -0
  35. uncountable/types/api/recipes/unlock_recipes.py +2 -1
  36. uncountable/types/api/uploader/complete_async_parse.py +4 -0
  37. uncountable/types/async_batch_processor.py +124 -0
  38. uncountable/types/async_batch_t.py +2 -0
  39. uncountable/types/client_base.py +57 -1
  40. uncountable/types/entity_t.py +1 -1
  41. uncountable/types/queued_job.py +1 -0
  42. uncountable/types/queued_job_t.py +9 -0
  43. uncountable/types/sockets.py +9 -0
  44. uncountable/types/sockets_t.py +99 -0
  45. uncountable/types/uploader_t.py +3 -2
  46. {uncountablepythonsdk-0.0.126.dist-info → uncountablepythonsdk-0.0.128.dist-info}/METADATA +1 -1
  47. {uncountablepythonsdk-0.0.126.dist-info → uncountablepythonsdk-0.0.128.dist-info}/RECORD +49 -45
  48. {uncountablepythonsdk-0.0.126.dist-info → uncountablepythonsdk-0.0.128.dist-info}/WHEEL +0 -0
  49. {uncountablepythonsdk-0.0.126.dist-info → uncountablepythonsdk-0.0.128.dist-info}/top_level.txt +0 -0
uncountable/integration/queue_runner/job_scheduler.py

@@ -10,8 +10,13 @@ from uncountable.integration.queue_runner.command_server import (
     CommandEnqueueJob,
     CommandEnqueueJobResponse,
     CommandQueue,
+    CommandRetryJob,
+    CommandRetryJobResponse,
     CommandTask,
 )
+from uncountable.integration.queue_runner.command_server.types import (
+    CommandVaccuumQueuedJobs,
+)
 from uncountable.integration.queue_runner.datastore import DatastoreSqlite
 from uncountable.integration.queue_runner.datastore.interface import Datastore
 from uncountable.integration.queue_runner.worker import Worker
@@ -99,7 +104,9 @@ async def start_scheduler(
             worker = job_worker_lookup[queued_job.job_ref_name]
         except KeyError as e:
             logger.log_exception(e)
-            datastore.remove_job_from_queue(queued_job.queued_job_uuid)
+            datastore.update_job_status(
+                queued_job.queued_job_uuid, queued_job_t.JobStatus.FAILED
+            )
             return
         await worker.listen_queue.put(queued_job)
 
@@ -135,6 +142,25 @@ async def start_scheduler(
             CommandEnqueueJobResponse(queued_job_uuid=queued_job_uuid)
         )
 
+    async def _handle_retry_job_command(command: CommandRetryJob) -> None:
+        queued_job = datastore.retry_job(command.queued_job_uuid)
+        if queued_job is None:
+            await command.response_queue.put(
+                CommandRetryJobResponse(queued_job_uuid=None)
+            )
+            return
+
+        await enqueue_queued_job(queued_job)
+        await command.response_queue.put(
+            CommandRetryJobResponse(queued_job_uuid=queued_job.queued_job_uuid)
+        )
+
+    def _handle_vaccuum_queued_jobs_command(
+        command: CommandVaccuumQueuedJobs,
+    ) -> None:
+        logger.log_info("Vaccuuming queued jobs...")
+        datastore.vaccuum_queued_jobs()
+
     for queued_job in queued_jobs:
         await enqueue_queued_job(queued_job)
 
@@ -151,10 +177,24 @@ async def start_scheduler(
             match command:
                 case CommandEnqueueJob():
                     await _handle_enqueue_job_command(command=command)
+                case CommandRetryJob():
+                    await _handle_retry_job_command(command=command)
+                case CommandVaccuumQueuedJobs():
+                    _handle_vaccuum_queued_jobs_command(command=command)
                 case _:
                     typing.assert_never(command)
             command_task = asyncio.create_task(command_queue.get())
         elif task == result_task:
             queued_job_result = result_task.result()
-            datastore.remove_job_from_queue(queued_job_result.queued_job_uuid)
+            match queued_job_result.job_result.success:
+                case True:
+                    datastore.update_job_status(
+                        queued_job_result.queued_job_uuid,
+                        queued_job_t.JobStatus.SUCCESS,
+                    )
+                case False:
+                    datastore.update_job_status(
+                        queued_job_result.queued_job_uuid,
+                        queued_job_t.JobStatus.FAILED,
+                    )
             result_task = asyncio.create_task(result_queue.get())
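
Taken together, these scheduler hunks change the queued-job lifecycle: finished jobs are no longer deleted but marked SUCCESS or FAILED, which is what makes the new CommandRetryJob path possible, since a failed row can be looked up by UUID and re-enqueued. A minimal sketch of driving the retry path through the in-process command queue; the CommandRetryJob constructor fields are assumptions inferred from how the handler reads them (command.queued_job_uuid, command.response_queue):

import asyncio

from uncountable.integration.queue_runner.command_server import (
    CommandRetryJob,
    CommandRetryJobResponse,
)

async def request_retry(
    command_queue: asyncio.Queue, queued_job_uuid: str
) -> str | None:
    # Hypothetical wiring: hand the scheduler a retry command and await its reply.
    response_queue: asyncio.Queue[CommandRetryJobResponse] = asyncio.Queue()
    await command_queue.put(
        CommandRetryJob(queued_job_uuid=queued_job_uuid, response_queue=response_queue)
    )
    response = await response_queue.get()
    # None means the datastore could not produce a retryable job for this UUID.
    return response.queued_job_uuid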
uncountable/integration/queue_runner/worker.py

@@ -103,13 +103,13 @@ def run_queued_job(
             profile_metadata=job_details.profile_metadata,
             logger=job_logger,
             payload=payload,
+            job_uuid=queued_job.queued_job_uuid,
         )
 
         return execute_job(
             args=args,
             profile_metadata=job_details.profile_metadata,
             job_definition=job_details.job_definition,
-            job_uuid=queued_job.queued_job_uuid,
         )
     except BaseException as e:
         job_logger.log_exception(e)
uncountable/integration/server.py

@@ -11,7 +11,11 @@ from apscheduler.triggers.cron import CronTrigger
 from opentelemetry.trace import get_current_span
 from sqlalchemy.engine.base import Engine
 
+from uncountable.core.environment import get_local_admin_server_port
 from uncountable.integration.cron import CronJobArgs, cron_job_executor
+from uncountable.integration.queue_runner.command_server.command_client import (
+    send_vaccuum_queued_jobs_message,
+)
 from uncountable.integration.telemetry import Logger
 from uncountable.types import base_t, job_definition_t
 from uncountable.types.job_definition_t import (
@@ -21,6 +25,14 @@ from uncountable.types.job_definition_t import (
 
 _MAX_APSCHEDULER_CONCURRENT_JOBS = 1
 
+VACCUUM_QUEUED_JOBS_JOB_ID = "vacuum_queued_jobs"
+
+STATIC_JOB_IDS = {VACCUUM_QUEUED_JOBS_JOB_ID}
+
+
+def vaccuum_queued_jobs() -> None:
+    send_vaccuum_queued_jobs_message(port=get_local_admin_server_port())
+
 
 class IntegrationServer:
     _scheduler: BaseScheduler
@@ -36,11 +48,27 @@ class IntegrationServer:
         )
         self._server_logger = Logger(get_current_span())
 
+    def _register_static_jobs(self) -> None:
+        all_job_ids = {job.id for job in self._scheduler.get_jobs()}
+        if VACCUUM_QUEUED_JOBS_JOB_ID in all_job_ids:
+            self._scheduler.remove_job(VACCUUM_QUEUED_JOBS_JOB_ID)
+
+        self._scheduler.add_job(
+            vaccuum_queued_jobs,
+            max_instances=1,
+            coalesce=True,
+            trigger=CronTrigger.from_crontab("5 4 * * 4"),
+            name="Vaccuum queued jobs",
+            id=VACCUUM_QUEUED_JOBS_JOB_ID,
+            kwargs={},
+            misfire_grace_time=None,
+        )
+
     def register_jobs(self, profiles: list[job_definition_t.ProfileMetadata]) -> None:
-        valid_job_ids = []
+        valid_job_ids: set[str] = set()
         for profile_metadata in profiles:
             for job_defn in profile_metadata.jobs:
-                valid_job_ids.append(job_defn.id)
+                valid_job_ids.add(job_defn.id)
                 match job_defn:
                     case CronJobDefinition():
                         # Add to ap scheduler
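
The vacuum job is scheduled from a standard crontab expression, firing weekly at 04:05. One caveat worth verifying outside this diff: APScheduler's CronTrigger numbers weekdays with 0 as Monday, unlike classic cron's 0 as Sunday, so the day-of-week field "4" resolves per APScheduler's convention. A small sketch to inspect the resolved schedule:

from datetime import datetime

from apscheduler.triggers.cron import CronTrigger

trigger = CronTrigger.from_crontab("5 4 * * 4")
# Weekly at 04:05; print the computed next fire time to confirm which
# weekday the "4" resolves to under APScheduler's 0=Monday numbering.
print(trigger.get_next_fire_time(None, datetime.now(trigger.timezone)))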
@@ -90,10 +118,11 @@
                         pass
                     case _:
                         assert_never(job_defn)
-        all_jobs = self._scheduler.get_jobs()
-        for job in all_jobs:
-            if job.id not in valid_job_ids:
-                self._scheduler.remove_job(job.id)
+        all_job_ids = {job.id for job in self._scheduler.get_jobs()}
+        invalid_job_ids = all_job_ids.difference(valid_job_ids.union(STATIC_JOB_IDS))
+
+        for job_id in invalid_job_ids:
+            self._scheduler.remove_job(job_id)
 
     def serve_forever(self) -> None:
         signal.pause()
@@ -106,6 +135,7 @@
 
     def __enter__(self) -> "IntegrationServer":
        self._start_apscheduler()
+       self._register_static_jobs()
        return self
 
    def __exit__(
uncountable/integration/telemetry.py

@@ -12,7 +12,10 @@ from opentelemetry import _logs, trace
 from opentelemetry.exporter.otlp.proto.http._log_exporter import OTLPLogExporter
 from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
 from opentelemetry.sdk._logs import Logger as OTELLogger
-from opentelemetry.sdk._logs import LoggerProvider, LogRecord
+from opentelemetry.sdk._logs import (
+    LoggerProvider,
+    LogRecord,
+)
 from opentelemetry.sdk._logs.export import BatchLogRecordProcessor, ConsoleLogExporter
 from opentelemetry.sdk.resources import Attributes, Resource
 from opentelemetry.sdk.trace import TracerProvider
@@ -95,8 +98,27 @@ class Logger:
     def current_trace_id(self) -> int | None:
         return self.current_span.get_span_context().trace_id
 
-    def _patch_attributes(self, attributes: Attributes | None) -> Attributes:
-        return attributes or {}
+    def _patch_attributes(
+        self,
+        attributes: Attributes | None,
+        *,
+        message: str | None = None,
+        severity: LogSeverity | None = None,
+    ) -> Attributes:
+        patched_attributes = {**(attributes if attributes is not None else {})}
+        if message is not None:
+            patched_attributes["message"] = message
+        elif "body" in patched_attributes:
+            patched_attributes["message"] = patched_attributes["body"]
+
+        if severity is not None:
+            patched_attributes["status"] = severity.lower()
+        elif "severity_text" in patched_attributes and isinstance(
+            patched_attributes["severity_text"], str
+        ):
+            patched_attributes["status"] = patched_attributes["severity_text"].lower()
+
+        return patched_attributes
 
     def _emit_log(
         self, message: str, *, severity: LogSeverity, attributes: Attributes | None
@@ -106,7 +128,9 @@
             body=message,
             severity_text=severity,
             timestamp=time.time_ns(),
-            attributes=self._patch_attributes(attributes),
+            attributes=self._patch_attributes(
+                message=message, severity=severity, attributes=attributes
+            ),
             span_id=self.current_span_id,
             trace_id=self.current_trace_id,
             trace_flags=DEFAULT_TRACE_OPTIONS,
@@ -140,7 +164,9 @@
         attributes: Attributes | None = None,
     ) -> None:
         traceback_str = "".join(traceback.format_exception(exception))
-        patched_attributes = self._patch_attributes(attributes)
+        patched_attributes = self._patch_attributes(
+            message=message, severity=LogSeverity.ERROR, attributes=attributes
+        )
         self.current_span.record_exception(
             exception=exception, attributes=patched_attributes
         )
@@ -171,9 +197,17 @@ class JobLogger(Logger):
         self.job_definition = job_definition
         super().__init__(base_span)
 
-    def _patch_attributes(self, attributes: Attributes | None) -> Attributes:
+    def _patch_attributes(
+        self,
+        attributes: Attributes | None,
+        *,
+        message: str | None = None,
+        severity: LogSeverity | None = None,
+    ) -> Attributes:
         patched_attributes: dict[str, base_t.JsonValue] = {
-            **(attributes if attributes is not None else {})
+            **super()._patch_attributes(
+                attributes=attributes, message=message, severity=severity
+            )
         }
         patched_attributes["profile.name"] = self.profile_metadata.name
         patched_attributes["profile.base_url"] = self.profile_metadata.base_url
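
The net effect of these telemetry hunks: every emitted log record and recorded exception now carries a "message" attribute (the explicit message, else the record body) and a lowercase "status" attribute (the explicit severity, else severity_text), with JobLogger layering its profile.* attributes on top of that shared base. A condensed, standalone restatement of the mapping, not the SDK's own code:

def patch_attributes(attributes, *, message=None, severity=None):
    # Mirrors the new Logger._patch_attributes: prefer explicit values,
    # fall back to "body"/"severity_text" already present on the record.
    patched = {**(attributes or {})}
    if message is not None:
        patched["message"] = message
    elif "body" in patched:
        patched["message"] = patched["body"]
    if severity is not None:
        patched["status"] = severity.lower()
    elif isinstance(patched.get("severity_text"), str):
        patched["status"] = patched["severity_text"].lower()
    return patched

assert patch_attributes({"k": "v"}, message="boom", severity="ERROR") == {
    "k": "v", "message": "boom", "status": "error"
}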
uncountable/types/__init__.py

@@ -93,6 +93,7 @@ from . import recipe_output_metadata_t as recipe_output_metadata_t
 from . import recipe_tags_t as recipe_tags_t
 from . import recipe_workflow_steps_t as recipe_workflow_steps_t
 from . import recipes_t as recipes_t
+from .api.integrations import register_sockets_token as register_sockets_token_t
 from .api.recipes import remove_recipe_from_project as remove_recipe_from_project_t
 from .api.recipe_links import remove_recipe_link as remove_recipe_link_t
 from .api.entity import resolve_entity_ids as resolve_entity_ids_t
@@ -112,6 +113,7 @@ from .api.recipes import set_recipe_output_annotations as set_recipe_output_anno
 from .api.recipes import set_recipe_output_file as set_recipe_output_file_t
 from .api.recipes import set_recipe_outputs as set_recipe_outputs_t
 from .api.recipes import set_recipe_tags as set_recipe_tags_t
+from .api.recipes import set_recipe_total as set_recipe_total_t
 from .api.entity import set_values as set_values_t
 from . import sockets_t as sockets_t
 from .api.entity import transition_entity_phase as transition_entity_phase_t
@@ -220,6 +222,7 @@ __all__: list[str] = [
     "recipe_tags_t",
     "recipe_workflow_steps_t",
     "recipes_t",
+    "register_sockets_token_t",
     "remove_recipe_from_project_t",
     "remove_recipe_link_t",
     "resolve_entity_ids_t",
@@ -239,6 +242,7 @@ __all__: list[str] = [
     "set_recipe_output_file_t",
     "set_recipe_outputs_t",
     "set_recipe_tags_t",
+    "set_recipe_total_t",
     "set_values_t",
     "sockets_t",
     "transition_entity_phase_t",
uncountable/types/api/integrations/register_sockets_token.py

@@ -0,0 +1,41 @@
+# DO NOT MODIFY -- This file is generated by type_spec
+# ruff: noqa: E402 Q003
+# fmt: off
+# isort: skip_file
+from __future__ import annotations
+import typing  # noqa: F401
+import datetime  # noqa: F401
+from decimal import Decimal  # noqa: F401
+import dataclasses
+from pkgs.serialization import serial_class
+from ... import base_t
+from ... import sockets_t
+
+__all__: list[str] = [
+    "Arguments",
+    "Data",
+    "ENDPOINT_METHOD",
+    "ENDPOINT_PATH",
+]
+
+ENDPOINT_METHOD = "POST"
+ENDPOINT_PATH = "api/external/integrations/register_sockets_token"
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.api.integrations.register_sockets_token.Arguments",
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True)  # type: ignore[literal-required]
+class Arguments:
+    socket_request: sockets_t.SocketTokenRequest
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.api.integrations.register_sockets_token.Data",
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True)  # type: ignore[literal-required]
+class Data:
+    response: sockets_t.SocketTokenResponse
+# DO NOT MODIFY -- This file is generated by type_spec
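
The generated module carries everything needed to call the new route: the ENDPOINT_METHOD/ENDPOINT_PATH constants plus serial dataclasses for the payload and response. The real dispatch lives in client_base.py (+57 in this release, not shown here), so the transport below is purely illustrative:

import requests  # hypothetical transport; the SDK client wires this internally

from uncountable.types.api.integrations import register_sockets_token as rst

def call_register_sockets_token(base_url: str, serialized_args: dict) -> dict:
    # POST the serialized Arguments (a sockets_t.SocketTokenRequest under
    # "socket_request"); the JSON response corresponds to rst.Data.
    resp = requests.request(
        rst.ENDPOINT_METHOD, f"{base_url}/{rst.ENDPOINT_PATH}", json=serialized_args
    )
    resp.raise_for_status()
    return resp.json()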
uncountable/types/api/recipes/edit_recipe_inputs.py

@@ -93,7 +93,7 @@ class RecipeInputEditClearInputs(RecipeInputEditBase):
 @dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True)  # type: ignore[literal-required]
 class RecipeInputEditInputBase(RecipeInputEditBase):
     ingredient_key: identifier_t.IdentifierKey
-    quantity_basis: recipe_inputs_t.QuantityBasis = recipe_inputs_t.QuantityBasis.MASS
+    quantity_basis: recipe_inputs_t.QuantityBasis | None = recipe_inputs_t.QuantityBasis.MASS
     input_value_type: recipe_inputs_t.InputValueType = recipe_inputs_t.InputValueType.VALUE
     value_numeric: Decimal | None = None
     value_str: str | None = None
uncountable/types/api/recipes/get_recipes_data.py

@@ -6,8 +6,10 @@ from __future__ import annotations
 import typing  # noqa: F401
 import datetime  # noqa: F401
 from decimal import Decimal  # noqa: F401
+from enum import StrEnum
 import dataclasses
 from pkgs.serialization import serial_class
+from pkgs.serialization import serial_string_enum
 from ... import base_t
 from ... import experiment_groups_t
 from ... import inputs_t
@@ -25,6 +27,7 @@ __all__: list[str] = [
     "Recipe",
     "RecipeInput",
     "RecipeOutput",
+    "RecipeOutputInclusion",
     "RecipeStep",
     "RecipeStepGroup",
     "RecipeStepRelationship",
@@ -46,6 +49,19 @@ class Arguments:
     project_id: base_t.ObjectId | None = None
     offset: int | None = None
     limit: int | None = None
+    empty_output_behavior: RecipeOutputInclusion | None = None
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_string_enum(
+    labels={
+        "include_null": "Include Outputs without values",
+        "exclude_null": "Exclude Outputs without values",
+    },
+)
+class RecipeOutputInclusion(StrEnum):
+    INCLUDE_NULL = "include_null"
+    EXCLUDE_NULL = "exclude_null"
 
 
 # DO NOT MODIFY -- This file is generated by type_spec
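
The new optional empty_output_behavior argument controls whether recipe outputs with no recorded values appear in the response; leaving it None presumably preserves the previous behavior. As a StrEnum, the values round-trip as plain strings:

from uncountable.types.api.recipes.get_recipes_data import RecipeOutputInclusion

# include_null keeps outputs whose values are unset; exclude_null drops them.
behavior = RecipeOutputInclusion.EXCLUDE_NULL
assert behavior == "exclude_null"  # StrEnum members compare equal to their value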
uncountable/types/api/recipes/lock_recipes.py

@@ -9,6 +9,7 @@ from decimal import Decimal  # noqa: F401
 from enum import StrEnum
 import dataclasses
 from pkgs.serialization import serial_class
+from ... import async_batch_t
 from ... import base_t
 from ... import identifier_t
 
@@ -58,6 +59,6 @@
     named_type_path="sdk.api.recipes.lock_recipes.Data",
 )
 @dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True)  # type: ignore[literal-required]
-class Data:
+class Data(async_batch_t.AsyncBatchActionReturn):
     pass
 # DO NOT MODIFY -- This file is generated by type_spec
uncountable/types/api/recipes/set_recipe_total.py

@@ -0,0 +1,59 @@
+# DO NOT MODIFY -- This file is generated by type_spec
+# ruff: noqa: E402 Q003
+# fmt: off
+# isort: skip_file
+from __future__ import annotations
+import typing  # noqa: F401
+import datetime  # noqa: F401
+from decimal import Decimal  # noqa: F401
+import dataclasses
+from pkgs.serialization import serial_class
+from ... import async_batch_t
+from ... import base_t
+from ... import identifier_t
+from ... import recipe_inputs_t
+from ... import recipe_workflow_steps_t
+
+__all__: list[str] = [
+    "Arguments",
+    "Data",
+    "ENDPOINT_METHOD",
+    "ENDPOINT_PATH",
+    "ValueNumeric",
+]
+
+ENDPOINT_METHOD = "POST"
+ENDPOINT_PATH = "api/external/recipes/set_recipe_total"
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.api.recipes.set_recipe_total.ValueNumeric",
+    to_string_values={"value"},
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True)  # type: ignore[literal-required]
+class ValueNumeric:
+    value: Decimal
+    quantity_basis: recipe_inputs_t.QuantityBasis
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.api.recipes.set_recipe_total.Arguments",
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True)  # type: ignore[literal-required]
+class Arguments:
+    recipe_key: identifier_t.IdentifierKey
+    value: ValueNumeric
+    recipe_workflow_step_identifier: recipe_workflow_steps_t.RecipeWorkflowStepIdentifier | None = None
+    calculation_key: identifier_t.IdentifierKey | None = None
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.api.recipes.set_recipe_total.Data",
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True)  # type: ignore[literal-required]
+class Data(async_batch_t.AsyncBatchActionReturn):
+    pass
+# DO NOT MODIFY -- This file is generated by type_spec
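
A request against the new endpoint pairs a Decimal total with a quantity basis; the to_string_values={"value"} directive serializes the Decimal as a string on the wire. A small construction sketch (QuantityBasis.MASS is known to exist from the edit_recipe_inputs hunk above; IdentifierKey construction is omitted since its factory is not shown in this diff):

from decimal import Decimal

from uncountable.types import recipe_inputs_t
from uncountable.types.api.recipes import set_recipe_total

# A mass total of 125.5; Arguments would pair this with a recipe_key and,
# optionally, a workflow step identifier or calculation key.
total = set_recipe_total.ValueNumeric(
    value=Decimal("125.5"),
    quantity_basis=recipe_inputs_t.QuantityBasis.MASS,
)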
uncountable/types/api/recipes/unlock_recipes.py

@@ -9,6 +9,7 @@ from decimal import Decimal  # noqa: F401
 from enum import StrEnum
 import dataclasses
 from pkgs.serialization import serial_class
+from ... import async_batch_t
 from ... import base_t
 from ... import identifier_t
 
@@ -45,6 +46,6 @@
     named_type_path="sdk.api.recipes.unlock_recipes.Data",
 )
 @dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True)  # type: ignore[literal-required]
-class Data:
+class Data(async_batch_t.AsyncBatchActionReturn):
     pass
 # DO NOT MODIFY -- This file is generated by type_spec
uncountable/types/api/uploader/complete_async_parse.py

@@ -10,6 +10,8 @@ import dataclasses
 from pkgs.serialization import serial_class
 from ... import async_batch_t
 from ... import base_t
+from ... import generic_upload_t
+from ... import identifier_t
 from ... import uploader_t
 
 __all__: list[str] = [
@@ -30,6 +32,8 @@ ENDPOINT_PATH = "api/external/uploader/complete_async_parse"
 @dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True)  # type: ignore[literal-required]
 class Arguments:
     parsed_file_data: list[uploader_t.ParsedFileData]
+    async_job_key: identifier_t.IdentifierKey
+    upload_destination: generic_upload_t.UploadDestinationRecipe
 
 
 # DO NOT MODIFY -- This file is generated by type_spec
uncountable/types/async_batch_processor.py

@@ -13,6 +13,7 @@ import uncountable.types.api.recipes.associate_recipe_as_lot as associate_recipe
 from uncountable.types import async_batch_t
 from uncountable.types import base_t
 import uncountable.types.api.recipes.clear_recipe_outputs as clear_recipe_outputs_t
+import uncountable.types.api.uploader.complete_async_parse as complete_async_parse_t
 import uncountable.types.api.runsheet.complete_async_upload as complete_async_upload_t
 import uncountable.types.api.recipes.create_mix_order as create_mix_order_t
 import uncountable.types.api.entity.create_or_update_entity as create_or_update_entity_t
@@ -24,6 +25,7 @@ from uncountable.types import generic_upload_t
 import uncountable.types.api.entity.grant_entity_permissions as grant_entity_permissions_t
 from uncountable.types import identifier_t
 import uncountable.types.api.uploader.invoke_uploader as invoke_uploader_t
+import uncountable.types.api.recipes.lock_recipes as lock_recipes_t
 import uncountable.types.api.entity.lookup_entity as lookup_entity_t
 from uncountable.types import notifications_t
 import uncountable.types.api.integrations.push_notification as push_notification_t
@@ -32,6 +34,8 @@ from uncountable.types import recipe_metadata_t
 from uncountable.types import recipe_workflow_steps_t
 import uncountable.types.api.entity.set_entity_field_values as set_entity_field_values_t
 import uncountable.types.api.recipes.set_recipe_metadata as set_recipe_metadata_t
+import uncountable.types.api.recipes.unlock_recipes as unlock_recipes_t
+from uncountable.types import uploader_t
 import uncountable.types.api.condition_parameters.upsert_condition_match as upsert_condition_match_t
 import uuid
 from abc import ABC, abstractmethod
@@ -187,6 +191,41 @@ class AsyncBatchProcessorBase(ABC):
             batch_reference=req.batch_reference,
         )
 
+    def complete_async_parse(
+        self,
+        *,
+        parsed_file_data: list[uploader_t.ParsedFileData],
+        async_job_key: identifier_t.IdentifierKey,
+        upload_destination: generic_upload_t.UploadDestinationRecipe,
+        depends_on: list[str] | None = None,
+    ) -> async_batch_t.QueuedAsyncBatchRequest:
+        """Parses uploaded files asynchronously
+
+        :param depends_on: A list of batch reference keys to process before processing this request
+        """
+        args = complete_async_parse_t.Arguments(
+            parsed_file_data=parsed_file_data,
+            async_job_key=async_job_key,
+            upload_destination=upload_destination,
+        )
+        json_data = serialize_for_api(args)
+
+        batch_reference = str(uuid.uuid4())
+
+        req = async_batch_t.AsyncBatchRequest(
+            path=async_batch_t.AsyncBatchRequestPath.COMPLETE_ASYNC_PARSE,
+            data=json_data,
+            depends_on=depends_on,
+            batch_reference=batch_reference,
+        )
+
+        self._enqueue(req)
+
+        return async_batch_t.QueuedAsyncBatchRequest(
+            path=req.path,
+            batch_reference=req.batch_reference,
+        )
+
     def complete_async_upload(
         self,
         *,
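
Each generated batch method follows the same enqueue shape visible here: serialize the Arguments, mint a uuid4 batch_reference, enqueue an AsyncBatchRequest, and return a QueuedAsyncBatchRequest whose batch_reference later requests can name in depends_on. A hedged sketch of that ordering, using the lock_recipes method added further down (processor stands for any concrete AsyncBatchProcessorBase):

from uncountable.types import async_batch_t, generic_upload_t, identifier_t, uploader_t
from uncountable.types.async_batch_processor import AsyncBatchProcessorBase

def parse_then_lock(
    processor: AsyncBatchProcessorBase,
    parsed: list[uploader_t.ParsedFileData],
    job_key: identifier_t.IdentifierKey,
    destination: generic_upload_t.UploadDestinationRecipe,
    recipes: list[identifier_t.IdentifierKey],
) -> async_batch_t.QueuedAsyncBatchRequest:
    parse_req = processor.complete_async_parse(
        parsed_file_data=parsed,
        async_job_key=job_key,
        upload_destination=destination,
    )
    # The lock is processed only after the parse request it depends on.
    return processor.lock_recipes(
        recipes=recipes,
        globally_removable=False,
        depends_on=[parse_req.batch_reference],
    )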
@@ -461,6 +500,53 @@
             batch_reference=req.batch_reference,
         )
 
+    def lock_recipes(
+        self,
+        *,
+        type: lock_recipes_t.RecipeLockType = lock_recipes_t.RecipeLockType.ALL,
+        recipes: list[identifier_t.IdentifierKey],
+        globally_removable: bool,
+        lock_samples: bool | None = None,
+        comments: str | None = None,
+        depends_on: list[str] | None = None,
+    ) -> async_batch_t.QueuedAsyncBatchRequest:
+        """Lock experiments. Experiments will require unlocking to be editable. Edits to the experiments are blocked while they are locked.
+
+        :param type: The type of lock to set.
+            All = both inputs and measurements are locked.
+            Inputs Only = only inputs are locked from editing.
+
+        :param recipes: The recipes to lock, a maximum of 100 can be sent
+        :param globally_removable: If true any user can unlock the experiment. If false the locking user is the only user that can unlock.
+        :param lock_samples: Should associated experiment test samples also be locked.
+        :param comments: Optional comment describing the purpose of locking
+        :param depends_on: A list of batch reference keys to process before processing this request
+        """
+        args = lock_recipes_t.Arguments(
+            type=type,
+            recipes=recipes,
+            globally_removable=globally_removable,
+            lock_samples=lock_samples,
+            comments=comments,
+        )
+        json_data = serialize_for_api(args)
+
+        batch_reference = str(uuid.uuid4())
+
+        req = async_batch_t.AsyncBatchRequest(
+            path=async_batch_t.AsyncBatchRequestPath.LOCK_RECIPES,
+            data=json_data,
+            depends_on=depends_on,
+            batch_reference=batch_reference,
+        )
+
+        self._enqueue(req)
+
+        return async_batch_t.QueuedAsyncBatchRequest(
+            path=req.path,
+            batch_reference=req.batch_reference,
+        )
+
     def lookup_entity(
         self,
         *,
@@ -603,6 +689,44 @@
             batch_reference=req.batch_reference,
         )
 
+    def unlock_recipes(
+        self,
+        *,
+        type: unlock_recipes_t.RecipeUnlockType = unlock_recipes_t.RecipeUnlockType.STANDARD,
+        recipes: list[identifier_t.IdentifierKey],
+        unlock_samples: bool | None = None,
+        depends_on: list[str] | None = None,
+    ) -> async_batch_t.QueuedAsyncBatchRequest:
+        """Unlock experiments. Experiments will edtiable after unlocking if they are currently locked.
+
+        :param type: The method to unlock recipes. Default is standard.
+        :param recipes: The recipes to unlock, a maximum of 100 can be sent
+        :param unlock_samples: Should associated experiment test samples also be unlocked.
+        :param depends_on: A list of batch reference keys to process before processing this request
+        """
+        args = unlock_recipes_t.Arguments(
+            type=type,
+            recipes=recipes,
+            unlock_samples=unlock_samples,
+        )
+        json_data = serialize_for_api(args)
+
+        batch_reference = str(uuid.uuid4())
+
+        req = async_batch_t.AsyncBatchRequest(
+            path=async_batch_t.AsyncBatchRequestPath.UNLOCK_RECIPES,
+            data=json_data,
+            depends_on=depends_on,
+            batch_reference=batch_reference,
+        )
+
+        self._enqueue(req)
+
+        return async_batch_t.QueuedAsyncBatchRequest(
+            path=req.path,
+            batch_reference=req.batch_reference,
+        )
+
     def upsert_condition_match(
         self,
         *,
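
Since both methods return a QueuedAsyncBatchRequest, a lock/unlock pair can be ordered within one batch; a sketch under the signatures shown above:

from uncountable.types import async_batch_t, identifier_t
from uncountable.types.async_batch_processor import AsyncBatchProcessorBase

def lock_then_unlock(
    processor: AsyncBatchProcessorBase,
    recipes: list[identifier_t.IdentifierKey],  # at most 100 per request
) -> async_batch_t.QueuedAsyncBatchRequest:
    lock_req = processor.lock_recipes(
        recipes=recipes,
        globally_removable=True,  # any user may later unlock
        comments="locked for automated import",
    )
    # depends_on orders the batch: the unlock runs only after the lock request.
    return processor.unlock_recipes(
        recipes=recipes,
        depends_on=[lock_req.batch_reference],
    )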
uncountable/types/async_batch_t.py

@@ -46,6 +46,8 @@ class AsyncBatchRequestPath(StrEnum):
     COMPLETE_ASYNC_UPLOAD = "runsheet/complete_async_upload"
     CREATE_MIX_ORDER = "recipes/create_mix_order"
     PUSH_NOTIFICATION = "integrations/push_notification"
+    COMPLETE_ASYNC_PARSE = "uploader/complete_async_parse"
+    SET_RECIPE_TOTAL = "recipes/set_recipe_total"
 
 
 # DO NOT MODIFY -- This file is generated by type_spec