UncountablePythonSDK 0.0.125__py3-none-any.whl → 0.0.127__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of UncountablePythonSDK might be problematic.

Files changed (48)
  1. examples/integration-server/jobs/materials_auto/example_instrument.py +67 -38
  2. examples/integration-server/jobs/materials_auto/example_predictions.py +61 -0
  3. examples/integration-server/jobs/materials_auto/profile.yaml +9 -0
  4. examples/integration-server/pyproject.toml +3 -3
  5. pkgs/type_spec/builder.py +19 -9
  6. pkgs/type_spec/emit_typescript.py +2 -2
  7. pkgs/type_spec/type_info/emit_type_info.py +14 -1
  8. pkgs/type_spec/value_spec/__main__.py +2 -2
  9. uncountable/integration/cli.py +29 -1
  10. uncountable/integration/executors/executors.py +1 -2
  11. uncountable/integration/executors/generic_upload_executor.py +1 -1
  12. uncountable/integration/job.py +1 -0
  13. uncountable/integration/queue_runner/command_server/__init__.py +4 -0
  14. uncountable/integration/queue_runner/command_server/command_client.py +39 -0
  15. uncountable/integration/queue_runner/command_server/command_server.py +37 -0
  16. uncountable/integration/queue_runner/command_server/protocol/command_server.proto +18 -0
  17. uncountable/integration/queue_runner/command_server/protocol/command_server_pb2.py +21 -13
  18. uncountable/integration/queue_runner/command_server/protocol/command_server_pb2.pyi +28 -1
  19. uncountable/integration/queue_runner/command_server/protocol/command_server_pb2_grpc.py +90 -0
  20. uncountable/integration/queue_runner/command_server/types.py +24 -1
  21. uncountable/integration/queue_runner/datastore/datastore_sqlite.py +107 -8
  22. uncountable/integration/queue_runner/datastore/model.py +8 -1
  23. uncountable/integration/queue_runner/job_scheduler.py +42 -2
  24. uncountable/integration/queue_runner/worker.py +1 -1
  25. uncountable/integration/server.py +36 -6
  26. uncountable/types/__init__.py +8 -0
  27. uncountable/types/api/integrations/register_sockets_token.py +41 -0
  28. uncountable/types/api/recipes/edit_recipe_inputs.py +1 -1
  29. uncountable/types/api/recipes/get_recipe_output_metadata.py +2 -2
  30. uncountable/types/api/recipes/get_recipes_data.py +16 -0
  31. uncountable/types/api/recipes/lock_recipes.py +2 -1
  32. uncountable/types/api/recipes/set_recipe_total.py +59 -0
  33. uncountable/types/api/recipes/unlock_recipes.py +2 -1
  34. uncountable/types/api/uploader/complete_async_parse.py +46 -0
  35. uncountable/types/async_batch_processor.py +124 -0
  36. uncountable/types/async_batch_t.py +2 -0
  37. uncountable/types/client_base.py +76 -0
  38. uncountable/types/entity_t.py +1 -1
  39. uncountable/types/queued_job.py +1 -0
  40. uncountable/types/queued_job_t.py +9 -0
  41. uncountable/types/sockets.py +9 -0
  42. uncountable/types/sockets_t.py +99 -0
  43. uncountable/types/uploader.py +24 -0
  44. uncountable/types/uploader_t.py +222 -0
  45. {uncountablepythonsdk-0.0.125.dist-info → uncountablepythonsdk-0.0.127.dist-info}/METADATA +1 -1
  46. {uncountablepythonsdk-0.0.125.dist-info → uncountablepythonsdk-0.0.127.dist-info}/RECORD +48 -42
  47. {uncountablepythonsdk-0.0.125.dist-info → uncountablepythonsdk-0.0.127.dist-info}/WHEEL +0 -0
  48. {uncountablepythonsdk-0.0.125.dist-info → uncountablepythonsdk-0.0.127.dist-info}/top_level.txt +0 -0

uncountable/integration/server.py
@@ -11,7 +11,11 @@ from apscheduler.triggers.cron import CronTrigger
 from opentelemetry.trace import get_current_span
 from sqlalchemy.engine.base import Engine
 
+from uncountable.core.environment import get_local_admin_server_port
 from uncountable.integration.cron import CronJobArgs, cron_job_executor
+from uncountable.integration.queue_runner.command_server.command_client import (
+    send_vaccuum_queued_jobs_message,
+)
 from uncountable.integration.telemetry import Logger
 from uncountable.types import base_t, job_definition_t
 from uncountable.types.job_definition_t import (
@@ -21,6 +25,14 @@ from uncountable.types.job_definition_t import (
 
 _MAX_APSCHEDULER_CONCURRENT_JOBS = 1
 
+VACCUUM_QUEUED_JOBS_JOB_ID = "vacuum_queued_jobs"
+
+STATIC_JOB_IDS = {VACCUUM_QUEUED_JOBS_JOB_ID}
+
+
+def vaccuum_queued_jobs() -> None:
+    send_vaccuum_queued_jobs_message(port=get_local_admin_server_port())
+
 
 class IntegrationServer:
     _scheduler: BaseScheduler
@@ -36,11 +48,27 @@ class IntegrationServer:
         )
         self._server_logger = Logger(get_current_span())
 
+    def _register_static_jobs(self) -> None:
+        all_job_ids = {job.id for job in self._scheduler.get_jobs()}
+        if VACCUUM_QUEUED_JOBS_JOB_ID in all_job_ids:
+            self._scheduler.remove_job(VACCUUM_QUEUED_JOBS_JOB_ID)
+
+        self._scheduler.add_job(
+            vaccuum_queued_jobs,
+            max_instances=1,
+            coalesce=True,
+            trigger=CronTrigger.from_crontab("5 4 * * 4"),
+            name="Vaccuum queued jobs",
+            id=VACCUUM_QUEUED_JOBS_JOB_ID,
+            kwargs={},
+            misfire_grace_time=None,
+        )
+
     def register_jobs(self, profiles: list[job_definition_t.ProfileMetadata]) -> None:
-        valid_job_ids = []
+        valid_job_ids: set[str] = set()
         for profile_metadata in profiles:
             for job_defn in profile_metadata.jobs:
-                valid_job_ids.append(job_defn.id)
+                valid_job_ids.add(job_defn.id)
                 match job_defn:
                     case CronJobDefinition():
                         # Add to ap scheduler
@@ -90,10 +118,11 @@
                         pass
                    case _:
                         assert_never(job_defn)
-        all_jobs = self._scheduler.get_jobs()
-        for job in all_jobs:
-            if job.id not in valid_job_ids:
-                self._scheduler.remove_job(job.id)
+        all_job_ids = {job.id for job in self._scheduler.get_jobs()}
+        invalid_job_ids = all_job_ids.difference(valid_job_ids.union(STATIC_JOB_IDS))
+
+        for job_id in invalid_job_ids:
+            self._scheduler.remove_job(job_id)
 
     def serve_forever(self) -> None:
         signal.pause()
@@ -106,6 +135,7 @@
 
     def __enter__(self) -> "IntegrationServer":
         self._start_apscheduler()
+        self._register_static_jobs()
        return self
 
     def __exit__(
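
The `_register_static_jobs` hook added above removes any stale copy of the vacuum job before re-adding it, so repeated startups stay idempotent, and schedules it with the crontab expression "5 4 * * 4" (04:05 every Thursday). A minimal standalone sketch of the same registration pattern follows; the BackgroundScheduler and the stand-in task are illustrative assumptions, not part of this diff.

from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.triggers.cron import CronTrigger

VACUUM_JOB_ID = "vacuum_queued_jobs"  # mirrors the constant's value in the diff above


def vacuum_task() -> None:
    # Stand-in for send_vaccuum_queued_jobs_message(port=...) in the SDK.
    print("vacuuming queued jobs")


scheduler = BackgroundScheduler()
scheduler.start()

# Drop any stale copy first so re-registration on startup does not duplicate the job.
if any(job.id == VACUUM_JOB_ID for job in scheduler.get_jobs()):
    scheduler.remove_job(VACUUM_JOB_ID)

scheduler.add_job(
    vacuum_task,
    trigger=CronTrigger.from_crontab("5 4 * * 4"),  # 04:05 every Thursday
    id=VACUUM_JOB_ID,
    coalesce=True,
    max_instances=1,
    misfire_grace_time=None,
)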

uncountable/types/__init__.py
@@ -16,6 +16,7 @@ from . import calculations_t as calculations_t
 from . import chemical_structure_t as chemical_structure_t
 from .api.recipes import clear_recipe_outputs as clear_recipe_outputs_t
 from . import client_config_t as client_config_t
+from .api.uploader import complete_async_parse as complete_async_parse_t
 from .api.runsheet import complete_async_upload as complete_async_upload_t
 from .api.chemical import convert_chemical_formats as convert_chemical_formats_t
 from .api.entity import create_entities as create_entities_t
@@ -92,6 +93,7 @@ from . import recipe_output_metadata_t as recipe_output_metadata_t
 from . import recipe_tags_t as recipe_tags_t
 from . import recipe_workflow_steps_t as recipe_workflow_steps_t
 from . import recipes_t as recipes_t
+from .api.integrations import register_sockets_token as register_sockets_token_t
 from .api.recipes import remove_recipe_from_project as remove_recipe_from_project_t
 from .api.recipe_links import remove_recipe_link as remove_recipe_link_t
 from .api.entity import resolve_entity_ids as resolve_entity_ids_t
@@ -111,6 +113,7 @@ from .api.recipes import set_recipe_output_annotations as set_recipe_output_anno
 from .api.recipes import set_recipe_output_file as set_recipe_output_file_t
 from .api.recipes import set_recipe_outputs as set_recipe_outputs_t
 from .api.recipes import set_recipe_tags as set_recipe_tags_t
+from .api.recipes import set_recipe_total as set_recipe_total_t
 from .api.entity import set_values as set_values_t
 from . import sockets_t as sockets_t
 from .api.entity import transition_entity_phase as transition_entity_phase_t
@@ -119,6 +122,7 @@ from . import units_t as units_t
 from .api.entity import unlock_entity as unlock_entity_t
 from .api.recipes import unlock_recipes as unlock_recipes_t
 from .api.material_families import update_entity_material_families as update_entity_material_families_t
+from . import uploader_t as uploader_t
 from .api.condition_parameters import upsert_condition_match as upsert_condition_match_t
 from .api.field_options import upsert_field_options as upsert_field_options_t
 from . import users_t as users_t
@@ -141,6 +145,7 @@ __all__: list[str] = [
     "chemical_structure_t",
     "clear_recipe_outputs_t",
     "client_config_t",
+    "complete_async_parse_t",
     "complete_async_upload_t",
     "convert_chemical_formats_t",
     "create_entities_t",
@@ -217,6 +222,7 @@ __all__: list[str] = [
     "recipe_tags_t",
     "recipe_workflow_steps_t",
     "recipes_t",
+    "register_sockets_token_t",
     "remove_recipe_from_project_t",
     "remove_recipe_link_t",
     "resolve_entity_ids_t",
@@ -236,6 +242,7 @@ __all__: list[str] = [
     "set_recipe_output_file_t",
     "set_recipe_outputs_t",
     "set_recipe_tags_t",
+    "set_recipe_total_t",
     "set_values_t",
     "sockets_t",
     "transition_entity_phase_t",
@@ -244,6 +251,7 @@ __all__: list[str] = [
     "unlock_entity_t",
     "unlock_recipes_t",
     "update_entity_material_families_t",
+    "uploader_t",
     "upsert_condition_match_t",
     "upsert_field_options_t",
     "users_t",

uncountable/types/api/integrations/register_sockets_token.py (new file)
@@ -0,0 +1,41 @@
+# DO NOT MODIFY -- This file is generated by type_spec
+# ruff: noqa: E402 Q003
+# fmt: off
+# isort: skip_file
+from __future__ import annotations
+import typing # noqa: F401
+import datetime # noqa: F401
+from decimal import Decimal # noqa: F401
+import dataclasses
+from pkgs.serialization import serial_class
+from ... import base_t
+from ... import sockets_t
+
+__all__: list[str] = [
+    "Arguments",
+    "Data",
+    "ENDPOINT_METHOD",
+    "ENDPOINT_PATH",
+]
+
+ENDPOINT_METHOD = "POST"
+ENDPOINT_PATH = "api/external/integrations/register_sockets_token"
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.api.integrations.register_sockets_token.Arguments",
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
+class Arguments:
+    socket_request: sockets_t.SocketTokenRequest
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.api.integrations.register_sockets_token.Data",
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
+class Data:
+    response: sockets_t.SocketTokenResponse
+# DO NOT MODIFY -- This file is generated by type_spec
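
A brief sketch of building a request payload from this generated module; the SocketTokenRequest value is taken as a parameter because its fields are not shown in this diff, and the HTTP plumbing is left out.

from uncountable.types import register_sockets_token_t, sockets_t


def build_register_sockets_token_args(
    socket_request: sockets_t.SocketTokenRequest,
) -> register_sockets_token_t.Arguments:
    # The endpoint above is POST api/external/integrations/register_sockets_token.
    return register_sockets_token_t.Arguments(socket_request=socket_request)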

uncountable/types/api/recipes/edit_recipe_inputs.py
@@ -93,7 +93,7 @@ class RecipeInputEditClearInputs(RecipeInputEditBase):
 @dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
 class RecipeInputEditInputBase(RecipeInputEditBase):
     ingredient_key: identifier_t.IdentifierKey
-    quantity_basis: recipe_inputs_t.QuantityBasis = recipe_inputs_t.QuantityBasis.MASS
+    quantity_basis: recipe_inputs_t.QuantityBasis | None = recipe_inputs_t.QuantityBasis.MASS
     input_value_type: recipe_inputs_t.InputValueType = recipe_inputs_t.InputValueType.VALUE
     value_numeric: Decimal | None = None
     value_str: str | None = None

uncountable/types/api/recipes/get_recipe_output_metadata.py
@@ -42,8 +42,8 @@ class Arguments:
 class RecipeOutputMetadata:
     recipe_output_id: base_t.ObjectId
     recipe_output_metadata_field_id: base_t.ObjectId
-    quantity_dec: Decimal
-    quantity_json: base_t.JsonValue
+    quantity_dec: Decimal | None = None
+    quantity_json: base_t.JsonValue | None = None
 
 
 # DO NOT MODIFY -- This file is generated by type_spec

uncountable/types/api/recipes/get_recipes_data.py
@@ -6,8 +6,10 @@ from __future__ import annotations
 import typing # noqa: F401
 import datetime # noqa: F401
 from decimal import Decimal # noqa: F401
+from enum import StrEnum
 import dataclasses
 from pkgs.serialization import serial_class
+from pkgs.serialization import serial_string_enum
 from ... import base_t
 from ... import experiment_groups_t
 from ... import inputs_t
@@ -25,6 +27,7 @@ __all__: list[str] = [
     "Recipe",
     "RecipeInput",
     "RecipeOutput",
+    "RecipeOutputInclusion",
     "RecipeStep",
     "RecipeStepGroup",
     "RecipeStepRelationship",
@@ -46,6 +49,19 @@ class Arguments:
     project_id: base_t.ObjectId | None = None
     offset: int | None = None
     limit: int | None = None
+    empty_output_behavior: RecipeOutputInclusion | None = None
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_string_enum(
+    labels={
+        "include_null": "Include Outputs without values",
+        "exclude_null": "Exclude Outputs without values",
+    },
+)
+class RecipeOutputInclusion(StrEnum):
+    INCLUDE_NULL = "include_null"
+    EXCLUDE_NULL = "exclude_null"
 
 
 # DO NOT MODIFY -- This file is generated by type_spec
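
The new empty_output_behavior argument takes the RecipeOutputInclusion enum added above. A small sketch of selecting a value for it, assuming this module is re-exported as get_recipes_data_t from uncountable.types like the other api modules; how the Arguments dataclass is ultimately constructed depends on client code not shown in this diff.

from uncountable.types import get_recipes_data_t


def output_inclusion(include_empty: bool) -> get_recipes_data_t.RecipeOutputInclusion:
    # Map a simple flag onto the new enum; the serialized values are
    # "include_null" and "exclude_null".
    if include_empty:
        return get_recipes_data_t.RecipeOutputInclusion.INCLUDE_NULL
    return get_recipes_data_t.RecipeOutputInclusion.EXCLUDE_NULL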

uncountable/types/api/recipes/lock_recipes.py
@@ -9,6 +9,7 @@ from decimal import Decimal # noqa: F401
 from enum import StrEnum
 import dataclasses
 from pkgs.serialization import serial_class
+from ... import async_batch_t
 from ... import base_t
 from ... import identifier_t
 
@@ -58,6 +59,6 @@ class Arguments:
     named_type_path="sdk.api.recipes.lock_recipes.Data",
 )
 @dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
-class Data:
+class Data(async_batch_t.AsyncBatchActionReturn):
     pass
 # DO NOT MODIFY -- This file is generated by type_spec

uncountable/types/api/recipes/set_recipe_total.py (new file)
@@ -0,0 +1,59 @@
+# DO NOT MODIFY -- This file is generated by type_spec
+# ruff: noqa: E402 Q003
+# fmt: off
+# isort: skip_file
+from __future__ import annotations
+import typing # noqa: F401
+import datetime # noqa: F401
+from decimal import Decimal # noqa: F401
+import dataclasses
+from pkgs.serialization import serial_class
+from ... import async_batch_t
+from ... import base_t
+from ... import identifier_t
+from ... import recipe_inputs_t
+from ... import recipe_workflow_steps_t
+
+__all__: list[str] = [
+    "Arguments",
+    "Data",
+    "ENDPOINT_METHOD",
+    "ENDPOINT_PATH",
+    "ValueNumeric",
+]
+
+ENDPOINT_METHOD = "POST"
+ENDPOINT_PATH = "api/external/recipes/set_recipe_total"
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.api.recipes.set_recipe_total.ValueNumeric",
+    to_string_values={"value"},
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
+class ValueNumeric:
+    value: Decimal
+    quantity_basis: recipe_inputs_t.QuantityBasis
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.api.recipes.set_recipe_total.Arguments",
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
+class Arguments:
+    recipe_key: identifier_t.IdentifierKey
+    value: ValueNumeric
+    recipe_workflow_step_identifier: recipe_workflow_steps_t.RecipeWorkflowStepIdentifier | None = None
+    calculation_key: identifier_t.IdentifierKey | None = None
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.api.recipes.set_recipe_total.Data",
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
+class Data(async_batch_t.AsyncBatchActionReturn):
+    pass
+# DO NOT MODIFY -- This file is generated by type_spec
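
A minimal sketch of building the new set_recipe_total payload from the generated types above; the recipe_key is taken as a parameter because IdentifierKey construction is not shown in this diff, the Decimal amount is arbitrary, and recipe_inputs_t is assumed to be importable from uncountable.types like the other *_t modules.

from decimal import Decimal

from uncountable.types import identifier_t, recipe_inputs_t, set_recipe_total_t


def build_set_total_args(
    recipe_key: identifier_t.IdentifierKey,
) -> set_recipe_total_t.Arguments:
    # Sets the recipe total to 100 on a mass basis; the optional workflow-step and
    # calculation keys keep their None defaults.
    return set_recipe_total_t.Arguments(
        recipe_key=recipe_key,
        value=set_recipe_total_t.ValueNumeric(
            value=Decimal("100"),
            quantity_basis=recipe_inputs_t.QuantityBasis.MASS,
        ),
    )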

uncountable/types/api/recipes/unlock_recipes.py
@@ -9,6 +9,7 @@ from decimal import Decimal # noqa: F401
 from enum import StrEnum
 import dataclasses
 from pkgs.serialization import serial_class
+from ... import async_batch_t
 from ... import base_t
 from ... import identifier_t
 
@@ -45,6 +46,6 @@ class Arguments:
     named_type_path="sdk.api.recipes.unlock_recipes.Data",
 )
 @dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
-class Data:
+class Data(async_batch_t.AsyncBatchActionReturn):
     pass
 # DO NOT MODIFY -- This file is generated by type_spec

uncountable/types/api/uploader/complete_async_parse.py (new file)
@@ -0,0 +1,46 @@
+# DO NOT MODIFY -- This file is generated by type_spec
+# ruff: noqa: E402 Q003
+# fmt: off
+# isort: skip_file
+from __future__ import annotations
+import typing # noqa: F401
+import datetime # noqa: F401
+from decimal import Decimal # noqa: F401
+import dataclasses
+from pkgs.serialization import serial_class
+from ... import async_batch_t
+from ... import base_t
+from ... import generic_upload_t
+from ... import identifier_t
+from ... import uploader_t
+
+__all__: list[str] = [
+    "Arguments",
+    "Data",
+    "ENDPOINT_METHOD",
+    "ENDPOINT_PATH",
+]
+
+ENDPOINT_METHOD = "POST"
+ENDPOINT_PATH = "api/external/uploader/complete_async_parse"
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.api.uploader.complete_async_parse.Arguments",
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
+class Arguments:
+    parsed_file_data: list[uploader_t.ParsedFileData]
+    async_job_key: identifier_t.IdentifierKey
+    upload_destination: generic_upload_t.UploadDestinationRecipe
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.api.uploader.complete_async_parse.Data",
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
+class Data(async_batch_t.AsyncBatchActionReturn):
+    pass
+# DO NOT MODIFY -- This file is generated by type_spec

uncountable/types/async_batch_processor.py
@@ -13,6 +13,7 @@ import uncountable.types.api.recipes.associate_recipe_as_lot as associate_recipe
 from uncountable.types import async_batch_t
 from uncountable.types import base_t
 import uncountable.types.api.recipes.clear_recipe_outputs as clear_recipe_outputs_t
+import uncountable.types.api.uploader.complete_async_parse as complete_async_parse_t
 import uncountable.types.api.runsheet.complete_async_upload as complete_async_upload_t
 import uncountable.types.api.recipes.create_mix_order as create_mix_order_t
 import uncountable.types.api.entity.create_or_update_entity as create_or_update_entity_t
@@ -24,6 +25,7 @@ from uncountable.types import generic_upload_t
 import uncountable.types.api.entity.grant_entity_permissions as grant_entity_permissions_t
 from uncountable.types import identifier_t
 import uncountable.types.api.uploader.invoke_uploader as invoke_uploader_t
+import uncountable.types.api.recipes.lock_recipes as lock_recipes_t
 import uncountable.types.api.entity.lookup_entity as lookup_entity_t
 from uncountable.types import notifications_t
 import uncountable.types.api.integrations.push_notification as push_notification_t
@@ -32,6 +34,8 @@ from uncountable.types import recipe_metadata_t
 from uncountable.types import recipe_workflow_steps_t
 import uncountable.types.api.entity.set_entity_field_values as set_entity_field_values_t
 import uncountable.types.api.recipes.set_recipe_metadata as set_recipe_metadata_t
+import uncountable.types.api.recipes.unlock_recipes as unlock_recipes_t
+from uncountable.types import uploader_t
 import uncountable.types.api.condition_parameters.upsert_condition_match as upsert_condition_match_t
 import uuid
 from abc import ABC, abstractmethod
@@ -187,6 +191,41 @@ class AsyncBatchProcessorBase(ABC):
             batch_reference=req.batch_reference,
         )
 
+    def complete_async_parse(
+        self,
+        *,
+        parsed_file_data: list[uploader_t.ParsedFileData],
+        async_job_key: identifier_t.IdentifierKey,
+        upload_destination: generic_upload_t.UploadDestinationRecipe,
+        depends_on: list[str] | None = None,
+    ) -> async_batch_t.QueuedAsyncBatchRequest:
+        """Parses uploaded files asynchronously
+
+        :param depends_on: A list of batch reference keys to process before processing this request
+        """
+        args = complete_async_parse_t.Arguments(
+            parsed_file_data=parsed_file_data,
+            async_job_key=async_job_key,
+            upload_destination=upload_destination,
+        )
+        json_data = serialize_for_api(args)
+
+        batch_reference = str(uuid.uuid4())
+
+        req = async_batch_t.AsyncBatchRequest(
+            path=async_batch_t.AsyncBatchRequestPath.COMPLETE_ASYNC_PARSE,
+            data=json_data,
+            depends_on=depends_on,
+            batch_reference=batch_reference,
+        )
+
+        self._enqueue(req)
+
+        return async_batch_t.QueuedAsyncBatchRequest(
+            path=req.path,
+            batch_reference=req.batch_reference,
+        )
+
     def complete_async_upload(
         self,
         *,
@@ -461,6 +500,53 @@ class AsyncBatchProcessorBase(ABC):
             batch_reference=req.batch_reference,
         )
 
+    def lock_recipes(
+        self,
+        *,
+        type: lock_recipes_t.RecipeLockType = lock_recipes_t.RecipeLockType.ALL,
+        recipes: list[identifier_t.IdentifierKey],
+        globally_removable: bool,
+        lock_samples: bool | None = None,
+        comments: str | None = None,
+        depends_on: list[str] | None = None,
+    ) -> async_batch_t.QueuedAsyncBatchRequest:
+        """Lock experiments. Experiments will require unlocking to be editable. Edits to the experiments are blocked while they are locked.
+
+        :param type: The type of lock to set.
+            All = both inputs and measurements are locked.
+            Inputs Only = only inputs are locked from editing.
+
+        :param recipes: The recipes to lock, a maximum of 100 can be sent
+        :param globally_removable: If true any user can unlock the experiment. If false the locking user is the only user that can unlock.
+        :param lock_samples: Should associated experiment test samples also be locked.
+        :param comments: Optional comment describing the purpose of locking
+        :param depends_on: A list of batch reference keys to process before processing this request
+        """
+        args = lock_recipes_t.Arguments(
+            type=type,
+            recipes=recipes,
+            globally_removable=globally_removable,
+            lock_samples=lock_samples,
+            comments=comments,
+        )
+        json_data = serialize_for_api(args)
+
+        batch_reference = str(uuid.uuid4())
+
+        req = async_batch_t.AsyncBatchRequest(
+            path=async_batch_t.AsyncBatchRequestPath.LOCK_RECIPES,
+            data=json_data,
+            depends_on=depends_on,
+            batch_reference=batch_reference,
+        )
+
+        self._enqueue(req)
+
+        return async_batch_t.QueuedAsyncBatchRequest(
+            path=req.path,
+            batch_reference=req.batch_reference,
+        )
+
     def lookup_entity(
         self,
         *,
@@ -603,6 +689,44 @@ class AsyncBatchProcessorBase(ABC):
             batch_reference=req.batch_reference,
         )
 
+    def unlock_recipes(
+        self,
+        *,
+        type: unlock_recipes_t.RecipeUnlockType = unlock_recipes_t.RecipeUnlockType.STANDARD,
+        recipes: list[identifier_t.IdentifierKey],
+        unlock_samples: bool | None = None,
+        depends_on: list[str] | None = None,
+    ) -> async_batch_t.QueuedAsyncBatchRequest:
+        """Unlock experiments. Experiments will edtiable after unlocking if they are currently locked.
+
+        :param type: The method to unlock recipes. Default is standard.
+        :param recipes: The recipes to unlock, a maximum of 100 can be sent
+        :param unlock_samples: Should associated experiment test samples also be unlocked.
+        :param depends_on: A list of batch reference keys to process before processing this request
+        """
+        args = unlock_recipes_t.Arguments(
+            type=type,
+            recipes=recipes,
+            unlock_samples=unlock_samples,
+        )
+        json_data = serialize_for_api(args)
+
+        batch_reference = str(uuid.uuid4())
+
+        req = async_batch_t.AsyncBatchRequest(
+            path=async_batch_t.AsyncBatchRequestPath.UNLOCK_RECIPES,
+            data=json_data,
+            depends_on=depends_on,
+            batch_reference=batch_reference,
+        )
+
+        self._enqueue(req)
+
+        return async_batch_t.QueuedAsyncBatchRequest(
+            path=req.path,
+            batch_reference=req.batch_reference,
+        )
+
     def upsert_condition_match(
         self,
         *,
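
A sketch of queueing the new lock/unlock batch requests and chaining them with depends_on; processor stands in for any concrete AsyncBatchProcessorBase implementation and recipe_keys for a list of identifier_t.IdentifierKey values, both assumptions here.

locked = processor.lock_recipes(
    recipes=recipe_keys,
    globally_removable=True,
    comments="Lock before integration edits",
)

# depends_on takes batch reference keys, so the unlock request below is only
# processed after the lock request above has been handled.
processor.unlock_recipes(
    recipes=recipe_keys,
    depends_on=[locked.batch_reference],
)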

uncountable/types/async_batch_t.py
@@ -46,6 +46,8 @@ class AsyncBatchRequestPath(StrEnum):
     COMPLETE_ASYNC_UPLOAD = "runsheet/complete_async_upload"
     CREATE_MIX_ORDER = "recipes/create_mix_order"
     PUSH_NOTIFICATION = "integrations/push_notification"
+    COMPLETE_ASYNC_PARSE = "uploader/complete_async_parse"
+    SET_RECIPE_TOTAL = "recipes/set_recipe_total"
 
 
 # DO NOT MODIFY -- This file is generated by type_spec
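
The two new enum members mirror the generated endpoint paths, minus the "api/external/" prefix that ENDPOINT_PATH carries. A quick consistency check, assuming the wheel is installed and the modules are imported via the re-exports shown earlier:

from uncountable.types import async_batch_t, complete_async_parse_t, set_recipe_total_t

assert complete_async_parse_t.ENDPOINT_PATH == (
    "api/external/" + async_batch_t.AsyncBatchRequestPath.COMPLETE_ASYNC_PARSE
)
assert set_recipe_total_t.ENDPOINT_PATH == (
    "api/external/" + async_batch_t.AsyncBatchRequestPath.SET_RECIPE_TOTAL
)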