UncountablePythonSDK 0.0.128__py3-none-any.whl → 0.0.130__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of UncountablePythonSDK might be problematic. Click here for more details.

Files changed (40)
  1. docs/requirements.txt +1 -1
  2. examples/integration-server/jobs/materials_auto/example_instrument.py +4 -3
  3. examples/integration-server/jobs/materials_auto/example_parse.py +56 -3
  4. examples/integration-server/jobs/materials_auto/example_predictions.py +2 -2
  5. examples/integration-server/jobs/materials_auto/example_runsheet_wh.py +25 -22
  6. examples/integration-server/pyproject.toml +1 -1
  7. pkgs/argument_parser/argument_parser.py +20 -1
  8. pkgs/serialization_util/serialization_helpers.py +3 -1
  9. pkgs/type_spec/builder.py +9 -3
  10. pkgs/type_spec/builder_types.py +9 -0
  11. pkgs/type_spec/cross_output_links.py +2 -10
  12. pkgs/type_spec/emit_open_api.py +0 -12
  13. pkgs/type_spec/emit_python.py +72 -11
  14. pkgs/type_spec/emit_typescript_util.py +28 -6
  15. pkgs/type_spec/load_types.py +1 -1
  16. pkgs/type_spec/parts/base.ts.prepart +3 -0
  17. pkgs/type_spec/type_info/emit_type_info.py +13 -2
  18. uncountable/core/client.py +10 -3
  19. uncountable/integration/queue_runner/command_server/command_server.py +8 -7
  20. uncountable/integration/webhook_server/entrypoint.py +2 -0
  21. uncountable/types/__init__.py +8 -0
  22. uncountable/types/api/entity/list_aggregate.py +79 -0
  23. uncountable/types/api/entity/list_entities.py +25 -0
  24. uncountable/types/api/notebooks/__init__.py +1 -0
  25. uncountable/types/api/notebooks/add_notebook_content.py +119 -0
  26. uncountable/types/api/recipes/get_recipes_data.py +13 -0
  27. uncountable/types/async_batch_processor.py +20 -0
  28. uncountable/types/client_base.py +222 -0
  29. uncountable/types/client_config.py +1 -0
  30. uncountable/types/client_config_t.py +10 -0
  31. uncountable/types/entity_t.py +2 -0
  32. uncountable/types/integration_server_t.py +2 -0
  33. uncountable/types/listing.py +9 -0
  34. uncountable/types/listing_t.py +51 -0
  35. uncountable/types/structured_filters.py +21 -0
  36. uncountable/types/structured_filters_t.py +206 -0
  37. {uncountablepythonsdk-0.0.128.dist-info → uncountablepythonsdk-0.0.130.dist-info}/METADATA +1 -1
  38. {uncountablepythonsdk-0.0.128.dist-info → uncountablepythonsdk-0.0.130.dist-info}/RECORD +40 -32
  39. {uncountablepythonsdk-0.0.128.dist-info → uncountablepythonsdk-0.0.130.dist-info}/WHEEL +0 -0
  40. {uncountablepythonsdk-0.0.128.dist-info → uncountablepythonsdk-0.0.130.dist-info}/top_level.txt +0 -0
@@ -226,13 +226,15 @@ class Client(ClientMethods):
226
226
  except JSONDecodeError as e:
227
227
  raise SDKError("unable to process response", request_id=request_id) from e
228
228
 
229
- def _send_request(self, request: requests.Request) -> requests.Response:
229
+ def _send_request(
230
+ self, request: requests.Request, *, timeout: float | None = None
231
+ ) -> requests.Response:
230
232
  if self._cfg.extra_headers is not None:
231
233
  request.headers = {**request.headers, **self._cfg.extra_headers}
232
234
  if self._cfg.transform_request is not None:
233
235
  request = self._cfg.transform_request(request)
234
236
  prepared_request = request.prepare()
235
- response = self._session.send(prepared_request)
237
+ response = self._session.send(prepared_request, timeout=timeout)
236
238
  return response
237
239
 
238
240
  def do_request(self, *, api_request: APIRequest, return_type: type[DT]) -> DT:
@@ -257,7 +259,12 @@ class Client(ClientMethods):
257
259
  with push_scope_optional(self._cfg.logger, "api_call", attributes=attributes):
258
260
  if self._cfg.logger is not None:
259
261
  self._cfg.logger.log_info(api_request.endpoint, attributes=attributes)
260
- response = self._send_request(request)
262
+ timeout = (
263
+ api_request.request_options.timeout_secs
264
+ if api_request.request_options is not None
265
+ else None
266
+ )
267
+ response = self._send_request(request, timeout=timeout)
261
268
  response_data = self._get_response_json(response, request_id=request_id)
262
269
  cached_parser = self._get_cached_parser(return_type)
263
270
  try:
@@ -1,8 +1,9 @@
1
1
  import asyncio
2
2
 
3
+ import grpc.aio as grpc_aio
3
4
  import simplejson as json
4
5
  from google.protobuf.timestamp_pb2 import Timestamp
5
- from grpc import StatusCode, aio
6
+ from grpc import StatusCode
6
7
 
7
8
  from pkgs.argument_parser import CachedParser
8
9
  from uncountable.core.environment import get_local_admin_server_port
@@ -40,11 +41,11 @@ queued_job_payload_parser = CachedParser(queued_job_t.QueuedJobPayload)
40
41
 
41
42
 
42
43
  async def serve(command_queue: CommandQueue, datastore: DatastoreSqlite) -> None:
43
- server = aio.server()
44
+ server = grpc_aio.server()
44
45
 
45
46
  class CommandServerHandler(CommandServerServicer):
46
47
  async def EnqueueJob(
47
- self, request: EnqueueJobRequest, context: aio.ServicerContext
48
+ self, request: EnqueueJobRequest, context: grpc_aio.ServicerContext
48
49
  ) -> EnqueueJobResult:
49
50
  payload_json = json.loads(request.serialized_payload)
50
51
  payload = queued_job_payload_parser.parse_api(payload_json)
@@ -63,7 +64,7 @@ async def serve(command_queue: CommandQueue, datastore: DatastoreSqlite) -> None
63
64
  return result
64
65
 
65
66
  async def RetryJob(
66
- self, request: RetryJobRequest, context: aio.ServicerContext
67
+ self, request: RetryJobRequest, context: grpc_aio.ServicerContext
67
68
  ) -> RetryJobResult:
68
69
  response_queue: asyncio.Queue[CommandRetryJobResponse] = asyncio.Queue()
69
70
  await command_queue.put(
@@ -80,12 +81,12 @@ async def serve(command_queue: CommandQueue, datastore: DatastoreSqlite) -> None
80
81
  return RetryJobResult(successfully_queued=False, queued_job_uuid="")
81
82
 
82
83
  async def CheckHealth(
83
- self, request: CheckHealthRequest, context: aio.ServicerContext
84
+ self, request: CheckHealthRequest, context: grpc_aio.ServicerContext
84
85
  ) -> CheckHealthResult:
85
86
  return CheckHealthResult(success=True)
86
87
 
87
88
  async def ListQueuedJobs(
88
- self, request: ListQueuedJobsRequest, context: aio.ServicerContext
89
+ self, request: ListQueuedJobsRequest, context: grpc_aio.ServicerContext
89
90
  ) -> ListQueuedJobsResult:
90
91
  if (
91
92
  request.limit < ListQueuedJobsConstants.LIMIT_MIN
@@ -121,7 +122,7 @@ async def serve(command_queue: CommandQueue, datastore: DatastoreSqlite) -> None
121
122
  return ListQueuedJobsResult(queued_jobs=response_list)
122
123
 
123
124
  async def VaccuumQueuedJobs(
124
- self, request: VaccuumQueuedJobsRequest, context: aio.ServicerContext
125
+ self, request: VaccuumQueuedJobsRequest, context: grpc_aio.ServicerContext
125
126
  ) -> VaccuumQueuedJobsResult:
126
127
  response_queue: asyncio.Queue[CommandVaccuumQueuedJobsResponse] = (
127
128
  asyncio.Queue()
@@ -73,6 +73,8 @@ def register_route(
73
73
 
74
74
 
75
75
  def main() -> None:
76
+ app.add_url_rule("/health", "health", lambda: ("OK", 200))
77
+
76
78
  profiles = load_profiles()
77
79
  for profile_metadata in profiles:
78
80
  server_logger = Logger(get_current_span())
@@ -2,6 +2,7 @@
2
2
  # ruff: noqa: E402 Q003
3
3
  # fmt: off
4
4
  # isort: skip_file
5
+ from .api.notebooks import add_notebook_content as add_notebook_content_t
5
6
  from .api.recipes import add_recipe_to_project as add_recipe_to_project_t
6
7
  from .api.recipes import add_time_series_data as add_time_series_data_t
7
8
  from .api.recipes import archive_recipes as archive_recipes_t
@@ -70,8 +71,10 @@ from . import integration_session_t as integration_session_t
70
71
  from . import integrations_t as integrations_t
71
72
  from .api.uploader import invoke_uploader as invoke_uploader_t
72
73
  from . import job_definition_t as job_definition_t
74
+ from .api.entity import list_aggregate as list_aggregate_t
73
75
  from .api.entity import list_entities as list_entities_t
74
76
  from .api.id_source import list_id_source as list_id_source_t
77
+ from . import listing_t as listing_t
75
78
  from .api.entity import lock_entity as lock_entity_t
76
79
  from .api.recipes import lock_recipes as lock_recipes_t
77
80
  from .api.entity import lookup_entity as lookup_entity_t
@@ -116,6 +119,7 @@ from .api.recipes import set_recipe_tags as set_recipe_tags_t
116
119
  from .api.recipes import set_recipe_total as set_recipe_total_t
117
120
  from .api.entity import set_values as set_values_t
118
121
  from . import sockets_t as sockets_t
122
+ from . import structured_filters_t as structured_filters_t
119
123
  from .api.entity import transition_entity_phase as transition_entity_phase_t
120
124
  from .api.recipes import unarchive_recipes as unarchive_recipes_t
121
125
  from . import units_t as units_t
@@ -131,6 +135,7 @@ from . import workflows_t as workflows_t
131
135
 
132
136
 
133
137
  __all__: list[str] = [
138
+ "add_notebook_content_t",
134
139
  "add_recipe_to_project_t",
135
140
  "add_time_series_data_t",
136
141
  "archive_recipes_t",
@@ -199,8 +204,10 @@ __all__: list[str] = [
199
204
  "integrations_t",
200
205
  "invoke_uploader_t",
201
206
  "job_definition_t",
207
+ "list_aggregate_t",
202
208
  "list_entities_t",
203
209
  "list_id_source_t",
210
+ "listing_t",
204
211
  "lock_entity_t",
205
212
  "lock_recipes_t",
206
213
  "lookup_entity_t",
@@ -245,6 +252,7 @@ __all__: list[str] = [
245
252
  "set_recipe_total_t",
246
253
  "set_values_t",
247
254
  "sockets_t",
255
+ "structured_filters_t",
248
256
  "transition_entity_phase_t",
249
257
  "unarchive_recipes_t",
250
258
  "units_t",
@@ -0,0 +1,79 @@
1
+ # DO NOT MODIFY -- This file is generated by type_spec
2
+ # ruff: noqa: E402 Q003
3
+ # fmt: off
4
+ # isort: skip_file
5
+ from __future__ import annotations
6
+ import typing # noqa: F401
7
+ import datetime # noqa: F401
8
+ from decimal import Decimal # noqa: F401
9
+ import dataclasses
10
+ from pkgs.serialization import serial_class
11
+ from ... import base_t
12
+
13
+ __all__: list[str] = [
14
+ "Arguments",
15
+ "AttributeValue",
16
+ "ColumnAccess",
17
+ "ColumnResults",
18
+ "Data",
19
+ "ENDPOINT_METHOD",
20
+ "ENDPOINT_PATH",
21
+ ]
22
+
23
+ ENDPOINT_METHOD = "GET"
24
+ ENDPOINT_PATH = "api/external/entity/list_aggregate"
25
+
26
+
27
+ # DO NOT MODIFY -- This file is generated by type_spec
28
+ @serial_class(
29
+ named_type_path="sdk.api.entity.list_aggregate.AttributeValue",
30
+ unconverted_values={"value"},
31
+ )
32
+ @dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
33
+ class AttributeValue:
34
+ name: str
35
+ value: base_t.JsonValue
36
+
37
+
38
+ # DO NOT MODIFY -- This file is generated by type_spec
39
+ @serial_class(
40
+ named_type_path="sdk.api.entity.list_aggregate.Arguments",
41
+ unconverted_values={"attribute_values"},
42
+ )
43
+ @dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
44
+ class Arguments:
45
+ config_reference: str
46
+ attribute_values: list[AttributeValue] | None = None
47
+ offset: int | None = None
48
+ limit: int | None = None
49
+
50
+
51
+ # DO NOT MODIFY -- This file is generated by type_spec
52
+ @serial_class(
53
+ named_type_path="sdk.api.entity.list_aggregate.ColumnResults",
54
+ unconverted_values={"column_values"},
55
+ )
56
+ @dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
57
+ class ColumnResults:
58
+ column_values: list[base_t.JsonValue]
59
+
60
+
61
+ # DO NOT MODIFY -- This file is generated by type_spec
62
+ @serial_class(
63
+ named_type_path="sdk.api.entity.list_aggregate.ColumnAccess",
64
+ )
65
+ @dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
66
+ class ColumnAccess:
67
+ name: str
68
+ table_label: str | None
69
+
70
+
71
+ # DO NOT MODIFY -- This file is generated by type_spec
72
+ @serial_class(
73
+ named_type_path="sdk.api.entity.list_aggregate.Data",
74
+ )
75
+ @dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
76
+ class Data:
77
+ columns: list[ColumnAccess]
78
+ results: list[ColumnResults]
79
+ # DO NOT MODIFY -- This file is generated by type_spec
@@ -11,9 +11,12 @@ from pkgs.serialization import serial_class
11
11
  from pkgs.serialization import OpaqueKey
12
12
  from ... import base_t
13
13
  from ... import entity_t
14
+ from ... import identifier_t
14
15
 
15
16
  __all__: list[str] = [
17
+ "AdditionalFilterConfig",
16
18
  "Arguments",
19
+ "AttributeValue",
17
20
  "ColumnAccess",
18
21
  "Data",
19
22
  "ENDPOINT_METHOD",
@@ -25,6 +28,27 @@ ENDPOINT_METHOD = "GET"
25
28
  ENDPOINT_PATH = "api/external/entity/external_list_entities"
26
29
 
27
30
 
31
+ # DO NOT MODIFY -- This file is generated by type_spec
32
+ @serial_class(
33
+ named_type_path="sdk.api.entity.list_entities.AttributeValue",
34
+ unconverted_values={"value"},
35
+ )
36
+ @dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
37
+ class AttributeValue:
38
+ name: str
39
+ value: base_t.JsonValue
40
+
41
+
42
+ # DO NOT MODIFY -- This file is generated by type_spec
43
+ @serial_class(
44
+ named_type_path="sdk.api.entity.list_entities.AdditionalFilterConfig",
45
+ )
46
+ @dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
47
+ class AdditionalFilterConfig:
48
+ config_key: identifier_t.IdentifierKey
49
+ attribute_values: list[AttributeValue] | None = None
50
+
51
+
28
52
  # DO NOT MODIFY -- This file is generated by type_spec
29
53
  @serial_class(
30
54
  named_type_path="sdk.api.entity.list_entities.Arguments",
@@ -37,6 +61,7 @@ class Arguments:
37
61
  attributes: dict[OpaqueKey, base_t.JsonValue] | None = None
38
62
  offset: int | None = None
39
63
  limit: int | None = None
64
+ additional_filter_configs: list[AdditionalFilterConfig] | None = None
40
65
 
41
66
 
42
67
  # DO NOT MODIFY -- This file is generated by type_spec
@@ -0,0 +1 @@
1
+ # DO NOT MODIFY -- This file is generated by type_spec
@@ -0,0 +1,119 @@
1
+ # DO NOT MODIFY -- This file is generated by type_spec
2
+ # ruff: noqa: E402 Q003
3
+ # fmt: off
4
+ # isort: skip_file
5
+ from __future__ import annotations
6
+ import typing # noqa: F401
7
+ import datetime # noqa: F401
8
+ from decimal import Decimal # noqa: F401
9
+ from enum import StrEnum
10
+ import dataclasses
11
+ from pkgs.serialization import serial_class
12
+ from pkgs.serialization import serial_union_annotation
13
+ from pkgs.serialization import serial_string_enum
14
+ from ... import base_t
15
+ from ... import identifier_t
16
+
17
+ __all__: list[str] = [
18
+ "Arguments",
19
+ "Data",
20
+ "ENDPOINT_METHOD",
21
+ "ENDPOINT_PATH",
22
+ "InputContent",
23
+ "InputContentTextMarkdown",
24
+ "InputContentType",
25
+ "Location",
26
+ "LocationAppendToFirstPage",
27
+ "LocationType",
28
+ ]
29
+
30
+ ENDPOINT_METHOD = "POST"
31
+ ENDPOINT_PATH = "api/external/notebooks/add_notebook_content"
32
+
33
+
34
+ # DO NOT MODIFY -- This file is generated by type_spec
35
+ @serial_string_enum(
36
+ labels={
37
+ "append_to_first_page": "Append To First Page",
38
+ },
39
+ )
40
+ class LocationType(StrEnum):
41
+ APPEND_TO_FIRST_PAGE = "append_to_first_page"
42
+
43
+
44
+ # DO NOT MODIFY -- This file is generated by type_spec
45
+ @serial_class(
46
+ named_type_path="sdk.api.notebooks.add_notebook_content.LocationAppendToFirstPage",
47
+ parse_require={"type"},
48
+ )
49
+ @dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
50
+ class LocationAppendToFirstPage:
51
+ type: typing.Literal[LocationType.APPEND_TO_FIRST_PAGE] = LocationType.APPEND_TO_FIRST_PAGE
52
+
53
+
54
+ # DO NOT MODIFY -- This file is generated by type_spec
55
+ Location = typing.Annotated[
56
+ typing.Union[LocationAppendToFirstPage],
57
+ serial_union_annotation(
58
+ named_type_path="sdk.api.notebooks.add_notebook_content.Location",
59
+ discriminator="type",
60
+ discriminator_map={
61
+ "append_to_first_page": LocationAppendToFirstPage,
62
+ },
63
+ ),
64
+ ]
65
+
66
+
67
+ # DO NOT MODIFY -- This file is generated by type_spec
68
+ @serial_string_enum(
69
+ labels={
70
+ "text_markdown": "Text Markdown",
71
+ },
72
+ )
73
+ class InputContentType(StrEnum):
74
+ TEXT_MARKDOWN = "text_markdown"
75
+
76
+
77
+ # DO NOT MODIFY -- This file is generated by type_spec
78
+ @serial_class(
79
+ named_type_path="sdk.api.notebooks.add_notebook_content.InputContentTextMarkdown",
80
+ parse_require={"type"},
81
+ )
82
+ @dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
83
+ class InputContentTextMarkdown:
84
+ type: typing.Literal[InputContentType.TEXT_MARKDOWN] = InputContentType.TEXT_MARKDOWN
85
+ text_markdown: str
86
+
87
+
88
+ # DO NOT MODIFY -- This file is generated by type_spec
89
+ InputContent = typing.Annotated[
90
+ typing.Union[InputContentTextMarkdown],
91
+ serial_union_annotation(
92
+ named_type_path="sdk.api.notebooks.add_notebook_content.InputContent",
93
+ discriminator="type",
94
+ discriminator_map={
95
+ "text_markdown": InputContentTextMarkdown,
96
+ },
97
+ ),
98
+ ]
99
+
100
+
101
+ # DO NOT MODIFY -- This file is generated by type_spec
102
+ @serial_class(
103
+ named_type_path="sdk.api.notebooks.add_notebook_content.Arguments",
104
+ )
105
+ @dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
106
+ class Arguments:
107
+ notebook_key: identifier_t.IdentifierKey
108
+ location: Location
109
+ contents: list[InputContent]
110
+
111
+
112
+ # DO NOT MODIFY -- This file is generated by type_spec
113
+ @serial_class(
114
+ named_type_path="sdk.api.notebooks.add_notebook_content.Data",
115
+ )
116
+ @dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
117
+ class Data:
118
+ pass
119
+ # DO NOT MODIFY -- This file is generated by type_spec
@@ -26,6 +26,7 @@ __all__: list[str] = [
26
26
  "ENDPOINT_PATH",
27
27
  "Recipe",
28
28
  "RecipeInput",
29
+ "RecipeLockStatus",
29
30
  "RecipeOutput",
30
31
  "RecipeOutputInclusion",
31
32
  "RecipeStep",
@@ -154,6 +155,7 @@ class RecipeStepGroup:
154
155
  )
155
156
  @dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
156
157
  class RecipeWorkflowStep:
158
+ recipe_workflow_step_name: str | None
157
159
  recipe_workflow_step_id: base_t.ObjectId
158
160
  workflow_step_id: base_t.ObjectId
159
161
  recipe_step_groups: list[RecipeStepGroup]
@@ -174,6 +176,16 @@ class RecipeStepRelationship:
174
176
  actual_quantity_dec: Decimal | None = None
175
177
 
176
178
 
179
+ # DO NOT MODIFY -- This file is generated by type_spec
180
+ @serial_class(
181
+ named_type_path="sdk.api.recipes.get_recipes_data.RecipeLockStatus",
182
+ )
183
+ @dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
184
+ class RecipeLockStatus:
185
+ has_locked_inputs: bool
186
+ has_locked_outputs: bool
187
+
188
+
177
189
  # DO NOT MODIFY -- This file is generated by type_spec
178
190
  @serial_class(
179
191
  named_type_path="sdk.api.recipes.get_recipes_data.Recipe",
@@ -193,6 +205,7 @@ class Recipe:
193
205
  tag_ids: list[base_t.ObjectId]
194
206
  experiment_group_ids: list[base_t.ObjectId]
195
207
  step_relationships: list[RecipeStepRelationship]
208
+ recipe_lock_status: RecipeLockStatus
196
209
  creating_user_id: base_t.ObjectId | None = None
197
210
  barcode_value: str | None = None
198
211
 
@@ -13,6 +13,7 @@ import uncountable.types.api.recipes.associate_recipe_as_lot as associate_recipe
13
13
  from uncountable.types import async_batch_t
14
14
  from uncountable.types import base_t
15
15
  import uncountable.types.api.recipes.clear_recipe_outputs as clear_recipe_outputs_t
16
+ from uncountable.types import client_config_t
16
17
  import uncountable.types.api.uploader.complete_async_parse as complete_async_parse_t
17
18
  import uncountable.types.api.runsheet.complete_async_upload as complete_async_upload_t
18
19
  import uncountable.types.api.recipes.create_mix_order as create_mix_order_t
@@ -57,6 +58,7 @@ class AsyncBatchProcessorBase(ABC):
57
58
  equipment_key: identifier_t.IdentifierKey,
58
59
  material_family_ids: list[base_t.ObjectId],
59
60
  depends_on: list[str] | None = None,
61
+ _request_options: client_config_t.RequestOptions | None = None,
60
62
  ) -> async_batch_t.QueuedAsyncBatchRequest:
61
63
  """Create or return the input association for equipment
62
64
 
@@ -93,6 +95,7 @@ class AsyncBatchProcessorBase(ABC):
93
95
  input_key: identifier_t.IdentifierKey | None = None,
94
96
  show_in_listings: bool | None = None,
95
97
  depends_on: list[str] | None = None,
98
+ _request_options: client_config_t.RequestOptions | None = None,
96
99
  ) -> async_batch_t.QueuedAsyncBatchRequest:
97
100
  """Create or return the input association for a recipe
98
101
 
@@ -130,6 +133,7 @@ class AsyncBatchProcessorBase(ABC):
130
133
  recipe_key: identifier_t.IdentifierKey,
131
134
  ingredient_key: identifier_t.IdentifierKey,
132
135
  depends_on: list[str] | None = None,
136
+ _request_options: client_config_t.RequestOptions | None = None,
133
137
  ) -> async_batch_t.QueuedAsyncBatchRequest:
134
138
  """Create a new lot association for the provided recipe with the provided ingredient
135
139
 
@@ -164,6 +168,7 @@ class AsyncBatchProcessorBase(ABC):
164
168
  *,
165
169
  recipe_key: identifier_t.IdentifierKey,
166
170
  depends_on: list[str] | None = None,
171
+ _request_options: client_config_t.RequestOptions | None = None,
167
172
  ) -> async_batch_t.QueuedAsyncBatchRequest:
168
173
  """Clears all output values & output metadata for a given recipe
169
174
 
@@ -198,6 +203,7 @@ class AsyncBatchProcessorBase(ABC):
198
203
  async_job_key: identifier_t.IdentifierKey,
199
204
  upload_destination: generic_upload_t.UploadDestinationRecipe,
200
205
  depends_on: list[str] | None = None,
206
+ _request_options: client_config_t.RequestOptions | None = None,
201
207
  ) -> async_batch_t.QueuedAsyncBatchRequest:
202
208
  """Parses uploaded files asynchronously
203
209
 
@@ -232,6 +238,7 @@ class AsyncBatchProcessorBase(ABC):
232
238
  async_job_id: base_t.ObjectId,
233
239
  file_id: base_t.ObjectId,
234
240
  depends_on: list[str] | None = None,
241
+ _request_options: client_config_t.RequestOptions | None = None,
235
242
  ) -> async_batch_t.QueuedAsyncBatchRequest:
236
243
  """Processes an file id with a given async job id to be uploaded asynchronously
237
244
 
@@ -265,6 +272,7 @@ class AsyncBatchProcessorBase(ABC):
265
272
  recipe_key: identifier_t.IdentifierKey,
266
273
  recipe_workflow_step_identifier: recipe_workflow_steps_t.RecipeWorkflowStepIdentifier,
267
274
  depends_on: list[str] | None = None,
275
+ _request_options: client_config_t.RequestOptions | None = None,
268
276
  ) -> async_batch_t.QueuedAsyncBatchRequest:
269
277
  """Creates mix order on a recipe workflow step
270
278
 
@@ -301,6 +309,7 @@ class AsyncBatchProcessorBase(ABC):
301
309
  entity_key: identifier_t.IdentifierKey | None = None,
302
310
  on_create_init_field_values: list[field_values_t.FieldArgumentValue] | None = None,
303
311
  depends_on: list[str] | None = None,
312
+ _request_options: client_config_t.RequestOptions | None = None,
304
313
  ) -> async_batch_t.QueuedAsyncBatchRequest:
305
314
  """Creates or updates field values for an entity
306
315
 
@@ -344,6 +353,7 @@ class AsyncBatchProcessorBase(ABC):
344
353
  identifiers: recipe_identifiers_t.RecipeIdentifiers | None = None,
345
354
  definition_key: identifier_t.IdentifierKey | None = None,
346
355
  depends_on: list[str] | None = None,
356
+ _request_options: client_config_t.RequestOptions | None = None,
347
357
  ) -> async_batch_t.QueuedAsyncBatchRequest:
348
358
  """Returns the id of the recipe being created.
349
359
 
@@ -392,6 +402,7 @@ class AsyncBatchProcessorBase(ABC):
392
402
  recipe_workflow_step_identifier: recipe_workflow_steps_t.RecipeWorkflowStepIdentifier,
393
403
  edits: list[edit_recipe_inputs_t.RecipeInputEdit],
394
404
  depends_on: list[str] | None = None,
405
+ _request_options: client_config_t.RequestOptions | None = None,
395
406
  ) -> async_batch_t.QueuedAsyncBatchRequest:
396
407
  """Clear, update, or add inputs on a recipe
397
408
 
@@ -431,6 +442,7 @@ class AsyncBatchProcessorBase(ABC):
431
442
  user_group_keys: list[identifier_t.IdentifierKey] | None = None,
432
443
  all_users: bool | None = None,
433
444
  depends_on: list[str] | None = None,
445
+ _request_options: client_config_t.RequestOptions | None = None,
434
446
  ) -> async_batch_t.QueuedAsyncBatchRequest:
435
447
  """Grant entity permissions to a list of users or user groups or to all users.
436
448
 
@@ -470,6 +482,7 @@ class AsyncBatchProcessorBase(ABC):
470
482
  file_id: base_t.ObjectId | None = None,
471
483
  file_ids: list[base_t.ObjectId] | None = None,
472
484
  depends_on: list[str] | None = None,
485
+ _request_options: client_config_t.RequestOptions | None = None,
473
486
  ) -> async_batch_t.QueuedAsyncBatchRequest:
474
487
  """Runs a file through an uploader.
475
488
 
@@ -509,6 +522,7 @@ class AsyncBatchProcessorBase(ABC):
509
522
  lock_samples: bool | None = None,
510
523
  comments: str | None = None,
511
524
  depends_on: list[str] | None = None,
525
+ _request_options: client_config_t.RequestOptions | None = None,
512
526
  ) -> async_batch_t.QueuedAsyncBatchRequest:
513
527
  """Lock experiments. Experiments will require unlocking to be editable. Edits to the experiments are blocked while they are locked.
514
528
 
@@ -553,6 +567,7 @@ class AsyncBatchProcessorBase(ABC):
553
567
  entity_type: entity_t.EntityType,
554
568
  query: lookup_entity_t.LookupEntityQuery,
555
569
  depends_on: list[str] | None = None,
570
+ _request_options: client_config_t.RequestOptions | None = None,
556
571
  ) -> async_batch_t.QueuedAsyncBatchRequest:
557
572
  """Look up an entity based on an identifier or field values
558
573
 
@@ -589,6 +604,7 @@ class AsyncBatchProcessorBase(ABC):
589
604
  display_notice: bool = False,
590
605
  entity: entity_t.EntityIdentifier | None = None,
591
606
  depends_on: list[str] | None = None,
607
+ _request_options: client_config_t.RequestOptions | None = None,
592
608
  ) -> async_batch_t.QueuedAsyncBatchRequest:
593
609
  """Push a notification to a user or user group
594
610
 
@@ -625,6 +641,7 @@ class AsyncBatchProcessorBase(ABC):
625
641
  entity_identifier: entity_t.EntityIdentifier,
626
642
  field_values: list[field_values_t.FieldArgumentValue],
627
643
  depends_on: list[str] | None = None,
644
+ _request_options: client_config_t.RequestOptions | None = None,
628
645
  ) -> async_batch_t.QueuedAsyncBatchRequest:
629
646
  """Sets field values for an entity
630
647
 
@@ -660,6 +677,7 @@ class AsyncBatchProcessorBase(ABC):
660
677
  recipe_key: identifier_t.IdentifierKey,
661
678
  recipe_metadata: list[recipe_metadata_t.MetadataValue],
662
679
  depends_on: list[str] | None = None,
680
+ _request_options: client_config_t.RequestOptions | None = None,
663
681
  ) -> async_batch_t.QueuedAsyncBatchRequest:
664
682
  """Set metadata values on a recipe
665
683
 
@@ -696,6 +714,7 @@ class AsyncBatchProcessorBase(ABC):
696
714
  recipes: list[identifier_t.IdentifierKey],
697
715
  unlock_samples: bool | None = None,
698
716
  depends_on: list[str] | None = None,
717
+ _request_options: client_config_t.RequestOptions | None = None,
699
718
  ) -> async_batch_t.QueuedAsyncBatchRequest:
700
719
  """Unlock experiments. Experiments will edtiable after unlocking if they are currently locked.
701
720
 
@@ -736,6 +755,7 @@ class AsyncBatchProcessorBase(ABC):
736
755
  output_conditions: list[identifier_t.IdentifierKey] | None = None,
737
756
  existing_condition_match: identifier_t.IdentifierKey | None = None,
738
757
  depends_on: list[str] | None = None,
758
+ _request_options: client_config_t.RequestOptions | None = None,
739
759
  ) -> async_batch_t.QueuedAsyncBatchRequest:
740
760
  """Creates or updates condition match
741
761