UncountablePythonSDK 0.0.127__py3-none-any.whl → 0.0.129__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
Potentially problematic release: this version of UncountablePythonSDK might be problematic.
- examples/integration-server/jobs/materials_auto/example_instrument.py +4 -3
- examples/integration-server/jobs/materials_auto/example_parse.py +130 -0
- examples/integration-server/jobs/materials_auto/example_predictions.py +2 -2
- examples/integration-server/jobs/materials_auto/example_runsheet_wh.py +3 -2
- examples/integration-server/jobs/materials_auto/profile.yaml +9 -0
- examples/integration-server/pyproject.toml +1 -1
- pkgs/serialization_util/serialization_helpers.py +3 -1
- pkgs/type_spec/builder.py +9 -3
- pkgs/type_spec/builder_types.py +9 -0
- pkgs/type_spec/cross_output_links.py +2 -10
- pkgs/type_spec/emit_open_api.py +0 -12
- pkgs/type_spec/emit_python.py +72 -11
- pkgs/type_spec/emit_typescript_util.py +28 -6
- pkgs/type_spec/load_types.py +1 -1
- pkgs/type_spec/type_info/emit_type_info.py +13 -2
- uncountable/core/client.py +10 -3
- uncountable/integration/job.py +2 -3
- uncountable/integration/queue_runner/command_server/command_server.py +8 -7
- uncountable/integration/telemetry.py +41 -7
- uncountable/integration/webhook_server/entrypoint.py +2 -0
- uncountable/types/__init__.py +2 -0
- uncountable/types/api/entity/list_aggregate.py +79 -0
- uncountable/types/api/entity/list_entities.py +25 -0
- uncountable/types/api/recipes/get_recipes_data.py +13 -0
- uncountable/types/async_batch_processor.py +20 -0
- uncountable/types/client_base.py +195 -1
- uncountable/types/client_config.py +1 -0
- uncountable/types/client_config_t.py +10 -0
- uncountable/types/entity_t.py +2 -0
- {uncountablepythonsdk-0.0.127.dist-info → uncountablepythonsdk-0.0.129.dist-info}/METADATA +1 -1
- {uncountablepythonsdk-0.0.127.dist-info → uncountablepythonsdk-0.0.129.dist-info}/RECORD +33 -30
- {uncountablepythonsdk-0.0.127.dist-info → uncountablepythonsdk-0.0.129.dist-info}/WHEEL +0 -0
- {uncountablepythonsdk-0.0.127.dist-info → uncountablepythonsdk-0.0.129.dist-info}/top_level.txt +0 -0
uncountable/integration/queue_runner/command_server/command_server.py
CHANGED
@@ -1,8 +1,9 @@
 import asyncio
 
+import grpc.aio as grpc_aio
 import simplejson as json
 from google.protobuf.timestamp_pb2 import Timestamp
-from grpc import StatusCode
+from grpc import StatusCode
 
 from pkgs.argument_parser import CachedParser
 from uncountable.core.environment import get_local_admin_server_port
@@ -40,11 +41,11 @@ queued_job_payload_parser = CachedParser(queued_job_t.QueuedJobPayload)
 
 
 async def serve(command_queue: CommandQueue, datastore: DatastoreSqlite) -> None:
-    server =
+    server = grpc_aio.server()
 
     class CommandServerHandler(CommandServerServicer):
         async def EnqueueJob(
-            self, request: EnqueueJobRequest, context:
+            self, request: EnqueueJobRequest, context: grpc_aio.ServicerContext
         ) -> EnqueueJobResult:
             payload_json = json.loads(request.serialized_payload)
             payload = queued_job_payload_parser.parse_api(payload_json)
@@ -63,7 +64,7 @@ async def serve(command_queue: CommandQueue, datastore: DatastoreSqlite) -> None
             return result
 
         async def RetryJob(
-            self, request: RetryJobRequest, context:
+            self, request: RetryJobRequest, context: grpc_aio.ServicerContext
         ) -> RetryJobResult:
             response_queue: asyncio.Queue[CommandRetryJobResponse] = asyncio.Queue()
             await command_queue.put(
@@ -80,12 +81,12 @@ async def serve(command_queue: CommandQueue, datastore: DatastoreSqlite) -> None
                 return RetryJobResult(successfully_queued=False, queued_job_uuid="")
 
         async def CheckHealth(
-            self, request: CheckHealthRequest, context:
+            self, request: CheckHealthRequest, context: grpc_aio.ServicerContext
        ) -> CheckHealthResult:
            return CheckHealthResult(success=True)
 
        async def ListQueuedJobs(
-            self, request: ListQueuedJobsRequest, context:
+            self, request: ListQueuedJobsRequest, context: grpc_aio.ServicerContext
         ) -> ListQueuedJobsResult:
             if (
                 request.limit < ListQueuedJobsConstants.LIMIT_MIN
@@ -121,7 +122,7 @@ async def serve(command_queue: CommandQueue, datastore: DatastoreSqlite) -> None
             return ListQueuedJobsResult(queued_jobs=response_list)
 
         async def VaccuumQueuedJobs(
-            self, request: VaccuumQueuedJobsRequest, context:
+            self, request: VaccuumQueuedJobsRequest, context: grpc_aio.ServicerContext
         ) -> VaccuumQueuedJobsResult:
             response_queue: asyncio.Queue[CommandVaccuumQueuedJobsResponse] = (
                 asyncio.Queue()
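The hunks above migrate the command server to gRPC's asyncio API: the server is now built with grpc_aio.server() and every handler's context is typed as grpc_aio.ServicerContext. A minimal sketch of that pattern; the port and the commented-out servicer registration are illustrative, not the SDK's real wiring:

import asyncio

import grpc.aio as grpc_aio


async def serve_example() -> None:
    # asyncio-native gRPC server, as adopted by the diff above
    server = grpc_aio.server()
    # the generated add_CommandServerServicer_to_server(handler, server)
    # call would register the servicer here
    server.add_insecure_port("localhost:50051")  # illustrative port
    await server.start()
    await server.wait_for_termination()


if __name__ == "__main__":
    asyncio.run(serve_example())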
uncountable/integration/telemetry.py
CHANGED
@@ -12,7 +12,10 @@ from opentelemetry import _logs, trace
 from opentelemetry.exporter.otlp.proto.http._log_exporter import OTLPLogExporter
 from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
 from opentelemetry.sdk._logs import Logger as OTELLogger
-from opentelemetry.sdk._logs import
+from opentelemetry.sdk._logs import (
+    LoggerProvider,
+    LogRecord,
+)
 from opentelemetry.sdk._logs.export import BatchLogRecordProcessor, ConsoleLogExporter
 from opentelemetry.sdk.resources import Attributes, Resource
 from opentelemetry.sdk.trace import TracerProvider
@@ -95,8 +98,27 @@ class Logger:
     def current_trace_id(self) -> int | None:
         return self.current_span.get_span_context().trace_id
 
-    def _patch_attributes(
-
+    def _patch_attributes(
+        self,
+        attributes: Attributes | None,
+        *,
+        message: str | None = None,
+        severity: LogSeverity | None = None,
+    ) -> Attributes:
+        patched_attributes = {**(attributes if attributes is not None else {})}
+        if message is not None:
+            patched_attributes["message"] = message
+        elif "body" in patched_attributes:
+            patched_attributes["message"] = patched_attributes["body"]
+
+        if severity is not None:
+            patched_attributes["status"] = severity.lower()
+        elif "severity_text" in patched_attributes and isinstance(
+            patched_attributes["severity_text"], str
+        ):
+            patched_attributes["status"] = patched_attributes["severity_text"].lower()
+
+        return patched_attributes
 
     def _emit_log(
         self, message: str, *, severity: LogSeverity, attributes: Attributes | None
@@ -106,7 +128,9 @@ class Logger:
             body=message,
             severity_text=severity,
             timestamp=time.time_ns(),
-            attributes=self._patch_attributes(
+            attributes=self._patch_attributes(
+                message=message, severity=severity, attributes=attributes
+            ),
             span_id=self.current_span_id,
             trace_id=self.current_trace_id,
             trace_flags=DEFAULT_TRACE_OPTIONS,
@@ -140,7 +164,9 @@ class Logger:
         attributes: Attributes | None = None,
     ) -> None:
         traceback_str = "".join(traceback.format_exception(exception))
-        patched_attributes = self._patch_attributes(
+        patched_attributes = self._patch_attributes(
+            message=message, severity=LogSeverity.ERROR, attributes=attributes
+        )
         self.current_span.record_exception(
             exception=exception, attributes=patched_attributes
         )
@@ -171,9 +197,17 @@ class JobLogger(Logger):
         self.job_definition = job_definition
         super().__init__(base_span)
 
-    def _patch_attributes(
+    def _patch_attributes(
+        self,
+        attributes: Attributes | None,
+        *,
+        message: str | None = None,
+        severity: LogSeverity | None = None,
+    ) -> Attributes:
         patched_attributes: dict[str, base_t.JsonValue] = {
-            **(
+            **super()._patch_attributes(
+                attributes=attributes, message=message, severity=severity
+            )
         }
         patched_attributes["profile.name"] = self.profile_metadata.name
         patched_attributes["profile.base_url"] = self.profile_metadata.base_url
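The new _patch_attributes duplicates the log body into a "message" attribute and the severity into a lowercase "status" attribute, so log backends that key off those names pick them up. A standalone sketch of the same logic (not the SDK class itself):

from typing import Any, Mapping


def patch_attributes(
    attributes: Mapping[str, Any] | None,
    *,
    message: str | None = None,
    severity: str | None = None,
) -> dict[str, Any]:
    # mirror the explicit message, else fall back to an existing "body"
    patched = {**(attributes or {})}
    if message is not None:
        patched["message"] = message
    elif "body" in patched:
        patched["message"] = patched["body"]
    # mirror the severity as a lowercase "status", else reuse severity_text
    if severity is not None:
        patched["status"] = severity.lower()
    elif isinstance(patched.get("severity_text"), str):
        patched["status"] = patched["severity_text"].lower()
    return patched


print(patch_attributes({"body": "job failed"}, severity="ERROR"))
# {'body': 'job failed', 'message': 'job failed', 'status': 'error'}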
uncountable/types/__init__.py
CHANGED
@@ -70,6 +70,7 @@ from . import integration_session_t as integration_session_t
 from . import integrations_t as integrations_t
 from .api.uploader import invoke_uploader as invoke_uploader_t
 from . import job_definition_t as job_definition_t
+from .api.entity import list_aggregate as list_aggregate_t
 from .api.entity import list_entities as list_entities_t
 from .api.id_source import list_id_source as list_id_source_t
 from .api.entity import lock_entity as lock_entity_t
@@ -199,6 +200,7 @@ __all__: list[str] = [
     "integrations_t",
     "invoke_uploader_t",
     "job_definition_t",
+    "list_aggregate_t",
     "list_entities_t",
     "list_id_source_t",
     "lock_entity_t",
uncountable/types/api/entity/list_aggregate.py
ADDED
@@ -0,0 +1,79 @@
+# DO NOT MODIFY -- This file is generated by type_spec
+# ruff: noqa: E402 Q003
+# fmt: off
+# isort: skip_file
+from __future__ import annotations
+import typing  # noqa: F401
+import datetime  # noqa: F401
+from decimal import Decimal  # noqa: F401
+import dataclasses
+from pkgs.serialization import serial_class
+from ... import base_t
+
+__all__: list[str] = [
+    "Arguments",
+    "AttributeValue",
+    "ColumnAccess",
+    "ColumnResults",
+    "Data",
+    "ENDPOINT_METHOD",
+    "ENDPOINT_PATH",
+]
+
+ENDPOINT_METHOD = "GET"
+ENDPOINT_PATH = "api/external/entity/list_aggregate"
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.api.entity.list_aggregate.AttributeValue",
+    unconverted_values={"value"},
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True)  # type: ignore[literal-required]
+class AttributeValue:
+    name: str
+    value: base_t.JsonValue
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.api.entity.list_aggregate.Arguments",
+    unconverted_values={"attribute_values"},
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True)  # type: ignore[literal-required]
+class Arguments:
+    config_reference: str
+    attribute_values: list[AttributeValue] | None = None
+    offset: int | None = None
+    limit: int | None = None
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.api.entity.list_aggregate.ColumnResults",
+    unconverted_values={"column_values"},
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True)  # type: ignore[literal-required]
+class ColumnResults:
+    column_values: list[base_t.JsonValue]
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.api.entity.list_aggregate.ColumnAccess",
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True)  # type: ignore[literal-required]
+class ColumnAccess:
+    name: str
+    table_label: str | None
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.api.entity.list_aggregate.Data",
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True)  # type: ignore[literal-required]
+class Data:
+    columns: list[ColumnAccess]
+    results: list[ColumnResults]
+# DO NOT MODIFY -- This file is generated by type_spec
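The new list_aggregate module gives typed access to the api/external/entity/list_aggregate endpoint: Arguments carries a config reference plus optional attribute filters and paging, and Data pairs a column list with row-wise results. A hedged sketch of building the request and reading rows; only the dataclasses come from the diff, the client method name is an assumption:

from uncountable.types import list_aggregate_t

args = list_aggregate_t.Arguments(
    config_reference="my_aggregate_config",  # illustrative reference
    attribute_values=[
        list_aggregate_t.AttributeValue(name="material_family", value="polymers"),
    ],
    limit=50,
)

# Assuming the client exposes a method for ENDPOINT_PATH:
# data: list_aggregate_t.Data = client.list_aggregate(args)  # name assumed
# for row in data.results:
#     print(dict(zip([c.name for c in data.columns], row.column_values)))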
uncountable/types/api/entity/list_entities.py
CHANGED
@@ -11,9 +11,12 @@ from pkgs.serialization import serial_class
 from pkgs.serialization import OpaqueKey
 from ... import base_t
 from ... import entity_t
+from ... import identifier_t
 
 __all__: list[str] = [
+    "AdditionalFilterConfig",
     "Arguments",
+    "AttributeValue",
     "ColumnAccess",
     "Data",
     "ENDPOINT_METHOD",
@@ -25,6 +28,27 @@ ENDPOINT_METHOD = "GET"
 ENDPOINT_PATH = "api/external/entity/external_list_entities"
 
 
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.api.entity.list_entities.AttributeValue",
+    unconverted_values={"value"},
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True)  # type: ignore[literal-required]
+class AttributeValue:
+    name: str
+    value: base_t.JsonValue
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.api.entity.list_entities.AdditionalFilterConfig",
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True)  # type: ignore[literal-required]
+class AdditionalFilterConfig:
+    config_key: identifier_t.IdentifierKey
+    attribute_values: list[AttributeValue] | None = None
+
+
 # DO NOT MODIFY -- This file is generated by type_spec
 @serial_class(
     named_type_path="sdk.api.entity.list_entities.Arguments",
@@ -37,6 +61,7 @@ class Arguments:
     attributes: dict[OpaqueKey, base_t.JsonValue] | None = None
     offset: int | None = None
     limit: int | None = None
+    additional_filter_configs: list[AdditionalFilterConfig] | None = None
 
 
 # DO NOT MODIFY -- This file is generated by type_spec
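list_entities gains AdditionalFilterConfig, which pairs a filter-config key with optional attribute values and rides along on the new Arguments.additional_filter_configs field. A sketch of constructing one; the IdentifierKeyId constructor is an assumption about identifier_t, and Arguments' other fields are elided:

from uncountable.types import identifier_t, list_entities_t

extra_filter = list_entities_t.AdditionalFilterConfig(
    config_key=identifier_t.IdentifierKeyId(id=123),  # key constructor assumed
    attribute_values=[
        list_entities_t.AttributeValue(name="status", value="active"),
    ],
)
# passed as Arguments(..., additional_filter_configs=[extra_filter])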
uncountable/types/api/recipes/get_recipes_data.py
CHANGED
@@ -26,6 +26,7 @@ __all__: list[str] = [
     "ENDPOINT_PATH",
     "Recipe",
     "RecipeInput",
+    "RecipeLockStatus",
     "RecipeOutput",
     "RecipeOutputInclusion",
     "RecipeStep",
@@ -154,6 +155,7 @@ class RecipeStepGroup:
 )
 @dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True)  # type: ignore[literal-required]
 class RecipeWorkflowStep:
+    recipe_workflow_step_name: str | None
     recipe_workflow_step_id: base_t.ObjectId
     workflow_step_id: base_t.ObjectId
     recipe_step_groups: list[RecipeStepGroup]
@@ -174,6 +176,16 @@ class RecipeStepRelationship:
     actual_quantity_dec: Decimal | None = None
 
 
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.api.recipes.get_recipes_data.RecipeLockStatus",
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True)  # type: ignore[literal-required]
+class RecipeLockStatus:
+    has_locked_inputs: bool
+    has_locked_outputs: bool
+
+
 # DO NOT MODIFY -- This file is generated by type_spec
 @serial_class(
     named_type_path="sdk.api.recipes.get_recipes_data.Recipe",
@@ -193,6 +205,7 @@ class Recipe:
     tag_ids: list[base_t.ObjectId]
     experiment_group_ids: list[base_t.ObjectId]
     step_relationships: list[RecipeStepRelationship]
+    recipe_lock_status: RecipeLockStatus
     creating_user_id: base_t.ObjectId | None = None
     barcode_value: str | None = None
 
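Each Recipe returned by get_recipes_data now carries a RecipeLockStatus with separate input and output lock flags. A small sketch of consuming it; the get_recipes_data_t alias follows the SDK's usual *_t naming but is assumed here:

from uncountable.types import get_recipes_data_t  # alias name assumed


def is_fully_locked(recipe: get_recipes_data_t.Recipe) -> bool:
    # both flags come straight from the new RecipeLockStatus dataclass
    status = recipe.recipe_lock_status
    return status.has_locked_inputs and status.has_locked_outputs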
uncountable/types/async_batch_processor.py
CHANGED
@@ -13,6 +13,7 @@ import uncountable.types.api.recipes.associate_recipe_as_lot as associate_recipe
 from uncountable.types import async_batch_t
 from uncountable.types import base_t
 import uncountable.types.api.recipes.clear_recipe_outputs as clear_recipe_outputs_t
+from uncountable.types import client_config_t
 import uncountable.types.api.uploader.complete_async_parse as complete_async_parse_t
 import uncountable.types.api.runsheet.complete_async_upload as complete_async_upload_t
 import uncountable.types.api.recipes.create_mix_order as create_mix_order_t
@@ -57,6 +58,7 @@ class AsyncBatchProcessorBase(ABC):
         equipment_key: identifier_t.IdentifierKey,
         material_family_ids: list[base_t.ObjectId],
         depends_on: list[str] | None = None,
+        _request_options: client_config_t.RequestOptions | None = None,
     ) -> async_batch_t.QueuedAsyncBatchRequest:
         """Create or return the input association for equipment
 
@@ -93,6 +95,7 @@ class AsyncBatchProcessorBase(ABC):
         input_key: identifier_t.IdentifierKey | None = None,
         show_in_listings: bool | None = None,
         depends_on: list[str] | None = None,
+        _request_options: client_config_t.RequestOptions | None = None,
     ) -> async_batch_t.QueuedAsyncBatchRequest:
         """Create or return the input association for a recipe
 
@@ -130,6 +133,7 @@ class AsyncBatchProcessorBase(ABC):
         recipe_key: identifier_t.IdentifierKey,
         ingredient_key: identifier_t.IdentifierKey,
         depends_on: list[str] | None = None,
+        _request_options: client_config_t.RequestOptions | None = None,
     ) -> async_batch_t.QueuedAsyncBatchRequest:
         """Create a new lot association for the provided recipe with the provided ingredient
 
@@ -164,6 +168,7 @@ class AsyncBatchProcessorBase(ABC):
         *,
         recipe_key: identifier_t.IdentifierKey,
         depends_on: list[str] | None = None,
+        _request_options: client_config_t.RequestOptions | None = None,
     ) -> async_batch_t.QueuedAsyncBatchRequest:
         """Clears all output values & output metadata for a given recipe
 
@@ -198,6 +203,7 @@ class AsyncBatchProcessorBase(ABC):
         async_job_key: identifier_t.IdentifierKey,
         upload_destination: generic_upload_t.UploadDestinationRecipe,
         depends_on: list[str] | None = None,
+        _request_options: client_config_t.RequestOptions | None = None,
     ) -> async_batch_t.QueuedAsyncBatchRequest:
         """Parses uploaded files asynchronously
 
@@ -232,6 +238,7 @@ class AsyncBatchProcessorBase(ABC):
         async_job_id: base_t.ObjectId,
         file_id: base_t.ObjectId,
         depends_on: list[str] | None = None,
+        _request_options: client_config_t.RequestOptions | None = None,
     ) -> async_batch_t.QueuedAsyncBatchRequest:
         """Processes an file id with a given async job id to be uploaded asynchronously
 
@@ -265,6 +272,7 @@ class AsyncBatchProcessorBase(ABC):
         recipe_key: identifier_t.IdentifierKey,
         recipe_workflow_step_identifier: recipe_workflow_steps_t.RecipeWorkflowStepIdentifier,
         depends_on: list[str] | None = None,
+        _request_options: client_config_t.RequestOptions | None = None,
     ) -> async_batch_t.QueuedAsyncBatchRequest:
         """Creates mix order on a recipe workflow step
 
@@ -301,6 +309,7 @@ class AsyncBatchProcessorBase(ABC):
         entity_key: identifier_t.IdentifierKey | None = None,
         on_create_init_field_values: list[field_values_t.FieldArgumentValue] | None = None,
         depends_on: list[str] | None = None,
+        _request_options: client_config_t.RequestOptions | None = None,
     ) -> async_batch_t.QueuedAsyncBatchRequest:
         """Creates or updates field values for an entity
 
@@ -344,6 +353,7 @@ class AsyncBatchProcessorBase(ABC):
         identifiers: recipe_identifiers_t.RecipeIdentifiers | None = None,
         definition_key: identifier_t.IdentifierKey | None = None,
         depends_on: list[str] | None = None,
+        _request_options: client_config_t.RequestOptions | None = None,
     ) -> async_batch_t.QueuedAsyncBatchRequest:
         """Returns the id of the recipe being created.
 
@@ -392,6 +402,7 @@ class AsyncBatchProcessorBase(ABC):
         recipe_workflow_step_identifier: recipe_workflow_steps_t.RecipeWorkflowStepIdentifier,
         edits: list[edit_recipe_inputs_t.RecipeInputEdit],
         depends_on: list[str] | None = None,
+        _request_options: client_config_t.RequestOptions | None = None,
     ) -> async_batch_t.QueuedAsyncBatchRequest:
         """Clear, update, or add inputs on a recipe
 
@@ -431,6 +442,7 @@ class AsyncBatchProcessorBase(ABC):
         user_group_keys: list[identifier_t.IdentifierKey] | None = None,
         all_users: bool | None = None,
         depends_on: list[str] | None = None,
+        _request_options: client_config_t.RequestOptions | None = None,
     ) -> async_batch_t.QueuedAsyncBatchRequest:
         """Grant entity permissions to a list of users or user groups or to all users.
 
@@ -470,6 +482,7 @@ class AsyncBatchProcessorBase(ABC):
         file_id: base_t.ObjectId | None = None,
         file_ids: list[base_t.ObjectId] | None = None,
         depends_on: list[str] | None = None,
+        _request_options: client_config_t.RequestOptions | None = None,
     ) -> async_batch_t.QueuedAsyncBatchRequest:
         """Runs a file through an uploader.
 
@@ -509,6 +522,7 @@ class AsyncBatchProcessorBase(ABC):
         lock_samples: bool | None = None,
         comments: str | None = None,
         depends_on: list[str] | None = None,
+        _request_options: client_config_t.RequestOptions | None = None,
     ) -> async_batch_t.QueuedAsyncBatchRequest:
         """Lock experiments. Experiments will require unlocking to be editable. Edits to the experiments are blocked while they are locked.
 
@@ -553,6 +567,7 @@ class AsyncBatchProcessorBase(ABC):
         entity_type: entity_t.EntityType,
         query: lookup_entity_t.LookupEntityQuery,
         depends_on: list[str] | None = None,
+        _request_options: client_config_t.RequestOptions | None = None,
     ) -> async_batch_t.QueuedAsyncBatchRequest:
         """Look up an entity based on an identifier or field values
 
@@ -589,6 +604,7 @@ class AsyncBatchProcessorBase(ABC):
         display_notice: bool = False,
         entity: entity_t.EntityIdentifier | None = None,
         depends_on: list[str] | None = None,
+        _request_options: client_config_t.RequestOptions | None = None,
     ) -> async_batch_t.QueuedAsyncBatchRequest:
         """Push a notification to a user or user group
 
@@ -625,6 +641,7 @@ class AsyncBatchProcessorBase(ABC):
         entity_identifier: entity_t.EntityIdentifier,
         field_values: list[field_values_t.FieldArgumentValue],
         depends_on: list[str] | None = None,
+        _request_options: client_config_t.RequestOptions | None = None,
     ) -> async_batch_t.QueuedAsyncBatchRequest:
         """Sets field values for an entity
 
@@ -660,6 +677,7 @@ class AsyncBatchProcessorBase(ABC):
         recipe_key: identifier_t.IdentifierKey,
         recipe_metadata: list[recipe_metadata_t.MetadataValue],
         depends_on: list[str] | None = None,
+        _request_options: client_config_t.RequestOptions | None = None,
     ) -> async_batch_t.QueuedAsyncBatchRequest:
         """Set metadata values on a recipe
 
@@ -696,6 +714,7 @@ class AsyncBatchProcessorBase(ABC):
         recipes: list[identifier_t.IdentifierKey],
         unlock_samples: bool | None = None,
         depends_on: list[str] | None = None,
+        _request_options: client_config_t.RequestOptions | None = None,
     ) -> async_batch_t.QueuedAsyncBatchRequest:
         """Unlock experiments. Experiments will edtiable after unlocking if they are currently locked.
 
@@ -736,6 +755,7 @@ class AsyncBatchProcessorBase(ABC):
         output_conditions: list[identifier_t.IdentifierKey] | None = None,
         existing_condition_match: identifier_t.IdentifierKey | None = None,
         depends_on: list[str] | None = None,
+        _request_options: client_config_t.RequestOptions | None = None,
     ) -> async_batch_t.QueuedAsyncBatchRequest:
         """Creates or updates condition match
 
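Every async-batch method now accepts a keyword-only _request_options of type client_config_t.RequestOptions (client_config_t.py gains 10 lines in this release), so per-request configuration can be threaded through without touching the existing positional signatures. A hedged sketch; RequestOptions' fields are not shown in this diff, and the method name is inferred from the clear_recipe_outputs_t import, so the call is left abstract:

from uncountable.types import client_config_t

# batch = ...  # an AsyncBatchProcessorBase implementation from the SDK
# request = batch.clear_recipe_outputs(          # method name inferred
#     recipe_key=some_recipe_key,
#     _request_options=client_config_t.RequestOptions(...),  # fields per client_config_t
# )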