UncountablePythonSDK 0.0.113__py3-none-any.whl → 0.0.115__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of UncountablePythonSDK might be problematic.
- examples/integration-server/jobs/materials_auto/concurrent_cron.py +11 -0
- examples/integration-server/jobs/materials_auto/example_http.py +35 -0
- examples/integration-server/jobs/materials_auto/profile.yaml +25 -0
- pkgs/argument_parser/argument_parser.py +8 -3
- pkgs/type_spec/builder.py +8 -2
- pkgs/type_spec/non_discriminated_union_exceptions.py +14 -0
- pkgs/type_spec/parts/base.py.prepart +5 -8
- pkgs/type_spec/ui_entry_actions/generate_ui_entry_actions.py +18 -5
- pkgs/type_spec/value_spec/__main__.py +2 -2
- uncountable/core/environment.py +1 -1
- uncountable/integration/http_server/__init__.py +5 -0
- uncountable/integration/http_server/types.py +67 -0
- uncountable/integration/job.py +129 -5
- uncountable/integration/queue_runner/job_scheduler.py +10 -2
- uncountable/integration/server.py +2 -2
- uncountable/integration/telemetry.py +1 -1
- uncountable/integration/webhook_server/entrypoint.py +37 -112
- uncountable/types/__init__.py +6 -0
- uncountable/types/api/entity/export_entities.py +46 -0
- uncountable/types/api/entity/lookup_entity.py +15 -1
- uncountable/types/api/recipes/create_mix_order.py +44 -0
- uncountable/types/api/recipes/set_recipe_outputs.py +1 -0
- uncountable/types/async_batch_processor.py +34 -0
- uncountable/types/async_batch_t.py +1 -0
- uncountable/types/base_t.py +5 -8
- uncountable/types/client_base.py +49 -0
- uncountable/types/entity_t.py +3 -1
- uncountable/types/exports.py +8 -0
- uncountable/types/exports_t.py +33 -0
- uncountable/types/integration_server_t.py +2 -0
- uncountable/types/job_definition.py +2 -0
- uncountable/types/job_definition_t.py +26 -2
- {uncountablepythonsdk-0.0.113.dist-info → uncountablepythonsdk-0.0.115.dist-info}/METADATA +1 -1
- {uncountablepythonsdk-0.0.113.dist-info → uncountablepythonsdk-0.0.115.dist-info}/RECORD +36 -27
- {uncountablepythonsdk-0.0.113.dist-info → uncountablepythonsdk-0.0.115.dist-info}/WHEEL +1 -1
- {uncountablepythonsdk-0.0.113.dist-info → uncountablepythonsdk-0.0.115.dist-info}/top_level.txt +0 -0
uncountable/integration/webhook_server/entrypoint.py
CHANGED
@@ -1,146 +1,71 @@
-import
-import typing
-from dataclasses import dataclass
+import base64

 import flask
-import simplejson
 from flask.typing import ResponseReturnValue
-from flask.wrappers import Response
 from opentelemetry.trace import get_current_span
 from uncountable.core.environment import (
-
+    get_http_server_port,
     get_server_env,
-    get_webhook_server_port,
-)
-from uncountable.integration.queue_runner.command_server.command_client import (
-    send_job_queue_message,
-)
-from uncountable.integration.queue_runner.command_server.types import (
-    CommandServerException,
 )
+from uncountable.integration.executors.script_executor import resolve_script_executor
+from uncountable.integration.http_server import GenericHttpRequest, HttpException
+from uncountable.integration.job import CustomHttpJob, WebhookJob
 from uncountable.integration.scan_profiles import load_profiles
-from uncountable.integration.secret_retrieval.retrieve_secret import retrieve_secret
 from uncountable.integration.telemetry import Logger
-from uncountable.types import
-
-from pkgs.argument_parser import CachedParser
+from uncountable.types import job_definition_t

 app = flask.Flask(__name__)


-@dataclass(kw_only=True)
-class WebhookResponse:
-    pass
-
-
-webhook_payload_parser = CachedParser(webhook_job_t.WebhookEventBody)
-
-
-class WebhookException(BaseException):
-    error_code: int
-    message: str
-
-    def __init__(self, *, error_code: int, message: str) -> None:
-        self.error_code = error_code
-        self.message = message
-
-    @staticmethod
-    def payload_failed_signature() -> "WebhookException":
-        return WebhookException(
-            error_code=401, message="webhook payload did not match signature"
-        )
-
-    @staticmethod
-    def no_signature_passed() -> "WebhookException":
-        return WebhookException(error_code=400, message="missing signature")
-
-    @staticmethod
-    def body_parse_error() -> "WebhookException":
-        return WebhookException(error_code=400, message="body parse error")
-
-    @staticmethod
-    def unknown_error() -> "WebhookException":
-        return WebhookException(error_code=500, message="internal server error")
-
-    def __str__(self) -> str:
-        return f"[{self.error_code}]: {self.message}"
-
-    def make_error_response(self) -> Response:
-        return Response(
-            status=self.error_code, response={"error": {"message": str(self)}}
-        )
-
-
-def _parse_webhook_payload(
-    *, raw_request_body: bytes, signature_key: str, passed_signature: str
-) -> base_t.JsonValue:
-    request_body_signature = hmac.new(
-        signature_key.encode("utf-8"), msg=raw_request_body, digestmod="sha256"
-    ).hexdigest()
-
-    if request_body_signature != passed_signature:
-        raise WebhookException.payload_failed_signature()
-
-    try:
-        request_body = simplejson.loads(raw_request_body.decode())
-        return typing.cast(base_t.JsonValue, request_body)
-    except (simplejson.JSONDecodeError, ValueError) as e:
-        raise WebhookException.body_parse_error() from e
-
-
 def register_route(
     *,
     server_logger: Logger,
     profile_meta: job_definition_t.ProfileMetadata,
-    job: job_definition_t.
+    job: job_definition_t.HttpJobDefinitionBase,
 ) -> None:
     route = f"/{profile_meta.name}/{job.id}"

-    def
+    def handle_request() -> ResponseReturnValue:
         with server_logger.push_scope(route):
             try:
-
-
-
+                if not isinstance(job.executor, job_definition_t.JobExecutorScript):
+                    raise HttpException.configuration_error(
+                        message="[internal] http job must use a script executor"
+                    )
+                job_instance = resolve_script_executor(
+                    executor=job.executor, profile_metadata=profile_meta
                )
-
-
-
+                if not isinstance(job_instance, (CustomHttpJob, WebhookJob)):
+                    raise HttpException.configuration_error(
+                        message="[internal] http job must descend from CustomHttpJob"
+                    )
+                http_request = GenericHttpRequest(
+                    body_base64=base64.b64encode(flask.request.get_data()).decode(),
+                    headers=dict(flask.request.headers),
                )
-
-
-
-
-
-                        signature_key=signature_key,
-                        passed_signature=passed_signature,
+                job_instance.validate_request(
+                    request=http_request, job_definition=job, profile_meta=profile_meta
+                )
+                http_response = job_instance.handle_request(
+                    request=http_request, job_definition=job, profile_meta=profile_meta
                )

-
-
-
-
-
-
-                    )
-                ),
-                port=get_local_admin_server_port(),
-            )
-            except CommandServerException as e:
-                raise WebhookException.unknown_error() from e
-
-                return flask.jsonify(WebhookResponse())
-            except WebhookException as e:
+                return flask.make_response(
+                    http_response.response,
+                    http_response.status_code,
+                    http_response.headers,
+                )
+            except HttpException as e:
                 server_logger.log_exception(e)
                 return e.make_error_response()
             except Exception as e:
                 server_logger.log_exception(e)
-                return
+                return HttpException.unknown_error().make_error_response()

     app.add_url_rule(
         route,
-        endpoint=f"
-        view_func=
+        endpoint=f"handle_request_{job.id}",
+        view_func=handle_request,
         methods=["POST"],
     )

@@ -152,7 +77,7 @@ def main() -> None:
     for profile_metadata in profiles:
         server_logger = Logger(get_current_span())
         for job in profile_metadata.jobs:
-            if isinstance(job, job_definition_t.
+            if isinstance(job, job_definition_t.HttpJobDefinitionBase):
                 register_route(
                     server_logger=server_logger, profile_meta=profile_metadata, job=job
                 )
@@ -164,7 +89,7 @@ main()
 if __name__ == "__main__":
     app.run(
         host="0.0.0.0",
-        port=
+        port=get_http_server_port(),
         debug=get_server_env() == "playground",
         exclude_patterns=[],
     )
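The old webhook-only Flask entrypoint is replaced by a generic HTTP server: the route base64-encodes the raw request body, wraps it and the headers in a GenericHttpRequest, and hands it to a script-executor job that must descend from CustomHttpJob (or WebhookJob), calling validate_request and then handle_request. The following is a minimal illustrative sketch of that flow only; the real CustomHttpJob, GenericHttpRequest, and response interfaces live in uncountable/integration/job.py and uncountable/integration/http_server/types.py, which this diff does not show, so every class and field below beyond the names used in the hunk is an assumption.

```python
# Hypothetical sketch: local stand-ins mirroring only what the new entrypoint uses.
import base64
import dataclasses


@dataclasses.dataclass(kw_only=True)
class GenericHttpRequest:
    # The entrypoint populates these two fields: the raw body, base64-encoded,
    # and the request headers as a plain dict.
    body_base64: str
    headers: dict[str, str]


@dataclasses.dataclass(kw_only=True)
class GenericHttpResponse:
    # The entrypoint reads these attributes back via flask.make_response(...).
    response: str
    status_code: int
    headers: dict[str, str]


class EchoHttpJob:
    """Illustrative job: validate_request may raise to reject the call,
    handle_request returns the response the Flask route will emit."""

    def validate_request(self, *, request: GenericHttpRequest, job_definition, profile_meta) -> None:
        # Assumed header name, purely for illustration.
        if "X-Api-Key" not in request.headers:
            raise ValueError("missing API key header")

    def handle_request(self, *, request: GenericHttpRequest, job_definition, profile_meta) -> GenericHttpResponse:
        body = base64.b64decode(request.body_base64).decode()
        return GenericHttpResponse(
            response=f"received {len(body)} bytes",
            status_code=200,
            headers={"Content-Type": "text/plain"},
        )
```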
uncountable/types/__init__.py
CHANGED
@@ -21,6 +21,7 @@ from .api.chemical import convert_chemical_formats as convert_chemical_formats_t
 from .api.entity import create_entities as create_entities_t
 from .api.entity import create_entity as create_entity_t
 from .api.inputs import create_inputs as create_inputs_t
+from .api.recipes import create_mix_order as create_mix_order_t
 from .api.entity import create_or_update_entity as create_or_update_entity_t
 from .api.recipes import create_recipe as create_recipe_t
 from .api.recipe_links import create_recipe_link as create_recipe_link_t
@@ -34,6 +35,8 @@ from . import entity_t as entity_t
 from .api.batch import execute_batch as execute_batch_t
 from .api.batch import execute_batch_load_async as execute_batch_load_async_t
 from . import experiment_groups_t as experiment_groups_t
+from .api.entity import export_entities as export_entities_t
+from . import exports_t as exports_t
 from . import field_values_t as field_values_t
 from . import fields_t as fields_t
 from . import generic_upload_t as generic_upload_t
@@ -135,6 +138,7 @@ __all__: list[str] = [
     "create_entities_t",
     "create_entity_t",
     "create_inputs_t",
+    "create_mix_order_t",
     "create_or_update_entity_t",
     "create_recipe_t",
     "create_recipe_link_t",
@@ -148,6 +152,8 @@ __all__: list[str] = [
     "execute_batch_t",
     "execute_batch_load_async_t",
     "experiment_groups_t",
+    "export_entities_t",
+    "exports_t",
     "field_values_t",
     "fields_t",
     "generic_upload_t",
uncountable/types/api/entity/export_entities.py
ADDED
@@ -0,0 +1,46 @@
+# DO NOT MODIFY -- This file is generated by type_spec
+# ruff: noqa: E402 Q003
+# fmt: off
+# isort: skip_file
+from __future__ import annotations
+import typing  # noqa: F401
+import datetime  # noqa: F401
+from decimal import Decimal  # noqa: F401
+import dataclasses
+from pkgs.serialization import serial_class
+from ... import async_batch_t
+from ... import base_t
+from ... import exports_t
+from ... import identifier_t
+
+__all__: list[str] = [
+    "Arguments",
+    "Data",
+    "ENDPOINT_METHOD",
+    "ENDPOINT_PATH",
+]
+
+ENDPOINT_METHOD = "POST"
+ENDPOINT_PATH = "api/external/entity/export_entities"
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.api.entity.export_entities.Arguments",
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True)  # type: ignore[literal-required]
+class Arguments:
+    config_key: identifier_t.IdentifierKey
+    type: exports_t.ExportType = exports_t.ExportType.EXCEL
+    client_timezone: exports_t.ListingExportUserTimezone | None = None
+    limit: int | None = None
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.api.entity.export_entities.Data",
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True)  # type: ignore[literal-required]
+class Data(async_batch_t.AsyncBatchActionReturn):
+    pass
+# DO NOT MODIFY -- This file is generated by type_spec
uncountable/types/api/entity/lookup_entity.py
CHANGED
@@ -21,6 +21,7 @@ __all__: list[str] = [
     "Data",
     "ENDPOINT_METHOD",
     "ENDPOINT_PATH",
+    "LookupEntityCompositeFieldValues",
     "LookupEntityFieldValue",
     "LookupEntityQuery",
     "LookupEntityQueryBase",
@@ -35,6 +36,7 @@ ENDPOINT_PATH = "api/external/entity/lookup_entity"
 # DO NOT MODIFY -- This file is generated by type_spec
 class LookupEntityQueryType(StrEnum):
     FIELD_VALUE = "field_value"
+    COMPOSITE_FIELD_VALUES = "composite_field_values"


 # DO NOT MODIFY -- This file is generated by type_spec
@@ -69,14 +71,26 @@ class LookupEntityFieldValue:
     value: LookupFieldArgumentValue


+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.api.entity.lookup_entity.LookupEntityCompositeFieldValues",
+    parse_require={"type"},
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True)  # type: ignore[literal-required]
+class LookupEntityCompositeFieldValues:
+    type: typing.Literal[LookupEntityQueryType.COMPOSITE_FIELD_VALUES] = LookupEntityQueryType.COMPOSITE_FIELD_VALUES
+    values: list[LookupFieldArgumentValue]
+
+
 # DO NOT MODIFY -- This file is generated by type_spec
 LookupEntityQuery = typing.Annotated[
-
+    LookupEntityFieldValue | LookupEntityCompositeFieldValues,
     serial_union_annotation(
         named_type_path="sdk.api.entity.lookup_entity.LookupEntityQuery",
         discriminator="type",
         discriminator_map={
             "field_value": LookupEntityFieldValue,
+            "composite_field_values": LookupEntityCompositeFieldValues,
         },
     ),
 ]
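The new COMPOSITE_FIELD_VALUES variant joins the LookupEntityQuery discriminated union; its `type` discriminator is pre-filled by the dataclass default, so callers only supply `values`. A small hedged sketch follows; LookupFieldArgumentValue's own constructor is not shown in this diff, so the `field_values` argument below is a placeholder for however those values are built.

```python
# Sketch under assumptions: `field_values` stands in for real
# LookupFieldArgumentValue instances, whose shape this diff does not show.
from uncountable.types.api.entity import lookup_entity as lookup_entity_t


def build_composite_query(field_values: list) -> "lookup_entity_t.LookupEntityCompositeFieldValues":
    # `type` defaults to LookupEntityQueryType.COMPOSITE_FIELD_VALUES, the
    # discriminator the serializer uses to pick this branch of LookupEntityQuery.
    return lookup_entity_t.LookupEntityCompositeFieldValues(values=field_values)
```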
uncountable/types/api/recipes/create_mix_order.py
ADDED
@@ -0,0 +1,44 @@
+# DO NOT MODIFY -- This file is generated by type_spec
+# ruff: noqa: E402 Q003
+# fmt: off
+# isort: skip_file
+from __future__ import annotations
+import typing  # noqa: F401
+import datetime  # noqa: F401
+from decimal import Decimal  # noqa: F401
+import dataclasses
+from pkgs.serialization import serial_class
+from ... import async_batch_t
+from ... import base_t
+from ... import identifier_t
+from ... import recipe_workflow_steps_t
+
+__all__: list[str] = [
+    "Arguments",
+    "Data",
+    "ENDPOINT_METHOD",
+    "ENDPOINT_PATH",
+]
+
+ENDPOINT_METHOD = "POST"
+ENDPOINT_PATH = "api/external/recipes/create_mix_order"
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.api.recipes.create_mix_order.Arguments",
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True)  # type: ignore[literal-required]
+class Arguments:
+    recipe_key: identifier_t.IdentifierKey
+    recipe_workflow_step_identifier: recipe_workflow_steps_t.RecipeWorkflowStepIdentifier
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.api.recipes.create_mix_order.Data",
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True)  # type: ignore[literal-required]
+class Data(async_batch_t.AsyncBatchActionReturn):
+    pass
+# DO NOT MODIFY -- This file is generated by type_spec
uncountable/types/api/recipes/set_recipe_outputs.py
CHANGED
@@ -61,6 +61,7 @@ class RecipeOutputValue:
     value_str: str | None = None
     value_curve: CurveValues | None = None
     value_color: data_t.SupportedColorFormatColor | None = None
+    value_ingredient_id: base_t.ObjectId | None = None
     formatting: recipes_t.RecipeAttributeFormatting | None = None
     field_values: list[field_values_t.ArgumentValueRefName | field_values_t.ArgumentValueId] | None = None

uncountable/types/async_batch_processor.py
CHANGED
@@ -14,6 +14,7 @@ from uncountable.types import async_batch_t
 from uncountable.types import base_t
 import uncountable.types.api.recipes.clear_recipe_outputs as clear_recipe_outputs_t
 import uncountable.types.api.runsheet.complete_async_upload as complete_async_upload_t
+import uncountable.types.api.recipes.create_mix_order as create_mix_order_t
 import uncountable.types.api.entity.create_or_update_entity as create_or_update_entity_t
 import uncountable.types.api.recipes.create_recipe as create_recipe_t
 import uncountable.types.api.recipes.edit_recipe_inputs as edit_recipe_inputs_t
@@ -217,6 +218,39 @@ class AsyncBatchProcessorBase(ABC):
             batch_reference=req.batch_reference,
         )

+    def create_mix_order(
+        self,
+        *,
+        recipe_key: identifier_t.IdentifierKey,
+        recipe_workflow_step_identifier: recipe_workflow_steps_t.RecipeWorkflowStepIdentifier,
+        depends_on: list[str] | None = None,
+    ) -> async_batch_t.QueuedAsyncBatchRequest:
+        """Creates mix order on a recipe workflow step
+
+        :param depends_on: A list of batch reference keys to process before processing this request
+        """
+        args = create_mix_order_t.Arguments(
+            recipe_key=recipe_key,
+            recipe_workflow_step_identifier=recipe_workflow_step_identifier,
+        )
+        json_data = serialize_for_api(args)
+
+        batch_reference = str(uuid.uuid4())
+
+        req = async_batch_t.AsyncBatchRequest(
+            path=async_batch_t.AsyncBatchRequestPath.CREATE_MIX_ORDER,
+            data=json_data,
+            depends_on=depends_on,
+            batch_reference=batch_reference,
+        )
+
+        self._enqueue(req)
+
+        return async_batch_t.QueuedAsyncBatchRequest(
+            path=req.path,
+            batch_reference=req.batch_reference,
+        )
+
     def create_or_update_entity(
         self,
         *,
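The new create_mix_order batch action follows the pattern of the other queued actions: serialize the generated Arguments, assign a random batch_reference, and enqueue an AsyncBatchRequest with the new CREATE_MIX_ORDER path. A hedged usage sketch follows; how an AsyncBatchProcessorBase instance is obtained is not shown in this diff, so `batch_processor` and the key values below are placeholders.

```python
# Sketch under assumptions: `batch_processor` stands in for however the SDK hands
# you an AsyncBatchProcessorBase implementation; the keys are workspace-specific.
def queue_mix_orders(batch_processor, recipe_key, step_identifier):
    # First queued request: returns a QueuedAsyncBatchRequest whose
    # batch_reference later requests can depend on.
    first = batch_processor.create_mix_order(
        recipe_key=recipe_key,
        recipe_workflow_step_identifier=step_identifier,
    )
    # A follow-up request ordered after the first one via depends_on.
    batch_processor.create_mix_order(
        recipe_key=recipe_key,
        recipe_workflow_step_identifier=step_identifier,
        depends_on=[first.batch_reference],
    )
    return first
```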
uncountable/types/async_batch_t.py
CHANGED
@@ -44,6 +44,7 @@ class AsyncBatchRequestPath(StrEnum):
     CREATE_RECIPE_LINK = "recipe_links/create_recipe_link"
     UPSERT_CONDITION_MATCH = "condition_parameters/upsert_condition_match"
     COMPLETE_ASYNC_UPLOAD = "runsheet/complete_async_upload"
+    CREATE_MIX_ORDER = "recipes/create_mix_order"


 # DO NOT MODIFY -- This file is generated by type_spec
uncountable/types/base_t.py
CHANGED
@@ -74,16 +74,13 @@ def is_pure_json_value(value: ExtJsonValue) -> bool:
         return True

     if isinstance(value, list):
-        for item in value
-            if not is_pure_json_value(item):
-                return False
-        return True
+        return all(is_pure_json_value(item) for item in value)

     if isinstance(value, dict):
-
-
-
-
+        return all(
+            is_pure_json_value(key) and is_pure_json_value(item)
+            for key, item in value.items()
+        )

     return False
 # === END section from base.prepart.py ===
uncountable/types/client_base.py
CHANGED
@@ -22,6 +22,7 @@ import uncountable.types.api.chemical.convert_chemical_formats as convert_chemic
 import uncountable.types.api.entity.create_entities as create_entities_t
 import uncountable.types.api.entity.create_entity as create_entity_t
 import uncountable.types.api.inputs.create_inputs as create_inputs_t
+import uncountable.types.api.recipes.create_mix_order as create_mix_order_t
 import uncountable.types.api.entity.create_or_update_entity as create_or_update_entity_t
 import uncountable.types.api.recipes.create_recipe as create_recipe_t
 import uncountable.types.api.recipe_links.create_recipe_link as create_recipe_link_t
@@ -31,6 +32,8 @@ import uncountable.types.api.recipes.edit_recipe_inputs as edit_recipe_inputs_t
 from uncountable.types import entity_t
 import uncountable.types.api.batch.execute_batch as execute_batch_t
 import uncountable.types.api.batch.execute_batch_load_async as execute_batch_load_async_t
+import uncountable.types.api.entity.export_entities as export_entities_t
+from uncountable.types import exports_t
 from uncountable.types import field_values_t
 from uncountable.types import generic_upload_t
 import uncountable.types.api.recipes.get_column_calculation_values as get_column_calculation_values_t
@@ -382,6 +385,26 @@ class ClientMethods(ABC):
         )
         return self.do_request(api_request=api_request, return_type=create_inputs_t.Data)

+    def create_mix_order(
+        self,
+        *,
+        recipe_key: identifier_t.IdentifierKey,
+        recipe_workflow_step_identifier: recipe_workflow_steps_t.RecipeWorkflowStepIdentifier,
+    ) -> create_mix_order_t.Data:
+        """Creates mix order on a recipe workflow step
+
+        """
+        args = create_mix_order_t.Arguments(
+            recipe_key=recipe_key,
+            recipe_workflow_step_identifier=recipe_workflow_step_identifier,
+        )
+        api_request = APIRequest(
+            method=create_mix_order_t.ENDPOINT_METHOD,
+            endpoint=create_mix_order_t.ENDPOINT_PATH,
+            args=args,
+        )
+        return self.do_request(api_request=api_request, return_type=create_mix_order_t.Data)
+
     def create_or_update_entity(
         self,
         *,
@@ -576,6 +599,32 @@ class ClientMethods(ABC):
         )
         return self.do_request(api_request=api_request, return_type=execute_batch_load_async_t.Data)

+    def export_entities(
+        self,
+        *,
+        config_key: identifier_t.IdentifierKey,
+        type: exports_t.ExportType = exports_t.ExportType.EXCEL,
+        client_timezone: exports_t.ListingExportUserTimezone | None = None,
+        limit: int | None = None,
+    ) -> export_entities_t.Data:
+        """Uses a structured loading configuration to export entities in the system. This endpoint is asynchronous, and returns the job ID that can be used to query the status of the export.
+
+        :param config_key: The configuration reference for the listing config
+        :param limit: The number of data points to return. If not filled in, all filtered entities will be included in the export.
+        """
+        args = export_entities_t.Arguments(
+            config_key=config_key,
+            client_timezone=client_timezone,
+            limit=limit,
+            type=type,
+        )
+        api_request = APIRequest(
+            method=export_entities_t.ENDPOINT_METHOD,
+            endpoint=export_entities_t.ENDPOINT_PATH,
+            args=args,
+        )
+        return self.do_request(api_request=api_request, return_type=export_entities_t.Data)
+
     def get_column_calculation_values(
         self,
         *,
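Both new ClientMethods wrappers follow the existing pattern: build the generated Arguments dataclass, wrap it in an APIRequest with the endpoint constants, and deserialize into the endpoint's Data type. A hedged usage sketch follows; constructing the concrete SDK client and the real identifier keys are outside this diff, so `client` and the key arguments below are placeholders.

```python
# Sketch under assumptions: `client` stands in for a constructed SDK client that
# mixes in ClientMethods; recipe_key, step_identifier, and config_key are
# placeholders for real identifiers in your workspace.
from uncountable.types import exports_t


def run_new_endpoints(client, recipe_key, step_identifier, config_key):
    # Synchronous wrapper around api/external/recipes/create_mix_order.
    mix_order = client.create_mix_order(
        recipe_key=recipe_key,
        recipe_workflow_step_identifier=step_identifier,
    )

    # Asynchronous export: per the docstring, the returned Data derives from
    # AsyncBatchActionReturn and carries the job ID used to poll export status.
    export = client.export_entities(
        config_key=config_key,
        type=exports_t.ExportType.PDF,
        limit=100,
    )
    return mix_order, export
```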
uncountable/types/entity_t.py
CHANGED
@@ -131,6 +131,7 @@ __all__: list[str] = [
     "recipe_audit_log": "Experiment Audit Log",
     "recipe_calculation": "Recipe Calculation",
     "recipe_check": "Experiment Check",
+    "recipe_component_witness": "Recipe Component Witness",
     "recipe_export": "Recipe Export",
     "recipe_goal": "Experiment Goal",
     "recipe_ingredient": "Recipe Ingredient",
@@ -312,6 +313,7 @@ class EntityType(StrEnum):
     RECIPE_AUDIT_LOG = "recipe_audit_log"
     RECIPE_CALCULATION = "recipe_calculation"
     RECIPE_CHECK = "recipe_check"
+    RECIPE_COMPONENT_WITNESS = "recipe_component_witness"
     RECIPE_EXPORT = "recipe_export"
     RECIPE_GOAL = "recipe_goal"
     RECIPE_INGREDIENT = "recipe_ingredient"
@@ -390,7 +392,7 @@ class EntityType(StrEnum):

 # DO NOT MODIFY -- This file is generated by type_spec
 LimitedEntityType = typing.Annotated[
-    typing.Literal[EntityType.LAB_REQUEST] | typing.Literal[EntityType.APPROVAL] | typing.Literal[EntityType.CUSTOM_ENTITY] | typing.Literal[EntityType.INGREDIENT_ATTRIBUTE] | typing.Literal[EntityType.INVENTORY_AMOUNT] | typing.Literal[EntityType.TASK] | typing.Literal[EntityType.PROJECT] | typing.Literal[EntityType.EQUIPMENT] | typing.Literal[EntityType.INV_LOCAL_LOCATIONS] | typing.Literal[EntityType.FIELD_OPTION_SET] | typing.Literal[EntityType.WEBHOOK] | typing.Literal[EntityType.SPECS] | typing.Literal[EntityType.GOAL] | typing.Literal[EntityType.INGREDIENT_TAG_MAP] | typing.Literal[EntityType.INGREDIENT_TAG] | typing.Literal[EntityType.CONDITION_PARAMETER] | typing.Literal[EntityType.OUTPUT] | typing.Literal[EntityType.OUTPUT_CONDITION_PARAMETER] | typing.Literal[EntityType.ASYNC_JOB] | typing.Literal[EntityType.CONSTRAINT] | typing.Literal[EntityType.INGREDIENT_CATEGORY_ALL] | typing.Literal[EntityType.TIME_SERIES_SEGMENT] | typing.Literal[EntityType.EQUIPMENT_MAINTENANCE] | typing.Literal[EntityType.MAINTENANCE_SCHEDULE] | typing.Literal[EntityType.CONDITION_PARAMETER_RULE] | typing.Literal[EntityType.INGREDIENT],
+    typing.Literal[EntityType.LAB_REQUEST] | typing.Literal[EntityType.APPROVAL] | typing.Literal[EntityType.CUSTOM_ENTITY] | typing.Literal[EntityType.INGREDIENT_ATTRIBUTE] | typing.Literal[EntityType.INVENTORY_AMOUNT] | typing.Literal[EntityType.TASK] | typing.Literal[EntityType.PROJECT] | typing.Literal[EntityType.EQUIPMENT] | typing.Literal[EntityType.INV_LOCAL_LOCATIONS] | typing.Literal[EntityType.FIELD_OPTION_SET] | typing.Literal[EntityType.WEBHOOK] | typing.Literal[EntityType.SPECS] | typing.Literal[EntityType.GOAL] | typing.Literal[EntityType.INGREDIENT_TAG_MAP] | typing.Literal[EntityType.INGREDIENT_TAG] | typing.Literal[EntityType.CONDITION_PARAMETER] | typing.Literal[EntityType.OUTPUT] | typing.Literal[EntityType.OUTPUT_CONDITION_PARAMETER] | typing.Literal[EntityType.ASYNC_JOB] | typing.Literal[EntityType.CONSTRAINT] | typing.Literal[EntityType.INGREDIENT_CATEGORY_ALL] | typing.Literal[EntityType.TIME_SERIES_SEGMENT] | typing.Literal[EntityType.EQUIPMENT_MAINTENANCE] | typing.Literal[EntityType.MAINTENANCE_SCHEDULE] | typing.Literal[EntityType.CONDITION_PARAMETER_RULE] | typing.Literal[EntityType.INGREDIENT] | typing.Literal[EntityType.TIMESHEET_ENTRY],
     serial_alias_annotation(
         named_type_path="sdk.entity.LimitedEntityType",
     ),
uncountable/types/exports.py
ADDED
@@ -0,0 +1,8 @@
+# ruff: noqa: E402 Q003
+# fmt: off
+# isort: skip_file
+# DO NOT MODIFY -- This file is generated by type_spec
+# Kept only for SDK backwards compatibility
+from .exports_t import ListingExportUserTimezone as ListingExportUserTimezone
+from .exports_t import ExportType as ExportType
+# DO NOT MODIFY -- This file is generated by type_spec
uncountable/types/exports_t.py
ADDED
@@ -0,0 +1,33 @@
+# DO NOT MODIFY -- This file is generated by type_spec
+# ruff: noqa: E402 Q003
+# fmt: off
+# isort: skip_file
+from __future__ import annotations
+import typing  # noqa: F401
+import datetime  # noqa: F401
+from decimal import Decimal  # noqa: F401
+from enum import StrEnum
+import dataclasses
+from pkgs.serialization import serial_class
+from . import base_t
+
+__all__: list[str] = [
+    "ExportType",
+    "ListingExportUserTimezone",
+]
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.exports.ListingExportUserTimezone",
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True)  # type: ignore[literal-required]
+class ListingExportUserTimezone:
+    timezone_name: str
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+class ExportType(StrEnum):
+    EXCEL = "excel"
+    PDF = "pdf"
+# DO NOT MODIFY -- This file is generated by type_spec
uncountable/types/job_definition.py
CHANGED
@@ -18,6 +18,8 @@ from .job_definition_t import JobExecutor as JobExecutor
 from .job_definition_t import JobLoggingSettings as JobLoggingSettings
 from .job_definition_t import JobDefinitionBase as JobDefinitionBase
 from .job_definition_t import CronJobDefinition as CronJobDefinition
+from .job_definition_t import HttpJobDefinitionBase as HttpJobDefinitionBase
+from .job_definition_t import CustomHttpJobDefinition as CustomHttpJobDefinition
 from .job_definition_t import WebhookJobDefinition as WebhookJobDefinition
 from .job_definition_t import JobDefinition as JobDefinition
 from .job_definition_t import ProfileDefinition as ProfileDefinition