UncountablePythonSDK-0.0.75-py3-none-any.whl → UncountablePythonSDK-0.0.77-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {UncountablePythonSDK-0.0.75.dist-info → UncountablePythonSDK-0.0.77.dist-info}/METADATA +2 -2
- {UncountablePythonSDK-0.0.75.dist-info → UncountablePythonSDK-0.0.77.dist-info}/RECORD +45 -39
- docs/requirements.txt +1 -1
- examples/async_batch.py +3 -3
- pkgs/argument_parser/argument_parser.py +7 -7
- pkgs/filesystem_utils/_s3_session.py +3 -3
- pkgs/serialization/serial_class.py +3 -0
- pkgs/type_spec/builder.py +26 -27
- pkgs/type_spec/config.py +3 -3
- pkgs/type_spec/emit_io_ts.py +6 -6
- pkgs/type_spec/emit_open_api.py +4 -4
- pkgs/type_spec/emit_python.py +20 -20
- pkgs/type_spec/emit_typescript.py +4 -4
- pkgs/type_spec/emit_typescript_util.py +4 -4
- pkgs/type_spec/type_info/emit_type_info.py +6 -6
- uncountable/core/environment.py +15 -2
- uncountable/core/file_upload.py +1 -3
- uncountable/integration/construct_client.py +3 -4
- uncountable/integration/executors/generic_upload_executor.py +7 -7
- uncountable/integration/executors/script_executor.py +3 -3
- uncountable/integration/queue_runner/job_scheduler.py +1 -1
- uncountable/integration/queue_runner/worker.py +3 -8
- uncountable/integration/scan_profiles.py +39 -15
- uncountable/integration/secret_retrieval/retrieve_secret.py +1 -1
- uncountable/integration/server.py +4 -12
- uncountable/integration/telemetry.py +2 -2
- uncountable/integration/webhook_server/entrypoint.py +4 -10
- uncountable/types/__init__.py +8 -0
- uncountable/types/api/entity/create_entities.py +4 -2
- uncountable/types/api/entity/create_entity.py +4 -2
- uncountable/types/api/entity/grant_entity_permissions.py +48 -0
- uncountable/types/api/recipes/get_column_calculation_values.py +58 -0
- uncountable/types/async_batch_processor.py +43 -0
- uncountable/types/async_batch_t.py +1 -0
- uncountable/types/auth_retrieval.py +12 -0
- uncountable/types/auth_retrieval_t.py +75 -0
- uncountable/types/client_base.py +62 -4
- uncountable/types/entity.py +3 -0
- uncountable/types/entity_t.py +20 -0
- uncountable/types/integration_server.py +9 -0
- uncountable/types/integration_server_t.py +37 -0
- uncountable/types/job_definition.py +1 -5
- uncountable/types/job_definition_t.py +20 -60
- {UncountablePythonSDK-0.0.75.dist-info → UncountablePythonSDK-0.0.77.dist-info}/WHEEL +0 -0
- {UncountablePythonSDK-0.0.75.dist-info → UncountablePythonSDK-0.0.77.dist-info}/top_level.txt +0 -0
pkgs/type_spec/emit_typescript_util.py
CHANGED

@@ -153,7 +153,7 @@ def emit_type_ts(ctx: EmitTypescriptContext, stype: builder.SpecType) -> None:
         ctx.out.write(f"export type {stype.name} = {base_type}{{}}\n")
     else:
         if isinstance(stype, builder.SpecTypeDefnObject) and len(stype.parameters) > 0:
-            full_type_name = f"{stype.name}<{", ".join(stype.parameters)}>"
+            full_type_name = f"{stype.name}<{', '.join(stype.parameters)}>"
         else:
             full_type_name = stype.name
         ctx.out.write(f"export type {full_type_name} = {base_type}{{")

@@ -201,7 +201,7 @@ def refer_to_impl(
        return f"readonly ({spec})[]" if multi else f"readonly {spec}[]", False
    if stype.defn_type.name == builder.BaseTypeName.s_union:
        return (
-            f"({" | ".join([refer_to(ctx, p) for p in stype.parameters])})",
+            f"({' | '.join([refer_to(ctx, p) for p in stype.parameters])})",
            False,
        )
    if stype.defn_type.name == builder.BaseTypeName.s_literal:

@@ -209,11 +209,11 @@ def refer_to_impl(
        for parameter in stype.parameters:
            assert isinstance(parameter, builder.SpecTypeLiteralWrapper)
            parts.append(refer_to(ctx, parameter))
-        return f"({" | ".join(parts)})", False
+        return f"({' | '.join(parts)})", False
    if stype.defn_type.name == builder.BaseTypeName.s_optional:
        return f"{refer_to(ctx, stype.parameters[0])} | null", True
    if stype.defn_type.name == builder.BaseTypeName.s_tuple:
-        return f"[{", ".join([refer_to(ctx, p) for p in stype.parameters])}]", False
+        return f"[{', '.join([refer_to(ctx, p) for p in stype.parameters])}]", False
    params = ", ".join([refer_to(ctx, p) for p in stype.parameters])
    return f"{refer_to(ctx, stype.defn_type)}<{params}>", False

pkgs/type_spec/type_info/emit_type_info.py
CHANGED

@@ -234,9 +234,9 @@ def _extract_and_validate_layout(

    local_ref_name = None
    if group.ref_name is not None:
-        assert (
-            base_layout is None or base_layout.get(group.ref_name) is None
-        ), f"group-name-duplicate-in-base:{group.ref_name}"
+        assert base_layout is None or base_layout.get(group.ref_name) is None, (
+            f"group-name-duplicate-in-base:{group.ref_name}"
+        )
        local_ref_name = group.ref_name

    if group.extends:

@@ -255,9 +255,9 @@ def _extract_and_validate_layout(
        assert group_ref_name in layout, f"missing-base-group:{group_ref_name}"

    for prop_ref_name in stype.properties:
-        assert (
-            prop_ref_name in all_fields_group
-        ), f"layout-missing-field:{prop_ref_name}"
+        assert prop_ref_name in all_fields_group, (
+            f"layout-missing-field:{prop_ref_name}"
+        )

    return layout
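Note on the f-string hunks above (and the similar one-line changes across the other emitters in the file list): the left-hand lines in this capture are truncated, but the truncation points are consistent with f-strings that reused double quotes inside their expressions. That form only parses on Python 3.12+ (PEP 701), so rewriting to single-quoted separators is what makes the emitted code portable to older interpreters. A minimal, runnable demonstration:

import sys

# f-strings that reuse the outer quote type inside an expression parse only on
# Python 3.12+ (PEP 701); the single-quote form works on older interpreters too.
portable = 'f"({\' | \'.join(parts)})"'     # what 0.0.77 ships
py312_only = 'f"({" | ".join(parts)})"'     # nested double quotes

for label, src in (("portable", portable), ("3.12-only", py312_only)):
    try:
        compile(f"parts = ['a', 'b']\n{src}", "<demo>", "exec")
        print(f"{label}: parses on {sys.version.split()[0]}")
    except SyntaxError:
        print(f"{label}: SyntaxError on {sys.version.split()[0]}")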
uncountable/core/environment.py
CHANGED

@@ -2,6 +2,8 @@ import functools
 import os
 from importlib.metadata import PackageNotFoundError, version

+from uncountable.types import integration_server_t
+

 @functools.cache
 def get_version() -> str:

@@ -12,8 +14,8 @@ def get_version() -> str:
     return version_str


-def get_integration_env() -> str | None:
-    return os.environ.get("UNC_INTEGRATION_ENV")
+def get_server_env() -> str | None:
+    return os.environ.get("UNC_SERVER_ENV")


 def get_webhook_server_port() -> int:

@@ -26,3 +28,14 @@ def get_local_admin_server_port() -> int:

 def get_otel_enabled() -> bool:
     return os.environ.get("UNC_OTEL_ENABLED") == "true"
+
+
+def get_profiles_module() -> str:
+    return os.environ["UNC_PROFILES_MODULE"]
+
+
+def get_integration_envs() -> list[integration_server_t.IntegrationEnvironment]:
+    return [
+        integration_server_t.IntegrationEnvironment(env)
+        for env in os.environ.get("UNC_INTEGRATION_ENVS", "prod").split(",")
+    ]
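The new helpers make the integration server's configuration surface explicit: UNC_PROFILES_MODULE names the package to scan for profiles, and UNC_INTEGRATION_ENVS is a comma-separated list of environments defaulting to "prod". A small sketch of the parsing behavior, using plain strings in place of the IntegrationEnvironment enum (the second env name below is illustrative, not a documented value):

import os

# Sketch only: the real get_integration_envs() wraps each entry in
# integration_server_t.IntegrationEnvironment; plain strings stand in here.
os.environ["UNC_INTEGRATION_ENVS"] = "prod,staging"  # "staging" is a made-up value
print(os.environ.get("UNC_INTEGRATION_ENVS", "prod").split(","))  # ['prod', 'staging']

# Unset, the default is a single "prod" entry:
del os.environ["UNC_INTEGRATION_ENVS"]
print(os.environ.get("UNC_INTEGRATION_ENVS", "prod").split(","))  # ['prod']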
uncountable/core/file_upload.py
CHANGED

@@ -99,9 +99,7 @@ class FileUploader:
             file_bytes.bytes_data,
             {"filename": file_bytes.name.encode()},
             client_session=session,
-            config=aiotus.RetryConfiguration(
-                ssl=False if self._allow_insecure_tls else None
-            ),
+            config=aiotus.RetryConfiguration(ssl=not self._allow_insecure_tls),
             chunksize=_CHUNK_SIZE,
         )
         if location is None:
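The retry configuration previously passed ssl=None (deferring to the aiotus default) whenever insecure TLS was not allowed and ssl=False when it was; it now always passes an explicit boolean. The flag logic in isolation:

# Flag logic only; aiotus.RetryConfiguration itself is unchanged.
for allow_insecure_tls in (False, True):
    old_ssl = False if allow_insecure_tls else None  # None deferred to the library default
    new_ssl = not allow_insecure_tls                 # explicit True/False either way
    print(f"allow_insecure_tls={allow_insecure_tls}: old={old_ssl}, new={new_ssl}")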
uncountable/integration/construct_client.py
CHANGED

@@ -3,22 +3,21 @@ from uncountable.core.client import ClientConfig
 from uncountable.core.types import AuthDetailsAll, AuthDetailsOAuth
 from uncountable.integration.secret_retrieval.retrieve_secret import retrieve_secret
 from uncountable.integration.telemetry import JobLogger
+from uncountable.types import auth_retrieval_t
 from uncountable.types.job_definition_t import (
-    AuthRetrievalBasic,
-    AuthRetrievalOAuth,
     ProfileMetadata,
 )


 def _construct_auth_details(profile_meta: ProfileMetadata) -> AuthDetailsAll:
     match profile_meta.auth_retrieval:
-        case AuthRetrievalOAuth():
+        case auth_retrieval_t.AuthRetrievalOAuth():
             refresh_token = retrieve_secret(
                 profile_meta.auth_retrieval.refresh_token_secret,
                 profile_metadata=profile_meta,
             )
             return AuthDetailsOAuth(refresh_token=refresh_token)
-        case AuthRetrievalBasic():
+        case auth_retrieval_t.AuthRetrievalBasic():
             api_id = retrieve_secret(
                 profile_meta.auth_retrieval.api_id_secret, profile_metadata=profile_meta
             )
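AuthRetrievalBasic and AuthRetrievalOAuth now live in the new uncountable.types.auth_retrieval_t module rather than job_definition_t. Downstream code that imported them from job_definition_t will likely need the same one-line change (hedged sketch, assuming no backward-compatible re-export is kept):

# Before (0.0.75):
# from uncountable.types.job_definition_t import AuthRetrievalBasic
# After (0.0.77):
from uncountable.types import auth_retrieval_t

retrieval_cls = auth_retrieval_t.AuthRetrievalBasic  # constructor fields are not shown in this diff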
uncountable/integration/executors/generic_upload_executor.py
CHANGED

@@ -103,7 +103,7 @@ def _pull_remote_directory_data(
     files_to_pull = _filter_by_max_files(remote_directory, files_to_pull)

     logger.log_info(
-        f"Accessing SFTP directory: {remote_directory.src_path} and pulling files: {", ".join([f.filename for f in files_to_pull if f.filename is not None])}",
+        f"Accessing SFTP directory: {remote_directory.src_path} and pulling files: {', '.join([f.filename for f in files_to_pull if f.filename is not None])}",
     )
     return filesystem_session.download_files(files_to_pull)

@@ -205,16 +205,16 @@ class GenericUploadJob(Job[None]):
         secret_access_key = None

         if self.data_source.endpoint_url is None:
-            assert (
-                self.data_source.cloud_provider is not None
-            ), "either cloud_provider or endpoint_url must be specified"
+            assert self.data_source.cloud_provider is not None, (
+                "either cloud_provider or endpoint_url must be specified"
+            )
             match self.data_source.cloud_provider:
                 case S3CloudProvider.AWS:
                     endpoint_url = "https://s3.amazonaws.com"
                 case S3CloudProvider.OVH:
-                    assert (
-                        self.data_source.region_name is not None
-                    ), "region_name must be specified for cloud_provider OVH"
+                    assert self.data_source.region_name is not None, (
+                        "region_name must be specified for cloud_provider OVH"
+                    )
                     endpoint_url = f"https://s3.{self.data_source.region_name}.cloud.ovh.net"
         else:
             endpoint_url = self.data_source.endpoint_url
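The S3 endpoint selection above reduces to a small decision table: an explicit endpoint_url wins; otherwise the cloud provider picks the URL, with OVH additionally requiring a region. A standalone sketch, with string provider names standing in for the S3CloudProvider enum and "gra" as an example region:

def resolve_endpoint(
    endpoint_url: str | None, cloud_provider: str | None, region_name: str | None
) -> str:
    # Mirrors GenericUploadJob's logic with plain strings instead of S3CloudProvider.
    if endpoint_url is not None:
        return endpoint_url
    assert cloud_provider is not None, (
        "either cloud_provider or endpoint_url must be specified"
    )
    if cloud_provider == "AWS":
        return "https://s3.amazonaws.com"
    assert region_name is not None, "region_name must be specified for cloud_provider OVH"
    return f"https://s3.{region_name}.cloud.ovh.net"

print(resolve_endpoint(None, "AWS", None))   # https://s3.amazonaws.com
print(resolve_endpoint(None, "OVH", "gra"))  # https://s3.gra.cloud.ovh.net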
uncountable/integration/executors/script_executor.py
CHANGED

@@ -19,7 +19,7 @@ def resolve_script_executor(
     for _, job_class in inspect.getmembers(job_module, inspect.isclass):
         if getattr(job_class, "_unc_job_registered", False):
             found_jobs.append(job_class())
-    assert (
-        len(found_jobs) == 1
-    ), f"expected exactly one job class in {executor.import_path}, found {len(found_jobs)}"
+    assert len(found_jobs) == 1, (
+        f"expected exactly one job class in {executor.import_path}, found {len(found_jobs)}"
+    )
     return found_jobs[0]
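resolve_script_executor discovers the job class by scanning the imported module for the _unc_job_registered marker and insists on exactly one match. A self-contained reproduction of the discovery rule (the throwaway module and class names are illustrative):

import inspect
import types

# Build a throwaway module containing one marked job class.
job_module = types.ModuleType("example_profile.jobs")

class ExampleJob:
    _unc_job_registered = True  # set by the SDK's registration decorator

job_module.ExampleJob = ExampleJob

found_jobs = [
    job_class()
    for _, job_class in inspect.getmembers(job_module, inspect.isclass)
    if getattr(job_class, "_unc_job_registered", False)
]
assert len(found_jobs) == 1, (
    f"expected exactly one job class, found {len(found_jobs)}"
)
print(type(found_jobs[0]).__name__)  # ExampleJob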
uncountable/integration/queue_runner/job_scheduler.py
CHANGED

@@ -55,7 +55,7 @@ def _start_workers(
     job_worker_lookup: dict[str, Worker] = {}
     job_definition_lookup: dict[str, job_definition_t.JobDefinition] = {}
     for profile in profiles:
-        for job_definition in profile.definition.jobs:
+        for job_definition in profile.jobs:
            job_definition_lookup[job_definition.id] = job_definition
            job_worker_key = _get_job_worker_key(job_definition, profile.name)
            if job_worker_key not in job_queue_worker_lookup:
uncountable/integration/queue_runner/worker.py
CHANGED

@@ -51,16 +51,11 @@ class RegisteredJobDetails:

 def get_registered_job_details(job_ref_name: str) -> RegisteredJobDetails:
     profiles = load_profiles()
-    for profile in profiles:
-        for job_definition in profile.definition.jobs:
+    for profile_metadata in profiles:
+        for job_definition in profile_metadata.jobs:
            if job_definition.id == job_ref_name:
                return RegisteredJobDetails(
-                    profile_metadata=ProfileMetadata(
-                        name=profile.name,
-                        base_url=profile.definition.base_url,
-                        auth_retrieval=profile.definition.auth_retrieval,
-                        client_options=profile.definition.client_options,
-                    ),
+                    profile_metadata=profile_metadata,
                    job_definition=job_definition,
                )
    raise Exception(f"profile not found for job {job_ref_name}")
uncountable/integration/scan_profiles.py
CHANGED

@@ -1,27 +1,26 @@
 import functools
-import os
-from dataclasses import dataclass
 from importlib import resources

 from pkgs.argument_parser import CachedParser
-from uncountable.types import job_definition_t
+from uncountable.core import environment
+from uncountable.types import integration_server_t, job_definition_t

 profile_parser = CachedParser(job_definition_t.ProfileDefinition)

-
-@dataclass
-class ProfileDetails:
-    name: str
-    definition: job_definition_t.ProfileDefinition
+_DEFAULT_PROFILE_ENV = integration_server_t.IntegrationEnvironment.PROD
+_IGNORED_PROFILE_FOLDERS = ["__pycache__"]


 @functools.cache
-def load_profiles() -> list[ProfileDetails]:
-    profiles_module = os.environ["UNC_PROFILES_MODULE"]
+def load_profiles() -> list[job_definition_t.ProfileMetadata]:
+    profiles_module = environment.get_profiles_module()
+    integration_envs = environment.get_integration_envs()
     profiles = [
-        entry for entry in resources.files(profiles_module).iterdir() if entry.is_dir()
+        entry
+        for entry in resources.files(profiles_module).iterdir()
+        if entry.is_dir() and entry.name not in _IGNORED_PROFILE_FOLDERS
     ]
-    profile_details: list[ProfileDetails] = []
+    profile_details: list[job_definition_t.ProfileMetadata] = []
     seen_job_ids: set[str] = set()
     for profile_file in profiles:
         profile_name = profile_file.name

@@ -34,9 +33,34 @@ def load_profiles() -> list[ProfileDetails]:
             if job.id in seen_job_ids:
                 raise Exception(f"multiple jobs with id {job.id}")
             seen_job_ids.add(job.id)
-            profile_details.append(
-                ProfileDetails(name=profile_name, definition=definition)
-            )
+
+            if definition.environments is not None:
+                for integration_env in integration_envs:
+                    environment_config = definition.environments.get(integration_env)
+                    if environment_config is not None:
+                        profile_details.append(
+                            job_definition_t.ProfileMetadata(
+                                name=profile_name,
+                                jobs=definition.jobs,
+                                base_url=environment_config.base_url,
+                                auth_retrieval=environment_config.auth_retrieval,
+                                client_options=environment_config.client_options,
+                            )
+                        )
+            elif _DEFAULT_PROFILE_ENV in integration_envs:
+                assert (
+                    definition.base_url is not None
+                    and definition.auth_retrieval is not None
+                ), f"define environments in profile.yaml for {profile_name}"
+                profile_details.append(
+                    job_definition_t.ProfileMetadata(
+                        name=profile_name,
+                        jobs=definition.jobs,
+                        base_url=definition.base_url,
+                        auth_retrieval=definition.auth_retrieval,
+                        client_options=definition.client_options,
+                    )
+                )
         except FileNotFoundError as e:
             print(f"WARN: profile.yaml not found for {profile_name}", e)
             continue
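load_profiles now yields one ProfileMetadata per (profile, active environment) pair: a profile with an environments mapping contributes a config for each active environment it defines, while a legacy profile with top-level base_url/auth_retrieval only loads when the default prod environment is active. A simplified, stdlib-only sketch of that selection rule, with dicts standing in for the generated dataclasses and illustrative environment names:

def select_profile_configs(
    environments: dict[str, dict] | None,
    legacy_config: dict | None,
    active_envs: list[str],
    default_env: str = "prod",
) -> list[dict]:
    # Mirrors the branch added to load_profiles above.
    if environments is not None:
        return [environments[env] for env in active_envs if env in environments]
    if default_env in active_envs:
        assert legacy_config is not None, "define environments in profile.yaml"
        return [legacy_config]
    return []

envs = {"prod": {"base_url": "https://prod.example"}, "test": {"base_url": "https://test.example"}}
print(select_profile_configs(envs, None, ["prod", "test"]))                    # both configs
print(select_profile_configs(None, {"base_url": "https://legacy.example"}, ["test"]))  # []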
uncountable/integration/server.py
CHANGED

@@ -12,12 +12,10 @@ from opentelemetry.trace import get_current_span
 from sqlalchemy.engine.base import Engine

 from uncountable.integration.cron import CronJobArgs, cron_job_executor
-from uncountable.integration.scan_profiles import ProfileDetails
 from uncountable.integration.telemetry import Logger
-from uncountable.types import base_t
+from uncountable.types import base_t, job_definition_t
 from uncountable.types.job_definition_t import (
     CronJobDefinition,
-    ProfileMetadata,
     WebhookJobDefinition,
 )

@@ -38,17 +36,11 @@ class IntegrationServer:
         )
         self._server_logger = Logger(get_current_span())

-    def register_jobs(self, profiles: list[ProfileDetails]) -> None:
+    def register_jobs(self, profiles: list[job_definition_t.ProfileMetadata]) -> None:
         valid_job_ids = []
-        for profile in profiles:
-            for job_defn in profile.definition.jobs:
+        for profile_metadata in profiles:
+            for job_defn in profile_metadata.jobs:
                 valid_job_ids.append(job_defn.id)
-                profile_metadata = ProfileMetadata(
-                    name=profile.name,
-                    auth_retrieval=profile.definition.auth_retrieval,
-                    base_url=profile.definition.base_url,
-                    client_options=profile.definition.client_options,
-                )
                 match job_defn:
                     case CronJobDefinition():
                         # Add to ap scheduler
uncountable/integration/telemetry.py
CHANGED

@@ -21,8 +21,8 @@ from opentelemetry.sdk.trace.export import (
 from opentelemetry.trace import DEFAULT_TRACE_OPTIONS, Span, Tracer

 from uncountable.core.environment import (
-    get_integration_env,
     get_otel_enabled,
+    get_server_env,
     get_version,
 )
 from uncountable.types import base_t, job_definition_t

@@ -41,7 +41,7 @@ def get_otel_resource() -> Resource:
     unc_version = os.environ.get("UNC_VERSION")
     if unc_version is not None:
         attributes["service.version"] = unc_version
-    unc_env = get_integration_env()
+    unc_env = get_server_env()
     if unc_env is not None:
         attributes["deployment.environment"] = unc_env
     resource = Resource.create(attributes=_cast_attributes(attributes))
uncountable/integration/webhook_server/entrypoint.py
CHANGED

@@ -8,8 +8,8 @@ from flask.typing import ResponseReturnValue
 from flask.wrappers import Response
 from opentelemetry.trace import get_current_span
 from uncountable.core.environment import (
-    get_integration_env,
     get_local_admin_server_port,
+    get_server_env,
     get_webhook_server_port,
 )
 from uncountable.integration.queue_runner.command_server.command_client import (

@@ -143,15 +143,9 @@ def register_route(

 def main() -> None:
     profiles = load_profiles()
-    for profile in profiles:
+    for profile_metadata in profiles:
         server_logger = Logger(get_current_span())
-        profile_metadata = job_definition_t.ProfileMetadata(
-            name=profile.name,
-            auth_retrieval=profile.definition.auth_retrieval,
-            base_url=profile.definition.base_url,
-            client_options=profile.definition.client_options,
-        )
-        for job in profile.definition.jobs:
+        for job in profile_metadata.jobs:
            if isinstance(job, job_definition_t.WebhookJobDefinition):
                register_route(
                    server_logger=server_logger, profile_meta=profile_metadata, job=job

@@ -165,6 +159,6 @@ if __name__ == "__main__":
     app.run(
         host="0.0.0.0",
         port=get_webhook_server_port(),
-        debug=get_integration_env() == "playground",
+        debug=get_server_env() == "playground",
         exclude_patterns=[],
     )
uncountable/types/__init__.py
CHANGED

@@ -9,6 +9,7 @@ from .api.equipment import associate_equipment_input as associate_equipment_inpu
 from .api.recipes import associate_recipe_as_input as associate_recipe_as_input_t
 from .api.recipes import associate_recipe_as_lot as associate_recipe_as_lot_t
 from . import async_batch_t as async_batch_t
+from . import auth_retrieval_t as auth_retrieval_t
 from . import base_t as base_t
 from . import calculations_t as calculations_t
 from . import chemical_structure_t as chemical_structure_t

@@ -31,6 +32,7 @@ from . import experiment_groups_t as experiment_groups_t
 from . import field_values_t as field_values_t
 from . import fields_t as fields_t
 from . import generic_upload_t as generic_upload_t
+from .api.recipes import get_column_calculation_values as get_column_calculation_values_t
 from .api.recipes import get_curve as get_curve_t
 from .api.entity import get_entities_data as get_entities_data_t
 from .api.inputs import get_input_data as get_input_data_t

@@ -47,10 +49,12 @@ from .api.recipe_metadata import get_recipe_metadata_data as get_recipe_metadata
 from .api.recipes import get_recipe_names as get_recipe_names_t
 from .api.recipes import get_recipe_output_metadata as get_recipe_output_metadata_t
 from .api.recipes import get_recipes_data as get_recipes_data_t
+from .api.entity import grant_entity_permissions as grant_entity_permissions_t
 from . import id_source_t as id_source_t
 from . import identifier_t as identifier_t
 from . import input_attributes_t as input_attributes_t
 from . import inputs_t as inputs_t
+from . import integration_server_t as integration_server_t
 from .api.uploader import invoke_uploader as invoke_uploader_t
 from . import job_definition_t as job_definition_t
 from .api.entity import list_entities as list_entities_t

@@ -110,6 +114,7 @@ __all__: list[str] = [
     "associate_recipe_as_input_t",
     "associate_recipe_as_lot_t",
     "async_batch_t",
+    "auth_retrieval_t",
     "base_t",
     "calculations_t",
     "chemical_structure_t",

@@ -132,6 +137,7 @@ __all__: list[str] = [
     "field_values_t",
     "fields_t",
     "generic_upload_t",
+    "get_column_calculation_values_t",
     "get_curve_t",
     "get_entities_data_t",
     "get_input_data_t",

@@ -148,10 +154,12 @@ __all__: list[str] = [
     "get_recipe_names_t",
     "get_recipe_output_metadata_t",
     "get_recipes_data_t",
+    "grant_entity_permissions_t",
     "id_source_t",
     "identifier_t",
     "input_attributes_t",
     "inputs_t",
+    "integration_server_t",
     "invoke_uploader_t",
     "job_definition_t",
     "list_entities_t",
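With these __init__.py additions, the new modules are importable through the package root like the existing *_t modules (assuming 0.0.77 is installed):

from uncountable.types import auth_retrieval_t, integration_server_t
from uncountable.types import get_column_calculation_values_t, grant_entity_permissions_t

print(grant_entity_permissions_t.ENDPOINT_PATH)  # api/external/entity/grant_entity_permissions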
uncountable/types/api/entity/create_entities.py
CHANGED

@@ -12,6 +12,7 @@ from pkgs.serialization import serial_class
 from ... import base_t
 from ... import entity_t
 from ... import field_values_t
+from ... import identifier_t

 __all__: list[str] = [
     "Arguments",

@@ -40,9 +41,10 @@ class EntityToCreate:
 )
 @dataclasses.dataclass(kw_only=True)
 class Arguments:
-
-    entity_type: typing.Union[typing.Literal[entity_t.EntityType.LAB_REQUEST], typing.Literal[entity_t.EntityType.APPROVAL], typing.Literal[entity_t.EntityType.CUSTOM_ENTITY], typing.Literal[entity_t.EntityType.INVENTORY_AMOUNT], typing.Literal[entity_t.EntityType.TASK], typing.Literal[entity_t.EntityType.PROJECT], typing.Literal[entity_t.EntityType.EQUIPMENT], typing.Literal[entity_t.EntityType.INV_LOCAL_LOCATIONS], typing.Literal[entity_t.EntityType.FIELD_OPTION_SET], typing.Literal[entity_t.EntityType.WEBHOOK], typing.Literal[entity_t.EntityType.SPECS], typing.Literal[entity_t.EntityType.GOAL], typing.Literal[entity_t.EntityType.INGREDIENT_TAG_MAP], typing.Literal[entity_t.EntityType.INGREDIENT_TAG], typing.Literal[entity_t.EntityType.CONDITION_PARAMETER], typing.Literal[entity_t.EntityType.OUTPUT]]
+    entity_type: entity_t.LimitedEntityType
     entities_to_create: list[EntityToCreate]
+    definition_id: typing.Optional[base_t.ObjectId] = None
+    definition_key: typing.Optional[identifier_t.IdentifierKey] = None


 # DO NOT MODIFY -- This file is generated by type_spec
uncountable/types/api/entity/create_entity.py
CHANGED

@@ -12,6 +12,7 @@ from pkgs.serialization import serial_class
 from ... import base_t
 from ... import entity_t
 from ... import field_values_t
+from ... import identifier_t

 __all__: list[str] = [
     "Arguments",

@@ -43,8 +44,9 @@ class EntityFieldInitialValue:
 )
 @dataclasses.dataclass(kw_only=True)
 class Arguments:
-
-    entity_type: typing.Union[…]
+    entity_type: entity_t.LimitedEntityType
+    definition_id: typing.Optional[base_t.ObjectId] = None
+    definition_key: typing.Optional[identifier_t.IdentifierKey] = None
     field_values: typing.Optional[typing.Optional[list[field_values_t.FieldRefNameValue]]] = None

uncountable/types/api/entity/grant_entity_permissions.py
ADDED

@@ -0,0 +1,48 @@
+# DO NOT MODIFY -- This file is generated by type_spec
+# flake8: noqa: F821
+# ruff: noqa: E402 Q003
+# fmt: off
+# isort: skip_file
+from __future__ import annotations
+import typing  # noqa: F401
+import datetime  # noqa: F401
+from decimal import Decimal  # noqa: F401
+import dataclasses
+from pkgs.serialization import serial_class
+from ... import async_batch_t
+from ... import entity_t
+from ... import identifier_t
+
+__all__: list[str] = [
+    "Arguments",
+    "Data",
+    "ENDPOINT_METHOD",
+    "ENDPOINT_PATH",
+]
+
+ENDPOINT_METHOD = "POST"
+ENDPOINT_PATH = "api/external/entity/grant_entity_permissions"
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.api.entity.grant_entity_permissions.Arguments",
+)
+@dataclasses.dataclass(kw_only=True)
+class Arguments:
+    entity_type: entity_t.LimitedEntityType
+    entity_key: identifier_t.IdentifierKey
+    permission_types: list[entity_t.GrantableEntityPermissionType]
+    user_keys: typing.Optional[list[identifier_t.IdentifierKey]] = None
+    user_group_keys: typing.Optional[list[identifier_t.IdentifierKey]] = None
+    all_users: typing.Optional[bool] = None
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.api.entity.grant_entity_permissions.Data",
+)
+@dataclasses.dataclass(kw_only=True)
+class Data(async_batch_t.AsyncBatchActionReturn):
+    pass
+# DO NOT MODIFY -- This file is generated by type_spec
uncountable/types/api/recipes/get_column_calculation_values.py
ADDED

@@ -0,0 +1,58 @@
+# DO NOT MODIFY -- This file is generated by type_spec
+# flake8: noqa: F821
+# ruff: noqa: E402 Q003
+# fmt: off
+# isort: skip_file
+from __future__ import annotations
+import typing  # noqa: F401
+import datetime  # noqa: F401
+from decimal import Decimal  # noqa: F401
+import dataclasses
+from pkgs.serialization import serial_class
+from ... import base_t
+from ... import identifier_t
+
+__all__: list[str] = [
+    "Arguments",
+    "Data",
+    "ENDPOINT_METHOD",
+    "ENDPOINT_PATH",
+    "RecipeColumnCalculationValue",
+]
+
+ENDPOINT_METHOD = "GET"
+ENDPOINT_PATH = "api/external/recipes/get_column_calculation_values"
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.api.recipes.get_column_calculation_values.Arguments",
+)
+@dataclasses.dataclass(kw_only=True)
+class Arguments:
+    recipe_keys: list[identifier_t.IdentifierKey]
+    calculation_key: identifier_t.IdentifierKey
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.api.recipes.get_column_calculation_values.RecipeColumnCalculationValue",
+    to_string_values={"calculation_quantity"},
+)
+@dataclasses.dataclass(kw_only=True)
+class RecipeColumnCalculationValue:
+    recipe_id: base_t.ObjectId
+    ingredient_id: base_t.ObjectId
+    recipe_step_id: base_t.ObjectId
+    recipe_input_id: base_t.ObjectId
+    calculation_quantity: typing.Optional[Decimal] = None
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.api.recipes.get_column_calculation_values.Data",
+)
+@dataclasses.dataclass(kw_only=True)
+class Data:
+    values: list[RecipeColumnCalculationValue]
+# DO NOT MODIFY -- This file is generated by type_spec
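RecipeColumnCalculationValue lists calculation_quantity in to_string_values, so the Decimal travels over the wire as a string rather than a lossy float. A stdlib illustration of why that matters (the serial_class machinery itself is not shown here):

import json
from decimal import Decimal

qty = Decimal("1.1") + Decimal("2.2")
print(qty)        # 3.3 -- exact decimal arithmetic
print(1.1 + 2.2)  # 3.3000000000000003 -- float drift
print(json.dumps({"calculation_quantity": str(qty)}))  # {"calculation_quantity": "3.3"}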
uncountable/types/async_batch_processor.py
CHANGED

@@ -15,7 +15,9 @@ from uncountable.types import base_t
 import uncountable.types.api.recipes.clear_recipe_outputs as clear_recipe_outputs_t
 import uncountable.types.api.recipes.create_recipe as create_recipe_t
 import uncountable.types.api.recipes.edit_recipe_inputs as edit_recipe_inputs_t
+from uncountable.types import entity_t
 from uncountable.types import generic_upload_t
+import uncountable.types.api.entity.grant_entity_permissions as grant_entity_permissions_t
 from uncountable.types import identifier_t
 import uncountable.types.api.uploader.invoke_uploader as invoke_uploader_t
 from uncountable.types import recipe_identifiers_t

@@ -230,6 +232,47 @@ class AsyncBatchProcessorBase(ABC):
             batch_reference=req.batch_reference,
         )

+    def grant_entity_permissions(
+        self,
+        *,
+        entity_type: entity_t.LimitedEntityType,
+        entity_key: identifier_t.IdentifierKey,
+        permission_types: list[entity_t.GrantableEntityPermissionType],
+        user_keys: typing.Optional[list[identifier_t.IdentifierKey]] = None,
+        user_group_keys: typing.Optional[list[identifier_t.IdentifierKey]] = None,
+        all_users: typing.Optional[bool] = None,
+        depends_on: typing.Optional[list[str]] = None,
+    ) -> async_batch_t.QueuedAsyncBatchRequest:
+        """Grant entity permissions to a list of users or user groups or to all users.
+
+        :param depends_on: A list of batch reference keys to process before processing this request
+        """
+        args = grant_entity_permissions_t.Arguments(
+            entity_type=entity_type,
+            entity_key=entity_key,
+            permission_types=permission_types,
+            user_keys=user_keys,
+            user_group_keys=user_group_keys,
+            all_users=all_users,
+        )
+        json_data = serialize_for_api(args)
+
+        batch_reference = str(uuid.uuid4())
+
+        req = async_batch_t.AsyncBatchRequest(
+            path=async_batch_t.AsyncBatchRequestPath.GRANT_ENTITY_PERMISSIONS,
+            data=json_data,
+            depends_on=depends_on,
+            batch_reference=batch_reference,
+        )
+
+        self._enqueue(req)
+
+        return async_batch_t.QueuedAsyncBatchRequest(
+            path=req.path,
+            batch_reference=req.batch_reference,
+        )
+
     def invoke_uploader(
         self,
         *,
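Like the other batch actions, grant_entity_permissions assigns a uuid4 batch_reference and accepts depends_on, so a grant can be ordered after the request that creates the entity. A stdlib-only sketch of that queueing pattern (the dataclass below is illustrative, not the SDK's AsyncBatchRequest):

import uuid
from dataclasses import dataclass, field

@dataclass(kw_only=True)
class SketchRequest:
    path: str
    depends_on: list[str] | None = None
    batch_reference: str = field(default_factory=lambda: str(uuid.uuid4()))

create = SketchRequest(path="entity/create_entity")
grant = SketchRequest(
    path="entity/grant_entity_permissions",
    depends_on=[create.batch_reference],  # processed only after the create request
)
print(grant.depends_on == [create.batch_reference])  # True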
uncountable/types/async_batch_t.py
CHANGED

@@ -34,6 +34,7 @@ class AsyncBatchRequestPath(StrEnum):
     INVOKE_UPLOADER = "uploader/invoke_uploader"
     ASSOCIATE_RECIPE_AS_INPUT = "recipes/associate_recipe_as_input"
     CLEAR_RECIPE_OUTPUTS = "recipes/clear_recipe_outputs"
+    GRANT_ENTITY_PERMISSIONS = "entity/grant_entity_permissions"


 # DO NOT MODIFY -- This file is generated by type_spec