UncountablePythonSDK 0.0.74-py3-none-any.whl → 0.0.76-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of UncountablePythonSDK might be problematic.
Files changed (35)
  1. {UncountablePythonSDK-0.0.74.dist-info → UncountablePythonSDK-0.0.76.dist-info}/METADATA +2 -2
  2. {UncountablePythonSDK-0.0.74.dist-info → UncountablePythonSDK-0.0.76.dist-info}/RECORD +35 -29
  3. {UncountablePythonSDK-0.0.74.dist-info → UncountablePythonSDK-0.0.76.dist-info}/WHEEL +1 -1
  4. docs/requirements.txt +1 -1
  5. examples/async_batch.py +3 -3
  6. pkgs/serialization/serial_class.py +3 -0
  7. pkgs/type_spec/builder.py +7 -8
  8. pkgs/type_spec/emit_python.py +6 -6
  9. pkgs/type_spec/emit_typescript.py +7 -237
  10. pkgs/type_spec/emit_typescript_util.py +235 -0
  11. uncountable/core/environment.py +15 -2
  12. uncountable/integration/construct_client.py +3 -4
  13. uncountable/integration/queue_runner/job_scheduler.py +1 -1
  14. uncountable/integration/queue_runner/worker.py +17 -22
  15. uncountable/integration/scan_profiles.py +39 -15
  16. uncountable/integration/server.py +4 -12
  17. uncountable/integration/telemetry.py +2 -2
  18. uncountable/integration/webhook_server/entrypoint.py +4 -10
  19. uncountable/types/__init__.py +8 -0
  20. uncountable/types/api/entity/create_entities.py +4 -2
  21. uncountable/types/api/entity/create_entity.py +4 -2
  22. uncountable/types/api/entity/grant_entity_permissions.py +48 -0
  23. uncountable/types/api/recipes/get_column_calculation_values.py +58 -0
  24. uncountable/types/async_batch_processor.py +43 -0
  25. uncountable/types/async_batch_t.py +1 -0
  26. uncountable/types/auth_retrieval.py +12 -0
  27. uncountable/types/auth_retrieval_t.py +75 -0
  28. uncountable/types/client_base.py +62 -4
  29. uncountable/types/entity.py +3 -0
  30. uncountable/types/entity_t.py +20 -0
  31. uncountable/types/integration_server.py +9 -0
  32. uncountable/types/integration_server_t.py +37 -0
  33. uncountable/types/job_definition.py +1 -5
  34. uncountable/types/job_definition_t.py +20 -60
  35. {UncountablePythonSDK-0.0.74.dist-info → UncountablePythonSDK-0.0.76.dist-info}/top_level.txt +0 -0

pkgs/type_spec/emit_typescript_util.py
@@ -1,4 +1,5 @@
 import io
+import typing
 from dataclasses import dataclass, field
 
 from . import builder, util
@@ -8,6 +9,23 @@ INDENT = " "
 MODIFY_NOTICE = "// DO NOT MODIFY -- This file is generated by type_spec\n"
 
 
+base_name_map = {
+    builder.BaseTypeName.s_boolean: "boolean",
+    builder.BaseTypeName.s_date: "string",  # IMPROVE: Aliased DateStr
+    builder.BaseTypeName.s_date_time: "string",  # IMPROVE: Aliased DateTimeStr
+    # Decimal's are marked as to_string_values thus are strings in the front-end
+    builder.BaseTypeName.s_decimal: "string",
+    builder.BaseTypeName.s_dict: "PartialRecord",
+    builder.BaseTypeName.s_integer: "number",
+    builder.BaseTypeName.s_lossy_decimal: "number",
+    builder.BaseTypeName.s_opaque_key: "string",
+    builder.BaseTypeName.s_none: "null",
+    builder.BaseTypeName.s_string: "string",
+    # UNC: global types
+    builder.BaseTypeName.s_json_value: "JsonValue",
+}
+
+
 @dataclass(kw_only=True)
 class EmitTypescriptContext:
     out: io.StringIO
@@ -28,3 +46,220 @@ def ts_name(name: str, name_case: builder.NameCase) -> str:
         return name
     bits = util.split_any_name(name)
     return "".join([bits[0], *[x.title() for x in bits[1:]]])
+
+
+def emit_value_ts(
+    ctx: EmitTypescriptContext, stype: builder.SpecType, value: typing.Any
+) -> str:
+    """Mimics emit_python even if not all types are used in TypeScript yet"""
+    literal = builder.unwrap_literal_type(stype)
+    if literal is not None:
+        return emit_value_ts(ctx, literal.value_type, literal.value)
+
+    if stype.is_base_type(builder.BaseTypeName.s_string):
+        assert isinstance(value, str)
+        return util.encode_common_string(value)
+    elif stype.is_base_type(builder.BaseTypeName.s_integer):
+        assert isinstance(value, int)
+        return str(value)
+    elif stype.is_base_type(builder.BaseTypeName.s_boolean):
+        assert isinstance(value, bool)
+        return "true" if value else "false"
+    elif stype.is_base_type(builder.BaseTypeName.s_lossy_decimal):
+        return str(value)
+    elif stype.is_base_type(builder.BaseTypeName.s_decimal):
+        return f"'{value}'"
+    elif isinstance(stype, builder.SpecTypeInstance):
+        if stype.defn_type.is_base_type(builder.BaseTypeName.s_list):
+            sub_type = stype.parameters[0]
+            return (
+                "[" + ", ".join([emit_value_ts(ctx, sub_type, x) for x in value]) + "]"
+            )
+
+        if stype.defn_type.is_base_type(builder.BaseTypeName.s_dict):
+            key_type = stype.parameters[0]
+            value_type = stype.parameters[1]
+            return (
+                "{\n\t"
+                + ",\n\t".join(
+                    "["
+                    + emit_value_ts(ctx, key_type, dkey)
+                    + "]: "
+                    + emit_value_ts(ctx, value_type, dvalue)
+                    for dkey, dvalue in value.items()
+                )
+                + "\n}"
+            )
+
+        if stype.defn_type.is_base_type(builder.BaseTypeName.s_optional):
+            sub_type = stype.parameters[0]
+            if value is None:
+                return "null"
+            return emit_value_ts(ctx, sub_type, value)
+
+    elif isinstance(stype, builder.SpecTypeDefnStringEnum):
+        return f"{refer_to(ctx, stype)}.{ts_enum_name(value, stype.name_case)}"
+
+    raise Exception("invalid constant type", value, stype)
+
+
+def emit_type_ts(ctx: EmitTypescriptContext, stype: builder.SpecType) -> None:
+    if not isinstance(stype, builder.SpecTypeDefn):
+        return
+
+    if stype.is_base or stype.is_predefined:
+        return
+
+    ctx.out.write("\n")
+    ctx.out.write(MODIFY_NOTICE)
+
+    if isinstance(stype, builder.SpecTypeDefnExternal):
+        assert not stype.is_exported, "expecting private names"
+        ctx.out.write(stype.external_map["ts"])
+        ctx.out.write("\n")
+        return
+
+    assert stype.is_exported, "expecting exported names"
+    if isinstance(stype, builder.SpecTypeDefnAlias):
+        ctx.out.write(f"export type {stype.name} = {refer_to(ctx, stype.alias)}\n")
+        return
+
+    if isinstance(stype, builder.SpecTypeDefnUnion):
+        ctx.out.write(
+            f"export type {stype.name} = {refer_to(ctx, stype.get_backing_type())}\n"
+        )
+        return
+
+    if isinstance(stype, builder.SpecTypeDefnStringEnum):
+        ctx.out.write(f"export enum {stype.name} {{\n")
+        assert stype.values
+        for name, entry in stype.values.items():
+            ctx.out.write(
+                f'{INDENT}{ts_enum_name(name, stype.name_case)} = "{entry.value}",\n'
+            )
+        ctx.out.write("}\n")
+        return
+
+    assert isinstance(stype, builder.SpecTypeDefnObject)
+    assert stype.base is not None
+
+    base_type = ""
+    if not stype.base.is_base:
+        base_type = f"{refer_to(ctx, stype.base)} & "
+
+    if stype.properties is None and base_type == "":
+        ctx.out.write(f"export type {stype.name} = TEmpty\n")
+    elif stype.properties is None:
+        ctx.out.write(f"export type {stype.name} = {base_type}{{}}\n")
+    else:
+        if isinstance(stype, builder.SpecTypeDefnObject) and len(stype.parameters) > 0:
+            full_type_name = f'{stype.name}<{", ".join(stype.parameters)}>'
+        else:
+            full_type_name = stype.name
+        ctx.out.write(f"export type {full_type_name} = {base_type}{{")
+        ctx.out.write("\n")
+        for prop in stype.properties.values():
+            ref_type = refer_to(ctx, prop.spec_type)
+            prop_name = ts_name(prop.name, prop.name_case)
+            if prop.has_default and not prop.parse_require:
+                # For now, we'll assume the generated types with defaults are meant as
+                # arguments, thus treat like extant==missing
+                # IMPROVE: if we can decide they are meant as output instead, then
+                # they should be marked as required
+                ctx.out.write(f"{INDENT}{prop_name}?: {ref_type}")
+            elif prop.extant == builder.PropertyExtant.missing:
+                # Unlike optional below, missing does not imply null is possible. They
+                # treated distinctly.
+                ctx.out.write(f"{INDENT}{prop_name}?: {ref_type}")
+            elif prop.extant == builder.PropertyExtant.optional:
+                # Need to add in |null since Python side can produce null's right now
+                # IMPROVE: It would be better if the serializer could instead omit the None's
+                # Dropping the null should be forward compatible
+                ctx.out.write(f"{INDENT}{prop_name}?: {ref_type} | null")
+            else:
+                ctx.out.write(f"{INDENT}{prop_name}: {ref_type}")
+            ctx.out.write("\n")
+        ctx.out.write("}\n")
+
+
+def refer_to(ctx: EmitTypescriptContext, stype: builder.SpecType) -> str:
+    return refer_to_impl(ctx, stype)[0]
+
+
+def refer_to_impl(
+    ctx: EmitTypescriptContext, stype: builder.SpecType
+) -> tuple[str, bool]:
+    """
+    @return (string-specific, multiple-types)
+    """
+    if isinstance(stype, builder.SpecTypeInstance):
+        if stype.defn_type.name == builder.BaseTypeName.s_list:
+            spec, multi = refer_to_impl(ctx, stype.parameters[0])
+            return f"({spec})[]" if multi else f"{spec}[]", False
+        if stype.defn_type.name == builder.BaseTypeName.s_readonly_array:
+            spec, multi = refer_to_impl(ctx, stype.parameters[0])
+            return f"readonly ({spec})[]" if multi else f"readonly {spec}[]", False
+        if stype.defn_type.name == builder.BaseTypeName.s_union:
+            return (
+                f'({" | ".join([refer_to(ctx, p) for p in stype.parameters])})',
+                False,
+            )
+        if stype.defn_type.name == builder.BaseTypeName.s_literal:
+            parts = []
+            for parameter in stype.parameters:
+                assert isinstance(parameter, builder.SpecTypeLiteralWrapper)
+                parts.append(refer_to(ctx, parameter))
+            return f'({" | ".join(parts)})', False
+        if stype.defn_type.name == builder.BaseTypeName.s_optional:
+            return f"{refer_to(ctx, stype.parameters[0])} | null", True
+        if stype.defn_type.name == builder.BaseTypeName.s_tuple:
+            return f"[{", ".join([refer_to(ctx, p) for p in stype.parameters])}]", False
+        params = ", ".join([refer_to(ctx, p) for p in stype.parameters])
+        return f"{refer_to(ctx, stype.defn_type)}<{params}>", False
+
+    if isinstance(stype, builder.SpecTypeLiteralWrapper):
+        return emit_value_ts(ctx, stype.value_type, stype.value), False
+
+    if isinstance(stype, builder.SpecTypeGenericParameter):
+        return stype.name, False
+
+    assert isinstance(stype, builder.SpecTypeDefn)
+    if stype.is_base:  # assume correct namespace
+        if stype.name == builder.BaseTypeName.s_list:
+            return "any[]", False  # TODO: generic type
+        return base_name_map[builder.BaseTypeName(stype.name)], False
+
+    if stype.namespace == ctx.namespace:
+        return stype.name, False
+
+    ctx.namespaces.add(stype.namespace)
+    return f"{resolve_namespace_ref(stype.namespace)}.{stype.name}", False
+
+
+def ts_enum_name(name: str, name_case: builder.NameCase) -> str:
+    if name_case == builder.NameCase.js_upper:
+        return name.upper()
+    return ts_name(name, name_case)
+
+
+def resolve_namespace_name(namespace: builder.SpecNamespace) -> str:
+    return namespace.name
+
+
+def emit_namespace_imports_ts(
+    namespaces: set[builder.SpecNamespace],
+    out: io.StringIO,
+    current_namespace: builder.SpecNamespace,
+) -> None:
+    for ns in sorted(
+        namespaces,
+        key=lambda name: resolve_namespace_name(name),
+    ):
+        import_as = resolve_namespace_ref(ns)
+        import_path = (
+            "./"
+            if len(current_namespace.path) == 1
+            else "../" * (len(current_namespace.path) - 1)
+        )
+        import_from = f"{import_path}{resolve_namespace_name(ns)}"
+        out.write(f'import * as {import_as} from "{import_from}"\n')  # noqa: E501
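
The relative-import rule in emit_namespace_imports_ts above depends only on the depth of the importing namespace: a top-level namespace imports siblings via "./", while deeper namespaces climb with repeated "../". A minimal standalone sketch of that rule (using a plain depth integer instead of a builder.SpecNamespace, an illustration-only substitution):

    # Mirrors the import_path computation from emit_namespace_imports_ts.
    def import_prefix(namespace_depth: int) -> str:
        return "./" if namespace_depth == 1 else "../" * (namespace_depth - 1)

    assert import_prefix(1) == "./"      # foo.ts imports "./other"
    assert import_prefix(3) == "../../"  # a/b/c.ts imports "../../other"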

uncountable/core/environment.py
@@ -2,6 +2,8 @@ import functools
 import os
 from importlib.metadata import PackageNotFoundError, version
 
+from uncountable.types import integration_server_t
+
 
 @functools.cache
 def get_version() -> str:
@@ -12,8 +14,8 @@ def get_version() -> str:
     return version_str
 
 
-def get_integration_env() -> str | None:
-    return os.environ.get("UNC_INTEGRATION_ENV")
+def get_server_env() -> str | None:
+    return os.environ.get("UNC_SERVER_ENV")
 
 
 def get_webhook_server_port() -> int:
@@ -26,3 +28,14 @@ def get_local_admin_server_port() -> int:
 
 def get_otel_enabled() -> bool:
     return os.environ.get("UNC_OTEL_ENABLED") == "true"
+
+
+def get_profiles_module() -> str:
+    return os.environ["UNC_PROFILES_MODULE"]
+
+
+def get_integration_envs() -> list[integration_server_t.IntegrationEnvironment]:
+    return [
+        integration_server_t.IntegrationEnvironment(env)
+        for env in os.environ.get("UNC_INTEGRATION_ENVS", "prod").split(",")
+    ]
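
The new helpers mean a deployment selects its integration environments with UNC_INTEGRATION_ENVS (a comma-separated list, defaulting to "prod"), while UNC_SERVER_ENV replaces UNC_INTEGRATION_ENV as the server-level environment tag. A small usage sketch, assuming the SDK is installed and that the production enum member's value is the string "prod", as the default above implies:

    import os
    from uncountable.core import environment

    # Comma-separated IntegrationEnvironment values; "prod" is the default when unset.
    os.environ["UNC_INTEGRATION_ENVS"] = "prod"
    # Replaces the old UNC_INTEGRATION_ENV variable.
    os.environ["UNC_SERVER_ENV"] = "playground"

    print(environment.get_integration_envs())  # [IntegrationEnvironment.PROD], under the assumption above
    print(environment.get_server_env())        # "playground"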

uncountable/integration/construct_client.py
@@ -3,22 +3,21 @@ from uncountable.core.client import ClientConfig
 from uncountable.core.types import AuthDetailsAll, AuthDetailsOAuth
 from uncountable.integration.secret_retrieval.retrieve_secret import retrieve_secret
 from uncountable.integration.telemetry import JobLogger
+from uncountable.types import auth_retrieval_t
 from uncountable.types.job_definition_t import (
-    AuthRetrievalBasic,
-    AuthRetrievalOAuth,
     ProfileMetadata,
 )
 
 
 def _construct_auth_details(profile_meta: ProfileMetadata) -> AuthDetailsAll:
     match profile_meta.auth_retrieval:
-        case AuthRetrievalOAuth():
+        case auth_retrieval_t.AuthRetrievalOAuth():
             refresh_token = retrieve_secret(
                 profile_meta.auth_retrieval.refresh_token_secret,
                 profile_metadata=profile_meta,
             )
             return AuthDetailsOAuth(refresh_token=refresh_token)
-        case AuthRetrievalBasic():
+        case auth_retrieval_t.AuthRetrievalBasic():
             api_id = retrieve_secret(
                 profile_meta.auth_retrieval.api_id_secret, profile_metadata=profile_meta
             )

uncountable/integration/queue_runner/job_scheduler.py
@@ -55,7 +55,7 @@ def _start_workers(
     job_worker_lookup: dict[str, Worker] = {}
     job_definition_lookup: dict[str, job_definition_t.JobDefinition] = {}
     for profile in profiles:
-        for job_definition in profile.definition.jobs:
+        for job_definition in profile.jobs:
             job_definition_lookup[job_definition.id] = job_definition
             job_worker_key = _get_job_worker_key(job_definition, profile.name)
             if job_worker_key not in job_queue_worker_lookup:

uncountable/integration/queue_runner/worker.py
@@ -51,16 +51,11 @@ class RegisteredJobDetails:
 
 def get_registered_job_details(job_ref_name: str) -> RegisteredJobDetails:
     profiles = load_profiles()
-    for profile in profiles:
-        for job_definition in profile.definition.jobs:
+    for profile_metadata in profiles:
+        for job_definition in profile_metadata.jobs:
             if job_definition.id == job_ref_name:
                 return RegisteredJobDetails(
-                    profile_metadata=job_definition_t.ProfileMetadata(
-                        name=profile.name,
-                        base_url=profile.definition.base_url,
-                        auth_retrieval=profile.definition.auth_retrieval,
-                        client_options=profile.definition.client_options,
-                    ),
+                    profile_metadata=profile_metadata,
                     job_definition=job_definition,
                 )
     raise Exception(f"profile not found for job {job_ref_name}")
@@ -86,23 +81,23 @@ def run_queued_job(
         profile_metadata=job_details.profile_metadata,
         job_definition=job_details.job_definition,
     )
-    client = construct_uncountable_client(
-        profile_meta=job_details.profile_metadata, job_logger=job_logger
-    )
-    batch_processor = AsyncBatchProcessor(client=client)
+    try:
+        client = construct_uncountable_client(
+            profile_meta=job_details.profile_metadata, job_logger=job_logger
+        )
+        batch_processor = AsyncBatchProcessor(client=client)
 
-    payload = _resolve_queued_job_payload(queued_job)
+        payload = _resolve_queued_job_payload(queued_job)
 
-    args = JobArguments(
-        job_definition=job_details.job_definition,
-        client=client,
-        batch_processor=batch_processor,
-        profile_metadata=job_details.profile_metadata,
-        logger=job_logger,
-        payload=payload,
-    )
+        args = JobArguments(
+            job_definition=job_details.job_definition,
+            client=client,
+            batch_processor=batch_processor,
+            profile_metadata=job_details.profile_metadata,
+            logger=job_logger,
+            payload=payload,
+        )
 
-    try:
         return execute_job(
             args=args,
             profile_metadata=job_details.profile_metadata,

uncountable/integration/scan_profiles.py
@@ -1,27 +1,26 @@
 import functools
-import os
-from dataclasses import dataclass
 from importlib import resources
 
 from pkgs.argument_parser import CachedParser
-from uncountable.types import job_definition_t
+from uncountable.core import environment
+from uncountable.types import integration_server_t, job_definition_t
 
 profile_parser = CachedParser(job_definition_t.ProfileDefinition)
 
-
-@dataclass(kw_only=True)
-class ProfileDetails:
-    name: str
-    definition: job_definition_t.ProfileDefinition
+_DEFAULT_PROFILE_ENV = integration_server_t.IntegrationEnvironment.PROD
+_IGNORED_PROFILE_FOLDERS = ["__pycache__"]
 
 
 @functools.cache
-def load_profiles() -> list[ProfileDetails]:
-    profiles_module = os.environ["UNC_PROFILES_MODULE"]
+def load_profiles() -> list[job_definition_t.ProfileMetadata]:
+    profiles_module = environment.get_profiles_module()
+    integration_envs = environment.get_integration_envs()
     profiles = [
-        entry for entry in resources.files(profiles_module).iterdir() if entry.is_dir()
+        entry
+        for entry in resources.files(profiles_module).iterdir()
+        if entry.is_dir() and entry.name not in _IGNORED_PROFILE_FOLDERS
     ]
-    profile_details: list[ProfileDetails] = []
+    profile_details: list[job_definition_t.ProfileMetadata] = []
     seen_job_ids: set[str] = set()
     for profile_file in profiles:
         profile_name = profile_file.name
@@ -34,9 +33,34 @@ def load_profiles() -> list[ProfileDetails]:
                 if job.id in seen_job_ids:
                     raise Exception(f"multiple jobs with id {job.id}")
                 seen_job_ids.add(job.id)
-            profile_details.append(
-                ProfileDetails(name=profile_name, definition=definition)
-            )
+
+            if definition.environments is not None:
+                for integration_env in integration_envs:
+                    environment_config = definition.environments.get(integration_env)
+                    if environment_config is not None:
+                        profile_details.append(
+                            job_definition_t.ProfileMetadata(
+                                name=profile_name,
+                                jobs=definition.jobs,
+                                base_url=environment_config.base_url,
+                                auth_retrieval=environment_config.auth_retrieval,
+                                client_options=environment_config.client_options,
+                            )
+                        )
+            elif _DEFAULT_PROFILE_ENV in integration_envs:
+                assert (
+                    definition.base_url is not None
+                    and definition.auth_retrieval is not None
+                ), f"define environments in profile.yaml for {profile_name}"
+                profile_details.append(
+                    job_definition_t.ProfileMetadata(
+                        name=profile_name,
+                        jobs=definition.jobs,
+                        base_url=definition.base_url,
+                        auth_retrieval=definition.auth_retrieval,
+                        client_options=definition.client_options,
+                    )
+                )
         except FileNotFoundError as e:
             print(f"WARN: profile.yaml not found for {profile_name}", e)
             continue
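
In short, for each environment requested via UNC_INTEGRATION_ENVS a profile contributes one ProfileMetadata from its environments mapping when a config exists for that environment; a profile without an environments mapping only loads when the default prod environment is requested, falling back to its top-level base_url and auth_retrieval. A simplified sketch of that selection rule using plain dicts (the real code operates on the parsed ProfileDefinition, so these shapes are illustrative only):

    # Illustrative stand-in for the per-environment selection in load_profiles().
    def select_configs(definition: dict, requested_envs: list[str]) -> list[dict]:
        environments = definition.get("environments")
        if environments is not None:
            # One config per requested environment that the profile actually defines.
            return [environments[env] for env in requested_envs if env in environments]
        if "prod" in requested_envs:
            # Legacy single-environment profile: top-level connection settings are required.
            assert definition.get("base_url") is not None
            assert definition.get("auth_retrieval") is not None
            return [{key: definition.get(key) for key in ("base_url", "auth_retrieval", "client_options")}]
        return []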

uncountable/integration/server.py
@@ -12,12 +12,10 @@ from opentelemetry.trace import get_current_span
 from sqlalchemy.engine.base import Engine
 
 from uncountable.integration.cron import CronJobArgs, cron_job_executor
-from uncountable.integration.scan_profiles import ProfileDetails
 from uncountable.integration.telemetry import Logger
-from uncountable.types import base_t
+from uncountable.types import base_t, job_definition_t
 from uncountable.types.job_definition_t import (
     CronJobDefinition,
-    ProfileMetadata,
     WebhookJobDefinition,
 )
 
@@ -38,17 +36,11 @@ class IntegrationServer:
         )
         self._server_logger = Logger(get_current_span())
 
-    def register_jobs(self, profiles: list[ProfileDetails]) -> None:
+    def register_jobs(self, profiles: list[job_definition_t.ProfileMetadata]) -> None:
         valid_job_ids = []
-        for profile in profiles:
-            for job_defn in profile.definition.jobs:
+        for profile_metadata in profiles:
+            for job_defn in profile_metadata.jobs:
                 valid_job_ids.append(job_defn.id)
-                profile_metadata = ProfileMetadata(
-                    name=profile.name,
-                    auth_retrieval=profile.definition.auth_retrieval,
-                    base_url=profile.definition.base_url,
-                    client_options=profile.definition.client_options,
-                )
                 match job_defn:
                     case CronJobDefinition():
                         # Add to ap scheduler

uncountable/integration/telemetry.py
@@ -21,8 +21,8 @@ from opentelemetry.sdk.trace.export import (
 from opentelemetry.trace import DEFAULT_TRACE_OPTIONS, Span, Tracer
 
 from uncountable.core.environment import (
-    get_integration_env,
     get_otel_enabled,
+    get_server_env,
     get_version,
 )
 from uncountable.types import base_t, job_definition_t
@@ -41,7 +41,7 @@ def get_otel_resource() -> Resource:
     unc_version = os.environ.get("UNC_VERSION")
     if unc_version is not None:
         attributes["service.version"] = unc_version
-    unc_env = get_integration_env()
+    unc_env = get_server_env()
     if unc_env is not None:
         attributes["deployment.environment"] = unc_env
     resource = Resource.create(attributes=_cast_attributes(attributes))

uncountable/integration/webhook_server/entrypoint.py
@@ -8,8 +8,8 @@ from flask.typing import ResponseReturnValue
 from flask.wrappers import Response
 from opentelemetry.trace import get_current_span
 from uncountable.core.environment import (
-    get_integration_env,
     get_local_admin_server_port,
+    get_server_env,
     get_webhook_server_port,
 )
 from uncountable.integration.queue_runner.command_server.command_client import (
@@ -143,15 +143,9 @@ def register_route(
 
 def main() -> None:
     profiles = load_profiles()
-    for profile in profiles:
+    for profile_metadata in profiles:
         server_logger = Logger(get_current_span())
-        profile_metadata = job_definition_t.ProfileMetadata(
-            name=profile.name,
-            auth_retrieval=profile.definition.auth_retrieval,
-            base_url=profile.definition.base_url,
-            client_options=profile.definition.client_options,
-        )
-        for job in profile.definition.jobs:
+        for job in profile_metadata.jobs:
             if isinstance(job, job_definition_t.WebhookJobDefinition):
                 register_route(
                     server_logger=server_logger, profile_meta=profile_metadata, job=job
@@ -165,6 +159,6 @@ if __name__ == "__main__":
     app.run(
         host="0.0.0.0",
         port=get_webhook_server_port(),
-        debug=get_integration_env() == "playground",
+        debug=get_server_env() == "playground",
         exclude_patterns=[],
     )

uncountable/types/__init__.py
@@ -9,6 +9,7 @@ from .api.equipment import associate_equipment_input as associate_equipment_inpu
 from .api.recipes import associate_recipe_as_input as associate_recipe_as_input_t
 from .api.recipes import associate_recipe_as_lot as associate_recipe_as_lot_t
 from . import async_batch_t as async_batch_t
+from . import auth_retrieval_t as auth_retrieval_t
 from . import base_t as base_t
 from . import calculations_t as calculations_t
 from . import chemical_structure_t as chemical_structure_t
@@ -31,6 +32,7 @@ from . import experiment_groups_t as experiment_groups_t
 from . import field_values_t as field_values_t
 from . import fields_t as fields_t
 from . import generic_upload_t as generic_upload_t
+from .api.recipes import get_column_calculation_values as get_column_calculation_values_t
 from .api.recipes import get_curve as get_curve_t
 from .api.entity import get_entities_data as get_entities_data_t
 from .api.inputs import get_input_data as get_input_data_t
@@ -47,10 +49,12 @@ from .api.recipe_metadata import get_recipe_metadata_data as get_recipe_metadata
 from .api.recipes import get_recipe_names as get_recipe_names_t
 from .api.recipes import get_recipe_output_metadata as get_recipe_output_metadata_t
 from .api.recipes import get_recipes_data as get_recipes_data_t
+from .api.entity import grant_entity_permissions as grant_entity_permissions_t
 from . import id_source_t as id_source_t
 from . import identifier_t as identifier_t
 from . import input_attributes_t as input_attributes_t
 from . import inputs_t as inputs_t
+from . import integration_server_t as integration_server_t
 from .api.uploader import invoke_uploader as invoke_uploader_t
 from . import job_definition_t as job_definition_t
 from .api.entity import list_entities as list_entities_t
@@ -110,6 +114,7 @@ __all__: list[str] = [
     "associate_recipe_as_input_t",
     "associate_recipe_as_lot_t",
     "async_batch_t",
+    "auth_retrieval_t",
     "base_t",
     "calculations_t",
     "chemical_structure_t",
@@ -132,6 +137,7 @@ __all__: list[str] = [
     "field_values_t",
     "fields_t",
     "generic_upload_t",
+    "get_column_calculation_values_t",
     "get_curve_t",
     "get_entities_data_t",
     "get_input_data_t",
@@ -148,10 +154,12 @@ __all__: list[str] = [
     "get_recipe_names_t",
     "get_recipe_output_metadata_t",
     "get_recipes_data_t",
+    "grant_entity_permissions_t",
     "id_source_t",
     "identifier_t",
     "input_attributes_t",
     "inputs_t",
+    "integration_server_t",
     "invoke_uploader_t",
     "job_definition_t",
     "list_entities_t",

uncountable/types/api/entity/create_entities.py
@@ -12,6 +12,7 @@ from pkgs.serialization import serial_class
 from ... import base_t
 from ... import entity_t
 from ... import field_values_t
+from ... import identifier_t
 
 __all__: list[str] = [
     "Arguments",
@@ -40,9 +41,10 @@ class EntityToCreate:
 )
 @dataclasses.dataclass(kw_only=True)
 class Arguments:
-    definition_id: base_t.ObjectId
-    entity_type: typing.Union[typing.Literal[entity_t.EntityType.LAB_REQUEST], typing.Literal[entity_t.EntityType.APPROVAL], typing.Literal[entity_t.EntityType.CUSTOM_ENTITY], typing.Literal[entity_t.EntityType.INVENTORY_AMOUNT], typing.Literal[entity_t.EntityType.TASK], typing.Literal[entity_t.EntityType.PROJECT], typing.Literal[entity_t.EntityType.EQUIPMENT], typing.Literal[entity_t.EntityType.INV_LOCAL_LOCATIONS], typing.Literal[entity_t.EntityType.FIELD_OPTION_SET], typing.Literal[entity_t.EntityType.WEBHOOK], typing.Literal[entity_t.EntityType.SPECS], typing.Literal[entity_t.EntityType.GOAL], typing.Literal[entity_t.EntityType.INGREDIENT_TAG_MAP], typing.Literal[entity_t.EntityType.INGREDIENT_TAG], typing.Literal[entity_t.EntityType.OUTPUT]]
+    entity_type: entity_t.LimitedEntityType
     entities_to_create: list[EntityToCreate]
+    definition_id: typing.Optional[base_t.ObjectId] = None
+    definition_key: typing.Optional[identifier_t.IdentifierKey] = None
 
 
 # DO NOT MODIFY -- This file is generated by type_spec

uncountable/types/api/entity/create_entity.py
@@ -12,6 +12,7 @@ from pkgs.serialization import serial_class
 from ... import base_t
 from ... import entity_t
 from ... import field_values_t
+from ... import identifier_t
 
 __all__: list[str] = [
     "Arguments",
@@ -43,8 +44,9 @@ class EntityFieldInitialValue:
 )
 @dataclasses.dataclass(kw_only=True)
 class Arguments:
-    definition_id: base_t.ObjectId
-    entity_type: typing.Union[typing.Literal[entity_t.EntityType.LAB_REQUEST], typing.Literal[entity_t.EntityType.APPROVAL], typing.Literal[entity_t.EntityType.CUSTOM_ENTITY], typing.Literal[entity_t.EntityType.INVENTORY_AMOUNT], typing.Literal[entity_t.EntityType.TASK], typing.Literal[entity_t.EntityType.PROJECT], typing.Literal[entity_t.EntityType.EQUIPMENT], typing.Literal[entity_t.EntityType.INV_LOCAL_LOCATIONS], typing.Literal[entity_t.EntityType.FIELD_OPTION_SET], typing.Literal[entity_t.EntityType.WEBHOOK], typing.Literal[entity_t.EntityType.SPECS], typing.Literal[entity_t.EntityType.GOAL], typing.Literal[entity_t.EntityType.INGREDIENT_TAG_MAP], typing.Literal[entity_t.EntityType.INGREDIENT_TAG], typing.Literal[entity_t.EntityType.OUTPUT]]
+    entity_type: entity_t.LimitedEntityType
+    definition_id: typing.Optional[base_t.ObjectId] = None
+    definition_key: typing.Optional[identifier_t.IdentifierKey] = None
     field_values: typing.Optional[typing.Optional[list[field_values_t.FieldRefNameValue]]] = None
 
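
With the regenerated Arguments, definition_id is no longer required: a caller can identify the entity definition by id or, via the newly imported identifier_t, by key. A hedged construction sketch (the create_entity_t module alias and an integer-valued ObjectId are assumptions; check the generated types and client_base.py for the actual call surface):

    from uncountable.types import create_entity_t, entity_t

    args = create_entity_t.Arguments(
        entity_type=entity_t.EntityType.TASK,  # TASK appears in the allowed entity types above
        definition_id=123,                     # now optional; assumes ObjectId accepts a plain int
        # definition_key=...,                  # alternatively, reference the definition by identifier key
    )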