UncountablePythonSDK 0.0.110-py3-none-any.whl → 0.0.112-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of UncountablePythonSDK might be problematic.
- docs/conf.py +2 -2
- docs/justfile +1 -1
- examples/create_ingredient_sdk.py +34 -0
- examples/integration-server/jobs/materials_auto/example_runsheet_wh.py +35 -0
- examples/integration-server/jobs/materials_auto/profile.yaml +9 -0
- examples/integration-server/pyproject.toml +2 -2
- pkgs/argument_parser/argument_parser.py +9 -6
- pkgs/type_spec/builder.py +19 -2
- pkgs/type_spec/emit_python.py +9 -3
- pkgs/type_spec/emit_typescript_util.py +16 -1
- pkgs/type_spec/parts/base.py.prepart +4 -0
- pkgs/type_spec/type_info/emit_type_info.py +12 -3
- pkgs/type_spec/ui_entry_actions/__init__.py +4 -0
- pkgs/type_spec/ui_entry_actions/generate_ui_entry_actions.py +294 -0
- pkgs/type_spec/value_spec/convert_type.py +13 -0
- uncountable/core/client.py +7 -4
- uncountable/integration/executors/generic_upload_executor.py +3 -2
- uncountable/integration/job.py +24 -1
- uncountable/integration/queue_runner/datastore/datastore_sqlite.py +3 -2
- uncountable/integration/scheduler.py +4 -3
- uncountable/types/__init__.py +4 -0
- uncountable/types/api/batch/execute_batch.py +4 -4
- uncountable/types/api/batch/execute_batch_load_async.py +2 -2
- uncountable/types/api/chemical/convert_chemical_formats.py +3 -3
- uncountable/types/api/condition_parameters/__init__.py +1 -0
- uncountable/types/api/condition_parameters/upsert_condition_match.py +72 -0
- uncountable/types/api/entity/create_entities.py +3 -3
- uncountable/types/api/entity/create_entity.py +3 -3
- uncountable/types/api/entity/create_or_update_entity.py +3 -2
- uncountable/types/api/entity/get_entities_data.py +3 -3
- uncountable/types/api/entity/grant_entity_permissions.py +3 -2
- uncountable/types/api/entity/list_entities.py +4 -4
- uncountable/types/api/entity/lock_entity.py +3 -2
- uncountable/types/api/entity/lookup_entity.py +5 -5
- uncountable/types/api/entity/resolve_entity_ids.py +3 -3
- uncountable/types/api/entity/set_entity_field_values.py +3 -2
- uncountable/types/api/entity/set_values.py +3 -2
- uncountable/types/api/entity/transition_entity_phase.py +5 -4
- uncountable/types/api/entity/unlock_entity.py +3 -2
- uncountable/types/api/equipment/associate_equipment_input.py +2 -2
- uncountable/types/api/field_options/upsert_field_options.py +4 -3
- uncountable/types/api/files/download_file.py +4 -3
- uncountable/types/api/id_source/list_id_source.py +3 -3
- uncountable/types/api/id_source/match_id_source.py +3 -3
- uncountable/types/api/input_groups/get_input_group_names.py +3 -3
- uncountable/types/api/inputs/create_inputs.py +6 -4
- uncountable/types/api/inputs/get_input_data.py +6 -6
- uncountable/types/api/inputs/get_input_names.py +3 -3
- uncountable/types/api/inputs/get_inputs_data.py +6 -6
- uncountable/types/api/inputs/set_input_attribute_values.py +3 -3
- uncountable/types/api/inputs/set_input_category.py +3 -2
- uncountable/types/api/inputs/set_input_subcategories.py +3 -2
- uncountable/types/api/inputs/set_intermediate_type.py +3 -2
- uncountable/types/api/material_families/update_entity_material_families.py +2 -2
- uncountable/types/api/outputs/get_output_data.py +6 -6
- uncountable/types/api/outputs/get_output_names.py +3 -3
- uncountable/types/api/outputs/resolve_output_conditions.py +5 -5
- uncountable/types/api/permissions/set_core_permissions.py +7 -6
- uncountable/types/api/project/get_projects.py +3 -3
- uncountable/types/api/project/get_projects_data.py +3 -3
- uncountable/types/api/recipe_links/create_recipe_link.py +3 -2
- uncountable/types/api/recipe_links/remove_recipe_link.py +3 -2
- uncountable/types/api/recipe_metadata/get_recipe_metadata_data.py +3 -3
- uncountable/types/api/recipes/add_recipe_to_project.py +3 -2
- uncountable/types/api/recipes/add_time_series_data.py +4 -3
- uncountable/types/api/recipes/archive_recipes.py +3 -2
- uncountable/types/api/recipes/associate_recipe_as_input.py +3 -2
- uncountable/types/api/recipes/associate_recipe_as_lot.py +3 -2
- uncountable/types/api/recipes/clear_recipe_outputs.py +3 -2
- uncountable/types/api/recipes/create_recipe.py +2 -2
- uncountable/types/api/recipes/create_recipes.py +4 -4
- uncountable/types/api/recipes/disassociate_recipe_as_input.py +3 -2
- uncountable/types/api/recipes/edit_recipe_inputs.py +18 -16
- uncountable/types/api/recipes/get_column_calculation_values.py +3 -3
- uncountable/types/api/recipes/get_curve.py +2 -2
- uncountable/types/api/recipes/get_recipe_calculations.py +3 -3
- uncountable/types/api/recipes/get_recipe_links.py +2 -2
- uncountable/types/api/recipes/get_recipe_names.py +3 -3
- uncountable/types/api/recipes/get_recipe_output_metadata.py +3 -3
- uncountable/types/api/recipes/get_recipes_data.py +11 -11
- uncountable/types/api/recipes/lock_recipes.py +4 -3
- uncountable/types/api/recipes/remove_recipe_from_project.py +3 -2
- uncountable/types/api/recipes/set_recipe_inputs.py +3 -3
- uncountable/types/api/recipes/set_recipe_metadata.py +3 -2
- uncountable/types/api/recipes/set_recipe_output_annotations.py +6 -6
- uncountable/types/api/recipes/set_recipe_output_file.py +3 -3
- uncountable/types/api/recipes/set_recipe_outputs.py +4 -4
- uncountable/types/api/recipes/set_recipe_tags.py +6 -6
- uncountable/types/api/recipes/unarchive_recipes.py +3 -2
- uncountable/types/api/recipes/unlock_recipes.py +3 -2
- uncountable/types/api/runsheet/__init__.py +1 -0
- uncountable/types/api/runsheet/complete_async_upload.py +41 -0
- uncountable/types/api/triggers/run_trigger.py +3 -2
- uncountable/types/api/uploader/invoke_uploader.py +2 -2
- uncountable/types/async_batch_processor.py +74 -0
- uncountable/types/async_batch_t.py +7 -5
- uncountable/types/auth_retrieval_t.py +4 -3
- uncountable/types/base_t.py +4 -0
- uncountable/types/calculations_t.py +1 -1
- uncountable/types/chemical_structure_t.py +2 -1
- uncountable/types/client_base.py +48 -0
- uncountable/types/client_config_t.py +2 -1
- uncountable/types/curves_t.py +2 -2
- uncountable/types/data_t.py +22 -21
- uncountable/types/entity_t.py +9 -3
- uncountable/types/experiment_groups_t.py +1 -1
- uncountable/types/field_values_t.py +20 -20
- uncountable/types/fields_t.py +1 -1
- uncountable/types/generic_upload_t.py +7 -6
- uncountable/types/id_source_t.py +5 -4
- uncountable/types/identifier_t.py +3 -3
- uncountable/types/input_attributes_t.py +1 -1
- uncountable/types/inputs_t.py +1 -1
- uncountable/types/integration_server_t.py +2 -1
- uncountable/types/job_definition_t.py +14 -13
- uncountable/types/outputs_t.py +1 -1
- uncountable/types/overrides_t.py +3 -2
- uncountable/types/phases_t.py +1 -1
- uncountable/types/queued_job_t.py +7 -7
- uncountable/types/recipe_identifiers_t.py +3 -3
- uncountable/types/recipe_links_t.py +1 -1
- uncountable/types/recipe_metadata_t.py +3 -3
- uncountable/types/recipe_output_metadata_t.py +1 -1
- uncountable/types/recipe_tags_t.py +1 -1
- uncountable/types/recipe_workflow_steps_t.py +5 -4
- uncountable/types/recipes_t.py +2 -1
- uncountable/types/response_t.py +2 -1
- uncountable/types/secret_retrieval_t.py +4 -3
- uncountable/types/units_t.py +1 -1
- uncountable/types/users_t.py +1 -1
- uncountable/types/webhook_job_t.py +4 -3
- uncountable/types/workflows_t.py +2 -2
- {uncountablepythonsdk-0.0.110.dist-info → uncountablepythonsdk-0.0.112.dist-info}/METADATA +2 -1
- {uncountablepythonsdk-0.0.110.dist-info → uncountablepythonsdk-0.0.112.dist-info}/RECORD +136 -128
- {uncountablepythonsdk-0.0.110.dist-info → uncountablepythonsdk-0.0.112.dist-info}/WHEEL +1 -1
- {uncountablepythonsdk-0.0.110.dist-info → uncountablepythonsdk-0.0.112.dist-info}/top_level.txt +0 -0
docs/conf.py  CHANGED
@@ -6,10 +6,10 @@
 # -- Project information -----------------------------------------------------
 # https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
 
-
+import datetime
 
 project = "Uncountable SDK"
-copyright = f"{
+copyright = f"{datetime.datetime.now(tz=datetime.UTC).date().year}, Uncountable Inc"
 author = "Uncountable Inc"
 
 # -- General configuration ---------------------------------------------------
docs/justfile  CHANGED

examples/create_ingredient_sdk.py  ADDED
@@ -0,0 +1,34 @@
+import os
+
+import uncountable.types.api.inputs.create_inputs as create_inputs
+from uncountable.core import AuthDetailsApiKey, Client
+from uncountable.types import field_values_t, inputs_t
+
+client = Client(
+    base_url="http://localhost:5000",
+    auth_details=AuthDetailsApiKey(
+        api_id=os.environ["UNC_API_ID"],
+        api_secret_key=os.environ["UNC_API_SECRET_KEY"],
+    ),
+)
+
+client.external_create_inputs(
+    inputs_to_create=[
+        create_inputs.InputToCreate(
+            name="sdk test ing",
+            material_family_ids=[1],
+            quantity_type=inputs_t.IngredientQuantityType.NUMERIC,
+            type=inputs_t.IngredientType.INGREDIENT,
+            field_values=[
+                field_values_t.FieldRefNameValue(
+                    field_ref_name="carrieTestNumericField",
+                    value="10",
+                ),
+                field_values_t.FieldRefNameValue(
+                    field_ref_name="carrieTestCheckboxField",
+                    value=True,
+                ),
+            ],
+        )
+    ]
+)
examples/integration-server/jobs/materials_auto/example_runsheet_wh.py  ADDED
@@ -0,0 +1,35 @@
+from io import BytesIO
+
+from uncountable.core.file_upload import DataFileUpload, FileUpload
+from uncountable.integration.job import JobArguments, RunsheetWebhookJob, register_job
+from uncountable.types import entity_t
+
+
+@register_job
+class StandardRunsheetGenerator(RunsheetWebhookJob):
+    def build_runsheet(
+        self,
+        *,
+        args: JobArguments,
+        entities: list[entity_t.Entity],
+    ) -> FileUpload:
+        args.logger.log_info(f"Generating runsheet for {len(entities)} entities")
+
+        content = []
+        content.append("STANDARD LAB RUNSHEET\n")
+        content.append("=" * 30 + "\n\n")
+
+        for entity in entities:
+            content.append(f"Type: {entity.type}\n")
+            content.append(f"ID: {entity.id}\n")
+
+            if hasattr(entity, "field_values") and entity.field_values:
+                content.append("Field Values:\n")
+                for field in entity.field_values:
+                    content.append(f" - {field.name}: {field.value}\n")
+
+            content.append("\n")
+
+        runsheet_data = "".join(content).encode("utf-8")
+
+        return DataFileUpload(name="lab_runsheet.txt", data=BytesIO(runsheet_data))
examples/integration-server/jobs/materials_auto/profile.yaml  CHANGED
@@ -41,3 +41,12 @@ jobs:
     executor:
       type: script
       import_path: example_wh
+  - id: example_runsheet_wh
+    type: webhook
+    name: Runsheet Webhook
+    signature_key_secret:
+      type: env
+      env_key: WH_RUNSHEET_SIGNATURE_KEY
+    executor:
+      type: script
+      import_path: example_runsheet_wh
examples/integration-server/pyproject.toml  CHANGED
@@ -16,7 +16,8 @@ dependencies = [
     "types-requests == 2.*",
     "types-simplejson == 3.*",
     "pandas-stubs",
-    "xlrd == 2.*"
+    "xlrd == 2.*",
+    "msgspec == 0.19.*"
 ]
 
 [tool.mypy]
@@ -221,4 +222,3 @@ max-locals=50
 
 [tool.setuptools]
 py-modules = []
-
pkgs/argument_parser/argument_parser.py  CHANGED
@@ -1,24 +1,25 @@
 from __future__ import annotations
 
 import dataclasses
+import datetime
 import math
 import types
 import typing
 from abc import ABC, abstractmethod
 from collections import defaultdict
-from datetime import date
+from datetime import date
 from decimal import Decimal
 from enum import Enum, auto
 from importlib import resources
 
 import dateutil.parser
+import msgspec.yaml
 
 from pkgs.serialization import (
     MissingSentryType,
     OpaqueKey,
     get_serial_class_data,
     get_serial_union_data,
-    yaml,
 )
 
 from ._is_enum import is_string_enum_class
@@ -335,10 +336,12 @@ def _build_parser_inner(
 
         return parse_int
 
-    if parsed_type is datetime:
+    if parsed_type is datetime.datetime:
 
         def parse_datetime(value: typing.Any) -> T:
-            if context.options.allow_direct_type and isinstance(
+            if context.options.allow_direct_type and isinstance(
+                value, datetime.datetime
+            ):
                 return value  # type: ignore
             return dateutil.parser.isoparse(value)  # type:ignore
 
@@ -561,11 +564,11 @@ class ParserBase(ABC, typing.Generic[T]):
 
     def parse_yaml_file(self, path: str) -> T:
        with open(path, encoding="utf-8") as data_in:
-            return self.parse_storage(yaml.
+            return self.parse_storage(msgspec.yaml.decode(data_in.read()))
 
     def parse_yaml_resource(self, package: resources.Package, resource: str) -> T:
         with resources.open_text(package, resource) as fp:
-            return self.parse_storage(yaml.
+            return self.parse_storage(msgspec.yaml.decode(fp.read()))
 
 
 class CachedParser(ParserBase[T], typing.Generic[T]):
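Note: parse_yaml_file and parse_yaml_resource now decode YAML through msgspec rather than the internal pkgs.serialization yaml wrapper, matching the new msgspec == 0.19.* dependency above. A minimal sketch of the new decode path, assuming only that msgspec is installed; the sample YAML is illustrative:

import msgspec.yaml

# msgspec.yaml.decode parses YAML text into plain Python objects; the SDK
# then validates that structure itself via parse_storage, so no msgspec
# Struct types are involved at this layer.
raw = msgspec.yaml.decode("name: example-job\nretries: 3\n")
assert raw == {"name": "example-job", "retries": 3}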
pkgs/type_spec/builder.py  CHANGED
@@ -624,7 +624,12 @@ class SpecTypeDefnUnion(SpecTypeDefn):
             prop_type = unwrap_literal_type(discriminator_type.spec_type)
             assert prop_type is not None
             assert prop_type.is_value_to_string()
-
+            value_type = prop_type.value_type
+            if isinstance(value_type, SpecTypeDefnStringEnum):
+                assert isinstance(prop_type.value, str)
+                discriminant = value_type.values[prop_type.value].value
+            else:
+                discriminant = str(prop_type.value)
             assert discriminant not in self.discriminator_map, (
                 f"duplicated-discriminant, {discriminant} in {sub_type}"
             )
@@ -793,6 +798,7 @@ class SpecTypeDefnStringEnum(SpecTypeDefn):
 TOKEN_ENDPOINT = "$endpoint"
 TOKEN_EMIT_IO_TS = "$emit_io_ts"
 TOKEN_EMIT_TYPE_INFO = "$emit_type_info"
+TOKEN_EMIT_TYPE_INFO_PYTHON = "$emit_type_info_python"
 # The import token is only for explicit ordering of the files, to process constants
 # and enums correctly. It does not impact the final generation of files, or the
 # language imports. Those are still auto-resolved.
@@ -1161,6 +1167,7 @@ class SpecNamespace:
         self.endpoint: SpecEndpoint | None = None
         self.emit_io_ts = False
         self.emit_type_info = False
+        self.emit_type_info_python = False
         self.derive_types_from_io_ts = False
         self._imports: list[str] | None = None
         self.path = name.split(".")
@@ -1215,6 +1222,11 @@ class SpecNamespace:
                 self.emit_type_info = defn
                 continue
 
+            if name == TOKEN_EMIT_TYPE_INFO_PYTHON:
+                assert defn in (True, False)
+                self.emit_type_info_python = defn
+                continue
+
             if name == TOKEN_IMPORT:
                 assert self._imports is None
                 imports = [defn] if isinstance(defn, str) else defn
@@ -1276,7 +1288,12 @@ class SpecNamespace:
             parsed_name = parse_type_str(full_name)[0]
             name = parsed_name.name
 
-            if name in [
+            if name in [
+                TOKEN_EMIT_IO_TS,
+                TOKEN_EMIT_TYPE_INFO,
+                TOKEN_IMPORT,
+                TOKEN_EMIT_TYPE_INFO_PYTHON,
+            ]:
                 continue
 
             builder.push_where(name)
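The discriminant handling above distinguishes string-enum members from plain literals: when the discriminator literal points at a string enum, the enum entry's emitted value is used rather than the stringified literal. A toy illustration of the name-versus-value distinction, using a hypothetical enum rather than a type_spec definition:

from enum import Enum

class Kind(Enum):
    WEBHOOK = "webhook"

# The serialized discriminant should be the member's value ("webhook"),
# not the result of stringifying the member itself.
assert Kind.WEBHOOK.value == "webhook"
assert str(Kind.WEBHOOK) == "Kind.WEBHOOK"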
pkgs/type_spec/emit_python.py  CHANGED
@@ -5,7 +5,7 @@ from decimal import Decimal
 from typing import Any
 
 from . import builder, util
-from .builder import EndpointEmitType, EndpointSpecificPath
+from .builder import EndpointEmitType, EndpointSpecificPath, base_namespace_name
 from .config import PythonConfig
 from .cross_output_links import get_path_links
 from .emit_open_api_util import EmitOpenAPIStabilityLevel
@@ -907,13 +907,19 @@ def _emit_type(ctx: Context, stype: builder.SpecType) -> None:
 
     # Emit dataclass decorator
     dataclass = "@dataclasses.dataclass"
-
+    refer_to(
+        ctx,
+        builder.SpecTypeDefnAlias(
+            namespace=ctx.builder.namespaces[base_namespace_name], name="ENABLE_SLOTS"
+        ),
+    )
+    dc_args = ["slots=base_t.ENABLE_SLOTS"]
     if stype.is_kw_only():
         dc_args.append("kw_only=True")
     if stype.is_hashable:
         dc_args.extend(["frozen=True", "eq=True"])
     if len(dc_args) > 0:
-        dataclass += f"({', '.join(dc_args)})"
+        dataclass += f"({', '.join(dc_args)}) # type: ignore[literal-required]"
 
     ctx.out.write(f"{dataclass}\n")
     ctx.out.write(class_out.getvalue())
pkgs/type_spec/emit_typescript_util.py  CHANGED
@@ -103,7 +103,7 @@ def emit_value_ts(
     elif isinstance(stype, builder.SpecTypeDefnStringEnum):
         return f"{refer_to(ctx, stype)}.{ts_enum_name(value, stype.name_case)}"
 
-    raise Exception("invalid constant type", value, stype)
+    raise Exception("invalid constant type", value, stype, type(stype))
 
 
 def emit_type_ts(ctx: EmitTypescriptContext, stype: builder.SpecType) -> None:
@@ -266,3 +266,18 @@ def emit_namespace_imports_ts(
         )
         import_from = f"{import_path}{resolve_namespace_name(ns)}"
         out.write(f'import * as {import_as} from "{import_from}"\n')  # noqa: E501
+
+
+def emit_namespace_imports_from_root_ts(
+    namespaces: set[builder.SpecNamespace],
+    out: io.StringIO,
+    root: str,
+) -> None:
+    for ns in sorted(
+        namespaces,
+        key=lambda name: resolve_namespace_name(name),
+    ):
+        import_as = resolve_namespace_ref(ns)
+        out.write(
+            f'import * as {import_as} from "{root}/{resolve_namespace_name(ns)}"\n'
+        )  # noqa: E501
pkgs/type_spec/parts/base.py.prepart  CHANGED
@@ -28,6 +28,10 @@ REF_NAME_STRICT_REGEX = rf"{REF_NAME_STRICT_REGEX_STRING}"
 ID_REGEX = r"-?[1-9][0-9]{0,20}"
 
 
+# ENABLE_SLOTS should be removed after slots have been tested locally
+import os
+ENABLE_SLOTS = os.environ.get("UNC_ENABLE_DATACLASS_SLOTS") == "true"
+
 if TYPE_CHECKING:
     JsonValue = Union[JsonScalar, Mapping[str, "JsonValue"], Sequence["JsonValue"]]
     ExtJsonValue = JsonValue
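Together with the emit_python.py change above, generated dataclasses now receive a slots=base_t.ENABLE_SLOTS argument, gated at runtime by the UNC_ENABLE_DATACLASS_SLOTS environment variable. A rough sketch of what enabling slots changes, using a stand-in class rather than a generated SDK type:

import dataclasses

@dataclasses.dataclass(slots=True, kw_only=True)
class Example:
    name: str

e = Example(name="abc")
# With __slots__ in place there is no per-instance __dict__, so instances
# are smaller and assigning an undeclared attribute fails loudly.
try:
    e.extra = 1
except AttributeError:
    pass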
pkgs/type_spec/type_info/emit_type_info.py  CHANGED
@@ -116,7 +116,7 @@ def asdict_for_yaml_dump(dataclass_instance: Any) -> Any:
 
 
 def emit_type_info_python(build: builder.SpecBuilder, output: str) -> None:
-    type_map = _build_map_all(build)
+    type_map = _build_map_all(build, python=True)
 
     stripped = _dict_null_strip(asdict_for_yaml_dump(type_map))
 
@@ -176,11 +176,14 @@ class MapAll:
     namespaces: dict[str, MapNamespace]
 
 
-def _build_map_all(build: builder.SpecBuilder) -> MapAll:
+def _build_map_all(build: builder.SpecBuilder, *, python: bool = False) -> MapAll:
     map_all = MapAll(namespaces={})
 
     for namespace in build.namespaces.values():
-        if not namespace.emit_type_info:
+        if not python and not namespace.emit_type_info:
+            continue
+
+        if python and not namespace.emit_type_info_python:
             continue
 
         map_namespace = MapNamespace(types={})
@@ -324,6 +327,12 @@ def _validate_type_ext_info(
         prop = _pull_property_from_type_recursively(stype, name)
         assert prop is not None, f"missing-label-field:{name}"
 
+    if ext_info.actions is not None:
+        assert stype.properties is not None
+        for action in ext_info.actions:
+            prop = _pull_property_from_type_recursively(stype, action.property)
+            assert prop is not None, f"missing-action-field:{action.property}"
+
     if not stype.is_base and isinstance(stype.base, builder.SpecTypeDefnObject):
         base_layout, _ = _validate_type_ext_info(stype.base)
     else:
pkgs/type_spec/ui_entry_actions/generate_ui_entry_actions.py  ADDED
@@ -0,0 +1,294 @@
+import json
+from dataclasses import dataclass
+from io import StringIO
+from pathlib import Path
+from typing import assert_never
+
+from main.base.types import (
+    ui_entry_actions_t,
+)
+from pkgs.argument_parser import snake_to_camel_case
+from pkgs.serialization_util import serialize_for_api
+from pkgs.type_spec import emit_typescript_util
+from pkgs.type_spec.builder import (
+    BaseTypeName,
+    NameCase,
+    RawDict,
+    SpecBuilder,
+    SpecNamespace,
+    SpecTypeDefnObject,
+)
+from pkgs.type_spec.config import Config
+from pkgs.type_spec.load_types import load_types
+from pkgs.type_spec.util import rewrite_file
+from pkgs.type_spec.value_spec.convert_type import convert_from_value_spec_type
+
+_INIT_ACTION_INDEX_TYPE_DATA = {
+    "EntryActionInfo<InputT, OutputT>": {
+        "type": BaseTypeName.s_object,
+        "properties": {"inputs": {"type": "InputT"}, "outputs": {"type": "OutputT"}},
+    }
+}
+_TYPES_ROOT = "unc_types"
+
+
+@dataclass(kw_only=True)
+class EntryActionTypeInfo:
+    inputs_type: SpecTypeDefnObject
+    outputs_type: SpecTypeDefnObject
+    name: str
+
+
+def ui_entry_variable_to_type_spec_type(
+    variable: ui_entry_actions_t.UiEntryActionVariable,
+) -> str:
+    match variable:
+        case ui_entry_actions_t.UiEntryActionVariableString():
+            return BaseTypeName.s_string
+        case ui_entry_actions_t.UiEntryActionVariableEntityFields():
+            return "entity.Entity"
+        case _:
+            assert_never(variable)
+
+
+def construct_inputs_type_data(
+    vars: dict[str, ui_entry_actions_t.UiEntryActionVariable],
+) -> RawDict:
+    if len(vars) == 0:
+        return {"type": BaseTypeName.s_object}
+    properties: dict[str, dict[str, str]] = {}
+    for input_name, input_defn in (vars).items():
+        properties[f"{input_name}"] = {
+            "type": ui_entry_variable_to_type_spec_type(input_defn)
+        }
+    return {"type": BaseTypeName.s_object, "properties": properties}
+
+
+def construct_outputs_type_data(
+    vars: dict[str, ui_entry_actions_t.UiEntryActionOutput],
+) -> RawDict:
+    if len(vars) == 0:
+        return {"type": BaseTypeName.s_object}
+    properties: dict[str, dict[str, str]] = {}
+    for output_name, output_defn in (vars).items():
+        # All outputs are optional
+        properties[f"{output_name}"] = {
+            "type": f"Optional<{convert_from_value_spec_type(output_defn.vs_type)}>"
+        }
+    return {"type": BaseTypeName.s_object, "properties": properties}
+
+
+def construct_outputs_type(
+    *,
+    action_scope: ui_entry_actions_t.ActionScope,
+    vars: dict[str, ui_entry_actions_t.UiEntryActionOutput],
+    builder: SpecBuilder,
+    namespace: SpecNamespace,
+) -> SpecTypeDefnObject:
+    stype = SpecTypeDefnObject(
+        namespace=namespace,
+        name=emit_typescript_util.ts_type_name(f"{action_scope}_outputs"),
+    )
+    namespace.types[stype.name] = stype
+    stype.process(
+        builder=builder,
+        data=construct_outputs_type_data(vars=vars),
+    )
+    return stype
+
+
+def construct_inputs_type(
+    *,
+    action_scope: ui_entry_actions_t.ActionScope,
+    vars: dict[str, ui_entry_actions_t.UiEntryActionVariable],
+    builder: SpecBuilder,
+    namespace: SpecNamespace,
+) -> SpecTypeDefnObject:
+    stype = SpecTypeDefnObject(
+        namespace=namespace,
+        name=emit_typescript_util.ts_type_name(f"{action_scope}_inputs"),
+    )
+    stype.process(builder=builder, data=construct_inputs_type_data(vars))
+    namespace.types[stype.name] = stype
+    return stype
+
+
+def _get_types_root(destination_root: Path) -> Path:
+    return destination_root / "types"
+
+
+def emit_imports_ts(
+    namespaces: set[SpecNamespace],
+    out: StringIO,
+) -> None:
+    for ns in sorted(
+        namespaces,
+        key=lambda ns: ns.name,
+    ):
+        import_as = emit_typescript_util.resolve_namespace_ref(ns)
+        import_from = f"{_TYPES_ROOT}/{ns.name}"
+        out.write(f'import * as {import_as} from "{import_from}"\n')
+
+
+def emit_entry_action_definition(
+    *,
+    ctx: emit_typescript_util.EmitTypescriptContext,
+    defn: ui_entry_actions_t.UiEntryActionDefinition,
+    builder: SpecBuilder,
+    action_scope: ui_entry_actions_t.ActionScope,
+) -> EntryActionTypeInfo:
+    inputs_type = construct_inputs_type(
+        action_scope=action_scope,
+        vars=defn.inputs,
+        builder=builder,
+        namespace=ctx.namespace,
+    )
+    outputs_type = construct_outputs_type(
+        action_scope=action_scope,
+        vars=defn.outputs,
+        builder=builder,
+        namespace=ctx.namespace,
+    )
+
+    return EntryActionTypeInfo(
+        inputs_type=inputs_type,
+        outputs_type=outputs_type,
+        name=action_scope,
+    )
+
+
+def emit_query_index(
+    ctx: emit_typescript_util.EmitTypescriptContext,
+    defn_infos: list[EntryActionTypeInfo],
+    index_path: Path,
+    builder: SpecBuilder,
+    definitions: dict[
+        ui_entry_actions_t.ActionScope, ui_entry_actions_t.UiEntryActionDefinition
+    ],
+) -> bool:
+    query_index_type_data = {
+        **_INIT_ACTION_INDEX_TYPE_DATA,
+        "EntityActionTypeLookup": {
+            "type": BaseTypeName.s_object,
+            "properties": {
+                defn_info.name: {
+                    "type": f"EntryActionInfo<{defn_info.inputs_type.name},{defn_info.outputs_type.name}>",
+                    "name_case": NameCase.preserve,
+                }
+                for defn_info in defn_infos
+            },
+        },
+        "InputInfo": {
+            "type": BaseTypeName.s_object,
+            "properties": {
+                "value_spec_var": {"type": "String"},
+                "variable": {"type": "ui_entry_actions.UiEntryActionVariable"},
+            },
+        },
+        "OutputInfo": {
+            "type": BaseTypeName.s_object,
+            "properties": {
+                "name": {"type": "String"},
+                "desc": {"type": "String"},
+                "type": {"type": "value_spec.BaseType"},
+            },
+        },
+        "DefinitionInfo": {
+            "type": BaseTypeName.s_object,
+            "properties": {
+                "inputs": {
+                    "type": "ReadonlyArray<InputInfo>",
+                },
+                "outputs": {
+                    "type": "ReadonlyArray<OutputInfo>",
+                },
+            },
+        },
+    }
+    ctx.namespace.prescan(query_index_type_data)
+    ctx.namespace.process(
+        builder=builder,
+        data=query_index_type_data,
+    )
+
+    defn_lookup_info = {}
+    for scope, defn in definitions.items():
+        inputs = []
+        outputs = []
+        for name, input in defn.inputs.items():
+            inputs.append(
+                serialize_for_api({
+                    "value_spec_var": snake_to_camel_case(name),
+                    "variable": input,
+                })
+            )
+        for name, output in defn.outputs.items():
+            outputs.append(
+                serialize_for_api({
+                    "name": name,
+                    "desc": output.description,
+                    "type": output.vs_type,
+                })
+            )
+        defn_lookup_info[scope] = {"inputs": inputs, "outputs": outputs}
+
+    defn_lookup_out = f"export const DEFINITION_LOOKUP = {json.dumps(defn_lookup_info, sort_keys=True, indent=2)} as const\n\nexport const DEFINITION_LOOKUP_TYPED = DEFINITION_LOOKUP as Record<UiEntryActionsT.ActionScope, DefinitionInfo>\n"
+
+    for stype in ctx.namespace.types.values():
+        emit_typescript_util.emit_type_ts(
+            ctx=ctx,
+            stype=stype,
+        )
+
+    import_buffer = StringIO()
+    emit_typescript_util.emit_namespace_imports_from_root_ts(
+        namespaces=ctx.namespaces,
+        out=import_buffer,
+        root=_TYPES_ROOT,
+    )
+
+    return rewrite_file(
+        content=import_buffer.getvalue() + ctx.out.getvalue() + defn_lookup_out,
+        filename=str(index_path),
+    )
+
+
+def generate_entry_actions_typescript(
+    *,
+    definitions: dict[
+        ui_entry_actions_t.ActionScope, ui_entry_actions_t.UiEntryActionDefinition
+    ],
+    destination_root: Path,
+    materials_type_spec_config: Config,
+) -> None:
+    builder = load_types(materials_type_spec_config)
+    assert builder is not None
+
+    definition_buffer = StringIO()
+    index_namespace = SpecNamespace(name="index")
+    ctx = emit_typescript_util.EmitTypescriptContext(
+        out=definition_buffer,
+        namespace=index_namespace,
+    )
+    builder.namespaces[index_namespace.name] = index_namespace
+
+    defn_infos: list[EntryActionTypeInfo] = []
+
+    for action_scope, defn in definitions.items():
+        defn_infos.append(
+            emit_entry_action_definition(
+                action_scope=action_scope,
+                ctx=ctx,
+                defn=defn,
+                builder=builder,
+            )
+        )
+
+    index_path = _get_types_root(destination_root) / "index.ts"
+    emit_query_index(
+        ctx=ctx,
+        builder=builder,
+        defn_infos=defn_infos,
+        definitions=definitions,
+        index_path=index_path,
+    )
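A hedged sketch of how this generator might be driven; the definitions mapping, destination path, and config object below are placeholders for illustration, not values shipped with the SDK:

from pathlib import Path

from pkgs.type_spec.ui_entry_actions.generate_ui_entry_actions import (
    generate_entry_actions_typescript,
)

# my_definitions: dict[ActionScope, UiEntryActionDefinition] assembled by the
# host repository; my_config: a pkgs.type_spec.config.Config for its spec
# tree. Both are assumed here for illustration only.
generate_entry_actions_typescript(
    definitions=my_definitions,
    destination_root=Path("frontend/src/generated"),
    materials_type_spec_config=my_config,
)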
pkgs/type_spec/value_spec/convert_type.py  CHANGED
@@ -61,3 +61,16 @@ def convert_to_value_spec_type(parsed: ParsedTypePath) -> value_spec_t.ValueType
     return value_spec_t.ValueType(base_type=mapped.base_type, parameters=parameters)
 
 # Our formatter was duplicating the previous line for an unknown reason, this comment blocks that
+
+
+def convert_from_value_spec_type(
+    base_type: value_spec_t.BaseType,
+) -> str:
+    for type_spec_type, mapped_type in TYPE_MAP.items():
+        if (
+            mapped_type.base_type == base_type
+            and mapped_type.param_count == 0
+            and mapped_type.variable_param_count is False
+        ):
+            return type_spec_type
+    raise ValueError(f"invalid value spec type {base_type}")
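convert_from_value_spec_type inverts the existing TYPE_MAP, but only for entries that take no parameters; anything parameterized falls through to the ValueError. A self-contained toy mirror of that reverse lookup, using stand-in names rather than the real TYPE_MAP entries:

from dataclasses import dataclass

@dataclass(frozen=True)
class _MappedType:
    base_type: str
    param_count: int = 0
    variable_param_count: bool = False

_TOY_TYPE_MAP = {
    "String": _MappedType("string"),
    "List": _MappedType("list", param_count=1),  # parameterized: not reversible
}

def _toy_convert_from_value_spec_type(base_type: str) -> str:
    for type_spec_type, mapped in _TOY_TYPE_MAP.items():
        if (
            mapped.base_type == base_type
            and mapped.param_count == 0
            and mapped.variable_param_count is False
        ):
            return type_spec_type
    raise ValueError(f"invalid value spec type {base_type}")

assert _toy_convert_from_value_spec_type("string") == "String"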