UncountablePythonSDK 0.0.30__py3-none-any.whl → 0.0.33__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of UncountablePythonSDK might be problematic.
Files changed (30)
  1. {UncountablePythonSDK-0.0.30.dist-info → UncountablePythonSDK-0.0.33.dist-info}/METADATA +3 -3
  2. {UncountablePythonSDK-0.0.30.dist-info → UncountablePythonSDK-0.0.33.dist-info}/RECORD +30 -24
  3. {UncountablePythonSDK-0.0.30.dist-info → UncountablePythonSDK-0.0.33.dist-info}/WHEEL +1 -1
  4. docs/requirements.txt +5 -5
  5. pkgs/argument_parser/argument_parser.py +41 -2
  6. pkgs/argument_parser/case_convert.py +4 -3
  7. pkgs/serialization/__init__.py +2 -0
  8. pkgs/serialization/serial_class.py +31 -37
  9. pkgs/serialization/serial_union.py +81 -0
  10. pkgs/serialization_util/__init__.py +1 -7
  11. pkgs/serialization_util/convert_to_snakecase.py +27 -0
  12. pkgs/serialization_util/serialization_helpers.py +48 -59
  13. pkgs/type_spec/builder.py +57 -1
  14. pkgs/type_spec/emit_open_api.py +6 -0
  15. pkgs/type_spec/emit_python.py +23 -0
  16. pkgs/type_spec/emit_typescript.py +6 -0
  17. pkgs/type_spec/type_info/emit_type_info.py +43 -9
  18. uncountable/types/__init__.py +8 -0
  19. uncountable/types/api/entity/list_entities.py +7 -0
  20. uncountable/types/api/inputs/set_intermediate_type.py +43 -0
  21. uncountable/types/api/recipes/add_recipe_to_project.py +35 -0
  22. uncountable/types/api/recipes/associate_recipe_as_input.py +1 -0
  23. uncountable/types/api/recipes/edit_recipe_inputs.py +39 -1
  24. uncountable/types/api/recipes/remove_recipe_from_project.py +35 -0
  25. uncountable/types/api/recipes/set_recipe_outputs.py +2 -0
  26. uncountable/types/client_base.py +72 -0
  27. uncountable/types/inputs.py +1 -0
  28. uncountable/types/recipe_workflow_steps.py +14 -3
  29. uncountable/types/recipes.py +21 -0
  30. {UncountablePythonSDK-0.0.30.dist-info → UncountablePythonSDK-0.0.33.dist-info}/top_level.txt +0 -0
pkgs/serialization_util/serialization_helpers.py CHANGED
@@ -5,16 +5,16 @@ from decimal import Decimal
 from typing import (
     TYPE_CHECKING,
     Any,
-    Optional,
+    ClassVar,
+    Protocol,
     TypeVar,
     Union,
+    overload,
 )
 
-from pkgs.argument_parser import camel_to_snake_case, snake_to_camel_case
+from pkgs.argument_parser import snake_to_camel_case
 from pkgs.serialization import (
     MISSING_SENTRY,
-    MissingSentryType,
-    MissingType,
     OpaqueKey,
     get_serial_class_data,
 )
@@ -28,52 +28,60 @@ if TYPE_CHECKING:
 else:
     JsonValue = Union[JsonScalar, dict[str, Any], list[Any]]
 
+T = TypeVar("T")
+
+
+class Dataclass(Protocol):
+    __dataclass_fields__: ClassVar[dict]  # type: ignore[type-arg,unused-ignore]
+
 
-def key_convert_to_camelcase(o: Any) -> Any:
+def identity(x: T) -> T:
+    return x
+
+
+def key_convert_to_camelcase(o: Any) -> str:
     if isinstance(o, OpaqueKey):
         return o
     if isinstance(o, enum.Enum):
-        return o.value
+        return o.value  # type: ignore[no-any-return]
     if isinstance(o, str):
         return snake_to_camel_case(o)
-    return o
+    return o  # type: ignore[no-any-return]
 
 
-def _convert_dict(d: Any) -> Any:
+def _convert_dict(d: dict[str, Any]) -> dict[str, JsonValue]:
     return {
-        key_convert_to_camelcase(k): convert_to_camelcase(v)
+        key_convert_to_camelcase(k): serialize_for_api(v)
         for k, v in d.items()
         if v != MISSING_SENTRY
     }
 
 
-def _serialize_dict(d: Any) -> dict[str, Any]:
-    return {k: serialize(v) for k, v in d.items() if v != MISSING_SENTRY}
+def _serialize_dict(d: dict[str, Any]) -> dict[str, JsonValue]:
+    return {k: serialize_for_storage(v) for k, v in d.items() if v != MISSING_SENTRY}
 
 
-def _convert_dataclass(d: Any) -> Any:
+def _convert_dataclass(d: Dataclass) -> dict[str, JsonValue]:
     dct = type(d)
     scd = get_serial_class_data(dct)
 
-    def key_convert(key: Any) -> Any:
+    def key_convert(key: str) -> str:
         if scd.has_unconverted_key(key):
             return key
         return key_convert_to_camelcase(key)
 
-    def value_convert(key: Any, value: Any) -> Any:
-        if value is None:
-            return None
+    def value_convert(key: str, value: Any) -> JsonValue:
         if scd.has_to_string_value(key):
             # Limit to types we know we need to support to avoid surprises
             # Generics, like List/Dict would need to be per-value stringified
             assert isinstance(value, (Decimal, int))
             return str(value)
         if scd.has_unconverted_value(key):
-            return value
-        return convert_to_camelcase(value)
+            return value  # type: ignore[no-any-return]
+        return serialize_for_api(value)  # type: ignore[no-any-return,unused-ignore]
 
     return {
-        key_convert(k): value_convert(k, v)
+        key_convert(k): (value_convert(k, v) if v is not None else None)
         for k, v in d.__dict__.items()
         if v != MISSING_SENTRY
     }
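
Note: the new Dataclass protocol used in the typed signature of _convert_dataclass relies on structural typing; any @dataclass-decorated class defines __dataclass_fields__, so its instances satisfy the protocol without inheriting from anything. A minimal standalone sketch of the idea (the Point class is an invented example, not part of the SDK):

import dataclasses
from typing import ClassVar, Protocol


class Dataclass(Protocol):
    # every class produced by @dataclasses.dataclass defines this attribute
    __dataclass_fields__: ClassVar[dict]


@dataclasses.dataclass
class Point:
    x: int
    y: int


def field_names(d: Dataclass) -> list[str]:
    # accepts any dataclass instance purely by structure
    return list(d.__dataclass_fields__)


print(field_names(Point(x=1, y=2)))  # ['x', 'y']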
@@ -83,24 +91,35 @@ _SERIALIZATION_FUNCS_STANDARD = {
     SerializationType.ENUM: lambda x: str(x.value),
     SerializationType.DATE: lambda x: x.isoformat(),
     SerializationType.TIMEDELTA: lambda x: x.total_seconds(),
-    SerializationType.UNKNOWN: lambda x: x,
+    SerializationType.UNKNOWN: identity,
 }
 
-_CONVERSION_SERIALIZATION_FUNCS = {
+_CONVERSION_SERIALIZATION_FUNCS: dict[SerializationType, Callable[[Any], JsonValue]] = {
     **_SERIALIZATION_FUNCS_STANDARD,
     SerializationType.NAMED_TUPLE: lambda x: _convert_dict(x._asdict()),
-    SerializationType.ITERABLE: lambda x: [convert_to_camelcase(v) for v in x],
+    SerializationType.ITERABLE: lambda x: [serialize_for_api(v) for v in x],
     SerializationType.DICT: _convert_dict,
     SerializationType.DATACLASS: _convert_dataclass,
 }
 
 
-def convert_to_camelcase(obj: Any) -> Any:
-    """@DEPRECATED prefer serialize_for_api"""
-    return serialize_for_api(obj)
+@overload
+def serialize_for_api(obj: None) -> None: ...
+
+
+@overload
+def serialize_for_api(obj: dict[str, Any]) -> dict[str, JsonValue]: ...
 
 
-def serialize_for_api(obj: Any) -> Any:
+@overload
+def serialize_for_api(obj: Dataclass) -> dict[str, JsonValue]: ...
+
+
+@overload
+def serialize_for_api(obj: Any) -> JsonValue: ...
+
+
+def serialize_for_api(obj: Any) -> JsonValue:
     """
     Serialize to a parsed-JSON format suitably encoded for API output.
 
@@ -122,15 +141,10 @@ _SERIALIZATION_FUNCS: dict[SerializationType, Callable[[Any], JsonValue]] = {
     **_SERIALIZATION_FUNCS_STANDARD,
     **_SERIALIZATION_FUNCS_DICT,
     SerializationType.NAMED_TUPLE: lambda x: _serialize_dict(x._asdict()),
-    SerializationType.ITERABLE: lambda x: [serialize(v) for v in x],
+    SerializationType.ITERABLE: lambda x: [serialize_for_storage(v) for v in x],
 }
 
 
-def serialize(obj: Any) -> Any:
-    """@DEPRECATED: prefer serialize_for_storage"""
-    return serialize_for_storage(obj)
-
-
 def serialize_for_storage(obj: Any) -> JsonValue:
     """
     Convert a value into the pseudo-JSON form for
@@ -142,34 +156,9 @@ def serialize_for_storage(obj: Any) -> JsonValue:
     return _SERIALIZATION_FUNCS[serialization_type](obj)
 
 
-def serialize_for_storage_dict(obj: Any) -> dict[str, JsonValue]:
+def serialize_for_storage_dict(obj: dict | Dataclass) -> dict[str, JsonValue]:  # type: ignore[type-arg]
     """
     Same as serialize for storage but guarantees outer object is a dictionary
     """
-    serialization_type = get_serialization_type(type(obj))  # type: ignore
+    serialization_type = get_serialization_type(type(obj))
     return _SERIALIZATION_FUNCS_DICT[serialization_type](obj)
-
-
-def key_convert_to_snake_case(o: Any) -> Any:
-    if isinstance(o, OpaqueKey):
-        return o
-    if isinstance(o, str):
-        return camel_to_snake_case(o)
-    return o
-
-
-def convert_dict_to_snake_case(data: Any) -> Any:
-    return {
-        key_convert_to_snake_case(k): convert_dict_to_snake_case(v)
-        if isinstance(v, dict)
-        else v
-        for k, v in data.items()
-        if v != MISSING_SENTRY
-    }
-
-
-T = TypeVar("T")
-
-
-def resolve_missing_to_none(val: MissingType[T]) -> Optional[T]:
-    return val if not isinstance(val, MissingSentryType) else None
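
Note: this refactor removes the deprecated convert_to_camelcase and serialize wrappers in favor of serialize_for_api and serialize_for_storage, and moves the snake_case helpers out of this module (see convert_to_snakecase.py in the file list). A rough usage sketch, assuming snake_to_camel_case produces lower-camelCase keys:

from pkgs.serialization_util import serialize_for_api, serialize_for_storage

payload = {"recipe_key": 7, "is_active": True}

# API form: string keys are camel-cased and MISSING_SENTRY values are dropped
serialize_for_api(payload)      # e.g. {"recipeKey": 7, "isActive": True}

# Storage form: keys are kept as-is, values are still serialized recursively
serialize_for_storage(payload)  # {"recipe_key": 7, "is_active": True}

# Per the new overloads, None passes straight through
serialize_for_api(None)         # None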
pkgs/type_spec/builder.py CHANGED
@@ -102,6 +102,8 @@ class DefnTypeName(StrEnum):
     s_string_enum = "StringEnum"
     # a particular literal value
     s_string_literal = "_StringLiteral"
+    # A union of several other types
+    s_union = "Union"
 
 
 base_namespace_name = "base"
@@ -547,13 +549,65 @@ class SpecTypeDefnAlias(SpecTypeDefn):
         super().base_process(builder, data, ["type", "desc", "alias", "discriminator"])
         self.alias = builder.parse_type(self.namespace, data["alias"])
         self.desc = data.get("desc", None)
-        # Should be limited to Union type aliases
         self.discriminator = data.get("discriminator", None)
 
     def get_referenced_types(self) -> list[SpecType]:
         return [self.alias]
 
 
+class SpecTypeDefnUnion(SpecTypeDefn):
+    def __init__(self, namespace: SpecNamespace, name: str) -> None:
+        super().__init__(namespace, name)
+        self.discriminator: str | None = None
+        self.types: list[SpecType] = []
+        self._alias_type: SpecType | None = None
+        self.discriminator_map: dict[str, SpecType] | None = None
+        self.desc: str | None = None
+
+    def process(self, builder: SpecBuilder, data: RawDict) -> None:
+        super().base_process(builder, data, ["type", "desc", "types", "discriminator"])
+
+        self.desc = data.get("desc", None)
+        self.discriminator = data.get("discriminator", None)
+
+        for sub_type_str in data["types"]:
+            sub_type = builder.parse_type(self.namespace, sub_type_str)
+            self.types.append(sub_type)
+
+        base_type = builder.namespaces[base_namespace_name].types[BaseTypeName.s_union]
+        self._backing_type = SpecTypeInstance(base_type, self.types)
+
+        if self.discriminator is not None:
+            self.discriminator_map = {}
+            for sub_type in self.types:
+                builder.push_where(sub_type.name)
+                assert isinstance(
+                    sub_type, SpecTypeDefnObject
+                ), "union-type-must-be-object"
+                assert sub_type.properties is not None
+                discriminator_type = sub_type.properties.get(self.discriminator)
+                assert (
+                    discriminator_type is not None
+                ), f"missing-discriminator-field: {sub_type}"
+                prop_type = unwrap_literal_type(discriminator_type.spec_type)
+                assert prop_type is not None
+                assert prop_type.is_value_to_string()
+                discriminant = str(prop_type.value)
+                assert (
+                    discriminant not in self.discriminator_map
+                ), f"duplicated-discriminant, {discriminant} in {sub_type}"
+                self.discriminator_map[discriminant] = sub_type
+
+                builder.pop_where()
+
+    def get_referenced_types(self) -> list[SpecType]:
+        return self.types
+
+    def get_backing_type(self) -> SpecType:
+        assert self._backing_type is not None
+        return self._backing_type
+
+
 class SpecTypeDefnExternal(SpecTypeDefn):
     external_map: dict[str, str]
 
@@ -1017,6 +1071,8 @@ class SpecNamespace:
         spec_type: SpecTypeDefn
         if defn_type == DefnTypeName.s_alias:
             spec_type = SpecTypeDefnAlias(self, name)
+        elif defn_type == DefnTypeName.s_union:
+            spec_type = SpecTypeDefnUnion(self, name)
         elif defn_type == DefnTypeName.s_external:
             spec_type = SpecTypeDefnExternal(self, name)
         elif defn_type == DefnTypeName.s_string_enum:
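
Note: SpecTypeDefnUnion reads a types list and an optional discriminator from the definition data, then checks that every member is an object type carrying a string-literal field whose value is unique across the union. A sketch of the raw definition data its process() method expects, for a hypothetical FooEvent union (the member type names and the surrounding spec syntax are assumptions; only the keys and the "Union" defn type name come from the code above):

# RawDict handed to SpecTypeDefnUnion.process() for a union of two
# object types discriminated on their "kind" string-literal field
data = {
    "type": "Union",
    "discriminator": "kind",
    "types": ["FooCreated", "FooDeleted"],
}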
pkgs/type_spec/emit_open_api.py CHANGED
@@ -430,6 +430,12 @@ def _emit_type(
         ctx.types[stype.name] = open_api_type(ctx, stype.alias, config=config)
         return
 
+    if isinstance(stype, builder.SpecTypeDefnUnion):
+        ctx.types[stype.name] = open_api_type(
+            ctx, stype.get_backing_type(), config=config
+        )
+        return
+
     if isinstance(stype, builder.SpecTypeDefnStringEnum):
         # TODO: check that these are always string enums
         # IMPROVE: reflect the enum names in the description
pkgs/type_spec/emit_python.py CHANGED
@@ -43,6 +43,7 @@ class TrackingContext:
     use_serial_string_enum: bool = False
     use_dataclass: bool = False
     use_serial_class: bool = False
+    use_serial_union: bool = False
     use_missing: bool = False
     use_opaque_key: bool = False
 
@@ -219,6 +220,8 @@ def _emit_types_imports(*, out: io.StringIO, ctx: Context) -> None:
         out.write("from dataclasses import dataclass\n")
     if ctx.use_serial_class:
         out.write("from pkgs.serialization import serial_class\n")
+    if ctx.use_serial_union:
+        out.write("from pkgs.serialization import serial_union_annotation\n")
     if ctx.use_serial_string_enum:
         out.write("from pkgs.serialization import serial_string_enum\n")
     if ctx.use_missing:
@@ -727,6 +730,26 @@ def _emit_type(ctx: Context, stype: builder.SpecType) -> None:
         ctx.out.write(f"{stype.name} = {refer_to(ctx, stype.alias)}\n")
         return
 
+    if isinstance(stype, builder.SpecTypeDefnUnion):
+        ctx.use_serial_union = True
+        ctx.out.write(f"{stype.name} = typing.Annotated[\n")
+        ctx.out.write(f"{INDENT}{refer_to(ctx, stype.get_backing_type())},\n")
+        ctx.out.write(f"{INDENT}serial_union_annotation(\n")
+        if stype.discriminator is not None:
+            ctx.out.write(
+                f"{INDENT * 2}discriminator={util.encode_common_string(stype.discriminator)},\n"
+            )
+        if stype.discriminator_map is not None:
+            ctx.out.write(f"{INDENT * 2}discriminator_map={{\n")
+            for key, value in stype.discriminator_map.items():
+                ctx.out.write(
+                    f"{INDENT * 3}{util.encode_common_string(key)}: {refer_to(ctx, value)},\n"
+                )
+            ctx.out.write(f"{INDENT * 2}}},\n")
+        ctx.out.write(f"{INDENT}),\n")
+        ctx.out.write("]\n")
+        return
+
     if isinstance(stype, builder.SpecTypeDefnStringEnum):
         return _emit_string_enum(ctx, stype)
 
pkgs/type_spec/emit_typescript.py CHANGED
@@ -303,6 +303,12 @@ def _emit_type(ctx: EmitTypescriptContext, stype: builder.SpecType) -> None:
         ctx.out.write(f"export type {stype.name} = {refer_to(ctx, stype.alias)}\n")
         return
 
+    if isinstance(stype, builder.SpecTypeDefnUnion):
+        ctx.out.write(
+            f"export type {stype.name} = {refer_to(ctx, stype.get_backing_type())}\n"
+        )
+        return
+
     if isinstance(stype, builder.SpecTypeDefnStringEnum):
         ctx.out.write(f"export enum {stype.name} {{\n")
         assert stype.values
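
Note: for a union type, the Python emitter above writes a typing.Annotated alias around the backing union type, while the TypeScript emitter writes a plain type alias. A rough sketch of the generated Python for a hypothetical FooEvent union (member names and discriminant values are invented for illustration; the exact rendering of the backing type depends on refer_to and INDENT):

FooEvent = typing.Annotated[
    typing.Union[FooCreated, FooDeleted],
    serial_union_annotation(
        discriminator="kind",
        discriminator_map={
            "foo_created": FooCreated,
            "foo_deleted": FooDeleted,
        },
    ),
]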
pkgs/type_spec/type_info/emit_type_info.py CHANGED
@@ -1,5 +1,6 @@
 import copy
 import dataclasses
+import decimal
 import io
 import json
 from typing import Any, Optional, TypeAlias, Union, cast
@@ -7,7 +8,10 @@ from typing import Any, Optional, TypeAlias, Union, cast
 from main.base.types import data_t
 from main.base.types.base import PureJsonValue
 from pkgs.argument_parser import CachedParser
-from pkgs.serialization_util import serialize_for_api, serialize_for_storage
+from pkgs.serialization_util import (
+    serialize_for_api,
+    serialize_for_storage,
+)
 
 from .. import builder, util
 from ..emit_typescript_util import MODIFY_NOTICE, ts_name
@@ -69,12 +73,21 @@ def _dict_null_strip(data: dict[str, object]) -> dict[str, object]:
     }
 
 
+class JsonEncoder(json.JSONEncoder):
+    """We have some defaults of special types that we need to emit"""
+
+    def default(self, obj: object) -> object:
+        if isinstance(obj, decimal.Decimal):
+            return str(obj)
+        return json.JSONEncoder.default(self, obj)
+
+
 def emit_type_info(build: builder.SpecBuilder, output: str) -> None:
     type_map = _build_map_all(build)
 
     # sort for stability, indent for smaller diffs
     stripped = _dict_null_strip(dataclasses.asdict(type_map))
-    serial = json.dumps(stripped, sort_keys=True, indent=2)
+    serial = json.dumps(stripped, sort_keys=True, indent=2, cls=JsonEncoder)
     type_map_out = io.StringIO()
     type_map_out.write(MODIFY_NOTICE)
     type_map_out.write(f"export const TYPE_MAP = {serial}")
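
Note: the custom encoder only intervenes for decimal.Decimal, which json.dumps cannot encode natively; every other unknown type still falls through to the base class and raises TypeError. For example, with the JsonEncoder class above in scope:

import decimal
import json

json.dumps({"amount": decimal.Decimal("1.25")}, cls=JsonEncoder)
# -> '{"amount": "1.25"}'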
@@ -247,17 +260,25 @@ def _extract_and_validate_layout(
     return layout
 
 
-def _validate_type_ext_info(stype: builder.SpecTypeDefnObject) -> ExtInfoLayout | None:
+def _validate_type_ext_info(
+    stype: builder.SpecTypeDefnObject,
+) -> tuple[ExtInfoLayout | None, Optional[data_t.ExtInfo]]:
     ext_info = _parse_ext_info(stype.ext_info)
     if ext_info is None:
-        return None
+        return None, None
+
+    if ext_info.label_fields is not None:
+        assert stype.properties is not None
+        for name in ext_info.label_fields:
+            prop = stype.properties.get(name)
+            assert prop is not None, f"missing-label-field:{name}"
 
     if not stype.is_base and isinstance(stype.base, builder.SpecTypeDefnObject):
-        base_layout = _validate_type_ext_info(stype.base)
+        base_layout, _ = _validate_type_ext_info(stype.base)
     else:
         base_layout = None
 
-    return _extract_and_validate_layout(stype, ext_info, base_layout)
+    return _extract_and_validate_layout(stype, ext_info, base_layout), ext_info
 
 
 def _build_map_type(
@@ -270,7 +291,7 @@ def _build_map_type(
         and not stype.is_base
        and stype.base is not None
     ):
-        _validate_type_ext_info(stype)
+        _, ext_info = _validate_type_ext_info(stype)
 
     properties: dict[str, MapProperty] = {}
     map_type = MapTypeObject(
@@ -279,7 +300,7 @@ def _build_map_type(
         properties=properties,
         desc=stype.desc,
         base_type_path=type_path_of(stype.base),
-        ext_info=_convert_ext_info(stype.ext_info),
+        ext_info=serialize_for_api(ext_info),  # type: ignore[arg-type]
     )
 
     if stype.properties is not None:
@@ -293,7 +314,7 @@ def _build_map_type(
                 api_name=ts_name(prop.name, prop.name_case),
                 extant=prop.extant,
                 type_path=type_path_of(prop.spec_type),
-                ext_info=serialize_for_api(parts.ext_info),
+                ext_info=serialize_for_api(parts.ext_info),  # type: ignore[arg-type]
                 desc=parts.desc,
                 default=prop.default,
             )
@@ -311,6 +332,19 @@ def _build_map_type(
             discriminator=stype.discriminator,
         )
 
+    if isinstance(stype, builder.SpecTypeDefnUnion):
+        # Emit as a basic alias for now, as the front-end supports only those for now
+        # IMPROVE: We should emit a proper union type and support that
+        backing = stype.get_backing_type()
+        return MapTypeAlias(
+            type_name=stype.name,
+            label=stype.label,
+            desc=stype.desc,
+            alias_type_path=type_path_of(backing),
+            ext_info=_convert_ext_info(stype.ext_info),
+            discriminator=stype.discriminator,
+        )
+
     if isinstance(stype, builder.SpecTypeDefnStringEnum):
         return MapStringEnum(
             type_name=stype.name,
uncountable/types/__init__.py CHANGED
@@ -3,6 +3,7 @@
 # ruff: noqa: E402
 # fmt: off
 # isort: skip_file
+from .api.recipes import add_recipe_to_project as add_recipe_to_project_t
 from .api.recipes import archive_recipes as archive_recipes_t
 from .api.recipes import associate_recipe_as_input as associate_recipe_as_input_t
 from .api.recipes import associate_recipe_as_lot as associate_recipe_as_lot_t
@@ -61,6 +62,8 @@ from . import recipe_metadata as recipe_metadata_t
 from . import recipe_output_metadata as recipe_output_metadata_t
 from . import recipe_tags as recipe_tags_t
 from . import recipe_workflow_steps as recipe_workflow_steps_t
+from . import recipes as recipes_t
+from .api.recipes import remove_recipe_from_project as remove_recipe_from_project_t
 from .api.entity import resolve_entity_ids as resolve_entity_ids_t
 from .api.outputs import resolve_output_conditions as resolve_output_conditions_t
 from . import response as response_t
@@ -69,6 +72,7 @@ from .api.permissions import set_core_permissions as set_core_permissions_t
 from .api.inputs import set_input_attribute_values as set_input_attribute_values_t
 from .api.inputs import set_input_category as set_input_category_t
 from .api.inputs import set_input_subcategories as set_input_subcategories_t
+from .api.inputs import set_intermediate_type as set_intermediate_type_t
 from .api.recipes import set_recipe_inputs as set_recipe_inputs_t
 from .api.recipes import set_recipe_metadata as set_recipe_metadata_t
 from .api.recipes import set_recipe_outputs as set_recipe_outputs_t
@@ -85,6 +89,7 @@ from . import workflows as workflows_t
 
 
 __all__: list[str] = [
+    "add_recipe_to_project_t",
     "archive_recipes_t",
     "associate_recipe_as_input_t",
     "associate_recipe_as_lot_t",
@@ -143,6 +148,8 @@ __all__: list[str] = [
     "recipe_output_metadata_t",
     "recipe_tags_t",
     "recipe_workflow_steps_t",
+    "recipes_t",
+    "remove_recipe_from_project_t",
     "resolve_entity_ids_t",
     "resolve_output_conditions_t",
     "response_t",
@@ -151,6 +158,7 @@ __all__: list[str] = [
     "set_input_attribute_values_t",
     "set_input_category_t",
     "set_input_subcategories_t",
+    "set_intermediate_type_t",
     "set_recipe_inputs_t",
     "set_recipe_metadata_t",
     "set_recipe_outputs_t",
uncountable/types/api/entity/list_entities.py CHANGED
@@ -8,6 +8,7 @@ import typing # noqa: F401
 import datetime  # noqa: F401
 from decimal import Decimal  # noqa: F401
 from dataclasses import dataclass
+from pkgs.serialization import serial_class
 from pkgs.serialization import OpaqueKey
 from ... import base as base_t
 from ... import entity as entity_t
@@ -26,6 +27,9 @@ ENDPOINT_PATH = "api/external/entity/external_list_entities"
 
 
 # DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    unconverted_values={"attributes"},
+)
 @dataclass(kw_only=True)
 class Arguments:
     entity_type: entity_t.EntityType
@@ -36,6 +40,9 @@ class Arguments:
 
 
 # DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    unconverted_values={"column_values"},
+)
 @dataclass(kw_only=True)
 class EntityResult:
     entity: entity_t.Entity
uncountable/types/api/inputs/set_intermediate_type.py ADDED
@@ -0,0 +1,43 @@
+# DO NOT MODIFY -- This file is generated by type_spec
+# flake8: noqa: F821
+# ruff: noqa: E402
+# fmt: off
+# isort: skip_file
+from __future__ import annotations
+import typing  # noqa: F401
+import datetime  # noqa: F401
+from decimal import Decimal  # noqa: F401
+from pkgs.strenum_compat import StrEnum
+from dataclasses import dataclass
+from ... import identifier as identifier_t
+
+__all__: list[str] = [
+    "Arguments",
+    "Data",
+    "ENDPOINT_METHOD",
+    "ENDPOINT_PATH",
+    "IntermediateType",
+]
+
+ENDPOINT_METHOD = "POST"
+ENDPOINT_PATH = "api/external/inputs/set_intermediate_type"
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+class IntermediateType(StrEnum):
+    FINAL_PRODUCT = "final_product"
+    COMPOUND_AS_INTERMEDIATE = "compound_as_intermediate"
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@dataclass(kw_only=True)
+class Arguments:
+    input_key: identifier_t.IdentifierKey
+    intermediate_type: IntermediateType
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@dataclass(kw_only=True)
+class Data:
+    pass
+# DO NOT MODIFY -- This file is generated by type_spec
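
Note: the new endpoint module follows the same generated layout as the other api modules and is re-exported from uncountable.types with a _t suffix (see the __init__.py changes above). A small sketch of what it exposes:

from uncountable.types import set_intermediate_type_t

set_intermediate_type_t.ENDPOINT_METHOD  # "POST"
set_intermediate_type_t.ENDPOINT_PATH    # "api/external/inputs/set_intermediate_type"
set_intermediate_type_t.IntermediateType.FINAL_PRODUCT.value  # "final_product"

# Arguments is a kw-only dataclass taking input_key (an identifier_t.IdentifierKey)
# and intermediate_type; how identifier keys are constructed is not shown in this diff.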
uncountable/types/api/recipes/add_recipe_to_project.py ADDED
@@ -0,0 +1,35 @@
+# DO NOT MODIFY -- This file is generated by type_spec
+# flake8: noqa: F821
+# ruff: noqa: E402
+# fmt: off
+# isort: skip_file
+from __future__ import annotations
+import typing  # noqa: F401
+import datetime  # noqa: F401
+from decimal import Decimal  # noqa: F401
+from dataclasses import dataclass
+from ... import identifier as identifier_t
+
+__all__: list[str] = [
+    "Arguments",
+    "Data",
+    "ENDPOINT_METHOD",
+    "ENDPOINT_PATH",
+]
+
+ENDPOINT_METHOD = "POST"
+ENDPOINT_PATH = "api/external/recipes/add_recipe_to_project"
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@dataclass(kw_only=True)
+class Arguments:
+    recipe_key: identifier_t.IdentifierKey
+    project_key: identifier_t.IdentifierKey
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@dataclass(kw_only=True)
+class Data:
+    pass
+# DO NOT MODIFY -- This file is generated by type_spec
uncountable/types/api/recipes/associate_recipe_as_input.py CHANGED
@@ -27,6 +27,7 @@ ENDPOINT_PATH = "api/external/recipes/associate_recipe_as_input"
 class Arguments:
     recipe_key: identifier_t.IdentifierKey
     input_key: typing.Optional[identifier_t.IdentifierKey] = None
+    show_in_listings: typing.Optional[bool] = None
 
 
 # DO NOT MODIFY -- This file is generated by type_spec