UncountablePythonSDK 0.0.31__py3-none-any.whl → 0.0.33__py3-none-any.whl

This diff shows the contents of two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release.


This version of UncountablePythonSDK might be problematic.

@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: UncountablePythonSDK
- Version: 0.0.31
+ Version: 0.0.33
  Summary: Uncountable SDK
  Project-URL: Homepage, https://github.com/uncountableinc/uncountable-python-sdk
  Project-URL: Repository, https://github.com/uncountableinc/uncountable-python-sdk.git
@@ -19,7 +19,7 @@ Description-Content-Type: text/markdown
  Requires-Dist: aiotus ==0.*
  Requires-Dist: aiohttp ==3.*
  Requires-Dist: requests ==2.*
- Requires-Dist: SQLAlchemy ==1.4.*
+ Requires-Dist: SQLAlchemy >=1.4.0
  Requires-Dist: APScheduler ==3.*
  Requires-Dist: python-dateutil ==2.*
  Requires-Dist: shelljob ==0.*
@@ -21,26 +21,28 @@ pkgs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  pkgs/argument_parser/__init__.py,sha256=CsQ6QoPKSLLRVl-z6URAmPkiUL9ZPZoV4rJHgy_-RjA,385
  pkgs/argument_parser/_is_enum.py,sha256=Gw6jJa8nBwYGqXwwCZbSnWL8Rvr5alkg5lSVAqXtOZM,257
  pkgs/argument_parser/_is_namedtuple.py,sha256=Rjc1bKanIPPogl3qG5JPBxglG1TqWYOo1nxxhBASQWY,265
- pkgs/argument_parser/argument_parser.py,sha256=eqw4ge9Sh9z35Vztq_dUQUvVMluo7YgnmMjHNMhrhVw,15937
+ pkgs/argument_parser/argument_parser.py,sha256=pcU4IhgFkea-I6PhHUex43BoVaQvnoRV-Mw3qOqCdBQ,17274
  pkgs/argument_parser/case_convert.py,sha256=NuJLJUJRbyVb6_Slen4uqaStEHbcOS1d-hBBfDrrw-c,605
- pkgs/serialization/__init__.py,sha256=quvXMSl1szddLTr4Yxo9KA9oBMoeX7qGpFkkAplFBbY,603
+ pkgs/serialization/__init__.py,sha256=LifasRW0a50A3qRFmo2bf3FQ6TXhZWOTz2-CVTgPjcQ,753
  pkgs/serialization/missing_sentry.py,sha256=aM_9KxbCk9dVvXvcOKgkIQBqFWvLhv8QlIUCiuFEXMo,806
  pkgs/serialization/opaque_key.py,sha256=FIfXEE0DA1U8R_taFbQ1RCoTSgehrPjP06-qvo-GeNQ,177
  pkgs/serialization/serial_class.py,sha256=r0hrQdIbJA_X0W0_jKEVrxi_JzVRT9qHCjsUgGu3cCI,5290
- pkgs/serialization_util/__init__.py,sha256=4vX5j1pvd1NkznSVqwWqunVyOvQtLCgVuwRjVwDk7qg,447
+ pkgs/serialization/serial_union.py,sha256=z8Ptj4bVHyb1ROfg0UPTwZ6Ef6iXLr0YJfAH5o_PU9A,2601
+ pkgs/serialization_util/__init__.py,sha256=MVKqHTUl2YnWZAFG9xCxu1SgmkQ5xPofrAGlYg6h7rI,330
  pkgs/serialization_util/_get_type_for_serialization.py,sha256=dW5_W9MFd6wgWfW5qlWork-GBb-QFLtiOZkjk2Zqn2M,1177
- pkgs/serialization_util/serialization_helpers.py,sha256=koyGyvdlEmGFK1BUiHtM2gTzAyHygbe1JpKLdZ-Aw84,5124
+ pkgs/serialization_util/convert_to_snakecase.py,sha256=H2BAo5ZdcCDN77RpLb-uP0s7-FQ5Ukwnsd3VYc1vD0M,583
+ pkgs/serialization_util/serialization_helpers.py,sha256=byotZo00SVREW_aM0sbKQEzuI7O3zwjk_nB_OQ6S5M8,4999
  pkgs/strenum_compat/__init__.py,sha256=wXRFeNvBm8RU6dy1PFJ5sRLgUIEeH_DVR95Sv5qpGbk,59
  pkgs/strenum_compat/strenum_compat.py,sha256=uOUAgpYTjHs1MX8dG81jRlyTkt3KNbkV_25zp7xTX2s,36
  pkgs/type_spec/__init__.py,sha256=h5DmJTca4QVV10sZR1x0-MlkZfuGYDfapR3zHvXfzto,19
  pkgs/type_spec/__main__.py,sha256=5bJaX9Y_-FavP0qwzhk-z-V97UY7uaezJTa1zhO_HHQ,1048
- pkgs/type_spec/builder.py,sha256=1C_ELB7KfK118QxZDLLc_Uf_nh5TAg96ilpOCfEQuN4,43685
+ pkgs/type_spec/builder.py,sha256=un86i9LqTmCMVj-g6lrZ8lU4JZEElzCfUlsn--GkTvA,46049
  pkgs/type_spec/config.py,sha256=INfEiDcUsZFUKasHprsE6i33siPB0RnfmTKOsWcGnQ8,5043
  pkgs/type_spec/emit_io_ts.py,sha256=Ghd8XYqyNYldHQDepwa9GLfHXcoi48ztBw84K28ETic,5707
- pkgs/type_spec/emit_open_api.py,sha256=rAxfPVsqJU7ass76dPhImgPao6AW6xyz-rMaQDhSp1I,23822
+ pkgs/type_spec/emit_open_api.py,sha256=Aw7Ct1itmAqhb_nsM9yDz87EoF0XWHM56MhKqtOLOio,24005
  pkgs/type_spec/emit_open_api_util.py,sha256=XAA6zH59aZWLVl0BvKAICXXl4sdBqx01QAtv5oB0bMI,2266
- pkgs/type_spec/emit_python.py,sha256=vuVgyts9zfEgRMTGsI6LKejMnq9_lxLxawjxoz4p7DE,44098
- pkgs/type_spec/emit_typescript.py,sha256=4hpCJwiDf-v8LJaNFVfFtf8zvtG73YNPFwwa_5NuffI,17729
+ pkgs/type_spec/emit_python.py,sha256=zP3AWJ5u0vzDcnvzSehCUgvXM0J9ZUtfLBVHerW6_wI,45164
+ pkgs/type_spec/emit_typescript.py,sha256=cdr5h8N70PuwORcvhURUujzwH9r1LVwJB8V2EoipGkw,17917
  pkgs/type_spec/emit_typescript_util.py,sha256=93FzJnpYse4PKFzgdw4DGV4zFTi5tF4WR-CIi7cW498,873
  pkgs/type_spec/load_types.py,sha256=xEHwdB_miR3vNs161Oy1luafE0VC-yk9-utAyCJmbEo,3629
  pkgs/type_spec/open_api_util.py,sha256=IGh-_snGPST_P_8FdYtO8MTEa9PUxRW6Rzg9X9EgQik,7114
@@ -52,7 +54,7 @@ pkgs/type_spec/actions_registry/emit_typescript.py,sha256=ben0W7qwaVCzLO-t3NEJPP
  pkgs/type_spec/parts/base.py.prepart,sha256=wGNoDyQnLolHRZGRwHQX5TrPfKnu558NXCocYvqyroc,2174
  pkgs/type_spec/parts/base.ts.prepart,sha256=2FJJvpg2olCcavxj0nbYWdwKl6KeScour2JjSvN42l8,1001
  pkgs/type_spec/type_info/__main__.py,sha256=pmVjVqXyVh8vKTNCTFgz80Sg74C5BKToP3E6GS-X_So,857
- pkgs/type_spec/type_info/emit_type_info.py,sha256=S3K5_VHbhwR0A7ZiJ5Qed2zjkici-J2ZdVKPDpc2OrI,12362
+ pkgs/type_spec/type_info/emit_type_info.py,sha256=XqAyJgzkYY2woG5O-IA5O9gnfja_slTZGd-GnS7xPts,13280
  pkgs/type_spec/value_spec/__init__.py,sha256=Z-grlcZtxAfEXhPHsK0nD7PFLGsv4eqvunaPN7_TA84,83
  pkgs/type_spec/value_spec/__main__.py,sha256=-9L5pXYx02plnTetqNknaUZPieLRtzbyWdZDT6B-cWA,8294
  pkgs/type_spec/value_spec/convert_type.py,sha256=SAYyEV6orQJJbkXSE4hhtOQJ2vKUXJCKPeYPrB8G9oA,2272
@@ -102,7 +104,7 @@ uncountable/types/recipe_links.py,sha256=RldSV7SdeBYa0bx02DzMg4jfPdgrlMRE40T16Fd
  uncountable/types/recipe_metadata.py,sha256=cebGg_lJzqZzGnKnDgmuQFrw4Xhoz6HEiGM6G0az120,1437
  uncountable/types/recipe_output_metadata.py,sha256=XJA8R1r4NTzyR_DhMkmH4ZtYD-vqpvBMji9Be8OcFmo,613
  uncountable/types/recipe_tags.py,sha256=lYpksHAxXCcIjZKR7JoZOTH2cBSovwxZaHwjZy_yqiQ,581
- uncountable/types/recipe_workflow_steps.py,sha256=LmyFwWWwJv30vuaQ4qtd0hzDdeJaIxHQZqwRb1Wi_6A,2626
+ uncountable/types/recipe_workflow_steps.py,sha256=ZMZI6SteOTVSolhPPZsSqU139i4NYFc1ACbS1rSBLJQ,2997
  uncountable/types/recipes.py,sha256=tY8MNmQiky94eIFOxSLyflXVno3pfDygxJ6WPqJlyDU,549
  uncountable/types/response.py,sha256=ZI0CG7ZxBM2k5_W-6mNMU3UlB0p1i-0nrwOvsMaS-vU,620
  uncountable/types/units.py,sha256=_kZ7KkXIbRiY2fOdkTsbJBpWRah5TCC2WWiG05e-1DA,565
@@ -163,7 +165,7 @@ uncountable/types/api/recipes/associate_recipe_as_lot.py,sha256=bTYjbnY3B7GKz4MV
  uncountable/types/api/recipes/create_recipe.py,sha256=jizKdsc761zrJXOi0xlmge7-Z9QlzRQdbLNtUoVLQCI,1420
  uncountable/types/api/recipes/create_recipes.py,sha256=qwIYa8hfcjY7_VOFt9lxmVtJ-HOJqQN3GDNSbZsRCZU,1544
  uncountable/types/api/recipes/disassociate_recipe_as_input.py,sha256=L25fpiK1Y5PByPVVgsZy9t4podz3xSSLIwKHj8CUrSg,913
- uncountable/types/api/recipes/edit_recipe_inputs.py,sha256=I0o-LKBjjNryQqwKM4g6jdy6YtX7P2Ga2lKzXU_RuRk,4153
+ uncountable/types/api/recipes/edit_recipe_inputs.py,sha256=_dLulVZLqi-CrFIVMRts8h0OHx-nUG3vFOSKS1juGUc,4568
  uncountable/types/api/recipes/get_curve.py,sha256=UIWfpqtU5sQokaxwYfQFNFl6HMyzWEF_Sjd8UMz0U88,939
  uncountable/types/api/recipes/get_recipe_calculations.py,sha256=eQmkdZzCEuq8S2f_kf_7GPvDLX1pTnY1CRmkK0SkMCI,1472
  uncountable/types/api/recipes/get_recipe_links.py,sha256=hk5dfQjv7yU2r-S9b8vwWEJLPHqU0-M6SFiTLMR3fVk,985
@@ -178,7 +180,7 @@ uncountable/types/api/recipes/set_recipe_tags.py,sha256=U710hgq9-t6QZGRB-ZGHskpt
  uncountable/types/api/recipes/unarchive_recipes.py,sha256=WcwFYbBsX2SKXnoBQ8locnRn7Bj1rHdtrURQVOfqgfU,814
  uncountable/types/api/triggers/__init__.py,sha256=gCgbynxG3jA8FQHzercKtrHKHkiIKr8APdZYUniAor8,55
  uncountable/types/api/triggers/run_trigger.py,sha256=9m9M8-nlGB_sAU2Qm2lWugp4h4Osqj6QpjNfU8osd1U,901
- UncountablePythonSDK-0.0.31.dist-info/METADATA,sha256=jaFYbK371JiK653-RWGnpqIWCo1Ql0gQRJ4B9_0L4EQ,1577
- UncountablePythonSDK-0.0.31.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
- UncountablePythonSDK-0.0.31.dist-info/top_level.txt,sha256=1UVGjAU-6hJY9qw2iJ7nCBeEwZ793AEN5ZfKX9A1uj4,31
- UncountablePythonSDK-0.0.31.dist-info/RECORD,,
+ UncountablePythonSDK-0.0.33.dist-info/METADATA,sha256=bPMG0vmoieJ8foThqNXZK5DRWNQO0mZz3ypltJAx6Ck,1577
+ UncountablePythonSDK-0.0.33.dist-info/WHEEL,sha256=cpQTJ5IWu9CdaPViMhC9YzF8gZuS5-vlfoFihTBC86A,91
+ UncountablePythonSDK-0.0.33.dist-info/top_level.txt,sha256=1UVGjAU-6hJY9qw2iJ7nCBeEwZ793AEN5ZfKX9A1uj4,31
+ UncountablePythonSDK-0.0.33.dist-info/RECORD,,
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: bdist_wheel (0.43.0)
+ Generator: setuptools (70.1.0)
  Root-Is-Purelib: true
  Tag: py3-none-any

@@ -10,7 +10,12 @@ from importlib import resources
  import dateutil.parser
  import yaml

- from pkgs.serialization import MissingSentryType, OpaqueKey, get_serial_class_data
+ from pkgs.serialization import (
+     MissingSentryType,
+     OpaqueKey,
+     get_serial_class_data,
+     get_serial_union_data,
+ )

  from ._is_enum import is_string_enum_class
  from ._is_namedtuple import is_namedtuple_type
@@ -112,6 +117,23 @@ def _invoke_membership_parser(
      raise ValueError(f"Expected value from {expected_values} but got value {value}")


+ def _build_parser_discriminated_union(
+     discriminator: str, discriminator_map: dict[str, ParserFunction[T]]
+ ) -> ParserFunction[T]:
+     def parse(value: typing.Any) -> typing.Any:
+         discriminant = value.get(discriminator)
+         if discriminant is None:
+             raise ValueError("missing-union-discriminant")
+         if not isinstance(discriminant, str):
+             raise ValueError("union-discriminant-is-not-string")
+         parser = discriminator_map.get(discriminant)
+         if parser is None:
+             raise ValueError("missing-type-for-union-discriminant", discriminant)
+         return parser(value)
+
+     return parse
+
+
  def _build_parser_inner(
      parsed_type: type[T],
      context: ParserContext,
@@ -130,6 +152,23 @@ def _build_parser_inner(
      are cached now, as they don't use the argument, and they're known to be safe.
      This is also enough to support some recursion.
      """
+
+     serial_union = get_serial_union_data(parsed_type)
+     if serial_union is not None:
+         discriminator = serial_union.discriminator
+         discriminator_map = serial_union.discriminator_map
+         if discriminator is None or discriminator_map is None:
+             # fallback to standard union parsing
+             parsed_type = serial_union.get_union_underlying()
+         else:
+             return _build_parser_discriminated_union(
+                 discriminator,
+                 {
+                     key: _build_parser_inner(value, context)
+                     for key, value in discriminator_map.items()
+                 },
+             )
+
      if dataclasses.is_dataclass(parsed_type):
          return _build_parser_dataclass(parsed_type, context)  # type: ignore[arg-type]

@@ -341,7 +380,7 @@ def _build_parser_dataclass(

              except Exception as e:
                  raise ValueError(
-                     f"unable to parse field:{field.name}", field_raw_value
+                     f"unable-to-parse-field:{field.name}", field_raw_value
                  ) from e

          if context.options.strict_property_parsing:
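
The discriminated-union dispatch added above is small enough to illustrate in isolation. Below is a minimal, self-contained sketch of the same routing behaviour, using made-up parse_circle/parse_square callables in place of the parsers the SDK builds per member type; it only mirrors the logic of _build_parser_discriminated_union shown in the hunk above.

    import typing

    # Hypothetical leaf parsers standing in for the per-type parsers the SDK builds.
    def parse_circle(value: dict[str, typing.Any]) -> str:
        return f"circle r={value['radius']}"

    def parse_square(value: dict[str, typing.Any]) -> str:
        return f"square side={value['side']}"

    DISCRIMINATOR_MAP = {"circle": parse_circle, "square": parse_square}

    def parse_shape(value: dict[str, typing.Any]) -> str:
        # Same routing as _build_parser_discriminated_union: the discriminator
        # field ("type" here) selects which parser handles the payload.
        discriminant = value.get("type")
        if discriminant is None:
            raise ValueError("missing-union-discriminant")
        parser = DISCRIMINATOR_MAP.get(discriminant)
        if parser is None:
            raise ValueError("missing-type-for-union-discriminant", discriminant)
        return parser(value)

    print(parse_shape({"type": "circle", "radius": 2}))  # -> circle r=2
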
@@ -8,3 +8,5 @@ from .serial_class import get_serial_class_data as get_serial_class_data
  from .serial_class import get_serial_string_enum_data as get_serial_string_enum_data
  from .serial_class import serial_class as serial_class
  from .serial_class import serial_string_enum as serial_string_enum
+ from .serial_union import serial_union_annotation as serial_union_annotation
+ from .serial_union import get_serial_union_data as get_serial_union_data
@@ -0,0 +1,81 @@
+ import dataclasses
+ import typing
+
+ T = typing.TypeVar("T")
+
+
+ class IdentityHashWrapper(typing.Generic[T]):
+     """This allows unhashable types to be used in the SerialUnion, like dict.
+     Since we have only one copy of the types themselves, we rely on
+     object identity for the hashing."""
+
+     def __init__(self, inner: T) -> None:
+         self.inner = inner
+
+     def __hash__(self) -> int:
+         return id(self.inner)
+
+
+ @dataclasses.dataclass(kw_only=True, frozen=True, eq=True)
+ class _SerialUnion:
+     """
+     This class is to be kept private, to provide flexibility in registration/lookup.
+     Places that need the data should access it via help classes/methods.
+     """
+
+     # If specified, indicates the Union has a discriminator which should be used to
+     # determine which type to parse.
+     discriminator: typing.Optional[str] = None
+     discriminator_map: typing.Optional[IdentityHashWrapper[dict[str, type]]] = None
+
+
+ def serial_union_annotation(
+     *,
+     discriminator: typing.Optional[str] = None,
+     discriminator_map: typing.Optional[dict[str, type]] = None,
+ ) -> _SerialUnion:
+     return _SerialUnion(
+         discriminator=discriminator,
+         discriminator_map=IdentityHashWrapper(discriminator_map)
+         if discriminator_map is not None
+         else None,
+     )
+
+
+ def _get_serial_union(parsed_type: type[T]) -> _SerialUnion | None:
+     if not hasattr(parsed_type, "__metadata__"):
+         return None
+     metadata = parsed_type.__metadata__  # type:ignore[attr-defined]
+     if not isinstance(metadata, tuple) or len(metadata) != 1:
+         return None
+     serial = metadata[0]
+     if not isinstance(serial, _SerialUnion):
+         return None
+     return serial
+
+
+ class SerialClassInspector(typing.Generic[T]):
+     def __init__(self, parsed_type: type[T], serial_union: _SerialUnion) -> None:
+         self._parsed_type = parsed_type
+         self._serial_union = serial_union
+
+     def get_union_underlying(self) -> type[T]:
+         return typing.get_args(self._parsed_type)[0]  # type:ignore[no-any-return]
+
+     @property
+     def discriminator(self) -> typing.Optional[str]:
+         return self._serial_union.discriminator
+
+     @property
+     def discriminator_map(self) -> typing.Optional[dict[str, type]]:
+         if self._serial_union.discriminator_map is None:
+             return None
+         return self._serial_union.discriminator_map.inner
+
+
+ def get_serial_union_data(parsed_type: type[T]) -> SerialClassInspector[T] | None:
+     serial = _get_serial_union(parsed_type)
+     if serial is None:
+         return None
+
+     return SerialClassInspector(parsed_type, serial)
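
For reference, a minimal sketch of how the annotation and inspector added in this new module fit together, assuming hypothetical AddItem/RemoveItem dataclasses (the real generated unions, such as RecipeInputEdit further below, follow the same shape):

    import dataclasses
    import typing

    from pkgs.serialization import get_serial_union_data, serial_union_annotation

    @dataclasses.dataclass(kw_only=True)
    class AddItem:  # hypothetical member type
        type: typing.Literal["add_item"] = "add_item"
        name: str

    @dataclasses.dataclass(kw_only=True)
    class RemoveItem:  # hypothetical member type
        type: typing.Literal["remove_item"] = "remove_item"
        item_id: int

    ItemEdit = typing.Annotated[
        typing.Union[AddItem, RemoveItem],
        serial_union_annotation(
            discriminator="type",
            discriminator_map={"add_item": AddItem, "remove_item": RemoveItem},
        ),
    ]

    # The inspector reads the annotation back off the Annotated alias.
    inspector = get_serial_union_data(ItemEdit)
    assert inspector is not None
    assert inspector.discriminator == "type"
    assert inspector.discriminator_map == {"add_item": AddItem, "remove_item": RemoveItem}
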
@@ -1,8 +1,5 @@
+ from .convert_to_snakecase import convert_dict_to_snake_case
  from .serialization_helpers import (
-     convert_dict_to_snake_case,
-     convert_to_camelcase,
-     resolve_missing_to_none,
-     serialize,
      serialize_for_api,
      serialize_for_storage,
      serialize_for_storage_dict,
@@ -10,9 +7,6 @@ from .serialization_helpers import (

  __all__: list[str] = [
      "convert_dict_to_snake_case",
-     "convert_to_camelcase",
-     "resolve_missing_to_none",
-     "serialize",
      "serialize_for_api",
      "serialize_for_storage",
      "serialize_for_storage_dict",
@@ -0,0 +1,27 @@
+ from typing import (
+     Any,
+ )
+
+ from pkgs.argument_parser import camel_to_snake_case
+ from pkgs.serialization import (
+     MISSING_SENTRY,
+     OpaqueKey,
+ )
+
+
+ def _key_convert_to_snake_case(o: Any) -> Any:
+     if isinstance(o, OpaqueKey):
+         return o
+     if isinstance(o, str):
+         return camel_to_snake_case(o)
+     return o
+
+
+ def convert_dict_to_snake_case(data: Any) -> Any:
+     return {
+         _key_convert_to_snake_case(k): convert_dict_to_snake_case(v)
+         if isinstance(v, dict)
+         else v
+         for k, v in data.items()
+         if v != MISSING_SENTRY
+     }
@@ -5,16 +5,16 @@ from decimal import Decimal
  from typing import (
      TYPE_CHECKING,
      Any,
-     Optional,
+     ClassVar,
+     Protocol,
      TypeVar,
      Union,
+     overload,
  )

- from pkgs.argument_parser import camel_to_snake_case, snake_to_camel_case
+ from pkgs.argument_parser import snake_to_camel_case
  from pkgs.serialization import (
      MISSING_SENTRY,
-     MissingSentryType,
-     MissingType,
      OpaqueKey,
      get_serial_class_data,
  )
@@ -28,52 +28,60 @@ if TYPE_CHECKING:
  else:
      JsonValue = Union[JsonScalar, dict[str, Any], list[Any]]

+ T = TypeVar("T")
+
+
+ class Dataclass(Protocol):
+     __dataclass_fields__: ClassVar[dict]  # type: ignore[type-arg,unused-ignore]
+

- def key_convert_to_camelcase(o: Any) -> Any:
+ def identity(x: T) -> T:
+     return x
+
+
+ def key_convert_to_camelcase(o: Any) -> str:
      if isinstance(o, OpaqueKey):
          return o
      if isinstance(o, enum.Enum):
-         return o.value
+         return o.value  # type: ignore[no-any-return]
      if isinstance(o, str):
          return snake_to_camel_case(o)
-     return o
+     return o  # type: ignore[no-any-return]


- def _convert_dict(d: Any) -> Any:
+ def _convert_dict(d: dict[str, Any]) -> dict[str, JsonValue]:
      return {
-         key_convert_to_camelcase(k): convert_to_camelcase(v)
+         key_convert_to_camelcase(k): serialize_for_api(v)
          for k, v in d.items()
          if v != MISSING_SENTRY
      }


- def _serialize_dict(d: Any) -> dict[str, Any]:
-     return {k: serialize(v) for k, v in d.items() if v != MISSING_SENTRY}
+ def _serialize_dict(d: dict[str, Any]) -> dict[str, JsonValue]:
+     return {k: serialize_for_storage(v) for k, v in d.items() if v != MISSING_SENTRY}


- def _convert_dataclass(d: Any) -> Any:
+ def _convert_dataclass(d: Dataclass) -> dict[str, JsonValue]:
      dct = type(d)
      scd = get_serial_class_data(dct)

-     def key_convert(key: Any) -> Any:
+     def key_convert(key: str) -> str:
          if scd.has_unconverted_key(key):
              return key
          return key_convert_to_camelcase(key)

-     def value_convert(key: Any, value: Any) -> Any:
-         if value is None:
-             return None
+     def value_convert(key: str, value: Any) -> JsonValue:
          if scd.has_to_string_value(key):
              # Limit to types we know we need to support to avoid surprises
              # Generics, like List/Dict would need to be per-value stringified
              assert isinstance(value, (Decimal, int))
              return str(value)
          if scd.has_unconverted_value(key):
-             return value
-         return convert_to_camelcase(value)
+             return value  # type: ignore[no-any-return]
+         return serialize_for_api(value)  # type: ignore[no-any-return,unused-ignore]

      return {
-         key_convert(k): value_convert(k, v)
+         key_convert(k): (value_convert(k, v) if v is not None else None)
          for k, v in d.__dict__.items()
          if v != MISSING_SENTRY
      }
@@ -83,24 +91,35 @@ _SERIALIZATION_FUNCS_STANDARD = {
      SerializationType.ENUM: lambda x: str(x.value),
      SerializationType.DATE: lambda x: x.isoformat(),
      SerializationType.TIMEDELTA: lambda x: x.total_seconds(),
-     SerializationType.UNKNOWN: lambda x: x,
+     SerializationType.UNKNOWN: identity,
  }

- _CONVERSION_SERIALIZATION_FUNCS = {
+ _CONVERSION_SERIALIZATION_FUNCS: dict[SerializationType, Callable[[Any], JsonValue]] = {
      **_SERIALIZATION_FUNCS_STANDARD,
      SerializationType.NAMED_TUPLE: lambda x: _convert_dict(x._asdict()),
-     SerializationType.ITERABLE: lambda x: [convert_to_camelcase(v) for v in x],
+     SerializationType.ITERABLE: lambda x: [serialize_for_api(v) for v in x],
      SerializationType.DICT: _convert_dict,
      SerializationType.DATACLASS: _convert_dataclass,
  }


- def convert_to_camelcase(obj: Any) -> Any:
-     """@DEPRECATED prefer serialize_for_api"""
-     return serialize_for_api(obj)
+ @overload
+ def serialize_for_api(obj: None) -> None: ...
+
+
+ @overload
+ def serialize_for_api(obj: dict[str, Any]) -> dict[str, JsonValue]: ...


- def serialize_for_api(obj: Any) -> Any:
+ @overload
+ def serialize_for_api(obj: Dataclass) -> dict[str, JsonValue]: ...
+
+
+ @overload
+ def serialize_for_api(obj: Any) -> JsonValue: ...
+
+
+ def serialize_for_api(obj: Any) -> JsonValue:
      """
      Serialize to a parsed-JSON format suitably encoded for API output.

@@ -122,15 +141,10 @@ _SERIALIZATION_FUNCS: dict[SerializationType, Callable[[Any], JsonValue]] = {
      **_SERIALIZATION_FUNCS_STANDARD,
      **_SERIALIZATION_FUNCS_DICT,
      SerializationType.NAMED_TUPLE: lambda x: _serialize_dict(x._asdict()),
-     SerializationType.ITERABLE: lambda x: [serialize(v) for v in x],
+     SerializationType.ITERABLE: lambda x: [serialize_for_storage(v) for v in x],
  }


- def serialize(obj: Any) -> Any:
-     """@DEPRECATED: prefer serialize_for_storage"""
-     return serialize_for_storage(obj)
-
-
  def serialize_for_storage(obj: Any) -> JsonValue:
      """
      Convert a value into the pseudo-JSON form for
@@ -142,34 +156,9 @@ def serialize_for_storage(obj: Any) -> JsonValue:
      return _SERIALIZATION_FUNCS[serialization_type](obj)


- def serialize_for_storage_dict(obj: Any) -> dict[str, JsonValue]:
+ def serialize_for_storage_dict(obj: dict | Dataclass) -> dict[str, JsonValue]:  # type: ignore[type-arg]
      """
      Same as serialize for storage but guarantees outer object is a dictionary
      """
-     serialization_type = get_serialization_type(type(obj))  # type: ignore
+     serialization_type = get_serialization_type(type(obj))
      return _SERIALIZATION_FUNCS_DICT[serialization_type](obj)
-
-
- def key_convert_to_snake_case(o: Any) -> Any:
-     if isinstance(o, OpaqueKey):
-         return o
-     if isinstance(o, str):
-         return camel_to_snake_case(o)
-     return o
-
-
- def convert_dict_to_snake_case(data: Any) -> Any:
-     return {
-         key_convert_to_snake_case(k): convert_dict_to_snake_case(v)
-         if isinstance(v, dict)
-         else v
-         for k, v in data.items()
-         if v != MISSING_SENTRY
-     }
-
-
- T = TypeVar("T")
-
-
- def resolve_missing_to_none(val: MissingType[T]) -> Optional[T]:
-     return val if not isinstance(val, MissingSentryType) else None
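
A short usage sketch of the surviving serialize_for_api entry point (now typed with overloads), assuming pkgs.serialization_util is importable; the camel-casing comes from snake_to_camel_case, which is unchanged in this release, so the printed result is the expected, not verified, output:

    from pkgs.serialization_util import serialize_for_api

    payload = {"recipe_id": 7, "step_name": "mix"}
    print(serialize_for_api(payload))
    # expected: {'recipeId': 7, 'stepName': 'mix'}
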
pkgs/type_spec/builder.py CHANGED
@@ -102,6 +102,8 @@ class DefnTypeName(StrEnum):
      s_string_enum = "StringEnum"
      # a particular literal value
      s_string_literal = "_StringLiteral"
+     # A union of several other types
+     s_union = "Union"


  base_namespace_name = "base"
@@ -547,13 +549,65 @@ class SpecTypeDefnAlias(SpecTypeDefn):
          super().base_process(builder, data, ["type", "desc", "alias", "discriminator"])
          self.alias = builder.parse_type(self.namespace, data["alias"])
          self.desc = data.get("desc", None)
-         # Should be limited to Union type aliases
          self.discriminator = data.get("discriminator", None)

      def get_referenced_types(self) -> list[SpecType]:
          return [self.alias]


+ class SpecTypeDefnUnion(SpecTypeDefn):
+     def __init__(self, namespace: SpecNamespace, name: str) -> None:
+         super().__init__(namespace, name)
+         self.discriminator: str | None = None
+         self.types: list[SpecType] = []
+         self._alias_type: SpecType | None = None
+         self.discriminator_map: dict[str, SpecType] | None = None
+         self.desc: str | None = None
+
+     def process(self, builder: SpecBuilder, data: RawDict) -> None:
+         super().base_process(builder, data, ["type", "desc", "types", "discriminator"])
+
+         self.desc = data.get("desc", None)
+         self.discriminator = data.get("discriminator", None)
+
+         for sub_type_str in data["types"]:
+             sub_type = builder.parse_type(self.namespace, sub_type_str)
+             self.types.append(sub_type)
+
+         base_type = builder.namespaces[base_namespace_name].types[BaseTypeName.s_union]
+         self._backing_type = SpecTypeInstance(base_type, self.types)
+
+         if self.discriminator is not None:
+             self.discriminator_map = {}
+             for sub_type in self.types:
+                 builder.push_where(sub_type.name)
+                 assert isinstance(
+                     sub_type, SpecTypeDefnObject
+                 ), "union-type-must-be-object"
+                 assert sub_type.properties is not None
+                 discriminator_type = sub_type.properties.get(self.discriminator)
+                 assert (
+                     discriminator_type is not None
+                 ), f"missing-discriminator-field: {sub_type}"
+                 prop_type = unwrap_literal_type(discriminator_type.spec_type)
+                 assert prop_type is not None
+                 assert prop_type.is_value_to_string()
+                 discriminant = str(prop_type.value)
+                 assert (
+                     discriminant not in self.discriminator_map
+                 ), f"duplicated-discriminant, {discriminant} in {sub_type}"
+                 self.discriminator_map[discriminant] = sub_type
+
+                 builder.pop_where()
+
+     def get_referenced_types(self) -> list[SpecType]:
+         return self.types
+
+     def get_backing_type(self) -> SpecType:
+         assert self._backing_type is not None
+         return self._backing_type
+
+
  class SpecTypeDefnExternal(SpecTypeDefn):
      external_map: dict[str, str]

@@ -1017,6 +1071,8 @@ class SpecNamespace:
          spec_type: SpecTypeDefn
          if defn_type == DefnTypeName.s_alias:
              spec_type = SpecTypeDefnAlias(self, name)
+         elif defn_type == DefnTypeName.s_union:
+             spec_type = SpecTypeDefnUnion(self, name)
          elif defn_type == DefnTypeName.s_external:
              spec_type = SpecTypeDefnExternal(self, name)
          elif defn_type == DefnTypeName.s_string_enum:
@@ -430,6 +430,12 @@ def _emit_type(
          ctx.types[stype.name] = open_api_type(ctx, stype.alias, config=config)
          return

+     if isinstance(stype, builder.SpecTypeDefnUnion):
+         ctx.types[stype.name] = open_api_type(
+             ctx, stype.get_backing_type(), config=config
+         )
+         return
+
      if isinstance(stype, builder.SpecTypeDefnStringEnum):
          # TODO: check that these are always string enums
          # IMPROVE: reflect the enum names in the description
@@ -43,6 +43,7 @@ class TrackingContext:
      use_serial_string_enum: bool = False
      use_dataclass: bool = False
      use_serial_class: bool = False
+     use_serial_union: bool = False
      use_missing: bool = False
      use_opaque_key: bool = False

@@ -219,6 +220,8 @@ def _emit_types_imports(*, out: io.StringIO, ctx: Context) -> None:
          out.write("from dataclasses import dataclass\n")
      if ctx.use_serial_class:
          out.write("from pkgs.serialization import serial_class\n")
+     if ctx.use_serial_union:
+         out.write("from pkgs.serialization import serial_union_annotation\n")
      if ctx.use_serial_string_enum:
          out.write("from pkgs.serialization import serial_string_enum\n")
      if ctx.use_missing:
@@ -727,6 +730,26 @@ def _emit_type(ctx: Context, stype: builder.SpecType) -> None:
          ctx.out.write(f"{stype.name} = {refer_to(ctx, stype.alias)}\n")
          return

+     if isinstance(stype, builder.SpecTypeDefnUnion):
+         ctx.use_serial_union = True
+         ctx.out.write(f"{stype.name} = typing.Annotated[\n")
+         ctx.out.write(f"{INDENT}{refer_to(ctx, stype.get_backing_type())},\n")
+         ctx.out.write(f"{INDENT}serial_union_annotation(\n")
+         if stype.discriminator is not None:
+             ctx.out.write(
+                 f"{INDENT * 2}discriminator={util.encode_common_string(stype.discriminator)},\n"
+             )
+         if stype.discriminator_map is not None:
+             ctx.out.write(f"{INDENT * 2}discriminator_map={{\n")
+             for key, value in stype.discriminator_map.items():
+                 ctx.out.write(
+                     f"{INDENT * 3}{util.encode_common_string(key)}: {refer_to(ctx, value)},\n"
+                 )
+             ctx.out.write(f"{INDENT * 2}}},\n")
+         ctx.out.write(f"{INDENT}),\n")
+         ctx.out.write("]\n")
+         return
+
      if isinstance(stype, builder.SpecTypeDefnStringEnum):
          return _emit_string_enum(ctx, stype)

@@ -303,6 +303,12 @@ def _emit_type(ctx: EmitTypescriptContext, stype: builder.SpecType) -> None:
          ctx.out.write(f"export type {stype.name} = {refer_to(ctx, stype.alias)}\n")
          return

+     if isinstance(stype, builder.SpecTypeDefnUnion):
+         ctx.out.write(
+             f"export type {stype.name} = {refer_to(ctx, stype.get_backing_type())}\n"
+         )
+         return
+
      if isinstance(stype, builder.SpecTypeDefnStringEnum):
          ctx.out.write(f"export enum {stype.name} {{\n")
          assert stype.values
@@ -1,5 +1,6 @@
  import copy
  import dataclasses
+ import decimal
  import io
  import json
  from typing import Any, Optional, TypeAlias, Union, cast
@@ -7,7 +8,10 @@ from typing import Any, Optional, TypeAlias, Union, cast
  from main.base.types import data_t
  from main.base.types.base import PureJsonValue
  from pkgs.argument_parser import CachedParser
- from pkgs.serialization_util import serialize_for_api, serialize_for_storage
+ from pkgs.serialization_util import (
+     serialize_for_api,
+     serialize_for_storage,
+ )

  from .. import builder, util
  from ..emit_typescript_util import MODIFY_NOTICE, ts_name
@@ -69,12 +73,21 @@ def _dict_null_strip(data: dict[str, object]) -> dict[str, object]:
      }


+ class JsonEncoder(json.JSONEncoder):
+     """We have some defaults of special types that we need to emit"""
+
+     def default(self, obj: object) -> object:
+         if isinstance(obj, decimal.Decimal):
+             return str(obj)
+         return json.JSONEncoder.default(self, obj)
+
+
  def emit_type_info(build: builder.SpecBuilder, output: str) -> None:
      type_map = _build_map_all(build)

      # sort for stability, indent for smaller diffs
      stripped = _dict_null_strip(dataclasses.asdict(type_map))
-     serial = json.dumps(stripped, sort_keys=True, indent=2)
+     serial = json.dumps(stripped, sort_keys=True, indent=2, cls=JsonEncoder)
      type_map_out = io.StringIO()
      type_map_out.write(MODIFY_NOTICE)
      type_map_out.write(f"export const TYPE_MAP = {serial}")
@@ -287,7 +300,7 @@ def _build_map_type(
              properties=properties,
              desc=stype.desc,
              base_type_path=type_path_of(stype.base),
-             ext_info=serialize_for_api(ext_info),
+             ext_info=serialize_for_api(ext_info),  # type: ignore[arg-type]
          )

          if stype.properties is not None:
@@ -301,7 +314,7 @@ def _build_map_type(
                  api_name=ts_name(prop.name, prop.name_case),
                  extant=prop.extant,
                  type_path=type_path_of(prop.spec_type),
-                 ext_info=serialize_for_api(parts.ext_info),
+                 ext_info=serialize_for_api(parts.ext_info),  # type: ignore[arg-type]
                  desc=parts.desc,
                  default=prop.default,
              )
@@ -319,6 +332,19 @@ def _build_map_type(
              discriminator=stype.discriminator,
          )

+     if isinstance(stype, builder.SpecTypeDefnUnion):
+         # Emit as a basic alias for now, as the front-end supports only those for now
+         # IMPROVE: We should emit a proper union type and support that
+         backing = stype.get_backing_type()
+         return MapTypeAlias(
+             type_name=stype.name,
+             label=stype.label,
+             desc=stype.desc,
+             alias_type_path=type_path_of(backing),
+             ext_info=_convert_ext_info(stype.ext_info),
+             discriminator=stype.discriminator,
+         )
+
      if isinstance(stype, builder.SpecTypeDefnStringEnum):
          return MapStringEnum(
              type_name=stype.name,
@@ -10,6 +10,7 @@ from decimal import Decimal # noqa: F401
  from pkgs.strenum_compat import StrEnum
  from dataclasses import dataclass
  from pkgs.serialization import serial_class
+ from pkgs.serialization import serial_union_annotation
  from ... import identifier as identifier_t
  from ... import recipe_inputs as recipe_inputs_t
  from ... import recipe_workflow_steps as recipe_workflow_steps_t
@@ -114,7 +115,18 @@ class RecipeInputEditUpdateAnnotations(RecipeInputEditInputBase):


  # DO NOT MODIFY -- This file is generated by type_spec
- RecipeInputEdit = typing.Union[RecipeInputEditClearInputs, RecipeInputEditUpsertInput, RecipeInputEditAddInput, RecipeInputEditUpdateAnnotations]
+ RecipeInputEdit = typing.Annotated[
+     typing.Union[RecipeInputEditClearInputs, RecipeInputEditUpsertInput, RecipeInputEditAddInput, RecipeInputEditUpdateAnnotations],
+     serial_union_annotation(
+         discriminator="type",
+         discriminator_map={
+             "clear_inputs": RecipeInputEditClearInputs,
+             "upsert_input": RecipeInputEditUpsertInput,
+             "add_input": RecipeInputEditAddInput,
+             "update_annotations": RecipeInputEditUpdateAnnotations,
+         },
+     ),
+ ]


  # DO NOT MODIFY -- This file is generated by type_spec
@@ -10,6 +10,7 @@ from decimal import Decimal # noqa: F401
  from pkgs.strenum_compat import StrEnum
  from dataclasses import dataclass
  from pkgs.serialization import serial_class
+ from pkgs.serialization import serial_union_annotation
  from . import identifier as identifier_t

  __all__: list[str] = [
@@ -64,14 +65,24 @@ class RecipeWorkflowStepIdentifierWorkflowStep(RecipeWorkflowStepIdentifierBase)

  # DO NOT MODIFY -- This file is generated by type_spec
  @serial_class(
-     parse_require={"key_type"},
+     parse_require={"type"},
  )
  @dataclass(kw_only=True)
  class RecipeWorkflowStepIdentifierKey:
-     key_type: typing.Literal[RecipeWorkflowStepIdentifierType.IDENTIFIER_KEY] = RecipeWorkflowStepIdentifierType.IDENTIFIER_KEY
+     type: typing.Literal[RecipeWorkflowStepIdentifierType.IDENTIFIER_KEY] = RecipeWorkflowStepIdentifierType.IDENTIFIER_KEY
      recipe_workflow_step_key: identifier_t.IdentifierKey


  # DO NOT MODIFY -- This file is generated by type_spec
- RecipeWorkflowStepIdentifier = typing.Union[RecipeWorkflowStepIdentifierDefault, RecipeWorkflowStepIdentifierWorkflowStep, RecipeWorkflowStepIdentifierKey]
+ RecipeWorkflowStepIdentifier = typing.Annotated[
+     typing.Union[RecipeWorkflowStepIdentifierDefault, RecipeWorkflowStepIdentifierWorkflowStep, RecipeWorkflowStepIdentifierKey],
+     serial_union_annotation(
+         discriminator="type",
+         discriminator_map={
+             "default": RecipeWorkflowStepIdentifierDefault,
+             "workflow_step": RecipeWorkflowStepIdentifierWorkflowStep,
+             "identifier_key": RecipeWorkflowStepIdentifierKey,
+         },
+     ),
+ ]
  # DO NOT MODIFY -- This file is generated by type_spec