UncountablePythonSDK 0.0.31__py3-none-any.whl → 0.0.34__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of UncountablePythonSDK might be problematic.
- {UncountablePythonSDK-0.0.31.dist-info → UncountablePythonSDK-0.0.34.dist-info}/METADATA +2 -2
- {UncountablePythonSDK-0.0.31.dist-info → UncountablePythonSDK-0.0.34.dist-info}/RECORD +19 -17
- {UncountablePythonSDK-0.0.31.dist-info → UncountablePythonSDK-0.0.34.dist-info}/WHEEL +1 -1
- pkgs/argument_parser/argument_parser.py +41 -2
- pkgs/serialization/__init__.py +2 -0
- pkgs/serialization/serial_union.py +81 -0
- pkgs/serialization_util/__init__.py +1 -7
- pkgs/serialization_util/convert_to_snakecase.py +27 -0
- pkgs/serialization_util/serialization_helpers.py +128 -73
- pkgs/type_spec/builder.py +57 -1
- pkgs/type_spec/emit_open_api.py +6 -0
- pkgs/type_spec/emit_python.py +23 -0
- pkgs/type_spec/emit_typescript.py +6 -0
- pkgs/type_spec/type_info/emit_type_info.py +30 -4
- uncountable/types/api/id_source/match_id_source.py +0 -4
- uncountable/types/api/recipes/edit_recipe_inputs.py +13 -1
- uncountable/types/api/recipes/get_recipes_data.py +0 -3
- uncountable/types/recipe_workflow_steps.py +14 -3
- {UncountablePythonSDK-0.0.31.dist-info → UncountablePythonSDK-0.0.34.dist-info}/top_level.txt +0 -0
{UncountablePythonSDK-0.0.31.dist-info → UncountablePythonSDK-0.0.34.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: UncountablePythonSDK
-Version: 0.0.31
+Version: 0.0.34
 Summary: Uncountable SDK
 Project-URL: Homepage, https://github.com/uncountableinc/uncountable-python-sdk
 Project-URL: Repository, https://github.com/uncountableinc/uncountable-python-sdk.git
@@ -19,7 +19,7 @@ Description-Content-Type: text/markdown
 Requires-Dist: aiotus ==0.*
 Requires-Dist: aiohttp ==3.*
 Requires-Dist: requests ==2.*
-Requires-Dist: SQLAlchemy
+Requires-Dist: SQLAlchemy >=1.4.0
 Requires-Dist: APScheduler ==3.*
 Requires-Dist: python-dateutil ==2.*
 Requires-Dist: shelljob ==0.*
{UncountablePythonSDK-0.0.31.dist-info → UncountablePythonSDK-0.0.34.dist-info}/RECORD
CHANGED

@@ -21,26 +21,28 @@ pkgs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pkgs/argument_parser/__init__.py,sha256=CsQ6QoPKSLLRVl-z6URAmPkiUL9ZPZoV4rJHgy_-RjA,385
 pkgs/argument_parser/_is_enum.py,sha256=Gw6jJa8nBwYGqXwwCZbSnWL8Rvr5alkg5lSVAqXtOZM,257
 pkgs/argument_parser/_is_namedtuple.py,sha256=Rjc1bKanIPPogl3qG5JPBxglG1TqWYOo1nxxhBASQWY,265
-pkgs/argument_parser/argument_parser.py,sha256=
+pkgs/argument_parser/argument_parser.py,sha256=pcU4IhgFkea-I6PhHUex43BoVaQvnoRV-Mw3qOqCdBQ,17274
 pkgs/argument_parser/case_convert.py,sha256=NuJLJUJRbyVb6_Slen4uqaStEHbcOS1d-hBBfDrrw-c,605
-pkgs/serialization/__init__.py,sha256=
+pkgs/serialization/__init__.py,sha256=LifasRW0a50A3qRFmo2bf3FQ6TXhZWOTz2-CVTgPjcQ,753
 pkgs/serialization/missing_sentry.py,sha256=aM_9KxbCk9dVvXvcOKgkIQBqFWvLhv8QlIUCiuFEXMo,806
 pkgs/serialization/opaque_key.py,sha256=FIfXEE0DA1U8R_taFbQ1RCoTSgehrPjP06-qvo-GeNQ,177
 pkgs/serialization/serial_class.py,sha256=r0hrQdIbJA_X0W0_jKEVrxi_JzVRT9qHCjsUgGu3cCI,5290
-pkgs/
+pkgs/serialization/serial_union.py,sha256=z8Ptj4bVHyb1ROfg0UPTwZ6Ef6iXLr0YJfAH5o_PU9A,2601
+pkgs/serialization_util/__init__.py,sha256=MVKqHTUl2YnWZAFG9xCxu1SgmkQ5xPofrAGlYg6h7rI,330
 pkgs/serialization_util/_get_type_for_serialization.py,sha256=dW5_W9MFd6wgWfW5qlWork-GBb-QFLtiOZkjk2Zqn2M,1177
-pkgs/serialization_util/
+pkgs/serialization_util/convert_to_snakecase.py,sha256=H2BAo5ZdcCDN77RpLb-uP0s7-FQ5Ukwnsd3VYc1vD0M,583
+pkgs/serialization_util/serialization_helpers.py,sha256=DpDrPMc0XS_dAvkLpiON0fVQHeU6t24jfRIeYUe3FJA,6916
 pkgs/strenum_compat/__init__.py,sha256=wXRFeNvBm8RU6dy1PFJ5sRLgUIEeH_DVR95Sv5qpGbk,59
 pkgs/strenum_compat/strenum_compat.py,sha256=uOUAgpYTjHs1MX8dG81jRlyTkt3KNbkV_25zp7xTX2s,36
 pkgs/type_spec/__init__.py,sha256=h5DmJTca4QVV10sZR1x0-MlkZfuGYDfapR3zHvXfzto,19
 pkgs/type_spec/__main__.py,sha256=5bJaX9Y_-FavP0qwzhk-z-V97UY7uaezJTa1zhO_HHQ,1048
-pkgs/type_spec/builder.py,sha256=
+pkgs/type_spec/builder.py,sha256=un86i9LqTmCMVj-g6lrZ8lU4JZEElzCfUlsn--GkTvA,46049
 pkgs/type_spec/config.py,sha256=INfEiDcUsZFUKasHprsE6i33siPB0RnfmTKOsWcGnQ8,5043
 pkgs/type_spec/emit_io_ts.py,sha256=Ghd8XYqyNYldHQDepwa9GLfHXcoi48ztBw84K28ETic,5707
-pkgs/type_spec/emit_open_api.py,sha256=
+pkgs/type_spec/emit_open_api.py,sha256=Aw7Ct1itmAqhb_nsM9yDz87EoF0XWHM56MhKqtOLOio,24005
 pkgs/type_spec/emit_open_api_util.py,sha256=XAA6zH59aZWLVl0BvKAICXXl4sdBqx01QAtv5oB0bMI,2266
-pkgs/type_spec/emit_python.py,sha256=
-pkgs/type_spec/emit_typescript.py,sha256=
+pkgs/type_spec/emit_python.py,sha256=zP3AWJ5u0vzDcnvzSehCUgvXM0J9ZUtfLBVHerW6_wI,45164
+pkgs/type_spec/emit_typescript.py,sha256=cdr5h8N70PuwORcvhURUujzwH9r1LVwJB8V2EoipGkw,17917
 pkgs/type_spec/emit_typescript_util.py,sha256=93FzJnpYse4PKFzgdw4DGV4zFTi5tF4WR-CIi7cW498,873
 pkgs/type_spec/load_types.py,sha256=xEHwdB_miR3vNs161Oy1luafE0VC-yk9-utAyCJmbEo,3629
 pkgs/type_spec/open_api_util.py,sha256=IGh-_snGPST_P_8FdYtO8MTEa9PUxRW6Rzg9X9EgQik,7114
@@ -52,7 +54,7 @@ pkgs/type_spec/actions_registry/emit_typescript.py,sha256=ben0W7qwaVCzLO-t3NEJPP
 pkgs/type_spec/parts/base.py.prepart,sha256=wGNoDyQnLolHRZGRwHQX5TrPfKnu558NXCocYvqyroc,2174
 pkgs/type_spec/parts/base.ts.prepart,sha256=2FJJvpg2olCcavxj0nbYWdwKl6KeScour2JjSvN42l8,1001
 pkgs/type_spec/type_info/__main__.py,sha256=pmVjVqXyVh8vKTNCTFgz80Sg74C5BKToP3E6GS-X_So,857
-pkgs/type_spec/type_info/emit_type_info.py,sha256=
+pkgs/type_spec/type_info/emit_type_info.py,sha256=XqAyJgzkYY2woG5O-IA5O9gnfja_slTZGd-GnS7xPts,13280
 pkgs/type_spec/value_spec/__init__.py,sha256=Z-grlcZtxAfEXhPHsK0nD7PFLGsv4eqvunaPN7_TA84,83
 pkgs/type_spec/value_spec/__main__.py,sha256=-9L5pXYx02plnTetqNknaUZPieLRtzbyWdZDT6B-cWA,8294
 pkgs/type_spec/value_spec/convert_type.py,sha256=SAYyEV6orQJJbkXSE4hhtOQJ2vKUXJCKPeYPrB8G9oA,2272
@@ -102,7 +104,7 @@ uncountable/types/recipe_links.py,sha256=RldSV7SdeBYa0bx02DzMg4jfPdgrlMRE40T16Fd
 uncountable/types/recipe_metadata.py,sha256=cebGg_lJzqZzGnKnDgmuQFrw4Xhoz6HEiGM6G0az120,1437
 uncountable/types/recipe_output_metadata.py,sha256=XJA8R1r4NTzyR_DhMkmH4ZtYD-vqpvBMji9Be8OcFmo,613
 uncountable/types/recipe_tags.py,sha256=lYpksHAxXCcIjZKR7JoZOTH2cBSovwxZaHwjZy_yqiQ,581
-uncountable/types/recipe_workflow_steps.py,sha256=
+uncountable/types/recipe_workflow_steps.py,sha256=ZMZI6SteOTVSolhPPZsSqU139i4NYFc1ACbS1rSBLJQ,2997
 uncountable/types/recipes.py,sha256=tY8MNmQiky94eIFOxSLyflXVno3pfDygxJ6WPqJlyDU,549
 uncountable/types/response.py,sha256=ZI0CG7ZxBM2k5_W-6mNMU3UlB0p1i-0nrwOvsMaS-vU,620
 uncountable/types/units.py,sha256=_kZ7KkXIbRiY2fOdkTsbJBpWRah5TCC2WWiG05e-1DA,565
@@ -128,7 +130,7 @@ uncountable/types/api/field_options/__init__.py,sha256=gCgbynxG3jA8FQHzercKtrHKH
 uncountable/types/api/field_options/upsert_field_options.py,sha256=xYtC68AabmTrYn_yV19C91yZv9tfohaRxmvCjMQ5vy8,1144
 uncountable/types/api/id_source/__init__.py,sha256=gCgbynxG3jA8FQHzercKtrHKHkiIKr8APdZYUniAor8,55
 uncountable/types/api/id_source/list_id_source.py,sha256=S_NdAd1FIgMtCfpRK9bs4ZIJH7HdyHWImD1qiPuAKMg,1157
-uncountable/types/api/id_source/match_id_source.py,sha256=
+uncountable/types/api/id_source/match_id_source.py,sha256=6aaAXcuOIy0FqKw0CK4xde8o9YmryDNhX46WUEnMrRk,1048
 uncountable/types/api/input_groups/__init__.py,sha256=gCgbynxG3jA8FQHzercKtrHKHkiIKr8APdZYUniAor8,55
 uncountable/types/api/input_groups/get_input_group_names.py,sha256=LdHWWEfVNGys6Tudienjich56Zz4bj7uXznpyYitCYA,1033
 uncountable/types/api/inputs/__init__.py,sha256=gCgbynxG3jA8FQHzercKtrHKHkiIKr8APdZYUniAor8,55
@@ -163,13 +165,13 @@ uncountable/types/api/recipes/associate_recipe_as_lot.py,sha256=bTYjbnY3B7GKz4MV
 uncountable/types/api/recipes/create_recipe.py,sha256=jizKdsc761zrJXOi0xlmge7-Z9QlzRQdbLNtUoVLQCI,1420
 uncountable/types/api/recipes/create_recipes.py,sha256=qwIYa8hfcjY7_VOFt9lxmVtJ-HOJqQN3GDNSbZsRCZU,1544
 uncountable/types/api/recipes/disassociate_recipe_as_input.py,sha256=L25fpiK1Y5PByPVVgsZy9t4podz3xSSLIwKHj8CUrSg,913
-uncountable/types/api/recipes/edit_recipe_inputs.py,sha256=
+uncountable/types/api/recipes/edit_recipe_inputs.py,sha256=_dLulVZLqi-CrFIVMRts8h0OHx-nUG3vFOSKS1juGUc,4568
 uncountable/types/api/recipes/get_curve.py,sha256=UIWfpqtU5sQokaxwYfQFNFl6HMyzWEF_Sjd8UMz0U88,939
 uncountable/types/api/recipes/get_recipe_calculations.py,sha256=eQmkdZzCEuq8S2f_kf_7GPvDLX1pTnY1CRmkK0SkMCI,1472
 uncountable/types/api/recipes/get_recipe_links.py,sha256=hk5dfQjv7yU2r-S9b8vwWEJLPHqU0-M6SFiTLMR3fVk,985
 uncountable/types/api/recipes/get_recipe_names.py,sha256=uCpXZq5oWjr9a_Vf-yYPaVS72XOlLHgAlju6KHeQ3UA,986
 uncountable/types/api/recipes/get_recipe_output_metadata.py,sha256=L9s2ykPP4pd02Pc98LDisY8bgV8CToS6t6fXKTWqGRw,1464
-uncountable/types/api/recipes/get_recipes_data.py,sha256=
+uncountable/types/api/recipes/get_recipes_data.py,sha256=nX4sCRY_RxztVqV-DGVpAvpayy6pn6cumS2pD1xmC5k,5429
 uncountable/types/api/recipes/remove_recipe_from_project.py,sha256=cr-VnqgBNek_WInmJln0UBn1GHMNQtRw3gsFTY_G91M,872
 uncountable/types/api/recipes/set_recipe_inputs.py,sha256=lFVfv-o_O5wHuMZdH63qlG4exFTlJM078oSAtb3XNxA,1426
 uncountable/types/api/recipes/set_recipe_metadata.py,sha256=Ba6ttd1JuS_Ypt-KpckSviWtOcQ-OTdTEJiaSYyoQL8,933
@@ -178,7 +180,7 @@ uncountable/types/api/recipes/set_recipe_tags.py,sha256=U710hgq9-t6QZGRB-ZGHskpt
 uncountable/types/api/recipes/unarchive_recipes.py,sha256=WcwFYbBsX2SKXnoBQ8locnRn7Bj1rHdtrURQVOfqgfU,814
 uncountable/types/api/triggers/__init__.py,sha256=gCgbynxG3jA8FQHzercKtrHKHkiIKr8APdZYUniAor8,55
 uncountable/types/api/triggers/run_trigger.py,sha256=9m9M8-nlGB_sAU2Qm2lWugp4h4Osqj6QpjNfU8osd1U,901
-UncountablePythonSDK-0.0.
-UncountablePythonSDK-0.0.
-UncountablePythonSDK-0.0.
-UncountablePythonSDK-0.0.
+UncountablePythonSDK-0.0.34.dist-info/METADATA,sha256=ECBUfqxWJ1erx7_ZX0HEmWccgQVW7HD7U1MZigiJkLU,1577
+UncountablePythonSDK-0.0.34.dist-info/WHEEL,sha256=mguMlWGMX-VHnMpKOjjQidIo1ssRlCFu4a4mBpz1s2M,91
+UncountablePythonSDK-0.0.34.dist-info/top_level.txt,sha256=1UVGjAU-6hJY9qw2iJ7nCBeEwZ793AEN5ZfKX9A1uj4,31
+UncountablePythonSDK-0.0.34.dist-info/RECORD,,
pkgs/argument_parser/argument_parser.py
CHANGED

@@ -10,7 +10,12 @@ from importlib import resources
 import dateutil.parser
 import yaml
 
-from pkgs.serialization import
+from pkgs.serialization import (
+    MissingSentryType,
+    OpaqueKey,
+    get_serial_class_data,
+    get_serial_union_data,
+)
 
 from ._is_enum import is_string_enum_class
 from ._is_namedtuple import is_namedtuple_type
@@ -112,6 +117,23 @@ def _invoke_membership_parser(
     raise ValueError(f"Expected value from {expected_values} but got value {value}")
 
 
+def _build_parser_discriminated_union(
+    discriminator: str, discriminator_map: dict[str, ParserFunction[T]]
+) -> ParserFunction[T]:
+    def parse(value: typing.Any) -> typing.Any:
+        discriminant = value.get(discriminator)
+        if discriminant is None:
+            raise ValueError("missing-union-discriminant")
+        if not isinstance(discriminant, str):
+            raise ValueError("union-discriminant-is-not-string")
+        parser = discriminator_map.get(discriminant)
+        if parser is None:
+            raise ValueError("missing-type-for-union-discriminant", discriminant)
+        return parser(value)
+
+    return parse
+
+
 def _build_parser_inner(
     parsed_type: type[T],
     context: ParserContext,
@@ -130,6 +152,23 @@ def _build_parser_inner(
     are cached now, as they don't use the argument, and they're known to be safe.
     This is also enough to support some recursion.
     """
+
+    serial_union = get_serial_union_data(parsed_type)
+    if serial_union is not None:
+        discriminator = serial_union.discriminator
+        discriminator_map = serial_union.discriminator_map
+        if discriminator is None or discriminator_map is None:
+            # fallback to standard union parsing
+            parsed_type = serial_union.get_union_underlying()
+        else:
+            return _build_parser_discriminated_union(
+                discriminator,
+                {
+                    key: _build_parser_inner(value, context)
+                    for key, value in discriminator_map.items()
+                },
+            )
+
     if dataclasses.is_dataclass(parsed_type):
         return _build_parser_dataclass(parsed_type, context)  # type: ignore[arg-type]
 
@@ -341,7 +380,7 @@ def _build_parser_dataclass(
 
         except Exception as e:
             raise ValueError(
-                f"unable
+                f"unable-to-parse-field:{field.name}", field_raw_value
             ) from e
 
     if context.options.strict_property_parsing:
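The new _build_parser_discriminated_union helper routes a payload to a per-member parser based on the value of the discriminator field. A standalone sketch of that dispatch idea, not the SDK's actual parser machinery; the payloads and per-type parsers below are hypothetical:

import typing

# A parser takes the raw (JSON-like) value and returns the parsed object.
ParserFunction = typing.Callable[[typing.Any], typing.Any]


def build_discriminated_parser(
    discriminator: str, discriminator_map: dict[str, ParserFunction]
) -> ParserFunction:
    # Mirrors the logic added in the diff: read the discriminator value,
    # then hand the whole payload to the parser registered for it.
    def parse(value: typing.Any) -> typing.Any:
        discriminant = value.get(discriminator)
        if discriminant is None:
            raise ValueError("missing-union-discriminant")
        parser = discriminator_map.get(discriminant)
        if parser is None:
            raise ValueError("missing-type-for-union-discriminant", discriminant)
        return parser(value)

    return parse


# Hypothetical per-member parsers keyed by the "type" field.
parse_edit = build_discriminated_parser(
    "type",
    {
        "add_input": lambda v: ("RecipeInputEditAddInput", v),
        "clear_inputs": lambda v: ("RecipeInputEditClearInputs", v),
    },
)

print(parse_edit({"type": "add_input", "name": "resin"}))
# -> ('RecipeInputEditAddInput', {'type': 'add_input', 'name': 'resin'})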
pkgs/serialization/__init__.py
CHANGED
@@ -8,3 +8,5 @@ from .serial_class import get_serial_class_data as get_serial_class_data
 from .serial_class import get_serial_string_enum_data as get_serial_string_enum_data
 from .serial_class import serial_class as serial_class
 from .serial_class import serial_string_enum as serial_string_enum
+from .serial_union import serial_union_annotation as serial_union_annotation
+from .serial_union import get_serial_union_data as get_serial_union_data
pkgs/serialization/serial_union.py
ADDED

@@ -0,0 +1,81 @@
+import dataclasses
+import typing
+
+T = typing.TypeVar("T")
+
+
+class IdentityHashWrapper(typing.Generic[T]):
+    """This allows unhashable types to be used in the SerialUnion, like dict.
+    Since we have only one copy of the types themselves, we rely on
+    object identity for the hashing."""
+
+    def __init__(self, inner: T) -> None:
+        self.inner = inner
+
+    def __hash__(self) -> int:
+        return id(self.inner)
+
+
+@dataclasses.dataclass(kw_only=True, frozen=True, eq=True)
+class _SerialUnion:
+    """
+    This class is to be kept private, to provide flexibility in registration/lookup.
+    Places that need the data should access it via help classes/methods.
+    """
+
+    # If specified, indicates the Union has a discriminator which should be used to
+    # determine which type to parse.
+    discriminator: typing.Optional[str] = None
+    discriminator_map: typing.Optional[IdentityHashWrapper[dict[str, type]]] = None
+
+
+def serial_union_annotation(
+    *,
+    discriminator: typing.Optional[str] = None,
+    discriminator_map: typing.Optional[dict[str, type]] = None,
+) -> _SerialUnion:
+    return _SerialUnion(
+        discriminator=discriminator,
+        discriminator_map=IdentityHashWrapper(discriminator_map)
+        if discriminator_map is not None
+        else None,
+    )
+
+
+def _get_serial_union(parsed_type: type[T]) -> _SerialUnion | None:
+    if not hasattr(parsed_type, "__metadata__"):
+        return None
+    metadata = parsed_type.__metadata__  # type:ignore[attr-defined]
+    if not isinstance(metadata, tuple) or len(metadata) != 1:
+        return None
+    serial = metadata[0]
+    if not isinstance(serial, _SerialUnion):
+        return None
+    return serial
+
+
+class SerialClassInspector(typing.Generic[T]):
+    def __init__(self, parsed_type: type[T], serial_union: _SerialUnion) -> None:
+        self._parsed_type = parsed_type
+        self._serial_union = serial_union
+
+    def get_union_underlying(self) -> type[T]:
+        return typing.get_args(self._parsed_type)[0]  # type:ignore[no-any-return]
+
+    @property
+    def discriminator(self) -> typing.Optional[str]:
+        return self._serial_union.discriminator
+
+    @property
+    def discriminator_map(self) -> typing.Optional[dict[str, type]]:
+        if self._serial_union.discriminator_map is None:
+            return None
+        return self._serial_union.discriminator_map.inner
+
+
+def get_serial_union_data(parsed_type: type[T]) -> SerialClassInspector[T] | None:
+    serial = _get_serial_union(parsed_type)
+    if serial is None:
+        return None
+
+    return SerialClassInspector(parsed_type, serial)
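Taken together with the argument_parser change above, serial_union_annotation is meant to be attached to a typing.Annotated union and read back through get_serial_union_data. A minimal, self-contained sketch, assuming the pkgs.serialization package shipped in this wheel is importable; the Cat and Dog types are hypothetical and not part of the SDK:

import dataclasses
import typing

from pkgs.serialization import get_serial_union_data, serial_union_annotation


@dataclasses.dataclass(kw_only=True)
class Cat:
    type: typing.Literal["cat"] = "cat"
    lives: int = 9


@dataclasses.dataclass(kw_only=True)
class Dog:
    type: typing.Literal["dog"] = "dog"
    good_boy: bool = True


# The discriminator names the field whose literal value selects the member type.
Pet = typing.Annotated[
    typing.Union[Cat, Dog],
    serial_union_annotation(
        discriminator="type",
        discriminator_map={"cat": Cat, "dog": Dog},
    ),
]

inspector = get_serial_union_data(Pet)
assert inspector is not None
print(inspector.discriminator)           # -> "type"
print(inspector.discriminator_map)       # -> {"cat": Cat, "dog": Dog}
print(inspector.get_union_underlying())  # -> typing.Union[Cat, Dog]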
pkgs/serialization_util/__init__.py
CHANGED

@@ -1,8 +1,5 @@
+from .convert_to_snakecase import convert_dict_to_snake_case
 from .serialization_helpers import (
-    convert_dict_to_snake_case,
-    convert_to_camelcase,
-    resolve_missing_to_none,
-    serialize,
     serialize_for_api,
     serialize_for_storage,
     serialize_for_storage_dict,
@@ -10,9 +7,6 @@ from .serialization_helpers import (
 
 __all__: list[str] = [
     "convert_dict_to_snake_case",
-    "convert_to_camelcase",
-    "resolve_missing_to_none",
-    "serialize",
     "serialize_for_api",
     "serialize_for_storage",
     "serialize_for_storage_dict",
pkgs/serialization_util/convert_to_snakecase.py
ADDED

@@ -0,0 +1,27 @@
+from typing import (
+    Any,
+)
+
+from pkgs.argument_parser import camel_to_snake_case
+from pkgs.serialization import (
+    MISSING_SENTRY,
+    OpaqueKey,
+)
+
+
+def _key_convert_to_snake_case(o: Any) -> Any:
+    if isinstance(o, OpaqueKey):
+        return o
+    if isinstance(o, str):
+        return camel_to_snake_case(o)
+    return o
+
+
+def convert_dict_to_snake_case(data: Any) -> Any:
+    return {
+        _key_convert_to_snake_case(k): convert_dict_to_snake_case(v)
+        if isinstance(v, dict)
+        else v
+        for k, v in data.items()
+        if v != MISSING_SENTRY
+    }
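A short usage sketch of the relocated helper: keys are snake_cased recursively through nested dicts, OpaqueKey keys are left alone, and values equal to MISSING_SENTRY are dropped. The payload is hypothetical and the printed result is the expected shape, assuming camel_to_snake_case converts camelCase keys as its name suggests:

from pkgs.serialization_util import convert_dict_to_snake_case

payload = {"recipeId": 7, "outputMetadata": {"createdAt": "2024-01-01"}}
print(convert_dict_to_snake_case(payload))
# expected: {'recipe_id': 7, 'output_metadata': {'created_at': '2024-01-01'}}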
pkgs/serialization_util/serialization_helpers.py
CHANGED

@@ -1,20 +1,27 @@
+import dataclasses
 import datetime
 import enum
+import functools
 from collections.abc import Callable, Mapping, Sequence
 from decimal import Decimal
 from typing import (
     TYPE_CHECKING,
     Any,
-
+    ClassVar,
+    Protocol,
     TypeVar,
     Union,
+    overload,
 )
 
-
+try:
+    import flask
+except Exception:
+    pass
+
+from pkgs.argument_parser import snake_to_camel_case
 from pkgs.serialization import (
     MISSING_SENTRY,
-    MissingSentryType,
-    MissingType,
     OpaqueKey,
     get_serial_class_data,
 )
@@ -28,79 +35,157 @@ if TYPE_CHECKING:
 else:
     JsonValue = Union[JsonScalar, dict[str, Any], list[Any]]
 
+T = TypeVar("T")
+
+
+class Dataclass(Protocol):
+    __dataclass_fields__: ClassVar[dict]  # type: ignore[type-arg,unused-ignore]
+
+
+def identity(x: T) -> T:
+    return x
 
-
+
+def key_convert_to_camelcase(o: Any) -> str:
     if isinstance(o, OpaqueKey):
         return o
-    if isinstance(o, enum.
+    if isinstance(o, enum.StrEnum):
         return o.value
+    if isinstance(o, enum.Enum):
+        try:
+            print(
+                "DEPRECATED_SERIALIZATION--non-string-enum-used-as-key",
+                flask.request.path,
+            )
+        except Exception:
+            pass
+        return o.value  # type: ignore[no-any-return]
     if isinstance(o, str):
         return snake_to_camel_case(o)
-
+    if isinstance(o, int):
+        # temporary bypass to maintain behavior
+        return o  # type: ignore[return-value]
+    try:
+        print("DEPRECATED_SERIALIZATION--non-string-used-as-key", o, flask.request.path)
+    except Exception:
+        pass
+    return o  # type: ignore[no-any-return]
 
 
-def _convert_dict(d: Any) ->
+def _convert_dict(d: dict[str, Any]) -> dict[str, JsonValue]:
     return {
-        key_convert_to_camelcase(k):
+        key_convert_to_camelcase(k): serialize_for_api(v)
        for k, v in d.items()
        if v != MISSING_SENTRY
     }
 
 
-def _serialize_dict(d: Any) -> dict[str,
-    return {k:
+def _serialize_dict(d: dict[str, Any]) -> dict[str, JsonValue]:
+    return {k: serialize_for_storage(v) for k, v in d.items() if v != MISSING_SENTRY}
+
+
+def _to_string_value(value: Any) -> str:
+    assert isinstance(value, (Decimal, int))
+    return str(value)
 
 
-
-
-
+@dataclasses.dataclass(kw_only=True)
+class DataclassConversions:
+    key_conversions: dict[str, str]
+    value_conversion_functions: dict[str, Callable[[Any], JsonValue]]
 
-
+
+@functools.lru_cache(maxsize=10000)
+def _get_dataclass_conversion_lookups(dataclass_type: Any) -> DataclassConversions:
+    scd = get_serial_class_data(dataclass_type)
+
+    key_conversions: dict[str, str] = {}
+    value_conversion_functions: dict[str, Callable[[Any], JsonValue]] = {}
+
+    for field in dataclasses.fields(dataclass_type):
+        key = field.name
         if scd.has_unconverted_key(key):
-
-
+            key_conversions[key] = key
+        else:
+            key_conversions[key] = key_convert_to_camelcase(key)
 
-    def value_convert(key: Any, value: Any) -> Any:
-        if value is None:
-            return None
         if scd.has_to_string_value(key):
-
-
-
-
-
-
-
-
-
-
-
-
-
+            value_conversion_functions[key] = _to_string_value
+        elif scd.has_unconverted_value(key):
+            value_conversion_functions[key] = identity
+        else:
+            value_conversion_functions[key] = serialize_for_api
+
+    return DataclassConversions(
+        key_conversions=key_conversions,
+        value_conversion_functions=value_conversion_functions,
+    )
+
+
+def _convert_dataclass(d: Any) -> dict[str, JsonValue]:
+    conversions = _get_dataclass_conversion_lookups(type(d))  # type: ignore[arg-type]
+    # return {
+    #     conversions.key_conversions[k]: (
+    #         conversions.value_conversion_functions[k](v) if v is not None else None
+    #     )
+    #     for k, v in d.__dict__.items()
+    #     if v != MISSING_SENTRY
+    # }
+
+    serialized_data_class: dict[str, JsonValue] = {}
+    for k, v in d.__dict__.items():
+        if v == MISSING_SENTRY:
+            continue
+        if k not in conversions.key_conversions:
+            try:
+                print(
+                    "DEPRECATED_SERIALIZATION--missing-dataclass-key",
+                    k,
+                    flask.request.path,
+                )
+            except Exception:
+                pass
+            continue
+        serialized_data_class[conversions.key_conversions[k]] = (
+            conversions.value_conversion_functions[k](v) if v is not None else None
+        )
+
+    return serialized_data_class
 
 
 _SERIALIZATION_FUNCS_STANDARD = {
     SerializationType.ENUM: lambda x: str(x.value),
     SerializationType.DATE: lambda x: x.isoformat(),
     SerializationType.TIMEDELTA: lambda x: x.total_seconds(),
-    SerializationType.UNKNOWN:
+    SerializationType.UNKNOWN: identity,
 }
 
-_CONVERSION_SERIALIZATION_FUNCS = {
+_CONVERSION_SERIALIZATION_FUNCS: dict[SerializationType, Callable[[Any], JsonValue]] = {
     **_SERIALIZATION_FUNCS_STANDARD,
     SerializationType.NAMED_TUPLE: lambda x: _convert_dict(x._asdict()),
-    SerializationType.ITERABLE: lambda x: [
+    SerializationType.ITERABLE: lambda x: [serialize_for_api(v) for v in x],
     SerializationType.DICT: _convert_dict,
     SerializationType.DATACLASS: _convert_dataclass,
 }
 
 
-
-
-
+@overload
+def serialize_for_api(obj: None) -> None: ...
+
+
+@overload
+def serialize_for_api(obj: dict[str, Any]) -> dict[str, JsonValue]: ...
+
 
+@overload
+def serialize_for_api(obj: Dataclass) -> dict[str, JsonValue]: ...
 
-
+
+@overload
+def serialize_for_api(obj: Any) -> JsonValue: ...
+
+
+def serialize_for_api(obj: Any) -> JsonValue:
     """
     Serialize to a parsed-JSON format suitably encoded for API output.
 
@@ -122,15 +207,10 @@ _SERIALIZATION_FUNCS: dict[SerializationType, Callable[[Any], JsonValue]] = {
     **_SERIALIZATION_FUNCS_STANDARD,
     **_SERIALIZATION_FUNCS_DICT,
     SerializationType.NAMED_TUPLE: lambda x: _serialize_dict(x._asdict()),
-    SerializationType.ITERABLE: lambda x: [
+    SerializationType.ITERABLE: lambda x: [serialize_for_storage(v) for v in x],
 }
 
 
-def serialize(obj: Any) -> Any:
-    """@DEPRECATED: prefer serialize_for_storage"""
-    return serialize_for_storage(obj)
-
-
 def serialize_for_storage(obj: Any) -> JsonValue:
     """
     Convert a value into the pseudo-JSON form for
@@ -142,34 +222,9 @@ def serialize_for_storage(obj: Any) -> JsonValue:
     return _SERIALIZATION_FUNCS[serialization_type](obj)
 
 
-def serialize_for_storage_dict(obj:
+def serialize_for_storage_dict(obj: dict | Dataclass) -> dict[str, JsonValue]:  # type: ignore[type-arg]
     """
     Same as serialize for storage but guarantees outer object is a dictionary
     """
-    serialization_type = get_serialization_type(type(obj))
+    serialization_type = get_serialization_type(type(obj))
     return _SERIALIZATION_FUNCS_DICT[serialization_type](obj)
-
-
-def key_convert_to_snake_case(o: Any) -> Any:
-    if isinstance(o, OpaqueKey):
-        return o
-    if isinstance(o, str):
-        return camel_to_snake_case(o)
-    return o
-
-
-def convert_dict_to_snake_case(data: Any) -> Any:
-    return {
-        key_convert_to_snake_case(k): convert_dict_to_snake_case(v)
-        if isinstance(v, dict)
-        else v
-        for k, v in data.items()
-        if v != MISSING_SENTRY
-    }
-
-
-T = TypeVar("T")
-
-
-def resolve_missing_to_none(val: MissingType[T]) -> Optional[T]:
-    return val if not isinstance(val, MissingSentryType) else None
pkgs/type_spec/builder.py
CHANGED
@@ -102,6 +102,8 @@ class DefnTypeName(StrEnum):
     s_string_enum = "StringEnum"
     # a particular literal value
     s_string_literal = "_StringLiteral"
+    # A union of several other types
+    s_union = "Union"
 
 
 base_namespace_name = "base"
@@ -547,13 +549,65 @@ class SpecTypeDefnAlias(SpecTypeDefn):
         super().base_process(builder, data, ["type", "desc", "alias", "discriminator"])
         self.alias = builder.parse_type(self.namespace, data["alias"])
         self.desc = data.get("desc", None)
-        # Should be limited to Union type aliases
         self.discriminator = data.get("discriminator", None)
 
     def get_referenced_types(self) -> list[SpecType]:
         return [self.alias]
 
 
+class SpecTypeDefnUnion(SpecTypeDefn):
+    def __init__(self, namespace: SpecNamespace, name: str) -> None:
+        super().__init__(namespace, name)
+        self.discriminator: str | None = None
+        self.types: list[SpecType] = []
+        self._alias_type: SpecType | None = None
+        self.discriminator_map: dict[str, SpecType] | None = None
+        self.desc: str | None = None
+
+    def process(self, builder: SpecBuilder, data: RawDict) -> None:
+        super().base_process(builder, data, ["type", "desc", "types", "discriminator"])
+
+        self.desc = data.get("desc", None)
+        self.discriminator = data.get("discriminator", None)
+
+        for sub_type_str in data["types"]:
+            sub_type = builder.parse_type(self.namespace, sub_type_str)
+            self.types.append(sub_type)
+
+        base_type = builder.namespaces[base_namespace_name].types[BaseTypeName.s_union]
+        self._backing_type = SpecTypeInstance(base_type, self.types)
+
+        if self.discriminator is not None:
+            self.discriminator_map = {}
+            for sub_type in self.types:
+                builder.push_where(sub_type.name)
+                assert isinstance(
+                    sub_type, SpecTypeDefnObject
+                ), "union-type-must-be-object"
+                assert sub_type.properties is not None
+                discriminator_type = sub_type.properties.get(self.discriminator)
+                assert (
+                    discriminator_type is not None
+                ), f"missing-discriminator-field: {sub_type}"
+                prop_type = unwrap_literal_type(discriminator_type.spec_type)
+                assert prop_type is not None
+                assert prop_type.is_value_to_string()
+                discriminant = str(prop_type.value)
+                assert (
+                    discriminant not in self.discriminator_map
+                ), f"duplicated-discriminant, {discriminant} in {sub_type}"
+                self.discriminator_map[discriminant] = sub_type
+
+                builder.pop_where()
+
+    def get_referenced_types(self) -> list[SpecType]:
+        return self.types
+
+    def get_backing_type(self) -> SpecType:
+        assert self._backing_type is not None
+        return self._backing_type
+
+
 class SpecTypeDefnExternal(SpecTypeDefn):
     external_map: dict[str, str]
 
@@ -1017,6 +1071,8 @@ class SpecNamespace:
         spec_type: SpecTypeDefn
         if defn_type == DefnTypeName.s_alias:
            spec_type = SpecTypeDefnAlias(self, name)
+        elif defn_type == DefnTypeName.s_union:
+            spec_type = SpecTypeDefnUnion(self, name)
         elif defn_type == DefnTypeName.s_external:
            spec_type = SpecTypeDefnExternal(self, name)
         elif defn_type == DefnTypeName.s_string_enum:
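The diff does not show the spec-file syntax that feeds SpecTypeDefnUnion, but process() reads the keys "type", "desc", "types" and "discriminator" from the parsed definition, so a union entry presumably parses to a mapping shaped roughly like the one below. The member names are taken from the generated edit_recipe_inputs.py; everything else here is an assumption:

# Hypothetical parsed form of a type_spec "Union" definition.
recipe_input_edit_defn = {
    "type": "Union",  # DefnTypeName.s_union
    "desc": "One edit operation applied to a recipe's inputs",
    "discriminator": "type",  # each member must carry a string-literal "type" property
    "types": [
        "RecipeInputEditClearInputs",
        "RecipeInputEditUpsertInput",
        "RecipeInputEditAddInput",
        "RecipeInputEditUpdateAnnotations",
    ],
}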
pkgs/type_spec/emit_open_api.py
CHANGED
@@ -430,6 +430,12 @@ def _emit_type(
         ctx.types[stype.name] = open_api_type(ctx, stype.alias, config=config)
         return
 
+    if isinstance(stype, builder.SpecTypeDefnUnion):
+        ctx.types[stype.name] = open_api_type(
+            ctx, stype.get_backing_type(), config=config
+        )
+        return
+
     if isinstance(stype, builder.SpecTypeDefnStringEnum):
         # TODO: check that these are always string enums
         # IMPROVE: reflect the enum names in the description
pkgs/type_spec/emit_python.py
CHANGED
@@ -43,6 +43,7 @@ class TrackingContext:
     use_serial_string_enum: bool = False
     use_dataclass: bool = False
     use_serial_class: bool = False
+    use_serial_union: bool = False
     use_missing: bool = False
     use_opaque_key: bool = False
 
@@ -219,6 +220,8 @@ def _emit_types_imports(*, out: io.StringIO, ctx: Context) -> None:
         out.write("from dataclasses import dataclass\n")
     if ctx.use_serial_class:
         out.write("from pkgs.serialization import serial_class\n")
+    if ctx.use_serial_union:
+        out.write("from pkgs.serialization import serial_union_annotation\n")
     if ctx.use_serial_string_enum:
         out.write("from pkgs.serialization import serial_string_enum\n")
     if ctx.use_missing:
@@ -727,6 +730,26 @@ def _emit_type(ctx: Context, stype: builder.SpecType) -> None:
         ctx.out.write(f"{stype.name} = {refer_to(ctx, stype.alias)}\n")
         return
 
+    if isinstance(stype, builder.SpecTypeDefnUnion):
+        ctx.use_serial_union = True
+        ctx.out.write(f"{stype.name} = typing.Annotated[\n")
+        ctx.out.write(f"{INDENT}{refer_to(ctx, stype.get_backing_type())},\n")
+        ctx.out.write(f"{INDENT}serial_union_annotation(\n")
+        if stype.discriminator is not None:
+            ctx.out.write(
+                f"{INDENT * 2}discriminator={util.encode_common_string(stype.discriminator)},\n"
+            )
+        if stype.discriminator_map is not None:
+            ctx.out.write(f"{INDENT * 2}discriminator_map={{\n")
+            for key, value in stype.discriminator_map.items():
+                ctx.out.write(
+                    f"{INDENT * 3}{util.encode_common_string(key)}: {refer_to(ctx, value)},\n"
+                )
+            ctx.out.write(f"{INDENT * 2}}},\n")
+        ctx.out.write(f"{INDENT}),\n")
+        ctx.out.write("]\n")
+        return
+
     if isinstance(stype, builder.SpecTypeDefnStringEnum):
         return _emit_string_enum(ctx, stype)
 
pkgs/type_spec/emit_typescript.py
CHANGED

@@ -303,6 +303,12 @@ def _emit_type(ctx: EmitTypescriptContext, stype: builder.SpecType) -> None:
         ctx.out.write(f"export type {stype.name} = {refer_to(ctx, stype.alias)}\n")
         return
 
+    if isinstance(stype, builder.SpecTypeDefnUnion):
+        ctx.out.write(
+            f"export type {stype.name} = {refer_to(ctx, stype.get_backing_type())}\n"
+        )
+        return
+
     if isinstance(stype, builder.SpecTypeDefnStringEnum):
         ctx.out.write(f"export enum {stype.name} {{\n")
         assert stype.values
pkgs/type_spec/type_info/emit_type_info.py
CHANGED

@@ -1,5 +1,6 @@
 import copy
 import dataclasses
+import decimal
 import io
 import json
 from typing import Any, Optional, TypeAlias, Union, cast
@@ -7,7 +8,10 @@ from typing import Any, Optional, TypeAlias, Union, cast
 from main.base.types import data_t
 from main.base.types.base import PureJsonValue
 from pkgs.argument_parser import CachedParser
-from pkgs.serialization_util import
+from pkgs.serialization_util import (
+    serialize_for_api,
+    serialize_for_storage,
+)
 
 from .. import builder, util
 from ..emit_typescript_util import MODIFY_NOTICE, ts_name
@@ -69,12 +73,21 @@ def _dict_null_strip(data: dict[str, object]) -> dict[str, object]:
     }
 
 
+class JsonEncoder(json.JSONEncoder):
+    """We have some defaults of special types that we need to emit"""
+
+    def default(self, obj: object) -> object:
+        if isinstance(obj, decimal.Decimal):
+            return str(obj)
+        return json.JSONEncoder.default(self, obj)
+
+
 def emit_type_info(build: builder.SpecBuilder, output: str) -> None:
     type_map = _build_map_all(build)
 
     # sort for stability, indent for smaller diffs
     stripped = _dict_null_strip(dataclasses.asdict(type_map))
-    serial = json.dumps(stripped, sort_keys=True, indent=2)
+    serial = json.dumps(stripped, sort_keys=True, indent=2, cls=JsonEncoder)
     type_map_out = io.StringIO()
     type_map_out.write(MODIFY_NOTICE)
     type_map_out.write(f"export const TYPE_MAP = {serial}")
@@ -287,7 +300,7 @@ def _build_map_type(
             properties=properties,
             desc=stype.desc,
             base_type_path=type_path_of(stype.base),
-            ext_info=serialize_for_api(ext_info),
+            ext_info=serialize_for_api(ext_info),  # type: ignore[arg-type]
         )
 
         if stype.properties is not None:
@@ -301,7 +314,7 @@ def _build_map_type(
                 api_name=ts_name(prop.name, prop.name_case),
                 extant=prop.extant,
                 type_path=type_path_of(prop.spec_type),
-                ext_info=serialize_for_api(parts.ext_info),
+                ext_info=serialize_for_api(parts.ext_info),  # type: ignore[arg-type]
                 desc=parts.desc,
                 default=prop.default,
             )
@@ -319,6 +332,19 @@ def _build_map_type(
             discriminator=stype.discriminator,
         )
 
+    if isinstance(stype, builder.SpecTypeDefnUnion):
+        # Emit as a basic alias for now, as the front-end supports only those for now
+        # IMPROVE: We should emit a proper union type and support that
+        backing = stype.get_backing_type()
+        return MapTypeAlias(
+            type_name=stype.name,
+            label=stype.label,
+            desc=stype.desc,
+            alias_type_path=type_path_of(backing),
+            ext_info=_convert_ext_info(stype.ext_info),
+            discriminator=stype.discriminator,
+        )
+
     if isinstance(stype, builder.SpecTypeDefnStringEnum):
         return MapStringEnum(
             type_name=stype.name,
uncountable/types/api/id_source/match_id_source.py
CHANGED

@@ -8,7 +8,6 @@ import typing # noqa: F401
 import datetime # noqa: F401
 from decimal import Decimal # noqa: F401
 from dataclasses import dataclass
-from pkgs.serialization import serial_class
 from ... import base as base_t
 from ... import id_source as id_source_t
 
@@ -39,9 +38,6 @@ class Match:
 
 
 # DO NOT MODIFY -- This file is generated by type_spec
-@serial_class(
-    unconverted_values={"results"},
-)
 @dataclass(kw_only=True)
 class Data:
     results: list[Match]
uncountable/types/api/recipes/edit_recipe_inputs.py
CHANGED

@@ -10,6 +10,7 @@ from decimal import Decimal # noqa: F401
 from pkgs.strenum_compat import StrEnum
 from dataclasses import dataclass
 from pkgs.serialization import serial_class
+from pkgs.serialization import serial_union_annotation
 from ... import identifier as identifier_t
 from ... import recipe_inputs as recipe_inputs_t
 from ... import recipe_workflow_steps as recipe_workflow_steps_t
@@ -114,7 +115,18 @@ class RecipeInputEditUpdateAnnotations(RecipeInputEditInputBase):
 
 
 # DO NOT MODIFY -- This file is generated by type_spec
-RecipeInputEdit = typing.
+RecipeInputEdit = typing.Annotated[
+    typing.Union[RecipeInputEditClearInputs, RecipeInputEditUpsertInput, RecipeInputEditAddInput, RecipeInputEditUpdateAnnotations],
+    serial_union_annotation(
+        discriminator="type",
+        discriminator_map={
+            "clear_inputs": RecipeInputEditClearInputs,
+            "upsert_input": RecipeInputEditUpsertInput,
+            "add_input": RecipeInputEditAddInput,
+            "update_annotations": RecipeInputEditUpdateAnnotations,
+        },
+    ),
+]
 
 
 # DO NOT MODIFY -- This file is generated by type_spec
uncountable/types/api/recipes/get_recipes_data.py
CHANGED

@@ -15,7 +15,6 @@ from ... import inputs as inputs_t
 from ... import outputs as outputs_t
 from ... import recipe_metadata as recipe_metadata_t
 from ... import recipe_tags as recipe_tags_t
-from ... import users as users_t
 from ... import workflows as workflows_t
 
 __all__: list[str] = [
@@ -97,7 +96,6 @@ class RecipeInput:
     quantity_json: base_t.JsonValue
     curve_id: typing.Optional[base_t.ObjectId]
     actual_quantity_json: base_t.JsonValue
-    input_type: str
     behavior: str
     quantity_dec: typing.Optional[Decimal] = None
     actual_quantity_dec: typing.Optional[Decimal] = None
@@ -170,7 +168,6 @@ class Data:
     inputs: list[inputs_t.SimpleInput]
     outputs: list[outputs_t.SimpleOutput]
     output_conditions: list[SimpleOutputCondition]
-    users: list[users_t.SimpleUser]
     recipe_tags: list[recipe_tags_t.SimpleRecipeTag]
     experiment_groups: list[experiment_groups_t.SimpleExperimentGroup]
 # DO NOT MODIFY -- This file is generated by type_spec
uncountable/types/recipe_workflow_steps.py
CHANGED

@@ -10,6 +10,7 @@ from decimal import Decimal # noqa: F401
 from pkgs.strenum_compat import StrEnum
 from dataclasses import dataclass
 from pkgs.serialization import serial_class
+from pkgs.serialization import serial_union_annotation
 from . import identifier as identifier_t
 
 __all__: list[str] = [
@@ -64,14 +65,24 @@ class RecipeWorkflowStepIdentifierWorkflowStep(RecipeWorkflowStepIdentifierBase)
 
 # DO NOT MODIFY -- This file is generated by type_spec
 @serial_class(
-    parse_require={"
+    parse_require={"type"},
 )
 @dataclass(kw_only=True)
 class RecipeWorkflowStepIdentifierKey:
-
+    type: typing.Literal[RecipeWorkflowStepIdentifierType.IDENTIFIER_KEY] = RecipeWorkflowStepIdentifierType.IDENTIFIER_KEY
     recipe_workflow_step_key: identifier_t.IdentifierKey
 
 
 # DO NOT MODIFY -- This file is generated by type_spec
-RecipeWorkflowStepIdentifier = typing.
+RecipeWorkflowStepIdentifier = typing.Annotated[
+    typing.Union[RecipeWorkflowStepIdentifierDefault, RecipeWorkflowStepIdentifierWorkflowStep, RecipeWorkflowStepIdentifierKey],
+    serial_union_annotation(
+        discriminator="type",
+        discriminator_map={
+            "default": RecipeWorkflowStepIdentifierDefault,
+            "workflow_step": RecipeWorkflowStepIdentifierWorkflowStep,
+            "identifier_key": RecipeWorkflowStepIdentifierKey,
+        },
+    ),
+]
 # DO NOT MODIFY -- This file is generated by type_spec
{UncountablePythonSDK-0.0.31.dist-info → UncountablePythonSDK-0.0.34.dist-info}/top_level.txt
RENAMED
File without changes