tracdap-runtime: tracdap_runtime-0.8.0b3-py3-none-any.whl → tracdap_runtime-0.8.0rc1-py3-none-any.whl
This diff reflects the content of publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
- tracdap/rt/_impl/core/config_parser.py +55 -37
- tracdap/rt/_impl/core/data.py +63 -32
- tracdap/rt/_impl/core/storage.py +4 -1
- tracdap/rt/_impl/core/struct.py +547 -0
- tracdap/rt/_impl/core/type_system.py +73 -33
- tracdap/rt/_impl/core/validation.py +56 -15
- tracdap/rt/_impl/exec/context.py +64 -10
- tracdap/rt/_impl/exec/dev_mode.py +25 -14
- tracdap/rt/_impl/exec/functions.py +79 -29
- tracdap/rt/_impl/grpc/codec.py +1 -1
- tracdap/rt/_impl/grpc/tracdap/api/internal/runtime_pb2.py +2 -2
- tracdap/rt/_impl/grpc/tracdap/api/internal/runtime_pb2_grpc.py +1 -1
- tracdap/rt/_impl/grpc/tracdap/metadata/data_pb2.py +31 -19
- tracdap/rt/_impl/grpc/tracdap/metadata/data_pb2.pyi +48 -2
- tracdap/rt/_impl/grpc/tracdap/metadata/object_pb2.py +2 -2
- tracdap/rt/_impl/grpc/tracdap/metadata/object_pb2.pyi +3 -3
- tracdap/rt/_impl/grpc/tracdap/metadata/{stoarge_pb2.py → storage_pb2.py} +3 -3
- tracdap/rt/_impl/static_api.py +9 -1
- tracdap/rt/_plugins/storage_sql.py +12 -5
- tracdap/rt/_version.py +1 -1
- tracdap/rt/api/__init__.py +1 -23
- tracdap/rt/api/constants.py +57 -0
- tracdap/rt/api/experimental.py +32 -0
- tracdap/rt/api/hook.py +11 -0
- tracdap/rt/api/static_api.py +54 -2
- tracdap/rt/config/__init__.py +1 -4
- tracdap/rt/config/common.py +0 -34
- tracdap/rt/config/platform.py +6 -26
- tracdap/rt/metadata/__init__.py +31 -29
- tracdap/rt/metadata/data.py +40 -0
- tracdap/rt/metadata/file.py +2 -0
- tracdap/rt/metadata/object.py +1 -1
- {tracdap_runtime-0.8.0b3.dist-info → tracdap_runtime-0.8.0rc1.dist-info}/METADATA +17 -14
- {tracdap_runtime-0.8.0b3.dist-info → tracdap_runtime-0.8.0rc1.dist-info}/RECORD +39 -38
- {tracdap_runtime-0.8.0b3.dist-info → tracdap_runtime-0.8.0rc1.dist-info}/WHEEL +1 -1
- tracdap/rt/api/file_types.py +0 -29
- /tracdap/rt/_impl/grpc/tracdap/metadata/{stoarge_pb2.pyi → storage_pb2.pyi} +0 -0
- /tracdap/rt/metadata/{stoarge.py → storage.py} +0 -0
- {tracdap_runtime-0.8.0b3.dist-info → tracdap_runtime-0.8.0rc1.dist-info}/LICENSE +0 -0
- {tracdap_runtime-0.8.0b3.dist-info → tracdap_runtime-0.8.0rc1.dist-info}/top_level.txt +0 -0
tracdap/rt/_impl/core/struct.py (new file)
@@ -0,0 +1,547 @@
# Licensed to the Fintech Open Source Foundation (FINOS) under one or
# more contributor license agreements. See the NOTICE file distributed
# with this work for additional information regarding copyright ownership.
# FINOS licenses this file to you under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import dataclasses as _dc
import datetime as _dt
import decimal as _decimal
import enum as _enum
import json as _json
import types as _ts
import typing as _tp
import uuid as _uuid

import yaml as _yaml

try:
    import pydantic as _pyd  # noqa
except ModuleNotFoundError:
    _pyd = None

import tracdap.rt.metadata as _meta
import tracdap.rt.exceptions as _ex
import tracdap.rt._impl.core.logging as _logging
import tracdap.rt._impl.core.type_system as _meta_types

class StructProcessor:

    __primitive_types: dict[type, _meta.BasicType] = {
        bool: _meta.BasicType.BOOLEAN,
        int: _meta.BasicType.INTEGER,
        float: _meta.BasicType.FLOAT,
        str: _meta.BasicType.STRING,
        _decimal.Decimal: _meta.BasicType.DECIMAL,
        _dt.date: _meta.BasicType.DATE,
        _dt.datetime: _meta.BasicType.DATETIME
    }

    # Support both new and old styles for generic, union and optional types
    # Old-style annotations are still valid, even when the new style is fully supported
    __generic_types: list[type] = [
        _ts.GenericAlias,
        type(_tp.List[int]),
        type(_tp.Union[int, str]),
        type(_tp.Optional[int])
    ]

    __union_types: list[type] = [
        _tp.Union,
        _tp.Optional
    ]

    # UnionType was added to the types module in Python 3.10, we support 3.9 (Jan 2025)
    if hasattr(_ts, "UnionType"):
        __generic_types.append(_ts.UnionType)
        __union_types.append(_ts.UnionType)

    @classmethod
    def define_struct(cls, python_type: type) -> _meta.StructSchema:

        if _dc.is_dataclass(python_type):
            return cls._define_struct_for_dataclass(python_type)

        if _pyd and issubclass(python_type, _pyd.BaseModel):
            return cls._define_struct_for_pydantic(python_type)

        raise _ex.EUnexpected()

    @classmethod
    def load_struct(cls, src: _tp.TextIO, src_format: str) -> _tp.Any:

        try:

            if src_format == "yaml" or src_format == "yml":
                config_dict = _yaml.safe_load(src)

            elif src_format == "json":
                config_dict = _json.load(src)

            else:
                msg = f"Format not recognised: " + src_format
                raise _ex.EConfigLoad(msg)

            return config_dict

        except UnicodeDecodeError as e:
            err = f"Contents of the config file is garbled and cannot be read ({str(e)})"
            raise _ex.EConfigParse(err) from e

        except _json.decoder.JSONDecodeError as e:
            err = f"Config file contains invalid JSON ({str(e)})"
            raise _ex.EConfigParse(err) from e

        except (_yaml.parser.ParserError, _yaml.reader.ReaderError) as e:
            err = f"Config file contains invalid YAML ({str(e)})"
            raise _ex.EConfigParse(err) from e

    @classmethod
    def save_struct(cls, struct: _tp.Any, dst: _tp.TextIO, dst_format: str):

        StructQuoter.quote(struct, dst, dst_format)

    @classmethod
    def parse_struct(cls, data: dict, schema: _meta.StructSchema = None, python_type: type = None) -> object:

        parser = StructParser()
        return parser.parse(python_type, data)

    @classmethod
    def _define_struct_for_dataclass(cls, python_type: _dc.dataclass) -> _meta.StructSchema:

        type_hints = _tp.get_type_hints(python_type)
        trac_fields = dict()

        for dc_field in _dc.fields(python_type):

            field_name = dc_field.name
            python_type = type_hints[field_name]

            trac_field = cls._define_field(python_type, dc_field=dc_field)
            trac_fields[field_name] = trac_field

        return _meta.StructSchema(fields=trac_fields)

    @classmethod
    def _define_struct_for_pydantic(cls, python_type: "type[_pyd.BaseModel]") -> _meta.StructSchema:

        type_hints = _tp.get_type_hints(python_type)
        trac_fields = dict()

        for field_name, pyd_field in python_type.model_fields.items():

            python_type = type_hints[field_name]

            trac_field = cls._define_field(python_type, pyd_field=pyd_field)

            if trac_field is not None:
                trac_fields[field_name] = trac_field

        return _meta.StructSchema(fields=trac_fields)

    @classmethod
    def _define_field(
            cls, python_type: type, *,
            dc_field: _dc.Field = None,
            pyd_field: "_pyd.fields.FieldInfo" = None) \
            -> _meta.StructField:

        if python_type in cls.__primitive_types:
            return cls._define_primitive_field(python_type, dc_field=dc_field, pyd_field=pyd_field)

        elif any(map(lambda _t: isinstance(python_type, _t), cls.__generic_types)):
            return cls._define_generic_field(python_type, pyd_field=pyd_field)

        elif dc_field is not None and _dc.is_dataclass(python_type):
            pass

        elif pyd_field is not None and issubclass(python_type, _pyd.BaseModel):
            pass

        else:
            raise _ex.ETracInternal("Cannot encode field type: " + str(python_type))

    @classmethod
    def _define_primitive_field(
            cls, python_type: type, optional=False, *,
            dc_field: _dc.Field = None,
            pyd_field: "_pyd.fields.FieldInfo" = None) \
            -> _meta.StructField:

        struct_field = _meta.StructField()
        struct_field.fieldType = _meta.TypeDescriptor(basicType=cls.__primitive_types[python_type])
        struct_field.notNull = not optional

        if dc_field is not None and dc_field.default is not _dc.MISSING:
            struct_field.defaultValue = _meta_types.MetadataCodec.encode_value(dc_field.default)

        if pyd_field is not None and pyd_field.default is not _pyd.fields.PydanticUndefined:
            struct_field.defaultValue = _meta_types.MetadataCodec.encode_value(pyd_field.default)

        return struct_field

    @classmethod
    def _define_generic_field(
            cls, python_type: type, *,
            dc_field: _dc.Field = None,
            pyd_field: "_pyd.fields.FieldInfo" = None) -> _meta.StructField:

        origin = _tp.get_origin(python_type)
        args = _tp.get_args(python_type)

        # types.NoneType not available in Python 3.9, so use type(None) instead
        if origin in cls.__union_types and len(args) == 2 and args[1] is type(None):
            optional_type = args[0]
            return cls._define_primitive_field(optional_type, optional=True, dc_field=dc_field, pyd_field=pyd_field)

        elif origin in [list, _tp.List]:
            list_type = args[0]
            pass

        elif origin in [dict, _tp.Dict]:
            key_type = args[0]
            value_type = args[1]
            pass

        else:
            raise _ex.ETracInternal("Cannot encode field type: " + str(python_type))


class StructParser:

    # New implementation of STRUCT parsing, copied from config_parser
    # After a period of stabilization, config_parser will switch to this implementation
    # It will need to inherit the class with overrides for handling dev-mode locations

    __primitive_types: _tp.Dict[type, callable] = {
        bool: bool,
        int: int,
        float: float,
        str: str,
        _decimal.Decimal: _decimal.Decimal,
        _dt.date: _dt.date.fromisoformat,
        _dt.datetime: _dt.datetime.fromisoformat
    }

    # Support both new and old styles for generic, union and optional types
    # Old-style annotations are still valid, even when the new style is fully supported
    __generic_types: list[type] = [
        _ts.GenericAlias,
        type(_tp.List[int]),
        type(_tp.Optional[int])
    ]

    # UnionType was added to the types module in Python 3.10, we support 3.9 (Jan 2025)
    if hasattr(_ts, "UnionType"):
        __generic_types.append(_ts.UnionType)

    __STRUCT_TYPE = _tp.TypeVar("__STRUCT_TYPE")

    def __init__(self):
        self._log = _logging.logger_for_object(self)
        self._errors = []

    def parse(self, config_class: type[__STRUCT_TYPE], config_dict: dict) -> __STRUCT_TYPE:

        # If config is empty, return a default (blank) config
        if config_dict is None or len(config_dict) == 0:
            return config_class()

        config = self._parse_value("", config_dict, config_class)

        if any(self._errors):

            message = "One or more conformance errors in STRUCT data"

            for (location, error) in self._errors:
                location_info = f" (in {location})" if location else ""
                message = message + f"\n{error}{location_info}"

            raise _ex.EConfigParse(message)

        return config

    def _parse_value(self, location: str, raw_value: _tp.Any, annotation: type):

        if raw_value is None:
            return None

        if annotation in StructParser.__primitive_types:
            return self._parse_primitive(location, raw_value, annotation)

        # Allow parsing of generic primitives, this allows for e.g. param maps of mixed primitive types
        if annotation == _tp.Any:

            if type(raw_value) in StructParser.__primitive_types:
                return self._parse_primitive(location, raw_value, type(raw_value))
            else:
                return self._error(location, f"Expected a primitive value, got '{str(raw_value)}'")

        if isinstance(annotation, _enum.EnumMeta):
            return self._parse_enum(location, raw_value, annotation)

        if any(map(lambda _t: isinstance(annotation, _t), self.__generic_types)):
            return self._parse_generic_class(location, raw_value, annotation)  # noqa

        if _dc.is_dataclass(annotation):
            return self._parser_dataclass(location, raw_value, annotation)

        # Basic support for Pydantic, if it is installed
        if _pyd and issubclass(annotation, _pyd.BaseModel):
            return self._parser_pydantic_model(location, raw_value, annotation)

        return self._error(location, f"Cannot parse value of type {annotation.__name__}")

    def _parse_primitive(self, location: str, raw_value: _tp.Any, simple_type: type):

        parse_func = StructParser.__primitive_types[simple_type]

        try:
            if isinstance(raw_value, simple_type):
                return raw_value

            elif isinstance(raw_value, str):
                return parse_func(raw_value)

            elif simple_type == str:
                return str(raw_value)

            else:
                raise TypeError

        except (ValueError, TypeError):
            return self._error(location, f"Expected primitive type {simple_type.__name__}, got '{str(raw_value)}'")

    def _parse_enum(self, location: str, raw_value: _tp.Any, enum_type: _enum.EnumMeta):

        if not isinstance(raw_value, str):
            return self._error(location, f"Expected {enum_type.__name__} (string), got {str(raw_value)}")

        try:
            enum_value = self._parse_enum_value(raw_value, enum_type)

            if isinstance(enum_value.value, tuple):
                enum_value._value_ = enum_value.value[0]

            return enum_type.__new__(enum_type, enum_value)

        except KeyError:
            return self._error(location, f"Invalid value for {enum_type.__name__}: {raw_value}")

    @staticmethod
    def _parse_enum_value(raw_value: str, enum_type: _enum.EnumMeta):

        try:
            return enum_type.__members__[raw_value]

        except KeyError:

            # Try a case-insensitive match as a fallback
            for enum_member in enum_type.__members__:
                if enum_member.upper() == raw_value.upper():
                    return enum_type.__members__[enum_member]

            # Re-raise the exception if case-insensitive match fails
            raise

    def _parser_dataclass(self, location: str, raw_dict: _tp.Any, python_type: type) -> object:

        type_hints = _tp.get_type_hints(python_type)
        init_values = dict()
        missing_values = list()

        for dc_field in _dc.fields(python_type):  # noqa

            field_name = dc_field.name
            field_location = self._child_location(location, field_name)
            field_type = type_hints[field_name]
            field_raw_value = raw_dict.get(field_name)

            if field_raw_value is not None:
                field_value = self._parse_value(field_location, field_raw_value, field_type)
                init_values[field_name] = field_value

            elif dc_field.default is _dc.MISSING:
                self._error(location, f"Missing required value '{field_name}'")
                missing_values.append(field_name)

        # Do not try to construct an invalid instance
        if any(missing_values):
            return None

        return python_type(**init_values)

    def _parser_pydantic_model(self, location: str, raw_dict: _tp.Any, python_type: "type[_pyd.BaseModel]") -> object:

        type_hints = _tp.get_type_hints(python_type)
        init_values = dict()
        missing_values = list()

        for field_name, pyd_field in python_type.model_fields.items():

            field_location = self._child_location(location, field_name)
            field_type = type_hints[field_name]
            field_raw_value = raw_dict.get(field_name)

            if field_raw_value is not None:
                field_value = self._parse_value(field_location, field_raw_value, field_type)
                init_values[field_name] = field_value

            elif pyd_field.is_required():
                self._error(location, f"Missing required value '{field_name}'")
                missing_values.append(field_name)

        # Do not try to construct an invalid instance
        if any(missing_values):
            return None

        return python_type(**init_values)

    def _parse_generic_class(self, location: str, raw_value: _tp.Any, metaclass: type):

        origin = _tp.get_origin(metaclass)
        args = _tp.get_args(metaclass)

        if origin == _tp.List or origin == list:

            list_type = args[0]

            if not isinstance(raw_value, list):
                return self._error(location, f"Expected a list, got {type(raw_value)}")

            return [
                self._parse_value(self._child_location(location, idx), item, list_type)
                for (idx, item) in enumerate(raw_value)]

        if origin == _tp.Dict or origin == dict:

            key_type = args[0]
            value_type = args[1]

            if not isinstance(raw_value, dict):
                return self._error(location, f"Expected {metaclass} (dict), got {type(raw_value)}")

            return {
                self._parse_value(self._child_location(location, key), key, key_type):
                    self._parse_value(self._child_location(location, key), value, value_type)
                for key, value in raw_value.items()}

        # Handle Optional, which is a shorthand for _tp.Union[type, None]
        if origin == _tp.Union and len(args) == 2 and args[1] == type(None):  # noqa

            if raw_value is not None:
                return self._parse_value(location, raw_value, args[0])
            else:
                return None

        return self._error(location, f"Struct parser does not support generic type '{str(origin)}'")

    def _error(self, location: str, error: str) -> None:
        self._errors.append((location, error))
        return None

    @classmethod
    def _is_primitive(cls, obj: _tp.Union[type, _tp.Any]) -> bool:

        if isinstance(obj, type):
            return obj in cls.__primitive_types
        else:
            return type(obj) in cls.__primitive_types

    @staticmethod
    def _child_location(parent_location: str, item: _tp.Union[str, int]):

        if parent_location is None or parent_location == "":
            return item
        elif isinstance(item, int):
            return f"{parent_location}[{item}]"
        else:
            return f"{parent_location}.{item}"


class StructQuoter:

    # New implementation of STRUCT quoting, copied from config_parser
    # After a period of stabilization, config_parser will switch to this implementation

    JSON_FORMAT = "json"
    YAML_FORMAT = "yaml"
    INDENT = 3

    @classmethod
    def quote(cls, obj: _tp.Any, dst: _tp.TextIO, dst_format: str):

        if dst_format.lower() == cls.JSON_FORMAT:
            return cls.quote_json(obj, dst)

        if dst_format.lower() == cls.YAML_FORMAT:
            return cls.quote_yaml(obj, dst)

        # TODO: This is probably an error in the user-supplied parameters
        raise _ex.ETracInternal(f"Unsupported output format [{dst_format}]")

    @classmethod
    def quote_json(cls, obj: _tp.Any, dst: _tp.TextIO):

        _json.dump(obj, dst, cls=StructQuoter._JsonEncoder, indent=cls.INDENT)

    @classmethod
    def quote_yaml(cls, obj: _tp.Any, dst: _tp.TextIO):

        _yaml.dump(obj, dst, indent=cls.INDENT, Dumper=StructQuoter._YamlDumper)

    class _JsonEncoder(_json.JSONEncoder):

        def __init__(self, **kwargs):

            super().__init__(**kwargs)

            # Do not force-escape non-ascii characters, output UTF-8 instead
            self.ensure_ascii = False

        def default(self, o: _tp.Any) -> str:

            if isinstance(o, _enum.Enum):
                return o.name
            if isinstance(o, _uuid.UUID):
                return str(o)
            if isinstance(o, _dt.date):
                return o.isoformat()
            if isinstance(o, _dt.datetime):
                return o.isoformat()
            elif _dc.is_dataclass(o):
                return o.__dict__
            elif _pyd and isinstance(o, _pyd.BaseModel):
                return o.__dict__  # noqa
            else:
                return super().default(o)

    class _YamlDumper(_yaml.Dumper):

        def __init__(self, *args, **kwargs):
            super().__init__(*args, **kwargs)

        def ignore_aliases(self, data):
            return True

        def represent_data(self, data):
            if isinstance(data, _enum.Enum):
                return self.represent_str(data.name)
            if _dc.is_dataclass(data):
                return self.represent_dict(data.__dict__)
            elif _pyd and isinstance(data, _pyd.BaseModel):
                return self.represent_dict(data.__dict__)
            else:
                return super().represent_data(data)
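
The new module covers the full round trip for STRUCT data: StructProcessor.define_struct derives a StructSchema from a dataclass or Pydantic model, StructQuoter writes instances out as JSON or YAML, and StructParser rebuilds typed objects from raw dictionaries, collecting conformance errors (missing required fields, bad enum values, type mismatches) into a single EConfigParse error with the failing location attached to each entry. The following minimal sketch, which is not part of the diff, shows how that round trip could be exercised; the Prediction dataclass is a made-up example, and the import path and method signatures are taken from the listing above.

import dataclasses
import io

from tracdap.rt._impl.core.struct import StructProcessor


@dataclasses.dataclass
class Prediction:
    label: str
    score: float
    threshold: float = 0.5


# Derive a TRAC StructSchema from the dataclass type hints
schema = StructProcessor.define_struct(Prediction)

# Write an instance out as JSON (save_struct delegates to StructQuoter)
original = Prediction(label="approve", score=0.92)
buffer = io.StringIO()
StructProcessor.save_struct(original, buffer, "json")

# Read the raw dict back and re-parse it into a typed dataclass instance
buffer.seek(0)
raw_dict = StructProcessor.load_struct(buffer, "json")
restored = StructProcessor.parse_struct(raw_dict, schema, python_type=Prediction)

assert restored == original

Parsing is driven by the Python type annotations rather than the TRAC schema: nested dataclasses, Pydantic models, lists, dicts, enums and Optional fields are all resolved recursively through StructParser._parse_value, which is why parse_struct only needs the python_type argument in the sketch above.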