player-tools-dsl-generator 0.13.0.dev2215819__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- player_tools_dsl_generator/__init__.py +0 -0
- player_tools_dsl_generator/__main__.py +54 -0
- player_tools_dsl_generator/generator.py +992 -0
- player_tools_dsl_generator/utils.py +54 -0
- player_tools_dsl_generator-0.13.0.dev2215819.dist-info/METADATA +6 -0
- player_tools_dsl_generator-0.13.0.dev2215819.dist-info/RECORD +7 -0
- player_tools_dsl_generator-0.13.0.dev2215819.dist-info/WHEEL +4 -0
player_tools_dsl_generator/__init__.py
File without changes

player_tools_dsl_generator/__main__.py
@@ -0,0 +1,54 @@
+"""
+Module entrypoint for generating Player Components
+"""
+
+if __name__ == "__main__":
+
+    from argparse import ArgumentParser
+    from os.path import join
+    from json import load
+    from sys import exit
+
+    from player_tools_xlr_types.deserializer import deserialize_xlr_node
+    from player_tools_xlr_types.nodes import NamedType, ObjectType
+    from .generator import generate_python_classes
+
+    # Parse Args
+    parser = ArgumentParser()
+    parser.add_argument("-i", "--input", dest="input",
+                        help="Directory containing a manifest.json " \
+                             "that should be used for generation")
+    parser.add_argument("-o", "--output",
+                        dest="output",
+                        default = "./dist",
+                        help="Where to write the generated classes to")
+
+    args = parser.parse_args()
+    input = args.input
+    output = args.output
+
+    if not args.input:
+        print("Error, must supply an input directory with `-i` or --input`")
+        print("Exiting with status -1")
+        exit(-1)
+
+    # Start Processing
+    with open(join(input, 'manifest.json'), 'r', encoding="utf-8") as manifest_json:
+        manifest = load(manifest_json)
+        capabilities = manifest['capabilities']
+
+    #Generate Assets
+    assets = capabilities.get('Assets',[])
+    for asset in assets:
+        with open(join(input, asset+".json"), "r", encoding="utf-8") as f:
+            asset_json = f.read()
+            asset_ast: NamedType[ObjectType] = deserialize_xlr_node(asset_json) # type: ignore
+            generate_python_classes(asset_ast, "asset", output)
+
+    # Generate Views
+    views = capabilities.get('Views',[])
+    for view in views:
+        with open(join(input, view+".json"), "r", encoding="utf-8") as f:
+            asset_json = f.read()
+            asset_ast: NamedType[ObjectType] = deserialize_xlr_node(asset_json) # type: ignore
+            generate_python_classes(asset_ast, "view", output)
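
Taken together with the argparse setup above, the package is meant to be invoked as a module entrypoint, for example: python -m player_tools_dsl_generator -i <directory containing manifest.json> -o ./dist. The same flow can also be driven directly from Python; the sketch below mirrors the asset loop in __main__.py. It is not part of the package, and the input directory name and capability names are hypothetical placeholders.

# Minimal usage sketch (editor's example, not shipped in the wheel).
# "./xlr" and the entries under manifest["capabilities"]["Assets"] are assumptions.
from json import load
from os.path import join

from player_tools_xlr_types.deserializer import deserialize_xlr_node
from player_tools_dsl_generator.generator import generate_python_classes

input_dir, output_dir = "./xlr", "./dist"
with open(join(input_dir, "manifest.json"), "r", encoding="utf-8") as f:
    capabilities = load(f)["capabilities"]

for asset in capabilities.get("Assets", []):
    with open(join(input_dir, asset + ".json"), "r", encoding="utf-8") as f:
        node = deserialize_xlr_node(f.read())
        generate_python_classes(node, "asset", output_dir)  # writes <TypeName>.py into output_dir
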
player_tools_dsl_generator/generator.py
@@ -0,0 +1,992 @@
+"""
+XLR to Python Class Generator
+
+Converts XLR NamedType[ObjectType] nodes into Python classes using AST generation.
+Top-level ObjectTypes become Asset classes, nested ObjectTypes become Serializable classes.
+"""
+
+import ast
+from typing import Any, List, Dict, Literal, Optional, Union
+from pathlib import Path
+from copy import deepcopy
+
+from player_tools_xlr_types.nodes import (
+    AndType,
+    NamedType,
+    ObjectProperty,
+    ObjectType,
+    NodeType,
+    OrType,
+    RefType
+)
+
+from player_tools_xlr_types.guards import (
+    is_and_type,
+    is_any_type,
+    is_named_type_with_generics,
+    is_null_type,
+    is_object_type,
+    is_array_type,
+    is_primitive_const,
+    is_record_type,
+    is_string_type,
+    is_number_type,
+    is_boolean_type,
+    is_named_type,
+    is_or_type,
+    is_ref_type,
+    is_undefined_type,
+    is_unknown_type
+)
+
+from .utils import (
+    COMMON_AST_NODES,
+    PropertyInfo,
+    PLAYER_DSL_PACKAGE,
+    clean_property_name,
+    generate_class_name,
+    ast_to_source
+)
+
+def generate_python_classes(
+    named_object_type: NamedType[ObjectType],
+    type: Literal['asset', 'view'],
+    output_dir: str = "."
+) -> str:
+    """
+    Generate Python classes from a NamedType[ObjectType] and write to file.
+
+    Args:
+        named_object_type: NamedType wrapping an ObjectType
+        output_dir: Directory to write the generated file
+
+    Returns:
+        Path to the generated file
+
+    Raises:
+        ValueError: If input is not a NamedType[ObjectType]
+    """
+    if not is_named_type(named_object_type) or not is_object_type(named_object_type.base_node):
+        raise ValueError("Input must be a NamedType[ObjectType]")
+
+    generator = ClassGenerator(named_object_type, output_dir, type)
+    return generator.generate()
+
+
+class ClassGenerator:
+    """Generates Python classes from XLR ObjectType nodes."""
+
+    def __init__(
+        self,
+        named_object_type: NamedType[ObjectType],
+        output_dir: str,
+        type: Literal['asset', 'view']
+    ):
+
+        self.type = type.title()
+        self.named_object_type = named_object_type
+        self.output_dir = Path(output_dir)
+        self.output_dir.mkdir(exist_ok=True)
+
+        self.classes_to_generate: Dict[str, Any] = dict()
+        self.classes: List[str] = [named_object_type.name]
+        self.generic_tokens = dict(
+            (obj.symbol, obj) for obj in named_object_type.genericTokens) \
+            if is_named_type_with_generics(named_object_type) \
+            else dict()
+
+        # Collect all nested ObjectTypes that need separate classes
+        self._collect_nested_objects(named_object_type, '')
+
+    def _get_properties_info(self, object_type: ObjectType) -> List[PropertyInfo]:
+        """Pre-process property information to avoid repeated work."""
+
+        properties_info = []
+        for original_name, prop_obj in object_type.properties.items():
+            #Handle expansion of
+            node = prop_obj.node
+
+            if is_ref_type(prop_obj.node) and self.generic_tokens.get(prop_obj.node.ref, None):
+                node = deepcopy(prop_obj.node)
+                node: NodeType = self.generic_tokens[prop_obj.node.ref].default # type: ignore
+                node.title = prop_obj.node.title
+                node.description = prop_obj.node.description
+
+            clean_name = clean_property_name(original_name)
+            python_type = self._convert_xlr_to_ast(node, clean_name)
+            type = self._make_optional_type(python_type) if not prop_obj.required else python_type
+
+            properties_info.append(PropertyInfo(
+                clean_name=clean_name,
+                original_name=original_name,
+                node=node,
+                required=prop_obj.required,
+                type=type
+            ))
+
+        return properties_info
+
+    def _make_optional_type(self, python_type: ast.expr) -> ast.expr:
+        """Create Optional[T] type annotation."""
+        return ast.Subscript(
+            value=COMMON_AST_NODES['Optional'],
+            slice=python_type,
+            ctx=ast.Load()
+        )
+
+    def generate(self) -> str:
+        """Generate all classes and write to file."""
+        # Create AST module
+        module = ast.Module(body=[], type_ignores=[])
+
+        # Add imports
+        self._add_imports(module)
+        base_length = len(module.body)
+
+        # Generate main class (extends Asset)
+        main_class = self._generate_main_class()
+        # Generate nested classes (extend Slotable)
+        for class_name in self.classes:
+            object_type = self.classes_to_generate.get(class_name, None)
+            if object_type is not None :
+                nested_class = self._generate_nested_class(class_name, object_type)
+                module.body.insert(base_length,nested_class)
+
+        #Add main class at the end to avoid forward imports
+        module.body.append(main_class)
+
+        # Convert AST to source code
+        source_code = ast_to_source(module)
+
+        # Write to file
+        filename = f"{self.named_object_type.name}.py"
+        file_path = self.output_dir / filename
+
+        with open(file_path, 'w', encoding='utf-8') as f:
+            f.write(source_code)
+
+        return str(file_path)
+
+    def _collect_nested_objects(
+        self, node: Union[NodeType, NamedType],
+        parent_prop: Optional[str]
+    ) -> None:
+        """Recursively collect all nested ObjectTypes that need separate classes."""
+        if is_object_type(node):
+            self._collect_from_object_type(node, parent_prop if parent_prop else "ERRORERRORERROR")
+        elif is_array_type(node):
+            self._collect_nested_objects(node.elementType, parent_prop)
+        elif is_or_type(node):
+            for element in node._or: #pylint: disable=protected-access
+                self._collect_nested_objects(element, parent_prop)
+        elif is_and_type(node):
+            for element in node._and: #pylint: disable=protected-access
+                self._collect_nested_objects(element,parent_prop)
+
+    def _collect_from_object_type(self, node: ObjectType, parent_prop: str) -> None:
+        """Helper method to collect nested objects from ObjectType nodes."""
+
+        # Handle generics by using default
+        if is_named_type_with_generics(node):
+            for generic_token in node.genericTokens:
+                token = generic_token.default
+                symbol = generic_token.symbol
+                if (not is_ref_type(token) and is_object_type(token) and
+                    symbol not in self.classes_to_generate):
+                    self._collect_nested_objects(token, parent_prop)
+
+        # Handle named types
+        if is_named_type(node):
+            class_name = node.name
+            if class_name not in self.classes:
+                self.classes.append(class_name)
+                self.classes_to_generate[class_name] = node
+        else:
+            class_name = (
+                generate_class_name(node.title.split(".")[-1]) \
+                if node.title
+                else parent_prop
+            ).title()
+            if class_name not in self.classes:
+                self.classes.append(class_name)
+                self.classes_to_generate[class_name] = node
+
+        # Process properties
+        for prop_name, prop_obj in node.properties.items():
+            prop_node = prop_obj.node
+            self._collect_nested_objects(prop_node, prop_name)
+
+    def _create_super_call(self, is_asset: bool) -> ast.Expr:
+        """Create super().__init__() call for both Asset and Serializable classes."""
+        if is_asset:
+            args: List[ast.expr] = [
+                ast.Name(id='id', ctx=ast.Load()), ast.Name(id='self.type', ctx=ast.Load())
+            ]
+        else:
+            args = []
+
+        return ast.Expr(
+            value=ast.Call(
+                func=ast.Attribute(
+                    value=ast.Call(
+                        func=COMMON_AST_NODES['super'],
+                        args=[],
+                        keywords=[]
+                    ),
+                    attr='__init__',
+                    ctx=ast.Load()
+                ),
+                args=args,
+                keywords=[]
+            )
+        )
+
+    def _add_imports(self, module: ast.Module) -> None:
+        """Add any potential necessary import statements."""
+        imports = [
+            # from typing import Optional, List, Any, Union
+            ast.ImportFrom(
+                module='typing',
+                names=[
+                    ast.alias(name='Optional', asname=None),
+                    ast.alias(name='List', asname=None),
+                    ast.alias(name='Any', asname=None),
+                    ast.alias(name='Union', asname=None),
+                    ast.alias(name='Dict', asname=None),
+                    ast.alias(name='Literal', asname=None)
+                ],
+                level=0
+            ),
+            ast.ImportFrom(
+                module= f'{PLAYER_DSL_PACKAGE}.view',
+                names=[
+                    ast.alias(name='Asset', asname=None),
+                    ast.alias(name='Slotable', asname=None)
+                ],
+                level=0
+            ),
+        ]
+
+        if self.type == "View":
+            imports.append(
+                ast.ImportFrom(
+                    module=f'{PLAYER_DSL_PACKAGE}.view',
+                    names=[ast.alias(name='View', asname=None)],
+                    level=0
+                ))
+
+        module.body.extend(imports)
+
+    def _generate_main_class(self) -> ast.ClassDef:
+        """Generate the main class that extends Asset"""
+        class_name = self.named_object_type.name
+        object_type = self.named_object_type.base_node
+
+        #Only extend from View if there is no validation prop
+        extends_name = "Asset" if any(key == "validation" for key in object_type.properties.keys())\
+            else self.type
+
+        # Create class definition
+        class_def = ast.ClassDef(
+            name=class_name,
+            bases=[ast.Name(id=extends_name, ctx=ast.Load())],
+            keywords=[],
+            decorator_list=[],
+            body=[],
+            lineno=1,
+            col_offset=0
+        )
+
+        # Handle the type override
+        if object_type.extends :
+            extended_node = object_type.extends
+            if is_ref_type(extended_node) and \
+                extended_node.ref.startswith("Asset") and \
+                extended_node.genericArguments and \
+                len(extended_node.genericArguments) == 1:
+
+                asset_arg = extended_node.genericArguments[0]
+                if(asset_arg and is_string_type(asset_arg) and asset_arg.const):
+                    type_prop = ast.AnnAssign(
+                        target=ast.Name(id="type", ctx=ast.Store()),
+                        annotation=ast.Name(id="str", ctx=ast.Load()),
+                        value=ast.Constant(value=asset_arg.const),
+                        simple=1
+                    )
+                    class_def.body.append(type_prop)
+
+        # Add constant ID property
+        type_prop = ast.AnnAssign(
+            target=ast.Name(id="id", ctx=ast.Store()),
+            annotation=ast.Name(id="str", ctx=ast.Load()),
+            value=None,
+            simple=1
+        )
+        class_def.body.append(type_prop)
+
+        # Add type annotations for properties
+        self._add_property_annotations(class_def, object_type)
+
+        # Add __init__ method
+        init_method = self._generate_init_method(object_type, is_asset=True)
+        class_def.body.append(init_method)
+
+        # Add with* methods (getters/setters)
+        with_methods = self._generate_with_methods(object_type)
+        class_def.body.extend(with_methods)
+
+        return class_def
+
+    def _generate_nested_class(self, class_name: str, object_type: ObjectType) -> ast.ClassDef:
+        """Generate a nested class that extends Serializable."""
+        # Create class definition
+        class_def = ast.ClassDef(
+            name=class_name,
+            bases=[ast.Name(id='Slotable', ctx=ast.Load())],
+            keywords=[],
+            decorator_list=[],
+            body=[],
+            lineno=1,
+            col_offset=0
+        )
+
+        # Add type annotations for properties
+        self._add_property_annotations(class_def, object_type)
+
+        # Add __init__ method
+        init_method = self._generate_init_method(object_type, is_asset=False)
+        class_def.body.append(init_method)
+
+        # Add with* methods (getters/setters)
+        with_methods = self._generate_with_methods(object_type)
+        class_def.body.extend(with_methods)
+        return class_def
+
+    def _add_property_annotations(self, class_def: ast.ClassDef, object_type: ObjectType) -> None:
+        """Add type annotations for all properties using cached property info."""
+
+        properties_info = self._get_properties_info(object_type)
+        new_names: list[ast.expr] = []
+        original_names: list[ast.expr] = []
+        for prop_info in properties_info:
+            value = None
+            annotation = prop_info.type
+            if is_primitive_const(prop_info.node):
+                value = ast.Constant(value=prop_info.node.const) # type: ignore
+                annotation = COMMON_AST_NODES[prop_info.node.type] # type: ignore
+            if prop_info.clean_name != prop_info.original_name:
+                new_names.append(ast.Constant(value=prop_info.clean_name))
+                original_names.append(ast.Constant(value=prop_info.original_name))
+
+            annotation = ast.AnnAssign(
+                target=ast.Name(id=prop_info.clean_name, ctx=ast.Store()),
+                annotation=annotation,
+                value=value,
+                simple=1
+            )
+            class_def.body.append(annotation)
+
+        if new_names:
+            map_arg = ast.Assign(
+                targets=[ast.Name(id="_propMap", ctx=ast.Store())],
+                value=ast.Dict(keys=list(new_names), values=list(original_names))
+            )
+            class_def.body.append(map_arg)
+
+    def _generate_init_method(self, object_type: ObjectType, is_asset: bool) -> ast.FunctionDef:
+        """Generate __init__ method for the class using cached property info."""
+        properties_info = self._get_properties_info(object_type)
+        properties_info.sort(key=lambda x: x.required, reverse=True)
+
+        # Build arguments list
+        required_args, optional_args= [ast.arg(arg='self', annotation=None)], []
+        defaults: List[Any] = [None]
+
+        # Add ID parameter for Asset classes
+        if is_asset:
+            optional_args.append(ast.arg(arg='id', annotation=ast.Subscript(
+                value=COMMON_AST_NODES['Optional'],
+                slice=COMMON_AST_NODES['string'],
+                ctx=ast.Load()
+            )))
+
+        # Add parameters for each property
+        for prop_info in properties_info:
+            if is_primitive_const(prop_info.node):
+                continue
+            if prop_info.required:
+                required_args.append(ast.arg(arg=prop_info.clean_name, annotation=prop_info.type))
+                defaults.append(None)
+            else:
+                optional_args.append(ast.arg(arg=prop_info.clean_name, annotation=prop_info.type))
+                defaults.append(COMMON_AST_NODES['None'])
+
+        # Add default for ID
+        defaults.insert(len(required_args), COMMON_AST_NODES['None'])
+
+        # Create function definition
+        init_def = ast.FunctionDef(
+            name='__init__',
+            args=ast.arguments(
+                posonlyargs=[],
+                args=required_args + optional_args,
+                vararg=None,
+                kwonlyargs=[],
+                kw_defaults=[],
+                kwarg=None,
+                defaults=defaults
+            ),
+            body=[],
+            decorator_list=[]
+        )
+
+        # Add super().__init__() call
+        init_def.body.append(self._create_super_call(is_asset))
+
+        # Add property assignments
+        for prop_info in properties_info:
+            if is_primitive_const(prop_info.node):
+                continue
+            assignment = ast.Assign(
+                targets=[
+                    ast.Attribute(
+                        value=COMMON_AST_NODES['self'],
+                        attr=prop_info.clean_name,
+                        ctx=ast.Store()
+                    )
+                ],
+                value=ast.Name(id=prop_info.clean_name, ctx=ast.Load())
+            )
+            init_def.body.append(assignment)
+
+        return init_def
+
+    def _generate_with_methods(self, object_type: ObjectType) -> list[ast.FunctionDef]:
+        """Generate with* methods (getters/setters) for each property"""
+        methods = []
+        properties_info = self._get_properties_info(object_type)
+        for prop_info in properties_info:
+            if is_primitive_const(prop_info.node):
+                continue
+            # Generate method name: with + PascalCase property name
+            method_name = f"with{prop_info.clean_name.replace('_', '').title()}"
+
+            # Check property type to determine method generation strategy
+            if self._is_slot(prop_info.node):
+                # Asset property: use _withSlot
+                methods.append(self._generate_asset_with_method(method_name, prop_info))
+            elif is_array_type(prop_info.node):
+                # Array property: generate set and append methods
+                methods.extend(self._generate_array_with_methods(method_name, prop_info))
+            else:
+                # Regular property: simple setter
+                methods.append(self._generate_simple_with_method(method_name, prop_info))
+
+        return methods
+
+    def _is_slot(self, node: NodeType) -> bool:
+        """Check if a property is an Asset type or array of Assets."""
+        if is_ref_type(node):
+            ref_name = node.ref
+            return ref_name.startswith('Asset')
+        elif is_array_type(node) and is_ref_type(node.elementType):
+            ref_name = node.elementType.ref
+            return ref_name.startswith('Asset')
+        return False
+
+    def _generate_simple_with_method(
+        self,
+        method_name: str,
+        prop_info: PropertyInfo
+    ) -> ast.FunctionDef:
+        """Generate a simple with* method for regular properties."""
+        method_def = ast.FunctionDef(
+            name=method_name,
+            args=ast.arguments(
+                posonlyargs=[],
+                args=[
+                    ast.arg(arg='self', annotation=None),
+                    ast.arg(arg='value', annotation=prop_info.type)
+                ],
+                vararg=None,
+                kwonlyargs=[],
+                kw_defaults=[],
+                kwarg=None,
+                defaults=[]
+            ),
+            body=[
+                # self.prop_name = value
+                ast.Assign(
+                    targets=[ast.Attribute(
+                        value=COMMON_AST_NODES['self'],
+                        attr=prop_info.clean_name,
+                        ctx=ast.Store())
+                    ],
+                    value=ast.Name(id='value', ctx=ast.Load())
+                ),
+                # return self
+                ast.Return(value=COMMON_AST_NODES['self'])
+            ],
+            decorator_list=[]
+        )
+        return method_def
+
+    def _generate_asset_with_method(
+        self,
+        method_name: str,
+        prop_info: PropertyInfo
+    ) -> ast.FunctionDef:
+        """Generate a with* method for Asset properties using _withSlot."""
+        is_array_of_assets = is_array_type(prop_info.node)
+
+        is_asset_wrapper = prop_info.node.ref.startswith("AssetWrapper") \
+            if is_ref_type(prop_info.node) else False
+
+        body = [
+            ast.Expr(
+                value=ast.Call(
+                    func=ast.Attribute(
+                        value=COMMON_AST_NODES['self'],
+                        attr='_withSlot',
+                        ctx=ast.Load()
+                    ),
+                    args=[
+                        ast.Constant(value=prop_info.clean_name),
+                        ast.Name(id='value', ctx=ast.Load()),
+                        ast.Constant(value=is_asset_wrapper), # wrapInAssetWrapper
+                        ast.Constant(value=is_array_of_assets) # isArray
+                    ],
+                    keywords=[]
+                )
+            ),
+            ast.Return(value=COMMON_AST_NODES['self'])
+        ]
+
+        method_def = ast.FunctionDef(
+            name=method_name,
+            args=ast.arguments(
+                posonlyargs=[],
+                args=[
+                    ast.arg(arg='self', annotation=None),
+                    ast.arg(arg='value', annotation=prop_info.type)
+                ],
+                vararg=None,
+                kwonlyargs=[],
+                kw_defaults=[],
+                kwarg=None,
+                defaults=[]
+            ),
+            body=body,
+            decorator_list=[]
+        )
+        return method_def
+
+    def _generate_array_with_methods(
+        self,
+        method_name: str,
+        prop_info: PropertyInfo
+    ) -> list[ast.FunctionDef]:
+        """Generate with* methods for array properties (set and append)."""
+        methods = []
+
+        # Get element type for append method
+        element_type = (self._convert_xlr_to_ast(prop_info.node.elementType,
+                        f"{prop_info.clean_name}") if is_array_type(prop_info.node) \
+                        else COMMON_AST_NODES['Any']
+                        )
+
+        # Method 1: Set entire array
+        set_body = self._create_array_set_body(prop_info)
+
+        set_method = ast.FunctionDef(
+            name=method_name,
+            args=ast.arguments(
+                posonlyargs=[],
+                args=[
+                    ast.arg(arg='self', annotation=None),
+                    ast.arg(arg='values', annotation=prop_info.type)
+                ],
+                vararg=None,
+                kwonlyargs=[],
+                kw_defaults=[],
+                kwarg=None,
+                defaults=[]
+            ),
+            body=set_body,
+            decorator_list=[]
+        )
+        methods.append(set_method)
+
+        # Method 2: Append to array
+        append_method_name = method_name.replace('with', 'add')
+        append_body = self._create_array_append_body(prop_info)
+
+        append_method = ast.FunctionDef(
+            name=append_method_name,
+            args=ast.arguments(
+                posonlyargs=[],
+                args=[
+                    ast.arg(arg='self', annotation=None),
+                    ast.arg(arg='value', annotation=element_type)
+                ],
+                vararg=None,
+                kwonlyargs=[],
+                kw_defaults=[],
+                kwarg=None,
+                defaults=[]
+            ),
+            body=append_body,
+            decorator_list=[]
+        )
+        methods.append(append_method)
+
+        return methods
+
+    def _create_array_set_body(self, prop_info: PropertyInfo) -> list[ast.stmt]:
+        """Create body for array setter method."""
+        # Asset array: use _withSlot
+        return [
+            ast.Expr(
+                value=ast.Call(
+                    func=ast.Attribute(
+                        value=COMMON_AST_NODES['self'],
+                        attr='_withSlot',
+                        ctx=ast.Load()
+                    ),
+                    args=[
+                        ast.Constant(value=prop_info.clean_name),
+                        ast.Name(id='values', ctx=ast.Load()),
+                        ast.Constant(value=True), # wrapInAssetWrapper
+                        ast.Constant(value=True) # isArray
+                    ],
+                    keywords=[]
+                )
+            ),
+            ast.Return(value=COMMON_AST_NODES['self'])
+        ]
+
+    def _create_array_append_body(self, prop_info: PropertyInfo) -> list[ast.stmt]:
+        """Create body for array append method."""
+        return [
+            # Initialize array if None
+            ast.If(
+                test=ast.Compare(
+                    left=ast.Attribute(
+                        value=COMMON_AST_NODES['self'],
+                        attr=prop_info.clean_name,
+                        ctx=ast.Load()
+                    ),
+                    ops=[ast.Is()],
+                    comparators=[ast.Constant(value=None)]
+                ),
+                body=[
+                    ast.Assign(
+                        targets=[ast.Attribute(
+                            value=COMMON_AST_NODES['self'],
+                            attr=prop_info.clean_name,
+                            ctx=ast.Store())
+                        ],
+                        value=ast.List(elts=[], ctx=ast.Load())
+                    )
+                ],
+                orelse=[]
+            ),
+            # Append the value
+            ast.Expr(
+                value=ast.Call(
+                    func=ast.Attribute(
+                        value=ast.Attribute(
+                            value=COMMON_AST_NODES['self'],
+                            attr=prop_info.clean_name,
+                            ctx=ast.Load()
+                        ),
+                        attr='append',
+                        ctx=ast.Load()
+                    ),
+                    args=[ast.Name(id='value', ctx=ast.Load())],
+                    keywords=[]
+                )
+            ),
+            ast.Return(value=COMMON_AST_NODES['self'])
+        ]
+
+    def _convert_xlr_to_ast(self, node: NodeType, prop_name: str) -> ast.expr:
+        """Convert XLR type to Python type annotation (internal)."""
+
+        if is_primitive_const(node):
+            return ast.Constant(value=node.const) # type: ignore
+        if is_string_type(node):
+            return COMMON_AST_NODES['string']
+        elif is_number_type(node):
+            return COMMON_AST_NODES['number']
+        elif is_boolean_type(node):
+            return COMMON_AST_NODES['boolean']
+        elif is_null_type(node) or is_unknown_type(node) or is_undefined_type(node):
+            return COMMON_AST_NODES['None']
+        elif is_any_type(node):
+            return COMMON_AST_NODES['Any']
+        elif is_array_type(node):
+            element_type = self._convert_xlr_to_ast(node.elementType, prop_name)
+            return ast.Subscript(
+                value=COMMON_AST_NODES['List'],
+                slice=element_type,
+                ctx=ast.Load()
+            )
+        elif is_record_type(node):
+            key_type = self._convert_xlr_to_ast(node.keyType, prop_name)
+            value_type = self._convert_xlr_to_ast(node.valueType, prop_name)
+
+            return ast.Subscript(
+                value=COMMON_AST_NODES['Dict'],
+                slice=ast.Tuple(elts=[key_type, value_type], ctx=ast.Load()),
+                ctx=ast.Load()
+            )
+        elif is_object_type(node):
+            # Use the generated class name
+            class_name: str = node.name if is_named_type(node) \
+                else generate_class_name(prop_name)
+            escaped_class_name = "'"+class_name+"'"
+            return ast.Name(id=escaped_class_name, ctx=ast.Load())
+        elif is_or_type(node):
+            return self._handle_or_type(node, prop_name)
+        elif is_and_type(node):
+            return self._handle_and_type(node, prop_name)
+        elif is_ref_type(node):
+            return self._handle_ref_type(node)
+        else:
+            return COMMON_AST_NODES['Any']
+
+    def _handle_or_type(self, node: OrType, prop_name: str) -> ast.expr:
+        """Handle or type nodes."""
+        # Handle Literal Types
+        if all(is_primitive_const(t) for t in node.or_types):
+            # python type checker doesn't keep the inference from the previous check
+            union_types: List[ast.expr] = [ast.Constant(
+                value=or_type.const) for or_type in node.or_types # type: ignore
+            ]
+
+            if len(union_types) == 1:
+                return union_types[0]
+
+            return ast.Subscript(
+                value=COMMON_AST_NODES['Literal'],
+                slice=ast.Tuple(elts=union_types, ctx=ast.Load()),
+                ctx=ast.Load()
+            )
+
+        else:
+            # Handle Union types
+            union_types = []
+
+            for type in node.or_types:
+                if not is_primitive_const(type):
+                    union_types.append(self._convert_xlr_to_ast(type, prop_name))
+                else:
+                    union_types.append(
+                        ast.Subscript(
+                            value=COMMON_AST_NODES['Literal'],
+                            slice=ast.Tuple(elts=[ast.Constant(type.const)], ctx=ast.Load()),
+                            ctx=ast.Load()
+                        )
+                    )
+
+            if len(union_types) == 1:
+                return union_types[0]
+
+            return ast.Subscript(
+                value=COMMON_AST_NODES['Union'],
+                slice=ast.Tuple(elts=union_types, ctx=ast.Load()),
+                ctx=ast.Load()
+            )
+
+    def _flatten_and_types(self, and_types: List[NodeType]) -> List[NodeType]:
+        """Recursively flatten nested AndType nodes into a single list."""
+        flattened = []
+        for and_type in and_types:
+            if is_and_type(and_type):
+                # Recursively flatten nested AndType
+                flattened.extend(self._flatten_and_types(and_type.and_types))
+            else:
+                flattened.append(and_type)
+        return flattened
+
+    def _handle_and_type(self, node: AndType, prop_name: str) -> ast.expr:
+        """Handle and (intersection) type nodes."""
+        and_types = node.and_types
+
+        # First, check if any elements are nested AndTypes and flatten them
+        if any(is_and_type(t) for t in and_types):
+            and_types = self._flatten_and_types(and_types)
+
+        # Check if all elements are object types
+        if all(is_object_type(t) for t in and_types):
+            return self._merge_object_types(and_types, prop_name, node.name)
+
+        # Check if any element is a union - need to calculate intersection
+        elif any(is_or_type(t) for t in and_types):
+            return self._handle_intersection_with_unions(and_types, prop_name)
+
+        # For other cases, fall back to Union (Python doesn't have native intersection types)
+        else:
+            intersection_types = [
+                self._convert_xlr_to_ast(and_type, prop_name) for and_type in and_types
+            ]
+
+            if len(intersection_types) == 1:
+                return intersection_types[0]
+
+            # Python doesn't have intersection types, so we use Union as approximation
+            return ast.Subscript(
+                value=COMMON_AST_NODES['Union'],
+                slice=ast.Tuple(elts=intersection_types, ctx=ast.Load()),
+                ctx=ast.Load()
+            )
+
+    def _merge_object_types(
+        self,
+        object_types: List[NodeType],
+        prop_name: str,
+        name: Optional[str] = ""
+    ) -> ast.expr:
+        """Merge multiple object types into a single object type with combined properties."""
+
+        # Create merged properties dictionary
+        merged_properties = {}
+
+        for obj_type in object_types:
+            # Resolve the actual ObjectType (could be wrapped in NamedType)
+            actual_obj_type = obj_type.base_node if is_named_type(obj_type) else obj_type
+
+            if is_object_type(actual_obj_type):
+                # Merge properties from this object type
+                for prop_name_key, prop_obj in actual_obj_type.properties.items():
+                    if prop_name_key in merged_properties:
+                        # Property exists in both objects - need to handle conflict
+                        # For now, make it required if either requires it
+                        existing_prop = merged_properties[prop_name_key]
+                        merged_properties[prop_name_key] = ObjectProperty(
+                            required=existing_prop.required or prop_obj.required,
+                            node=prop_obj.node # Use the later definition
+                        )
+                    else:
+                        merged_properties[prop_name_key] = prop_obj
+
+        # Create new merged ObjectType
+        merged_obj_type = ObjectType(properties=merged_properties)
+
+        # Generate a class name for the merged type
+        merged_class_name = name if name \
+            else self._generate_merged_class_name(prop_name, object_types)
+
+        # Add to classes to generate if not already present
+        if merged_class_name not in self.classes:
+            self.classes.append(merged_class_name)
+            self.classes_to_generate[merged_class_name] = merged_obj_type
+
+        # Return AST reference to the merged class
+        return ast.Name(id=merged_class_name, ctx=ast.Load())
+
+    def _generate_merged_class_name(self, base_name: str, object_types: List[NodeType]) -> str:
+        """Generate a unique class name for merged object types."""
+        # Clean the base name
+        clean_base = clean_property_name(base_name).replace('_', '').title()
+
+        # Try to create a meaningful name from the merged types
+        type_names = []
+        for obj_type in object_types:
+            if is_named_type(obj_type):
+                type_names.append(obj_type.name)
+            elif hasattr(obj_type, 'name') and obj_type.name:
+                type_names.append(obj_type.name)
+
+        if type_names:
+            merged_name = ''.join(type_names) + clean_base
+        else:
+            merged_name = f"Merged{clean_base}"
+
+        return merged_name
+
+    def _handle_intersection_with_unions(
+        self,
+        and_types: List[NodeType],
+        prop_name: str
+    ) -> ast.expr:
+        """Handle intersections that include union types."""
+        # Separate union types from non-union types
+        union_types = [t for t in and_types if is_or_type(t)]
+        non_union_types = [t for t in and_types if not is_or_type(t)]
+
+        if len(union_types) == 0:
+            # No unions, shouldn't reach here but handle gracefully
+            return self._convert_xlr_to_ast(and_types[0], prop_name)
+
+        # For each combination of union members, intersect with non-union types
+        result_types = []
+
+        # Start with the first union's members
+        first_union = union_types[0]
+        current_combinations = first_union.or_types.copy()
+
+        # For each additional union, create combinations
+        for union_type in union_types[1:]:
+            new_combinations = []
+            for existing in current_combinations:
+                for union_member in union_type.or_types:
+                    # Create intersection of existing and union_member
+                    new_combinations.append([existing, union_member])
+            current_combinations = new_combinations
+
+        # Now intersect each combination with non-union types
+        for combination in current_combinations:
+            if isinstance(combination, list):
+                # Multiple types to intersect
+                intersection_candidate = combination + non_union_types
+            else:
+                # Single type to intersect with non-union types
+                intersection_candidate = [combination] + non_union_types
+
+            # Check if all are objects for merging
+            if all(is_object_type(t) for t in intersection_candidate):
+                result_types.append(
+                    self._merge_object_types(intersection_candidate,
+                                             f"{prop_name}_intersection")
+                )
+            else:
+                # Convert to Python types and use Union
+                py_types = [self._convert_xlr_to_ast(t, prop_name) for t in intersection_candidate]
+                if len(py_types) == 1:
+                    result_types.append(py_types[0])
+                else:
+                    result_types.append(ast.Subscript(
+                        value=COMMON_AST_NODES['Union'],
+                        slice=ast.Tuple(elts=py_types, ctx=ast.Load()),
+                        ctx=ast.Load()
+                    ))
+
+        # Return union of all result types
+        if len(result_types) == 1:
+            return result_types[0]
+        else:
+            return ast.Subscript(
+                value=COMMON_AST_NODES['Union'],
+                slice=ast.Tuple(elts=result_types, ctx=ast.Load()),
+                ctx=ast.Load()
+            )
+
+    def _handle_ref_type(self, node: RefType) -> ast.expr:
+        """Handle reference type nodes."""
+        ref_name = node.ref
+
+        maybe_ref = self.generic_tokens.get(ref_name, None)
+        if maybe_ref and maybe_ref.default and maybe_ref.default.name:
+            return ast.Name(id=maybe_ref.default.name, ctx=ast.Load())
+
+        # Check if this is a reference to an Asset type (AssetWrapper)
+        if ref_name.startswith('AssetWrapper'):
+            return COMMON_AST_NODES['Asset']
+        elif ref_name in ('Expression', 'Binding'):
+            return COMMON_AST_NODES['string']
+        else:
+            # For other references, try to resolve to a generated class name
+            # or use the ref name directly
+            return ast.Name(id=ref_name, ctx=ast.Load())
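
The classes above are assembled as ast nodes and rendered with ast.unparse (via ast_to_source in utils.py below) rather than with string templates. A minimal, self-contained sketch of that technique, using the same ast.AnnAssign shape that _generate_main_class emits for the constant type member; the 'action' value is an invented example, not taken from the package:

import ast

# Build the statement `type: str = 'action'` as an AST node, then unparse it.
stmt = ast.AnnAssign(
    target=ast.Name(id="type", ctx=ast.Store()),
    annotation=ast.Name(id="str", ctx=ast.Load()),
    value=ast.Constant(value="action"),
    simple=1,
)
module = ast.Module(body=[stmt], type_ignores=[])
ast.fix_missing_locations(module)   # stdlib counterpart of the lineno/col_offset fix-up in ast_to_source
print(ast.unparse(module))          # -> type: str = 'action'
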
player_tools_dsl_generator/utils.py
@@ -0,0 +1,54 @@
+"""
+Generation Utilities
+"""
+
+import ast
+from typing import NamedTuple
+
+from player_tools_xlr_types.nodes import NodeType
+
+
+COMMON_AST_NODES = {
+    'string': ast.Name(id='str', ctx=ast.Load()),
+    'number': ast.Name(id='int', ctx=ast.Load()), # could be a float?
+    'boolean': ast.Name(id='bool', ctx=ast.Load()),
+    'Any': ast.Name(id='Any', ctx=ast.Load()),
+    'None': ast.Name(id='None', ctx=ast.Load()),
+    'Asset': ast.Name(id='Asset', ctx=ast.Load()),
+    'Optional': ast.Name(id='Optional', ctx=ast.Load()),
+    'List': ast.Name(id='List', ctx=ast.Load()),
+    'Union': ast.Name(id='Union', ctx=ast.Load()),
+    'Dict': ast.Name(id='Dict', ctx=ast.Load()),
+    'Literal': ast.Name(id='Literal', ctx=ast.Load()),
+    'self': ast.Name(id='self', ctx=ast.Load()),
+    'super': ast.Name(id='super', ctx=ast.Load())
+}
+
+PLAYER_DSL_PACKAGE = 'player_tools_dsl'
+
+class PropertyInfo(NamedTuple):
+    """Cached property information to avoid repeated processing."""
+    clean_name: str
+    original_name: str
+    node: NodeType
+    required: bool
+    type: ast.expr
+
+def clean_property_name(prop_name: str) -> str:
+    """Clean property name by removing quotes and replacing hyphens."""
+    return prop_name.replace('"', '').replace('\'','').replace('-', '_')
+
+def generate_class_name(prop_name: str) -> str:
+    """Generate class name from property name."""
+    return clean_property_name(prop_name).replace('_', "").title()
+
+def ast_to_source(module: ast.Module) -> str:
+    """Convert AST module to source code string."""
+    # Fix line numbers and column offsets
+    for node in ast.walk(module):
+        if not hasattr(node, 'lineno'):
+            node.lineno = 1 # type: ignore
+        if not hasattr(node, 'col_offset'):
+            node.col_offset = 0 # type: ignore
+
+    return ast.unparse(module)
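
For a concrete feel for the two name helpers above, hand-worked examples (the property name is invented for illustration):

# Worked by hand from the string operations in clean_property_name / generate_class_name.
clean_property_name('"run-on-select"')   # -> 'run_on_select'
generate_class_name("run-on-select")     # -> 'Runonselect'
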
player_tools_dsl_generator-0.13.0.dev2215819.dist-info/RECORD
@@ -0,0 +1,7 @@
+player_tools_dsl_generator/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+player_tools_dsl_generator/__main__.py,sha256=epNLhkU4rYnfhS_6vZAH-Qu9f-Acdw-x_snSKBhndkw,2033
+player_tools_dsl_generator/generator.py,sha256=vm-EwwjMWVw7nL_cPJNMrGYi2AsnPcGn_2ScKpIrIhc,37520
+player_tools_dsl_generator/utils.py,sha256=4aJHHi4zHlsbdif-iujdO6_2Xn8Eg2U2EEOcychH8ms,1809
+player_tools_dsl_generator-0.13.0.dev2215819.dist-info/WHEEL,sha256=sobxWSyDDkdg_rinUth-jxhXHqoNqlmNMJY3aTZn2Us,91
+player_tools_dsl_generator-0.13.0.dev2215819.dist-info/METADATA,sha256=czISYVEGlsx4_vYp55Fs3jFd673LCjnE9gEZk3_LoyI,147
+player_tools_dsl_generator-0.13.0.dev2215819.dist-info/RECORD,,