nsj-rest-lib2 0.0.2__py3-none-any.whl → 0.0.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
File without changes
@@ -0,0 +1,285 @@
1
+ #!/usr/bin/env python3
2
+ # -*- coding: utf-8 -*-
3
+
4
+ """
5
+ Gerador de DTO e Entity a partir de um EDL JSON (v1.x) — AGORA usando 'repository'
6
+ no lugar de 'storage', e propagando o de-para de colunas:
7
+ - Entity usa os nomes físicos vindos de model.repository.columns[*].column (quando houver).
8
+ - DTOField ganha entity_field="<nome_do_atributo_na_Entity>" para cada propriedade.
9
+
10
+ Uso:
11
+ python generator.py caminho/para/arquivo.edl.json
12
+ """
13
+
14
+ import json
15
+ import sys
16
+ import keyword
17
+ from typing import Any, Dict, List, Tuple
18
+
19
+ # -----------------------
20
+ # Helpers de transformação
21
+ # -----------------------
22
+
23
+
24
def py_identifier(name: str) -> str:
    """Coerce *name* into a valid Python identifier.

    Rules: hyphens become underscores; empty/blank input maps to "_";
    a leading digit gets a leading underscore (appending one, as the
    original did, cannot fix that case); keywords and otherwise-invalid
    names get a trailing underscore.
    """
    n = (name or "").strip().replace("-", "_")
    if not n:
        n = "_"
    # A name starting with a digit is invalid no matter what is appended.
    if n[0].isdigit():
        n = f"_{n}"
    if not n.isidentifier() or keyword.iskeyword(n):
        n = f"{n}_"
    return n
32
+
33
+
34
def detect_pk(model_props: Dict[str, Any]) -> str:
    """Return the logical name of the first property flagged as PK, or ""."""
    pk_candidates = (
        logical
        for logical, meta in model_props.items()
        if isinstance(meta, dict) and meta.get("pk") is True
    )
    return next(pk_candidates, "")
39
+
40
+
41
def to_python_type(prop: Dict[str, Any]) -> str:
    """Map an EDL property's type/format to a Python type name (as source text)."""
    fmt = (prop.get("format") or "").lower()
    # The uuid format wins regardless of the declared base type.
    if fmt == "uuid":
        return "uuid.UUID"

    type_map = {
        "datetime": "datetime.datetime",
        "date": "datetime.date",
        "integer": "int",
        "number": "float",
        "boolean": "bool",
    }
    declared = (prop.get("type") or "string").lower()
    return type_map.get(declared, "str")
58
+
59
+
60
def build_entity_field_name(logical: str, columns_map: Dict[str, Any]) -> str:
    """Resolve the Entity attribute name for a logical field.

    Prefers the physical name from repository.columns[logical].column when
    present; otherwise falls back to the logical name itself.
    """
    column_meta = (columns_map or {}).get(logical) or {}
    physical_or_logical = column_meta.get("column") or logical
    return py_identifier(physical_or_logical)
68
+
69
+
70
def dto_field_args(
    logical: str,
    prop: Dict[str, Any],
    required: List[str],
    columns_map: Dict[str, Any],
) -> Dict[str, Any]:
    """Build the keyword arguments for one property's DTOField(...) call.

    :param logical: logical property name as it appears in the EDL.
    :param prop: property metadata (type, format, limits, default, pk...).
    :param required: logical names of required properties.
    :param columns_map: repository.columns map (logical -> physical meta).
    :return: dict of DTOField keyword arguments (values ready for emission).
    """
    args: Dict[str, Any] = {}

    # Policy: always expose the field (resume=True); never hide source fields.
    args["resume"] = True

    # PK / not_null
    if prop.get("pk") is True:
        args["pk"] = True
        args["not_null"] = True
    # Plain list membership; building a set per call gained nothing.
    if logical in required:
        args["not_null"] = True

    # Strings: strip + length limits
    t = (prop.get("type") or "string").lower()
    if t == "string":
        args["strip"] = True
        if "length" in prop:
            args["max"] = int(prop["length"])
        if "minimum" in prop:
            args["min"] = int(prop["minimum"])

    # Numbers: integer bounds cast to int, number bounds to float.
    if t in {"integer", "number"}:
        cast = int if t == "integer" else float
        if "minimum" in prop:
            args["min"] = cast(prop["minimum"])
        if "maximum" in prop:
            args["max"] = cast(prop["maximum"])

    # Special formats
    fmt = (prop.get("format") or "").lower()
    if fmt == "uuid":
        args["validator"] = "DTOFieldValidators().validate_uuid"
        args["min"] = 36
        args["max"] = 36

    # Logical default (when present). Callable-looking strings ("foo()") are
    # emitted verbatim; everything else is repr()'d into the generated code.
    # (The original had two identical repr() branches; collapsed here.)
    if prop.get("default") is not None:
        default_val = prop["default"]
        if isinstance(default_val, str) and default_val.endswith("()"):
            args["default_value"] = default_val
        else:
            args["default_value"] = repr(default_val)

    # Always record the matching Entity attribute name.
    args["entity_field"] = build_entity_field_name(logical, columns_map)

    return args
129
+
130
+
131
def render_dto(edl: Dict[str, Any]) -> Tuple[str, str]:
    """Render the DTO class source code for an EDL document.

    :param edl: parsed EDL JSON (v1.x) dict.
    :return: tuple (dto_class_name, generated_source_code).

    Each DTOField carries an entity_field argument pointing at the matching
    Entity attribute (the physical column name when repository.columns maps
    one — see dto_field_args / build_entity_field_name).
    """
    model = edl.get("model", {}) or {}
    props: Dict[str, Any] = model.get("properties", {}) or {}
    required: List[str] = model.get("required", []) or []
    repository = model.get("repository", {}) or {}
    columns_map: Dict[str, Any] = repository.get("columns", {}) or {}

    # Class name: last dotted segment of the EDL id, first letter upper-cased.
    entity_name_full = edl.get("id") or "Entity"
    class_base = entity_name_full.split(".")[-1]
    class_name = f"{class_base[0].upper()}{class_base[1:]}"
    dto_class = f"{class_name}DTO"

    # Conditional imports for the generated module (uuid/datetime only
    # when some property actually needs them).
    need_uuid = any(((p.get("format") or "").lower() == "uuid") for p in props.values())
    need_datetime = any(
        ((p.get("type") or "").lower() in {"datetime", "date"}) for p in props.values()
    )

    header_imports = [
        "from nsj_rest_lib.decorator.dto import DTO",
        "from nsj_rest_lib.descriptor.dto_field import DTOField",
        "from nsj_rest_lib.descriptor.dto_field_validators import DTOFieldValidators",
        "from nsj_rest_lib.dto.dto_base import DTOBase",
    ]
    if need_uuid:
        header_imports.insert(0, "import uuid")
    if need_datetime:
        header_imports.insert(0, "import datetime")

    lines: List[str] = []
    lines.extend(header_imports)
    lines.append("")
    lines.append("")
    lines.append("@DTO()")
    lines.append(f"class {dto_class}(DTOBase):")
    if not props:
        # No properties: emit an empty class body.
        lines.append("    pass")
        return (dto_class, "\n".join(lines))

    for logical in props:
        meta = props[logical] or {}
        py_type = to_python_type(meta)
        field_args = dto_field_args(logical, meta, required, columns_map)

        # Build the DTOField(...) call: 'validator' is emitted as a raw
        # expression; every other argument value is repr()'d.
        arg_parts = []
        for k, v in field_args.items():
            if k == "validator":
                arg_parts.append(f"{k}={v}")
            else:
                arg_parts.append(f"{k}={repr(v)}")
        args_str = ", ".join(arg_parts) if arg_parts else ""

        lines.append("")
        lines.append(f"    {py_identifier(logical)}: {py_type} = DTOField({args_str})")

    return (dto_class, "\n".join(lines))
188
+
189
+
190
def render_entity(edl: Dict[str, Any]) -> Tuple[str, str]:
    """Render the Entity class source code for an EDL document.

    :param edl: parsed EDL JSON (v1.x) dict.
    :return: tuple (entity_class_name, generated_source_code).

    Entity attributes use the physical column names from
    model.repository.columns[*].column when present, so pk_field and
    default_order_fields are mapped through the same columns de-para
    (the previous version emitted the logical names there, which broke
    whenever the PK or a sort field was renamed by the columns map).
    """
    model = edl.get("model", {}) or {}
    props: Dict[str, Any] = model.get("properties", {}) or {}
    repository = model.get("repository", {}) or {}
    api = model.get("api", {}) or {}
    columns_map: Dict[str, Any] = repository.get("columns", {}) or {}

    # Class name: last dotted segment of the EDL id, first letter upper-cased.
    entity_name_full = edl.get("id") or "Entity"
    class_base = entity_name_full.split(".")[-1]
    class_name = f"{class_base[0].upper()}{class_base[1:]}"
    entity_class = f"{class_name}Entity"

    table_name = repository.get("map") or "schema.tabela"
    pk_field = detect_pk(props) or "id"

    # default_order_fields = api.default_sort (stripping '+'/'-' prefixes),
    # keeping only fields that exist in the model; fall back to the PK.
    default_sort: List[str] = []
    for item in api.get("default_sort", []) or []:
        fld = str(item).lstrip("+-")
        if fld in props:
            default_sort.append(fld)
    if not default_sort:
        default_sort = [pk_field] if pk_field else []

    # Conditional imports for the generated module.
    need_uuid = any(((p.get("format") or "").lower() == "uuid") for p in props.values())
    need_datetime = any(
        ((p.get("type") or "").lower() in {"datetime", "date"}) for p in props.values()
    )

    header_imports = [
        "from nsj_rest_lib.entity.entity_base import EntityBase",
        "from nsj_rest_lib.decorator.entity import Entity",
    ]
    if need_uuid:
        header_imports.insert(0, "import uuid")
    if need_datetime:
        header_imports.insert(0, "import datetime")

    lines: List[str] = []
    lines.extend(header_imports)
    lines.append("")
    lines.append("")
    lines.append("@Entity(")
    lines.append(f"    table_name={repr(table_name)},")
    # pk_field must reference the Entity attribute, i.e. the physical name.
    lines.append(f"    pk_field={repr(build_entity_field_name(pk_field, columns_map))},")
    if default_sort:
        # Order fields likewise refer to Entity attributes (physical names).
        lines.append(
            f"    default_order_fields={[build_entity_field_name(x, columns_map) for x in default_sort]},"
        )
    lines.append(")")
    lines.append(f"class {entity_class}(EntityBase):")

    if not props:
        # No properties: emit an empty class body.
        lines.append("    pass")
        return (entity_class, "\n".join(lines))

    for logical, meta in props.items():
        py_type = to_python_type(meta)
        entity_attr = build_entity_field_name(logical, columns_map)
        lines.append(f"    {entity_attr}: {py_type} = None")

    return (entity_class, "\n".join(lines))
254
+
255
+
256
def generate_from_edl(edl: Dict[str, Any]) -> Tuple[str, str, str, str]:
    """Render both artifacts for *edl*.

    :return: (dto_class_name, dto_code, entity_class_name, entity_code).
    """
    dto_pair = render_dto(edl)
    entity_pair = render_entity(edl)
    return (*dto_pair, *entity_pair)
260
+
261
+
262
+ # -----------------------
263
+ # CLI
264
+ # -----------------------
265
+
266
+
267
def main():
    """CLI entry point: read an EDL JSON path from argv, print DTO and Entity code."""
    if len(sys.argv) < 2:
        print("Uso: python generator.py caminho/para/arquivo.edl.json", file=sys.stderr)
        sys.exit(2)

    with open(sys.argv[1], "r", encoding="utf-8") as edl_file:
        edl = json.load(edl_file)

    _, dto_code, _, entity_code = generate_from_edl(edl)

    sep = "\n" + ("#" * 80) + "\n"
    for title, code in (("# DTO\n", dto_code), ("# ENTITY\n", entity_code)):
        print(sep + title + sep)
        print(code)
282
+
283
+
284
+ if __name__ == "__main__":
285
+ main()
@@ -0,0 +1,267 @@
1
+ from typing import Any
2
+
3
+ from nsj_rest_lib2.compiler.compiler_structures import (
4
+ IndexCompilerStructure,
5
+ PropertiesCompilerStructure,
6
+ )
7
+ from nsj_rest_lib2.compiler.dto_compiler import DTOCompiler
8
+ from nsj_rest_lib2.compiler.edl_model.repository_model import RepositoryModel
9
+ from nsj_rest_lib2.compiler.entity_compiler import EntityCompiler
10
+ from nsj_rest_lib2.compiler.property_compiler import EDLPropertyCompiler
11
+
12
+ from nsj_rest_lib2.compiler.edl_model.entity_model import EntityModel
13
+
14
+ from nsj_rest_lib2.settings import get_logger
15
+
16
+ # TODO Relacionamentos
17
+ # TODO Classes Abstratas
18
+ # TODO Partial Classes
19
+ # TODO Migrations
20
+ # TODO Migrar para a nsj_rest_lib2
21
+ # TODO Alterar o padrão de nomenclatura para snake_case
22
+
23
+
24
class CompilerResult:
    """Holds the artifacts produced by compiling one entity model."""

    def __init__(self):
        # Generated DTO artifacts (filled in by the compiler).
        self.dto_class_name: str | None = None
        self.dto_code: str | None = None
        # Generated Entity artifacts (filled in by the compiler).
        self.entity_class_name: str | None = None
        self.entity_code: str | None = None
        # TODO Route information
31
+
32
+
33
class EDLCompiler:
    """Compiles EDL entity models into DTO and Entity source code.

    Property handling is delegated to EDLPropertyCompiler and code
    generation to DTOCompiler / EntityCompiler. Trait inheritance is
    resolved recursively: a trait's collections are merged in before the
    entity's own (see _make_properties_structures).
    """

    def __init__(self) -> None:
        self._properties_compiler = EDLPropertyCompiler()
        self._dto_compiler = DTOCompiler()
        self._entity_compiler = EntityCompiler()

    def compile_models(
        self, entity_models: dict[str, EntityModel]
    ) -> list[CompilerResult]:
        """Compile every non-abstract model in *entity_models*.

        :param entity_models: models keyed by their complete entity id.
        :return: one CompilerResult per concrete (non-abstract) model.
        """
        compiler_results = []
        for entity_model_id in entity_models:
            entity_model = entity_models[entity_model_id]
            # Abstract models only contribute via traits; they are not compiled.
            if not entity_model.abstract:
                compiler_result = self._compile_model(entity_model, entity_models)
                compiler_results.append(compiler_result)

        return compiler_results

    def compile_model_from_edl(
        self,
        edl_json: dict[str, Any],
        dependencies_edls: list[dict[str, Any]],
    ) -> CompilerResult:
        """Compile a single model given raw EDL JSON plus its dependencies' JSON."""
        entity_model = EntityModel(**edl_json)

        entity_models = []
        for dependency_edl in dependencies_edls:
            dependency_entity_model = EntityModel(**dependency_edl)
            entity_models.append(dependency_entity_model)

        return self.compile_model(entity_model, entity_models)

    def compile_model(
        self,
        entity_model: EntityModel,
        dependencies_models: list[EntityModel],
    ) -> CompilerResult:
        """Compile a single model, indexing its dependencies by "escopo/id"."""
        entity_models = {}
        for dependency_entity_model in dependencies_models:
            complete_entity_id = (
                f"{dependency_entity_model.escopo}/{dependency_entity_model.id}"
            )
            entity_models[complete_entity_id] = dependency_entity_model

        return self._compile_model(entity_model, entity_models)

    def _compile_model(
        self,
        entity_model: EntityModel,
        entity_models: dict[str, EntityModel],
    ) -> CompilerResult:
        """Core compilation pipeline for one model (traits already indexed)."""

        # Build a map of indexes keyed by property name
        # TODO Handle query-support indexes (non-unique ones)
        map_indexes_by_property: dict[str, list[IndexCompilerStructure]] = {}
        map_unique_by_property: dict[str, IndexCompilerStructure] = {}
        self._make_unique_map_by_property(
            map_indexes_by_property, map_unique_by_property, entity_model, entity_models
        )

        # Build a copy of the collections needed to compile the properties
        # (the idea is to be able to mutate the collections without touching
        # the model entity, which is required for trait handling, etc. —
        # those become a new class resulting from the union of properties).
        properties_structure = PropertiesCompilerStructure()
        self._make_properties_structures(
            properties_structure, entity_model, entity_models
        )

        # Build the DTO and Entity attribute lists; also recover the primary keys
        ast_dto_attributes, ast_entity_attributes, props_pk, enum_classes = (
            self._properties_compiler.compile(
                properties_structure,
                map_unique_by_property,
                entity_model,
            )
        )

        # Generate the DTO code
        dto_class_name, code_dto = self._dto_compiler.compile(
            entity_model, ast_dto_attributes, enum_classes
        )

        # Generate the Entity code
        entity_class_name, code_entity = self._entity_compiler.compile(
            entity_model, ast_entity_attributes, props_pk
        )

        # Assemble the result
        compiler_result = CompilerResult()
        compiler_result.entity_class_name = entity_class_name
        compiler_result.entity_code = code_entity
        compiler_result.dto_class_name = dto_class_name
        compiler_result.dto_code = code_dto

        return compiler_result

    def _make_properties_structures(
        self,
        properties_structure: PropertiesCompilerStructure,
        entity_model: EntityModel,
        entity_models: dict[str, EntityModel],
    ):
        """Merge *entity_model*'s property collections into *properties_structure*.

        Recurses into the trait first, so the entity's own values are applied
        last and win on key collisions.
        """
        if not entity_model:
            return

        # Populate with the trait's properties first
        if entity_model.trait_from:
            trait_model = entity_models[entity_model.trait_from]

            self._make_properties_structures(
                properties_structure,
                trait_model,
                entity_models,
            )

        # Populate with the current entity's own properties
        properties_structure.properties.update(entity_model.properties)
        if entity_model.main_properties:
            properties_structure.main_properties.extend(entity_model.main_properties)
        if entity_model.required:
            properties_structure.required.extend(entity_model.required)
        if entity_model.partition_data:
            properties_structure.partition_data.extend(entity_model.partition_data)
        if entity_model.search_properties:
            properties_structure.search_properties.extend(
                entity_model.search_properties
            )
        if entity_model.metric_label:
            properties_structure.metric_label.extend(entity_model.metric_label)

        if entity_model.trait_properties:
            properties_structure.trait_properties.update(entity_model.trait_properties)

        if entity_model.repository.properties:
            properties_structure.entity_properties.update(
                entity_model.repository.properties
            )

    def _make_unique_map_by_property(
        self,
        map_indexes_by_property: dict[str, list[IndexCompilerStructure]],
        map_unique_by_property: dict[str, IndexCompilerStructure],
        entity_model: EntityModel,
        entity_models: dict[str, EntityModel],
        deep: int = 1,
    ):
        """Index the model's (and its trait chain's) repository indexes by property.

        Unique indexes go to *map_unique_by_property* (at most one per
        property; a duplicate from a trait is ignored with a warning, a
        duplicate at the entity level raises). Non-unique indexes are
        accumulated in *map_indexes_by_property*. *deep* > 1 marks
        trait-inherited levels of the recursion.
        """

        if not entity_model:
            return

        # Scan and organize the indexes
        if entity_model.repository.indexes:
            for index in entity_model.repository.indexes:
                for pkey in index.columns:
                    if index.unique:
                        if pkey in map_unique_by_property:
                            if deep > 1:
                                # Inherited duplicate: warn and keep the existing one.
                                get_logger().warning(
                                    f"Propriedade '{pkey}' possui mais de um índice de unicidade (sendo um herdado). Por isso a replicação (herdada) será ignorada."
                                )
                                continue
                            else:
                                raise Exception(
                                    f"Propriedade '{pkey}' possui mais de um índice de unicidade."
                                )  # TODO Review this way of handling errors

                        map_unique_by_property[pkey] = IndexCompilerStructure(
                            index, deep > 1
                        )
                    else:
                        list_index = map_indexes_by_property.setdefault(pkey, [])
                        list_index.append(IndexCompilerStructure(index, deep > 1))

        # Recurse into the trait's indexes
        if entity_model.trait_from:
            trait_model = entity_models[entity_model.trait_from]

            self._make_unique_map_by_property(
                map_indexes_by_property,
                map_unique_by_property,
                trait_model,
                entity_models,
                deep=deep + 1,
            )

    def list_dependencies(
        self, edl_json: dict[str, Any]
    ) -> tuple[list[str], EntityModel]:
        """Parse *edl_json* and return (dependency ids, parsed model)."""
        entity_model = EntityModel(**edl_json)

        return (self._list_dependencies(entity_model), entity_model)

    def _list_dependencies(self, entity_model: EntityModel) -> list[str]:
        """Return the ids this model depends on (currently only its trait)."""
        entities: list[str] = []
        if entity_model.trait_from:
            entities.append(entity_model.trait_from)

        return entities
233
+
234
+
235
+ if __name__ == "__main__":
236
+ import json
237
+
238
+ files = [
239
+ "exemplos_doc/core.pessoa.edl.json",
240
+ "exemplos_doc/core.pessoa(cliente).edl.json",
241
+ ]
242
+
243
+ entities = {}
244
+ for file in files:
245
+ with open(file) as f:
246
+ edl_json = json.load(f)
247
+
248
+ # Instanciando o objeto de modelo de entidade a partir do JSON,
249
+ # e já realizando as validações básicas de tipo e estrutura.
250
+ print(f"Validando arquivo: {file}")
251
+ entity_model = EntityModel(**edl_json)
252
+
253
+ complete_entity_id = f"{entity_model.escopo}/{entity_model.id}"
254
+ entities[complete_entity_id] = entity_model
255
+
256
+ compiler = EDLCompiler()
257
+ compiler_results = compiler.compile_models(entities)
258
+
259
+ for compiler_result in compiler_results:
260
+ print("==========================================================")
261
+ print(f"Entity: {compiler_result.entity_class_name}")
262
+ print(f"{compiler_result.entity_code}")
263
+ print("\n")
264
+ print("==========================================================")
265
+ print(f"DTO: {compiler_result.dto_class_name}")
266
+ print(f"{compiler_result.dto_code}")
267
+ print("\n")
@@ -0,0 +1,24 @@
1
+ from nsj_rest_lib2.compiler.edl_model.column_meta_model import ColumnMetaModel
2
+ from nsj_rest_lib2.compiler.edl_model.index_model import IndexModel
3
+ from nsj_rest_lib2.compiler.edl_model.property_meta_model import PropertyMetaModel
4
+ from nsj_rest_lib2.compiler.edl_model.trait_property_meta_model import (
5
+ TraitPropertyMetaModel,
6
+ )
7
+
8
+
9
class IndexCompilerStructure:
    """Pairs an IndexModel with a flag telling whether it was trait-inherited."""

    def __init__(self, index_model: IndexModel, inherited: bool) -> None:
        # The underlying repository index definition.
        self.index_model: IndexModel = index_model
        # True when the index came from a trait rather than the entity itself.
        self.inherited: bool = inherited
13
+
14
+
15
class PropertiesCompilerStructure:
    """Mutable aggregation of an entity's property collections for compilation.

    Filled by merging a model and its trait chain, so it can be mutated
    freely without affecting the source EntityModel instances.
    """

    def __init__(self) -> None:
        # Logical properties keyed by name.
        self.properties: dict[str, PropertyMetaModel] = {}
        # Names of the main (summary) properties.
        self.main_properties: list[str] = []
        # Names of required properties.
        self.required: list[str] = []
        # Names of data-partitioning properties.
        self.partition_data: list[str] = []
        # Names of searchable properties.
        self.search_properties: list[str] = []
        # Names of metric-label properties.
        self.metric_label: list[str] = []
        # Repository (physical) column metadata keyed by property name.
        self.entity_properties: dict[str, ColumnMetaModel] = {}
        # Trait-level property metadata keyed by property name.
        self.trait_properties: dict[str, TraitPropertyMetaModel] = {}
@@ -0,0 +1,83 @@
1
+ import ast
2
+
3
+ import black
4
+
5
+ from nsj_rest_lib2.compiler.edl_model.entity_model import EntityModel
6
+ from nsj_rest_lib2.compiler.util.str_util import CompilerStrUtil
7
+
8
+
9
class DTOCompiler:
    """Compiles a DTO class definition (as Python source) from AST fragments."""

    def __init__(self):
        pass

    def compile(
        self,
        entity_model: EntityModel,
        ast_dto_attributes: list[ast.stmt],
        enum_classes: list[ast.stmt],
    ) -> tuple[str, str]:
        """
        Compile the DTO code from the AST fragments and return it.

        :param entity_model: Entity model
        :type entity_model: EntityModel

        :param ast_dto_attributes: DTO attributes
        :type ast_dto_attributes: list[ast.stmt]

        :param enum_classes: Enumeration classes
        :type enum_classes: list[ast.stmt]

        :return: Tuple with the DTO class name and its compiled code
        :rtype: tuple[str, str]
        """
        imports = [
            # import datetime
            ast.Import(names=[ast.alias(name="datetime", asname=None)]),
            # import enum
            ast.Import(names=[ast.alias(name="enum", asname=None)]),
            # import uuid
            ast.Import(names=[ast.alias(name="uuid", asname=None)]),
            # from nsj_rest_lib.decorator.dto import DTO
            ast.ImportFrom(
                module="nsj_rest_lib.decorator.dto",
                names=[ast.alias(name="DTO", asname=None)],
                level=0,
            ),
            # from nsj_rest_lib.descriptor.dto_field import DTOField
            ast.ImportFrom(
                module="nsj_rest_lib.descriptor.dto_field",
                names=[ast.alias(name="DTOField", asname=None)],
                level=0,
            ),
            # from nsj_rest_lib.dto.dto_base import DTOBase
            # The generated class inherits from DTOBase, so the generated
            # module must import it — without this the emitted code raised
            # NameError('DTOBase') when executed.
            ast.ImportFrom(
                module="nsj_rest_lib.dto.dto_base",
                names=[ast.alias(name="DTOBase", asname=None)],
                level=0,
            ),
        ]

        class_name = f"{CompilerStrUtil.to_pascal_case(entity_model.id)}DTO"
        ast_class = ast.ClassDef(
            name=class_name,
            bases=[ast.Name(id="DTOBase", ctx=ast.Load())],
            keywords=[],
            decorator_list=[
                ast.Call(
                    func=ast.Name(id="DTO", ctx=ast.Load()),
                    args=[],
                    keywords=[],
                )
            ],
            body=ast_dto_attributes,
        )

        # Assemble the module
        module = ast.Module(
            body=imports + enum_classes + [ast_class],
            type_ignores=[],
        )
        module = ast.fix_missing_locations(module)

        # Unparse the DTO AST into Python source code
        code = ast.unparse(module)

        # Run black to format the generated DTO source
        code = black.format_str(code, mode=black.FileMode())

        return (class_name, code)
File without changes