nsj-rest-lib2 0.0.3__py3-none-any.whl → 0.0.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
File without changes
@@ -0,0 +1,285 @@
1
+ #!/usr/bin/env python3
2
+ # -*- coding: utf-8 -*-
3
+
4
+ """
5
+ Gerador de DTO e Entity a partir de um EDL JSON (v1.x) — AGORA usando 'repository'
6
+ no lugar de 'storage', e propagando o de-para de colunas:
7
+ - Entity usa os nomes físicos vindos de model.repository.columns[*].column (quando houver).
8
+ - DTOField ganha entity_field="<nome_do_atributo_na_Entity>" para cada propriedade.
9
+
10
+ Uso:
11
+ python generator.py caminho/para/arquivo.edl.json
12
+ """
13
+
14
+ import json
15
+ import sys
16
+ import keyword
17
+ from typing import Any, Dict, List, Tuple
18
+
19
+ # -----------------------
20
+ # Helpers de transformação
21
+ # -----------------------
22
+
23
+
24
def py_identifier(name: str) -> str:
    """Coerce *name* into a valid Python identifier.

    Rules:
    - Hyphens become underscores; surrounding whitespace is stripped.
    - An empty result falls back to a single underscore.
    - Reserved keywords get the conventional trailing underscore.
    - Names that are still invalid (e.g. start with a digit, or contain
      other illegal characters) are prefixed with an underscore and
      sanitized. The old behavior appended "_", which left names such as
      "1abc" invalid ("1abc_" is not an identifier).
    """
    n = (name or "").strip().replace("-", "_")
    if not n:
        n = "_"
    if keyword.iskeyword(n):
        # "class" -> "class_", "for" -> "for_", etc.
        n = f"{n}_"
    if not n.isidentifier():
        # Prefix fixes a leading digit; the filter drops characters that
        # can never appear in an identifier (spaces, punctuation, ...).
        n = "_" + "".join(ch for ch in n if ch.isalnum() or ch == "_")
        if not n.isidentifier():
            n = "_"
    return n
32
+
33
+
34
def detect_pk(model_props: Dict[str, Any]) -> str:
    """Return the logical name of the first property flagged as primary key.

    Only dict-valued properties with an exact ``pk: True`` qualify;
    returns an empty string when no such property exists.
    """
    return next(
        (
            name
            for name, spec in model_props.items()
            if isinstance(spec, dict) and spec.get("pk") is True
        ),
        "",
    )
39
+
40
+
41
def to_python_type(prop: Dict[str, Any]) -> str:
    """Map an EDL property spec to the textual Python type used in codegen.

    ``format: uuid`` takes precedence over ``type``; unknown or missing
    types fall back to ``str``.
    """
    declared_type = (prop.get("type") or "string").lower()
    declared_format = (prop.get("format") or "").lower()

    if declared_format == "uuid":
        return "uuid.UUID"

    type_map = {
        "datetime": "datetime.datetime",
        "date": "datetime.date",
        "integer": "int",
        "number": "float",
        "boolean": "bool",
    }
    return type_map.get(declared_type, "str")
58
+
59
+
60
def build_entity_field_name(logical: str, columns_map: Dict[str, Any]) -> str:
    """Resolve the Entity attribute name for a logical field.

    Prefers the physical column name declared under
    ``repository.columns[logical].column``; falls back to the logical
    name when no mapping exists.
    """
    if columns_map:
        column_meta = columns_map.get(logical) or {}
    else:
        column_meta = {}
    return py_identifier(column_meta.get("column") or logical)
68
+
69
+
70
def dto_field_args(
    logical: str,
    prop: Dict[str, Any],
    required: List[str],
    columns_map: Dict[str, Any],
) -> Dict[str, Any]:
    """Build the keyword arguments for the generated ``DTOField(...)`` call.

    Encodes the generation policy: every field is exposed
    (``resume=True``), PKs and required fields are ``not_null``,
    string/number bounds map to ``min``/``max``, and UUID-formatted
    fields get a dedicated validator plus fixed length bounds.
    """
    # Policy: always expose fields that exist at the source.
    args: Dict[str, Any] = {"resume": True}

    prop_type = (prop.get("type") or "string").lower()
    prop_format = (prop.get("format") or "").lower()

    # PK implies not-null; required fields are not-null as well.
    if prop.get("pk") is True:
        args["pk"] = True
        args["not_null"] = True
    if logical in required:
        args["not_null"] = True

    if prop_type == "string":
        # Strings: strip whitespace and carry length bounds.
        args["strip"] = True
        if "length" in prop:
            args["max"] = int(prop["length"])
        if "minimum" in prop:
            args["min"] = int(prop["minimum"])
    elif prop_type in ("integer", "number"):
        # Numbers: bounds typed to match the declared type.
        cast = int if prop_type == "integer" else float
        if "minimum" in prop:
            args["min"] = cast(prop["minimum"])
        if "maximum" in prop:
            args["max"] = cast(prop["maximum"])

    # Special formats: UUIDs override min/max with the canonical length.
    if prop_format == "uuid":
        args["validator"] = "DTOFieldValidators().validate_uuid"
        args["min"] = 36
        args["max"] = 36

    # Logical default (when present). Strings ending in "()" are treated
    # as callable expressions and emitted verbatim; everything else is
    # emitted as a Python literal via repr().
    default = prop.get("default")
    if default is not None:
        if isinstance(default, str) and default.endswith("()"):
            args["default_value"] = default
        else:
            args["default_value"] = repr(default)

    # Always record the matching attribute name on the Entity.
    args["entity_field"] = build_entity_field_name(logical, columns_map)

    return args
129
+
130
+
131
def render_dto(edl: Dict[str, Any]) -> Tuple[str, str]:
    """Render the DTO class source for an EDL document.

    Returns a ``(class_name, source_code)`` tuple. Every property from
    ``model.properties`` becomes a ``DTOField`` whose ``entity_field``
    argument points at the physical attribute name on the Entity.
    """
    model = edl.get("model", {}) or {}
    props: Dict[str, Any] = model.get("properties", {}) or {}
    required: List[str] = model.get("required", []) or []
    repository = model.get("repository", {}) or {}
    columns_map: Dict[str, Any] = repository.get("columns", {}) or {}

    entity_name_full = edl.get("id") or "Entity"
    # Fall back when the id ends with a dot (empty basename); slicing
    # below avoids the IndexError the old class_base[0] raised there.
    class_base = entity_name_full.split(".")[-1] or "Entity"
    class_name = f"{class_base[:1].upper()}{class_base[1:]}"
    dto_class = f"{class_name}DTO"

    # Conditional imports: pull uuid/datetime only when some property needs them.
    need_uuid = any(((p.get("format") or "").lower() == "uuid") for p in props.values())
    need_datetime = any(
        ((p.get("type") or "").lower() in {"datetime", "date"}) for p in props.values()
    )

    header_imports = [
        "from nsj_rest_lib.decorator.dto import DTO",
        "from nsj_rest_lib.descriptor.dto_field import DTOField",
        "from nsj_rest_lib.descriptor.dto_field_validators import DTOFieldValidators",
        "from nsj_rest_lib.dto.dto_base import DTOBase",
    ]
    if need_uuid:
        header_imports.insert(0, "import uuid")
    if need_datetime:
        header_imports.insert(0, "import datetime")

    lines: List[str] = []
    lines.extend(header_imports)
    lines.append("")
    lines.append("")
    lines.append("@DTO()")
    lines.append(f"class {dto_class}(DTOBase):")
    if not props:
        lines.append("    pass")
        return (dto_class, "\n".join(lines))

    for logical, meta in props.items():
        meta = meta or {}
        py_type = to_python_type(meta)
        field_args = dto_field_args(logical, meta, required, columns_map)

        # Render the DTOField(...) call: the validator is a raw expression,
        # every other argument is emitted as a Python literal.
        arg_parts = []
        for key, value in field_args.items():
            if key == "validator":
                arg_parts.append(f"{key}={value}")
            else:
                arg_parts.append(f"{key}={repr(value)}")
        args_str = ", ".join(arg_parts)

        lines.append("")
        lines.append(f"    {py_identifier(logical)}: {py_type} = DTOField({args_str})")

    return (dto_class, "\n".join(lines))
188
+
189
+
190
def render_entity(edl: Dict[str, Any]) -> Tuple[str, str]:
    """Render the Entity class source for an EDL document.

    Returns ``(class_name, source_code)``. Attribute names come from the
    physical column names in ``model.repository.columns`` when available;
    the table name comes from ``repository.map`` and the default ordering
    from ``api.default_sort``.
    """
    model = edl.get("model", {}) or {}
    props: Dict[str, Any] = model.get("properties", {}) or {}
    repository = model.get("repository", {}) or {}
    api = model.get("api", {}) or {}

    entity_name_full = edl.get("id") or "Entity"
    # Fall back when the id ends with a dot (empty basename); slicing
    # below avoids the IndexError the old class_base[0] raised there.
    class_base = entity_name_full.split(".")[-1] or "Entity"
    class_name = f"{class_base[:1].upper()}{class_base[1:]}"
    entity_class = f"{class_name}Entity"

    table_name = repository.get("map") or "schema.tabela"
    pk_field = detect_pk(props) or "id"

    # default_order_fields = api.default_sort, stripping the leading
    # '+'/'-' direction markers and ignoring names not present in props.
    default_sort: List[str] = []
    for item in api.get("default_sort", []) or []:
        fld = str(item).lstrip("+-")
        if fld in props:
            default_sort.append(fld)
    if not default_sort:
        default_sort = [pk_field] if pk_field else []

    # Conditional imports mirror render_dto.
    need_uuid = any(((p.get("format") or "").lower() == "uuid") for p in props.values())
    need_datetime = any(
        ((p.get("type") or "").lower() in {"datetime", "date"}) for p in props.values()
    )

    header_imports = [
        "from nsj_rest_lib.entity.entity_base import EntityBase",
        "from nsj_rest_lib.decorator.entity import Entity",
    ]
    if need_uuid:
        header_imports.insert(0, "import uuid")
    if need_datetime:
        header_imports.insert(0, "import datetime")

    columns_map: Dict[str, Any] = repository.get("columns", {}) or {}

    lines: List[str] = []
    lines.extend(header_imports)
    lines.append("")
    lines.append("")
    lines.append("@Entity(")
    lines.append(f"    table_name={repr(table_name)},")
    lines.append(f"    pk_field={repr(py_identifier(pk_field))},")
    if default_sort:
        lines.append(
            f"    default_order_fields={[py_identifier(x) for x in default_sort]},"
        )
    lines.append(")")
    lines.append(f"class {entity_class}(EntityBase):")

    if not props:
        lines.append("    pass")
        return (entity_class, "\n".join(lines))

    for logical, meta in props.items():
        py_type = to_python_type(meta)
        entity_attr = build_entity_field_name(logical, columns_map)
        lines.append(f"    {entity_attr}: {py_type} = None")

    return (entity_class, "\n".join(lines))
254
+
255
+
256
def generate_from_edl(edl: Dict[str, Any]) -> Tuple[str, str, str, str]:
    """Generate both artifacts for an EDL document.

    Returns ``(dto_class_name, dto_code, entity_class_name, entity_code)``.
    """
    dto = render_dto(edl)
    entity = render_entity(edl)
    return (*dto, *entity)
260
+
261
+
262
+ # -----------------------
263
+ # CLI
264
+ # -----------------------
265
+
266
+
267
def main():
    """CLI entry point: read an EDL JSON file and print DTO + Entity code.

    Exits with status 2 when no input path is given.
    """
    if len(sys.argv) < 2:
        print("Uso: python generator.py caminho/para/arquivo.edl.json", file=sys.stderr)
        sys.exit(2)

    with open(sys.argv[1], "r", encoding="utf-8") as handle:
        edl = json.load(handle)

    _, dto_code, _, entity_code = generate_from_edl(edl)

    banner = "\n" + ("#" * 80) + "\n"
    for title, code in (("# DTO\n", dto_code), ("# ENTITY\n", entity_code)):
        print(banner + title + banner)
        print(code)
282
+
283
+
284
# Run the CLI only when executed as a script (not when imported).
if __name__ == "__main__":
    main()
@@ -0,0 +1,288 @@
1
+ from typing import Any
2
+
3
+ from nsj_rest_lib2.compiler.compiler_structures import (
4
+ IndexCompilerStructure,
5
+ PropertiesCompilerStructure,
6
+ )
7
+ from nsj_rest_lib2.compiler.dto_compiler import DTOCompiler
8
+ from nsj_rest_lib2.compiler.edl_model.repository_model import RepositoryModel
9
+ from nsj_rest_lib2.compiler.entity_compiler import EntityCompiler
10
+ from nsj_rest_lib2.compiler.property_compiler import EDLPropertyCompiler
11
+
12
+ from nsj_rest_lib2.compiler.edl_model.entity_model import EntityModel
13
+
14
+ from nsj_rest_lib2.settings import get_logger
15
+
16
+ # TODO Rever a questão do tempo de expiração ser renovado, na leitura do redis
17
+ # TODO Atualizar o status da entidade pelo worker de compilação (e talvez parar uma compilação, quando se delete uma entidade)
18
+ # TODO Relacionamentos
19
+ # TODO Classes Abstratas
20
+ # TODO Partial Classes
21
+ # TODO Migrations
22
+ # TODO Migrar para a nsj_rest_lib2
23
+ # TODO Alterar o padrão de nomenclatura para snake_case
24
+
25
+
26
class CompilerResult:
    """Holds the artifacts produced by compiling a single entity model."""

    def __init__(self):
        # Generated DTO artifact: class name and its Python source code.
        self.dto_class_name: str | None = None
        self.dto_code: str | None = None
        # Generated Entity artifact: class name and its Python source code.
        self.entity_class_name: str | None = None
        self.entity_code: str | None = None
        # TODO: route information still missing
33
+
34
+
35
class EDLCompiler:
    """Compiles EDL entity models into DTO and Entity Python source code.

    Resolves trait inheritance (``trait_from``) recursively, merges
    properties and uniqueness indexes across the trait chain, and
    delegates code emission to the property/DTO/Entity compilers.
    """

    def __init__(self) -> None:
        self._properties_compiler = EDLPropertyCompiler()
        self._dto_compiler = DTOCompiler()
        self._entity_compiler = EntityCompiler()

    def compile_models(
        self, entity_models: dict[str, EntityModel]
    ) -> list[CompilerResult]:
        """Compile every non-abstract model in *entity_models*.

        Abstract models are skipped: they only contribute properties to
        the concrete models that inherit from them.
        """
        compiler_results = []
        for entity_model_id in entity_models:
            entity_model = entity_models[entity_model_id]
            if not entity_model.abstract:
                compiler_result = self._compile_model(entity_model, entity_models)
                compiler_results.append(compiler_result)

        return compiler_results

    def compile_model_from_edl(
        self,
        edl_json: dict[str, Any],
        dependencies_edls: list[dict[str, Any]],
    ) -> CompilerResult:
        """Compile a model from raw EDL JSON plus the raw JSON of its dependencies."""
        entity_model = EntityModel(**edl_json)

        entity_models = []
        for dependency_edl in dependencies_edls:
            dependency_entity_model = EntityModel(**dependency_edl)
            entity_models.append(dependency_entity_model)

        return self.compile_model(entity_model, entity_models)

    def compile_model(
        self,
        entity_model: EntityModel,
        dependencies_models: list[EntityModel],
    ) -> CompilerResult:
        """Compile *entity_model*, indexing its dependencies by "<escopo>/<id>"."""
        entity_models = {}
        for dependency_entity_model in dependencies_models:
            complete_entity_id = (
                f"{dependency_entity_model.escopo}/{dependency_entity_model.id}"
            )
            entity_models[complete_entity_id] = dependency_entity_model

        return self._compile_model(entity_model, entity_models)

    def _compile_model(
        self,
        entity_model: EntityModel,
        entity_models: dict[str, EntityModel],
    ) -> CompilerResult:
        """Core compilation: resolve indexes and properties (traits included),
        then emit the DTO and Entity source code."""

        # Build a map of indexes keyed by property name
        # TODO Implement handling of query-support indexes (not only the uniqueness ones)
        map_indexes_by_property: dict[str, list[IndexCompilerStructure]] = {}
        map_unique_by_property: dict[str, IndexCompilerStructure] = {}
        self._make_unique_map_by_property(
            map_indexes_by_property, map_unique_by_property, entity_model, entity_models
        )

        # Build a working copy of the collections needed to compile the
        # properties (the idea is to be able to mutate these collections
        # without touching the model entity itself — required for trait
        # handling, etc., which produce a new class that is the union of
        # these properties).
        properties_structure = PropertiesCompilerStructure()
        self._make_properties_structures(
            properties_structure, entity_model, entity_models
        )

        # Build the DTO and Entity attribute lists; also retrieve the primary keys
        ast_dto_attributes, ast_entity_attributes, props_pk, enum_classes = (
            self._properties_compiler.compile(
                properties_structure,
                map_unique_by_property,
                entity_model,
            )
        )

        # Generate the DTO source code
        dto_class_name, code_dto = self._dto_compiler.compile(
            entity_model, ast_dto_attributes, enum_classes
        )

        # Generate the Entity source code
        entity_class_name, code_entity = self._entity_compiler.compile(
            entity_model, ast_entity_attributes, props_pk
        )

        # Assemble and return the result
        compiler_result = CompilerResult()
        compiler_result.entity_class_name = entity_class_name
        compiler_result.entity_code = code_entity
        compiler_result.dto_class_name = dto_class_name
        compiler_result.dto_code = code_dto

        return compiler_result

    def _make_properties_structures(
        self,
        properties_structure: PropertiesCompilerStructure,
        entity_model: EntityModel,
        entity_models: dict[str, EntityModel],
    ):
        """Recursively merge the trait chain's collections, then the model's
        own, into *properties_structure* (current model overrides the trait)."""
        if not entity_model:
            return

        # Populate with the trait's properties first (recursion), so the
        # current entity's values override the inherited ones below.
        if entity_model.trait_from:
            trait_model = entity_models[entity_model.trait_from]

            self._make_properties_structures(
                properties_structure,
                trait_model,
                entity_models,
            )

        # Populate with the current entity's own properties
        properties_structure.properties.update(entity_model.properties)
        if entity_model.main_properties:
            properties_structure.main_properties.extend(entity_model.main_properties)
        if entity_model.required:
            properties_structure.required.extend(entity_model.required)
        if entity_model.partition_data:
            properties_structure.partition_data.extend(entity_model.partition_data)
        if entity_model.search_properties:
            properties_structure.search_properties.extend(
                entity_model.search_properties
            )
        if entity_model.metric_label:
            properties_structure.metric_label.extend(entity_model.metric_label)

        if entity_model.trait_properties:
            properties_structure.trait_properties.update(entity_model.trait_properties)

        if entity_model.repository.properties:
            properties_structure.entity_properties.update(
                entity_model.repository.properties
            )

    def _make_unique_map_by_property(
        self,
        map_indexes_by_property: dict[str, list[IndexCompilerStructure]],
        map_unique_by_property: dict[str, IndexCompilerStructure],
        entity_model: EntityModel,
        entity_models: dict[str, EntityModel],
        deep: int = 1,
    ):
        """Walk the model and, recursively, its trait chain, collecting
        indexes per property name.

        ``deep`` > 1 marks an inherited (trait) level: duplicated unique
        indexes found there are warned about and skipped, while a
        duplicate at the model's own level raises.
        """

        if not entity_model:
            return

        # Scan and organize the indexes
        if entity_model.repository.indexes:
            for index in entity_model.repository.indexes:
                for pkey in index.columns:
                    if index.unique:
                        if pkey in map_unique_by_property:
                            if deep > 1:
                                # Inherited duplicate: keep the index already
                                # registered (the most derived one wins).
                                get_logger().warning(
                                    f"Propriedade '{pkey}' possui mais de um índice de unicidade (sendo um herdado). Por isso a replicação (herdada) será ignorada."
                                )
                                continue
                            else:
                                raise Exception(
                                    f"Propriedade '{pkey}' possui mais de um índice de unicidade."
                                )  # TODO Review this way of handling errors

                        map_unique_by_property[pkey] = IndexCompilerStructure(
                            index, deep > 1
                        )
                    else:
                        list_index = map_indexes_by_property.setdefault(pkey, [])
                        list_index.append(IndexCompilerStructure(index, deep > 1))

        # Recurse into the trait (inherited level)
        if entity_model.trait_from:
            trait_model = entity_models[entity_model.trait_from]

            self._make_unique_map_by_property(
                map_indexes_by_property,
                map_unique_by_property,
                trait_model,
                entity_models,
                deep=deep + 1,
            )

    def list_dependencies(
        self, edl_json: dict[str, Any]
    ) -> tuple[list[str], EntityModel]:
        """Parse *edl_json* and return ``(dependency ids, parsed model)``."""
        entity_model = EntityModel(**edl_json)

        return (self._list_dependencies(entity_model), entity_model)

    def _list_dependencies(self, entity_model: EntityModel) -> list[str]:
        # Currently only the trait parent counts as a dependency.
        entities: list[str] = []
        if entity_model.trait_from:
            entities.append(entity_model.trait_from)

        return entities
235
+
236
+
237
def get_files_from_directory(directory):
    """Return the full paths of all ``.json`` files directly inside *directory*.

    The scan is non-recursive and follows ``os.listdir`` ordering.
    """
    # Imported locally: this module only imports `os` inside the __main__
    # block, so a module-level reference would raise NameError when this
    # function is imported and called from elsewhere.
    import os

    return [
        os.path.join(directory, file)
        for file in os.listdir(directory)
        if file.endswith(".json")
    ]
243
+
244
+
245
if __name__ == "__main__":
    import argparse
    import json
    import os

    # CLI: validate and compile every .json EDL file found in a directory,
    # printing the generated Entity and DTO source code.
    parser = argparse.ArgumentParser(
        description="Compila arquivos EDL para classes Python"
    )
    parser.add_argument(
        "-d",
        "--directory",
        help="Diretório com arquivos .json para compilar",
        type=str,
        required=True,
    )
    args = parser.parse_args()

    entities = {}
    for file in get_files_from_directory(args.directory):
        with open(file) as f:
            edl_json = json.load(f)

        # Instantiate the entity model from the JSON, which already runs
        # the basic type and structure validations.
        print(f"Validando arquivo: {file}")
        entity_model = EntityModel(**edl_json)

        entities[f"{entity_model.escopo}/{entity_model.id}"] = entity_model

    compiler_results = EDLCompiler().compile_models(entities)

    for compiler_result in compiler_results:
        print("==========================================================")
        print(f"Entity: {compiler_result.entity_class_name}")
        print(f"{compiler_result.entity_code}")
        print("\n")
        print("==========================================================")
        print(f"DTO: {compiler_result.dto_class_name}")
        print(f"{compiler_result.dto_code}")
        print("\n")
@@ -0,0 +1,24 @@
1
+ from nsj_rest_lib2.compiler.edl_model.column_meta_model import ColumnMetaModel
2
+ from nsj_rest_lib2.compiler.edl_model.index_model import IndexModel
3
+ from nsj_rest_lib2.compiler.edl_model.property_meta_model import PropertyMetaModel
4
+ from nsj_rest_lib2.compiler.edl_model.trait_property_meta_model import (
5
+ TraitPropertyMetaModel,
6
+ )
7
+
8
+
9
class IndexCompilerStructure:
    """Pairs an index definition with its inheritance origin.

    ``inherited`` is True when the index was collected from a trait
    (parent model) rather than from the model being compiled.
    """

    def __init__(self, index_model: IndexModel, inherited: bool) -> None:
        # Underlying index definition from the EDL repository section.
        self.index_model: IndexModel = index_model
        # Origin flag: True when collected from a trait level.
        self.inherited: bool = inherited
13
+
14
+
15
class PropertiesCompilerStructure:
    """Mutable working copy of a model's property-related collections.

    The compiler fills this structure by merging an ``EntityModel`` with
    its trait chain, so the originals are never mutated.
    """

    def __init__(self) -> None:
        # Aggregated from EntityModel.properties (traits merged in).
        self.properties: dict[str, PropertyMetaModel] = {}
        # Aggregated from EntityModel.main_properties.
        self.main_properties: list[str] = []
        # Aggregated from EntityModel.required.
        self.required: list[str] = []
        # Aggregated from EntityModel.partition_data.
        self.partition_data: list[str] = []
        # Aggregated from EntityModel.search_properties.
        self.search_properties: list[str] = []
        # Aggregated from EntityModel.metric_label.
        self.metric_label: list[str] = []
        # Aggregated from EntityModel.repository.properties (physical columns).
        self.entity_properties: dict[str, ColumnMetaModel] = {}
        # Aggregated from EntityModel.trait_properties.
        self.trait_properties: dict[str, TraitPropertyMetaModel] = {}
@@ -0,0 +1,83 @@
1
+ import ast
2
+
3
+ import black
4
+
5
+ from nsj_rest_lib2.compiler.edl_model.entity_model import EntityModel
6
+ from nsj_rest_lib2.compiler.util.str_util import CompilerStrUtil
7
+
8
+
9
class DTOCompiler:
    """Builds the Python source code of a DTO class from pre-built AST attributes."""

    def __init__(self):
        pass

    def compile(
        self,
        entity_model: EntityModel,
        ast_dto_attributes: list[ast.stmt],
        enum_classes: list[ast.stmt],
    ) -> tuple[str, str]:
        """
        Compile the DTO code from the AST and return the compiled code.

        :param entity_model: Entity model
        :type entity_model: EntityModel

        :param ast_dto_attributes: DTO attributes
        :type ast_dto_attributes: list[ast.stmt]

        :param enum_classes: Enumeration classes
        :type enum_classes: list[ast.stmt]

        :return: DTO class name and its compiled source code
        :rtype: tuple[str, str]
        """
        # Fixed import header of the generated module.
        # NOTE(review): the generated class extends DTOBase, but no import
        # of nsj_rest_lib.dto.dto_base is emitted here — confirm it is
        # injected elsewhere (e.g. by the execution environment).
        imports = [
            # import datetime
            ast.Import(names=[ast.alias(name="datetime", asname=None)]),
            # import enum
            ast.Import(names=[ast.alias(name="enum", asname=None)]),
            # import uuid
            ast.Import(names=[ast.alias(name="uuid", asname=None)]),
            # from nsj_rest_lib.decorator.dto import DTO
            ast.ImportFrom(
                module="nsj_rest_lib.decorator.dto",
                names=[ast.alias(name="DTO", asname=None)],
                level=0,
            ),
            # from nsj_rest_lib.descriptor.dto_field import DTOField
            ast.ImportFrom(
                module="nsj_rest_lib.descriptor.dto_field",
                names=[ast.alias(name="DTOField", asname=None)],
                level=0,
            ),
        ]

        # Class name follows "<PascalCase id>DTO"; the class is decorated
        # with @DTO() and extends DTOBase.
        class_name = f"{CompilerStrUtil.to_pascal_case(entity_model.id)}DTO"
        ast_class = ast.ClassDef(
            name=class_name,
            bases=[ast.Name(id="DTOBase", ctx=ast.Load())],
            keywords=[],
            decorator_list=[
                ast.Call(
                    func=ast.Name(id="DTO", ctx=ast.Load()),
                    args=[],
                    keywords=[],
                )
            ],
            body=ast_dto_attributes,
        )

        # Assemble the module: imports, enum classes, then the DTO class.
        module = ast.Module(
            body=imports + enum_classes + [ast_class],
            type_ignores=[],
        )
        module = ast.fix_missing_locations(module)

        # Unparse the DTO AST into Python source code
        code = ast.unparse(module)

        # Run black to format the generated DTO Python code
        code = black.format_str(code, mode=black.FileMode())

        return (class_name, code)
File without changes