nsj-rest-lib2 0.0.29__py3-none-any.whl → 0.0.31__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,201 @@
+from __future__ import annotations
+
+from typing import Any
+
+from nsj_rest_lib2.compiler.migration_compiler_util import MigrationCompilerUtil
+
+
+class MigrationCompilerAlterTable:
+    def compile(
+        self,
+        table_name: str,
+        column_specs: list[dict[str, Any]],
+        pk_columns: list[str],
+        rename_operations: list[tuple[str, str]] | None = None,
+        fk_specs: list[dict[str, str]] | None = None,
+    ) -> list[str]:
+        lines: list[str] = []
+        rename_operations = rename_operations or []
+        fk_specs = fk_specs or []
+
+        for old_col, new_col in rename_operations:
+            lines.append(
+                f" IF exists_column('{table_name}', '{old_col}') AND NOT exists_column('{table_name}', '{new_col}') THEN"
+            )
+            lines.append(
+                f" ALTER TABLE {table_name} RENAME COLUMN {old_col} TO {new_col};"
+            )
+            lines.append(" END IF;")
+
+        for spec in column_specs:
+            column_name = spec["column_name"]
+            default_literal = (
+                MigrationCompilerUtil.quote_literal(spec["default"])
+                if spec["default"] is not None
+                else None
+            )
+            default_literal_text = (
+                f"{default_literal}::text" if default_literal is not None else None
+            )
+            not_null_literal = "TRUE" if spec["not_null"] else "FALSE"
+
+            lines.append(f" -- coluna {column_name}")
+            lines.append(
+                f" IF NOT exists_column('{table_name}', '{column_name}') THEN"
+            )
+            lines.append(
+                MigrationCompilerUtil.indent_sql(
+                    self._build_add_column_sql(table_name, spec), 12
+                )[0]
+            )
+
+            self._append_enum_and_max_checks(lines, table_name, column_name, spec, 12)
+
+            if spec["description"]:
+                lines.append(
+                    " "
+                    + MigrationCompilerUtil.comment_on_column(
+                        table_name, column_name, spec["description"]
+                    )
+                    + ";"
+                )
+
+            lines.append(" ELSE")
+
+            # Type change
+            lines.append(
+                f" IF NOT validate_column_properties('{table_name}', '{column_name}', '{spec['sql_type']}') THEN"
+            )
+            lines.append(
+                f" ALTER TABLE {table_name} ALTER COLUMN {column_name} TYPE {spec['sql_type']};"
+            )
+            lines.append(" END IF;")
+
+            # Nullability change
+            lines.append(f" IF {not_null_literal} THEN")
+            lines.append(
+                f" IF NOT column_is_not_null('{table_name}', '{column_name}') THEN"
+            )
+            lines.append(
+                f" ALTER TABLE {table_name} ALTER COLUMN {column_name} SET NOT NULL;"
+            )
+            lines.append(" END IF;")
+            lines.append(" ELSE")
+            lines.append(
+                f" IF column_is_not_null('{table_name}', '{column_name}') THEN"
+            )
+            lines.append(
+                f" ALTER TABLE {table_name} ALTER COLUMN {column_name} DROP NOT NULL;"
+            )
+            lines.append(" END IF;")
+            lines.append(" END IF;")
+
+            # Default change
+            if default_literal is None:
+                lines.append(
+                    f" IF column_default_expr('{table_name}', '{column_name}') IS NOT NULL THEN"
+                )
+                lines.append(
+                    f" ALTER TABLE {table_name} ALTER COLUMN {column_name} DROP DEFAULT;"
+                )
+                lines.append(" END IF;")
+            else:
+                lines.append(
+                    f" IF NOT column_default_equals('{table_name}', '{column_name}', {default_literal_text}) THEN"
+                )
+                lines.append(
+                    f" ALTER TABLE {table_name} ALTER COLUMN {column_name} SET DEFAULT {default_literal};"
+                )
+                lines.append(" END IF;")
+
+            # Checks and comments
+            self._append_enum_and_max_checks(lines, table_name, column_name, spec, 12)
+
+            if spec["description"]:
+                lines.append(
+                    " "
+                    + MigrationCompilerUtil.comment_on_column(
+                        table_name, column_name, spec["description"]
+                    )
+                    + ";"
+                )
+
+            lines.append(" END IF;")
+
+        if pk_columns:
+            lines.append(f" IF NOT table_has_primary_key('{table_name}') THEN")
+            lines.append(
+                f" {MigrationCompilerUtil.add_primary_key(table_name, pk_columns)};"
+            )
+            lines.append(" END IF;")
+
+        for fk in fk_specs:
+            lines.append(
+                f" IF NOT fk_constraint_matches('{table_name}', '{fk['column_name']}', '{fk['ref_table']}', '{fk['ref_column']}') THEN"
+            )
+            fk_constraint_name = MigrationCompilerUtil.check_constraint_name(
+                table_name, fk["column_name"], "fk"
+            )
+            lines.append(
+                f" {MigrationCompilerUtil.drop_constraint_if_exists(table_name, fk_constraint_name)};"
+            )
+            lines.append(
+                f" ALTER TABLE {table_name} ADD CONSTRAINT {fk_constraint_name} FOREIGN KEY ({fk['column_name']}) REFERENCES {fk['ref_table']}({fk['ref_column']});"
+            )
+            lines.append(" END IF;")
+
+        return lines
+
+    def _append_enum_and_max_checks(
+        self,
+        lines: list[str],
+        table_name: str,
+        column_name: str,
+        spec: dict[str, Any],
+        indent: int,
+    ) -> None:
+        if spec["enum_values"]:
+            formatted_values = ", ".join(
+                MigrationCompilerUtil.quote_literal(v) for v in spec["enum_values"]
+            )
+            lines.append(
+                f"{' ' * indent}IF NOT enum_constraint_matches('{table_name}', '{column_name}', ARRAY[{formatted_values}]) THEN"
+            )
+            lines.extend(
+                MigrationCompilerUtil.indent_sql(
+                    ";\n".join(
+                        MigrationCompilerUtil.add_enum_check_constraint(
+                            table_name, column_name, spec["enum_values"]
+                        )
+                    )
+                    + ";",
+                    indent + 4,
+                )
+            )
+            lines.append(f"{' ' * indent}END IF;")
+
+        if spec["maximum"] is not None and spec["is_numeric"]:
+            lines.append(
+                f"{' ' * indent}IF NOT max_constraint_matches('{table_name}', '{column_name}', {spec['maximum']}) THEN"
+            )
+            lines.extend(
+                MigrationCompilerUtil.indent_sql(
+                    ";\n".join(
+                        MigrationCompilerUtil.add_max_check_constraint(
+                            table_name, column_name, spec["maximum"]
+                        )
+                    )
+                    + ";",
+                    indent + 4,
+                )
+            )
+            lines.append(f"{' ' * indent}END IF;")
+
+    def _build_add_column_sql(self, table_name: str, spec: dict[str, Any]) -> str:
+        clause = f"ALTER TABLE {table_name} ADD COLUMN {spec['column_name']} {spec['sql_type']}"
+        if spec["not_null"]:
+            clause += " NOT NULL"
+        if spec["default"] is not None:
+            clause += f" DEFAULT {MigrationCompilerUtil.quote_literal(spec['default'])}"
+        clause += ";"
+        return clause
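
For orientation only (not part of the package diff): compile above reads each column spec as a plain dict with the keys column_name, sql_type, not_null, default, description, enum_values, maximum and is_numeric, and returns the body lines of an idempotent ALTER script. A minimal sketch with hypothetical values, assuming the new class is importable (the diff does not show its module path):

    compiler = MigrationCompilerAlterTable()
    spec = {
        "column_name": "nome",        # hypothetical column
        "sql_type": "varchar(100)",
        "not_null": True,
        "default": None,
        "description": "Nome do cliente",
        "enum_values": None,
        "maximum": None,
        "is_numeric": False,
    }
    # Returns the IF/ALTER lines to be wrapped into a migration script.
    lines = compiler.compile(
        table_name="public.cliente",  # hypothetical table
        column_specs=[spec],
        pk_columns=["id"],
    )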
@@ -0,0 +1,75 @@
+from __future__ import annotations
+
+from typing import Any
+
+from nsj_rest_lib2.compiler.migration_compiler_util import MigrationCompilerUtil
+
+
+class MigrationCompilerCreateTable:
+    def compile(
+        self,
+        table_name: str,
+        column_specs: list[dict[str, Any]],
+        pk_columns: list[str],
+        fk_specs: list[dict[str, str]],
+    ) -> str:
+        column_definitions: list[str] = []
+        for spec in column_specs:
+            col_def = f"{spec['column_name']} {spec['sql_type']}"
+            if spec["not_null"]:
+                col_def += " NOT NULL"
+            if spec["default"] is not None:
+                col_def += f" DEFAULT {MigrationCompilerUtil.quote_literal(spec['default'])}"
+            column_definitions.append(col_def)
+
+        if pk_columns:
+            pk_constraint = (
+                f"CONSTRAINT {table_name}_pkey PRIMARY KEY ({', '.join(pk_columns)})"
+            )
+            column_definitions.append(pk_constraint)
+
+        for fk in fk_specs:
+            fk_name = MigrationCompilerUtil.check_constraint_name(
+                table_name, fk["column_name"], "fk"
+            )
+            column_definitions.append(
+                f"CONSTRAINT {fk_name} FOREIGN KEY ({fk['column_name']}) REFERENCES {fk['ref_table']}({fk['ref_column']})"
+            )
+
+        columns_block = ",\n ".join(column_definitions)
+        create_lines = [
+            f"CREATE TABLE {table_name} (",
+            f" {columns_block}",
+            ");",
+        ]
+
+        for spec in column_specs:
+            if spec["description"]:
+                create_lines.append(
+                    MigrationCompilerUtil.comment_on_column(
+                        table_name, spec["column_name"], spec["description"]
+                    )
+                    + ";"
+                )
+
+            if spec["enum_values"]:
+                create_lines.extend(
+                    [
+                        stmt + ";"
+                        for stmt in MigrationCompilerUtil.add_enum_check_constraint(
+                            table_name, spec["column_name"], spec["enum_values"]
+                        )
+                    ]
+                )
+
+            if spec["maximum"] is not None and spec["is_numeric"]:
+                create_lines.extend(
+                    [
+                        stmt + ";"
+                        for stmt in MigrationCompilerUtil.add_max_check_constraint(
+                            table_name, spec["column_name"], spec["maximum"]
+                        )
+                    ]
+                )
+
+        return "\n".join(create_lines)
@@ -0,0 +1,144 @@
+from __future__ import annotations
+
+from typing import Any
+
+from nsj_rest_lib2.compiler.edl_model.primitives import PrimitiveTypes, PropertyType
+
+
+class MigrationCompilerUtil:
+    """
+    Utility functions for assembling the SQL snippets used in the migrations.
+    """
+
+    SQL_TYPE_MAP = {
+        PrimitiveTypes.STRING: "varchar(250)",
+        PrimitiveTypes.TEXT: "varchar",
+        PrimitiveTypes.NUMBER: "numeric(20,8)",
+        PrimitiveTypes.INTEGER: "integer",
+        PrimitiveTypes.BOOLEAN: "boolean",
+        PrimitiveTypes.UUID: "uuid",
+        PrimitiveTypes.CURRENCY: "numeric(20,4)",
+        PrimitiveTypes.QUANTITY: "numeric(20,4)",
+        PrimitiveTypes.CPF: "varchar(20)",
+        PrimitiveTypes.CNPJ: "varchar(20)",
+        PrimitiveTypes.CPF_CNPJ: "varchar(20)",
+        PrimitiveTypes.EMAIL: "varchar(100)",
+        PrimitiveTypes.DATE: "date",
+        PrimitiveTypes.DATETIME: "timestamp with time zone",
+        PrimitiveTypes.DURATION: "interval",
+    }
+
+    @staticmethod
+    def resolve_sql_type(datatype: PropertyType, max_length: int | None) -> str:
+        if not isinstance(datatype, PrimitiveTypes):
+            raise ValueError(f"Tipo de propriedade não suportado para SQL: {datatype}")
+
+        base_type = MigrationCompilerUtil.SQL_TYPE_MAP.get(datatype)
+        if not base_type:
+            raise ValueError(f"Tipo de propriedade não mapeado para SQL: {datatype}")
+
+        if datatype in (PrimitiveTypes.STRING, PrimitiveTypes.TEXT) and max_length:
+            return f"varchar({max_length})"
+
+        return base_type
+
+    @staticmethod
+    def is_numeric(datatype: PropertyType) -> bool:
+        if not isinstance(datatype, PrimitiveTypes):
+            return False
+        return datatype in (
+            PrimitiveTypes.NUMBER,
+            PrimitiveTypes.INTEGER,
+            PrimitiveTypes.CURRENCY,
+            PrimitiveTypes.QUANTITY,
+        )
+
+    @staticmethod
+    def quote_literal(value: Any) -> str:
+        if value is None:
+            return "null"
+        if isinstance(value, bool):
+            return "true" if value else "false"
+        if isinstance(value, (int, float)):
+            return str(value)
+        escaped = str(value).replace("'", "''")
+        return f"'{escaped}'"
+
+    @staticmethod
+    def comment_on_column(table_name: str, column_name: str, description: str) -> str:
+        escaped = description.replace("'", "''")
+        return (
+            f"COMMENT ON COLUMN {table_name}.{column_name} IS '{escaped}'"
+        )
+
+    @staticmethod
+    def drop_constraint_if_exists(table_name: str, constraint_name: str) -> str:
+        return f"ALTER TABLE {table_name} DROP CONSTRAINT IF EXISTS {constraint_name}"
+
+    @staticmethod
+    def check_constraint_name(table_name: str, column_name: str, suffix: str) -> str:
+        safe_table = table_name.replace(".", "_")
+        return f"{safe_table}_{column_name}_{suffix}"
+
+    @staticmethod
+    def add_enum_check_constraint(
+        table_name: str, column_name: str, values: list[Any]
+    ) -> list[str]:
+        statements: list[str] = []
+        constraint_name = MigrationCompilerUtil.check_constraint_name(
+            table_name, column_name, "enum_chk"
+        )
+        statements.append(
+            MigrationCompilerUtil.drop_constraint_if_exists(
+                table_name, constraint_name
+            )
+        )
+        if values:
+            formatted_values = ", ".join(
+                MigrationCompilerUtil.quote_literal(v) for v in values
+            )
+            statements.append(
+                f"ALTER TABLE {table_name} ADD CONSTRAINT {constraint_name} "
+                f"CHECK ({column_name} IN ({formatted_values}))"
+            )
+        return statements
+
+    @staticmethod
+    def add_max_check_constraint(
+        table_name: str, column_name: str, maximum: int | float
+    ) -> list[str]:
+        statements: list[str] = []
+        constraint_name = MigrationCompilerUtil.check_constraint_name(
+            table_name, column_name, "max_chk"
+        )
+        statements.append(
+            MigrationCompilerUtil.drop_constraint_if_exists(
+                table_name, constraint_name
+            )
+        )
+        statements.append(
+            f"ALTER TABLE {table_name} ADD CONSTRAINT {constraint_name} "
+            f"CHECK ({column_name} <= {maximum})"
+        )
+        return statements
+
+    @staticmethod
+    def add_primary_key(
+        table_name: str, column_names: list[str], constraint_name: str | None = None
+    ) -> str:
+        constraint = constraint_name or f"{table_name}_pkey"
+        columns = ", ".join(column_names)
+        return (
+            f"ALTER TABLE {table_name} "
+            f"ADD CONSTRAINT {constraint} PRIMARY KEY ({columns})"
+        )
+
+    @staticmethod
+    def drop_primary_key(table_name: str, constraint_name: str | None = None) -> str:
+        constraint = constraint_name or f"{table_name}_pkey"
+        return f"ALTER TABLE {table_name} DROP CONSTRAINT IF EXISTS {constraint}"
+
+    @staticmethod
+    def indent_sql(sql: str, spaces: int) -> list[str]:
+        prefix = " " * spaces
+        return [prefix + line if line else prefix for line in sql.split("\n")]
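
Several of the helpers above are pure string builders, so their output can be read straight from the code; a few illustrative calls (values chosen for illustration only):

    MigrationCompilerUtil.quote_literal("d'água")   # "'d''água'"
    MigrationCompilerUtil.quote_literal(True)        # "true"
    MigrationCompilerUtil.resolve_sql_type(PrimitiveTypes.STRING, 100)  # "varchar(100)"
    MigrationCompilerUtil.check_constraint_name("public.cliente", "status", "enum_chk")
    # -> "public_cliente_status_enum_chk"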
@@ -1,6 +1,5 @@
 import ast
 import datetime
-import re
 import uuid
 
 from nsj_rest_lib2.compiler.compiler_structures import (
@@ -13,8 +12,6 @@ from nsj_rest_lib2.compiler.edl_model.primitives import (
     BasicTypes,
     CardinalityTypes,
     PrimitiveTypes,
-    REGEX_EXTERNAL_REF,
-    REGEX_INTERNAL_REF,
     STR_BASED_TYPES,
 )
 from nsj_rest_lib2.compiler.edl_model.property_meta_model import PropertyMetaModel
@@ -30,6 +27,7 @@ from nsj_rest_lib2.compiler.util.type_naming_util import (
     compile_namespace_keys,
 )
 from nsj_rest_lib2.compiler.util.type_util import TypeUtil
+from nsj_rest_lib2.compiler.util.relation_ref import RelationRef, RelationRefParser
 
 
 class EDLPropertyCompiler:
@@ -164,18 +162,15 @@ class EDLPropertyCompiler:
                     fixed_filters.append((pkey, trait_fixed_filters[pkey]))
 
             elif isinstance(prop.type, str):
-                # Handling a relationship property
-                external_match = re.match(REGEX_EXTERNAL_REF, prop.type)
-                internal_match = re.match(REGEX_INTERNAL_REF, prop.type)
-
-                if external_match:
-                    # Resolving the entity id
-                    related_entity_id = external_match.group(2)
-                    related_entity_key = external_match.group(0)
+                relation_ref = RelationRefParser.parse(prop.type)
+                if not relation_ref:
+                    raise Exception(
+                        f"Tipo da propriedade '{pkey}' não suportado: {prop.type}"
+                    )
 
+                if relation_ref.is_external:
                     self._compile_external_relation(
-                        related_entity_id,
-                        related_entity_key,
+                        relation_ref,
                         entity_model,
                         entity_models,
                         properties_structure,
@@ -187,11 +182,9 @@
                         prop,
                     )
 
-                elif internal_match:
-                    related_entity_id = internal_match.group(1)
-
+                elif relation_ref.ref_type == "internal":
                     self._compile_internal_relation(
-                        related_entity_id,
+                        relation_ref,
                         entity_model,
                         properties_structure,
                         target_dto_attributes,
@@ -277,8 +270,7 @@ class EDLPropertyCompiler:
 
     def _compile_external_relation(
         self,
-        related_entity_id: str,
-        related_entity_key: str,
+        relation_ref: RelationRef,
         entity_model: EntityModelBase,
         entity_models: dict[str, EntityModel],
         properties_structure: PropertiesCompilerStructure,
@@ -289,17 +281,22 @@
         pkey: str,
         prop: PropertyMetaModel,
     ):
-        # Resolving the DTO and Entity class names
-        related_dto_class_name = compile_dto_class_name(related_entity_id)
-        related_entity_class_name = compile_entity_class_name(related_entity_id)
+        related_entity_key = relation_ref.entity_key
+        if not related_entity_key:
+            raise Exception(
+                f"Entidade '{entity_model.id}' possui uma referência externa inválida em '{pkey}': {prop.type}"
+            )
 
-        # Resolving the import path
         related_entity = entity_models.get(related_entity_key)
         if not related_entity:
             raise Exception(
                 f"Entidade '{entity_model.id}' possui uma referência externa para uma entidade inexistente: '{related_entity_key}', por meio da propriedade: '{pkey}'."
             )
 
+        related_dto_class_name, related_entity_class_name = (
+            self._resolve_related_class_names(relation_ref)
+        )
+
         tenant = related_entity.tenant
         grupo_empresarial = related_entity.grupo_empresarial
         grupo_key, tenant_key, default_key = compile_namespace_keys(
@@ -355,6 +352,7 @@ class EDLPropertyCompiler:
                 related_dto_class_name,
                 related_entity_class_name,
                 prop,
+                relation_type="AGGREGATION",
             )
 
         elif prop.cardinality == CardinalityTypes.CN_N:
@@ -365,9 +363,20 @@
                 f"Propriedade '{pkey}' da entidade '{entity_model.id}' possui cardinalidade inválida ou não suportada: {prop.cardinality}"
             )
 
+    def _resolve_related_class_names(
+        self, relation_ref: RelationRef
+    ) -> tuple[str, str]:
+        prefix = relation_ref.prefx_class_name
+        target_id = relation_ref.target_id
+
+        return (
+            compile_dto_class_name(target_id, prefix),
+            compile_entity_class_name(target_id, prefix),
+        )
+
     def _compile_internal_relation(
         self,
-        related_entity_id: str,
+        relation_ref: RelationRef,
         entity_model: EntityModelBase,
         properties_structure: PropertiesCompilerStructure,
         ast_dto_attributes: list[ast.stmt],
@@ -378,10 +387,10 @@ class EDLPropertyCompiler:
     ):
         # Resolving the DTO and Entity class names
         related_dto_class_name = compile_dto_class_name(
-            related_entity_id, f"{prefx_class_name}_{entity_model.id}"
+            relation_ref.entity, f"{prefx_class_name}_{entity_model.id}"
         )
         related_entity_class_name = compile_entity_class_name(
-            related_entity_id, f"{prefx_class_name}_{entity_model.id}"
+            relation_ref.entity, f"{prefx_class_name}_{entity_model.id}"
         )
 
         # Instantiating the ast
@@ -405,6 +414,7 @@
                 related_dto_class_name,
                 related_entity_class_name,
                 prop,
+                relation_type="COMPOSITION",
             )
 
         elif prop.cardinality == CardinalityTypes.CN_N:
@@ -558,8 +568,9 @@ class EDLPropertyCompiler:
         related_dto_class_name: str,
         related_entity_class_name: str,
         prop: PropertyMetaModel,
+        relation_type: str,
     ):
-        # Property of the property descriptor
+        # Keywords for DTOOneToOneField
         keywords = [
             ast.keyword(
                 arg="entity_type",
@@ -615,32 +626,13 @@
         if properties_structure.required and pkey in properties_structure.required:
             keywords.append(ast.keyword(arg="not_null", value=ast.Constant(True)))
 
+        # 'resume' now belongs to the inner DTOField (matches desired format)
         if (
             properties_structure.main_properties
             and pkey in properties_structure.main_properties
         ):
             keywords.append(ast.keyword(arg="resume", value=ast.Constant(True)))
 
-        resume_fields = properties_structure.main_resume_fields.get(pkey)
-        if resume_fields:
-            keywords.append(
-                ast.keyword(
-                    arg="resume_fields",
-                    value=ast.List(
-                        elts=[ast.Constant(value=field) for field in resume_fields],
-                        ctx=ast.Load(),
-                    ),
-                )
-            )
-
-        if prop.validator:
-            keywords.append(
-                ast.keyword(
-                    arg="validator",
-                    value=ast.Name(prop.validator, ctx=ast.Load()),
-                )
-            )
-
         # Resolving the column used in the relationship
         if (
             not properties_structure.entity_properties
@@ -659,13 +651,23 @@ class EDLPropertyCompiler:
         if "/" in relation_column:
             owner_relation = True
             relation_column = relation_column.split("/")[-1]
+
 
-        # TODO Check this, because I suspect that pointing to the relationship column
-        # is not supported by RestLib in the OTHER relationship case (I think that, when
-        # the owner is OTHER, RestLib always points to the current entity's PK).
         keywords.append(
             ast.keyword(
-                arg="relation_field",
+                arg="relation_type",
+                value=ast.Attribute(
+                    value=ast.Name(id="OTORelationType", ctx=ast.Load()),
+                    attr=relation_type,
+                    ctx=ast.Load(),
+                ),
+            )
+        )
+
+        # Build the inner field descriptor with the entity column mapping
+        keywords.append(
+            ast.keyword(
+                arg="entity_field",
                 value=ast.Constant(value=relation_column),
             )
         )
@@ -682,22 +684,18 @@ class EDLPropertyCompiler:
                 )
             )
         else:
-            dto_property_name = f"relation_1_1_self_column_{relation_column}"
-            # Adding a property for the relation field to the DTO (when it is the relation owner)
-            ast_dto_attributes.append(
-                self._build_dto_property_ast(
-                    dto_property_name,
-                    PrimitiveTypes.UUID,
-                    keywords=[
-                        ast.keyword(
-                            arg="entity_field",
-                            value=ast.Constant(value=relation_column),
-                        ),
-                    ],
+            # Relation owner: explicitly report it as SELF and ensure the attribute on the Entity
+            keywords.append(
+                ast.keyword(
+                    arg="entity_relation_owner",
+                    value=ast.Attribute(
+                        value=ast.Name(id="EntityRelationOwner", ctx=ast.Load()),
+                        attr="SELF",
+                        ctx=ast.Load(),
+                    ),
                 )
             )
-
-            # Adding a property for the relation field to the Entity (when it is the relation owner)
+            # Adding a property to the Entity for the relation column
             ast_entity_attributes.append(
                 self._build_entity_property_ast(relation_column, PrimitiveTypes.UUID)
             )
@@ -710,7 +708,7 @@ class EDLPropertyCompiler:
                 ctx=ast.Load(),
             ),
             value=ast.Call(
-                func=ast.Name(id="DTOObjectField", ctx=ast.Load()),
+                func=ast.Name(id="DTOOneToOneField", ctx=ast.Load()),
                 args=[],
                 keywords=keywords,
             ),