quipus_generate-0.1.0.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- quipus_generate-0.1.0/LICENSE +0 -0
- quipus_generate-0.1.0/PKG-INFO +8 -0
- quipus_generate-0.1.0/README.md +0 -0
- quipus_generate-0.1.0/project/__init__.py +0 -0
- quipus_generate-0.1.0/project/cli.py +49 -0
- quipus_generate-0.1.0/project/docker_modified.py +44 -0
- quipus_generate-0.1.0/project/entity_generator.py +425 -0
- quipus_generate-0.1.0/project/main_modified.py +46 -0
- quipus_generate-0.1.0/project/microservice_generator.py +431 -0
- quipus_generate-0.1.0/project/migration_generator.py +74 -0
- quipus_generate-0.1.0/project/proyect_generator.py +176 -0
- quipus_generate-0.1.0/project/readme_modified.py +35 -0
- quipus_generate-0.1.0/project/site_generator.py +30 -0
- quipus_generate-0.1.0/quipus_generate.egg-info/PKG-INFO +8 -0
- quipus_generate-0.1.0/quipus_generate.egg-info/SOURCES.txt +19 -0
- quipus_generate-0.1.0/quipus_generate.egg-info/dependency_links.txt +1 -0
- quipus_generate-0.1.0/quipus_generate.egg-info/entry_points.txt +2 -0
- quipus_generate-0.1.0/quipus_generate.egg-info/requires.txt +1 -0
- quipus_generate-0.1.0/quipus_generate.egg-info/top_level.txt +1 -0
- quipus_generate-0.1.0/setup.cfg +4 -0
- quipus_generate-0.1.0/setup.py +18 -0
quipus_generate-0.1.0/project/cli.py
@@ -0,0 +1,49 @@
import argparse

from microservice_generator import create as microservice
from docker_modified import modify as docker_modify
from migration_generator import create as migration
from readme_modified import modify as readme_modify
from main_modified import modify as main_modify
from proyect_generator import create as project
from entity_generator import create as entity
from site_generator import create as site

def main():
    parser = argparse.ArgumentParser(description="Generador de proyectos FastAPI con docker-compose y base de datos postgres.")
    subparsers = parser.add_subparsers(dest="accion", required=True, help="Acciones disponibles.")
    # Subcommand to initialize a new project
    init_parser = subparsers.add_parser("init", help="Iniciar un nuevo proyecto.")
    init_parser.add_argument("name", help="Nombre del proyecto.")

    # Subcommand to create a microservice
    microservice_parser = subparsers.add_parser("microservice", help="Crear un microservicio con Arquitectura Hexagonal.")
    microservice_parser.add_argument("name", help="Nombre del microservicio.")
    microservice_parser.add_argument("entity", help="Nombre de la entidad.")

    # Subcommand to create an entity
    entity_parser = subparsers.add_parser("entity", help="Crear una entity con Arquitectura Hexagonal.")
    entity_parser.add_argument("name", help="Nombre de la entity.")

    args = parser.parse_args()

    if args.accion == "init":
        project(args.name)

    elif args.accion == "microservice":
        microservice(args.name)
        docker_modify(args.name)
        site(args.name)
        entity(args.entity, args.name)
        migration(args.entity, args.name)
        main_modify(args.entity, args.name)
        readme_modify(args.entity, args.name)

    elif args.accion == "entity":
        entity(args.name)
        migration(args.name)
        main_modify(args.name)
        readme_modify(args.name)

if __name__ == "__main__":
    main()
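
For a quick sense of how the CLI dispatches, here is a minimal sketch that drives main() with a simulated command line. It assumes the project/ directory is importable; the installed console-script name is defined in quipus_generate.egg-info/entry_points.txt (not shown in this hunk), and the service/entity names "users" and "user" are purely illustrative.

# Illustrative only: drive main() with a simulated argv, assuming project/ is on sys.path.
import sys
from unittest import mock

import cli  # the module above (project/cli.py)

with mock.patch.object(sys, "argv", ["quipus", "microservice", "users", "user"]):
    # Runs microservice(), docker_modify(), site(), entity(), migration(),
    # main_modify() and readme_modify() for the "users" service and "user" entity.
    cli.main()
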
quipus_generate-0.1.0/project/docker_modified.py
@@ -0,0 +1,44 @@
import os

def modify(name):
    compose_file = "docker-compose.yaml"

    block = (
        f"  {name}:\n"
        "    build:\n"
        f"      context: ./{name}\n"
        "      dockerfile: Dockerfile\n"
        f"    container_name: {name}-service\n"
        "    volumes:\n"
        f"      - ./{name}:/app\n"
        "    command: uvicorn app.main:run --host 0.0.0.0 --port ${SERVICE_PORT} --reload\n"
        "    networks:\n"
        "      - quipus-network\n"
        "    expose:\n"
        "      - ${SERVICE_PORT}\n"
        "    depends_on:\n"
        "      - db\n"
    )

    # Read the current contents
    with open(compose_file, "r") as f:
        lines = f.readlines()

    # Find the index of the line that declares the nginx service
    insert_index = None
    for i, line in enumerate(lines):
        if line.strip().startswith("nginx:") and line.startswith("  "):  # 2-space indentation
            insert_index = i
            break

    # Insert the block before nginx if it is not already present
    if insert_index is not None:
        if f"{name}:" not in "".join(lines):  # avoid inserting a duplicate
            lines.insert(insert_index, block)
            with open(compose_file, "w") as f:
                f.writelines(lines)
            print(f"✅ Servicio '{name}' insertado correctamente antes de 'nginx'")
        else:
            print(f"ℹ️ El servicio '{name}' ya existe en el archivo docker-compose.yaml")
    else:
        print("❌ No se encontró el servicio 'nginx:' en el archivo")
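
For illustration, a minimal sketch of what the template above produces, assuming a hypothetical service named "users" and a docker-compose.yaml that already declares an nginx: service at two-space indentation; SERVICE_PORT is expected to come from the compose environment.

# Hypothetical usage; "users" is an example name and project/ must be importable.
from docker_modified import modify

modify("users")
# Block inserted just above "nginx:" (rendered from the f-string template above):
#   users:
#     build:
#       context: ./users
#       dockerfile: Dockerfile
#     container_name: users-service
#     volumes:
#       - ./users:/app
#     command: uvicorn app.main:run --host 0.0.0.0 --port ${SERVICE_PORT} --reload
#     networks:
#       - quipus-network
#     expose:
#       - ${SERVICE_PORT}
#     depends_on:
#       - db
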
quipus_generate-0.1.0/project/entity_generator.py
@@ -0,0 +1,425 @@
import inflection
import os

def create(name, microservice=None):

    estructura = [
        "application/services",
        "application/use_cases",
        "domain/entities",
        "domain/repositories",
        "domain/value_objects",
        "infrastructure/dto",
        "infrastructure/models",
        "infrastructure/repositories",
        "infrastructure/routers",
    ]

    archivos = {
        f"application/services/{name}_service.py": (
            "from typing import List, Optional, Dict, Any\n"
            "\n"
            f"from app.{name}.domain.repositories.{name}_repository import {name.capitalize()}Repository\n"
            f"from app.{name}.domain.entities.{name} import {name.capitalize()}\n"
            "\n"
            f"class {name.capitalize()}Service:\n"
            f"    def __init__(self, repository: {name.capitalize()}Repository):\n"
            "        self.repository = repository\n"
            "\n"
            "    def list(self,\n"
            "             trashed: int,\n"
            "             paginate: int,\n"
            "             page: int,\n"
            "             rows: int,\n"
            "             filters: Optional[Dict[str, Any]] = None\n"
            f"             ) -> List[{name.capitalize()}]:\n"
            "        return self.repository.all(trashed, paginate, page, rows, filters)\n"
            "\n"
            f"    def search(self, id: str) -> Optional[{name.capitalize()}]:\n"
            "        return self.repository.find(id)\n"
            "\n"
            f"    def save(self, {name}: {name.capitalize()}) -> {name.capitalize()}:\n"
            f"        return self.repository.create({name})\n"
            "\n"
            f"    def modify(self, id: str, {name}: {name.capitalize()}) -> Optional[{name.capitalize()}]:\n"
            f"        return self.repository.update(id, {name})\n"
            "\n"
            f"    def remove(self, id: str) -> Optional[{name.capitalize()}]:\n"
            "        return self.repository.delete(id)\n"
        ),
        f"application/use_cases/create_{name}.py": (
            f"from app.{name}.application.services.{name}_service import {name.capitalize()}Service\n"
            f"from app.{name}.domain.entities.{name} import {name.capitalize()}\n"
            "\n"
            f"class Create{name.capitalize()}:\n"
            f"    def __init__(self, service: {name.capitalize()}Service):\n"
            "        self.service = service\n"
            "\n"
            f"    def execute(self, {name}: {name.capitalize()}) -> {name.capitalize()}:\n"
            f"        return self.service.save({name})\n"
        ),
        f"application/use_cases/delete_{name}.py": (
            "from typing import Optional\n"
            "\n"
            f"from app.{name}.application.services.{name}_service import {name.capitalize()}Service\n"
            f"from app.{name}.domain.entities.{name} import {name.capitalize()}\n"
            "\n"
            f"class Delete{name.capitalize()}:\n"
            f"    def __init__(self, service: {name.capitalize()}Service):\n"
            "        self.service = service\n"
            "\n"
            f"    def execute(self, id: str) -> Optional[{name.capitalize()}]:\n"
            "        return self.service.remove(id)\n"
        ),
        f"application/use_cases/get_{inflection.pluralize(name)}.py": (
            "from typing import List, Optional, Dict, Any\n"
            "\n"
            f"from app.{name}.application.services.{name}_service import {name.capitalize()}Service\n"
            f"from app.{name}.domain.entities.{name} import {name.capitalize()}\n"
            "\n"
            f"class Get{inflection.pluralize(name).capitalize()}:\n"
            f"    def __init__(self, service: {name.capitalize()}Service):\n"
            "        self.service = service\n"
            "\n"
            "    def execute(self,\n"
            "                trashed: int,\n"
            "                paginate: int,\n"
            "                page: int,\n"
            "                rows: int,\n"
            "                filters: Optional[Dict[str, Any]] = None\n"
            f"                ) -> List[{name.capitalize()}]:\n"
            "        return self.service.list(trashed, paginate, page, rows, filters)\n"
        ),
        f"application/use_cases/get_{name}_by_id.py": (
            "from typing import Optional\n"
            "\n"
            f"from app.{name}.application.services.{name}_service import {name.capitalize()}Service\n"
            f"from app.{name}.domain.entities.{name} import {name.capitalize()}\n"
            "\n"
            f"class Get{name.capitalize()}ById:\n"
            f"    def __init__(self, service: {name.capitalize()}Service):\n"
            "        self.service = service\n"
            "\n"
            f"    def execute(self, id: str) -> Optional[{name.capitalize()}]:\n"
            "        return self.service.search(id)\n"
        ),
        f"application/use_cases/update_{name}.py": (
            "from typing import Optional\n"
            "\n"
            f"from app.{name}.application.services.{name}_service import {name.capitalize()}Service\n"
            f"from app.{name}.domain.entities.{name} import {name.capitalize()}\n"
            "\n"
            f"class Update{name.capitalize()}:\n"
            f"    def __init__(self, service: {name.capitalize()}Service):\n"
            "        self.service = service\n"
            "\n"
            f"    def execute(self, id: str, {name}: {name.capitalize()}) -> Optional[{name.capitalize()}]:\n"
            f"        return self.service.modify(id, {name})\n"
        ),
        f"domain/entities/{name}.py": (
            "from pydantic import BaseModel, Field\n"
            "\n"
            "# import value objects\n"
            f"from app.{name}.domain.value_objects.datetime import DateTime\n"
            "\n"
            f"class {name.capitalize()}(BaseModel):\n"
            f"    id: str = Field(..., description=\"Unique identifier for the {name}\")\n"
            f"    # field: str = Field(..., description=\"field of the {name}\")\n"
            "    example: str = Field(..., description=\"Example field\")\n"
            f"    created_at: DateTime = Field(..., description=\"Timestamp when the {name} was created\")\n"
            f"    updated_at: DateTime = Field(..., description=\"Timestamp when the {name} was last updated\")\n"
            f"    deleted_at: DateTime = Field(None, description=\"Timestamp when the {name} was deleted\")\n"
        ),
        f"domain/repositories/{name}_repository.py": (
            "from typing import List, Optional, Dict, Any\n"
            "from abc import ABC, abstractmethod\n"
            "\n"
            f"from app.{name}.domain.entities.{name} import {name.capitalize()}\n"
            "\n"
            f"class {name.capitalize()}Repository(ABC):\n"
            "    @abstractmethod\n"
            "    def all(self,\n"
            "            trashed: int,\n"
            "            paginate: int,\n"
            "            page: int,\n"
            "            rows: int,\n"
            "            filters: Optional[Dict[str, Any]] = None\n"
            f"            ) -> List[{name.capitalize()}]:\n"
            "        pass\n"
            "\n"
            "    @abstractmethod\n"
            f"    def find(self, id: str) -> Optional[{name.capitalize()}]:\n"
            "        pass\n"
            "\n"
            "    @abstractmethod\n"
            f"    def create(self, {name}: {name.capitalize()}) -> {name.capitalize()}:\n"
            "        pass\n"
            "\n"
            "    @abstractmethod\n"
            f"    def update(self, id: str, {name}: {name.capitalize()}) -> Optional[{name.capitalize()}]:\n"
            "        pass\n"
            "\n"
            "    @abstractmethod\n"
            f"    def delete(self, id: str) -> Optional[{name.capitalize()}]:\n"
            "        pass\n"
        ),
        "domain/value_objects/datetime.py": (
            "from pydantic import BaseModel, Field, field_validator\n"
            "from datetime import datetime\n"
            "\n"
            "class DateTime(BaseModel):\n"
            "    \"\"\"\n"
            "    DateTime represents a date and time value in the system.\n"
            "    It is a value object that encapsulates the date and time information.\n"
            "    \"\"\"\n"
            "\n"
            "    datetime: str = Field(..., description=\"Date and time value\")\n"
            "\n"
            "    @field_validator(\"datetime\")\n"
            "    def validate_datetime(cls, value: str) -> str:\n"
            "        \"\"\"\n"
            "        Validate that the value matches %Y-%m-%dT%H:%M:%S.%fZ or is empty.\n"
            "        \"\"\"\n"
            "        try:\n"
            "            if value != \"\":\n"
            "                datetime.strptime(value, \"%Y-%m-%dT%H:%M:%S.%fZ\")\n"
            "        except ValueError:\n"
            "            raise ValueError(\"El formato de fecha y hora no es válido. Debe ser YYYY-MM-DDTHH:MM:SSZ.\")\n"
            "        return value\n"
            "\n"
            "    def __eq__(self, other: object) -> bool:\n"
            "        return isinstance(other, DateTime) and self.datetime == other.datetime\n"
            "\n"
            "    def __str__(self) -> str:\n"
            "        return self.datetime  # the value is already an ISO-formatted string\n"
            "\n"
            "    class Config:\n"
            "        json_schema_extra = {\n"
            "            \"example\": {\n"
            "                \"datetime\": \"2023-10-01 12:00:00\"\n"
            "            }\n"
            "        }\n"
        ),
        f"infrastructure/dto/input_{name}.py": (
            "from pydantic import BaseModel, Field\n"
            "\n"
            "# import value objects\n"
            "\n"
            f"class Input{name.capitalize()}(BaseModel):\n"
            "    example: str = Field(..., description=\"Example field\")\n"
        ),
        f"infrastructure/models/{name}_model.py": (
            "from typing import Optional, List\n"
            "import uuid\n"
            "\n"
            "from sqlalchemy import Column, String, DateTime\n"
            "from sqlalchemy.dialects.postgresql import UUID\n"
            "from sqlalchemy.sql import func\n"
            "\n"
            "from config.database import Base\n"
            "\n"
            f"class {name.capitalize()}Model(Base):\n"
            f"    __tablename__ = \"{inflection.pluralize(name)}\"\n"
            "\n"
            "    # the id is a uuid\n"
            "    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4, index=True)\n"
            "    example = Column(String, unique=True, index=True, nullable=False)\n"
            "    created_at = Column(DateTime, server_default=func.now(), nullable=False)\n"
            "    updated_at = Column(DateTime(timezone=True), default=func.now(), onupdate=func.now(), nullable=False)\n"
            "    deleted_at = Column(DateTime, nullable=True)\n"
            "\n"
            "    def to_dict(self, include: Optional[List[str]] = None) -> dict:\n"
            "        data = {\n"
            "            \"id\": str(self.id),\n"
            "            \"example\": self.example,\n"
            "            \"deleted_at\": self.deleted_at\n"
            "        }\n"
            "\n"
            "        if include:\n"
            "            return {k: v for k, v in data.items() if k in include}\n"
            "        return data\n"
        ),
        f"infrastructure/repositories/{name}_repository_impl.py": (
            "from typing import List, Optional, Dict, Any\n"
            "from datetime import datetime\n"
            "\n"
            "from app.Traits.filters import Filterable\n"
            "\n"
            f"from app.{name}.domain.repositories.{name}_repository import {name.capitalize()}Repository\n"
            f"from app.{name}.infrastructure.models.{name}_model import {name.capitalize()}Model\n"
            f"from app.{name}.domain.entities.{name} import {name.capitalize()}\n"
            "\n"
            f"class {name.capitalize()}RepositoryImpl({name.capitalize()}Repository):\n"
            "    def __init__(self, db):\n"
            "        self.db = db\n"
            "        self.filterable = Filterable()\n"
            "\n"
            "    def all(self,\n"
            "            trashed: int,\n"
            "            paginate: int,\n"
            "            page: int,\n"
            "            rows: int,\n"
            "            filters: Optional[Dict[str, Any]] = None\n"
            f"            ) -> List[{name.capitalize()}]:\n"
            "\n"
            f"        query = self.db.query({name.capitalize()}Model)\n"
            "\n"
            "        columns_change = {\n"
            f"            'example': {name.capitalize()}Model.example,\n"
            "        }\n"
            "\n"
            "        if trashed == 0:\n"
            f"            query = query.filter({name.capitalize()}Model.deleted_at.is_(None))\n"
            "\n"
            "        if filters:\n"
            "            query = self.filterable.apply_filters(query, filters, columns_change)\n"
            "\n"
            "        if paginate == 1:\n"
            "            total = query.count()\n"
            "            offset = (page - 1) * rows\n"
            "            items = query.offset(offset).limit(rows).all()\n"
            "        else:\n"
            "            items = query.all()\n"
            "            total = len(items)\n"
            "\n"
            "        data = [item.to_dict() for item in items]\n"
            "\n"
            "        return {\n"
            "            'total': total,\n"
            "            'data': data,\n"
            "        }\n"
            "\n"
            f"    def find(self, id: str) -> Optional[{name.capitalize()}]:\n"
            f"        return self.db.query({name.capitalize()}Model).filter_by(id=id).first()\n"
            "\n"
            f"    def create(self, {name}: {name.capitalize()}) -> {name.capitalize()}:\n"
            "        try:\n"
            f"            {name}_model = {name.capitalize()}Model(**{name}.model_dump())\n"
            f"            self.db.add({name}_model)\n"
            "            self.db.commit()\n"
            f"            self.db.refresh({name}_model)\n"
            f"            return {name}_model\n"
            "        except Exception as e:\n"
            "            self.db.rollback()\n"
            "            raise e\n"
            "\n"
            f"    def update(self, id: str, {name}: {name.capitalize()}) -> Optional[{name.capitalize()}]:\n"
            f"        existing_{name} = self.find(id)\n"
            f"        if existing_{name}:\n"
            f"            for key, value in {name}.model_dump(exclude_unset=True).items():\n"
            f"                setattr(existing_{name}, key, value)\n"
            "            self.db.commit()\n"
            f"            self.db.refresh(existing_{name})\n"
            f"            return existing_{name}\n"
            "        return None\n"
            "\n"
            f"    def delete(self, id: str) -> Optional[{name.capitalize()}]:\n"
            f"        {name} = self.find(id)\n"
            "\n"
            f"        if {name} and {name}.deleted_at is None:\n"
            f"            {name}.deleted_at = datetime.now()\n"
            "            self.db.commit()\n"
            f"            self.db.refresh({name})\n"
            f"        elif {name} and {name}.deleted_at is not None:\n"
            f"            {name}.deleted_at = None\n"
            "            self.db.commit()\n"
            f"            self.db.refresh({name})\n"
            "\n"
            f"        return {name}\n"
        ),
        f"infrastructure/routers/{name}_router.py": (
            "from fastapi import APIRouter, Depends\n"
            "from sqlalchemy.orm import Session\n"
            "from uuid import UUID\n"
            "\n"
            f"from app.{name}.infrastructure.repositories.{name}_repository_impl import {name.capitalize()}RepositoryImpl\n"
            f"from app.{name}.application.services.{name}_service import {name.capitalize()}Service\n"
            "\n"
            f"from app.{name}.application.use_cases.get_{name}_by_id import Get{name.capitalize()}ById\n"
            f"from app.{name}.application.use_cases.create_{name} import Create{name.capitalize()}\n"
            f"from app.{name}.application.use_cases.delete_{name} import Delete{name.capitalize()}\n"
            f"from app.{name}.application.use_cases.update_{name} import Update{name.capitalize()}\n"
            f"from app.{name}.application.use_cases.get_{inflection.pluralize(name)} import Get{inflection.pluralize(name).capitalize()}\n"
            "\n"
            "from app.components.filter.infrastructure.dto.input_filter import InputFilter\n"
            f"from app.{name}.infrastructure.dto.input_{name} import Input{name.capitalize()}\n"
            "\n"
            "from config.database import database\n"
            "\n"
            f"class {name.capitalize()}API:\n"
            "    def __init__(self):\n"
            f"        self.router = APIRouter(prefix=\"/{name}\", tags=[\"{name.capitalize()}\"])\n"
            "        self._setup_routes()\n"
            "\n"
            "    def _setup_routes(self):\n"
            f"        self.router.get(\"\")(self.get_{name})\n"
            f"        self.router.get(\"/{{id}}\")(self.get_{name}_by_id)\n"
            f"        self.router.post(\"\")(self.create_{name})\n"
            f"        self.router.put(\"/{{id}}\")(self.update_{name})\n"
            f"        self.router.delete(\"/{{id}}\")(self.delete_{name})\n"
            "\n"
            f"    async def get_{name}(self, payload: InputFilter = Depends(InputFilter.from_request_body), db: Session = Depends(database.get_db)):\n"
            f"        repository = {name.capitalize()}RepositoryImpl(db)\n"
            f"        service = {name.capitalize()}Service(repository)\n"
            f"        use_case = Get{inflection.pluralize(name).capitalize()}(service)\n"
            "        categories = use_case.execute(**payload.model_dump())\n"
            "        return categories\n"
            "\n"
            f"    def get_{name}_by_id(self, id: UUID, db: Session = Depends(database.get_db)):\n"
            f"        repository = {name.capitalize()}RepositoryImpl(db)\n"
            f"        service = {name.capitalize()}Service(repository)\n"
            f"        use_case = Get{name.capitalize()}ById(service)\n"
            f"        {name} = use_case.execute(id)\n"
            f"        return {name}\n"
            "\n"
            f"    def create_{name}(self, {name}: Input{name.capitalize()}, db: Session = Depends(database.get_db)):\n"
            f"        repository = {name.capitalize()}RepositoryImpl(db)\n"
            f"        service = {name.capitalize()}Service(repository)\n"
            f"        use_case = Create{name.capitalize()}(service)\n"
            f"        created_{name} = use_case.execute({name})\n"
            f"        return created_{name}\n"
            "\n"
            f"    def update_{name}(self, id: UUID, {name}: Input{name.capitalize()}, db: Session = Depends(database.get_db)):\n"
            f"        repository = {name.capitalize()}RepositoryImpl(db)\n"
            f"        service = {name.capitalize()}Service(repository)\n"
            f"        use_case = Update{name.capitalize()}(service)\n"
            f"        updated_{name} = use_case.execute(id, {name})\n"
            f"        return updated_{name}\n"
            "\n"
            f"    def delete_{name}(self, id: UUID, db: Session = Depends(database.get_db)):\n"
            f"        repository = {name.capitalize()}RepositoryImpl(db)\n"
            f"        service = {name.capitalize()}Service(repository)\n"
            f"        use_case = Delete{name.capitalize()}(service)\n"
            f"        deleted_{name} = use_case.execute(id)\n"
            f"        return deleted_{name}\n"
            "\n"
            f"router = {name.capitalize()}API().router\n"
        )
    }

    if microservice is not None:
        path = f"{microservice}/app/{name}"
    else:
        path = f"app/{name}"

    # If the entity already exists, do not recreate it
    if os.path.exists(path):
        print(f"ℹ️ La entidad {name} ya existe.")
        return

    if microservice is not None:
        os.makedirs(microservice + '/app/' + name, exist_ok=True)
    else:
        os.makedirs(path, exist_ok=True)

    # Create the subdirectories
    for carpeta in estructura:
        os.makedirs(os.path.join(path, carpeta), exist_ok=True)

    # Write the template files
    for archivo, content in archivos.items():
        ruta_archivo = os.path.join(path, archivo)
        with open(ruta_archivo, "w") as f:
            f.write(content)

    print(f"✅ Entidad {name} creada con éxito.")
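
As a sketch of the output, assuming a hypothetical entity name "user" (so inflection.pluralize yields "users") and that project/ is importable, create("user") would produce the following layout under app/user/, with each file filled from the corresponding archivos template.

from entity_generator import create

create("user")  # or create("user", "billing") to scaffold inside a "billing" microservice directory
# Resulting files under app/user/ (paths rendered from the archivos keys):
#   application/services/user_service.py
#   application/use_cases/create_user.py
#   application/use_cases/delete_user.py
#   application/use_cases/get_users.py
#   application/use_cases/get_user_by_id.py
#   application/use_cases/update_user.py
#   domain/entities/user.py
#   domain/repositories/user_repository.py
#   domain/value_objects/datetime.py
#   infrastructure/dto/input_user.py
#   infrastructure/models/user_model.py
#   infrastructure/repositories/user_repository_impl.py
#   infrastructure/routers/user_router.py
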
quipus_generate-0.1.0/project/main_modified.py
@@ -0,0 +1,46 @@
import os

def modify(name, microservice=None):

    if microservice is not None:
        main_file = f"{microservice}/app/main.py"
    else:
        main_file = "app/main.py"

    import_line = f"from app.{name}.infrastructure.routers.{name}_router import router as {name}_router\n"
    router_line = f"run.include_router({name}_router, prefix=\"/api/v1\", tags=[\"{name.capitalize()}\"])\n"

    # Read the current contents
    with open(main_file, "r") as f:
        lines = f.readlines()

    # Check whether the lines are already there
    if import_line in lines and router_line in lines:
        print("ℹ️ Las líneas ya están en main.py")
    else:
        # === 1. Insert the import if it is missing ===
        if import_line not in lines:
            import_indices = [i for i, line in enumerate(lines) if line.startswith("import") or line.startswith("from")]
            last_import_idx = import_indices[-1] if import_indices else 0
            lines.insert(last_import_idx + 2, import_line + "\n")

        # === 2. Insert include_router in the right place ===
        # Find every line that already calls run.include_router
        router_indices = [i for i, line in enumerate(lines) if "run.include_router" in line]

        if router_indices:
            # Insert right after the last include_router
            last_router_idx = router_indices[-1]
            if router_line not in lines:
                lines.insert(last_router_idx + 1, router_line)
        else:
            # If there is no run.include_router yet, append it at the end of the file
            if not lines[-1].endswith("\n"):
                lines[-1] += "\n"
            lines.append("\n" + router_line)

        # Save the updated file
        with open(main_file, "w") as f:
            f.writelines(lines)

        print("✅ main.py actualizado exitosamente.")
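
For illustration, with a hypothetical entity name "user", the two rendered lines that modify("user") splices into app/main.py look like the snippet below; run is assumed to be the FastAPI application object defined in the generated main.py (it matches the app.main:run target used in the docker-compose command above).

# Lines added to app/main.py, rendered from import_line and router_line for name="user":
from app.user.infrastructure.routers.user_router import router as user_router

run.include_router(user_router, prefix="/api/v1", tags=["User"])
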