tai-sql 0.1.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tai_sql/__init__.py +26 -0
- tai_sql/cli/commands/createdb.py +123 -0
- tai_sql/cli/commands/generate.py +26 -0
- tai_sql/cli/commands/init.py +185 -0
- tai_sql/cli/commands/push.py +312 -0
- tai_sql/cli/commands/utils/ddl.py +705 -0
- tai_sql/cli/commands/utils/schema.py +63 -0
- tai_sql/cli/main.py +128 -0
- tai_sql/core.py +362 -0
- tai_sql/generators/__init__.py +5 -0
- tai_sql/generators/base.py +108 -0
- tai_sql/generators/crud/__init__.py +118 -0
- tai_sql/generators/crud/asyn/__init__.py +145 -0
- tai_sql/generators/crud/asyn/templates/__init__.py.jinja2 +102 -0
- tai_sql/generators/crud/asyn/templates/endpoints.py.jinja2 +588 -0
- tai_sql/generators/crud/asyn/templates/session_manager.py.jinja2 +161 -0
- tai_sql/generators/crud/sync/__init__.py +146 -0
- tai_sql/generators/crud/sync/templates/__init__.py.jinja2 +102 -0
- tai_sql/generators/crud/sync/templates/endpoints.py.jinja2 +583 -0
- tai_sql/generators/crud/sync/templates/session_manager.py.jinja2 +161 -0
- tai_sql/generators/models/__init__.py +94 -0
- tai_sql/generators/models/templates/__init__.py.jinja2 +79 -0
- tai_sql/manager.py +106 -0
- tai_sql/orm/__init__.py +7 -0
- tai_sql/orm/mappers/__init__.py +9 -0
- tai_sql/orm/mappers/columns.py +189 -0
- tai_sql/orm/mappers/relations.py +302 -0
- tai_sql/orm/mappers/table.py +214 -0
- tai_sql/orm/mappers/utils.py +216 -0
- tai_sql-0.1.1.dist-info/LICENSE +674 -0
- tai_sql-0.1.1.dist-info/METADATA +42 -0
- tai_sql-0.1.1.dist-info/RECORD +34 -0
- tai_sql-0.1.1.dist-info/WHEEL +4 -0
- tai_sql-0.1.1.dist-info/entry_points.txt +3 -0
tai_sql/__init__.py
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Declarative models for SQLAlchemy.
|
|
3
|
+
This module provides the base classes and utilities to define
|
|
4
|
+
models using SQLAlchemy's declarative system.
|
|
5
|
+
"""
|
|
6
|
+
from __future__ import annotations
|
|
7
|
+
|
|
8
|
+
# Importar la instancia global
|
|
9
|
+
from .manager import db
|
|
10
|
+
from .core import datasource, generate, env, connection_string, params
|
|
11
|
+
from .orm import Table, column, relation
|
|
12
|
+
from sqlalchemy.types import BigInteger as bigint
|
|
13
|
+
|
|
14
|
+
# Exportar los elementos principales
|
|
15
|
+
__all__ = [
|
|
16
|
+
'db',
|
|
17
|
+
'datasource',
|
|
18
|
+
'generate',
|
|
19
|
+
'env',
|
|
20
|
+
'connection_string',
|
|
21
|
+
'params',
|
|
22
|
+
'Table',
|
|
23
|
+
'column',
|
|
24
|
+
'relation',
|
|
25
|
+
'bigint'
|
|
26
|
+
]
|
|
@@ -0,0 +1,123 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import click
|
|
3
|
+
from sqlalchemy import text, create_engine, URL
|
|
4
|
+
from sqlalchemy.exc import OperationalError, ProgrammingError
|
|
5
|
+
|
|
6
|
+
from tai_sql import db
|
|
7
|
+
from .utils.schema import Schema
|
|
8
|
+
|
|
9
|
+
class DBCommand(Schema):
    """Command that checks for and creates the database configured in the datasource.

    Supports PostgreSQL, MySQL/MariaDB and SQLite. All user feedback is written
    to the console through click; methods return booleans instead of raising.
    """

    def exists(self) -> bool:
        """Check whether the configured database already exists.

        Returns:
            bool: True if the database exists, False otherwise (including on
            connection errors, which usually mean the database is missing).
        """
        try:
            click.echo(f"🔍 Verificando existencia de la base de datos: {db.provider.database}")

            with db.engine.connect() as conn:
                if db.provider.drivername == "postgresql":
                    # PostgreSQL: look the database up in the system catalog
                    result = conn.execute(text(
                        "SELECT 1 FROM pg_database WHERE datname = :db_name"
                    ), {"db_name": db.provider.database})

                elif db.provider.drivername == "mysql":
                    # MySQL/MariaDB: query INFORMATION_SCHEMA
                    result = conn.execute(text(
                        "SELECT SCHEMA_NAME FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = :db_name"
                    ), {"db_name": db.provider.database})

                elif db.provider.drivername == "sqlite":
                    # SQLite: the database is just a file on disk
                    return os.path.exists(db.provider.database)

                else:
                    click.echo(f"❌ Tipo de base de datos no implementado: {db.provider.drivername}", err=True)
                    return False

                return result.fetchone() is not None

        except (OperationalError, ProgrammingError):
            # Connecting to a missing database raises; treat it as "does not exist"
            click.echo("❌ La base de datos no existe", err=True)
            return False
        except Exception as e:
            click.echo(f"❌ Error inesperado: {e}", err=True)
            return False

    def create(self) -> bool:
        """Create the configured database if it does not already exist.

        Returns:
            bool: True if the database was created successfully (or already
            existed), False otherwise.
        """
        try:
            # Nothing to do when the database is already there
            if self.exists():
                click.echo("ℹ️ La base de datos ya existe")
                return True

            click.echo(f"🚀 Creando base de datos: {db.provider.database}")

            if db.provider.drivername == "sqlite":
                db_file = db.provider.database

                # Make sure parent directories exist before creating the file
                os.makedirs(os.path.dirname(db_file) or '.', exist_ok=True)

                # Opening a connection is enough to create the SQLite file
                with db.engine.connect():
                    pass

                click.echo(f"✅ Base de datos SQLite creada: {db_file}")
                return True

            # Server databases: connect to the server WITHOUT selecting a
            # database, since the target database does not exist yet.
            engine = create_engine(
                URL.create(
                    drivername=db.provider.drivername,
                    username=db.provider.username,
                    password=db.provider.password,
                    host=db.provider.host,
                    port=db.provider.port
                )
            )

            try:
                with engine.connect() as conn:
                    # CREATE DATABASE cannot run inside a transaction; switch the
                    # connection to autocommit (the valid SQLAlchemy option is
                    # isolation_level="AUTOCOMMIT", not autocommit=True).
                    conn = conn.execution_options(isolation_level="AUTOCOMMIT")

                    # NOTE: the database name is interpolated into the DDL; it
                    # comes from the project configuration, not untrusted input.
                    if db.provider.drivername == 'postgresql':
                        # PostgreSQL quotes identifiers with double quotes
                        conn.execute(text(f'CREATE DATABASE "{db.provider.database}"'))
                    elif db.provider.drivername == 'mysql':
                        # MySQL quotes identifiers with backticks, not double quotes
                        conn.execute(text(f'CREATE DATABASE `{db.provider.database}`'))
                    else:
                        click.echo(f"❌ Tipo de base de datos no soportado: {db.provider.drivername}", err=True)
                        return False
            finally:
                # Release the temporary server-level connection pool
                engine.dispose()

            click.echo(f"✅ Base de datos {db.provider.database} creada exitosamente")

            # Confirm the database is now visible
            return self.exists()

        except (OperationalError, ProgrammingError) as e:
            click.echo(f"❌ Error al crear la base de datos: {e}", err=True)
            return False
        except Exception as e:
            click.echo(f"❌ Error inesperado: {e}", err=True)
            return False
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
import click
|
|
2
|
+
|
|
3
|
+
from tai_sql import db
|
|
4
|
+
|
|
5
|
+
from .utils.schema import Schema
|
|
6
|
+
|
|
7
|
+
class GenerateCommand(Schema):

    def run_generators(self):
        """Run every generator registered in the datasource configuration.

        Each generator is executed independently: a failure in one is logged
        and reported, and the loop moves on to the next generator.
        """
        for gen in db.generators:
            try:
                gen_label = gen.__class__.__name__
                click.echo(f"Ejecutando generador: {gen_label}")

                # Each generator discovers the declared models on its own
                outcome = gen.generate()

                click.echo(f"✅ Generador {gen_label} completado con éxito.")
                if outcome:
                    click.echo(f"   Resultado: {outcome}")
            except Exception as err:
                import logging
                logging.exception(err)
                click.echo(f"❌ Error al ejecutar el generador {gen_label}: {str(err)}", err=True)
|
@@ -0,0 +1,185 @@
|
|
|
1
|
+
import subprocess
|
|
2
|
+
import sys
|
|
3
|
+
import os
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
import click
|
|
6
|
+
|
|
7
|
+
# ...existing code...
|
|
8
|
+
|
|
9
|
+
class NewSchemaCommand:
    # Scaffolds a new schema definition file under <namespace>/schemas/.

    def __init__(self, namespace: str, schema_name: str):
        """Store the project namespace and the name of the schema file to create."""
        self.namespace = namespace
        self.schema_name = schema_name

    @property
    def subnamespace(self) -> str:
        """Return the namespace converted to a valid Python package name (dashes → underscores)."""
        return self.namespace.replace('-', '_')

    def exists(self) -> bool:
        """Return True if the schema file already exists in <namespace>/schemas/."""
        schemas_dir = Path(self.namespace) / 'schemas'
        return (schemas_dir / f'{self.schema_name}.py').exists()

    def create(self):
        """Create the schema file with the example structure.

        Exits the process with status 1 if a schema with the same name
        already exists.
        """
        click.echo(f"🚀 Creando esquema '{self.schema_name}' en '{self.namespace}/schemas'...")

        if self.exists():
            click.echo(f"❌ Error: El esquema '{self.schema_name}' ya existe en '{self.namespace}/schemas'.", err=True)
            sys.exit(1)

        # Create the schemas directory (and parents) if missing
        schemas_dir = Path(self.namespace) / 'schemas'
        schemas_dir.mkdir(parents=True, exist_ok=True)

        # Write the example schema content into <schema_name>.py
        content = self.get_content()
        (schemas_dir / f'{self.schema_name}.py').write_text(content, encoding='utf-8')

        click.echo(f"   ✅ '{self.schema_name}.py' creado en '{self.namespace}/schemas/'")

    def get_content(self) -> str:
        """Return the example schema file content (a ready-to-edit template).

        The template configures a datasource from the
        ``<SCHEMA_NAME>_DATABASE_URL`` environment variable, wires the model
        and CRUD generators, and includes two example tables.
        """
        return f'''# -*- coding: utf-8 -*-
"""
Fuente principal para la definición de esquemas y generación de modelos CRUD.
Usa el contenido de tai_sql para definir tablas, relaciones y generar automáticamente modelos y CRUDs.
Usa tai_sql.generators para generar modelos y CRUDs basados en las tablas definidas.
Ejecuta por consola tai_sql generate para generar los recursos definidos en este esquema.
"""
from __future__ import annotations
from typing import List, Optional
# from datetime import datetime
from tai_sql import *
from tai_sql.generators import ModelsGenerator, CRUDGenerator


# Configurar el datasource
datasource(
    provider=env('{self.schema_name.upper()}_DATABASE_URL') # Además de env, también puedes usar (para testing) connection_string y params
    # Revisa la documentación de la función datasource para más opciones
)

# Configurar los generadores
generate(
    ModelsGenerator(
        output_dir='{self.namespace}/{self.subnamespace}/{self.schema_name}/models' # Directorio donde se generarán los modelos
    ),
    CRUDGenerator(
        output_dir='{self.namespace}/{self.subnamespace}/{self.schema_name}/crud', # Directorio donde se generarán los CRUDs
        models_import_path='{self.subnamespace}.{self.schema_name}.models', # Ruta de importación de los modelos generados
        mode='sync' # Modo de generación: 'sync' para síncrono, 'async' para asíncrono, 'both' para ambos
    )
)

# Definición de tablas y relaciones

# Ejemplo de definición de tablas y relaciones. Eliminar estos modelos y definir los tuyos propios.
class User(Table):
    __tablename__ = "user"

    id: int = column(primary_key=True, autoincrement=True)
    name: str
    email: Optional[str] # Nullable

    posts: List[Post] # Relación one-to-many (implícita) con la tabla Post

class Post(Table):
    __tablename__ = "post"

    id: int = column(primary_key=True, autoincrement=True)
    title: str = 'Post Title' # Valor por defecto
    content: str
    author_id: int

    author: User = relation(fields=['author_id'], references=['id'], backref='posts') # Relación many-to-one con la tabla User
'''
|
|
99
|
+
|
|
100
|
+
|
|
101
|
+
class InitCommand:
    """Bootstrap a new tai-sql project with Poetry and an initial schema.

    Creates the Poetry project, installs the required dependencies and
    scaffolds the first schema file via :class:`NewSchemaCommand`.
    """

    def __init__(self, namespace: str, schema_name: str):
        """Store the project namespace and the initial schema name."""
        self.namespace = namespace
        self.schema_name = schema_name

    @property
    def subnamespace(self) -> str:
        """Return the namespace converted to a valid Python package name (dashes → underscores)."""
        return self.namespace.replace('-', '_')

    def check_poetry(self):
        """Abort (exit 1) unless Poetry is installed and reachable on PATH."""
        try:
            subprocess.run(['poetry', '--version'], check=True, capture_output=True)
        except (subprocess.CalledProcessError, FileNotFoundError):
            click.echo("❌ Error: Poetry no está instalado o no está en el PATH", err=True)
            click.echo("Instala Poetry desde: https://python-poetry.org/docs/#installation")
            sys.exit(1)

    def check_directory_is_avaliable(self):
        """Abort (exit 1) if the target project directory already exists."""
        if os.path.exists(self.namespace):
            click.echo(f"❌ Error: el directorio '{self.namespace}' ya existe", err=True)
            sys.exit(1)

    def check_virtualenv(self):
        """Abort (exit 1) unless a virtual environment is currently active."""
        if 'VIRTUAL_ENV' not in os.environ:
            # Fixed typo in the user-facing message ("virutal" → "virtual")
            click.echo("❌ Error: No hay entorno virtual activo", err=True)
            click.echo("   Puedes crear uno con 'pyenv virtualenv <env_name>' y asignarlo con 'pyenv local <env_name>'", err=True)
            sys.exit(1)

    def create_project(self):
        """Create the base project with ``poetry new`` and run ``poetry install``."""
        click.echo(f"🚀 Creando '{self.namespace}'...")

        try:
            subprocess.run(['poetry', 'new', self.namespace],
                           check=True,
                           capture_output=True)
            subprocess.run(['poetry', 'install'],
                           cwd=self.namespace,
                           check=True,
                           capture_output=True)
            click.echo(f"✅ poetry new '{self.namespace}': OK")
        except subprocess.CalledProcessError as e:
            click.echo(f"❌ Error: {e}", err=True)
            sys.exit(1)

    def add_dependencies(self):
        """Add the runtime dependencies to the project via ``poetry add``.

        Exits with status 1 on the first dependency that fails to install.
        """
        click.echo("📦 Añadiendo dependencias...")

        dependencies = ['sqlalchemy', 'psycopg2-binary']

        for dep in dependencies:
            try:
                subprocess.run(['poetry', 'add', dep],
                               cwd=self.namespace,
                               check=True,
                               capture_output=True)
                click.echo(f"   ✅ {dep} añadido")
            except subprocess.CalledProcessError as e:
                click.echo(f"   ❌ Error al añadir dependencia {dep}: {e}", err=True)
                sys.exit(1)

    def add_schemas_folder(self):
        """Scaffold the initial schema file inside the new project."""
        new_schema = NewSchemaCommand(self.namespace, self.schema_name)
        new_schema.create()

    def msg(self):
        """Print the success message and next steps.

        The instructions reference the actual environment variable and schema
        file produced by this command (the original text pointed to
        DATABASE_URL and schemas/main.py, which this command never creates).
        """
        click.echo()
        click.echo(f'🎉 ¡Proyecto "{self.namespace}" creado exitosamente!')
        click.echo()
        click.echo("📋 Próximos pasos:")
        click.echo(f"   1. Configurar {self.schema_name.upper()}_DATABASE_URL en tu entorno:")
        click.echo(f"      export {self.schema_name.upper()}_DATABASE_URL='postgresql://user:pass@localhost:5432/dbname'")
        click.echo(f"   2. Definir tus modelos en {self.namespace}/schemas/{self.schema_name}.py")
        click.echo("   3. Crear recursos:")
        click.echo("      tai-sql generate")
        click.echo()
        click.echo("🔗 Documentación: https://github.com/tu-repo/tai-sql")
|
|
@@ -0,0 +1,312 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
import sys
|
|
3
|
+
import click
|
|
4
|
+
from dataclasses import dataclass, field
|
|
5
|
+
from typing import List
|
|
6
|
+
from sqlalchemy import MetaData, Column, text, Engine
|
|
7
|
+
from sqlalchemy.schema import Table, UniqueConstraint
|
|
8
|
+
|
|
9
|
+
from tai_sql import db
|
|
10
|
+
from tai_sql.orm import Table
|
|
11
|
+
from .utils.schema import Schema
|
|
12
|
+
from .utils.ddl import DDLManager, CreateStatement, AlterColumnStatement, ForeignKeyStatement
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
@dataclass
class DriftManager:
    """Holds the drift (schema changes) detected between the declared schema
    and the schema that currently exists in the database.

    Attributes:
        engine: Engine used to reflect the live database (defaults to ``db.engine``).
        metadata: Metadata populated from the declared schema.
        existing_metadata: Metadata reflected from the live database.
        new_tables: Declared tables missing from the database, by name.
        existing_tables: Tables present on both sides, by name.
        columns_to_add / columns_to_drop / columns_to_modify: per-table column diffs.
    """
    # Defaults are resolved lazily from the global datasource at instantiation time
    engine: Engine = field(default_factory=lambda: db.engine)
    metadata: MetaData = field(default_factory=lambda: MetaData(db.schema if db.provider.drivername == 'postgresql' else None))
    existing_metadata: MetaData = field(default_factory=lambda: MetaData(db.schema if db.provider.drivername == 'postgresql' else None))
    # Annotations fixed: these maps hold a single Table per name, not a list
    new_tables: dict[str, Table] = field(default_factory=dict)
    existing_tables: dict[str, Table] = field(default_factory=dict)
    columns_to_add: dict[str, list[Column]] = field(default_factory=dict)
    columns_to_drop: dict[str, list[Column]] = field(default_factory=dict)
    columns_to_modify: dict[str, list[Column]] = field(default_factory=dict)

    def detect(self) -> None:
        """Detect differences between the declared schema and the live database.

        Reflects the database into ``existing_metadata`` and populates
        ``new_tables``, ``existing_tables`` and the ``columns_to_*`` maps
        in place (nothing is returned).
        """
        click.echo("🔎 Detectando cambios en el esquema...")

        self.existing_metadata.reflect(bind=self.engine)

        db_tables = set(self.existing_metadata.tables.keys())
        schema_tables = set(self.metadata.tables.keys())

        # Declared tables that do not exist in the database yet
        for table_name in schema_tables - db_tables:
            self.new_tables[table_name] = self.metadata.tables[table_name]

        # Tables present on both sides: diff their columns
        for table_name in schema_tables & db_tables:
            current_table = self.existing_metadata.tables[table_name]
            self.existing_tables[table_name] = current_table
            new_table = self.metadata.tables[table_name]

            # Collect every column covered by a UNIQUE constraint. BUGFIX: the
            # original assigned (not extended) inside the loop, so only the
            # columns of the *last* constraint were kept.
            constraint_columns = []
            for constraint in current_table.constraints:
                if isinstance(constraint, UniqueConstraint):
                    constraint_columns.extend(col.name for col in constraint.columns)

            current_columns = {col.name: col for col in current_table.columns}
            new_columns = {col.name: col for col in new_table.columns}

            # Columns declared but missing in the database
            columns_to_add = set(new_columns.keys()) - set(current_columns.keys())
            if columns_to_add:
                self.columns_to_add[table_name] = [
                    new_columns[col_name] for col_name in columns_to_add
                ]

            # Columns present in the database but no longer declared
            # (actual dropping is left to the caller, for safety)
            columns_to_drop = set(current_columns.keys()) - set(new_columns.keys())
            if columns_to_drop:
                self.columns_to_drop[table_name] = [
                    current_columns[col_name] for col_name in columns_to_drop
                ]

            # Columns present on both sides: compare their definitions
            for col_name in set(current_columns.keys()) & set(new_columns.keys()):
                current_col = current_columns[col_name]
                new_col = new_columns[col_name]

                if str(current_col.type) == 'TIMESTAMP' and str(new_col.type) == 'DATETIME':
                    # TIMESTAMP vs DATETIME is a dialect rendering difference,
                    # not a real type change
                    type_changed = False
                else:
                    type_changed = str(current_col.type) != str(new_col.type)

                nullable_changed = current_col.nullable != new_col.nullable

                # server_default comparison intentionally skipped: the reflected
                # string form is not reliably comparable across dialects

                pk_changed = current_col.primary_key != new_col.primary_key

                autoincrement_changed = current_col.autoincrement != new_col.autoincrement

                # Reflection reports uniqueness via table-level constraints, not
                # the column flag; mirror it onto the column before comparing
                if col_name in constraint_columns:
                    current_col.unique = True

                unique_changed = current_col.unique != new_col.unique

                if type_changed or nullable_changed or pk_changed or autoincrement_changed or unique_changed:
                    self.columns_to_modify.setdefault(table_name, []).append(new_col)

    def show(self):
        """Print a console summary of the changes gathered by :meth:`detect`."""
        click.echo("📋 Resumen de cambios:")

        if self.new_tables:
            click.echo(f"   🆕 {len(self.new_tables)} tabla(s) nueva(s): {', '.join(self.new_tables)}")

        if self.columns_to_add:
            total_columns = sum(len(cols) for cols in self.columns_to_add.values())
            click.echo(f"   ➕ {total_columns} columna(s) a añadir en {len(self.columns_to_add)} tabla(s)")

        if self.columns_to_drop:
            total_columns = sum(len(cols) for cols in self.columns_to_drop.values())
            click.echo(f"   ⚠️ {total_columns} columna(s) serían eliminadas")

        if self.columns_to_modify:
            total_columns = sum(len(cols) for cols in self.columns_to_modify.values())
            click.echo(f"   ✏️ {total_columns} columna(s) a modificar en {len(self.columns_to_modify)} tabla(s)")

        if not self.new_tables and not self.columns_to_add and not self.columns_to_drop and not self.columns_to_modify:
            click.echo("   ✅ No se detectaron cambios")

        click.echo()
|
|
140
|
+
|
|
141
|
+
class PushCommand(Schema):
    """Generate and execute the DDL statements derived from a schema file.

    The command loads the schema module, validates table/column names,
    computes the drift against the live database and applies CREATE TABLE /
    ALTER TABLE / foreign-key statements inside a single transaction.
    """

    def __init__(self, schema_file: str):
        super().__init__(schema_file)
        self.schema_file = schema_file
        self.ddl_manager = DDLManager()
        self.drift_manager = DriftManager()

    def load_schema(self) -> MetaData:
        """Load and execute the schema file to collect the table definitions.

        Returns:
            MetaData: SQLAlchemy metadata populated with the declared tables.

        Raises:
            Exception: if the schema cannot be analyzed or converted.
        """
        click.echo("📖 Cargando definiciones de schema...")

        try:
            # Reset any state left over from a previous run
            db.tables = []

            Table.analyze()
            Table.validate()

            db.tables = Table.registry

            for table in db.tables:
                # Convert each tai_sql definition into a SQLAlchemy Table
                sqlalchemy_table = table.to_sqlalchemy_table(self.drift_manager.metadata)
                click.echo(f"   📋 Tabla: {sqlalchemy_table.name}")

            return self.drift_manager.metadata

        except Exception as e:
            # Chain the original exception so the root cause is preserved
            raise Exception(f"Error al cargar schema: {e}") from e

    def validate_schema_names(self):
        """Warn about table/column names that are SQL reserved words.

        If any problem is found the user is asked to confirm; declining
        exits the process with status 1.
        """
        click.echo("🔍 Validando nombres de tablas y columnas...")

        warnings = []

        for table in self.drift_manager.metadata.tables.values():
            # Table name against the dialect's reserved-word list
            if table.name.lower() in self.ddl_manager.ddl.reserved_words:
                warnings.append(f"⚠️ Tabla '{table.name}' es una palabra reservada")

            # Column names against the same list
            for column in table.columns:
                if column.name.lower() in self.ddl_manager.ddl.reserved_words:
                    warnings.append(f"⚠️ Columna '{column.name}' en tabla '{table.name}' es una palabra reservada")

        if warnings:
            click.echo("❌ Se encontraron problemas con nombres:")
            for warning in warnings:
                click.echo(f"   {warning}")
            click.echo()
            click.echo("💡 Sugerencias:")
            click.echo("   - Cambia 'user' por 'users' o 'app_user'")
            click.echo("   - Cambia 'order' por 'orders' o 'user_order'")
            click.echo("   - Usa nombres descriptivos que no sean palabras reservadas")
            click.echo()

            if not click.confirm("¿Continuar de todas formas? (se manejará automáticamente)"):
                click.echo("❌ Operación cancelada por el usuario")
                sys.exit(1)

        click.echo("✅ Validación de nombres completada")

    def generate(self) -> list:
        """Generate the DDL statements for the detected changes.

        Return annotation fixed: the original declared ``-> None`` while the
        method (and its docstring) returned the statement list.

        Returns:
            list: DDL statement objects accumulated in the DDL manager.
        """
        # Discard statements from any previous run
        self.ddl_manager.clear()
        # Compute the drift against the live database
        self.drift_manager.detect()

        # Print the change summary to the console
        self.drift_manager.show()

        new_tables = self.drift_manager.new_tables
        new_cols = self.drift_manager.columns_to_add
        delete_cols = self.drift_manager.columns_to_drop
        modify_cols = self.drift_manager.columns_to_modify

        if new_tables:
            self.ddl_manager.generate_creations(new_tables.values())

        # Incremental migrations for tables that already exist
        if new_cols or delete_cols or modify_cols:
            self.ddl_manager.generate_migrations(new_cols, delete_cols, modify_cols)

        return self.ddl_manager.statements

    def execute(self):
        """Execute the generated DDL statements inside one transaction.

        Rolls everything back and re-raises on any failure; ALTER statements
        on unique columns are skipped when duplicate values are detected.

        Raises:
            Exception: wrapping the underlying database error.
        """
        if not self.ddl_manager.statements:
            click.echo("ℹ️ No hay cambios para aplicar")
            return

        click.echo("⚙️ Ejecutando sentencias DDL...")

        try:
            executed_count = 0

            with self.drift_manager.engine.connect() as conn:
                # All statements run in a single transaction
                trans = conn.begin()

                try:
                    for stmt in self.ddl_manager.statements:

                        if isinstance(stmt, CreateStatement):
                            # CREATE TABLE
                            conn.execute(text(stmt.text))
                            executed_count += 1
                            click.echo(f"   ✅ Tabla {stmt.table_name} creada")

                        elif isinstance(stmt, AlterColumnStatement):
                            # ALTER TABLE (add/modify column)

                            if stmt.column.unique:
                                # Adding a UNIQUE constraint would fail if the
                                # column already holds duplicates: skip it
                                result = stmt.check_unique_constraints()

                                if result:
                                    click.echo("   ❌ UniqueConstraint error:")
                                    click.echo(f'   ⚠️ Columna "{stmt.column_name}" tiene valores duplicados en {stmt.table_name}, se omitirá la modificación')
                                    continue

                            # A single alter may expand to several statements
                            if isinstance(stmt.text, list):
                                for sub_stmt in stmt.text:
                                    conn.execute(text(sub_stmt))
                            else:
                                conn.execute(text(stmt.text))

                            executed_count += 1

                            if stmt.column_name:
                                click.echo(f"   ✅ Columna {stmt.column_name} añadida/modificada en {stmt.table_name}")

                        elif isinstance(stmt, ForeignKeyStatement):
                            # ALTER TABLE ... ADD FOREIGN KEY
                            conn.execute(text(stmt.text))
                            executed_count += 1
                            click.echo(f"   ✅ Foreign Key: {stmt.table_name}.{stmt.fk.parent.name} → {stmt.fk.column.table.name}.{stmt.fk.column.name} añadida a {stmt.table_name}")

                    trans.commit()
                    click.echo(f"   🎉 {executed_count} operación(es) ejecutada(s) exitosamente")

                except Exception as e:
                    # Undo everything applied so far before propagating
                    trans.rollback()
                    raise e

        except Exception as e:
            raise Exception(f"Error al ejecutar DDL: {e}") from e
|
|
310
|
+
|
|
311
|
+
|
|
312
|
+
|