evolutia 0.1.1__py3-none-any.whl → 0.1.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- evolutia/__init__.py +9 -0
- evolutia/async_llm_providers.py +157 -0
- evolutia/cache/__init__.py +9 -0
- evolutia/cache/exercise_cache.py +226 -0
- evolutia/cache/llm_cache.py +487 -0
- evolutia/complexity_validator.py +33 -31
- evolutia/config_manager.py +53 -40
- evolutia/evolutia_engine.py +341 -66
- evolutia/exam_generator.py +44 -43
- evolutia/exceptions.py +38 -0
- evolutia/exercise_analyzer.py +42 -59
- evolutia/imports.py +175 -0
- evolutia/llm_providers.py +223 -61
- evolutia/material_extractor.py +166 -88
- evolutia/rag/rag_indexer.py +107 -90
- evolutia/rag/rag_retriever.py +130 -103
- evolutia/retry_utils.py +280 -0
- evolutia/utils/json_parser.py +29 -19
- evolutia/utils/markdown_parser.py +185 -159
- evolutia/utils/math_extractor.py +153 -144
- evolutia/validation/__init__.py +1 -0
- evolutia/validation/args_validator.py +253 -0
- evolutia/validation/config_validator.py +502 -0
- evolutia/variation_generator.py +82 -70
- evolutia-0.1.3.dist-info/METADATA +536 -0
- evolutia-0.1.3.dist-info/RECORD +37 -0
- {evolutia-0.1.1.dist-info → evolutia-0.1.3.dist-info}/WHEEL +1 -1
- evolutia_cli.py +22 -9
- evolutia-0.1.1.dist-info/METADATA +0 -221
- evolutia-0.1.1.dist-info/RECORD +0 -27
- {evolutia-0.1.1.dist-info → evolutia-0.1.3.dist-info}/entry_points.txt +0 -0
- {evolutia-0.1.1.dist-info → evolutia-0.1.3.dist-info}/licenses/LICENSE +0 -0
- {evolutia-0.1.1.dist-info → evolutia-0.1.3.dist-info}/top_level.txt +0 -0
evolutia/retry_utils.py
ADDED
@@ -0,0 +1,280 @@
"""
Utilidades para manejo de errores y reintentos en EvolutIA.
Incluye decoradores para reintentos automáticos en llamadas a APIs externas.
"""
import asyncio
import functools
import logging
import time
from typing import Type, Tuple, Optional, Callable

logger = logging.getLogger(__name__)


def retry_async(
    max_retries: int = 3,
    initial_delay: float = 1.0,
    max_delay: float = 10.0,
    exponential_backoff: bool = True,
    exceptions: Tuple[Type[Exception], ...] = (Exception,),
    on_retry: Optional[Callable] = None
):
    """
    Decorador para reintentar funciones asíncronas con backoff exponencial.

    Args:
        max_retries: Número máximo de reintentos (default: 3)
        initial_delay: Retraso inicial en segundos (default: 1.0)
        max_delay: Retraso máximo en segundos (default: 10.0)
        exponential_backoff: Si True, usa backoff exponencial (default: True)
        exceptions: Tupla de excepciones que disparan reintentos (default: all Exception)
        on_retry: Callback opcional que se ejecuta antes de cada reintento

    Example:
        ```python
        @retry_async(max_retries=3, exceptions=(TimeoutError, ConnectionError))
        async def fetch_data(url: str) -> Dict:
            async with aiohttp.ClientSession() as session:
                async with session.get(url) as response:
                    return await response.json()
        ```
    """
    def decorator(func):
        @functools.wraps(func)
        async def wrapper(*args, **kwargs):
            last_exception = None

            for attempt in range(max_retries + 1):
                try:
                    return await func(*args, **kwargs)
                except exceptions as e:
                    last_exception = e

                    if attempt == max_retries:
                        logger.error(
                            f"[RetryAsync] Función '{func.__name__}' falló después de "
                            f"{max_retries + 1} intentos. Error: {e}"
                        )
                        raise

                    # Calcular delay con backoff exponencial
                    if exponential_backoff:
                        delay = min(initial_delay * (2 ** attempt), max_delay)
                    else:
                        delay = min(initial_delay + attempt, max_delay)

                    logger.warning(
                        f"[RetryAsync] Intento {attempt + 1}/{max_retries + 1} falló para "
                        f"'{func.__name__}'. Retentando en {delay:.1f}s... Error: {e}"
                    )

                    # Ejecutar callback si está definido
                    if on_retry:
                        await on_retry(attempt + 1, e, *args, **kwargs)

                    # Esperar antes del siguiente intento
                    await asyncio.sleep(delay)

            # Esto nunca debería ejecutarse, pero mypy lo requiere
            raise last_exception if last_exception else RuntimeError("Unexpected error in retry_async")

        return wrapper
    return decorator

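As an illustration of the added decorator, a minimal sketch of wrapping a flaky provider call follows. The `call_llm` coroutine and its failure are invented for the example; only `retry_async`, its parameters, and the module path `evolutia.retry_utils` come from the diff, and the snippet assumes the 0.1.3 wheel is installed.

```python
import asyncio

from evolutia.retry_utils import retry_async


# Hypothetical coroutine standing in for a real provider request.
@retry_async(max_retries=2, initial_delay=0.5, exceptions=(TimeoutError, ConnectionError))
async def call_llm(prompt: str) -> str:
    raise TimeoutError("provider did not answer in time")


async def main():
    try:
        await call_llm("Genera una variación del ejercicio 3")
    except TimeoutError:
        # After max_retries + 1 attempts the last exception is re-raised.
        print("gave up after 3 attempts")


asyncio.run(main())
```

With `max_retries=2` the wrapped call is attempted three times, waiting 0.5 s and then 1.0 s between attempts before the original exception propagates.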
def retry_sync(
    max_retries: int = 3,
    initial_delay: float = 1.0,
    max_delay: float = 10.0,
    exponential_backoff: bool = True,
    exceptions: Tuple[Type[Exception], ...] = (Exception,),
    on_retry: Optional[Callable] = None
):
    """
    Decorador para reintentar funciones síncronas con backoff exponencial.

    Args:
        max_retries: Número máximo de reintentos (default: 3)
        initial_delay: Retraso inicial en segundos (default: 1.0)
        max_delay: Retraso máximo en segundos (default: 10.0)
        exponential_backoff: Si True, usa backoff exponencial (default: True)
        exceptions: Tupla de excepciones que disparan reintentos (default: all Exception)
        on_retry: Callback opcional que se ejecuta antes de cada reintento

    Example:
        ```python
        @retry_sync(max_retries=3, exceptions=(TimeoutError, ConnectionError))
        def fetch_data(url: str) -> Dict:
            response = requests.get(url, timeout=10)
            return response.json()
        ```
    """
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            last_exception = None

            for attempt in range(max_retries + 1):
                try:
                    return func(*args, **kwargs)
                except exceptions as e:
                    last_exception = e

                    if attempt == max_retries:
                        logger.error(
                            f"[RetrySync] Función '{func.__name__}' falló después de "
                            f"{max_retries + 1} intentos. Error: {e}"
                        )
                        raise

                    # Calcular delay con backoff exponencial
                    if exponential_backoff:
                        delay = min(initial_delay * (2 ** attempt), max_delay)
                    else:
                        delay = min(initial_delay + attempt, max_delay)

                    logger.warning(
                        f"[RetrySync] Intento {attempt + 1}/{max_retries + 1} falló para "
                        f"'{func.__name__}'. Retentando en {delay:.1f}s... Error: {e}"
                    )

                    # Ejecutar callback si está definido
                    if on_retry:
                        on_retry(attempt + 1, e, *args, **kwargs)

                    # Esperar antes del siguiente intento
                    time.sleep(delay)

            # Esto nunca debería ejecutarse, pero mypy lo requiere
            raise last_exception if last_exception else RuntimeError("Unexpected error in retry_sync")

        return wrapper
    return decorator

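A similar sketch for the synchronous variant, focused on the `on_retry` hook. `flaky_parse` and `log_attempt` are made-up names; the callback signature simply mirrors the `on_retry(attempt + 1, e, *args, **kwargs)` call inside the decorator above.

```python
from evolutia.retry_utils import retry_sync


def log_attempt(attempt, exc, *args, **kwargs):
    # Receives the attempt number, the exception, and the original call arguments.
    print(f"attempt {attempt} failed with {exc!r}, args={args}")


_state = {"calls": 0}


@retry_sync(max_retries=3, initial_delay=0.1, exponential_backoff=False,
            exceptions=(ValueError,), on_retry=log_attempt)
def flaky_parse(raw: str) -> int:
    # Hypothetical function that only succeeds on its third invocation.
    _state["calls"] += 1
    if _state["calls"] < 3:
        raise ValueError("not yet")
    return int(raw)


print(flaky_parse("42"))  # logs two failed attempts, then prints 42
```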
class CircuitBreaker:
    """
    Implementa el patrón Circuit Breaker para evitar llamadas a servicios fallidos.

    Estados: CLOSED (normal), OPEN (fallo), HALF_OPEN (recuperando)
    """

    def __init__(
        self,
        failure_threshold: int = 5,
        timeout: float = 60.0,
        expected_exception: Type[Exception] = Exception
    ):
        """
        Inicializa el circuit breaker.

        Args:
            failure_threshold: Número de fallos consecutivos para abrir el circuito
            timeout: Tiempo en segundos antes de intentar recuperar (OPEN → HALF_OPEN)
            expected_exception: Tipo de excepción a considerar como fallo
        """
        self.failure_threshold = failure_threshold
        self.timeout = timeout
        self.expected_exception = expected_exception

        self.failure_count = 0
        self.last_failure_time = None
        self.state = "CLOSED"  # CLOSED, OPEN, HALF_OPEN

    def is_allowed(self) -> bool:
        """
        Verifica si se permite ejecutar la operación.

        Returns:
            True si el circuito está cerrado o medio abierto, False si está abierto
        """
        if self.state == "CLOSED":
            return True

        if self.state == "OPEN":
            # Verificar si es hora de intentar recuperar
            if time.time() - self.last_failure_time > self.timeout:
                self.state = "HALF_OPEN"
                logger.info("[CircuitBreaker] Cambiando de OPEN a HALF_OPEN")
                return True
            return False

        if self.state == "HALF_OPEN":
            return True

        return False

    def record_success(self):
        """Registra un éxito exitoso."""
        if self.state == "HALF_OPEN":
            self.state = "CLOSED"
            logger.info("[CircuitBreaker] Cambiando de HALF_OPEN a CLOSED")

        self.failure_count = 0

    def record_failure(self):
        """Registra un fallo."""
        self.failure_count += 1
        self.last_failure_time = time.time()

        if self.failure_count >= self.failure_threshold:
            if self.state != "OPEN":
                logger.warning(
                    f"[CircuitBreaker] Abriendo circuito después de "
                    f"{self.failure_count} fallos consecutivos"
                )
            self.state = "OPEN"

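The breaker can also be driven by hand; a small sketch of the state machine follows, assuming the class behaves exactly as in the diff. The threshold and timeout values are illustrative.

```python
import time

from evolutia.retry_utils import CircuitBreaker

cb = CircuitBreaker(failure_threshold=3, timeout=1.0)

# Three consecutive failures open the circuit.
for _ in range(3):
    cb.record_failure()
print(cb.state, cb.is_allowed())   # OPEN False

# Once the timeout has elapsed, the next check moves it to HALF_OPEN.
time.sleep(1.1)
print(cb.is_allowed(), cb.state)   # True HALF_OPEN

# A success in HALF_OPEN closes the circuit and resets the failure count.
cb.record_success()
print(cb.state, cb.failure_count)  # CLOSED 0
```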
def with_circuit_breaker(circuit_breaker: CircuitBreaker):
    """
    Decorador que usa un Circuit Breaker para proteger funciones.

    Args:
        circuit_breaker: Instancia de Circuit Breaker

    Example:
        ```python
        cb = CircuitBreaker(failure_threshold=5, timeout=60.0)

        @with_circuit_breaker(cb)
        async def call_api(url: str) -> Dict:
            async with aiohttp.ClientSession() as session:
                async with session.get(url) as response:
                    return await response.json()
        ```
    """
    def decorator(func):
        @functools.wraps(func)
        async def async_wrapper(*args, **kwargs):
            if not circuit_breaker.is_allowed():
                raise Exception(f"Circuit breaker is OPEN for {func.__name__}")

            try:
                result = await func(*args, **kwargs)
                circuit_breaker.record_success()
                return result
            except circuit_breaker.expected_exception as e:
                circuit_breaker.record_failure()
                raise

        @functools.wraps(func)
        def sync_wrapper(*args, **kwargs):
            if not circuit_breaker.is_allowed():
                raise Exception(f"Circuit breaker is OPEN for {func.__name__}")

            try:
                result = func(*args, **kwargs)
                circuit_breaker.record_success()
                return result
            except circuit_breaker.expected_exception as e:
                circuit_breaker.record_failure()
                raise

        # Detectar si la función es async
        if asyncio.iscoroutinefunction(func):
            return async_wrapper
        else:
            return sync_wrapper

    return decorator
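To show how the two additions might be composed, a hedged sketch follows with the breaker applied outermost, so each exhausted retry cycle counts as a single failure. `generate_variation`, its error, and the breaker settings are invented for illustration; the decorators and their parameters are from the file above.

```python
import asyncio

from evolutia.retry_utils import CircuitBreaker, retry_async, with_circuit_breaker

provider_breaker = CircuitBreaker(failure_threshold=5, timeout=60.0)


@with_circuit_breaker(provider_breaker)
@retry_async(max_retries=2, initial_delay=0.1, exceptions=(ConnectionError,))
async def generate_variation(prompt: str) -> str:
    # Stand-in for a real provider request that keeps failing.
    raise ConnectionError("provider unreachable")


async def main():
    for _ in range(6):
        try:
            await generate_variation("...")
        except Exception as e:
            print(type(e).__name__, provider_breaker.state)


asyncio.run(main())
```

After five exhausted retry cycles the breaker opens, and the sixth call is rejected immediately with the "Circuit breaker is OPEN" exception instead of reaching the provider.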
evolutia/utils/json_parser.py
CHANGED
@@ -5,7 +5,7 @@ que pueden contener LaTeX o formatos markdown incorrectos.
 import json
 import re
 import logging
-from typing import Dict, Any, Optional
+from typing import Dict, Any, Optional, Union
 
 logger = logging.getLogger(__name__)
 
@@ -13,25 +13,33 @@ def extract_and_parse_json(text: str) -> Optional[Dict[str, Any]]:
     """
     Intenta extraer y parsear un objeto JSON de un texto arbitrario.
     Maneja bloques de código markdown y errores comunes de escape en LaTeX.
+
+    Args:
+        text: Texto que puede contener JSON (con o sin bloques markdown)
+
+    Returns:
+        Diccionario con el JSON parseado o None si falla
     """
     if not text:
+        logger.debug("[JsonParser] Texto vacío, retornando None")
         return None
-
+
     # 1. Limpieza básica y extracción de bloque de código
     clean_text = text.strip()
-
+
     code_block_pattern = re.compile(r'```(?:json)?\s*(.*?)```', re.DOTALL)
     match = code_block_pattern.search(clean_text)
-
+
     if match:
         clean_text = match.group(1).strip()
-
+        logger.debug("[JsonParser] Bloque de código JSON detectado y extraído")
+
     # HEURÍSTICA DE LATEX AGRESIVA
     # En contextos matemáticos, secuencias como \frac, \textbf, \theta son muy comunes.
     # json.loads interpreta \f, \b, \t como caracteres de control (form feed, backspace, tab).
     # Esto corrompe el LaTeX (ej: \theta -> tab + heta).
     # Por lo tanto, aplicamos una limpieza PREVIA al intento de parseo estándar para estas secuencias.
-
+
     # Whitelist de escapes que REALMENTE queremos preservar como controles JSON estándar:
     # " -> \" (comillas dentro de string)
     # \ -> \\ (backslash literal ya escapado)
@@ -39,31 +47,33 @@ def extract_and_parse_json(text: str) -> Optional[Dict[str, Any]]:
     # n -> \n (newline - muy común y necesario)
     # r -> \r (carriage return)
     # u -> \uXXXX (unicode - aunque \usepackage podría ser problematico, \u requiere 4 hex digits, asi que \usepackage falla json.loads y lo capturamos despues)
-
+
     # REMOVIDOS de whitelist (se escaparán a doble backslash):
     # t -> Para proteger \theta, \textbf, \text, etc.
     # f -> Para proteger \frac, \forall, etc.
     # b -> Para proteger \begin, \beta, etc.
-
+
     # Regex: Lookbehind negativo para asegurar que no está ya escapado (?<!\\)
     # Lookahead negativo para permitir solo los de whitelist (?!["\\/nru])
     # Así, \t se convierte en \\t (literal \t string), \n se queda como \n (control char).
-
+
     regex_latex_fix = r'(?<!\\)\\(?!["\\/nru])'
-
+
     try:
         # Aplicar fix agresivo
         fixed_text = re.sub(regex_latex_fix, r'\\\\', clean_text)
-
-
-
-
-
+        result = json.loads(fixed_text, strict=False)
+        logger.info(f"[JsonParser] JSON parseado exitosamente con fix LaTeX (longitud={len(str(result))})")
+        return result
+    except json.JSONDecodeError as e:
+        logger.debug(f"[JsonParser] Falló parseo con fix LaTeX: {e}")
 
     try:
-
+        result = json.loads(clean_text, strict=False)
+        logger.info(f"[JsonParser] JSON parseado exitosamente sin fix LaTeX (longitud={len(str(result))})")
+        return result
     except json.JSONDecodeError as e:
-        logger.debug(f"Fallo parseo JSON
-
-        logger.error(f"No se pudo parsear JSON. Texto original (inicio): {text[:100]}...")
+        logger.debug(f"[JsonParser] Fallo parseo JSON sin fix: {e}")
+
+        logger.error(f"[JsonParser] No se pudo parsear JSON. Texto original (inicio): {text[:100]}...")
     return None