iatoolkit 0.3.9__py3-none-any.whl → 0.107.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of iatoolkit might be problematic.

Files changed (150)
  1. iatoolkit/__init__.py +27 -35
  2. iatoolkit/base_company.py +3 -35
  3. iatoolkit/cli_commands.py +18 -47
  4. iatoolkit/common/__init__.py +0 -0
  5. iatoolkit/common/exceptions.py +48 -0
  6. iatoolkit/common/interfaces/__init__.py +0 -0
  7. iatoolkit/common/interfaces/asset_storage.py +34 -0
  8. iatoolkit/common/interfaces/database_provider.py +39 -0
  9. iatoolkit/common/model_registry.py +159 -0
  10. iatoolkit/common/routes.py +138 -0
  11. iatoolkit/common/session_manager.py +26 -0
  12. iatoolkit/common/util.py +353 -0
  13. iatoolkit/company_registry.py +66 -29
  14. iatoolkit/core.py +514 -0
  15. iatoolkit/infra/__init__.py +5 -0
  16. iatoolkit/infra/brevo_mail_app.py +123 -0
  17. iatoolkit/infra/call_service.py +140 -0
  18. iatoolkit/infra/connectors/__init__.py +5 -0
  19. iatoolkit/infra/connectors/file_connector.py +17 -0
  20. iatoolkit/infra/connectors/file_connector_factory.py +57 -0
  21. iatoolkit/infra/connectors/google_cloud_storage_connector.py +53 -0
  22. iatoolkit/infra/connectors/google_drive_connector.py +68 -0
  23. iatoolkit/infra/connectors/local_file_connector.py +46 -0
  24. iatoolkit/infra/connectors/s3_connector.py +33 -0
  25. iatoolkit/infra/google_chat_app.py +57 -0
  26. iatoolkit/infra/llm_providers/__init__.py +0 -0
  27. iatoolkit/infra/llm_providers/deepseek_adapter.py +278 -0
  28. iatoolkit/infra/llm_providers/gemini_adapter.py +350 -0
  29. iatoolkit/infra/llm_providers/openai_adapter.py +124 -0
  30. iatoolkit/infra/llm_proxy.py +268 -0
  31. iatoolkit/infra/llm_response.py +45 -0
  32. iatoolkit/infra/redis_session_manager.py +122 -0
  33. iatoolkit/locales/en.yaml +222 -0
  34. iatoolkit/locales/es.yaml +225 -0
  35. iatoolkit/repositories/__init__.py +5 -0
  36. iatoolkit/repositories/database_manager.py +187 -0
  37. iatoolkit/repositories/document_repo.py +33 -0
  38. iatoolkit/repositories/filesystem_asset_repository.py +36 -0
  39. iatoolkit/repositories/llm_query_repo.py +105 -0
  40. iatoolkit/repositories/models.py +279 -0
  41. iatoolkit/repositories/profile_repo.py +171 -0
  42. iatoolkit/repositories/vs_repo.py +150 -0
  43. iatoolkit/services/__init__.py +5 -0
  44. iatoolkit/services/auth_service.py +193 -0
  45. {services → iatoolkit/services}/benchmark_service.py +7 -7
  46. iatoolkit/services/branding_service.py +153 -0
  47. iatoolkit/services/company_context_service.py +214 -0
  48. iatoolkit/services/configuration_service.py +375 -0
  49. iatoolkit/services/dispatcher_service.py +134 -0
  50. {services → iatoolkit/services}/document_service.py +20 -8
  51. iatoolkit/services/embedding_service.py +148 -0
  52. iatoolkit/services/excel_service.py +156 -0
  53. {services → iatoolkit/services}/file_processor_service.py +36 -21
  54. iatoolkit/services/history_manager_service.py +208 -0
  55. iatoolkit/services/i18n_service.py +104 -0
  56. iatoolkit/services/jwt_service.py +80 -0
  57. iatoolkit/services/language_service.py +89 -0
  58. iatoolkit/services/license_service.py +82 -0
  59. iatoolkit/services/llm_client_service.py +438 -0
  60. iatoolkit/services/load_documents_service.py +174 -0
  61. iatoolkit/services/mail_service.py +213 -0
  62. {services → iatoolkit/services}/profile_service.py +200 -101
  63. iatoolkit/services/prompt_service.py +303 -0
  64. iatoolkit/services/query_service.py +467 -0
  65. iatoolkit/services/search_service.py +55 -0
  66. iatoolkit/services/sql_service.py +169 -0
  67. iatoolkit/services/tool_service.py +246 -0
  68. iatoolkit/services/user_feedback_service.py +117 -0
  69. iatoolkit/services/user_session_context_service.py +213 -0
  70. iatoolkit/static/images/fernando.jpeg +0 -0
  71. iatoolkit/static/images/iatoolkit_core.png +0 -0
  72. iatoolkit/static/images/iatoolkit_logo.png +0 -0
  73. iatoolkit/static/js/chat_feedback_button.js +80 -0
  74. iatoolkit/static/js/chat_filepond.js +85 -0
  75. iatoolkit/static/js/chat_help_content.js +124 -0
  76. iatoolkit/static/js/chat_history_button.js +110 -0
  77. iatoolkit/static/js/chat_logout_button.js +36 -0
  78. iatoolkit/static/js/chat_main.js +401 -0
  79. iatoolkit/static/js/chat_model_selector.js +227 -0
  80. iatoolkit/static/js/chat_onboarding_button.js +103 -0
  81. iatoolkit/static/js/chat_prompt_manager.js +94 -0
  82. iatoolkit/static/js/chat_reload_button.js +38 -0
  83. iatoolkit/static/styles/chat_iatoolkit.css +559 -0
  84. iatoolkit/static/styles/chat_modal.css +133 -0
  85. iatoolkit/static/styles/chat_public.css +135 -0
  86. iatoolkit/static/styles/documents.css +598 -0
  87. iatoolkit/static/styles/landing_page.css +398 -0
  88. iatoolkit/static/styles/llm_output.css +148 -0
  89. iatoolkit/static/styles/onboarding.css +176 -0
  90. iatoolkit/system_prompts/__init__.py +0 -0
  91. iatoolkit/system_prompts/query_main.prompt +30 -23
  92. iatoolkit/system_prompts/sql_rules.prompt +47 -12
  93. iatoolkit/templates/_company_header.html +45 -0
  94. iatoolkit/templates/_login_widget.html +42 -0
  95. iatoolkit/templates/base.html +78 -0
  96. iatoolkit/templates/change_password.html +66 -0
  97. iatoolkit/templates/chat.html +337 -0
  98. iatoolkit/templates/chat_modals.html +185 -0
  99. iatoolkit/templates/error.html +51 -0
  100. iatoolkit/templates/forgot_password.html +51 -0
  101. iatoolkit/templates/onboarding_shell.html +106 -0
  102. iatoolkit/templates/signup.html +79 -0
  103. iatoolkit/views/__init__.py +5 -0
  104. iatoolkit/views/base_login_view.py +96 -0
  105. iatoolkit/views/change_password_view.py +116 -0
  106. iatoolkit/views/chat_view.py +76 -0
  107. iatoolkit/views/embedding_api_view.py +65 -0
  108. iatoolkit/views/forgot_password_view.py +75 -0
  109. iatoolkit/views/help_content_api_view.py +54 -0
  110. iatoolkit/views/history_api_view.py +56 -0
  111. iatoolkit/views/home_view.py +63 -0
  112. iatoolkit/views/init_context_api_view.py +74 -0
  113. iatoolkit/views/llmquery_api_view.py +59 -0
  114. iatoolkit/views/load_company_configuration_api_view.py +49 -0
  115. iatoolkit/views/load_document_api_view.py +65 -0
  116. iatoolkit/views/login_view.py +170 -0
  117. iatoolkit/views/logout_api_view.py +57 -0
  118. iatoolkit/views/profile_api_view.py +46 -0
  119. iatoolkit/views/prompt_api_view.py +37 -0
  120. iatoolkit/views/root_redirect_view.py +22 -0
  121. iatoolkit/views/signup_view.py +100 -0
  122. iatoolkit/views/static_page_view.py +27 -0
  123. iatoolkit/views/user_feedback_api_view.py +60 -0
  124. iatoolkit/views/users_api_view.py +33 -0
  125. iatoolkit/views/verify_user_view.py +60 -0
  126. iatoolkit-0.107.4.dist-info/METADATA +268 -0
  127. iatoolkit-0.107.4.dist-info/RECORD +132 -0
  128. iatoolkit-0.107.4.dist-info/licenses/LICENSE +21 -0
  129. iatoolkit-0.107.4.dist-info/licenses/LICENSE_COMMUNITY.md +15 -0
  130. {iatoolkit-0.3.9.dist-info → iatoolkit-0.107.4.dist-info}/top_level.txt +0 -1
  131. iatoolkit/iatoolkit.py +0 -413
  132. iatoolkit/system_prompts/arquitectura.prompt +0 -32
  133. iatoolkit-0.3.9.dist-info/METADATA +0 -252
  134. iatoolkit-0.3.9.dist-info/RECORD +0 -32
  135. services/__init__.py +0 -5
  136. services/api_service.py +0 -75
  137. services/dispatcher_service.py +0 -351
  138. services/excel_service.py +0 -98
  139. services/history_service.py +0 -45
  140. services/jwt_service.py +0 -91
  141. services/load_documents_service.py +0 -212
  142. services/mail_service.py +0 -62
  143. services/prompt_manager_service.py +0 -172
  144. services/query_service.py +0 -334
  145. services/search_service.py +0 -32
  146. services/sql_service.py +0 -42
  147. services/tasks_service.py +0 -188
  148. services/user_feedback_service.py +0 -67
  149. services/user_session_context_service.py +0 -85
  150. {iatoolkit-0.3.9.dist-info → iatoolkit-0.107.4.dist-info}/WHEEL +0 -0
--- /dev/null
+++ b/iatoolkit/infra/llm_providers/openai_adapter.py
@@ -0,0 +1,124 @@
+ # Copyright (c) 2024 Fernando Libedinsky
+ # Product: IAToolkit
+ #
+ # IAToolkit is open source software.
+
+ import logging
+ from typing import Dict, List, Optional
+ from iatoolkit.infra.llm_response import LLMResponse, ToolCall, Usage
+ from iatoolkit.common.exceptions import IAToolkitException
+ import html
+ from typing import List
+
+ class OpenAIAdapter:
+     """Adapter for the OpenAI API."""
+
+     def __init__(self, openai_client):
+         self.client = openai_client
+
+     def create_response(self,
+                         model: str,
+                         input: List[Dict],
+                         previous_response_id: Optional[str] = None,
+                         context_history: Optional[List[Dict]] = None,
+                         tools: Optional[List[Dict]] = None,
+                         text: Optional[Dict] = None,
+                         reasoning: Optional[Dict] = None,
+                         tool_choice: str = "auto") -> LLMResponse:
+         """Call the OpenAI API and map the result to the common structure."""
+         try:
+             # Prepare the parameters for OpenAI
+             params = {
+                 'model': model,
+                 'input': input
+             }
+
+             if previous_response_id:
+                 params['previous_response_id'] = previous_response_id
+             if tools:
+                 params['tools'] = tools
+             if text:
+                 params['text'] = text
+             if reasoning:
+                 params['reasoning'] = reasoning
+             if tool_choice != "auto":
+                 params['tool_choice'] = tool_choice
+
+             # Call the OpenAI API
+             openai_response = self.client.responses.create(**params)
+
+             # Map the response to the common structure
+             return self._map_openai_response(openai_response)
+
+         except Exception as e:
+             error_message = f"Error calling OpenAI API: {str(e)}"
+             logging.error(error_message)
+
+             raise IAToolkitException(IAToolkitException.ErrorType.LLM_ERROR, error_message)
+
+     def _map_openai_response(self, openai_response) -> LLMResponse:
+         """Map an OpenAI response to the common structure."""
+         # Map tool calls
+         tool_calls = []
+         if hasattr(openai_response, 'output') and openai_response.output:
+             for tool_call in openai_response.output:
+                 if hasattr(tool_call, 'type') and tool_call.type == "function_call":
+                     tool_calls.append(ToolCall(
+                         call_id=getattr(tool_call, 'call_id', ''),
+                         type=tool_call.type,
+                         name=getattr(tool_call, 'name', ''),
+                         arguments=getattr(tool_call, 'arguments', '{}')
+                     ))
+
+         # Map usage
+         usage = Usage(
+             input_tokens=openai_response.usage.input_tokens if openai_response.usage else 0,
+             output_tokens=openai_response.usage.output_tokens if openai_response.usage else 0,
+             total_tokens=openai_response.usage.total_tokens if openai_response.usage else 0
+         )
+
+         # Reasoning content extracted from Responses output items (type="reasoning")
+         reasoning_list = self._extract_reasoning_content(openai_response)
+         reasoning_str = "\n".join(reasoning_list)
+
+         return LLMResponse(
+             id=openai_response.id,
+             model=openai_response.model,
+             status=openai_response.status,
+             output_text=getattr(openai_response, 'output_text', ''),
+             output=tool_calls,
+             usage=usage,
+             reasoning_content=reasoning_str
+         )
+
+     def _extract_reasoning_content(self, openai_response) -> List[str]:
+         """
+         Extract reasoning summaries (preferred) or reasoning content fragments from Responses API output.
+
+         Format required by the caller:
+             1. reason is ...
+             2. reason is ...
+         """
+         reasons: List[str] = []
+
+         output_items = getattr(openai_response, "output", None) or []
+         for item in output_items:
+             if getattr(item, "type", None) != "reasoning":
+                 continue
+
+             # 1) Preferred: reasoning summaries (requires reasoning={"summary": "auto"} or similar)
+             summary = getattr(item, "summary", None) or []
+             for s in summary:
+                 text = getattr(s, "text", None)
+                 if text:
+                     reasons.append(str(text).strip())
+
+             # 2) Fallback: some responses may carry reasoning content in "content"
+             #    (e.g., content parts like {"type": "reasoning_text", "text": "..."}).
+             content = getattr(item, "content", None) or []
+             for c in content:
+                 text = getattr(c, "text", None)
+                 if text:
+                     reasons.append(str(text).strip())
+
+         return reasons
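
The adapter above normalizes the OpenAI Responses API output into the toolkit's common LLMResponse structure. A minimal usage sketch, not part of the package, assuming the official openai SDK and an illustrative model name:

    # Hypothetical sketch: wraps the official OpenAI client in the adapter shipped above.
    from openai import OpenAI
    from iatoolkit.infra.llm_providers.openai_adapter import OpenAIAdapter

    adapter = OpenAIAdapter(OpenAI())          # reads OPENAI_API_KEY from the environment
    response = adapter.create_response(
        model="gpt-4o",                        # illustrative model name
        input=[{"role": "user", "content": "Summarize this invoice."}],
    )

    print(response.output_text)                # normalized plain-text answer
    for call in response.output:               # normalized ToolCall entries, if any
        print(call.name, call.arguments)
    print(response.usage.total_tokens)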
--- /dev/null
+++ b/iatoolkit/infra/llm_proxy.py
@@ -0,0 +1,268 @@
+ # Copyright (c) 2024 Fernando Libedinsky
+ # Product: IAToolkit
+ #
+ # IAToolkit is open source software.
+
+
+ from iatoolkit.services.configuration_service import ConfigurationService
+ from iatoolkit.infra.llm_providers.openai_adapter import OpenAIAdapter
+ from iatoolkit.infra.llm_providers.gemini_adapter import GeminiAdapter
+ from iatoolkit.infra.llm_providers.deepseek_adapter import DeepseekAdapter
+ # from iatoolkit.infra.llm_providers.anthropic_adapter import AnthropicAdapter
+ from iatoolkit.common.exceptions import IAToolkitException
+ from iatoolkit.common.util import Utility
+ from iatoolkit.infra.llm_response import LLMResponse
+ from iatoolkit.common.model_registry import ModelRegistry
+
+ from openai import OpenAI  # For OpenAI and xAI (OpenAI-compatible)
+ # from anthropic import Anthropic  # For Claude (Anthropic)
+
+ from typing import Dict, List, Any, Tuple
+ import os
+ import threading
+ from injector import inject
+
+
+ class LLMProxy:
+     """
+     Proxy for routing calls to the correct LLM adapter and managing the creation of LLM clients.
+     """
+
+     # Class-level cache for low-level clients (per provider + API key)
+     _clients_cache: Dict[Tuple[str, str], Any] = {}
+     _clients_cache_lock = threading.Lock()
+
+     # Provider identifiers
+     PROVIDER_OPENAI = "openai"
+     PROVIDER_GEMINI = "gemini"
+     PROVIDER_DEEPSEEK = "deepseek"
+     PROVIDER_XAI = "xai"
+     PROVIDER_ANTHROPIC = "anthropic"
+
+     @inject
+     def __init__(
+             self,
+             util: Utility,
+             configuration_service: ConfigurationService,
+             model_registry: ModelRegistry,
+     ):
+         """
+         Initialize a new instance of the proxy. It can be a bare factory or a working instance with configured clients.
+         Pre-built clients can be injected for tests or special environments.
+         """
+         self.util = util
+         self.configuration_service = configuration_service
+         self.model_registry = model_registry
+
+         # adapter cache per provider
+         self.adapters: Dict[str, Any] = {}
+
+     # -------------------------------------------------------------------------
+     # Public API
+     # -------------------------------------------------------------------------
+
+     def create_response(self, company_short_name: str, model: str, input: List[Dict], **kwargs) -> LLMResponse:
+         """
+         Route the call to the correct adapter based on the model name.
+         This method is the single entry point used by the rest of the application.
+         """
+         if not company_short_name:
+             raise IAToolkitException(
+                 IAToolkitException.ErrorType.API_KEY,
+                 "company_short_name is required in kwargs to resolve LLM credentials."
+             )
+
+         # Determine the provider based on the model name
+         provider = self._resolve_provider_from_model(model)
+
+         adapter = self._get_or_create_adapter(
+             provider=provider,
+             company_short_name=company_short_name,
+         )
+
+         # Delegate to the adapter (OpenAI, Gemini, DeepSeek, xAI, Anthropic, etc.)
+         return adapter.create_response(model=model, input=input, **kwargs)
+
+     # -------------------------------------------------------------------------
+     # Provider resolution
+     # -------------------------------------------------------------------------
+
+     def _resolve_provider_from_model(self, model: str) -> str:
+         """
+         Determine which provider must be used for a given model name.
+         This delegates to the ModelRegistry, so all model-naming logic stays in one place.
+         """
+         provider_key = self.model_registry.get_provider(model)
+
+         if provider_key == "openai":
+             return self.PROVIDER_OPENAI
+         if provider_key == "gemini":
+             return self.PROVIDER_GEMINI
+         if provider_key == "deepseek":
+             return self.PROVIDER_DEEPSEEK
+         if provider_key == "xai":
+             return self.PROVIDER_XAI
+         if provider_key == "anthropic":
+             return self.PROVIDER_ANTHROPIC
+
+         raise IAToolkitException(
+             IAToolkitException.ErrorType.MODEL,
+             f"Unknown or unsupported model: {model}"
+         )
+
+     # -------------------------------------------------------------------------
+     # Adapter management
+     # -------------------------------------------------------------------------
+
+     def _get_or_create_adapter(self, provider: str, company_short_name: str) -> Any:
+         """
+         Return an adapter instance for the given provider.
+         If none exists yet, create it using a cached or new low-level client.
+         """
+         # If already created, just return it
+         if provider in self.adapters and self.adapters[provider] is not None:
+             return self.adapters[provider]
+
+         # Otherwise, create a low-level client from configuration
+         api_key = self._get_api_key_from_config(company_short_name, provider)
+         client = self._get_or_create_client(provider, api_key)
+
+         # Wrap the client with the correct adapter
+         if provider == self.PROVIDER_OPENAI:
+             adapter = OpenAIAdapter(client)
+         elif provider == self.PROVIDER_GEMINI:
+             adapter = GeminiAdapter(client)
+         elif provider == self.PROVIDER_DEEPSEEK:
+             adapter = DeepseekAdapter(client)
+         else:
+             raise IAToolkitException(
+                 IAToolkitException.ErrorType.MODEL,
+                 f"Provider not supported in _get_or_create_adapter: {provider}"
+             )
+
+         '''
+         elif provider == self.PROVIDER_XAI:
+             adapter = XAIAdapter(client)
+         elif provider == self.PROVIDER_ANTHROPIC:
+             adapter = AnthropicAdapter(client)
+         '''
+         self.adapters[provider] = adapter
+         return adapter
+
+     # -------------------------------------------------------------------------
+     # Client cache
+     # -------------------------------------------------------------------------
+
+     def _get_or_create_client(self, provider: str, api_key: str) -> Any:
+         """
+         Return a low-level client for the given provider and API key.
+         Uses a class-level cache to avoid recreating clients.
+         """
+         cache_key = (provider, api_key or "")
+
+         with self._clients_cache_lock:
+             if cache_key in self._clients_cache:
+                 return self._clients_cache[cache_key]
+
+             client = self._create_client_for_provider(provider, api_key)
+             self._clients_cache[cache_key] = client
+             return client
+
+     def _create_client_for_provider(self, provider: str, api_key: str) -> Any:
+         """
+         Actually create the low-level client for a provider.
+         This is the only place where provider-specific client construction lives.
+         """
+         if provider == self.PROVIDER_OPENAI:
+             # Standard OpenAI client for GPT models
+             return OpenAI(api_key=api_key)
+
+         if provider == self.PROVIDER_XAI:
+             # xAI Grok is OpenAI-compatible; we can use the OpenAI client with a different base_url.
+             return OpenAI(
+                 api_key=api_key,
+                 base_url="https://api.x.ai/v1",
+             )
+
+         if provider == self.PROVIDER_DEEPSEEK:
+             # Example: if you use the official DeepSeek client or an OpenAI-compatible wrapper
+             # return DeepSeekAPI(api_key=api_key)
+
+             # We use the OpenAI client with the DeepSeek base_url:
+             return OpenAI(
+                 api_key=api_key,
+                 base_url="https://api.deepseek.com",
+             )
+
+         if provider == self.PROVIDER_GEMINI:
+             # Example placeholder: you may already have a Gemini client factory elsewhere.
+             # Here you could create and configure the Gemini client (e.g. google.generativeai).
+             #
+             import google.generativeai as genai
+
+             genai.configure(api_key=api_key)
+             return genai
+
+         if provider == self.PROVIDER_ANTHROPIC:
+             # Example using the official Anthropic client:
+             #
+             # from anthropic import Anthropic
+             # return Anthropic(api_key=api_key)
+             raise IAToolkitException(
+                 IAToolkitException.ErrorType.API_KEY,
+                 "Anthropic client creation must be implemented in _create_client_for_provider."
+             )
+
+         raise IAToolkitException(
+             IAToolkitException.ErrorType.MODEL,
+             f"Provider not supported in _create_client_for_provider: {provider}"
+         )
+
+     # -------------------------------------------------------------------------
+     # Configuration helpers
+     # -------------------------------------------------------------------------
+     def _get_api_key_from_config(self, company_short_name: str, provider: str) -> str:
+         """
+         Read the LLM API key from the company configuration and environment variables.
+
+         Resolution priority:
+             1. llm.provider_api_keys[provider] -> provider-specific env var.
+             2. llm.api-key -> global env var (backwards compatibility).
+         """
+         llm_config = self.configuration_service.get_configuration(company_short_name, "llm")
+
+         if not llm_config:
+             # Keep compatibility with the tests: the message must state
+             # that no API key is configured.
+             raise IAToolkitException(
+                 IAToolkitException.ErrorType.API_KEY,
+                 f"Company '{company_short_name}' doesn't have an API key configured."
+             )
+
+         provider_keys = llm_config.get("provider_api_keys") or {}
+         env_var_name = None
+
+         # 1) Try the provider-specific api-key (if the provider_api_keys block exists)
+         if provider_keys and isinstance(provider_keys, dict):
+             env_var_name = provider_keys.get(provider)
+
+         # 2) Fallback: use the global api-key if there is no provider-specific one
+         if not env_var_name and llm_config.get("api-key"):
+             env_var_name = llm_config["api-key"]
+
+         if not env_var_name:
+             raise IAToolkitException(
+                 IAToolkitException.ErrorType.API_KEY,
+                 f"Company '{company_short_name}' doesn't have an API key configured "
+                 f"for provider '{provider}'."
+             )
+
+         api_key_value = os.getenv(env_var_name, "")
+
+         if not api_key_value:
+             raise IAToolkitException(
+                 IAToolkitException.ErrorType.API_KEY,
+                 f"Environment variable '{env_var_name}' for company '{company_short_name}' "
+                 f"and provider '{provider}' is not set or is empty."
+             )
+
+         return api_key_value
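
The key-resolution logic in _get_api_key_from_config stores environment-variable names in the company configuration and reads the actual secrets from the process environment. A hypothetical configuration block and lookup, with all names invented for illustration:

    # Hypothetical shape of the "llm" configuration block; values below name env vars, not secrets.
    llm_config = {
        "provider_api_keys": {          # 1) preferred: per-provider env var names
            "openai": "ACME_OPENAI_KEY",
            "deepseek": "ACME_DEEPSEEK_KEY",
        },
        "api-key": "ACME_LLM_KEY",      # 2) fallback: global env var name (backwards compatible)
    }
    # For provider "openai" the proxy would read os.getenv("ACME_OPENAI_KEY");
    # for a provider absent from provider_api_keys it falls back to os.getenv("ACME_LLM_KEY").

    # Routing then happens purely by model name, e.g. (illustrative call on an injector-built proxy):
    # response = llm_proxy.create_response(
    #     company_short_name="acme",
    #     model="gpt-4o",               # ModelRegistry maps this to the "openai" provider
    #     input=[{"role": "user", "content": "hello"}],
    # )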
--- /dev/null
+++ b/iatoolkit/infra/llm_response.py
@@ -0,0 +1,45 @@
+ # Copyright (c) 2024 Fernando Libedinsky
+ # Product: IAToolkit
+ #
+ # IAToolkit is open source software.
+
+ from dataclasses import dataclass
+ from typing import Dict, List, Any, Optional
+
+ @dataclass
+ class ToolCall:
+     """Represents a tool call in the common format."""
+     call_id: str
+     type: str  # 'function_call'
+     name: str
+     arguments: str
+
+
+ @dataclass
+ class Usage:
+     """Token usage information in the common format."""
+     input_tokens: int
+     output_tokens: int
+     total_tokens: int
+
+
+ @dataclass
+ class LLMResponse:
+     """Common structure for responses from different LLMs."""
+     id: str
+     model: str
+     status: str  # 'completed', 'failed', etc.
+     output_text: str
+     output: List[ToolCall]  # list of tool calls
+     usage: Usage
+     reasoning_content: Optional[str] = None  # optional field for chain-of-thought content
+
+     def __post_init__(self):
+         """Ensure output is a list and reasoning_content is a string."""
+         if self.output is None:
+             self.output = []
+
+         if self.reasoning_content is None:
+             self.reasoning_content = ""
+
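
Because every adapter returns this same dataclass, downstream services can treat providers interchangeably. A small illustrative construction (all values made up):

    from iatoolkit.infra.llm_response import LLMResponse, ToolCall, Usage

    resp = LLMResponse(
        id="resp_123",
        model="gpt-4o",
        status="completed",
        output_text="The invoice total is 42.",
        output=[ToolCall(call_id="call_1", type="function_call",
                         name="get_invoice", arguments='{"invoice_id": 7}')],
        usage=Usage(input_tokens=120, output_tokens=35, total_tokens=155),
    )
    assert resp.reasoning_content == ""   # __post_init__ normalizes the missing value to ""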
--- /dev/null
+++ b/iatoolkit/infra/redis_session_manager.py
@@ -0,0 +1,122 @@
+ # Copyright (c) 2024 Fernando Libedinsky
+ # Product: IAToolkit
+ #
+ # IAToolkit is open source software.
+
+ import logging
+ import os
+ import redis
+ import json
+ from urllib.parse import urlparse
+
+
+ class RedisSessionManager:
+     """
+     Session manager that uses Redis directly for persistent data such as llm_history.
+     Kept separate from the Flask session to retain full control over the data lifecycle.
+     """
+     _client = None
+
+     @classmethod
+     def _get_client(cls):
+         if cls._client is None:
+             # Use exactly the same parameters as Flask-Session
+             url = urlparse(os.environ.get("REDIS_URL"))
+             cls._client = redis.Redis(
+                 host=url.hostname,
+                 port=url.port,
+                 password=url.password,
+                 ssl=(url.scheme == "rediss"),
+                 ssl_cert_reqs=None,
+                 decode_responses=True  # important so values come back as strings
+             )
+             # verify the connection
+             cls._client.ping()
+             info = cls._client.info(section="server")  # sanity read; value is not used
+             db = cls._client.connection_pool.connection_kwargs.get('db', 0)  # value is not used
+         return cls._client
+
+     @classmethod
+     def set(cls, key: str, value: str, **kwargs):
+         """
+         Flexible set method that passes optional arguments (such as ex, nx)
+         straight through to the Redis client.
+         """
+         client = cls._get_client()
+         # Forward any extra keyword arguments to the real client
+         result = client.set(key, value, **kwargs)
+         return result
+
+     @classmethod
+     def get(cls, key: str, default: str = ""):
+         client = cls._get_client()
+         value = client.get(key)
+         result = value if value is not None else default
+         return result
+
+     @classmethod
+     def hset(cls, key: str, field: str, value: str):
+         """
+         Set a field in a Redis hash.
+         """
+         client = cls._get_client()
+         return client.hset(key, field, value)
+
+     @classmethod
+     def hget(cls, key: str, field: str):
+         """
+         Get the value of a field from a Redis hash.
+         Returns None if the key or the field does not exist.
+         """
+         client = cls._get_client()
+         return client.hget(key, field)
+
+     @classmethod
+     def hdel(cls, key: str, *fields):
+         """
+         Delete one or more fields from a Redis hash.
+         """
+         client = cls._get_client()
+         return client.hdel(key, *fields)
+
+     @classmethod
+     def pipeline(cls):
+         """
+         Start a Redis transaction (pipeline).
+         """
+         client = cls._get_client()
+         return client.pipeline()
+
+
+     @classmethod
+     def remove(cls, key: str):
+         client = cls._get_client()
+         result = client.delete(key)
+         return result
+
+     @classmethod
+     def exists(cls, key: str) -> bool:
+         """Check whether a key exists in Redis."""
+         client = cls._get_client()
+         # Redis EXISTS returns an integer (0 or 1); convert it to a boolean.
+         return bool(client.exists(key))
+
+     @classmethod
+     def set_json(cls, key: str, value: dict, ex: int = None):
+         json_str = json.dumps(value)
+         return cls.set(key, json_str, ex=ex)
+
+     @classmethod
+     def get_json(cls, key: str, default: dict = None):
+         if default is None:
+             default = {}
+
+         json_str = cls.get(key, "")
+         if not json_str:
+             return default
+
+         try:
+             return json.loads(json_str)
+         except json.JSONDecodeError:
+             logging.warning(f"[RedisSessionManager] Invalid JSON in key '{key}': {json_str}")
+             return default
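
A short usage sketch for the JSON helpers, assuming REDIS_URL points at a reachable Redis instance; the key name, URL, and TTL below are examples, not values defined by the package:

    import os
    os.environ.setdefault("REDIS_URL", "redis://localhost:6379/0")    # example URL only

    from iatoolkit.infra.redis_session_manager import RedisSessionManager

    RedisSessionManager.set_json(
        "llm_history:user_42",
        {"messages": [{"role": "user", "content": "hi"}]},
        ex=3600,                                                      # expire after one hour
    )
    history = RedisSessionManager.get_json("llm_history:user_42")     # {} if the key is missing or invalid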