iatoolkit 0.71.4__py3-none-any.whl → 1.4.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- iatoolkit/__init__.py +19 -7
- iatoolkit/base_company.py +1 -71
- iatoolkit/cli_commands.py +9 -21
- iatoolkit/common/exceptions.py +2 -0
- iatoolkit/common/interfaces/__init__.py +0 -0
- iatoolkit/common/interfaces/asset_storage.py +34 -0
- iatoolkit/common/interfaces/database_provider.py +38 -0
- iatoolkit/common/model_registry.py +159 -0
- iatoolkit/common/routes.py +53 -32
- iatoolkit/common/util.py +17 -12
- iatoolkit/company_registry.py +55 -14
- iatoolkit/{iatoolkit.py → core.py} +102 -72
- iatoolkit/infra/{mail_app.py → brevo_mail_app.py} +15 -37
- iatoolkit/infra/llm_providers/__init__.py +0 -0
- iatoolkit/infra/llm_providers/deepseek_adapter.py +278 -0
- iatoolkit/infra/{gemini_adapter.py → llm_providers/gemini_adapter.py} +11 -17
- iatoolkit/infra/{openai_adapter.py → llm_providers/openai_adapter.py} +41 -7
- iatoolkit/infra/llm_proxy.py +235 -134
- iatoolkit/infra/llm_response.py +5 -0
- iatoolkit/locales/en.yaml +134 -4
- iatoolkit/locales/es.yaml +293 -162
- iatoolkit/repositories/database_manager.py +92 -22
- iatoolkit/repositories/document_repo.py +7 -0
- iatoolkit/repositories/filesystem_asset_repository.py +36 -0
- iatoolkit/repositories/llm_query_repo.py +36 -22
- iatoolkit/repositories/models.py +86 -95
- iatoolkit/repositories/profile_repo.py +64 -13
- iatoolkit/repositories/vs_repo.py +31 -28
- iatoolkit/services/auth_service.py +1 -1
- iatoolkit/services/branding_service.py +1 -1
- iatoolkit/services/company_context_service.py +96 -39
- iatoolkit/services/configuration_service.py +329 -67
- iatoolkit/services/dispatcher_service.py +51 -227
- iatoolkit/services/document_service.py +10 -1
- iatoolkit/services/embedding_service.py +9 -6
- iatoolkit/services/excel_service.py +50 -2
- iatoolkit/services/file_processor_service.py +0 -5
- iatoolkit/services/history_manager_service.py +208 -0
- iatoolkit/services/jwt_service.py +1 -1
- iatoolkit/services/knowledge_base_service.py +412 -0
- iatoolkit/services/language_service.py +8 -2
- iatoolkit/services/license_service.py +82 -0
- iatoolkit/{infra/llm_client.py → services/llm_client_service.py} +42 -29
- iatoolkit/services/load_documents_service.py +18 -47
- iatoolkit/services/mail_service.py +171 -25
- iatoolkit/services/profile_service.py +69 -36
- iatoolkit/services/{prompt_manager_service.py → prompt_service.py} +136 -25
- iatoolkit/services/query_service.py +229 -203
- iatoolkit/services/sql_service.py +116 -34
- iatoolkit/services/tool_service.py +246 -0
- iatoolkit/services/user_feedback_service.py +18 -6
- iatoolkit/services/user_session_context_service.py +121 -51
- iatoolkit/static/images/iatoolkit_core.png +0 -0
- iatoolkit/static/images/iatoolkit_logo.png +0 -0
- iatoolkit/static/js/chat_feedback_button.js +1 -1
- iatoolkit/static/js/chat_help_content.js +4 -4
- iatoolkit/static/js/chat_main.js +61 -9
- iatoolkit/static/js/chat_model_selector.js +227 -0
- iatoolkit/static/js/chat_onboarding_button.js +1 -1
- iatoolkit/static/js/chat_reload_button.js +4 -1
- iatoolkit/static/styles/chat_iatoolkit.css +59 -3
- iatoolkit/static/styles/chat_public.css +28 -0
- iatoolkit/static/styles/documents.css +598 -0
- iatoolkit/static/styles/landing_page.css +223 -7
- iatoolkit/static/styles/llm_output.css +34 -1
- iatoolkit/system_prompts/__init__.py +0 -0
- iatoolkit/system_prompts/query_main.prompt +28 -3
- iatoolkit/system_prompts/sql_rules.prompt +47 -12
- iatoolkit/templates/_company_header.html +30 -5
- iatoolkit/templates/_login_widget.html +3 -3
- iatoolkit/templates/base.html +13 -0
- iatoolkit/templates/chat.html +45 -3
- iatoolkit/templates/forgot_password.html +3 -2
- iatoolkit/templates/onboarding_shell.html +1 -2
- iatoolkit/templates/signup.html +3 -0
- iatoolkit/views/base_login_view.py +8 -3
- iatoolkit/views/change_password_view.py +1 -1
- iatoolkit/views/chat_view.py +76 -0
- iatoolkit/views/forgot_password_view.py +9 -4
- iatoolkit/views/history_api_view.py +3 -3
- iatoolkit/views/home_view.py +4 -2
- iatoolkit/views/init_context_api_view.py +1 -1
- iatoolkit/views/llmquery_api_view.py +4 -3
- iatoolkit/views/load_company_configuration_api_view.py +49 -0
- iatoolkit/views/{file_store_api_view.py → load_document_api_view.py} +15 -11
- iatoolkit/views/login_view.py +25 -8
- iatoolkit/views/logout_api_view.py +10 -2
- iatoolkit/views/prompt_api_view.py +1 -1
- iatoolkit/views/rag_api_view.py +216 -0
- iatoolkit/views/root_redirect_view.py +22 -0
- iatoolkit/views/signup_view.py +12 -4
- iatoolkit/views/static_page_view.py +27 -0
- iatoolkit/views/users_api_view.py +33 -0
- iatoolkit/views/verify_user_view.py +1 -1
- iatoolkit-1.4.2.dist-info/METADATA +268 -0
- iatoolkit-1.4.2.dist-info/RECORD +133 -0
- iatoolkit-1.4.2.dist-info/licenses/LICENSE_COMMUNITY.md +15 -0
- iatoolkit/repositories/tasks_repo.py +0 -52
- iatoolkit/services/history_service.py +0 -37
- iatoolkit/services/search_service.py +0 -55
- iatoolkit/services/tasks_service.py +0 -188
- iatoolkit/templates/about.html +0 -13
- iatoolkit/templates/index.html +0 -145
- iatoolkit/templates/login_simulation.html +0 -45
- iatoolkit/views/external_login_view.py +0 -73
- iatoolkit/views/index_view.py +0 -14
- iatoolkit/views/login_simulation_view.py +0 -93
- iatoolkit/views/tasks_api_view.py +0 -72
- iatoolkit/views/tasks_review_api_view.py +0 -55
- iatoolkit-0.71.4.dist-info/METADATA +0 -276
- iatoolkit-0.71.4.dist-info/RECORD +0 -122
- {iatoolkit-0.71.4.dist-info → iatoolkit-1.4.2.dist-info}/WHEEL +0 -0
- {iatoolkit-0.71.4.dist-info → iatoolkit-1.4.2.dist-info}/licenses/LICENSE +0 -0
- {iatoolkit-0.71.4.dist-info → iatoolkit-1.4.2.dist-info}/top_level.txt +0 -0
iatoolkit/services/user_session_context_service.py
CHANGED

@@ -15,55 +15,101 @@ class UserSessionContextService:
         Esto mejora la atomicidad y la eficiencia.
         """

-    def _get_session_key(self, company_short_name: str, user_identifier: str) -> Optional[str]:
+    def _get_session_key(self, company_short_name: str, user_identifier: str, model: str = None) -> Optional[str]:
         """Devuelve la clave única de Redis para el Hash de sesión del usuario."""
         user_identifier = (user_identifier or "").strip()
         if not company_short_name or not user_identifier:
             return None
-        return f"session:{company_short_name}/{user_identifier}"

-
-
-
+        model_key = "" if not model else f"-{model}"
+        return f"session:{company_short_name}/{user_identifier}{model_key}"
+
+    def clear_all_context(self, company_short_name: str, user_identifier: str, model: str = None):
+        """Clears LLM-related context for a user (history and response IDs), preserving profile_data."""
+        session_key = self._get_session_key(company_short_name, user_identifier, model=model)
         if session_key:
-            # RedisSessionManager.remove(session_key)
             # 'profile_data' should not be deleted
-            RedisSessionManager.hdel(session_key,
-            RedisSessionManager.hdel(session_key,
-            RedisSessionManager.hdel(session_key,
+            RedisSessionManager.hdel(session_key, "context_version")
+            RedisSessionManager.hdel(session_key, "context_history")
+            RedisSessionManager.hdel(session_key, "last_response_id")

-    def clear_llm_history(self, company_short_name: str, user_identifier: str):
-        """
-        session_key = self._get_session_key(company_short_name, user_identifier)
+    def clear_llm_history(self, company_short_name: str, user_identifier: str, model: str = None):
+        """Clears only LLM history fields (last_response_id and context_history)."""
+        session_key = self._get_session_key(company_short_name, user_identifier, model=model)
         if session_key:
-            RedisSessionManager.hdel(session_key,
+            RedisSessionManager.hdel(session_key, "last_response_id", "context_history")

-    def get_last_response_id(self, company_short_name: str, user_identifier: str) -> Optional[str]:
-
+    def get_last_response_id(self, company_short_name: str, user_identifier: str, model: str = None) -> Optional[str]:
+        """Returns the last LLM response ID for this user/model combination."""
+        session_key = self._get_session_key(company_short_name, user_identifier, model=model)
         if not session_key:
             return None
-        return RedisSessionManager.hget(session_key,
-
-    def save_last_response_id(self,
-
+        return RedisSessionManager.hget(session_key, "last_response_id")
+
+    def save_last_response_id(self,
+                              company_short_name: str,
+                              user_identifier: str,
+                              response_id: str,
+                              model: str = None,
+                              ):
+        """Persists the last LLM response ID for this user/model combination."""
+        session_key = self._get_session_key(company_short_name, user_identifier, model=model)
         if session_key:
-            RedisSessionManager.hset(session_key,
-
-    def
-
+            RedisSessionManager.hset(session_key, "last_response_id", response_id)
+
+    def get_initial_response_id(self,
+                                company_short_name: str,
+                                user_identifier: str,
+                                model: str = None,
+                                ) -> Optional[str]:
+        """
+        Returns the initial LLM response ID for this user/model combination.
+        This ID represents the state right after the context was set on the LLM.
+        """
+        session_key = self._get_session_key(company_short_name, user_identifier, model=model)
+        if not session_key:
+            return None
+        return RedisSessionManager.hget(session_key, "initial_response_id")
+
+    def save_initial_response_id(self,
+                                 company_short_name: str,
+                                 user_identifier: str,
+                                 response_id: str,
+                                 model: str = None,
+                                 ):
+        """Persists the initial LLM response ID for this user/model combination."""
+        session_key = self._get_session_key(company_short_name, user_identifier, model=model)
+        if session_key:
+            RedisSessionManager.hset(session_key, "initial_response_id", response_id)
+
+    def save_context_history(
+            self,
+            company_short_name: str,
+            user_identifier: str,
+            context_history: List[Dict],
+            model: str = None,
+    ):
+        """Serializes and stores the context history for this user/model combination."""
+        session_key = self._get_session_key(company_short_name, user_identifier, model=model)
         if session_key:
             try:
                 history_json = json.dumps(context_history)
-                RedisSessionManager.hset(session_key,
+                RedisSessionManager.hset(session_key, "context_history", history_json)
             except (TypeError, ValueError) as e:
-                logging.error(f"Error
-
-    def get_context_history(
-
+                logging.error(f"Error serializing context_history for {session_key}: {e}")
+
+    def get_context_history(
+            self,
+            company_short_name: str,
+            user_identifier: str,
+            model: str = None,
+    ) -> Optional[List[Dict]]:
+        """Reads and deserializes the context history for this user/model combination."""
+        session_key = self._get_session_key(company_short_name, user_identifier, model=model)
         if not session_key:
-            return
+            return []

-        history_json = RedisSessionManager.hget(session_key,
+        history_json = RedisSessionManager.hget(session_key, "context_history")
         if not history_json:
             return []

@@ -95,37 +141,61 @@ class UserSessionContextService:
         except json.JSONDecodeError:
             return {}

-    def save_context_version(self,
-
+    def save_context_version(self,
+                             company_short_name: str,
+                             user_identifier: str,
+                             version: str,
+                             model: str = None,
+                             ):
+        """Saves the context version for this user/model combination."""
+        session_key = self._get_session_key(company_short_name, user_identifier, model=model)
         if session_key:
-            RedisSessionManager.hset(session_key,
-
-    def get_context_version(self,
-
+            RedisSessionManager.hset(session_key, "context_version", version)
+
+    def get_context_version(self,
+                            company_short_name: str,
+                            user_identifier: str,
+                            model: str = None,
+                            ) -> Optional[str]:
+        """Returns the context version for this user/model combination."""
+        session_key = self._get_session_key(company_short_name, user_identifier, model=model)
         if not session_key:
             return None
-        return RedisSessionManager.hget(session_key,
-
-    def save_prepared_context(self,
-
-
+        return RedisSessionManager.hget(session_key, "context_version")
+
+    def save_prepared_context(self,
+                              company_short_name: str,
+                              user_identifier: str,
+                              context: str,
+                              version: str,
+                              model: str = None,
+                              ):
+        """Stores a pre-rendered system context and its version, ready to be sent to the LLM."""
+        session_key = self._get_session_key(company_short_name, user_identifier, model=model)
         if session_key:
-            RedisSessionManager.hset(session_key,
-            RedisSessionManager.hset(session_key,
-
-    def get_and_clear_prepared_context(self,
-
-
+            RedisSessionManager.hset(session_key, "prepared_context", context)
+            RedisSessionManager.hset(session_key, "prepared_context_version", version)
+
+    def get_and_clear_prepared_context(self,
+                                       company_short_name: str,
+                                       user_identifier: str,
+                                       model: str = None,
+                                       ) -> tuple:
+        """
+        Atomically retrieves the prepared context and its version and then deletes them
+        to guarantee they are consumed only once.
+        """
+        session_key = self._get_session_key(company_short_name, user_identifier, model=model)
         if not session_key:
             return None, None

         pipe = RedisSessionManager.pipeline()
-        pipe.hget(session_key,
-        pipe.hget(session_key,
-        pipe.hdel(session_key,
+        pipe.hget(session_key, "prepared_context")
+        pipe.hget(session_key, "prepared_context_version")
+        pipe.hdel(session_key, "prepared_context", "prepared_context_version")
         results = pipe.execute()

-        # results[0]
+        # results[0] is the context, results[1] is the version
         return (results[0], results[1]) if results else (None, None)

     # --- Métodos de Bloqueo ---
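The net effect of this hunk: every piece of per-user LLM session state (last and initial response IDs, context history, context version, prepared context) is now partitioned by model, because `_get_session_key` appends a `-{model}` suffix to the Redis hash key and every accessor gained an optional `model` argument. A minimal usage sketch in Python follows; the method names and signatures come from the diff above, while the no-argument constructor, the placeholder company/user/model values, and the availability of a Redis backend behind `RedisSessionManager` are assumptions.

```python
# Illustrative sketch only. Signatures come from the diff; construction details do not.
service = UserSessionContextService()  # assumed: real wiring may inject dependencies

company, user = "acme", "jane@example.com"  # placeholder identifiers

# Each model now writes to its own hash, e.g. "session:acme/jane@example.com-gpt-4o",
# while calls without a model keep the legacy "session:acme/jane@example.com" key.
service.save_last_response_id(company, user, response_id="resp_123", model="gpt-4o")
service.save_context_history(company, user,
                             context_history=[{"role": "user", "content": "hi"}],
                             model="gpt-4o")

# Reads are scoped the same way, so state never leaks between models.
last_id = service.get_last_response_id(company, user, model="gpt-4o")
history = service.get_context_history(company, user, model="gpt-4o")

# Clearing one model's history leaves other models (and profile_data) untouched.
service.clear_llm_history(company, user, model="gpt-4o")

# The prepared context is read and deleted in one pipeline, so it is consumed only once.
context, version = service.get_and_clear_prepared_context(company, user, model="gpt-4o")
```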
iatoolkit/static/images/iatoolkit_core.png
Binary file

iatoolkit/static/images/iatoolkit_logo.png
Binary file
iatoolkit/static/js/chat_feedback_button.js
CHANGED

@@ -53,7 +53,7 @@ $('#feedbackModal').on('hidden.bs.modal', function () {
    $('.star').removeClass('active');
});

-//
+// Tool for the star rating system
window.gfg = function (rating) {
    $('.star').removeClass('active');
    $('.star').each(function (index) {
iatoolkit/static/js/chat_help_content.js
CHANGED

@@ -59,7 +59,7 @@ $(document).ready(function () {
                cat.questions.forEach(q => contentHtml += `<li>${q}</li>`);
                contentHtml += `</ul>`;
            });
-           accordionHtml += createAccordionItem('examples', '
+           accordionHtml += createAccordionItem('examples', 'Sample questions', contentHtml, true);
        }

        if (data.data_sources) {
@@ -68,7 +68,7 @@ $(document).ready(function () {
                contentHtml += `<dt>${p.source}</dt><dd>${p.description}</dd>`;
            });
            contentHtml += `</dl>`;
-           accordionHtml += createAccordionItem('sources', '
+           accordionHtml += createAccordionItem('sources', 'Data available', contentHtml );
        }

        if (data.best_practices) {
@@ -81,7 +81,7 @@ $(document).ready(function () {
                contentHtml += `</dd>`;
            });
            contentHtml += `</dl>`;
-           accordionHtml += createAccordionItem('practices', '
+           accordionHtml += createAccordionItem('practices', 'Best practices', contentHtml);
        }

        if (data.capabilities) {
@@ -89,7 +89,7 @@ $(document).ready(function () {
            contentHtml += `<div class="col-md-6"><h6 class="fw-bold">Puede hacer:</h6><ul>${data.capabilities.can_do.map(item => `<li>${item}</li>`).join('')}</ul></div>`;
            contentHtml += `<div class="col-md-6"><h6 class="fw-bold">No puede hacer:</h6><ul>${data.capabilities.cannot_do.map(item => `<li>${item}</li>`).join('')}</ul></div>`;
            contentHtml += `</div>`;
-           accordionHtml += createAccordionItem('capabilities', '
+           accordionHtml += createAccordionItem('capabilities', 'Capabilities and limits', contentHtml);
        }

        container.html(accordionHtml);
iatoolkit/static/js/chat_main.js
CHANGED

@@ -110,17 +110,51 @@ const handleChatMessage = async function () {
            prompt_name: promptName,
            client_data: clientData,
            files: filesBase64.map(f => ({ filename: f.name, content: f.base64 })),
-           user_identifier: window.user_identifier
+           user_identifier: window.user_identifier,
+           model: (window.currentLlmModel || window.defaultLlmModel || '')
+
        };

        const responseData = await callToolkit("/api/llm_query", data, "POST");
        if (responseData && responseData.answer) {
-
-
+           // CAMBIO: contenedor principal para la respuesta del bot
+           const botMessageContainer = $('<div>').addClass('bot-message-container');
+
+           // 1. Si hay reasoning_content, agregar el acordeón colapsable
+           if (responseData.reasoning_content) {
+               const uniqueId = 'reasoning-' + Date.now(); // ID único para el collapse
+
+               const reasoningBlock = $(`
+                   <div class="reasoning-block">
+                       <button class="reasoning-toggle btn btn-sm btn-link text-decoration-none p-0"
+                               type="button"
+                               data-bs-toggle="collapse"
+                               data-bs-target="#${uniqueId}"
+                               aria-expanded="false"
+                               aria-controls="${uniqueId}">
+                           <i class="bi bi-lightbulb me-1"></i> ${t_js('show_reasoning')}
+                       </button>
+
+                       <div class="collapse mt-2" id="${uniqueId}">
+                           <div class="reasoning-card">
+                               ${responseData.reasoning_content}
+                           </div>
+                       </div>
+                   </div>
+               `);
+               botMessageContainer.append(reasoningBlock);
+           }
+
+           // 2. Agregar la respuesta final
+           const answerSection = $('<div>').addClass('answer-section llm-output').append(responseData.answer);
+           botMessageContainer.append(answerSection);
+
+           // 3. Mostrar el contenedor completo
+           displayBotMessage(botMessageContainer);
+
        }
    } catch (error) {
        if (error.name === 'AbortError') {
-           console.log('Petición abortada por el usuario.');

            // Usando jQuery estándar para construir el elemento ---
            const icon = $('<i>').addClass('bi bi-stop-circle me-2'); // Icono sin "fill" para un look más ligero

@@ -208,7 +242,12 @@ const toggleSendStopButtons = function (showStop) {
 * @returns {Promise<object|null>} The response data or null on error.
 */
const callToolkit = async function(apiPath, data, method, timeoutMs = 500000) {
-
+    // normalize the url for avoiding double //
+    const base = (window.iatoolkit_base_url || '').replace(/\/+$/, '');
+    const company = (window.companyShortName || '').replace(/^\/+|\/+$/g, '');
+    const path = apiPath.startsWith('/') ? apiPath : `/${apiPath}`;
+    const url = `${base}/${company}${path}`;
+

    abortController = new AbortController();
    const timeoutId = setTimeout(() => abortController.abort(), timeoutMs);

@@ -230,22 +269,35 @@ const callToolkit = async function(apiPath, data, method, timeoutMs = 500000) {
        }
        const response = await fetch(url, fetchOptions);
        clearTimeout(timeoutId);
+
+       // answer is NOT OK (status != 200)
        if (!response.ok) {
            try {
                // Intentamos leer el error como JSON, que es el formato esperado de nuestra API.
                const errorData = await response.json();
-
-
-
-
+
+               // if it's a iatoolkit error (409 o 400 with a message), shot it on the chat
+               if (errorData && (errorData.error_message || errorData.error)) {
+                   const errorMessage = errorData.error_message || errorData.error || t_js('unknown_server_error');
+                   const errorIcon = '<i class="bi bi-exclamation-triangle"></i>';
+                   const endpointError = $('<div>').addClass('error-section').html(errorIcon + `<p>${errorMessage}</p>`);
+                   displayBotMessage(endpointError);
+               } else {
+                   // if there is not message, we show a generic error message
+                   throw new Error(`Server error: ${response.status}`);
+               }
            } catch (e) {
                // Si response.json() falla, es porque el cuerpo no era JSON (ej. un 502 con HTML).
                // Mostramos un error genérico y más claro para el usuario.
                const errorMessage = `Error de comunicación con el servidor (${response.status}). Por favor, intente de nuevo más tarde.`;
                toastr.error(errorMessage);
            }
+
+           // stop the flow on the calling function
            return null;
        }
+
+       // if the answer is OK
        return await response.json();
    } catch (error) {
        clearTimeout(timeoutId);
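Taken together, these chat_main.js hunks change the client contract in three ways: each `/api/llm_query` POST now carries a `model` field, a `reasoning_content` field in the response is rendered as a collapsible block above the answer, and non-2xx JSON bodies are surfaced through their `error_message`/`error` fields. A hedged Python sketch of the same call made from outside the browser follows; the base URL, company short name, question field name, and any required auth headers or cookies are assumptions, while the path, payload keys, and response keys are taken from the diff.

```python
# Illustrative sketch only: mirrors the request the chat UI builds above.
import requests

BASE_URL = "https://iatoolkit.example.com"   # hypothetical deployment URL
COMPANY = "acme"                             # hypothetical company_short_name

payload = {
    "question": "How many open orders do we have?",  # assumed field name, built earlier in the JS
    "prompt_name": "query_main",                     # assumed value
    "client_data": {},
    "files": [],
    "user_identifier": "jane@example.com",
    "model": "gpt-4o",                               # same field the JS now sends
}

# The UI builds the URL as {base}/{company}{path}; auth is omitted here.
resp = requests.post(f"{BASE_URL}/{COMPANY}/api/llm_query", json=payload, timeout=500)
body = resp.json()  # the UI tolerates non-JSON error bodies; this sketch does not

if resp.ok and body.get("answer"):
    if body.get("reasoning_content"):
        print("reasoning:", body["reasoning_content"])
    print(body["answer"])
else:
    # The UI looks for error_message / error in non-2xx JSON bodies.
    print("error:", body.get("error_message") or body.get("error") or resp.status_code)
```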
iatoolkit/static/js/chat_model_selector.js
ADDED

@@ -0,0 +1,227 @@
+// src/iatoolkit/static/js/chat_model_selector.js
+// Gestión del selector de modelo LLM en la barra superior.
+
+// Estado global del modelo actual (visible también para otros scripts)
+window.currentLlmModel = window.currentLlmModel || null;
+
+(function () {
+    /**
+     * Lee el modelo guardado en localStorage (si existe y es válido).
+     */
+    function loadStoredModelId() {
+        try {
+            const stored = localStorage.getItem('iatoolkit.selected_llm_model');
+            return stored || null;
+        } catch (e) {
+            return null;
+        }
+    }
+
+    /**
+     * Guarda el modelo seleccionado en localStorage para esta instancia de navegador.
+     * No es crítico: si falla, simplemente no persistimos.
+     */
+    function storeModelId(modelId) {
+        try {
+            if (!modelId) {
+                localStorage.removeItem('iatoolkit.selected_llm_model');
+            } else {
+                localStorage.setItem('iatoolkit.selected_llm_model', modelId);
+            }
+        } catch (e) {
+            // No hacemos nada: fallo silencioso
+        }
+    }
+
+    /**
+     * Devuelve la lista de modelos disponibles desde la variable global.
+     */
+    function getAvailableModels() {
+        const raw = window.availableLlmModels;
+        if (!Array.isArray(raw)) {
+            return [];
+        }
+        return raw.map(m => ({
+            id: m.id,
+            label: m.label || m.id,
+            description: m.description || ''
+        })).filter(m => !!m.id);
+    }
+
+    /**
+     * Inicializa el estado de currentLlmModel usando SIEMPRE la config de company.yaml:
+     * 1) defaultLlmModel (company.yaml)
+     * 2) si no existe o no está en la lista, usa el primer modelo disponible.
+     *
+     * No se lee nada de localStorage en este punto: cada apertura de chat
+     * arranca desde la configuración de la compañía.
+     */
+    function initCurrentModel() {
+        const models = getAvailableModels();
+        const defaultId = (window.defaultLlmModel || '').trim() || null;
+
+        let resolved = null;
+
+        if (defaultId && models.some(m => m.id === defaultId)) {
+            resolved = defaultId;
+        } else if (models.length > 0) {
+            resolved = models[0].id;
+        }
+
+        window.currentLlmModel = resolved;
+        return resolved;
+    }
+
+    /**
+     * Pinta la lista de modelos en el popup y marca el seleccionado.
+     */
+    function renderModelList() {
+        const listEl = document.getElementById('llm-model-list');
+        if (!listEl) return;
+
+        const models = getAvailableModels();
+        const activeId = window.currentLlmModel;
+        listEl.innerHTML = '';
+
+        if (!models.length) {
+            const emptyItem = document.createElement('div');
+            emptyItem.className = 'list-group-item small text-muted';
+            emptyItem.textContent = 'No hay modelos configurados.';
+            listEl.appendChild(emptyItem);
+            return;
+        }
+
+        models.forEach(model => {
+            const item = document.createElement('button');
+            item.type = 'button';
+            item.className = 'list-group-item list-group-item-action small';
+
+            const isActive = model.id === activeId;
+            if (isActive) {
+                item.classList.add('active');
+            }
+
+            item.innerHTML = `
+                <div class="d-flex justify-content-between align-items-center">
+                    <div>
+                        <div class="fw-semibold">${model.label}</div>
+                        ${model.description
+                            ? `<div class="text-muted" style="font-size: 0.8rem;">${model.description}</div>`
+                            : ''
+                        }
+                    </div>
+                    ${isActive ? '<i class="bi bi-check-lg ms-2"></i>' : ''}
+                </div>
+            `;
+
+            item.addEventListener('click', () => {
+                selectModel(model.id);
+            });
+
+            listEl.appendChild(item);
+        });
+    }
+
+    /**
+     * Actualiza el label del botón principal con el modelo actual.
+     */
+    function updateButtonLabel() {
+        const labelEl = document.getElementById('llm-model-button-label');
+        if (!labelEl) return;
+
+        const models = getAvailableModels();
+        const activeId = window.currentLlmModel;
+        const activeModel = models.find(m => m.id === activeId);
+
+        if (activeModel) {
+            labelEl.textContent = activeModel.label;
+        } else if (window.defaultLlmModel) {
+            labelEl.textContent = window.defaultLlmModel;
+        } else {
+            labelEl.textContent = 'Modelo IA';
+        }
+    }
+
+    /**
+     * Selecciona un modelo: actualiza estado global, UI y almacenamiento local.
+     */
+    function selectModel(modelId) {
+        if (!modelId) return;
+
+        const models = getAvailableModels();
+        const exists = models.some(m => m.id === modelId);
+        if (!exists) return;
+
+        window.currentLlmModel = modelId;
+        storeModelId(modelId);
+        updateButtonLabel();
+        renderModelList();
+        hidePopup();
+
+        if (typeof toastr !== 'undefined') {
+            toastr.info(`Modelo actualizado a "${models.find(m => m.id === modelId).label}".`);
+        }
+    }
+
+    /**
+     * Muestra/oculta el popup anclado al botón.
+     */
+    function togglePopup() {
+        const popup = document.getElementById('llm-model-popup');
+        const btn = document.getElementById('llm-model-button');
+        if (!popup || !btn) return;
+
+        const isVisible = popup.style.display === 'block';
+
+        if (isVisible) {
+            hidePopup();
+        } else {
+            const rect = btn.getBoundingClientRect();
+            popup.style.display = 'block';
+
+            // Posicionamos justo debajo del botón, alineado a la izquierda
+            popup.style.top = `${rect.bottom + window.scrollY + 4}px`;
+            popup.style.left = `${rect.left + window.scrollX}px`;
+        }
+    }
+
+    function hidePopup() {
+        const popup = document.getElementById('llm-model-popup');
+        if (popup) {
+            popup.style.display = 'none';
+        }
+    }
+
+    /**
+     * Cierra el popup si el usuario hace click fuera.
+     */
+    function setupOutsideClickHandler() {
+        document.addEventListener('click', (event) => {
+            const popup = document.getElementById('llm-model-popup');
+            const btn = document.getElementById('llm-model-button');
+            if (!popup || !btn) return;
+
+            if (popup.style.display !== 'block') return;
+
+            if (!popup.contains(event.target) && !btn.contains(event.target)) {
+                hidePopup();
+            }
+        });
+    }
+
+    document.addEventListener('DOMContentLoaded', () => {
+        // Inicializar estado inicial del modelo
+        initCurrentModel();
+        updateButtonLabel();
+        renderModelList();
+        setupOutsideClickHandler();
+
+        const btn = document.getElementById('llm-model-button');
+        if (btn) {
+            btn.addEventListener('click', (event) => {
+                event.stopPropagation();
+                togglePopup();
+            });
+        }
+    });
+})();
iatoolkit/static/js/chat_onboarding_button.js
CHANGED

@@ -45,7 +45,7 @@
        if (elTitle) elTitle.textContent = c.title || '';
        if (elText) elText.innerHTML = c.text || '';
        if (elExample && c.example) {
-           elExample.innerHTML = ('
+           elExample.innerHTML = ('Example ' + ': ' + c.example) || '';
        }
        else
            elExample.innerHTML = '';
iatoolkit/static/js/chat_reload_button.js
CHANGED

@@ -18,7 +18,10 @@ $(document).ready(function () {

    // 2. prepare the api parameters
    const apiPath = '/api/init-context';
-   const payload = {
+   const payload = {
+       'user_identifier': window.user_identifier,
+       'model': (window.currentLlmModel || window.defaultLlmModel || '')
+   };

    // 3. make the call to callToolkit
    const data = await callToolkit(apiPath, payload, 'POST');