iatoolkit 0.91.1__py3-none-any.whl → 1.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (71)
  1. iatoolkit/__init__.py +6 -4
  2. iatoolkit/base_company.py +0 -16
  3. iatoolkit/cli_commands.py +3 -14
  4. iatoolkit/common/exceptions.py +1 -0
  5. iatoolkit/common/interfaces/__init__.py +0 -0
  6. iatoolkit/common/interfaces/asset_storage.py +34 -0
  7. iatoolkit/common/interfaces/database_provider.py +43 -0
  8. iatoolkit/common/model_registry.py +159 -0
  9. iatoolkit/common/routes.py +47 -5
  10. iatoolkit/common/util.py +32 -13
  11. iatoolkit/company_registry.py +5 -0
  12. iatoolkit/core.py +51 -20
  13. iatoolkit/infra/connectors/file_connector_factory.py +1 -0
  14. iatoolkit/infra/connectors/s3_connector.py +4 -2
  15. iatoolkit/infra/llm_providers/__init__.py +0 -0
  16. iatoolkit/infra/llm_providers/deepseek_adapter.py +278 -0
  17. iatoolkit/infra/{gemini_adapter.py → llm_providers/gemini_adapter.py} +11 -17
  18. iatoolkit/infra/{openai_adapter.py → llm_providers/openai_adapter.py} +41 -7
  19. iatoolkit/infra/llm_proxy.py +235 -134
  20. iatoolkit/infra/llm_response.py +5 -0
  21. iatoolkit/locales/en.yaml +158 -2
  22. iatoolkit/locales/es.yaml +158 -0
  23. iatoolkit/repositories/database_manager.py +52 -47
  24. iatoolkit/repositories/document_repo.py +7 -0
  25. iatoolkit/repositories/filesystem_asset_repository.py +36 -0
  26. iatoolkit/repositories/llm_query_repo.py +2 -0
  27. iatoolkit/repositories/models.py +72 -79
  28. iatoolkit/repositories/profile_repo.py +59 -3
  29. iatoolkit/repositories/vs_repo.py +22 -24
  30. iatoolkit/services/company_context_service.py +126 -53
  31. iatoolkit/services/configuration_service.py +299 -73
  32. iatoolkit/services/dispatcher_service.py +21 -3
  33. iatoolkit/services/file_processor_service.py +0 -5
  34. iatoolkit/services/history_manager_service.py +43 -24
  35. iatoolkit/services/knowledge_base_service.py +425 -0
  36. iatoolkit/{infra/llm_client.py → services/llm_client_service.py} +38 -29
  37. iatoolkit/services/load_documents_service.py +26 -48
  38. iatoolkit/services/profile_service.py +32 -4
  39. iatoolkit/services/prompt_service.py +32 -30
  40. iatoolkit/services/query_service.py +51 -26
  41. iatoolkit/services/sql_service.py +122 -74
  42. iatoolkit/services/tool_service.py +26 -11
  43. iatoolkit/services/user_session_context_service.py +115 -63
  44. iatoolkit/static/js/chat_main.js +44 -4
  45. iatoolkit/static/js/chat_model_selector.js +227 -0
  46. iatoolkit/static/js/chat_onboarding_button.js +1 -1
  47. iatoolkit/static/js/chat_reload_button.js +4 -1
  48. iatoolkit/static/styles/chat_iatoolkit.css +58 -2
  49. iatoolkit/static/styles/llm_output.css +34 -1
  50. iatoolkit/system_prompts/query_main.prompt +26 -2
  51. iatoolkit/templates/base.html +13 -0
  52. iatoolkit/templates/chat.html +45 -2
  53. iatoolkit/templates/onboarding_shell.html +0 -1
  54. iatoolkit/views/base_login_view.py +7 -2
  55. iatoolkit/views/chat_view.py +76 -0
  56. iatoolkit/views/configuration_api_view.py +163 -0
  57. iatoolkit/views/load_document_api_view.py +14 -10
  58. iatoolkit/views/login_view.py +8 -3
  59. iatoolkit/views/rag_api_view.py +216 -0
  60. iatoolkit/views/users_api_view.py +33 -0
  61. {iatoolkit-0.91.1.dist-info → iatoolkit-1.7.0.dist-info}/METADATA +4 -4
  62. {iatoolkit-0.91.1.dist-info → iatoolkit-1.7.0.dist-info}/RECORD +66 -58
  63. iatoolkit/repositories/tasks_repo.py +0 -52
  64. iatoolkit/services/search_service.py +0 -55
  65. iatoolkit/services/tasks_service.py +0 -188
  66. iatoolkit/views/tasks_api_view.py +0 -72
  67. iatoolkit/views/tasks_review_api_view.py +0 -55
  68. {iatoolkit-0.91.1.dist-info → iatoolkit-1.7.0.dist-info}/WHEEL +0 -0
  69. {iatoolkit-0.91.1.dist-info → iatoolkit-1.7.0.dist-info}/licenses/LICENSE +0 -0
  70. {iatoolkit-0.91.1.dist-info → iatoolkit-1.7.0.dist-info}/licenses/LICENSE_COMMUNITY.md +0 -0
  71. {iatoolkit-0.91.1.dist-info → iatoolkit-1.7.0.dist-info}/top_level.txt +0 -0
iatoolkit/infra/llm_proxy.py CHANGED
@@ -3,165 +3,266 @@
 #
 # IAToolkit is open source software.
 
-from typing import Dict, List, Any
-from abc import ABC, abstractmethod
-from iatoolkit.common.util import Utility
-from iatoolkit.infra.llm_response import LLMResponse
+
 from iatoolkit.services.configuration_service import ConfigurationService
-from iatoolkit.infra.openai_adapter import OpenAIAdapter
-from iatoolkit.infra.gemini_adapter import GeminiAdapter
+from iatoolkit.infra.llm_providers.openai_adapter import OpenAIAdapter
+from iatoolkit.infra.llm_providers.gemini_adapter import GeminiAdapter
+from iatoolkit.infra.llm_providers.deepseek_adapter import DeepseekAdapter
+# from iatoolkit.infra.llm_providers.anthropic_adapter import AnthropicAdapter
 from iatoolkit.common.exceptions import IAToolkitException
-from iatoolkit.repositories.models import Company
-from openai import OpenAI
-import google.generativeai as genai
+from iatoolkit.common.util import Utility
+from iatoolkit.infra.llm_response import LLMResponse
+from iatoolkit.common.model_registry import ModelRegistry
+
+from openai import OpenAI  # For OpenAI and xAI (OpenAI-compatible)
+# from anthropic import Anthropic  # For Claude (Anthropic)
+
+from typing import Dict, List, Any, Tuple
 import os
 import threading
-from enum import Enum
 from injector import inject
 
 
-class LLMProvider(Enum):
-    OPENAI = "openai"
-    GEMINI = "gemini"
-
-
-class LLMAdapter(ABC):
-    """common interface for LLM adapters"""
-
-    @abstractmethod
-    def create_response(self, *args, **kwargs) -> LLMResponse:
-        pass
-
-
 class LLMProxy:
     """
-    Proxy that routes calls to the correct adapter and manages the creation
-    of the LLM provider clients.
+    Proxy for routing calls to the correct LLM adapter and managing the creation of LLM clients.
     """
-    _clients_cache = {}
+
+    # Class-level cache for low-level clients (per provider + API key)
+    _clients_cache: Dict[Tuple[str, str], Any] = {}
     _clients_cache_lock = threading.Lock()
 
+    # Provider identifiers
+    PROVIDER_OPENAI = "openai"
+    PROVIDER_GEMINI = "gemini"
+    PROVIDER_DEEPSEEK = "deepseek"
+    PROVIDER_XAI = "xai"
+    PROVIDER_ANTHROPIC = "anthropic"
+
     @inject
-    def __init__(self, util: Utility,
-                 configuration_service: ConfigurationService,
-                 openai_client=None,
-                 gemini_client=None):
+    def __init__(
+        self,
+        util: Utility,
+        configuration_service: ConfigurationService,
+        model_registry: ModelRegistry,
+    ):
         """
-        Initializes a proxy instance. It can be a "base" (factory) instance
-        or a "working" instance with configured clients.
+        Init a new instance of the proxy. It can be a base factory or a working instance with configured clients.
+        Pre-built clients can be injected for tests or special environments.
         """
         self.util = util
         self.configuration_service = configuration_service
-        self.openai_adapter = OpenAIAdapter(openai_client) if openai_client else None
-        self.gemini_adapter = GeminiAdapter(gemini_client) if gemini_client else None
+        self.model_registry = model_registry
+
+        # Adapter cache per provider
+        self.adapters: Dict[str, Any] = {}
+
+    # -------------------------------------------------------------------------
+    # Public API
+    # -------------------------------------------------------------------------
+
+    def create_response(self, company_short_name: str, model: str, input: List[Dict], **kwargs) -> LLMResponse:
+        """
+        Route the call to the correct adapter based on the model name.
+        This method is the single entry point used by the rest of the application.
+        """
+        if not company_short_name:
+            raise IAToolkitException(
+                IAToolkitException.ErrorType.API_KEY,
+                "company_short_name is required in kwargs to resolve LLM credentials."
+            )
+
+        # Determine the provider based on the model name
+        provider = self._resolve_provider_from_model(model)
+
+        adapter = self._get_or_create_adapter(
+            provider=provider,
+            company_short_name=company_short_name,
+        )
+
+        # Delegate to the adapter (OpenAI, Gemini, DeepSeek, xAI, Anthropic, etc.)
+        return adapter.create_response(model=model, input=input, **kwargs)
+
+    # -------------------------------------------------------------------------
+    # Provider resolution
+    # -------------------------------------------------------------------------
+
+    def _resolve_provider_from_model(self, model: str) -> str:
+        """
+        Determine which provider must be used for a given model name.
+        This delegates to the ModelRegistry, so all model-naming logic stays in one place.
+        """
+        provider_key = self.model_registry.get_provider(model)
+
+        if provider_key == "openai":
+            return self.PROVIDER_OPENAI
+        if provider_key == "gemini":
+            return self.PROVIDER_GEMINI
+        if provider_key == "deepseek":
+            return self.PROVIDER_DEEPSEEK
+        if provider_key == "xai":
+            return self.PROVIDER_XAI
+        if provider_key == "anthropic":
+            return self.PROVIDER_ANTHROPIC
+
+        raise IAToolkitException(
+            IAToolkitException.ErrorType.MODEL,
+            f"Unknown or unsupported model: {model}"
+        )
+
+    # -------------------------------------------------------------------------
+    # Adapter management
+    # -------------------------------------------------------------------------
+
+    def _get_or_create_adapter(self, provider: str, company_short_name: str) -> Any:
+        """
+        Return an adapter instance for the given provider.
+        If none exists yet, create it using a cached or new low-level client.
+        """
+        # If already created, just return it
+        if provider in self.adapters and self.adapters[provider] is not None:
+            return self.adapters[provider]
+
+        # Otherwise, create a low-level client from configuration
+        api_key = self._get_api_key_from_config(company_short_name, provider)
+        client = self._get_or_create_client(provider, api_key)
+
+        # Wrap client with the correct adapter
+        if provider == self.PROVIDER_OPENAI:
+            adapter = OpenAIAdapter(client)
+        elif provider == self.PROVIDER_GEMINI:
+            adapter = GeminiAdapter(client)
+        elif provider == self.PROVIDER_DEEPSEEK:
+            adapter = DeepseekAdapter(client)
+        else:
+            raise IAToolkitException(
+                IAToolkitException.ErrorType.MODEL,
+                f"Provider not supported in _get_or_create_adapter: {provider}"
+            )
+
+        '''
+        elif provider == self.PROVIDER_XAI:
+            adapter = XAIAdapter(client)
+        elif provider == self.PROVIDER_ANTHROPIC:
+            adapter = AnthropicAdapter(client)
+        '''
+        self.adapters[provider] = adapter
+        return adapter
+
+    # -------------------------------------------------------------------------
+    # Client cache
+    # -------------------------------------------------------------------------
 
-    def create_for_company(self, company: Company) -> 'LLMProxy':
+    def _get_or_create_client(self, provider: str, api_key: str) -> Any:
         """
-        Creates and configures a new LLMProxy instance for a specific company.
+        Return a low-level client for the given provider and API key.
+        Uses a class-level cache to avoid recreating clients.
         """
-        try:
-            openai_client = self._get_llm_connection(company, LLMProvider.OPENAI)
-        except IAToolkitException:
-            openai_client = None
+        cache_key = (provider, api_key or "")
 
-        try:
-            gemini_client = self._get_llm_connection(company, LLMProvider.GEMINI)
-        except IAToolkitException:
-            gemini_client = None
+        with self._clients_cache_lock:
+            if cache_key in self._clients_cache:
+                return self._clients_cache[cache_key]
 
-        if not openai_client and not gemini_client:
+            client = self._create_client_for_provider(provider, api_key)
+            self._clients_cache[cache_key] = client
+            return client
+
+    def _create_client_for_provider(self, provider: str, api_key: str) -> Any:
+        """
+        Actually create the low-level client for a provider.
+        This is the only place where provider-specific client construction lives.
+        """
+        if provider == self.PROVIDER_OPENAI:
+            # Standard OpenAI client for GPT models
+            return OpenAI(api_key=api_key)
+
+        if provider == self.PROVIDER_XAI:
+            # xAI Grok is OpenAI-compatible; we can use the OpenAI client with a different base_url.
+            return OpenAI(
+                api_key=api_key,
+                base_url="https://api.x.ai/v1",
+            )
+
+        if provider == self.PROVIDER_DEEPSEEK:
+            # Example: if you use the official deepseek client or OpenAI-compatible wrapper
+            # return DeepSeekAPI(api_key=api_key)
+
+            # We use OpenAI client with a DeepSeek base_url:
+            return OpenAI(
+                api_key=api_key,
+                base_url="https://api.deepseek.com",
+            )
+
+        if provider == self.PROVIDER_GEMINI:
+            # Example placeholder: you may already have a Gemini client factory elsewhere.
+            # Here you could create and configure the Gemini client (e.g. google.generativeai).
+            #
+            import google.generativeai as genai
+
+            genai.configure(api_key=api_key)
+            return genai
+        if provider == self.PROVIDER_ANTHROPIC:
+            # Example using Anthropic official client:
+            #
+            # from anthropic import Anthropic
+            # return Anthropic(api_key=api_key)
             raise IAToolkitException(
                 IAToolkitException.ErrorType.API_KEY,
-                f"Company '{company.name}' has no API keys configured for any LLM provider."
+                "Anthropic client creation must be implemented in _create_client_for_provider."
             )
 
-        # Return a NEW instance with the configured clients
-        return LLMProxy(
-            util=self.util,
-            configuration_service=self.configuration_service,
-            openai_client=openai_client,
-            gemini_client=gemini_client)
-
-    def create_response(self, model: str, input: List[Dict], **kwargs) -> LLMResponse:
-        """Route the call to the correct adapter based on the model."""
-        # This instance is assumed to already have its clients configured by `create_for_company`
-        if self.util.is_openai_model(model):
-            if not self.openai_adapter:
-                raise IAToolkitException(IAToolkitException.ErrorType.API_KEY,
-                                         f"No OpenAI client was configured, but an OpenAI model was requested: {model}")
-            return self.openai_adapter.create_response(model=model, input=input, **kwargs)
-        elif self.util.is_gemini_model(model):
-            if not self.gemini_adapter:
-                raise IAToolkitException(IAToolkitException.ErrorType.API_KEY,
-                                         f"No Gemini client was configured, but a Gemini model was requested: {model}")
-            return self.gemini_adapter.create_response(model=model, input=input, **kwargs)
-        else:
-            raise IAToolkitException(IAToolkitException.ErrorType.LLM_ERROR, f"Unsupported model: {model}")
-
-    def _get_llm_connection(self, company: Company, provider: LLMProvider) -> Any:
-        """Get a client connection for a provider, using a cache to reuse it."""
-        cache_key = f"{company.short_name}_{provider.value}"
-        client = LLMProxy._clients_cache.get(cache_key)
-
-        if not client:
-            with LLMProxy._clients_cache_lock:
-                client = LLMProxy._clients_cache.get(cache_key)
-                if not client:
-                    if provider == LLMProvider.OPENAI:
-                        client = self._create_openai_client(company)
-                    elif provider == LLMProvider.GEMINI:
-                        client = self._create_gemini_client(company)
-                    else:
-                        raise IAToolkitException(f"provider not supported: {provider.value}")
-
-                    if client:
-                        LLMProxy._clients_cache[cache_key] = client
-
-        if not client:
-            raise IAToolkitException(IAToolkitException.ErrorType.API_KEY, f"Could not create the client for {provider.value}")
-
-        return client
-
-    def _create_openai_client(self, company: Company) -> OpenAI:
-        """Create an OpenAI client with the API key."""
-        decrypted_api_key = ''
-        llm_config = self.configuration_service.get_configuration(company.short_name, 'llm')
-
-        # Try to get API key name from config first
-        if llm_config and llm_config.get('api-key'):
-            api_key_env_var = llm_config['api-key']
-            decrypted_api_key = os.getenv(api_key_env_var, '')
-        else:
-            # Fallback to old logic
-            if company.openai_api_key:
-                decrypted_api_key = self.util.decrypt_key(company.openai_api_key)
-            else:
-                decrypted_api_key = os.getenv("OPENAI_API_KEY", '')
-
-        if not decrypted_api_key:
-            raise IAToolkitException(IAToolkitException.ErrorType.API_KEY,
-                                     f"Company '{company.name}' has no OpenAI API key.")
-        return OpenAI(api_key=decrypted_api_key)
-
-    def _create_gemini_client(self, company: Company) -> Any:
-        """Configure and return the Gemini client."""
-
-        decrypted_api_key = ''
-        llm_config = self.configuration_service.get_configuration(company.short_name, 'llm')
-
-        # Try to get API key name from config first
-        if llm_config and llm_config.get('api-key'):
-            api_key_env_var = llm_config['api-key']
-            decrypted_api_key = os.getenv(api_key_env_var, '')
-        else:
-            # Fallback to old logic
-            if company.gemini_api_key:
-                decrypted_api_key = self.util.decrypt_key(company.gemini_api_key)
-            else:
-                decrypted_api_key = os.getenv("GEMINI_API_KEY", '')
+        raise IAToolkitException(
+            IAToolkitException.ErrorType.MODEL,
+            f"Provider not supported in _create_client_for_provider: {provider}"
+        )
 
-        if not decrypted_api_key:
-            return None
-        genai.configure(api_key=decrypted_api_key)
-        return genai
+    # -------------------------------------------------------------------------
+    # Configuration helpers
+    # -------------------------------------------------------------------------
+    def _get_api_key_from_config(self, company_short_name: str, provider: str) -> str:
+        """
+        Read the LLM API key from company configuration and environment variables.
+
+        Resolution priority:
+        1. llm.provider_api_keys[provider] -> provider-specific env var.
+        2. llm.api-key -> global env var (backwards compatibility).
+        """
+        llm_config = self.configuration_service.get_configuration(company_short_name, "llm")
+
+        if not llm_config:
+            # Keep compatibility with the tests: the message must state
+            # that no API key is configured.
+            raise IAToolkitException(
+                IAToolkitException.ErrorType.API_KEY,
+                f"Company '{company_short_name}' doesn't have an API key configured."
+            )
+
+        provider_keys = llm_config.get("provider_api_keys") or {}
+        env_var_name = None
+
+        # 1) Try the provider-specific api-key (if the provider_api_keys block exists)
+        if provider_keys and isinstance(provider_keys, dict):
+            env_var_name = provider_keys.get(provider)
+
+        # 2) Fallback: use the global api-key if there is no provider-specific one
+        if not env_var_name and llm_config.get("api-key"):
+            env_var_name = llm_config["api-key"]
+
+        if not env_var_name:
+            raise IAToolkitException(
+                IAToolkitException.ErrorType.API_KEY,
+                f"Company '{company_short_name}' doesn't have an API key configured "
+                f"for provider '{provider}'."
+            )
+
+        api_key_value = os.getenv(env_var_name, "")
+
+        if not api_key_value:
+            raise IAToolkitException(
+                IAToolkitException.ErrorType.API_KEY,
+                f"Environment variable '{env_var_name}' for company '{company_short_name}' "
+                f"and provider '{provider}' is not set or is empty."
+            )
 
+        return api_key_value
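For orientation, a minimal usage sketch of the reworked entry point follows; the company short name, model name, and message payload are hypothetical, and the sketch assumes an LLMProxy instance obtained through the injector, as elsewhere in the toolkit.

# Minimal sketch (hypothetical values); not part of the package.
from iatoolkit.infra.llm_proxy import LLMProxy

def ask(llm_proxy: LLMProxy) -> str:
    # The proxy resolves the provider from the model name via ModelRegistry,
    # builds or reuses the matching adapter and low-level client, then delegates the call.
    response = llm_proxy.create_response(
        company_short_name="acme",   # hypothetical company
        model="gpt-4o-mini",         # hypothetical model name
        input=[{"role": "user", "content": "Summarize yesterday's sales."}],
    )
    return response.output_text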
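The key-resolution priority documented in _get_api_key_from_config can also be read as data. Below is a sketch of what the loaded "llm" configuration block might look like; the environment-variable names are illustrative, not part of the package.

# Hypothetical value returned by configuration_service.get_configuration("acme", "llm").
llm_config = {
    # 1) Provider-specific entries win; each value names an environment variable.
    "provider_api_keys": {
        "openai": "ACME_OPENAI_API_KEY",      # illustrative env var name
        "deepseek": "ACME_DEEPSEEK_API_KEY",  # illustrative env var name
    },
    # 2) Global fallback, used when the provider has no entry above.
    "api-key": "ACME_LLM_API_KEY",            # illustrative env var name
}
# The proxy then reads the actual secret with os.getenv(env_var_name, "").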
iatoolkit/infra/llm_response.py CHANGED
@@ -32,9 +32,14 @@ class LLMResponse:
     output_text: str
     output: List[ToolCall]  # list of tool calls
     usage: Usage
+    reasoning_content: str = None  # optional field for Chain of Thought
+
 
     def __post_init__(self):
         """Ensure that output is a list"""
         if self.output is None:
             self.output = []
 
+        if self.reasoning_content is None:
+            self.reasoning_content = ""
+
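A small sketch of how the new reasoning_content field behaves after __post_init__; the constructor arguments are illustrative, and it assumes Usage can be built with defaults, which may not match the real dataclass.

# Illustrative only; assumes Usage() accepts no arguments.
from iatoolkit.infra.llm_response import LLMResponse, Usage

resp = LLMResponse(
    output_text="42 units were sold.",
    output=None,              # normalized to [] by __post_init__
    usage=Usage(),
    reasoning_content=None,   # normalized to "" by __post_init__
)
assert resp.output == [] and resp.reasoning_content == ""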
iatoolkit/locales/en.yaml CHANGED
@@ -46,6 +46,112 @@ ui:
   prompts_available: "Available prompts"
   init_context: "Initializing the context ..."
 
+  admin:
+    workspace: "Workspace"
+    configuration: "Configuration"
+    company_config: "Company Configuration (company.yaml)"
+    prompts: "Prompts"
+    prompts_description: "System Prompts"
+    knowledge: "Knowledge Base"
+    knowledge_rag: "RAG (Vector)"
+    knowledge_static: "Static Context"
+    schemas: "Schemas"
+    schemas_description: "Data Definitions (YAML)"
+    context_description: "Static Markdown"
+    administration: "Administration"
+    monitoring: "Monitoring"
+    teams: "Team"
+    billing: "Billing"
+    company: "Company"
+    context: "Context"
+    files: "Files"
+    select_file: "Select a file for editing"
+    select_category: "Select a category"
+    editing: "Editing"
+    no_file_selected: "No file selected"
+    new: "New"
+    confirm: "Confirm"
+    cancel: "Cancel"
+    new_file: "New file"
+    delete_file: "Delete file"
+    rename_file: "Rename file"
+    saved_ok: "File saved successfully"
+    save_file: "Save"
+    credentials: "Email and password required for login."
+    deleted_ok: "Deleted successfully"
+    user_manager: "User management"
+    admin_page_title: "Admin access"
+    load_configuration: "Save configuration"
+    goto_chat: "Go to chat"
+    logout: "Close session"
+    error_loading: "Error loading file content"
+    loading: "Loading..."
+
+  db_explorer:
+    data_explorer: "Data Explorer"
+    database_connection: "Database Connection"
+    tables: "Tables"
+    data_explorer_description: "Explore your database tables ."
+    select_database: "Please select a database above to view its tables."
+    not_table_selected: "No table selected"
+    view_yaml: "View YAML"
+    save_schema: "Save schema"
+    select_table: "Select a table from the left panel"
+    table_semantics: "Table Semantics"
+    table_description: "Description (AI Context)"
+    table_placeholder: "Describe what data this table contains (e.g., 'Active and inactive customer records including billing addresses')..."
+    column_metadata: "Column Metadata"
+    auto_detect: "Auto-detected from DB"
+    meta_column: "Column"
+    meta_type: "Type"
+    meta_description: "Description"
+    meta_synonyms: "Synonyms"
+    pii_sesitive: "PII Sensitive"
+
+  config:
+    editor_description: "IAToolkit configuration file"
+    title: "Configuration Editor"
+    sections: "Sections"
+    refresh: "Refresh"
+    validate: "Validate"
+    select_section: "Select a section from the left panel"
+    no_section_selected: "No section selected"
+    view_yaml: "View YAML"
+
+
+  rag:
+    ingestion: "Ingestion"
+    ingestion_description: "Ingest documents into the knowledge base."
+    workbench: "Workbench"
+    documents: "Documents"
+    retrieval_lab: "Retrieval Lab."
+    retrieval_description: "Test semantic search and context retrieval."
+    query_placeholder: "Enter a question to query the knowledge base..."
+    search_button: "Search"
+    filter: "Filter"
+    search_results_title: "Ready to test"
+    search_results_description: "Results will appear here"
+    filename: "Filename"
+    filename_placeholder: "Contains..."
+    user: "User"
+    status: "Status"
+    all_status: "All statuses"
+    status_active: "Active"
+    status_pending: "Pending"
+    status_processing: "Processing"
+    status_failed: "Failed"
+    created_at: "Created"
+    date_range: "Date range"
+    delete_confirmation: "Delete File?"
+    delete_message: "This action cannot be undone. The file will be permanently removed."
+    delete_button: "Delete"
+    delete_cancel: "Cancel"
+    target_collection: "Target collection"
+    select_collection_placeholder: "Select a collection"
+    collection_required: "Collection is required"
+    collection: "Collection"
+    all_collections: "All collections"
+
   tooltips:
     history: "History of my queries"
     reload_context: "Force Context Reload"
@@ -53,6 +159,7 @@ ui:
     usage_guide: "Usage Guide"
     onboarding: "How to ask better questions"
    logout: "Log out"
+    preferences: "Preferences"
     use_prompt_assistant: "Use Prompt Assistant"
     attach_files: "Attach files"
     view_attached_files: "View attached files"
@@ -87,12 +194,14 @@ errors:
     session_creation_failed: "Could not create user session."
     authentication_required: "Authentication required. No session cookie or API Key provided."
     invalid_api_key: "Invalid or inactive API Key."
+    api_key_name_required: "api_key_name parameter is required."
+
     no_user_identifier_api: "No user_identifier provided for API call."
+    no_company_permissions: "Do not have permissions to admin this company."
   templates:
     company_not_found: "Company not found."
     home_template_not_found: "The home page template for the company '{company_short_name}' is not configured."
     template_not_found: "Template not found: '{template_name}'."
-
     processing_error: "An error occurred while processing the custom home page template: {error}"
   general:
     unexpected_error: "An unexpected error has occurred. Please contact support."
@@ -135,7 +244,6 @@ errors:
     cannot_read_excel: "Cannot read Excel file"
     cannot_read_csv: "Cannot read CSV file"
 
-
   api_responses:
     context_reloaded_success: "The context has been successfully reloaded."
 
@@ -144,6 +252,19 @@ services:
     start_query: "Hello, what can I help you with today?"
     mail_change_password: "mail sent for password change"
 
+  rag:
+    ingestion:
+      duplicate: "Document '{filename}' already exists for company '{company_short_name}'. Skipping ingestion."
+      failed: "Failed to ingest document: {error}"
+      processing_failed: "Processing failed: {error}"
+      empty_text: "Extracted text is empty."
+    search:
+      query_required: "Query is required."
+      company_not_found: "Company '{company_short_name}' not found."
+    management:
+      delete_success: "Document deleted."
+      not_found: "Document not found."
+      action_not_found: "Action '{action}' not found."
 
 flash_messages:
   password_changed_success: "Your password has been successfully reset. You can now log in."
@@ -172,4 +293,39 @@ js_messages:
   reload_init: "init reloading context in background..."
   no_history_found: "No query history found."
   example: "Example:"
+  show_reasoning: "Show reasoning"
+  unsaved: "Modified (unsaved)"
+  select_file: "Select a file from the list"
+  no_file_selected: "No file selected"
+  select_company: "Select company"
+  delete_ok: "File deleted successfully"
+  delete_failed: "File deletion failed"
+  rename_ok: "File rename successfully"
+  file_created: "File created successfully"
+  not_saved: 'Could not save'
+  saved_ok: "Saved successfully"
+  error_saving: "Error saving file"
+  invalid_file_name: "Invalid filename. Use only letters, numbers, underscores, hyphens, and dots."
+  config_loaded: "Configuration loaded successfully."
+  config_load_error: "Error loading configuration."
+  search_placeholder: "Search users..."
+  showing: "Showing"
+  records: "Records"
+  db_user: "User"
+  db_role: "Role"
+  db_verified: "Verified"
+  db_created: "Created"
+  db_last_access: "Last access"
+  db_filename: "Filename"
+  db_user: "User"
+  db_status: "Status"
+  db_collection: "Collection"
+  editor_no_file_selected: "No file selected"
+  error_loading: "Error loading file content"
+  cant_load_company: "Could not load company.yaml"
+  config_saved: "Configuration saved successfully."
+  config_error: "Error saving configuration."
+
+
+
 