iatoolkit 1.9.0__py3-none-any.whl → 1.15.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. iatoolkit/__init__.py +1 -1
  2. iatoolkit/common/routes.py +1 -1
  3. iatoolkit/common/util.py +8 -123
  4. iatoolkit/core.py +1 -0
  5. iatoolkit/infra/connectors/file_connector.py +10 -2
  6. iatoolkit/infra/connectors/google_drive_connector.py +3 -0
  7. iatoolkit/infra/connectors/local_file_connector.py +3 -0
  8. iatoolkit/infra/connectors/s3_connector.py +24 -1
  9. iatoolkit/infra/llm_providers/deepseek_adapter.py +17 -1
  10. iatoolkit/infra/llm_providers/gemini_adapter.py +117 -18
  11. iatoolkit/infra/llm_providers/openai_adapter.py +175 -18
  12. iatoolkit/infra/llm_response.py +13 -0
  13. iatoolkit/locales/en.yaml +47 -2
  14. iatoolkit/locales/es.yaml +45 -1
  15. iatoolkit/repositories/llm_query_repo.py +44 -33
  16. iatoolkit/services/company_context_service.py +294 -133
  17. iatoolkit/services/dispatcher_service.py +1 -1
  18. iatoolkit/services/knowledge_base_service.py +26 -4
  19. iatoolkit/services/llm_client_service.py +58 -2
  20. iatoolkit/services/prompt_service.py +236 -330
  21. iatoolkit/services/query_service.py +37 -18
  22. iatoolkit/services/storage_service.py +92 -0
  23. iatoolkit/static/js/chat_filepond.js +188 -63
  24. iatoolkit/static/js/chat_main.js +105 -52
  25. iatoolkit/static/styles/chat_iatoolkit.css +96 -0
  26. iatoolkit/system_prompts/query_main.prompt +24 -41
  27. iatoolkit/templates/chat.html +15 -6
  28. iatoolkit/views/base_login_view.py +1 -1
  29. iatoolkit/views/categories_api_view.py +43 -3
  30. iatoolkit/views/chat_view.py +1 -1
  31. iatoolkit/views/login_view.py +1 -1
  32. iatoolkit/views/prompt_api_view.py +1 -1
  33. {iatoolkit-1.9.0.dist-info → iatoolkit-1.15.3.dist-info}/METADATA +1 -1
  34. {iatoolkit-1.9.0.dist-info → iatoolkit-1.15.3.dist-info}/RECORD +38 -37
  35. {iatoolkit-1.9.0.dist-info → iatoolkit-1.15.3.dist-info}/WHEEL +0 -0
  36. {iatoolkit-1.9.0.dist-info → iatoolkit-1.15.3.dist-info}/licenses/LICENSE +0 -0
  37. {iatoolkit-1.9.0.dist-info → iatoolkit-1.15.3.dist-info}/licenses/LICENSE_COMMUNITY.md +0 -0
  38. {iatoolkit-1.9.0.dist-info → iatoolkit-1.15.3.dist-info}/top_level.txt +0 -0
@@ -4,6 +4,7 @@
 # IAToolkit is open source software.

 from injector import inject
+from iatoolkit import current_iatoolkit
 from iatoolkit.common.interfaces.asset_storage import AssetRepository, AssetType
 from iatoolkit.repositories.llm_query_repo import LLMQueryRepo
 from iatoolkit.services.i18n_service import I18nService
@@ -18,9 +19,9 @@ import os

 # iatoolkit system prompts definitions
 _SYSTEM_PROMPTS = [
-    {'name': 'query_main', 'description': 'iatoolkit main prompt'},
-    {'name': 'format_styles', 'description': 'output format styles'},
-    {'name': 'sql_rules', 'description': 'instructions for SQL queries'}
+    {'name': 'query_main', 'description': 'iatoolkit main prompt', 'order': 1},
+    {'name': 'format_styles', 'description': 'output format styles', 'order': 2},
+    {'name': 'sql_rules', 'description': 'instructions for SQL queries', 'order': 3},
 ]

 class PromptService:
@@ -35,6 +36,189 @@ class PromptService:
         self.profile_repo = profile_repo
         self.i18n_service = i18n_service

+    def get_prompts(self, company_short_name: str, include_all: bool = False) -> dict:
+        try:
+            # validate company
+            company = self.profile_repo.get_company_by_short_name(company_short_name)
+            if not company:
+                return {"error": self.i18n_service.t('errors.company_not_found', company_short_name=company_short_name)}
+
+            # get all the company prompts
+            # If include_all is True, repo should return everything for the company
+            # Otherwise, it should return only active prompts
+            all_prompts = self.llm_query_repo.get_prompts(company, include_all=include_all)
+
+            # Deduplicate prompts by id
+            all_prompts = list({p.id: p for p in all_prompts}.values())
+
+            # group by category
+            prompts_by_category = defaultdict(list)
+            for prompt in all_prompts:
+                # Filter logic moved here or in repo.
+                # If include_all is False, we only want active prompts (and maybe only specific types)
+                if not include_all:
+
+                    # Standard user view: excludes system/agent hidden prompts if any?
+                    if prompt.prompt_type != PromptType.COMPANY.value:
+                        continue
+
+                # Grouping logic
+                cat_key = (0, "Uncategorized")  # Default
+                if prompt.category:
+                    cat_key = (prompt.category.order, prompt.category.name)
+
+                prompts_by_category[cat_key].append(prompt)
+
+            # sort each category by order
+            for cat_key in prompts_by_category:
+                prompts_by_category[cat_key].sort(key=lambda p: p.order)
+
+            categorized_prompts = []
+
+            # sort categories by order
+            sorted_categories = sorted(prompts_by_category.items(), key=lambda item: item[0][0])
+
+            for (cat_order, cat_name), prompts in sorted_categories:
+                categorized_prompts.append({
+                    'category_name': cat_name,
+                    'category_order': cat_order,
+                    'prompts': [
+                        {
+                            'prompt': p.name,
+                            'description': p.description,
+                            'type': p.prompt_type,
+                            'active': p.active,
+                            'custom_fields': p.custom_fields,
+                            'order': p.order
+                        }
+                        for p in prompts
+                    ]
+                })
+
+            return {'message': categorized_prompts}
+
+        except Exception as e:
+            logging.error(f"error in get_prompts: {e}")
+            return {'error': str(e)}
+
+
+    def get_prompt_content(self, company: Company, prompt_name: str):
+        try:
+            # get the prompt from database
+            prompt = self.llm_query_repo.get_prompt_by_name(company, prompt_name)
+            if not prompt:
+                raise IAToolkitException(IAToolkitException.ErrorType.DOCUMENT_NOT_FOUND,
+                                         f"prompt not found '{prompt}' for company '{company.short_name}'")
+
+            try:
+                # read the prompt content from asset repository
+                user_prompt_content = self.asset_repo.read_text(
+                    company.short_name,
+                    AssetType.PROMPT,
+                    prompt.filename
+                )
+            except FileNotFoundError:
+                raise IAToolkitException(IAToolkitException.ErrorType.FILE_IO_ERROR,
+                                         f"prompt file '{prompt.filename}' does not exist for company '{company.short_name}'")
+            except Exception as e:
+                raise IAToolkitException(IAToolkitException.ErrorType.FILE_IO_ERROR,
+                                         f"error while reading prompt: '{prompt_name}': {e}")
+
+            return user_prompt_content
+
+        except IAToolkitException:
+            raise
+        except Exception as e:
+            logging.exception(
+                f"error loading prompt '{prompt_name}' content for '{company.short_name}': {e}")
+            raise IAToolkitException(IAToolkitException.ErrorType.PROMPT_ERROR,
+                                     f'error loading prompt "{prompt_name}" content for company {company.short_name}: {str(e)}')
+
+    def save_prompt(self, company_short_name: str, prompt_name: str, data: dict):
+        """
+        Create or Update a prompt.
+        1. Saves the Jinja content to the .prompt asset file.
+        2. Updates the Database.
+        """
+        company = self.profile_repo.get_company_by_short_name(company_short_name)
+        if not company:
+            raise IAToolkitException(IAToolkitException.ErrorType.INVALID_NAME,
+                                     f"Company {company_short_name} not found")
+
+        # Validate category if present
+        category_id = None
+        if 'category' in data:
+            # simple lookup, assuming category names are unique per company
+            cat = self.llm_query_repo.get_category_by_name(company.id, data['category'])
+            if cat:
+                category_id = cat.id
+
+        # 1. save the physical part of the prompt (content)
+        if 'content' in data:
+            filename = f"{prompt_name}.prompt"
+            filename = filename.lower().replace(' ', '_')
+            self.asset_repo.write_text(company_short_name, AssetType.PROMPT, filename, data['content'])
+
+        # 2. update the prompt in the database
+        new_prompt = Prompt(
+            company_id=company.id,
+            name=prompt_name,
+            description=data.get('description', ''),
+            order=data.get('order', 1),
+            category_id=category_id,
+            active=data.get('active', True),
+            prompt_type=data.get('prompt_type', 'company'),
+            filename=f"{prompt_name.lower().replace(' ', '_')}.prompt",
+            custom_fields=data.get('custom_fields', [])
+        )
+        self.llm_query_repo.create_or_update_prompt(new_prompt)
+
+    def delete_prompt(self, company_short_name: str, prompt_name: str):
+        """
+        Deletes a prompt:
+        1. Removes from DB.
+        2. Removes from YAML config.
+        3. (Optional) Deletes/Archives physical file.
+        """
+        company = self.profile_repo.get_company_by_short_name(company_short_name)
+        if not company:
+            raise IAToolkitException(IAToolkitException.ErrorType.INVALID_NAME, f"Company not found")
+
+        prompt_db = self.llm_query_repo.get_prompt_by_name(company, prompt_name)
+        if not prompt_db:
+            raise IAToolkitException(IAToolkitException.ErrorType.DOCUMENT_NOT_FOUND, f"Prompt {prompt_name} not found")
+
+        # 1. Remove from DB
+        self.llm_query_repo.delete_prompt(prompt_db)
+
+    def get_system_prompt(self):
+        try:
+            system_prompt_content = []
+
+            # read all the system prompts from the database
+            system_prompts = self.llm_query_repo.get_system_prompts()
+
+            for prompt in system_prompts:
+                try:
+                    content = importlib.resources.read_text('iatoolkit.system_prompts', prompt.filename)
+                    system_prompt_content.append(content)
+                except FileNotFoundError:
+                    logging.warning(f"Prompt file does not exist in the package: {prompt.filename}")
+                except Exception as e:
+                    raise IAToolkitException(IAToolkitException.ErrorType.FILE_IO_ERROR,
+                                             f"error reading system prompt '{prompt.filename}': {e}")
+
+            # join the system prompts into a single string
+            return "\n".join(system_prompt_content)
+
+        except IAToolkitException:
+            raise
+        except Exception as e:
+            logging.exception(
+                f"Error getting the system prompt content: {e}")
+            raise IAToolkitException(IAToolkitException.ErrorType.PROMPT_ERROR,
+                                     f'error reading the system prompts: {str(e)}')
+
     def sync_company_prompts(self, company_short_name: str, prompt_list: list, categories_config: list):
         """
         Synchronizes prompt categories and prompts from YAML config to Database.
@@ -50,7 +234,13 @@
             raise IAToolkitException(IAToolkitException.ErrorType.INVALID_NAME,
                                      f'Company {company_short_name} not found')

+        # Register system prompts
         self._register_system_prompts(company)
+
+        # community edition has its own prompt management
+        if not current_iatoolkit().is_community:
+            return
+
         try:
             # 1. Sync Categories
             category_map = {}
@@ -88,7 +278,7 @@
                     order=prompt_data.get('order'),
                     category_id=category_obj.id,
                     active=prompt_data.get('active', True),
-                    prompt_type=PromptType.COMPANY.value,
+                    prompt_type=prompt_data.get('prompt_type', PromptType.COMPANY.value).lower(),
                     filename=filename,
                     custom_fields=prompt_data.get('custom_fields', [])
                 )
@@ -112,6 +302,13 @@
         """
         Synchronizes system prompts defined in Dispatcher/Code to Database.
        """
+
+        # if there are system prompts already registered, skip
+        # if self.llm_query_repo.get_system_prompts(): return
+
+        sys_category = PromptCategory(company_id=company.id, name="System", order=0)
+        self.llm_query_repo.create_or_update_prompt_category(sys_category)
+
         try:
             defined_names = set()

@@ -124,8 +321,8 @@
                     company_id=company.id,
                     name=prompt_name,
                     description=prompt_data['description'],
-                    order=i + 1,
-                    category_id=None,
+                    order=prompt_data['order'],
+                    category_id=sys_category.id,
                     active=True,
                     prompt_type=PromptType.SYSTEM.value,
                     filename=prompt_filename,
@@ -134,9 +331,8 @@
                 self.llm_query_repo.create_or_update_prompt(new_prompt)

                 # add prompt to company assets
-                if not self.asset_repo.exists(company.short_name, AssetType.PROMPT, prompt_filename):
-                    prompt_content = importlib.resources.read_text('iatoolkit.system_prompts', prompt_filename)
-                    self.asset_repo.write_text(company.short_name, AssetType.PROMPT, prompt_filename, prompt_content)
+                prompt_content = importlib.resources.read_text('iatoolkit.system_prompts', prompt_filename)
+                self.asset_repo.write_text(company.short_name, AssetType.PROMPT, prompt_filename, prompt_content)

             # Cleanup old system prompts
             existing_sys_prompts = self.llm_query_repo.get_system_prompts()
@@ -150,335 +346,45 @@
             self.llm_query_repo.rollback()
             raise IAToolkitException(IAToolkitException.ErrorType.DATABASE_ERROR, str(e))

-    def create_prompt(self,
-                      prompt_name: str,
-                      description: str,
-                      order: int,
-                      company: Company = None,
-                      category: PromptCategory = None,
-                      active: bool = True,
-                      prompt_type: PromptType = PromptType.COMPANY,
-                      custom_fields: list = []
-                      ):
-        """
-        Direct creation method (used by sync or direct calls).
-        Validates file existence before creating DB entry.
-        """
-        prompt_filename = prompt_name.lower() + '.prompt'
-        if prompt_type == PromptType.SYSTEM:
-            if not importlib.resources.files('iatoolkit.system_prompts').joinpath(prompt_filename).is_file():
-                raise IAToolkitException(IAToolkitException.ErrorType.INVALID_NAME,
-                                         f'missing system prompt file: {prompt_filename}')
-        else:
-            if not self.asset_repo.exists(company.short_name, AssetType.PROMPT, prompt_filename):
-                raise IAToolkitException(IAToolkitException.ErrorType.INVALID_NAME,
-                                         f'missing prompt file: {prompt_filename} in prompts/')
-
-        if custom_fields:
-            for f in custom_fields:
-                if ('data_key' not in f) or ('label' not in f):
-                    raise IAToolkitException(IAToolkitException.ErrorType.INVALID_PARAMETER,
-                                             f'The field "custom_fields" must contain the following keys: data_key and label')
-
-                # add default value for data_type
-                if 'type' not in f:
-                    f['type'] = 'text'
-
-        prompt = Prompt(
-            company_id=company.id if company else None,
-            name=prompt_name,
-            description=description,
-            order=order,
-            category_id=category.id if category and prompt_type != PromptType.SYSTEM else None,
-            active=active,
-            filename=prompt_filename,
-            prompt_type=prompt_type.value,
-            custom_fields=custom_fields
-        )
-
-        try:
-            self.llm_query_repo.create_or_update_prompt(prompt)
-        except Exception as e:
-            raise IAToolkitException(IAToolkitException.ErrorType.DATABASE_ERROR,
-                                     f'error creating prompt "{prompt_name}": {str(e)}')
-
-    def get_prompt_content(self, company: Company, prompt_name: str):
-        try:
-            # get the prompt
-            prompt = self.llm_query_repo.get_prompt_by_name(company, prompt_name)
-            if not prompt:
-                raise IAToolkitException(IAToolkitException.ErrorType.DOCUMENT_NOT_FOUND,
-                                         f"prompt not found '{prompt}' for company '{company.short_name}'")
-
-            try:
-                if (prompt.prompt_type == PromptType.SYSTEM.value and
-                        not self.asset_repo.exists(company.short_name, AssetType.PROMPT, prompt.filename)):
-                    user_prompt_content = importlib.resources.read_text('iatoolkit.system_prompts', prompt.filename)
-                else:
-                    user_prompt_content = self.asset_repo.read_text(
-                        company.short_name,
-                        AssetType.PROMPT,
-                        prompt.filename
-                    )
-            except FileNotFoundError:
-                raise IAToolkitException(IAToolkitException.ErrorType.FILE_IO_ERROR,
-                                         f"prompt file '{prompt.filename}' does not exist for company '{company.short_name}'")
-            except Exception as e:
-                raise IAToolkitException(IAToolkitException.ErrorType.FILE_IO_ERROR,
-                                         f"error while reading prompt: '{prompt_name}': {e}")
-
-            return user_prompt_content
-
-        except IAToolkitException:
-            raise
-        except Exception as e:
-            logging.exception(
-                f"error loading prompt '{prompt_name}' content for '{company.short_name}': {e}")
-            raise IAToolkitException(IAToolkitException.ErrorType.PROMPT_ERROR,
-                                     f'error loading prompt "{prompt_name}" content for company {company.short_name}: {str(e)}')
-
-    def save_prompt(self, company_short_name: str, prompt_name: str, data: dict):
+    def sync_prompt_categories(self, company_short_name: str, categories_config: list):
         """
-        Create or Update a prompt.
-        1. Saves the Jinja content to the .prompt file.
-        2. Updates the Metadata (params, description) in company.yaml using ConfigurationService.
-        3. Updates the Database.
+        Syncs only the prompt categories based on a simple list of names.
+        The order in the list determines the 'order' field in DB.
+        Removes categories not present in the list.
+        Finally, updates the YAML configuration.
         """
         company = self.profile_repo.get_company_by_short_name(company_short_name)
         if not company:
             raise IAToolkitException(IAToolkitException.ErrorType.INVALID_NAME,
-                                     f"Company {company_short_name} not found")
-
-        # Validate category if present
-        category_id = None
-        if 'category' in data:
-            # simple lookup, assuming category names are unique per company
-            cat = self.llm_query_repo.get_category_by_name(company.id, data['category'])
-            if cat:
-                category_id = cat.id
-
-        # 1. save the physical part of the prompt (content)
-        if 'content' in data:
-            filename = f"{prompt_name}.prompt"
-            filename = filename.lower().replace(' ', '_')
-            self.asset_repo.write_text(company_short_name, AssetType.PROMPT, filename, data['content'])
-
-        # 2. Sync the metadata with company.yaml (lazy import here)
-        # Extract the fields that go to the YAML
-        yaml_metadata = {
-            'name': prompt_name,
-            'description': data.get('description', ''),
-            'category': data.get('category'),
-            'prompt_type': data.get('prompt_type', 'company'),
-            'order': data.get('order', 1),
-            'active': data.get('active', True),
-            'custom_fields': data.get('custom_fields', [])
-        }
-
-        self._sync_to_configuration(company_short_name, yaml_metadata)
-
-        # 3. Reflect the changes in the DB immediately (so we do not wait for a reload)
-        # This is optional if you trust that _sync_to_configuration will reload the config,
-        # but it is safer to update the current entity.
-        prompt_db = self.llm_query_repo.get_prompt_by_name(company, prompt_name)
-        if not prompt_db:
-            # Create new prompt in DB immediately for responsiveness
-            new_prompt = Prompt(
-                company_id=company.id,
-                name=prompt_name,
-                description=yaml_metadata['description'],
-                order=yaml_metadata['order'],
-                category_id=category_id,
-                active=yaml_metadata['active'],
-                prompt_type=yaml_metadata['prompt_type'],
-                filename=f"{prompt_name.lower().replace(' ', '_')}.prompt",
-                custom_fields=yaml_metadata['custom_fields']
-            )
-            self.llm_query_repo.create_or_update_prompt(new_prompt)
-        else:
-            prompt_db.description = yaml_metadata['description']
-            prompt_db.category_id = category_id
-            prompt_db.order = yaml_metadata['order']
-            prompt_db.custom_fields = yaml_metadata['custom_fields']
-            prompt_db.active = yaml_metadata['active']
-            self.llm_query_repo.create_or_update_prompt(prompt_db)
-
-    def _sync_to_configuration(self, company_short_name: str, prompt_data: dict):
-        """
-        Uses ConfigurationService to inject this prompt into the 'prompts.prompt_list' list of the YAML.
-        """
-        # --- LAZY IMPORT to avoid a circular dependency ---
-        from iatoolkit import current_iatoolkit
-        from iatoolkit.services.configuration_service import ConfigurationService
-
-        config_service = current_iatoolkit().get_injector().get(ConfigurationService)
-
-        # 1. Get the current raw configuration (without Python objects)
-        # We need to read the structure to find out whether the prompt already exists in the list.
-        full_config = config_service._load_and_merge_configs(company_short_name)
-
-        prompts_config = full_config.get('prompts', {})
-        # Normalize the structure whether prompts is a list or a dict
-        if isinstance(prompts_config, list):
-            # Old or simple structure, convert it to a dict
-            prompts_config = {'prompt_list': prompts_config, 'prompt_categories': []}
-
-        prompt_list = prompts_config.get('prompt_list', [])
-
-        # 2. Check whether the prompt already exists in the list
-        found_index = -1
-        for i, p in enumerate(prompt_list):
-            if p.get('name') == prompt_data['name']:
-                found_index = i
-                break
-
-        # 3. Build the update path (key path)
-        if found_index >= 0:
-            # Update an existing entry: "prompts.prompt_list.3"
-            # Note: prompt_data contains keys such as 'description', 'custom_fields', etc.
-            # ConfigurationService.update_configuration_key expects a key and a value.
-            # We could update the whole prompt object in the list.
-            key_path = f"prompts.prompt_list.{found_index}"
-            config_service.update_configuration_key(company_short_name, key_path, prompt_data)
-        else:
-            # Create a new entry: append it to the list
-            # We use the add_configuration_key method created earlier
-            config_service.add_configuration_key(company_short_name, "prompts.prompt_list", str(len(prompt_list)), prompt_data)
+                                     f'Company {company_short_name} not found')

-    def get_system_prompt(self):
         try:
-            system_prompt_content = []
-
-            # read all the system prompts from the database
-            system_prompts = self.llm_query_repo.get_system_prompts()
+            processed_categories_ids = []

-            for prompt in system_prompts:
-                try:
-                    content = importlib.resources.read_text('iatoolkit.system_prompts', prompt.filename)
-                    system_prompt_content.append(content)
-                except FileNotFoundError:
-                    logging.warning(f"Prompt file does not exist in the package: {prompt.filename}")
-                except Exception as e:
-                    raise IAToolkitException(IAToolkitException.ErrorType.FILE_IO_ERROR,
-                                             f"error reading system prompt '{prompt.filename}': {e}")
-
-            # join the system prompts into a single string
-            return "\n".join(system_prompt_content)
-
-        except IAToolkitException:
-            raise
-        except Exception as e:
-            logging.exception(
-                f"Error getting the system prompt content: {e}")
-            raise IAToolkitException(IAToolkitException.ErrorType.PROMPT_ERROR,
-                                     f'error reading the system prompts: {str(e)}')
-
-    def get_user_prompts(self, company_short_name: str, include_all: bool = False) -> dict:
-        try:
-            # validate company
-            company = self.profile_repo.get_company_by_short_name(company_short_name)
-            if not company:
-                return {"error": self.i18n_service.t('errors.company_not_found', company_short_name=company_short_name)}
-
-            # get all the prompts
-            # If include_all is True, repo should return everything for the company
-            all_prompts = self.llm_query_repo.get_prompts(company, include_all=include_all)
-
-            # Deduplicate prompts by id
-            all_prompts = list({p.id: p for p in all_prompts}.values())
-
-            # group by category
-            prompts_by_category = defaultdict(list)
-            for prompt in all_prompts:
-                # Filter logic moved here or in repo.
-                # If include_all is False, we only want active prompts (and maybe only specific types)
-                if not include_all:
-                    if not prompt.active:
-                        continue
-                    # Standard user view: usually excludes system/agent hidden prompts if any?
-                    # Current requirement: "only active, company-type prompts" for end users
-                    if prompt.prompt_type != PromptType.COMPANY.value:
-                        continue
-
-                # Grouping logic
-                cat_key = (0, "Uncategorized")  # Default
-                if prompt.category:
-                    cat_key = (prompt.category.order, prompt.category.name)
-
-                prompts_by_category[cat_key].append(prompt)
-
-            # sort each category by order
-            for cat_key in prompts_by_category:
-                prompts_by_category[cat_key].sort(key=lambda p: p.order)
-
-            categorized_prompts = []
-
-            # sort categories by order
-            sorted_categories = sorted(prompts_by_category.items(), key=lambda item: item[0][0])
-
-            for (cat_order, cat_name), prompts in sorted_categories:
-                categorized_prompts.append({
-                    'category_name': cat_name,
-                    'category_order': cat_order,
-                    'prompts': [
-                        {
-                            'prompt': p.name,
-                            'description': p.description,
-                            'type': p.prompt_type,
-                            'active': p.active,
-                            'custom_fields': p.custom_fields,
-                            'order': p.order
-                        }
-                        for p in prompts
-                    ]
-                })
+            # 1. Update/Create Categories
+            for idx, cat_name in enumerate(categories_config):
+                # Order is 0-based index or 1-based, consistent with current usage (seems 0 or 1 is fine, usually 0 for arrays)
+                new_cat = PromptCategory(
+                    company_id=company.id,
+                    name=cat_name,
+                    order=idx
+                )
+                persisted_cat = self.llm_query_repo.create_or_update_prompt_category(new_cat)
+                processed_categories_ids.append(persisted_cat.id)
+
+            # 2. Delete missing categories
+            # We fetch all categories for the company and delete those not in processed_ids
+            all_categories = self.llm_query_repo.get_all_categories(company.id)
+            for cat in all_categories:
+                if cat.id not in processed_categories_ids:
+                    # Depending on logic, we might want to check if they have prompts assigned.
+                    # Usually, sync logic implies "force state", so we delete.
+                    # SQLAlchemy cascading might handle prompts or set them to null depending on model config.
+                    self.llm_query_repo.session.delete(cat)

-            return {'message': categorized_prompts}
+            self.llm_query_repo.commit()

         except Exception as e:
-            logging.error(f"error in get_prompts: {e}")
-            return {'error': str(e)}
-
-    def delete_prompt(self, company_short_name: str, prompt_name: str):
-        """
-        Deletes a prompt:
-        1. Removes from DB.
-        2. Removes from YAML config.
-        3. (Optional) Deletes/Archives physical file.
-        """
-        company = self.profile_repo.get_company_by_short_name(company_short_name)
-        if not company:
-            raise IAToolkitException(IAToolkitException.ErrorType.INVALID_NAME, f"Company not found")
-
-        prompt_db = self.llm_query_repo.get_prompt_by_name(company, prompt_name)
-        if not prompt_db:
-            raise IAToolkitException(IAToolkitException.ErrorType.DOCUMENT_NOT_FOUND, f"Prompt {prompt_name} not found")
-
-        # 1. Remove from DB
-        self.llm_query_repo.delete_prompt(prompt_db)
-
-        # 2. Remove from Configuration (Lazy import)
-        from iatoolkit import current_iatoolkit
-        from iatoolkit.services.configuration_service import ConfigurationService
-        config_service = current_iatoolkit().get_injector().get(ConfigurationService)
-
-        # We need to find the index to remove it from the list in YAML
-        full_config = config_service._load_and_merge_configs(company_short_name)
-        prompts_list = full_config.get('prompts', {}).get('prompt_list', [])
-
-        found_index = -1
-        for i, p in enumerate(prompts_list):
-            if p.get('name') == prompt_name:
-                found_index = i
-                break
-
-        if found_index >= 0:
-            # This is tricky with current ConfigService if it doesn't support list item deletion easily.
-            # Assuming we might need to implement a 'delete_configuration_key' or similar,
-            # OR just leave it in config but update DB. For now, let's assume manual config cleanup or
-            # implement a specific removal if ConfigService supports it.
-            # If ConfigService doesn't support removal, we might just mark it inactive in config.
-            pass
-            # config_service.remove_list_item(company_short_name, "prompts.prompt_list", found_index)
-
-
+            self.llm_query_repo.rollback()
+            logging.exception(f"Error syncing prompt categories: {e}")
+            raise IAToolkitException(IAToolkitException.ErrorType.DATABASE_ERROR, str(e))
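
For reference, a minimal usage sketch of the reworked PromptService API that this diff introduces (get_prompts, save_prompt, delete_prompt). The method names, the keys accepted in data, and the returned structure come from the code above; the service instance, company short name, and prompt values are illustrative assumptions (in the package the service is normally obtained through the injector).

# prompt_service: a PromptService instance wired up elsewhere (e.g. via the injector)

# Create or update a prompt: the Jinja content goes to the .prompt asset,
# the metadata goes to the database.
prompt_service.save_prompt(
    company_short_name="acme",           # hypothetical company short name
    prompt_name="quarterly summary",
    data={
        "content": "Summarize {{ report }} in three bullet points.",
        "description": "Summarizes quarterly reports",
        "category": "Reports",           # looked up by name if the category exists
        "order": 1,
        "active": True,
        "custom_fields": [{"data_key": "report", "label": "Report", "type": "text"}],
    },
)

# List the active, company-type prompts grouped and sorted by category.
result = prompt_service.get_prompts("acme")
if "error" not in result:
    for group in result["message"]:
        print(group["category_name"], [p["prompt"] for p in group["prompts"]])

# Remove the prompt's database record.
prompt_service.delete_prompt("acme", "quarterly summary")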