iatoolkit-1.7.0-py3-none-any.whl → iatoolkit-1.9.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -9,7 +9,8 @@ from iatoolkit.repositories.llm_query_repo import LLMQueryRepo
  from iatoolkit.services.i18n_service import I18nService
  from iatoolkit.repositories.profile_repo import ProfileRepo
  from collections import defaultdict
- from iatoolkit.repositories.models import Prompt, PromptCategory, Company
+ from iatoolkit.repositories.models import (Prompt, PromptCategory,
+                                            Company, PromptType)
  from iatoolkit.common.exceptions import IAToolkitException
  import importlib.resources
  import logging
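
Note: the widened import brings in PromptType, the enum that replaces the old is_system_prompt boolean throughout this release. Its definition lives in iatoolkit.repositories.models and is not shown in this diff; the sketch below is a hypothetical reconstruction, inferred from the 'system'/'company' string values used later in the diff.

    from enum import Enum

    class PromptType(Enum):
        # Assumed values: the diff compares against PromptType.SYSTEM.value /
        # PromptType.COMPANY.value and defaults prompt_type to 'company'.
        SYSTEM = "system"    # prompts shipped with the package (_SYSTEM_PROMPTS)
        COMPANY = "company"  # prompts defined in a company's YAML configuration
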
@@ -34,14 +35,14 @@ class PromptService:
          self.profile_repo = profile_repo
          self.i18n_service = i18n_service
  
-     def sync_company_prompts(self, company_short_name: str, prompts_config: list, categories_config: list):
+     def sync_company_prompts(self, company_short_name: str, prompt_list: list, categories_config: list):
          """
          Synchronizes prompt categories and prompts from YAML config to Database.
          Strategies:
          - Categories: Create or Update existing based on name.
          - Prompts: Create or Update existing based on name. Soft-delete or Delete unused.
          """
-         if not prompts_config:
+         if not prompt_list:
              return
  
          company = self.profile_repo.get_company_by_short_name(company_short_name)
@@ -49,6 +50,7 @@ class PromptService:
              raise IAToolkitException(IAToolkitException.ErrorType.INVALID_NAME,
                                       f'Company {company_short_name} not found')
  
+         self._register_system_prompts(company)
          try:
              # 1. Sync Categories
              category_map = {}
@@ -66,7 +68,7 @@ class PromptService:
              # 2. Sync Prompts
              defined_prompt_names = set()
  
-             for prompt_data in prompts_config:
+             for prompt_data in prompt_list:
                  category_name = prompt_data.get('category')
                  if not category_name or category_name not in category_map:
                      logging.warning(
@@ -86,7 +88,7 @@ class PromptService:
                      order=prompt_data.get('order'),
                      category_id=category_obj.id,
                      active=prompt_data.get('active', True),
-                     is_system_prompt=False,
+                     prompt_type=PromptType.COMPANY.value,
                      filename=filename,
                      custom_fields=prompt_data.get('custom_fields', [])
                  )
@@ -106,7 +108,7 @@ class PromptService:
              self.llm_query_repo.rollback()
              raise IAToolkitException(IAToolkitException.ErrorType.DATABASE_ERROR, str(e))
  
-     def register_system_prompts(self):
+     def _register_system_prompts(self, company: Company):
          """
          Synchronizes system prompts defined in Dispatcher/Code to Database.
          """
@@ -116,20 +118,26 @@ class PromptService:
          for i, prompt_data in enumerate(_SYSTEM_PROMPTS):
              prompt_name = prompt_data['name']
              defined_names.add(prompt_name)
+             prompt_filename = f"{prompt_name}.prompt"
  
              new_prompt = Prompt(
-                 company_id=None,  # System prompts have no company
+                 company_id=company.id,
                  name=prompt_name,
                  description=prompt_data['description'],
                  order=i + 1,
                  category_id=None,
                  active=True,
-                 is_system_prompt=True,
-                 filename=f"{prompt_name}.prompt",
+                 prompt_type=PromptType.SYSTEM.value,
+                 filename=prompt_filename,
                  custom_fields=[]
              )
              self.llm_query_repo.create_or_update_prompt(new_prompt)
  
+             # add prompt to company assets
+             if not self.asset_repo.exists(company.short_name, AssetType.PROMPT, prompt_filename):
+                 prompt_content = importlib.resources.read_text('iatoolkit.system_prompts', prompt_filename)
+                 self.asset_repo.write_text(company.short_name, AssetType.PROMPT, prompt_filename, prompt_content)
+ 
          # Cleanup old system prompts
          existing_sys_prompts = self.llm_query_repo.get_system_prompts()
          for p in existing_sys_prompts:
@@ -149,7 +157,7 @@ class PromptService:
                      company: Company = None,
                      category: PromptCategory = None,
                      active: bool = True,
-                     is_system_prompt: bool = False,
+                     prompt_type: PromptType = PromptType.COMPANY,
                      custom_fields: list = []
                      ):
          """
@@ -157,7 +165,7 @@ class PromptService:
          Validates file existence before creating DB entry.
          """
          prompt_filename = prompt_name.lower() + '.prompt'
-         if is_system_prompt:
+         if prompt_type == PromptType.SYSTEM:
              if not importlib.resources.files('iatoolkit.system_prompts').joinpath(prompt_filename).is_file():
                  raise IAToolkitException(IAToolkitException.ErrorType.INVALID_NAME,
                                           f'missing system prompt file: {prompt_filename}')
@@ -181,10 +189,10 @@ class PromptService:
              name=prompt_name,
              description=description,
              order=order,
-             category_id=category.id if category and not is_system_prompt else None,
+             category_id=category.id if category and prompt_type != PromptType.SYSTEM else None,
              active=active,
              filename=prompt_filename,
-             is_system_prompt=is_system_prompt,
+             prompt_type=prompt_type.value,
              custom_fields=custom_fields
          )
  
@@ -196,21 +204,25 @@ class PromptService:
  
      def get_prompt_content(self, company: Company, prompt_name: str):
          try:
-             # get the user prompt
-             user_prompt = self.llm_query_repo.get_prompt_by_name(company, prompt_name)
-             if not user_prompt:
+             # get the prompt
+             prompt = self.llm_query_repo.get_prompt_by_name(company, prompt_name)
+             if not prompt:
                  raise IAToolkitException(IAToolkitException.ErrorType.DOCUMENT_NOT_FOUND,
-                                          f"prompt not found '{prompt_name}' for company '{company.short_name}'")
+                                          f"prompt not found '{prompt}' for company '{company.short_name}'")
  
              try:
-                 user_prompt_content = self.asset_repo.read_text(
-                     company.short_name,
-                     AssetType.PROMPT,
-                     user_prompt.filename
-                 )
+                 if (prompt.prompt_type == PromptType.SYSTEM.value and
+                         not self.asset_repo.exists(company.short_name, AssetType.PROMPT, prompt.filename)):
+                     user_prompt_content = importlib.resources.read_text('iatoolkit.system_prompts', prompt.filename)
+                 else:
+                     user_prompt_content = self.asset_repo.read_text(
+                         company.short_name,
+                         AssetType.PROMPT,
+                         prompt.filename
+                     )
              except FileNotFoundError:
                  raise IAToolkitException(IAToolkitException.ErrorType.FILE_IO_ERROR,
-                                          f"prompt file '{user_prompt.filename}' does not exist for company '{company.short_name}'")
+                                          f"prompt file '{prompt.filename}' does not exist for company '{company.short_name}'")
              except Exception as e:
                  raise IAToolkitException(IAToolkitException.ErrorType.FILE_IO_ERROR,
                                           f"error while reading prompt: '{prompt_name}': {e}")
@@ -225,6 +237,114 @@ class PromptService:
              raise IAToolkitException(IAToolkitException.ErrorType.PROMPT_ERROR,
                                       f'error loading prompt "{prompt_name}" content for company {company.short_name}: {str(e)}')
  
+     def save_prompt(self, company_short_name: str, prompt_name: str, data: dict):
+         """
+         Create or Update a prompt.
+         1. Saves the Jinja content to the .prompt file.
+         2. Updates the Metadata (params, description) in company.yaml using ConfigurationService.
+         3. Updates the Database.
+         """
+         company = self.profile_repo.get_company_by_short_name(company_short_name)
+         if not company:
+             raise IAToolkitException(IAToolkitException.ErrorType.INVALID_NAME,
+                                      f"Company {company_short_name} not found")
+ 
+         # Validate category if present
+         category_id = None
+         if 'category' in data:
+             # simple lookup, assuming category names are unique per company
+             cat = self.llm_query_repo.get_category_by_name(company.id, data['category'])
+             if cat:
+                 category_id = cat.id
+ 
+         # 1. save the physical part of the prompt (content)
+         if 'content' in data:
+             filename = f"{prompt_name}.prompt"
+             filename = filename.lower().replace(' ', '_')
+             self.asset_repo.write_text(company_short_name, AssetType.PROMPT, filename, data['content'])
+ 
+         # 2. Sync the metadata with company.yaml (lazy import here)
+         # Extract the fields that go to the YAML
+         yaml_metadata = {
+             'name': prompt_name,
+             'description': data.get('description', ''),
+             'category': data.get('category'),
+             'prompt_type': data.get('prompt_type', 'company'),
+             'order': data.get('order', 1),
+             'active': data.get('active', True),
+             'custom_fields': data.get('custom_fields', [])
+         }
+ 
+         self._sync_to_configuration(company_short_name, yaml_metadata)
+ 
+         # 3. Reflect the changes in the DB immediately (so we don't wait for a reload).
+         # This is optional if you trust that _sync_to_configuration will reload the config,
+         # but it is safer to update the current entity.
+         prompt_db = self.llm_query_repo.get_prompt_by_name(company, prompt_name)
+         if not prompt_db:
+             # Create new prompt in DB immediately for responsiveness
+             new_prompt = Prompt(
+                 company_id=company.id,
+                 name=prompt_name,
+                 description=yaml_metadata['description'],
+                 order=yaml_metadata['order'],
+                 category_id=category_id,
+                 active=yaml_metadata['active'],
+                 prompt_type=yaml_metadata['prompt_type'],
+                 filename=f"{prompt_name.lower().replace(' ', '_')}.prompt",
+                 custom_fields=yaml_metadata['custom_fields']
+             )
+             self.llm_query_repo.create_or_update_prompt(new_prompt)
+         else:
+             prompt_db.description = yaml_metadata['description']
+             prompt_db.category_id = category_id
+             prompt_db.order = yaml_metadata['order']
+             prompt_db.custom_fields = yaml_metadata['custom_fields']
+             prompt_db.active = yaml_metadata['active']
+             self.llm_query_repo.create_or_update_prompt(prompt_db)
+ 
+     def _sync_to_configuration(self, company_short_name: str, prompt_data: dict):
+         """
+         Uses ConfigurationService to inject this prompt into the 'prompts.prompt_list' list in the YAML.
+         """
+         # --- LAZY IMPORT to avoid a circular dependency ---
+         from iatoolkit import current_iatoolkit
+         from iatoolkit.services.configuration_service import ConfigurationService
+ 
+         config_service = current_iatoolkit().get_injector().get(ConfigurationService)
+ 
+         # 1. Get the current raw configuration (no Python objects).
+         # We need to read the structure to find out whether the prompt already exists in the list.
+         full_config = config_service._load_and_merge_configs(company_short_name)
+ 
+         prompts_config = full_config.get('prompts', {})
+         # Normalize the structure whether prompts is a list or a dict
+         if isinstance(prompts_config, list):
+             # Old or simple structure; convert it to a dict
+             prompts_config = {'prompt_list': prompts_config, 'prompt_categories': []}
+ 
+         prompt_list = prompts_config.get('prompt_list', [])
+ 
+         # 2. Check whether the prompt already exists in the list
+         found_index = -1
+         for i, p in enumerate(prompt_list):
+             if p.get('name') == prompt_data['name']:
+                 found_index = i
+                 break
+ 
+         # 3. Build the update path (key path)
+         if found_index >= 0:
+             # Update existing entry: "prompts.prompt_list.3"
+             # Note: prompt_data contains keys such as 'description', 'custom_fields', etc.
+             # ConfigurationService.update_configuration_key expects a key and a value,
+             # so we can update the whole prompt object in the list.
+             key_path = f"prompts.prompt_list.{found_index}"
+             config_service.update_configuration_key(company_short_name, key_path, prompt_data)
+         else:
+             # Create new entry: append to the list
+             # using the add_configuration_key method added earlier.
+             config_service.add_configuration_key(company_short_name, "prompts.prompt_list", str(len(prompt_list)), prompt_data)
+ 
      def get_system_prompt(self):
          try:
              system_prompt_content = []
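
The new save_prompt method accepts a plain dict and fans it out to the asset store, the company YAML (via _sync_to_configuration), and the database. A minimal usage sketch with the keys the method actually reads; the company and prompt names are invented, and prompt_service stands for an injected PromptService instance:

    # Hypothetical call; only the dict keys are taken from the code above.
    data = {
        'content': 'Hello {{ customer_name }}, how can I help you today?',  # Jinja template body
        'description': 'Greeting prompt for the support channel',
        'category': 'Support',
        'prompt_type': 'company',
        'order': 3,
        'active': True,
        'custom_fields': [],
    }
    prompt_service.save_prompt('acme', 'support_greeting', data)
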
@@ -253,7 +373,7 @@ class PromptService:
              raise IAToolkitException(IAToolkitException.ErrorType.PROMPT_ERROR,
                                       f'error reading the system prompts": {str(e)}')
  
-     def get_user_prompts(self, company_short_name: str) -> dict:
+     def get_user_prompts(self, company_short_name: str, include_all: bool = False) -> dict:
          try:
              # validate company
              company = self.profile_repo.get_company_by_short_name(company_short_name)
@@ -261,15 +381,31 @@ class PromptService:
                  return {"error": self.i18n_service.t('errors.company_not_found', company_short_name=company_short_name)}
  
              # get all the prompts
-             all_prompts = self.llm_query_repo.get_prompts(company)
+             # If include_all is True, repo should return everything for the company
+             all_prompts = self.llm_query_repo.get_prompts(company, include_all=include_all)
+ 
+             # Deduplicate prompts by id
+             all_prompts = list({p.id: p for p in all_prompts}.values())
  
              # group by category
              prompts_by_category = defaultdict(list)
              for prompt in all_prompts:
-                 if prompt.active:
-                     if prompt.category:
-                         cat_key = (prompt.category.order, prompt.category.name)
-                         prompts_by_category[cat_key].append(prompt)
+                 # Filter logic moved here or in repo.
+                 # If include_all is False, we only want active prompts (and maybe only specific types)
+                 if not include_all:
+                     if not prompt.active:
+                         continue
+                     # Standard user view: usually excludes system/agent hidden prompts if any?
+                     # Current requirement: only active, company-type prompts for end users
+                     if prompt.prompt_type != PromptType.COMPANY.value:
+                         continue
+ 
+                 # Grouping logic
+                 cat_key = (0, "Uncategorized")  # Default
+                 if prompt.category:
+                     cat_key = (prompt.category.order, prompt.category.name)
+ 
+                 prompts_by_category[cat_key].append(prompt)
  
              # sort each category by order
              for cat_key in prompts_by_category:
@@ -288,6 +424,8 @@ class PromptService:
                  {
                      'prompt': p.name,
                      'description': p.description,
+                     'type': p.prompt_type,
+                     'active': p.active,
                      'custom_fields': p.custom_fields,
                      'order': p.order
                  }
@@ -301,3 +439,46 @@ class PromptService:
              logging.error(f"error in get_prompts: {e}")
              return {'error': str(e)}
  
+     def delete_prompt(self, company_short_name: str, prompt_name: str):
+         """
+         Deletes a prompt:
+         1. Removes from DB.
+         2. Removes from YAML config.
+         3. (Optional) Deletes/Archives physical file.
+         """
+         company = self.profile_repo.get_company_by_short_name(company_short_name)
+         if not company:
+             raise IAToolkitException(IAToolkitException.ErrorType.INVALID_NAME, f"Company not found")
+ 
+         prompt_db = self.llm_query_repo.get_prompt_by_name(company, prompt_name)
+         if not prompt_db:
+             raise IAToolkitException(IAToolkitException.ErrorType.DOCUMENT_NOT_FOUND, f"Prompt {prompt_name} not found")
+ 
+         # 1. Remove from DB
+         self.llm_query_repo.delete_prompt(prompt_db)
+ 
+         # 2. Remove from Configuration (Lazy import)
+         from iatoolkit import current_iatoolkit
+         from iatoolkit.services.configuration_service import ConfigurationService
+         config_service = current_iatoolkit().get_injector().get(ConfigurationService)
+ 
+         # We need to find the index to remove it from the list in YAML
+         full_config = config_service._load_and_merge_configs(company_short_name)
+         prompts_list = full_config.get('prompts', {}).get('prompt_list', [])
+ 
+         found_index = -1
+         for i, p in enumerate(prompts_list):
+             if p.get('name') == prompt_name:
+                 found_index = i
+                 break
+ 
+         if found_index >= 0:
+             # This is tricky with current ConfigService if it doesn't support list item deletion easily.
+             # Assuming we might need to implement a 'delete_configuration_key' or similar,
+             # OR just leave it in config but update DB. For now, let's assume manual config cleanup or
+             # implement a specific removal if ConfigService supports it.
+             # If ConfigService doesn't support removal, we might just mark it inactive in config.
+             pass
+             # config_service.remove_list_item(company_short_name, "prompts.prompt_list", found_index)
+ 
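
As the comments note, delete_prompt removes the database row but leaves the YAML entry untouched because ConfigurationService has no list-item removal yet. A rough sketch of the remove_list_item helper that the commented-out call anticipates; this is purely hypothetical, and the persistence step (_save_config here) is an assumed hook that does not exist in this release:

    # Hypothetical ConfigurationService method, not present in 1.9.0.
    def remove_list_item(self, company_short_name: str, key_path: str, index: int):
        # Walk the merged config down to the list at key_path
        # (e.g. "prompts.prompt_list") and drop the item at 'index'.
        config = self._load_and_merge_configs(company_short_name)
        node = config
        for part in key_path.split('.'):
            node = node[part]
        if 0 <= index < len(node):
            node.pop(index)
        self._save_config(company_short_name, config)  # assumed persistence hook
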
@@ -0,0 +1,71 @@
+ # Copyright (c) 2024 Fernando Libedinsky
+ # Product: IAToolkit
+ #
+ # IAToolkit is open source software.
+ 
+ from flask import jsonify
+ from flask.views import MethodView
+ from injector import inject
+ from iatoolkit.services.auth_service import AuthService
+ from iatoolkit.services.profile_service import ProfileService
+ from iatoolkit.services.configuration_service import ConfigurationService
+ from iatoolkit.services.knowledge_base_service import KnowledgeBaseService
+ from iatoolkit.repositories.llm_query_repo import LLMQueryRepo
+ from iatoolkit.repositories.models import PromptType, PromptCategory
+ import logging
+ 
+ class CategoriesApiView(MethodView):
+     """
+     Endpoint to retrieve all available categories and types in the system.
+     Useful for populating dropdowns in the frontend.
+     """
+     @inject
+     def __init__(self,
+                  auth_service: AuthService,
+                  profile_service: ProfileService,
+                  configuration_service: ConfigurationService,
+                  knowledge_base_service: KnowledgeBaseService,
+                  llm_query_repo: LLMQueryRepo):
+         self.auth_service = auth_service
+         self.profile_service = profile_service
+         self.knowledge_base_service = knowledge_base_service
+         self.llm_query_repo = llm_query_repo
+         self.configuration_service = configuration_service
+ 
+     def get(self, company_short_name):
+         try:
+             # 1. Verify Authentication
+             auth_result = self.auth_service.verify()
+             if not auth_result.get("success"):
+                 return jsonify(auth_result), 401
+ 
+             # 2. Get Company
+             company = self.profile_service.get_company_by_short_name(company_short_name)
+             if not company:
+                 return jsonify({"error": "Company not found"}), 404
+ 
+             # 3. Gather Categories
+             response_data = {
+                 "prompt_types": [t.value for t in PromptType],
+                 "prompt_categories": [],
+                 "collection_types": [],
+                 # Future categories can be added here (e.g., tool_types, user_roles)
+             }
+ 
+             # A. Prompt Categories (from DB)
+             prompt_cats = self.llm_query_repo.get_all_categories(company_id=company.id)
+             response_data["prompt_categories"] = [c.name for c in prompt_cats]
+ 
+             # B. Collection Types (from KnowledgeBaseService)
+             response_data["collection_types"] = self.knowledge_base_service.get_collection_names(company_short_name)
+ 
+             # C. LLM Models (from ConfigurationService)
+             _, llm_models = self.configuration_service.get_llm_configuration(company_short_name)
+             # Extract only IDs
+             response_data["llm_models"] = [m['id'] for m in llm_models if 'id' in m]
+ 
+             return jsonify(response_data)
+ 
+         except Exception as e:
+             logging.exception(f"Error fetching categories for {company_short_name}: {e}")
+             return jsonify({"status": "error", "message": str(e)}), 500
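
For reference, a successful GET on this new view returns a JSON body shaped like the response_data dict above. Expressed as a Python literal with invented example values:

    example_response = {
        "prompt_types": ["system", "company"],       # one entry per PromptType value
        "prompt_categories": ["Support", "Sales"],   # category names from the DB
        "collection_types": ["manuals", "faq"],      # from KnowledgeBaseService
        "llm_models": ["model-a", "model-b"],        # ids from the LLM configuration
    }
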
@@ -62,7 +62,7 @@ class ConfigurationApiView(MethodView):
          Body: { "key": "llm.model", "value": "gpt-4" }
          """
          try:
-             auth_result = self.auth_service.verify(anonymous=False)  # Require valid user for updates
+             auth_result = self.auth_service.verify()
              if not auth_result.get("success"):
                  return jsonify(auth_result), 401
  
@@ -3,9 +3,11 @@
  #
  # IAToolkit is open source software.
  
- from flask import jsonify
+ from flask import jsonify, request
  from flask.views import MethodView
  from iatoolkit.services.prompt_service import PromptService
+ from iatoolkit.services.profile_service import ProfileService
+ from iatoolkit.repositories.llm_query_repo import LLMQueryRepo
  from iatoolkit.services.auth_service import AuthService
  from injector import inject
  import logging
@@ -15,23 +17,102 @@ class PromptApiView(MethodView):
      @inject
      def __init__(self,
                   auth_service: AuthService,
-                  prompt_service: PromptService ):
+                  prompt_service: PromptService,
+                  profile_service: ProfileService,
+                  llm_query_repo: LLMQueryRepo):
          self.auth_service = auth_service
          self.prompt_service = prompt_service
+         self.profile_service = profile_service
+         self.llm_query_repo = llm_query_repo
  
-     def get(self, company_short_name):
+     def get(self, company_short_name, prompt_name=None):
+         """
+         GET /: Lists the prompt tree (Categories > Prompts).
+         GET /<name>: Returns the full detail: metadata + text content.
+         """
          try:
              # get access credentials
              auth_result = self.auth_service.verify(anonymous=True)
              if not auth_result.get("success"):
                  return jsonify(auth_result), auth_result.get('status_code')
  
-             response = self.prompt_service.get_user_prompts(company_short_name)
-             if "error" in response:
-                 return {'error_message': response["error"]}, 402
+             company = self.profile_service.get_company_by_short_name(company_short_name)
+             if not company:
+                 return jsonify({"error": "Company not found"}), 404
+ 
+             if prompt_name:
+                 # get the prompt object from database
+                 prompt_obj = self.llm_query_repo.get_prompt_by_name(company, prompt_name)
+                 if not prompt_obj:
+                     return jsonify({"error": "Prompt not found"}), 404
+ 
+                 # get the prompt content
+                 content = self.prompt_service.get_prompt_content(company, prompt_name)
+ 
+                 return jsonify({
+                     "meta": prompt_obj.to_dict(),
+                     "content": content
+                 })
+             else:
+                 # Check for query param to include all prompts (admin view)
+                 include_all = request.args.get('all', 'false').lower() == 'true'
+ 
+                 # return prompts based on filter
+                 return jsonify(self.prompt_service.get_user_prompts(company_short_name, include_all=include_all))
  
-             return response, 200
          except Exception as e:
              logging.exception(
                  f"unexpected error getting company prompts: {e}")
              return jsonify({"error_message": str(e)}), 500
+ 
+     def put(self, company_short_name, prompt_name):
+         try:
+             auth_result = self.auth_service.verify()
+             if not auth_result.get("success"):
+                 return jsonify(auth_result), 401
+ 
+             data = request.get_json()
+ 
+             # The service handles file magic and YAML sync
+             self.prompt_service.save_prompt(company_short_name, prompt_name, data)
+ 
+             return jsonify({"status": "success"})
+         except Exception as e:
+             logging.exception(f"Error saving prompt {prompt_name}: {e}")
+             return jsonify({"status": "error", "message": str(e)}), 500
+ 
+     def post(self, company_short_name, prompt_name=None):
+         """Creates a new prompt."""
+         try:
+             auth_result = self.auth_service.verify()
+             if not auth_result.get("success"):
+                 return jsonify(auth_result), 401
+ 
+             data = request.get_json()
+             # If prompt_name is not in URL, check body
+             target_name = prompt_name if prompt_name else data.get('name')
+ 
+             if not target_name:
+                 return jsonify({"status": "error", "message": "Prompt name is required"}), 400
+ 
+             # Reuse save_prompt logic which handles create/update
+             self.prompt_service.save_prompt(company_short_name, target_name, data)
+ 
+             return jsonify({"status": "success"})
+         except Exception as e:
+             logging.exception(f"Error creating prompt: {e}")
+             return jsonify({"status": "error", "message": str(e)}), 500
+ 
+     def delete(self, company_short_name, prompt_name):
+         """Deletes a prompt."""
+         try:
+             auth_result = self.auth_service.verify()
+             if not auth_result.get("success"):
+                 return jsonify(auth_result), 401
+ 
+             self.prompt_service.delete_prompt(company_short_name, prompt_name)
+ 
+             return jsonify({"status": "success"})
+         except Exception as e:
+             logging.exception(f"Error deleting prompt {prompt_name}: {e}")
+             return jsonify({"status": "error", "message": str(e)}), 500
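
Taken together, PromptApiView now covers full CRUD over prompts. A sketch of how a client might exercise the new verbs with the requests library, assuming the view is registered under a route like /api/<company_short_name>/prompts/<prompt_name> (the URL rules are not part of this diff) and that authentication is a bearer token accepted by auth_service.verify():

    import requests

    BASE = "https://example.invalid/api/acme/prompts"  # hypothetical host and route
    HEADERS = {"Authorization": "Bearer <token>"}       # placeholder credentials

    # Create (POST) or update (PUT); both delegate to PromptService.save_prompt
    payload = {"name": "support_greeting", "content": "Hello {{ customer_name }}!", "active": True}
    requests.post(BASE, json=payload, headers=HEADERS)
    requests.put(f"{BASE}/support_greeting", json=payload, headers=HEADERS)

    # Read a single prompt (metadata + content) or the grouped list, optionally
    # including inactive and system prompts via ?all=true
    requests.get(f"{BASE}/support_greeting", headers=HEADERS)
    requests.get(BASE, params={"all": "true"}, headers=HEADERS)

    # Delete a prompt (DB row; YAML cleanup is still limited, see delete_prompt above)
    requests.delete(f"{BASE}/support_greeting", headers=HEADERS)
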
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: iatoolkit
- Version: 1.7.0
+ Version: 1.9.0
  Summary: IAToolkit
  Author: Fernando Libedinsky
  License-Expression: MIT