ws-bom-robot-app 0.0.67__tar.gz → 0.0.68__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (81)
  1. {ws_bom_robot_app-0.0.67/ws_bom_robot_app.egg-info → ws_bom_robot_app-0.0.68}/PKG-INFO +2 -2
  2. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/README.md +1 -1
  3. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/setup.py +1 -1
  4. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/config.py +7 -7
  5. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/main.py +1 -1
  6. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/providers/llm_manager.py +3 -3
  7. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/tools/tool_builder.py +1 -1
  8. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/tools/tool_manager.py +33 -20
  9. ws_bom_robot_app-0.0.68/ws_bom_robot_app/llm/utils/cms.py +109 -0
  10. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68/ws_bom_robot_app.egg-info}/PKG-INFO +2 -2
  11. ws_bom_robot_app-0.0.67/ws_bom_robot_app/llm/utils/cms.py +0 -77
  12. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/MANIFEST.in +0 -0
  13. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/pyproject.toml +0 -0
  14. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/requirements.txt +0 -0
  15. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/setup.cfg +0 -0
  16. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/__init__.py +0 -0
  17. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/auth.py +0 -0
  18. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/cron_manager.py +0 -0
  19. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/__init__.py +0 -0
  20. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/agent_context.py +0 -0
  21. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/agent_description.py +0 -0
  22. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/agent_handler.py +0 -0
  23. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/agent_lcel.py +0 -0
  24. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/api.py +0 -0
  25. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/defaut_prompt.py +0 -0
  26. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/feedbacks/__init__.py +0 -0
  27. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/feedbacks/feedback_manager.py +0 -0
  28. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/models/__init__.py +0 -0
  29. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/models/api.py +0 -0
  30. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/models/base.py +0 -0
  31. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/models/feedback.py +0 -0
  32. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/models/kb.py +0 -0
  33. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/nebuly_handler.py +0 -0
  34. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/providers/__init__.py +0 -0
  35. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/settings.py +0 -0
  36. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/tools/__init__.py +0 -0
  37. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/tools/models/__init__.py +0 -0
  38. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/tools/models/main.py +0 -0
  39. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/tools/utils.py +0 -0
  40. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/utils/__init__.py +0 -0
  41. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/utils/agent.py +0 -0
  42. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/utils/chunker.py +0 -0
  43. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/utils/download.py +0 -0
  44. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/utils/kb.py +0 -0
  45. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/utils/print.py +0 -0
  46. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/utils/secrets.py +0 -0
  47. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/utils/webhooks.py +0 -0
  48. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/vector_store/__init__.py +0 -0
  49. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/vector_store/db/__init__.py +0 -0
  50. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/vector_store/db/base.py +0 -0
  51. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/vector_store/db/chroma.py +0 -0
  52. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/vector_store/db/faiss.py +0 -0
  53. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/vector_store/db/manager.py +0 -0
  54. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/vector_store/db/qdrant.py +0 -0
  55. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/vector_store/generator.py +0 -0
  56. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/vector_store/integration/__init__.py +0 -0
  57. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/vector_store/integration/azure.py +0 -0
  58. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/vector_store/integration/base.py +0 -0
  59. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/vector_store/integration/confluence.py +0 -0
  60. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/vector_store/integration/dropbox.py +0 -0
  61. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/vector_store/integration/gcs.py +0 -0
  62. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/vector_store/integration/github.py +0 -0
  63. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/vector_store/integration/googledrive.py +0 -0
  64. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/vector_store/integration/jira.py +0 -0
  65. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/vector_store/integration/manager.py +0 -0
  66. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/vector_store/integration/s3.py +0 -0
  67. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/vector_store/integration/sftp.py +0 -0
  68. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/vector_store/integration/sharepoint.py +0 -0
  69. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/vector_store/integration/sitemap.py +0 -0
  70. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/vector_store/integration/slack.py +0 -0
  71. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/vector_store/loader/__init__.py +0 -0
  72. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/vector_store/loader/base.py +0 -0
  73. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/vector_store/loader/docling.py +0 -0
  74. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/llm/vector_store/loader/json_loader.py +0 -0
  75. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/main.py +0 -0
  76. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/task_manager.py +0 -0
  77. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app/util.py +0 -0
  78. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app.egg-info/SOURCES.txt +0 -0
  79. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app.egg-info/dependency_links.txt +0 -0
  80. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app.egg-info/requires.txt +0 -0
  81. {ws_bom_robot_app-0.0.67 → ws_bom_robot_app-0.0.68}/ws_bom_robot_app.egg-info/top_level.txt +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ws_bom_robot_app
- Version: 0.0.67
+ Version: 0.0.68
  Summary: A FastAPI application serving ws bom/robot/llm platform ai.
  Home-page: https://github.com/websolutespa/bom
  Author: Websolute Spa
@@ -244,7 +244,7 @@ pytest --cov=ws_bom_robot_app --log-cli-level=info
  launch debugger

  ```pwsh
- streamlit run debugger.py --server.port 6011
+ streamlit run debugger.py --server.port 8051
  ```

  dockerize base image
@@ -179,7 +179,7 @@ pytest --cov=ws_bom_robot_app --log-cli-level=info
  launch debugger

  ```pwsh
- streamlit run debugger.py --server.port 6011
+ streamlit run debugger.py --server.port 8051
  ```

  dockerize base image
@@ -4,7 +4,7 @@ _requirements = [line.split('#')[0].strip() for line in open("requirements.txt")

  setup(
  name="ws_bom_robot_app",
- version="0.0.67",
+ version="0.0.68",
  description="A FastAPI application serving ws bom/robot/llm platform ai.",
  long_description=open("README.md", encoding='utf-8').read(),
  long_description_content_type="text/markdown",
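The next two hunks rework `config.py`: the workers-only `_get_number_of_workers()` helper becomes a generic `_get_sys_arg(arg, default)` flag parser, which now also seeds a new `tcp_port` field (default 6001) alongside `number_of_workers` and `is_multi_process`. A minimal standalone sketch of the pattern those hunks introduce (module-level rather than a `RuntimeOptions` static method; the example command line is illustrative):

```python
import sys

def get_sys_arg(arg: str, default: int) -> int:
    """Return the integer following --<arg> on the command line, or the default."""
    try:
        for i, argv in enumerate(sys.argv):
            if argv == f"--{arg}" and i + 1 < len(sys.argv):
                return int(sys.argv[i + 1])
    except (ValueError, IndexError):
        pass
    return default

# e.g. a launch command containing: --port 8080 --workers 4
tcp_port = get_sys_arg("port", 6001)   # -> 8080
workers = get_sys_arg("workers", 1)    # -> 4
is_multi_process = workers > 1
```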
@@ -54,7 +54,7 @@ class Settings(BaseSettings):

  class RuntimeOptions(BaseModel):
  @staticmethod
- def _get_number_of_workers() -> int:
+ def _get_sys_arg(arg: str, default: int) -> int:
  """
  Returns the number of worker processes to use for the application.

@@ -72,18 +72,18 @@ class Settings(BaseSettings):
  """
  import sys
  try:
- for i, arg in enumerate(sys.argv):
- if arg == "--workers" and i + 1 < len(sys.argv):
+ for i, argv in enumerate(sys.argv):
+ if argv == f"--{arg}" and i + 1 < len(sys.argv):
  return int(sys.argv[i + 1])
  except (ValueError, IndexError):
  pass
- return 1
+ return default
  debug: bool
+ tcp_port: int = _get_sys_arg("port", 6001)
  loader_show_progress: bool
  loader_silent_errors: bool
- number_of_workers: int = _get_number_of_workers()
- is_multi_process: bool = _get_number_of_workers() > 1
-
+ number_of_workers: int = _get_sys_arg("workers", 1)
+ is_multi_process: bool = _get_sys_arg("workers", 1) > 1

  def runtime_options(self) -> RuntimeOptions:
  """_summary_
@@ -85,7 +85,7 @@ async def __stream(rq: StreamRequest, ctx: Request, queue: Queue,formatted: bool
  client = LangSmithClient(
  api_key= rq.secrets.get("langChainApiKey", "")
  )
- trace = LangChainTracer(project_name=rq.lang_chain_project,client=client,tags=[str(ctx.base_url)])
+ trace = LangChainTracer(project_name=rq.lang_chain_project,client=client,tags=[str(ctx.base_url) if ctx else ''])
  callbacks.append(trace)

  __llm: LlmInterface =rq.get_llm()
@@ -183,9 +183,9 @@ class IBM(LlmInterface):
  from ibm_watsonx_ai import APIClient,Credentials
  super().__init__(config)
  self.__base_url = self.config.api_url or os.getenv("WATSONX_URL") or "https://us-south.ml.cloud.ibm.com"
- self.__apy_key = self.config.api_key or os.getenv("WATSONX_APIKEY")
+ self.__api_key = self.config.api_key or os.getenv("WATSONX_APIKEY")
  self.__client = APIClient(
- credentials=Credentials(url=self.__base_url,api_key=self.__apy_key),
+ credentials=Credentials(url=self.__base_url,api_key=self.__api_key),
  project_id=os.getenv("WATSONX_PROJECTID") or "default"
  )
  def get_llm(self):
@@ -202,7 +202,7 @@ class IBM(LlmInterface):
  today = date.today().strftime("%Y-%m-%d")
  url = f"{self.__base_url}/ml/v1/foundation_model_specs?version={today}&filters=task_generation,task_summarization:and"
  headers = {
- "Authorization": f"Bearer {self.__apy_key}",
+ "Authorization": f"Bearer {self.__api_key}",
  "Content-Type": "application/json"
  }
  response = requests.get(url, headers=headers)
@@ -18,7 +18,7 @@ async def __process_proxy_tool(proxy_tool: LlmAppTool) -> LlmAppTool | None:
  if not app:
  raise ValueError(f"App with id {app_id} not found.")
  tool_id = secrets.get("toolId")
- tool = next((t for t in app.app_tools if t.id == tool_id), None)
+ tool = next((t for t in app.rq.app_tools if app.rq.app_tools and t.id == tool_id), None)
  if not tool:
  raise ValueError(f"Tool with function_id {tool_id} not found in app {app.name}.")
  #override derived tool with proxy tool props
@@ -166,6 +166,7 @@ class ToolManager:
  return result

  async def proxy_app_chat(self, query: str) -> str | None:
+ from ws_bom_robot_app.llm.models.api import LlmMessage
  secrets = self.app_tool.secrets_to_dict()
  app_id = secrets.get("appId")
  if not app_id:
@@ -173,26 +174,38 @@ class ToolManager:
  app: CmsApp = await get_app_by_id(app_id)
  if not app:
  raise ValueError(f"App with id {app_id} not found.")
- url = f"{config.robot_cms_host}/api/llm/message?locale=en&raw=true"
- auth = config.robot_cms_auth
- headers = {"Authorization": auth} if auth else {}
- async with aiohttp.ClientSession() as session:
- data = {
- "appKey": app.credentials.app_key,
- "apiKey": app.credentials.api_key,
- "messages": [
- {
- "role": "user",
- "content": query
- }
- ]
- }
- async with session.post(url, json=data, headers=headers) as response:
- if response.status == 200:
- return await response.text()
- else:
- raise ValueError(f"Error fetching chat response: {response.status}")
- return None
+ # message
+ app.rq.messages.append(LlmMessage(role="user", content=query))
+ # tracing
+ if str(secrets.get("disable_tracing", False)).lower() in ['1','true','yes']:
+ app.rq.lang_chain_tracing = False
+ app.rq.lang_chain_project = ''
+ app.rq.secrets['nebulyApiKey'] = ''
+ # http: for debugging purposes
+ if str(secrets.get("use_http", False)).lower() in ['1','true','yes']:
+ import base64
+ url = f"http://localhost:{config.runtime_options().tcp_port}/api/llm/stream/raw"
+ auth = f"Basic {base64.b64encode((config.robot_user + ':' + config.robot_password).encode('utf-8')).decode('utf-8')}"
+ headers = {"Authorization": auth} if auth else {}
+ async with aiohttp.ClientSession() as session:
+ _data = app.rq.model_dump(mode='json',by_alias=True,exclude_unset=True,exclude_none=True, exclude_defaults=True)
+ async with session.post(url, json=_data, headers=headers) as response:
+ if response.status == 200:
+ return await response.text()
+ else:
+ raise ValueError(f"Error fetching chat response: {response.status}")
+ return None
+ else: # default
+ try:
+ from ws_bom_robot_app.llm.main import stream
+ chunks = []
+ async for chunk in stream(rq=app.rq, ctx=None, formatted=False):
+ chunks.append(chunk)
+ rs = ''.join(chunks) if chunks else None
+ return rs
+ except Exception as e:
+ print(f"[!] Error in proxy_app_chat: {e}")
+ return None

  async def proxy_app_tool(self) -> None:
  return None
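Taken together with the `ctx` guard added in `main.py` above, the `proxy_app_chat` hunk replaces the old HTTP round-trip to the CMS `/api/llm/message` endpoint with an in-process call: the user query is appended to the proxied app's prebuilt `StreamRequest` (`app.rq`), tracing can be switched off via the `disable_tracing` secret, and unless the `use_http` debug secret is set the method consumes the local `stream()` generator directly. A condensed sketch of the default path, written as a free function for clarity (in the package this logic lives inside `ToolManager.proxy_app_chat`):

```python
from ws_bom_robot_app.llm.main import stream
from ws_bom_robot_app.llm.models.api import LlmMessage
from ws_bom_robot_app.llm.utils.cms import get_app_by_id

async def proxy_chat(app_id: str, query: str) -> str | None:
    # Resolve the target app from the cached CMS catalogue.
    app = await get_app_by_id(app_id)
    if not app:
        raise ValueError(f"App with id {app_id} not found.")
    # Append the user turn to the app's prebuilt StreamRequest.
    app.rq.messages.append(LlmMessage(role="user", content=query))
    try:
        # Consume the in-process stream (no HTTP hop, hence ctx=None).
        chunks = [chunk async for chunk in stream(rq=app.rq, ctx=None, formatted=False)]
        return ''.join(chunks) if chunks else None
    except Exception as e:
        print(f"[!] Error in proxy_app_chat: {e}")
        return None
```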
@@ -0,0 +1,109 @@
+ import logging, aiohttp
+ from typing import Any, List, Optional
+ from pydantic import AliasChoices, BaseModel, ConfigDict, Field
+ from ws_bom_robot_app.llm.models.api import LlmAppTool, LlmRules, StreamRequest
+ from ws_bom_robot_app.llm.models.kb import LlmKbEndpoint, LlmKbIntegration
+ from ws_bom_robot_app.util import cache_with_ttl
+
+ class CmsAppCredential(BaseModel):
+ app_key: str = Field(..., description="The app key for the credential", validation_alias=AliasChoices("appKey","app_key"))
+ api_key: str = Field(..., description="The api key for the credential", validation_alias=AliasChoices("apiKey","api_key"))
+ model_config = ConfigDict(extra='ignore')
+ class CmsApp(BaseModel):
+ id: str = Field(..., description="Unique identifier for the app")
+ name: str = Field(..., description="Name of the app")
+ mode: str
+ prompt_samples: Optional[List[str]]
+ credentials: CmsAppCredential = None
+ rq: StreamRequest
+ kb: Optional[Any] = None
+ model_config = ConfigDict(extra='ignore')
+
+ @cache_with_ttl(600) # Cache for 10 minutes
+ async def get_apps() -> list[CmsApp]:
+ import json
+ from ws_bom_robot_app.config import config
+ class DictObject(object):
+ def __init__(self, dict_):
+ self.__dict__.update(dict_)
+ def __repr__(self):
+ return json.dumps(self.__dict__)
+ @classmethod
+ def from_dict(cls, d):
+ return json.loads(json.dumps(d), object_hook=DictObject)
+ def __attr(obj, *attrs, default=None):
+ for attr in attrs:
+ obj = getattr(obj, attr, default)
+ if obj is None:
+ break
+ return obj
+ host = config.robot_cms_host
+ if host:
+ url = f"{host}/api/llmApp?depth=1&pagination=false&locale=it"
+ auth = config.robot_cms_auth
+ headers = {"Authorization": auth} if auth else {}
+ async with aiohttp.ClientSession() as session:
+ async with session.get(url, headers=headers) as response:
+ if response.status == 200:
+ _apps=[]
+ cms_apps = await response.json()
+ for cms_app in cms_apps:
+ if __attr(cms_app,"isActive",default=True) == True:
+ _cms_app_dict = DictObject.from_dict(cms_app)
+ _app: CmsApp = CmsApp(
+ id=_cms_app_dict.id,
+ name=_cms_app_dict.name,
+ mode=_cms_app_dict.mode,
+ prompt_samples=[__attr(sample,'sampleInputText') or f"{sample.__dict__}" for sample in _cms_app_dict.contents.sampleInputTexts],
+ credentials=CmsAppCredential(app_key=_cms_app_dict.settings.credentials.appKey,api_key=_cms_app_dict.settings.credentials.apiKey),
+ rq=StreamRequest(
+ #thread_id=str(uuid.uuid1()),
+ messages=[],
+ secrets={
+ "apiKey": __attr(_cms_app_dict.settings,'llmConfig','secrets','openAIApiKey', default=''),
+ "langChainApiKey": __attr(_cms_app_dict.settings,'llmConfig','secrets','langChainApiKey', default=''),
+ "nebulyApiKey": __attr(_cms_app_dict.settings,'llmConfig','secrets','nebulyApiKey', default=''),
+ },
+ system_message=__attr(_cms_app_dict.settings,'llmConfig','prompt','prompt','systemMessage') if __attr(_cms_app_dict.settings,'llmConfig','prompt','prompt','systemMessage') else __attr(_cms_app_dict.settings,'llmConfig','prompt','systemMessage'),
+ provider= __attr(_cms_app_dict.settings,'llmConfig','provider') or 'openai',
+ model= __attr(_cms_app_dict.settings,'llmConfig','model') or 'gpt-4o',
+ temperature=_cms_app_dict.settings.llmConfig.temperature or 0,
+ app_tools=[LlmAppTool(**tool) for tool in cms_app.get('settings').get('appTools',[])],
+ rules=LlmRules(
+ vector_type=__attr(_cms_app_dict.settings,'rules','vectorDbType', default='faiss'),
+ vector_db=__attr(_cms_app_dict.settings,'rules','vectorDbFile','filename'),
+ threshold=__attr(_cms_app_dict.settings,'rules','threshold', default=0.7)
+ ) if __attr(_cms_app_dict.settings,'rules','vectorDbFile','filename') else None,
+ #fine_tuned_model=__attr(_cms_app_dict.settings,'llmConfig','fineTunedModel'),
+ lang_chain_tracing= __attr(_cms_app_dict.settings,'llmConfig','langChainTracing', default=False),
+ lang_chain_project= __attr(_cms_app_dict.settings,'llmConfig','langChainProject', default='')
+ )
+ )
+ if _app.rq.app_tools:
+ for tool in _app.rq.app_tools:
+ _knowledgeBase = tool.knowledgeBase
+ tool.integrations = [LlmKbIntegration(**item) for item in _knowledgeBase.get('integrations')] if _knowledgeBase.get('integrations') else []
+ try:
+ tool.endpoints = [LlmKbEndpoint(**item) for item in _knowledgeBase.get('externalEndpoints')] if _knowledgeBase.get('externalEndpoints') else []
+ except Exception as e:
+ logging.error(f"Error parsing endpoints for app {_cms_app_dict.name} tool {tool.name}: {e}")
+ tool.vector_db = _knowledgeBase.get('vectorDbFile').get('filename') if _knowledgeBase.get('vectorDbFile') else None
+ tool.vector_type = _knowledgeBase.get('vectorDbType') if _knowledgeBase.get('vectorDbType') else 'faiss'
+ del tool.knowledgeBase
+ _apps.append(_app)
+ return _apps
+ else:
+ logging.error(f"Error fetching cms apps: {response.status}")
+ else:
+ logging.error("robot_cms_host environment variable is not set.")
+ return []
+
+
+ async def get_app_by_id(app_id: str) -> CmsApp | None:
+ apps = await get_apps()
+ app = next((a for a in apps if a.id == app_id), None)
+ if app:
+ return app
+ else:
+ logging.error(f"App with id {app_id} not found.")
+ return None
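The rewritten `cms.py` above no longer exposes `app_tools` directly on `CmsApp`; instead each CMS `llmApp` document is mapped onto a complete `StreamRequest` (`rq`) carrying secrets, system message, provider/model, tools, and optional rules, ready to be fed to `stream()` by `proxy_app_chat`. An illustrative consumer of the two helpers (the app id is hypothetical; `get_apps()` results are cached for 10 minutes by `cache_with_ttl`):

```python
import asyncio
from ws_bom_robot_app.llm.utils.cms import get_apps, get_app_by_id

async def main() -> None:
    # First call hits {robot_cms_host}/api/llmApp; later calls are served from the TTL cache.
    apps = await get_apps()
    print([(a.id, a.name, a.mode) for a in apps])

    # Resolve one app and inspect the StreamRequest assembled from its CMS settings.
    app = await get_app_by_id("example-app-id")  # hypothetical id
    if app:
        print(app.rq.provider, app.rq.model, len(app.rq.app_tools or []))

asyncio.run(main())
```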
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ws_bom_robot_app
- Version: 0.0.67
+ Version: 0.0.68
  Summary: A FastAPI application serving ws bom/robot/llm platform ai.
  Home-page: https://github.com/websolutespa/bom
  Author: Websolute Spa
@@ -244,7 +244,7 @@ pytest --cov=ws_bom_robot_app --log-cli-level=info
  launch debugger

  ```pwsh
- streamlit run debugger.py --server.port 6011
+ streamlit run debugger.py --server.port 8051
  ```

  dockerize base image
@@ -1,77 +0,0 @@
- import logging, aiohttp
- from typing import List, Optional
-
- from pydantic import AliasChoices, BaseModel, ConfigDict, Field
- from ws_bom_robot_app.llm.models.api import LlmAppTool
- from ws_bom_robot_app.util import cache_with_ttl
-
- class CmsAppCredential(BaseModel):
- app_key: str = Field(..., description="The app key for the credential", validation_alias=AliasChoices("appKey","app_key"))
- api_key: str = Field(..., description="The api key for the credential", validation_alias=AliasChoices("apiKey","api_key"))
- model_config = ConfigDict(extra='ignore')
- class CmsApp(BaseModel):
- id: str = Field(..., description="Unique identifier for the app")
- name: str = Field(..., description="Name of the app")
- credentials: CmsAppCredential = None
- app_tools: Optional[List[LlmAppTool]] = Field([], validation_alias=AliasChoices("appTools","app_tools"))
- model_config = ConfigDict(extra='ignore')
-
- @cache_with_ttl(600) # Cache for 10 minutes
- async def get_apps() -> list[CmsApp]:
- import json, os
- from ws_bom_robot_app.config import config
- class DictObject(object):
- def __init__(self, dict_):
- self.__dict__.update(dict_)
- def __repr__(self):
- return json.dumps(self.__dict__)
- @classmethod
- def from_dict(cls, d):
- return json.loads(json.dumps(d), object_hook=DictObject)
- def __attr(obj, *attrs, default=None):
- for attr in attrs:
- obj = getattr(obj, attr, default)
- if obj is None:
- break
- return obj
- host = config.robot_cms_host
- if host:
- url = f"{host}/api/llmApp?depth=1&pagination=false"
- auth = config.robot_cms_auth
- headers = {"Authorization": auth} if auth else {}
- async with aiohttp.ClientSession() as session:
- async with session.get(url, headers=headers) as response:
- if response.status == 200:
- _apps=[]
- cms_apps = await response.json()
- for cms_app in cms_apps:
- if __attr(cms_app,"isActive",default=True) == True:
- _cms_app_dict = DictObject.from_dict(cms_app)
- _app: CmsApp = CmsApp(
- id=_cms_app_dict.id,
- name=_cms_app_dict.name,
- credentials=CmsAppCredential(app_key=_cms_app_dict.settings.credentials.appKey,api_key=_cms_app_dict.settings.credentials.apiKey),
- app_tools=[LlmAppTool(**tool) for tool in cms_app.get('settings').get('appTools',[])]
- )
- if _app.app_tools:
- for tool in _app.app_tools:
- _knowledgeBase = tool.knowledgeBase
- tool.vector_db = _knowledgeBase.get('vectorDbFile').get('filename') if _knowledgeBase.get('vectorDbFile') else None
- tool.vector_type = _knowledgeBase.get('vectorDbType') if _knowledgeBase.get('vectorDbType') else 'faiss'
- del tool.knowledgeBase
- _apps.append(_app)
- return _apps
- else:
- logging.error(f"Error fetching cms apps: {response.status}")
- else:
- logging.error("robot_cms_host environment variable is not set.")
- return []
-
- async def get_app_by_id(app_id: str) -> CmsApp | None:
- apps = await get_apps()
- app = next((a for a in apps if a.id == app_id), None)
- if app:
- return app
- else:
- logging.error(f"App with id {app_id} not found.")
- return None