ws-bom-robot-app 0.0.67__py3-none-any.whl → 0.0.68__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -54,7 +54,7 @@ class Settings(BaseSettings):
 
  class RuntimeOptions(BaseModel):
  @staticmethod
- def _get_number_of_workers() -> int:
+ def _get_sys_arg(arg: str, default: int) -> int:
  """
  Returns the number of worker processes to use for the application.
 
@@ -72,18 +72,18 @@ class Settings(BaseSettings):
  """
  import sys
  try:
- for i, arg in enumerate(sys.argv):
- if arg == "--workers" and i + 1 < len(sys.argv):
+ for i, argv in enumerate(sys.argv):
+ if argv == f"--{arg}" and i + 1 < len(sys.argv):
  return int(sys.argv[i + 1])
  except (ValueError, IndexError):
  pass
- return 1
+ return default
  debug: bool
+ tcp_port: int = _get_sys_arg("port", 6001)
  loader_show_progress: bool
  loader_silent_errors: bool
- number_of_workers: int = _get_number_of_workers()
- is_multi_process: bool = _get_number_of_workers() > 1
-
+ number_of_workers: int = _get_sys_arg("workers", 1)
+ is_multi_process: bool = _get_sys_arg("workers", 1) > 1
 
  def runtime_options(self) -> RuntimeOptions:
  """_summary_
@@ -85,7 +85,7 @@ async def __stream(rq: StreamRequest, ctx: Request, queue: Queue,formatted: bool
  client = LangSmithClient(
  api_key= rq.secrets.get("langChainApiKey", "")
  )
- trace = LangChainTracer(project_name=rq.lang_chain_project,client=client,tags=[str(ctx.base_url)])
+ trace = LangChainTracer(project_name=rq.lang_chain_project,client=client,tags=[str(ctx.base_url) if ctx else ''])
  callbacks.append(trace)
 
  __llm: LlmInterface =rq.get_llm()
@@ -183,9 +183,9 @@ class IBM(LlmInterface):
  from ibm_watsonx_ai import APIClient,Credentials
  super().__init__(config)
  self.__base_url = self.config.api_url or os.getenv("WATSONX_URL") or "https://us-south.ml.cloud.ibm.com"
- self.__apy_key = self.config.api_key or os.getenv("WATSONX_APIKEY")
+ self.__api_key = self.config.api_key or os.getenv("WATSONX_APIKEY")
  self.__client = APIClient(
- credentials=Credentials(url=self.__base_url,api_key=self.__apy_key),
+ credentials=Credentials(url=self.__base_url,api_key=self.__api_key),
  project_id=os.getenv("WATSONX_PROJECTID") or "default"
  )
  def get_llm(self):
@@ -202,7 +202,7 @@ class IBM(LlmInterface):
  today = date.today().strftime("%Y-%m-%d")
  url = f"{self.__base_url}/ml/v1/foundation_model_specs?version={today}&filters=task_generation,task_summarization:and"
  headers = {
- "Authorization": f"Bearer {self.__apy_key}",
+ "Authorization": f"Bearer {self.__api_key}",
  "Content-Type": "application/json"
  }
  response = requests.get(url, headers=headers)
@@ -18,7 +18,7 @@ async def __process_proxy_tool(proxy_tool: LlmAppTool) -> LlmAppTool | None:
  if not app:
  raise ValueError(f"App with id {app_id} not found.")
  tool_id = secrets.get("toolId")
- tool = next((t for t in app.app_tools if t.id == tool_id), None)
+ tool = next((t for t in app.rq.app_tools if app.rq.app_tools and t.id == tool_id), None)
  if not tool:
  raise ValueError(f"Tool with function_id {tool_id} not found in app {app.name}.")
  #override derived tool with proxy tool props
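Because tools now live on the app's embedded request (`app.rq.app_tools`), which may be `None`, the lookup guards the list inside the generator expression. A hedged, self-contained equivalent of that lookup (helper name and types are illustrative, not part of the package):

```python
from typing import Optional, Sequence

def find_tool(app_tools: Optional[Sequence], tool_id: Optional[str]):
    """Return the first tool whose .id matches tool_id, tolerating a missing list."""
    if not app_tools:          # app.rq.app_tools can be None or empty
        return None
    return next((t for t in app_tools if t.id == tool_id), None)

# conceptually: tool = find_tool(app.rq.app_tools, secrets.get("toolId"))
```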
@@ -166,6 +166,7 @@ class ToolManager:
  return result
 
  async def proxy_app_chat(self, query: str) -> str | None:
+ from ws_bom_robot_app.llm.models.api import LlmMessage
  secrets = self.app_tool.secrets_to_dict()
  app_id = secrets.get("appId")
  if not app_id:
@@ -173,26 +174,38 @@ class ToolManager:
  app: CmsApp = await get_app_by_id(app_id)
  if not app:
  raise ValueError(f"App with id {app_id} not found.")
- url = f"{config.robot_cms_host}/api/llm/message?locale=en&raw=true"
- auth = config.robot_cms_auth
- headers = {"Authorization": auth} if auth else {}
- async with aiohttp.ClientSession() as session:
- data = {
- "appKey": app.credentials.app_key,
- "apiKey": app.credentials.api_key,
- "messages": [
- {
- "role": "user",
- "content": query
- }
- ]
- }
- async with session.post(url, json=data, headers=headers) as response:
- if response.status == 200:
- return await response.text()
- else:
- raise ValueError(f"Error fetching chat response: {response.status}")
- return None
+ # message
+ app.rq.messages.append(LlmMessage(role="user", content=query))
+ # tracing
+ if str(secrets.get("disable_tracing", False)).lower() in ['1','true','yes']:
+ app.rq.lang_chain_tracing = False
+ app.rq.lang_chain_project = ''
+ app.rq.secrets['nebulyApiKey'] = ''
+ # http: for debugging purposes
+ if str(secrets.get("use_http", False)).lower() in ['1','true','yes']:
+ import base64
+ url = f"http://localhost:{config.runtime_options().tcp_port}/api/llm/stream/raw"
+ auth = f"Basic {base64.b64encode((config.robot_user + ':' + config.robot_password).encode('utf-8')).decode('utf-8')}"
+ headers = {"Authorization": auth} if auth else {}
+ async with aiohttp.ClientSession() as session:
+ _data = app.rq.model_dump(mode='json',by_alias=True,exclude_unset=True,exclude_none=True, exclude_defaults=True)
+ async with session.post(url, json=_data, headers=headers) as response:
+ if response.status == 200:
+ return await response.text()
+ else:
+ raise ValueError(f"Error fetching chat response: {response.status}")
+ return None
+ else: # default
+ try:
+ from ws_bom_robot_app.llm.main import stream
+ chunks = []
+ async for chunk in stream(rq=app.rq, ctx=None, formatted=False):
+ chunks.append(chunk)
+ rs = ''.join(chunks) if chunks else None
+ return rs
+ except Exception as e:
+ print(f"[!] Error in proxy_app_chat: {e}")
+ return None
 
  async def proxy_app_tool(self) -> None:
  return None
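`proxy_app_chat` no longer posts to the CMS `/api/llm/message` endpoint. It appends the user query to the target app's `StreamRequest`, optionally strips tracing keys, and then either loops back over HTTP to the local `/api/llm/stream/raw` route (the `use_http` debug path, authenticated with Basic credentials) or, by default, calls `stream()` in-process and joins the streamed chunks. A condensed, hedged sketch of the default in-process branch, assuming the `stream(rq, ctx, formatted)` async generator and the `CmsApp.rq` field shown elsewhere in this diff:

```python
# Hedged sketch of the default (in-process) branch; error handling mirrors the hunk above.
from ws_bom_robot_app.llm.main import stream
from ws_bom_robot_app.llm.models.api import LlmMessage

async def proxy_chat(app, query: str) -> str | None:
    # Push the user turn onto the app's prepared StreamRequest.
    app.rq.messages.append(LlmMessage(role="user", content=query))
    try:
        chunks = [chunk async for chunk in stream(rq=app.rq, ctx=None, formatted=False)]
        return ''.join(chunks) if chunks else None
    except Exception as e:
        print(f"[!] Error in proxy_app_chat: {e}")
        return None
```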
@@ -1,8 +1,8 @@
  import logging, aiohttp
- from typing import List, Optional
-
+ from typing import Any, List, Optional
  from pydantic import AliasChoices, BaseModel, ConfigDict, Field
- from ws_bom_robot_app.llm.models.api import LlmAppTool
+ from ws_bom_robot_app.llm.models.api import LlmAppTool, LlmRules, StreamRequest
+ from ws_bom_robot_app.llm.models.kb import LlmKbEndpoint, LlmKbIntegration
  from ws_bom_robot_app.util import cache_with_ttl
 
  class CmsAppCredential(BaseModel):
@@ -12,13 +12,16 @@ class CmsAppCredential(BaseModel):
  class CmsApp(BaseModel):
  id: str = Field(..., description="Unique identifier for the app")
  name: str = Field(..., description="Name of the app")
+ mode: str
+ prompt_samples: Optional[List[str]]
  credentials: CmsAppCredential = None
- app_tools: Optional[List[LlmAppTool]] = Field([], validation_alias=AliasChoices("appTools","app_tools"))
+ rq: StreamRequest
+ kb: Optional[Any] = None
  model_config = ConfigDict(extra='ignore')
 
  @cache_with_ttl(600) # Cache for 10 minutes
  async def get_apps() -> list[CmsApp]:
- import json, os
+ import json
  from ws_bom_robot_app.config import config
  class DictObject(object):
  def __init__(self, dict_):
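`CmsApp` now carries the fully built `StreamRequest` as `rq` (replacing the bare `app_tools` list), plus `mode`, `prompt_samples`, and an untyped `kb` slot, while `ConfigDict(extra='ignore')` silently drops CMS fields the model does not declare. A small self-contained illustration of that pydantic pattern, using toy stand-in models rather than the real `StreamRequest`:

```python
from typing import Any, List, Optional
from pydantic import BaseModel, ConfigDict, Field

class ToyRequest(BaseModel):            # stand-in for StreamRequest
    messages: List[dict] = []
    provider: str = "openai"

class ToyApp(BaseModel):                # mirrors the CmsApp field layout above
    id: str = Field(..., description="Unique identifier for the app")
    name: str
    mode: str
    prompt_samples: Optional[List[str]] = None
    rq: ToyRequest
    kb: Optional[Any] = None
    model_config = ConfigDict(extra='ignore')

# Unknown CMS keys (e.g. 'createdAt') are dropped instead of raising:
app = ToyApp(id="1", name="demo", mode="chat", rq={}, createdAt="2024-01-01")
print(app.rq.provider)  # -> "openai"
```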
@@ -36,7 +39,7 @@ async def get_apps() -> list[CmsApp]:
  return obj
  host = config.robot_cms_host
  if host:
- url = f"{host}/api/llmApp?depth=1&pagination=false"
+ url = f"{host}/api/llmApp?depth=1&pagination=false&locale=it"
  auth = config.robot_cms_auth
  headers = {"Authorization": auth} if auth else {}
  async with aiohttp.ClientSession() as session:
@@ -50,12 +53,40 @@ async def get_apps() -> list[CmsApp]:
  _app: CmsApp = CmsApp(
  id=_cms_app_dict.id,
  name=_cms_app_dict.name,
+ mode=_cms_app_dict.mode,
+ prompt_samples=[__attr(sample,'sampleInputText') or f"{sample.__dict__}" for sample in _cms_app_dict.contents.sampleInputTexts],
  credentials=CmsAppCredential(app_key=_cms_app_dict.settings.credentials.appKey,api_key=_cms_app_dict.settings.credentials.apiKey),
- app_tools=[LlmAppTool(**tool) for tool in cms_app.get('settings').get('appTools',[])]
+ rq=StreamRequest(
+ #thread_id=str(uuid.uuid1()),
+ messages=[],
+ secrets={
+ "apiKey": __attr(_cms_app_dict.settings,'llmConfig','secrets','openAIApiKey', default=''),
+ "langChainApiKey": __attr(_cms_app_dict.settings,'llmConfig','secrets','langChainApiKey', default=''),
+ "nebulyApiKey": __attr(_cms_app_dict.settings,'llmConfig','secrets','nebulyApiKey', default=''),
+ },
+ system_message=__attr(_cms_app_dict.settings,'llmConfig','prompt','prompt','systemMessage') if __attr(_cms_app_dict.settings,'llmConfig','prompt','prompt','systemMessage') else __attr(_cms_app_dict.settings,'llmConfig','prompt','systemMessage'),
+ provider= __attr(_cms_app_dict.settings,'llmConfig','provider') or 'openai',
+ model= __attr(_cms_app_dict.settings,'llmConfig','model') or 'gpt-4o',
+ temperature=_cms_app_dict.settings.llmConfig.temperature or 0,
+ app_tools=[LlmAppTool(**tool) for tool in cms_app.get('settings').get('appTools',[])],
+ rules=LlmRules(
+ vector_type=__attr(_cms_app_dict.settings,'rules','vectorDbType', default='faiss'),
+ vector_db=__attr(_cms_app_dict.settings,'rules','vectorDbFile','filename'),
+ threshold=__attr(_cms_app_dict.settings,'rules','threshold', default=0.7)
+ ) if __attr(_cms_app_dict.settings,'rules','vectorDbFile','filename') else None,
+ #fine_tuned_model=__attr(_cms_app_dict.settings,'llmConfig','fineTunedModel'),
+ lang_chain_tracing= __attr(_cms_app_dict.settings,'llmConfig','langChainTracing', default=False),
+ lang_chain_project= __attr(_cms_app_dict.settings,'llmConfig','langChainProject', default='')
+ )
  )
- if _app.app_tools:
- for tool in _app.app_tools:
+ if _app.rq.app_tools:
+ for tool in _app.rq.app_tools:
  _knowledgeBase = tool.knowledgeBase
+ tool.integrations = [LlmKbIntegration(**item) for item in _knowledgeBase.get('integrations')] if _knowledgeBase.get('integrations') else []
+ try:
+ tool.endpoints = [LlmKbEndpoint(**item) for item in _knowledgeBase.get('externalEndpoints')] if _knowledgeBase.get('externalEndpoints') else []
+ except Exception as e:
+ logging.error(f"Error parsing endpoints for app {_cms_app_dict.name} tool {tool.name}: {e}")
  tool.vector_db = _knowledgeBase.get('vectorDbFile').get('filename') if _knowledgeBase.get('vectorDbFile') else None
  tool.vector_type = _knowledgeBase.get('vectorDbType') if _knowledgeBase.get('vectorDbType') else 'faiss'
  del tool.knowledgeBase
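The mapping into `StreamRequest` leans on an `__attr` helper that does not appear in this diff; from its call sites it behaves like a null-safe nested attribute getter with an optional `default`. A hedged guess at its shape, for readers following the hunk above (illustrative only; the real implementation may differ):

```python
def __attr(obj, *path, default=None):
    """Follow obj.<path[0]>.<path[1]>... and return `default` if any hop is missing."""
    current = obj
    for name in path:
        current = getattr(current, name, None)
        if current is None:
            return default
    return current

# e.g. __attr(settings, 'llmConfig', 'provider') or 'openai'
```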
@@ -67,6 +98,7 @@ async def get_apps() -> list[CmsApp]:
  logging.error("robot_cms_host environment variable is not set.")
  return []
 
+
  async def get_app_by_id(app_id: str) -> CmsApp | None:
  apps = await get_apps()
  app = next((a for a in apps if a.id == app_id), None)
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ws_bom_robot_app
- Version: 0.0.67
+ Version: 0.0.68
  Summary: A FastAPI application serving ws bom/robot/llm platform ai.
  Home-page: https://github.com/websolutespa/bom
  Author: Websolute Spa
@@ -244,7 +244,7 @@ pytest --cov=ws_bom_robot_app --log-cli-level=info
  launch debugger
 
  ```pwsh
- streamlit run debugger.py --server.port 6011
+ streamlit run debugger.py --server.port 8051
  ```
 
  dockerize base image
@@ -1,6 +1,6 @@
  ws_bom_robot_app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  ws_bom_robot_app/auth.py,sha256=84nIbmJsMrNs0sxIQGEHbjsjc2P6ZrZZGSn8dkiL6is,895
- ws_bom_robot_app/config.py,sha256=F-7jQ80rXESHUP-agwJhYpEUR3EJJb3rcr46-xrCGww,4417
+ ws_bom_robot_app/config.py,sha256=QvoWds1DkBHqd3jAxDQtcmQSN8B6SrpBGERDXhTlswk,4490
  ws_bom_robot_app/cron_manager.py,sha256=pFHV7SZtp6GRmmLD9K1Mb1TE9Ev9n5mIiFScrc7tpCo,9221
  ws_bom_robot_app/main.py,sha256=1vx0k2fEcE53IC5zcE2EUCwQPcUHM4pvuKSun_E0a9I,6501
  ws_bom_robot_app/task_manager.py,sha256=Q3Il2TtkP0FoG9zHEBu48pZGXzimTtvWQsoH6wdvQs0,16077
@@ -12,7 +12,7 @@ ws_bom_robot_app/llm/agent_handler.py,sha256=BQ-f--Z5QCJDp-7tzSG_CKrANUCqG65S09p
  ws_bom_robot_app/llm/agent_lcel.py,sha256=GGZcGBKsSBbZQ-_MPI3NUMvT7lTerYgwKs3o74stwSU,2252
  ws_bom_robot_app/llm/api.py,sha256=2bF-UFczY9LuBqPxKObM0TOWYbZgVztX1RiIz5MSorU,5042
  ws_bom_robot_app/llm/defaut_prompt.py,sha256=LlCd_nSMkMmHESfiiiQYfnJyB6Pp-LSs4CEKdYW4vFk,1106
- ws_bom_robot_app/llm/main.py,sha256=fss9hSkL317fH2F6g6AhufXt_nS5_dz7muPYkdSRCjk,5220
+ ws_bom_robot_app/llm/main.py,sha256=vzUfaLCRk2SYujD00hnrTiHEVLYgZcbSw6LUea43siU,5235
  ws_bom_robot_app/llm/nebuly_handler.py,sha256=d4TI5XbvIYJLCxHcCUa6QUxsgwKW_4ItCYe4ocn7IJo,7900
  ws_bom_robot_app/llm/settings.py,sha256=EkFGCppORenStH9W4e6_dYvQ-5p6xiEMpmUHBqNqG9M,117
  ws_bom_robot_app/llm/feedbacks/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -23,17 +23,17 @@ ws_bom_robot_app/llm/models/base.py,sha256=1TqxuTK3rjJEALn7lvgoen_1ba3R2brAgGx6E
  ws_bom_robot_app/llm/models/feedback.py,sha256=zh1jLqPRLzNlxInkCMoiJbfSu0-tiOEYHM7FhC46PkM,1692
  ws_bom_robot_app/llm/models/kb.py,sha256=oVSw6_dmNxikAHrPqcfxDXz9M0ezLIYuxpgvzfs_Now,9514
  ws_bom_robot_app/llm/providers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- ws_bom_robot_app/llm/providers/llm_manager.py,sha256=Nr5SMJkQI2FX5VNlql8zhHU9BBfQVCO5PKzt1iF6aKk,11422
+ ws_bom_robot_app/llm/providers/llm_manager.py,sha256=DZKb8pKEbR2uECyzsqr_L825zpAIanuljJgsk17aqUw,11422
  ws_bom_robot_app/llm/tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- ws_bom_robot_app/llm/tools/tool_builder.py,sha256=p5CeLCuAilhtEAbPaiAWKGtuIWl8vfilZjYJ9Kw1dLg,3200
- ws_bom_robot_app/llm/tools/tool_manager.py,sha256=Z7yJSIn86BvGtWCkpT48pbu59qLpNe6XtpnoFI-jQU8,14073
+ ws_bom_robot_app/llm/tools/tool_builder.py,sha256=QTRG1c-EnH4APP10IyfZxEkqK9KitUsutXUvDRKeAhU,3224
+ ws_bom_robot_app/llm/tools/tool_manager.py,sha256=1IgRXxdB7DU3gbIlfT_aMUWZyWuanFTAFwu3VaYKxfE,14990
  ws_bom_robot_app/llm/tools/utils.py,sha256=tdmOAk8l4HVzw67z3brA9yX-1WLu91paU-WmXHyz4Bg,1883
  ws_bom_robot_app/llm/tools/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  ws_bom_robot_app/llm/tools/models/main.py,sha256=1hICqHs-KS2heenkH7b2eH0N2GrPaaNGBrn64cl_A40,827
  ws_bom_robot_app/llm/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  ws_bom_robot_app/llm/utils/agent.py,sha256=_CY5Dji3UeAIi2iuU7ttz4fml1q8aCFgVWOv970x8Fw,1411
  ws_bom_robot_app/llm/utils/chunker.py,sha256=N7570xBYlObneg-fsvDhPAJ-Pv8C8OaYZOBK6q7LmMI,607
- ws_bom_robot_app/llm/utils/cms.py,sha256=Cg5jVi0Peg3PNKOkWIx3SIISv8vf2zn0FP9JJumyeJE,3384
+ ws_bom_robot_app/llm/utils/cms.py,sha256=Q3VMYXU4Ls-0qjsCe1YBt8I5vcU03XrE6NylVVVUf3A,6051
  ws_bom_robot_app/llm/utils/download.py,sha256=iAUxH_NiCpTPtGzhC4hBtxotd2HPFt2MBhttslIxqiI,3194
  ws_bom_robot_app/llm/utils/kb.py,sha256=jja45WCbNI7SGEgqDS99nErlwB5eY8Ga7BMnhdMHZ90,1279
  ws_bom_robot_app/llm/utils/print.py,sha256=IsPYEWRJqu-dqlJA3F9OnnIS4rOq_EYX1Ljp3BvDnww,774
@@ -66,7 +66,7 @@ ws_bom_robot_app/llm/vector_store/loader/__init__.py,sha256=47DEQpj8HBSa-_TImW-5
  ws_bom_robot_app/llm/vector_store/loader/base.py,sha256=L_ugekNuAq0N9O-24wtlHSNHkqSeD-KsJrfGt_FX9Oc,5340
  ws_bom_robot_app/llm/vector_store/loader/docling.py,sha256=yP0zgXLeFAlByaYuj-6cYariuknckrFds0dxdRcnVz8,3456
  ws_bom_robot_app/llm/vector_store/loader/json_loader.py,sha256=qo9ejRZyKv_k6jnGgXnu1W5uqsMMtgqK_uvPpZQ0p74,833
- ws_bom_robot_app-0.0.67.dist-info/METADATA,sha256=w7nmGhnnW3UDsAzLbEyCW63vuDNNgDqQ3klAE5buO40,8497
- ws_bom_robot_app-0.0.67.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- ws_bom_robot_app-0.0.67.dist-info/top_level.txt,sha256=Yl0akyHVbynsBX_N7wx3H3ZTkcMLjYyLJs5zBMDAKcM,17
- ws_bom_robot_app-0.0.67.dist-info/RECORD,,
+ ws_bom_robot_app-0.0.68.dist-info/METADATA,sha256=VA_28oqsMRhYDKMTBmH8-kaHSJtTwIE1JxCa1XsJfMc,8497
+ ws_bom_robot_app-0.0.68.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ ws_bom_robot_app-0.0.68.dist-info/top_level.txt,sha256=Yl0akyHVbynsBX_N7wx3H3ZTkcMLjYyLJs5zBMDAKcM,17
+ ws_bom_robot_app-0.0.68.dist-info/RECORD,,