ws-bom-robot-app 0.0.61__py3-none-any.whl → 0.0.63__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -4,6 +4,7 @@ from pydantic_settings import BaseSettings
  import os

  class Settings(BaseSettings):
+ USER_AGENT: str = 'ws-bom-robot'
  robot_env: str = 'local'
  robot_user: str = 'user'
  robot_password: str = 'password'
@@ -28,7 +29,6 @@ class Settings(BaseSettings):
  GOOGLE_API_KEY: str = ''
  NEBULY_API_URL: str =''
  GOOGLE_APPLICATION_CREDENTIALS: str = '' # path to google credentials iam file, e.d. ./.secrets/google-credentials.json
- TAVILY_API_KEY: str = '' #TODO DELETE
  model_config = ConfigDict(
  env_file='./.env',
  extra='ignore',
@@ -36,6 +36,7 @@ class Settings(BaseSettings):
  )
  def __init__(self, **kwargs):
  super().__init__(**kwargs)
+ os.environ["USER_AGENT"] = self.USER_AGENT
  os.environ["OPENAI_API_KEY"] = self.OPENAI_API_KEY
  os.environ["OLLAMA_API_URL"] = self.OLLAMA_API_URL
  os.environ["ANTHROPIC_API_KEY"] = self.ANTHROPIC_API_KEY
@@ -44,8 +45,6 @@ class Settings(BaseSettings):
  os.environ["GOOGLE_API_KEY"] = self.GOOGLE_API_KEY
  os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = self.GOOGLE_APPLICATION_CREDENTIALS
  os.environ["NEBULY_API_URL"] = self.NEBULY_API_URL
- # TODO DELETE
- os.environ["TAVILY_API_KEY"] = self.TAVILY_API_KEY

  class RuntimeOptions(BaseModel):
  @staticmethod
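
Note: the Settings class (pydantic-settings) now declares a USER_AGENT field and mirrors it into the process environment on construction, alongside the existing API-key exports; the unused TAVILY_API_KEY setting is dropped. A minimal sketch of that mirroring pattern, assuming pydantic-settings; the names below are illustrative, not the package's code:

```python
# Sketch only: how a pydantic-settings field is surfaced as an environment
# variable at construction time, mirroring the USER_AGENT change above.
import os
from pydantic_settings import BaseSettings

class SketchSettings(BaseSettings):
    USER_AGENT: str = "ws-bom-robot"   # overridable via env var or an .env file

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # libraries that only read os.environ["USER_AGENT"] (e.g. web loaders)
        # see the configured value without importing this settings object
        os.environ["USER_AGENT"] = self.USER_AGENT

if __name__ == "__main__":
    print(SketchSettings().USER_AGENT, os.environ["USER_AGENT"])
```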
@@ -59,14 +59,14 @@ class CronManager:
  Job('cleanup-task',task_manager.cleanup_task, interval=5 * 60),
  Job('cleanup-data',kb_cleanup_data_file, interval=180 * 60),
  ]
- def __get_jobstore_strategy() -> JobstoreStrategy:
+ def __get_jobstore_strategy(self) -> JobstoreStrategy:
  if True or config.runtime_options().is_multi_process:
  return MemoryJobstoreStrategy()
  return PersistentJobstoreStrategy()
  def __init__(self, strategy: JobstoreStrategy = None, enable_defaults: bool = True):
  self.enable_defaults = enable_defaults
  if strategy is None:
- strategy = CronManager.__get_jobstore_strategy()
+ strategy = self.__get_jobstore_strategy()
  jobstores = strategy.get_jobstore()
  self.scheduler: BackgroundScheduler = BackgroundScheduler(jobstores=jobstores)
  self.__scheduler_is_running = False
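
Note: __get_jobstore_strategy gains an explicit self and the call site switches from CronManager.__get_jobstore_strategy() to self.__get_jobstore_strategy(), turning it into a regular instance method. A simplified illustration (not package code) of why the signature and the call style must agree:

```python
# Calling a method through the instance always passes `self`, so the def must
# accept it; a parameter-less def invoked as self.method() raises TypeError.
class Sketch:
    def __pick_strategy(self):               # instance method: receives self
        return "memory"

    def __init__(self):
        self.kind = self.__pick_strategy()   # bound call, self passed implicitly

print(Sketch().kind)  # memory
```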
@@ -60,7 +60,7 @@ class AgentHandler(AsyncCallbackHandler):
  tags: Optional[List[str]] = None,
  **kwargs: Any,
  ) -> None:
- if token and "llm_chain" not in tags:
+ if token and "llm_chain" not in (tags or []):
  token = _parse_token(self.llm,token)
  if token:
  self.stream_buffer += token # append new data to pending buffer
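
Note: the membership test is now guarded with (tags or []) because the callback can receive tags=None, and a membership test against None raises. Quick illustration:

```python
# Why the (tags or []) guard matters: membership tests against None raise,
# while `None or []` falls back to an empty list.
tags = None
try:
    "llm_chain" not in tags
except TypeError as e:
    print(e)                              # argument of type 'NoneType' is not iterable
print("llm_chain" not in (tags or []))   # True
```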
@@ -24,8 +24,7 @@ class AgentLcel:

  async def __create_prompt(self, input: dict) -> ChatPromptTemplate:
  message : LlmMessage = input[self.memory_key][-1]
- input = message.content
- rules_prompt = await get_rules(self.embeddings, self.rules, input) if self.rules else ""
+ rules_prompt = await get_rules(self.embeddings, self.rules, message.content) if self.rules else ""
  system = default_prompt + (tool_prompt(render_text_description(self.__tools)) if len(self.__tools)>0 else "") + self.sys_message + rules_prompt
  return ChatPromptTemplate([
  ("system", system),
@@ -26,6 +26,18 @@ def _stream_headers(rq: StreamRequest) -> Mapping[str, str]:
  "X-thread-id": rq.msg_id or str(uuid4()),
  "X-msg-id": rq.msg_id or str(uuid4()),
  }
+
+ @router.get("/cms/app", tags=["cms"])
+ async def cms_apps():
+ from ws_bom_robot_app.llm.utils.cms import get_apps
+ return await get_apps()
+
+ @router.get("/cms/app/{id}", tags=["cms"])
+ async def cms_app_by_id(id: str):
+ from ws_bom_robot_app.llm.utils.cms import get_app_by_id
+ return await get_app_by_id(id)
+
+
  @router.post("/stream")
  async def _stream(rq: StreamRequest, ctx: Request) -> StreamingResponse:
  return StreamingResponse(stream(rq, ctx), media_type="application/json", headers=_stream_headers(rq))
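
Note: two read-only CMS endpoints are added next to the streaming route, returning the cached app list and a single app by id. A hypothetical client call; the base URL, the router's mount prefix, and the app id are assumptions for illustration only:

```python
# Hypothetical client for the new CMS endpoints (deployment details assumed).
import asyncio, aiohttp

BASE = "http://localhost:8000"   # assumed local deployment
PREFIX = ""                      # prepend the llm router's actual prefix, if any

async def main():
    async with aiohttp.ClientSession() as session:
        async with session.get(f"{BASE}{PREFIX}/cms/app") as resp:
            apps = await resp.json()                 # list of CmsApp objects
        if apps:
            app_id = apps[0]["id"]
            async with session.get(f"{BASE}{PREFIX}/cms/app/{app_id}") as resp:
                print(await resp.json())             # single CmsApp, or null if unknown

asyncio.run(main())
```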
@@ -59,14 +59,6 @@ class NebulyFeedback(FeedbackInterface):
  raise Exception(f"Error sending feedback: {response.status_code} - {response.text}")
  return response.text

- def __buildAiMessage(self) -> str:
- message = eval(self.config.message_output)
- message_output = ""
- for chunk in message:
- if isinstance(chunk, dict) and "text" in chunk:
- message_output += chunk["text"]
- return message_output
-
  class FeedbackManager:
  #class variables (static)
  _list: dict[str,FeedbackInterface] = {
@@ -89,6 +89,8 @@ async def __stream(rq: StreamRequest, ctx: Request, queue: Queue,formatted: bool
  callbacks.append(trace)

  __llm: LlmInterface =rq.get_llm()
+ for tool in rq.app_tools:
+ tool.thread_id = rq.thread_id
  processor = AgentLcel(
  llm=__llm,
  sys_message=rq.system_message,
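
Note: before building the agent, the request's thread_id is copied onto every configured tool, so tools such as send_email can reference the current conversation. A stand-alone sketch of the propagation, using simplified stand-in models rather than the package's StreamRequest/LlmAppTool:

```python
# Sketch of the thread_id propagation pattern, with simplified stand-in models.
from typing import Optional
from pydantic import BaseModel

class ToolSketch(BaseModel):            # stand-in for LlmAppTool
    name: str
    thread_id: Optional[str] = None

class RequestSketch(BaseModel):         # stand-in for StreamRequest
    thread_id: str
    app_tools: list[ToolSketch] = []

rq = RequestSketch(thread_id="t-123", app_tools=[ToolSketch(name="send_email")])
for tool in rq.app_tools:
    tool.thread_id = rq.thread_id       # same assignment as in the diff
print([t.thread_id for t in rq.app_tools])   # ['t-123']
```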
@@ -36,6 +36,7 @@ class LlmAppToolDbSettings(BaseModel):

  class LlmAppTool(BaseModel):
  id: Optional[str] = None
+ thread_id: Optional[str] = Field(None, validation_alias=AliasChoices("threadId","thread_id"))
  name: str
  description: Optional[str] = None
  type: str
@@ -53,6 +54,11 @@ class LlmAppTool(BaseModel):
  vector_type: Optional[str] = Field('faiss', validation_alias=AliasChoices("vectorDbType","vector_type"))
  vector_db: Optional[str] = Field(None, validation_alias=AliasChoices("vectorDbFile","vector_db"))
  is_active: Optional[bool] = Field(True, validation_alias=AliasChoices("isActive","is_active"))
+ def secrets_to_dict(self) -> Dict[str, str]:
+ _secrets = {}
+ for d in self.secrets or []:
+ _secrets[d.get("secretId")] = d.get("secretValue")
+ return _secrets
  def get_vector_filtering(self) -> Optional[Tuple[str, List[AttributeInfo]]]:
  _description = None
  _metadata = None
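
Note: LlmAppTool gains a thread_id field (threadId alias) and a secrets_to_dict() helper that flattens the CMS secrets list into a plain dict, replacing the ad-hoc loops previously duplicated at call sites. What the helper produces, given the {"secretId": ..., "secretValue": ...} shape the diff implies (sample data is illustrative):

```python
# Equivalent of secrets_to_dict() on a sample secrets list.
secrets = [
    {"secretId": "appId", "secretValue": "abc123"},
    {"secretId": "toolId", "secretValue": "tool-1"},
]
flat = {d.get("secretId"): d.get("secretValue") for d in secrets or []}
print(flat)   # {'appId': 'abc123', 'toolId': 'tool-1'}
```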
@@ -1,12 +1,54 @@
+ import asyncio
  from asyncio import Queue
- from langchain.tools import Tool, StructuredTool
+ from langchain.tools import StructuredTool
  from ws_bom_robot_app.llm.models.api import LlmAppTool
  from ws_bom_robot_app.llm.tools.tool_manager import ToolManager
  from ws_bom_robot_app.llm.providers.llm_manager import LlmInterface

+ async def __process_proxy_tool(proxy_tool: LlmAppTool) -> LlmAppTool | None:
+ import os
+ from ws_bom_robot_app.llm.utils.cms import CmsApp, get_app_by_id
+ from ws_bom_robot_app.config import config
+ try:
+ secrets = proxy_tool.secrets_to_dict()
+ app_id = secrets.get("appId")
+ if not app_id:
+ raise ValueError("Tool configuration is invalid. 'appId' is required.")
+ app: CmsApp = await get_app_by_id(app_id)
+ if not app:
+ raise ValueError(f"App with id {app_id} not found.")
+ tool_id = secrets.get("toolId")
+ tool = next((t for t in app.app_tools if t.id == tool_id), None)
+ if not tool:
+ raise ValueError(f"Tool with function_id {tool_id} not found in app {app.name}.")
+ #override derived tool with proxy tool props
+ tool.name = proxy_tool.name if proxy_tool.name else tool.name
+ tool.description = proxy_tool.description if proxy_tool.description else tool.description
+ tool.function_id = proxy_tool.function_id if proxy_tool.function_id else tool.function_id
+ tool.function_description = proxy_tool.function_description if proxy_tool.function_description else tool.function_description
+ #normalize vector_db
+ if tool.vector_db:
+ tool.vector_db = os.path.join(
+ os.path.join(config.robot_data_folder,config.robot_data_db_folder,config.robot_data_db_folder_store),
+ os.path.splitext(os.path.basename(tool.vector_db))[0]) if tool.vector_db else None
+ return tool
+ except Exception as e:
+ print(f"[!] Error in proxy_app_tool: {e}")
+ return None
+
  def get_structured_tools(llm: LlmInterface, tools: list[LlmAppTool], callbacks:list, queue: Queue) -> list[StructuredTool]:
  _structured_tools :list[StructuredTool] = []
  for tool in [tool for tool in tools if tool.is_active]:
+ if tool.function_name == "proxy_app_tool":
+ # override the tool
+ loop = asyncio.get_event_loop()
+ if loop.is_running():
+ import nest_asyncio
+ nest_asyncio.apply()
+ processed_tool = loop.run_until_complete(__process_proxy_tool(tool))
+ if processed_tool is None:
+ continue
+ tool = processed_tool
  if _tool_config := ToolManager._list.get(tool.function_name):
  _tool_instance = ToolManager(llm, tool, callbacks, queue)
  _structured_tool = StructuredTool.from_function(
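
Note: get_structured_tools is synchronous, but resolving a "proxy_app_tool" entry requires an async CMS lookup, so the code bridges with the current event loop and applies nest_asyncio when a loop is already running. A self-contained sketch of that bridge, assuming nest_asyncio is installed as the diff does:

```python
# Sync-to-async bridge used above, reduced to its essentials.
import asyncio

async def resolve() -> str:
    await asyncio.sleep(0)            # stand-in for the awaited CMS lookup
    return "resolved-tool"

def bridge() -> str:
    loop = asyncio.get_event_loop()
    if loop.is_running():
        import nest_asyncio           # allows re-entrant run_until_complete
        nest_asyncio.apply()
    return loop.run_until_complete(resolve())

print(bridge())                       # resolved-tool
```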
@@ -1,14 +1,14 @@
  from asyncio import Queue
+ import aiohttp
  from typing import Optional, Type, Callable
- from ws_bom_robot_app.llm.models.api import LlmAppTool
+ from ws_bom_robot_app.config import config
+ from ws_bom_robot_app.llm.models.api import LlmApp,LlmAppTool
  from ws_bom_robot_app.llm.providers.llm_manager import LlmInterface
+ from ws_bom_robot_app.llm.utils.cms import CmsApp, get_app_by_id
  from ws_bom_robot_app.llm.vector_store.db.manager import VectorDbManager
  from ws_bom_robot_app.llm.tools.utils import getRandomWaitingMessage, translate_text
  from ws_bom_robot_app.llm.tools.models.main import NoopInput,DocumentRetrieverInput,ImageGeneratorInput,LlmChainInput,SearchOnlineInput,EmailSenderInput
  from pydantic import BaseModel, ConfigDict
- import smtplib
- from email.mime.multipart import MIMEMultipart
- from email.mime.text import MIMEText

  class ToolConfig(BaseModel):
  function: Callable
@@ -165,23 +165,50 @@ class ToolManager:
  result = await chain.ainvoke({"input": input})
  return result

+ async def proxy_app_chat(self, query: str) -> str | None:
+ secrets = self.app_tool.secrets_to_dict()
+ app_id = secrets.get("appId")
+ if not app_id:
+ raise ValueError("Tool configuration is invalid. 'appId' is required.")
+ app: CmsApp = await get_app_by_id(app_id)
+ if not app:
+ raise ValueError(f"App with id {app_id} not found.")
+ url = f"{config.robot_cms_host}/api/llm/message?locale=en&raw=true"
+ auth = config.robot_cms_auth
+ headers = {"Authorization": auth} if auth else {}
+ async with aiohttp.ClientSession() as session:
+ data = {
+ "appKey": app.credentials.app_key,
+ "apiKey": app.credentials.api_key,
+ "messages": [
+ {
+ "role": "user",
+ "content": query
+ }
+ ]
+ }
+ async with session.post(url, json=data, headers=headers) as response:
+ if response.status == 200:
+ return await response.text()
+ else:
+ raise ValueError(f"Error fetching chat response: {response.status}")
+ return None

- async def search_online(self, query: str):
+ async def proxy_app_tool(self) -> None:
+ return None
+
+ async def _fetch_urls(self, urls: list[str]) -> list[dict]:
+ import aiohttp, asyncio
  from ws_bom_robot_app.llm.tools.utils import fetch_page, extract_content_with_trafilatura
- from langchain_community.utilities import DuckDuckGoSearchAPIWrapper
- import aiohttp, asyncio, ast
- # Wrapper DuckDuckGo
- search = DuckDuckGoSearchAPIWrapper(max_results=10)
- try:
- raw_results = search.results(query, max_results=10)
- except Exception as e:
- print(f"[!] Errore ricerca: {e}")
- urls = [r["link"] for r in raw_results]
+ if not urls:
+ return []
  async with aiohttp.ClientSession() as session:
  tasks = [fetch_page(session, url) for url in urls]
- responses = await asyncio.gather(*tasks)
+ responses = await asyncio.gather(*tasks, return_exceptions=True)
  final_results = []
  for item in responses:
+ if isinstance(item, Exception):
+ continue
  url = item["url"]
  html = item["html"]
  if html:
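
Note: proxy_app_chat forwards the user's query to another CMS-registered app via a POST to /api/llm/message, and the shared page-fetching logic moves into _fetch_urls, which now tolerates per-URL failures via return_exceptions=True. A hedged sketch of the proxied call: the host, auth header, and credentials are placeholders; only the payload shape and query string come from the diff:

```python
# Sketch of the proxied chat request (placeholder host and credentials).
import aiohttp

async def proxy_chat(query: str) -> str:
    url = "https://cms.example.invalid/api/llm/message?locale=en&raw=true"  # assumed host
    headers = {"Authorization": "<auth>"}                                   # assumed auth value
    payload = {
        "appKey": "<app_key>",
        "apiKey": "<api_key>",
        "messages": [{"role": "user", "content": query}],
    }
    async with aiohttp.ClientSession() as session:
        async with session.post(url, json=payload, headers=headers) as resp:
            if resp.status != 200:
                raise ValueError(f"Error fetching chat response: {resp.status}")
            return await resp.text()

# asyncio.run(proxy_chat("hello"))   # requires a reachable CMS host
```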
@@ -194,13 +221,20 @@ class ToolManager:
  final_results.append({"url": url, "content": "Page not found"})
  return final_results

- async def search_online_google(self, query: str):
+ async def search_online(self, query: str) -> list[dict]:
+ from langchain_community.utilities import DuckDuckGoSearchAPIWrapper
+ # Wrapper DuckDuckGo
+ search = DuckDuckGoSearchAPIWrapper(max_results=10)
+ try:
+ raw_results = search.results(query, max_results=10)
+ except Exception as e:
+ return f"[!] Errore ricerca: {e}"
+ urls = [r["link"] for r in raw_results]
+ return await self._fetch_urls(urls)
+
+ async def search_online_google(self, query: str) -> list[dict]:
  from langchain_google_community import GoogleSearchAPIWrapper
- from ws_bom_robot_app.llm.tools.utils import fetch_page, extract_content_with_trafilatura
- import aiohttp, asyncio
- secrets = {}
- for d in self.app_tool.secrets:
- secrets[d.get("secretId")] = d.get("secretValue")
+ secrets = self.app_tool.secrets_to_dict()
  search_type = secrets.get("searchType")
  if search_type:
  search_kwargs = {"searchType" : search_type}
@@ -218,27 +252,19 @@ class ToolManager:
  num_results=secrets.get("num_results", 5)
  )
  urls = [r["link"] for r in raw_results]
- async with aiohttp.ClientSession() as session:
- tasks = [fetch_page(session, url) for url in urls]
- responses = await asyncio.gather(*tasks)
- final_results = []
- for item in responses:
- url = item["url"]
- html = item["html"]
- if html:
- content = await extract_content_with_trafilatura(html)
- if content:
- final_results.append({"url": url, "content": content, "type": "web"})
- else:
- final_results.append({"url": url, "content": "No content found", "type": "web"})
- else:
- final_results.append({"url": url, "content": "Page not found", "type": "web"})
- return final_results
-
+ return await self._fetch_urls(urls)

  async def send_email(self, email_subject: str, body: str, to_email:str):
+ import smtplib
+ from email.mime.multipart import MIMEMultipart
+ from email.mime.text import MIMEText
  secrets = self.app_tool.secrets
  secrets = {item["secretId"]: item["secretValue"] for item in secrets}
+ import urllib.parse as urlparse
+ url_preview = secrets.get("url_preview", "")
+ if url_preview and url_preview != "":
+ message_tread = "Puoi visualizzare la chat su questo indirizzo: " + urlparse.urljoin(url_preview, f"?llmThreadId={self.app_tool.thread_id}")
+ body = body.replace("##url_preview##", message_tread)
  # Email configuration
  smtp_server = secrets.get("smtp_server")
  smtp_port = secrets.get("smtp_port")
@@ -274,12 +300,15 @@ class ToolManager:

  #class variables (static)
  _list: dict[str,ToolConfig] = {
- "document_retriever": ToolConfig(function=document_retriever, model=DocumentRetrieverInput),
- "image_generator": ToolConfig(function=image_generator, model=ImageGeneratorInput),
- "llm_chain": ToolConfig(function=llm_chain, model=LlmChainInput),
- "search_online": ToolConfig(function=search_online, model=SearchOnlineInput),
- "search_online_google": ToolConfig(function=search_online_google, model=SearchOnlineInput),
- "send_email": ToolConfig(function=send_email, model=EmailSenderInput),
+ f"{document_retriever.__name__}": ToolConfig(function=document_retriever, model=DocumentRetrieverInput),
+ f"{image_generator.__name__}": ToolConfig(function=image_generator, model=ImageGeneratorInput),
+ f"{llm_chain.__name__}": ToolConfig(function=llm_chain, model=LlmChainInput),
+ f"{search_online.__name__}": ToolConfig(function=search_online, model=SearchOnlineInput),
+ f"{search_online_google.__name__}": ToolConfig(function=search_online_google, model=SearchOnlineInput),
+ f"{send_email.__name__}": ToolConfig(function=send_email, model=EmailSenderInput),
+ f"{proxy_app_chat.__name__}": ToolConfig(function=proxy_app_chat, model=DocumentRetrieverInput),
+ f"{proxy_app_tool.__name__}": ToolConfig(function=proxy_app_tool, model=NoopInput),
+
  }

  #instance methods
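
Note: the ToolConfig registry keys are now derived from each handler's __name__ instead of hard-coded strings, so a rename cannot leave a stale key behind, and the new proxy_app_chat and proxy_app_tool handlers are registered the same way. A simplified illustration of the pattern:

```python
# Keying a registry on __name__ keeps keys and callables in sync (simplified).
def document_retriever(): ...
def proxy_app_chat(): ...

registry = {
    document_retriever.__name__: document_retriever,
    proxy_app_chat.__name__: proxy_app_chat,
}
print(sorted(registry))   # ['document_retriever', 'proxy_app_chat']
```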
@@ -5,21 +5,21 @@ from ws_bom_robot_app.llm.utils.print import HiddenPrints
  from ws_bom_robot_app.llm.vector_store.db.manager import VectorDbManager
  import warnings

- async def get_rules(embeddings: Embeddings, rules: LlmRules, input: str | list) -> str:
+ async def get_rules(embeddings: Embeddings, rules: LlmRules, query: str | list) -> str:
  with warnings.catch_warnings():
  warnings.simplefilter("ignore", category=Warning)
  # check if the input is multimodal and convert it to text
- if isinstance(input, list):
- input = " ".join(obj.get("text", "") for obj in input)
+ if isinstance(query, list):
+ query = " ".join(obj.get("text", "") for obj in query)
  # check if the input is empty or the rules are not provided
- if any([input=="",rules is None,rules and rules.vector_db == "",rules and not os.path.exists(rules.vector_db)]):
+ if any([query=="",rules is None,rules and rules.vector_db == "",rules and not os.path.exists(rules.vector_db)]):
  return ""
  # get the rules from the vector db and return prompt with rules
  rules_prompt = ""
  rules_doc = await VectorDbManager.get_strategy(rules.vector_type).invoke(
  embeddings,
  rules.vector_db,
- input,
+ query,
  search_type="similarity_score_threshold",
  search_kwargs={
  "score_threshold": rules.threshold,
@@ -0,0 +1,77 @@
+ import logging, aiohttp
+ from typing import List, Optional
+
+ from pydantic import AliasChoices, BaseModel, ConfigDict, Field
+ from ws_bom_robot_app.llm.models.api import LlmAppTool
+ from ws_bom_robot_app.util import cache_with_ttl
+
+ class CmsAppCredential(BaseModel):
+ app_key: str = Field(..., description="The app key for the credential", validation_alias=AliasChoices("appKey","app_key"))
+ api_key: str = Field(..., description="The api key for the credential", validation_alias=AliasChoices("apiKey","api_key"))
+ model_config = ConfigDict(extra='ignore')
+ class CmsApp(BaseModel):
+ id: str = Field(..., description="Unique identifier for the app")
+ name: str = Field(..., description="Name of the app")
+ credentials: CmsAppCredential = None
+ app_tools: Optional[List[LlmAppTool]] = Field([], validation_alias=AliasChoices("appTools","app_tools"))
+ model_config = ConfigDict(extra='ignore')
+
+ @cache_with_ttl(600) # Cache for 10 minutes
+ async def get_apps() -> list[CmsApp]:
+ import json, os
+ from ws_bom_robot_app.config import config
+ class DictObject(object):
+ def __init__(self, dict_):
+ self.__dict__.update(dict_)
+ def __repr__(self):
+ return json.dumps(self.__dict__)
+ @classmethod
+ def from_dict(cls, d):
+ return json.loads(json.dumps(d), object_hook=DictObject)
+ def __attr(obj, *attrs, default=None):
+ for attr in attrs:
+ obj = getattr(obj, attr, default)
+ if obj is None:
+ break
+ return obj
+ host = config.robot_cms_host
+ if host:
+ url = f"{host}/api/llmApp?depth=1&pagination=false"
+ auth = config.robot_cms_auth
+ headers = {"Authorization": auth} if auth else {}
+ async with aiohttp.ClientSession() as session:
+ async with session.get(url, headers=headers) as response:
+ if response.status == 200:
+ _apps=[]
+ cms_apps = await response.json()
+ for cms_app in cms_apps:
+ if __attr(cms_app,"isActive",default=True) == True:
+ _cms_app_dict = DictObject.from_dict(cms_app)
+ _app: CmsApp = CmsApp(
+ id=_cms_app_dict.id,
+ name=_cms_app_dict.name,
+ credentials=CmsAppCredential(app_key=_cms_app_dict.settings.credentials.appKey,api_key=_cms_app_dict.settings.credentials.apiKey),
+ app_tools=[LlmAppTool(**tool) for tool in cms_app.get('settings').get('appTools',[])]
+ )
+ if _app.app_tools:
+ for tool in _app.app_tools:
+ _knowledgeBase = tool.knowledgeBase
+ tool.vector_db = _knowledgeBase.get('vectorDbFile').get('filename') if _knowledgeBase.get('vectorDbFile') else None
+ tool.vector_type = _knowledgeBase.get('vectorDbType') if _knowledgeBase.get('vectorDbType') else 'faiss'
+ del tool.knowledgeBase
+ _apps.append(_app)
+ return _apps
+ else:
+ logging.error(f"Error fetching cms apps: {response.status}")
+ else:
+ logging.error("robot_cms_host environment variable is not set.")
+ return []
+
+ async def get_app_by_id(app_id: str) -> CmsApp | None:
+ apps = await get_apps()
+ app = next((a for a in apps if a.id == app_id), None)
+ if app:
+ return app
+ else:
+ logging.error(f"App with id {app_id} not found.")
+ return None
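
Note: this new module models CMS apps (CmsApp, CmsAppCredential), fetches them from {robot_cms_host}/api/llmApp with a 10-minute cache, and layers get_app_by_id on top of the cached list. A hypothetical caller, assuming the package is installed and robot_cms_host/robot_cms_auth are configured; otherwise get_apps() logs an error and returns an empty list:

```python
# Usage sketch for the new cms helpers (requires a configured robot_cms_host).
import asyncio
from ws_bom_robot_app.llm.utils.cms import get_apps, get_app_by_id

async def main():
    apps = await get_apps()                  # cached for 600 s via cache_with_ttl
    print([a.name for a in apps])
    if apps:
        app = await get_app_by_id(apps[0].id)
        print([t.name for t in (app.app_tools or [])])

asyncio.run(main())
```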
@@ -350,13 +350,15 @@ class DatabaseTaskManagerStrategy(TaskManagerStrategy):
  session.commit()
  #endregion

- # global instance
+ #region global
  def __get_taskmanager_strategy() -> TaskManagerStrategy:
+ """ Factory function to get the appropriate task manager strategy based on the runtime configuration."""
  if config.runtime_options().is_multi_process:
  return DatabaseTaskManagerStrategy()
  return MemoryTaskManagerStrategy()
  task_manager = __get_taskmanager_strategy()
  _log.info(f"Task manager strategy: {task_manager.__class__.__name__}")
+ #endregion

  #region api
  router = APIRouter(prefix="/api/task", tags=["task"])
ws_bom_robot_app/util.py CHANGED
@@ -1,6 +1,6 @@
  import logging.handlers
  import os, logging, json
- from typing import TypeVar, Generic
+ from typing import Callable, TypeVar, Generic
  from functools import wraps
  from .config import config

@@ -23,25 +23,64 @@ _log: logging.Logger = locals().get("_loc", logger_instance(__name__))
  #endregion

  #region cache
- class cache(Generic[T]):
- def _filepath() -> str:
- return os.path.join('.data',f'{T.__module__}.{T.__name__}.json')
- @staticmethod
- def get() -> list[T]:
- filepath: str = cache._filepath()
- if os.path.exists(filepath):
- with open(filepath, 'r') as file:
- content = file.read()
- items: list[T] = json.loads(content)
- return items
- return None
- @staticmethod
- def set(items: list[T]):
- with open(cache._filepath(), 'w') as file:
- file.write(json.dumps(items))
- @staticmethod
- def clear():
- os.remove(cache._filepath())
+ _cache = {}
+ _cache_timestamps = {}
+
+ def cache_with_ttl(ttl_seconds: int):
+ """
+ Decorator for caching async function results with TTL (Time To Live)
+
+ Args:
+ ttl_seconds: Cache expiration time in seconds
+ """
+ import time
+ def decorator(func: Callable) -> Callable:
+ @wraps(func)
+ async def wrapper(*args, **kwargs):
+ # Create cache key from function name and arguments
+ cache_key = f"{func.__name__}:{hash(str(args) + str(sorted(kwargs.items())))}"
+
+ current_time = time.time()
+
+ # Check if cached result exists and is still valid
+ if (cache_key in _cache and
+ cache_key in _cache_timestamps and
+ current_time - _cache_timestamps[cache_key] < ttl_seconds):
+ return _cache[cache_key]
+
+ # Call the original function and cache the result
+ result = await func(*args, **kwargs)
+ _cache[cache_key] = result
+ _cache_timestamps[cache_key] = current_time
+
+ return result
+ return wrapper
+ return decorator
+
+ def clear_cache(id: str = None):
+ """Clear the cache by id function"""
+ cache_key_prefix = f"{id}:"
+ keys_to_remove = [key for key in _cache.keys() if key.startswith(cache_key_prefix)]
+ for key in keys_to_remove:
+ _cache.pop(key, None)
+ _cache_timestamps.pop(key, None)
+
+ def get_cache_info(id: str) -> dict:
+ """Get information about current cache status"""
+ import time
+ current_time = time.time()
+ cache_info = {}
+
+ for key, timestamp in _cache_timestamps.items():
+ if key.startswith(f"{id}:"):
+ remaining_ttl = 600 - (current_time - timestamp)
+ cache_info[key] = {
+ "cached_at": timestamp,
+ "remaining_ttl": max(0, remaining_ttl),
+ "is_expired": remaining_ttl <= 0
+ }
+
+ return cache_info
  #endregion

  def _get_timer_wrapper(is_async=False):
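
Note: the file-backed generic cache class is replaced by a small in-memory TTL cache for async functions: cache_with_ttl decorates a coroutine, clear_cache drops entries by function name, and get_cache_info reports cache status (its remaining_ttl math assumes a 600-second TTL regardless of the decorator argument). A usage sketch with the package installed; the decorated coroutine below is illustrative:

```python
# Usage sketch for the new TTL cache decorator (toy coroutine for illustration).
import asyncio
from ws_bom_robot_app.util import cache_with_ttl, clear_cache, get_cache_info

@cache_with_ttl(60)                      # cache results for 60 seconds
async def expensive_lookup(x: int) -> int:
    await asyncio.sleep(0.1)             # stand-in for I/O
    return x * 2

async def main():
    print(await expensive_lookup(21))    # computed, then cached
    print(await expensive_lookup(21))    # served from cache within the TTL
    print(get_cache_info("expensive_lookup"))
    clear_cache("expensive_lookup")      # drop cached entries for this function

asyncio.run(main())
```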
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ws_bom_robot_app
- Version: 0.0.61
+ Version: 0.0.63
  Summary: A FastAPI application serving ws bom/robot/llm platform ai.
  Home-page: https://github.com/websolutespa/bom
  Author: Websolute Spa
@@ -13,24 +13,23 @@ Description-Content-Type: text/markdown
  Requires-Dist: standardwebhooks==1.0.0
  Requires-Dist: apscheduler==3.11.0
  Requires-Dist: aiofiles==24.1.0
- Requires-Dist: pydantic==2.10.6
- Requires-Dist: pydantic-settings==2.7.1
- Requires-Dist: fastapi[standard]==0.115.8
+ Requires-Dist: pydantic==2.11.7
+ Requires-Dist: pydantic-settings==2.10.1
+ Requires-Dist: fastapi[standard]==0.115.14
  Requires-Dist: chevron==0.14.0
- Requires-Dist: trafilatura==2.0.0
- Requires-Dist: langchain==0.3.25
- Requires-Dist: langchain-community==0.3.24
- Requires-Dist: langchain-core==0.3.59
- Requires-Dist: langchain-openai==0.3.16
+ Requires-Dist: langchain==0.3.26
+ Requires-Dist: langchain-community==0.3.26
+ Requires-Dist: langchain-core==0.3.67
+ Requires-Dist: langchain-openai==0.3.27
  Requires-Dist: langchain-anthropic==0.3.6
  Requires-Dist: langchain-google-genai==2.0.7
- Requires-Dist: langchain-google-vertexai==2.0.13
- Requires-Dist: langchain-groq==0.3.2
- Requires-Dist: langchain-ollama==0.3.2
- Requires-Dist: faiss-cpu==1.9.0
- Requires-Dist: chromadb==0.6.3
- Requires-Dist: langchain_chroma==0.2.1
- Requires-Dist: fastembed==0.5.1
+ Requires-Dist: langchain-google-vertexai==2.0.27
+ Requires-Dist: langchain-groq==0.3.5
+ Requires-Dist: langchain-ollama==0.3.3
+ Requires-Dist: faiss-cpu==1.11.0
+ Requires-Dist: chromadb==1.0.13
+ Requires-Dist: langchain_chroma==0.2.4
+ Requires-Dist: fastembed==0.7.1
  Requires-Dist: langchain-qdrant==0.2.0
  Requires-Dist: lark==1.2.2
  Requires-Dist: unstructured==0.16.21
@@ -48,9 +47,10 @@ Requires-Dist: unstructured-ingest[sftp]
  Requires-Dist: unstructured-ingest[sharepoint]
  Requires-Dist: unstructured-ingest[slack]
  Requires-Dist: html5lib==1.1
- Requires-Dist: markdownify==0.14.1
+ Requires-Dist: markdownify==1.1.0
  Requires-Dist: duckduckgo-search==8.0.4
  Requires-Dist: langchain_google_community==2.0.7
+ Requires-Dist: trafilatura==2.0.0
  Dynamic: author
  Dynamic: author-email
  Dynamic: classifier
@@ -1,38 +1,39 @@
  ws_bom_robot_app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  ws_bom_robot_app/auth.py,sha256=84nIbmJsMrNs0sxIQGEHbjsjc2P6ZrZZGSn8dkiL6is,895
- ws_bom_robot_app/config.py,sha256=9W3cz92hbogDrtbCcybpCY29zCka7G225CNQEptpx30,4183
- ws_bom_robot_app/cron_manager.py,sha256=0Yt5AMTPGlXZ_M5ck0SKMX8wvzoPsseEezg_s0Q3HKY,9224
+ ws_bom_robot_app/config.py,sha256=JFO4HrFnzUjFfv5MH0Tzwda0krllEqmTufHZ6J-BgEI,4147
+ ws_bom_robot_app/cron_manager.py,sha256=pFHV7SZtp6GRmmLD9K1Mb1TE9Ev9n5mIiFScrc7tpCo,9221
  ws_bom_robot_app/main.py,sha256=zO3B-v-v9ESASvw8IaQj9Y9hNvNmOxohFmA0R82EybQ,6518
- ws_bom_robot_app/task_manager.py,sha256=Zedzs2R3O-wNSQOqs4jorgFwPRi-ji_0TN4mGfk-VvE,15958
- ws_bom_robot_app/util.py,sha256=b49ItlZgh2Wzw-6K8k5Wa44eVgjQ0JmWQwJnEaQBVGw,3502
+ ws_bom_robot_app/task_manager.py,sha256=Q3Il2TtkP0FoG9zHEBu48pZGXzimTtvWQsoH6wdvQs0,16077
+ ws_bom_robot_app/util.py,sha256=RjVD6B9sHje788Lndqq5DHy6TJM0KLs9qx3JYt81Wyk,4834
  ws_bom_robot_app/llm/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  ws_bom_robot_app/llm/agent_context.py,sha256=uatHJ8wcRly6h0S762BgfzDMpmcwCHwNzwo37aWjeE0,1305
  ws_bom_robot_app/llm/agent_description.py,sha256=5IP0qFSJvaE3zjGS7f0W1DuiegP0RHXRMBoDC5pCofA,4779
- ws_bom_robot_app/llm/agent_handler.py,sha256=PzdDpBnfUdqxKuMpHcVYgVf0hxIFOJwdxT9YIyFGdYY,7766
- ws_bom_robot_app/llm/agent_lcel.py,sha256=8d10b43BXqE4rfXE5uh8YGT67o1bw0q0l7QXFT6wPKA,2320
- ws_bom_robot_app/llm/api.py,sha256=1nzQ7g2n_DlX6Ixo5ecS10UvyyKJ42qZQ6aD8-EI7BE,4709
+ ws_bom_robot_app/llm/agent_handler.py,sha256=-9ia0bpNXgqLGFCSmAiU5ogdoJo30yl-XUNdXONdvbs,7774
+ ws_bom_robot_app/llm/agent_lcel.py,sha256=e1ZELfUEpqMXJwLWxyOtgiUmVwoO3Aw-ua4u05XociA,2297
+ ws_bom_robot_app/llm/api.py,sha256=2bF-UFczY9LuBqPxKObM0TOWYbZgVztX1RiIz5MSorU,5042
  ws_bom_robot_app/llm/defaut_prompt.py,sha256=D9dn8yPveu0bVwGM1wQWLYftmBs5O76o0R_caLLll8w,1121
- ws_bom_robot_app/llm/main.py,sha256=UK33yI_0zDCdM5zKe9h7c_qzM41PIANvRFCxjGlAzlI,5140
+ ws_bom_robot_app/llm/main.py,sha256=je8-Q-WtzQMtwEGzsUqHpLjVtMq8IObLi8VFIa6tbeo,5204
  ws_bom_robot_app/llm/nebuly_handler.py,sha256=w895twhPgtRUH_jZz1pbX4W2leq8A3O_9gUwp_ridoY,8033
  ws_bom_robot_app/llm/settings.py,sha256=DCLaGZwxlw0xE46LpfUgin_FHD8_XJIthCgI6r2UDlM,121
  ws_bom_robot_app/llm/feedbacks/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- ws_bom_robot_app/llm/feedbacks/feedback_manager.py,sha256=bnP0FEJTyrzT0YzqCVE73EC07Eu_4FLxVu3Cy-5Si0o,3211
+ ws_bom_robot_app/llm/feedbacks/feedback_manager.py,sha256=vNcZLG9IKhurAk7hjBqyFgQTjnh3Cd4GnxeYsX7ZdiA,2922
  ws_bom_robot_app/llm/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- ws_bom_robot_app/llm/models/api.py,sha256=DPhL_207RRN4qwPlKxbLrARnsYVAkdMYYUV7MkbN7Rk,10751
+ ws_bom_robot_app/llm/models/api.py,sha256=rsPH1y-hMkwXnvt5344yIDMY_StbbgB5AS3Gz37bOhs,11036
  ws_bom_robot_app/llm/models/base.py,sha256=1TqxuTK3rjJEALn7lvgoen_1ba3R2brAgGx6EDTtDZo,152
  ws_bom_robot_app/llm/models/feedback.py,sha256=pYNQGxNOBgeAAfdJLI95l7ePLBI5tVdsgnyjp5oMOQU,1722
  ws_bom_robot_app/llm/models/kb.py,sha256=oVSw6_dmNxikAHrPqcfxDXz9M0ezLIYuxpgvzfs_Now,9514
  ws_bom_robot_app/llm/providers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  ws_bom_robot_app/llm/providers/llm_manager.py,sha256=zIkxgTLYQCcup2Ixf4eWap4mNinuJH2YmkjLjZGDyJM,8371
  ws_bom_robot_app/llm/tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- ws_bom_robot_app/llm/tools/tool_builder.py,sha256=p_Q32_-OSydcxzj69PgPIuiny816zYv5dVsCHSY0ELc,1188
- ws_bom_robot_app/llm/tools/tool_manager.py,sha256=I5HPQov-9ELSiNDhxMsm9-zOqZ77J_E5c6IDOXX_CFk,12935
+ ws_bom_robot_app/llm/tools/tool_builder.py,sha256=vQ8BX3MFd3WX2WlYAibbC4PpzxevdrdqKGk0SpKPrPs,3265
+ ws_bom_robot_app/llm/tools/tool_manager.py,sha256=I8CHAdXx5KO1aw1NkDEQjCls9a-E8K6JrCQ-rzfImWY,14390
  ws_bom_robot_app/llm/tools/utils.py,sha256=Ba7ScFZPVJ3ke8KLO8ik1wyR2f_zC99Bikqx0OGnKoI,1924
  ws_bom_robot_app/llm/tools/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  ws_bom_robot_app/llm/tools/models/main.py,sha256=1hICqHs-KS2heenkH7b2eH0N2GrPaaNGBrn64cl_A40,827
  ws_bom_robot_app/llm/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- ws_bom_robot_app/llm/utils/agent.py,sha256=ISF9faaD5tBi-8sbgQpgfqWT1JIVcgv_lRhyaNAkI2Q,1445
+ ws_bom_robot_app/llm/utils/agent.py,sha256=uFuSfYMfGIE2WCKGNSKL-T2SDFn-tUKvbAYbGTPIw6g,1445
  ws_bom_robot_app/llm/utils/chunker.py,sha256=N7570xBYlObneg-fsvDhPAJ-Pv8C8OaYZOBK6q7LmMI,607
+ ws_bom_robot_app/llm/utils/cms.py,sha256=KlHnJ66h_TX3vy_HQld3PvbXw1hI_wVoOJVPlI1Zi0E,3461
  ws_bom_robot_app/llm/utils/download.py,sha256=GaRypPgkx16HfYRj-upX9kvmjfAdFFb5TP4P97scWeA,3273
  ws_bom_robot_app/llm/utils/kb.py,sha256=jja45WCbNI7SGEgqDS99nErlwB5eY8Ga7BMnhdMHZ90,1279
  ws_bom_robot_app/llm/utils/print.py,sha256=HK3zhZOd4cEyXZ8QcudLtTIfqqtMOERce_yTofS8NXo,803
@@ -65,7 +66,7 @@ ws_bom_robot_app/llm/vector_store/loader/__init__.py,sha256=47DEQpj8HBSa-_TImW-5
  ws_bom_robot_app/llm/vector_store/loader/base.py,sha256=L_ugekNuAq0N9O-24wtlHSNHkqSeD-KsJrfGt_FX9Oc,5340
  ws_bom_robot_app/llm/vector_store/loader/docling.py,sha256=yP0zgXLeFAlByaYuj-6cYariuknckrFds0dxdRcnVz8,3456
  ws_bom_robot_app/llm/vector_store/loader/json_loader.py,sha256=LDppW0ZATo4_1hh-KlsAM3TLawBvwBxva_a7k5Oz1sc,858
- ws_bom_robot_app-0.0.61.dist-info/METADATA,sha256=9Ph9kKWlaMASpyHCpha4k40H2clLDe2KbmUSq2D1uLw,8456
- ws_bom_robot_app-0.0.61.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- ws_bom_robot_app-0.0.61.dist-info/top_level.txt,sha256=Yl0akyHVbynsBX_N7wx3H3ZTkcMLjYyLJs5zBMDAKcM,17
- ws_bom_robot_app-0.0.61.dist-info/RECORD,,
+ ws_bom_robot_app-0.0.63.dist-info/METADATA,sha256=X2pbeuERrYrwt-8VhjEK19qE027RIRkSu3L5zbeQsaY,8459
+ ws_bom_robot_app-0.0.63.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ ws_bom_robot_app-0.0.63.dist-info/top_level.txt,sha256=Yl0akyHVbynsBX_N7wx3H3ZTkcMLjYyLJs5zBMDAKcM,17
+ ws_bom_robot_app-0.0.63.dist-info/RECORD,,