ws-bom-robot-app 0.0.62__py3-none-any.whl → 0.0.64__py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their public registries.
- ws_bom_robot_app/config.py +2 -0
- ws_bom_robot_app/llm/main.py +2 -0
- ws_bom_robot_app/llm/models/api.py +1 -0
- ws_bom_robot_app/llm/providers/llm_manager.py +80 -26
- ws_bom_robot_app/llm/tools/tool_manager.py +6 -1
- ws_bom_robot_app/main.py +2 -2
- {ws_bom_robot_app-0.0.62.dist-info → ws_bom_robot_app-0.0.64.dist-info}/METADATA +2 -1
- {ws_bom_robot_app-0.0.62.dist-info → ws_bom_robot_app-0.0.64.dist-info}/RECORD +10 -10
- {ws_bom_robot_app-0.0.62.dist-info → ws_bom_robot_app-0.0.64.dist-info}/WHEEL +0 -0
- {ws_bom_robot_app-0.0.62.dist-info → ws_bom_robot_app-0.0.64.dist-info}/top_level.txt +0 -0
ws_bom_robot_app/config.py
CHANGED

@@ -27,6 +27,7 @@ class Settings(BaseSettings):
     OLLAMA_API_URL: str = 'http://localhost:11434'
     GROQ_API_KEY: str = ''
     GOOGLE_API_KEY: str = ''
+    WATSONX_APIKEY: str = '' # used for ibm watsonx
     NEBULY_API_URL: str =''
     GOOGLE_APPLICATION_CREDENTIALS: str = '' # path to google credentials iam file, e.d. ./.secrets/google-credentials.json
     model_config = ConfigDict(
@@ -44,6 +45,7 @@ class Settings(BaseSettings):
         os.environ["GROQ_API_KEY"] = self.GROQ_API_KEY
         os.environ["GOOGLE_API_KEY"] = self.GOOGLE_API_KEY
         os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = self.GOOGLE_APPLICATION_CREDENTIALS
+        os.environ["WATSONX_APIKEY"] = self.WATSONX_APIKEY
         os.environ["NEBULY_API_URL"] = self.NEBULY_API_URL

 class RuntimeOptions(BaseModel):
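The new WATSONX_APIKEY entry follows the same pattern as the other provider keys: pydantic-settings populates it from the environment (or a .env file), and the exporting block mirrors it back into os.environ so downstream SDKs can pick it up. Below is a minimal sketch of that pattern, assuming pydantic-settings is installed; DemoSettings and export_to_env are illustrative names, not the package's actual API.

```python
import os
from pydantic_settings import BaseSettings

class DemoSettings(BaseSettings):
    # illustrative stand-in for the package's Settings class
    WATSONX_APIKEY: str = ''  # used for ibm watsonx

    def export_to_env(self) -> None:
        # mirror the value into the process environment, as config.py does above
        os.environ["WATSONX_APIKEY"] = self.WATSONX_APIKEY

if __name__ == "__main__":
    os.environ.setdefault("WATSONX_APIKEY", "dummy-key")  # normally set by the deployment
    settings = DemoSettings()
    settings.export_to_env()
    print(bool(os.environ["WATSONX_APIKEY"]))  # True
```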
ws_bom_robot_app/llm/main.py
CHANGED

@@ -89,6 +89,8 @@ async def __stream(rq: StreamRequest, ctx: Request, queue: Queue,formatted: bool
         callbacks.append(trace)

     __llm: LlmInterface =rq.get_llm()
+    for tool in rq.app_tools:
+        tool.thread_id = rq.thread_id
     processor = AgentLcel(
         llm=__llm,
         sys_message=rq.system_message,
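The added loop copies the request's thread_id onto every configured tool before the agent is built, so tools that need conversation context (for example the mail tool's chat-preview link added further down in tool_manager.py) can read it later. A toy sketch of that propagation, with made-up classes standing in for StreamRequest and the tool model:

```python
from dataclasses import dataclass, field
from typing import List, Optional

@dataclass
class DemoTool:                      # stand-in for the app tool model
    name: str
    thread_id: Optional[str] = None

@dataclass
class DemoStreamRequest:             # stand-in for StreamRequest
    thread_id: str
    app_tools: List[DemoTool] = field(default_factory=list)

rq = DemoStreamRequest(thread_id="thread-123", app_tools=[DemoTool("sendMail")])
for tool in rq.app_tools:            # same loop as in the diff above
    tool.thread_id = rq.thread_id

assert rq.app_tools[0].thread_id == "thread-123"
```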
ws_bom_robot_app/llm/providers/llm_manager.py
CHANGED

@@ -37,6 +37,31 @@ class LlmInterface:
         from langchain.agents.output_parsers.openai_tools import OpenAIToolsAgentOutputParser
         return OpenAIToolsAgentOutputParser()

+class Anthropic(LlmInterface):
+    def get_llm(self):
+        from langchain_anthropic import ChatAnthropic
+        return ChatAnthropic(
+            api_key=self.config.api_key or os.getenv("ANTHROPIC_API_KEY"),
+            model=self.config.model,
+            temperature=self.config.temperature,
+            streaming=True,
+            stream_usage=True
+        )
+
+    """
+    def get_embeddings(self):
+        from langchain_voyageai import VoyageAIEmbeddings
+        return VoyageAIEmbeddings(
+            api_key=self.config.embedding_api_key, #voyage api key
+            model="voyage-3")
+    """
+
+    def get_models(self):
+        import anthropic
+        client = anthropic.Client(api_key=self.config.api_key or os.getenv("ANTHROPIC_API_KEY"))
+        response = client.models.list()
+        return response.data
+
 class OpenAI(LlmInterface):
     def __init__(self, config: LlmConfig):
         super().__init__(config)
@@ -133,31 +158,6 @@ class Gvertex(LlmInterface):
             {"id":"gemini-1.5-pro-002"}
         ]

-class Anthropic(LlmInterface):
-    def get_llm(self):
-        from langchain_anthropic import ChatAnthropic
-        return ChatAnthropic(
-            api_key=self.config.api_key or os.getenv("ANTHROPIC_API_KEY"),
-            model=self.config.model,
-            temperature=self.config.temperature,
-            streaming=True,
-            stream_usage=True
-        )
-
-    """
-    def get_embeddings(self):
-        from langchain_voyageai import VoyageAIEmbeddings
-        return VoyageAIEmbeddings(
-            api_key=self.config.embedding_api_key, #voyage api key
-            model="voyage-3")
-    """
-
-    def get_models(self):
-        import anthropic
-        client = anthropic.Client(api_key=self.config.api_key or os.getenv("ANTHROPIC_API_KEY"))
-        response = client.models.list()
-        return response.data
-
 class Groq(LlmInterface):
     def get_llm(self):
         from langchain_groq import ChatGroq
@@ -179,10 +179,63 @@ class Groq(LlmInterface):
         response = requests.get(url, headers=headers)
         return response.json().get("data", [])

+class IBM(LlmInterface):
+    def __init__(self, config: LlmConfig):
+        super().__init__(config)
+        self.__apy_key = self.config.api_key or os.getenv("WATSONX_APIKEY")
+        self.__base_url = self.config.api_url or "https://us-south.ml.cloud.ibm.com"
+    def get_llm(self):
+        from langchain_ibm import ChatWatsonx
+        return ChatWatsonx(
+            model_id=self.config.model,
+            url=self.__base_url,
+            apikey=self.__apy_key
+        )
+    def get_models(self):
+        import requests
+        from datetime import date
+        try:
+            # https://cloud.ibm.com/apidocs/watsonx-ai#list-foundation-model-specs
+            today = date.today().strftime("%Y-%m-%d")
+            url = f"{self.__base_url}/ml/v1/foundation_model_specs?version={today}&filters=task_generation,task_summarization:and"
+            headers = {
+                "Authorization": f"Bearer {self.__apy_key}",
+                "Content-Type": "application/json"
+            }
+            response = requests.get(url, headers=headers)
+            models = response.json().get("resources", [])
+            return [{
+                "id": model['model_id'],
+                "provider": model['provider'],
+                "tasks": model['task_ids'],
+                "limits": model.get('model_limits', {}),
+            } for model in models if model['provider'].lower() in ['ibm','meta','mistral ai']]
+        except Exception as e:
+            print(f"Error fetching models from IBM WatsonX: {e}")
+            # https://www.ibm.com/products/watsonx-ai/foundation-models
+            return [
+                {"id":"granite-3-3-8b-instruct"},
+                {"id":"granite-vision-3-2-2b"},
+                {"id":"granite-13b-instruct"},
+                {"id":"llama-4-scout-17b-16e-instruct"},
+                {"id":"llama-3-3-70b-instruct"},
+                {"id":"mistral-medium-2505"},
+                {"id":"mistral-small-3-1-24b-instruct-2503"},
+                {"id":"mistral-large-2"}
+            ]
+
+    def get_embeddings(self):
+        from langchain_ibm import WatsonxEmbeddings
+        return WatsonxEmbeddings(
+            model_id="ibm/granite-embedding-107m-multilingual", #https://www.ibm.com/products/watsonx-ai/foundation-models
+            url=self.__base_url,
+            apikey=self.__apy_key
+        )
+
 class Ollama(LlmInterface):
     def __init__(self, config: LlmConfig):
         super().__init__(config)
-        self.__base_url = self.config.api_url or os.getenv("OLLAMA_API_URL")
+        self.__base_url = self.config.api_url or os.getenv("OLLAMA_API_URL") or "http://localhost:11434"
     def get_llm(self):
         from langchain_ollama.chat_models import ChatOllama
         return ChatOllama(
@@ -221,6 +274,7 @@ class LlmManager:
         "google": Google,
         "gvertex": Gvertex,
         "groq": Groq,
+        "ibm": IBM,
         "openai": OpenAI,
         "ollama": Ollama
     }
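With the registry entry above, the new IBM class is selected like any other provider: the string key "ibm" maps to the class, the constructor resolves the API key (explicit config first, then the WATSONX_APIKEY environment variable) and the base URL, and get_llm() returns a ChatWatsonx instance. A simplified sketch of that lookup-and-construct flow follows; DemoConfig, DemoIBM, _registry and get_provider are illustrative names, not the package's actual ones.

```python
import os
from dataclasses import dataclass
from typing import Optional

@dataclass
class DemoConfig:                    # illustrative stand-in for LlmConfig
    model: str
    api_key: Optional[str] = None
    api_url: Optional[str] = None

class DemoIBM:
    def __init__(self, config: DemoConfig):
        # same precedence as the diff: explicit config, then environment, then default URL
        self._api_key = config.api_key or os.getenv("WATSONX_APIKEY")
        self._base_url = config.api_url or "https://us-south.ml.cloud.ibm.com"
        self._model = config.model
    def describe(self) -> str:
        return f"watsonx model={self._model} url={self._base_url}"

_registry = {"ibm": DemoIBM}         # mirrors the '"ibm": IBM' entry added above

def get_provider(name: str, config: DemoConfig):
    return _registry[name](config)

provider = get_provider("ibm", DemoConfig(model="ibm/granite-3-3-8b-instruct", api_key="dummy"))
print(provider.describe())
```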
ws_bom_robot_app/llm/tools/tool_manager.py
CHANGED

@@ -228,7 +228,7 @@ class ToolManager:
         try:
             raw_results = search.results(query, max_results=10)
         except Exception as e:
-
+            return f"[!] Errore ricerca: {e}"
         urls = [r["link"] for r in raw_results]
         return await self._fetch_urls(urls)

@@ -260,6 +260,11 @@ class ToolManager:
         from email.mime.text import MIMEText
         secrets = self.app_tool.secrets
         secrets = {item["secretId"]: item["secretValue"] for item in secrets}
+        import urllib.parse as urlparse
+        url_preview = secrets.get("url_preview", "")
+        if url_preview and url_preview != "":
+            message_tread = "Puoi visualizzare la chat su questo indirizzo: " + urlparse.urljoin(url_preview, f"?llmThreadId={self.app_tool.thread_id}")
+            body = body.replace("##url_preview##", message_tread)
         # Email configuration
         smtp_server = secrets.get("smtp_server")
         smtp_port = secrets.get("smtp_port")
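The sendMail tool gains an optional url_preview secret: when it is set, urljoin appends ?llmThreadId=<thread_id> to the configured URL and the resulting link replaces the ##url_preview## placeholder in the mail body (the Italian message reads "You can view the chat at this address"). A minimal reproduction of just that string handling, with made-up values:

```python
import urllib.parse as urlparse

secrets = {"url_preview": "https://example.com/chat/"}   # hypothetical secret value
thread_id = "abc-123"                                    # would come from app_tool.thread_id
body = "Hello,\n##url_preview##\nBye"

url_preview = secrets.get("url_preview", "")
if url_preview:
    link = urlparse.urljoin(url_preview, f"?llmThreadId={thread_id}")
    body = body.replace("##url_preview##",
                        "Puoi visualizzare la chat su questo indirizzo: " + link)

print(body)
# Hello,
# Puoi visualizzare la chat su questo indirizzo: https://example.com/chat/?llmThreadId=abc-123
# Bye
```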
ws_bom_robot_app/main.py
CHANGED

@@ -70,7 +70,7 @@ def __get_disk_info():
     return _disks
 @app.get("/api/diag",tags=["diag"])
 def diag(authenticate: bool = Depends(authenticate)):
-    import
+    import importlib,psutil
     from ws_bom_robot_app.llm.providers.llm_manager import LlmManager as wsllm
     from ws_bom_robot_app.llm.vector_store.db.manager import VectorDbManager as wsdb
     from ws_bom_robot_app.llm.vector_store.loader.base import Loader as wsldr
@@ -81,7 +81,7 @@ def diag(authenticate: bool = Depends(authenticate)):
     svmem = psutil.virtual_memory()
     swap = psutil.swap_memory()
     try:
-        ws_bom_robot_app_version =
+        ws_bom_robot_app_version = importlib.metadata.version("ws_bom_robot_app")
     except:
         ws_bom_robot_app_version = "unknown"
     peer_process_ids = [c.pid for c in psutil.Process(os.getppid()).children()] if config.runtime_options().is_multi_process else None
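The /api/diag endpoint now reports the installed package version through importlib.metadata, falling back to "unknown" when the lookup fails. A hedged sketch of the same lookup; the explicit PackageNotFoundError handling is an assumption (the diff uses a bare except):

```python
import importlib.metadata

def package_version(dist_name: str) -> str:
    try:
        return importlib.metadata.version(dist_name)
    except importlib.metadata.PackageNotFoundError:
        return "unknown"

print(package_version("ws_bom_robot_app"))  # "unknown" unless the wheel is installed
print(package_version("pip"))               # e.g. "24.0" on a typical environment
```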
{ws_bom_robot_app-0.0.62.dist-info → ws_bom_robot_app-0.0.64.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ws_bom_robot_app
-Version: 0.0.
+Version: 0.0.64
 Summary: A FastAPI application serving ws bom/robot/llm platform ai.
 Home-page: https://github.com/websolutespa/bom
 Author: Websolute Spa
@@ -22,6 +22,7 @@ Requires-Dist: langchain-community==0.3.26
 Requires-Dist: langchain-core==0.3.67
 Requires-Dist: langchain-openai==0.3.27
 Requires-Dist: langchain-anthropic==0.3.6
+Requires-Dist: langchain-ibm==0.3.14
 Requires-Dist: langchain-google-genai==2.0.7
 Requires-Dist: langchain-google-vertexai==2.0.27
 Requires-Dist: langchain-groq==0.3.5
{ws_bom_robot_app-0.0.62.dist-info → ws_bom_robot_app-0.0.64.dist-info}/RECORD
CHANGED

@@ -1,8 +1,8 @@
 ws_bom_robot_app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ws_bom_robot_app/auth.py,sha256=84nIbmJsMrNs0sxIQGEHbjsjc2P6ZrZZGSn8dkiL6is,895
-ws_bom_robot_app/config.py,sha256=
+ws_bom_robot_app/config.py,sha256=izfyxYJE4GHNFNsi3UWDluyJySDwoVx1QMQhMzQIQCw,4260
 ws_bom_robot_app/cron_manager.py,sha256=pFHV7SZtp6GRmmLD9K1Mb1TE9Ev9n5mIiFScrc7tpCo,9221
-ws_bom_robot_app/main.py,sha256=
+ws_bom_robot_app/main.py,sha256=1vx0k2fEcE53IC5zcE2EUCwQPcUHM4pvuKSun_E0a9I,6501
 ws_bom_robot_app/task_manager.py,sha256=Q3Il2TtkP0FoG9zHEBu48pZGXzimTtvWQsoH6wdvQs0,16077
 ws_bom_robot_app/util.py,sha256=RjVD6B9sHje788Lndqq5DHy6TJM0KLs9qx3JYt81Wyk,4834
 ws_bom_robot_app/llm/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -12,21 +12,21 @@ ws_bom_robot_app/llm/agent_handler.py,sha256=BQ-f--Z5QCJDp-7tzSG_CKrANUCqG65S09p
 ws_bom_robot_app/llm/agent_lcel.py,sha256=GGZcGBKsSBbZQ-_MPI3NUMvT7lTerYgwKs3o74stwSU,2252
 ws_bom_robot_app/llm/api.py,sha256=2bF-UFczY9LuBqPxKObM0TOWYbZgVztX1RiIz5MSorU,5042
 ws_bom_robot_app/llm/defaut_prompt.py,sha256=LlCd_nSMkMmHESfiiiQYfnJyB6Pp-LSs4CEKdYW4vFk,1106
-ws_bom_robot_app/llm/main.py,sha256
+ws_bom_robot_app/llm/main.py,sha256=NlrEi3fRuNAyHeyOXSZvjSWePSLekahNOZoJ2XrbBbA,5068
 ws_bom_robot_app/llm/nebuly_handler.py,sha256=WDKDx7ItBv39dhAkYtRciA11YWUwZ7HjEOI2cgr-5NI,7851
 ws_bom_robot_app/llm/settings.py,sha256=EkFGCppORenStH9W4e6_dYvQ-5p6xiEMpmUHBqNqG9M,117
 ws_bom_robot_app/llm/feedbacks/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ws_bom_robot_app/llm/feedbacks/feedback_manager.py,sha256=WcKgzlOb8VFG7yqHoIOO_R6LAzdzE4YIRFCVOGBSgfM,2856
 ws_bom_robot_app/llm/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ws_bom_robot_app/llm/models/api.py,sha256=
+ws_bom_robot_app/llm/models/api.py,sha256=rsPH1y-hMkwXnvt5344yIDMY_StbbgB5AS3Gz37bOhs,11036
 ws_bom_robot_app/llm/models/base.py,sha256=1TqxuTK3rjJEALn7lvgoen_1ba3R2brAgGx6EDTtDZo,152
 ws_bom_robot_app/llm/models/feedback.py,sha256=zh1jLqPRLzNlxInkCMoiJbfSu0-tiOEYHM7FhC46PkM,1692
 ws_bom_robot_app/llm/models/kb.py,sha256=oVSw6_dmNxikAHrPqcfxDXz9M0ezLIYuxpgvzfs_Now,9514
 ws_bom_robot_app/llm/providers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ws_bom_robot_app/llm/providers/llm_manager.py,sha256=
+ws_bom_robot_app/llm/providers/llm_manager.py,sha256=8lScD-tG1o2g_tM04Ju96j2JyLIAa5Z8Gm5ttQfDOFQ,10750
 ws_bom_robot_app/llm/tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ws_bom_robot_app/llm/tools/tool_builder.py,sha256=p5CeLCuAilhtEAbPaiAWKGtuIWl8vfilZjYJ9Kw1dLg,3200
-ws_bom_robot_app/llm/tools/tool_manager.py,sha256=
+ws_bom_robot_app/llm/tools/tool_manager.py,sha256=Z7yJSIn86BvGtWCkpT48pbu59qLpNe6XtpnoFI-jQU8,14073
 ws_bom_robot_app/llm/tools/utils.py,sha256=tdmOAk8l4HVzw67z3brA9yX-1WLu91paU-WmXHyz4Bg,1883
 ws_bom_robot_app/llm/tools/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ws_bom_robot_app/llm/tools/models/main.py,sha256=1hICqHs-KS2heenkH7b2eH0N2GrPaaNGBrn64cl_A40,827
@@ -66,7 +66,7 @@ ws_bom_robot_app/llm/vector_store/loader/__init__.py,sha256=47DEQpj8HBSa-_TImW-5
 ws_bom_robot_app/llm/vector_store/loader/base.py,sha256=L_ugekNuAq0N9O-24wtlHSNHkqSeD-KsJrfGt_FX9Oc,5340
 ws_bom_robot_app/llm/vector_store/loader/docling.py,sha256=yP0zgXLeFAlByaYuj-6cYariuknckrFds0dxdRcnVz8,3456
 ws_bom_robot_app/llm/vector_store/loader/json_loader.py,sha256=qo9ejRZyKv_k6jnGgXnu1W5uqsMMtgqK_uvPpZQ0p74,833
-ws_bom_robot_app-0.0.
-ws_bom_robot_app-0.0.
-ws_bom_robot_app-0.0.
-ws_bom_robot_app-0.0.
+ws_bom_robot_app-0.0.64.dist-info/METADATA,sha256=vsebKjT-mL9G6m1ob_MrGcbOXJkmbjzzA3RZNNjO-UE,8497
+ws_bom_robot_app-0.0.64.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ws_bom_robot_app-0.0.64.dist-info/top_level.txt,sha256=Yl0akyHVbynsBX_N7wx3H3ZTkcMLjYyLJs5zBMDAKcM,17
+ws_bom_robot_app-0.0.64.dist-info/RECORD,,

{ws_bom_robot_app-0.0.62.dist-info → ws_bom_robot_app-0.0.64.dist-info}/WHEEL
File without changes

{ws_bom_robot_app-0.0.62.dist-info → ws_bom_robot_app-0.0.64.dist-info}/top_level.txt
File without changes