ws-bom-robot-app 0.0.79__tar.gz → 0.0.80__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (80)
  1. {ws_bom_robot_app-0.0.79/ws_bom_robot_app.egg-info → ws_bom_robot_app-0.0.80}/PKG-INFO +51 -62
  2. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/README.md +50 -61
  3. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/setup.py +1 -1
  4. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/config.py +1 -1
  5. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/models/api.py +33 -0
  6. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80/ws_bom_robot_app.egg-info}/PKG-INFO +51 -62
  7. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/MANIFEST.in +0 -0
  8. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/pyproject.toml +0 -0
  9. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/requirements.txt +0 -0
  10. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/setup.cfg +0 -0
  11. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/__init__.py +0 -0
  12. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/auth.py +0 -0
  13. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/cron_manager.py +0 -0
  14. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/__init__.py +0 -0
  15. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/agent_context.py +0 -0
  16. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/agent_description.py +0 -0
  17. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/agent_handler.py +0 -0
  18. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/agent_lcel.py +0 -0
  19. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/api.py +0 -0
  20. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/defaut_prompt.py +0 -0
  21. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/feedbacks/__init__.py +0 -0
  22. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/feedbacks/feedback_manager.py +0 -0
  23. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/main.py +0 -0
  24. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/models/__init__.py +0 -0
  25. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/models/base.py +0 -0
  26. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/models/feedback.py +0 -0
  27. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/models/kb.py +0 -0
  28. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/nebuly_handler.py +0 -0
  29. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/providers/__init__.py +0 -0
  30. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/providers/llm_manager.py +0 -0
  31. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/tools/__init__.py +0 -0
  32. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/tools/models/__init__.py +0 -0
  33. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/tools/models/main.py +0 -0
  34. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/tools/tool_builder.py +0 -0
  35. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/tools/tool_manager.py +0 -0
  36. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/tools/utils.py +0 -0
  37. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/utils/__init__.py +0 -0
  38. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/utils/agent.py +0 -0
  39. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/utils/chunker.py +0 -0
  40. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/utils/cleanup.py +0 -0
  41. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/utils/cms.py +0 -0
  42. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/utils/download.py +0 -0
  43. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/utils/print.py +0 -0
  44. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/utils/secrets.py +0 -0
  45. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/utils/webhooks.py +0 -0
  46. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/vector_store/__init__.py +0 -0
  47. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/vector_store/db/__init__.py +0 -0
  48. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/vector_store/db/base.py +0 -0
  49. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/vector_store/db/chroma.py +0 -0
  50. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/vector_store/db/faiss.py +0 -0
  51. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/vector_store/db/manager.py +0 -0
  52. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/vector_store/db/qdrant.py +0 -0
  53. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/vector_store/generator.py +0 -0
  54. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/vector_store/integration/__init__.py +0 -0
  55. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/vector_store/integration/azure.py +0 -0
  56. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/vector_store/integration/base.py +0 -0
  57. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/vector_store/integration/confluence.py +0 -0
  58. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/vector_store/integration/dropbox.py +0 -0
  59. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/vector_store/integration/gcs.py +0 -0
  60. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/vector_store/integration/github.py +0 -0
  61. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/vector_store/integration/googledrive.py +0 -0
  62. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/vector_store/integration/jira.py +0 -0
  63. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/vector_store/integration/manager.py +0 -0
  64. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/vector_store/integration/s3.py +0 -0
  65. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/vector_store/integration/sftp.py +0 -0
  66. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/vector_store/integration/sharepoint.py +0 -0
  67. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/vector_store/integration/sitemap.py +0 -0
  68. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/vector_store/integration/slack.py +0 -0
  69. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/vector_store/integration/thron.py +0 -0
  70. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/vector_store/loader/__init__.py +0 -0
  71. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/vector_store/loader/base.py +0 -0
  72. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/vector_store/loader/docling.py +0 -0
  73. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/llm/vector_store/loader/json_loader.py +0 -0
  74. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/main.py +0 -0
  75. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/task_manager.py +0 -0
  76. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app/util.py +0 -0
  77. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app.egg-info/SOURCES.txt +0 -0
  78. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app.egg-info/dependency_links.txt +0 -0
  79. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app.egg-info/requires.txt +0 -0
  80. {ws_bom_robot_app-0.0.79 → ws_bom_robot_app-0.0.80}/ws_bom_robot_app.egg-info/top_level.txt +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: ws_bom_robot_app
3
- Version: 0.0.79
3
+ Version: 0.0.80
4
4
  Summary: A FastAPI application serving ws bom/robot/llm platform ai.
5
5
  Home-page: https://github.com/websolutespa/bom
6
6
  Author: Websolute Spa
@@ -83,18 +83,30 @@ from ws_bom_robot_app import main
83
83
  app = main.app
84
84
  ```
85
85
 
86
- FIll `.env` with the following code:
87
-
88
- ```env
89
- #robot_env=local/development/production
90
- robot_env=local
91
- robot_user='[user]'
92
- robot_password='[pwd]'
93
- robot_data_folder='./.data'
94
- robot_cms_auth='[auth]'
95
- robot_cms_host='https://[DOMAIN]'
96
- robot_cms_db_folder=llmVectorDb
97
- robot_cms_files_folder=llmKbFile
86
+ Create a `.env` file in the root directory with the following configuration:
87
+
88
+ ```properties
89
+ # robot configuration
90
+ robot_env=development
91
+ robot_user=your_username
92
+ USER_AGENT=ws-bom-robot-app
93
+
94
+ # cms (bowl) configuration
95
+ robot_cms_host='http://localhost:4000'
96
+ robot_cms_auth='users API-Key your-api-key-here'
97
+
98
+ # llm providers: fill one or more of these with your API keys
99
+ DEEPSEEK_API_KEY="your-deepseek-api-key"
100
+ OPENAI_API_KEY="your-openai-api-key"
101
+ GOOGLE_API_KEY="your-google-api-key"
102
+ ANTHROPIC_API_KEY="your-anthropic-api-key"
103
+ GROQ_API_KEY="your-groq-api-key"
104
+ # ibm
105
+ WATSONX_URL="https://eu-gb.ml.cloud.ibm.com"
106
+ WATSONX_APIKEY="your-watsonx-api-key"
107
+ WATSONX_PROJECTID="your-watsonx-project-id"
108
+ # gvertex: ensure to mount the file in docker
109
+ GOOGLE_APPLICATION_CREDENTIALS="./.data/secrets/google-credentials.json"
98
110
  ```
99
111
 
100
112
  ## 🚀 Run the app
@@ -125,38 +137,39 @@ robot_cms_files_folder=llmKbFile
125
137
  - [swagger](http://localhost:6001/docs)
126
138
  - [redoc](http://localhost:6001/redoc)
127
139
 
128
- ### 💬 multimodal chat
140
+ ---
129
141
 
130
- The multimodal message allows users to interact with the application using both text and media files.
131
- `robot` accept multimodal input in a uniform way, regarding the llm provider used. Can also be used the llm/model specific input format.
142
+ ## 🐳 Docker
132
143
 
133
- - simple message
144
+ dockerize base image
134
145
 
135
- ```json
136
- {
137
- "role": "user",
138
- "content": "What is the capital of France?"
139
- }
146
+ ```pwsh
147
+ <# cpu #>
148
+ docker build -f Dockerfile-robot-base-cpu -t ghcr.io/websolutespa/ws-bom-robot-base:cpu .
149
+ docker push ghcr.io/websolutespa/ws-bom-robot-base:cpu
150
+ <# gpu #>
151
+ docker build -f Dockerfile-robot-base-gpu -t ghcr.io/websolutespa/ws-bom-robot-base:gpu .
152
+ docker push ghcr.io/websolutespa/ws-bom-robot-base:gpu
140
153
  ```
141
154
 
142
- - multimodal message
143
-
144
- ```json
145
- {
146
- "role": "user",
147
- "content": [
148
- {"type": "text", "text": "Read carefully all the attachments, analize the content and provide a summary for each one:"},
149
- {"type": "image", "url": "https://www.example.com/image/foo.jpg"},
150
- {"type": "file", "url": "https://www.example.com/pdf/bar.pdf"},
151
- {"type": "file", "url": "data:plain/text;base64,CiAgICAgIF9fX19fCiAgICAgLyAgIC..."}, # base64 encoded file
152
- {"type": "media", "mime_type": "plain/text", "data": "CiAgICAgIF9fX19fCiAgICAgLyAgIC..."} # google/gemini specific input format
153
- ]
154
- }
155
+ dockerize app
156
+
157
+ ```pwsh
158
+ docker build -f Dockerfile -t ws-bom-robot-app .
159
+ docker run --rm --name ws-bom-robot-app -d --env-file .env -p 6001:6001 ws-bom-robot-app
160
+ ```
161
+
162
+ docker run mounted to src (dev mode)
163
+
164
+ ```pwsh
165
+ docker run --rm --name ws-bom-robot-app-src -d --env-file .env -v "$(pwd)/ws_bom_robot_app:/app/ws_bom_robot_app" -v "$(pwd)/.data:/app/.data" -v "$(pwd)/tmp:/tmp" -p 6001:6001 ws-bom-robot-app fastapi dev ./ws_bom_robot_app/main.py --host 0.0.0.0 --port 6001
155
166
  ```
156
167
 
157
168
  ---
158
169
 
159
- ## 🔖 Windows requirements
170
+ ## 🔖 Windows requirements (for RAG functionality only)
171
+
172
+ > ⚠️ While it's strongly recommended to use a docker container for development, you can run the app on Windows with the following requirements
160
173
 
161
174
  ### libmagic (mandatory)
162
175
 
@@ -267,7 +280,7 @@ prospector ./ws_bom_robot_app -t dodgy -t bandit
267
280
  prospector ./ws_bom_robot_app -t pyroma
268
281
  ```
269
282
 
270
- lauch pytest
283
+ #### 🧪 run tests
271
284
 
272
285
  ```pwsh
273
286
  !py -m pip install -U pytest pytest-asyncio pytest-mock pytest-cov pyclean
@@ -278,36 +291,12 @@ pytest --cov=ws_bom_robot_app --log-cli-level=info
278
291
  # pytest --cov=ws_bom_robot_app --log-cli-level=info ./tests/app/llm/vector_store/db
279
292
  ```
280
293
 
281
- launch debugger
294
+ #### 🐞 start debugger
282
295
 
283
296
  ```pwsh
284
297
  streamlit run debugger.py --server.port 8051
285
298
  ```
286
299
 
287
- dockerize base image
288
-
289
- ```pwsh
290
- <# cpu #>
291
- docker build -f Dockerfile-robot-base-cpu -t ghcr.io/websolutespa/ws-bom-robot-base:cpu .
292
- docker push ghcr.io/websolutespa/ws-bom-robot-base:cpu
293
- <# gpu #>
294
- docker build -f Dockerfile-robot-base-gpu -t ghcr.io/websolutespa/ws-bom-robot-base:gpu .
295
- docker push ghcr.io/websolutespa/ws-bom-robot-base:gpu
296
- ```
297
-
298
- dockerize app
299
-
300
- ```pwsh
301
- docker build -f Dockerfile -t ws-bom-robot-app .
302
- docker run --rm --name ws-bom-robot-app -d -p 6001:6001 ws-bom-robot-app
303
- ```
304
-
305
- docker run mounted to src
306
-
307
- ```pwsh
308
- docker run --rm --name ws-bom-robot-app-src -d -v "$(pwd)/ws_bom_robot_app:/app/ws_bom_robot_app" -v "$(pwd)/.data:/app/.data" -v "$(pwd)/tmp:/tmp" -p 6001:6001 ws-bom-robot-app
309
- ```
310
-
311
300
  ### ✈️ publish
312
301
 
313
302
  - [testpypi](https://test.pypi.org/project/ws-bom-robot-app/)
@@ -17,18 +17,30 @@ from ws_bom_robot_app import main
17
17
  app = main.app
18
18
  ```
19
19
 
20
- FIll `.env` with the following code:
21
-
22
- ```env
23
- #robot_env=local/development/production
24
- robot_env=local
25
- robot_user='[user]'
26
- robot_password='[pwd]'
27
- robot_data_folder='./.data'
28
- robot_cms_auth='[auth]'
29
- robot_cms_host='https://[DOMAIN]'
30
- robot_cms_db_folder=llmVectorDb
31
- robot_cms_files_folder=llmKbFile
20
+ Create a `.env` file in the root directory with the following configuration:
21
+
22
+ ```properties
23
+ # robot configuration
24
+ robot_env=development
25
+ robot_user=your_username
26
+ USER_AGENT=ws-bom-robot-app
27
+
28
+ # cms (bowl) configuration
29
+ robot_cms_host='http://localhost:4000'
30
+ robot_cms_auth='users API-Key your-api-key-here'
31
+
32
+ # llm providers: fill one or more of these with your API keys
33
+ DEEPSEEK_API_KEY="your-deepseek-api-key"
34
+ OPENAI_API_KEY="your-openai-api-key"
35
+ GOOGLE_API_KEY="your-google-api-key"
36
+ ANTHROPIC_API_KEY="your-anthropic-api-key"
37
+ GROQ_API_KEY="your-groq-api-key"
38
+ # ibm
39
+ WATSONX_URL="https://eu-gb.ml.cloud.ibm.com"
40
+ WATSONX_APIKEY="your-watsonx-api-key"
41
+ WATSONX_PROJECTID="your-watsonx-project-id"
42
+ # gvertex: ensure to mount the file in docker
43
+ GOOGLE_APPLICATION_CREDENTIALS="./.data/secrets/google-credentials.json"
32
44
  ```
33
45
 
34
46
  ## 🚀 Run the app
@@ -59,38 +71,39 @@ robot_cms_files_folder=llmKbFile
59
71
  - [swagger](http://localhost:6001/docs)
60
72
  - [redoc](http://localhost:6001/redoc)
61
73
 
62
- ### 💬 multimodal chat
74
+ ---
63
75
 
64
- The multimodal message allows users to interact with the application using both text and media files.
65
- `robot` accept multimodal input in a uniform way, regarding the llm provider used. Can also be used the llm/model specific input format.
76
+ ## 🐳 Docker
66
77
 
67
- - simple message
78
+ dockerize base image
68
79
 
69
- ```json
70
- {
71
- "role": "user",
72
- "content": "What is the capital of France?"
73
- }
80
+ ```pwsh
81
+ <# cpu #>
82
+ docker build -f Dockerfile-robot-base-cpu -t ghcr.io/websolutespa/ws-bom-robot-base:cpu .
83
+ docker push ghcr.io/websolutespa/ws-bom-robot-base:cpu
84
+ <# gpu #>
85
+ docker build -f Dockerfile-robot-base-gpu -t ghcr.io/websolutespa/ws-bom-robot-base:gpu .
86
+ docker push ghcr.io/websolutespa/ws-bom-robot-base:gpu
74
87
  ```
75
88
 
76
- - multimodal message
77
-
78
- ```json
79
- {
80
- "role": "user",
81
- "content": [
82
- {"type": "text", "text": "Read carefully all the attachments, analize the content and provide a summary for each one:"},
83
- {"type": "image", "url": "https://www.example.com/image/foo.jpg"},
84
- {"type": "file", "url": "https://www.example.com/pdf/bar.pdf"},
85
- {"type": "file", "url": "data:plain/text;base64,CiAgICAgIF9fX19fCiAgICAgLyAgIC..."}, # base64 encoded file
86
- {"type": "media", "mime_type": "plain/text", "data": "CiAgICAgIF9fX19fCiAgICAgLyAgIC..."} # google/gemini specific input format
87
- ]
88
- }
89
+ dockerize app
90
+
91
+ ```pwsh
92
+ docker build -f Dockerfile -t ws-bom-robot-app .
93
+ docker run --rm --name ws-bom-robot-app -d --env-file .env -p 6001:6001 ws-bom-robot-app
94
+ ```
95
+
96
+ docker run mounted to src (dev mode)
97
+
98
+ ```pwsh
99
+ docker run --rm --name ws-bom-robot-app-src -d --env-file .env -v "$(pwd)/ws_bom_robot_app:/app/ws_bom_robot_app" -v "$(pwd)/.data:/app/.data" -v "$(pwd)/tmp:/tmp" -p 6001:6001 ws-bom-robot-app fastapi dev ./ws_bom_robot_app/main.py --host 0.0.0.0 --port 6001
89
100
  ```
90
101
 
91
102
  ---
92
103
 
93
- ## 🔖 Windows requirements
104
+ ## 🔖 Windows requirements (for RAG functionality only)
105
+
106
+ > ⚠️ While it's strongly recommended to use a docker container for development, you can run the app on Windows with the following requirements
94
107
 
95
108
  ### libmagic (mandatory)
96
109
 
@@ -201,7 +214,7 @@ prospector ./ws_bom_robot_app -t dodgy -t bandit
201
214
  prospector ./ws_bom_robot_app -t pyroma
202
215
  ```
203
216
 
204
- lauch pytest
217
+ #### 🧪 run tests
205
218
 
206
219
  ```pwsh
207
220
  !py -m pip install -U pytest pytest-asyncio pytest-mock pytest-cov pyclean
@@ -212,36 +225,12 @@ pytest --cov=ws_bom_robot_app --log-cli-level=info
212
225
  # pytest --cov=ws_bom_robot_app --log-cli-level=info ./tests/app/llm/vector_store/db
213
226
  ```
214
227
 
215
- launch debugger
228
+ #### 🐞 start debugger
216
229
 
217
230
  ```pwsh
218
231
  streamlit run debugger.py --server.port 8051
219
232
  ```
220
233
 
221
- dockerize base image
222
-
223
- ```pwsh
224
- <# cpu #>
225
- docker build -f Dockerfile-robot-base-cpu -t ghcr.io/websolutespa/ws-bom-robot-base:cpu .
226
- docker push ghcr.io/websolutespa/ws-bom-robot-base:cpu
227
- <# gpu #>
228
- docker build -f Dockerfile-robot-base-gpu -t ghcr.io/websolutespa/ws-bom-robot-base:gpu .
229
- docker push ghcr.io/websolutespa/ws-bom-robot-base:gpu
230
- ```
231
-
232
- dockerize app
233
-
234
- ```pwsh
235
- docker build -f Dockerfile -t ws-bom-robot-app .
236
- docker run --rm --name ws-bom-robot-app -d -p 6001:6001 ws-bom-robot-app
237
- ```
238
-
239
- docker run mounted to src
240
-
241
- ```pwsh
242
- docker run --rm --name ws-bom-robot-app-src -d -v "$(pwd)/ws_bom_robot_app:/app/ws_bom_robot_app" -v "$(pwd)/.data:/app/.data" -v "$(pwd)/tmp:/tmp" -p 6001:6001 ws-bom-robot-app
243
- ```
244
-
245
234
  ### ✈️ publish
246
235
 
247
236
  - [testpypi](https://test.pypi.org/project/ws-bom-robot-app/)
@@ -4,7 +4,7 @@ _requirements = [line.split('#')[0].strip() for line in open("requirements.txt")
4
4
 
5
5
  setup(
6
6
  name="ws_bom_robot_app",
7
- version="0.0.79",
7
+ version="0.0.80",
8
8
  description="A FastAPI application serving ws bom/robot/llm platform ai.",
9
9
  long_description=open("README.md", encoding='utf-8').read(),
10
10
  long_description_content_type="text/markdown",
@@ -32,7 +32,7 @@ class Settings(BaseSettings):
32
32
  WATSONX_URL: str = ''
33
33
  WATSONX_APIKEY: str = ''
34
34
  WATSONX_PROJECTID: str = ''
35
- NEBULY_API_URL: str =''
35
+ NEBULY_API_URL: str ='https://backend.nebuly.com/'
36
36
  GOOGLE_APPLICATION_CREDENTIALS: str = '' # path to google credentials iam file, e.d. ./.secrets/google-credentials.json
37
37
  model_config = ConfigDict(
38
38
  env_file='./.env',
@@ -11,6 +11,39 @@ import os, shutil, uuid
11
11
  from ws_bom_robot_app.config import Settings, config
12
12
 
13
13
  class LlmMessage(BaseModel):
14
+ """
15
+ 💬 multimodal chat
16
+
17
+ The multimodal message allows users to interact with the application using both text and media files.
18
+ `robot` accept multimodal input in a uniform way, regarding the llm provider used.
19
+
20
+ - simple message
21
+
22
+ ```json
23
+ {
24
+ "role": "user",
25
+ "content": "What is the capital of France?"
26
+ }
27
+ ```
28
+
29
+ - multimodal message
30
+
31
+ ```jsonc
32
+ {
33
+ "role": "user",
34
+ "content": [
35
+ { "type": "text", "text": "Read carefully all the attachments, analize the content and provide a summary for each one:" },
36
+ { "type": "image", "url": "https://www.example.com/image/foo.jpg" },
37
+ { "type": "file", "url": "https://www.example.com/pdf/bar.pdf" },
38
+ { "type": "file", "url": "data:plain/text;base64,CiAgICAgIF9fX19fCiAgICAgLyAgIC..." }, // base64 encoded file
39
+ { "type": "media", "mime_type": "plain/text", "data": "CiAgICAgIF9fX19fCiAgICAgLyAgIC..." } // google/gemini specific input format
40
+ ]
41
+ }
42
+ ```
43
+
44
+ > 💡 `url` can be a remote url or a base64 representation of the file: [rfc 2397](https://datatracker.ietf.org/doc/html/rfc2397).
45
+ Can also be used the llm/model specific input format.
46
+ """
14
47
  role: str
15
48
  content: Union[str, list]
16
49
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: ws_bom_robot_app
3
- Version: 0.0.79
3
+ Version: 0.0.80
4
4
  Summary: A FastAPI application serving ws bom/robot/llm platform ai.
5
5
  Home-page: https://github.com/websolutespa/bom
6
6
  Author: Websolute Spa
@@ -83,18 +83,30 @@ from ws_bom_robot_app import main
83
83
  app = main.app
84
84
  ```
85
85
 
86
- FIll `.env` with the following code:
87
-
88
- ```env
89
- #robot_env=local/development/production
90
- robot_env=local
91
- robot_user='[user]'
92
- robot_password='[pwd]'
93
- robot_data_folder='./.data'
94
- robot_cms_auth='[auth]'
95
- robot_cms_host='https://[DOMAIN]'
96
- robot_cms_db_folder=llmVectorDb
97
- robot_cms_files_folder=llmKbFile
86
+ Create a `.env` file in the root directory with the following configuration:
87
+
88
+ ```properties
89
+ # robot configuration
90
+ robot_env=development
91
+ robot_user=your_username
92
+ USER_AGENT=ws-bom-robot-app
93
+
94
+ # cms (bowl) configuration
95
+ robot_cms_host='http://localhost:4000'
96
+ robot_cms_auth='users API-Key your-api-key-here'
97
+
98
+ # llm providers: fill one or more of these with your API keys
99
+ DEEPSEEK_API_KEY="your-deepseek-api-key"
100
+ OPENAI_API_KEY="your-openai-api-key"
101
+ GOOGLE_API_KEY="your-google-api-key"
102
+ ANTHROPIC_API_KEY="your-anthropic-api-key"
103
+ GROQ_API_KEY="your-groq-api-key"
104
+ # ibm
105
+ WATSONX_URL="https://eu-gb.ml.cloud.ibm.com"
106
+ WATSONX_APIKEY="your-watsonx-api-key"
107
+ WATSONX_PROJECTID="your-watsonx-project-id"
108
+ # gvertex: ensure to mount the file in docker
109
+ GOOGLE_APPLICATION_CREDENTIALS="./.data/secrets/google-credentials.json"
98
110
  ```
99
111
 
100
112
  ## 🚀 Run the app
@@ -125,38 +137,39 @@ robot_cms_files_folder=llmKbFile
125
137
  - [swagger](http://localhost:6001/docs)
126
138
  - [redoc](http://localhost:6001/redoc)
127
139
 
128
- ### 💬 multimodal chat
140
+ ---
129
141
 
130
- The multimodal message allows users to interact with the application using both text and media files.
131
- `robot` accept multimodal input in a uniform way, regarding the llm provider used. Can also be used the llm/model specific input format.
142
+ ## 🐳 Docker
132
143
 
133
- - simple message
144
+ dockerize base image
134
145
 
135
- ```json
136
- {
137
- "role": "user",
138
- "content": "What is the capital of France?"
139
- }
146
+ ```pwsh
147
+ <# cpu #>
148
+ docker build -f Dockerfile-robot-base-cpu -t ghcr.io/websolutespa/ws-bom-robot-base:cpu .
149
+ docker push ghcr.io/websolutespa/ws-bom-robot-base:cpu
150
+ <# gpu #>
151
+ docker build -f Dockerfile-robot-base-gpu -t ghcr.io/websolutespa/ws-bom-robot-base:gpu .
152
+ docker push ghcr.io/websolutespa/ws-bom-robot-base:gpu
140
153
  ```
141
154
 
142
- - multimodal message
143
-
144
- ```json
145
- {
146
- "role": "user",
147
- "content": [
148
- {"type": "text", "text": "Read carefully all the attachments, analize the content and provide a summary for each one:"},
149
- {"type": "image", "url": "https://www.example.com/image/foo.jpg"},
150
- {"type": "file", "url": "https://www.example.com/pdf/bar.pdf"},
151
- {"type": "file", "url": "data:plain/text;base64,CiAgICAgIF9fX19fCiAgICAgLyAgIC..."}, # base64 encoded file
152
- {"type": "media", "mime_type": "plain/text", "data": "CiAgICAgIF9fX19fCiAgICAgLyAgIC..."} # google/gemini specific input format
153
- ]
154
- }
155
+ dockerize app
156
+
157
+ ```pwsh
158
+ docker build -f Dockerfile -t ws-bom-robot-app .
159
+ docker run --rm --name ws-bom-robot-app -d --env-file .env -p 6001:6001 ws-bom-robot-app
160
+ ```
161
+
162
+ docker run mounted to src (dev mode)
163
+
164
+ ```pwsh
165
+ docker run --rm --name ws-bom-robot-app-src -d --env-file .env -v "$(pwd)/ws_bom_robot_app:/app/ws_bom_robot_app" -v "$(pwd)/.data:/app/.data" -v "$(pwd)/tmp:/tmp" -p 6001:6001 ws-bom-robot-app fastapi dev ./ws_bom_robot_app/main.py --host 0.0.0.0 --port 6001
155
166
  ```
156
167
 
157
168
  ---
158
169
 
159
- ## 🔖 Windows requirements
170
+ ## 🔖 Windows requirements (for RAG functionality only)
171
+
172
+ > ⚠️ While it's strongly recommended to use a docker container for development, you can run the app on Windows with the following requirements
160
173
 
161
174
  ### libmagic (mandatory)
162
175
 
@@ -267,7 +280,7 @@ prospector ./ws_bom_robot_app -t dodgy -t bandit
267
280
  prospector ./ws_bom_robot_app -t pyroma
268
281
  ```
269
282
 
270
- lauch pytest
283
+ #### 🧪 run tests
271
284
 
272
285
  ```pwsh
273
286
  !py -m pip install -U pytest pytest-asyncio pytest-mock pytest-cov pyclean
@@ -278,36 +291,12 @@ pytest --cov=ws_bom_robot_app --log-cli-level=info
278
291
  # pytest --cov=ws_bom_robot_app --log-cli-level=info ./tests/app/llm/vector_store/db
279
292
  ```
280
293
 
281
- launch debugger
294
+ #### 🐞 start debugger
282
295
 
283
296
  ```pwsh
284
297
  streamlit run debugger.py --server.port 8051
285
298
  ```
286
299
 
287
- dockerize base image
288
-
289
- ```pwsh
290
- <# cpu #>
291
- docker build -f Dockerfile-robot-base-cpu -t ghcr.io/websolutespa/ws-bom-robot-base:cpu .
292
- docker push ghcr.io/websolutespa/ws-bom-robot-base:cpu
293
- <# gpu #>
294
- docker build -f Dockerfile-robot-base-gpu -t ghcr.io/websolutespa/ws-bom-robot-base:gpu .
295
- docker push ghcr.io/websolutespa/ws-bom-robot-base:gpu
296
- ```
297
-
298
- dockerize app
299
-
300
- ```pwsh
301
- docker build -f Dockerfile -t ws-bom-robot-app .
302
- docker run --rm --name ws-bom-robot-app -d -p 6001:6001 ws-bom-robot-app
303
- ```
304
-
305
- docker run mounted to src
306
-
307
- ```pwsh
308
- docker run --rm --name ws-bom-robot-app-src -d -v "$(pwd)/ws_bom_robot_app:/app/ws_bom_robot_app" -v "$(pwd)/.data:/app/.data" -v "$(pwd)/tmp:/tmp" -p 6001:6001 ws-bom-robot-app
309
- ```
310
-
311
300
  ### ✈️ publish
312
301
 
313
302
  - [testpypi](https://test.pypi.org/project/ws-bom-robot-app/)