PraisonAI 0.0.73.tar.gz → 0.0.74.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (50)
  1. {praisonai-0.0.73 → praisonai-0.0.74}/PKG-INFO +27 -7
  2. {praisonai-0.0.73 → praisonai-0.0.74}/README.md +17 -0
  3. {praisonai-0.0.73 → praisonai-0.0.74}/praisonai/cli.py +30 -0
  4. {praisonai-0.0.73 → praisonai-0.0.74}/praisonai/deploy.py +1 -1
  5. praisonai-0.0.74/praisonai/ui/realtime.py +368 -0
  6. praisonai-0.0.74/praisonai/ui/realtimeclient/__init__.py +650 -0
  7. praisonai-0.0.74/praisonai/ui/realtimeclient/tools.py +192 -0
  8. {praisonai-0.0.73 → praisonai-0.0.74}/pyproject.toml +6 -3
  9. {praisonai-0.0.73 → praisonai-0.0.74}/LICENSE +0 -0
  10. {praisonai-0.0.73 → praisonai-0.0.74}/praisonai/__init__.py +0 -0
  11. {praisonai-0.0.73 → praisonai-0.0.74}/praisonai/__main__.py +0 -0
  12. {praisonai-0.0.73 → praisonai-0.0.74}/praisonai/agents_generator.py +0 -0
  13. {praisonai-0.0.73 → praisonai-0.0.74}/praisonai/auto.py +0 -0
  14. {praisonai-0.0.73 → praisonai-0.0.74}/praisonai/chainlit_ui.py +0 -0
  15. {praisonai-0.0.73 → praisonai-0.0.74}/praisonai/inbuilt_tools/__init__.py +0 -0
  16. {praisonai-0.0.73 → praisonai-0.0.74}/praisonai/inbuilt_tools/autogen_tools.py +0 -0
  17. {praisonai-0.0.73 → praisonai-0.0.74}/praisonai/inc/__init__.py +0 -0
  18. {praisonai-0.0.73 → praisonai-0.0.74}/praisonai/inc/config.py +0 -0
  19. {praisonai-0.0.73 → praisonai-0.0.74}/praisonai/inc/models.py +0 -0
  20. {praisonai-0.0.73 → praisonai-0.0.74}/praisonai/public/android-chrome-192x192.png +0 -0
  21. {praisonai-0.0.73 → praisonai-0.0.74}/praisonai/public/android-chrome-512x512.png +0 -0
  22. {praisonai-0.0.73 → praisonai-0.0.74}/praisonai/public/apple-touch-icon.png +0 -0
  23. {praisonai-0.0.73 → praisonai-0.0.74}/praisonai/public/fantasy.svg +0 -0
  24. {praisonai-0.0.73 → praisonai-0.0.74}/praisonai/public/favicon-16x16.png +0 -0
  25. {praisonai-0.0.73 → praisonai-0.0.74}/praisonai/public/favicon-32x32.png +0 -0
  26. {praisonai-0.0.73 → praisonai-0.0.74}/praisonai/public/favicon.ico +0 -0
  27. {praisonai-0.0.73 → praisonai-0.0.74}/praisonai/public/game.svg +0 -0
  28. {praisonai-0.0.73 → praisonai-0.0.74}/praisonai/public/logo_dark.png +0 -0
  29. {praisonai-0.0.73 → praisonai-0.0.74}/praisonai/public/logo_light.png +0 -0
  30. {praisonai-0.0.73 → praisonai-0.0.74}/praisonai/public/movie.svg +0 -0
  31. {praisonai-0.0.73 → praisonai-0.0.74}/praisonai/public/thriller.svg +0 -0
  32. {praisonai-0.0.73 → praisonai-0.0.74}/praisonai/setup/__init__.py +0 -0
  33. {praisonai-0.0.73 → praisonai-0.0.74}/praisonai/setup/build.py +0 -0
  34. {praisonai-0.0.73 → praisonai-0.0.74}/praisonai/setup/config.yaml +0 -0
  35. {praisonai-0.0.73 → praisonai-0.0.74}/praisonai/setup/post_install.py +0 -0
  36. {praisonai-0.0.73 → praisonai-0.0.74}/praisonai/setup/setup_conda_env.py +0 -0
  37. {praisonai-0.0.73 → praisonai-0.0.74}/praisonai/setup/setup_conda_env.sh +0 -0
  38. {praisonai-0.0.73 → praisonai-0.0.74}/praisonai/test.py +0 -0
  39. {praisonai-0.0.73 → praisonai-0.0.74}/praisonai/train.py +0 -0
  40. {praisonai-0.0.73 → praisonai-0.0.74}/praisonai/ui/chat.py +0 -0
  41. {praisonai-0.0.73 → praisonai-0.0.74}/praisonai/ui/code.py +0 -0
  42. {praisonai-0.0.73 → praisonai-0.0.74}/praisonai/ui/context.py +0 -0
  43. {praisonai-0.0.73 → praisonai-0.0.74}/praisonai/ui/public/fantasy.svg +0 -0
  44. {praisonai-0.0.73 → praisonai-0.0.74}/praisonai/ui/public/game.svg +0 -0
  45. {praisonai-0.0.73 → praisonai-0.0.74}/praisonai/ui/public/logo_dark.png +0 -0
  46. {praisonai-0.0.73 → praisonai-0.0.74}/praisonai/ui/public/logo_light.png +0 -0
  47. {praisonai-0.0.73 → praisonai-0.0.74}/praisonai/ui/public/movie.svg +0 -0
  48. {praisonai-0.0.73 → praisonai-0.0.74}/praisonai/ui/public/thriller.svg +0 -0
  49. {praisonai-0.0.73 → praisonai-0.0.74}/praisonai/ui/sql_alchemy.py +0 -0
  50. {praisonai-0.0.73 → praisonai-0.0.74}/praisonai/version.py +0 -0
{praisonai-0.0.73 → praisonai-0.0.74}/PKG-INFO

@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: PraisonAI
- Version: 0.0.73
+ Version: 0.0.74
  Summary: PraisonAI application combines AutoGen and CrewAI or similar frameworks into a low-code solution for building and managing multi-agent LLM systems, focusing on simplicity, customization, and efficient human-agent collaboration.
  Author: Mervin Praison
  Requires-Python: >=3.10,<3.13
@@ -17,27 +17,30 @@ Provides-Extra: cohere
  Provides-Extra: google
  Provides-Extra: gradio
  Provides-Extra: openai
+ Provides-Extra: realtime
  Provides-Extra: train
  Provides-Extra: ui
  Requires-Dist: agentops (>=0.3.12) ; extra == "agentops"
- Requires-Dist: aiosqlite (>=0.20.0) ; extra == "chat" or extra == "code"
- Requires-Dist: chainlit (==1.2.0) ; extra == "ui" or extra == "chat" or extra == "code"
- Requires-Dist: crawl4ai (==0.3.4) ; extra == "chat" or extra == "code"
+ Requires-Dist: aiosqlite (>=0.20.0) ; extra == "chat" or extra == "code" or extra == "realtime"
+ Requires-Dist: chainlit (==1.3.0rc1) ; extra == "ui" or extra == "chat" or extra == "code" or extra == "realtime"
+ Requires-Dist: crawl4ai (==0.3.4) ; extra == "chat" or extra == "code" or extra == "realtime"
  Requires-Dist: crewai (>=0.32.0)
  Requires-Dist: flask (>=3.0.0) ; extra == "api"
  Requires-Dist: gradio (>=4.26.0) ; extra == "gradio"
- Requires-Dist: greenlet (>=3.0.3) ; extra == "chat" or extra == "code"
+ Requires-Dist: greenlet (>=3.0.3) ; extra == "chat" or extra == "code" or extra == "realtime"
  Requires-Dist: langchain-anthropic (>=0.1.13) ; extra == "anthropic"
  Requires-Dist: langchain-cohere (>=0.1.4) ; extra == "cohere"
  Requires-Dist: langchain-google-genai (>=1.0.4) ; extra == "google"
  Requires-Dist: langchain-openai (>=0.1.7) ; extra == "openai"
- Requires-Dist: litellm (>=1.41.8) ; extra == "chat" or extra == "code"
+ Requires-Dist: litellm (>=1.41.8) ; extra == "chat" or extra == "code" or extra == "realtime"
  Requires-Dist: markdown (>=3.5)
+ Requires-Dist: plotly (>=5.24.0) ; extra == "realtime"
  Requires-Dist: praisonai-tools (>=0.0.7)
  Requires-Dist: pyautogen (>=0.2.19)
  Requires-Dist: pyparsing (>=3.0.0)
  Requires-Dist: rich (>=13.7)
- Requires-Dist: tavily-python (==0.5.0) ; extra == "chat" or extra == "code"
+ Requires-Dist: tavily-python (==0.5.0) ; extra == "chat" or extra == "code" or extra == "realtime"
+ Requires-Dist: websockets (>=12.0) ; extra == "realtime"
  Project-URL: Homepage, https://docs.praison.ai
  Project-URL: Repository, https://github.com/mervinpraison/PraisonAI
  Description-Content-Type: text/markdown
@@ -79,6 +82,7 @@ Praison AI, leveraging both AutoGen and CrewAI or any other agent framework, rep
  | **UI** | Multi Agents such as CrewAI or AutoGen | [https://docs.praison.ai/ui/ui](https://docs.praison.ai/ui/ui) |
  | **Chat** | Chat with 100+ LLMs, single AI Agent | [https://docs.praison.ai/ui/chat](https://docs.praison.ai/ui/chat) |
  | **Code** | Chat with entire Codebase, single AI Agent | [https://docs.praison.ai/ui/code](https://docs.praison.ai/ui/code) |
+ | **Realtime** | Real-time voice interaction with AI | [https://docs.praison.ai/ui/realtime](https://docs.praison.ai/ui/realtime) |

  | Other Features | Description | Docs |
  |---|---|---|
@@ -100,6 +104,7 @@ Praison AI, leveraging both AutoGen and CrewAI or any other agent framework, rep
  | **PraisonAI Code** | `pip install "praisonai[code]"` |
  | **PraisonAI Chat** | `pip install "praisonai[chat]"` |
  | **PraisonAI Train** | `pip install "praisonai[train]"` |
+ | **PraisonAI Realtime** | `pip install "praisonai[realtime]"` |

  ## Key Features

@@ -110,6 +115,9 @@ Praison AI, leveraging both AutoGen and CrewAI or any other agent framework, rep
  - Interactive UIs
  - YAML-based Configuration
  - Custom Tool Integration
+ - Internet Search Capability (using Crawl4AI and Tavily)
+ - Vision Language Model (VLM) Support
+ - Real-time Voice Interaction

  ## TL;DR Multi Agents

@@ -222,6 +230,14 @@ export OPENAI_API_KEY="Enter your API key"
  praisonai chat
  ```

+ ### Internet Search
+
+ Praison AI Chat and Praison AI Code now includes internet search capabilities using Crawl4AI and Tavily, allowing you to retrieve up-to-date information during your conversations.
+
+ ### Vision Language Model Support
+
+ You can now upload images and ask questions based on them using Vision Language Models. This feature enables visual understanding and analysis within your chat sessions.
+
  ## Praison AI Code

  ```bash
@@ -230,6 +246,10 @@ export OPENAI_API_KEY="Enter your API key"
  praisonai code
  ```

+ ### Internet Search
+
+ Praison AI Code also includes internet search functionality, enabling you to find relevant code snippets and programming information online.
+
  ## Create Custom Tools

  - https://docs.praison.ai/tools/custom/
{praisonai-0.0.73 → praisonai-0.0.74}/README.md

@@ -35,6 +35,7 @@ Praison AI, leveraging both AutoGen and CrewAI or any other agent framework, rep
  | **UI** | Multi Agents such as CrewAI or AutoGen | [https://docs.praison.ai/ui/ui](https://docs.praison.ai/ui/ui) |
  | **Chat** | Chat with 100+ LLMs, single AI Agent | [https://docs.praison.ai/ui/chat](https://docs.praison.ai/ui/chat) |
  | **Code** | Chat with entire Codebase, single AI Agent | [https://docs.praison.ai/ui/code](https://docs.praison.ai/ui/code) |
+ | **Realtime** | Real-time voice interaction with AI | [https://docs.praison.ai/ui/realtime](https://docs.praison.ai/ui/realtime) |

  | Other Features | Description | Docs |
  |---|---|---|
@@ -56,6 +57,7 @@ Praison AI, leveraging both AutoGen and CrewAI or any other agent framework, rep
  | **PraisonAI Code** | `pip install "praisonai[code]"` |
  | **PraisonAI Chat** | `pip install "praisonai[chat]"` |
  | **PraisonAI Train** | `pip install "praisonai[train]"` |
+ | **PraisonAI Realtime** | `pip install "praisonai[realtime]"` |

  ## Key Features

@@ -66,6 +68,9 @@ Praison AI, leveraging both AutoGen and CrewAI or any other agent framework, rep
  - Interactive UIs
  - YAML-based Configuration
  - Custom Tool Integration
+ - Internet Search Capability (using Crawl4AI and Tavily)
+ - Vision Language Model (VLM) Support
+ - Real-time Voice Interaction

  ## TL;DR Multi Agents

@@ -178,6 +183,14 @@ export OPENAI_API_KEY="Enter your API key"
  praisonai chat
  ```

+ ### Internet Search
+
+ Praison AI Chat and Praison AI Code now includes internet search capabilities using Crawl4AI and Tavily, allowing you to retrieve up-to-date information during your conversations.
+
+ ### Vision Language Model Support
+
+ You can now upload images and ask questions based on them using Vision Language Models. This feature enables visual understanding and analysis within your chat sessions.
+
  ## Praison AI Code

  ```bash
@@ -186,6 +199,10 @@ export OPENAI_API_KEY="Enter your API key"
  praisonai code
  ```

+ ### Internet Search
+
+ Praison AI Code also includes internet search functionality, enabling you to find relevant code snippets and programming information online.
+
  ## Create Custom Tools

  - https://docs.praison.ai/tools/custom/
{praisonai-0.0.73 → praisonai-0.0.74}/praisonai/cli.py

@@ -130,6 +130,10 @@ class PraisonAI:
              self.create_code_interface()
              return

+         if getattr(args, 'realtime', False):
+             self.create_realtime_interface()
+             return
+
          if args.agent_file == 'train':
              package_root = os.path.dirname(os.path.abspath(__file__))
              config_yaml_destination = os.path.join(os.getcwd(), 'config.yaml')
@@ -256,6 +260,7 @@ class PraisonAI:
          parser.add_argument("--hf", type=str, help="Hugging Face model name")
          parser.add_argument("--ollama", type=str, help="Ollama model name")
          parser.add_argument("--dataset", type=str, help="Dataset name for training", default="yahma/alpaca-cleaned")
+         parser.add_argument("--realtime", action="store_true", help="Start the realtime voice interaction interface")
          args, unknown_args = parser.parse_known_args()

          if unknown_args and unknown_args[0] == '-b' and unknown_args[1] == 'api:app':
@@ -270,6 +275,8 @@ class PraisonAI:
          if args.agent_file == 'code':
              args.ui = 'chainlit'
              args.code = True
+         if args.agent_file == 'realtime':
+             args.realtime = True

          return args

@@ -416,6 +423,29 @@ class PraisonAI:
          else:
              print("ERROR: Chainlit is not installed. Please install it with 'pip install \"praisonai\[ui]\"' to use the UI.")

+     def create_realtime_interface(self):
+         """
+         Create a Chainlit interface for the realtime voice interaction application.
+         """
+         if CHAINLIT_AVAILABLE:
+             import praisonai
+             os.environ["CHAINLIT_PORT"] = "8088" # Ensure this port is not in use by another service
+             root_path = os.path.join(os.path.expanduser("~"), ".praison")
+             os.environ["CHAINLIT_APP_ROOT"] = root_path
+             public_folder = os.path.join(os.path.dirname(praisonai.__file__), 'public')
+             if not os.path.exists(os.path.join(root_path, "public")):
+                 if os.path.exists(public_folder):
+                     shutil.copytree(public_folder, os.path.join(root_path, "public"), dirs_exist_ok=True)
+                     logging.info("Public folder copied successfully!")
+                 else:
+                     logging.info("Public folder not found in the package.")
+             else:
+                 logging.info("Public folder already exists.")
+             realtime_ui_path = os.path.join(os.path.dirname(praisonai.__file__), 'ui', 'realtime.py')
+             chainlit_run([realtime_ui_path])
+         else:
+             print("ERROR: Realtime UI is not installed. Please install it with 'pip install \"praisonai[realtime]\"' to use the realtime UI.")
+
  if __name__ == "__main__":
      praison_ai = PraisonAI()
      praison_ai.main()
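
Taken together, the cli.py changes expose the new interface in two equivalent ways: a `--realtime` flag and a `realtime` positional shortcut, both of which end up in `create_realtime_interface()` and serve the Chainlit app on port 8088. A minimal launch sequence based only on the options shown above (a sketch; the API key value is a placeholder):

```bash
# Install the new extra introduced in 0.0.74 (pulls in chainlit 1.3.0rc1, websockets, plotly, ...)
pip install "praisonai[realtime]"

# The realtime UI connects to the OpenAI Realtime API
export OPENAI_API_KEY="Enter your API key"

# Equivalent to `praisonai --realtime`; Chainlit is served on port 8088
praisonai realtime
```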
{praisonai-0.0.73 → praisonai-0.0.74}/praisonai/deploy.py

@@ -56,7 +56,7 @@ class CloudDeployer:
              file.write("FROM python:3.11-slim\n")
              file.write("WORKDIR /app\n")
              file.write("COPY . .\n")
-             file.write("RUN pip install flask praisonai==0.0.73 gunicorn markdown\n")
+             file.write("RUN pip install flask praisonai==0.0.74 gunicorn markdown\n")
              file.write("EXPOSE 8080\n")
              file.write('CMD ["gunicorn", "-b", "0.0.0.0:8080", "api:app"]\n')

praisonai-0.0.74/praisonai/ui/realtime.py (new file)

@@ -0,0 +1,368 @@
+ import os
+ import asyncio
+ import sqlite3
+ from datetime import datetime
+ from uuid import uuid4
+
+ from openai import AsyncOpenAI
+ import chainlit as cl
+ from chainlit.logger import logger
+ from chainlit.input_widget import TextInput
+ from chainlit.types import ThreadDict
+
+ from realtimeclient import RealtimeClient
+ from realtimeclient.tools import tools
+ from sql_alchemy import SQLAlchemyDataLayer
+ import chainlit.data as cl_data
+ from literalai.helper import utc_now
+ import json
+
+ # Set up CHAINLIT_AUTH_SECRET
+ CHAINLIT_AUTH_SECRET = os.getenv("CHAINLIT_AUTH_SECRET")
+
+ if not CHAINLIT_AUTH_SECRET:
+     os.environ["CHAINLIT_AUTH_SECRET"] = "p8BPhQChpg@J>jBz$wGxqLX2V>yTVgP*7Ky9H$aV:axW~ANNX-7_T:o@lnyCBu^U"
+     CHAINLIT_AUTH_SECRET = os.getenv("CHAINLIT_AUTH_SECRET")
+
+ # Database path
+ DB_PATH = os.path.expanduser("~/.praison/database.sqlite")
+
+ def initialize_db():
+     os.makedirs(os.path.dirname(DB_PATH), exist_ok=True)
+     conn = sqlite3.connect(DB_PATH)
+     cursor = conn.cursor()
+     cursor.execute('''
+         CREATE TABLE IF NOT EXISTS users (
+             id UUID PRIMARY KEY,
+             identifier TEXT NOT NULL UNIQUE,
+             metadata JSONB NOT NULL,
+             createdAt TEXT
+         )
+     ''')
+     cursor.execute('''
+         CREATE TABLE IF NOT EXISTS threads (
+             id UUID PRIMARY KEY,
+             createdAt TEXT,
+             name TEXT,
+             userId UUID,
+             userIdentifier TEXT,
+             tags TEXT[],
+             metadata JSONB NOT NULL DEFAULT '{}',
+             FOREIGN KEY (userId) REFERENCES users(id) ON DELETE CASCADE
+         )
+     ''')
+     cursor.execute('''
+         CREATE TABLE IF NOT EXISTS steps (
+             id UUID PRIMARY KEY,
+             name TEXT NOT NULL,
+             type TEXT NOT NULL,
+             threadId UUID NOT NULL,
+             parentId UUID,
+             disableFeedback BOOLEAN NOT NULL DEFAULT 0,
+             streaming BOOLEAN NOT NULL DEFAULT 0,
+             waitForAnswer BOOLEAN DEFAULT 0,
+             isError BOOLEAN NOT NULL DEFAULT 0,
+             metadata JSONB DEFAULT '{}',
+             tags TEXT[],
+             input TEXT,
+             output TEXT,
+             createdAt TEXT,
+             start TEXT,
+             end TEXT,
+             generation JSONB,
+             showInput TEXT,
+             language TEXT,
+             indent INT,
+             FOREIGN KEY (threadId) REFERENCES threads (id) ON DELETE CASCADE
+         )
+     ''')
+     cursor.execute('''
+         CREATE TABLE IF NOT EXISTS elements (
+             id UUID PRIMARY KEY,
+             threadId UUID,
+             type TEXT,
+             url TEXT,
+             chainlitKey TEXT,
+             name TEXT NOT NULL,
+             display TEXT,
+             objectKey TEXT,
+             size TEXT,
+             page INT,
+             language TEXT,
+             forId UUID,
+             mime TEXT,
+             FOREIGN KEY (threadId) REFERENCES threads (id) ON DELETE CASCADE
+         )
+     ''')
+     cursor.execute('''
+         CREATE TABLE IF NOT EXISTS feedbacks (
+             id UUID PRIMARY KEY,
+             forId UUID NOT NULL,
+             value INT NOT NULL,
+             threadId UUID,
+             comment TEXT
+         )
+     ''')
+     cursor.execute('''
+         CREATE TABLE IF NOT EXISTS settings (
+             id INTEGER PRIMARY KEY AUTOINCREMENT,
+             key TEXT UNIQUE,
+             value TEXT
+         )
+     ''')
+     conn.commit()
+     conn.close()
+
+ def save_setting(key: str, value: str):
+     """Saves a setting to the database."""
+     conn = sqlite3.connect(DB_PATH)
+     cursor = conn.cursor()
+     cursor.execute(
+         """
+         INSERT OR REPLACE INTO settings (id, key, value)
+         VALUES ((SELECT id FROM settings WHERE key = ?), ?, ?)
+         """,
+         (key, key, value),
+     )
+     conn.commit()
+     conn.close()
+
+ def load_setting(key: str) -> str:
+     """Loads a setting from the database."""
+     conn = sqlite3.connect(DB_PATH)
+     cursor = conn.cursor()
+     cursor.execute('SELECT value FROM settings WHERE key = ?', (key,))
+     result = cursor.fetchone()
+     conn.close()
+     return result[0] if result else None
+
+ # Initialize the database
+ initialize_db()
+
+ # Set up SQLAlchemy data layer
+ cl_data._data_layer = SQLAlchemyDataLayer(conninfo=f"sqlite+aiosqlite:///{DB_PATH}")
+
+ client = AsyncOpenAI()
+
+ @cl.on_chat_start
+ async def start():
+     initialize_db()
+     model_name = load_setting("model_name") or os.getenv("MODEL_NAME", "gpt-4o-mini")
+     cl.user_session.set("model_name", model_name)
+     cl.user_session.set("message_history", []) # Initialize message history
+     logger.debug(f"Model name: {model_name}")
+     # settings = cl.ChatSettings(
+     #     [
+     #         TextInput(
+     #             id="model_name",
+     #             label="Enter the Model Name",
+     #             placeholder="e.g., gpt-4o-mini",
+     #             initial=model_name
+     #         )
+     #     ]
+     # )
+     # cl.user_session.set("settings", settings)
+     # await settings.send()
+     await cl.Message(
+         content="Welcome to the PraisonAI realtime. Press `P` to talk!"
+     ).send()
+     await setup_openai_realtime()
+
+ @cl.on_message
+ async def on_message(message: cl.Message):
+     openai_realtime: RealtimeClient = cl.user_session.get("openai_realtime")
+     message_history = cl.user_session.get("message_history", [])
+
+     if openai_realtime and openai_realtime.is_connected():
+         current_date = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+         prompt = f"Current time Just for reference: {current_date}\n\n{message.content}"
+
+         # Add user message to history
+         message_history.append({"role": "user", "content": prompt})
+         cl.user_session.set("message_history", message_history)
+
+         await openai_realtime.send_user_message_content([{ "type": 'input_text', "text": message.content }])
+     else:
+         await cl.Message(content="Please activate voice mode before sending messages!").send()
+
+ async def setup_openai_realtime():
+     """Instantiate and configure the OpenAI Realtime Client"""
+     openai_realtime = RealtimeClient(api_key=os.getenv("OPENAI_API_KEY"))
+     cl.user_session.set("track_id", str(uuid4()))
+
+     async def handle_conversation_updated(event):
+         item = event.get("item")
+         delta = event.get("delta")
+         """Currently used to stream audio back to the client."""
+         if delta:
+             if 'audio' in delta:
+                 audio = delta['audio'] # Int16Array, audio added
+                 await cl.context.emitter.send_audio_chunk(cl.OutputAudioChunk(mimeType="pcm16", data=audio, track=cl.user_session.get("track_id")))
+             if 'transcript' in delta:
+                 transcript = delta['transcript'] # string, transcript added
+                 logger.debug(f"Transcript delta: {transcript}")
+             if 'text' in delta:
+                 text = delta['text'] # string, text added
+                 logger.debug(f"Text delta: {text}")
+             if 'arguments' in delta:
+                 arguments = delta['arguments'] # string, function arguments added
+                 logger.debug(f"Function arguments delta: {arguments}")
+
+     async def handle_item_completed(event):
+         """Used to populate the chat context with transcription once an item is completed."""
+         try:
+             item = event.get("item")
+             logger.debug(f"Item completed: {json.dumps(item, indent=2, default=str)}")
+             await openai_realtime._send_chainlit_message(item)
+
+             # Add assistant message to history
+             message_history = cl.user_session.get("message_history", [])
+             content = item.get("formatted", {}).get("text", "") or item.get("formatted", {}).get("transcript", "")
+             if content:
+                 message_history.append({"role": "assistant", "content": content})
+                 cl.user_session.set("message_history", message_history)
+         except Exception as e:
+             error_message = f"Error in handle_item_completed: {str(e)}"
+             logger.error(error_message)
+             debug_item = json.dumps(item, indent=2, default=str)
+             logger.error(f"Item causing error: {debug_item}")
+
+     async def handle_conversation_interrupt(event):
+         """Used to cancel the client previous audio playback."""
+         cl.user_session.set("track_id", str(uuid4()))
+         await cl.context.emitter.send_audio_interrupt()
+
+     async def handle_error(event):
+         logger.error(event)
+         await cl.Message(content=f"Error: {event}", author="System").send()
+
+     openai_realtime.on('conversation.updated', handle_conversation_updated)
+     openai_realtime.on('conversation.item.completed', handle_item_completed)
+     openai_realtime.on('conversation.interrupted', handle_conversation_interrupt)
+     openai_realtime.on('error', handle_error)
+
+     cl.user_session.set("openai_realtime", openai_realtime)
+     coros = [openai_realtime.add_tool(tool_def, tool_handler) for tool_def, tool_handler in tools]
+     await asyncio.gather(*coros)
+
+ @cl.on_settings_update
+ async def setup_agent(settings):
+     logger.debug(settings)
+     cl.user_session.set("settings", settings)
+     model_name = settings["model_name"]
+     cl.user_session.set("model_name", model_name)
+
+     # Save in settings table
+     save_setting("model_name", model_name)
+
+     # Save in thread metadata
+     thread_id = cl.user_session.get("thread_id")
+     if thread_id:
+         thread = await cl_data._data_layer.get_thread(thread_id)
+         if thread:
+             metadata = thread.get("metadata", {})
+             if isinstance(metadata, str):
+                 try:
+                     metadata = json.loads(metadata)
+                 except json.JSONDecodeError:
+                     metadata = {}
+
+             metadata["model_name"] = model_name
+
+             # Always store metadata as a dictionary
+             await cl_data._data_layer.update_thread(thread_id, metadata=metadata)
+
+             # Update the user session with the new metadata
+             cl.user_session.set("metadata", metadata)
+
+ @cl.on_audio_start
+ async def on_audio_start():
+     try:
+         openai_realtime: RealtimeClient = cl.user_session.get("openai_realtime")
+         await openai_realtime.connect()
+         logger.info("Connected to OpenAI realtime")
+         return True
+     except Exception as e:
+         await cl.ErrorMessage(content=f"Failed to connect to OpenAI realtime: {e}").send()
+         return False
+
+ @cl.on_audio_chunk
+ async def on_audio_chunk(chunk: cl.InputAudioChunk):
+     openai_realtime: RealtimeClient = cl.user_session.get("openai_realtime")
+     if openai_realtime.is_connected():
+         await openai_realtime.append_input_audio(chunk.data)
+     else:
+         logger.info("RealtimeClient is not connected")
+
+ @cl.on_audio_end
+ @cl.on_chat_end
+ @cl.on_stop
+ async def on_end():
+     openai_realtime: RealtimeClient = cl.user_session.get("openai_realtime")
+     if openai_realtime and openai_realtime.is_connected():
+         await openai_realtime.disconnect()
+
+ @cl.password_auth_callback
+ def auth_callback(username: str, password: str):
+     # You can customize this function to use your own authentication logic
+     expected_username = os.getenv("CHAINLIT_USERNAME", "admin")
+     expected_password = os.getenv("CHAINLIT_PASSWORD", "admin")
+     if (username, password) == (expected_username, expected_password):
+         return cl.User(
+             identifier=username, metadata={"role": "ADMIN", "provider": "credentials"}
+         )
+     else:
+         return None
+
+ @cl.on_chat_resume
+ async def on_chat_resume(thread: ThreadDict):
+     logger.info(f"Resuming chat: {thread['id']}")
+     model_name = load_setting("model_name") or os.getenv("MODEL_NAME") or "gpt-4o-mini"
+     logger.debug(f"Model name: {model_name}")
+     settings = cl.ChatSettings(
+         [
+             TextInput(
+                 id="model_name",
+                 label="Enter the Model Name",
+                 placeholder="e.g., gpt-4o-mini",
+                 initial=model_name
+             )
+         ]
+     )
+     await settings.send()
+     thread_id = thread["id"]
+     cl.user_session.set("thread_id", thread["id"])
+
+     # Ensure metadata is a dictionary
+     metadata = thread.get("metadata", {})
+     if isinstance(metadata, str):
+         try:
+             metadata = json.loads(metadata)
+         except json.JSONDecodeError:
+             metadata = {}
+
+     cl.user_session.set("metadata", metadata)
+
+     message_history = []
+     steps = thread["steps"]
+
+     for message in steps:
+         msg_type = message.get("type")
+         if msg_type == "user_message":
+             message_history.append({"role": "user", "content": message.get("output", "")})
+         elif msg_type == "assistant_message":
+             message_history.append({"role": "assistant", "content": message.get("output", "")})
+         elif msg_type == "run":
+             # Handle 'run' type messages
+             if message.get("isError"):
+                 message_history.append({"role": "system", "content": f"Error: {message.get('output', '')}"})
+             else:
+                 # You might want to handle non-error 'run' messages differently
+                 pass
+         else:
+             logger.warning(f"Message without recognized type: {message}")
+
+     cl.user_session.set("message_history", message_history)
+
+     # Reconnect to OpenAI realtime
+     await setup_openai_realtime()
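
The new realtime.py module is configured entirely through environment variables and a local SQLite store. The variables below are the ones read in the code above; the values are illustrative, and the admin/admin pair is simply the default used by `auth_callback`:

```bash
export OPENAI_API_KEY="Enter your API key"   # passed to RealtimeClient in setup_openai_realtime()
export MODEL_NAME="gpt-4o-mini"              # optional; otherwise the value saved in the settings table is used
export CHAINLIT_USERNAME="admin"             # optional; auth_callback falls back to admin/admin
export CHAINLIT_PASSWORD="admin"
# CHAINLIT_AUTH_SECRET is set to a built-in value when not provided.
# Threads, steps and settings are persisted in ~/.praison/database.sqlite.
```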