PraisonAI 0.1.3.tar.gz → 0.1.5.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of PraisonAI might be problematic.

Files changed (52)
  1. {praisonai-0.1.3 → praisonai-0.1.5}/PKG-INFO +23 -14
  2. {praisonai-0.1.3 → praisonai-0.1.5}/README.md +13 -11
  3. praisonai-0.1.5/praisonai/api/call.py +190 -0
  4. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/cli.py +14 -1
  5. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/deploy.py +1 -1
  6. {praisonai-0.1.3 → praisonai-0.1.5}/pyproject.toml +10 -2
  7. {praisonai-0.1.3 → praisonai-0.1.5}/LICENSE +0 -0
  8. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/__init__.py +0 -0
  9. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/__main__.py +0 -0
  10. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/agents_generator.py +0 -0
  11. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/auto.py +0 -0
  12. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/chainlit_ui.py +0 -0
  13. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/inbuilt_tools/__init__.py +0 -0
  14. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/inbuilt_tools/autogen_tools.py +0 -0
  15. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/inc/__init__.py +0 -0
  16. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/inc/config.py +0 -0
  17. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/inc/models.py +0 -0
  18. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/public/android-chrome-192x192.png +0 -0
  19. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/public/android-chrome-512x512.png +0 -0
  20. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/public/apple-touch-icon.png +0 -0
  21. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/public/fantasy.svg +0 -0
  22. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/public/favicon-16x16.png +0 -0
  23. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/public/favicon-32x32.png +0 -0
  24. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/public/favicon.ico +0 -0
  25. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/public/game.svg +0 -0
  26. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/public/logo_dark.png +0 -0
  27. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/public/logo_light.png +0 -0
  28. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/public/movie.svg +0 -0
  29. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/public/thriller.svg +0 -0
  30. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/setup/__init__.py +0 -0
  31. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/setup/build.py +0 -0
  32. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/setup/config.yaml +0 -0
  33. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/setup/post_install.py +0 -0
  34. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/setup/setup_conda_env.py +0 -0
  35. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/setup/setup_conda_env.sh +0 -0
  36. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/test.py +0 -0
  37. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/train.py +0 -0
  38. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/ui/chat.py +0 -0
  39. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/ui/code.py +0 -0
  40. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/ui/context.py +0 -0
  41. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/ui/public/fantasy.svg +0 -0
  42. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/ui/public/game.svg +0 -0
  43. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/ui/public/logo_dark.png +0 -0
  44. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/ui/public/logo_light.png +0 -0
  45. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/ui/public/movie.svg +0 -0
  46. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/ui/public/thriller.svg +0 -0
  47. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/ui/realtime.py +0 -0
  48. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/ui/realtimeclient/__init__.py +0 -0
  49. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/ui/realtimeclient/realtimedocs.txt +0 -0
  50. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/ui/realtimeclient/tools.py +0 -0
  51. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/ui/sql_alchemy.py +0 -0
  52. {praisonai-0.1.3 → praisonai-0.1.5}/praisonai/version.py +0 -0

{praisonai-0.1.3 → praisonai-0.1.5}/PKG-INFO

@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: PraisonAI
- Version: 0.1.3
+ Version: 0.1.5
  Summary: PraisonAI application combines AutoGen and CrewAI or similar frameworks into a low-code solution for building and managing multi-agent LLM systems, focusing on simplicity, customization, and efficient human-agent collaboration.
  Author: Mervin Praison
  Requires-Python: >=3.10,<3.13
@@ -11,6 +11,7 @@ Classifier: Programming Language :: Python :: 3.12
  Provides-Extra: agentops
  Provides-Extra: anthropic
  Provides-Extra: api
+ Provides-Extra: call
  Provides-Extra: chat
  Provides-Extra: code
  Provides-Extra: cohere
@@ -26,6 +27,8 @@ Requires-Dist: chainlit (==1.3.0rc1) ; extra == "ui" or extra == "chat" or extra
  Requires-Dist: crawl4ai (==0.3.4) ; extra == "chat" or extra == "code" or extra == "realtime"
  Requires-Dist: crewai (>=0.32.0)
  Requires-Dist: duckduckgo_search (>=6.3.0) ; extra == "realtime"
+ Requires-Dist: fastapi (>=0.95.0) ; extra == "call"
+ Requires-Dist: flaml[automl] (>=2.3.1) ; extra == "call"
  Requires-Dist: flask (>=3.0.0) ; extra == "api"
  Requires-Dist: gradio (>=4.26.0) ; extra == "gradio"
  Requires-Dist: greenlet (>=3.0.3) ; extra == "chat" or extra == "code" or extra == "realtime"
@@ -38,10 +41,14 @@ Requires-Dist: markdown (>=3.5)
  Requires-Dist: plotly (>=5.24.0) ; extra == "realtime"
  Requires-Dist: praisonai-tools (>=0.0.7)
  Requires-Dist: pyautogen (>=0.2.19)
+ Requires-Dist: pyngrok (>=1.4.0) ; extra == "call"
  Requires-Dist: pyparsing (>=3.0.0)
- Requires-Dist: rich (>=13.7)
+ Requires-Dist: python-dotenv (>=0.19.0) ; extra == "call"
+ Requires-Dist: rich (>=13.7) ; extra == "call"
  Requires-Dist: tavily-python (==0.5.0) ; extra == "chat" or extra == "code" or extra == "realtime"
- Requires-Dist: websockets (>=12.0) ; extra == "realtime"
+ Requires-Dist: twilio (>=7.0.0) ; extra == "call"
+ Requires-Dist: uvicorn (>=0.20.0) ; extra == "call"
+ Requires-Dist: websockets (>=12.0) ; extra == "realtime" or extra == "call"
  Requires-Dist: yfinance (>=0.2.44) ; extra == "realtime"
  Project-URL: Homepage, https://docs.praison.ai
  Project-URL: Repository, https://github.com/mervinpraison/PraisonAI
@@ -49,16 +56,16 @@ Description-Content-Type: text/markdown

  <p align="center">
    <picture>
-     <source media="(prefers-color-scheme: dark)" srcset="docs/images/praisonai-logo-large.png">
-     <source media="(prefers-color-scheme: light)" srcset="docs/images/praisonai-logo-black-large.png">
-     <img alt="PraisonAI Logo" src="docs/images/praisonai-logo-black-large.png">
+     <source media="(prefers-color-scheme: dark)" srcset="docs/logo/dark.png" />
+     <source media="(prefers-color-scheme: light)" srcset="docs/logo/light.png" />
+     <img alt="PraisonAI Logo" src="docs/logo/light.png" />
    </picture>
  </p>

  <p align="center">
-   <a href="https://github.com/MervinPraison/PraisonAI"><img src="https://static.pepy.tech/badge/PraisonAI" alt="Total Downloads"></a>
-   <a href="https://github.com/MervinPraison/PraisonAI"><img src="https://img.shields.io/github/v/release/MervinPraison/PraisonAI" alt="Latest Stable Version"></a>
-   <a href="https://github.com/MervinPraison/PraisonAI"><img src="https://img.shields.io/badge/License-MIT-yellow.svg" alt="License"></a>
+   <a href="https://github.com/MervinPraison/PraisonAI"><img src="https://static.pepy.tech/badge/PraisonAI" alt="Total Downloads" /></a>
+   <a href="https://github.com/MervinPraison/PraisonAI"><img src="https://img.shields.io/github/v/release/MervinPraison/PraisonAI" alt="Latest Stable Version" /></a>
+   <a href="https://github.com/MervinPraison/PraisonAI"><img src="https://img.shields.io/badge/License-MIT-yellow.svg" alt="License" /></a>
  </p>

  <div align="center">
@@ -71,9 +78,9 @@ Praison AI, leveraging both AutoGen and CrewAI or any other agent framework, rep

  <div align="center">
    <picture>
-     <source media="(prefers-color-scheme: dark)" srcset="docs/images/architecture-dark.png">
-     <source media="(prefers-color-scheme: light)" srcset="docs/images/architecture-light.png">
-     <img alt="PraisonAI Architecture" src="docs/images/architecture-light.png">
+     <source media="(prefers-color-scheme: dark)" srcset="docs/images/architecture-dark.png" />
+     <source media="(prefers-color-scheme: light)" srcset="docs/images/architecture-light.png" />
+     <img alt="PraisonAI Architecture" src="docs/images/architecture-light.png" />
    </picture>
  </div>

@@ -95,8 +102,8 @@ Praison AI, leveraging both AutoGen and CrewAI or any other agent framework, rep

  | | Cookbook | Open in Colab |
  | ------------- | --------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
- | Basic | PraisonAI | <a target="_blank" href="https://colab.research.google.com/github/MervinPraison/PraisonAI/blob/main/cookbooks/praisonai-googlecolab.ipynb"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"/></a> |
- | Include Tools | PraisonAI Tools | <a target="_blank" href="https://colab.research.google.com/github/MervinPraison/PraisonAI/blob/main/cookbooks/praisonai-tools-googlecolab.ipynb"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"/></a> |
+ | Basic | PraisonAI | <a target="_blank" href="https://colab.research.google.com/github/MervinPraison/PraisonAI/blob/main/cookbooks/praisonai-googlecolab.ipynb"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab" /></a> |
+ | Include Tools | PraisonAI Tools | <a target="_blank" href="https://colab.research.google.com/github/MervinPraison/PraisonAI/blob/main/cookbooks/praisonai-tools-googlecolab.ipynb"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab" /></a> |

  ## Install

@@ -107,6 +114,7 @@ Praison AI, leveraging both AutoGen and CrewAI or any other agent framework, rep
  | **PraisonAI Chat** | `pip install "praisonai[chat]"` |
  | **PraisonAI Train** | `pip install "praisonai[train]"` |
  | **PraisonAI Realtime** | `pip install "praisonai[realtime]"` |
+ | **PraisonAI Call** | `pip install "praisonai[call]"` |

  ## Key Features

@@ -374,3 +382,4 @@ This configuration ensures that your development dependencies are correctly cate

  Praison AI is an open-sourced software licensed under the **[MIT license](https://opensource.org/licenses/MIT)**.

+

{praisonai-0.1.3 → praisonai-0.1.5}/README.md

@@ -1,15 +1,15 @@
  <p align="center">
    <picture>
-     <source media="(prefers-color-scheme: dark)" srcset="docs/images/praisonai-logo-large.png">
-     <source media="(prefers-color-scheme: light)" srcset="docs/images/praisonai-logo-black-large.png">
-     <img alt="PraisonAI Logo" src="docs/images/praisonai-logo-black-large.png">
+     <source media="(prefers-color-scheme: dark)" srcset="docs/logo/dark.png" />
+     <source media="(prefers-color-scheme: light)" srcset="docs/logo/light.png" />
+     <img alt="PraisonAI Logo" src="docs/logo/light.png" />
    </picture>
  </p>

  <p align="center">
-   <a href="https://github.com/MervinPraison/PraisonAI"><img src="https://static.pepy.tech/badge/PraisonAI" alt="Total Downloads"></a>
-   <a href="https://github.com/MervinPraison/PraisonAI"><img src="https://img.shields.io/github/v/release/MervinPraison/PraisonAI" alt="Latest Stable Version"></a>
-   <a href="https://github.com/MervinPraison/PraisonAI"><img src="https://img.shields.io/badge/License-MIT-yellow.svg" alt="License"></a>
+   <a href="https://github.com/MervinPraison/PraisonAI"><img src="https://static.pepy.tech/badge/PraisonAI" alt="Total Downloads" /></a>
+   <a href="https://github.com/MervinPraison/PraisonAI"><img src="https://img.shields.io/github/v/release/MervinPraison/PraisonAI" alt="Latest Stable Version" /></a>
+   <a href="https://github.com/MervinPraison/PraisonAI"><img src="https://img.shields.io/badge/License-MIT-yellow.svg" alt="License" /></a>
  </p>

  <div align="center">
@@ -22,9 +22,9 @@ Praison AI, leveraging both AutoGen and CrewAI or any other agent framework, rep

  <div align="center">
    <picture>
-     <source media="(prefers-color-scheme: dark)" srcset="docs/images/architecture-dark.png">
-     <source media="(prefers-color-scheme: light)" srcset="docs/images/architecture-light.png">
-     <img alt="PraisonAI Architecture" src="docs/images/architecture-light.png">
+     <source media="(prefers-color-scheme: dark)" srcset="docs/images/architecture-dark.png" />
+     <source media="(prefers-color-scheme: light)" srcset="docs/images/architecture-light.png" />
+     <img alt="PraisonAI Architecture" src="docs/images/architecture-light.png" />
    </picture>
  </div>

@@ -46,8 +46,8 @@ Praison AI, leveraging both AutoGen and CrewAI or any other agent framework, rep

  | | Cookbook | Open in Colab |
  | ------------- | --------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
- | Basic | PraisonAI | <a target="_blank" href="https://colab.research.google.com/github/MervinPraison/PraisonAI/blob/main/cookbooks/praisonai-googlecolab.ipynb"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"/></a> |
- | Include Tools | PraisonAI Tools | <a target="_blank" href="https://colab.research.google.com/github/MervinPraison/PraisonAI/blob/main/cookbooks/praisonai-tools-googlecolab.ipynb"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"/></a> |
+ | Basic | PraisonAI | <a target="_blank" href="https://colab.research.google.com/github/MervinPraison/PraisonAI/blob/main/cookbooks/praisonai-googlecolab.ipynb"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab" /></a> |
+ | Include Tools | PraisonAI Tools | <a target="_blank" href="https://colab.research.google.com/github/MervinPraison/PraisonAI/blob/main/cookbooks/praisonai-tools-googlecolab.ipynb"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab" /></a> |

  ## Install

@@ -58,6 +58,7 @@ Praison AI, leveraging both AutoGen and CrewAI or any other agent framework, rep
  | **PraisonAI Chat** | `pip install "praisonai[chat]"` |
  | **PraisonAI Train** | `pip install "praisonai[train]"` |
  | **PraisonAI Realtime** | `pip install "praisonai[realtime]"` |
+ | **PraisonAI Call** | `pip install "praisonai[call]"` |

  ## Key Features

@@ -324,3 +325,4 @@ This configuration ensures that your development dependencies are correctly cate
  ## License

  Praison AI is an open-sourced software licensed under the **[MIT license](https://opensource.org/licenses/MIT)**.
+

praisonai-0.1.5/praisonai/api/call.py (new file)

@@ -0,0 +1,190 @@
+ import os
+ import json
+ import base64
+ import asyncio
+ import websockets
+ from fastapi import FastAPI, WebSocket, Request
+ from fastapi.responses import HTMLResponse
+ from fastapi.websockets import WebSocketDisconnect
+ from twilio.twiml.voice_response import VoiceResponse, Connect
+ from dotenv import load_dotenv
+ import uvicorn
+ from pyngrok import ngrok, conf
+ from rich import print
+ import argparse
+
+ load_dotenv()
+
+ # Configuration
+ OPENAI_API_KEY = os.getenv('OPENAI_API_KEY') # requires OpenAI Realtime API Access
+ PORT = int(os.getenv('PORT', 8090))
+ NGROK_AUTH_TOKEN = os.getenv('NGROK_AUTH_TOKEN')
+ PUBLIC = os.getenv('PUBLIC', 'false').lower() == 'true'
+ SYSTEM_MESSAGE = (
+     "You are a helpful and bubbly AI assistant who loves to chat about "
+     "anything the user is interested in and is prepared to offer them facts. "
+     "You have a penchant for dad jokes, owl jokes, and rickrolling – subtly. "
+     "Always stay positive, but work in a joke when appropriate."
+ )
+ VOICE = 'alloy'
+ LOG_EVENT_TYPES = [
+     'response.content.done', 'rate_limits.updated', 'response.done',
+     'input_audio_buffer.committed', 'input_audio_buffer.speech_stopped',
+     'input_audio_buffer.speech_started', 'session.created'
+ ]
+
+ app = FastAPI()
+
+ if not OPENAI_API_KEY:
+     raise ValueError('Missing the OpenAI API key. Please set it in the .env file.')
+
+ @app.get("/", response_class=HTMLResponse)
+ async def index_page():
+     return """
+     <html>
+         <head>
+             <title>Praison AI Call Server</title>
+         </head>
+         <body>
+             <h1>Praison AI Call Server is running!</h1>
+         </body>
+     </html>
+     """
+
+ @app.api_route("/call", methods=["GET", "POST"])
+ async def handle_incoming_call(request: Request):
+     """Handle incoming call and return TwiML response to connect to Media Stream."""
+     response = VoiceResponse()
+     response.say("")
+     response.pause(length=1)
+     response.say("O.K. you can start talking!")
+     host = request.url.hostname
+     connect = Connect()
+     connect.stream(url=f'wss://{host}/media-stream')
+     response.append(connect)
+     return HTMLResponse(content=str(response), media_type="application/xml")
+
+ @app.websocket("/media-stream")
+ async def handle_media_stream(websocket: WebSocket):
+     """Handle WebSocket connections between Twilio and OpenAI."""
+     print("Client connected")
+     await websocket.accept()
+
+     async with websockets.connect(
+         'wss://api.openai.com/v1/realtime?model=gpt-4o-realtime-preview-2024-10-01',
+         extra_headers={
+             "Authorization": f"Bearer {OPENAI_API_KEY}",
+             "OpenAI-Beta": "realtime=v1"
+         }
+     ) as openai_ws:
+         await send_session_update(openai_ws)
+         stream_sid = None
+
+         async def receive_from_twilio():
+             """Receive audio data from Twilio and send it to the OpenAI Realtime API."""
+             nonlocal stream_sid
+             try:
+                 async for message in websocket.iter_text():
+                     data = json.loads(message)
+                     if data['event'] == 'media' and openai_ws.open:
+                         audio_append = {
+                             "type": "input_audio_buffer.append",
+                             "audio": data['media']['payload']
+                         }
+                         await openai_ws.send(json.dumps(audio_append))
+                     elif data['event'] == 'start':
+                         stream_sid = data['start']['streamSid']
+                         print(f"Incoming stream has started {stream_sid}")
+             except WebSocketDisconnect:
+                 print("Client disconnected.")
+                 if openai_ws.open:
+                     await openai_ws.close()
+
+         async def send_to_twilio():
+             """Receive events from the OpenAI Realtime API, send audio back to Twilio."""
+             nonlocal stream_sid
+             try:
+                 async for openai_message in openai_ws:
+                     response = json.loads(openai_message)
+                     if response['type'] in LOG_EVENT_TYPES:
+                         print(f"Received event: {response['type']}", response)
+                     if response['type'] == 'session.updated':
+                         print("Session updated successfully:", response)
+                     if response['type'] == 'response.audio.delta' and response.get('delta'):
+                         # Audio from OpenAI
+                         try:
+                             audio_payload = base64.b64encode(base64.b64decode(response['delta'])).decode('utf-8')
+                             audio_delta = {
+                                 "event": "media",
+                                 "streamSid": stream_sid,
+                                 "media": {
+                                     "payload": audio_payload
+                                 }
+                             }
+                             await websocket.send_json(audio_delta)
+                         except Exception as e:
+                             print(f"Error processing audio data: {e}")
+             except Exception as e:
+                 print(f"Error in send_to_twilio: {e}")
+
+         await asyncio.gather(receive_from_twilio(), send_to_twilio())
+
+ async def send_session_update(openai_ws):
+     """Send session update to OpenAI WebSocket."""
+     session_update = {
+         "type": "session.update",
+         "session": {
+             "turn_detection": {
+                 "type": "server_vad",
+                 "threshold": 0.5,
+                 "prefix_padding_ms": 300,
+                 "silence_duration_ms": 200
+             },
+             "input_audio_format": "g711_ulaw",
+             "output_audio_format": "g711_ulaw",
+             # "input_audio_transcription": { "model": 'whisper-1' },
+             # "transcription_models": [{"model": "whisper-1"}],
+             "voice": VOICE,
+             "tools": [],
+             "tool_choice": "auto",
+             "instructions": SYSTEM_MESSAGE,
+             "modalities": ["text", "audio"],
+             "temperature": 0.8
+         }
+     }
+     print('Sending session update:', json.dumps(session_update))
+     await openai_ws.send(json.dumps(session_update))
+
+ def setup_public_url(port):
+     if NGROK_AUTH_TOKEN:
+         conf.get_default().auth_token = NGROK_AUTH_TOKEN
+     public_url = ngrok.connect(addr=str(port)).public_url
+     print(f"Praison AI Voice URL: {public_url}/call")
+     return public_url
+
+ def run_server(port: int, use_public: bool = False):
+     """Run the FastAPI server using uvicorn."""
+     if use_public:
+         setup_public_url(port)
+     else:
+         print(f"Starting Praison AI Call Server on http://localhost:{port}/call")
+     uvicorn.run(app, host="0.0.0.0", port=port, log_level="warning")
+
+ def main(args=None):
+     """Run the Praison AI Call Server."""
+     parser = argparse.ArgumentParser(description="Run the Praison AI Call Server.")
+     parser.add_argument('--public', action='store_true', help="Use ngrok to expose the server publicly")
+     parser.add_argument('--port', type=int, default=PORT, help="Port to run the server on")
+
+     if args is None:
+         args = parser.parse_args()
+     else:
+         args = parser.parse_args(args)
+
+     port = args.port
+     use_public = args.public or PUBLIC
+
+     run_server(port=port, use_public=use_public)
+
+ if __name__ == "__main__":
+     main()
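
The new `praisonai/api/call.py` module bridges a Twilio media stream and the OpenAI Realtime API over WebSockets, and exposes a `main()` entry point that accepts `--port` and `--public`. A minimal usage sketch, assuming the package is installed with the `call` extra and `OPENAI_API_KEY` is available in the environment or a `.env` file (`NGROK_AUTH_TOKEN` is only needed with `--public`); the values below are placeholders, not taken from the package:

```python
# Minimal sketch: start the call server from Python, mirroring what the CLI does.
import praisonai.api.call as call_module

# Serve locally on port 8090; Twilio's voice webhook should then point at the
# /call route, which returns TwiML that connects the call to the /media-stream
# WebSocket handled above. This call blocks while uvicorn runs.
call_module.main(["--port", "8090"])

# Alternatively, expose the server through an ngrok tunnel (prints the public URL):
# call_module.main(["--public"])
```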

{praisonai-0.1.3 → praisonai-0.1.5}/praisonai/cli.py

@@ -16,6 +16,8 @@ from .inc.config import generate_config
  import shutil
  import subprocess
  import logging
+ import importlib
+ import praisonai.api.call as call_module
  logging.basicConfig(level=os.environ.get('LOGLEVEL', 'INFO'), format='%(asctime)s - %(levelname)s - %(message)s')

  try:
@@ -134,6 +136,13 @@ class PraisonAI:
              self.create_realtime_interface()
              return

+         if getattr(args, 'call', False):
+             call_args = []
+             if args.public:
+                 call_args.append('--public')
+             call_module.main(call_args)
+             return
+
          if args.agent_file == 'train':
              package_root = os.path.dirname(os.path.abspath(__file__))
              config_yaml_destination = os.path.join(os.getcwd(), 'config.yaml')
@@ -261,6 +270,8 @@ class PraisonAI:
          parser.add_argument("--ollama", type=str, help="Ollama model name")
          parser.add_argument("--dataset", type=str, help="Dataset name for training", default="yahma/alpaca-cleaned")
          parser.add_argument("--realtime", action="store_true", help="Start the realtime voice interaction interface")
+         parser.add_argument("--call", action="store_true", help="Start the PraisonAI Call server")
+         parser.add_argument("--public", action="store_true", help="Use ngrok to expose the server publicly (only with --call)")
          args, unknown_args = parser.parse_known_args()

          if unknown_args and unknown_args[0] == '-b' and unknown_args[1] == 'api:app':
@@ -277,6 +288,8 @@ class PraisonAI:
              args.code = True
          if args.agent_file == 'realtime':
              args.realtime = True
+         if args.agent_file == 'call':
+             args.call = True

          return args

@@ -448,4 +461,4 @@ class PraisonAI:

  if __name__ == "__main__":
      praison_ai = PraisonAI()
-     praison_ai.main()
+     praison_ai.main()
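
With these CLI changes in place, the same server can be started with `praisonai --call` (or the positional shorthand `praisonai call`, which sets `args.call`), and `praisonai --call --public` forwards `--public` so the server is tunnelled through ngrok. The CLI does not forward a port flag, so the port falls back to the `PORT` environment variable (default 8090) defined in `praisonai.api.call`.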

{praisonai-0.1.3 → praisonai-0.1.5}/praisonai/deploy.py

@@ -56,7 +56,7 @@ class CloudDeployer:
              file.write("FROM python:3.11-slim\n")
              file.write("WORKDIR /app\n")
              file.write("COPY . .\n")
-             file.write("RUN pip install flask praisonai==0.1.3 gunicorn markdown\n")
+             file.write("RUN pip install flask praisonai==0.1.5 gunicorn markdown\n")
              file.write("EXPOSE 8080\n")
              file.write('CMD ["gunicorn", "-b", "0.0.0.0:8080", "api:app"]\n')


{praisonai-0.1.3 → praisonai-0.1.5}/pyproject.toml

@@ -1,6 +1,6 @@
  [tool.poetry]
  name = "PraisonAI"
- version = "0.1.3"
+ version = "0.1.5"
  description = "PraisonAI application combines AutoGen and CrewAI or similar frameworks into a low-code solution for building and managing multi-agent LLM systems, focusing on simplicity, customization, and efficient human-agent collaboration."
  authors = ["Mervin Praison"]
  license = ""
@@ -38,6 +38,12 @@ websockets = {version = ">=12.0", optional = true}
  plotly = {version = ">=5.24.0", optional = true}
  yfinance = {version = ">=0.2.44", optional = true}
  duckduckgo_search = {version = ">=6.3.0", optional = true}
+ twilio = {version = ">=7.0.0", optional = true}
+ fastapi = {version = ">=0.95.0", optional = true}
+ uvicorn = {version = ">=0.20.0", optional = true}
+ python-dotenv = {version = ">=0.19.0", optional = true}
+ flaml = {version = ">=2.3.1", extras = ["automl"], optional = true}
+ pyngrok = {version = ">=1.4.0", optional = true}

  [tool.poetry.group.docs.dependencies]
  mkdocs = "*"
@@ -97,6 +103,7 @@ build-backend = "poetry.core.masonry.api"
  praisonai = "praisonai.__main__:main"
  setup-conda-env = "setup.setup_conda_env:main"
  post-install = "setup.post_install:main"
+ praisonai-call = "praisonai.api.call:main"

  [tool.poetry.extras]
  ui = ["chainlit"]
@@ -111,6 +118,7 @@ chat = ["chainlit", "litellm", "aiosqlite", "greenlet", "tavily-python", "crawl4
  code = ["chainlit", "litellm", "aiosqlite", "greenlet", "tavily-python", "crawl4ai"]
  train = ["setup-conda-env"]
  realtime = ["chainlit", "litellm", "aiosqlite", "greenlet", "tavily-python", "crawl4ai", "websockets", "plotly", "yfinance", "duckduckgo_search"]
+ call = ["twilio", "fastapi", "uvicorn", "websockets", "python-dotenv", "flaml", "pyngrok", "rich"]

  [tool.poetry-dynamic-versioning]
  enable = true
@@ -119,4 +127,4 @@ style = "semver"

  [tool.poetry.build]
  generate-setup-file = false
- script = "praisonai/setup/post_install.py"
+ script = "praisonai/setup/post_install.py"
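
On the packaging side, the new `call` extra pulls in the Twilio, FastAPI, uvicorn, websockets, python-dotenv, flaml[automl] and pyngrok dependencies, and a `praisonai-call` console script is registered against `praisonai.api.call:main`. So after `pip install "praisonai[call]"`, the server can also be launched directly as `praisonai-call`, optionally with `--port` or `--public`, without going through the main `praisonai` CLI.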