PraisonAI 0.1.2__tar.gz → 0.1.4__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of PraisonAI might be problematic; consult the package registry's advisory page for more details.

Files changed (52)
  1. {praisonai-0.1.2 → praisonai-0.1.4}/PKG-INFO +24 -14
  2. {praisonai-0.1.2 → praisonai-0.1.4}/README.md +13 -11
  3. praisonai-0.1.4/praisonai/api/call.py +188 -0
  4. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/cli.py +10 -1
  5. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/deploy.py +1 -1
  6. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/ui/realtime.py +20 -13
  7. {praisonai-0.1.2 → praisonai-0.1.4}/pyproject.toml +11 -2
  8. {praisonai-0.1.2 → praisonai-0.1.4}/LICENSE +0 -0
  9. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/__init__.py +0 -0
  10. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/__main__.py +0 -0
  11. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/agents_generator.py +0 -0
  12. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/auto.py +0 -0
  13. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/chainlit_ui.py +0 -0
  14. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/inbuilt_tools/__init__.py +0 -0
  15. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/inbuilt_tools/autogen_tools.py +0 -0
  16. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/inc/__init__.py +0 -0
  17. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/inc/config.py +0 -0
  18. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/inc/models.py +0 -0
  19. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/public/android-chrome-192x192.png +0 -0
  20. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/public/android-chrome-512x512.png +0 -0
  21. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/public/apple-touch-icon.png +0 -0
  22. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/public/fantasy.svg +0 -0
  23. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/public/favicon-16x16.png +0 -0
  24. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/public/favicon-32x32.png +0 -0
  25. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/public/favicon.ico +0 -0
  26. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/public/game.svg +0 -0
  27. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/public/logo_dark.png +0 -0
  28. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/public/logo_light.png +0 -0
  29. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/public/movie.svg +0 -0
  30. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/public/thriller.svg +0 -0
  31. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/setup/__init__.py +0 -0
  32. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/setup/build.py +0 -0
  33. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/setup/config.yaml +0 -0
  34. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/setup/post_install.py +0 -0
  35. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/setup/setup_conda_env.py +0 -0
  36. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/setup/setup_conda_env.sh +0 -0
  37. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/test.py +0 -0
  38. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/train.py +0 -0
  39. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/ui/chat.py +0 -0
  40. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/ui/code.py +0 -0
  41. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/ui/context.py +0 -0
  42. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/ui/public/fantasy.svg +0 -0
  43. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/ui/public/game.svg +0 -0
  44. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/ui/public/logo_dark.png +0 -0
  45. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/ui/public/logo_light.png +0 -0
  46. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/ui/public/movie.svg +0 -0
  47. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/ui/public/thriller.svg +0 -0
  48. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/ui/realtimeclient/__init__.py +0 -0
  49. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/ui/realtimeclient/realtimedocs.txt +0 -0
  50. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/ui/realtimeclient/tools.py +0 -0
  51. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/ui/sql_alchemy.py +0 -0
  52. {praisonai-0.1.2 → praisonai-0.1.4}/praisonai/version.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: PraisonAI
3
- Version: 0.1.2
3
+ Version: 0.1.4
4
4
  Summary: PraisonAI application combines AutoGen and CrewAI or similar frameworks into a low-code solution for building and managing multi-agent LLM systems, focusing on simplicity, customization, and efficient human-agent collaboration.
5
5
  Author: Mervin Praison
6
6
  Requires-Python: >=3.10,<3.13
@@ -11,6 +11,7 @@ Classifier: Programming Language :: Python :: 3.12
11
11
  Provides-Extra: agentops
12
12
  Provides-Extra: anthropic
13
13
  Provides-Extra: api
14
+ Provides-Extra: call
14
15
  Provides-Extra: chat
15
16
  Provides-Extra: code
16
17
  Provides-Extra: cohere
@@ -26,6 +27,8 @@ Requires-Dist: chainlit (==1.3.0rc1) ; extra == "ui" or extra == "chat" or extra
26
27
  Requires-Dist: crawl4ai (==0.3.4) ; extra == "chat" or extra == "code" or extra == "realtime"
27
28
  Requires-Dist: crewai (>=0.32.0)
28
29
  Requires-Dist: duckduckgo_search (>=6.3.0) ; extra == "realtime"
30
+ Requires-Dist: fastapi (>=0.95.0) ; extra == "call"
31
+ Requires-Dist: flaml[automl] (>=2.3.1) ; extra == "call"
29
32
  Requires-Dist: flask (>=3.0.0) ; extra == "api"
30
33
  Requires-Dist: gradio (>=4.26.0) ; extra == "gradio"
31
34
  Requires-Dist: greenlet (>=3.0.3) ; extra == "chat" or extra == "code" or extra == "realtime"
@@ -38,10 +41,15 @@ Requires-Dist: markdown (>=3.5)
38
41
  Requires-Dist: plotly (>=5.24.0) ; extra == "realtime"
39
42
  Requires-Dist: praisonai-tools (>=0.0.7)
40
43
  Requires-Dist: pyautogen (>=0.2.19)
44
+ Requires-Dist: pyngrok (>=1.4.0) ; extra == "call"
41
45
  Requires-Dist: pyparsing (>=3.0.0)
42
- Requires-Dist: rich (>=13.7)
46
+ Requires-Dist: python-dotenv (>=0.19.0) ; extra == "call"
47
+ Requires-Dist: rich (>=13.7) ; extra == "call"
43
48
  Requires-Dist: tavily-python (==0.5.0) ; extra == "chat" or extra == "code" or extra == "realtime"
44
- Requires-Dist: websockets (>=12.0) ; extra == "realtime"
49
+ Requires-Dist: twilio (>=7.0.0) ; extra == "call"
50
+ Requires-Dist: typer (>=0.9.0) ; extra == "call"
51
+ Requires-Dist: uvicorn (>=0.20.0) ; extra == "call"
52
+ Requires-Dist: websockets (>=12.0) ; extra == "realtime" or extra == "call"
45
53
  Requires-Dist: yfinance (>=0.2.44) ; extra == "realtime"
46
54
  Project-URL: Homepage, https://docs.praison.ai
47
55
  Project-URL: Repository, https://github.com/mervinpraison/PraisonAI
@@ -49,16 +57,16 @@ Description-Content-Type: text/markdown
49
57
 
50
58
  <p align="center">
51
59
  <picture>
52
- <source media="(prefers-color-scheme: dark)" srcset="docs/images/praisonai-logo-large.png">
53
- <source media="(prefers-color-scheme: light)" srcset="docs/images/praisonai-logo-black-large.png">
54
- <img alt="PraisonAI Logo" src="docs/images/praisonai-logo-black-large.png">
60
+ <source media="(prefers-color-scheme: dark)" srcset="docs/logo/dark.png" />
61
+ <source media="(prefers-color-scheme: light)" srcset="docs/logo/light.png" />
62
+ <img alt="PraisonAI Logo" src="docs/logo/light.png" />
55
63
  </picture>
56
64
  </p>
57
65
 
58
66
  <p align="center">
59
- <a href="https://github.com/MervinPraison/PraisonAI"><img src="https://static.pepy.tech/badge/PraisonAI" alt="Total Downloads"></a>
60
- <a href="https://github.com/MervinPraison/PraisonAI"><img src="https://img.shields.io/github/v/release/MervinPraison/PraisonAI" alt="Latest Stable Version"></a>
61
- <a href="https://github.com/MervinPraison/PraisonAI"><img src="https://img.shields.io/badge/License-MIT-yellow.svg" alt="License"></a>
67
+ <a href="https://github.com/MervinPraison/PraisonAI"><img src="https://static.pepy.tech/badge/PraisonAI" alt="Total Downloads" /></a>
68
+ <a href="https://github.com/MervinPraison/PraisonAI"><img src="https://img.shields.io/github/v/release/MervinPraison/PraisonAI" alt="Latest Stable Version" /></a>
69
+ <a href="https://github.com/MervinPraison/PraisonAI"><img src="https://img.shields.io/badge/License-MIT-yellow.svg" alt="License" /></a>
62
70
  </p>
63
71
 
64
72
  <div align="center">
@@ -71,9 +79,9 @@ Praison AI, leveraging both AutoGen and CrewAI or any other agent framework, rep
71
79
 
72
80
  <div align="center">
73
81
  <picture>
74
- <source media="(prefers-color-scheme: dark)" srcset="docs/images/architecture-dark.png">
75
- <source media="(prefers-color-scheme: light)" srcset="docs/images/architecture-light.png">
76
- <img alt="PraisonAI Architecture" src="docs/images/architecture-light.png">
82
+ <source media="(prefers-color-scheme: dark)" srcset="docs/images/architecture-dark.png" />
83
+ <source media="(prefers-color-scheme: light)" srcset="docs/images/architecture-light.png" />
84
+ <img alt="PraisonAI Architecture" src="docs/images/architecture-light.png" />
77
85
  </picture>
78
86
  </div>
79
87
 
@@ -95,8 +103,8 @@ Praison AI, leveraging both AutoGen and CrewAI or any other agent framework, rep
95
103
 
96
104
  | | Cookbook | Open in Colab |
97
105
  | ------------- | --------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
98
- | Basic | PraisonAI | <a target="_blank" href="https://colab.research.google.com/github/MervinPraison/PraisonAI/blob/main/cookbooks/praisonai-googlecolab.ipynb"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"/></a> |
99
- | Include Tools | PraisonAI Tools | <a target="_blank" href="https://colab.research.google.com/github/MervinPraison/PraisonAI/blob/main/cookbooks/praisonai-tools-googlecolab.ipynb"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"/></a> |
106
+ | Basic | PraisonAI | <a target="_blank" href="https://colab.research.google.com/github/MervinPraison/PraisonAI/blob/main/cookbooks/praisonai-googlecolab.ipynb"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab" /></a> |
107
+ | Include Tools | PraisonAI Tools | <a target="_blank" href="https://colab.research.google.com/github/MervinPraison/PraisonAI/blob/main/cookbooks/praisonai-tools-googlecolab.ipynb"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab" /></a> |
100
108
 
101
109
  ## Install
102
110
 
@@ -107,6 +115,7 @@ Praison AI, leveraging both AutoGen and CrewAI or any other agent framework, rep
107
115
  | **PraisonAI Chat** | `pip install "praisonai[chat]"` |
108
116
  | **PraisonAI Train** | `pip install "praisonai[train]"` |
109
117
  | **PraisonAI Realtime** | `pip install "praisonai[realtime]"` |
118
+ | **PraisonAI Call** | `pip install "praisonai[call]"` |
110
119
 
111
120
  ## Key Features
112
121
 
@@ -374,3 +383,4 @@ This configuration ensures that your development dependencies are correctly cate
374
383
 
375
384
  Praison AI is an open-sourced software licensed under the **[MIT license](https://opensource.org/licenses/MIT)**.
376
385
 
386
+
@@ -1,15 +1,15 @@
1
1
  <p align="center">
2
2
  <picture>
3
- <source media="(prefers-color-scheme: dark)" srcset="docs/images/praisonai-logo-large.png">
4
- <source media="(prefers-color-scheme: light)" srcset="docs/images/praisonai-logo-black-large.png">
5
- <img alt="PraisonAI Logo" src="docs/images/praisonai-logo-black-large.png">
3
+ <source media="(prefers-color-scheme: dark)" srcset="docs/logo/dark.png" />
4
+ <source media="(prefers-color-scheme: light)" srcset="docs/logo/light.png" />
5
+ <img alt="PraisonAI Logo" src="docs/logo/light.png" />
6
6
  </picture>
7
7
  </p>
8
8
 
9
9
  <p align="center">
10
- <a href="https://github.com/MervinPraison/PraisonAI"><img src="https://static.pepy.tech/badge/PraisonAI" alt="Total Downloads"></a>
11
- <a href="https://github.com/MervinPraison/PraisonAI"><img src="https://img.shields.io/github/v/release/MervinPraison/PraisonAI" alt="Latest Stable Version"></a>
12
- <a href="https://github.com/MervinPraison/PraisonAI"><img src="https://img.shields.io/badge/License-MIT-yellow.svg" alt="License"></a>
10
+ <a href="https://github.com/MervinPraison/PraisonAI"><img src="https://static.pepy.tech/badge/PraisonAI" alt="Total Downloads" /></a>
11
+ <a href="https://github.com/MervinPraison/PraisonAI"><img src="https://img.shields.io/github/v/release/MervinPraison/PraisonAI" alt="Latest Stable Version" /></a>
12
+ <a href="https://github.com/MervinPraison/PraisonAI"><img src="https://img.shields.io/badge/License-MIT-yellow.svg" alt="License" /></a>
13
13
  </p>
14
14
 
15
15
  <div align="center">
@@ -22,9 +22,9 @@ Praison AI, leveraging both AutoGen and CrewAI or any other agent framework, rep
22
22
 
23
23
  <div align="center">
24
24
  <picture>
25
- <source media="(prefers-color-scheme: dark)" srcset="docs/images/architecture-dark.png">
26
- <source media="(prefers-color-scheme: light)" srcset="docs/images/architecture-light.png">
27
- <img alt="PraisonAI Architecture" src="docs/images/architecture-light.png">
25
+ <source media="(prefers-color-scheme: dark)" srcset="docs/images/architecture-dark.png" />
26
+ <source media="(prefers-color-scheme: light)" srcset="docs/images/architecture-light.png" />
27
+ <img alt="PraisonAI Architecture" src="docs/images/architecture-light.png" />
28
28
  </picture>
29
29
  </div>
30
30
 
@@ -46,8 +46,8 @@ Praison AI, leveraging both AutoGen and CrewAI or any other agent framework, rep
46
46
 
47
47
  | | Cookbook | Open in Colab |
48
48
  | ------------- | --------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
49
- | Basic | PraisonAI | <a target="_blank" href="https://colab.research.google.com/github/MervinPraison/PraisonAI/blob/main/cookbooks/praisonai-googlecolab.ipynb"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"/></a> |
50
- | Include Tools | PraisonAI Tools | <a target="_blank" href="https://colab.research.google.com/github/MervinPraison/PraisonAI/blob/main/cookbooks/praisonai-tools-googlecolab.ipynb"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"/></a> |
49
+ | Basic | PraisonAI | <a target="_blank" href="https://colab.research.google.com/github/MervinPraison/PraisonAI/blob/main/cookbooks/praisonai-googlecolab.ipynb"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab" /></a> |
50
+ | Include Tools | PraisonAI Tools | <a target="_blank" href="https://colab.research.google.com/github/MervinPraison/PraisonAI/blob/main/cookbooks/praisonai-tools-googlecolab.ipynb"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab" /></a> |
51
51
 
52
52
  ## Install
53
53
 
@@ -58,6 +58,7 @@ Praison AI, leveraging both AutoGen and CrewAI or any other agent framework, rep
58
58
  | **PraisonAI Chat** | `pip install "praisonai[chat]"` |
59
59
  | **PraisonAI Train** | `pip install "praisonai[train]"` |
60
60
  | **PraisonAI Realtime** | `pip install "praisonai[realtime]"` |
61
+ | **PraisonAI Call** | `pip install "praisonai[call]"` |
61
62
 
62
63
  ## Key Features
63
64
 
@@ -324,3 +325,4 @@ This configuration ensures that your development dependencies are correctly cate
324
325
  ## License
325
326
 
326
327
  Praison AI is an open-sourced software licensed under the **[MIT license](https://opensource.org/licenses/MIT)**.
328
+
@@ -0,0 +1,188 @@
1
+ import os
2
+ import json
3
+ import base64
4
+ import asyncio
5
+ import websockets
6
+ from fastapi import FastAPI, WebSocket, Request
7
+ from fastapi.responses import HTMLResponse
8
+ from fastapi.websockets import WebSocketDisconnect
9
+ from twilio.twiml.voice_response import VoiceResponse, Connect, Say, Stream
10
+ from dotenv import load_dotenv
11
+ import typer
12
+ import uvicorn
13
+ from pyngrok import ngrok
14
+ from rich import print
15
+
16
+ load_dotenv()
17
+
18
+ # Configuration
19
+ OPENAI_API_KEY = os.getenv('OPENAI_API_KEY') # requires OpenAI Realtime API Access
20
+ PORT = int(os.getenv('PORT', 8090))
21
+ SYSTEM_MESSAGE = (
22
+ "You are a helpful and bubbly AI assistant who loves to chat about "
23
+ "anything the user is interested in and is prepared to offer them facts. "
24
+ "You have a penchant for dad jokes, owl jokes, and rickrolling – subtly. "
25
+ "Always stay positive, but work in a joke when appropriate."
26
+ )
27
+ VOICE = 'alloy'
28
+ LOG_EVENT_TYPES = [
29
+ 'response.content.done', 'rate_limits.updated', 'response.done',
30
+ 'input_audio_buffer.committed', 'input_audio_buffer.speech_stopped',
31
+ 'input_audio_buffer.speech_started', 'session.created'
32
+ ]
33
+
34
+ app = FastAPI()
35
+
36
+ if not OPENAI_API_KEY:
37
+ raise ValueError('Missing the OpenAI API key. Please set it in the .env file.')
38
+
39
+ @app.get("/", response_class=HTMLResponse)
40
+ async def index_page():
41
+ return """
42
+ <html>
43
+ <head>
44
+ <title>Praison AI Call Server</title>
45
+ </head>
46
+ <body>
47
+ <h1>Praison AI Call Server is running!</h1>
48
+ </body>
49
+ </html>
50
+ """
51
+
52
+ @app.api_route("/call", methods=["GET", "POST"])
53
+ async def handle_incoming_call(request: Request):
54
+ """Handle incoming call and return TwiML response to connect to Media Stream."""
55
+ response = VoiceResponse()
56
+ response.say("")
57
+ response.pause(length=1)
58
+ response.say("O.K. you can start talking!")
59
+ host = request.url.hostname
60
+ connect = Connect()
61
+ connect.stream(url=f'wss://{host}/media-stream')
62
+ response.append(connect)
63
+ return HTMLResponse(content=str(response), media_type="application/xml")
64
+
65
+ @app.websocket("/media-stream")
66
+ async def handle_media_stream(websocket: WebSocket):
67
+ """Handle WebSocket connections between Twilio and OpenAI."""
68
+ print("Client connected")
69
+ await websocket.accept()
70
+
71
+ async with websockets.connect(
72
+ 'wss://api.openai.com/v1/realtime?model=gpt-4o-realtime-preview-2024-10-01',
73
+ extra_headers={
74
+ "Authorization": f"Bearer {OPENAI_API_KEY}",
75
+ "OpenAI-Beta": "realtime=v1"
76
+ }
77
+ ) as openai_ws:
78
+ await send_session_update(openai_ws)
79
+ stream_sid = None
80
+
81
+ async def receive_from_twilio():
82
+ """Receive audio data from Twilio and send it to the OpenAI Realtime API."""
83
+ nonlocal stream_sid
84
+ try:
85
+ async for message in websocket.iter_text():
86
+ data = json.loads(message)
87
+ if data['event'] == 'media' and openai_ws.open:
88
+ audio_append = {
89
+ "type": "input_audio_buffer.append",
90
+ "audio": data['media']['payload']
91
+ }
92
+ await openai_ws.send(json.dumps(audio_append))
93
+ elif data['event'] == 'start':
94
+ stream_sid = data['start']['streamSid']
95
+ print(f"Incoming stream has started {stream_sid}")
96
+ except WebSocketDisconnect:
97
+ print("Client disconnected.")
98
+ if openai_ws.open:
99
+ await openai_ws.close()
100
+
101
+ async def send_to_twilio():
102
+ """Receive events from the OpenAI Realtime API, send audio back to Twilio."""
103
+ nonlocal stream_sid
104
+ try:
105
+ async for openai_message in openai_ws:
106
+ response = json.loads(openai_message)
107
+ if response['type'] in LOG_EVENT_TYPES:
108
+ print(f"Received event: {response['type']}", response)
109
+ if response['type'] == 'session.updated':
110
+ print("Session updated successfully:", response)
111
+ if response['type'] == 'response.audio.delta' and response.get('delta'):
112
+ # Audio from OpenAI
113
+ try:
114
+ audio_payload = base64.b64encode(base64.b64decode(response['delta'])).decode('utf-8')
115
+ audio_delta = {
116
+ "event": "media",
117
+ "streamSid": stream_sid,
118
+ "media": {
119
+ "payload": audio_payload
120
+ }
121
+ }
122
+ await websocket.send_json(audio_delta)
123
+ except Exception as e:
124
+ print(f"Error processing audio data: {e}")
125
+ except Exception as e:
126
+ print(f"Error in send_to_twilio: {e}")
127
+
128
+ await asyncio.gather(receive_from_twilio(), send_to_twilio())
129
+
130
+ async def send_session_update(openai_ws):
131
+ """Send session update to OpenAI WebSocket."""
132
+ session_update = {
133
+ "type": "session.update",
134
+ "session": {
135
+ "turn_detection": {
136
+ "type": "server_vad",
137
+ "threshold": 0.5,
138
+ "prefix_padding_ms": 300,
139
+ "silence_duration_ms": 200
140
+ },
141
+ "input_audio_format": "g711_ulaw",
142
+ "output_audio_format": "g711_ulaw",
143
+ # "input_audio_transcription": { "model": 'whisper-1' },
144
+ # "transcription_models": [{"model": "whisper-1"}],
145
+ "voice": VOICE,
146
+ "tools": [],
147
+ "tool_choice": "auto",
148
+ "instructions": SYSTEM_MESSAGE,
149
+ "modalities": ["text", "audio"],
150
+ "temperature": 0.8
151
+ }
152
+ }
153
+ print('Sending session update:', json.dumps(session_update))
154
+ await openai_ws.send(json.dumps(session_update))
155
+
156
+ def run_server(port: int, use_ngrok: bool = False):
157
+ """Run the FastAPI server using uvicorn."""
158
+ if use_ngrok:
159
+ public_url = ngrok.connect(port).public_url
160
+ # print(f"Ngrok tunnel established: {public_url}")
161
+ print(f"Praison AI Voice URL: {public_url}/call")
162
+
163
+ print(f"Starting Praison AI Call Server on port {port}...")
164
+ uvicorn.run(app, host="0.0.0.0", port=port, log_level="warning")
165
+
166
+ app_cli = typer.Typer()
167
+
168
+ @app_cli.command()
169
+ def main(
170
+ port: int = typer.Option(8090, help="Port to run the server on"),
171
+ ngrok: bool = typer.Option(False, help="Use ngrok to expose the server")
172
+ ):
173
+ """Run the Praison AI Call Server."""
174
+ # print(f"Received port value: {port}") # Debug print
175
+ # print(f"Use ngrok: {ngrok}") # Debug print
176
+
177
+ # Extract the actual port value from the OptionInfo object
178
+ if isinstance(port, typer.models.OptionInfo):
179
+ port_value = port.default
180
+ else:
181
+ port_value = port
182
+
183
+ port_int = int(port_value)
184
+
185
+ run_server(port=port_int, use_ngrok=ngrok)
186
+
187
+ if __name__ == "__main__":
188
+ app_cli()
@@ -16,6 +16,8 @@ from .inc.config import generate_config
16
16
  import shutil
17
17
  import subprocess
18
18
  import logging
19
+ import importlib
20
+ import praisonai.api.call as call_module
19
21
  logging.basicConfig(level=os.environ.get('LOGLEVEL', 'INFO'), format='%(asctime)s - %(levelname)s - %(message)s')
20
22
 
21
23
  try:
@@ -134,6 +136,10 @@ class PraisonAI:
134
136
  self.create_realtime_interface()
135
137
  return
136
138
 
139
+ if getattr(args, 'call', False):
140
+ call_module.main()
141
+ return
142
+
137
143
  if args.agent_file == 'train':
138
144
  package_root = os.path.dirname(os.path.abspath(__file__))
139
145
  config_yaml_destination = os.path.join(os.getcwd(), 'config.yaml')
@@ -261,6 +267,7 @@ class PraisonAI:
261
267
  parser.add_argument("--ollama", type=str, help="Ollama model name")
262
268
  parser.add_argument("--dataset", type=str, help="Dataset name for training", default="yahma/alpaca-cleaned")
263
269
  parser.add_argument("--realtime", action="store_true", help="Start the realtime voice interaction interface")
270
+ parser.add_argument("--call", action="store_true", help="Start the PraisonAI Call server")
264
271
  args, unknown_args = parser.parse_known_args()
265
272
 
266
273
  if unknown_args and unknown_args[0] == '-b' and unknown_args[1] == 'api:app':
@@ -277,6 +284,8 @@ class PraisonAI:
277
284
  args.code = True
278
285
  if args.agent_file == 'realtime':
279
286
  args.realtime = True
287
+ if args.agent_file == 'call':
288
+ args.call = True
280
289
 
281
290
  return args
282
291
 
@@ -448,4 +457,4 @@ class PraisonAI:
448
457
 
449
458
  if __name__ == "__main__":
450
459
  praison_ai = PraisonAI()
451
- praison_ai.main()
460
+ praison_ai.main()
@@ -56,7 +56,7 @@ class CloudDeployer:
56
56
  file.write("FROM python:3.11-slim\n")
57
57
  file.write("WORKDIR /app\n")
58
58
  file.write("COPY . .\n")
59
- file.write("RUN pip install flask praisonai==0.1.2 gunicorn markdown\n")
59
+ file.write("RUN pip install flask praisonai==0.1.4 gunicorn markdown\n")
60
60
  file.write("EXPOSE 8080\n")
61
61
  file.write('CMD ["gunicorn", "-b", "0.0.0.0:8080", "api:app"]\n')
62
62
 
@@ -6,7 +6,6 @@ from uuid import uuid4
6
6
 
7
7
  from openai import AsyncOpenAI
8
8
  import chainlit as cl
9
- from chainlit.logger import logger
10
9
  from chainlit.input_widget import TextInput
11
10
  from chainlit.types import ThreadDict
12
11
 
@@ -16,6 +15,25 @@ from sql_alchemy import SQLAlchemyDataLayer
16
15
  import chainlit.data as cl_data
17
16
  from literalai.helper import utc_now
18
17
  import json
18
+ import logging
19
+ import importlib.util
20
+ from importlib import import_module
21
+ from pathlib import Path
22
+
23
+ # Set up logging
24
+ logger = logging.getLogger(__name__)
25
+ log_level = os.getenv("LOGLEVEL", "INFO").upper()
26
+ logger.handlers = []
27
+
28
+ # Set up logging to console
29
+ console_handler = logging.StreamHandler()
30
+ console_handler.setLevel(log_level)
31
+ console_formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
32
+ console_handler.setFormatter(console_formatter)
33
+ logger.addHandler(console_handler)
34
+
35
+ # Set the logging level for the logger
36
+ logger.setLevel(log_level)
19
37
 
20
38
  # Set up CHAINLIT_AUTH_SECRET
21
39
  CHAINLIT_AUTH_SECRET = os.getenv("CHAINLIT_AUTH_SECRET")
@@ -144,19 +162,8 @@ cl_data._data_layer = SQLAlchemyDataLayer(conninfo=f"sqlite+aiosqlite:///{DB_PAT
144
162
 
145
163
  client = AsyncOpenAI()
146
164
 
147
- # Add these new imports and code
148
- import importlib.util
149
- import logging
150
- from importlib import import_module
151
- from pathlib import Path
152
-
153
- # Set up logging
154
- logging.basicConfig(level=logging.INFO)
155
- logger = logging.getLogger(__name__)
156
-
157
165
  # Try to import tools from the root directory
158
- root_dir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
159
- tools_path = os.path.join(root_dir, 'tools.py')
166
+ tools_path = os.path.join(os.getcwd(), 'tools.py')
160
167
  logger.info(f"Tools path: {tools_path}")
161
168
 
162
169
  def import_tools_from_file(file_path):
@@ -1,6 +1,6 @@
1
1
  [tool.poetry]
2
2
  name = "PraisonAI"
3
- version = "0.1.2"
3
+ version = "0.1.4"
4
4
  description = "PraisonAI application combines AutoGen and CrewAI or similar frameworks into a low-code solution for building and managing multi-agent LLM systems, focusing on simplicity, customization, and efficient human-agent collaboration."
5
5
  authors = ["Mervin Praison"]
6
6
  license = ""
@@ -38,6 +38,13 @@ websockets = {version = ">=12.0", optional = true}
38
38
  plotly = {version = ">=5.24.0", optional = true}
39
39
  yfinance = {version = ">=0.2.44", optional = true}
40
40
  duckduckgo_search = {version = ">=6.3.0", optional = true}
41
+ twilio = {version = ">=7.0.0", optional = true}
42
+ fastapi = {version = ">=0.95.0", optional = true}
43
+ uvicorn = {version = ">=0.20.0", optional = true}
44
+ python-dotenv = {version = ">=0.19.0", optional = true}
45
+ typer = {version = ">=0.9.0", optional = true}
46
+ flaml = {version = ">=2.3.1", extras = ["automl"], optional = true}
47
+ pyngrok = {version = ">=1.4.0", optional = true}
41
48
 
42
49
  [tool.poetry.group.docs.dependencies]
43
50
  mkdocs = "*"
@@ -97,6 +104,7 @@ build-backend = "poetry.core.masonry.api"
97
104
  praisonai = "praisonai.__main__:main"
98
105
  setup-conda-env = "setup.setup_conda_env:main"
99
106
  post-install = "setup.post_install:main"
107
+ praisonai-call = "praisonai.api.call:main"
100
108
 
101
109
  [tool.poetry.extras]
102
110
  ui = ["chainlit"]
@@ -111,6 +119,7 @@ chat = ["chainlit", "litellm", "aiosqlite", "greenlet", "tavily-python", "crawl4
111
119
  code = ["chainlit", "litellm", "aiosqlite", "greenlet", "tavily-python", "crawl4ai"]
112
120
  train = ["setup-conda-env"]
113
121
  realtime = ["chainlit", "litellm", "aiosqlite", "greenlet", "tavily-python", "crawl4ai", "websockets", "plotly", "yfinance", "duckduckgo_search"]
122
+ call = ["twilio", "fastapi", "uvicorn", "websockets", "python-dotenv", "typer", "flaml", "pyngrok", "rich"]
114
123
 
115
124
  [tool.poetry-dynamic-versioning]
116
125
  enable = true
@@ -119,4 +128,4 @@ style = "semver"
119
128
 
120
129
  [tool.poetry.build]
121
130
  generate-setup-file = false
122
- script = "praisonai/setup/post_install.py"
131
+ script = "praisonai/setup/post_install.py"
File without changes
File without changes
File without changes
File without changes