PraisonAI 2.0.61__cp313-cp313-manylinux_2_39_x86_64.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of PraisonAI might be problematic.
- praisonai/__init__.py +6 -0
- praisonai/__main__.py +10 -0
- praisonai/agents_generator.py +648 -0
- praisonai/api/call.py +292 -0
- praisonai/auto.py +238 -0
- praisonai/chainlit_ui.py +304 -0
- praisonai/cli.py +518 -0
- praisonai/deploy.py +138 -0
- praisonai/inbuilt_tools/__init__.py +24 -0
- praisonai/inbuilt_tools/autogen_tools.py +117 -0
- praisonai/inc/__init__.py +2 -0
- praisonai/inc/config.py +96 -0
- praisonai/inc/models.py +128 -0
- praisonai/public/android-chrome-192x192.png +0 -0
- praisonai/public/android-chrome-512x512.png +0 -0
- praisonai/public/apple-touch-icon.png +0 -0
- praisonai/public/fantasy.svg +3 -0
- praisonai/public/favicon-16x16.png +0 -0
- praisonai/public/favicon-32x32.png +0 -0
- praisonai/public/favicon.ico +0 -0
- praisonai/public/game.svg +3 -0
- praisonai/public/logo_dark.png +0 -0
- praisonai/public/logo_light.png +0 -0
- praisonai/public/movie.svg +3 -0
- praisonai/public/praison-ai-agents-architecture-dark.png +0 -0
- praisonai/public/praison-ai-agents-architecture.png +0 -0
- praisonai/public/thriller.svg +3 -0
- praisonai/setup/__init__.py +1 -0
- praisonai/setup/build.py +21 -0
- praisonai/setup/config.yaml +60 -0
- praisonai/setup/post_install.py +23 -0
- praisonai/setup/setup_conda_env.py +25 -0
- praisonai/setup/setup_conda_env.sh +72 -0
- praisonai/setup.py +16 -0
- praisonai/test.py +105 -0
- praisonai/train.py +276 -0
- praisonai/ui/README.md +21 -0
- praisonai/ui/agents.py +822 -0
- praisonai/ui/callbacks.py +57 -0
- praisonai/ui/chat.py +387 -0
- praisonai/ui/code.py +440 -0
- praisonai/ui/colab.py +474 -0
- praisonai/ui/colab_chainlit.py +81 -0
- praisonai/ui/components/aicoder.py +269 -0
- praisonai/ui/config/.chainlit/config.toml +120 -0
- praisonai/ui/config/.chainlit/translations/bn.json +231 -0
- praisonai/ui/config/.chainlit/translations/en-US.json +229 -0
- praisonai/ui/config/.chainlit/translations/gu.json +231 -0
- praisonai/ui/config/.chainlit/translations/he-IL.json +231 -0
- praisonai/ui/config/.chainlit/translations/hi.json +231 -0
- praisonai/ui/config/.chainlit/translations/kn.json +231 -0
- praisonai/ui/config/.chainlit/translations/ml.json +231 -0
- praisonai/ui/config/.chainlit/translations/mr.json +231 -0
- praisonai/ui/config/.chainlit/translations/ta.json +231 -0
- praisonai/ui/config/.chainlit/translations/te.json +231 -0
- praisonai/ui/config/.chainlit/translations/zh-CN.json +229 -0
- praisonai/ui/config/chainlit.md +1 -0
- praisonai/ui/config/translations/bn.json +231 -0
- praisonai/ui/config/translations/en-US.json +229 -0
- praisonai/ui/config/translations/gu.json +231 -0
- praisonai/ui/config/translations/he-IL.json +231 -0
- praisonai/ui/config/translations/hi.json +231 -0
- praisonai/ui/config/translations/kn.json +231 -0
- praisonai/ui/config/translations/ml.json +231 -0
- praisonai/ui/config/translations/mr.json +231 -0
- praisonai/ui/config/translations/ta.json +231 -0
- praisonai/ui/config/translations/te.json +231 -0
- praisonai/ui/config/translations/zh-CN.json +229 -0
- praisonai/ui/context.py +283 -0
- praisonai/ui/db.py +291 -0
- praisonai/ui/public/fantasy.svg +3 -0
- praisonai/ui/public/game.svg +3 -0
- praisonai/ui/public/logo_dark.png +0 -0
- praisonai/ui/public/logo_light.png +0 -0
- praisonai/ui/public/movie.svg +3 -0
- praisonai/ui/public/praison.css +3 -0
- praisonai/ui/public/thriller.svg +3 -0
- praisonai/ui/realtime.py +476 -0
- praisonai/ui/realtimeclient/__init__.py +653 -0
- praisonai/ui/realtimeclient/realtimedocs.txt +1484 -0
- praisonai/ui/realtimeclient/tools.py +236 -0
- praisonai/ui/sql_alchemy.py +707 -0
- praisonai/ui/tools.md +133 -0
- praisonai/version.py +1 -0
- praisonai-2.0.61.dist-info/LICENSE +20 -0
- praisonai-2.0.61.dist-info/METADATA +679 -0
- praisonai-2.0.61.dist-info/RECORD +89 -0
- praisonai-2.0.61.dist-info/WHEEL +4 -0
- praisonai-2.0.61.dist-info/entry_points.txt +5 -0
praisonai/ui/realtimeclient/tools.py
@@ -0,0 +1,236 @@
import yfinance as yf
import chainlit as cl
import plotly
import json
from tavily import TavilyClient
from crawl4ai import WebCrawler
import os
import logging
import asyncio
from openai import OpenAI
import base64
from io import BytesIO
from datetime import datetime
from duckduckgo_search import DDGS

# Set up logging
logger = logging.getLogger(__name__)
log_level = os.getenv("LOGLEVEL", "INFO").upper()
logger.setLevel(log_level)

# Set Tavily API key
tavily_api_key = os.getenv("TAVILY_API_KEY")
tavily_client = TavilyClient(api_key=tavily_api_key) if tavily_api_key else None

# Set up OpenAI client
openai_client = OpenAI()

query_stock_price_def = {
    "name": "query_stock_price",
    "description": "Queries the latest stock price information for a given stock symbol.",
    "parameters": {
        "type": "object",
        "properties": {
            "symbol": {
                "type": "string",
                "description": "The stock symbol to query (e.g., 'AAPL' for Apple Inc.)"
            },
            "period": {
                "type": "string",
                "description": "The time period for which to retrieve stock data (e.g., '1d' for one day, '1mo' for one month)"
            }
        },
        "required": ["symbol", "period"]
    }
}

async def query_stock_price_handler(symbol, period):
    """
    Queries the latest stock price information for a given stock symbol.
    """
    try:
        stock = yf.Ticker(symbol)
        hist = stock.history(period=period)
        if hist.empty:
            return {"error": "No data found for the given symbol."}
        return hist.to_json()

    except Exception as e:
        return {"error": str(e)}

query_stock_price = (query_stock_price_def, query_stock_price_handler)

draw_plotly_chart_def = {
    "name": "draw_plotly_chart",
    "description": "Draws a Plotly chart based on the provided JSON figure and displays it with an accompanying message.",
    "parameters": {
        "type": "object",
        "properties": {
            "message": {
                "type": "string",
                "description": "The message to display alongside the chart"
            },
            "plotly_json_fig": {
                "type": "string",
                "description": "A JSON string representing the Plotly figure to be drawn"
            }
        },
        "required": ["message", "plotly_json_fig"]
    }
}

async def draw_plotly_chart_handler(message: str, plotly_json_fig):
    fig = plotly.io.from_json(plotly_json_fig)
    elements = [cl.Plotly(name="chart", figure=fig, display="inline")]

    await cl.Message(content=message, elements=elements).send()
    return {"status": "success"}  # Add a return value

draw_plotly_chart = (draw_plotly_chart_def, draw_plotly_chart_handler)

tavily_web_search_def = {
    "name": "tavily_web_search",
    "description": "Search the web using Tavily API and crawl the resulting URLs",
    "parameters": {
        "type": "object",
        "properties": {
            "query": {"type": "string", "description": "Search query"}
        },
        "required": ["query"]
    }
}

async def tavily_web_search_handler(query):
    current_date = datetime.now().strftime("%d %B %Y")
    query_with_date = query + f" {current_date}"

    if tavily_client:
        try:
            response = tavily_client.search(query_with_date)
            logger.debug(f"Tavily search response: {response}")
            results = process_tavily_results(response)
        except Exception as e:
            logger.error(f"Error in Tavily search: {str(e)}")
            results = await fallback_to_duckduckgo(query_with_date)
    else:
        logger.info("Tavily API key is not set. Using DuckDuckGo as fallback.")
        results = await fallback_to_duckduckgo(query_with_date)

    return json.dumps({
        "query": query,
        "results": results
    })

def process_tavily_results(response):
    crawler = WebCrawler()
    crawler.warmup()
    results = []
    for result in response.get('results', []):
        url = result.get('url')
        if url:
            try:
                crawl_result = crawler.run(url=url)
                results.append({
                    "content": result.get('content'),
                    "url": url,
                    "full_content": crawl_result.markdown
                })
            except Exception as e:
                logger.error(f"Error crawling {url}: {str(e)}")
                results.append({
                    "content": result.get('content'),
                    "url": url,
                    "full_content": "Error: Unable to crawl this URL"
                })
    return results

async def fallback_to_duckduckgo(query):
    try:
        with DDGS() as ddgs:
            ddg_results = list(ddgs.text(query, max_results=5))

        logger.debug(f"DuckDuckGo search results: {ddg_results}")

        crawler = WebCrawler()
        crawler.warmup()
        results = []

        for result in ddg_results:
            url = result.get('href')
            if url:
                try:
                    crawl_result = crawler.run(url=url)
                    results.append({
                        "content": result.get('body'),
                        "url": url,
                        "full_content": crawl_result.markdown
                    })
                except Exception as e:
                    logger.error(f"Error crawling {url}: {str(e)}")
                    results.append({
                        "content": result.get('body'),
                        "url": url,
                        "full_content": "Error: Unable to crawl this URL"
                    })
            else:
                results.append({
                    "content": result.get('body'),
                    "url": "N/A",
                    "full_content": "No URL provided for crawling"
                })

        return results
    except Exception as e:
        logger.error(f"Error in DuckDuckGo search: {str(e)}")
        return []

tavily_web_search = (tavily_web_search_def, tavily_web_search_handler)

# New image generation tool
generate_image_def = {
    "name": "generate_image",
    "description": "Generate an image based on a text prompt using DALL-E 3",
    "parameters": {
        "type": "object",
        "properties": {
            "prompt": {"type": "string", "description": "The text prompt to generate the image"},
            "size": {"type": "string", "description": "Image size (1024x1024, 1024x1792, or 1792x1024)", "default": "1024x1024"},
            "quality": {"type": "string", "description": "Image quality (standard or hd)", "default": "standard"},
        },
        "required": ["prompt"]
    }
}

async def generate_image_handler(prompt, size="1024x1024", quality="standard"):
    try:
        response = openai_client.images.generate(
            model="dall-e-3",
            prompt=prompt,
            size=size,
            quality=quality,
            n=1,
        )

        image_url = response.data[0].url

        # Download the image
        import requests
        image_content = requests.get(image_url).content

        # Convert image to base64
        image_base64 = base64.b64encode(image_content).decode('utf-8')

        # Create a Chainlit Image element
        image_element = cl.Image(content=image_content, name="generated_image", display="inline")

        # Send the image in a Chainlit message
        await cl.Message(content=f"Generated image for prompt: '{prompt}'", elements=[image_element]).send()

        return {"status": "success", "message": "Image generated and displayed"}
    except Exception as e:
        logger.error(f"Error generating image: {str(e)}")
        return {"status": "error", "message": str(e)}

generate_image = (generate_image_def, generate_image_handler)

tools = [query_stock_price, draw_plotly_chart, tavily_web_search, generate_image]
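For context, here is a minimal, hypothetical sketch of how the (definition, handler) tuples collected in the module's `tools` list could be dispatched when a model requests a tool call. None of this is part of the package: the import path, the `dispatch_tool_call` helper, and the example arguments are assumptions, and the realtime client shipped in this wheel may register and invoke its tools differently.

# Hypothetical dispatcher (not part of the package): look up a handler by the
# "name" field of its definition and await it with JSON-decoded arguments.
import asyncio
import json

from tools import tools  # assumed import path for the module shown above

async def dispatch_tool_call(name: str, arguments_json: str):
    # Build a name -> handler map from the (definition, handler) tuples.
    handlers = {definition["name"]: handler for definition, handler in tools}
    handler = handlers.get(name)
    if handler is None:
        return {"error": f"Unknown tool: {name}"}
    return await handler(**json.loads(arguments_json))

# Example: run the stock-price tool outside of a Chainlit session.
if __name__ == "__main__":
    result = asyncio.run(
        dispatch_tool_call("query_stock_price", '{"symbol": "AAPL", "period": "1d"}')
    )
    print(result)

Note that importing the module instantiates OpenAI(), so OPENAI_API_KEY must be set, and the Chainlit-bound handlers (draw_plotly_chart, generate_image) only render output inside an active Chainlit session.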