PraisonAI 0.0.72__tar.gz → 0.0.73__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of PraisonAI might be problematic.
Files changed:
- {praisonai-0.0.72 → praisonai-0.0.73}/PKG-INFO +1 -1
- {praisonai-0.0.72 → praisonai-0.0.73}/praisonai/deploy.py +1 -1
- {praisonai-0.0.72 → praisonai-0.0.73}/praisonai/ui/chat.py +49 -4
- {praisonai-0.0.72 → praisonai-0.0.73}/praisonai/ui/code.py +57 -10
- {praisonai-0.0.72 → praisonai-0.0.73}/pyproject.toml +1 -1
- {praisonai-0.0.72 → praisonai-0.0.73}/LICENSE +0 -0
- {praisonai-0.0.72 → praisonai-0.0.73}/README.md +0 -0
- {praisonai-0.0.72 → praisonai-0.0.73}/praisonai/__init__.py +0 -0
- {praisonai-0.0.72 → praisonai-0.0.73}/praisonai/__main__.py +0 -0
- {praisonai-0.0.72 → praisonai-0.0.73}/praisonai/agents_generator.py +0 -0
- {praisonai-0.0.72 → praisonai-0.0.73}/praisonai/auto.py +0 -0
- {praisonai-0.0.72 → praisonai-0.0.73}/praisonai/chainlit_ui.py +0 -0
- {praisonai-0.0.72 → praisonai-0.0.73}/praisonai/cli.py +0 -0
- {praisonai-0.0.72 → praisonai-0.0.73}/praisonai/inbuilt_tools/__init__.py +0 -0
- {praisonai-0.0.72 → praisonai-0.0.73}/praisonai/inbuilt_tools/autogen_tools.py +0 -0
- {praisonai-0.0.72 → praisonai-0.0.73}/praisonai/inc/__init__.py +0 -0
- {praisonai-0.0.72 → praisonai-0.0.73}/praisonai/inc/config.py +0 -0
- {praisonai-0.0.72 → praisonai-0.0.73}/praisonai/inc/models.py +0 -0
- {praisonai-0.0.72 → praisonai-0.0.73}/praisonai/public/android-chrome-192x192.png +0 -0
- {praisonai-0.0.72 → praisonai-0.0.73}/praisonai/public/android-chrome-512x512.png +0 -0
- {praisonai-0.0.72 → praisonai-0.0.73}/praisonai/public/apple-touch-icon.png +0 -0
- {praisonai-0.0.72 → praisonai-0.0.73}/praisonai/public/fantasy.svg +0 -0
- {praisonai-0.0.72 → praisonai-0.0.73}/praisonai/public/favicon-16x16.png +0 -0
- {praisonai-0.0.72 → praisonai-0.0.73}/praisonai/public/favicon-32x32.png +0 -0
- {praisonai-0.0.72 → praisonai-0.0.73}/praisonai/public/favicon.ico +0 -0
- {praisonai-0.0.72 → praisonai-0.0.73}/praisonai/public/game.svg +0 -0
- {praisonai-0.0.72 → praisonai-0.0.73}/praisonai/public/logo_dark.png +0 -0
- {praisonai-0.0.72 → praisonai-0.0.73}/praisonai/public/logo_light.png +0 -0
- {praisonai-0.0.72 → praisonai-0.0.73}/praisonai/public/movie.svg +0 -0
- {praisonai-0.0.72 → praisonai-0.0.73}/praisonai/public/thriller.svg +0 -0
- {praisonai-0.0.72 → praisonai-0.0.73}/praisonai/setup/__init__.py +0 -0
- {praisonai-0.0.72 → praisonai-0.0.73}/praisonai/setup/build.py +0 -0
- {praisonai-0.0.72 → praisonai-0.0.73}/praisonai/setup/config.yaml +0 -0
- {praisonai-0.0.72 → praisonai-0.0.73}/praisonai/setup/post_install.py +0 -0
- {praisonai-0.0.72 → praisonai-0.0.73}/praisonai/setup/setup_conda_env.py +0 -0
- {praisonai-0.0.72 → praisonai-0.0.73}/praisonai/setup/setup_conda_env.sh +0 -0
- {praisonai-0.0.72 → praisonai-0.0.73}/praisonai/test.py +0 -0
- {praisonai-0.0.72 → praisonai-0.0.73}/praisonai/train.py +0 -0
- {praisonai-0.0.72 → praisonai-0.0.73}/praisonai/ui/context.py +0 -0
- {praisonai-0.0.72 → praisonai-0.0.73}/praisonai/ui/public/fantasy.svg +0 -0
- {praisonai-0.0.72 → praisonai-0.0.73}/praisonai/ui/public/game.svg +0 -0
- {praisonai-0.0.72 → praisonai-0.0.73}/praisonai/ui/public/logo_dark.png +0 -0
- {praisonai-0.0.72 → praisonai-0.0.73}/praisonai/ui/public/logo_light.png +0 -0
- {praisonai-0.0.72 → praisonai-0.0.73}/praisonai/ui/public/movie.svg +0 -0
- {praisonai-0.0.72 → praisonai-0.0.73}/praisonai/ui/public/thriller.svg +0 -0
- {praisonai-0.0.72 → praisonai-0.0.73}/praisonai/ui/sql_alchemy.py +0 -0
- {praisonai-0.0.72 → praisonai-0.0.73}/praisonai/version.py +0 -0
--- praisonai-0.0.72/PKG-INFO
+++ praisonai-0.0.73/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: PraisonAI
-Version: 0.0.72
+Version: 0.0.73
 Summary: PraisonAI application combines AutoGen and CrewAI or similar frameworks into a low-code solution for building and managing multi-agent LLM systems, focusing on simplicity, customization, and efficient human-agent collaboration.
 Author: Mervin Praison
 Requires-Python: >=3.10,<3.13
--- praisonai-0.0.72/praisonai/deploy.py
+++ praisonai-0.0.73/praisonai/deploy.py
@@ -56,7 +56,7 @@ class CloudDeployer:
             file.write("FROM python:3.11-slim\n")
             file.write("WORKDIR /app\n")
             file.write("COPY . .\n")
-            file.write("RUN pip install flask praisonai==0.0.72 gunicorn markdown\n")
+            file.write("RUN pip install flask praisonai==0.0.73 gunicorn markdown\n")
             file.write("EXPOSE 8080\n")
             file.write('CMD ["gunicorn", "-b", "0.0.0.0:8080", "api:app"]\n')

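The only change in deploy.py is the pinned praisonai version inside the generated Dockerfile. As a rough, self-contained sketch of that generation step, here is a parameterized variant; the `write_dockerfile` helper and its signature are illustrative, not part of CloudDeployer:

```python
# Sketch only: a parameterized take on the Dockerfile generation shown above.
# CloudDeployer hard-codes the version string in its file.write(...) calls;
# the helper name and arguments here are illustrative.
def write_dockerfile(path: str = "Dockerfile", praisonai_version: str = "0.0.73") -> None:
    with open(path, "w") as file:
        file.write("FROM python:3.11-slim\n")
        file.write("WORKDIR /app\n")
        file.write("COPY . .\n")
        file.write(f"RUN pip install flask praisonai=={praisonai_version} gunicorn markdown\n")
        file.write("EXPOSE 8080\n")
        file.write('CMD ["gunicorn", "-b", "0.0.0.0:8080", "api:app"]\n')


if __name__ == "__main__":
    write_dockerfile()
```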
--- praisonai-0.0.72/praisonai/ui/chat.py
+++ praisonai-0.0.73/praisonai/ui/chat.py
@@ -17,6 +17,9 @@ from sql_alchemy import SQLAlchemyDataLayer
 from tavily import TavilyClient
 from crawl4ai import WebCrawler
 import asyncio
+from PIL import Image
+import io
+import base64

 # Set up logging
 logger = logging.getLogger(__name__)
@@ -292,11 +295,32 @@ async def main(message: cl.Message):
     message_history = cl.user_session.get("message_history", [])
     now = datetime.now().strftime("%Y-%m-%d %H:%M:%S")

-    #
+    # Check if an image was uploaded with this message
+    image = None
+    if message.elements and isinstance(message.elements[0], cl.Image):
+        image_element = message.elements[0]
+        try:
+            # Open the image and keep it in memory
+            image = Image.open(image_element.path)
+            image.load()  # This ensures the file is fully loaded into memory
+            cl.user_session.set("image", image)
+        except Exception as e:
+            logger.error(f"Error processing image: {str(e)}")
+            await cl.Message(content="There was an error processing the uploaded image. Please try again.").send()
+            return
+
+    # Prepare user message
     user_message = f"""
-Answer the question and use tools if needed:\n
+Answer the question and use tools if needed:\n
+
 Current Date and Time: {now}
+
+User Question: {message.content}
 """
+
+    if image:
+        user_message = f"Image uploaded. {user_message}"
+
     message_history.append({"role": "user", "content": user_message})

     msg = cl.Message(content="")
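The block above reads only `message.elements[0]`. A minimal sketch of the same upload handling, written as a standalone helper that scans every attached element (assuming, as the diff does, that Chainlit image elements expose a local `path`):

```python
# Sketch only: mirrors the image handling added to chat.py, but checks every
# attached element rather than just the first one.
import logging
from typing import Optional

import chainlit as cl
from PIL import Image

logger = logging.getLogger(__name__)


def load_uploaded_image(message: cl.Message) -> Optional[Image.Image]:
    """Return the first attached image as a fully loaded PIL image, or None."""
    for element in message.elements or []:
        if isinstance(element, cl.Image) and element.path:
            try:
                image = Image.open(element.path)
                image.load()  # read the bytes now; the temporary file may not persist
                return image
            except Exception as exc:  # unsupported format, truncated upload, ...
                logger.error(f"Error processing image: {exc}")
    return None
```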
@@ -309,6 +333,19 @@ Current Date and Time: {now}
         "stream": True,
     }

+    # If an image is uploaded, include it in the message
+    if image:
+        buffered = io.BytesIO()
+        image.save(buffered, format="PNG")
+        img_str = base64.b64encode(buffered.getvalue()).decode()
+
+        completion_params["messages"][-1] = {
+            "role": "user",
+            "content": [
+                {"type": "text", "text": user_message},
+                {"type": "image_url", "image_url": {"url": f"data:image/png;base64,{img_str}"}}
+            ]
+        }
     # Only add tools and tool_choice if Tavily API key is available
     if tavily_api_key:
         completion_params["tools"] = tools
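The core of this hunk is re-encoding the in-memory PIL image as a base64 data URL and swapping the last user message for OpenAI-style multimodal content. A small standalone sketch of that conversion; the `image_to_content_parts` name is mine, not from the source:

```python
# Sketch only: PIL image + prompt text -> the list-of-parts content format used above.
import base64
import io

from PIL import Image


def image_to_content_parts(image: Image.Image, text: str) -> list:
    buffered = io.BytesIO()
    image.save(buffered, format="PNG")  # re-encode as PNG regardless of the original format
    img_str = base64.b64encode(buffered.getvalue()).decode()
    return [
        {"type": "text", "text": text},
        {"type": "image_url", "image_url": {"url": f"data:image/png;base64,{img_str}"}},
    ]


if __name__ == "__main__":
    img = Image.new("RGB", (64, 64), color="red")  # stand-in for an uploaded image
    message_history = [{"role": "user", "content": "What is in this picture?"}]
    # Overwrite the last history entry, exactly as the diff does.
    message_history[-1] = {"role": "user", "content": image_to_content_parts(img, "What is in this picture?")}
    print(message_history[-1]["content"][1]["image_url"]["url"][:60])
```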
@@ -359,6 +396,7 @@ Current Date and Time: {now}
     cl.user_session.set("message_history", message_history)
     await msg.update()

+    # Handle tool calls if any
     if tavily_api_key and tool_calls:
         available_functions = {
             "tavily_web_search": tavily_web_search,
@@ -411,7 +449,7 @@ Current Date and Time: {now}
         msg.content = full_response
         await msg.update()
     else:
-        # If no tool calls
+        # If no tool calls, the full_response is already set
         msg.content = full_response
         await msg.update()

@@ -433,7 +471,7 @@ async def send_count():
     ).send()

 @cl.on_chat_resume
-async def on_chat_resume(thread: ThreadDict):
+async def on_chat_resume(thread: ThreadDict):  # Change the type hint here
     logger.info(f"Resuming chat: {thread['id']}")
     model_name = load_setting("model_name") or os.getenv("MODEL_NAME") or "gpt-4o-mini"
     logger.debug(f"Model name: {model_name}")
@@ -481,3 +519,10 @@ async def on_chat_resume(thread: ThreadDict):  # Change the type hint here
             logger.warning(f"Message without recognized type: {message}")

     cl.user_session.set("message_history", message_history)
+
+    # Check if there's an image in the thread metadata
+    image_data = metadata.get("image")
+    if image_data:
+        image = Image.open(io.BytesIO(base64.b64decode(image_data)))
+        cl.user_session.set("image", image)
+        await cl.Message(content="Previous image loaded. You can continue asking questions about it or upload a new image.").send()
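`on_chat_resume` now restores an image from a base64 payload stored under the thread metadata's `"image"` key. The diff only shows the restore side; the save side below is an assumption included to make the round trip concrete:

```python
# Sketch only: round trip for the metadata convention used in on_chat_resume.
# The restore side matches the diff; the save side is assumed.
import base64
import io

from PIL import Image


def image_to_metadata_value(image: Image.Image) -> str:
    """Serialize a PIL image to a base64 string (assumed storage format)."""
    buffered = io.BytesIO()
    image.save(buffered, format="PNG")
    return base64.b64encode(buffered.getvalue()).decode()


def image_from_metadata(metadata: dict):
    """Restore the image stored under the 'image' key, as on_chat_resume does."""
    image_data = metadata.get("image")
    if not image_data:
        return None
    return Image.open(io.BytesIO(base64.b64decode(image_data)))


if __name__ == "__main__":
    original = Image.new("RGB", (32, 32), color="blue")
    restored = image_from_metadata({"image": image_to_metadata_value(original)})
    assert restored is not None and restored.size == original.size
```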
--- praisonai-0.0.72/praisonai/ui/code.py
+++ praisonai-0.0.73/praisonai/ui/code.py
@@ -18,6 +18,9 @@ from context import ContextGatherer
 from tavily import TavilyClient
 from datetime import datetime
 from crawl4ai import WebCrawler
+from PIL import Image
+import io
+import base64

 # Set up logging
 logger = logging.getLogger(__name__)
@@ -303,16 +306,37 @@ tools = [{
 async def main(message: cl.Message):
     model_name = load_setting("model_name") or os.getenv("MODEL_NAME") or "gpt-4o-mini"
     message_history = cl.user_session.get("message_history", [])
-    message_history.append({"role": "user", "content": message.content})
     gatherer = ContextGatherer()
     context, token_count, context_tree = gatherer.run()
     now = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
-
-
-
-
-
-
+
+    # Check if an image was uploaded with this message
+    image = None
+    if message.elements and isinstance(message.elements[0], cl.Image):
+        image_element = message.elements[0]
+        try:
+            # Open the image and keep it in memory
+            image = Image.open(image_element.path)
+            image.load()  # This ensures the file is fully loaded into memory
+            cl.user_session.set("image", image)
+        except Exception as e:
+            logger.error(f"Error processing image: {str(e)}")
+            await cl.Message(content="There was an error processing the uploaded image. Please try again.").send()
+            return
+
+    # Prepare user message
+    user_message = f"""
+Answer the question and use tools if needed:\n{message.content}.\n\n
+Current Date and Time: {now}
+
+Context:
+{context}
+"""
+
+    if image:
+        user_message = f"Image uploaded. {user_message}"
+
+    message_history.append({"role": "user", "content": user_message})

     msg = cl.Message(content="")
     await msg.send()
@@ -320,11 +344,27 @@ async def main(message: cl.Message):
     # Prepare the completion parameters
     completion_params = {
         "model": model_name,
-        "messages":
+        "messages": message_history,
         "stream": True,
     }

-    #
+    # If an image is uploaded, include it in the message
+    if image:
+        buffered = io.BytesIO()
+        image.save(buffered, format="PNG")
+        img_str = base64.b64encode(buffered.getvalue()).decode()
+
+        completion_params["messages"][-1] = {
+            "role": "user",
+            "content": [
+                {"type": "text", "text": user_message},
+                {"type": "image_url", "image_url": {"url": f"data:image/png;base64,{img_str}"}}
+            ]
+        }
+        # Use a vision-capable model when an image is present
+        completion_params["model"] = "gpt-4-vision-preview"  # Adjust this to your actual vision-capable model
+
+    # Only add tools and tool_choice if Tavily API key is available and no image is uploaded
     if tavily_api_key:
         completion_params["tools"] = tools
         completion_params["tool_choice"] = "auto"
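When an image is present, code.py also swaps the model to `gpt-4-vision-preview`; otherwise it keeps the configured model and attaches the Tavily tools. The diff does not show which client ultimately consumes `completion_params`, so the snippet below assumes an OpenAI-compatible async client purely for illustration:

```python
# Sketch only: how a dict like completion_params is typically consumed by an
# OpenAI-compatible streaming client. The real client used by code.py is not
# visible in this diff.
import asyncio

from openai import AsyncOpenAI


async def stream_completion(completion_params: dict) -> str:
    client = AsyncOpenAI()  # reads OPENAI_API_KEY from the environment
    full_response = ""
    stream = await client.chat.completions.create(**completion_params)
    async for chunk in stream:
        delta = chunk.choices[0].delta.content
        if delta:
            full_response += delta  # the UI would stream each token to the message here
    return full_response


if __name__ == "__main__":
    params = {
        "model": "gpt-4o-mini",
        "messages": [{"role": "user", "content": "Say hello."}],
        "stream": True,
    }
    print(asyncio.run(stream_completion(params)))
```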
@@ -380,7 +420,7 @@ async def main(message: cl.Message):
         available_functions = {
             "tavily_web_search": tavily_web_search,
         }
-        messages =
+        messages = message_history + [{"role": "assistant", "content": None, "function_call": {
             "name": tool_calls[0]['function']['name'],
             "arguments": tool_calls[0]['function']['arguments']
         }}]
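The rebuilt `messages` list replays the assistant's `function_call` (legacy function-calling format) before the Tavily result is fed back for a follow-up completion. How the tool output is appended is not shown in this hunk; the sketch below assumes the legacy `"function"` role for the result message:

```python
# Sketch only: assembling follow-up messages after a tool call. The assistant
# entry mirrors the diff; the "function" result message is an assumption based
# on the legacy function-calling format.
import json


def build_followup_messages(message_history: list, tool_call: dict, tool_result: str) -> list:
    name = tool_call["function"]["name"]            # e.g. "tavily_web_search"
    arguments = tool_call["function"]["arguments"]  # JSON-encoded argument string
    return message_history + [
        {"role": "assistant", "content": None, "function_call": {"name": name, "arguments": arguments}},
        {"role": "function", "name": name, "content": tool_result},
    ]


if __name__ == "__main__":
    history = [{"role": "user", "content": "Search the web for PraisonAI."}]
    call = {"function": {"name": "tavily_web_search", "arguments": json.dumps({"query": "PraisonAI"})}}
    print(build_followup_messages(history, call, "Top result: PraisonAI GitHub repository"))
```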
@@ -497,3 +537,10 @@ async def on_chat_resume(thread: ThreadDict):
             logger.warning(f"Message without recognized type: {message}")

     cl.user_session.set("message_history", message_history)
+
+    # Check if there's an image in the thread metadata
+    image_data = metadata.get("image")
+    if image_data:
+        image = Image.open(io.BytesIO(base64.b64decode(image_data)))
+        cl.user_session.set("image", image)
+        await cl.Message(content="Previous image loaded. You can continue asking questions about it, upload a new image, or just chat.").send()
--- praisonai-0.0.72/pyproject.toml
+++ praisonai-0.0.73/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "PraisonAI"
-version = "0.0.72"
+version = "0.0.73"
 description = "PraisonAI application combines AutoGen and CrewAI or similar frameworks into a low-code solution for building and managing multi-agent LLM systems, focusing on simplicity, customization, and efficient human-agent collaboration."
 authors = ["Mervin Praison"]
 license = ""