PraisonAI 0.0.37__tar.gz → 0.0.38__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of PraisonAI might be problematic.
- {praisonai-0.0.37 → praisonai-0.0.38}/PKG-INFO +12 -2
- {praisonai-0.0.37 → praisonai-0.0.38}/README.md +3 -0
- {praisonai-0.0.37 → praisonai-0.0.38}/praisonai/agents_generator.py +36 -2
- {praisonai-0.0.37 → praisonai-0.0.38}/praisonai/auto.py +2 -0
- {praisonai-0.0.37 → praisonai-0.0.38}/praisonai/chainlit_ui.py +2 -0
- {praisonai-0.0.37 → praisonai-0.0.38}/praisonai/deploy.py +1 -1
- praisonai-0.0.38/praisonai/inc/__init__.py +2 -0
- praisonai-0.0.38/praisonai/inc/models.py +124 -0
- {praisonai-0.0.37 → praisonai-0.0.38}/pyproject.toml +17 -4
- {praisonai-0.0.37 → praisonai-0.0.38}/LICENSE +0 -0
- {praisonai-0.0.37 → praisonai-0.0.38}/praisonai/__init__.py +0 -0
- {praisonai-0.0.37 → praisonai-0.0.38}/praisonai/__main__.py +0 -0
- {praisonai-0.0.37 → praisonai-0.0.38}/praisonai/cli.py +0 -0
- {praisonai-0.0.37 → praisonai-0.0.38}/praisonai/inbuilt_tools/__init__.py +0 -0
- {praisonai-0.0.37 → praisonai-0.0.38}/praisonai/inbuilt_tools/autogen_tools.py +0 -0
- {praisonai-0.0.37 → praisonai-0.0.38}/praisonai/public/fantasy.svg +0 -0
- {praisonai-0.0.37 → praisonai-0.0.38}/praisonai/public/game.svg +0 -0
- {praisonai-0.0.37 → praisonai-0.0.38}/praisonai/public/movie.svg +0 -0
- {praisonai-0.0.37 → praisonai-0.0.38}/praisonai/public/thriller.svg +0 -0
- {praisonai-0.0.37 → praisonai-0.0.38}/praisonai/test.py +0 -0
- {praisonai-0.0.37 → praisonai-0.0.38}/praisonai/version.py +0 -0
{praisonai-0.0.37 → praisonai-0.0.38}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: PraisonAI
-Version: 0.0.37
+Version: 0.0.38
 Summary: PraisonAI application combines AutoGen and CrewAI or similar frameworks into a low-code solution for building and managing multi-agent LLM systems, focusing on simplicity, customization, and efficient human-agent collaboration.
 Author: Mervin Praison
 Requires-Python: >=3.10,<3.13
@@ -9,14 +9,22 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Provides-Extra: agentops
+Provides-Extra: anthropic
 Provides-Extra: api
+Provides-Extra: cohere
+Provides-Extra: google
 Provides-Extra: gradio
+Provides-Extra: openai
 Provides-Extra: ui
-Requires-Dist: agentops (
+Requires-Dist: agentops (>=0.2.6) ; extra == "agentops"
 Requires-Dist: chainlit (>=1.1.301,<2.0.0) ; extra == "ui"
 Requires-Dist: crewai (>=0.32.0)
 Requires-Dist: flask (>=3.0.0) ; extra == "api"
 Requires-Dist: gradio (>=4.26.0) ; extra == "gradio"
+Requires-Dist: langchain-anthropic (>=0.1.13) ; extra == "anthropic"
+Requires-Dist: langchain-cohere (>=0.1.4) ; extra == "cohere"
+Requires-Dist: langchain-google-genai (>=1.0.4) ; extra == "google"
+Requires-Dist: langchain-openai (>=0.1.7) ; extra == "openai"
 Requires-Dist: markdown (>=3.5)
 Requires-Dist: praisonai-tools (>=0.0.7)
 Requires-Dist: pyautogen (>=0.2.19)
@@ -357,4 +365,6 @@ OPENAI_API_KEY=your-mistral-api-key
 - Submit a pull request via GitHub's web interface.
 - Await feedback from project maintainers.

+## Star History

+[](https://docs.praison.ai)
{praisonai-0.0.37 → praisonai-0.0.38}/README.md

@@ -329,3 +329,6 @@ OPENAI_API_KEY=your-mistral-api-key
 - Submit a pull request via GitHub's web interface.
 - Await feedback from project maintainers.

+## Star History
+
+[](https://docs.praison.ai)
{praisonai-0.0.37 → praisonai-0.0.38}/praisonai/agents_generator.py

@@ -18,6 +18,7 @@ from praisonai_tools import (
     YoutubeVideoSearchTool
 )
 from .inbuilt_tools import *
+from .inc import PraisonAIModel
 import inspect
 from pathlib import Path
 import importlib
@@ -270,7 +271,7 @@ class AgentsGenerator:
             result = "### Output ###\n"+response[-1].summary if hasattr(response[-1], 'summary') else ""
             if agentops_exists:
                 agentops.end_session("Success")
-        else:
+        else: # framework=crewai
             if agentops_exists:
                 agentops.init(os.environ.get("AGENTOPS_API_KEY"), tags=["crewai"])
             for role, details in config['roles'].items():
@@ -280,7 +281,40 @@ class AgentsGenerator:

                 # Adding tools to the agent if exists
                 agent_tools = [tools_dict[tool] for tool in details.get('tools', []) if tool in tools_dict]
-
+
+                llm_model = details.get('llm') # Get the llm configuration
+                if llm_model:
+                    llm = PraisonAIModel(
+                        model=llm_model.get("model", os.environ.get("MODEL_NAME", "openai/gpt-4o")),
+                    ).get_model()
+                else:
+                    llm = PraisonAIModel().get_model()
+
+                function_calling_llm_model = details.get('function_calling_llm')
+                if function_calling_llm_model:
+                    function_calling_llm = PraisonAIModel(
+                        model=function_calling_llm_model.get("model", os.environ.get("MODEL_NAME", "openai/gpt-4o")),
+                    ).get_model()
+                else:
+                    function_calling_llm = PraisonAIModel().get_model()
+
+                agent = Agent(
+                    role=role_filled,
+                    goal=goal_filled,
+                    backstory=backstory_filled,
+                    tools=agent_tools,
+                    allow_delegation=details.get('allow_delegation', False),
+                    llm=llm,
+                    function_calling_llm=function_calling_llm,
+                    max_iter=details.get('max_iter', 15),
+                    max_rpm=details.get('max_rpm'),
+                    max_execution_time=details.get('max_execution_time'),
+                    verbose=details.get('verbose', True),
+                    cache=details.get('cache', True),
+                    system_template=details.get('system_template'),
+                    prompt_template=details.get('prompt_template'),
+                    response_template=details.get('response_template'),
+                )

                 # Set agent callback if provided
                 if self.agent_callback:
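The block above reads optional per-role llm and function_calling_llm entries from the parsed agents YAML and resolves them through PraisonAIModel. Below is a minimal sketch (not part of the diff) of what one value in config['roles'] could look like as the Python dict named details in the loop; only the keys actually read via details.get(...) above are meaningful, and the tool and model names are placeholder assumptions.

# Hypothetical shape of one role entry consumed by the new code; model strings
# and the tool name are illustrative placeholders.
details = {
    "tools": ["InternetSearchTool"],                     # looked up in tools_dict
    "llm": {"model": "google/gemini-1.5-flash"},         # resolved through PraisonAIModel
    "function_calling_llm": {"model": "openai/gpt-4o"},  # separate model for tool calls
    "allow_delegation": False,
    "max_iter": 15,
    "verbose": True,
    "cache": True,
}
# If "llm" or "function_calling_llm" is absent, the code falls back to
# PraisonAIModel().get_model(), i.e. the default model, so both keys are
# optional per role; a dict without a "model" key falls back to the
# MODEL_NAME environment variable or "openai/gpt-4o".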
{praisonai-0.0.37 → praisonai-0.0.38}/praisonai/auto.py

@@ -6,6 +6,8 @@ import os
 import json
 import yaml
 from rich import print
+import logging
+logging.basicConfig(level=os.environ.get('LOGLEVEL', 'INFO').upper(), format='%(asctime)s - %(levelname)s - %(message)s')

 # Define Pydantic models outside of the generate method
 class TaskDetails(BaseModel):
{praisonai-0.0.37 → praisonai-0.0.38}/praisonai/chainlit_ui.py

@@ -10,6 +10,8 @@ from dotenv import load_dotenv
 load_dotenv()
 from contextlib import redirect_stdout
 from io import StringIO
+import logging
+logging.basicConfig(level=os.environ.get('LOGLEVEL', 'INFO').upper(), format='%(asctime)s - %(levelname)s - %(message)s')

 framework = "crewai"
 config_list = [
{praisonai-0.0.37 → praisonai-0.0.38}/praisonai/deploy.py

@@ -56,7 +56,7 @@ class CloudDeployer:
             file.write("FROM python:3.11-slim\n")
             file.write("WORKDIR /app\n")
             file.write("COPY . .\n")
-            file.write("RUN pip install flask praisonai==0.0.37 gunicorn markdown\n")
+            file.write("RUN pip install flask praisonai==0.0.38 gunicorn markdown\n")
             file.write("EXPOSE 8080\n")
             file.write('CMD ["gunicorn", "-b", "0.0.0.0:8080", "api:app"]\n')

praisonai-0.0.38/praisonai/inc/models.py

@@ -0,0 +1,124 @@
+# praisonai/inc/models.py
+import os
+import logging
+logger = logging.getLogger(__name__)
+logging.basicConfig(level=os.environ.get('LOGLEVEL', 'INFO').upper(), format='%(asctime)s - %(levelname)s - %(message)s')
+
+# Conditionally import modules based on availability
+try:
+    from langchain_openai import ChatOpenAI # pip install langchain-openai
+    OPENAI_AVAILABLE = True
+except ImportError:
+    OPENAI_AVAILABLE = False
+
+try:
+    from langchain_google_genai import ChatGoogleGenerativeAI # pip install langchain-google-genai
+    GOOGLE_GENAI_AVAILABLE = True
+except ImportError:
+    GOOGLE_GENAI_AVAILABLE = False
+
+try:
+    from langchain_anthropic import ChatAnthropic # pip install langchain-anthropic
+    ANTHROPIC_AVAILABLE = True
+except ImportError:
+    ANTHROPIC_AVAILABLE = False
+
+try:
+    from langchain_cohere import ChatCohere # pip install langchain-cohere
+    COHERE_AVAILABLE = True
+except ImportError:
+    COHERE_AVAILABLE = False
+
+class PraisonAIModel:
+    def __init__(self, model=None, api_key_var=None, base_url=None):
+        """
+        Initializes the PraisonAIModel with the provided parameters or environment variables.
+
+        Args:
+            model (str, optional): The name of the OpenAI model. Defaults to None.
+            api_key_var (str, optional): The OpenAI API key. Defaults to None.
+            base_url (str, optional): The base URL for the OpenAI API. Defaults to None.
+        """
+        self.model = model or os.getenv("OPENAI_MODEL_NAME", "gpt-4o")
+        if self.model.startswith("openai/"):
+            self.api_key_var = "OPENAI_API_KEY"
+            self.base_url = base_url or "https://api.openai.com/v1"
+            self.model_name = self.model.replace("openai/", "")
+        elif self.model.startswith("groq/"):
+            self.api_key_var = "GROQ_API_KEY"
+            self.base_url = base_url or "https://api.groq.com/openai/v1"
+            self.model_name = self.model.replace("groq/", "")
+        elif self.model.startswith("cohere/"):
+            self.api_key_var = "COHERE_API_KEY"
+            self.base_url = ""
+            self.model_name = self.model.replace("cohere/", "")
+        elif self.model.startswith("ollama/"):
+            self.api_key_var = "OLLAMA_API_KEY"
+            self.base_url = base_url or "http://localhost:11434/v1"
+            self.model_name = self.model.replace("ollama/", "")
+        elif self.model.startswith("anthropic/"):
+            self.api_key_var = "ANTHROPIC_API_KEY"
+            self.base_url = ""
+            self.model_name = self.model.replace("anthropic/", "")
+        elif self.model.startswith("google/"):
+            self.api_key_var = "GOOGLE_API_KEY"
+            self.base_url = ""
+            self.model_name = self.model.replace("google/", "")
+        else:
+            self.api_key_var = api_key_var or "OPENAI_API_KEY"
+            self.base_url = base_url or os.environ.get("OPENAI_API_BASE", "https://api.openai.com/v1")
+            self.model_name = self.model
+        logger.debug(f"Initialized PraisonAIModel with model {self.model_name}, api_key_var {self.api_key_var}, and base_url {self.base_url}")
+        self.api_key = os.environ.get(self.api_key_var, "nokey")
+
+    def get_model(self):
+        """
+        Returns an instance of the langchain Chat client with the configured parameters.
+
+        Returns:
+            Chat: An instance of the langchain Chat client.
+        """
+        if self.model.startswith("google/"):
+            if GOOGLE_GENAI_AVAILABLE:
+                return ChatGoogleGenerativeAI(
+                    model=self.model_name,
+                    google_api_key=self.api_key
+                )
+            else:
+                raise ImportError(
+                    "Required Langchain Integration 'langchain-google-genai' not found. "
+                    "Please install with 'pip install langchain-google-genai'"
+                )
+        elif self.model.startswith("cohere/"):
+            if COHERE_AVAILABLE:
+                return ChatCohere(
+                    model=self.model_name,
+                    cohere_api_key=self.api_key,
+                )
+            else:
+                raise ImportError(
+                    "Required Langchain Integration 'langchain-cohere' not found. "
+                    "Please install with 'pip install langchain-cohere'"
+                )
+        elif self.model.startswith("anthropic/"):
+            if ANTHROPIC_AVAILABLE:
+                return ChatAnthropic(
+                    model=self.model_name,
+                    anthropic_api_key=self.api_key,
+                )
+            else:
+                raise ImportError(
+                    "Required Langchain Integration 'langchain-anthropic' not found. "
+                    "Please install with 'pip install langchain-anthropic'"
+                )
+        elif OPENAI_AVAILABLE:
+            return ChatOpenAI(
+                model=self.model_name,
+                api_key=self.api_key,
+                base_url=self.base_url,
+            )
+        else:
+            raise ImportError(
+                "Required Langchain Integration 'langchain-openai' not found. "
+                "Please install with 'pip install langchain-openai'"
+            )
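The new PraisonAIModel resolves a provider-prefixed model string to a configured LangChain chat client. A minimal usage sketch (not part of the diff), assuming the matching langchain integration (here langchain-openai, installable via the new extras) is present and the API keys are set; the model names and keys below are placeholders.

import os
# Imported the same way agents_generator.py does (from .inc import PraisonAIModel);
# praisonai/inc/__init__.py is assumed to re-export the class.
from praisonai.inc import PraisonAIModel

# Placeholder keys for illustration only.
os.environ.setdefault("OPENAI_API_KEY", "your-openai-api-key")
os.environ.setdefault("GROQ_API_KEY", "your-groq-api-key")

# "openai/..." strips the prefix and targets https://api.openai.com/v1 with OPENAI_API_KEY.
openai_llm = PraisonAIModel(model="openai/gpt-4o").get_model()

# "groq/..." reuses the ChatOpenAI client against Groq's OpenAI-compatible
# endpoint (https://api.groq.com/openai/v1) with GROQ_API_KEY; model name is a placeholder.
groq_llm = PraisonAIModel(model="groq/llama3-70b-8192").get_model()

# With no argument, the model falls back to OPENAI_MODEL_NAME or "gpt-4o".
default_llm = PraisonAIModel().get_model()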
{praisonai-0.0.37 → praisonai-0.0.38}/pyproject.toml

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "PraisonAI"
-version = "0.0.37"
+version = "0.0.38"
 description = "PraisonAI application combines AutoGen and CrewAI or similar frameworks into a low-code solution for building and managing multi-agent LLM systems, focusing on simplicity, customization, and efficient human-agent collaboration."
 authors = ["Mervin Praison"]
 license = ""
@@ -20,11 +20,15 @@ pyautogen = ">=0.2.19"
 crewai = ">=0.32.0"
 markdown = ">=3.5"
 praisonai-tools = ">=0.0.7"
-pyparsing = ">=3.0.0"
+pyparsing = ">=3.0.0" # Added to fix a colab issue. Temp Fix.
 chainlit = {version = "^1.1.301", optional = true}
 gradio = {version = ">=4.26.0", optional = true}
 flask = {version = ">=3.0.0", optional = true}
-agentops = {version = "
+agentops = {version = ">=0.2.6", optional = true}
+langchain-google-genai = {version = ">=1.0.4", optional = true}
+langchain-anthropic = {version = ">=0.1.13", optional = true}
+langchain-openai = {version = ">=0.1.7", optional = true}
+langchain-cohere = {version = ">=0.1.4", optional = true}

 [tool.poetry.group.docs.dependencies]
 mkdocs = "*"
@@ -66,12 +70,17 @@ unittest2 = "*"
 chainlit = "^1.1.301"
 gradio = ">=4.26.0"
 flask = ">=3.0.0"
-agentops = "
+agentops = ">=0.2.6"
+langchain-google-genai = ">=1.0.4"
+langchain-anthropic = ">=0.1.13"
+langchain-openai = ">=0.1.7"
+langchain-cohere = ">=0.1.4"

 [build-system]
 requires = ["poetry-core"]
 build-backend = "poetry.core.masonry.api"

+
 [tool.poetry.scripts]
 praisonai = "praisonai.__main__:main"

@@ -80,3 +89,7 @@ ui = ["chainlit"]
 gradio = ["gradio"]
 api = ["flask"]
 agentops = ["agentops"]
+google = ["langchain-google-genai"]
+openai = ["langchain-openai"]
+anthropic = ["langchain-anthropic"]
+cohere = ["langchain-cohere"]