PraisonAI 0.0.36__tar.gz → 0.0.38__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of PraisonAI might be problematic.
- {praisonai-0.0.36 → praisonai-0.0.38}/PKG-INFO +36 -2
- {praisonai-0.0.36 → praisonai-0.0.38}/README.md +27 -0
- {praisonai-0.0.36 → praisonai-0.0.38}/praisonai/agents_generator.py +48 -10
- {praisonai-0.0.36 → praisonai-0.0.38}/praisonai/auto.py +2 -0
- {praisonai-0.0.36 → praisonai-0.0.38}/praisonai/chainlit_ui.py +2 -0
- {praisonai-0.0.36 → praisonai-0.0.38}/praisonai/deploy.py +1 -1
- praisonai-0.0.38/praisonai/inc/__init__.py +2 -0
- praisonai-0.0.38/praisonai/inc/models.py +124 -0
- praisonai-0.0.38/pyproject.toml +95 -0
- praisonai-0.0.36/pyproject.toml +0 -45
- {praisonai-0.0.36 → praisonai-0.0.38}/LICENSE +0 -0
- {praisonai-0.0.36 → praisonai-0.0.38}/praisonai/__init__.py +0 -0
- {praisonai-0.0.36 → praisonai-0.0.38}/praisonai/__main__.py +0 -0
- {praisonai-0.0.36 → praisonai-0.0.38}/praisonai/cli.py +0 -0
- {praisonai-0.0.36 → praisonai-0.0.38}/praisonai/inbuilt_tools/__init__.py +0 -0
- {praisonai-0.0.36 → praisonai-0.0.38}/praisonai/inbuilt_tools/autogen_tools.py +0 -0
- {praisonai-0.0.36 → praisonai-0.0.38}/praisonai/public/fantasy.svg +0 -0
- {praisonai-0.0.36 → praisonai-0.0.38}/praisonai/public/game.svg +0 -0
- {praisonai-0.0.36 → praisonai-0.0.38}/praisonai/public/movie.svg +0 -0
- {praisonai-0.0.36 → praisonai-0.0.38}/praisonai/public/thriller.svg +0 -0
- {praisonai-0.0.36 → praisonai-0.0.38}/praisonai/test.py +0 -0
- {praisonai-0.0.36 → praisonai-0.0.38}/praisonai/version.py +0 -0
{praisonai-0.0.36 → praisonai-0.0.38}/PKG-INFO

````diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: PraisonAI
-Version: 0.0.36
+Version: 0.0.38
 Summary: PraisonAI application combines AutoGen and CrewAI or similar frameworks into a low-code solution for building and managing multi-agent LLM systems, focusing on simplicity, customization, and efficient human-agent collaboration.
 Author: Mervin Praison
 Requires-Python: >=3.10,<3.13
@@ -9,14 +9,22 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Provides-Extra: agentops
+Provides-Extra: anthropic
 Provides-Extra: api
+Provides-Extra: cohere
+Provides-Extra: google
 Provides-Extra: gradio
+Provides-Extra: openai
 Provides-Extra: ui
-Requires-Dist: agentops (==0.2.3) ; extra == "agentops"
+Requires-Dist: agentops (>=0.2.6) ; extra == "agentops"
 Requires-Dist: chainlit (>=1.1.301,<2.0.0) ; extra == "ui"
 Requires-Dist: crewai (>=0.32.0)
 Requires-Dist: flask (>=3.0.0) ; extra == "api"
 Requires-Dist: gradio (>=4.26.0) ; extra == "gradio"
+Requires-Dist: langchain-anthropic (>=0.1.13) ; extra == "anthropic"
+Requires-Dist: langchain-cohere (>=0.1.4) ; extra == "cohere"
+Requires-Dist: langchain-google-genai (>=1.0.4) ; extra == "google"
+Requires-Dist: langchain-openai (>=0.1.7) ; extra == "openai"
 Requires-Dist: markdown (>=3.5)
 Requires-Dist: praisonai-tools (>=0.0.7)
 Requires-Dist: pyautogen (>=0.2.19)
@@ -299,6 +307,30 @@ gcloud run deploy praisonai-service \
   --set-env-vars OPENAI_MODEL_NAME=${OPENAI_MODEL_NAME},OPENAI_API_KEY=${OPENAI_API_KEY},OPENAI_API_BASE=${OPENAI_API_BASE}
 ```
 
+### Commands to Install Dependencies:
+
+1. **Install all dependencies, including dev dependencies:**
+   ```sh
+   poetry install
+   ```
+
+2. **Install only documentation dependencies:**
+   ```sh
+   poetry install --with docs
+   ```
+
+3. **Install only test dependencies:**
+   ```sh
+   poetry install --with test
+   ```
+
+4. **Install only dev dependencies:**
+   ```sh
+   poetry install --with dev
+   ```
+
+This configuration ensures that your development dependencies are correctly categorized and installed as needed.
+
 ## Other Models
 
 ```bash
@@ -333,4 +365,6 @@ OPENAI_API_KEY=your-mistral-api-key
 - Submit a pull request via GitHub's web interface.
 - Await feedback from project maintainers.
 
+## Star History
 
+[](https://docs.praison.ai)
````
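The 0.0.38 metadata adds optional extras (`anthropic`, `cohere`, `google`, `openai`) wrapping the corresponding LangChain integrations, alongside the existing `agentops`, `api`, `gradio` and `ui` extras. A minimal sketch of how these extras could be pulled in, using standard pip/Poetry extras syntax (the chosen combination is only an example):

```sh
# pip: install the release with selected provider integrations
pip install "praisonai[anthropic,google]==0.0.38"

# Poetry (from a checkout of the repository): enable the same extras
poetry install --extras "anthropic google"
```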
{praisonai-0.0.36 → praisonai-0.0.38}/README.md

````diff
@@ -271,6 +271,30 @@ gcloud run deploy praisonai-service \
   --set-env-vars OPENAI_MODEL_NAME=${OPENAI_MODEL_NAME},OPENAI_API_KEY=${OPENAI_API_KEY},OPENAI_API_BASE=${OPENAI_API_BASE}
 ```
 
+### Commands to Install Dependencies:
+
+1. **Install all dependencies, including dev dependencies:**
+   ```sh
+   poetry install
+   ```
+
+2. **Install only documentation dependencies:**
+   ```sh
+   poetry install --with docs
+   ```
+
+3. **Install only test dependencies:**
+   ```sh
+   poetry install --with test
+   ```
+
+4. **Install only dev dependencies:**
+   ```sh
+   poetry install --with dev
+   ```
+
+This configuration ensures that your development dependencies are correctly categorized and installed as needed.
+
 ## Other Models
 
 ```bash
@@ -305,3 +329,6 @@ OPENAI_API_KEY=your-mistral-api-key
 - Submit a pull request via GitHub's web interface.
 - Await feedback from project maintainers.
 
+## Star History
+
+[](https://docs.praison.ai)
````
{praisonai-0.0.36 → praisonai-0.0.38}/praisonai/agents_generator.py

````diff
@@ -18,6 +18,7 @@ from praisonai_tools import (
     YoutubeVideoSearchTool
 )
 from .inbuilt_tools import *
+from .inc import PraisonAIModel
 import inspect
 from pathlib import Path
 import importlib
@@ -26,12 +27,12 @@ from praisonai_tools import BaseTool
 import os
 import logging
 
-
+agentops_exists = False
 try:
     import agentops
-
+    agentops_exists = True
 except ImportError:
-
+    agentops_exists = False
 
 os.environ["OTEL_SDK_DISABLED"] = "true"
 
@@ -221,6 +222,8 @@ class AgentsGenerator:
             # print(self.config_list)
             llm_config = {"config_list": self.config_list}
 
+            if agentops_exists:
+                agentops.init(os.environ.get("AGENTOPS_API_KEY"), tags=["autogen"])
             # Assuming the user proxy agent is set up as per your requirements
             user_proxy = autogen.UserProxyAgent(
                 name="User",
@@ -264,10 +267,13 @@ class AgentsGenerator:
                     # Additional fields like carryover can be added based on dependencies
                 }
                 tasks.append(chat_task)
-
             response = user_proxy.initiate_chats(tasks)
             result = "### Output ###\n"+response[-1].summary if hasattr(response[-1], 'summary') else ""
-
+            if agentops_exists:
+                agentops.end_session("Success")
+        else: # framework=crewai
+            if agentops_exists:
+                agentops.init(os.environ.get("AGENTOPS_API_KEY"), tags=["crewai"])
             for role, details in config['roles'].items():
                 role_filled = details['role'].format(topic=topic)
                 goal_filled = details['goal'].format(topic=topic)
@@ -275,7 +281,40 @@ class AgentsGenerator:
 
                 # Adding tools to the agent if exists
                 agent_tools = [tools_dict[tool] for tool in details.get('tools', []) if tool in tools_dict]
-
+
+                llm_model = details.get('llm') # Get the llm configuration
+                if llm_model:
+                    llm = PraisonAIModel(
+                        model=llm_model.get("model", os.environ.get("MODEL_NAME", "openai/gpt-4o")),
+                    ).get_model()
+                else:
+                    llm = PraisonAIModel().get_model()
+
+                function_calling_llm_model = details.get('function_calling_llm')
+                if function_calling_llm_model:
+                    function_calling_llm = PraisonAIModel(
+                        model=function_calling_llm_model.get("model", os.environ.get("MODEL_NAME", "openai/gpt-4o")),
+                    ).get_model()
+                else:
+                    function_calling_llm = PraisonAIModel().get_model()
+
+                agent = Agent(
+                    role=role_filled,
+                    goal=goal_filled,
+                    backstory=backstory_filled,
+                    tools=agent_tools,
+                    allow_delegation=details.get('allow_delegation', False),
+                    llm=llm,
+                    function_calling_llm=function_calling_llm,
+                    max_iter=details.get('max_iter', 15),
+                    max_rpm=details.get('max_rpm'),
+                    max_execution_time=details.get('max_execution_time'),
+                    verbose=details.get('verbose', True),
+                    cache=details.get('cache', True),
+                    system_template=details.get('system_template'),
+                    prompt_template=details.get('prompt_template'),
+                    response_template=details.get('response_template'),
+                )
 
                 # Set agent callback if provided
                 if self.agent_callback:
@@ -294,15 +333,12 @@ class AgentsGenerator:
                     task.callback = self.task_callback
 
                 tasks.append(task)
-
-            agentops.init()
+
             crew = Crew(
                 agents=list(agents.values()),
                 tasks=tasks,
                 verbose=2
             )
-            if agentops:
-                agentops.end_session("Success")
 
             self.logger.debug("Final Crew Configuration:")
             self.logger.debug(f"Agents: {crew.agents}")
@@ -310,5 +346,7 @@ class AgentsGenerator:
 
             response = crew.kickoff()
             result = f"### Task Output ###\n{response}"
+            if agentops_exists:
+                agentops.end_session("Success")
         return result
 
````
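In the CrewAI branch each role can now carry its own `llm` and `function_calling_llm` mapping, which `PraisonAIModel` turns into a LangChain chat client (falling back to the `MODEL_NAME` environment variable or `openai/gpt-4o`), and AgentOps tracking is only initialised when the optional `agentops` package is importable, using `AGENTOPS_API_KEY`. A hypothetical `agents.yaml` role entry exercising the new keys might look like the sketch below; the role name, goal, backstory and model identifiers are made up for illustration, and only keys read by the code above are shown:

```yaml
roles:
  researcher:                        # hypothetical role name
    role: "Researcher on {topic}"
    goal: "Gather facts about {topic}"
    backstory: "An analyst who verifies sources."
    tools:
      - "YoutubeVideoSearchTool"     # must match a name present in tools_dict
    llm:
      model: "groq/llama3-70b-8192"  # provider prefix routes to GROQ_API_KEY / Groq base URL
    function_calling_llm:
      model: "openai/gpt-4o"         # resolved by PraisonAIModel to langchain_openai.ChatOpenAI
```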
{praisonai-0.0.36 → praisonai-0.0.38}/praisonai/auto.py

````diff
@@ -6,6 +6,8 @@ import os
 import json
 import yaml
 from rich import print
+import logging
+logging.basicConfig(level=os.environ.get('LOGLEVEL', 'INFO').upper(), format='%(asctime)s - %(levelname)s - %(message)s')
 
 # Define Pydantic models outside of the generate method
 class TaskDetails(BaseModel):
````
{praisonai-0.0.36 → praisonai-0.0.38}/praisonai/chainlit_ui.py

````diff
@@ -10,6 +10,8 @@ from dotenv import load_dotenv
 load_dotenv()
 from contextlib import redirect_stdout
 from io import StringIO
+import logging
+logging.basicConfig(level=os.environ.get('LOGLEVEL', 'INFO').upper(), format='%(asctime)s - %(levelname)s - %(message)s')
 
 framework = "crewai"
 config_list = [
````
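`auto.py` and `chainlit_ui.py` (like the new `inc/models.py`) now call `logging.basicConfig` with the level taken from the `LOGLEVEL` environment variable, defaulting to `INFO`. A small usage sketch, assuming the standard `praisonai` console script declared in the package's entry points:

```sh
# DEBUG surfaces the new logger.debug output (e.g. PraisonAIModel initialisation details)
LOGLEVEL=DEBUG praisonai
```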
{praisonai-0.0.36 → praisonai-0.0.38}/praisonai/deploy.py

````diff
@@ -56,7 +56,7 @@ class CloudDeployer:
         file.write("FROM python:3.11-slim\n")
         file.write("WORKDIR /app\n")
         file.write("COPY . .\n")
-        file.write("RUN pip install flask praisonai==0.0.36 gunicorn markdown\n")
+        file.write("RUN pip install flask praisonai==0.0.38 gunicorn markdown\n")
         file.write("EXPOSE 8080\n")
         file.write('CMD ["gunicorn", "-b", "0.0.0.0:8080", "api:app"]\n')
 
````
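Assembled from the `file.write` calls above, the Dockerfile that `CloudDeployer` generates for the bumped release reads:

```dockerfile
FROM python:3.11-slim
WORKDIR /app
COPY . .
RUN pip install flask praisonai==0.0.38 gunicorn markdown
EXPOSE 8080
CMD ["gunicorn", "-b", "0.0.0.0:8080", "api:app"]
```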
praisonai-0.0.38/praisonai/inc/models.py

````diff
@@ -0,0 +1,124 @@
+# praisonai/inc/models.py
+import os
+import logging
+logger = logging.getLogger(__name__)
+logging.basicConfig(level=os.environ.get('LOGLEVEL', 'INFO').upper(), format='%(asctime)s - %(levelname)s - %(message)s')
+
+# Conditionally import modules based on availability
+try:
+    from langchain_openai import ChatOpenAI # pip install langchain-openai
+    OPENAI_AVAILABLE = True
+except ImportError:
+    OPENAI_AVAILABLE = False
+
+try:
+    from langchain_google_genai import ChatGoogleGenerativeAI # pip install langchain-google-genai
+    GOOGLE_GENAI_AVAILABLE = True
+except ImportError:
+    GOOGLE_GENAI_AVAILABLE = False
+
+try:
+    from langchain_anthropic import ChatAnthropic # pip install langchain-anthropic
+    ANTHROPIC_AVAILABLE = True
+except ImportError:
+    ANTHROPIC_AVAILABLE = False
+
+try:
+    from langchain_cohere import ChatCohere # pip install langchain-cohere
+    COHERE_AVAILABLE = True
+except ImportError:
+    COHERE_AVAILABLE = False
+
+class PraisonAIModel:
+    def __init__(self, model=None, api_key_var=None, base_url=None):
+        """
+        Initializes the PraisonAIModel with the provided parameters or environment variables.
+
+        Args:
+            model (str, optional): The name of the OpenAI model. Defaults to None.
+            api_key_var (str, optional): The OpenAI API key. Defaults to None.
+            base_url (str, optional): The base URL for the OpenAI API. Defaults to None.
+        """
+        self.model = model or os.getenv("OPENAI_MODEL_NAME", "gpt-4o")
+        if self.model.startswith("openai/"):
+            self.api_key_var = "OPENAI_API_KEY"
+            self.base_url = base_url or "https://api.openai.com/v1"
+            self.model_name = self.model.replace("openai/", "")
+        elif self.model.startswith("groq/"):
+            self.api_key_var = "GROQ_API_KEY"
+            self.base_url = base_url or "https://api.groq.com/openai/v1"
+            self.model_name = self.model.replace("groq/", "")
+        elif self.model.startswith("cohere/"):
+            self.api_key_var = "COHERE_API_KEY"
+            self.base_url = ""
+            self.model_name = self.model.replace("cohere/", "")
+        elif self.model.startswith("ollama/"):
+            self.api_key_var = "OLLAMA_API_KEY"
+            self.base_url = base_url or "http://localhost:11434/v1"
+            self.model_name = self.model.replace("ollama/", "")
+        elif self.model.startswith("anthropic/"):
+            self.api_key_var = "ANTHROPIC_API_KEY"
+            self.base_url = ""
+            self.model_name = self.model.replace("anthropic/", "")
+        elif self.model.startswith("google/"):
+            self.api_key_var = "GOOGLE_API_KEY"
+            self.base_url = ""
+            self.model_name = self.model.replace("google/", "")
+        else:
+            self.api_key_var = api_key_var or "OPENAI_API_KEY"
+            self.base_url = base_url or os.environ.get("OPENAI_API_BASE", "https://api.openai.com/v1")
+            self.model_name = self.model
+        logger.debug(f"Initialized PraisonAIModel with model {self.model_name}, api_key_var {self.api_key_var}, and base_url {self.base_url}")
+        self.api_key = os.environ.get(self.api_key_var, "nokey")
+
+    def get_model(self):
+        """
+        Returns an instance of the langchain Chat client with the configured parameters.
+
+        Returns:
+            Chat: An instance of the langchain Chat client.
+        """
+        if self.model.startswith("google/"):
+            if GOOGLE_GENAI_AVAILABLE:
+                return ChatGoogleGenerativeAI(
+                    model=self.model_name,
+                    google_api_key=self.api_key
+                )
+            else:
+                raise ImportError(
+                    "Required Langchain Integration 'langchain-google-genai' not found. "
+                    "Please install with 'pip install langchain-google-genai'"
+                )
+        elif self.model.startswith("cohere/"):
+            if COHERE_AVAILABLE:
+                return ChatCohere(
+                    model=self.model_name,
+                    cohere_api_key=self.api_key,
+                )
+            else:
+                raise ImportError(
+                    "Required Langchain Integration 'langchain-cohere' not found. "
+                    "Please install with 'pip install langchain-cohere'"
+                )
+        elif self.model.startswith("anthropic/"):
+            if ANTHROPIC_AVAILABLE:
+                return ChatAnthropic(
+                    model=self.model_name,
+                    anthropic_api_key=self.api_key,
+                )
+            else:
+                raise ImportError(
+                    "Required Langchain Integration 'langchain-anthropic' not found. "
+                    "Please install with 'pip install langchain-anthropic'"
+                )
+        elif OPENAI_AVAILABLE:
+            return ChatOpenAI(
+                model=self.model_name,
+                api_key=self.api_key,
+                base_url=self.base_url,
+            )
+        else:
+            raise ImportError(
+                "Required Langchain Integration 'langchain-openai' not found. "
+                "Please install with 'pip install langchain-openai'"
+            )
````
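A minimal usage sketch of the new `PraisonAIModel` class, showing how the `provider/` prefix selects the API-key variable, base URL and LangChain client. The model identifiers and keys below are placeholders, and the import path assumes `PraisonAIModel` is re-exported from `praisonai/inc/__init__.py`, as the +2-line change to that file suggests:

```python
import os
from praisonai.inc import PraisonAIModel  # assumed re-export from praisonai/inc/__init__.py

# "openai/<model>": strips the prefix, uses OPENAI_API_KEY and the OpenAI base URL
os.environ.setdefault("OPENAI_API_KEY", "sk-placeholder")
gpt = PraisonAIModel(model="openai/gpt-4o").get_model()          # -> langchain_openai.ChatOpenAI

# "groq/<model>": same ChatOpenAI client pointed at the Groq OpenAI-compatible endpoint
os.environ.setdefault("GROQ_API_KEY", "gsk-placeholder")
groq = PraisonAIModel(model="groq/llama3-70b-8192").get_model()  # placeholder model id

# "google/<model>": requires the langchain-google-genai extra, otherwise ImportError is raised
gemini = PraisonAIModel(model="google/gemini-1.5-flash").get_model()
```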
praisonai-0.0.38/pyproject.toml

````diff
@@ -0,0 +1,95 @@
+[tool.poetry]
+name = "PraisonAI"
+version = "0.0.38"
+description = "PraisonAI application combines AutoGen and CrewAI or similar frameworks into a low-code solution for building and managing multi-agent LLM systems, focusing on simplicity, customization, and efficient human-agent collaboration."
+authors = ["Mervin Praison"]
+license = ""
+readme = "README.md"
+
+[tool.poetry.urls]
+Homepage = "https://docs.praison.ai"
+Repository = "https://github.com/mervinpraison/PraisonAI"
+
+[tool.setuptools]
+packages = ["PraisonAI"]
+
+[tool.poetry.dependencies]
+python = ">=3.10,<3.13"
+rich = ">=13.7"
+pyautogen = ">=0.2.19"
+crewai = ">=0.32.0"
+markdown = ">=3.5"
+praisonai-tools = ">=0.0.7"
+pyparsing = ">=3.0.0" # Added to fix a colab issue. Temp Fix.
+chainlit = {version = "^1.1.301", optional = true}
+gradio = {version = ">=4.26.0", optional = true}
+flask = {version = ">=3.0.0", optional = true}
+agentops = {version = ">=0.2.6", optional = true}
+langchain-google-genai = {version = ">=1.0.4", optional = true}
+langchain-anthropic = {version = ">=0.1.13", optional = true}
+langchain-openai = {version = ">=0.1.7", optional = true}
+langchain-cohere = {version = ">=0.1.4", optional = true}
+
+[tool.poetry.group.docs.dependencies]
+mkdocs = "*"
+markdown-include = "*"
+mkdocs-material = "*"
+mkdocs-jupyter = "*"
+mkdocs-glightbox = "*"
+pymdown-extensions = "*"
+Pygments = "*"
+mkdocstrings = "*"
+mkdocs-apidoc = "*"
+mkdocstrings-python = "*"
+pdoc3 = "*"
+
+[tool.poetry.group.test.dependencies]
+pytest = "8.2.2"
+pre-commit = "3.7.1"
+unittest-xml-reporting = "3.2.0"
+xmlrunner = "*"
+unittest2 = "*"
+
+[tool.poetry.group.dev.dependencies]
+pytest = "8.2.2"
+pre-commit = "3.7.1"
+unittest-xml-reporting = "3.2.0"
+mkdocs = "*"
+markdown-include = "*"
+mkdocs-material = "*"
+mkdocs-jupyter = "*"
+mkdocs-glightbox = "*"
+pymdown-extensions = "*"
+Pygments = "*"
+mkdocstrings = "*"
+mkdocs-apidoc = "*"
+mkdocstrings-python = "*"
+pdoc3 = "*"
+xmlrunner = "*"
+unittest2 = "*"
+chainlit = "^1.1.301"
+gradio = ">=4.26.0"
+flask = ">=3.0.0"
+agentops = ">=0.2.6"
+langchain-google-genai = ">=1.0.4"
+langchain-anthropic = ">=0.1.13"
+langchain-openai = ">=0.1.7"
+langchain-cohere = ">=0.1.4"
+
+[build-system]
+requires = ["poetry-core"]
+build-backend = "poetry.core.masonry.api"
+
+
+[tool.poetry.scripts]
+praisonai = "praisonai.__main__:main"
+
+[tool.poetry.extras]
+ui = ["chainlit"]
+gradio = ["gradio"]
+api = ["flask"]
+agentops = ["agentops"]
+google = ["langchain-google-genai"]
+openai = ["langchain-openai"]
+anthropic = ["langchain-anthropic"]
+cohere = ["langchain-cohere"]
````
praisonai-0.0.36/pyproject.toml
DELETED

````diff
@@ -1,45 +0,0 @@
-[tool.poetry]
-name = "PraisonAI"
-version = "0.0.36"
-description = "PraisonAI application combines AutoGen and CrewAI or similar frameworks into a low-code solution for building and managing multi-agent LLM systems, focusing on simplicity, customization, and efficient human-agent collaboration."
-authors = ["Mervin Praison"]
-license = ""
-readme = "README.md"
-
-[tool.poetry.urls]
-Homepage = "https://docs.praison.ai"
-Repository = "https://github.com/mervinpraison/PraisonAI"
-
-[tool.setuptools]
-packages = ["PraisonAI"]
-
-[tool.poetry.dependencies]
-python = ">=3.10,<3.13"
-rich = ">=13.7"
-pyautogen = ">=0.2.19"
-crewai = ">=0.32.0"
-markdown = ">=3.5"
-praisonai-tools = ">=0.0.7"
-pyparsing = ">=3.0.0"
-chainlit = {version = "^1.1.301", optional = true}
-gradio = {version = ">=4.26.0", optional = true}
-flask = {version = ">=3.0.0", optional = true}
-agentops = {version = "==0.2.3", optional = true}
-
-[tool.poetry.dev-dependencies]
-pytest = "8.2.2"
-pre-commit = "3.7.1"
-unittest-xml-reporting = "3.2.0"
-
-[build-system]
-requires = ["poetry-core"]
-build-backend = "poetry.core.masonry.api"
-
-[tool.poetry.scripts]
-praisonai = "praisonai.__main__:main"
-
-[tool.poetry.extras]
-ui = ["chainlit"]
-gradio = ["gradio"]
-api = ["flask"]
-agentops = ["agentops"]
````