praisonai-0.0.59rc2-cp312-cp312-manylinux_2_35_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release. This version of PraisonAI might be problematic.

Files changed (41)
  1. praisonai/__init__.py +6 -0
  2. praisonai/__main__.py +10 -0
  3. praisonai/agents_generator.py +381 -0
  4. praisonai/auto.py +190 -0
  5. praisonai/chainlit_ui.py +304 -0
  6. praisonai/cli.py +337 -0
  7. praisonai/deploy.py +138 -0
  8. praisonai/inbuilt_tools/__init__.py +2 -0
  9. praisonai/inbuilt_tools/autogen_tools.py +209 -0
  10. praisonai/inc/__init__.py +2 -0
  11. praisonai/inc/models.py +128 -0
  12. praisonai/public/android-chrome-192x192.png +0 -0
  13. praisonai/public/android-chrome-512x512.png +0 -0
  14. praisonai/public/apple-touch-icon.png +0 -0
  15. praisonai/public/fantasy.svg +3 -0
  16. praisonai/public/favicon-16x16.png +0 -0
  17. praisonai/public/favicon-32x32.png +0 -0
  18. praisonai/public/favicon.ico +0 -0
  19. praisonai/public/game.svg +3 -0
  20. praisonai/public/logo_dark.png +0 -0
  21. praisonai/public/logo_light.png +0 -0
  22. praisonai/public/movie.svg +3 -0
  23. praisonai/public/thriller.svg +3 -0
  24. praisonai/test.py +105 -0
  25. praisonai/train.py +232 -0
  26. praisonai/ui/chat.py +304 -0
  27. praisonai/ui/code.py +318 -0
  28. praisonai/ui/context.py +283 -0
  29. praisonai/ui/public/fantasy.svg +3 -0
  30. praisonai/ui/public/game.svg +3 -0
  31. praisonai/ui/public/logo_dark.png +0 -0
  32. praisonai/ui/public/logo_light.png +0 -0
  33. praisonai/ui/public/movie.svg +3 -0
  34. praisonai/ui/public/thriller.svg +3 -0
  35. praisonai/ui/sql_alchemy.py +638 -0
  36. praisonai/version.py +1 -0
  37. praisonai-0.0.59rc2.dist-info/LICENSE +20 -0
  38. praisonai-0.0.59rc2.dist-info/METADATA +344 -0
  39. praisonai-0.0.59rc2.dist-info/RECORD +41 -0
  40. praisonai-0.0.59rc2.dist-info/WHEEL +4 -0
  41. praisonai-0.0.59rc2.dist-info/entry_points.txt +5 -0
praisonai/deploy.py ADDED
@@ -0,0 +1,138 @@
+ import subprocess
+ import os
+ from dotenv import load_dotenv
+
+ class CloudDeployer:
+     """
+     A class for deploying a cloud-based application.
+
+     Attributes:
+         None
+
+     Methods:
+         __init__(self):
+             Loads environment variables from .env file or system and sets them.
+
+     """
+     def __init__(self):
+         """
+         Loads environment variables from .env file or system and sets them.
+
+         Parameters:
+             self: An instance of the CloudDeployer class.
+
+         Returns:
+             None
+
+         Raises:
+             None
+
+         """
+         # Load environment variables from .env file or system
+         load_dotenv()
+         self.set_environment_variables()
+
+     def create_dockerfile(self):
+         """
+         Creates a Dockerfile for the application.
+
+         Parameters:
+             self: An instance of the CloudDeployer class.
+
+         Returns:
+             None
+
+         Raises:
+             None
+
+         This method creates a Dockerfile in the current directory with the specified content.
+         The Dockerfile is used to build a Docker image for the application.
+         The content of the Dockerfile includes instructions to use the Python 3.11-slim base image,
+         set the working directory to /app, copy the current directory contents into the container,
+         install the required Python packages (flask, praisonai, gunicorn, and markdown),
+         expose port 8080, and run the application using Gunicorn.
+         """
+         with open("Dockerfile", "w") as file:
+             file.write("FROM python:3.11-slim\n")
+             file.write("WORKDIR /app\n")
+             file.write("COPY . .\n")
+             file.write("RUN pip install flask praisonai==0.0.59rc2 gunicorn markdown\n")
+             file.write("EXPOSE 8080\n")
+             file.write('CMD ["gunicorn", "-b", "0.0.0.0:8080", "api:app"]\n')
+
+     def create_api_file(self):
+         """
+         Creates an API file for the application.
+
+         Parameters:
+             self (CloudDeployer): An instance of the CloudDeployer class.
+
+         Returns:
+             None
+
+         This method creates an API file named "api.py" in the current directory. The file contains a basic Flask application that uses the PraisonAI library to run a simple agent and returns the output as an HTML page. The application listens on the root path ("/") and uses the Markdown library to format the output.
+         """
+         with open("api.py", "w") as file:
+             file.write("from flask import Flask\n")
+             file.write("from praisonai import PraisonAI\n")
+             file.write("import markdown\n\n")
+             file.write("app = Flask(__name__)\n\n")
+             file.write("def basic():\n")
+             file.write("    praisonai = PraisonAI(agent_file=\"agents.yaml\")\n")
+             file.write("    return praisonai.run()\n\n")
+             file.write("@app.route('/')\n")
+             file.write("def home():\n")
+             file.write("    output = basic()\n")
+             file.write("    html_output = markdown.markdown(output)\n")
+             file.write("    return f'<html><body>{html_output}</body></html>'\n\n")
+             file.write("if __name__ == \"__main__\":\n")
+             file.write("    app.run(debug=True)\n")
+
+     def set_environment_variables(self):
+         """Sets environment variables with fallback to .env values or defaults."""
+         os.environ["OPENAI_MODEL_NAME"] = os.getenv("OPENAI_MODEL_NAME", "gpt-4o")
+         os.environ["OPENAI_API_KEY"] = os.getenv("OPENAI_API_KEY", "Enter your API key")
+         os.environ["OPENAI_API_BASE"] = os.getenv("OPENAI_API_BASE", "https://api.openai.com/v1")
+
+     def run_commands(self):
+         """
+         Sets environment variables with fallback to .env values or defaults.
+
+         Parameters:
+             None
+
+         Returns:
+             None
+
+         Raises:
+             None
+
+         This method sets environment variables for the application. It uses the `os.environ` dictionary to set the following environment variables:
+
+         - `OPENAI_MODEL_NAME`: The name of the OpenAI model to use. If not specified in the .env file, it defaults to "gpt-4o".
+         - `OPENAI_API_KEY`: The API key for accessing the OpenAI API. If not specified in the .env file, it defaults to "Enter your API key".
+         - `OPENAI_API_BASE`: The base URL for the OpenAI API. If not specified in the .env file, it defaults to "https://api.openai.com/v1".
+         """
+         self.create_api_file()
+         self.create_dockerfile()
+         """Runs a sequence of shell commands for deployment, continues on error."""
+         commands = [
+             "yes | gcloud auth configure-docker us-central1-docker.pkg.dev",
+             "gcloud artifacts repositories create praisonai-repository --repository-format=docker --location=us-central1",
+             "docker build --platform linux/amd64 -t gcr.io/$(gcloud config get-value project)/praisonai-app:latest .",
+             "docker tag gcr.io/$(gcloud config get-value project)/praisonai-app:latest us-central1-docker.pkg.dev/$(gcloud config get-value project)/praisonai-repository/praisonai-app:latest",
+             "docker push us-central1-docker.pkg.dev/$(gcloud config get-value project)/praisonai-repository/praisonai-app:latest",
+             "gcloud run deploy praisonai-service --image us-central1-docker.pkg.dev/$(gcloud config get-value project)/praisonai-repository/praisonai-app:latest --platform managed --region us-central1 --allow-unauthenticated --set-env-vars OPENAI_MODEL_NAME=${OPENAI_MODEL_NAME},OPENAI_API_KEY=${OPENAI_API_KEY},OPENAI_API_BASE=${OPENAI_API_BASE}"
+         ]
+
+         for cmd in commands:
+             try:
+                 subprocess.run(cmd, shell=True, check=True)
+             except subprocess.CalledProcessError as e:
+                 print(f"ERROR: Command '{e.cmd}' failed with exit status {e.returncode}")
+                 print(f"Continuing with the next command...")
+
+ # Usage
+ if __name__ == "__main__":
+     deployer = CloudDeployer()
+     deployer.run_commands()
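
For reference, the api.py that create_api_file writes out is equivalent to the following Flask app (reconstructed from the write calls above; it assumes an agents.yaml sits in the same working directory):

```python
from flask import Flask
from praisonai import PraisonAI
import markdown

app = Flask(__name__)

def basic():
    # Runs the agents defined in agents.yaml and returns the final output text
    praisonai = PraisonAI(agent_file="agents.yaml")
    return praisonai.run()

@app.route('/')
def home():
    output = basic()
    html_output = markdown.markdown(output)
    return f'<html><body>{html_output}</body></html>'

if __name__ == "__main__":
    app.run(debug=True)
```
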
praisonai/inbuilt_tools/__init__.py ADDED
@@ -0,0 +1,2 @@
+ # from .base_tool import BaseTool, Tool, tool
+ from .autogen_tools import *
praisonai/inbuilt_tools/autogen_tools.py ADDED
@@ -0,0 +1,209 @@
+ # praisonai/inbuilt_tools/autogen_tools.py
+
+ from praisonai_tools import (
+     CodeDocsSearchTool, CSVSearchTool, DirectorySearchTool, DOCXSearchTool, DirectoryReadTool,
+     FileReadTool, TXTSearchTool, JSONSearchTool, MDXSearchTool, PDFSearchTool, RagTool,
+     ScrapeElementFromWebsiteTool, ScrapeWebsiteTool, WebsiteSearchTool, XMLSearchTool, YoutubeChannelSearchTool,
+     YoutubeVideoSearchTool
+ )
+ from typing import Any
+ from autogen import register_function
+ import os
+ import importlib
+ from pathlib import Path
+ import os
+ import inspect
+ import sys
+ import logging
+ logging.basicConfig(level=os.environ.get('LOGLEVEL', 'INFO'), format='%(asctime)s - %(levelname)s - %(message)s')
+
+ def create_autogen_tool_function(tool_name):
+     def autogen_tool(assistant, user_proxy):
+         def register_tool(tool_class, tool_name, tool_description, assistant, user_proxy):
+             def tool_func(query: str) -> Any:
+                 tool_instance = tool_class()
+                 return tool_instance.run(query=query)
+             register_function(tool_func, caller=assistant, executor=user_proxy, name=tool_name, description=tool_description)
+
+         root_directory = os.getcwd()
+         tools_py_path = os.path.join(root_directory, 'tools.py')
+         tools_dir_path = Path(root_directory) / 'tools'
+
+         if os.path.isfile(tools_py_path):
+             print(f"{tools_py_path} exists in the root directory. Loading {tools_py_path} and skipping tools folder.")
+             tool_module = importlib.import_module("tools")
+         elif tools_dir_path.is_dir():
+             print(f"tools folder exists in the root directory. Loading {tool_name} from tools/{tool_name}.py.")
+             tool_module = importlib.import_module(f"tools.{tool_name}")
+         else:
+             raise ImportError("Neither tools.py nor tools directory found in the root directory.")
+
+         Tool = getattr(tool_module, tool_name)
+
+         register_tool(Tool, tool_name, f"Description for {tool_name}", assistant, user_proxy)
+
+     return autogen_tool
+
+ # Load tools.py
+ sys.path.insert(0, os.getcwd())
+ root_directory = os.getcwd()
+ tools_py_path = os.path.join(root_directory, 'tools.py')
+ tools_dir_path = Path(root_directory) / 'tools'
+
+ tools_module = None
+
+ if os.path.isfile(tools_py_path):
+     logging.info(f"{tools_py_path} exists in the root directory. Loading {tools_py_path} and skipping tools folder.")
+     tools_module = importlib.import_module("tools")
+ elif tools_dir_path.is_dir():
+     logging.info(f"tools folder exists in the root directory. Loading {tool_name} from tools/{tool_name}.py.")
+     tools_module = importlib.import_module(f"tools.{tool_name}")
+
+ # Create autogen_TOOL_NAME_HERE function for each tool
+ if tools_module is not None:
+     for name, obj in inspect.getmembers(tools_module):
+         if inspect.isclass(obj):
+             globals()[f"autogen_{name}"] = create_autogen_tool_function(name)
+
+ def autogen_CodeDocsSearchTool(assistant, user_proxy):
+     def register_code_docs_search_tool(tool_class, tool_name, tool_description, assistant, user_proxy):
+         def tool_func(docs_url: str, search_query: str) -> Any:
+             tool_instance = tool_class(docs_url=docs_url, search_query=search_query)
+             return tool_instance.run()
+         register_function(tool_func, caller=assistant, executor=user_proxy, name=tool_name, description=tool_description)
+     register_code_docs_search_tool(CodeDocsSearchTool, "code_docs_search_tool", "Search a Code Docs content(search_query: 'string', docs_url: 'string') - A tool that can be used to semantic search a query from a Code Docs content.", assistant, user_proxy)
+
+ def autogen_CSVSearchTool(assistant, user_proxy):
+     def register_csv_search_tool(tool_class, tool_name, tool_description, assistant, user_proxy):
+         def tool_func(csv: str, search_query: str) -> Any:
+             tool_instance = tool_class(csv=csv, search_query=search_query)
+             return tool_instance.run()
+         register_function(tool_func, caller=assistant, executor=user_proxy, name=tool_name, description=tool_description)
+     register_csv_search_tool(CSVSearchTool, "csv_search_tool", "Search a CSV's content(search_query: 'string', csv: 'string') - A tool that can be used to semantic search a query from a CSV's content.", assistant, user_proxy)
+
+ def autogen_DirectorySearchTool(assistant, user_proxy):
+     def register_directory_search_tool(tool_class, tool_name, tool_description, assistant, user_proxy):
+         def tool_func(directory: str, search_query: str) -> Any:
+             tool_instance = tool_class(directory=directory, search_query=search_query)
+             return tool_instance.run()
+         register_function(tool_func, caller=assistant, executor=user_proxy, name=tool_name, description=tool_description)
+     register_directory_search_tool(DirectorySearchTool, "directory_search_tool", "Search a directory's content(search_query: 'string', directory: 'string') - A tool that can be used to semantic search a query from a directory's content.", assistant, user_proxy)
+
+ def autogen_DOCXSearchTool(assistant, user_proxy):
+     def register_docx_search_tool(tool_class, tool_name, tool_description, assistant, user_proxy):
+         def tool_func(docx: str, search_query: str) -> Any:
+             tool_instance = tool_class(docx=docx, search_query=search_query)
+             return tool_instance.run()
+         register_function(tool_func, caller=assistant, executor=user_proxy, name=tool_name, description=tool_description)
+     register_docx_search_tool(DOCXSearchTool, "docx_search_tool", "Search a DOCX's content(search_query: 'string', docx: 'string') - A tool that can be used to semantic search a query from a DOCX's content.", assistant, user_proxy)
+
+ # DirectoryReadTool
+ def autogen_DirectoryReadTool(assistant, user_proxy):
+     def register_directory_read_tool(tool_class, tool_name, tool_description, assistant, user_proxy):
+         def tool_func(directory: str) -> Any:
+             tool_instance = tool_class(directory=directory)
+             return tool_instance.run()
+         register_function(tool_func, caller=assistant, executor=user_proxy, name=tool_name, description=tool_description)
+     register_directory_read_tool(DirectoryReadTool, "directory_read_tool", "List files in directory(directory: 'string') - A tool that can be used to recursively list a directory's content.", assistant, user_proxy)
+
+ # FileReadTool
+ def autogen_FileReadTool(assistant, user_proxy):
+     def register_file_read_tool(tool_class, tool_name, tool_description, assistant, user_proxy):
+         def tool_func(file_path: str) -> Any:
+             tool_instance = tool_class(file_path=file_path)
+             return tool_instance.run()
+         register_function(tool_func, caller=assistant, executor=user_proxy, name=tool_name, description=tool_description)
+     register_file_read_tool(FileReadTool, "file_read_tool", "Read a file's content(file_path: 'string') - A tool that can be used to read a file's content.", assistant, user_proxy)
+
+ # TXTSearchTool
+ def autogen_TXTSearchTool(assistant, user_proxy):
+     def register_txt_search_tool(tool_class, tool_name, tool_description, assistant, user_proxy):
+         def tool_func(txt: str, search_query: str) -> Any:
+             tool_instance = tool_class(txt=txt, search_query=search_query)
+             return tool_instance.run()
+         register_function(tool_func, caller=assistant, executor=user_proxy, name=tool_name, description=tool_description)
+     register_txt_search_tool(TXTSearchTool, "txt_search_tool", "Search a txt's content(search_query: 'string', txt: 'string') - A tool that can be used to semantic search a query from a txt's content.", assistant, user_proxy)
+
+ def autogen_JSONSearchTool(assistant, user_proxy):
+     def register_json_search_tool(tool_class, tool_name, tool_description, assistant, user_proxy):
+         def tool_func(json_path: str, search_query: str) -> Any:
+             tool_instance = tool_class(json_path=json_path, search_query=search_query)
+             return tool_instance.run()
+         register_function(tool_func, caller=assistant, executor=user_proxy, name=tool_name, description=tool_description)
+     register_json_search_tool(JSONSearchTool, "json_search_tool", "Search a JSON's content(search_query: 'string', json_path: 'string') - A tool that can be used to semantic search a query from a JSON's content.", assistant, user_proxy)
+
+ def autogen_MDXSearchTool(assistant, user_proxy):
+     def register_mdx_search_tool(tool_class, tool_name, tool_description, assistant, user_proxy):
+         def tool_func(mdx: str, search_query: str) -> Any:
+             tool_instance = tool_class(mdx=mdx, search_query=search_query)
+             return tool_instance.run()
+         register_function(tool_func, caller=assistant, executor=user_proxy, name=tool_name, description=tool_description)
+     register_mdx_search_tool(MDXSearchTool, "mdx_search_tool", "Search a MDX's content(search_query: 'string', mdx: 'string') - A tool that can be used to semantic search a query from a MDX's content.", assistant, user_proxy)
+
+ def autogen_PDFSearchTool(assistant, user_proxy):
+     def register_pdf_search_tool(tool_class, tool_name, tool_description, assistant, user_proxy):
+         def tool_func(pdf: str, search_query: str) -> Any:
+             tool_instance = tool_class(pdf=pdf, search_query=search_query)
+             return tool_instance.run()
+         register_function(tool_func, caller=assistant, executor=user_proxy, name=tool_name, description=tool_description)
+     register_pdf_search_tool(PDFSearchTool, "pdf_search_tool", "Search a PDF's content(search_query: 'string', pdf: 'string') - A tool that can be used to semantic search a query from a PDF's content.", assistant, user_proxy)
+
+ def autogen_RagTool(assistant, user_proxy):
+     def register_rag_tool(tool_class, tool_name, tool_description, assistant, user_proxy):
+         def tool_func(query: str, data: Any) -> Any:
+             tool_instance = tool_class(query=query, data=data)
+             return tool_instance.run()
+         register_function(tool_func, caller=assistant, executor=user_proxy, name=tool_name, description=tool_description)
+     register_rag_tool(RagTool, "rag_tool", "Knowledge base(query: 'string', data: Any) - A knowledge base that can be used to answer questions.", assistant, user_proxy)
+
+ def autogen_ScrapeElementFromWebsiteTool(assistant, user_proxy):
+     def register_scrape_element_from_website_tool(tool_class, tool_name, tool_description, assistant, user_proxy):
+         def tool_func(website_url: str, element_query: str) -> Any:
+             tool_instance = tool_class(website_url=website_url, element_query=element_query)
+             return tool_instance.run()
+         register_function(tool_func, caller=assistant, executor=user_proxy, name=tool_name, description=tool_description)
+     register_scrape_element_from_website_tool(ScrapeElementFromWebsiteTool, "scrape_element_from_website_tool", "Scrape an element from a website(element_query: 'string', website_url: 'string') - A tool that can be used to scrape an element from a website.", assistant, user_proxy)
+
+ def autogen_ScrapeWebsiteTool(assistant, user_proxy):
+     def register_scrape_website_tool(tool_class, tool_name, tool_description, assistant, user_proxy):
+         def tool_func(website_url: str) -> Any:
+             tool_instance = tool_class(website_url=website_url)
+             content = tool_instance.run()
+             # Ensure content is properly decoded as UTF-8 if it's a bytes object
+             if isinstance(content, bytes):
+                 content = content.decode('utf-8')
+             return content
+         register_function(tool_func, caller=assistant, executor=user_proxy, name=tool_name, description=tool_description)
+     register_scrape_website_tool(ScrapeWebsiteTool, "scrape_website_tool", "Read website content(website_url: 'string') - A tool that can be used to read content from a specified website.", assistant, user_proxy)
+
+ def autogen_WebsiteSearchTool(assistant, user_proxy):
+     def register_website_search_tool(tool_class, tool_name, tool_description, assistant, user_proxy):
+         def tool_func(website: str, search_query: str) -> Any:
+             tool_instance = tool_class(website=website, search_query=search_query)
+             return tool_instance.run()
+         register_function(tool_func, caller=assistant, executor=user_proxy, name=tool_name, description=tool_description)
+     register_website_search_tool(WebsiteSearchTool, "website_search_tool", "Search in a specific website(search_query: 'string', website: 'string') - A tool that can be used to semantic search a query from a specific URL content.", assistant, user_proxy)
+
+ def autogen_XMLSearchTool(assistant, user_proxy):
+     def register_xml_search_tool(tool_class, tool_name, tool_description, assistant, user_proxy):
+         def tool_func(xml: str, search_query: str) -> Any:
+             tool_instance = tool_class(xml=xml, search_query=search_query)
+             return tool_instance.run()
+         register_function(tool_func, caller=assistant, executor=user_proxy, name=tool_name, description=tool_description)
+     register_xml_search_tool(XMLSearchTool, "xml_search_tool", "Search a XML's content(search_query: 'string', xml: 'string') - A tool that can be used to semantic search a query from a XML's content.", assistant, user_proxy)
+
+ def autogen_YoutubeChannelSearchTool(assistant, user_proxy):
+     def register_youtube_channel_search_tool(tool_class, tool_name, tool_description, assistant, user_proxy):
+         def tool_func(youtube_channel_handle: str, search_query: str) -> Any:
+             tool_instance = tool_class(youtube_channel_handle=youtube_channel_handle, search_query=search_query)
+             return tool_instance.run()
+         register_function(tool_func, caller=assistant, executor=user_proxy, name=tool_name, description=tool_description)
+     register_youtube_channel_search_tool(YoutubeChannelSearchTool, "youtube_channel_search_tool", "Search a Youtube Channels content(search_query: 'string', youtube_channel_handle: 'string') - A tool that can be used to semantic search a query from a Youtube Channels content.", assistant, user_proxy)
+
+ def autogen_YoutubeVideoSearchTool(assistant, user_proxy):
+     def register_youtube_video_search_tool(tool_class, tool_name, tool_description, assistant, user_proxy):
+         def tool_func(youtube_video_url: str, search_query: str) -> Any:
+             tool_instance = tool_class(youtube_video_url=youtube_video_url, search_query=search_query)
+             return tool_instance.run()
+         register_function(tool_func, caller=assistant, executor=user_proxy, name=tool_name, description=tool_description)
+     register_youtube_video_search_tool(YoutubeVideoSearchTool, "youtube_video_search_tool", "Search a Youtube Video content(search_query: 'string', youtube_video_url: 'string') - A tool that can be used to semantic search a query from a Youtube Video content.", assistant, user_proxy)
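
Each autogen_* helper above follows the same pattern: it wraps a praisonai_tools class in a plain tool_func and registers it with AutoGen's register_function, with the assistant as caller and the user proxy as executor. A minimal usage sketch (the agent names, llm_config, and URL below are hypothetical, not part of this package):

```python
import os
import autogen
from praisonai.inbuilt_tools import autogen_ScrapeWebsiteTool

llm_config = {"config_list": [{
    "model": os.environ.get("OPENAI_MODEL_NAME", "gpt-4o-mini"),
    "api_key": os.environ.get("OPENAI_API_KEY"),
}]}

assistant = autogen.AssistantAgent(name="Researcher", llm_config=llm_config)
user_proxy = autogen.UserProxyAgent(
    name="User",
    human_input_mode="NEVER",
    code_execution_config={"work_dir": "coding", "use_docker": False},
)

# Registers scrape_website_tool: the assistant can propose calls, the proxy executes them.
autogen_ScrapeWebsiteTool(assistant, user_proxy)

user_proxy.initiate_chat(assistant, message="Summarise https://example.com")
```
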
praisonai/inc/__init__.py ADDED
@@ -0,0 +1,2 @@
+ # praisonai/inc/__init__.py
+ from .models import PraisonAIModel
praisonai/inc/models.py ADDED
@@ -0,0 +1,128 @@
+ # praisonai/inc/models.py
+ import os
+ import logging
+ logger = logging.getLogger(__name__)
+ logging.basicConfig(level=os.environ.get('LOGLEVEL', 'INFO').upper(), format='%(asctime)s - %(levelname)s - %(message)s')
+
+ # Conditionally import modules based on availability
+ try:
+     from langchain_openai import ChatOpenAI # pip install langchain-openai
+     OPENAI_AVAILABLE = True
+ except ImportError:
+     OPENAI_AVAILABLE = False
+
+ try:
+     from langchain_google_genai import ChatGoogleGenerativeAI # pip install langchain-google-genai
+     GOOGLE_GENAI_AVAILABLE = True
+ except ImportError:
+     GOOGLE_GENAI_AVAILABLE = False
+
+ try:
+     from langchain_anthropic import ChatAnthropic # pip install langchain-anthropic
+     ANTHROPIC_AVAILABLE = True
+ except ImportError:
+     ANTHROPIC_AVAILABLE = False
+
+ try:
+     from langchain_cohere import ChatCohere # pip install langchain-cohere
+     COHERE_AVAILABLE = True
+ except ImportError:
+     COHERE_AVAILABLE = False
+
+ class PraisonAIModel:
+     def __init__(self, model=None, api_key_var=None, base_url=None):
+         """
+         Initializes the PraisonAIModel with the provided parameters or environment variables.
+
+         Args:
+             model (str, optional): The name of the OpenAI model. Defaults to None.
+             api_key_var (str, optional): The OpenAI API key. Defaults to None.
+             base_url (str, optional): The base URL for the OpenAI API. Defaults to None.
+         """
+         self.model = model or os.getenv("OPENAI_MODEL_NAME", "gpt-4o")
+         if self.model.startswith("openai/"):
+             self.api_key_var = "OPENAI_API_KEY"
+             self.base_url = base_url or "https://api.openai.com/v1"
+             self.model_name = self.model.replace("openai/", "")
+         elif self.model.startswith("groq/"):
+             self.api_key_var = "GROQ_API_KEY"
+             self.base_url = base_url or "https://api.groq.com/openai/v1"
+             self.model_name = self.model.replace("groq/", "")
+         elif self.model.startswith("cohere/"):
+             self.api_key_var = "COHERE_API_KEY"
+             self.base_url = ""
+             self.model_name = self.model.replace("cohere/", "")
+         elif self.model.startswith("ollama/"):
+             self.api_key_var = "OLLAMA_API_KEY"
+             self.base_url = base_url or "http://localhost:11434/v1"
+             self.model_name = self.model.replace("ollama/", "")
+         elif self.model.startswith("anthropic/"):
+             self.api_key_var = "ANTHROPIC_API_KEY"
+             self.base_url = ""
+             self.model_name = self.model.replace("anthropic/", "")
+         elif self.model.startswith("google/"):
+             self.api_key_var = "GOOGLE_API_KEY"
+             self.base_url = ""
+             self.model_name = self.model.replace("google/", "")
+         elif self.model.startswith("openrouter/"):
+             self.api_key_var = "OPENROUTER_API_KEY"
+             self.base_url = base_url or "https://openrouter.ai/api/v1"
+             self.model_name = self.model.replace("openrouter/", "")
+         else:
+             self.api_key_var = api_key_var or "OPENAI_API_KEY"
+             self.base_url = base_url or os.environ.get("OPENAI_API_BASE", "https://api.openai.com/v1")
+             self.model_name = self.model
+         logger.debug(f"Initialized PraisonAIModel with model {self.model_name}, api_key_var {self.api_key_var}, and base_url {self.base_url}")
+         self.api_key = os.environ.get(self.api_key_var, "nokey")
+
+     def get_model(self):
+         """
+         Returns an instance of the langchain Chat client with the configured parameters.
+
+         Returns:
+             Chat: An instance of the langchain Chat client.
+         """
+         if self.model.startswith("google/"):
+             if GOOGLE_GENAI_AVAILABLE:
+                 return ChatGoogleGenerativeAI(
+                     model=self.model_name,
+                     google_api_key=self.api_key
+                 )
+             else:
+                 raise ImportError(
+                     "Required Langchain Integration 'langchain-google-genai' not found. "
+                     "Please install with 'pip install langchain-google-genai'"
+                 )
+         elif self.model.startswith("cohere/"):
+             if COHERE_AVAILABLE:
+                 return ChatCohere(
+                     model=self.model_name,
+                     cohere_api_key=self.api_key,
+                 )
+             else:
+                 raise ImportError(
+                     "Required Langchain Integration 'langchain-cohere' not found. "
+                     "Please install with 'pip install langchain-cohere'"
+                 )
+         elif self.model.startswith("anthropic/"):
+             if ANTHROPIC_AVAILABLE:
+                 return ChatAnthropic(
+                     model=self.model_name,
+                     anthropic_api_key=self.api_key,
+                 )
+             else:
+                 raise ImportError(
+                     "Required Langchain Integration 'langchain-anthropic' not found. "
+                     "Please install with 'pip install langchain-anthropic'"
+                 )
+         elif OPENAI_AVAILABLE:
+             return ChatOpenAI(
+                 model=self.model_name,
+                 api_key=self.api_key,
+                 base_url=self.base_url,
+             )
+         else:
+             raise ImportError(
+                 "Required Langchain Integration 'langchain-openai' not found. "
+                 "Please install with 'pip install langchain-openai'"
+             )
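
PraisonAIModel routes on the "provider/" prefix of the model name to choose the API-key variable, base URL, and langchain chat class, falling back to the OpenAI settings when there is no prefix. A minimal usage sketch (model names are illustrative; it assumes langchain-openai is installed and the matching API key is exported):

```python
from praisonai.inc import PraisonAIModel

# "groq/" prefix -> reads GROQ_API_KEY and points ChatOpenAI at the Groq endpoint
groq_llm = PraisonAIModel(model="groq/llama3-70b-8192").get_model()

# No prefix -> OPENAI_MODEL_NAME (default "gpt-4o") against OPENAI_API_BASE
default_llm = PraisonAIModel().get_model()
```
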
Binary file
praisonai/public/fantasy.svg ADDED
@@ -0,0 +1,3 @@
+ <?xml version="1.0" encoding="utf-8"?>
+ <!-- Uploaded to: SVG Repo, www.svgrepo.com, Generator: SVG Repo Mixer Tools -->
+ <svg width="800px" height="800px" viewBox="0 0 1024 1024" class="icon" version="1.1" xmlns="http://www.w3.org/2000/svg"><path d="M840.5 798.2L662.3 599.5l-151 173.7-173.7-173.7-167.7 201c-21 30.4 0.9 71.8 37.9 71.6l594.7-3.3c36.2-0.1 57.8-40.3 38-70.6z" fill="#FFB89A" /><path d="M741.6 647.3l-52.3-47.7c-12.2-11.2-31.2-10.3-42.4 1.9s-10.3 31.2 1.9 42.4l52.3 47.7c5.8 5.3 13 7.8 20.2 7.8 8.1 0 16.2-3.3 22.2-9.8 11.2-12.1 10.3-31.1-1.9-42.3zM631.2 546.5c-12.4-11-31.4-9.8-42.3 2.6l-98.8 111.7-171-165.7L87.9 724.7c-11.8 11.7-11.8 30.7-0.1 42.4 5.9 5.9 13.6 8.9 21.3 8.9 7.6 0 15.3-2.9 21.1-8.7l189.4-188.1 173.8 168.5L633.8 589c11-12.5 9.8-31.5-2.6-42.5z" fill="#33CC99" /><path d="M721.3 342.8m-35.1 0a35.1 35.1 0 1 0 70.2 0 35.1 35.1 0 1 0-70.2 0Z" fill="#33CC99" /><path d="M743.2 175.1H191.6c-70.6 0-128.3 57.7-128.3 128.3v499.2c0 70.6 57.7 128.3 128.3 128.3h551.5c70.6 0 128.3-57.7 128.3-128.3V303.5c0.1-70.6-57.7-128.4-128.2-128.4z m68.3 627.6c0 18.1-7.1 35.2-20.1 48.2-13 13-30.1 20.1-48.2 20.1H191.6c-18.1 0-35.2-7.1-48.2-20.1-13-13-20.1-30.1-20.1-48.2V303.5c0-18.1 7.1-35.2 20.1-48.2 13-13 30.1-20.1 48.2-20.1h551.5c18.1 0 35.2 7.1 48.2 20.1 13 13 20.1 30.1 20.1 48.2v499.2z" fill="#45484C" /><path d="M799.7 90.9H237.2c-16.6 0-30 13.4-30 30s13.4 30 30 30h562.4c26.1 0 50.8 10.3 69.4 28.9 18.6 18.6 28.9 43.3 28.9 69.4v482.4c0 16.6 13.4 30 30 30s30-13.4 30-30V249.2C958 161.9 887 90.9 799.7 90.9z" fill="#45484C" /></svg>
Binary file
Binary file
Binary file
praisonai/public/game.svg ADDED
@@ -0,0 +1,3 @@
+ <?xml version="1.0" encoding="utf-8"?>
+ <!-- Uploaded to: SVG Repo, www.svgrepo.com, Generator: SVG Repo Mixer Tools -->
+ <svg width="800px" height="800px" viewBox="0 0 1024 1024" class="icon" version="1.1" xmlns="http://www.w3.org/2000/svg"><path d="M570.2 842c-50.6 0-278.7-180-278.7-401.9 0-58.8-2.9-133.1-1-183.9-50.8 3.2-91.4 45.7-91.4 97.3v272.1c37.4 194.7 137.5 334 255.2 334 69.5 0 132.9-48.6 180.9-128.5-20.8 7.1-42.6 10.9-65 10.9z" fill="#FFB89A" /><path d="M926.1 191.8C900.5 74.1 817.9 62.1 704.9 62.1c-29.1 0-60.3 0.8-93 0.8-36 0-70.5-1.1-102.5-1.1-109.7 0-189.8 12.5-201.3 123.7-20.4 198.3 30 617.1 306.1 617.1S939 414.3 926.1 191.8z m-76.9 268.5c-9.5 47.9-22.3 90.8-38.1 127.7-16.8 39.2-37 71.4-60 95.8-37.3 39.5-82.1 58.7-137 58.7-53.4 0-97.6-20.1-134.9-61.6-45.5-50.5-79.8-131.5-99-234.2-15.6-83.5-20.3-178.9-12.4-255.2 1.8-17.3 5.7-30.7 11.6-39.8 4.4-6.8 10.1-11.7 18.7-15.8 25.8-12.5 70.8-14.2 111.4-14.2 15 0 30.7 0.2 47.3 0.5 17.8 0.3 36.2 0.6 55.2 0.6 17.2 0 33.9-0.2 50-0.4 15.1-0.2 29.3-0.4 43.1-0.4 44.5 0 89.5 1.8 118 15.1 15.9 7.4 33.4 20.8 43.6 63 2.6 53.3 3.6 153.5-17.5 260.2z" fill="#4E5155" /><path d="M532 841.7c-32.5 22.3-70.6 33.7-113.2 33.7-29.7 0-57.3-6-82.1-17.7-23.2-11-44.7-27.4-63.9-48.7-46-50.9-80.3-131.3-99.2-232.4-15.1-80.6-19.6-172.9-12-246.8 3-29.5 12-50.2 27.5-63.2 14.2-12 35.1-19.2 65.8-22.9 16.5-2 28.2-16.9 26.3-33.3-2-16.5-16.9-28.2-33.3-26.3-42.9 5.1-73.8 16.7-97.4 36.5-27.9 23.5-43.8 57.2-48.5 103-8.2 79.3-3.4 178.1 12.7 264 9.7 51.9 23.4 99.4 40.6 141.2 19.8 48.1 44.4 88.6 73 120.4 51.6 57.2 115.7 86.2 190.6 86.2 55 0 104.5-14.9 147.2-44.2 13.7-9.4 17.1-28.1 7.7-41.7-9.4-13.7-28.1-17.2-41.8-7.8z" fill="#4E5155" /><path d="M519.7 248.5c-16.6 0-30 13.4-30 30v91.3c0 16.6 13.4 30 30 30s30-13.4 30-30v-91.3c0-16.6-13.5-30-30-30zM299.5 385.5c0-16.6-13.4-30-30-30s-30 13.4-30 30v91.3c0 16.6 13.4 30 30 30s30-13.4 30-30v-91.3zM754.6 248.5c-16.6 0-30 13.4-30 30v91.3c0 16.6 13.4 30 30 30s30-13.4 30-30v-91.3c0-16.6-13.4-30-30-30zM716.7 554.5c0-16.6-13.4-30-30-30H551v30c0 58.5 38.1 123.7 92.8 123.7 22.9 0 45-11.9 62.2-33.6 10.3-13 8.1-31.9-4.9-42.1-13-10.3-31.9-8.1-42.1 4.9-5.3 6.7-11.1 10.9-15.1 10.9-4.3 0-11.9-5.1-19.1-16.4-3.3-5.3-6.2-11.2-8.4-17.4h70.4c16.4 0 29.9-13.4 29.9-30zM401.6 704c-25.4 0-46.1-24.2-46.1-53.9 0-16.6-13.4-30-30-30s-30 13.4-30 30c0 62.8 47.6 113.9 106.1 113.9 16.6 0 30-13.4 30-30s-13.5-30-30-30z" fill="#33CC99" /></svg>
Binary file
Binary file
praisonai/public/movie.svg ADDED
@@ -0,0 +1,3 @@
+ <?xml version="1.0" encoding="utf-8"?>
+ <!-- Uploaded to: SVG Repo, www.svgrepo.com, Generator: SVG Repo Mixer Tools -->
+ <svg width="800px" height="800px" viewBox="0 0 1024 1024" class="icon" version="1.1" xmlns="http://www.w3.org/2000/svg"><path d="M861.9 383.8H218.1c-36.4 0-66.1-29.8-66.1-66.1V288c0-36.4 29.8-66.1 66.1-66.1h643.8c36.4 0 66.1 29.8 66.1 66.1v29.7c0 36.3-29.8 66.1-66.1 66.1z" fill="#FFB89A" /><path d="M822.9 129.2H199.8c-77.2 0-140.4 63.2-140.4 140.4v487.2c0 77.2 63.2 140.4 140.4 140.4h623.1c77.2 0 140.4-63.2 140.4-140.4V269.6c0-77.2-63.2-140.4-140.4-140.4z m80.4 177H760.4L864.6 201c5.4 3.3 10.4 7.3 15 11.8 15.3 15.3 23.7 35.4 23.7 56.8v36.6z m-673.3 0l104-117h61.3l-109.1 117H230z m247.4-117h169.2L532 306.2H368.3l109.1-117z m248.8 0h65.6L676 306.2h-60l112.5-114.8-2.3-2.2zM143 212.9c15.3-15.3 35.4-23.7 56.8-23.7h53.9l-104 117h-30.4v-36.5c0.1-21.4 8.5-41.5 23.7-56.8z m736.6 600.7c-15.3 15.3-35.4 23.7-56.8 23.7h-623c-21.3 0-41.5-8.4-56.8-23.7-15.3-15.3-23.7-35.4-23.7-56.8V366.2h783.9v390.6c0.1 21.3-8.3 41.5-23.6 56.8z" fill="#45484C" /><path d="M400.5 770.6V430.9L534.1 508c14.3 8.3 19.3 26.6 11 41-8.3 14.3-26.6 19.3-41 11l-43.6-25.2v131.8l114.1-65.9-7.5-4.3c-14.3-8.3-19.3-26.6-11-41 8.3-14.3 26.6-19.3 41-11l97.5 56.3-294.1 169.9z" fill="#33CC99" /></svg>
praisonai/public/thriller.svg ADDED
@@ -0,0 +1,3 @@
+ <?xml version="1.0" encoding="utf-8"?>
+ <!-- Uploaded to: SVG Repo, www.svgrepo.com, Generator: SVG Repo Mixer Tools -->
+ <svg width="800px" height="800px" viewBox="0 0 1024 1024" class="icon" version="1.1" xmlns="http://www.w3.org/2000/svg"><path d="M188.3 766.5a94.4 135.8 0 1 0 188.8 0 94.4 135.8 0 1 0-188.8 0Z" fill="#FFB89A" /><path d="M931.5 397s0-0.1 0 0c-34.2-82.6-119.3-141-218.8-141-129.7 0-234.9 99.3-234.9 221.9 0 52.1 19.1 100.1 50.9 138 1 14.5 1.8 29.1 1.8 43.6 0 148.5 98.1 269 219.2 269 121 0 219.2-120.4 219.2-269 0-70.1-1.7-214.7-37.4-262.5z m-36.6 347.5c-8.7 25.3-21.1 47.9-36.8 67.1-29.8 36.5-68.3 56.7-108.5 56.7s-78.7-20.1-108.5-56.7c-15.7-19.2-28-41.8-36.8-67.1-9.3-26.9-13.9-55.5-13.9-85.1 0-16.8-1-33.5-2-47.7l-1.3-19.5-12.6-15c-24.1-28.6-36.8-63-36.8-99.3 0-89.3 78.5-161.9 174.9-161.9 36.4 0 71.4 10.3 101 29.7 28.4 18.7 65.5 81.7 65.5 81.7s17.9 27.5 24.7 98.2c4.5 46.5 5 95.9 5 133.8 0.1 29.6-4.6 58.2-13.9 85.1zM377.1 219.9c-51.8 0-93.8 42-93.8 93.8s42 93.8 93.8 93.8 93.8-42 93.8-93.8-42-93.8-93.8-93.8z m0 127.5c-18.6 0-33.8-15.2-33.8-33.8 0-18.6 15.2-33.8 33.8-33.8 18.6 0 33.8 15.2 33.8 33.8 0 18.7-15.1 33.8-33.8 33.8z" fill="#45484C" /><path d="M521.2 206.7m-50.3 0a50.3 50.3 0 1 0 100.6 0 50.3 50.3 0 1 0-100.6 0Z" fill="#45484C" /><path d="M653 156.4m-50.3 0a50.3 50.3 0 1 0 100.6 0 50.3 50.3 0 1 0-100.6 0Z" fill="#45484C" /><path d="M781.9 158.4m-50.3 0a50.3 50.3 0 1 0 100.6 0 50.3 50.3 0 1 0-100.6 0Z" fill="#45484C" /><path d="M909 206.7m-50.3 0a50.3 50.3 0 1 0 100.6 0 50.3 50.3 0 1 0-100.6 0Z" fill="#45484C" /><path d="M263.9 602.7c44.7 0 81 31.5 81 70.3 0 20.9-10.2 35.9-18.7 44.8l-15.9 19.7-0.5 27.2c0.7 7.2 0.6 16.9 0.6 24.7v4.8c0 33.7-27.4 61.2-61.2 61.2-14.9 0-33.3-9.6-48.1-25-15.2-15.9-24.6-35.9-24.6-52.3v-3.2c0-12.7 0-36.2 1-60.2 1.4-33 7.4-57.3 7.4-57.3 3.9-14.7 13.4-28.2 26.8-38 14.8-11 32.8-16.7 52.2-16.7m0-60c-66.4 0-122 42.4-137 99.4-10.9 23-10.4 112.6-10.4 135.9 0 66.9 65.8 137.3 132.7 137.3 66.9 0 121.2-54.3 121.2-121.2 0-9.2 0.3-23-0.8-34.9 22-23 35.4-53.2 35.4-86.3-0.1-71.9-63.2-130.2-141.1-130.2zM444.4 559.9c-26.4 0-47.8 21.4-47.8 47.8s21.4 47.8 47.8 47.8 47.8-21.4 47.8-47.8-21.4-47.8-47.8-47.8zM377.1 494.5c-15.2 0-27.5 12.3-27.5 27.5s12.3 27.5 27.5 27.5 27.5-12.3 27.5-27.5c0-15.3-12.3-27.5-27.5-27.5zM288.1 471.5c-15.2 0-27.5 12.3-27.5 27.5s12.3 27.5 27.5 27.5 27.5-12.3 27.5-27.5-12.4-27.5-27.5-27.5zM188.3 477.9c-15.2 0-27.5 12.3-27.5 27.5s12.3 27.5 27.5 27.5 27.5-12.3 27.5-27.5-12.3-27.5-27.5-27.5zM100.6 538.4c-15.2 0-27.5 12.3-27.5 27.5s12.3 27.5 27.5 27.5 27.5-12.3 27.5-27.5c-0.1-15.2-12.4-27.5-27.5-27.5z" fill="#45484C" /><path d="M670.1 584.6c-41.4 0-80.2-20.3-103.9-54.3-9.5-13.6-6.2-32.3 7.4-41.8 13.6-9.5 32.3-6.2 41.8 7.4 12.5 17.9 33 28.6 54.7 28.6 36.8 0 66.7-29.9 66.7-66.7 0-19.8-8.7-38.4-23.9-51.2-12.7-10.6-14.4-29.6-3.7-42.3s29.6-14.4 42.3-3.7c28.9 24.2 45.4 59.6 45.4 97.2-0.1 70-56.9 126.8-126.8 126.8z" fill="#33CC99" /><path d="M853 556.4c-26 0-49.6-14.5-60.1-36.9-7-15-0.6-32.9 14.4-39.9s32.9-0.6 39.9 14.4c0.3 0.6 2.2 2.4 5.8 2.4 1.2 0 2.3-0.2 3.3-0.6 15.5-5.9 32.8 1.8 38.7 17.3 5.9 15.5-1.8 32.8-17.3 38.7-7.9 3.1-16.2 4.6-24.7 4.6z" fill="#33CC99" /></svg>
praisonai/test.py ADDED
@@ -0,0 +1,105 @@
+ import yaml
+ import os
+ from rich import print
+ from dotenv import load_dotenv
+ from crewai import Agent, Task, Crew
+ load_dotenv()
+ import autogen
+ config_list = [
+     {
+         'model': os.environ.get("OPENAI_MODEL_NAME", "gpt-4o-mini"),
+         'base_url': os.environ.get("OPENAI_API_BASE", "https://api.openai.com/v1"),
+         'api_key': os.environ.get("OPENAI_API_KEY")
+     }
+ ]
+
+ def generate_crew_and_kickoff(agent_file):
+     """
+     This function generates a crew of agents and kicks off tasks based on the configuration provided in a YAML file.
+
+     Parameters:
+         agent_file (str): The path to the YAML file containing the configuration for the agents and tasks.
+
+     Returns:
+         str: The result of the last task executed by the crew.
+     """
+
+     with open(agent_file, 'r') as f:
+         config = yaml.safe_load(f)
+
+     topic = config['topic']
+     framework = config['framework']
+
+     agents = {}
+     tasks = []
+     if framework == "autogen":
+         # Load the LLM configuration dynamically
+         print(config_list)
+         llm_config = {"config_list": config_list}
+
+         for role, details in config['roles'].items():
+             agent_name = details['role'].format(topic=topic).replace("{topic}", topic)
+             agent_goal = details['goal'].format(topic=topic)
+             # Creating an AssistantAgent for each role dynamically
+             agents[role] = autogen.AssistantAgent(
+                 name=agent_name,
+                 llm_config=llm_config,
+                 system_message=details['backstory'].format(topic=topic)+". Reply \"TERMINATE\" in the end when everything is done.",
+             )
+
+             # Preparing tasks for initiate_chats
+             for task_name, task_details in details.get('tasks', {}).items():
+                 description_filled = task_details['description'].format(topic=topic)
+                 expected_output_filled = task_details['expected_output'].format(topic=topic)
+
+                 chat_task = {
+                     "recipient": agents[role],
+                     "message": description_filled,
+                     "summary_method": "last_msg", # Customize as needed
+                     # Additional fields like carryover can be added based on dependencies
+                 }
+                 tasks.append(chat_task)
+
+         # Assuming the user proxy agent is set up as per your requirements
+         user = autogen.UserProxyAgent(
+             name="User",
+             human_input_mode="NEVER",
+             is_termination_msg=lambda x: (x.get("content") or "").rstrip().endswith("TERMINATE"),
+             code_execution_config={
+                 "work_dir": "coding",
+                 "use_docker": False,
+             },
+             # additional setup for the user proxy agent
+         )
+         response = user.initiate_chats(tasks)
+         result = "### Output ###\n"+response[-1].summary if hasattr(response[-1], 'summary') else ""
+     else:
+         for role, details in config['roles'].items():
+             role_filled = details['role'].format(topic=topic)
+             goal_filled = details['goal'].format(topic=topic)
+             backstory_filled = details['backstory'].format(topic=topic)
+
+             # Assume tools are loaded and handled here as per your requirements
+             agent = Agent(role=role_filled, goal=goal_filled, backstory=backstory_filled)
+             agents[role] = agent
+
+             for task_name, task_details in details.get('tasks', {}).items():
+                 description_filled = task_details['description'].format(topic=topic)
+                 expected_output_filled = task_details['expected_output'].format(topic=topic)
+
+                 task = Task(description=description_filled, expected_output=expected_output_filled, agent=agent)
+                 tasks.append(task)
+
+         crew = Crew(
+             agents=list(agents.values()),
+             tasks=tasks,
+             verbose=2
+         )
+
+         result = crew.kickoff()
+     return result
+
+ if __name__ == "__main__":
+     agent_file = "agents.yaml"
+     result = generate_crew_and_kickoff(agent_file)
+     print(result)
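
generate_crew_and_kickoff only reads topic, framework, and, per role, the role/goal/backstory strings plus each task's description and expected_output. A hypothetical minimal agents.yaml with exactly those fields (all names and text are placeholders):

```yaml
topic: "AI in healthcare"
framework: "crewai"   # or "autogen"
roles:
  researcher:
    role: "{topic} Researcher"
    goal: "Gather recent findings about {topic}"
    backstory: "You are an analyst specialising in {topic}."
    tasks:
      research_task:
        description: "Summarise the latest developments in {topic}."
        expected_output: "A short bullet-point report on {topic}."
```
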