PraisonAI 0.0.47__tar.gz → 0.0.48__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of PraisonAI might be problematic. Click here for more details.

Files changed (39)
  1. {praisonai-0.0.47 → praisonai-0.0.48}/PKG-INFO +4 -3
  2. {praisonai-0.0.47 → praisonai-0.0.48}/praisonai/cli.py +35 -0
  3. {praisonai-0.0.47 → praisonai-0.0.48}/praisonai/deploy.py +1 -1
  4. praisonai-0.0.48/praisonai/ui/code.py +317 -0
  5. praisonai-0.0.48/praisonai/ui/context.py +140 -0
  6. {praisonai-0.0.47 → praisonai-0.0.48}/pyproject.toml +3 -2
  7. {praisonai-0.0.47 → praisonai-0.0.48}/LICENSE +0 -0
  8. {praisonai-0.0.47 → praisonai-0.0.48}/README.md +0 -0
  9. {praisonai-0.0.47 → praisonai-0.0.48}/praisonai/__init__.py +0 -0
  10. {praisonai-0.0.47 → praisonai-0.0.48}/praisonai/__main__.py +0 -0
  11. {praisonai-0.0.47 → praisonai-0.0.48}/praisonai/agents_generator.py +0 -0
  12. {praisonai-0.0.47 → praisonai-0.0.48}/praisonai/auto.py +0 -0
  13. {praisonai-0.0.47 → praisonai-0.0.48}/praisonai/chainlit_ui.py +0 -0
  14. {praisonai-0.0.47 → praisonai-0.0.48}/praisonai/inbuilt_tools/__init__.py +0 -0
  15. {praisonai-0.0.47 → praisonai-0.0.48}/praisonai/inbuilt_tools/autogen_tools.py +0 -0
  16. {praisonai-0.0.47 → praisonai-0.0.48}/praisonai/inc/__init__.py +0 -0
  17. {praisonai-0.0.47 → praisonai-0.0.48}/praisonai/inc/models.py +0 -0
  18. {praisonai-0.0.47 → praisonai-0.0.48}/praisonai/public/android-chrome-192x192.png +0 -0
  19. {praisonai-0.0.47 → praisonai-0.0.48}/praisonai/public/android-chrome-512x512.png +0 -0
  20. {praisonai-0.0.47 → praisonai-0.0.48}/praisonai/public/apple-touch-icon.png +0 -0
  21. {praisonai-0.0.47 → praisonai-0.0.48}/praisonai/public/fantasy.svg +0 -0
  22. {praisonai-0.0.47 → praisonai-0.0.48}/praisonai/public/favicon-16x16.png +0 -0
  23. {praisonai-0.0.47 → praisonai-0.0.48}/praisonai/public/favicon-32x32.png +0 -0
  24. {praisonai-0.0.47 → praisonai-0.0.48}/praisonai/public/favicon.ico +0 -0
  25. {praisonai-0.0.47 → praisonai-0.0.48}/praisonai/public/game.svg +0 -0
  26. {praisonai-0.0.47 → praisonai-0.0.48}/praisonai/public/logo_dark.png +0 -0
  27. {praisonai-0.0.47 → praisonai-0.0.48}/praisonai/public/logo_light.png +0 -0
  28. {praisonai-0.0.47 → praisonai-0.0.48}/praisonai/public/movie.svg +0 -0
  29. {praisonai-0.0.47 → praisonai-0.0.48}/praisonai/public/thriller.svg +0 -0
  30. {praisonai-0.0.47 → praisonai-0.0.48}/praisonai/test.py +0 -0
  31. {praisonai-0.0.47 → praisonai-0.0.48}/praisonai/ui/chat.py +0 -0
  32. {praisonai-0.0.47 → praisonai-0.0.48}/praisonai/ui/public/fantasy.svg +0 -0
  33. {praisonai-0.0.47 → praisonai-0.0.48}/praisonai/ui/public/game.svg +0 -0
  34. {praisonai-0.0.47 → praisonai-0.0.48}/praisonai/ui/public/logo_dark.png +0 -0
  35. {praisonai-0.0.47 → praisonai-0.0.48}/praisonai/ui/public/logo_light.png +0 -0
  36. {praisonai-0.0.47 → praisonai-0.0.48}/praisonai/ui/public/movie.svg +0 -0
  37. {praisonai-0.0.47 → praisonai-0.0.48}/praisonai/ui/public/thriller.svg +0 -0
  38. {praisonai-0.0.47 → praisonai-0.0.48}/praisonai/ui/sql_alchemy.py +0 -0
  39. {praisonai-0.0.47 → praisonai-0.0.48}/praisonai/version.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: PraisonAI
3
- Version: 0.0.47
3
+ Version: 0.0.48
4
4
  Summary: PraisonAI application combines AutoGen and CrewAI or similar frameworks into a low-code solution for building and managing multi-agent LLM systems, focusing on simplicity, customization, and efficient human-agent collaboration.
5
5
  Author: Mervin Praison
6
6
  Requires-Python: >=3.10,<3.13
@@ -12,13 +12,14 @@ Provides-Extra: agentops
12
12
  Provides-Extra: anthropic
13
13
  Provides-Extra: api
14
14
  Provides-Extra: chat
15
+ Provides-Extra: code
15
16
  Provides-Extra: cohere
16
17
  Provides-Extra: google
17
18
  Provides-Extra: gradio
18
19
  Provides-Extra: openai
19
20
  Provides-Extra: ui
20
21
  Requires-Dist: agentops (>=0.2.6) ; extra == "agentops"
21
- Requires-Dist: chainlit (>=1.1.301,<2.0.0) ; extra == "ui" or extra == "chat"
22
+ Requires-Dist: chainlit (>=1.1.301,<2.0.0) ; extra == "ui" or extra == "chat" or extra == "code"
22
23
  Requires-Dist: crewai (>=0.32.0)
23
24
  Requires-Dist: flask (>=3.0.0) ; extra == "api"
24
25
  Requires-Dist: gradio (>=4.26.0) ; extra == "gradio"
@@ -26,7 +27,7 @@ Requires-Dist: langchain-anthropic (>=0.1.13) ; extra == "anthropic"
26
27
  Requires-Dist: langchain-cohere (>=0.1.4) ; extra == "cohere"
27
28
  Requires-Dist: langchain-google-genai (>=1.0.4) ; extra == "google"
28
29
  Requires-Dist: langchain-openai (>=0.1.7) ; extra == "openai"
29
- Requires-Dist: litellm (>=1.41.8) ; extra == "chat"
30
+ Requires-Dist: litellm (>=1.41.8) ; extra == "chat" or extra == "code"
30
31
  Requires-Dist: markdown (>=3.5)
31
32
  Requires-Dist: praisonai-tools (>=0.0.7)
32
33
  Requires-Dist: pyautogen (>=0.2.19)
@@ -95,6 +95,10 @@ class PraisonAI:
95
95
  self.create_chainlit_chat_interface()
96
96
  return
97
97
 
98
+ if getattr(args, 'code', False):
99
+ self.create_code_interface()
100
+ return
101
+
98
102
  invocation_cmd = "praisonai"
99
103
  version_string = f"PraisonAI version {__version__}"
100
104
 
@@ -177,6 +181,9 @@ class PraisonAI:
177
181
  if args.agent_file == 'chat':
178
182
  args.ui = 'chainlit'
179
183
  args.chat = True
184
+ if args.agent_file == 'code':
185
+ args.ui = 'chainlit'
186
+ args.code = True
180
187
 
181
188
  return args
182
189
 
@@ -207,6 +214,34 @@ class PraisonAI:
207
214
  chainlit_run([chat_ui_path])
208
215
  else:
209
216
  print("ERROR: Chat UI is not installed. Please install it with 'pip install \"praisonai\[chat]\"' to use the chat UI.")
217
+
218
+ def create_code_interface(self):
219
+ """
220
+ Create a Chainlit interface for the code application.
221
+
222
+ This function sets up a Chainlit application that listens for messages.
223
+ When a message is received, it runs PraisonAI with the provided message as the topic.
224
+ The generated agents are then used to perform tasks.
225
+
226
+ Returns:
227
+ None: This function does not return any value. It starts the Chainlit application.
228
+ """
229
+ if CHAINLIT_AVAILABLE:
230
+ import praisonai
231
+ os.environ["CHAINLIT_PORT"] = "8086"
232
+ public_folder = os.path.join(os.path.dirname(praisonai.__file__), 'public')
233
+ if not os.path.exists("public"): # Check if the folder exists in the current directory
234
+ if os.path.exists(public_folder):
235
+ shutil.copytree(public_folder, 'public', dirs_exist_ok=True)
236
+ logging.info("Public folder copied successfully!")
237
+ else:
238
+ logging.info("Public folder not found in the package.")
239
+ else:
240
+ logging.info("Public folder already exists.")
241
+ code_ui_path = os.path.join(os.path.dirname(praisonai.__file__), 'ui', 'code.py')
242
+ chainlit_run([code_ui_path])
243
+ else:
244
+ print("ERROR: Code UI is not installed. Please install it with 'pip install \"praisonai\[code]\"' to use the code UI.")
210
245
 
211
246
  def create_gradio_interface(self):
212
247
  """
@@ -56,7 +56,7 @@ class CloudDeployer:
56
56
  file.write("FROM python:3.11-slim\n")
57
57
  file.write("WORKDIR /app\n")
58
58
  file.write("COPY . .\n")
59
- file.write("RUN pip install flask praisonai==0.0.47 gunicorn markdown\n")
59
+ file.write("RUN pip install flask praisonai==0.0.48 gunicorn markdown\n")
60
60
  file.write("EXPOSE 8080\n")
61
61
  file.write('CMD ["gunicorn", "-b", "0.0.0.0:8080", "api:app"]\n')
62
62
 
@@ -0,0 +1,317 @@
1
+ import chainlit as cl
2
+ from chainlit.input_widget import TextInput
3
+ from chainlit.types import ThreadDict
4
+ from litellm import acompletion
5
+ import os
6
+ import sqlite3
7
+ from datetime import datetime
8
+ from typing import Dict, List, Optional
9
+ from dotenv import load_dotenv
10
+ load_dotenv()
11
+ import chainlit.data as cl_data
12
+ from chainlit.step import StepDict
13
+ from literalai.helper import utc_now
14
+ import logging
15
+ import json
16
+ from sql_alchemy import SQLAlchemyDataLayer
17
+ from context import ContextGatherer
18
+
# Set up logging
logger = logging.getLogger(__name__)
log_level = os.getenv("LOGLEVEL", "INFO").upper()
logger.handlers = []  # drop any pre-existing handlers so output is not duplicated on reload

# Set up logging to console
console_handler = logging.StreamHandler()
console_handler.setLevel(log_level)
console_formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
console_handler.setFormatter(console_formatter)
logger.addHandler(console_handler)

# Set the logging level for the logger
logger.setLevel(log_level)

CHAINLIT_AUTH_SECRET = os.getenv("CHAINLIT_AUTH_SECRET")

# NOTE(review): this fallback secret is hard-coded and shipped with the
# package, so every deployment that does not set CHAINLIT_AUTH_SECRET shares
# the same signing secret — consider generating a random one per install.
if not CHAINLIT_AUTH_SECRET:
    os.environ["CHAINLIT_AUTH_SECRET"] = "p8BPhQChpg@J>jBz$wGxqLX2V>yTVgP*7Ky9H$aV:axW~ANNX-7_T:o@lnyCBu^U"
    CHAINLIT_AUTH_SECRET = os.getenv("CHAINLIT_AUTH_SECRET")

# Captured once at import time; shared default timestamp for this process.
now = utc_now()

# Counter for created steps (incremented elsewhere; reported by send_count).
create_step_counter = 0

# SQLite database file backing threads, steps, elements, feedbacks, settings.
DB_PATH = "threads.db"
def initialize_db():
    """
    Create the SQLite schema used by the chat UI if it does not exist.

    Tables: users, threads, steps, elements, feedbacks, settings. Safe to
    call repeatedly — every statement uses CREATE TABLE IF NOT EXISTS.
    """
    ddl_statements = (
        '''
        CREATE TABLE IF NOT EXISTS users (
            id UUID PRIMARY KEY,
            identifier TEXT NOT NULL UNIQUE,
            metadata JSONB NOT NULL,
            createdAt TEXT
        )
        ''',
        '''
        CREATE TABLE IF NOT EXISTS threads (
            id UUID PRIMARY KEY,
            createdAt TEXT,
            name TEXT,
            userId UUID,
            userIdentifier TEXT,
            tags TEXT[],
            metadata JSONB NOT NULL DEFAULT '{}',
            FOREIGN KEY (userId) REFERENCES users(id) ON DELETE CASCADE
        )
        ''',
        '''
        CREATE TABLE IF NOT EXISTS steps (
            id UUID PRIMARY KEY,
            name TEXT NOT NULL,
            type TEXT NOT NULL,
            threadId UUID NOT NULL,
            parentId UUID,
            disableFeedback BOOLEAN NOT NULL,
            streaming BOOLEAN NOT NULL,
            waitForAnswer BOOLEAN,
            isError BOOLEAN,
            metadata JSONB,
            tags TEXT[],
            input TEXT,
            output TEXT,
            createdAt TEXT,
            start TEXT,
            end TEXT,
            generation JSONB,
            showInput TEXT,
            language TEXT,
            indent INT,
            FOREIGN KEY (threadId) REFERENCES threads (id) ON DELETE CASCADE
        )
        ''',
        '''
        CREATE TABLE IF NOT EXISTS elements (
            id UUID PRIMARY KEY,
            threadId UUID,
            type TEXT,
            url TEXT,
            chainlitKey TEXT,
            name TEXT NOT NULL,
            display TEXT,
            objectKey TEXT,
            size TEXT,
            page INT,
            language TEXT,
            forId UUID,
            mime TEXT,
            FOREIGN KEY (threadId) REFERENCES threads (id) ON DELETE CASCADE
        )
        ''',
        '''
        CREATE TABLE IF NOT EXISTS feedbacks (
            id UUID PRIMARY KEY,
            forId UUID NOT NULL,
            value INT NOT NULL,
            threadId UUID,
            comment TEXT
        )
        ''',
        '''
        CREATE TABLE IF NOT EXISTS settings (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            key TEXT UNIQUE,
            value TEXT
        )
        ''',
    )
    conn = sqlite3.connect(DB_PATH)
    cursor = conn.cursor()
    for ddl in ddl_statements:
        cursor.execute(ddl)
    conn.commit()
    conn.close()
def save_setting(key: str, value: str):
    """Persist a single key/value pair in the ``settings`` table (upsert).

    Args:
        key: The setting key.
        value: The setting value.
    """
    conn = sqlite3.connect(DB_PATH)
    # Reuse the existing row id (if any) so the key keeps a stable primary key
    # across repeated saves.
    conn.execute(
        """
        INSERT OR REPLACE INTO settings (id, key, value)
        VALUES ((SELECT id FROM settings WHERE key = ?), ?, ?)
        """,
        (key, key, value),
    )
    conn.commit()
    conn.close()
def load_setting(key: str) -> str:
    """Fetch a setting value from the database.

    Args:
        key: The setting key.

    Returns:
        The stored value, or None when the key is not present.
    """
    conn = sqlite3.connect(DB_PATH)
    row = conn.execute('SELECT value FROM settings WHERE key = ?', (key,)).fetchone()
    conn.close()
    return row[0] if row else None
166
+
# Initialize the database
initialize_db()

# Thread ids the user has deleted during this process's lifetime.
deleted_thread_ids = []  # type: List[str]

# Persist chat threads/steps through SQLAlchemy on top of the same SQLite file.
cl_data._data_layer = SQLAlchemyDataLayer(conninfo=f"sqlite+aiosqlite:///{DB_PATH}")
@cl.on_chat_start
async def start():
    """
    Initialize a new chat session.

    Ensures the SQLite schema exists, restores the persisted model name
    (falling back to the MODEL_NAME env var, then "gpt-3.5-turbo"), renders
    the chat-settings panel, and posts a summary of the gathered code context.
    """
    initialize_db()
    model_name = load_setting("model_name")

    if model_name:
        cl.user_session.set("model_name", model_name)
    else:
        # If no setting found, use default or environment variable
        model_name = os.getenv("MODEL_NAME", "gpt-3.5-turbo")
        cl.user_session.set("model_name", model_name)
    logger.debug(f"Model name: {model_name}")
    settings = cl.ChatSettings(
        [
            TextInput(
                id="model_name",
                label="Enter the Model Name",
                placeholder="e.g., gpt-3.5-turbo",
                initial=model_name
            )
        ]
    )
    cl.user_session.set("settings", settings)
    await settings.send()
    # Gather the project's source context so the user can see what the model
    # will be given alongside their questions.
    gatherer = ContextGatherer()
    context, token_count, context_tree = gatherer.run()
    msg = cl.Message(content="""Token Count: {token_count},
    Files include: \n```bash\n{context_tree}\n"""
    .format(token_count=token_count, context_tree=context_tree))
    await msg.send()
@cl.on_settings_update
async def setup_agent(settings):
    """
    Persist updated chat settings.

    Stores the chosen model name in the user session, the ``settings`` table,
    and — when a thread is active — the thread's metadata.
    """
    logger.debug(settings)
    cl.user_session.set("settings", settings)
    model_name = settings["model_name"]
    cl.user_session.set("model_name", model_name)

    # Save in settings table
    save_setting("model_name", model_name)

    # Save in thread metadata
    thread_id = cl.user_session.get("thread_id")
    if thread_id:
        thread = await cl_data.get_thread(thread_id)
        if thread:
            metadata = thread.get("metadata", {})
            metadata["model_name"] = model_name

            # Always store metadata as a JSON string
            await cl_data.update_thread(thread_id, metadata=json.dumps(metadata))

            # Update the user session with the new metadata
            cl.user_session.set("metadata", metadata)
@cl.on_message
async def main(message: cl.Message):
    """
    Handle an incoming user message.

    Appends the message to the session history, gathers fresh code context,
    streams the LLM completion back to the UI token by token, and records the
    assistant's reply in the session history.
    """
    model_name = load_setting("model_name") or os.getenv("MODEL_NAME") or "gpt-3.5-turbo"
    message_history = cl.user_session.get("message_history", [])
    message_history.append({"role": "user", "content": message.content})
    gatherer = ContextGatherer()
    context, token_count, context_tree = gatherer.run()
    # NOTE(review): prompt_history aliases message_history (no copy), so the
    # context-laden prompt below is also appended to the stored history —
    # confirm this duplication of the user turn is intended.
    prompt_history = message_history
    prompt_history.append({"role": "user", "content": """
    Answer the question:\n{question}.\n\n
    Below is the Context:\n{context}\n\n"""
    .format(context=context, question=message.content)})

    msg = cl.Message(content="")
    await msg.send()

    response = await acompletion(
        model=model_name,
        messages=prompt_history,
        stream=True,
        # temperature=0.7,
        # max_tokens=500,
        # top_p=1
    )

    full_response = ""
    # Stream each delta chunk to the UI as it arrives.
    # NOTE(review): assumes every streamed chunk carries a 'content' key in
    # its delta — verify against the litellm streaming response shape.
    async for part in response:
        if token := part['choices'][0]['delta']['content']:
            await msg.stream_token(token)
            full_response += token
    logger.debug(f"Full response: {full_response}")
    message_history.append({"role": "assistant", "content": full_response})
    logger.debug(f"Message history: {message_history}")
    cl.user_session.set("message_history", message_history)
    await msg.update()
# Expected login credentials, configurable via the environment.
username = os.getenv("CHAINLIT_USERNAME", "admin")  # Default to "admin" if not found
password = os.getenv("CHAINLIT_PASSWORD", "admin")  # Default to "admin" if not found

@cl.password_auth_callback
def auth_callback(username: str, password: str):
    """
    Validate login credentials against CHAINLIT_USERNAME / CHAINLIT_PASSWORD.

    Returns:
        cl.User: an admin user on successful authentication.
        None: to reject the login.
    """
    # BUG FIX: the parameters shadow the module-level credentials, so the old
    # check `(username, password) == (username, password)` compared the inputs
    # with themselves and therefore accepted ANY credentials. Compare against
    # the configured values instead.
    expected = (
        os.getenv("CHAINLIT_USERNAME", "admin"),
        os.getenv("CHAINLIT_PASSWORD", "admin"),
    )
    if (username, password) == expected:
        return cl.User(
            identifier=username, metadata={"role": "ADMIN", "provider": "credentials"}
        )
    return None
async def send_count():
    """Report the current create-step counter value to the chat."""
    text = f"Create step counter: {create_step_counter}"
    await cl.Message(text, disable_feedback=True).send()
@cl.on_chat_resume
async def on_chat_resume(thread: cl_data.ThreadDict):
    """
    Rebuild session state when a persisted thread is reopened.

    Restores the model-name settings panel, the thread id and metadata, and
    the in-memory message history reconstructed from the thread's stored steps.
    """
    logger.info(f"Resuming chat: {thread['id']}")
    model_name = load_setting("model_name") or os.getenv("MODEL_NAME") or "gpt-3.5-turbo"
    logger.debug(f"Model name: {model_name}")
    settings = cl.ChatSettings(
        [
            TextInput(
                id="model_name",
                label="Enter the Model Name",
                placeholder="e.g., gpt-3.5-turbo",
                initial=model_name
            )
        ]
    )
    await settings.send()
    thread_id = thread["id"]
    cl.user_session.set("thread_id", thread["id"])

    # The metadata should now already be a dictionary
    metadata = thread.get("metadata", {})
    cl.user_session.set("metadata", metadata)

    message_history = cl.user_session.get("message_history", [])
    steps = thread["steps"]

    # Replay stored steps into the in-memory history used for prompting.
    for message in steps:
        msg_type = message.get("type")
        if msg_type == "user_message":
            message_history.append({"role": "user", "content": message.get("output", "")})
        elif msg_type == "assistant_message":
            message_history.append({"role": "assistant", "content": message.get("output", "")})
        else:
            logger.warning(f"Message without type: {message}")

    cl.user_session.set("message_history", message_history)
@@ -0,0 +1,140 @@
1
+ import os
2
+ import fnmatch
3
+ import re
4
+
class ContextGatherer:
    """
    Collect a project's source files into one text blob for use as LLM
    context, along with a rough token estimate and a tree view of the
    included files.
    """

    def __init__(self, directory='.', output_file='context.txt',
                 relevant_extensions=None, max_file_size=1_000_000, max_tokens=60000):
        self.directory = directory
        self.output_file = output_file
        self.relevant_extensions = relevant_extensions or ['.py']
        self.max_file_size = max_file_size  # bytes; larger files are skipped
        self.max_tokens = max_tokens        # rough cap on total context size
        self.ignore_patterns = self.get_ignore_patterns()

    def get_ignore_patterns(self):
        """Read .gitignore (if present) and merge its patterns with defaults."""
        default_patterns = [".*", "*.pyc", "__pycache__", ".git", ".gitignore", ".vscode",
                            ".idea", ".DS_Store", "*.lock", "*.pyc", ".env",
                            "docs", "tests", "test", "tmp", "temp",
                            "*.txt", "*.md", "*.json", "*.csv", "*.tsv", "*.yaml", "*.yml", "public",
                            "*.sql", "*.sqlite", "*.db", "*.db3", "*.sqlite3", "*.log", "*.zip", "*.gz",
                            "*.tar", "*.rar", "*.7z", "*.pdf", "*.jpg", "*.jpeg", "*.png", "*.gif", "*.svg",
                            "cookbooks", "assets", "__pycache__", "dist", "build", "node_modules", "venv"]
        gitignore_path = os.path.join(self.directory, '.gitignore')
        if os.path.exists(gitignore_path):
            with open(gitignore_path, 'r') as f:
                gitignore_patterns = [line.strip() for line in f if line.strip() and not line.startswith('#')]
            return list(set(default_patterns + gitignore_patterns))
        return default_patterns

    def should_ignore(self, file_path):
        """Return True if the path matches any ignore pattern."""
        relative_path = os.path.relpath(file_path, self.directory)
        if relative_path.startswith('.'):
            return True
        # BUG FIX: previously only the full relative path was matched, so bare
        # patterns such as "__pycache__" never matched nested paths like
        # "pkg/__pycache__". Also match the basename (gitignore-like).
        basename = os.path.basename(file_path)
        for pattern in self.ignore_patterns:
            if fnmatch.fnmatch(relative_path, pattern) or fnmatch.fnmatch(basename, pattern):
                return True
        return False

    def is_relevant_file(self, file_path):
        """Return True for files small enough and with a relevant extension."""
        if os.path.getsize(file_path) > self.max_file_size:
            return False
        return any(file_path.endswith(ext) for ext in self.relevant_extensions)

    def gather_context(self):
        """Concatenate the contents of all relevant files under the directory."""
        context = []
        # Upper bound for the progress indicator (counts ignored files too).
        total_files = sum(len(files) for _, _, files in os.walk(self.directory))
        processed_files = 0

        for root, dirs, files in os.walk(self.directory):
            # Prune ignored directories in place so os.walk skips them.
            dirs[:] = [d for d in dirs if not self.should_ignore(os.path.join(root, d))]
            for file in files:
                file_path = os.path.join(root, file)
                if not self.should_ignore(file_path) and self.is_relevant_file(file_path):
                    try:
                        with open(file_path, 'r', encoding='utf-8') as f:
                            content = f.read()
                        context.append(f"File: {file_path}\n\n{content}\n\n{'='*50}\n")
                    except Exception as e:
                        print(f"Error reading {file_path}: {e}")
                processed_files += 1
                print(f"\rProcessed {processed_files}/{total_files} files", end="", flush=True)
        print()  # New line after progress indicator
        return '\n'.join(context)

    def count_tokens(self, text):
        """Estimate the token count by splitting on words and punctuation."""
        tokens = re.findall(r'\b\w+\b|[^\w\s]', text)
        return len(tokens)

    def truncate_context(self, context):
        """Truncate context to fit within the configured token limit."""
        tokens = re.findall(r'\b\w+\b|[^\w\s]', context)
        if len(tokens) > self.max_tokens:
            truncated_tokens = tokens[:self.max_tokens]
            return ' '.join(truncated_tokens)
        return context

    def save_context(self, context):
        """Write the gathered context to the configured output file."""
        with open(self.output_file, 'w', encoding='utf-8') as f:
            f.write(context)

    def get_context_tree(self):
        """Generate a tree of the folder with only directories and relevant files."""
        tree = []
        start_dir = os.path.abspath(self.directory)

        def add_to_tree(path, prefix=''):
            contents = sorted(os.listdir(path))
            pointers = [('└── ' if i == len(contents) - 1 else '├── ') for i in range(len(contents))]
            for pointer, name in zip(pointers, contents):
                full_path = os.path.join(path, name)
                if self.should_ignore(full_path):
                    continue
                if os.path.isdir(full_path):
                    tree.append(f"{prefix}{pointer}{name}")
                    add_to_tree(full_path, prefix + ('    ' if pointer == '└── ' else '│   '))
                elif self.is_relevant_file(full_path):
                    # BUG FIX: previously every non-ignored file was appended
                    # (before the relevance check, which was dead code), so
                    # irrelevant files appeared in the tree. Now only relevant
                    # files are listed, as the docstring promises.
                    tree.append(f"{prefix}{pointer}{name}")

        add_to_tree(start_dir)
        return '\n'.join(tree)

    def run(self):
        """Gather, truncate, and summarise the context.

        Returns:
            tuple: (context text, estimated token count, tree view string).
        """
        context = self.gather_context()
        context = self.truncate_context(context)
        token_count = self.count_tokens(context)
        print("Context gathered successfully.")
        print(f"Total number of tokens (estimated): {token_count}")
        # self.save_context(context)
        context_tree = self.get_context_tree()
        print("\nContext Tree Structure:")
        print(context_tree)

        return context, token_count, context_tree
+
def main():
    """Demo driver: gather context from the current directory and print a summary."""
    demo_gatherer = ContextGatherer(
        directory='.',
        output_file='context.txt',
        relevant_extensions=['.py'],
        max_file_size=500_000,  # 500KB cap per file
        max_tokens=60000,
    )
    ctx, n_tokens, _tree = demo_gatherer.run()
    print(f"\nThe context contains approximately {n_tokens} tokens.")
    print("First 500 characters of context:")
    print(ctx[:500] + "...")

if __name__ == "__main__":
    main()
@@ -1,6 +1,6 @@
1
1
  [tool.poetry]
2
2
  name = "PraisonAI"
3
- version = "0.0.47"
3
+ version = "0.0.48"
4
4
  description = "PraisonAI application combines AutoGen and CrewAI or similar frameworks into a low-code solution for building and managing multi-agent LLM systems, focusing on simplicity, customization, and efficient human-agent collaboration."
5
5
  authors = ["Mervin Praison"]
6
6
  license = ""
@@ -95,4 +95,5 @@ google = ["langchain-google-genai"]
95
95
  openai = ["langchain-openai"]
96
96
  anthropic = ["langchain-anthropic"]
97
97
  cohere = ["langchain-cohere"]
98
- chat = ["chainlit", "litellm"]
98
+ chat = ["chainlit", "litellm"]
99
+ code = ["chainlit", "litellm"]
File without changes
File without changes
File without changes
File without changes