PraisonAI 0.0.54__py3-none-any.whl → 0.0.55__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of PraisonAI might be problematic. See the package registry's advisory page for more details.

praisonai/deploy.py CHANGED
@@ -56,7 +56,7 @@ class CloudDeployer:
56
56
  file.write("FROM python:3.11-slim\n")
57
57
  file.write("WORKDIR /app\n")
58
58
  file.write("COPY . .\n")
59
- file.write("RUN pip install flask praisonai==0.0.54 gunicorn markdown\n")
59
+ file.write("RUN pip install flask praisonai==0.0.55 gunicorn markdown\n")
60
60
  file.write("EXPOSE 8080\n")
61
61
  file.write('CMD ["gunicorn", "-b", "0.0.0.0:8080", "api:app"]\n')
62
62
 
praisonai/ui/context.py CHANGED
@@ -1,6 +1,5 @@
1
1
  import os
2
2
  import fnmatch
3
- import re
4
3
  import yaml
5
4
  from pathlib import Path
6
5
  import logging
@@ -34,10 +33,12 @@ class ContextGatherer:
34
33
  self.max_file_size = max_file_size
35
34
  self.max_tokens = int(os.getenv("PRAISONAI_MAX_TOKENS", max_tokens))
36
35
  self.ignore_patterns = self.get_ignore_patterns()
36
+ self.include_paths = self.get_include_paths()
37
+ self.included_files = []
37
38
 
38
39
  def get_ignore_patterns(self):
39
40
  """
40
- Loads ignore patterns from various sources, prioritizing them in
41
+ Loads ignore patterns from various sources, prioritizing them in
41
42
  the following order:
42
43
  1. .praisonignore
43
44
  2. settings.yaml (under code.ignore_files)
@@ -95,6 +96,19 @@ class ContextGatherer:
95
96
  logger.debug(f"Final ignore patterns: {modified_ignore_patterns}")
96
97
  return modified_ignore_patterns
97
98
 
99
+ def get_include_paths(self):
100
+ include_paths = []
101
+
102
+ # 1. Load from .praisoninclude
103
+ include_file = os.path.join(self.directory, '.praisoninclude')
104
+ if os.path.exists(include_file):
105
+ with open(include_file, 'r') as f:
106
+ include_paths.extend(
107
+ line.strip() for line in f
108
+ if line.strip() and not line.startswith('#')
109
+ )
110
+ return include_paths
111
+
98
112
  def should_ignore(self, file_path):
99
113
  """
100
114
  Check if a file or directory should be ignored based on patterns.
@@ -116,31 +130,65 @@ class ContextGatherer:
116
130
  any(file_path.endswith(ext) for ext in self.relevant_extensions)
117
131
 
118
132
  def gather_context(self):
119
- """Gather context from relevant files, respecting ignore patterns."""
133
+ """Gather context from relevant files, respecting ignore patterns and include paths."""
120
134
  context = []
121
135
  total_files = 0
122
136
  processed_files = 0
123
137
 
124
- for root, dirs, files in os.walk(self.directory):
125
- total_files += len(files)
126
- dirs[:] = [d for d in dirs if not self.should_ignore(os.path.join(root, d))]
127
- for file in files:
128
- file_path = os.path.join(root, file)
129
- if not self.should_ignore(file_path) and self.is_relevant_file(file_path):
138
+ if not self.include_paths:
139
+ # No include paths specified, process the entire directory
140
+ for root, dirs, files in os.walk(self.directory):
141
+ total_files += len(files)
142
+ dirs[:] = [d for d in dirs if not self.should_ignore(os.path.join(root, d))]
143
+ for file in files:
144
+ file_path = os.path.join(root, file)
145
+ if not self.should_ignore(file_path) and self.is_relevant_file(file_path):
146
+ try:
147
+ with open(file_path, 'r', encoding='utf-8') as f:
148
+ content = f.read()
149
+ context.append(f"File: {file_path}\n\n{content}\n\n{'='*50}\n")
150
+ self.included_files.append(Path(file_path).relative_to(self.directory))
151
+ except Exception as e:
152
+ logger.error(f"Error reading {file_path}: {e}")
153
+ processed_files += 1
154
+ print(f"\rProcessed {processed_files}/{total_files} files", end="", flush=True)
155
+ else:
156
+ # Process specified include paths
157
+ for include_path in self.include_paths:
158
+ full_path = os.path.join(self.directory, include_path)
159
+ if os.path.isdir(full_path):
160
+ for root, dirs, files in os.walk(full_path):
161
+ total_files += len(files)
162
+ dirs[:] = [d for d in dirs if not self.should_ignore(os.path.join(root, d))]
163
+ for file in files:
164
+ file_path = os.path.join(root, file)
165
+ if not self.should_ignore(file_path) and self.is_relevant_file(file_path):
166
+ try:
167
+ with open(file_path, 'r', encoding='utf-8') as f:
168
+ content = f.read()
169
+ context.append(f"File: {file_path}\n\n{content}\n\n{'='*50}\n")
170
+ self.included_files.append(Path(file_path).relative_to(self.directory))
171
+ except Exception as e:
172
+ logger.error(f"Error reading {file_path}: {e}")
173
+ processed_files += 1
174
+ print(f"\rProcessed {processed_files}/{total_files} files", end="", flush=True)
175
+ elif os.path.isfile(full_path) and self.is_relevant_file(full_path):
130
176
  try:
131
- with open(file_path, 'r', encoding='utf-8') as f:
177
+ with open(full_path, 'r', encoding='utf-8') as f:
132
178
  content = f.read()
133
- context.append(f"File: {file_path}\n\n{content}\n\n{'='*50}\n")
179
+ context.append(f"File: {full_path}\n\n{content}\n\n{'='*50}\n")
180
+ self.included_files.append(Path(full_path).relative_to(self.directory))
134
181
  except Exception as e:
135
- logger.error(f"Error reading {file_path}: {e}")
136
- processed_files += 1
137
- print(f"\rProcessed {processed_files}/{total_files} files", end="", flush=True)
182
+ logger.error(f"Error reading {full_path}: {e}")
183
+ processed_files += 1
184
+ print(f"\rProcessed {processed_files}/{total_files} files", end="", flush=True)
185
+
138
186
  print() # New line after progress indicator
139
187
  return '\n'.join(context)
140
188
 
141
189
  def count_tokens(self, text):
142
190
  """Count tokens using a simple whitespace-based tokenizer."""
143
- return len(text.split())
191
+ return len(text.split())
144
192
 
145
193
  def truncate_context(self, context):
146
194
  """Truncate context to stay within the token limit."""
@@ -165,12 +213,9 @@ class ContextGatherer:
165
213
  contents = sorted(path.iterdir())
166
214
  pointers = [('└── ' if i == len(contents) - 1 else '├── ') for i in range(len(contents))]
167
215
  for pointer, item in zip(pointers, contents):
168
- # Use should_ignore for consistency
169
- if self.should_ignore(item):
170
- continue
171
-
172
216
  rel_path = item.relative_to(start_dir)
173
- tree.append(f"{prefix}{pointer}{rel_path}")
217
+ if rel_path in self.included_files:
218
+ tree.append(f"{prefix}{pointer}{rel_path}")
174
219
 
175
220
  if item.is_dir():
176
221
  add_to_tree(item, prefix + (' ' if pointer == '└── ' else '│ '))
@@ -193,6 +238,7 @@ class ContextGatherer:
193
238
  def main():
194
239
  gatherer = ContextGatherer()
195
240
  context, token_count, context_tree = gatherer.run()
241
+ print(context_tree)
196
242
  print(f"\nThe context contains approximately {token_count} tokens.")
197
243
  print("First 500 characters of context:")
198
244
  print(context[:500] + "...")
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: PraisonAI
3
- Version: 0.0.54
3
+ Version: 0.0.55
4
4
  Summary: PraisonAI application combines AutoGen and CrewAI or similar frameworks into a low-code solution for building and managing multi-agent LLM systems, focusing on simplicity, customization, and efficient human-agent collaboration.
5
5
  Author: Mervin Praison
6
6
  Requires-Python: >=3.10,<3.13
@@ -69,14 +69,38 @@ Praison AI, leveraging both AutoGen and CrewAI or any other agent framework, rep
69
69
  </picture>
70
70
  </div>
71
71
 
72
- ## Google Colab
72
+ ## Different User Interfaces:
73
+
74
+ | Interface | Description | URL |
75
+ |---|---|---|
76
+ | **UI** | Multi Agents such as CrewAI or AutoGen | [https://docs.praison.ai/ui/ui](https://docs.praison.ai/ui/ui) |
77
+ | **Chat** | Chat with 100+ LLMs, single AI Agent | [https://docs.praison.ai/ui/chat](https://docs.praison.ai/ui/chat) |
78
+ | **Code** | Chat with entire Codebase, single AI Agent | [https://docs.praison.ai/ui/code](https://docs.praison.ai/ui/code) |
79
+
80
+ ## Google Colab Multi Agents
73
81
 
74
82
  | | Cookbook | Open in Colab |
75
83
  | ------------- | --------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
76
84
  | Basic | PraisonAI | <a target="_blank" href="https://colab.research.google.com/github/MervinPraison/PraisonAI/blob/main/cookbooks/praisonai-googlecolab.ipynb"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"/></a> |
77
85
  | Include Tools | PraisonAI Tools | <a target="_blank" href="https://colab.research.google.com/github/MervinPraison/PraisonAI/blob/main/cookbooks/praisonai-tools-googlecolab.ipynb"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"/></a> |
78
86
 
79
- ## TL;DR
87
+ ## Install
88
+
89
+ | PraisonAI | PraisonAI Code | PraisonAI Chat |
90
+ | --- | --- | --- |
91
+ | `pip install praisonai` | `pip install "praisonai[code]"` | `pip install "praisonai[chat]"` |
92
+
93
+ ## Key Features
94
+
95
+ - Automated AI Agents Creation
96
+ - Use CrewAI or AutoGen Framework
97
+ - 100+ LLM Support
98
+ - Chat with ENTIRE Codebase
99
+ - Interactive UIs
100
+ - YAML-based Configuration
101
+ - Custom Tool Integration
102
+
103
+ ## TL;DR Multi Agents
80
104
 
81
105
  ```bash
82
106
  pip install praisonai
@@ -85,14 +109,6 @@ praisonai --init create a movie script about dog in moon
85
109
  praisonai
86
110
  ```
87
111
 
88
- ## Different User Interfaces:
89
-
90
- | Interface | Description | URL |
91
- |---|---|---|
92
- | **UI** | Multi Agents such as CrewAI or AutoGen | [https://docs.praison.ai/ui/ui](https://docs.praison.ai/ui/ui) |
93
- | **Chat** | Chat with 100+ LLMs, single AI Agent | [https://docs.praison.ai/ui/chat](https://docs.praison.ai/ui/chat) |
94
- | **Code** | Chat with entire Codebase, single AI Agent | [https://docs.praison.ai/ui/code](https://docs.praison.ai/ui/code) |
95
-
96
112
  ## Table of Contents
97
113
 
98
114
  - [Installation](#installation)
@@ -109,7 +125,7 @@ praisonai
109
125
  - [Contributing](#contributing)
110
126
  - [Star History](#star-history)
111
127
 
112
- ## Installation
128
+ ## Installation Multi Agents
113
129
 
114
130
  ```bash
115
131
  pip install praisonai
@@ -195,56 +211,17 @@ export OPENAI_API_KEY="Enter your API key"
195
211
  praisonai chat
196
212
  ```
197
213
 
198
- ## Create Custom Tools
199
-
200
- - https://docs.praison.ai/tools/custom/
201
-
202
- ### Step 1: Pre-requisite to Create a Custom Tool
203
-
204
- `agents.yaml` file should be present in the current directory.
214
+ ## Praison AI Code
205
215
 
206
- If it doesn't exist, create it by running the command `praisonai --init research about the latest AI News and prepare a detailed report`.
207
-
208
- ### Step 2: to Create a Custom Tool
209
-
210
- Create a file called tools.py in the same directory as the agents.yaml file.
211
-
212
- ```python
213
- # example tools.py
214
- from duckduckgo_search import DDGS
215
- from praisonai_tools import BaseTool
216
-
217
- class InternetSearchTool(BaseTool):
218
- name: str = "InternetSearchTool"
219
- description: str = "Search Internet for relevant information based on a query or latest news"
220
-
221
- def _run(self, query: str):
222
- ddgs = DDGS()
223
- results = ddgs.text(keywords=query, region='wt-wt', safesearch='moderate', max_results=5)
224
- return results
216
+ ```bash
217
+ pip install "praisonai[code]"
218
+ export OPENAI_API_KEY="Enter your API key"
219
+ praisonai code
225
220
  ```
226
221
 
227
- ### Step 3: to Create a Custom Tool
228
-
229
- Add the tool to the agents.yaml file as show below under the tools section `- InternetSearchTool`.
222
+ ## Create Custom Tools
230
223
 
231
- ```yaml
232
- framework: crewai
233
- topic: research about the latest AI News and prepare a detailed report
234
- roles:
235
- research_analyst:
236
- backstory: Experienced in gathering and analyzing data related to AI news trends.
237
- goal: Analyze AI News trends
238
- role: Research Analyst
239
- tasks:
240
- gather_data:
241
- description:
242
- Conduct in-depth research on the latest AI News trends from reputable
243
- sources.
244
- expected_output: Comprehensive report on current AI News trends.
245
- tools:
246
- - InternetSearchTool
247
- ```
224
+ - https://docs.praison.ai/tools/custom/
248
225
 
249
226
  ## Agents Playbook
250
227
 
@@ -4,7 +4,7 @@ praisonai/agents_generator.py,sha256=8d1WRbubvEkBrW1HZ7_xnGyqgJi0yxmXa3MgTIqef1c
4
4
  praisonai/auto.py,sha256=9spTXqj47Hmmqv5QHRYE_RzSVHH_KoPbaZjskUj2UcE,7895
5
5
  praisonai/chainlit_ui.py,sha256=bNR7s509lp0I9JlJNvwCZRUZosC64qdvlFCt8NmFamQ,12216
6
6
  praisonai/cli.py,sha256=VaVEJlc8c_aE2SBY6xN7WIbHrqNcXGR2xrDzFAsD2B8,14504
7
- praisonai/deploy.py,sha256=H7UzS6kqr9MBhow4-Ah5rqOukKs2pBuBR8SkaCpcwDw,6028
7
+ praisonai/deploy.py,sha256=EYRn8A57NAsG5N3uFT5voZ2XA1TZV3qdGcqFQ1eeSYs,6028
8
8
  praisonai/inbuilt_tools/__init__.py,sha256=mUKnbL6Gram9c9f2m8wJwEzURBLmPEOcHzwySBH89YA,74
9
9
  praisonai/inbuilt_tools/autogen_tools.py,sha256=svYkM2N7DVFvbiwgoAS7U_MqTOD8rHf8VD3BaFUV5_Y,14907
10
10
  praisonai/inc/__init__.py,sha256=sPDlYBBwdk0VlWzaaM_lG0_LD07lS2HRGvPdxXJFiYg,62
@@ -24,7 +24,7 @@ praisonai/public/thriller.svg,sha256=2dYY72EcgbEyTxS4QzjAm37Y4srtPWEW4vCMFki98ZI
24
24
  praisonai/test.py,sha256=RZKq3UEFb6AnFFiHER3zBXfNmlteSLBlrTmOvnpnZLo,4092
25
25
  praisonai/ui/chat.py,sha256=S3a5u0mI7RO5QFbKckz4z8b32gRTiX8kauSHvQBTMco,9238
26
26
  praisonai/ui/code.py,sha256=KLJir8sfzNnZRm2mlAVprGBsZ6wId6yDLsSfN3A2Qdk,10012
27
- praisonai/ui/context.py,sha256=4Rn0BZg9IKMtkKk3s784dStvbMMogW8fMU4xb1gQ9dY,8484
27
+ praisonai/ui/context.py,sha256=xLVyRa8UDy1HJyMa7RSFz0Lkq4qQ-E4pPLfgzP51_k8,11281
28
28
  praisonai/ui/public/fantasy.svg,sha256=4Gs3kIOux-pjGtw6ogI_rv5_viVJxnE5gRwGilsSg0o,1553
29
29
  praisonai/ui/public/game.svg,sha256=y2QMaA01m8XzuDjTOBWzupOC3-TpnUl9ah89mIhviUw,2406
30
30
  praisonai/ui/public/logo_dark.png,sha256=frHz1zkrnivGssJgk9iy1cabojkVgm8B4MllFwL_CnI,17050
@@ -33,8 +33,8 @@ praisonai/ui/public/movie.svg,sha256=aJ2EQ8vXZusVsF2SeuAVxP4RFJzQ14T26ejrGYdBgzk
33
33
  praisonai/ui/public/thriller.svg,sha256=2dYY72EcgbEyTxS4QzjAm37Y4srtPWEW4vCMFki98ZI,3163
34
34
  praisonai/ui/sql_alchemy.py,sha256=HsyeRq-G9qbQobHWpTJHHKQiT4FvYw_7iuv-2PNh0IU,27419
35
35
  praisonai/version.py,sha256=ugyuFliEqtAwQmH4sTlc16YXKYbFWDmfyk87fErB8-8,21
36
- praisonai-0.0.54.dist-info/LICENSE,sha256=kqvFysVlnFxYOu0HxCe2HlmZmJtdmNGOxWRRkT9TsWc,1035
37
- praisonai-0.0.54.dist-info/METADATA,sha256=gN0h13dvW6KvZD2wpuGZkoaO8TRcBpytB3_8t79Tg7U,12084
38
- praisonai-0.0.54.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
39
- praisonai-0.0.54.dist-info/entry_points.txt,sha256=Qg41eW3A1-dvdV5tF7LqChfYof8Rihk2rN1fiEE3vnk,53
40
- praisonai-0.0.54.dist-info/RECORD,,
36
+ praisonai-0.0.55.dist-info/LICENSE,sha256=kqvFysVlnFxYOu0HxCe2HlmZmJtdmNGOxWRRkT9TsWc,1035
37
+ praisonai-0.0.55.dist-info/METADATA,sha256=NLCPVwWjyRuUZzzQPR6qnKoht2dApKIzyptTFk0Tl1c,11126
38
+ praisonai-0.0.55.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
39
+ praisonai-0.0.55.dist-info/entry_points.txt,sha256=Qg41eW3A1-dvdV5tF7LqChfYof8Rihk2rN1fiEE3vnk,53
40
+ praisonai-0.0.55.dist-info/RECORD,,