PraisonAI 2.2.35__cp313-cp313-manylinux_2_39_x86_64.whl → 2.2.37__cp313-cp313-manylinux_2_39_x86_64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of PraisonAI might be problematic.
- praisonai/cli.py +48 -9
- praisonai/deploy.py +58 -11
- {praisonai-2.2.35.dist-info → praisonai-2.2.37.dist-info}/METADATA +2 -2
- {praisonai-2.2.35.dist-info → praisonai-2.2.37.dist-info}/RECORD +6 -6
- {praisonai-2.2.35.dist-info → praisonai-2.2.37.dist-info}/WHEEL +0 -0
- {praisonai-2.2.35.dist-info → praisonai-2.2.37.dist-info}/entry_points.txt +0 -0
praisonai/cli.py CHANGED

@@ -170,6 +170,25 @@ class PraisonAI:
             pass
         return None

+    def read_file_if_provided(self, file_path):
+        """
+        Read content from a file if the file path is provided.
+        Returns the file content or None if file cannot be read.
+        """
+        if not file_path:
+            return None
+
+        try:
+            with open(file_path, 'r', encoding='utf-8') as f:
+                file_content = f.read().strip()
+                return file_content if file_content else None
+        except FileNotFoundError:
+            print(f"[red]ERROR: File not found: {file_path}[/red]")
+            sys.exit(1)
+        except Exception as e:
+            print(f"[red]ERROR: Failed to read file: {e}[/red]")
+            sys.exit(1)
+
     def main(self):
         """
         The main function of the PraisonAI object. It parses the command-line arguments,

@@ -189,15 +208,24 @@ class PraisonAI:

         # Check for piped input from stdin
         stdin_input = self.read_stdin_if_available()
+
+        # Check for file input if --file is provided
+        file_input = self.read_file_if_provided(getattr(args, 'file', None))

         if args.command:
             if args.command.startswith("tests.test") or args.command.startswith("tests/test"):  # Argument used for testing purposes
                 print("test")
                 return "test"
             else:
-                #
+                # Combine command with any available inputs (stdin and/or file)
+                combined_inputs = []
                 if stdin_input:
-
+                    combined_inputs.append(stdin_input)
+                if file_input:
+                    combined_inputs.append(file_input)
+
+                if combined_inputs:
+                    combined_prompt = f"{args.command} {' '.join(combined_inputs)}"
                 result = self.handle_direct_prompt(combined_prompt)
                 print(result)
                 return result

@@ -206,20 +234,30 @@ class PraisonAI:
         elif hasattr(args, 'direct_prompt') and args.direct_prompt:
             # Only handle direct prompt if agent_file wasn't explicitly set in constructor
             if original_agent_file == "agents.yaml":  # Default value, so safe to use direct prompt
-                #
-
+                # Combine direct prompt with any available inputs (stdin and/or file)
+                prompt_parts = [args.direct_prompt]
                 if stdin_input:
-
+                    prompt_parts.append(stdin_input)
+                if file_input:
+                    prompt_parts.append(file_input)
+                prompt = ' '.join(prompt_parts)
                 result = self.handle_direct_prompt(prompt)
                 print(result)
                 return result
             else:
                 # Agent file was explicitly set, ignore direct prompt and use the file
                 pass
-        elif stdin_input:
-            # If only stdin input is provided (no command), use it as direct prompt
-            if original_agent_file == "agents.yaml":  # Default value, so safe to use
-
+        elif stdin_input or file_input:
+            # If only stdin/file input is provided (no command), use it as direct prompt
+            if original_agent_file == "agents.yaml":  # Default value, so safe to use input as prompt
+                # Combine any available inputs
+                inputs = []
+                if stdin_input:
+                    inputs.append(stdin_input)
+                if file_input:
+                    inputs.append(file_input)
+                combined_input = ' '.join(inputs)
+                result = self.handle_direct_prompt(combined_input)
                 print(result)
                 return result
         # If no command or direct_prompt, preserve agent_file from constructor (don't overwrite)

@@ -530,6 +568,7 @@ class PraisonAI:
         parser.add_argument("--public", action="store_true", help="Use ngrok to expose the server publicly (only with --call)")
         parser.add_argument("--merge", action="store_true", help="Merge existing agents.yaml with auto-generated agents instead of overwriting")
         parser.add_argument("--claudecode", action="store_true", help="Enable Claude Code integration for file modifications and coding tasks")
+        parser.add_argument("--file", "-f", type=str, help="Read input from a file and append it to the prompt")

         # If we're in a test environment, parse with empty args to avoid pytest interference
         if in_test_env:
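For orientation, the hunks above add a --file/-f option whose content, like piped stdin input, is appended to the prompt before it is dispatched. The standalone sketch below mirrors that combining logic for illustration only; combine_prompt is a hypothetical helper, not a function in the package.

# Hypothetical illustration of the prompt-combining behaviour added in cli.py;
# not the packaged implementation.
def combine_prompt(command=None, stdin_input=None, file_input=None):
    """Build the prompt the CLI would dispatch, or return None if nothing was given."""
    extras = [part for part in (stdin_input, file_input) if part]
    if command:
        # Positional command with any piped/file input appended after it.
        return f"{command} {' '.join(extras)}" if extras else command
    # No command: stdin and/or file content alone becomes the prompt.
    return ' '.join(extras) if extras else None

# Example: a command plus piped text plus --file content, combined in that order.
print(combine_prompt("summarise this", "piped text", "file text"))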
praisonai/deploy.py CHANGED

@@ -1,5 +1,6 @@
 import subprocess
 import os
+import platform
 from dotenv import load_dotenv

 class CloudDeployer:

@@ -56,7 +57,7 @@ class CloudDeployer:
         file.write("FROM python:3.11-slim\n")
         file.write("WORKDIR /app\n")
         file.write("COPY . .\n")
-        file.write("RUN pip install flask praisonai==2.2.
+        file.write("RUN pip install flask praisonai==2.2.37 gunicorn markdown\n")
         file.write("EXPOSE 8080\n")
         file.write('CMD ["gunicorn", "-b", "0.0.0.0:8080", "api:app"]\n')

@@ -116,20 +117,66 @@ class CloudDeployer:
         self.create_api_file()
         self.create_dockerfile()
         """Runs a sequence of shell commands for deployment, continues on error."""
+
+        # Get project ID upfront for Windows compatibility
+        try:
+            result = subprocess.run(['gcloud', 'config', 'get-value', 'project'],
+                                    capture_output=True, text=True, check=True)
+            project_id = result.stdout.strip()
+        except subprocess.CalledProcessError:
+            print("ERROR: Failed to get GCP project ID. Ensure gcloud is configured.")
+            return
+
+        # Get environment variables
+        openai_model = os.environ.get('OPENAI_MODEL_NAME', 'gpt-4o')
+        openai_key = os.environ.get('OPENAI_API_KEY', 'Enter your API key')
+        openai_base = os.environ.get('OPENAI_API_BASE', 'https://api.openai.com/v1')
+
+        # Build commands with actual values
         commands = [
-
-
-
-
-
-
+            ['gcloud', 'auth', 'configure-docker', 'us-central1-docker.pkg.dev'],
+            ['gcloud', 'artifacts', 'repositories', 'create', 'praisonai-repository',
+             '--repository-format=docker', '--location=us-central1'],
+            ['docker', 'build', '--platform', 'linux/amd64', '-t',
+             f'gcr.io/{project_id}/praisonai-app:latest', '.'],
+            ['docker', 'tag', f'gcr.io/{project_id}/praisonai-app:latest',
+             f'us-central1-docker.pkg.dev/{project_id}/praisonai-repository/praisonai-app:latest'],
+            ['docker', 'push',
+             f'us-central1-docker.pkg.dev/{project_id}/praisonai-repository/praisonai-app:latest'],
+            ['gcloud', 'run', 'deploy', 'praisonai-service',
+             '--image', f'us-central1-docker.pkg.dev/{project_id}/praisonai-repository/praisonai-app:latest',
+             '--platform', 'managed', '--region', 'us-central1', '--allow-unauthenticated',
+             '--set-env-vars', f'OPENAI_MODEL_NAME={openai_model},OPENAI_API_KEY={openai_key},OPENAI_API_BASE={openai_base}']
         ]
-
-        for
+
+        # Run commands with appropriate handling for each platform
+        for i, cmd in enumerate(commands):
             try:
-
+                if i == 0:  # First command (gcloud auth configure-docker)
+                    if platform.system() != 'Windows':
+                        # On Unix, pipe 'yes' to auto-confirm
+                        proc = subprocess.Popen(cmd, stdin=subprocess.PIPE)
+                        proc.communicate(input=b'Y\n')
+                        if proc.returncode != 0:
+                            raise subprocess.CalledProcessError(proc.returncode, cmd)
+                    else:
+                        # On Windows, try with --quiet flag to avoid prompts
+                        cmd_with_quiet = cmd + ['--quiet']
+                        try:
+                            subprocess.run(cmd_with_quiet, check=True)
+                        except subprocess.CalledProcessError:
+                            # If --quiet fails, try without it
+                            print("Note: You may need to manually confirm the authentication prompt")
+                            subprocess.run(cmd, check=True)
+                else:
+                    # Run other commands normally
+                    subprocess.run(cmd, check=True)
             except subprocess.CalledProcessError as e:
-                print(f"ERROR: Command
+                print(f"ERROR: Command failed with exit status {e.returncode}")
+                # Commands 2 (build) and 4 (push) and 5 (deploy) are critical
+                if i in [2, 4, 5]:
+                    print("Critical command failed. Aborting deployment.")
+                    return
                 print(f"Continuing with the next command...")

 # Usage
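The core of the deploy.py change is resolving the GCP project ID up front and handling the confirmation prompt of the first command (gcloud auth configure-docker) differently per platform. The sketch below distils that pattern for illustration; run_with_auto_confirm is a hypothetical helper, not part of the package, and actually running it requires the gcloud CLI to be installed.

# Hypothetical helper illustrating the platform-aware auto-confirm pattern from deploy.py.
import platform
import subprocess


def run_with_auto_confirm(cmd):
    """Run an interactive command, auto-answering its confirmation prompt."""
    if platform.system() != 'Windows':
        # On Unix-like systems, pipe a 'Y' answer into the command's stdin.
        proc = subprocess.Popen(cmd, stdin=subprocess.PIPE)
        proc.communicate(input=b'Y\n')
        if proc.returncode != 0:
            raise subprocess.CalledProcessError(proc.returncode, cmd)
    else:
        # On Windows, prefer gcloud's --quiet flag; fall back to an interactive run.
        try:
            subprocess.run(cmd + ['--quiet'], check=True)
        except subprocess.CalledProcessError:
            print("Note: you may need to confirm the prompt manually")
            subprocess.run(cmd, check=True)


if __name__ == "__main__":
    # Example use, mirroring the first command in the deployment sequence.
    run_with_auto_confirm(['gcloud', 'auth', 'configure-docker', 'us-central1-docker.pkg.dev'])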
{praisonai-2.2.35.dist-info → praisonai-2.2.37.dist-info}/METADATA CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: PraisonAI
-Version: 2.2.
+Version: 2.2.37
 Summary: PraisonAI is an AI Agents Framework with Self Reflection. PraisonAI application combines PraisonAI Agents, AutoGen, and CrewAI into a low-code solution for building and managing multi-agent LLM systems, focusing on simplicity, customisation, and efficient human-agent collaboration.
 Author: Mervin Praison
 Requires-Python: >=3.10

@@ -64,7 +64,7 @@ Requires-Dist: playwright (>=1.47.0) ; extra == "code"
 Requires-Dist: plotly (>=5.24.0) ; extra == "realtime"
 Requires-Dist: praisonai-tools (>=0.0.15) ; extra == "autogen"
 Requires-Dist: praisonai-tools (>=0.0.15) ; extra == "crewai"
-Requires-Dist: praisonaiagents (>=0.0.
+Requires-Dist: praisonaiagents (>=0.0.110)
 Requires-Dist: pyautogen (>=0.2.19) ; extra == "autogen"
 Requires-Dist: pydantic (<=2.10.1) ; extra == "chat"
 Requires-Dist: pydantic (<=2.10.1) ; extra == "code"
{praisonai-2.2.35.dist-info → praisonai-2.2.37.dist-info}/RECORD CHANGED

@@ -5,8 +5,8 @@ praisonai/agents_generator.py,sha256=IMD5VTYL0fUEiCUcoADGAfe2tBtPHJa-tRmN8g525bM
 praisonai/api/call.py,sha256=-dV9DKNDi4w9vN6K63TUh15_PC0M5KzYOmBqHbuJqq0,11079
 praisonai/auto.py,sha256=0omuyIIuu-zBAXpsGo3JwuhX6zpjQg3ZtqbPtF5LZbg,12331
 praisonai/chainlit_ui.py,sha256=1lmqZ7_W9Pp1ueFYLvOq1YoH5NnKy3blssDrVvn95pc,12236
-praisonai/cli.py,sha256=
-praisonai/deploy.py,sha256=
+praisonai/cli.py,sha256=FGepKutQsDlpwVm21KXWu6G2ZSSdLjBa_mBgVHBnZLY,38260
+praisonai/deploy.py,sha256=4FcnIqZTiu_prunPXlOWa3zUzWcnKvLmV1sKnedyN_8,8255
 praisonai/inbuilt_tools/__init__.py,sha256=mZOEximj3zCyJHq9Lz0bGXhQpBsa_QR-R-yA9UKC3zI,565
 praisonai/inbuilt_tools/autogen_tools.py,sha256=kJdEv61BTYvdHOaURNEpBcWq8Rs-oC03loNFTIjT-ak,4687
 praisonai/inc/__init__.py,sha256=sPDlYBBwdk0VlWzaaM_lG0_LD07lS2HRGvPdxXJFiYg,62

@@ -74,7 +74,7 @@ praisonai/ui/sql_alchemy.py,sha256=ilWAWicUGja7ADbXW9_OgIYeyKNuAQ1ZI_RMqjmMI9k,2
 praisonai/ui/tools.md,sha256=Ad3YH_ZCLMWlz3mDXllQnQ_S5l55LWqLdcZSh-EXrHI,3956
 praisonai/upload_vision.py,sha256=lMpFn993UiYVJxRNZQTmcbPbEajQ5TFKCNGK1Icn_hg,5253
 praisonai/version.py,sha256=ugyuFliEqtAwQmH4sTlc16YXKYbFWDmfyk87fErB8-8,21
-praisonai-2.2.
-praisonai-2.2.
-praisonai-2.2.
-praisonai-2.2.
+praisonai-2.2.37.dist-info/METADATA,sha256=LMQOhjh_HZKAOxC0sVAiW4nrVP-j2Ow8QZ-xgaXNTZg,4762
+praisonai-2.2.37.dist-info/WHEEL,sha256=dCzwOzx-VmbmLA5u8QpkARaxx3rsePBxa1nmZphhNQk,110
+praisonai-2.2.37.dist-info/entry_points.txt,sha256=QSSfuXjZMhf16FZ201I_oSoX_s1nWYbi_4_UXPE3S-o,145
+praisonai-2.2.37.dist-info/RECORD,,
{praisonai-2.2.35.dist-info → praisonai-2.2.37.dist-info}/WHEEL
File without changes

{praisonai-2.2.35.dist-info → praisonai-2.2.37.dist-info}/entry_points.txt
File without changes