PraisonAI 0.0.59rc5__tar.gz → 0.0.59rc7__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of PraisonAI has been flagged as possibly problematic.
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/PKG-INFO +1 -1
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/praisonai/cli.py +58 -3
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/praisonai/deploy.py +1 -1
- praisonai-0.0.59rc7/praisonai/setup/config.yaml +58 -0
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/praisonai/setup/setup_conda_env.sh +22 -10
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/pyproject.toml +1 -1
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/LICENSE +0 -0
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/README.md +0 -0
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/praisonai/__init__.py +0 -0
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/praisonai/__main__.py +0 -0
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/praisonai/agents_generator.py +0 -0
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/praisonai/auto.py +0 -0
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/praisonai/chainlit_ui.py +0 -0
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/praisonai/inbuilt_tools/__init__.py +0 -0
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/praisonai/inbuilt_tools/autogen_tools.py +0 -0
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/praisonai/inc/__init__.py +0 -0
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/praisonai/inc/models.py +0 -0
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/praisonai/public/android-chrome-192x192.png +0 -0
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/praisonai/public/android-chrome-512x512.png +0 -0
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/praisonai/public/apple-touch-icon.png +0 -0
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/praisonai/public/fantasy.svg +0 -0
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/praisonai/public/favicon-16x16.png +0 -0
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/praisonai/public/favicon-32x32.png +0 -0
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/praisonai/public/favicon.ico +0 -0
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/praisonai/public/game.svg +0 -0
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/praisonai/public/logo_dark.png +0 -0
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/praisonai/public/logo_light.png +0 -0
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/praisonai/public/movie.svg +0 -0
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/praisonai/public/thriller.svg +0 -0
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/praisonai/setup/__init__.py +0 -0
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/praisonai/setup/build.py +0 -0
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/praisonai/setup/post_install.py +0 -0
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/praisonai/setup/setup_conda_env.py +0 -0
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/praisonai/test.py +0 -0
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/praisonai/train.py +0 -0
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/praisonai/ui/chat.py +0 -0
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/praisonai/ui/code.py +0 -0
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/praisonai/ui/context.py +0 -0
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/praisonai/ui/public/fantasy.svg +0 -0
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/praisonai/ui/public/game.svg +0 -0
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/praisonai/ui/public/logo_dark.png +0 -0
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/praisonai/ui/public/logo_light.png +0 -0
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/praisonai/ui/public/movie.svg +0 -0
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/praisonai/ui/public/thriller.svg +0 -0
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/praisonai/ui/sql_alchemy.py +0 -0
- {praisonai-0.0.59rc5 → praisonai-0.0.59rc7}/praisonai/version.py +0 -0
--- praisonai-0.0.59rc5/PKG-INFO
+++ praisonai-0.0.59rc7/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: PraisonAI
-Version: 0.0.59rc5
+Version: 0.0.59rc7
 Summary: PraisonAI application combines AutoGen and CrewAI or similar frameworks into a low-code solution for building and managing multi-agent LLM systems, focusing on simplicity, customization, and efficient human-agent collaboration.
 Author: Mervin Praison
 Requires-Python: >=3.10,<3.13

--- praisonai-0.0.59rc5/praisonai/cli.py
+++ praisonai-0.0.59rc7/praisonai/cli.py
@@ -13,6 +13,7 @@ from .auto import AutoGenerator
 from .agents_generator import AgentsGenerator
 from .inbuilt_tools import *
 import shutil
+import subprocess
 import logging
 logging.basicConfig(level=os.environ.get('LOGLEVEL', 'INFO'), format='%(asctime)s - %(levelname)s - %(message)s')
 
@@ -27,6 +28,32 @@ try:
     GRADIO_AVAILABLE = True
 except ImportError:
     GRADIO_AVAILABLE = False
+
+def stream_subprocess(command):
+    """
+    Execute a subprocess command and stream the output to the terminal in real-time.
+
+    Args:
+        command (list): A list containing the command and its arguments.
+    """
+    process = subprocess.Popen(
+        command,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.STDOUT,
+        text=True,
+        bufsize=1,
+        universal_newlines=True
+    )
+
+    for line in iter(process.stdout.readline, ''):
+        print(line, end='')
+        sys.stdout.flush()  # Ensure output is flushed immediately
+
+    process.stdout.close()
+    return_code = process.wait()
+
+    if return_code != 0:
+        raise subprocess.CalledProcessError(return_code, command)
 
 class PraisonAI:
     def __init__(self, agent_file="agents.yaml", framework="", auto=False, init=False, agent_yaml=None):
@@ -100,12 +127,40 @@
             return
 
         if args.agent_file == 'train':
+            package_root = os.path.dirname(os.path.abspath(__file__))
+            config_yaml_source = os.path.join(package_root, 'setup', 'config.yaml')
+            config_yaml_destination = os.path.join(os.getcwd(), 'config.yaml')
+
+            if not os.path.exists(config_yaml_destination):
+                try:
+                    shutil.copyfile(config_yaml_source, config_yaml_destination)
+                    print("config.yaml copied to the current directory.")
+                except FileExistsError:
+                    print("config.yaml already exists in the current directory. Skipping copy.")
+            else:
+                print("config.yaml already exists in the current directory. Skipping copy.")
+
             if 'init' in sys.argv:
                 from praisonai.setup.setup_conda_env import main as setup_conda_main
                 setup_conda_main()
-
-
-
+                print("All packages installed")
+                return
+
+            try:
+                result = subprocess.check_output(['conda', 'env', 'list'])
+                if 'unsloth_env' in result.decode('utf-8'):
+                    print("Conda environment 'unsloth_env' found.")
+                else:
+                    raise subprocess.CalledProcessError(1, 'grep')
+            except subprocess.CalledProcessError:
+                print("Conda environment 'unsloth_env' not found. Setting it up...")
+                from praisonai.setup.setup_conda_env import main as setup_conda_main
+                setup_conda_main()
+                print("All packages installed.")
+
+            train_args = sys.argv[2:]  # Get all arguments after 'train'
+            train_script_path = os.path.join(package_root, 'train.py')
+            stream_subprocess(['conda', 'run', '--name', 'unsloth_env', 'python', train_script_path, 'train'])
             return
 
         invocation_cmd = "praisonai"

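Taken together, the cli.py changes make `praisonai train` copy a default config.yaml into the working directory, set up the `unsloth_env` conda environment if `conda env list` does not show it, and then launch train.py via `conda run` through the new `stream_subprocess` helper so training output is echoed line by line. The snippet below is a standalone sketch of that streaming pattern; the final call is illustrative only and streams a trivial Python command rather than the real training run.

```python
import subprocess
import sys

def stream_subprocess(command):
    """Run `command` and echo its combined stdout/stderr line by line."""
    process = subprocess.Popen(
        command,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,  # merge stderr into stdout
        text=True,
        bufsize=1,                 # line-buffered
    )
    for line in iter(process.stdout.readline, ''):
        print(line, end='')
        sys.stdout.flush()         # show output as soon as it arrives
    process.stdout.close()
    if process.wait() != 0:
        raise subprocess.CalledProcessError(process.returncode, command)

# Illustrative only: stream a trivial command instead of the actual `conda run ... train.py` call.
stream_subprocess([sys.executable, '-c', 'print("hello from a streamed subprocess")'])
```
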
--- praisonai-0.0.59rc5/praisonai/deploy.py
+++ praisonai-0.0.59rc7/praisonai/deploy.py
@@ -56,7 +56,7 @@ class CloudDeployer:
             file.write("FROM python:3.11-slim\n")
             file.write("WORKDIR /app\n")
             file.write("COPY . .\n")
-            file.write("RUN pip install flask praisonai==0.0.59rc5 gunicorn markdown\n")
+            file.write("RUN pip install flask praisonai==0.0.59rc7 gunicorn markdown\n")
             file.write("EXPOSE 8080\n")
             file.write('CMD ["gunicorn", "-b", "0.0.0.0:8080", "api:app"]\n')
 
--- /dev/null
+++ praisonai-0.0.59rc7/praisonai/setup/config.yaml
@@ -0,0 +1,58 @@
+model_name: "unsloth/Meta-Llama-3.1-8B-Instruct-bnb-4bit"
+hf_model_name: "mervinpraison/llama-3.1-tamilan-8B-test"
+max_seq_length: 2048
+load_in_4bit: true
+lora_r: 16
+lora_target_modules:
+  - "q_proj"
+  - "k_proj"
+  - "v_proj"
+  - "o_proj"
+  - "gate_proj"
+  - "up_proj"
+  - "down_proj"
+lora_alpha: 16
+lora_dropout: 0
+lora_bias: "none"
+use_gradient_checkpointing: "unsloth"
+random_state: 3407
+use_rslora: false
+loftq_config: null
+
+dataset:
+  - name: "yahma/alpaca-cleaned"
+    split_type: "train"
+    processing_func: "format_prompts"
+    rename:
+      input: "input"
+      output: "output"
+      instruction: "instruction"
+    filter_data: false
+    filter_column_value: "id"
+    filter_value: "alpaca"
+    num_samples: 20000
+
+dataset_text_field: "text"
+dataset_num_proc: 2
+packing: false
+
+per_device_train_batch_size: 2
+gradient_accumulation_steps: 2
+warmup_steps: 5
+num_train_epochs: 1
+max_steps: 10
+learning_rate: 2.0e-4
+logging_steps: 1
+optim: "adamw_8bit"
+weight_decay: 0.01
+lr_scheduler_type: "linear"
+seed: 3407
+output_dir: "outputs"
+
+quantization_method:
+  - "q4_k_m"
+  - "q8_0"
+  - "q5_k_m"
+
+ollama_model: "llama3.1-tamilan-test"
+model_parameters: "8b"

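The new setup/config.yaml bundles the Unsloth fine-tuning defaults (base model, LoRA settings, dataset selection, trainer hyperparameters, GGUF quantization methods, and the Ollama export name) that `praisonai train` copies next to the user's project. The diff does not show how train.py consumes this file, so the following is only a hedged sketch, assuming PyYAML is available, of loading the copied config and inspecting a few fields.

```python
import yaml  # PyYAML

# Read the config.yaml that `praisonai train` copies into the current directory.
with open("config.yaml", "r") as f:
    config = yaml.safe_load(f)

print(config["model_name"])                    # unsloth/Meta-Llama-3.1-8B-Instruct-bnb-4bit
print(config["lora_r"], config["lora_alpha"])  # 16 16
print(config["dataset"][0]["name"])            # yahma/alpaca-cleaned
print(config["quantization_method"])           # ['q4_k_m', 'q8_0', 'q5_k_m']
```
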
--- praisonai-0.0.59rc5/praisonai/setup/setup_conda_env.sh
+++ praisonai-0.0.59rc7/praisonai/setup/setup_conda_env.sh
@@ -35,8 +35,16 @@ fi
 # Create and activate the Conda environment
 ENV_NAME="unsloth_env"
 if conda info --envs | grep -q $ENV_NAME; then
-    echo "Environment $ENV_NAME already exists."
-    conda activate $ENV_NAME
+    echo "Environment $ENV_NAME already exists. Recreating..."
+    conda env remove -y -n $ENV_NAME # Remove existing environment
+    if [[ "$OSTYPE" == "darwin"* ]]; then
+        # macOS (both Intel and M1/M2)
+        conda create --name $ENV_NAME python=3.10 pytorch=2.3.0 -c pytorch -y
+    elif [[ "$OSTYPE" == "linux-gnu"* ]]; then
+        # Linux
+        conda create --name $ENV_NAME python=3.10 pytorch=2.3.0 cudatoolkit=11.8 -c pytorch -c nvidia -y
+    fi
+    # conda activate $ENV_NAME
 else
     echo "Creating new environment $ENV_NAME..."
     if [[ "$OSTYPE" == "darwin"* ]]; then
@@ -45,16 +53,20 @@ else
     elif [[ "$OSTYPE" == "linux-gnu"* ]]; then
         # Linux
         conda create --name $ENV_NAME python=3.10 pytorch=2.3.0 cudatoolkit=11.8 -c pytorch -c nvidia -y
-        conda activate $ENV_NAME
-        pip install xformers==0.0.26.post1
     fi
+    # conda activate $ENV_NAME
 fi
 
-source $HOME/miniconda/bin/activate $ENV_NAME
+# source $HOME/miniconda/bin/activate $ENV_NAME
+
+# Get full path of pip
+PIP_FULL_PATH=$(conda run -n $ENV_NAME which pip)
 
-# Install other packages
-
-
-
+# Install other packages within the activated environment
+# Use PIP_FULL_PATH to run pip commands
+$PIP_FULL_PATH install --upgrade pip
+$PIP_FULL_PATH install "xformers==0.0.26.post1"
+$PIP_FULL_PATH install "unsloth[colab-new] @ git+https://github.com/unslothai/unsloth.git@4e570be9ae4ced8cdc64e498125708e34942befc"
+$PIP_FULL_PATH install --no-deps "trl<0.9.0" peft accelerate bitsandbytes
 
-echo "Setup completed successfully!"
+echo "Setup completed successfully!"

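With activation commented out, the script now relies on `conda run` and the environment's own pip (PIP_FULL_PATH) to install xformers, the pinned unsloth commit, trl, peft, accelerate, and bitsandbytes. The check below is not part of the package; it is a minimal sketch, assuming `conda` is on the PATH and `unsloth_env` was created as above, that asks the environment to import a few of those packages as a post-install sanity check.

```python
import subprocess

# Illustrative post-install check for the unsloth_env environment built by setup_conda_env.sh.
# Assumes `conda` is available on PATH; not part of PraisonAI itself.
result = subprocess.run(
    ["conda", "run", "-n", "unsloth_env", "python", "-c",
     "import torch, peft, accelerate, bitsandbytes; print(torch.__version__)"],
    capture_output=True, text=True,
)
print(result.stdout or result.stderr)
```
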
--- praisonai-0.0.59rc5/pyproject.toml
+++ praisonai-0.0.59rc7/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "PraisonAI"
-version = "0.0.59rc5"
+version = "0.0.59rc7"
 description = "PraisonAI application combines AutoGen and CrewAI or similar frameworks into a low-code solution for building and managing multi-agent LLM systems, focusing on simplicity, customization, and efficient human-agent collaboration."
 authors = ["Mervin Praison"]
 license = ""