PraisonAI 0.0.42__tar.gz → 0.0.44__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release. This version of PraisonAI might be problematic.
- {praisonai-0.0.42 → praisonai-0.0.44}/PKG-INFO +7 -2
- {praisonai-0.0.42 → praisonai-0.0.44}/README.md +3 -0
- {praisonai-0.0.42 → praisonai-0.0.44}/praisonai/cli.py +28 -1
- {praisonai-0.0.42 → praisonai-0.0.44}/praisonai/deploy.py +1 -1
- praisonai-0.0.44/praisonai/public/android-chrome-192x192.png +0 -0
- praisonai-0.0.44/praisonai/public/android-chrome-512x512.png +0 -0
- praisonai-0.0.44/praisonai/public/apple-touch-icon.png +0 -0
- praisonai-0.0.44/praisonai/public/favicon-16x16.png +0 -0
- praisonai-0.0.44/praisonai/public/favicon-32x32.png +0 -0
- praisonai-0.0.44/praisonai/public/favicon.ico +0 -0
- praisonai-0.0.44/praisonai/public/logo_dark.png +0 -0
- praisonai-0.0.44/praisonai/public/logo_light.png +0 -0
- praisonai-0.0.44/praisonai/ui/chat.py +432 -0
- praisonai-0.0.44/praisonai/ui/public/fantasy.svg +3 -0
- praisonai-0.0.44/praisonai/ui/public/game.svg +3 -0
- praisonai-0.0.44/praisonai/ui/public/logo_dark.png +0 -0
- praisonai-0.0.44/praisonai/ui/public/logo_light.png +0 -0
- praisonai-0.0.44/praisonai/ui/public/movie.svg +3 -0
- praisonai-0.0.44/praisonai/ui/public/thriller.svg +3 -0
- {praisonai-0.0.42 → praisonai-0.0.44}/pyproject.toml +5 -2
- {praisonai-0.0.42 → praisonai-0.0.44}/LICENSE +0 -0
- {praisonai-0.0.42 → praisonai-0.0.44}/praisonai/__init__.py +0 -0
- {praisonai-0.0.42 → praisonai-0.0.44}/praisonai/__main__.py +0 -0
- {praisonai-0.0.42 → praisonai-0.0.44}/praisonai/agents_generator.py +0 -0
- {praisonai-0.0.42 → praisonai-0.0.44}/praisonai/auto.py +0 -0
- {praisonai-0.0.42 → praisonai-0.0.44}/praisonai/chainlit_ui.py +0 -0
- {praisonai-0.0.42 → praisonai-0.0.44}/praisonai/inbuilt_tools/__init__.py +0 -0
- {praisonai-0.0.42 → praisonai-0.0.44}/praisonai/inbuilt_tools/autogen_tools.py +0 -0
- {praisonai-0.0.42 → praisonai-0.0.44}/praisonai/inc/__init__.py +0 -0
- {praisonai-0.0.42 → praisonai-0.0.44}/praisonai/inc/models.py +0 -0
- {praisonai-0.0.42 → praisonai-0.0.44}/praisonai/public/fantasy.svg +0 -0
- {praisonai-0.0.42 → praisonai-0.0.44}/praisonai/public/game.svg +0 -0
- {praisonai-0.0.42 → praisonai-0.0.44}/praisonai/public/movie.svg +0 -0
- {praisonai-0.0.42 → praisonai-0.0.44}/praisonai/public/thriller.svg +0 -0
- {praisonai-0.0.42 → praisonai-0.0.44}/praisonai/test.py +0 -0
- {praisonai-0.0.42 → praisonai-0.0.44}/praisonai/version.py +0 -0

{praisonai-0.0.42 → praisonai-0.0.44}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: PraisonAI
-Version: 0.0.42
+Version: 0.0.44
 Summary: PraisonAI application combines AutoGen and CrewAI or similar frameworks into a low-code solution for building and managing multi-agent LLM systems, focusing on simplicity, customization, and efficient human-agent collaboration.
 Author: Mervin Praison
 Requires-Python: >=3.10,<3.13
@@ -11,13 +11,14 @@ Classifier: Programming Language :: Python :: 3.12
 Provides-Extra: agentops
 Provides-Extra: anthropic
 Provides-Extra: api
+Provides-Extra: chat
 Provides-Extra: cohere
 Provides-Extra: google
 Provides-Extra: gradio
 Provides-Extra: openai
 Provides-Extra: ui
 Requires-Dist: agentops (>=0.2.6) ; extra == "agentops"
-Requires-Dist: chainlit (>=1.1.301,<2.0.0) ; extra == "ui"
+Requires-Dist: chainlit (>=1.1.301,<2.0.0) ; extra == "ui" or extra == "chat"
 Requires-Dist: crewai (>=0.32.0)
 Requires-Dist: flask (>=3.0.0) ; extra == "api"
 Requires-Dist: gradio (>=4.26.0) ; extra == "gradio"
@@ -25,6 +26,7 @@ Requires-Dist: langchain-anthropic (>=0.1.13) ; extra == "anthropic"
 Requires-Dist: langchain-cohere (>=0.1.4) ; extra == "cohere"
 Requires-Dist: langchain-google-genai (>=1.0.4) ; extra == "google"
 Requires-Dist: langchain-openai (>=0.1.7) ; extra == "openai"
+Requires-Dist: litellm (>=1.41.8) ; extra == "chat"
 Requires-Dist: markdown (>=3.5)
 Requires-Dist: praisonai-tools (>=0.0.7)
 Requires-Dist: pyautogen (>=0.2.19)
@@ -41,9 +43,12 @@ Description-Content-Type: text/markdown
   <img alt="PraisonAI Logo" src="docs/images/praisonai-logo-black-large.png">
 </picture>
 </p>
+<div align="center">
 
 # Praison AI
 
+</div>
+
 Praison AI, leveraging both AutoGen and CrewAI or any other agent framework, represents a low-code, centralised framework designed to simplify the creation and orchestration of multi-agent systems for various LLM applications, emphasizing ease of use, customization, and human-agent interaction.
 
 ## TL;DR

{praisonai-0.0.42 → praisonai-0.0.44}/README.md

@@ -5,9 +5,12 @@
   <img alt="PraisonAI Logo" src="docs/images/praisonai-logo-black-large.png">
 </picture>
 </p>
+<div align="center">
 
 # Praison AI
 
+</div>
+
 Praison AI, leveraging both AutoGen and CrewAI or any other agent framework, represents a low-code, centralised framework designed to simplify the creation and orchestration of multi-agent systems for various LLM applications, emphasizing ease of use, customization, and human-agent interaction.
 
 ## TL;DR

{praisonai-0.0.42 → praisonai-0.0.44}/praisonai/cli.py

@@ -84,6 +84,11 @@ class PraisonAI:
             deployer = CloudDeployer()
             deployer.run_commands()
             return
+
+        if getattr(args, 'chat', False):
+            self.create_chainlit_chat_interface()
+            return
+
         invocation_cmd = "praisonai"
         version_string = f"PraisonAI version {__version__}"
 
@@ -163,8 +168,30 @@ class PraisonAI:
             args.agent_file = 'agents.yaml'
         if args.agent_file == 'ui':
             args.ui = 'chainlit'
+        if args.agent_file == 'chat':
+            args.ui = 'chainlit'
+            args.chat = True
 
         return args
+
+    def create_chainlit_chat_interface(self):
+        """
+        Create a Chainlit interface for the chat application.
+
+        This function sets up a Chainlit application that listens for messages.
+        When a message is received, it runs PraisonAI with the provided message as the topic.
+        The generated agents are then used to perform tasks.
+
+        Returns:
+            None: This function does not return any value. It starts the Chainlit application.
+        """
+        if CHAINLIT_AVAILABLE:
+            import praisonai
+            os.environ["CHAINLIT_PORT"] = "8084"
+            chat_ui_path = os.path.join(os.path.dirname(praisonai.__file__), 'ui', 'chat.py')
+            chainlit_run([chat_ui_path])
+        else:
+            print("ERROR: Chat UI is not installed. Please install it with 'pip install \"praisonai\[chat]\"' to use the chat UI.")
 
     def create_gradio_interface(self):
         """
@@ -247,7 +274,7 @@ class PraisonAI:
             chainlit_ui_path = os.path.join(os.path.dirname(praisonai.__file__), 'chainlit_ui.py')
             chainlit_run([chainlit_ui_path])
         else:
-            print("ERROR: Chainlit is not installed. Please install it with 'pip install
+            print("ERROR: Chainlit is not installed. Please install it with 'pip install \"praisonai\[ui]\"' to use the UI.")
 
 if __name__ == "__main__":
     praison_ai = PraisonAI()
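
For reference: the new `chat` value handled in parse_args above sets `args.chat = True`, which routes into `create_chainlit_chat_interface()`; that method serves the bundled `ui/chat.py` through Chainlit on port 8084, and its error message points to `pip install "praisonai[chat]"` when the optional dependencies are missing. A minimal sketch (not included in the package) that only mirrors those lines and resolves the bundled chat app, assuming praisonai is importable:

import os
import praisonai

# Mirror of create_chainlit_chat_interface() above: locate the chat UI script that
# ships inside the installed praisonai package and set the port Chainlit listens on.
os.environ["CHAINLIT_PORT"] = "8084"
chat_ui_path = os.path.join(os.path.dirname(praisonai.__file__), "ui", "chat.py")
print(chat_ui_path)  # the script Chainlit serves, e.g. via `chainlit run <this path>`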

{praisonai-0.0.42 → praisonai-0.0.44}/praisonai/deploy.py

@@ -56,7 +56,7 @@ class CloudDeployer:
             file.write("FROM python:3.11-slim\n")
             file.write("WORKDIR /app\n")
             file.write("COPY . .\n")
-            file.write("RUN pip install flask praisonai==0.0.
+            file.write("RUN pip install flask praisonai==0.0.44 gunicorn markdown\n")
             file.write("EXPOSE 8080\n")
             file.write('CMD ["gunicorn", "-b", "0.0.0.0:8080", "api:app"]\n')
 

Binary files (new, content not shown): praisonai-0.0.44/praisonai/public/android-chrome-192x192.png, android-chrome-512x512.png, apple-touch-icon.png, favicon-16x16.png, favicon-32x32.png, favicon.ico, logo_dark.png, logo_light.png

praisonai-0.0.44/praisonai/ui/chat.py (new file, +432 lines)

import chainlit as cl
from chainlit.input_widget import TextInput
from chainlit.types import ThreadDict
from litellm import acompletion
import os
import sqlite3
from datetime import datetime
from typing import Dict, List, Optional
from dotenv import load_dotenv
load_dotenv()
import chainlit.data as cl_data
from chainlit.step import StepDict
from literalai.helper import utc_now
import logging

# Set up logging
logger = logging.getLogger(__name__)
log_level = os.getenv("LOGLEVEL", "INFO").upper()
logger.handlers = []

# Set up logging to console
console_handler = logging.StreamHandler()
console_handler.setLevel(log_level)
console_formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
console_handler.setFormatter(console_formatter)
logger.addHandler(console_handler)

# Set the logging level for the logger
logger.setLevel(log_level)

now = utc_now()

create_step_counter = 0

import json

DB_PATH = "threads.db"

def initialize_db():
    conn = sqlite3.connect(DB_PATH)
    cursor = conn.cursor()
    cursor.execute('''
        CREATE TABLE IF NOT EXISTS threads (
            id TEXT PRIMARY KEY,
            name TEXT,
            createdAt TEXT,
            userId TEXT,
            userIdentifier TEXT
        )
    ''')
    cursor.execute('''
        CREATE TABLE IF NOT EXISTS steps (
            id TEXT PRIMARY KEY,
            threadId TEXT,
            name TEXT,
            createdAt TEXT,
            type TEXT,
            output TEXT,
            FOREIGN KEY (threadId) REFERENCES threads (id)
        )
    ''')
    cursor.execute('''
        CREATE TABLE IF NOT EXISTS settings (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            key TEXT UNIQUE,
            value TEXT
        )
    ''')
    conn.commit()
    conn.close()

def save_setting(key: str, value: str):
    """Saves a setting to the database.

    Args:
        key: The setting key.
        value: The setting value.
    """
    conn = sqlite3.connect(DB_PATH)
    cursor = conn.cursor()
    cursor.execute(
        """
        INSERT OR REPLACE INTO settings (id, key, value)
        VALUES ((SELECT id FROM settings WHERE key = ?), ?, ?)
        """,
        (key, key, value),
    )
    conn.commit()
    conn.close()

def load_setting(key: str) -> str:
    """Loads a setting from the database.

    Args:
        key: The setting key.

    Returns:
        The setting value, or None if the key is not found.
    """
    conn = sqlite3.connect(DB_PATH)
    cursor = conn.cursor()
    cursor.execute('SELECT value FROM settings WHERE key = ?', (key,))
    result = cursor.fetchone()
    conn.close()
    return result[0] if result else None

def save_thread_to_db(thread):
    conn = sqlite3.connect(DB_PATH)
    cursor = conn.cursor()
    cursor.execute('''
        INSERT OR REPLACE INTO threads (id, name, createdAt, userId, userIdentifier)
        VALUES (?, ?, ?, ?, ?)
    ''', (thread['id'], thread['name'], thread['createdAt'], thread['userId'], thread['userIdentifier']))

    # No steps to save as steps are empty in the provided thread data
    conn.commit()
    conn.close()
    logger.debug("Thread saved to DB")

def update_thread_in_db(thread):
    conn = sqlite3.connect(DB_PATH)
    cursor = conn.cursor()

    # Insert or update the thread
    cursor.execute('''
        INSERT OR REPLACE INTO threads (id, name, createdAt, userId, userIdentifier)
        VALUES (?, ?, ?, ?, ?)
    ''', (thread['id'], thread['name'], thread['createdAt'], thread['userId'], thread['userIdentifier']))

    # Fetch message_history from metadata
    message_history = cl.user_session.get("message_history", [])

    # Ensure user messages come first followed by assistant messages
    user_messages = [msg for msg in message_history if msg['role'] == 'user']
    assistant_messages = [msg for msg in message_history if msg['role'] == 'assistant']
    ordered_steps = [val for pair in zip(user_messages, assistant_messages) for val in pair]

    # Generate steps from ordered message_history
    steps = []
    for idx, message in enumerate(ordered_steps):
        step_id = f"{thread['id']}-step-{idx}"
        step_type = 'user_message' if message['role'] == 'user' else 'assistant_message'
        step_name = 'user' if message['role'] == 'user' else 'assistant'
        created_at = message.get('createdAt', thread['createdAt'])  # Use thread's createdAt if no timestamp in message
        steps.append({
            'id': step_id,
            'threadId': thread['id'],
            'name': step_name,
            'createdAt': created_at,
            'type': step_type,
            'output': message['content']
        })

    # Insert all steps into the database
    for step in steps:
        cursor.execute('''
            INSERT OR REPLACE INTO steps (id, threadId, name, createdAt, type, output)
            VALUES (?, ?, ?, ?, ?, ?)
        ''', (step['id'], step['threadId'], step['name'], step['createdAt'], step['type'], step['output']))

    conn.commit()
    conn.close()
    logger.debug("Thread updated in DB")

def delete_thread_from_db(thread_id: str):
    """Deletes a thread and its steps from the database.

    Args:
        thread_id: The ID of the thread to delete.
    """
    conn = sqlite3.connect(DB_PATH)
    cursor = conn.cursor()
    cursor.execute('DELETE FROM threads WHERE id = ?', (thread_id,))
    cursor.execute('DELETE FROM steps WHERE threadId = ?', (thread_id,))
    conn.commit()
    conn.close()

def load_threads_from_db():
    conn = sqlite3.connect(DB_PATH)
    cursor = conn.cursor()
    cursor.execute('SELECT * FROM threads ORDER BY createdAt ASC')
    thread_rows = cursor.fetchall()
    threads = []
    for thread_row in thread_rows:
        cursor.execute('SELECT * FROM steps WHERE threadId = ? ORDER BY createdAt ASC', (thread_row[0],))
        step_rows = cursor.fetchall()
        steps = []
        for step_row in step_rows:
            steps.append({
                "id": step_row[0],
                "threadId": step_row[1],
                "name": step_row[2],
                "createdAt": step_row[3],
                "type": step_row[4],
                "output": step_row[5]
            })
        threads.append({
            "id": thread_row[0],
            "name": thread_row[1],
            "createdAt": thread_row[2],
            "userId": thread_row[3],
            "userIdentifier": thread_row[4],
            "steps": steps
        })
    conn.close()
    logger.debug("Threads loaded from DB")
    return threads

# Initialize the database
initialize_db()
thread_history = load_threads_from_db()

deleted_thread_ids = []  # type: List[str]

class TestDataLayer(cl_data.BaseDataLayer):
    async def get_user(self, identifier: str):
        logger.debug(f"Getting user: {identifier}")
        return cl.PersistedUser(id="test", createdAt=now, identifier=identifier)

    async def create_user(self, user: cl.User):
        logger.debug(f"Creating user: {user.identifier}")
        return cl.PersistedUser(id="test", createdAt=now, identifier=user.identifier)

    async def update_thread(
        self,
        thread_id: str,
        name: Optional[str] = None,
        user_id: Optional[str] = None,
        metadata: Optional[Dict] = None,
        tags: Optional[List[str]] = None,
    ):
        logger.debug(f"Updating thread: {thread_id}")
        thread = next((t for t in thread_history if t["id"] == thread_id), None)
        if thread:
            if name:
                thread["name"] = name
            if metadata:
                thread["metadata"] = metadata
            if tags:
                thread["tags"] = tags

            logger.debug(f"Thread: {thread}")
            cl.user_session.set("message_history", thread['metadata']['message_history'])
            cl.user_session.set("thread_id", thread["id"])
            update_thread_in_db(thread)
            logger.debug(f"Thread updated: {thread_id}")

        else:
            thread_history.append(
                {
                    "id": thread_id,
                    "name": name,
                    "metadata": metadata,
                    "tags": tags,
                    "createdAt": utc_now(),
                    "userId": user_id,
                    "userIdentifier": "admin",
                    "steps": [],
                }
            )
            thread = {
                "id": thread_id,
                "name": name,
                "metadata": metadata,
                "tags": tags,
                "createdAt": utc_now(),
                "userId": user_id,
                "userIdentifier": "admin",
                "steps": [],
            }
            save_thread_to_db(thread)
            logger.debug(f"Thread created: {thread_id}")

    @cl_data.queue_until_user_message()
    async def create_step(self, step_dict: StepDict):
        global create_step_counter
        create_step_counter += 1

        thread = next(
            (t for t in thread_history if t["id"] == step_dict.get("threadId")), None
        )
        if thread:
            thread["steps"].append(step_dict)

    async def get_thread_author(self, thread_id: str):
        logger.debug(f"Getting thread author: {thread_id}")
        return "admin"

    async def list_threads(
        self, pagination: cl_data.Pagination, filters: cl_data.ThreadFilter
    ) -> cl_data.PaginatedResponse[cl_data.ThreadDict]:
        logger.debug(f"Listing threads")
        return cl_data.PaginatedResponse(
            data=[t for t in thread_history if t["id"] not in deleted_thread_ids][::-1],
            pageInfo=cl_data.PageInfo(
                hasNextPage=False, startCursor=None, endCursor=None
            ),
        )

    async def get_thread(self, thread_id: str):
        logger.debug(f"Getting thread: {thread_id}")
        thread_history = load_threads_from_db()
        return next((t for t in thread_history if t["id"] == thread_id), None)

    async def delete_thread(self, thread_id: str):
        deleted_thread_ids.append(thread_id)
        delete_thread_from_db(thread_id)
        logger.debug(f"Deleted thread: {thread_id}")

cl_data._data_layer = TestDataLayer()

@cl.on_chat_start
async def start():
    initialize_db()
    model_name = load_setting("model_name")

    if model_name:
        cl.user_session.set("model_name", model_name)
    else:
        # If no setting found, use default or environment variable
        model_name = os.getenv("MODEL_NAME", "gpt-3.5-turbo")
        cl.user_session.set("model_name", model_name)
    logger.debug(f"Model name: {model_name}")
    settings = cl.ChatSettings(
        [
            TextInput(
                id="model_name",
                label="Enter the Model Name",
                placeholder="e.g., gpt-3.5-turbo",
                initial=model_name
            )
        ]
    )
    cl.user_session.set("settings", settings)
    await settings.send()

@cl.on_settings_update
async def setup_agent(settings):
    logger.debug(settings)
    cl.user_session.set("settings", settings)
    model_name = settings["model_name"]
    cl.user_session.set("model_name", model_name)

    # Save in settings table
    save_setting("model_name", model_name)

    # Save in thread metadata
    thread_id = cl.user_session.get("thread_id")
    if thread_id:
        thread = await cl_data.get_thread(thread_id)
        if thread:
            metadata = thread.get("metadata", {})
            metadata["model_name"] = model_name
            await cl_data.update_thread(thread_id, metadata=metadata)

@cl.on_message
async def main(message: cl.Message):
    model_name = load_setting("model_name") or os.getenv("MODEL_NAME") or "gpt-3.5-turbo"
    message_history = cl.user_session.get("message_history", [])
    message_history.append({"role": "user", "content": message.content})

    msg = cl.Message(content="")
    await msg.send()

    response = await acompletion(
        model=model_name,
        messages=message_history,
        stream=True,
        temperature=0.7,
        max_tokens=500,
        top_p=1
    )

    full_response = ""
    async for part in response:
        if token := part['choices'][0]['delta']['content']:
            await msg.stream_token(token)
            full_response += token
    logger.debug(f"Full response: {full_response}")
    message_history.append({"role": "assistant", "content": full_response})
    logger.debug(f"Message history: {message_history}")
    cl.user_session.set("message_history", message_history)
    await msg.update()

username = os.getenv("CHAINLIT_USERNAME", "admin")  # Default to "admin" if not found
password = os.getenv("CHAINLIT_PASSWORD", "admin")  # Default to "admin" if not found

@cl.password_auth_callback
def auth_callback(username: str, password: str):
    if (username, password) == (username, password):
        return cl.User(
            identifier=username, metadata={"role": "ADMIN", "provider": "credentials"}
        )
    else:
        return None

async def send_count():
    await cl.Message(
        f"Create step counter: {create_step_counter}", disable_feedback=True
    ).send()

@cl.on_chat_resume
async def on_chat_resume(thread: cl_data.ThreadDict):
    logger.info(f"Resuming chat: {thread['id']}")
    model_name = load_setting("model_name") or os.getenv("MODEL_NAME") or "gpt-3.5-turbo"
    logger.debug(f"Model name: {model_name}")
    settings = cl.ChatSettings(
        [
            TextInput(
                id="model_name",
                label="Enter the Model Name",
                placeholder="e.g., gpt-3.5-turbo",
                initial=model_name
            )
        ]
    )
    await settings.send()
    thread_id = thread["id"]
    cl.user_session.set("thread_id", thread["id"])
    message_history = cl.user_session.get("message_history", [])
    steps = thread["steps"]

    for message in steps:
        msg_type = message.get("type")
        if msg_type == "user_message":
            message_history.append({"role": "user", "content": message.get("output", "")})
        elif msg_type == "assistant_message":
            message_history.append({"role": "assistant", "content": message.get("output", "")})
        else:
            logger.warning(f"Message without type: {message}")

    cl.user_session.set("message_history", message_history)
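
The chat UI above persists its configuration (currently just `model_name`) in the `settings` table of a local `threads.db`, falling back to the `MODEL_NAME` environment variable and then to `gpt-3.5-turbo`. A small sketch, not part of the release, for pre-seeding that setting before launching the UI; it reuses the schema and upsert statement from `initialize_db`/`save_setting` above, and the model string is only an illustrative value:

import sqlite3

# Pre-seed the model_name that ui/chat.py's load_setting() reads at startup.
# Run this from the same working directory the chat UI will use, so both see the same threads.db.
DB_PATH = "threads.db"
conn = sqlite3.connect(DB_PATH)
conn.execute(
    "CREATE TABLE IF NOT EXISTS settings "
    "(id INTEGER PRIMARY KEY AUTOINCREMENT, key TEXT UNIQUE, value TEXT)"
)
conn.execute(
    "INSERT OR REPLACE INTO settings (id, key, value) "
    "VALUES ((SELECT id FROM settings WHERE key = ?), ?, ?)",
    ("model_name", "model_name", "gpt-4o"),  # illustrative model name, not from the diff
)
conn.commit()
conn.close()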

praisonai-0.0.44/praisonai/ui/public/fantasy.svg (new file, +3 lines)

<?xml version="1.0" encoding="utf-8"?>
<!-- Uploaded to: SVG Repo, www.svgrepo.com, Generator: SVG Repo Mixer Tools -->
<svg width="800px" height="800px" viewBox="0 0 1024 1024" class="icon" version="1.1" xmlns="http://www.w3.org/2000/svg"><path d="M840.5 798.2L662.3 599.5l-151 173.7-173.7-173.7-167.7 201c-21 30.4 0.9 71.8 37.9 71.6l594.7-3.3c36.2-0.1 57.8-40.3 38-70.6z" fill="#FFB89A" /><path d="M741.6 647.3l-52.3-47.7c-12.2-11.2-31.2-10.3-42.4 1.9s-10.3 31.2 1.9 42.4l52.3 47.7c5.8 5.3 13 7.8 20.2 7.8 8.1 0 16.2-3.3 22.2-9.8 11.2-12.1 10.3-31.1-1.9-42.3zM631.2 546.5c-12.4-11-31.4-9.8-42.3 2.6l-98.8 111.7-171-165.7L87.9 724.7c-11.8 11.7-11.8 30.7-0.1 42.4 5.9 5.9 13.6 8.9 21.3 8.9 7.6 0 15.3-2.9 21.1-8.7l189.4-188.1 173.8 168.5L633.8 589c11-12.5 9.8-31.5-2.6-42.5z" fill="#33CC99" /><path d="M721.3 342.8m-35.1 0a35.1 35.1 0 1 0 70.2 0 35.1 35.1 0 1 0-70.2 0Z" fill="#33CC99" /><path d="M743.2 175.1H191.6c-70.6 0-128.3 57.7-128.3 128.3v499.2c0 70.6 57.7 128.3 128.3 128.3h551.5c70.6 0 128.3-57.7 128.3-128.3V303.5c0.1-70.6-57.7-128.4-128.2-128.4z m68.3 627.6c0 18.1-7.1 35.2-20.1 48.2-13 13-30.1 20.1-48.2 20.1H191.6c-18.1 0-35.2-7.1-48.2-20.1-13-13-20.1-30.1-20.1-48.2V303.5c0-18.1 7.1-35.2 20.1-48.2 13-13 30.1-20.1 48.2-20.1h551.5c18.1 0 35.2 7.1 48.2 20.1 13 13 20.1 30.1 20.1 48.2v499.2z" fill="#45484C" /><path d="M799.7 90.9H237.2c-16.6 0-30 13.4-30 30s13.4 30 30 30h562.4c26.1 0 50.8 10.3 69.4 28.9 18.6 18.6 28.9 43.3 28.9 69.4v482.4c0 16.6 13.4 30 30 30s30-13.4 30-30V249.2C958 161.9 887 90.9 799.7 90.9z" fill="#45484C" /></svg>

praisonai-0.0.44/praisonai/ui/public/game.svg (new file, +3 lines)

<?xml version="1.0" encoding="utf-8"?>
<!-- Uploaded to: SVG Repo, www.svgrepo.com, Generator: SVG Repo Mixer Tools -->
<svg width="800px" height="800px" viewBox="0 0 1024 1024" class="icon" version="1.1" xmlns="http://www.w3.org/2000/svg"><path d="M570.2 842c-50.6 0-278.7-180-278.7-401.9 0-58.8-2.9-133.1-1-183.9-50.8 3.2-91.4 45.7-91.4 97.3v272.1c37.4 194.7 137.5 334 255.2 334 69.5 0 132.9-48.6 180.9-128.5-20.8 7.1-42.6 10.9-65 10.9z" fill="#FFB89A" /><path d="M926.1 191.8C900.5 74.1 817.9 62.1 704.9 62.1c-29.1 0-60.3 0.8-93 0.8-36 0-70.5-1.1-102.5-1.1-109.7 0-189.8 12.5-201.3 123.7-20.4 198.3 30 617.1 306.1 617.1S939 414.3 926.1 191.8z m-76.9 268.5c-9.5 47.9-22.3 90.8-38.1 127.7-16.8 39.2-37 71.4-60 95.8-37.3 39.5-82.1 58.7-137 58.7-53.4 0-97.6-20.1-134.9-61.6-45.5-50.5-79.8-131.5-99-234.2-15.6-83.5-20.3-178.9-12.4-255.2 1.8-17.3 5.7-30.7 11.6-39.8 4.4-6.8 10.1-11.7 18.7-15.8 25.8-12.5 70.8-14.2 111.4-14.2 15 0 30.7 0.2 47.3 0.5 17.8 0.3 36.2 0.6 55.2 0.6 17.2 0 33.9-0.2 50-0.4 15.1-0.2 29.3-0.4 43.1-0.4 44.5 0 89.5 1.8 118 15.1 15.9 7.4 33.4 20.8 43.6 63 2.6 53.3 3.6 153.5-17.5 260.2z" fill="#4E5155" /><path d="M532 841.7c-32.5 22.3-70.6 33.7-113.2 33.7-29.7 0-57.3-6-82.1-17.7-23.2-11-44.7-27.4-63.9-48.7-46-50.9-80.3-131.3-99.2-232.4-15.1-80.6-19.6-172.9-12-246.8 3-29.5 12-50.2 27.5-63.2 14.2-12 35.1-19.2 65.8-22.9 16.5-2 28.2-16.9 26.3-33.3-2-16.5-16.9-28.2-33.3-26.3-42.9 5.1-73.8 16.7-97.4 36.5-27.9 23.5-43.8 57.2-48.5 103-8.2 79.3-3.4 178.1 12.7 264 9.7 51.9 23.4 99.4 40.6 141.2 19.8 48.1 44.4 88.6 73 120.4 51.6 57.2 115.7 86.2 190.6 86.2 55 0 104.5-14.9 147.2-44.2 13.7-9.4 17.1-28.1 7.7-41.7-9.4-13.7-28.1-17.2-41.8-7.8z" fill="#4E5155" /><path d="M519.7 248.5c-16.6 0-30 13.4-30 30v91.3c0 16.6 13.4 30 30 30s30-13.4 30-30v-91.3c0-16.6-13.5-30-30-30zM299.5 385.5c0-16.6-13.4-30-30-30s-30 13.4-30 30v91.3c0 16.6 13.4 30 30 30s30-13.4 30-30v-91.3zM754.6 248.5c-16.6 0-30 13.4-30 30v91.3c0 16.6 13.4 30 30 30s30-13.4 30-30v-91.3c0-16.6-13.4-30-30-30zM716.7 554.5c0-16.6-13.4-30-30-30H551v30c0 58.5 38.1 123.7 92.8 123.7 22.9 0 45-11.9 62.2-33.6 10.3-13 8.1-31.9-4.9-42.1-13-10.3-31.9-8.1-42.1 4.9-5.3 6.7-11.1 10.9-15.1 10.9-4.3 0-11.9-5.1-19.1-16.4-3.3-5.3-6.2-11.2-8.4-17.4h70.4c16.4 0 29.9-13.4 29.9-30zM401.6 704c-25.4 0-46.1-24.2-46.1-53.9 0-16.6-13.4-30-30-30s-30 13.4-30 30c0 62.8 47.6 113.9 106.1 113.9 16.6 0 30-13.4 30-30s-13.5-30-30-30z" fill="#33CC99" /></svg>

Binary files (new, content not shown): praisonai-0.0.44/praisonai/ui/public/logo_dark.png, logo_light.png

praisonai-0.0.44/praisonai/ui/public/movie.svg (new file, +3 lines)

<?xml version="1.0" encoding="utf-8"?>
<!-- Uploaded to: SVG Repo, www.svgrepo.com, Generator: SVG Repo Mixer Tools -->
<svg width="800px" height="800px" viewBox="0 0 1024 1024" class="icon" version="1.1" xmlns="http://www.w3.org/2000/svg"><path d="M861.9 383.8H218.1c-36.4 0-66.1-29.8-66.1-66.1V288c0-36.4 29.8-66.1 66.1-66.1h643.8c36.4 0 66.1 29.8 66.1 66.1v29.7c0 36.3-29.8 66.1-66.1 66.1z" fill="#FFB89A" /><path d="M822.9 129.2H199.8c-77.2 0-140.4 63.2-140.4 140.4v487.2c0 77.2 63.2 140.4 140.4 140.4h623.1c77.2 0 140.4-63.2 140.4-140.4V269.6c0-77.2-63.2-140.4-140.4-140.4z m80.4 177H760.4L864.6 201c5.4 3.3 10.4 7.3 15 11.8 15.3 15.3 23.7 35.4 23.7 56.8v36.6z m-673.3 0l104-117h61.3l-109.1 117H230z m247.4-117h169.2L532 306.2H368.3l109.1-117z m248.8 0h65.6L676 306.2h-60l112.5-114.8-2.3-2.2zM143 212.9c15.3-15.3 35.4-23.7 56.8-23.7h53.9l-104 117h-30.4v-36.5c0.1-21.4 8.5-41.5 23.7-56.8z m736.6 600.7c-15.3 15.3-35.4 23.7-56.8 23.7h-623c-21.3 0-41.5-8.4-56.8-23.7-15.3-15.3-23.7-35.4-23.7-56.8V366.2h783.9v390.6c0.1 21.3-8.3 41.5-23.6 56.8z" fill="#45484C" /><path d="M400.5 770.6V430.9L534.1 508c14.3 8.3 19.3 26.6 11 41-8.3 14.3-26.6 19.3-41 11l-43.6-25.2v131.8l114.1-65.9-7.5-4.3c-14.3-8.3-19.3-26.6-11-41 8.3-14.3 26.6-19.3 41-11l97.5 56.3-294.1 169.9z" fill="#33CC99" /></svg>

praisonai-0.0.44/praisonai/ui/public/thriller.svg (new file, +3 lines)

<?xml version="1.0" encoding="utf-8"?>
<!-- Uploaded to: SVG Repo, www.svgrepo.com, Generator: SVG Repo Mixer Tools -->
<svg width="800px" height="800px" viewBox="0 0 1024 1024" class="icon" version="1.1" xmlns="http://www.w3.org/2000/svg"><path d="M188.3 766.5a94.4 135.8 0 1 0 188.8 0 94.4 135.8 0 1 0-188.8 0Z" fill="#FFB89A" /><path d="M931.5 397s0-0.1 0 0c-34.2-82.6-119.3-141-218.8-141-129.7 0-234.9 99.3-234.9 221.9 0 52.1 19.1 100.1 50.9 138 1 14.5 1.8 29.1 1.8 43.6 0 148.5 98.1 269 219.2 269 121 0 219.2-120.4 219.2-269 0-70.1-1.7-214.7-37.4-262.5z m-36.6 347.5c-8.7 25.3-21.1 47.9-36.8 67.1-29.8 36.5-68.3 56.7-108.5 56.7s-78.7-20.1-108.5-56.7c-15.7-19.2-28-41.8-36.8-67.1-9.3-26.9-13.9-55.5-13.9-85.1 0-16.8-1-33.5-2-47.7l-1.3-19.5-12.6-15c-24.1-28.6-36.8-63-36.8-99.3 0-89.3 78.5-161.9 174.9-161.9 36.4 0 71.4 10.3 101 29.7 28.4 18.7 65.5 81.7 65.5 81.7s17.9 27.5 24.7 98.2c4.5 46.5 5 95.9 5 133.8 0.1 29.6-4.6 58.2-13.9 85.1zM377.1 219.9c-51.8 0-93.8 42-93.8 93.8s42 93.8 93.8 93.8 93.8-42 93.8-93.8-42-93.8-93.8-93.8z m0 127.5c-18.6 0-33.8-15.2-33.8-33.8 0-18.6 15.2-33.8 33.8-33.8 18.6 0 33.8 15.2 33.8 33.8 0 18.7-15.1 33.8-33.8 33.8z" fill="#45484C" /><path d="M521.2 206.7m-50.3 0a50.3 50.3 0 1 0 100.6 0 50.3 50.3 0 1 0-100.6 0Z" fill="#45484C" /><path d="M653 156.4m-50.3 0a50.3 50.3 0 1 0 100.6 0 50.3 50.3 0 1 0-100.6 0Z" fill="#45484C" /><path d="M781.9 158.4m-50.3 0a50.3 50.3 0 1 0 100.6 0 50.3 50.3 0 1 0-100.6 0Z" fill="#45484C" /><path d="M909 206.7m-50.3 0a50.3 50.3 0 1 0 100.6 0 50.3 50.3 0 1 0-100.6 0Z" fill="#45484C" /><path d="M263.9 602.7c44.7 0 81 31.5 81 70.3 0 20.9-10.2 35.9-18.7 44.8l-15.9 19.7-0.5 27.2c0.7 7.2 0.6 16.9 0.6 24.7v4.8c0 33.7-27.4 61.2-61.2 61.2-14.9 0-33.3-9.6-48.1-25-15.2-15.9-24.6-35.9-24.6-52.3v-3.2c0-12.7 0-36.2 1-60.2 1.4-33 7.4-57.3 7.4-57.3 3.9-14.7 13.4-28.2 26.8-38 14.8-11 32.8-16.7 52.2-16.7m0-60c-66.4 0-122 42.4-137 99.4-10.9 23-10.4 112.6-10.4 135.9 0 66.9 65.8 137.3 132.7 137.3 66.9 0 121.2-54.3 121.2-121.2 0-9.2 0.3-23-0.8-34.9 22-23 35.4-53.2 35.4-86.3-0.1-71.9-63.2-130.2-141.1-130.2zM444.4 559.9c-26.4 0-47.8 21.4-47.8 47.8s21.4 47.8 47.8 47.8 47.8-21.4 47.8-47.8-21.4-47.8-47.8-47.8zM377.1 494.5c-15.2 0-27.5 12.3-27.5 27.5s12.3 27.5 27.5 27.5 27.5-12.3 27.5-27.5c0-15.3-12.3-27.5-27.5-27.5zM288.1 471.5c-15.2 0-27.5 12.3-27.5 27.5s12.3 27.5 27.5 27.5 27.5-12.3 27.5-27.5-12.4-27.5-27.5-27.5zM188.3 477.9c-15.2 0-27.5 12.3-27.5 27.5s12.3 27.5 27.5 27.5 27.5-12.3 27.5-27.5-12.3-27.5-27.5-27.5zM100.6 538.4c-15.2 0-27.5 12.3-27.5 27.5s12.3 27.5 27.5 27.5 27.5-12.3 27.5-27.5c-0.1-15.2-12.4-27.5-27.5-27.5z" fill="#45484C" /><path d="M670.1 584.6c-41.4 0-80.2-20.3-103.9-54.3-9.5-13.6-6.2-32.3 7.4-41.8 13.6-9.5 32.3-6.2 41.8 7.4 12.5 17.9 33 28.6 54.7 28.6 36.8 0 66.7-29.9 66.7-66.7 0-19.8-8.7-38.4-23.9-51.2-12.7-10.6-14.4-29.6-3.7-42.3s29.6-14.4 42.3-3.7c28.9 24.2 45.4 59.6 45.4 97.2-0.1 70-56.9 126.8-126.8 126.8z" fill="#33CC99" /><path d="M853 556.4c-26 0-49.6-14.5-60.1-36.9-7-15-0.6-32.9 14.4-39.9s32.9-0.6 39.9 14.4c0.3 0.6 2.2 2.4 5.8 2.4 1.2 0 2.3-0.2 3.3-0.6 15.5-5.9 32.8 1.8 38.7 17.3 5.9 15.5-1.8 32.8-17.3 38.7-7.9 3.1-16.2 4.6-24.7 4.6z" fill="#33CC99" /></svg>

{praisonai-0.0.42 → praisonai-0.0.44}/pyproject.toml

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "PraisonAI"
-version = "0.0.42"
+version = "0.0.44"
 description = "PraisonAI application combines AutoGen and CrewAI or similar frameworks into a low-code solution for building and managing multi-agent LLM systems, focusing on simplicity, customization, and efficient human-agent collaboration."
 authors = ["Mervin Praison"]
 license = ""
@@ -29,6 +29,7 @@ langchain-google-genai = {version = ">=1.0.4", optional = true}
 langchain-anthropic = {version = ">=0.1.13", optional = true}
 langchain-openai = {version = ">=0.1.7", optional = true}
 langchain-cohere = {version = ">=0.1.4", optional = true}
+litellm = {version = ">=1.41.8", optional = true}
 
 [tool.poetry.group.docs.dependencies]
 mkdocs = "*"
@@ -75,6 +76,7 @@ langchain-google-genai = ">=1.0.4"
 langchain-anthropic = ">=0.1.13"
 langchain-openai = ">=0.1.7"
 langchain-cohere = ">=0.1.4"
+litellm = ">=1.41.8"
 
 [build-system]
 requires = ["poetry-core"]
@@ -92,4 +94,5 @@ agentops = ["agentops"]
 google = ["langchain-google-genai"]
 openai = ["langchain-openai"]
 anthropic = ["langchain-anthropic"]
-cohere = ["langchain-cohere"]
+cohere = ["langchain-cohere"]
+chat = ["chainlit", "litellm"]
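
The new `chat` extra above bundles `chainlit` and `litellm`, matching the `CHAINLIT_AVAILABLE` guard in cli.py. A quick sketch, not from the package, for checking that those optional dependencies are importable before invoking `praisonai chat`:

import importlib.util

# Verify the optional "chat" dependencies declared in pyproject.toml are installed.
missing = [name for name in ("chainlit", "litellm") if importlib.util.find_spec(name) is None]
if missing:
    print(f'Missing {missing}; install with: pip install "praisonai[chat]"')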

Files without changes between 0.0.42 and 0.0.44: LICENSE, praisonai/__init__.py, praisonai/__main__.py, praisonai/agents_generator.py, praisonai/auto.py, praisonai/chainlit_ui.py, praisonai/inbuilt_tools/__init__.py, praisonai/inbuilt_tools/autogen_tools.py, praisonai/inc/__init__.py, praisonai/inc/models.py, praisonai/public/fantasy.svg, praisonai/public/game.svg, praisonai/public/movie.svg, praisonai/public/thriller.svg, praisonai/test.py, praisonai/version.py
|