PraisonAI 2.0.12__cp311-cp311-macosx_15_0_arm64.whl → 2.2.16__cp311-cp311-macosx_15_0_arm64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of PraisonAI might be problematic.

Files changed (40)
  1. praisonai/README.md +5 -0
  2. praisonai/agents_generator.py +83 -44
  3. praisonai/api/call.py +3 -3
  4. praisonai/auto.py +1 -1
  5. praisonai/cli.py +151 -16
  6. praisonai/deploy.py +1 -1
  7. praisonai/inbuilt_tools/__init__.py +1 -1
  8. praisonai/public/praison-ai-agents-architecture-dark.png +0 -0
  9. praisonai/public/praison-ai-agents-architecture.png +0 -0
  10. praisonai/setup/setup_conda_env.sh +55 -22
  11. praisonai/train.py +442 -156
  12. praisonai/train_vision.py +306 -0
  13. praisonai/ui/agents.py +822 -0
  14. praisonai/ui/callbacks.py +57 -0
  15. praisonai/ui/code.py +4 -2
  16. praisonai/ui/colab.py +474 -0
  17. praisonai/ui/colab_chainlit.py +81 -0
  18. praisonai/ui/config/chainlit.md +1 -1
  19. praisonai/ui/realtime.py +65 -10
  20. praisonai/ui/sql_alchemy.py +6 -5
  21. praisonai/ui/tools.md +133 -0
  22. praisonai/upload_vision.py +140 -0
  23. praisonai-2.2.16.dist-info/METADATA +103 -0
  24. {praisonai-2.0.12.dist-info → praisonai-2.2.16.dist-info}/RECORD +26 -29
  25. {praisonai-2.0.12.dist-info → praisonai-2.2.16.dist-info}/WHEEL +1 -1
  26. praisonai/ui/config/.chainlit/config.toml +0 -120
  27. praisonai/ui/config/.chainlit/translations/bn.json +0 -231
  28. praisonai/ui/config/.chainlit/translations/en-US.json +0 -229
  29. praisonai/ui/config/.chainlit/translations/gu.json +0 -231
  30. praisonai/ui/config/.chainlit/translations/he-IL.json +0 -231
  31. praisonai/ui/config/.chainlit/translations/hi.json +0 -231
  32. praisonai/ui/config/.chainlit/translations/kn.json +0 -231
  33. praisonai/ui/config/.chainlit/translations/ml.json +0 -231
  34. praisonai/ui/config/.chainlit/translations/mr.json +0 -231
  35. praisonai/ui/config/.chainlit/translations/ta.json +0 -231
  36. praisonai/ui/config/.chainlit/translations/te.json +0 -231
  37. praisonai/ui/config/.chainlit/translations/zh-CN.json +0 -229
  38. praisonai-2.0.12.dist-info/LICENSE +0 -20
  39. praisonai-2.0.12.dist-info/METADATA +0 -498
  40. {praisonai-2.0.12.dist-info → praisonai-2.2.16.dist-info}/entry_points.txt +0 -0
praisonai/ui/realtime.py CHANGED
@@ -186,11 +186,42 @@ try:
      if custom_tools_module:
          # Update the tools list with custom tools
          if hasattr(custom_tools_module, 'tools') and isinstance(custom_tools_module.tools, list):
-             tools.extend(custom_tools_module.tools)
+             # Only add tools that have proper function definitions
+             for tool in custom_tools_module.tools:
+                 if isinstance(tool, tuple) and len(tool) == 2:
+                     tool_def, handler = tool
+                     if isinstance(tool_def, dict) and "type" in tool_def and tool_def["type"] == "function":
+                         # Convert class/function to proper tool definition
+                         if "function" in tool_def:
+                             func = tool_def["function"]
+                             if hasattr(func, "__name__"):
+                                 tool_def = {
+                                     "name": func.__name__,
+                                     "description": func.__doc__ or f"Execute {func.__name__}",
+                                     "parameters": {
+                                         "type": "object",
+                                         "properties": {},
+                                         "required": []
+                                     }
+                                 }
+                         tools.append((tool_def, handler))
+                     else:
+                         # Tool definition is already properly formatted
+                         tools.append(tool)
          else:
+             # Process individual functions/classes
              for name, obj in custom_tools_module.__dict__.items():
                  if callable(obj) and not name.startswith("__"):
-                     tools.append(({"type": "function", "function": obj}, obj))
+                     tool_def = {
+                         "name": name,
+                         "description": obj.__doc__ or f"Execute {name}",
+                         "parameters": {
+                             "type": "object",
+                             "properties": {},
+                             "required": []
+                         }
+                     }
+                     tools.append((tool_def, obj))

  except Exception as e:
      logger.warning(f"Error importing custom tools: {str(e)}. Continuing without custom tools.")
@@ -198,7 +229,7 @@ except Exception as e:
  @cl.on_chat_start
  async def start():
      initialize_db()
-     model_name = load_setting("model_name") or os.getenv("MODEL_NAME", "gpt-4o-mini")
+     model_name = load_setting("model_name") or os.getenv("MODEL_NAME", "gpt-4o-mini-realtime-preview")
      cl.user_session.set("model_name", model_name)
      cl.user_session.set("message_history", [])  # Initialize message history
      logger.debug(f"Model name: {model_name}")
@@ -207,7 +238,7 @@ async def start():
      # TextInput(
      #     id="model_name",
      #     label="Enter the Model Name",
-     #     placeholder="e.g., gpt-4o-mini",
+     #     placeholder="e.g., gpt-4o-mini-realtime-preview",
      #     initial=model_name
      # )
      # ]
@@ -287,14 +318,30 @@ async def setup_openai_realtime():
          logger.error(event)
          await cl.Message(content=f"Error: {event}", author="System").send()

+     # Register event handlers
      openai_realtime.on('conversation.updated', handle_conversation_updated)
      openai_realtime.on('conversation.item.completed', handle_item_completed)
      openai_realtime.on('conversation.interrupted', handle_conversation_interrupt)
      openai_realtime.on('error', handle_error)

      cl.user_session.set("openai_realtime", openai_realtime)
-     coros = [openai_realtime.add_tool(tool_def, tool_handler) for tool_def, tool_handler in tools]
-     await asyncio.gather(*coros)
+
+     # Filter out invalid tools and add valid ones
+     valid_tools = []
+     for tool_def, tool_handler in tools:
+         try:
+             if isinstance(tool_def, dict) and "name" in tool_def:
+                 valid_tools.append((tool_def, tool_handler))
+             else:
+                 logger.warning(f"Skipping invalid tool definition: {tool_def}")
+         except Exception as e:
+             logger.warning(f"Error processing tool: {e}")
+
+     if valid_tools:
+         coros = [openai_realtime.add_tool(tool_def, tool_handler) for tool_def, tool_handler in valid_tools]
+         await asyncio.gather(*coros)
+     else:
+         logger.warning("No valid tools found to add")

  @cl.on_settings_update
  async def setup_agent(settings):
@@ -330,11 +377,19 @@ async def setup_agent(settings):
  async def on_audio_start():
      try:
          openai_realtime: RealtimeClient = cl.user_session.get("openai_realtime")
-         await openai_realtime.connect()
+         if not openai_realtime:
+             await setup_openai_realtime()
+             openai_realtime = cl.user_session.get("openai_realtime")
+
+         if not openai_realtime.is_connected():
+             await openai_realtime.connect()
+
          logger.info("Connected to OpenAI realtime")
          return True
      except Exception as e:
-         await cl.ErrorMessage(content=f"Failed to connect to OpenAI realtime: {e}").send()
+         error_msg = f"Failed to connect to OpenAI realtime: {str(e)}"
+         logger.error(error_msg)
+         await cl.ErrorMessage(content=error_msg).send()
          return False

  @cl.on_audio_chunk
@@ -368,14 +423,14 @@ def auth_callback(username: str, password: str):
  @cl.on_chat_resume
  async def on_chat_resume(thread: ThreadDict):
      logger.info(f"Resuming chat: {thread['id']}")
-     model_name = load_setting("model_name") or os.getenv("MODEL_NAME") or "gpt-4o-mini"
+     model_name = load_setting("model_name") or os.getenv("MODEL_NAME") or "gpt-4o-mini-realtime-preview"
      logger.debug(f"Model name: {model_name}")
      settings = cl.ChatSettings(
          [
              TextInput(
                  id="model_name",
                  label="Enter the Model Name",
-                 placeholder="e.g., gpt-4o-mini",
+                 placeholder="e.g., gpt-4o-mini-realtime-preview",
                  initial=model_name
              )
          ]
praisonai/ui/sql_alchemy.py CHANGED
@@ -9,7 +9,8 @@ import os
  import aiofiles
  import aiohttp

- from chainlit.data.base import BaseDataLayer, BaseStorageClient
+ from chainlit.data.base import BaseDataLayer
+ from chainlit.data.storage_clients.base import EXPIRY_TIME, BaseStorageClient
  from chainlit.data.utils import queue_until_user_message
  from chainlit.element import ElementDict
  from chainlit.logger import logger
@@ -69,7 +70,7 @@ class SQLAlchemyDataLayer(BaseDataLayer):
              logger.info("SQLAlchemyDataLayer storage client initialized")
          else:
              self.storage_provider = None
-             logger.warn(
+             logger.warning(
                  "SQLAlchemyDataLayer storage client is not initialized and elements will not be persisted!"
              )

@@ -97,11 +98,11 @@ class SQLAlchemyDataLayer(BaseDataLayer):
                  return result.rowcount
              except SQLAlchemyError as e:
                  await session.rollback()
-                 logger.warn(f"An error occurred: {e}")
+                 logger.warning(f"An error occurred: {e}")
                  return None
              except Exception as e:
                  await session.rollback()
-                 logger.warn(f"An unexpected error occurred: {e}")
+                 logger.warning(f"An unexpected error occurred: {e}")
                  return None

      async def get_current_timestamp(self) -> str:
@@ -455,7 +456,7 @@ class SQLAlchemyDataLayer(BaseDataLayer):
          logger.info(f"SQLAlchemy: create_element, element_id = {element.id}")

          if not self.storage_provider:
-             logger.warn("SQLAlchemy: create_element error. No storage client!")
+             logger.warning("SQLAlchemy: create_element error. No storage client!")
              return
          if not element.for_id:
              return
praisonai/ui/tools.md ADDED
@@ -0,0 +1,133 @@
+ # Understanding Tool Integration in AI Agents - A Beginner's Guide
+
+ ## Overview
+ This guide explains how to properly integrate tools (functions) that an AI agent can use, making them both understandable to the OpenAI API and executable by your code.
+
+ ## Key Components
+
+ ### 1. Tool Definition Structure
+ ```python
+ # Example tool definition in tools.py
+ def search_tool(query: str) -> list:
+     """
+     Perform a web search using DuckDuckGo.
+
+     Args:
+         query (str): The search query string.
+
+     Returns:
+         list: Search results with title, url, and snippet.
+     """
+     # Function implementation...
+ ```
+
+ ### 2. Tool Dictionary Format
+ ```python
+ tools_dict = {
+     'search_tool': {
+         'type': 'function',
+         'function': {
+             'name': 'search_tool',
+             'description': '...',
+             'parameters': {
+                 'type': 'object',
+                 'properties': {
+                     'query': {'type': 'string'}
+                 }
+             }
+         },
+         'callable': search_tool  # The actual Python function
+     }
+ }
+ ```
+
+ ## The Two-Part System
+
+ ### Part 1: OpenAI API Communication
+ ```python
+ # task_tools: What OpenAI understands
+ task_tools = []
+ tool_def = tools_dict[tool_name].copy()
+ callable_func = tool_def.pop("callable")  # Remove the Python function
+ task_tools.append(tool_def)  # Add clean JSON-serializable definition
+ ```
+
+ ### Part 2: Function Execution
+ ```python
+ # role_tools: What your code executes
+ role_tools = []
+ role_tools.append(callable_func)  # Store the actual function
+ agent.tools = role_tools  # Give agent access to executable functions
+ ```
+
+ ## Putting It All Together
+
+ ```python
+ # Initialize empty lists
+ role_tools = []  # For executable functions
+ task_tools = []  # For OpenAI API definitions
+
+ # Process each tool
+ for tool_name in tools_list:
+     if tool_name in tools_dict:
+         # 1. Get the tool definition
+         tool_def = tools_dict[tool_name].copy()
+
+         # 2. Separate the callable function
+         callable_func = tool_def.pop("callable")
+
+         # 3. Store the function for execution
+         role_tools.append(callable_func)
+
+         # 4. Store the API definition
+         task_tools.append(tool_def)
+
+ # 5. Give agent access to functions
+ agent.tools = role_tools
+
+ # Create task with API definitions
+ task = Task(
+     description="...",
+     tools=task_tools,  # OpenAI API will use these
+     agent=agent,  # Agent has access to callable functions
+     # ... other parameters ...
+ )
+ ```
+
+ ## Why This Works
+
+ 1. **API Communication**
+    - OpenAI API receives clean JSON tool definitions
+    - No Python functions that would cause serialization errors
+
+ 2. **Function Execution**
+    - Agent has access to actual Python functions
+    - Can execute tools when OpenAI decides to use them
+
+ 3. **Separation of Concerns**
+    - `task_tools`: Describes what tools can do (for OpenAI)
+    - `role_tools`: Actually does the work (for Python)
+
+ ## Common Errors and Solutions
+
+ 1. **"Invalid type for 'tools[0]'"**
+    - Cause: Sending null or invalid tool definition to OpenAI
+    - Solution: Use proper tool definition format in `task_tools`
+
+ 2. **"Object of type function is not JSON serializable"**
+    - Cause: Trying to send Python function to OpenAI API
+    - Solution: Remove callable function from API definition
+
+ 3. **"Tool is not callable"**
+    - Cause: Agent doesn't have access to executable functions
+    - Solution: Set `agent.tools = role_tools`
+
+ ## Best Practices
+
+ 1. Always initialize both `task_tools` and `role_tools` lists
+ 2. Make clean copies of tool definitions to avoid modifying originals
+ 3. Keep tool definitions JSON-serializable for API communication
+ 4. Ensure agents have access to callable functions
+ 5. Document tool parameters and return values clearly
+
+ This structure maintains clean separation between API communication and actual function execution, making your AI agent system both reliable and maintainable.
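
The error cases described in the guide above can also be caught before any API call with a small validation pass. The helper below is an illustrative sketch, not part of the package, and assumes the `tools_dict` layout shown earlier in the guide:

```python
import json

def split_tools(tools_dict: dict, tools_list: list):
    """Split tool entries into JSON-safe API definitions and callable handlers."""
    task_tools, role_tools = [], []
    for tool_name in tools_list:
        if tool_name not in tools_dict:
            continue
        tool_def = tools_dict[tool_name].copy()   # avoid mutating the original entry
        callable_func = tool_def.pop("callable")  # keep the function out of the API payload
        json.dumps(tool_def)                      # raises TypeError if anything non-serializable remains
        task_tools.append(tool_def)
        role_tools.append(callable_func)
    return task_tools, role_tools
```

Anything that fails the `json.dumps` check here would otherwise surface later as the "not JSON serializable" error listed above.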
praisonai/upload_vision.py ADDED
@@ -0,0 +1,140 @@
+ #!/usr/bin/env python
+ # -*- coding: utf-8 -*-
+ """
+ This script handles uploading trained vision models to Hugging Face and Ollama.
+ It reads configuration from config.yaml and provides options to upload in different formats.
+ """
+
+ import os
+ import yaml
+ import torch
+ import shutil
+ import subprocess
+ from unsloth import FastVisionModel
+
+ class UploadVisionModel:
+     def __init__(self, config_path="config.yaml"):
+         self.load_config(config_path)
+         self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
+         self.model = None
+         self.hf_tokenizer = None
+
+     def load_config(self, path):
+         """Load configuration from yaml file."""
+         with open(path, "r") as file:
+             self.config = yaml.safe_load(file)
+         print("DEBUG: Loaded config:", self.config)
+
+     def prepare_model(self):
+         """Load the trained model for uploading."""
+         print("DEBUG: Loading trained model and tokenizer...")
+         self.model, original_tokenizer = FastVisionModel.from_pretrained(
+             model_name=self.config.get("output_dir", "lora_model"),
+             load_in_4bit=self.config.get("load_in_4bit", True)
+         )
+         self.hf_tokenizer = original_tokenizer
+         print("DEBUG: Model and tokenizer loaded successfully.")
+
+     def save_model_merged(self):
+         """Save merged model to Hugging Face Hub."""
+         print(f"DEBUG: Saving merged model to Hugging Face Hub: {self.config['hf_model_name']}")
+         if os.path.exists(self.config["hf_model_name"]):
+             shutil.rmtree(self.config["hf_model_name"])
+         self.model.push_to_hub_merged(
+             self.config["hf_model_name"],
+             self.hf_tokenizer,
+             save_method="merged_16bit",
+             token=os.getenv("HF_TOKEN")
+         )
+         print("DEBUG: Model saved to Hugging Face Hub successfully.")
+
+     def push_model_gguf(self):
+         """Push model in GGUF format to Hugging Face Hub."""
+         print(f"DEBUG: Pushing GGUF model to Hugging Face Hub: {self.config['hf_model_name']}")
+         self.model.push_to_hub_gguf(
+             self.config["hf_model_name"],
+             self.hf_tokenizer,
+             quantization_method=self.config.get("quantization_method", "q4_k_m"),
+             token=os.getenv("HF_TOKEN")
+         )
+         print("DEBUG: GGUF model pushed to Hugging Face Hub successfully.")
+
+     def prepare_modelfile_content(self):
+         """Prepare Ollama modelfile content using Llama 3.2 vision template."""
+         output_model = self.config["hf_model_name"]
+
+         # Using Llama 3.2 vision template format
+         template = """{{- range $index, $_ := .Messages }}<|start_header_id|>{{ .Role }}<|end_header_id|>
+
+ {{ .Content }}
+ {{- if gt (len (slice $.Messages $index)) 1 }}<|eot_id|>
+ {{- else if ne .Role "assistant" }}<|eot_id|><|start_header_id|>assistant<|end_header_id|>
+
+ {{ end }}
+ {{- end }}"""
+
+         # Assemble the modelfile content with Llama 3.2 vision parameters
+         modelfile = f"FROM {output_model}\n"
+         modelfile += "TEMPLATE \"\"\"" + template + "\"\"\"\n"
+         modelfile += "PARAMETER temperature 0.6\n"
+         modelfile += "PARAMETER top_p 0.9\n"
+         return modelfile
+
+     def create_and_push_ollama_model(self):
+         """Create and push model to Ollama."""
+         print(f"DEBUG: Creating Ollama model: {self.config['ollama_model']}:{self.config['model_parameters']}")
+         modelfile_content = self.prepare_modelfile_content()
+         with open("Modelfile", "w") as file:
+             file.write(modelfile_content)
+
+         print("DEBUG: Starting Ollama server...")
+         subprocess.run(["ollama", "serve"])
+
+         print("DEBUG: Creating Ollama model...")
+         subprocess.run([
+             "ollama", "create",
+             f"{self.config['ollama_model']}:{self.config['model_parameters']}",
+             "-f", "Modelfile"
+         ])
+
+         print("DEBUG: Pushing model to Ollama...")
+         subprocess.run([
+             "ollama", "push",
+             f"{self.config['ollama_model']}:{self.config['model_parameters']}"
+         ])
+         print("DEBUG: Model pushed to Ollama successfully.")
+
+     def upload(self, target="all"):
+         """
+         Upload the model to specified targets.
+         Args:
+             target (str): One of 'all', 'huggingface', 'huggingface_gguf', or 'ollama'
+         """
+         self.prepare_model()
+
+         if target in ["all", "huggingface"]:
+             self.save_model_merged()
+
+         if target in ["all", "huggingface_gguf"]:
+             self.push_model_gguf()
+
+         if target in ["all", "ollama"]:
+             self.create_and_push_ollama_model()
+
+ def main():
+     import argparse
+     parser = argparse.ArgumentParser(description="Upload Vision Model to Various Platforms")
+     parser.add_argument("--config", default="config.yaml", help="Path to configuration file")
+     parser.add_argument(
+         "--target",
+         choices=["all", "huggingface", "huggingface_gguf", "ollama"],
+         default="all",
+         help="Target platform to upload to"
+     )
+     args = parser.parse_args()
+
+     uploader = UploadVisionModel(config_path=args.config)
+     uploader.upload(target=args.target)
+
+ if __name__ == "__main__":
+     main()
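
The new script can also be driven programmatically rather than through its argparse entry point. The snippet below is a usage sketch only; the config keys mirror what `UploadVisionModel` reads above (`output_dir`, `hf_model_name`, `quantization_method`, `ollama_model`, `model_parameters`), and an `HF_TOKEN` is assumed to be set in the environment:

```python
# Usage sketch for the script added above; paths and config values are placeholders.
from praisonai.upload_vision import UploadVisionModel

uploader = UploadVisionModel(config_path="config.yaml")

# Push only the merged 16-bit weights to the Hugging Face Hub
# (uses HF_TOKEN, as in save_model_merged above).
uploader.upload(target="huggingface")

# Other accepted targets: "huggingface_gguf", "ollama", or "all".
```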
praisonai-2.2.16.dist-info/METADATA ADDED
@@ -0,0 +1,103 @@
+ Metadata-Version: 2.3
+ Name: PraisonAI
+ Version: 2.2.16
+ Summary: PraisonAI is an AI Agents Framework with Self Reflection. PraisonAI application combines PraisonAI Agents, AutoGen, and CrewAI into a low-code solution for building and managing multi-agent LLM systems, focusing on simplicity, customisation, and efficient human-agent collaboration.
+ Author: Mervin Praison
+ Requires-Python: >=3.10,<3.13
+ Classifier: Programming Language :: Python :: 3
+ Classifier: Programming Language :: Python :: 3.10
+ Classifier: Programming Language :: Python :: 3.11
+ Classifier: Programming Language :: Python :: 3.12
+ Provides-Extra: agentops
+ Provides-Extra: anthropic
+ Provides-Extra: api
+ Provides-Extra: autogen
+ Provides-Extra: call
+ Provides-Extra: chat
+ Provides-Extra: code
+ Provides-Extra: cohere
+ Provides-Extra: crewai
+ Provides-Extra: google
+ Provides-Extra: gradio
+ Provides-Extra: openai
+ Provides-Extra: realtime
+ Provides-Extra: ui
+ Requires-Dist: PyYAML (>=6.0)
+ Requires-Dist: agentops (>=0.3.12) ; extra == "agentops"
+ Requires-Dist: aiosqlite (>=0.20.0) ; extra == "chat"
+ Requires-Dist: aiosqlite (>=0.20.0) ; extra == "code"
+ Requires-Dist: aiosqlite (>=0.20.0) ; extra == "realtime"
+ Requires-Dist: aiosqlite (>=0.20.0) ; extra == "ui"
+ Requires-Dist: chainlit (==2.5.5) ; extra == "chat"
+ Requires-Dist: chainlit (==2.5.5) ; extra == "code"
+ Requires-Dist: chainlit (==2.5.5) ; extra == "realtime"
+ Requires-Dist: chainlit (==2.5.5) ; extra == "ui"
+ Requires-Dist: crawl4ai (>=0.6.0) ; extra == "chat"
+ Requires-Dist: crawl4ai (>=0.6.0) ; extra == "code"
+ Requires-Dist: crawl4ai (>=0.6.0) ; extra == "realtime"
+ Requires-Dist: crewai (>=0.32.0) ; extra == "crewai"
+ Requires-Dist: crewai ; extra == "autogen"
+ Requires-Dist: duckduckgo_search (>=6.3.0) ; extra == "realtime"
+ Requires-Dist: fastapi (>=0.115.0) ; extra == "api"
+ Requires-Dist: fastapi (>=0.95.0) ; extra == "call"
+ Requires-Dist: flaml[automl] (>=2.3.1) ; extra == "call"
+ Requires-Dist: flask (>=3.0.0) ; extra == "api"
+ Requires-Dist: gradio (>=4.26.0) ; extra == "gradio"
+ Requires-Dist: greenlet (>=3.0.3) ; extra == "chat"
+ Requires-Dist: greenlet (>=3.0.3) ; extra == "code"
+ Requires-Dist: greenlet (>=3.0.3) ; extra == "realtime"
+ Requires-Dist: greenlet (>=3.0.3) ; extra == "ui"
+ Requires-Dist: instructor (>=1.3.3)
+ Requires-Dist: langchain-anthropic (>=0.3.0) ; extra == "anthropic"
+ Requires-Dist: langchain-cohere (>=0.3.0,<0.4.0) ; extra == "cohere"
+ Requires-Dist: langchain-google-genai (>=2.1.0) ; extra == "google"
+ Requires-Dist: langchain-openai (>=0.2.1,<0.3.0) ; extra == "openai"
+ Requires-Dist: litellm (>=1.68.0) ; extra == "chat"
+ Requires-Dist: litellm (>=1.68.0) ; extra == "code"
+ Requires-Dist: litellm (>=1.68.0) ; extra == "realtime"
+ Requires-Dist: markdown (>=3.5)
+ Requires-Dist: mcp (>=1.6.0)
+ Requires-Dist: openai (>=1.54.0) ; extra == "call"
+ Requires-Dist: playwright (>=1.47.0) ; extra == "chat"
+ Requires-Dist: playwright (>=1.47.0) ; extra == "code"
+ Requires-Dist: plotly (>=5.24.0) ; extra == "realtime"
+ Requires-Dist: praisonai-tools (>=0.0.15) ; extra == "autogen"
+ Requires-Dist: praisonai-tools (>=0.0.15) ; extra == "crewai"
+ Requires-Dist: praisonaiagents (>=0.0.89)
+ Requires-Dist: pyautogen (>=0.2.19) ; extra == "autogen"
+ Requires-Dist: pydantic (<=2.10.1) ; extra == "chat"
+ Requires-Dist: pydantic (<=2.10.1) ; extra == "code"
+ Requires-Dist: pydantic (<=2.10.1) ; extra == "ui"
+ Requires-Dist: pyngrok (>=1.4.0) ; extra == "call"
+ Requires-Dist: pyparsing (>=3.0.0)
+ Requires-Dist: python-dotenv (>=0.19.0)
+ Requires-Dist: rich (>=13.7)
+ Requires-Dist: rich ; extra == "call"
+ Requires-Dist: rich ; extra == "chat"
+ Requires-Dist: sqlalchemy (>=2.0.36) ; extra == "chat"
+ Requires-Dist: sqlalchemy (>=2.0.36) ; extra == "code"
+ Requires-Dist: sqlalchemy (>=2.0.36) ; extra == "realtime"
+ Requires-Dist: sqlalchemy (>=2.0.36) ; extra == "ui"
+ Requires-Dist: tavily-python (==0.5.0) ; extra == "chat"
+ Requires-Dist: tavily-python (==0.5.0) ; extra == "code"
+ Requires-Dist: tavily-python (==0.5.0) ; extra == "realtime"
+ Requires-Dist: twilio (>=7.0.0) ; extra == "call"
+ Requires-Dist: uvicorn (>=0.20.0) ; extra == "call"
+ Requires-Dist: uvicorn (>=0.34.0) ; extra == "api"
+ Requires-Dist: websockets (>=12.0) ; extra == "call"
+ Requires-Dist: websockets (>=12.0) ; extra == "realtime"
+ Requires-Dist: yfinance (>=0.2.44) ; extra == "realtime"
+ Project-URL: Homepage, https://docs.praison.ai
+ Project-URL: Repository, https://github.com/mervinpraison/PraisonAI
+ Description-Content-Type: text/markdown
+
+ # PraisonAI Package
+
+ This is the PraisonAI package, which serves as a wrapper for PraisonAIAgents.
+
+ It provides a simple and intuitive interface for working with AI agents and their capabilities.
+
+ ## Directory Structure
+
+ The main package code is located in the `praisonai` subdirectory.
+
{praisonai-2.0.12.dist-info → praisonai-2.2.16.dist-info}/RECORD CHANGED
@@ -1,12 +1,13 @@
+ praisonai/README.md,sha256=dXaEAByiWlJPE8_k-13lsNIEuvHdzmzJzJ8IVa84thM,195
  praisonai/__init__.py,sha256=JrgyPlzZfLlozoW7SHZ1nVJ63rLPR3ki2k5ZPywYrnI,175
  praisonai/__main__.py,sha256=MVgsjMThjBexHt4nhd760JCqvP4x0IQcwo8kULOK4FQ,144
- praisonai/agents_generator.py,sha256=9-KuU1--_cBI85y2J-9XjW0LYHTCMnKJhy5WAUrf3M0,26887
- praisonai/api/call.py,sha256=iHdAlgIH_oTsEbjaGGu1Jjo6DTfMR-SfFdtSxnOLCeY,11032
- praisonai/auto.py,sha256=uLDm8CU3L_3amZsd55yzf9RdBF1uW-BGSx7nl9ctNZ4,8680
+ praisonai/agents_generator.py,sha256=K8J0O_BKs1GkjzaA_GIg6610W5bYTiSc1ObSl4AJckM,28977
+ praisonai/api/call.py,sha256=krOfTCZM_bdbsNuWQ1PijzCHECkDvEi9jIvvZaDQUUU,11035
+ praisonai/auto.py,sha256=g9k1X94mx4tYxprEAtgzu-FPRffBM7cq2MZ0C3Po364,8683
  praisonai/chainlit_ui.py,sha256=bNR7s509lp0I9JlJNvwCZRUZosC64qdvlFCt8NmFamQ,12216
- praisonai/cli.py,sha256=O7abKND2MP_yDdD_OclPoiZG1JRoGc4u9KowbRzuQuQ,21209
- praisonai/deploy.py,sha256=c0jF6JDHfEPpDjx9js1PQegpELJ5mVBXr7VIU1_xkUA,6028
- praisonai/inbuilt_tools/__init__.py,sha256=fai4ZJIKz7-iOnGZv5jJX0wmT77PKa4x2jqyaJddKFA,569
+ praisonai/cli.py,sha256=qsSQHx1jXNyUK8QEVXhd5eCSCoZ2iJLYYe1fiCA3WT8,27336
+ praisonai/deploy.py,sha256=lhiVEIiUo8ho_fALzopkiPKUZ4PnYJdoL64d5gUNxc8,6028
+ praisonai/inbuilt_tools/__init__.py,sha256=mZOEximj3zCyJHq9Lz0bGXhQpBsa_QR-R-yA9UKC3zI,565
  praisonai/inbuilt_tools/autogen_tools.py,sha256=kJdEv61BTYvdHOaURNEpBcWq8Rs-oC03loNFTIjT-ak,4687
  praisonai/inc/__init__.py,sha256=sPDlYBBwdk0VlWzaaM_lG0_LD07lS2HRGvPdxXJFiYg,62
  praisonai/inc/config.py,sha256=up2-841ruK7MCUUT3xkWBA5S6WsY0sFODNfcT6Q4Wms,3333
@@ -22,33 +23,28 @@ praisonai/public/game.svg,sha256=y2QMaA01m8XzuDjTOBWzupOC3-TpnUl9ah89mIhviUw,240
  praisonai/public/logo_dark.png,sha256=frHz1zkrnivGssJgk9iy1cabojkVgm8B4MllFwL_CnI,17050
  praisonai/public/logo_light.png,sha256=8cQRti_Ysa30O3_7C3ku2w40LnVUUlUok47H-3ZZHSU,19656
  praisonai/public/movie.svg,sha256=aJ2EQ8vXZusVsF2SeuAVxP4RFJzQ14T26ejrGYdBgzk,1289
+ praisonai/public/praison-ai-agents-architecture-dark.png,sha256=eXb8nrRuTEn7bsHcQ8_AHLrO7c4-XPiEHz267oBE5V4,172088
+ praisonai/public/praison-ai-agents-architecture.png,sha256=a0-2bYW2j03H0ewUuOCBJnDZ3Ju2LrZMXPmbCANr6_w,171433
  praisonai/public/thriller.svg,sha256=2dYY72EcgbEyTxS4QzjAm37Y4srtPWEW4vCMFki98ZI,3163
  praisonai/setup/__init__.py,sha256=frcCV1k9oG9oKj3dpUqdJg1PxRT2RSN_XKdLCPjaYaY,2
  praisonai/setup/build.py,sha256=NyTAXQ_UZ8vKo_KwCINp8ctmauZyCMDkw1rys3ay0ec,646
  praisonai/setup/config.yaml,sha256=sr_D1RIvv3LQ_eueOMZV0rAUiWTR-n2xuE1RhKK6b34,1211
  praisonai/setup/post_install.py,sha256=rlUYJhT4JXVrL2RQih5VUPr4PQEmpVypoRpe_vJDMBk,730
  praisonai/setup/setup_conda_env.py,sha256=4QiWrqgEObivzOMwfJgWaCPpUEpB68cQ6lFwVwFoufk,816
- praisonai/setup/setup_conda_env.sh,sha256=te7s0KHsTi7XM-vkNvE0dKC1HeU2tXxqE-sPUScV6fY,2718
+ praisonai/setup/setup_conda_env.sh,sha256=_pVbrXStZua6vUJTbuGiZam-zWsDDLWP0ZaFuPo2Onk,4087
  praisonai/setup.py,sha256=0jHgKnIPCtBZiGYaYyTz3PzrJI6nBy55VXk2UctXlDo,373
  praisonai/test.py,sha256=OL-wesjA5JTohr8rtr6kWoaS4ImkJg2l0GXJ-dUUfRU,4090
- praisonai/train.py,sha256=DvORlrwKOD-2v4r_z84eV3LsfzpNs-WnPKb5cQB3_t4,11071
+ praisonai/train.py,sha256=Cjb0TKU3esNrCk2OX24Qm1S1crRC00FdiGUYJLw3iPQ,24094
+ praisonai/train_vision.py,sha256=OLDtr5u9rszWQ80LC5iFy37yPuYguES6AQybm_2RtM4,12514
  praisonai/ui/README.md,sha256=QG9yucvBieVjCjWFzu6hL9xNtYllkoqyJ_q1b0YYAco,1124
+ praisonai/ui/agents.py,sha256=1qsWE2yCaQKhuc-1uLHdMfZJeOXzBtp4pe5q7bk2EuA,32813
+ praisonai/ui/callbacks.py,sha256=V4_-GjxmjDFmugUZGfQHKtNSysx7rT6i1UblbM_8lIM,1968
  praisonai/ui/chat.py,sha256=rlYwhTd3giBuvtK4Yc9kf6N9jfVT0VrZ-mLIzhANGiQ,13565
- praisonai/ui/code.py,sha256=GD_xQTo7qzpOM98tu4MOPsviJdXU__Ta3JIfsjoRe6U,15797
+ praisonai/ui/code.py,sha256=nrknYLOkWxdjwkXrLHgOEXFaO9MrKi5OZ3kb2XuIfOc,15991
+ praisonai/ui/colab.py,sha256=A2NceDVazMy53mIpp-NIn5w3y8aQKwQu5LmHTepVwlo,19584
+ praisonai/ui/colab_chainlit.py,sha256=wrB1O0ttRlmOH8aMxU8QdGpse-X54U87ZcEEA3R1aFg,2432
  praisonai/ui/components/aicoder.py,sha256=Xh95RSEJCel5mEGic4vdtzyNpHNULF3ymft9nbwglXY,11155
- praisonai/ui/config/.chainlit/config.toml,sha256=kxb2APgVauLtfxlcEMlv8GHse6p8AbNTsJhIfZF38bg,3824
- praisonai/ui/config/.chainlit/translations/bn.json,sha256=m2TAaGMS-18_siW5dw4sbosh0Wn8ENWWzdGYkHaBrXw,22679
- praisonai/ui/config/.chainlit/translations/en-US.json,sha256=QoQAg8P5Q5gbGASc-HAHcfhufk71-Uc1u_ewIBfHuLc,9821
- praisonai/ui/config/.chainlit/translations/gu.json,sha256=9wE-NsHf7j5VUFzfE-cCpESTyHtzVHRcZXAwC3ACMl0,21660
- praisonai/ui/config/.chainlit/translations/he-IL.json,sha256=uVS4q9wlxDWqCNXde_f34p4tXcyA89YBsVNRDiOkhaE,16974
- praisonai/ui/config/.chainlit/translations/hi.json,sha256=3zi4wbpTCv5Q1bOzakeVQqeRpaO5ZkPJJ6oV-dVtNF4,21079
- praisonai/ui/config/.chainlit/translations/kn.json,sha256=GRGmx2hPh8bSfpKJQHwwtV_UF45kaMc8S-rUJeh_ZA4,23514
- praisonai/ui/config/.chainlit/translations/ml.json,sha256=QK1mYIbx_0gmmR4Poy2mm3uttAgW2ZpMfo6jgDec6uc,24857
- praisonai/ui/config/.chainlit/translations/mr.json,sha256=JaU16y5uW-cH0x7w6RDztEmhqEtnerJ61h8azkNLqyg,21321
- praisonai/ui/config/.chainlit/translations/ta.json,sha256=8JPW6BwLN2dl9wuq5wSkMvazcY8lM5v1pqbBxwObGUw,24724
- praisonai/ui/config/.chainlit/translations/te.json,sha256=JzW2YXWg1qqvWgIvEgMelQz5s6EzTb_uD_3TEHAHiQw,23526
- praisonai/ui/config/.chainlit/translations/zh-CN.json,sha256=aLBSSSQ0yojlYGuMMlOYvkD_ruG9-d2AgnjJWhPODVw,11737
- praisonai/ui/config/chainlit.md,sha256=zhT-Le2gShsByFXBKfayO6sM29fs3PWTfy6fG93sNJ8,33
+ praisonai/ui/config/chainlit.md,sha256=YCjGjkKOeW0w711tkAdEfC6sPgBRm6G3bxYPFeHx72U,28
  praisonai/ui/config/translations/bn.json,sha256=m2TAaGMS-18_siW5dw4sbosh0Wn8ENWWzdGYkHaBrXw,22679
  praisonai/ui/config/translations/en-US.json,sha256=QoQAg8P5Q5gbGASc-HAHcfhufk71-Uc1u_ewIBfHuLc,9821
  praisonai/ui/config/translations/gu.json,sha256=9wE-NsHf7j5VUFzfE-cCpESTyHtzVHRcZXAwC3ACMl0,21660
@@ -69,14 +65,15 @@ praisonai/ui/public/logo_light.png,sha256=8cQRti_Ysa30O3_7C3ku2w40LnVUUlUok47H-3
  praisonai/ui/public/movie.svg,sha256=aJ2EQ8vXZusVsF2SeuAVxP4RFJzQ14T26ejrGYdBgzk,1289
  praisonai/ui/public/praison.css,sha256=fBYbJn4Uuv2AH6ThWkMmdAy_uBbw9a9ZeW0hIGsqotA,75
  praisonai/ui/public/thriller.svg,sha256=2dYY72EcgbEyTxS4QzjAm37Y4srtPWEW4vCMFki98ZI,3163
- praisonai/ui/realtime.py,sha256=qpgcGA8CIUfYuSXtQM0zSlxktFtUZXsryn0Tru-R5wU,15304
+ praisonai/ui/realtime.py,sha256=aVK-lbA57J9KHo3Lrknk4aaO1V1tRkiKXr_01zWrl30,17845
  praisonai/ui/realtimeclient/__init__.py,sha256=zA2xa7rBUSw77wFkndJMQNNPqdH6ywQ3uf4WSYHjNfs,27513
  praisonai/ui/realtimeclient/realtimedocs.txt,sha256=hmgd8Uwy2SkjSndyyF_-ZOaNxiyHwGaQLGc67DvV-sI,26395
  praisonai/ui/realtimeclient/tools.py,sha256=IJOYwVOBW5Ocn5_iV9pFkmSKR3WU3YpX3kwF0I3jikQ,7855
- praisonai/ui/sql_alchemy.py,sha256=cfyL9uFfuizKFvW0aZfUBlJWPQYI-YBi1v4vxlkb1BQ,29615
+ praisonai/ui/sql_alchemy.py,sha256=fPLPBJlrgV1_sRugirbNDCunqxqFB-CjV9TvCThU4nU,29686
+ praisonai/ui/tools.md,sha256=Ad3YH_ZCLMWlz3mDXllQnQ_S5l55LWqLdcZSh-EXrHI,3956
+ praisonai/upload_vision.py,sha256=lMpFn993UiYVJxRNZQTmcbPbEajQ5TFKCNGK1Icn_hg,5253
  praisonai/version.py,sha256=ugyuFliEqtAwQmH4sTlc16YXKYbFWDmfyk87fErB8-8,21
- praisonai-2.0.12.dist-info/LICENSE,sha256=kqvFysVlnFxYOu0HxCe2HlmZmJtdmNGOxWRRkT9TsWc,1035
- praisonai-2.0.12.dist-info/METADATA,sha256=QgU4n9umGXwi7q12a_XLgaoQtIOnHebzipJQ9k7kk4w,17346
- praisonai-2.0.12.dist-info/WHEEL,sha256=fmH-GaHMjP7_7QRgxgY6NpqU0nbYISt-cx_c84Y5gdY,106
- praisonai-2.0.12.dist-info/entry_points.txt,sha256=I_xc6a6MNTTfLxYmAxe0rgey0G-_hbY07oFW-ZDnkw4,135
- praisonai-2.0.12.dist-info/RECORD,,
+ praisonai-2.2.16.dist-info/METADATA,sha256=kkE0VtxjrUVoWNtqXDxS-oDhw9uqaZ66Jnhr8e0LQw4,4745
+ praisonai-2.2.16.dist-info/WHEEL,sha256=JifHWJGcCZRcCMb8CiAEwuMzhj7UT5Vox52jICNmydU,106
+ praisonai-2.2.16.dist-info/entry_points.txt,sha256=I_xc6a6MNTTfLxYmAxe0rgey0G-_hbY07oFW-ZDnkw4,135
+ praisonai-2.2.16.dist-info/RECORD,,
{praisonai-2.0.12.dist-info → praisonai-2.2.16.dist-info}/WHEEL CHANGED
@@ -1,4 +1,4 @@
  Wheel-Version: 1.0
- Generator: poetry-core 1.9.1
+ Generator: poetry-core 2.1.3
  Root-Is-Purelib: false
  Tag: cp311-cp311-macosx_15_0_arm64