swarms 7.6.1__py3-none-any.whl → 7.6.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. swarms/__init__.py +1 -0
  2. swarms/agents/__init__.py +4 -5
  3. swarms/agents/flexion_agent.py +2 -1
  4. swarms/agents/reasoning_agents.py +10 -0
  5. swarms/client/__init__.py +15 -0
  6. swarms/prompts/multi_agent_collab_prompt.py +313 -0
  7. swarms/structs/__init__.py +10 -17
  8. swarms/structs/agent.py +178 -262
  9. swarms/structs/base_swarm.py +0 -7
  10. swarms/structs/concurrent_workflow.py +2 -2
  11. swarms/structs/conversation.py +16 -2
  12. swarms/structs/de_hallucination_swarm.py +8 -4
  13. swarms/structs/dynamic_conversational_swarm.py +226 -0
  14. swarms/structs/groupchat.py +80 -84
  15. swarms/structs/hiearchical_swarm.py +1 -1
  16. swarms/structs/hybrid_hiearchical_peer_swarm.py +256 -0
  17. swarms/structs/majority_voting.py +1 -1
  18. swarms/structs/mixture_of_agents.py +1 -1
  19. swarms/structs/multi_agent_exec.py +63 -139
  20. swarms/structs/multi_agent_orchestrator.py +1 -1
  21. swarms/structs/output_types.py +3 -0
  22. swarms/structs/rearrange.py +66 -205
  23. swarms/structs/sequential_workflow.py +34 -47
  24. swarms/structs/swarm_router.py +3 -2
  25. swarms/telemetry/bootup.py +19 -38
  26. swarms/telemetry/main.py +62 -22
  27. swarms/tools/tool_schema_base_model.py +57 -0
  28. swarms/utils/auto_download_check_packages.py +2 -2
  29. swarms/utils/disable_logging.py +0 -17
  30. swarms/utils/history_output_formatter.py +8 -3
  31. swarms/utils/litellm_wrapper.py +117 -1
  32. {swarms-7.6.1.dist-info → swarms-7.6.4.dist-info}/METADATA +1 -5
  33. {swarms-7.6.1.dist-info → swarms-7.6.4.dist-info}/RECORD +37 -37
  34. swarms/structs/agent_security.py +0 -318
  35. swarms/structs/airflow_swarm.py +0 -430
  36. swarms/structs/output_type.py +0 -18
  37. swarms/utils/agent_ops_check.py +0 -26
  38. swarms/utils/pandas_utils.py +0 -92
  39. /swarms/{structs/swarms_api.py → client/main.py} +0 -0
  40. {swarms-7.6.1.dist-info → swarms-7.6.4.dist-info}/LICENSE +0 -0
  41. {swarms-7.6.1.dist-info → swarms-7.6.4.dist-info}/WHEEL +0 -0
  42. {swarms-7.6.1.dist-info → swarms-7.6.4.dist-info}/entry_points.txt +0 -0
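Note on the relocation in item 39: swarms/structs/swarms_api.py moves to swarms/client/main.py, and item 5 adds a new swarms/client/__init__.py that presumably re-exports it. A minimal compatibility sketch for downstream imports, assuming the rename is the only change; SwarmsAPIClient is a placeholder name, since this diff does not show the module's actual exports:

# Placeholder import shim for the swarms_api.py -> client/main.py rename.
try:
    from swarms.client.main import SwarmsAPIClient  # 7.6.4+ location
except ImportError:  # pre-7.6.4 releases
    from swarms.structs.swarms_api import SwarmsAPIClient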
swarms/structs/airflow_swarm.py
@@ -1,430 +0,0 @@
- import subprocess
- import sys
- import uuid
- import threading
- from concurrent.futures import (
-     FIRST_COMPLETED,
-     ThreadPoolExecutor,
-     wait,
- )
- from dataclasses import dataclass
- from datetime import datetime, timedelta
- from enum import Enum
- from typing import Any, Dict, List, Optional, Set, Union
- from graphviz import Digraph
- from loguru import logger
-
- # Airflow imports
- try:
-     from airflow import DAG
-     from airflow.operators.python import PythonOperator
- except ImportError:
-     logger.error(
-         "Airflow is not installed. Please install it using 'pip install apache-airflow'."
-     )
-     subprocess.run(
-         [sys.executable, "-m", "pip", "install", "apache-airflow"]
-     )
-     from airflow import DAG
-     from airflow.operators.python import PythonOperator
-
- # Import the real Agent from swarms.
- from swarms.structs.conversation import Conversation
-
-
- class NodeType(Enum):
-     AGENT = "agent"
-     CALLABLE = "callable"
-     TOOL = "tool"
-
-
- def dag_id():
-     return uuid.uuid4().hex
-
-
- @dataclass
- class Node:
-     """Represents a node in the DAG"""
-
-     id: str
-     type: NodeType
-     component: Any  # Agent, Callable, or Tool
-     query: Optional[str] = None
-     args: Optional[List[Any]] = None
-     kwargs: Optional[Dict[str, Any]] = None
-     concurrent: bool = False
-
-
- # ======= Airflow DAG Swarm Class =========
- class AirflowDAGSwarm:
-     """
-     A simplified and more intuitive DAG-based swarm for orchestrating agents, callables, and tools.
-     Provides an easy-to-use API for building agent pipelines with support for concurrent execution.
-     """
-
-     def __init__(
-         self,
-         dag_id: str = dag_id(),
-         description: str = "A DAG Swarm for Airflow",
-         name: str = "Airflow DAG Swarm",
-         schedule_interval: Union[timedelta, str] = timedelta(days=1),
-         start_date: datetime = datetime(2025, 2, 14),
-         default_args: Optional[Dict[str, Any]] = None,
-         initial_message: Optional[str] = None,
-         max_workers: int = 5,
-     ):
-         """Initialize the AirflowDAGSwarm with improved configuration."""
-         self.dag_id = dag_id
-         self.name = name
-         self.description = description
-         self.max_workers = max_workers
-
-         self.default_args = default_args or {
-             "owner": "airflow",
-             "depends_on_past": False,
-             "email_on_failure": False,
-             "email_on_retry": False,
-             "retries": 1,
-             "retry_delay": timedelta(minutes=5),
-         }
-
-         self.dag = DAG(
-             dag_id=dag_id,
-             default_args=self.default_args,
-             schedule_interval=schedule_interval,
-             start_date=start_date,
-             catchup=False,
-         )
-
-         self.nodes: Dict[str, Node] = {}
-         self.edges: Dict[str, Set[str]] = (
-             {}
-         )  # node_id -> set of child node_ids
-
-         # Initialize conversation
-         self.conversation = Conversation()
-         if initial_message:
-             self.conversation.add("user", initial_message)
-
-         self.lock = threading.Lock()
-
-     def add_user_message(self, message: str) -> None:
-         """Add a user message to the conversation."""
-         with self.lock:
-             self.conversation.add("user", message)
-             logger.info(f"Added user message: {message}")
-
-     def get_conversation_history(self) -> str:
-         """Get the conversation history as JSON."""
-         return self.conversation.to_json()
-
-     def add_node(
-         self,
-         node_id: str,
-         component: Any,
-         node_type: NodeType,
-         query: Optional[str] = None,
-         args: Optional[List[Any]] = None,
-         kwargs: Optional[Dict[str, Any]] = None,
-         concurrent: bool = False,
-     ) -> str:
-         """
-         Add a node to the DAG with improved type checking and validation.
-
-         Args:
-             node_id: Unique identifier for the node
-             component: Agent, callable, or tool to execute
-             node_type: Type of the node (AGENT, CALLABLE, or TOOL)
-             query: Query string for agents
-             args: Positional arguments for callables/tools
-             kwargs: Keyword arguments for callables/tools
-             concurrent: Whether to execute this node concurrently
-
-         Returns:
-             node_id: The ID of the created node
-         """
-         if node_id in self.nodes:
-             raise ValueError(f"Node with ID {node_id} already exists")
-
-         if node_type == NodeType.AGENT and not hasattr(
-             component, "run"
-         ):
-             raise ValueError("Agent must have a 'run' method")
-         elif node_type in (
-             NodeType.CALLABLE,
-             NodeType.TOOL,
-         ) and not callable(component):
-             raise ValueError(f"{node_type.value} must be callable")
-
-         node = Node(
-             id=node_id,
-             type=node_type,
-             component=component,
-             query=query,
-             args=args or [],
-             kwargs=kwargs or {},
-             concurrent=concurrent,
-         )
-
-         self.nodes[node_id] = node
-         self.edges[node_id] = set()
-         logger.info(f"Added {node_type.value} node: {node_id}")
-         return node_id
-
-     def add_edge(self, from_node: str, to_node: str) -> None:
-         """
-         Add a directed edge between two nodes in the DAG.
-
-         Args:
-             from_node: ID of the source node
-             to_node: ID of the target node
-         """
-         if from_node not in self.nodes or to_node not in self.nodes:
-             raise ValueError("Both nodes must exist in the DAG")
-
-         self.edges[from_node].add(to_node)
-         logger.info(f"Added edge: {from_node} -> {to_node}")
-
-     def _execute_node(self, node: Node) -> str:
-         """Execute a single node and return its output."""
-         try:
-             if node.type == NodeType.AGENT:
-                 query = (
-                     node.query
-                     or self.conversation.get_last_message_as_string()
-                     or "Default query"
-                 )
-                 logger.info(
-                     f"Executing agent node {node.id} with query: {query}"
-                 )
-                 return node.component.run(query)
-
-             elif node.type in (NodeType.CALLABLE, NodeType.TOOL):
-                 logger.info(
-                     f"Executing {node.type.value} node {node.id}"
-                 )
-                 return node.component(
-                     *node.args,
-                     conversation=self.conversation,
-                     **node.kwargs,
-                 )
-         except Exception as e:
-             logger.exception(f"Error executing node {node.id}: {e}")
-             return f"Error in node {node.id}: {str(e)}"
-
-     def _get_root_nodes(self) -> List[str]:
-         """Get nodes with no incoming edges."""
-         all_nodes = set(self.nodes.keys())
-         nodes_with_incoming = {
-             node for edges in self.edges.values() for node in edges
-         }
-         return list(all_nodes - nodes_with_incoming)
-
-     def run(self, **context: Any) -> str:
-         """
-         Execute the DAG with improved concurrency handling and error recovery.
-
-         Returns:
-             The final conversation state as a JSON string
-         """
-         logger.info("Starting swarm execution")
-
-         # Track completed nodes and their results
-         completed: Dict[str, str] = {}
-
-         def can_execute_node(node_id: str) -> bool:
-             """Check if all dependencies of a node are completed."""
-             return all(
-                 dep in completed
-                 for dep_set in self.edges.values()
-                 for dep in dep_set
-                 if node_id in dep_set
-             )
-
-         with ThreadPoolExecutor(
-             max_workers=self.max_workers
-         ) as executor:
-             # Initialize futures dict for concurrent root nodes
-             futures_dict = {
-                 executor.submit(
-                     self._execute_node, self.nodes[node_id]
-                 ): node_id
-                 for node_id in self._get_root_nodes()
-                 if self.nodes[node_id].concurrent
-             }
-
-             # Execute nodes that shouldn't run concurrently
-             for node_id in self._get_root_nodes():
-                 if not self.nodes[node_id].concurrent:
-                     output = self._execute_node(self.nodes[node_id])
-                     with self.lock:
-                         completed[node_id] = output
-                         self.conversation.add("assistant", output)
-
-             # Process remaining nodes
-             while futures_dict:
-                 done, _ = wait(
-                     futures_dict.keys(), return_when=FIRST_COMPLETED
-                 )
-
-                 for future in done:
-                     node_id = futures_dict.pop(future)
-                     try:
-                         output = future.result()
-                         with self.lock:
-                             completed[node_id] = output
-                             self.conversation.add("assistant", output)
-                     except Exception as e:
-                         logger.exception(
-                             f"Error in node {node_id}: {e}"
-                         )
-                         completed[node_id] = f"Error: {str(e)}"
-
-                 # Add new nodes that are ready to execute
-                 new_nodes = [
-                     node_id
-                     for node_id in self.nodes
-                     if node_id not in completed
-                     and can_execute_node(node_id)
-                 ]
-
-                 for node_id in new_nodes:
-                     if self.nodes[node_id].concurrent:
-                         future = executor.submit(
-                             self._execute_node,
-                             self.nodes[node_id],
-                         )
-                         futures_dict[future] = node_id
-                     else:
-                         output = self._execute_node(
-                             self.nodes[node_id]
-                         )
-                         with self.lock:
-                             completed[node_id] = output
-                             self.conversation.add(
-                                 "assistant", output
-                             )
-
-         return self.conversation.to_json()
-
-     def visualize(
-         self, filename: str = "dag_visualization", view: bool = True
-     ) -> Digraph:
-         """
-         Generate a visualization of the DAG structure.
-
-         Args:
-             filename: Output filename for the visualization
-             view: Whether to open the visualization
-
-         Returns:
-             Graphviz Digraph object
-         """
-         dot = Digraph(comment=f"DAG Visualization: {self.name}")
-
-         # Add nodes
-         for node_id, node in self.nodes.items():
-             label = f"{node_id}\n({node.type.value})"
-             shape = "box" if node.concurrent else "ellipse"
-             dot.node(node_id, label, shape=shape)
-
-         # Add edges
-         for from_node, to_nodes in self.edges.items():
-             for to_node in to_nodes:
-                 dot.edge(from_node, to_node)
-
-         dot.render(filename, view=view, format="pdf")
-         return dot
-
-     def create_dag(self) -> DAG:
-         """
-         Create an Airflow DAG with a single PythonOperator that executes the entire swarm.
-         In a production environment, you might break the components into multiple tasks.
-
-         :return: The configured Airflow DAG.
-         """
-         logger.info("Creating Airflow DAG for swarm execution.")
-         PythonOperator(
-             task_id="run",
-             python_callable=self.run,
-             op_kwargs={
-                 "concurrent": False
-             },  # Change to True for concurrent execution.
-             dag=self.dag,
-         )
-         return self.dag
-
-
- # # ======= Example Usage =========
- # if __name__ == "__main__":
- #     # Configure logger to output to console.
- #     logger.remove()
- #     logger.add(lambda msg: print(msg, end=""), level="DEBUG")
-
- #     # Create the DAG swarm with an initial message
- #     swarm = AirflowDAGSwarm(
- #         dag_id="swarm_conversation_dag",
- #         initial_message="Hello, how can I help you with financial planning?",
- #     )
-
- #     # Create a real financial agent using the swarms package.
- #     financial_agent = Agent(
- #         agent_name="Financial-Analysis-Agent",
- #         system_prompt=FINANCIAL_AGENT_SYS_PROMPT,
- #         model_name="gpt-4o-mini",
- #         max_loops=1,
- #     )
-
- #     # Add the real agent with a specific query.
- #     swarm.add_node(
- #         "financial_advisor",
- #         financial_agent,
- #         NodeType.AGENT,
- #         query="How can I establish a ROTH IRA to buy stocks and get a tax break? What are the criteria",
- #         concurrent=True,
- #     )
-
- #     # Add a callable component.
- #     def extra_processing(x: int, conversation: Conversation) -> str:
- #         return f"Extra processing output with data {x} and conversation length {len(conversation.messages)}"
-
- #     swarm.add_node(
- #         "extra_processing",
- #         extra_processing,
- #         NodeType.CALLABLE,
- #         args=[42],
- #         concurrent=True,
- #     )
-
- #     # Add a tool component (for example, a tool to create a conversation graph).
- #     def create_conversation_graph(conversation: Conversation) -> str:
- #         # In this tool, we generate the graph and return a confirmation message.
- #         swarm.visualize(
- #             filename="swarm_conversation_tool_graph", view=False
- #         )
- #         return "Graph created."
-
- #     swarm.add_node(
- #         "conversation_graph",
- #         create_conversation_graph,
- #         NodeType.TOOL,
- #         concurrent=False,
- #     )
-
- #     # Add edges to create the pipeline
- #     swarm.add_edge("financial_advisor", "extra_processing")
- #     swarm.add_edge("extra_processing", "conversation_graph")
-
- #     # Execute the swarm
- #     final_state = swarm.run()
- #     logger.info(f"Final conversation: {final_state}")
-
- #     # Visualize the DAG
- #     print(
- #         swarm.visualize(
- #             filename="swarm_conversation_final", view=False
- #         )
- #     )
-
- #     # Create the Airflow DAG.
- #     dag = swarm.create_dag()
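Since airflow_swarm.py is deleted in 7.6.4 with no replacement in the file list, callers lose the AirflowDAGSwarm wrapper entirely. The thinnest migration is to register an agent run directly on an Airflow DAG, which is essentially all create_dag() did above. A hedged sketch assuming apache-airflow 2.x and the swarms Agent constructor arguments shown in the commented example; the task and agent names are illustrative:

from datetime import datetime, timedelta

from airflow import DAG
from airflow.operators.python import PythonOperator


def run_financial_agent(**context) -> str:
    # Build the agent inside the task so the DAG file parses even where
    # swarms is not installed on the scheduler.
    from swarms.structs.agent import Agent

    agent = Agent(
        agent_name="Financial-Analysis-Agent",
        model_name="gpt-4o-mini",
        max_loops=1,
    )
    return agent.run("How can I establish a ROTH IRA to buy stocks and get a tax break?")


dag = DAG(
    dag_id="swarm_conversation_dag",
    start_date=datetime(2025, 2, 14),
    schedule_interval=timedelta(days=1),
    catchup=False,
)

PythonOperator(task_id="run", python_callable=run_financial_agent, dag=dag)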
swarms/structs/output_type.py
@@ -1,18 +0,0 @@
- from typing import Literal
-
- # Define the output_type using Literal
- OutputType = Literal[
-     "all",
-     "final",
-     "list",
-     "dict",
-     ".json",
-     ".md",
-     ".txt",
-     ".yaml",
-     ".toml",
-     "str",
- ]
-
- # Use the OutputType for type annotations
- output_type: OutputType
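This removal looks like a relocation rather than a loss: item 21 adds three lines to a new swarms/structs/output_types.py, which presumably carries an equivalent OutputType alias. A sketch of how such a Literal is consumed, under that assumption; format_history is a hypothetical consumer, not an API confirmed by this diff:

from typing import Literal

OutputType = Literal[
    "all", "final", "list", "dict",
    ".json", ".md", ".txt", ".yaml", ".toml", "str",
]


def format_history(messages: list, output_type: OutputType = "final"):
    # Static checkers reject calls like format_history(msgs, "finale"),
    # since the string is not a member of the Literal.
    if output_type == "final":
        return messages[-1] if messages else ""
    if output_type in ("all", "list"):
        return messages
    return str(messages)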
swarms/utils/agent_ops_check.py
@@ -1,26 +0,0 @@
- from swarms.utils.loguru_logger import logger
- import os
-
-
- def try_import_agentops(*args, **kwargs):
-     try:
-         logger.info("Trying to import agentops")
-         import agentops
-
-         agentops.init(os.getenv("AGENTOPS_API_KEY"), *args, **kwargs)
-
-         return "agentops imported successfully."
-     except ImportError:
-         logger.error("Could not import agentops")
-
-
- def end_session_agentops():
-     try:
-         logger.info("Trying to end session")
-         import agentops
-
-         agentops.end_session("Success")
-         return "Session ended successfully."
-     except ImportError:
-         logger.error("Could not import agentops")
-         return "Could not end session."
swarms/utils/pandas_utils.py
@@ -1,92 +0,0 @@
- import subprocess
- from typing import Any, Dict, List
-
- from swarms.utils.loguru_logger import initialize_logger
-
- from pydantic import BaseModel
-
- from swarms.structs.agent import Agent
-
- logger = initialize_logger(log_folder="pandas_utils")
-
-
- def display_agents_info(agents: List[Agent]) -> None:
-     """
-     Displays information about all agents in a list using a DataFrame.
-
-     :param agents: List of Agent instances.
-     """
-     # Extracting relevant information from each agent
-
-     try:
-         import pandas as pd
-     except ImportError:
-         logger.error("Failed to import pandas")
-         subprocess.run(["pip", "install", "pandas"])
-         import pandas as pd
-
-     agent_data = []
-     for agent in agents:
-         try:
-             agent_info = {
-                 "ID": agent.id,
-                 "Name": agent.agent_name,
-                 "Description": agent.description,
-                 "max_loops": agent.max_loops,
-                 # "Docs": agent.docs,
-                 "System Prompt": agent.system_prompt,
-                 "LLM Model": agent.llm.model_name,  # type: ignore
-             }
-             agent_data.append(agent_info)
-         except AttributeError as e:
-             logger.error(
-                 f"Failed to extract information from agent {agent}: {e}"
-             )
-             continue
-
-     # Creating a DataFrame to display the data
-     try:
-         df = pd.DataFrame(agent_data)
-     except Exception as e:
-         logger.error(f"Failed to create DataFrame: {e}")
-         return
-
-     # Displaying the DataFrame
-     try:
-         print(df)
-     except Exception as e:
-         logger.error(f"Failed to print DataFrame: {e}")
-
-
- def dict_to_dataframe(data: Dict[str, Any]):
-     """
-     Converts a dictionary into a pandas DataFrame.
-
-     :param data: Dictionary to convert.
-     :return: A pandas DataFrame representation of the dictionary.
-     """
-     try:
-         import pandas as pd
-     except ImportError:
-         logger.error("Failed to import pandas")
-         subprocess.run(["pip", "install", "pandas"])
-         import pandas as pd
-
-     # Convert dictionary to DataFrame
-     df = pd.json_normalize(data)
-     return df
-
-
- def pydantic_model_to_dataframe(model: BaseModel) -> any:
-     """
-     Converts a Pydantic Base Model into a pandas DataFrame.
-
-     :param model: Pydantic Base Model to convert.
-     :return: A pandas DataFrame representation of the Pydantic model.
-     """
-     # Convert Pydantic model to dictionary
-     model_dict = model.dict()
-
-     # Convert dictionary to DataFrame
-     df = dict_to_dataframe(model_dict)
-     return df
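pandas_utils.py is likewise removed with no replacement in the file list, so downstream code that kept using these helpers needs local copies. A minimal sketch assuming pandas is installed up front (rather than auto-installed at call time) and pydantic v2, whose model_dump() supersedes the deprecated .dict() call seen above:

from typing import Any, Dict

import pandas as pd
from pydantic import BaseModel


def dict_to_dataframe(data: Dict[str, Any]) -> pd.DataFrame:
    # json_normalize flattens nested keys into dotted column names.
    return pd.json_normalize(data)


def pydantic_model_to_dataframe(model: BaseModel) -> pd.DataFrame:
    # pydantic v2: model_dump() replaces the v1 .dict() used by the removed helper.
    return dict_to_dataframe(model.model_dump())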