swarms-7.6.0-py3-none-any.whl → swarms-7.6.2-py3-none-any.whl
This diff shows the changes between two publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
- swarms/agents/__init__.py +9 -2
- swarms/agents/agent_judge.py +119 -0
- swarms/agents/flexion_agent.py +625 -0
- swarms/agents/gkp_agent.py +581 -0
- swarms/agents/reasoning_agents.py +32 -0
- swarms/prompts/agent_judge_prompt.py +38 -0
- swarms/structs/__init__.py +7 -2
- swarms/structs/agent.py +37 -8
- swarms/structs/agent_builder.py +6 -8
- swarms/structs/concurrent_workflow.py +1 -1
- swarms/structs/deep_research_swarm.py +482 -0
- swarms/structs/dynamic_conversational_swarm.py +226 -0
- swarms/structs/hiearchical_swarm.py +1 -1
- swarms/structs/hybrid_hiearchical_peer_swarm.py +273 -0
- swarms/structs/majority_voting.py +1 -1
- swarms/structs/mixture_of_agents.py +1 -1
- swarms/structs/multi_agent_orchestrator.py +1 -1
- swarms/structs/output_types.py +3 -0
- swarms/structs/rearrange.py +1 -1
- swarms/structs/sequential_workflow.py +1 -1
- swarms/structs/swarm_router.py +12 -1
- swarms/structs/swarms_api.py +1 -1
- swarms/telemetry/main.py +7 -3
- swarms/tools/mcp_integration.py +554 -0
- swarms/tools/tool_schema_base_model.py +57 -0
- {swarms-7.6.0.dist-info → swarms-7.6.2.dist-info}/METADATA +79 -1
- {swarms-7.6.0.dist-info → swarms-7.6.2.dist-info}/RECORD +30 -24
- swarms/structs/agent_security.py +0 -318
- swarms/structs/airflow_swarm.py +0 -430
- swarms/structs/output_type.py +0 -18
- {swarms-7.6.0.dist-info → swarms-7.6.2.dist-info}/LICENSE +0 -0
- {swarms-7.6.0.dist-info → swarms-7.6.2.dist-info}/WHEEL +0 -0
- {swarms-7.6.0.dist-info → swarms-7.6.2.dist-info}/entry_points.txt +0 -0
swarms/structs/airflow_swarm.py
DELETED
@@ -1,430 +0,0 @@
-import subprocess
-import sys
-import uuid
-import threading
-from concurrent.futures import (
-    FIRST_COMPLETED,
-    ThreadPoolExecutor,
-    wait,
-)
-from dataclasses import dataclass
-from datetime import datetime, timedelta
-from enum import Enum
-from typing import Any, Dict, List, Optional, Set, Union
-from graphviz import Digraph
-from loguru import logger
-
-# Airflow imports
-try:
-    from airflow import DAG
-    from airflow.operators.python import PythonOperator
-except ImportError:
-    logger.error(
-        "Airflow is not installed. Please install it using 'pip install apache-airflow'."
-    )
-    subprocess.run(
-        [sys.executable, "-m", "pip", "install", "apache-airflow"]
-    )
-    from airflow import DAG
-    from airflow.operators.python import PythonOperator
-
-# Import the real Agent from swarms.
-from swarms.structs.conversation import Conversation
-
-
-class NodeType(Enum):
-    AGENT = "agent"
-    CALLABLE = "callable"
-    TOOL = "tool"
-
-
-def dag_id():
-    return uuid.uuid4().hex
-
-
-@dataclass
-class Node:
-    """Represents a node in the DAG"""
-
-    id: str
-    type: NodeType
-    component: Any  # Agent, Callable, or Tool
-    query: Optional[str] = None
-    args: Optional[List[Any]] = None
-    kwargs: Optional[Dict[str, Any]] = None
-    concurrent: bool = False
-
-
-# ======= Airflow DAG Swarm Class =========
-class AirflowDAGSwarm:
-    """
-    A simplified and more intuitive DAG-based swarm for orchestrating agents, callables, and tools.
-    Provides an easy-to-use API for building agent pipelines with support for concurrent execution.
-    """
-
-    def __init__(
-        self,
-        dag_id: str = dag_id(),
-        description: str = "A DAG Swarm for Airflow",
-        name: str = "Airflow DAG Swarm",
-        schedule_interval: Union[timedelta, str] = timedelta(days=1),
-        start_date: datetime = datetime(2025, 2, 14),
-        default_args: Optional[Dict[str, Any]] = None,
-        initial_message: Optional[str] = None,
-        max_workers: int = 5,
-    ):
-        """Initialize the AirflowDAGSwarm with improved configuration."""
-        self.dag_id = dag_id
-        self.name = name
-        self.description = description
-        self.max_workers = max_workers
-
-        self.default_args = default_args or {
-            "owner": "airflow",
-            "depends_on_past": False,
-            "email_on_failure": False,
-            "email_on_retry": False,
-            "retries": 1,
-            "retry_delay": timedelta(minutes=5),
-        }
-
-        self.dag = DAG(
-            dag_id=dag_id,
-            default_args=self.default_args,
-            schedule_interval=schedule_interval,
-            start_date=start_date,
-            catchup=False,
-        )
-
-        self.nodes: Dict[str, Node] = {}
-        self.edges: Dict[str, Set[str]] = (
-            {}
-        )  # node_id -> set of child node_ids
-
-        # Initialize conversation
-        self.conversation = Conversation()
-        if initial_message:
-            self.conversation.add("user", initial_message)
-
-        self.lock = threading.Lock()
-
-    def add_user_message(self, message: str) -> None:
-        """Add a user message to the conversation."""
-        with self.lock:
-            self.conversation.add("user", message)
-            logger.info(f"Added user message: {message}")
-
-    def get_conversation_history(self) -> str:
-        """Get the conversation history as JSON."""
-        return self.conversation.to_json()
-
-    def add_node(
-        self,
-        node_id: str,
-        component: Any,
-        node_type: NodeType,
-        query: Optional[str] = None,
-        args: Optional[List[Any]] = None,
-        kwargs: Optional[Dict[str, Any]] = None,
-        concurrent: bool = False,
-    ) -> str:
-        """
-        Add a node to the DAG with improved type checking and validation.
-
-        Args:
-            node_id: Unique identifier for the node
-            component: Agent, callable, or tool to execute
-            node_type: Type of the node (AGENT, CALLABLE, or TOOL)
-            query: Query string for agents
-            args: Positional arguments for callables/tools
-            kwargs: Keyword arguments for callables/tools
-            concurrent: Whether to execute this node concurrently
-
-        Returns:
-            node_id: The ID of the created node
-        """
-        if node_id in self.nodes:
-            raise ValueError(f"Node with ID {node_id} already exists")
-
-        if node_type == NodeType.AGENT and not hasattr(
-            component, "run"
-        ):
-            raise ValueError("Agent must have a 'run' method")
-        elif node_type in (
-            NodeType.CALLABLE,
-            NodeType.TOOL,
-        ) and not callable(component):
-            raise ValueError(f"{node_type.value} must be callable")
-
-        node = Node(
-            id=node_id,
-            type=node_type,
-            component=component,
-            query=query,
-            args=args or [],
-            kwargs=kwargs or {},
-            concurrent=concurrent,
-        )
-
-        self.nodes[node_id] = node
-        self.edges[node_id] = set()
-        logger.info(f"Added {node_type.value} node: {node_id}")
-        return node_id
-
-    def add_edge(self, from_node: str, to_node: str) -> None:
-        """
-        Add a directed edge between two nodes in the DAG.
-
-        Args:
-            from_node: ID of the source node
-            to_node: ID of the target node
-        """
-        if from_node not in self.nodes or to_node not in self.nodes:
-            raise ValueError("Both nodes must exist in the DAG")
-
-        self.edges[from_node].add(to_node)
-        logger.info(f"Added edge: {from_node} -> {to_node}")
-
-    def _execute_node(self, node: Node) -> str:
-        """Execute a single node and return its output."""
-        try:
-            if node.type == NodeType.AGENT:
-                query = (
-                    node.query
-                    or self.conversation.get_last_message_as_string()
-                    or "Default query"
-                )
-                logger.info(
-                    f"Executing agent node {node.id} with query: {query}"
-                )
-                return node.component.run(query)
-
-            elif node.type in (NodeType.CALLABLE, NodeType.TOOL):
-                logger.info(
-                    f"Executing {node.type.value} node {node.id}"
-                )
-                return node.component(
-                    *node.args,
-                    conversation=self.conversation,
-                    **node.kwargs,
-                )
-        except Exception as e:
-            logger.exception(f"Error executing node {node.id}: {e}")
-            return f"Error in node {node.id}: {str(e)}"
-
-    def _get_root_nodes(self) -> List[str]:
-        """Get nodes with no incoming edges."""
-        all_nodes = set(self.nodes.keys())
-        nodes_with_incoming = {
-            node for edges in self.edges.values() for node in edges
-        }
-        return list(all_nodes - nodes_with_incoming)
-
-    def run(self, **context: Any) -> str:
-        """
-        Execute the DAG with improved concurrency handling and error recovery.
-
-        Returns:
-            The final conversation state as a JSON string
-        """
-        logger.info("Starting swarm execution")
-
-        # Track completed nodes and their results
-        completed: Dict[str, str] = {}
-
-        def can_execute_node(node_id: str) -> bool:
-            """Check if all dependencies of a node are completed."""
-            return all(
-                dep in completed
-                for dep_set in self.edges.values()
-                for dep in dep_set
-                if node_id in dep_set
-            )
-
-        with ThreadPoolExecutor(
-            max_workers=self.max_workers
-        ) as executor:
-            # Initialize futures dict for concurrent root nodes
-            futures_dict = {
-                executor.submit(
-                    self._execute_node, self.nodes[node_id]
-                ): node_id
-                for node_id in self._get_root_nodes()
-                if self.nodes[node_id].concurrent
-            }
-
-            # Execute nodes that shouldn't run concurrently
-            for node_id in self._get_root_nodes():
-                if not self.nodes[node_id].concurrent:
-                    output = self._execute_node(self.nodes[node_id])
-                    with self.lock:
-                        completed[node_id] = output
-                        self.conversation.add("assistant", output)
-
-            # Process remaining nodes
-            while futures_dict:
-                done, _ = wait(
-                    futures_dict.keys(), return_when=FIRST_COMPLETED
-                )
-
-                for future in done:
-                    node_id = futures_dict.pop(future)
-                    try:
-                        output = future.result()
-                        with self.lock:
-                            completed[node_id] = output
-                            self.conversation.add("assistant", output)
-                    except Exception as e:
-                        logger.exception(
-                            f"Error in node {node_id}: {e}"
-                        )
-                        completed[node_id] = f"Error: {str(e)}"
-
-                # Add new nodes that are ready to execute
-                new_nodes = [
-                    node_id
-                    for node_id in self.nodes
-                    if node_id not in completed
-                    and can_execute_node(node_id)
-                ]
-
-                for node_id in new_nodes:
-                    if self.nodes[node_id].concurrent:
-                        future = executor.submit(
-                            self._execute_node,
-                            self.nodes[node_id],
-                        )
-                        futures_dict[future] = node_id
-                    else:
-                        output = self._execute_node(
-                            self.nodes[node_id]
-                        )
-                        with self.lock:
-                            completed[node_id] = output
-                            self.conversation.add(
-                                "assistant", output
-                            )
-
-        return self.conversation.to_json()
-
-    def visualize(
-        self, filename: str = "dag_visualization", view: bool = True
-    ) -> Digraph:
-        """
-        Generate a visualization of the DAG structure.
-
-        Args:
-            filename: Output filename for the visualization
-            view: Whether to open the visualization
-
-        Returns:
-            Graphviz Digraph object
-        """
-        dot = Digraph(comment=f"DAG Visualization: {self.name}")
-
-        # Add nodes
-        for node_id, node in self.nodes.items():
-            label = f"{node_id}\n({node.type.value})"
-            shape = "box" if node.concurrent else "ellipse"
-            dot.node(node_id, label, shape=shape)
-
-        # Add edges
-        for from_node, to_nodes in self.edges.items():
-            for to_node in to_nodes:
-                dot.edge(from_node, to_node)
-
-        dot.render(filename, view=view, format="pdf")
-        return dot
-
-    def create_dag(self) -> DAG:
-        """
-        Create an Airflow DAG with a single PythonOperator that executes the entire swarm.
-        In a production environment, you might break the components into multiple tasks.
-
-        :return: The configured Airflow DAG.
-        """
-        logger.info("Creating Airflow DAG for swarm execution.")
-        PythonOperator(
-            task_id="run",
-            python_callable=self.run,
-            op_kwargs={
-                "concurrent": False
-            },  # Change to True for concurrent execution.
-            dag=self.dag,
-        )
-        return self.dag
-
-
-# # ======= Example Usage =========
-# if __name__ == "__main__":
-#     # Configure logger to output to console.
-#     logger.remove()
-#     logger.add(lambda msg: print(msg, end=""), level="DEBUG")
-
-#     # Create the DAG swarm with an initial message
-#     swarm = AirflowDAGSwarm(
-#         dag_id="swarm_conversation_dag",
-#         initial_message="Hello, how can I help you with financial planning?",
-#     )
-
-#     # Create a real financial agent using the swarms package.
-#     financial_agent = Agent(
-#         agent_name="Financial-Analysis-Agent",
-#         system_prompt=FINANCIAL_AGENT_SYS_PROMPT,
-#         model_name="gpt-4o-mini",
-#         max_loops=1,
-#     )
-
-#     # Add the real agent with a specific query.
-#     swarm.add_node(
-#         "financial_advisor",
-#         financial_agent,
-#         NodeType.AGENT,
-#         query="How can I establish a ROTH IRA to buy stocks and get a tax break? What are the criteria",
-#         concurrent=True,
-#     )
-
-#     # Add a callable component.
-#     def extra_processing(x: int, conversation: Conversation) -> str:
-#         return f"Extra processing output with data {x} and conversation length {len(conversation.messages)}"
-
-#     swarm.add_node(
-#         "extra_processing",
-#         extra_processing,
-#         NodeType.CALLABLE,
-#         args=[42],
-#         concurrent=True,
-#     )
-
-#     # Add a tool component (for example, a tool to create a conversation graph).
-#     def create_conversation_graph(conversation: Conversation) -> str:
-#         # In this tool, we generate the graph and return a confirmation message.
-#         swarm.visualize(
-#             filename="swarm_conversation_tool_graph", view=False
-#         )
-#         return "Graph created."
-
-#     swarm.add_node(
-#         "conversation_graph",
-#         create_conversation_graph,
-#         NodeType.TOOL,
-#         concurrent=False,
-#     )
-
-#     # Add edges to create the pipeline
-#     swarm.add_edge("financial_advisor", "extra_processing")
-#     swarm.add_edge("extra_processing", "conversation_graph")
-
-#     # Execute the swarm
-#     final_state = swarm.run()
-#     logger.info(f"Final conversation: {final_state}")
-
-#     # Visualize the DAG
-#     print(
-#         swarm.visualize(
-#             filename="swarm_conversation_final", view=False
-#         )
-#     )
-
-#     # Create the Airflow DAG.
-#     dag = swarm.create_dag()
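The run() method in the deleted module implements a completion-driven scheduler: concurrent root nodes are submitted to a ThreadPoolExecutor, the loop blocks on wait(..., return_when=FIRST_COMPLETED), and each finished future can unblock downstream nodes. A minimal standalone sketch of that pattern, stripped of the swarm machinery (the run_dag helper and the task and dependency names are illustrative, not part of the package):

from concurrent.futures import FIRST_COMPLETED, ThreadPoolExecutor, wait

def run_dag(tasks, deps, max_workers=5):
    """tasks: name -> zero-argument callable; deps: name -> set of prerequisite names."""
    completed = {}
    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        # Seed the pool with every task that has no prerequisites.
        futures = {
            executor.submit(fn): name
            for name, fn in tasks.items()
            if not deps.get(name)
        }
        while futures:
            # Block until at least one in-flight task finishes.
            done, _ = wait(futures, return_when=FIRST_COMPLETED)
            for future in done:
                completed[futures.pop(future)] = future.result()
            # Submit any task the finished ones have just unblocked.
            for name, fn in tasks.items():
                if (
                    name not in completed
                    and name not in futures.values()
                    and deps.get(name, set()) <= completed.keys()
                ):
                    futures[executor.submit(fn)] = name
    return completed

# run_dag({"a": lambda: 1, "b": lambda: 2}, {"b": {"a"}}) finishes "a" before "b".

Waking on FIRST_COMPLETED rather than waiting for a whole batch is what lets a long-running node overlap with newly unblocked ones, which is the behavior the deleted class advertised.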
swarms/structs/output_type.py
DELETED
@@ -1,18 +0,0 @@
-from typing import Literal
-
-# Define the output_type using Literal
-OutputType = Literal[
-    "all",
-    "final",
-    "list",
-    "dict",
-    ".json",
-    ".md",
-    ".txt",
-    ".yaml",
-    ".toml",
-    "str",
-]
-
-# Use the OutputType for type annotations
-output_type: OutputType
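This module pinned the legal output formats in a typing.Literal alias; per the file list at the top of the diff, a replacement swarms/structs/output_types.py (+3 lines) was added in 7.6.2. A short sketch of how such an alias constrains callers at type-check time (format_history is a hypothetical consumer, not part of the package):

from typing import List, Literal

OutputType = Literal[
    "all", "final", "list", "dict",
    ".json", ".md", ".txt", ".yaml", ".toml", "str",
]

def format_history(messages: List[str], output_type: OutputType = "final") -> str:
    # A static checker such as mypy rejects format_history(msgs, "csv"),
    # because "csv" is not one of the permitted literal values.
    if output_type == "all":
        return "\n".join(messages)
    return messages[-1] if messages else ""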
{swarms-7.6.0.dist-info → swarms-7.6.2.dist-info}/LICENSE
File without changes
{swarms-7.6.0.dist-info → swarms-7.6.2.dist-info}/WHEEL
File without changes
{swarms-7.6.0.dist-info → swarms-7.6.2.dist-info}/entry_points.txt
File without changes