swarms 7.7.1-py3-none-any.whl → 7.7.3-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- swarms/prompts/ag_prompt.py +51 -19
- swarms/prompts/agent_system_prompts.py +13 -4
- swarms/prompts/multi_agent_collab_prompt.py +18 -0
- swarms/prompts/prompt.py +6 -10
- swarms/schemas/__init__.py +0 -3
- swarms/structs/__init__.py +3 -8
- swarms/structs/agent.py +211 -163
- swarms/structs/aop.py +8 -1
- swarms/structs/auto_swarm_builder.py +271 -210
- swarms/structs/conversation.py +23 -56
- swarms/structs/hiearchical_swarm.py +93 -122
- swarms/structs/ma_utils.py +96 -0
- swarms/structs/mixture_of_agents.py +20 -103
- swarms/structs/{multi_agent_orchestrator.py → multi_agent_router.py} +32 -95
- swarms/structs/output_types.py +3 -16
- swarms/structs/stopping_conditions.py +30 -0
- swarms/structs/swarm_router.py +57 -5
- swarms/structs/swarming_architectures.py +576 -185
- swarms/telemetry/main.py +6 -2
- swarms/tools/mcp_client.py +209 -53
- swarms/tools/mcp_integration.py +1 -53
- swarms/utils/formatter.py +15 -1
- swarms/utils/generate_keys.py +64 -0
- swarms/utils/history_output_formatter.py +2 -0
- {swarms-7.7.1.dist-info → swarms-7.7.3.dist-info}/METADATA +98 -263
- {swarms-7.7.1.dist-info → swarms-7.7.3.dist-info}/RECORD +29 -38
- swarms/schemas/agent_input_schema.py +0 -149
- swarms/structs/agents_available.py +0 -87
- swarms/structs/async_workflow.py +0 -818
- swarms/structs/graph_swarm.py +0 -612
- swarms/structs/octotools.py +0 -844
- swarms/structs/pulsar_swarm.py +0 -469
- swarms/structs/queue_swarm.py +0 -193
- swarms/structs/swarm_builder.py +0 -395
- swarms/structs/swarm_load_balancer.py +0 -344
- swarms/structs/swarm_output_type.py +0 -23
- swarms/structs/talk_hier.py +0 -729
- {swarms-7.7.1.dist-info → swarms-7.7.3.dist-info}/LICENSE +0 -0
- {swarms-7.7.1.dist-info → swarms-7.7.3.dist-info}/WHEEL +0 -0
- {swarms-7.7.1.dist-info → swarms-7.7.3.dist-info}/entry_points.txt +0 -0
swarms/structs/pulsar_swarm.py
DELETED
@@ -1,469 +0,0 @@
import asyncio
import json
import time
from concurrent.futures import ThreadPoolExecutor, TimeoutError
from contextlib import contextmanager
from datetime import datetime
from typing import Any, Dict, List, Literal, Optional

import pulsar
from cryptography.fernet import Fernet
from loguru import logger
from prometheus_client import Counter, Histogram, start_http_server
from pydantic import BaseModel, Field
from pydantic.v1 import validator
from tenacity import retry, stop_after_attempt, wait_exponential

# Enhanced metrics
TASK_COUNTER = Counter(
    "swarm_tasks_total", "Total number of tasks processed"
)
TASK_LATENCY = Histogram(
    "swarm_task_duration_seconds", "Task processing duration"
)
TASK_FAILURES = Counter(
    "swarm_task_failures_total", "Total number of task failures"
)
AGENT_ERRORS = Counter(
    "swarm_agent_errors_total", "Total number of agent errors"
)

# Define types using Literal
TaskStatus = Literal["pending", "processing", "completed", "failed"]
TaskPriority = Literal["low", "medium", "high", "critical"]


class SecurityConfig(BaseModel):
    """Security configuration for the swarm"""

    encryption_key: str = Field(
        ..., description="Encryption key for sensitive data"
    )
    tls_cert_path: Optional[str] = Field(
        None, description="Path to TLS certificate"
    )
    tls_key_path: Optional[str] = Field(
        None, description="Path to TLS private key"
    )
    auth_token: Optional[str] = Field(
        None, description="Authentication token"
    )
    max_message_size: int = Field(
        default=1048576, description="Maximum message size in bytes"
    )
    rate_limit: int = Field(
        default=100, description="Maximum tasks per minute"
    )

    @validator("encryption_key")
    def validate_encryption_key(cls, v):
        if len(v) < 32:
            raise ValueError(
                "Encryption key must be at least 32 bytes long"
            )
        return v


class Task(BaseModel):
    """Enhanced task model with additional metadata and validation"""

    task_id: str = Field(
        ..., description="Unique identifier for the task"
    )
    description: str = Field(
        ..., description="Task description or instructions"
    )
    output_type: Literal["string", "json", "file"] = Field("string")
    status: TaskStatus = Field(default="pending")
    priority: TaskPriority = Field(default="medium")
    created_at: datetime = Field(default_factory=datetime.utcnow)
    started_at: Optional[datetime] = None
    completed_at: Optional[datetime] = None
    retry_count: int = Field(default=0)
    metadata: Dict[str, Any] = Field(default_factory=dict)

    @validator("task_id")
    def validate_task_id(cls, v):
        if not v.strip():
            raise ValueError("task_id cannot be empty")
        return v

    class Config:
        json_encoders = {datetime: lambda v: v.isoformat()}


class TaskResult(BaseModel):
    """Model for task execution results"""

    task_id: str
    status: TaskStatus
    result: Any
    error_message: Optional[str] = None
    execution_time: float
    agent_id: str


@contextmanager
def task_timing():
    """Context manager for timing task execution"""
    start_time = time.time()
    try:
        yield
    finally:
        duration = time.time() - start_time
        TASK_LATENCY.observe(duration)


class SecurePulsarSwarm:
    """
    Enhanced secure, scalable swarm system with improved reliability and security features.
    """

    def __init__(
        self,
        name: str,
        description: str,
        agents: List[Any],
        pulsar_url: str,
        subscription_name: str,
        topic_name: str,
        security_config: SecurityConfig,
        max_workers: int = 5,
        retry_attempts: int = 3,
        task_timeout: int = 300,
        metrics_port: int = 8000,
    ):
        """Initialize the enhanced Pulsar Swarm"""
        self.name = name
        self.description = description
        self.agents = agents
        self.pulsar_url = pulsar_url
        self.subscription_name = subscription_name
        self.topic_name = topic_name
        self.security_config = security_config
        self.max_workers = max_workers
        self.retry_attempts = retry_attempts
        self.task_timeout = task_timeout

        # Initialize encryption
        self.cipher_suite = Fernet(
            security_config.encryption_key.encode()
        )

        # Setup metrics server
        start_http_server(metrics_port)

        # Initialize Pulsar client with security settings
        client_config = {
            "authentication": (
                None
                if not security_config.auth_token
                else pulsar.AuthenticationToken(
                    security_config.auth_token
                )
            ),
            "operation_timeout_seconds": 30,
            "connection_timeout_seconds": 30,
            "use_tls": bool(security_config.tls_cert_path),
            "tls_trust_certs_file_path": security_config.tls_cert_path,
            "tls_allow_insecure_connection": False,
        }

        self.client = pulsar.Client(self.pulsar_url, **client_config)
        self.producer = self._create_producer()
        self.consumer = self._create_consumer()
        self.executor = ThreadPoolExecutor(max_workers=max_workers)

        # Initialize rate limiting
        self.last_execution_time = time.time()
        self.execution_count = 0

        logger.info(
            f"Secure Pulsar Swarm '{self.name}' initialized with enhanced security features"
        )

    def _create_producer(self):
        """Create a secure producer with retry logic"""
        return self.client.create_producer(
            self.topic_name,
            max_pending_messages=1000,
            compression_type=pulsar.CompressionType.LZ4,
            block_if_queue_full=True,
            batching_enabled=True,
            batching_max_publish_delay_ms=10,
        )

    def _create_consumer(self):
        """Create a secure consumer with retry logic"""
        return self.client.subscribe(
            self.topic_name,
            subscription_name=self.subscription_name,
            consumer_type=pulsar.ConsumerType.Shared,
            message_listener=None,
            receiver_queue_size=1000,
            max_total_receiver_queue_size_across_partitions=50000,
        )

    def _encrypt_message(self, data: str) -> bytes:
        """Encrypt message data"""
        return self.cipher_suite.encrypt(data.encode())

    def _decrypt_message(self, data: bytes) -> str:
        """Decrypt message data"""
        return self.cipher_suite.decrypt(data).decode()

    @retry(
        stop=stop_after_attempt(3),
        wait=wait_exponential(multiplier=1, min=4, max=10),
    )
    def publish_task(self, task: Task) -> None:
        """Publish a task with enhanced security and reliability"""
        try:
            # Validate message size
            task_data = task.json()
            if len(task_data) > self.security_config.max_message_size:
                raise ValueError(
                    "Task data exceeds maximum message size"
                )

            # Rate limiting
            current_time = time.time()
            if current_time - self.last_execution_time >= 60:
                self.execution_count = 0
                self.last_execution_time = current_time

            if (
                self.execution_count
                >= self.security_config.rate_limit
            ):
                raise ValueError("Rate limit exceeded")

            # Encrypt and publish
            encrypted_data = self._encrypt_message(task_data)
            message_id = self.producer.send(encrypted_data)

            self.execution_count += 1
            logger.info(
                f"Task {task.task_id} published successfully with message ID {message_id}"
            )

        except Exception as e:
            TASK_FAILURES.inc()
            logger.error(
                f"Error publishing task {task.task_id}: {str(e)}"
            )
            raise

    async def _process_task(self, task: Task) -> TaskResult:
        """Process a task with comprehensive error handling and monitoring"""
        task.status = "processing"
        task.started_at = datetime.utcnow()

        with task_timing():
            try:
                # Select agent using round-robin
                agent = self.agents.pop(0)
                self.agents.append(agent)

                # Execute task with timeout
                future = self.executor.submit(
                    agent.run, task.description
                )
                result = future.result(timeout=self.task_timeout)

                # Handle different output types
                if task.output_type == "json":
                    result = json.loads(result)
                elif task.output_type == "file":
                    file_path = f"output_{task.task_id}_{int(time.time())}.txt"
                    with open(file_path, "w") as f:
                        f.write(result)
                    result = {"file_path": file_path}

                task.status = "completed"
                task.completed_at = datetime.utcnow()
                TASK_COUNTER.inc()

                return TaskResult(
                    task_id=task.task_id,
                    status="completed",
                    result=result,
                    execution_time=time.time()
                    - task.started_at.timestamp(),
                    agent_id=agent.agent_name,
                )

            except TimeoutError:
                TASK_FAILURES.inc()
                error_msg = f"Task {task.task_id} timed out after {self.task_timeout} seconds"
                logger.error(error_msg)
                task.status = "failed"
                return TaskResult(
                    task_id=task.task_id,
                    status="failed",
                    result=None,
                    error_message=error_msg,
                    execution_time=time.time()
                    - task.started_at.timestamp(),
                    agent_id=agent.agent_name,
                )

            except Exception as e:
                TASK_FAILURES.inc()
                AGENT_ERRORS.inc()
                error_msg = (
                    f"Error processing task {task.task_id}: {str(e)}"
                )
                logger.error(error_msg)
                task.status = "failed"
                return TaskResult(
                    task_id=task.task_id,
                    status="failed",
                    result=None,
                    error_message=error_msg,
                    execution_time=time.time()
                    - task.started_at.timestamp(),
                    agent_id=agent.agent_name,
                )

    async def consume_tasks(self):
        """Enhanced task consumption with circuit breaker and backoff"""
        consecutive_failures = 0
        backoff_time = 1

        while True:
            try:
                # Circuit breaker pattern
                if consecutive_failures >= 5:
                    logger.warning(
                        f"Circuit breaker triggered. Waiting {backoff_time} seconds"
                    )
                    await asyncio.sleep(backoff_time)
                    backoff_time = min(backoff_time * 2, 60)
                    continue

                # Receive message with timeout
                message = await self.consumer.receive_async()

                try:
                    # Decrypt and process message
                    decrypted_data = self._decrypt_message(
                        message.data()
                    )
                    task_data = json.loads(decrypted_data)
                    task = Task(**task_data)

                    # Process task
                    result = await self._process_task(task)

                    # Handle result
                    if result.status == "completed":
                        await self.consumer.acknowledge_async(message)
                        consecutive_failures = 0
                        backoff_time = 1
                    else:
                        if task.retry_count < self.retry_attempts:
                            task.retry_count += 1
                            await self.consumer.negative_acknowledge(
                                message
                            )
                        else:
                            await self.consumer.acknowledge_async(
                                message
                            )
                            logger.error(
                                f"Task {task.task_id} failed after {self.retry_attempts} attempts"
                            )

                except Exception as e:
                    logger.error(
                        f"Error processing message: {str(e)}"
                    )
                    await self.consumer.negative_acknowledge(message)
                    consecutive_failures += 1

            except Exception as e:
                logger.error(f"Error in consume_tasks: {str(e)}")
                consecutive_failures += 1
                await asyncio.sleep(1)

    def __enter__(self):
        """Context manager entry"""
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Context manager exit with proper cleanup"""
        try:
            self.producer.flush()
            self.producer.close()
            self.consumer.close()
            self.client.close()
            self.executor.shutdown(wait=True)
        except Exception as e:
            logger.error(f"Error during cleanup: {str(e)}")


# if __name__ == "__main__":
#     # Example usage with security configuration
#     security_config = SecurityConfig(
#         encryption_key=secrets.token_urlsafe(32),
#         tls_cert_path="/path/to/cert.pem",
#         tls_key_path="/path/to/key.pem",
#         auth_token="your-auth-token",
#         max_message_size=1048576,
#         rate_limit=100,
#     )

#     # Agent factory function
#     def create_financial_agent() -> Agent:
#         """Factory function to create a financial analysis agent."""
#         return Agent(
#             agent_name="Financial-Analysis-Agent",
#             system_prompt=FINANCIAL_AGENT_SYS_PROMPT,
#             model_name="gpt-4o-mini",
#             max_loops=1,
#             autosave=True,
#             dashboard=False,
#             verbose=True,
#             dynamic_temperature_enabled=True,
#             saved_state_path="finance_agent.json",
#             user_name="swarms_corp",
#             retry_attempts=1,
#             context_length=200000,
#             return_step_meta=False,
#             output_type="string",
#             streaming_on=False,
#         )

#     # Initialize agents (implementation not shown)
#     agents = [create_financial_agent() for _ in range(3)]

#     # Initialize the secure swarm
#     with SecurePulsarSwarm(
#         name="Secure Financial Swarm",
#         description="Production-grade financial analysis swarm",
#         agents=agents,
#         pulsar_url="pulsar+ssl://localhost:6651",
#         subscription_name="secure_financial_subscription",
#         topic_name="secure_financial_tasks",
#         security_config=security_config,
#         max_workers=5,
#         retry_attempts=3,
#         task_timeout=300,
#         metrics_port=8000,
#     ) as swarm:
#         # Example task
#         task = Task(
#             task_id=secrets.token_urlsafe(16),
#             description="Analyze Q4 financial reports",
#             output_type="json",
#             priority="high",
#             metadata={
#                 "department": "finance",
#                 "requester": "john.doe@company.com",
#             },
#         )

#         # Run the swarm
#         swarm.publish_task(task)
#         asyncio.run(swarm.consume_tasks())
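Aside: the message security in this removed module reduces to a symmetric Fernet round-trip. A minimal, standalone sketch of that primitive follows; note that Fernet expects a 32-byte url-safe base64-encoded key, which Fernet.generate_key() produces, so the secrets.token_urlsafe(32) value in the commented example above is not guaranteed to be a valid key.

from cryptography.fernet import Fernet

# Fernet keys must be 32 url-safe base64-encoded bytes;
# generate_key() returns one in exactly that format.
key = Fernet.generate_key()
cipher = Fernet(key)

token = cipher.encrypt(b"task payload")  # authenticated, opaque ciphertext
assert cipher.decrypt(token) == b"task payload"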
swarms/structs/queue_swarm.py
DELETED
@@ -1,193 +0,0 @@
import queue
import threading
from typing import List
from swarms.structs.agent import Agent
from pydantic import BaseModel
import os
from swarms.utils.loguru_logger import logger
from swarms.structs.base_swarm import BaseSwarm
import time


class AgentOutput(BaseModel):
    agent_name: str
    task: str
    result: str
    timestamp: str


class SwarmRunMetadata(BaseModel):
    run_id: str
    name: str
    description: str
    agents: List[str]
    start_time: str
    end_time: str
    tasks_completed: int
    outputs: List[AgentOutput]


class TaskQueueSwarm(BaseSwarm):
    """
    A swarm that processes tasks from a queue using multiple agents on different threads.

    Args:
        agents (List[Agent]): A list of agents of class Agent.
        name (str, optional): The name of the swarm. Defaults to "Task-Queue-Swarm".
        description (str, optional): The description of the swarm. Defaults to "A swarm that processes tasks from a queue using multiple agents on different threads.".
        autosave_on (bool, optional): Whether to automatically save the swarm metadata. Defaults to True.
        save_file_path (str, optional): The file path to save the swarm metadata. Defaults to "swarm_run_metadata.json".
        workspace_dir (str, optional): The directory path of the workspace. Defaults to os.getenv("WORKSPACE_DIR").
        return_metadata_on (bool, optional): Whether to return the swarm metadata after running. Defaults to False.
        max_loops (int, optional): The maximum number of loops to run the swarm. Defaults to 1.

    Attributes:
        agents (List[Agent]): A list of agents of class Agent.
        task_queue (queue.Queue): A queue to store the tasks.
        lock (threading.Lock): A lock for thread synchronization.
        autosave_on (bool): Whether to automatically save the swarm metadata.
        save_file_path (str): The file path to save the swarm metadata.
        workspace_dir (str): The directory path of the workspace.
        return_metadata_on (bool): Whether to return the swarm metadata after running.
        max_loops (int): The maximum number of loops to run the swarm.
        metadata (SwarmRunMetadata): The metadata of the swarm run.
    """

    def __init__(
        self,
        agents: List[Agent],
        name: str = "Task-Queue-Swarm",
        description: str = "A swarm that processes tasks from a queue using multiple agents on different threads.",
        autosave_on: bool = True,
        save_file_path: str = "swarm_run_metadata.json",
        workspace_dir: str = os.getenv("WORKSPACE_DIR"),
        return_metadata_on: bool = False,
        max_loops: int = 1,
        *args,
        **kwargs,
    ):
        super().__init__(
            name=name,
            description=description,
            agents=agents,
            *args,
            **kwargs,
        )
        self.agents = agents
        self.task_queue = queue.Queue()
        self.lock = threading.Lock()
        self.autosave_on = autosave_on
        self.save_file_path = save_file_path
        self.workspace_dir = workspace_dir or os.getenv(
            "WORKSPACE_DIR", "agent_workspace"
        )
        self.return_metadata_on = return_metadata_on
        self.max_loops = max_loops

        current_time = time.strftime("%Y%m%d%H%M%S")
        self.metadata = SwarmRunMetadata(
            run_id=f"swarm_run_{current_time}",
            name=name,
            description=description,
            agents=[agent.agent_name for agent in agents],
            start_time=current_time,
            end_time="",
            tasks_completed=0,
            outputs=[],
        )

    def reliability_checks(self):
        logger.info("Initializing reliability checks.")

        if not self.agents:
            raise ValueError(
                "You must provide a non-empty list of Agent instances."
            )

        if self.max_loops <= 0:
            raise ValueError("max_loops must be greater than zero.")

        logger.info(
            "Reliability checks successful. Swarm is ready for usage."
        )

    def add_task(self, task: str):
        """Adds a task to the queue."""
        self.task_queue.put(task)

    def _process_task(self, agent: Agent):
        """Processes tasks from the queue using the provided agent."""
        while True:
            try:
                task = self.task_queue.get_nowait()
            except queue.Empty:
                break
            try:
                logger.info(
                    f"Agent {agent.agent_name} is running task: {task}"
                )
                result = agent.run(task)
                with self.lock:
                    self.metadata.tasks_completed += 1
                    self.metadata.outputs.append(
                        AgentOutput(
                            agent_name=agent.agent_name,
                            task=task,
                            result=result,
                            timestamp=time.strftime(
                                "%Y-%m-%d %H:%M:%S"
                            ),
                        )
                    )
                logger.info(
                    f"Agent {agent.agent_name} completed task: {task}"
                )
                logger.debug(f"Result: {result}")
            except Exception as e:
                logger.error(
                    f"Agent {agent.agent_name} failed to complete task: {task}"
                )
                logger.exception(e)
            finally:
                self.task_queue.task_done()

    def run(self):
        """Runs the swarm by having agents pick up tasks from the queue."""
        logger.info(f"Starting swarm run: {self.metadata.run_id}")

        threads = [
            threading.Thread(
                target=self._process_task, args=(agent,), daemon=True
            )
            for agent in self.agents
        ]

        for thread in threads:
            thread.start()

        self.task_queue.join()

        for thread in threads:
            thread.join()

        self.metadata.end_time = time.strftime("%Y%m%d%H%M%S")

        if self.autosave_on:
            self.save_json_to_file()

        # if self.return_metadata_on:
        #     return self.metadata.model_dump_json(indent=4)
        return self.export_metadata()

    def save_json_to_file(self):
        json_string = self.export_metadata()
        file_path = os.path.join(
            self.workspace_dir, self.save_file_path
        )
        os.makedirs(os.path.dirname(file_path), exist_ok=True)
        with open(file_path, "w") as f:
            f.write(json_string)
        logger.info(f"Metadata saved to {file_path}")

    def export_metadata(self):
        return self.metadata.model_dump_json(indent=4)