swarms-7.9.8-py3-none-any.whl → swarms-8.0.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
swarms/utils/concurrent_wrapper.py (new file)
@@ -0,0 +1,520 @@
+ import asyncio
+ import inspect
+ import time
+ from concurrent.futures import ThreadPoolExecutor, ProcessPoolExecutor, as_completed
+ from functools import wraps
+ from typing import Any, Callable, List, Optional, TypeVar, Generic
+ from dataclasses import dataclass
+ from enum import Enum
+
+ from swarms.utils.loguru_logger import initialize_logger
+
+ logger = initialize_logger("concurrent_wrapper")
+
+ T = TypeVar('T')
+
+
+ # Global function for process pool execution (must be picklable)
+ def _execute_task_in_process(task_data):
+     """
+     Execute a task in a separate process.
+     This function must be at module level to be picklable.
+     """
+     func, task_args, task_kwargs, task_id, max_retries, retry_on_failure, retry_delay, return_exceptions = task_data
+
+     start_time = time.time()
+
+     for attempt in range(max_retries + 1):
+         try:
+             result = func(*task_args, **task_kwargs)
+             execution_time = time.time() - start_time
+             return ConcurrentResult(
+                 value=result,
+                 execution_time=execution_time,
+                 worker_id=task_id,
+             )
+         except Exception as e:
+             if attempt == max_retries or not retry_on_failure:
+                 execution_time = time.time() - start_time
+                 if return_exceptions:
+                     return ConcurrentResult(
+                         exception=e,
+                         execution_time=execution_time,
+                         worker_id=task_id,
+                     )
+                 else:
+                     raise
+             else:
+                 time.sleep(retry_delay * (2 ** attempt))
+
+     # This should never be reached, but just in case
+     return ConcurrentResult(exception=Exception("Max retries exceeded"))
+
+
+ class ExecutorType(Enum):
+     """Enum for different types of executors."""
+     THREAD = "thread"
+     PROCESS = "process"
+     ASYNC = "async"
+
+
+ @dataclass
+ class ConcurrentConfig:
+     """Configuration for concurrent execution."""
+     name: Optional[str] = None
+     description: Optional[str] = None
+     max_workers: int = 4
+     timeout: Optional[float] = None
+     executor_type: ExecutorType = ExecutorType.THREAD
+     return_exceptions: bool = False
+     chunk_size: Optional[int] = None
+     ordered: bool = True
+     retry_on_failure: bool = False
+     max_retries: int = 3
+     retry_delay: float = 1.0
+
+
+ class ConcurrentResult(Generic[T]):
+     """Result wrapper for concurrent execution."""
+
+     def __init__(self, value: T = None, exception: Exception = None,
+                  execution_time: float = 0.0, worker_id: Optional[int] = None):
+         self.value = value
+         self.exception = exception
+         self.execution_time = execution_time
+         self.worker_id = worker_id
+         self.success = exception is None
+
+     def __repr__(self):
+         if self.success:
+             return f"ConcurrentResult(value={self.value}, time={self.execution_time:.3f}s)"
+         else:
+             return f"ConcurrentResult(exception={type(self.exception).__name__}: {self.exception})"
+
+
+ def concurrent(
+     name: Optional[str] = None,
+     description: Optional[str] = None,
+     max_workers: int = 4,
+     timeout: Optional[float] = None,
+     executor_type: ExecutorType = ExecutorType.THREAD,
+     return_exceptions: bool = False,
+     chunk_size: Optional[int] = None,
+     ordered: bool = True,
+     retry_on_failure: bool = False,
+     max_retries: int = 3,
+     retry_delay: float = 1.0,
+ ):
+     """
+     A decorator that enables concurrent execution of functions.
+
+     Args:
+         name (Optional[str]): Name for the concurrent operation
+         description (Optional[str]): Description of the operation
+         max_workers (int): Maximum number of worker threads/processes
+         timeout (Optional[float]): Timeout in seconds for each task
+         executor_type (ExecutorType): Type of executor (thread, process, async)
+         return_exceptions (bool): Whether to return exceptions instead of raising
+         chunk_size (Optional[int]): Size of chunks for batch processing
+         ordered (bool): Whether to maintain the order of results
+         retry_on_failure (bool): Whether to retry failed tasks
+         max_retries (int): Maximum number of retries per task
+         retry_delay (float): Delay between retries in seconds
+
+     Returns:
+         Callable: Decorated function that can execute concurrently
+     """
+
+     def decorator(func: Callable[..., T]) -> Callable[..., T]:
+         config = ConcurrentConfig(
+             name=name or func.__name__,
+             description=description or f"Concurrent execution of {func.__name__}",
+             max_workers=max_workers,
+             timeout=timeout,
+             executor_type=executor_type,
+             return_exceptions=return_exceptions,
+             chunk_size=chunk_size,
+             ordered=ordered,
+             retry_on_failure=retry_on_failure,
+             max_retries=max_retries,
+             retry_delay=retry_delay,
+         )
+
+         @wraps(func)
+         def wrapper(*args, **kwargs):
+             # Direct calls pass straight through to the original function
+             return func(*args, **kwargs)
+
+         def _execute_single_task(task_args, task_kwargs, task_id=None):
+             """Execute a single task with retry logic."""
+             start_time = time.time()
+
+             for attempt in range(config.max_retries + 1):
+                 try:
+                     result = func(*task_args, **task_kwargs)
+                     execution_time = time.time() - start_time
+                     return ConcurrentResult(
+                         value=result,
+                         execution_time=execution_time,
+                         worker_id=task_id,
+                     )
+                 except Exception as e:
+                     if attempt == config.max_retries or not config.retry_on_failure:
+                         execution_time = time.time() - start_time
+                         if config.return_exceptions:
+                             return ConcurrentResult(
+                                 exception=e,
+                                 execution_time=execution_time,
+                                 worker_id=task_id,
+                             )
+                         else:
+                             raise
+                     else:
+                         logger.warning(
+                             f"Task {task_id} failed (attempt {attempt + 1}/{config.max_retries + 1}): {e}"
+                         )
+                         time.sleep(config.retry_delay * (2 ** attempt))
+
+             # Unreachable in practice; mirrors the process-pool helper's fallback
+             return ConcurrentResult(exception=Exception("Max retries exceeded"))
+
+         def concurrent_execute(*args_list, **kwargs_list):
+             """Execute the function concurrently with multiple argument sets."""
+             if not args_list and not kwargs_list:
+                 raise ValueError("At least one set of arguments must be provided")
+
+             # Prepare tasks
+             tasks = []
+             if args_list:
+                 for args in args_list:
+                     if isinstance(args, (list, tuple)):
+                         tasks.append((args, {}))
+                     else:
+                         tasks.append(([args], {}))
+
+             if kwargs_list:
+                 # Each keyword argument's value is a dict of kwargs for one task
+                 for kwargs in kwargs_list.values():
+                     if isinstance(kwargs, dict):
+                         tasks.append(((), kwargs))
+                     else:
+                         raise ValueError("kwargs_list values must be dictionaries")
+
+             logger.info(
+                 f"Starting concurrent execution of {len(tasks)} tasks "
+                 f"with {config.max_workers} workers"
+             )
+             start_time = time.time()
+
+             try:
+                 if config.executor_type == ExecutorType.THREAD:
+                     results = _execute_with_thread_pool(tasks)
+                 elif config.executor_type == ExecutorType.PROCESS:
+                     results = _execute_with_process_pool(tasks)
+                 elif config.executor_type == ExecutorType.ASYNC:
+                     # _execute_with_async is a coroutine; drive it to completion
+                     results = asyncio.run(_execute_with_async(tasks))
+                 else:
+                     raise ValueError(f"Unsupported executor type: {config.executor_type}")
+
+                 total_time = time.time() - start_time
+                 successful_tasks = sum(1 for r in results if r.success)
+
+                 logger.info(
+                     f"Completed {len(tasks)} tasks in {total_time:.3f}s "
+                     f"({successful_tasks}/{len(tasks)} successful)"
+                 )
+
+                 return results
+
+             except Exception as e:
+                 logger.error(f"Concurrent execution failed: {e}")
+                 raise
+
+         def _execute_with_thread_pool(tasks):
+             """Execute tasks using ThreadPoolExecutor."""
+             results = []
+
+             with ThreadPoolExecutor(max_workers=config.max_workers) as executor:
+                 futures = [
+                     executor.submit(_execute_single_task, task[0], task[1], i)
+                     for i, task in enumerate(tasks)
+                 ]
+
+                 if config.ordered:
+                     # Wait on futures in submission order so results stay ordered
+                     for future in futures:
+                         try:
+                             result = future.result(timeout=config.timeout)
+                             results.append(result)
+                         except Exception as e:
+                             if config.return_exceptions:
+                                 results.append(ConcurrentResult(exception=e))
+                             else:
+                                 raise
+                 else:
+                     # Collect results as they complete
+                     for future in as_completed(futures, timeout=config.timeout):
+                         try:
+                             result = future.result(timeout=config.timeout)
+                             results.append(result)
+                         except Exception as e:
+                             if config.return_exceptions:
+                                 results.append(ConcurrentResult(exception=e))
+                             else:
+                                 raise
+
+             return results
+
+         def _execute_with_process_pool(tasks):
+             """Execute tasks using ProcessPoolExecutor."""
+             results = []
+
+             # Prepare task data for process execution
+             task_data_list = []
+             for i, task in enumerate(tasks):
+                 task_data = (
+                     func,  # The function to execute (must be picklable)
+                     task[0],  # args
+                     task[1],  # kwargs
+                     i,  # task_id
+                     config.max_retries,
+                     config.retry_on_failure,
+                     config.retry_delay,
+                     config.return_exceptions,
+                 )
+                 task_data_list.append(task_data)
+
+             with ProcessPoolExecutor(max_workers=config.max_workers) as executor:
+                 futures = [
+                     executor.submit(_execute_task_in_process, task_data)
+                     for task_data in task_data_list
+                 ]
+
+                 if config.ordered:
+                     # Wait on futures in submission order so results stay ordered
+                     for future in futures:
+                         try:
+                             result = future.result(timeout=config.timeout)
+                             results.append(result)
+                         except Exception as e:
+                             if config.return_exceptions:
+                                 results.append(ConcurrentResult(exception=e))
+                             else:
+                                 raise
+                 else:
+                     # Collect results as they complete
+                     for future in as_completed(futures, timeout=config.timeout):
+                         try:
+                             result = future.result(timeout=config.timeout)
+                             results.append(result)
+                         except Exception as e:
+                             if config.return_exceptions:
+                                 results.append(ConcurrentResult(exception=e))
+                             else:
+                                 raise
+
+             return results
+
+         async def _execute_with_async(tasks):
+             """Execute tasks using asyncio."""
+             async def _async_task(task_args, task_kwargs, task_id=None):
+                 start_time = time.time()
+
+                 for attempt in range(config.max_retries + 1):
+                     try:
+                         loop = asyncio.get_running_loop()
+                         result = await loop.run_in_executor(
+                             None,
+                             lambda: func(*task_args, **task_kwargs)
+                         )
+                         execution_time = time.time() - start_time
+                         return ConcurrentResult(
+                             value=result,
+                             execution_time=execution_time,
+                             worker_id=task_id,
+                         )
+                     except Exception as e:
+                         if attempt == config.max_retries or not config.retry_on_failure:
+                             execution_time = time.time() - start_time
+                             if config.return_exceptions:
+                                 return ConcurrentResult(
+                                     exception=e,
+                                     execution_time=execution_time,
+                                     worker_id=task_id,
+                                 )
+                             else:
+                                 raise
+                         else:
+                             logger.warning(
+                                 f"Async task {task_id} failed (attempt {attempt + 1}/{config.max_retries + 1}): {e}"
+                             )
+                             await asyncio.sleep(config.retry_delay * (2 ** attempt))
+
+             # Limit concurrency with a semaphore
+             semaphore = asyncio.Semaphore(config.max_workers)
+
+             async def _limited_task(task_args, task_kwargs, task_id):
+                 async with semaphore:
+                     return await _async_task(task_args, task_kwargs, task_id)
+
+             task_objs = [
+                 asyncio.create_task(_limited_task(task[0], task[1], i))
+                 for i, task in enumerate(tasks)
+             ]
+
+             results = []
+             if config.ordered:
+                 # Await tasks in submission order so results stay ordered
+                 for task_obj in task_objs:
+                     try:
+                         results.append(await task_obj)
+                     except Exception as e:
+                         if config.return_exceptions:
+                             results.append(ConcurrentResult(exception=e))
+                         else:
+                             raise
+             else:
+                 # Collect results as tasks complete
+                 for coro in asyncio.as_completed(task_objs):
+                     try:
+                         results.append(await coro)
+                     except Exception as e:
+                         if config.return_exceptions:
+                             results.append(ConcurrentResult(exception=e))
+                         else:
+                             raise
+             return results
+
+         def concurrent_batch(items: List[Any], batch_size: Optional[int] = None,
+                              **kwargs) -> List[ConcurrentResult]:
+             """Execute the function concurrently on a batch of items."""
+             # batch_size is retained for API compatibility; all items are submitted at once
+             batch_size = batch_size or config.chunk_size or len(items)
+
+             tasks = []
+             for item in items:
+                 if isinstance(item, (list, tuple)):
+                     tasks.append((item, kwargs))
+                 else:
+                     tasks.append(([item], kwargs))
+
+             # Dispatch directly so the shared kwargs reach every call
+             if config.executor_type == ExecutorType.THREAD:
+                 return _execute_with_thread_pool(tasks)
+             elif config.executor_type == ExecutorType.PROCESS:
+                 return _execute_with_process_pool(tasks)
+             elif config.executor_type == ExecutorType.ASYNC:
+                 return asyncio.run(_execute_with_async(tasks))
+             else:
+                 raise ValueError(f"Unsupported executor type: {config.executor_type}")
+
+         def concurrent_map(items: List[Any], **kwargs) -> List[ConcurrentResult]:
+             """Map the function over a list of items concurrently."""
+             return concurrent_batch(items, **kwargs)
+
+         # Attach methods to the wrapper
+         wrapper.concurrent_execute = concurrent_execute
+         wrapper.concurrent_batch = concurrent_batch
+         wrapper.concurrent_map = concurrent_map
+         wrapper.config = config
+
+         # Add metadata
+         wrapper.__concurrent_config__ = config
+         wrapper.__concurrent_enabled__ = True
+
+         return wrapper
+
+     return decorator
+
+
+ def concurrent_class_executor(
+     name: Optional[str] = None,
+     description: Optional[str] = None,
+     max_workers: int = 4,
+     timeout: Optional[float] = None,
+     executor_type: ExecutorType = ExecutorType.THREAD,
+     return_exceptions: bool = False,
+     chunk_size: Optional[int] = None,
+     ordered: bool = True,
+     retry_on_failure: bool = False,
+     max_retries: int = 3,
+     retry_delay: float = 1.0,
+     methods: Optional[List[str]] = None,
+ ):
+     """
+     A decorator that enables concurrent execution for class methods.
+
+     Args:
+         name (Optional[str]): Name for the concurrent operation
+         description (Optional[str]): Description of the operation
+         max_workers (int): Maximum number of worker threads/processes
+         timeout (Optional[float]): Timeout in seconds for each task
+         executor_type (ExecutorType): Type of executor (thread, process, async)
+         return_exceptions (bool): Whether to return exceptions instead of raising
+         chunk_size (Optional[int]): Size of chunks for batch processing
+         ordered (bool): Whether to maintain the order of results
+         retry_on_failure (bool): Whether to retry failed tasks
+         max_retries (int): Maximum number of retries per task
+         retry_delay (float): Delay between retries in seconds
+         methods (Optional[List[str]]): List of method names to make concurrent
+
+     Returns:
+         Class: The class with concurrent execution capabilities added
+     """
+
+     def decorator(cls):
+         config = ConcurrentConfig(
+             name=name or f"{cls.__name__}_concurrent",
+             description=description or f"Concurrent execution for {cls.__name__}",
+             max_workers=max_workers,
+             timeout=timeout,
+             executor_type=executor_type,
+             return_exceptions=return_exceptions,
+             chunk_size=chunk_size,
+             ordered=ordered,
+             retry_on_failure=retry_on_failure,
+             max_retries=max_retries,
+             retry_delay=retry_delay,
+         )
+
+         # Get methods to make concurrent (all public methods by default)
+         target_methods = methods or [
+             method_name
+             for method_name, method in inspect.getmembers(cls, inspect.isfunction)
+             if not method_name.startswith('_')
+         ]
+
+         for method_name in target_methods:
+             if hasattr(cls, method_name):
+                 original_method = getattr(cls, method_name)
+
+                 # Create a concurrent version of the method
+                 concurrent_decorator = concurrent(
+                     name=f"{cls.__name__}.{method_name}",
+                     description=f"Concurrent execution of {cls.__name__}.{method_name}",
+                     max_workers=config.max_workers,
+                     timeout=config.timeout,
+                     executor_type=config.executor_type,
+                     return_exceptions=config.return_exceptions,
+                     chunk_size=config.chunk_size,
+                     ordered=config.ordered,
+                     retry_on_failure=config.retry_on_failure,
+                     max_retries=config.max_retries,
+                     retry_delay=config.retry_delay,
+                 )
+
+                 # Apply the concurrent decorator to the method
+                 setattr(cls, method_name, concurrent_decorator(original_method))
+
+         # Add class-level concurrent configuration
+         cls.__concurrent_config__ = config
+         cls.__concurrent_enabled__ = True
+
+         return cls
+
+     return decorator
+
+
+
+ # Convenience functions for common use cases
+ def thread_executor(**kwargs):
+     """Convenience decorator for thread-based concurrent execution."""
+     return concurrent(executor_type=ExecutorType.THREAD, **kwargs)
+
+
+ def process_executor(**kwargs):
+     """Convenience decorator for process-based concurrent execution."""
+     return concurrent(executor_type=ExecutorType.PROCESS, **kwargs)
+
+
+ def async_executor(**kwargs):
+     """Convenience decorator for async-based concurrent execution."""
+     return concurrent(executor_type=ExecutorType.ASYNC, **kwargs)
+
+
+ def batch_executor(batch_size: int = 10, **kwargs):
+     """Convenience decorator for batch processing."""
+     return concurrent(chunk_size=batch_size, **kwargs)
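
The new `swarms/utils/concurrent_wrapper.py` module above is the main code addition in this release. As a quick orientation for reviewers, here is a minimal usage sketch derived from the decorator API shown in the diff; the `word_count` function and its inputs are illustrative only and not part of the package:

```python
from swarms.utils.concurrent_wrapper import thread_executor

# Illustrative worker function (hypothetical, not part of the package)
@thread_executor(max_workers=4, return_exceptions=True)
def word_count(text: str) -> int:
    return len(text.split())

# Each positional argument becomes one task. Results come back as
# ConcurrentResult objects exposing .value, .exception, and .execution_time.
results = word_count.concurrent_execute("one two three", "alpha beta")
for r in results:
    print(r.success, r.value if r.success else r.exception)
```

Note that a plain call such as `word_count("hello world")` still runs synchronously; the decorator only attaches `concurrent_execute`, `concurrent_batch`, and `concurrent_map` as attributes.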
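
The class-level variant can be exercised the same way. A sketch under the same caveat (the `TextTools` class is hypothetical); because the decorator wraps the unbound function, the instance must be passed explicitly when tasks go through the attached helpers:

```python
from swarms.utils.concurrent_wrapper import (
    concurrent_class_executor,
    ExecutorType,
)

# Hypothetical class used only for illustration
@concurrent_class_executor(
    max_workers=2,
    executor_type=ExecutorType.THREAD,
    methods=["shout"],
)
class TextTools:
    def shout(self, text: str) -> str:
        return text.upper()

tools = TextTools()
print(tools.shout("hi"))  # plain method calls remain synchronous

# Each (instance, argument) tuple becomes one concurrent task
results = TextTools.shout.concurrent_execute((tools, "alpha"), (tools, "beta"))
print([r.value for r in results])
```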
swarms-8.0.0.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: swarms
- Version: 7.9.8
+ Version: 8.0.0
  Summary: Swarms - TGSC
  License: MIT
  Keywords: artificial intelligence,deep learning,optimizers,Prompt Engineering,swarms,agents,llms,transformers,multi-agent,swarms of agents,Enterprise-Grade Agents,Production-Grade Agents,Agents,Multi-Grade-Agents,Swarms,Transformers,LLMs,Prompt Engineering,Agents,Generative Agents,Generative AI,Agent Marketplace,Agent Store,quant,finance,algorithmic trading,portfolio optimization,risk management,financial modeling,machine learning for finance,natural language processing for finance
@@ -280,20 +280,26 @@ A `SequentialWorkflow` executes tasks in a strict order, forming a pipeline wher
  ```python
  from swarms import Agent, SequentialWorkflow

- # Initialize agents for a 3-step process
- # 1. Generate an idea
- idea_generator = Agent(agent_name="IdeaGenerator", system_prompt="Generate a unique startup idea.", model_name="gpt-4o-mini")
- # 2. Validate the idea
- validator = Agent(agent_name="Validator", system_prompt="Take this startup idea and analyze its market viability.", model_name="gpt-4o-mini")
- # 3. Create a pitch
- pitch_creator = Agent(agent_name="PitchCreator", system_prompt="Write a 3-sentence elevator pitch for this validated startup idea.", model_name="gpt-4o-mini")
+ # Agent 1: The Researcher
+ researcher = Agent(
+     agent_name="Researcher",
+     system_prompt="Your job is to research the provided topic and provide a detailed summary.",
+     model_name="gpt-4o-mini",
+ )

- # Create the sequential workflow
- workflow = SequentialWorkflow(agents=[idea_generator, validator, pitch_creator])
+ # Agent 2: The Writer
+ writer = Agent(
+     agent_name="Writer",
+     system_prompt="Your job is to take the research summary and write a beautiful, engaging blog post about it.",
+     model_name="gpt-4o-mini",
+ )
+
+ # Create a sequential workflow where the researcher's output feeds into the writer's input
+ workflow = SequentialWorkflow(agents=[researcher, writer])

- # Run the workflow
- elevator_pitch = workflow.run()
- print(elevator_pitch)
+ # Run the workflow on a task
+ final_post = workflow.run("The history and future of artificial intelligence")
+ print(final_post)

  ```

  -----
@@ -356,9 +362,7 @@ rearrange_system = AgentRearrange(
      flow=flow,
  )

- # Run the system
- # The researcher will generate content, and then both the writer and editor
- # will process that content in parallel.
+ # Run the swarm
  outputs = rearrange_system.run("Analyze the impact of AI on modern cinema.")
  print(outputs)
  ```
swarms-8.0.0.dist-info/RECORD
@@ -1,6 +1,6 @@
  swarms/__init__.py,sha256=0tc5msh0Sfk6V_KIwen_4p2kwkHBLjLxZVoAsFE-b80,513
  swarms/agents/__init__.py,sha256=hEp16SuPhlcLX84HAlaK_9RNf6JB0JQ27YTrTnvak04,1171
- swarms/agents/agent_judge.py,sha256=xT242CX5mV64cq2B-3RGkuEHiV5aD04P_Zq8_s64iMQ,3967
+ swarms/agents/agent_judge.py,sha256=FshQXqpcqEVsfeGiuXCIhmVa2o1dO9CuZN8UxXMAjLk,15924
  swarms/agents/agent_print.py,sha256=SXqWA2ZzXwRFdv8hkuYwOPMTasvaGTG6U29413qRCAA,918
  swarms/agents/ape_agent.py,sha256=Ws7PmfCL5ctxIdLjQ-lkTWoYMGMjnqqDuo4chPpsF0w,1046
  swarms/agents/auto_generate_swarm_config.py,sha256=7eJ873xS7PJmyreMaa5Uub8qFu-qIinuyMuogB2Ehjc,8474
@@ -12,8 +12,8 @@ swarms/agents/gkp_agent.py,sha256=5Jms3zHQ2qwJ6-PHDh9X-cFtAlH4dSUoDgRqN-xZzog,21
  swarms/agents/i_agent.py,sha256=_kqGt3a4SGB21_GP-KcV8A5p_9wneShs6eJUJYFdluw,12274
  swarms/agents/openai_assistant.py,sha256=mTSEtj26J0mc5pCeWrmMY0EXzTRYQfyfw_BtOqtcCHc,11044
  swarms/agents/react_agent.py,sha256=yM8lQoRsqJZicqtmgBrC7RHv0aKEb5oZHh4q5aAA_xs,5804
- swarms/agents/reasoning_agents.py,sha256=En3H6NeeLRTQfhdvn_5TdxPya3xXD4AZpDKWJ_btVVk,12672
- swarms/agents/reasoning_duo.py,sha256=4qw9RtwmWkWnDWjEtJqYFVLh--EV9YQd24-fQa0lMfM,3873
+ swarms/agents/reasoning_agents.py,sha256=TpYqtQ-Ogyg4zvqXU1qDj3M8UQL8EWS_nEzl58OIxiw,13100
+ swarms/agents/reasoning_duo.py,sha256=SwL1urMPvbeiLgbvOOd2xe6YNRUo9qIQ_cOghSyL_nk,5767
  swarms/agents/self_agent_builder.py,sha256=bX7xSwak6HiyK901VdeE8OlT4yqE0n7jyHcWJrkMeew,1104
  swarms/agents/tool_agent.py,sha256=G7rhBACsHsGUMT4H9eF5aY7e3Gx-5jOmJkhCF1jm9mU,5087
  swarms/artifacts/__init__.py,sha256=M111xTw7IVVt8gLDwW7Kau5n1YdwkL3vbCJPVeEWktI,83
@@ -106,8 +106,8 @@ swarms/schemas/llm_agent_schema.py,sha256=S5ZIF21q-I9EUtQpgKYZc361o5FGFOnrW_6YTK
  swarms/schemas/mcp_schemas.py,sha256=XZJ4HyiY_cv8Gvj-53ddjzXuqT9hBU2f0cHbhIKs_jY,1330
  swarms/schemas/swarms_api_schemas.py,sha256=uKqleW_7hNpqHi06yoba9jS2i9yzZp-SBV944MnkN68,6233
  swarms/schemas/tool_schema_base_model.py,sha256=0biTGIoibsPPP3fOrkC6WvNU5vXaalyccVKC1fpO_eg,1409
- swarms/structs/__init__.py,sha256=EvJy-Dt6BacI5VJik7AJdmGHn9Rav98eZthuJ5dIOIQ,4641
- swarms/structs/agent.py,sha256=YH19a3clZWOQWa57tfWGgytzJO3qJvUCRHgYIy2jP5g,119744
+ swarms/structs/__init__.py,sha256=lmcKJ5z4JAsbHhFHlX5Fi78pfd10AFX6kzlnum0d_FI,4709
+ swarms/structs/agent.py,sha256=kVc88kSWCuTy4JiSH1vqoe_1XH6VL84edFyjARt_d6c,120164
  swarms/structs/agent_builder.py,sha256=tYNpfO4_8cgfMHfgA5DAOWffHnt70p6CLt59esqfVCY,12133
  swarms/structs/agent_rag_handler.py,sha256=g17YRrNmf16TLvyFCCcsitVk3d-QNZmck_XYmjSN_YM,21372
  swarms/structs/agent_registry.py,sha256=il507cO1NF-d4ChyANVLuWrN8bXsEAi8_7bLJ_sTU6A,12112
@@ -127,8 +127,10 @@ swarms/structs/csv_to_agent.py,sha256=Zv41sjeWA50msq-paGHESzlxZyMU78DYDLNNKZtNfo
  swarms/structs/de_hallucination_swarm.py,sha256=9cC0rSSXGwYu6SRDwpeMbCcQ40C1WI1RE9SNapKRLOQ,10309
  swarms/structs/deep_research_swarm.py,sha256=cNLDI7_lT47q4bCRlMJO4IAO3Fu_iF3nPi3jgtg6CJQ,17026
  swarms/structs/dynamic_conversational_swarm.py,sha256=xm8x_0OCI4ijatgVt8dzHhLNUyMzqG2U_XQL14kcIS8,8354
+ swarms/structs/election_swarm.py,sha256=zPKoH1xKN9RLfM5TG53zE_856hb8jeYC-kbi5NdoDmU,10062
  swarms/structs/graph_workflow.py,sha256=TAaUG_J3898hhghPOp0WEAV3Zf0in6s48ZSVbSTX-vQ,8629
  swarms/structs/groupchat.py,sha256=jjH0BqU9Nrd_3jl9QzrcvbSce527SFpUaepawaRiw2o,15391
+ swarms/structs/heavy_swarm.py,sha256=p0i7wjRUtndH2Opoq5sZl6JwJ3n5jEx3clI9Dyf1gaE,66264
  swarms/structs/hiearchical_swarm.py,sha256=7yLnheE34bAk9pdjloXBQo-LvM_e7pxNzkMV7YsonOI,28942
  swarms/structs/hybrid_hiearchical_peer_swarm.py,sha256=0BrmzSVit-I_04DFfrs7onLblLA6PSPa0JE3-4j05FA,9316
  swarms/structs/image_batch_processor.py,sha256=31Z8vTVL4dw18QxGwb0Jg1nvp0YzX8lwVgGj_-KrkhY,8207
@@ -146,6 +148,7 @@ swarms/structs/multi_agent_exec.py,sha256=3hIGgwJ_mQwgD16N096jN48-DEIZWFPoetR2nC
  swarms/structs/multi_agent_router.py,sha256=A4MDd4_QAoarrLaxClnCxnEP62bSMFvyq77ttqeq3u8,11049
  swarms/structs/multi_model_gpu_manager.py,sha256=gHC6MmVia4etMD6RlpEdbqZtV7ng4f-6jVMH0Zrt8y4,47356
  swarms/structs/omni_agent_types.py,sha256=RdKLfZ-lXDJrEa0aJT_Rfx9TypJQo8SISqKz4fnLkAk,230
+ swarms/structs/qa_swarm.py,sha256=z4o5ycwbGnJIO5fol0FlFoziuQr5JPrL-vLD_XgjdEQ,8720
  swarms/structs/rearrange.py,sha256=V_CiIIMknZVmTwvr3a4zyimGtbAGaPPmna49nn2k88g,20661
  swarms/structs/round_robin.py,sha256=MGk623KiN9uSxTMG6MY_BIAkvEDh1RPwyl5Min7GLOU,7573
  swarms/structs/safe_loading.py,sha256=gmYX8G9TsvAIp6OCvREBZt5mwSFc-p-t1rSnDBfhEmE,7124
@@ -157,14 +160,14 @@ swarms/structs/swarm_eval.py,sha256=148E2R2zaCmt_LZYx15nmdFjybXHiQ2CZbl6pk77jNs,
  swarms/structs/swarm_id_generator.py,sha256=Wly7AtGM9e6VgzhYmfg8_gSOdxAdsOvWHJFK81cpQNQ,68
  swarms/structs/swarm_matcher.py,sha256=HUCxTWRnxT5Rix3CMKEuJCqNleqPA9xGrWFGw6rjcTw,26821
  swarms/structs/swarm_registry.py,sha256=P0XRrqp1qBNyt0BycqPQljUzKv9jClaQMhtaBMinhYg,5578
- swarms/structs/swarm_router.py,sha256=XO_KSsEKMpEpTzTvyUpqa06QFutIFlRds7aGNuhUeSI,26748
+ swarms/structs/swarm_router.py,sha256=zey-KopVKLRaIDn_KBSLWOpiJwJWffsh2u5VuZjwpRI,28488
  swarms/structs/swarming_architectures.py,sha256=guNQU2N7Ofuk01fZbU3tmBJymnZ9zdGULpPZAdaqCeA,28276
  swarms/structs/tree_swarm.py,sha256=AnIxrt0KhWxAQN8uGjfCcOq-XCmsuTJiH8Ex4mXy8V8,12500
  swarms/structs/utils.py,sha256=Mo6wHQYOB8baWZUKnAJN5Dsgubpo81umNwJIEDitb2A,1873
  swarms/structs/various_alt_swarms.py,sha256=qdBuOF31UjatlKRu-9bxwyRQzIjohRhTv_63YoUeYEY,27866
  swarms/telemetry/__init__.py,sha256=-DIOcGcgSKISGhMarT5_LsqcM9xU7AVhKt2l3iIslvo,255
  swarms/telemetry/bootup.py,sha256=0leCNCy5rhzL19EsOsqHWSDI85KVcWO6_5hLDS0h4sY,1155
- swarms/telemetry/log_executions.py,sha256=kC8vy0f55f4-DQPOMQIhREmYcnVaJPZOnnW5p50PnNM,1433
+ swarms/telemetry/log_executions.py,sha256=aOZPSICkvyat9sgI9oRmhtR-US_mqgqo0ANfzDP_GtI,9998
  swarms/telemetry/main.py,sha256=SZyuXVHJdOutCLcMrIZLEjss4i8l4jxjzrMwvhtp5pM,3489
  swarms/tools/__init__.py,sha256=6j4tc28dxLrlHG88Yot90PHuNZxhJFb27xqJ8GjwN-0,2268
  swarms/tools/base_tool.py,sha256=PBA4QAP9nN3DrNc5YgzdpkGWkbaxPlhYLcQUYHxq2xQ,107434
@@ -185,11 +188,13 @@ swarms/tools/tool_parse_exec.py,sha256=FW5XzkuNEs2YrroybjKChbCzDvaCs7ypknSDpYhfk
  swarms/tools/tool_registry.py,sha256=ULZmIKBTx9XRCJRD9hwXfY3iQw9v94arw-VV6jcuftY,7992
  swarms/tools/tool_utils.py,sha256=yXzzqG7Ytd8ybB8bsjNUNLaXIuIp9JbbpUKCiHxQqo8,2816
  swarms/utils/__init__.py,sha256=tpbhE-BTISDMXemSRSRJz-Rz7m_C05Q5Pre3d9H9SN4,1195
+ swarms/utils/agent_cache.py,sha256=GOzX7NVHYIEAtdFCa1a73KvWvESl43A9jcpkL6FLEs0,21794
  swarms/utils/any_to_str.py,sha256=Qi4N9ed6LYnCs2AeFYo1zwEfYhOKUesGVFUmVUz54KI,2936
  swarms/utils/audio_processing.py,sha256=Y3KaWG9WJrgquWCeaty20HWPIXfeuPAhcJFzoSBIQjE,9893
  swarms/utils/auto_download_check_packages.py,sha256=nnEPfUr2zy_Y6vPZF56zyWLamz0e2gskS4ecKSw30ww,4594
  swarms/utils/calculate_func_metrics.py,sha256=Nb5r7rWf809m5F7mWIYXZ0H_WeyGr78A2UZD2GHtJkM,5007
  swarms/utils/check_all_model_max_tokens.py,sha256=ZHIKlrU-L-OM2IJAbYkCoVyBKe2d0JrGhDC9QNppGIs,1519
+ swarms/utils/concurrent_wrapper.py,sha256=ElenyEdYzMiDbvRBqKbLc9xvMhB6NmS4ZPA7UtMynkw,21138
  swarms/utils/data_to_text.py,sha256=1PUoWokylp7MOrGNk1cmO3cJlfskdAIiImGk9ECwsKU,3427
  swarms/utils/disable_logging.py,sha256=KKPKQVfQqLPFgj03uveOoyeHOTlfEJt-yfLc3SA53Rk,2470
  swarms/utils/file_processing.py,sha256=QjQCIPTcwicQlfy656BXBYpIzMR0s2343E7ftnok5Uo,4865
@@ -210,8 +215,8 @@ swarms/utils/str_to_dict.py,sha256=T3Jsdjz87WIlkSo7jAW6BB80sv0Ns49WT1qXlOrdEoE,8
  swarms/utils/try_except_wrapper.py,sha256=uvDZDZJcH986EF0Ej6zZBLcqHJ58NHizPsAH5olrE7Q,3919
  swarms/utils/vllm_wrapper.py,sha256=sNkm4EbeMrqqmHidnvq5zTnofQAaARy3HIrNBu11lKs,5072
  swarms/utils/xml_utils.py,sha256=D4nEdo1nkHqSoTKrWylXBXjcHFhGaOYvvfGNQQoYV5o,2514
- swarms-7.9.8.dist-info/LICENSE,sha256=jwRtEmTWjLrEsvFB6QFdYs2cEeZPRMdj-UMOFkPF8_0,11363
- swarms-7.9.8.dist-info/METADATA,sha256=kFHDWgpAamZitdueXYE4QEfRVPcA2J5nbLVULHCpuKo,38991
- swarms-7.9.8.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
- swarms-7.9.8.dist-info/entry_points.txt,sha256=2K0rTtfO1X1WaO-waJlXIKw5Voa_EpAL_yU0HXE2Jgc,47
- swarms-7.9.8.dist-info/RECORD,,
+ swarms-8.0.0.dist-info/LICENSE,sha256=jwRtEmTWjLrEsvFB6QFdYs2cEeZPRMdj-UMOFkPF8_0,11363
+ swarms-8.0.0.dist-info/METADATA,sha256=IKf7l5_rtuibcObU_cfdd28c0doN3BchbHi8iFqyv2I,38842
+ swarms-8.0.0.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
+ swarms-8.0.0.dist-info/entry_points.txt,sha256=2K0rTtfO1X1WaO-waJlXIKw5Voa_EpAL_yU0HXE2Jgc,47
+ swarms-8.0.0.dist-info/RECORD,,