swarms 7.6.2-py3-none-any.whl → 7.6.5-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- swarms/__init__.py +1 -0
- swarms/agents/__init__.py +0 -3
- swarms/agents/flexion_agent.py +2 -1
- swarms/client/__init__.py +15 -0
- swarms/prompts/multi_agent_collab_prompt.py +313 -0
- swarms/structs/__init__.py +5 -17
- swarms/structs/agent.py +219 -255
- swarms/structs/base_swarm.py +0 -7
- swarms/structs/concurrent_workflow.py +1 -1
- swarms/structs/conversation.py +16 -2
- swarms/structs/de_hallucination_swarm.py +8 -4
- swarms/structs/groupchat.py +80 -84
- swarms/structs/hybrid_hiearchical_peer_swarm.py +23 -40
- swarms/structs/multi_agent_exec.py +63 -139
- swarms/structs/rearrange.py +65 -204
- swarms/structs/sequential_workflow.py +34 -47
- swarms/structs/swarm_router.py +2 -1
- swarms/telemetry/bootup.py +19 -38
- swarms/telemetry/main.py +56 -20
- swarms/tools/mcp_integration.py +321 -483
- swarms/utils/auto_download_check_packages.py +2 -2
- swarms/utils/disable_logging.py +0 -17
- swarms/utils/history_output_formatter.py +8 -3
- swarms/utils/litellm_wrapper.py +117 -1
- swarms/utils/vllm_wrapper.py +146 -0
- {swarms-7.6.2.dist-info → swarms-7.6.5.dist-info}/METADATA +1 -5
- {swarms-7.6.2.dist-info → swarms-7.6.5.dist-info}/RECORD +31 -31
- swarms/structs/auto_swarm.py +0 -229
- swarms/utils/agent_ops_check.py +0 -26
- swarms/utils/pandas_utils.py +0 -92
- /swarms/{structs/swarms_api.py → client/main.py} +0 -0
- {swarms-7.6.2.dist-info → swarms-7.6.5.dist-info}/LICENSE +0 -0
- {swarms-7.6.2.dist-info → swarms-7.6.5.dist-info}/WHEEL +0 -0
- {swarms-7.6.2.dist-info → swarms-7.6.5.dist-info}/entry_points.txt +0 -0
swarms/utils/disable_logging.py
CHANGED
@@ -5,20 +5,6 @@ import warnings
 from threading import Thread


-def disable_langchain():
-    """
-    Disables the LangChain deprecation warning.
-    """
-    from langchain_core._api.deprecation import (
-        LangChainDeprecationWarning,
-    )
-
-    # Ignore LangChainDeprecationWarning
-    warnings.filterwarnings(
-        "ignore", category=LangChainDeprecationWarning
-    )
-
-
 def disable_logging():
     """
     Disables logging for specific modules and sets up file and stream handlers.
@@ -47,7 +33,6 @@ def disable_logging():
         "numexpr",
         "git",
         "wandb.docker.auth",
-        "langchain",
         "distutils",
         "urllib3",
         "elasticsearch",
@@ -80,8 +65,6 @@ def disable_logging():
     stream_handler.setLevel(logging.ERROR)
     logging.getLogger().addHandler(stream_handler)

-    disable_langchain()
-

 def set_logger_level(logger_name: str) -> None:
     """
swarms/utils/history_output_formatter.py
CHANGED
@@ -1,3 +1,4 @@
+import yaml
 from swarms.structs.conversation import Conversation


@@ -6,13 +7,17 @@ def history_output_formatter(
 ):
     if type == "list":
         return conversation.return_messages_as_list()
-    elif type == "dict":
+    elif type == "dict" or type == "dictionary":
         return conversation.to_dict()
     elif type == "string" or type == "str":
         return conversation.get_str()
-    elif type == "final":
-        return conversation.
+    elif type == "final" or type == "last":
+        return conversation.get_final_message_content()
     elif type == "json":
         return conversation.to_json()
+    elif type == "all":
+        return conversation.get_str()
+    elif type == "yaml":
+        return yaml.safe_dump(conversation.to_dict(), sort_keys=False)
     else:
         raise ValueError(f"Invalid type: {type}")
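Note: the new "yaml" branch simply serializes the conversation's dict form with PyYAML. A minimal sketch of that call, using a hand-built message list as a stand-in for conversation.to_dict() (the exact dict shape comes from the Conversation class and is not shown in this diff):

import yaml

# Stand-in for conversation.to_dict(); the real structure is produced by
# swarms.structs.conversation.Conversation.
history = [
    {"role": "user", "content": "Summarize the report."},
    {"role": "assistant", "content": "The report covers Q3 revenue."},
]

# sort_keys=False keeps keys in insertion order, matching the call added above.
print(yaml.safe_dump(history, sort_keys=False))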
swarms/utils/litellm_wrapper.py
CHANGED
@@ -1,3 +1,6 @@
+import base64
+import requests
+
 import asyncio
 from typing import List

@@ -24,6 +27,37 @@ except ImportError:
     litellm.ssl_verify = False


+def get_audio_base64(audio_source: str) -> str:
+    """
+    Convert audio from a given source to a base64 encoded string.
+
+    This function handles both URLs and local file paths. If the audio source is a URL, it fetches the audio data
+    from the internet. If it is a local file path, it reads the audio data from the specified file.
+
+    Args:
+        audio_source (str): The source of the audio, which can be a URL or a local file path.
+
+    Returns:
+        str: A base64 encoded string representation of the audio data.
+
+    Raises:
+        requests.HTTPError: If the HTTP request to fetch audio data fails.
+        FileNotFoundError: If the local audio file does not exist.
+    """
+    # Handle URL
+    if audio_source.startswith(("http://", "https://")):
+        response = requests.get(audio_source)
+        response.raise_for_status()
+        audio_data = response.content
+    # Handle local file
+    else:
+        with open(audio_source, "rb") as file:
+            audio_data = file.read()
+
+    encoded_string = base64.b64encode(audio_data).decode("utf-8")
+    return encoded_string
+
+
 class LiteLLM:
     """
     This class represents a LiteLLM.
@@ -42,6 +76,7 @@ class LiteLLM:
         tools_list_dictionary: List[dict] = None,
         tool_choice: str = "auto",
         parallel_tool_calls: bool = False,
+        audio: str = None,
         *args,
         **kwargs,
     ):
@@ -65,6 +100,7 @@ class LiteLLM:
         self.tools_list_dictionary = tools_list_dictionary
         self.tool_choice = tool_choice
         self.parallel_tool_calls = parallel_tool_calls
+        self.modalities = ["text"]

     def _prepare_messages(self, task: str) -> list:
         """
@@ -87,7 +123,83 @@ class LiteLLM:

         return messages

-    def
+    def audio_processing(self, task: str, audio: str):
+        """
+        Process the audio for the given task.
+
+        Args:
+            task (str): The task to be processed.
+            audio (str): The path or identifier for the audio file.
+        """
+        self.modalities.append("audio")
+
+        encoded_string = get_audio_base64(audio)
+
+        # Append messages
+        self.messages.append(
+            {
+                "role": "user",
+                "content": [
+                    {"type": "text", "text": task},
+                    {
+                        "type": "input_audio",
+                        "input_audio": {
+                            "data": encoded_string,
+                            "format": "wav",
+                        },
+                    },
+                ],
+            }
+        )
+
+    def vision_processing(self, task: str, image: str):
+        """
+        Process the image for the given task.
+        """
+        self.modalities.append("vision")
+
+        # Append messages
+        self.messages.append(
+            {
+                "role": "user",
+                "content": [
+                    {"type": "text", "text": task},
+                    {
+                        "type": "image_url",
+                        "image_url": {
+                            "url": image,
+                            # "detail": "high"
+                            # "format": "image",
+                        },
+                    },
+                ],
+            }
+        )
+
+    def handle_modalities(
+        self, task: str, audio: str = None, img: str = None
+    ):
+        """
+        Handle the modalities for the given task.
+        """
+        if audio is not None:
+            self.audio_processing(task=task, audio=audio)
+
+        if img is not None:
+            self.vision_processing(task=task, image=img)
+
+        if audio is not None and img is not None:
+            self.audio_processing(task=task, audio=audio)
+            self.vision_processing(task=task, image=img)
+
+    def run(
+        self,
+        task: str,
+        audio: str = None,
+        img: str = None,
+        *args,
+        **kwargs,
+    ):
         """
         Run the LLM model for the given task.

@@ -103,6 +215,8 @@ class LiteLLM:

         messages = self._prepare_messages(task)

+        self.handle_modalities(task=task, audio=audio, img=img)
+
         if self.tools_list_dictionary is not None:
             response = completion(
                 model=self.model_name,
@@ -111,6 +225,7 @@ class LiteLLM:
                 temperature=self.temperature,
                 max_tokens=self.max_tokens,
                 tools=self.tools_list_dictionary,
+                modalities=self.modalities,
                 tool_choice=self.tool_choice,
                 parallel_tool_calls=self.parallel_tool_calls,
                 *args,
@@ -130,6 +245,7 @@ class LiteLLM:
                 stream=self.stream,
                 temperature=self.temperature,
                 max_tokens=self.max_tokens,
+                modalities=self.modalities,
                 *args,
                 **kwargs,
             )
swarms/utils/vllm_wrapper.py
ADDED
@@ -0,0 +1,146 @@
+from typing import List, Optional, Dict, Any
+from loguru import logger
+
+try:
+    from vllm import LLM, SamplingParams
+except ImportError:
+    import subprocess
+    import sys
+
+    print("Installing vllm")
+    subprocess.check_call(
+        [sys.executable, "-m", "pip", "install", "-U", "vllm"]
+    )
+    print("vllm installed")
+    from vllm import LLM, SamplingParams
+
+
+class VLLMWrapper:
+    """
+    A wrapper class for vLLM that provides a similar interface to LiteLLM.
+    This class handles model initialization and inference using vLLM.
+    """
+
+    def __init__(
+        self,
+        model_name: str = "meta-llama/Llama-2-7b-chat-hf",
+        system_prompt: Optional[str] = None,
+        stream: bool = False,
+        temperature: float = 0.5,
+        max_tokens: int = 4000,
+        max_completion_tokens: int = 4000,
+        tools_list_dictionary: Optional[List[Dict[str, Any]]] = None,
+        tool_choice: str = "auto",
+        parallel_tool_calls: bool = False,
+        *args,
+        **kwargs,
+    ):
+        """
+        Initialize the vLLM wrapper with the given parameters.
+
+        Args:
+            model_name (str): The name of the model to use. Defaults to "meta-llama/Llama-2-7b-chat-hf".
+            system_prompt (str, optional): The system prompt to use. Defaults to None.
+            stream (bool): Whether to stream the output. Defaults to False.
+            temperature (float): The temperature for sampling. Defaults to 0.5.
+            max_tokens (int): The maximum number of tokens to generate. Defaults to 4000.
+            max_completion_tokens (int): The maximum number of completion tokens. Defaults to 4000.
+            tools_list_dictionary (List[Dict[str, Any]], optional): List of available tools. Defaults to None.
+            tool_choice (str): How to choose tools. Defaults to "auto".
+            parallel_tool_calls (bool): Whether to allow parallel tool calls. Defaults to False.
+        """
+        self.model_name = model_name
+        self.system_prompt = system_prompt
+        self.stream = stream
+        self.temperature = temperature
+        self.max_tokens = max_tokens
+        self.max_completion_tokens = max_completion_tokens
+        self.tools_list_dictionary = tools_list_dictionary
+        self.tool_choice = tool_choice
+        self.parallel_tool_calls = parallel_tool_calls
+
+        # Initialize vLLM
+        self.llm = LLM(model=model_name, **kwargs)
+        self.sampling_params = SamplingParams(
+            temperature=temperature,
+            max_tokens=max_tokens,
+        )
+
+    def _prepare_prompt(self, task: str) -> str:
+        """
+        Prepare the prompt for the given task.
+
+        Args:
+            task (str): The task to prepare the prompt for.
+
+        Returns:
+            str: The prepared prompt.
+        """
+        if self.system_prompt:
+            return f"{self.system_prompt}\n\nUser: {task}\nAssistant:"
+        return f"User: {task}\nAssistant:"
+
+    def run(self, task: str, *args, **kwargs) -> str:
+        """
+        Run the model for the given task.
+
+        Args:
+            task (str): The task to run the model for.
+            *args: Additional positional arguments.
+            **kwargs: Additional keyword arguments.
+
+        Returns:
+            str: The model's response.
+        """
+        try:
+            prompt = self._prepare_prompt(task)
+
+            outputs = self.llm.generate(prompt, self.sampling_params)
+            response = outputs[0].outputs[0].text.strip()
+
+            return response
+
+        except Exception as error:
+            logger.error(f"Error in VLLMWrapper: {error}")
+            raise error
+
+    def __call__(self, task: str, *args, **kwargs) -> str:
+        """
+        Call the model for the given task.
+
+        Args:
+            task (str): The task to run the model for.
+            *args: Additional positional arguments.
+            **kwargs: Additional keyword arguments.
+
+        Returns:
+            str: The model's response.
+        """
+        return self.run(task, *args, **kwargs)
+
+    def batched_run(
+        self, tasks: List[str], batch_size: int = 10
+    ) -> List[str]:
+        """
+        Run the model for multiple tasks in batches.
+
+        Args:
+            tasks (List[str]): List of tasks to run.
+            batch_size (int): Size of each batch. Defaults to 10.
+
+        Returns:
+            List[str]: List of model responses.
+        """
+        logger.info(
+            f"Running tasks in batches of size {batch_size}. Total tasks: {len(tasks)}"
+        )
+        results = []
+
+        for i in range(0, len(tasks), batch_size):
+            batch = tasks[i : i + batch_size]
+            for task in batch:
+                logger.info(f"Running task: {task}")
+                results.append(self.run(task))
+
+        logger.info("Completed all tasks.")
+        return results
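Note: a short usage sketch for the new wrapper, assuming vllm is installed, the chosen model is accessible (the Llama 2 checkpoint requires accepting its license), and suitable hardware is available; the prompts below are illustrative.

from swarms.utils.vllm_wrapper import VLLMWrapper

# Any vLLM-compatible model name works here.
llm = VLLMWrapper(
    model_name="meta-llama/Llama-2-7b-chat-hf",
    system_prompt="You are a concise research assistant.",
    temperature=0.3,
    max_tokens=512,
)

# Single task: builds a "system\n\nUser: ...\nAssistant:" prompt and
# returns the first generated completion as a string.
print(llm.run("List three uses of multi-agent swarms."))

# Batched tasks: batched_run() walks the list in chunks of batch_size
# and calls run() on each task sequentially.
answers = llm.batched_run(
    ["Define agent orchestration.", "What is a sequential workflow?"],
    batch_size=2,
)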
{swarms-7.6.2.dist-info → swarms-7.6.5.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: swarms
-Version: 7.6.2
+Version: 7.6.5
 Summary: Swarms - TGSC
 Home-page: https://github.com/kyegomez/swarms
 License: MIT
@@ -467,8 +467,6 @@ We provide vast array of features to save agent states using json, yaml, toml, u
 | `tokens_checks()` | Performs token checks for the agent. |
 | `print_dashboard()` | Prints the dashboard of the agent. |
 | `get_docs_from_doc_folders()` | Fetches all the documents from the doc folders. |
-| `activate_agentops()` | Activates agent operations. |
-| `check_end_session_agentops()` | Checks the end of the session for agent operations. |



@@ -520,8 +518,6 @@ agent.print_dashboard()
 agent.get_docs_from_doc_folders()

 # Activate agent ops
-agent.activate_agentops()
-agent.check_end_session_agentops()

 # Dump the model to a JSON file
 agent.model_dump_json()
{swarms-7.6.2.dist-info → swarms-7.6.5.dist-info}/RECORD
CHANGED
@@ -1,12 +1,12 @@
-swarms/__init__.py,sha256=
-swarms/agents/__init__.py,sha256=
+swarms/__init__.py,sha256=bc-57terLt5Uxlawsdq-ER6ukbVrxxWy8PEoG7VB5Ug,560
+swarms/agents/__init__.py,sha256=ebUrX9rMccQokq5XthgnzGDMISn8EyQyOIOwVAlLs1E,1190
 swarms/agents/agent_judge.py,sha256=xT242CX5mV64cq2B-3RGkuEHiV5aD04P_Zq8_s64iMQ,3967
 swarms/agents/agent_print.py,sha256=SXqWA2ZzXwRFdv8hkuYwOPMTasvaGTG6U29413qRCAA,918
 swarms/agents/ape_agent.py,sha256=1kz_65LJgjLlY1yv2WLBeVMs7sP9BgEVWk0w1f67YLc,1563
 swarms/agents/auto_generate_swarm_config.py,sha256=7eJ873xS7PJmyreMaa5Uub8qFu-qIinuyMuogB2Ehjc,8474
 swarms/agents/consistency_agent.py,sha256=41h0yvnjzmKsE8-q4UsN0ckHP7WWmB5E_z64ec9QaJM,7414
 swarms/agents/create_agents_from_yaml.py,sha256=PgFIpuYZehxEl79BAK6TolSZwydDQzvGMAKhLsHuBbc,13008
-swarms/agents/flexion_agent.py,sha256=
+swarms/agents/flexion_agent.py,sha256=Agjq1rvTzboE8lT26-mcjp0tKQEjlUj_eVYsFjLIvN0,21468
 swarms/agents/gkp_agent.py,sha256=5Jms3zHQ2qwJ6-PHDh9X-cFtAlH4dSUoDgRqN-xZzog,21067
 swarms/agents/i_agent.py,sha256=_eKUcgPfiVqQpF5Q-Sv1tT-JZxIeNl9Fp7OrnjVUtz8,12276
 swarms/agents/openai_assistant.py,sha256=mTSEtj26J0mc5pCeWrmMY0EXzTRYQfyfw_BtOqtcCHc,11044
@@ -19,6 +19,8 @@ swarms/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 swarms/cli/create_agent.py,sha256=o2V6EDQN477MxUOm0wShlr4uNyPpPzqpJhGM5tiuWUU,1128
 swarms/cli/main.py,sha256=T9YsCrNIzvbQA8h5qlke-TbRP498Wu6R_KkAxRiabvs,15319
 swarms/cli/onboarding_process.py,sha256=3-2LKoLhjnaPbX9iiasqXPZZpqmwm-ZrXawH88M4BIU,6940
+swarms/client/__init__.py,sha256=VASeCEYoZskTU3NOw5tQ22uc2_7gyK0oLsxG_WB20ys,268
+swarms/client/main.py,sha256=f-RpvfKfRK2AaapkyPN2ihXJvIGN4JWB_A7pJu4WyiU,13735
 swarms/prompts/__init__.py,sha256=ZFcghAs4b0Rsjcc_DIFssiztZub5tJ66rxmJD2_tXpQ,747
 swarms/prompts/accountant_swarm_prompts.py,sha256=swceN1B0fZCOedd3-y3gjNOHDmR-3H5YK17ytp7tTDM,11320
 swarms/prompts/ag_prompt.py,sha256=_mydaflPh3kPpogqfObJ9UK5D8ySAGgtcO4BoNBN3LE,2658
@@ -44,6 +46,7 @@ swarms/prompts/idea2img.py,sha256=ZM-iD0ZFuqU2V1SY3V5Ds6ItTDd6JzQ4vpmz3faMzlM,88
 swarms/prompts/legal_agent_prompt.py,sha256=DhIroYuviqrU7cgKz6F8U2LNSBykLH57AZqoH5AfD0U,3346
 swarms/prompts/logistics.py,sha256=_XT_feYFgv-1IzHi5cPKuAp8xBjSX5KqwlPKh9xfK7E,4785
 swarms/prompts/meta_system_prompt.py,sha256=GOsWqtGfFezpKyg8c876Y-kCkWGI563iS2-0AaI__MI,3374
+swarms/prompts/multi_agent_collab_prompt.py,sha256=kyGWhoy50j0IxUBvxWAEBMLbIE8UbNOqixqCNHH2oFs,12610
 swarms/prompts/multi_modal_autonomous_instruction_prompt.py,sha256=SHfaKs5Hj9sV4jgtVAFhv1N_N2e3jKwvdx_8G8-OdhM,10662
 swarms/prompts/multi_modal_prompts.py,sha256=yvE9_RAFTKU1QhN0rNOelrO7jn5fjDARpcKussbBc2c,3511
 swarms/prompts/multi_modal_visual_prompts.py,sha256=Apv5vqTzB7nBj7nnoMPO0fog3PL9KLrteEjYM_SjaEE,3225
@@ -78,31 +81,30 @@ swarms/schemas/__init__.py,sha256=EqqtVcpoptF1kfy19Wykp22ut4AA0z-yMQ5H9WB7ptA,18
 swarms/schemas/agent_input_schema.py,sha256=qhPyThMx2on91yG9mzNdP_08GpMh1IRDHDwFna29jPs,6345
 swarms/schemas/agent_step_schemas.py,sha256=a14gb58vR0xOwB_fwSJQbN6yb9HddEaT30E6hUrzEQA,2573
 swarms/schemas/base_schemas.py,sha256=UvBLVWg2qRen4tK5GJz50v42SiX95EQ5qK7hfyAHTEU,3267
-swarms/structs/__init__.py,sha256=
-swarms/structs/agent.py,sha256=
+swarms/structs/__init__.py,sha256=ER0HI-9RQI22i10x6XQj6TaKoWJgk1a5XIP1KxiBsCU,4310
+swarms/structs/agent.py,sha256=IChi9EZiFUwCmj7dPa9H70TBkYd6VnTt-5Jx5TLPbWI,95234
 swarms/structs/agent_builder.py,sha256=tYNpfO4_8cgfMHfgA5DAOWffHnt70p6CLt59esqfVCY,12133
 swarms/structs/agent_registry.py,sha256=il507cO1NF-d4ChyANVLuWrN8bXsEAi8_7bLJ_sTU6A,12112
 swarms/structs/agent_roles.py,sha256=8XEw6RjOOZelaZaWt4gXaYQm5WMLEhSO7W6Z8sQjmFg,582
 swarms/structs/agent_router.py,sha256=YZw5AaK2yTvxkOA7ouED_4MoYgn0XZggvo1wrglp-4E,13017
 swarms/structs/agents_available.py,sha256=SedxDim-0IWgGsNwJZxRIUMfKyAFFXdvXSYeBNu0zGw,2804
 swarms/structs/async_workflow.py,sha256=7YWsLPyGY-1-mMxoIXWQ0FnYH6F227nxsS9PFAJoF9Q,26214
-swarms/structs/auto_swarm.py,sha256=AHWswlEWDL_i3V8IP362tx6pi_B2arlZhALykrkI5OA,8215
 swarms/structs/auto_swarm_builder.py,sha256=vPM5Kq59D_FvuWJB8hxgHuEvTXsxDxovlBnHGVQsM4o,10938
 swarms/structs/base_structure.py,sha256=GDu4QJQQmhU7IyuFJHIh9UVThACCva-L7uoMbVD9l4s,15901
-swarms/structs/base_swarm.py,sha256=
+swarms/structs/base_swarm.py,sha256=LSGJDPJdyUCcK6698mNtjxoC1OU3s_J2NxC2k_ccGUs,23779
 swarms/structs/base_workflow.py,sha256=DTfFwX3AdFYxACDYwUDqhsbcDZnITlg5TeEYyxmJBCc,11414
 swarms/structs/concat.py,sha256=utezSxNyh1mIwXgdf8-dJ803NDPyEy79WE8zJHuooGk,732
-swarms/structs/concurrent_workflow.py,sha256=
-swarms/structs/conversation.py,sha256=
+swarms/structs/concurrent_workflow.py,sha256=d1_slbALpxrdEGzZffUSAcEbONW0kc7fyTpVZTBmzi4,15517
+swarms/structs/conversation.py,sha256=h4A4l9Mcucw1v-N0mOA4keZ9vf-l3t-kBINZlk_CrOA,18392
 swarms/structs/csv_to_agent.py,sha256=ug9JqQFPguXeU9JQpSUXuVtOpHYdJhlpKJUJBovo694,9443
-swarms/structs/de_hallucination_swarm.py,sha256=
+swarms/structs/de_hallucination_swarm.py,sha256=9cC0rSSXGwYu6SRDwpeMbCcQ40C1WI1RE9SNapKRLOQ,10309
 swarms/structs/deep_research_swarm.py,sha256=h4jtrcuiAKMWMYo8I7oaq6eFn8cJpqHhml58EveNbZ4,16756
 swarms/structs/dynamic_conversational_swarm.py,sha256=n_d1jDCzBwiGb0QjJpW_MlXxqEkhGEhC1ttaebH7f3Q,8098
 swarms/structs/graph_swarm.py,sha256=HPHlLWwdSPSe4o-I06ZOIgtBg72a06llEnv8-aglf3Q,20962
 swarms/structs/graph_workflow.py,sha256=TAaUG_J3898hhghPOp0WEAV3Zf0in6s48ZSVbSTX-vQ,8629
-swarms/structs/groupchat.py,sha256=
+swarms/structs/groupchat.py,sha256=CivOw0QlpjugG8MIu5uGGVoA_0_oY6sBg0XlA08gViQ,15691
 swarms/structs/hiearchical_swarm.py,sha256=XDEdy5kAISmWKraMR26VX47eCT4YgGTI2FNcBQzIvLE,35274
-swarms/structs/hybrid_hiearchical_peer_swarm.py,sha256=
+swarms/structs/hybrid_hiearchical_peer_swarm.py,sha256=wLdVc9gdfIUGzevWQbZN2Hvk54sjlmoTQO9RgUImD28,9216
 swarms/structs/majority_voting.py,sha256=F_t_MOC3YCRyMw5N6qKdFThpaXZxwixRw592Ku5Uhto,10122
 swarms/structs/malt.py,sha256=0ZOuLfOzaJ4vAVOM6J1aZ3yWAiKxfMkNIBNp8pjsEqE,19392
 swarms/structs/matrix_swarm.py,sha256=qHuhOYrTyOv6ujHMe8PrQT-h-WmaCPCfX4ghv5L8UFI,9765
@@ -110,17 +112,17 @@ swarms/structs/meme_agent_persona_generator.py,sha256=b3kKlumhsV4KV88-GS3CUnGO1U
 swarms/structs/mixture_of_agents.py,sha256=G8_MVMrDd0-ZD_gJ5YZgtTCUjl7COha9Me-vOYMXsAE,10575
 swarms/structs/model_router.py,sha256=V5pZHYlxSmCvAA2Gsns7LaCz8dXtRi7pCvb-oLGHYIY,12739
 swarms/structs/multi_agent_collab.py,sha256=odh2NQRR23LidsphCxUfAke369lDdgL__w3Xovu9jkA,7731
-swarms/structs/multi_agent_exec.py,sha256=
+swarms/structs/multi_agent_exec.py,sha256=Gxwr9mHADX3n29pdxor-dQDnKPSNdnicpCxBLmPwnLg,14344
 swarms/structs/multi_agent_orchestrator.py,sha256=_trjXCW31ZeVR7N2hURLUPDZhYa-Wa3ADMk1wnNJdcQ,13400
 swarms/structs/octotools.py,sha256=GZo0qtFM69W7vvewk6_k09vicgw0c0_v7MiPvEZCagE,31406
 swarms/structs/omni_agent_types.py,sha256=RdKLfZ-lXDJrEa0aJT_Rfx9TypJQo8SISqKz4fnLkAk,230
 swarms/structs/output_types.py,sha256=F1jNbDLJrubRIUyheMGMahJfGikbWZ_yNmbE9QVIz9A,280
 swarms/structs/pulsar_swarm.py,sha256=4_L0GqPBgnA3AJajpNLgO4IAG6U36nIntFK9WNJScv8,15968
 swarms/structs/queue_swarm.py,sha256=8vcA-rh280midcdgfA5IwJzBmMgdn71nRH6KndWu-DA,6770
-swarms/structs/rearrange.py,sha256=
+swarms/structs/rearrange.py,sha256=5u7HwTVVH414w9rhEQvLdltW1ACHjgwn-zS8-8JEXmA,22576
 swarms/structs/round_robin.py,sha256=MGk623KiN9uSxTMG6MY_BIAkvEDh1RPwyl5Min7GLOU,7573
 swarms/structs/safe_loading.py,sha256=gmYX8G9TsvAIp6OCvREBZt5mwSFc-p-t1rSnDBfhEmE,7124
-swarms/structs/sequential_workflow.py,sha256=
+swarms/structs/sequential_workflow.py,sha256=5jxHP-a2CzdclSXIrVWkQKXBr01VzrgOBIexR9s8diw,8492
 swarms/structs/spreadsheet_swarm.py,sha256=ToX56QJjlm_nnC3MYppvKC_NTr9Zy_orkBzfxNLdwbA,14845
 swarms/structs/stopping_conditions.py,sha256=Z0Jx0U2feAfQfuVV_IJGgal62DoVsGPN8K6HkkqB_bM,484
 swarms/structs/swarm_arange.py,sha256=6fexCPsXRgdLbpY0p9rp_REipeXzsbv1_GOtD9B4HaI,15179
@@ -131,17 +133,16 @@ swarms/structs/swarm_load_balancer.py,sha256=pUCc5FEBcuJ_GmOFeTWBPfXlUdiTOjYcJqV
 swarms/structs/swarm_matcher.py,sha256=E2KwHHEJxmW-UfTeMPWZ6VCmYdQ_I9_fwrfJbxD02GY,23322
 swarms/structs/swarm_output_type.py,sha256=tW8Iqar1Jaf2Lzw66nAPc6MDk7-srQl5_XUKFvzoam4,755
 swarms/structs/swarm_registry.py,sha256=P0XRrqp1qBNyt0BycqPQljUzKv9jClaQMhtaBMinhYg,5578
-swarms/structs/swarm_router.py,sha256=
+swarms/structs/swarm_router.py,sha256=05G0weYMUUo-20xgSeUnwCaH1lf6p1epXllI_iXo18Y,26854
 swarms/structs/swarming_architectures.py,sha256=VvkSA9nQnF91V2C5-ALwSY1peZckeM1G4pPeQS7IVsE,13109
-swarms/structs/swarms_api.py,sha256=f-RpvfKfRK2AaapkyPN2ihXJvIGN4JWB_A7pJu4WyiU,13735
 swarms/structs/talk_hier.py,sha256=npyEuL52SCgQmMynIvGjfatNqOz4toq0EyhEtSNmQhQ,25649
 swarms/structs/tree_swarm.py,sha256=AnIxrt0KhWxAQN8uGjfCcOq-XCmsuTJiH8Ex4mXy8V8,12500
 swarms/structs/utils.py,sha256=Mo6wHQYOB8baWZUKnAJN5Dsgubpo81umNwJIEDitb2A,1873
 swarms/structs/various_alt_swarms.py,sha256=qdBuOF31UjatlKRu-9bxwyRQzIjohRhTv_63YoUeYEY,27866
 swarms/structs/workspace_manager.py,sha256=t0OgUP9dDU7xS6N3mAT2PbXvjHTsUK3nf2mxppcfZ70,5473
 swarms/telemetry/__init__.py,sha256=yibtkHEbQRPUv6ir1FhDHlAO_3nwKJPQH4LjzBC2AuQ,661
-swarms/telemetry/bootup.py,sha256=
-swarms/telemetry/main.py,sha256=
+swarms/telemetry/bootup.py,sha256=0leCNCy5rhzL19EsOsqHWSDI85KVcWO6_5hLDS0h4sY,1155
+swarms/telemetry/main.py,sha256=QBQyO4JgIeMLGbdVPFaipIBasCMwuADB5wDNKQBEYDQ,9474
 swarms/tools/__init__.py,sha256=pqIMcRQr4gtoNdbyI1N5k4upkYSBMxACJbxfB9yrV4c,1493
 swarms/tools/base_tool.py,sha256=BiBCFHin8AyZO3FYOGA-n3M2o-F36xUeIBUiybnZYjI,15179
 swarms/tools/cohere_func_call_schema.py,sha256=XJ6_yBMXCrV9KjN7v9Bk1iFj69TRlGIWYKsUTA1oGiQ,600
@@ -151,7 +152,7 @@ swarms/tools/function_util.py,sha256=DAnAPO0Ik__TAqL7IJzFmkukHnhpsW_QtALl3yj837g
 swarms/tools/json_former.py,sha256=4ugLQ_EZpghhuhFsVKsy-ehin9K64pqVE2gLU7BTO_M,14376
 swarms/tools/json_utils.py,sha256=WKMZjcJ0Vt6lgIjiTBenslcfjgRSLX4UWs4uDkKFMQI,1316
 swarms/tools/logits_processor.py,sha256=NifZZ5w9yemWGJAJ5nHFrphtZVX1XlyesgvYZTxK1GM,2965
-swarms/tools/mcp_integration.py,sha256=
+swarms/tools/mcp_integration.py,sha256=rUXxC9NvXQ3V4B7Lt1AoI4ZYiCl2-T4FW3_689HTRZk,12839
 swarms/tools/openai_func_calling_schema_pydantic.py,sha256=6BAH9kuaVTvJIbjgSSJ5XvHhWvWszPxgarkfUuE5Ads,978
 swarms/tools/openai_tool_creator_decorator.py,sha256=SYZjHnARjWvnH9cBdj7Kc_Yy1muvNxMT3RQz8KkA2SE,2578
 swarms/tools/py_func_to_openai_func_str.py,sha256=W112Gu0CmAiHrNWnRMcnoGiVZEy2FxAU4xMvnG9XP4g,15710
@@ -161,30 +162,29 @@ swarms/tools/tool_registry.py,sha256=ULZmIKBTx9XRCJRD9hwXfY3iQw9v94arw-VV6jcuftY
 swarms/tools/tool_schema_base_model.py,sha256=0biTGIoibsPPP3fOrkC6WvNU5vXaalyccVKC1fpO_eg,1409
 swarms/tools/tool_utils.py,sha256=yXzzqG7Ytd8ybB8bsjNUNLaXIuIp9JbbpUKCiHxQqo8,2816
 swarms/utils/__init__.py,sha256=9qKE_11pxom74j3qExSm6Z_LwR5lrpC5YG17v22eLlo,975
-swarms/utils/agent_ops_check.py,sha256=08UomeSv1uw_oEDlum0yG-5SsKkxqPRbeIWeKC75b08,685
 swarms/utils/any_to_str.py,sha256=Qi4N9ed6LYnCs2AeFYo1zwEfYhOKUesGVFUmVUz54KI,2936
-swarms/utils/auto_download_check_packages.py,sha256=
+swarms/utils/auto_download_check_packages.py,sha256=mqx3jCetfkTuxTdeGLx-gGMB1xWOU5vata8lTKXLatk,4580
 swarms/utils/calculate_func_metrics.py,sha256=Nb5r7rWf809m5F7mWIYXZ0H_WeyGr78A2UZD2GHtJkM,5007
 swarms/utils/data_to_text.py,sha256=1PUoWokylp7MOrGNk1cmO3cJlfskdAIiImGk9ECwsKU,3427
-swarms/utils/disable_logging.py,sha256
+swarms/utils/disable_logging.py,sha256=KKPKQVfQqLPFgj03uveOoyeHOTlfEJt-yfLc3SA53Rk,2470
 swarms/utils/file_processing.py,sha256=QjQCIPTcwicQlfy656BXBYpIzMR0s2343E7ftnok5Uo,4865
 swarms/utils/formatter.py,sha256=YykmcuWXkxvQ7a2Vq6OzWuqUDiIwro6VrtSt4ITbXcU,4194
 swarms/utils/function_caller_model.py,sha256=ZfgCMzOizNnuZipYLclTziECNHszH9p8RQcUq7VNr4Q,4156
-swarms/utils/history_output_formatter.py,sha256=
+swarms/utils/history_output_formatter.py,sha256=WHcd0xhSNRDKakXtkCjv0nW1NF-GM9SYcey3RrN5gl8,778
 swarms/utils/litellm_tokenizer.py,sha256=0AAj4NffBe2eHii_3_5SpQAhSiBbunJR8MzaBTIm7hg,484
-swarms/utils/litellm_wrapper.py,sha256=
+swarms/utils/litellm_wrapper.py,sha256=cXZ6nUrHnGhpVgolgbpNsyKq1_TzupJs8vmw-_XtCRM,11255
 swarms/utils/loguru_logger.py,sha256=hIoSK3NHLpe7eAmjHRURrEYzNXYC2gbR7_Vv63Yaydk,685
 swarms/utils/markdown_message.py,sha256=RThHNnMf6ZLTlYK4vKn3yuewChaxWAYAWb0Xm_pTyIU,652
-swarms/utils/pandas_utils.py,sha256=AA0wNWM05CrNovW7x9aY63Zhw7CIGMERmxvjH2Q-Jjc,2567
 swarms/utils/parse_code.py,sha256=XFOLymbdP3HzMZuqsj7pwUyisvUmTm0ev9iThR_ambI,1987
 swarms/utils/pdf_to_text.py,sha256=nkySOS_sJ4Jf4RP5SoDpMB5WfjJ_GGc5z8gJfn2cxOM,1311
 swarms/utils/str_to_dict.py,sha256=T3Jsdjz87WIlkSo7jAW6BB80sv0Ns49WT1qXlOrdEoE,874
 swarms/utils/swarm_reliability_checks.py,sha256=MsgUULt3HYg72D0HifZNmtCyJYpLA2UDA2wQixI-NbA,2562
 swarms/utils/try_except_wrapper.py,sha256=appEGu9Afy3TmdkNNXUgQ9yU9lj2j0uNkIoW0JhVzzY,3917
 swarms/utils/visualizer.py,sha256=0ylohEk62MAS6iPRaDOV03m9qo2k5J56tWlKJk_46p4,16927
+swarms/utils/vllm_wrapper.py,sha256=OIGnU9Vf81vE_hul1FK-xEhChFK8fxqZX6-fhQeW22c,4987
 swarms/utils/wrapper_clusterop.py,sha256=PMSCVM7ZT1vgj1D_MYAe835RR3SMLYxA-si2JS02yNQ,4220
-swarms-7.6.
-swarms-7.6.
-swarms-7.6.
-swarms-7.6.
-swarms-7.6.
+swarms-7.6.5.dist-info/LICENSE,sha256=jwRtEmTWjLrEsvFB6QFdYs2cEeZPRMdj-UMOFkPF8_0,11363
+swarms-7.6.5.dist-info/METADATA,sha256=2w553wEExRZlKYKxX_LCvhpbyJbhefIxw43-I1Jvwmw,104909
+swarms-7.6.5.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+swarms-7.6.5.dist-info/entry_points.txt,sha256=2K0rTtfO1X1WaO-waJlXIKw5Voa_EpAL_yU0HXE2Jgc,47
+swarms-7.6.5.dist-info/RECORD,,