swarms-7.8.8-py3-none-any.whl → swarms-7.9.0-py3-none-any.whl
This diff shows the changes between publicly released versions of the package as they appear in their public registry; it is provided for informational purposes only.
- swarms/cli/onboarding_process.py +1 -3
- swarms/prompts/collaborative_prompts.py +177 -0
- swarms/structs/agent.py +434 -128
- swarms/structs/concurrent_workflow.py +70 -196
- swarms/structs/conversation.py +6 -0
- swarms/structs/csv_to_agent.py +1 -3
- swarms/structs/interactive_groupchat.py +319 -12
- swarms/structs/ma_utils.py +25 -6
- swarms/structs/mixture_of_agents.py +88 -113
- swarms/structs/swarm_router.py +148 -187
- swarms/telemetry/__init__.py +4 -22
- swarms/telemetry/log_executions.py +43 -0
- swarms/telemetry/main.py +63 -325
- swarms/tools/__init__.py +10 -0
- swarms/tools/base_tool.py +15 -6
- swarms/tools/mcp_client_call.py +508 -0
- swarms/tools/py_func_to_openai_func_str.py +0 -1
- swarms/utils/auto_download_check_packages.py +4 -3
- swarms/utils/formatter.py +130 -13
- swarms/utils/history_output_formatter.py +2 -0
- swarms/utils/litellm_wrapper.py +5 -1
- swarms/utils/output_types.py +1 -1
- swarms-7.9.0.dist-info/METADATA +626 -0
- {swarms-7.8.8.dist-info → swarms-7.9.0.dist-info}/RECORD +27 -25
- swarms-7.8.8.dist-info/METADATA +0 -2119
- {swarms-7.8.8.dist-info → swarms-7.9.0.dist-info}/LICENSE +0 -0
- {swarms-7.8.8.dist-info → swarms-7.9.0.dist-info}/WHEEL +0 -0
- {swarms-7.8.8.dist-info → swarms-7.9.0.dist-info}/entry_points.txt +0 -0
swarms/utils/formatter.py
CHANGED
@@ -1,6 +1,6 @@
 import threading
 import time
-from typing import Any, Callable, Dict, List
+from typing import Any, Callable, Dict, List, Optional
 
 from rich.console import Console
 from rich.live import Live
@@ -10,6 +10,23 @@ from rich.table import Table
 from rich.text import Text
 
 
+def choose_random_color():
+    import random
+
+    colors = [
+        "red",
+        "green",
+        "blue",
+        "yellow",
+        "magenta",
+        "cyan",
+        "white",
+    ]
+    random_color = random.choice(colors)
+
+    return random_color
+
+
 class Formatter:
     """
     A class for formatting and printing rich text to the console.
@@ -32,18 +49,8 @@ class Formatter:
             title (str, optional): The title of the panel. Defaults to "".
             style (str, optional): The style of the panel. Defaults to "bold blue".
         """
-
-
-        colors = [
-            "red",
-            "green",
-            "blue",
-            "yellow",
-            "magenta",
-            "cyan",
-            "white",
-        ]
-        random_color = random.choice(colors)
+        random_color = choose_random_color()
+
         panel = Panel(
             content, title=title, style=f"bold {random_color}"
         )
@@ -145,5 +152,115 @@ class Formatter:
         )
         time.sleep(delay)
 
+    def print_streaming_panel(
+        self,
+        streaming_response,
+        title: str = "🤖 Agent Streaming Response",
+        style: str = None,
+        collect_chunks: bool = False,
+        on_chunk_callback: Optional[Callable] = None,
+    ) -> str:
+        """
+        Display real-time streaming response using Rich Live and Panel.
+        Similar to the approach used in litellm_stream.py.
+
+        Args:
+            streaming_response: The streaming response generator from LiteLLM.
+            title (str): Title of the panel.
+            style (str): Style for the panel border (if None, will use random color).
+            collect_chunks (bool): Whether to collect individual chunks for conversation saving.
+            on_chunk_callback (Optional[Callable]): Callback function to call for each chunk.
+
+        Returns:
+            str: The complete accumulated response text.
+        """
+        # Get random color similar to non-streaming approach
+        random_color = choose_random_color()
+        panel_style = (
+            f"bold {random_color}" if style is None else style
+        )
+        text_style = (
+            "white"  # Make text white instead of random color
+        )
+
+        def create_streaming_panel(text_obj, is_complete=False):
+            """Create panel with proper text wrapping using Rich's built-in capabilities"""
+            panel_title = f"[white]{title}[/white]"
+            if is_complete:
+                panel_title += " [bold green]✅[/bold green]"
+
+            # Add blinking cursor if still streaming
+            display_text = Text.from_markup("")
+            display_text.append_text(text_obj)
+            if not is_complete:
+                display_text.append("▊", style="bold green blink")
+
+            panel = Panel(
+                display_text,
+                title=panel_title,
+                border_style=panel_style,
+                padding=(1, 2),
+                width=self.console.size.width,  # Rich handles wrapping automatically
+            )
+            return panel
+
+        # Create a Text object for streaming content
+        streaming_text = Text()
+        complete_response = ""
+        chunks_collected = []
+
+        # TRUE streaming with Rich's automatic text wrapping
+        with Live(
+            create_streaming_panel(streaming_text),
+            console=self.console,
+            refresh_per_second=20,
+        ) as live:
+            try:
+                for part in streaming_response:
+                    if (
+                        hasattr(part, "choices")
+                        and part.choices
+                        and part.choices[0].delta.content
+                    ):
+                        # Add ONLY the new chunk to the Text object with random color style
+                        chunk = part.choices[0].delta.content
+                        streaming_text.append(chunk, style=text_style)
+                        complete_response += chunk
+
+                        # Collect chunks if requested
+                        if collect_chunks:
+                            chunks_collected.append(chunk)
+
+                        # Call chunk callback if provided
+                        if on_chunk_callback:
+                            on_chunk_callback(chunk)
+
+                        # Update display with new text - Rich handles all wrapping automatically
+                        live.update(
+                            create_streaming_panel(
+                                streaming_text, is_complete=False
+                            )
+                        )
+
+                # Final update to show completion
+                live.update(
+                    create_streaming_panel(
+                        streaming_text, is_complete=True
+                    )
+                )
+
+            except Exception as e:
+                # Handle any streaming errors gracefully
+                streaming_text.append(
+                    f"\n[Error: {str(e)}]", style="bold red"
+                )
+                live.update(
+                    create_streaming_panel(
+                        streaming_text, is_complete=True
+                    )
+                )
+
+        return complete_response
+
 
 formatter = Formatter()
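For readers skimming the diff, here is a minimal, self-contained sketch (not from the package) of how the new print_streaming_panel method could be driven. The fake_stream helper and its SimpleNamespace chunks are invented stand-ins that only mimic the part.choices[0].delta.content shape the method inspects; the module-level formatter instance is the one created at the bottom of formatter.py above.

import time
from types import SimpleNamespace

from swarms.utils.formatter import formatter  # module-level instance shown above


def fake_stream(text, delay=0.05):
    """Yield LiteLLM-style chunks (choices[0].delta.content), one word at a time."""
    for word in text.split():
        delta = SimpleNamespace(content=word + " ")
        yield SimpleNamespace(choices=[SimpleNamespace(delta=delta)])
        time.sleep(delay)


chunks = []
full_text = formatter.print_streaming_panel(
    fake_stream("Streaming output rendered live in a Rich panel."),
    collect_chunks=True,               # per the diff, chunks are gathered internally
    on_chunk_callback=chunks.append,   # called once per streamed chunk
)
print(full_text == "".join(chunks))    # True: the return value is the accumulated text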
swarms/utils/history_output_formatter.py
CHANGED
@@ -23,6 +23,8 @@ def history_output_formatter(
         return yaml.safe_dump(conversation.to_dict(), sort_keys=False)
     elif type == "dict-all-except-first":
         return conversation.return_all_except_first()
+    elif type == "list-final":
+        return conversation.return_list_final()
     elif type == "str-all-except-first":
         return conversation.return_all_except_first_string()
     elif type == "dict-final":
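As a rough illustration of the new "list-final" branch, here is a hedged sketch: the StubConversation class and its message shape are invented for this example, and only the type dispatch and the conversation.return_list_final() call come from the diff (the keyword name type is inferred from the comparisons above; return_list_final is the method added to conversation.py in this release).

from swarms.utils.history_output_formatter import history_output_formatter


class StubConversation:
    """Minimal stand-in exposing only what the "list-final" branch calls."""

    def __init__(self, history):
        self.history = history

    def return_list_final(self):
        # Assumed behaviour: return just the final message, wrapped in a list.
        return [self.history[-1]]


conversation = StubConversation(
    [
        {"role": "user", "content": "Summarize the release."},
        {"role": "assistant", "content": "7.9.0 adds streaming panels."},
    ]
)

# type="list-final" now routes to conversation.return_list_final()
print(history_output_formatter(conversation, type="list-final"))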
swarms/utils/litellm_wrapper.py
CHANGED
@@ -449,8 +449,12 @@ class LiteLLM:
         # Make the completion call
         response = completion(**completion_params)
 
+        # Handle streaming response
+        if self.stream:
+            return response  # Return the streaming generator directly
+
         # Handle tool-based response
-        if self.tools_list_dictionary is not None:
+        elif self.tools_list_dictionary is not None:
             return self.output_for_tools(response)
         elif self.return_all is True:
             return response.model_dump()
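What the early return for self.stream implies for callers, sketched under assumptions: the wrapper now hands back the raw streaming generator instead of a parsed result, so the caller consumes it chunk by chunk. The direct litellm.completion() call below stands in for whatever the wrapper assembles internally, and the model name and prompt are placeholders.

from litellm import completion

from swarms.utils.formatter import formatter

# Placeholder model and prompt; stream=True mirrors the self.stream branch above.
stream = completion(
    model="gpt-4o-mini",
    messages=[{"role": "user", "content": "Say hello in one sentence."}],
    stream=True,
)

# The generator yields chunks exposing choices[0].delta.content, which is the
# exact shape print_streaming_panel (added in formatter.py above) iterates over.
final_text = formatter.print_streaming_panel(stream)
print(len(final_text), "characters streamed")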
swarms/utils/output_types.py
CHANGED