devduck 0.1.0__py3-none-any.whl → 0.1.1766644714__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of devduck might be problematic.

Files changed (37)
  1. devduck/__init__.py +1439 -483
  2. devduck/__main__.py +7 -0
  3. devduck/_version.py +34 -0
  4. devduck/agentcore_handler.py +76 -0
  5. devduck/test_redduck.py +0 -1
  6. devduck/tools/__init__.py +47 -0
  7. devduck/tools/_ambient_input.py +423 -0
  8. devduck/tools/_tray_app.py +530 -0
  9. devduck/tools/agentcore_agents.py +197 -0
  10. devduck/tools/agentcore_config.py +441 -0
  11. devduck/tools/agentcore_invoke.py +423 -0
  12. devduck/tools/agentcore_logs.py +320 -0
  13. devduck/tools/ambient.py +157 -0
  14. devduck/tools/create_subagent.py +659 -0
  15. devduck/tools/fetch_github_tool.py +201 -0
  16. devduck/tools/install_tools.py +409 -0
  17. devduck/tools/ipc.py +546 -0
  18. devduck/tools/mcp_server.py +600 -0
  19. devduck/tools/scraper.py +935 -0
  20. devduck/tools/speech_to_speech.py +850 -0
  21. devduck/tools/state_manager.py +292 -0
  22. devduck/tools/store_in_kb.py +187 -0
  23. devduck/tools/system_prompt.py +608 -0
  24. devduck/tools/tcp.py +263 -94
  25. devduck/tools/tray.py +247 -0
  26. devduck/tools/use_github.py +438 -0
  27. devduck/tools/websocket.py +498 -0
  28. devduck-0.1.1766644714.dist-info/METADATA +717 -0
  29. devduck-0.1.1766644714.dist-info/RECORD +33 -0
  30. {devduck-0.1.0.dist-info → devduck-0.1.1766644714.dist-info}/entry_points.txt +1 -0
  31. devduck-0.1.1766644714.dist-info/licenses/LICENSE +201 -0
  32. devduck/install.sh +0 -42
  33. devduck-0.1.0.dist-info/METADATA +0 -106
  34. devduck-0.1.0.dist-info/RECORD +0 -11
  35. devduck-0.1.0.dist-info/licenses/LICENSE +0 -21
  36. {devduck-0.1.0.dist-info → devduck-0.1.1766644714.dist-info}/WHEEL +0 -0
  37. {devduck-0.1.0.dist-info → devduck-0.1.1766644714.dist-info}/top_level.txt +0 -0
devduck/tools/tcp.py CHANGED
@@ -1,38 +1,60 @@
- """TCP tool for Strands Agents to function as both server and client.
+ """TCP tool for DevDuck agents with real-time streaming support.

- This module provides TCP server and client functionality for Strands Agents,
- allowing them to communicate over TCP/IP networks. The tool runs server operations
- in background threads, enabling concurrent communication without blocking the main agent.
+ This module provides TCP server and client functionality for DevDuck agents,
+ allowing them to communicate over TCP/IP networks with real-time response streaming.
+ The tool runs server operations in background threads, enabling concurrent
+ communication without blocking the main agent.

  Key Features:
- 1. TCP Server: Listen for incoming connections and process them with an agent
- 2. TCP Client: Connect to remote TCP servers and exchange messages
- 3. Background Processing: Server runs in a background thread
- 4. Per-Connection Agents: Creates a fresh agent for each client connection
+ 1. TCP Server: Listen for incoming connections and process them with a DevDuck agent
+ 2. Real-time Streaming: Responses stream to clients as they're generated (non-blocking)
+ 3. TCP Client: Connect to remote TCP servers and exchange messages
+ 4. Background Processing: Server runs in a background thread
+ 5. Per-Connection DevDuck: Creates a fresh DevDuck instance for each client connection
+ 6. Callback Handler: Uses Strands callback system for efficient streaming

- Usage with Strands Agent:
+ How Streaming Works:
+ -------------------
+ Instead of blocking until the full response is ready, this implementation uses
+ Strands' callback_handler mechanism to stream data as it's generated:

- ```python
- from strands import Agent
- from strands_tools import tcp
+ - Text chunks stream immediately as the model generates them
+ - Tool invocations are announced in real-time
+ - Tool results are sent as they complete
+ - No buffering delays - everything is instant
+
+ Usage with DevDuck Agent:

- agent = Agent(tools=[tcp])
+ ```python
+ from devduck import devduck

- # Start a TCP server
- result = agent.tool.tcp(
+ # Start a streaming TCP server (each connection gets its own DevDuck instance)
+ result = devduck.agent.tool.tcp(
  action="start_server",
  host="127.0.0.1",
  port=8000,
  system_prompt="You are a helpful TCP server assistant.",
  )

- # Connect to a TCP server as client
- result = agent.tool.tcp(
- action="client_send", host="127.0.0.1", port=8000, message="Hello, server!"
+ # Connect as a client and receive streaming responses
+ result = devduck.agent.tool.tcp(
+ action="client_send",
+ host="127.0.0.1",
+ port=8000,
+ message="What's 2+2?"
  )

  # Stop the TCP server
- result = agent.tool.tcp(action="stop_server", port=8000)
+ result = devduck.agent.tool.tcp(action="stop_server", port=8000)
+ ```
+
+ For testing with netcat:
+ ```bash
+ # Start server from devduck
+ devduck "start a tcp server on port 8000"
+
+ # Connect with netcat and chat in real-time
+ nc localhost 8000
  ```

  See the tcp function docstring for more details on configuration options and parameters.
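The new docstring describes the wire behavior only informally: the client sends a line of text and the server streams chunks back as the model produces them. A minimal sketch of a programmatic client equivalent to the netcat session, assuming the server from the example above is listening on 127.0.0.1:8000 (the 5-second idle timeout used to end reading is an illustration, not part of the package):

```python
# Minimal raw-socket client, equivalent to the netcat session above.
# Assumes the streaming server from the example is listening on 127.0.0.1:8000.
import socket

with socket.create_connection(("127.0.0.1", 8000), timeout=90) as sock:
    sock.sendall(b"What's 2+2?\n")
    sock.settimeout(5)  # stop reading once the stream goes quiet (illustrative)
    try:
        while True:
            chunk = sock.recv(4096)
            if not chunk:
                break  # server closed the connection
            print(chunk.decode(errors="replace"), end="", flush=True)
    except socket.timeout:
        pass  # no data for a while; treat the streamed response as finished
    sock.sendall(b"exit\n")  # ask the server to close this connection
```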
@@ -42,6 +64,7 @@ import logging
  import socket
  import threading
  import time
+ import os
  from typing import Any

  from strands import Agent, tool
@@ -52,44 +75,147 @@ logger = logging.getLogger(__name__)
  SERVER_THREADS: dict[int, dict[str, Any]] = {}


+ class TCPStreamingCallbackHandler:
+ """Callback handler that streams agent responses directly over TCP socket.
+
+ This handler implements real-time streaming of:
+ - Assistant responses (text chunks as they're generated)
+ - Tool invocations (names and status)
+ - Reasoning text (if enabled)
+ - Tool results (success/error status)
+
+ All data is sent immediately to the TCP client without buffering.
+ """
+
+ def __init__(self, client_socket: socket.socket):
+ """Initialize the streaming handler.
+
+ Args:
+ client_socket: The TCP socket to stream data to
+ """
+ self.socket = client_socket
+ self.tool_count = 0
+ self.previous_tool_use = None
+
+ def _send(self, data: str) -> None:
+ """Safely send data over TCP socket.
+
+ Args:
+ data: String data to send
+ """
+ try:
+ self.socket.sendall(data.encode())
+ except (BrokenPipeError, ConnectionResetError, OSError) as e:
+ logger.warning(f"Failed to send data over TCP: {e}")
+
+ def __call__(self, **kwargs: Any) -> None:
+ """Stream events to TCP socket in real-time.
+
+ Args:
+ **kwargs: Callback event data including:
+ - reasoningText (Optional[str]): Reasoning text to stream
+ - data (str): Text content to stream
+ - complete (bool): Whether this is the final chunk
+ - current_tool_use (dict): Current tool being invoked
+ - message (dict): Full message objects (for tool results)
+ """
+ reasoningText = kwargs.get("reasoningText", False)
+ data = kwargs.get("data", "")
+ complete = kwargs.get("complete", False)
+ current_tool_use = kwargs.get("current_tool_use", {})
+ message = kwargs.get("message", {})
+
+ # Skip reasoning text to keep output clean
+ if reasoningText:
+ self._send(reasoningText)
+
+ # Stream response text chunks
+ if data:
+ self._send(data)
+ if complete:
+ self._send("\n")
+
+ # Stream tool invocation notifications
+ if current_tool_use and current_tool_use.get("name"):
+ tool_name = current_tool_use.get("name", "Unknown tool")
+ if self.previous_tool_use != current_tool_use:
+ self.previous_tool_use = current_tool_use
+ self.tool_count += 1
+ self._send(f"\n🛠️ Tool #{self.tool_count}: {tool_name}\n")
+
+ # Stream tool results
+ if isinstance(message, dict) and message.get("role") == "user":
+ for content in message.get("content", []):
+ if isinstance(content, dict):
+ tool_result = content.get("toolResult")
+ if tool_result:
+ status = tool_result.get("status", "unknown")
+ if status == "success":
+ self._send(f"✅ Tool completed successfully\n")
+ else:
+ self._send(f"❌ Tool failed\n")
+
+
  def handle_client(
  client_socket: socket.socket,
  client_address: tuple,
  system_prompt: str,
  buffer_size: int,
- model: Any,
- parent_tools: list | None = None,
- callback_handler: Any = None,
- trace_attributes: dict | None = None,
  ) -> None:
- """Handle a client connection in the TCP server.
+ """Handle a client connection in the TCP server with streaming responses.

  Args:
  client_socket: The socket for the client connection
  client_address: The address of the client
  system_prompt: System prompt for creating a new agent for this connection
  buffer_size: Size of the message buffer
- model: Model instance from parent agent
- parent_tools: Tools inherited from the parent agent
- callback_handler: Callback handler from parent agent
- trace_attributes: Trace attributes from the parent agent
  """
  logger.info(f"Connection established with {client_address}")

- # Create a fresh agent instance for this client connection
- connection_agent = Agent(
- model=model,
- messages=[],
- tools=parent_tools or [],
- callback_handler=callback_handler,
- system_prompt=system_prompt,
- trace_attributes=trace_attributes or {},
- )
+ # Create a streaming callback handler for this connection
+ streaming_handler = TCPStreamingCallbackHandler(client_socket)
+
+ # Import DevDuck and create a new instance for this connection
+ # This gives us full DevDuck capabilities: system prompt building, self-healing, etc.
+ try:
+ from devduck import DevDuck
+
+ # Create a new DevDuck instance with auto_start_servers=False to avoid recursion
+ connection_devduck = DevDuck(auto_start_servers=False)
+
+ # Override the callback handler to enable streaming
+ if connection_devduck.agent:
+ connection_devduck.agent.callback_handler = streaming_handler
+
+ # Optionally override system prompt if provided
+ if system_prompt:
+ connection_devduck.agent.system_prompt += (
+ "\nCustom system prompt:" + system_prompt
+ )
+
+ connection_agent = connection_devduck.agent
+
+ except Exception as e:
+ logger.error(f"Failed to create DevDuck instance: {e}", exc_info=True)
+ # Fallback to basic Agent if DevDuck fails
+ from strands import Agent
+ from strands.models.ollama import OllamaModel
+
+ agent_model = OllamaModel(
+ host=os.getenv("OLLAMA_HOST", "http://localhost:11434"),
+ model_id=os.getenv("OLLAMA_MODEL", "qwen3:1.7b"),
+ temperature=1,
+ keep_alive="5m",
+ )
+
+ connection_agent = Agent(
+ model=agent_model,
+ tools=[],
+ system_prompt=system_prompt or "You are a helpful TCP server assistant.",
+ callback_handler=streaming_handler,
+ )

  try:
- # Send welcome message
- welcome_msg = "Welcome to Strands TCP Server! Send a message or 'exit' to close the connection.\n"
- client_socket.sendall(welcome_msg.encode())

  while True:
  # Receive data from the client
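The handler added above is an ordinary callable, so its streaming behavior can be illustrated in isolation with a socketpair and hand-built callback events. A sketch, assuming TCPStreamingCallbackHandler is importable from devduck.tools.tcp in this release and that the event dictionaries below mirror the Strands callback shapes the handler inspects:

```python
# Exercising the streaming handler with hand-built callback events.
# Import path assumed from this diff; the event dicts are illustrative.
import socket
from devduck.tools.tcp import TCPStreamingCallbackHandler

server_end, client_end = socket.socketpair()
handler = TCPStreamingCallbackHandler(server_end)

handler(data="The answer is 4")                   # text chunk, sent as-is
handler(data=".", complete=True)                  # final chunk, newline appended
handler(current_tool_use={"name": "calculator"})  # announces "🛠️ Tool #1: calculator"
handler(message={"role": "user",
                 "content": [{"toolResult": {"status": "success"}}]})  # "✅ Tool completed successfully"

print(client_end.recv(4096).decode())  # everything above arrives on the peer socket
server_end.close()
client_end.close()
```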
@@ -103,16 +229,24 @@ def handle_client(
  logger.info(f"Received from {client_address}: {message}")

  if message.lower() == "exit":
- client_socket.sendall(b"Connection closed by client request.\n")
+ streaming_handler._send("Connection closed by client request.\n")
  logger.info(f"Client {client_address} requested to exit")
  break

- # Process the message with the connection-specific agent
- response = connection_agent(message)
- response_text = str(response)
+ # Process the message - responses stream automatically via callback_handler
+ try:
+ streaming_handler._send(f"\n\n🦆: {message}\n\n")
+
+ # The agent call will stream responses directly to the socket
+ # through the callback_handler - no need to collect the response
+ connection_agent(message)

- # Send the response back to the client
- client_socket.sendall((response_text + "\n").encode())
+ # Send completion marker
+ streaming_handler._send("\n\n🦆\n\n")
+
+ except Exception as e:
+ logger.error(f"Error processing message: {e}")
+ streaming_handler._send(f"\n❌ Error processing message: {e}\n\n")

  except Exception as e:
  logger.error(f"Error handling client {client_address}: {e}")
@@ -127,9 +261,8 @@ def run_server(
  system_prompt: str,
  max_connections: int,
  buffer_size: int,
- parent_agent: Agent | None = None,
  ) -> None:
- """Run a TCP server that processes client requests with per-connection Strands agents.
+ """Run a TCP server that processes client requests with per-connection DevDuck instances.

  Args:
  host: Host address to bind the server
@@ -137,24 +270,12 @@ def run_server(
  system_prompt: System prompt for the server agents
  max_connections: Maximum number of concurrent connections
  buffer_size: Size of the message buffer
- parent_agent: Parent agent to inherit tools from
  """
  # Store server state
  SERVER_THREADS[port]["running"] = True
  SERVER_THREADS[port]["connections"] = 0
  SERVER_THREADS[port]["start_time"] = time.time()

- # Get model, tools, callback_handler and trace attributes from parent agent
- model = None
- callback_handler = None
- parent_tools = []
- trace_attributes = {}
- if parent_agent:
- model = parent_agent.model
- callback_handler = parent_agent.callback_handler
- parent_tools = list(parent_agent.tool_registry.registry.values())
- trace_attributes = parent_agent.trace_attributes
-
  # Create server socket
  server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
  server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
@@ -175,7 +296,7 @@ def run_server(
  client_socket, client_address = server_socket.accept()
  SERVER_THREADS[port]["connections"] += 1

- # Handle client in a new thread with a fresh agent
+ # Handle client in a new thread with a fresh DevDuck instance
  client_thread = threading.Thread(
  target=handle_client,
  args=(
@@ -183,10 +304,6 @@ def run_server(
  client_address,
  system_prompt,
  buffer_size,
- model,
- parent_tools,
- callback_handler,
- trace_attributes,
  ),
  )
  client_thread.daemon = True
@@ -199,6 +316,12 @@ def run_server(
  if SERVER_THREADS[port]["running"]:
  logger.error(f"Error accepting connection: {e}")

+ except OSError as e:
+ # Port conflict - handled upstream, no need for scary errors
+ if "Address already in use" in str(e):
+ logger.debug(f"Port {port} unavailable (handled upstream)")
+ else:
+ logger.error(f"Server error on {host}:{port}: {e}")
  except Exception as e:
  logger.error(f"Server error on {host}:{port}: {e}")
  finally:
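The new OSError branch stays quiet on "Address already in use" because the port conflict is expected to be resolved before run_server is called. A sketch of the kind of up-front availability check that "handled upstream" implies, assuming a plain bind probe is acceptable (the helper is illustrative, not part of devduck):

```python
# Best-effort check that a port can still be bound before starting a server.
import socket

def port_is_free(host: str, port: int) -> bool:
    """Return True if we can bind the port right now (best-effort probe)."""
    probe = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        probe.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        probe.bind((host, port))
        return True
    except OSError:
        return False  # typically EADDRINUSE: something is already listening
    finally:
        probe.close()

# Example: pick the first free port in a small range before starting the server.
port = next(p for p in range(8000, 8010) if port_is_free("127.0.0.1", p))
```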
@@ -221,21 +344,29 @@ def tcp(
  timeout: int = 90,
  buffer_size: int = 4096,
  max_connections: int = 5,
- agent: Any = None,
  ) -> dict:
- """Create and manage TCP servers and clients for network communication with connection handling.
+ """Create and manage TCP servers and clients with real-time streaming for DevDuck instances.

- This function provides TCP server and client functionality for Strands agents,
+ This function provides TCP server and client functionality for DevDuck agents,
  allowing them to communicate over TCP/IP networks. Servers run in background
- threads with a new, fresh agent instance for each client connection.
+ threads with a new, fresh DevDuck instance for each client connection.
+
+ **Real-time Streaming:** Unlike traditional blocking TCP responses, this
+ implementation streams data as it's generated using Strands' callback_handler
+ mechanism. Clients receive:
+ - Text chunks as the model generates them (no buffering)
+ - Tool invocation notifications in real-time
+ - Tool completion status immediately
+ - Reasoning text (if enabled)

  How It Works:
  ------------
  1. Server Mode:
  - Starts a TCP server in a background thread
- - Creates a dedicated agent for EACH client connection
- - Inherits tools from the parent agent
- - Processes client messages and returns responses
+ - Creates a dedicated DevDuck instance for EACH client connection
+ - Attaches a streaming callback handler to send data immediately
+ - Each DevDuck has full self-healing, hot-reload, and all tools
+ - Processes client messages with non-blocking streaming responses

  2. Client Mode:
  - Connects to a TCP server
@@ -249,17 +380,18 @@ def tcp(

  Common Use Cases:
  ---------------
- - Network service automation
- - Inter-agent communication
- - Remote command and control
- - API gateway implementation
- - IoT device management
+ - Network service automation with real-time feedback
+ - Inter-agent communication with streaming
+ - Remote command and control (instant responsiveness)
+ - API gateway implementation with SSE-like behavior
+ - IoT device management with live updates
+ - Interactive chat services over raw TCP

  Args:
  action: Action to perform (start_server, stop_server, get_status, client_send)
  host: Host address for server or client connection
  port: Port number for server or client connection
- system_prompt: System prompt for the server agent (for start_server)
+ system_prompt: System prompt for the server DevDuck instances (for start_server)
  message: Message to send to the TCP server (for client_send action)
  timeout: Connection timeout in seconds (default: 90)
  buffer_size: Size of the message buffer in bytes (default: 4096)
@@ -270,19 +402,32 @@ def tcp(

  Notes:
  - Server instances persist until explicitly stopped
- - Each client connection gets its own agent instance
- - Connection agents inherit tools from the parent agent
+ - Each client connection gets its own DevDuck instance
+ - Connection DevDuck instances have all standard DevDuck capabilities
+ - Streaming is automatic via callback_handler (no configuration needed)
  - Client connections are stateless
- """
- # Get parent agent from tool context if available
- parent_agent = agent
+ - Compatible with any TCP client (netcat, telnet, custom clients)
+
+ Examples:
+ # Start a streaming server
+ devduck("start a tcp server on port 9000")

+ # Test with netcat
+ nc localhost 9000
+ > what is 2+2?
+ [Streaming response appears in real-time]
+
+ # Send message from another devduck instance
+ devduck("send 'hello world' to tcp server at localhost:9000")
+ """
  if action == "start_server":
  # Check if server already running on this port
  if port in SERVER_THREADS and SERVER_THREADS[port].get("running", False):
  return {
  "status": "error",
- "content": [{"text": f"❌ Error: TCP Server already running on port {port}"}],
+ "content": [
+ {"text": f"❌ Error: TCP Server already running on port {port}"}
+ ],
  }

  # Create server thread
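Every branch of tcp() returns the same shape: a "status" string plus a "content" list of {"text": ...} entries, as in the error return above. A sketch of a caller flattening that result for display, assuming the devduck.agent.tool.tcp entry point shown in the module docstring:

```python
# Consuming the tool's return format: {"status": ..., "content": [{"text": ...}, ...]}.
# Assumes the devduck.agent.tool.tcp entry point from the module docstring above.
from devduck import devduck

result = devduck.agent.tool.tcp(action="get_status", port=8000)

print("status:", result["status"])
for item in result.get("content", []):
    # each content entry is a dict carrying a "text" field in this tool's responses
    print(" ", item.get("text", ""))
```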
@@ -295,7 +440,6 @@ def tcp(
  system_prompt,
  max_connections,
  buffer_size,
- parent_agent,
  ),
  )
  server_thread.daemon = True
@@ -307,7 +451,9 @@ def tcp(
  if not SERVER_THREADS[port].get("running", False):
  return {
  "status": "error",
- "content": [{"text": f"❌ Error: Failed to start TCP Server on {host}:{port}"}],
+ "content": [
+ {"text": f"❌ Error: Failed to start TCP Server on {host}:{port}"}
+ ],
  }

  return {
@@ -315,7 +461,14 @@ def tcp(
  "content": [
  {"text": f"✅ TCP Server started successfully on {host}:{port}"},
  {"text": f"System prompt: {system_prompt}"},
- {"text": "Server creates a new agent instance for each connection"},
+ {"text": "🌊 Real-time streaming enabled (non-blocking responses)"},
+ {
+ "text": "🦆 Server creates a new DevDuck instance for each connection"
+ },
+ {
+ "text": "🛠️ Each DevDuck has full self-healing, hot-reload, and all tools"
+ },
+ {"text": f"📝 Test with: nc localhost {port}"},
  ],
  }

@@ -323,7 +476,9 @@ def tcp(
  if port not in SERVER_THREADS or not SERVER_THREADS[port].get("running", False):
  return {
  "status": "error",
- "content": [{"text": f"❌ Error: No TCP Server running on port {port}"}],
+ "content": [
+ {"text": f"❌ Error: No TCP Server running on port {port}"}
+ ],
  }

  # Stop the server
@@ -350,7 +505,9 @@ def tcp(
  "status": "success",
  "content": [
  {"text": f"✅ TCP Server on port {port} stopped successfully"},
- {"text": f"Statistics: {connections} connections handled, uptime {uptime:.2f} seconds"},
+ {
+ "text": f"Statistics: {connections} connections handled, uptime {uptime:.2f} seconds"
+ },
  ],
  }

@@ -366,7 +523,9 @@ def tcp(
  if data.get("running", False):
  uptime = time.time() - data.get("start_time", time.time())
  connections = data.get("connections", 0)
- status_info.append(f"Port {port}: Running - {connections} connections, uptime {uptime:.2f}s")
+ status_info.append(
+ f"Port {port}: Running - {connections} connections, uptime {uptime:.2f}s"
+ )
  else:
  status_info.append(f"Port {port}: Stopped")

@@ -388,7 +547,9 @@ def tcp(
  if not message:
  return {
  "status": "error",
- "content": [{"text": "Error: No message provided for client_send action"}],
+ "content": [
+ {"text": "Error: No message provided for client_send action"}
+ ],
  }

  # Create client socket
@@ -426,12 +587,20 @@ def tcp(
  except TimeoutError:
  return {
  "status": "error",
- "content": [{"text": f"Error: Connection to {host}:{port} timed out after {timeout} seconds"}],
+ "content": [
+ {
+ "text": f"Error: Connection to {host}:{port} timed out after {timeout} seconds"
+ }
+ ],
  }
  except ConnectionRefusedError:
  return {
  "status": "error",
- "content": [{"text": f"Error: Connection to {host}:{port} refused - no server running on that port"}],
+ "content": [
+ {
+ "text": f"Error: Connection to {host}:{port} refused - no server running on that port"
+ }
+ ],
  }
  except Exception as e:
  return {
@@ -454,4 +623,4 @@ def tcp(
  f"start_server, stop_server, get_status, client_send"
  }
  ],
- }
+ }